context
stringlengths
2.52k
185k
gt
stringclasses
1 value
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography.Tests;
using System.Text;
using Xunit;

namespace System.Security.Cryptography.EcDsa.Tests
{
    /// <summary>
    /// Runs the shared ECDsa tests through the byte[]-based SignData/VerifyData overloads.
    /// </summary>
    public sealed class ECDsaTests_Array : ECDsaTests
    {
        protected override bool VerifyData(ECDsa ecdsa, byte[] data, int offset, int count, byte[] signature, HashAlgorithmName hashAlgorithm) =>
            ecdsa.VerifyData(data, offset, count, signature, hashAlgorithm);

        protected override byte[] SignData(ECDsa ecdsa, byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm) =>
            ecdsa.SignData(data, offset, count, hashAlgorithm);

        protected override void UseAfterDispose(ECDsa ecdsa, byte[] data, byte[] sig)
        {
            base.UseAfterDispose(ecdsa, data, sig);

            // The hash-based entry points must also reject a disposed instance.
            byte[] hash = new byte[32];
            Assert.Throws<ObjectDisposedException>(() => ecdsa.VerifyHash(hash, sig));
            Assert.Throws<ObjectDisposedException>(() => ecdsa.SignHash(hash));
        }

        [Theory, MemberData(nameof(RealImplementations))]
        public void SignData_InvalidArguments_Throws(ECDsa ecdsa)
        {
            AssertExtensions.Throws<ArgumentNullException>("data", () => ecdsa.SignData((byte[])null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentNullException>("data", () => ecdsa.SignData(null, -1, -1, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.SignData(new byte[0], -1, -1, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.SignData(new byte[0], 2, 1, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.SignData(new byte[0], 0, -1, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.SignData(new byte[0], 0, 1, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new byte[0], default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new byte[0], 0, 0, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new byte[10], 0, 10, new HashAlgorithmName("")));
            Assert.ThrowsAny<CryptographicException>(() => ecdsa.SignData(new byte[0], new HashAlgorithmName(Guid.NewGuid().ToString("N"))));
            Assert.ThrowsAny<CryptographicException>(() => ecdsa.SignData(new byte[0], 0, 0, new HashAlgorithmName(Guid.NewGuid().ToString("N"))));
        }

        [Theory, MemberData(nameof(RealImplementations))]
        public void VerifyData_InvalidArguments_Throws(ECDsa ecdsa)
        {
            AssertExtensions.Throws<ArgumentNullException>("data", () => ecdsa.VerifyData((byte[])null, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentNullException>("data", () => ecdsa.VerifyData(null, -1, -1, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentNullException>("signature", () => ecdsa.VerifyData(new byte[0], null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentNullException>("signature", () => ecdsa.VerifyData(new byte[0], 0, 0, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.VerifyData(new byte[0], -1, -1, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.VerifyData(new byte[0], 2, 1, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.VerifyData(new byte[0], 0, -1, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.VerifyData(new byte[0], 0, 1, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new byte[0], new byte[0], default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new byte[0], 0, 0, new byte[0], default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new byte[10], new byte[0], new HashAlgorithmName("")));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new byte[10], 0, 10, new byte[0], new HashAlgorithmName("")));
            Assert.ThrowsAny<CryptographicException>(() => ecdsa.VerifyData(new byte[0], new byte[0], new HashAlgorithmName(Guid.NewGuid().ToString("N"))));
            Assert.ThrowsAny<CryptographicException>(() => ecdsa.VerifyData(new byte[0], 0, 0, new byte[0], new HashAlgorithmName(Guid.NewGuid().ToString("N"))));
        }

        [Theory, MemberData(nameof(RealImplementations))]
        public void SignHash_InvalidArguments_Throws(ECDsa ecdsa)
        {
            AssertExtensions.Throws<ArgumentNullException>("hash", () => ecdsa.SignHash(null));
        }

        [Theory, MemberData(nameof(RealImplementations))]
        public void VerifyHash_InvalidArguments_Throws(ECDsa ecdsa)
        {
            AssertExtensions.Throws<ArgumentNullException>("hash", () => ecdsa.VerifyHash(null, null));
            AssertExtensions.Throws<ArgumentNullException>("signature", () => ecdsa.VerifyHash(new byte[0], null));
        }
    }

    /// <summary>
    /// Runs the shared ECDsa tests through the Stream-based SignData/VerifyData overloads.
    /// </summary>
    public sealed class ECDsaTests_Stream : ECDsaTests
    {
        protected override bool VerifyData(ECDsa ecdsa, byte[] data, int offset, int count, byte[] signature, HashAlgorithmName hashAlgorithm)
        {
            var stream = new MemoryStream(data, offset, count);
            bool result = ecdsa.VerifyData(stream, signature, hashAlgorithm);

            // The stream overload must consume the entire stream.
            Assert.Equal(stream.Length, stream.Position);
            return result;
        }

        protected override byte[] SignData(ECDsa ecdsa, byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm)
        {
            var stream = new MemoryStream(data, offset, count);
            byte[] result = ecdsa.SignData(stream, hashAlgorithm);

            // The stream overload must consume the entire stream.
            Assert.Equal(stream.Length, stream.Position);
            return result;
        }

        [Theory, MemberData(nameof(RealImplementations))]
        public void SignData_InvalidArguments_Throws(ECDsa ecdsa)
        {
            AssertExtensions.Throws<ArgumentNullException>("data", () => ecdsa.SignData((Stream)null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new MemoryStream(), default(HashAlgorithmName)));
            Assert.ThrowsAny<CryptographicException>(() => ecdsa.SignData(new MemoryStream(), new HashAlgorithmName(Guid.NewGuid().ToString("N"))));
        }

        [Theory, MemberData(nameof(RealImplementations))]
        public void VerifyData_InvalidArguments_Throws(ECDsa ecdsa)
        {
            AssertExtensions.Throws<ArgumentNullException>("data", () => ecdsa.VerifyData((Stream)null, null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentNullException>("signature", () => ecdsa.VerifyData(new MemoryStream(), null, default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new MemoryStream(), new byte[0], default(HashAlgorithmName)));
            AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new MemoryStream(), new byte[0], new HashAlgorithmName("")));
            Assert.ThrowsAny<CryptographicException>(() => ecdsa.VerifyData(new MemoryStream(), new byte[0], new HashAlgorithmName(Guid.NewGuid().ToString("N"))));
        }
    }

    /// <summary>
    /// Shared test logic for ECDsa; derived classes route SignData/VerifyData through a
    /// specific overload family (byte[] or Stream).
    /// </summary>
    public abstract partial class ECDsaTests : ECDsaTestsBase
    {
        protected bool VerifyData(ECDsa ecdsa, byte[] data, byte[] signature, HashAlgorithmName hashAlgorithm) =>
            VerifyData(ecdsa, data, 0, data.Length, signature, hashAlgorithm);

        protected abstract bool VerifyData(ECDsa ecdsa, byte[] data, int offset, int count, byte[] signature, HashAlgorithmName hashAlgorithm);

        protected byte[] SignData(ECDsa ecdsa, byte[] data, HashAlgorithmName hashAlgorithm) =>
            SignData(ecdsa, data, 0, data.Length, hashAlgorithm);

        protected abstract byte[] SignData(ECDsa ecdsa, byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm);

        public static IEnumerable<object[]> RealImplementations() =>
            new[]
            {
                new ECDsa[] { ECDsaFactory.Create() },
            };

        [Theory]
        [MemberData(nameof(RealImplementations))]
        public void UseAfterDispose_Import(ECDsa ecdsa)
        {
            ecdsa.ImportParameters(EccTestData.GetNistP256ReferenceKey());
            UseAfterDispose(ecdsa);
        }

        [Theory]
        [MemberData(nameof(RealImplementations))]
        public void UseAfterDispose_NewKey(ECDsa ecdsa)
        {
            UseAfterDispose(ecdsa);
        }

        private void UseAfterDispose(ECDsa ecdsa)
        {
            byte[] data = { 1 };
            byte[] sig;

            // Ensure the key is populated, then dispose it.
            using (ecdsa)
            {
                sig = SignData(ecdsa, data, HashAlgorithmName.SHA256);
            }

            // Disposing twice must be benign.
            ecdsa.Dispose();

            UseAfterDispose(ecdsa, data, sig);

            if (!(PlatformDetection.IsFullFramework && ecdsa.GetType().Name.EndsWith("Cng")))
            {
                Assert.Throws<ObjectDisposedException>(() => ecdsa.GenerateKey(ECCurve.NamedCurves.nistP256));
                Assert.Throws<ObjectDisposedException>(
                    () => ecdsa.ImportParameters(EccTestData.GetNistP256ReferenceKey()));
            }

            // Either set_KeySize or SignData should throw.
            Assert.Throws<ObjectDisposedException>(
                () =>
                {
                    ecdsa.KeySize = 384;
                    SignData(ecdsa, data, HashAlgorithmName.SHA256);
                });
        }

        protected virtual void UseAfterDispose(ECDsa ecdsa, byte[] data, byte[] sig)
        {
            Assert.Throws<ObjectDisposedException>(
                () => SignData(ecdsa, data, HashAlgorithmName.SHA256));
            Assert.Throws<ObjectDisposedException>(
                () => VerifyData(ecdsa, data, sig, HashAlgorithmName.SHA256));
        }

        [Theory]
        [MemberData(nameof(RealImplementations))]
        public void SignData_MaxOffset_ZeroLength_NoThrow(ECDsa ecdsa)
        {
            // Explicitly larger than Array.Empty
            byte[] data = new byte[10];

            byte[] signature = SignData(ecdsa, data, data.Length, 0, HashAlgorithmName.SHA256);
            Assert.True(VerifyData(ecdsa, Array.Empty<byte>(), signature, HashAlgorithmName.SHA256));
        }

        [Theory]
        [MemberData(nameof(RealImplementations))]
        public void VerifyData_MaxOffset_ZeroLength_NoThrow(ECDsa ecdsa)
        {
            // Explicitly larger than Array.Empty
            byte[] data = new byte[10];

            byte[] signature = SignData(ecdsa, Array.Empty<byte>(), HashAlgorithmName.SHA256);
            Assert.True(VerifyData(ecdsa, data, data.Length, 0, signature, HashAlgorithmName.SHA256));
        }

        [Theory]
        [MemberData(nameof(RealImplementations))]
        public void Roundtrip_WithOffset(ECDsa ecdsa)
        {
            byte[] data = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
            byte[] halfData = { 5, 6, 7, 8, 9 };

            byte[] dataSignature = SignData(ecdsa, data, 5, data.Length - 5, HashAlgorithmName.SHA256);
            byte[] halfDataSignature = SignData(ecdsa, halfData, HashAlgorithmName.SHA256);

            // Cross-feed the VerifyData calls to prove that both offsets work
            Assert.True(VerifyData(ecdsa, data, 5, data.Length - 5, halfDataSignature, HashAlgorithmName.SHA256));
            Assert.True(VerifyData(ecdsa, halfData, dataSignature, HashAlgorithmName.SHA256));
        }

        [Theory]
        [InlineData(256)]
        [InlineData(384)]
        [InlineData(521)]
        public void CreateKey(int keySize)
        {
            using (ECDsa ecdsa = ECDsaFactory.Create())
            {
                // Step 1, don't throw here.
                ecdsa.KeySize = keySize;

                // Step 2, ensure the key was generated without throwing.
                SignData(ecdsa, Array.Empty<byte>(), HashAlgorithmName.SHA256);
            }
        }

        public static IEnumerable<object[]> InteroperableSignatureConfigurations()
        {
            foreach (HashAlgorithmName hashAlgorithm in new[]
            {
                HashAlgorithmName.MD5,
                HashAlgorithmName.SHA1,
                HashAlgorithmName.SHA256,
                HashAlgorithmName.SHA384,
                HashAlgorithmName.SHA512,
            })
            {
                yield return new object[] { ECDsaFactory.Create(), hashAlgorithm };
            }
        }

        [Theory]
        [MemberData(nameof(InteroperableSignatureConfigurations))]
        public void SignVerify_InteroperableSameKeys_RoundTripsUnlessTampered(ECDsa ecdsa, HashAlgorithmName hashAlgorithm)
        {
            // Large enough to make hashing work through multiple iterations and not a
            // multiple of the 4KB buffer size the stream overloads use.
            byte[] dataArray = new byte[33333];

            // HashAlgorithm is IDisposable; dispose it deterministically.
            using (HashAlgorithm halg = CreateHashAlgorithm(hashAlgorithm))
            {
                List<byte[]> signatures = new List<byte[]>(2);

                // Compute a signature via SignData and via SignHash, then verify each
                // through both VerifyData and VerifyHash.
                //
                // Then, verify that verification fails if the signature is tampered with.
                signatures.Add(SignData(ecdsa, dataArray, hashAlgorithm));
                signatures.Add(ecdsa.SignHash(halg.ComputeHash(dataArray)));

                foreach (byte[] signature in signatures)
                {
                    Assert.True(VerifyData(ecdsa, dataArray, signature, hashAlgorithm), "Verify 1");
                    Assert.True(ecdsa.VerifyHash(halg.ComputeHash(dataArray), signature), "Verify 4");
                }

                // ECDSA signatures incorporate a random nonce, so signing the same data
                // twice should not produce identical signatures.
                int distinctSignatures = signatures.Distinct(new ByteArrayComparer()).Count();
                Assert.True(distinctSignatures == signatures.Count, "Signing should be randomized");

                foreach (byte[] signature in signatures)
                {
                    signature[signature.Length - 1] ^= 0xFF; // flip some bits

                    Assert.False(VerifyData(ecdsa, dataArray, signature, hashAlgorithm), "Verify Tampered 1");
                    Assert.False(ecdsa.VerifyHash(halg.ComputeHash(dataArray), signature), "Verify Tampered 4");
                }
            }
        }

        // Maps a HashAlgorithmName onto a concrete HashAlgorithm instance; caller disposes.
        private static HashAlgorithm CreateHashAlgorithm(HashAlgorithmName hashAlgorithm)
        {
            if (hashAlgorithm == HashAlgorithmName.MD5)
                return MD5.Create();
            if (hashAlgorithm == HashAlgorithmName.SHA1)
                return SHA1.Create();
            if (hashAlgorithm == HashAlgorithmName.SHA256)
                return SHA256.Create();
            if (hashAlgorithm == HashAlgorithmName.SHA384)
                return SHA384.Create();
            if (hashAlgorithm == HashAlgorithmName.SHA512)
                return SHA512.Create();

            throw new NotSupportedException("Hash algorithm not supported.");
        }

        // Structural equality for byte arrays, used to detect duplicate signatures.
        private class ByteArrayComparer : IEqualityComparer<byte[]>
        {
            public bool Equals(byte[] x, byte[] y)
            {
                return x.SequenceEqual(y);
            }

            public int GetHashCode(byte[] obj)
            {
                // djb2-style rolling hash.
                int h = 5381;

                foreach (byte b in obj)
                {
                    h = unchecked((h << 5) + h) ^ b;
                }

                return h;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
using static System.Linq.Expressions.CachedReflectionInfo;

namespace System.Linq.Expressions
{
    /// <summary>
    /// Represents an operation between an expression and a type.
    /// </summary>
    [DebuggerTypeProxy(typeof(Expression.TypeBinaryExpressionProxy))]
    public sealed class TypeBinaryExpression : Expression
    {
        private readonly Expression _expression;
        private readonly Type _typeOperand;
        // Either ExpressionType.TypeIs or ExpressionType.TypeEqual (see the factory
        // methods at the bottom of this file).
        private readonly ExpressionType _nodeKind;

        internal TypeBinaryExpression(Expression expression, Type typeOperand, ExpressionType nodeKind)
        {
            _expression = expression;
            _typeOperand = typeOperand;
            _nodeKind = nodeKind;
        }

        /// <summary>
        /// Gets the static type of the expression that this <see cref="Expression" /> represents.
        /// </summary>
        /// <returns>The <see cref="Type"/> that represents the static type of the expression.</returns>
        public sealed override Type Type
        {
            get { return typeof(bool); }
        }

        /// <summary>
        /// Returns the node type of this Expression. Extension nodes should return
        /// ExpressionType.Extension when overriding this method.
        /// </summary>
        /// <returns>The <see cref="ExpressionType"/> of the expression.</returns>
        public sealed override ExpressionType NodeType
        {
            get { return _nodeKind; }
        }

        /// <summary>
        /// Gets the expression operand of a type test operation.
        /// </summary>
        public Expression Expression
        {
            get { return _expression; }
        }

        /// <summary>
        /// Gets the type operand of a type test operation.
        /// </summary>
        public Type TypeOperand
        {
            get { return _typeOperand; }
        }

        #region Reduce TypeEqual

        // Rewrites a TypeEqual node into a semantically equivalent expression tree that
        // uses only simpler node kinds (constants, blocks, reference comparisons).
        internal Expression ReduceTypeEqual()
        {
            Type cType = Expression.Type;

            if (cType.GetTypeInfo().IsValueType)
            {
                if (cType.IsNullableType())
                {
                    // If the expression type is a nullable type, it will match if
                    // the value is not null and the type operand
                    // either matches or is its type argument (T to its T?).
                    if (cType.GetNonNullableType() != _typeOperand.GetNonNullableType())
                    {
                        // The block still evaluates Expression to preserve side effects.
                        return Expression.Block(Expression, Expression.Constant(false));
                    }
                    else
                    {
                        return Expression.NotEqual(Expression, Expression.Constant(null, Expression.Type));
                    }
                }
                else
                {
                    // For other value types (including Void), we can
                    // determine the result now
                    return Expression.Block(Expression, Expression.Constant(cType == _typeOperand.GetNonNullableType()));
                }
            }

            Debug.Assert(TypeUtils.AreReferenceAssignable(typeof(object), Expression.Type), "Expecting reference types only after this point.");

            // Can check the value right now for constants.
            if (Expression.NodeType == ExpressionType.Constant)
            {
                return ReduceConstantTypeEqual();
            }

            // expression is a ByVal parameter. Can safely reevaluate.
            var parameter = Expression as ParameterExpression;
            if (parameter != null && !parameter.IsByRef)
            {
                return ByValParameterTypeEqual(parameter);
            }

            // Create a temp so we only evaluate the left side once
            parameter = Expression.Parameter(typeof(object));

            return Expression.Block(
                new[] { parameter },
                Expression.Assign(parameter, Expression),
                ByValParameterTypeEqual(parameter)
            );
        }

        // Helper that is used when re-eval of LHS is safe.
        private Expression ByValParameterTypeEqual(ParameterExpression value)
        {
            Expression getType = Expression.Call(value, Object_GetType);

            // In remoting scenarios, obj.GetType() can return an interface.
            // But JIT32 applies an optimized codegen pattern for
            // "obj.GetType() == typeof(ISomething)" that does not account for that,
            // causing it to always return false.
            // We workaround this optimization by generating different, less optimal IL
            // if TypeOperand is an interface.
            if (_typeOperand.GetTypeInfo().IsInterface)
            {
                // Routing the GetType() result through a temp defeats the JIT's
                // pattern-match on the comparison.
                var temp = Expression.Parameter(typeof(Type));
                getType = Expression.Block(new[] { temp }, Expression.Assign(temp, getType), temp);
            }

            // We use reference equality when comparing to null for correctness
            // (don't invoke a user defined operator), and reference equality
            // on types for performance (so the JIT can optimize the IL).
            return Expression.AndAlso(
                Expression.ReferenceNotEqual(value, Expression.Constant(null)),
                Expression.ReferenceEqual(
                    getType,
                    Expression.Constant(_typeOperand.GetNonNullableType(), typeof(Type))
                )
            );
        }

        // Constant-folds TypeEqual when the operand is a ConstantExpression.
        private Expression ReduceConstantTypeEqual()
        {
            ConstantExpression ce = Expression as ConstantExpression;
            //TypeEqual(null, T) always returns false.
            if (ce.Value == null)
            {
                return Expression.Constant(false);
            }
            else
            {
                return Expression.Constant(_typeOperand.GetNonNullableType() == ce.Value.GetType());
            }
        }

        #endregion

        /// <summary>
        /// Dispatches to the specific visit method for this node type.
        /// </summary>
        protected internal override Expression Accept(ExpressionVisitor visitor)
        {
            return visitor.VisitTypeBinary(this);
        }

        /// <summary>
        /// Creates a new expression that is like this one, but using the
        /// supplied children. If all of the children are the same, it will
        /// return this expression.
        /// </summary>
        /// <param name="expression">The <see cref="Expression" /> property of the result.</param>
        /// <returns>This expression if no children changed, or an expression with the updated children.</returns>
        public TypeBinaryExpression Update(Expression expression)
        {
            if (expression == Expression)
            {
                return this;
            }
            if (NodeType == ExpressionType.TypeIs)
            {
                return Expression.TypeIs(expression, TypeOperand);
            }
            return Expression.TypeEqual(expression, TypeOperand);
        }
    }

    public partial class Expression
    {
        /// <summary>
        /// Creates a <see cref="TypeBinaryExpression"/>.
        /// </summary>
        /// <param name="expression">An <see cref="Expression"/> to set the <see cref="Expression"/> property equal to.</param>
        /// <param name="type">A <see cref="Type"/> to set the <see cref="TypeBinaryExpression.TypeOperand"/> property equal to.</param>
        /// <returns>A <see cref="TypeBinaryExpression"/> for which the <see cref="NodeType"/> property is equal to <see cref="TypeIs"/> and for which the <see cref="Expression"/> and <see cref="TypeBinaryExpression.TypeOperand"/> properties are set to the specified values.</returns>
        public static TypeBinaryExpression TypeIs(Expression expression, Type type)
        {
            RequiresCanRead(expression, nameof(expression));
            ContractUtils.RequiresNotNull(type, nameof(type));
            if (type.IsByRef) throw Error.TypeMustNotBeByRef(nameof(type));

            return new TypeBinaryExpression(expression, type, ExpressionType.TypeIs);
        }

        /// <summary>
        /// Creates a <see cref="TypeBinaryExpression"/> that compares run-time type identity.
        /// </summary>
        /// <param name="expression">An <see cref="Expression"/> to set the <see cref="Expression"/> property equal to.</param>
        /// <param name="type">A <see cref="Type"/> to set the <see cref="TypeBinaryExpression.TypeOperand"/> property equal to.</param>
        /// <returns>A <see cref="TypeBinaryExpression"/> for which the <see cref="NodeType"/> property is equal to <see cref="TypeEqual"/> and for which the <see cref="Expression"/> and <see cref="TypeBinaryExpression.TypeOperand"/> properties are set to the specified values.</returns>
        public static TypeBinaryExpression TypeEqual(Expression expression, Type type)
        {
            RequiresCanRead(expression, nameof(expression));
            ContractUtils.RequiresNotNull(type, nameof(type));
            if (type.IsByRef) throw Error.TypeMustNotBeByRef(nameof(type));

            return new TypeBinaryExpression(expression, type, ExpressionType.TypeEqual);
        }
    }
}
//
// HexEncoder.cs
//
// Author: Jeffrey Stedfast <jeff@xamarin.com>
//
// Copyright (c) 2013-2016 Xamarin Inc. (www.xamarin.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

using System;

using MimeKit.Utils;

namespace MimeKit.Encodings {
	/// <summary>
	/// Incrementally encodes content using a Uri hex encoding.
	/// </summary>
	/// <remarks>
	/// This is mostly meant for decoding parameter values encoded using
	/// the rules specified by rfc2184 and rfc2231.
	/// </remarks>
	public class HexEncoder : IMimeEncoder
	{
		// Uppercase hexadecimal digits, indexed by nibble value.
		static readonly byte[] hex_alphabet = new byte[16] {
			0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, // '0' -> '7'
			0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, // '8' -> 'F'
		};

		/// <summary>
		/// Initializes a new instance of the <see cref="MimeKit.Encodings.HexEncoder"/> class.
		/// </summary>
		/// <remarks>
		/// Creates a new hex encoder.
		/// </remarks>
		public HexEncoder ()
		{
		}

		/// <summary>
		/// Clone the <see cref="HexEncoder"/> with its current state.
		/// </summary>
		/// <remarks>
		/// Creates a new <see cref="HexEncoder"/> with exactly the same state as the current encoder.
		/// (The hex encoder is stateless, so a fresh instance suffices.)
		/// </remarks>
		/// <returns>A new <see cref="HexEncoder"/> with identical state.</returns>
		public IMimeEncoder Clone ()
		{
			return new HexEncoder ();
		}

		/// <summary>
		/// Gets the encoding.
		/// </summary>
		/// <remarks>
		/// Gets the encoding that the encoder supports.
		/// </remarks>
		/// <value>The encoding.</value>
		public ContentEncoding Encoding {
			get { return ContentEncoding.Default; }
		}

		/// <summary>
		/// Estimates the length of the output.
		/// </summary>
		/// <remarks>
		/// Estimates the number of bytes needed to encode the specified number of input bytes.
		/// Worst case is every byte escaping to 3 output bytes ("%XX").
		/// </remarks>
		/// <returns>The estimated output length.</returns>
		/// <param name="inputLength">The input length.</param>
		public int EstimateOutputLength (int inputLength)
		{
			return inputLength * 3;
		}

		// Validates the (input, startIndex, length, output) arguments shared by
		// Encode and Flush; throws the appropriate argument exception on failure.
		void ValidateArguments (byte[] input, int startIndex, int length, byte[] output)
		{
			if (input == null)
				throw new ArgumentNullException ("input");

			if (startIndex < 0 || startIndex > input.Length)
				throw new ArgumentOutOfRangeException ("startIndex");

			if (length < 0 || length > (input.Length - startIndex))
				throw new ArgumentOutOfRangeException ("length");

			if (output == null)
				throw new ArgumentNullException ("output");

			if (output.Length < EstimateOutputLength (length))
				throw new ArgumentException ("The output buffer is not large enough to contain the encoded input.", "output");
		}

		// Core encoder: copies attribute-safe bytes through unchanged and escapes
		// everything else as '%' + two uppercase hex digits. Returns the number of
		// bytes written. Caller guarantees the output buffer is large enough.
		static unsafe int Encode (byte* input, int length, byte* output)
		{
			if (length == 0)
				return 0;

			byte* inend = input + length;
			byte* outptr = output;
			byte* inptr = input;

			while (inptr < inend) {
				byte c = *inptr++;

				if (c.IsAttr ()) {
					*outptr++ = c;
				} else {
					*outptr++ = (byte) '%';
					*outptr++ = hex_alphabet[(c >> 4) & 0x0f];
					*outptr++ = hex_alphabet[c & 0x0f];
				}
			}

			return (int) (outptr - output);
		}

		/// <summary>
		/// Encodes the specified input into the output buffer.
		/// </summary>
		/// <remarks>
		/// <para>Encodes the specified input into the output buffer.</para>
		/// <para>The output buffer should be large enough to hold all of the
		/// encoded input. For estimating the size needed for the output buffer,
		/// see <see cref="EstimateOutputLength"/>.</para>
		/// </remarks>
		/// <returns>The number of bytes written to the output buffer.</returns>
		/// <param name="input">The input buffer.</param>
		/// <param name="startIndex">The starting index of the input buffer.</param>
		/// <param name="length">The length of the input buffer.</param>
		/// <param name="output">The output buffer.</param>
		/// <exception cref="System.ArgumentNullException">
		/// <para><paramref name="input"/> is <c>null</c>.</para>
		/// <para>-or-</para>
		/// <para><paramref name="output"/> is <c>null</c>.</para>
		/// </exception>
		/// <exception cref="System.ArgumentOutOfRangeException">
		/// <paramref name="startIndex"/> and <paramref name="length"/> do not specify
		/// a valid range in the <paramref name="input"/> byte array.
		/// </exception>
		/// <exception cref="System.ArgumentException">
		/// <para><paramref name="output"/> is not large enough to contain the encoded content.</para>
		/// <para>Use the <see cref="EstimateOutputLength"/> method to properly determine the
		/// necessary length of the <paramref name="output"/> byte array.</para>
		/// </exception>
		public int Encode (byte[] input, int startIndex, int length, byte[] output)
		{
			ValidateArguments (input, startIndex, length, output);

			unsafe {
				fixed (byte* inptr = input, outptr = output) {
					return Encode (inptr + startIndex, length, outptr);
				}
			}
		}

		/// <summary>
		/// Encodes the specified input into the output buffer, flushing any internal buffer state as well.
		/// </summary>
		/// <remarks>
		/// <para>Encodes the specified input into the output buffer, flushing any internal state as well.</para>
		/// <para>The output buffer should be large enough to hold all of the
		/// encoded input. For estimating the size needed for the output buffer,
		/// see <see cref="EstimateOutputLength"/>.</para>
		/// </remarks>
		/// <returns>The number of bytes written to the output buffer.</returns>
		/// <param name="input">The input buffer.</param>
		/// <param name="startIndex">The starting index of the input buffer.</param>
		/// <param name="length">The length of the input buffer.</param>
		/// <param name="output">The output buffer.</param>
		/// <exception cref="System.ArgumentNullException">
		/// <para><paramref name="input"/> is <c>null</c>.</para>
		/// <para>-or-</para>
		/// <para><paramref name="output"/> is <c>null</c>.</para>
		/// </exception>
		/// <exception cref="System.ArgumentOutOfRangeException">
		/// <paramref name="startIndex"/> and <paramref name="length"/> do not specify
		/// a valid range in the <paramref name="input"/> byte array.
		/// </exception>
		/// <exception cref="System.ArgumentException">
		/// <para><paramref name="output"/> is not large enough to contain the encoded content.</para>
		/// <para>Use the <see cref="EstimateOutputLength"/> method to properly determine the
		/// necessary length of the <paramref name="output"/> byte array.</para>
		/// </exception>
		public int Flush (byte[] input, int startIndex, int length, byte[] output)
		{
			// The encoder keeps no internal state, so Flush is just an Encode.
			return Encode (input, startIndex, length, output);
		}

		/// <summary>
		/// Resets the encoder.
		/// </summary>
		/// <remarks>
		/// Resets the state of the encoder. (No-op: the hex encoder is stateless.)
		/// </remarks>
		public void Reset ()
		{
		}
	}
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using GraphX.Measure;
using GraphX.PCL.Common.Interfaces;
using QuickGraph;

namespace GraphX.PCL.Logic.Algorithms.LayoutAlgorithms
{
    /*public abstract class ParameterizedLayoutAlgorithmBase<TVertex, TEdge, TGraph, TVertexInfo, TEdgeInfo, TParam> : ParameterizedLayoutAlgorithmBase<TVertex, TEdge, TGraph, TParam>
        where TVertex : class
        where TEdge : IEdge<TVertex>
        where TGraph : IVertexAndEdgeListGraph<TVertex, TEdge>
        where TParam : class, ILayoutParameters
    {
        protected ParameterizedLayoutAlgorithmBase( TGraph visitedGraph )
            : base( visitedGraph, null, null ) { }

        protected ParameterizedLayoutAlgorithmBase( TGraph visitedGraph, IDictionary<TVertex, Point> vertexPositions, TParam oldParameters )
            : base( visitedGraph, vertexPositions, oldParameters ) { }
    }*/

    /// <summary>
    /// Use this class as a base class for your layout algorithm
    /// if its parameter class has a default constructor.
    /// </summary>
    /// <typeparam name="TVertex">The type of the vertices.</typeparam>
    /// <typeparam name="TEdge">The type of the edges.</typeparam>
    /// <typeparam name="TGraph">The type of the graph.</typeparam>
    /// <typeparam name="TParam">The type of the parameters. Must be based on the LayoutParametersBase.</typeparam>
    public abstract class DefaultParameterizedLayoutAlgorithmBase<TVertex, TEdge, TGraph, TParam> : ParameterizedLayoutAlgorithmBase<TVertex, TEdge, TGraph, TParam>
        where TVertex : class
        where TEdge : IEdge<TVertex>
        where TGraph : IVertexAndEdgeListGraph<TVertex, TEdge>
        where TParam : class, ILayoutParameters, new()
    {
        protected DefaultParameterizedLayoutAlgorithmBase(TGraph visitedGraph)
            : base(visitedGraph)
        {
        }

        protected DefaultParameterizedLayoutAlgorithmBase(TGraph visitedGraph, IDictionary<TVertex, Point> vertexPositions, TParam oldParameters)
            : base(visitedGraph, vertexPositions, oldParameters)
        {
        }

        // The new() constraint makes default parameter creation trivial here.
        protected override TParam DefaultParameters => new TParam();
    }

    /// <typeparam name="TVertex">Type of the vertices.</typeparam>
    /// <typeparam name="TEdge">Type of the edges.</typeparam>
    /// <typeparam name="TGraph">Type of the graph.</typeparam>
    /// <typeparam name="TParam">Type of the parameters. Must be based on the LayoutParametersBase.</typeparam>
    public abstract class ParameterizedLayoutAlgorithmBase<TVertex, TEdge, TGraph, TParam> : LayoutAlgorithmBase<TVertex, TEdge, TGraph>, IParameterizedLayoutAlgorithm<TParam>
        where TVertex : class
        where TEdge : IEdge<TVertex>
        where TGraph : IVertexAndEdgeListGraph<TVertex, TEdge>
        where TParam : class, ILayoutParameters
    {
        #region Properties

        /// <summary>
        /// Parameters of the algorithm. For more information see <see cref="LayoutParametersBase"/>.
        /// </summary>
        public TParam Parameters { get; protected set; }

        public ILayoutParameters GetParameters()
        {
            return Parameters;
        }

        #endregion

        #region Constructors

        protected ParameterizedLayoutAlgorithmBase( TGraph visitedGraph )
            : this( visitedGraph, null, null ) { }

        protected ParameterizedLayoutAlgorithmBase( TGraph visitedGraph, IDictionary<TVertex, Point> vertexPositions, TParam oldParameters )
            : base( visitedGraph, vertexPositions )
        {
            InitParameters( oldParameters );
            //TraceSource = new TraceSource( "LayoutAlgorithm", SourceLevels.All );
        }

        #endregion

        #region Initializers

        protected abstract TParam DefaultParameters { get; }

        /// <summary>
        /// Ensures that all positions generated by InitializeWithRandomPositions() method will be unique.
        /// Can lead to excessive CPU usage for large amount of vertices. Default value is False. True for LinLog.
        /// </summary>
        protected bool EnsureUniqueRandomInitialPositions { get; set; }

        /// <summary>
        /// Initializes the parameters (cloning or creating new parameter object with default values).
        /// </summary>
        /// <param name="oldParameters">Parameters from a previous layout. If it is null,
        /// the parameters will be set to the default ones.</param>
        protected void InitParameters( TParam oldParameters )
        {
            if (oldParameters == null)
                Parameters = DefaultParameters;
            else
            {
                // Clone so that mutations during this run don't affect the caller's copy.
                Parameters = (TParam)oldParameters.Clone();
            }
        }

        /// <summary>
        /// Initializes the positions of the vertices. Assign a random position inside the 'bounding box' to the vertices without positions.
        /// It does NOT modify the position of the other vertices.
        ///
        /// It generates an <code>IterationEnded</code> event.
        ///
        /// Bounding box:
        /// x coordinates: double.Epsilon - <code>width</code>
        /// y coordinates: double.Epsilon - <code>height</code>
        /// </summary>
        /// <param name="width">Width of the bounding box.</param>
        /// <param name="height">Height of the bounding box.</param>
        protected virtual void InitializeWithRandomPositions( double width, double height )
        {
            InitializeWithRandomPositions( width, height, 0, 0 );
        }

        /// <summary>
        /// Initializes the positions of the vertices. Assign a random position inside the 'bounding box' to the vertices without positions.
        /// It does NOT modify the position of the other vertices.
        ///
        /// It generates an <code>IterationEnded</code> event.
        ///
        /// Bounding box:
        /// x coordinates: double.Epsilon - <code>width</code>
        /// y coordinates: double.Epsilon - <code>height</code>
        /// </summary>
        /// <param name="width">Width of the bounding box.</param>
        /// <param name="height">Height of the bounding box.</param>
        /// <param name="translate_x">Translates the generated x coordinate.</param>
        /// <param name="translate_y">Translates the generated y coordinate.</param>
        protected virtual void InitializeWithRandomPositions( double width, double height, double translate_x, double translate_y )
        {
            // Seeded Random keeps layout runs reproducible for a given Parameters.Seed.
            var rnd = new Random(Parameters.Seed);

            //initialize with random position
            foreach ( TVertex v in VisitedGraph.Vertices )
            {
                //for vertices without assigned position
                if ( !VertexPositions.ContainsKey( v ) )
                {
                    if (EnsureUniqueRandomInitialPositions)
                    {
                        // Re-roll until the point is not already taken.
                        // NOTE(review): O(n) scan per candidate; can be expensive for
                        // large graphs (as the property doc above warns).
                        Point newPoint;
                        do
                        {
                            newPoint = new Point(
                                Math.Max(double.Epsilon, rnd.NextDouble()*width + translate_x),
                                Math.Max(double.Epsilon, rnd.NextDouble()*height + translate_y));
                        } while (VertexPositions.Values.Contains(newPoint));
                        VertexPositions[v] = newPoint;
                    }
                    else
                    {
                        VertexPositions[v] = new Point(
                            Math.Max(double.Epsilon, rnd.NextDouble()*width + translate_x),
                            Math.Max(double.Epsilon, rnd.NextDouble()*height + translate_y));
                    }
                }
            }
        }

        protected virtual void NormalizePositions()
        {
            NormalizePositions( VertexPositions );
        }

        /// <summary>
        /// Translates all positions so the top-left corner of their bounding box is the origin.
        /// </summary>
        protected static void NormalizePositions( IDictionary<TVertex, Point> vertexPositions )
        {
            if ( vertexPositions == null || vertexPositions.Count == 0 )
                return;

            //get the topLeft position
            // NOTE(review): float.PositiveInfinity implicitly widens to double here;
            // double.PositiveInfinity would be clearer.
            var topLeft = new Point( float.PositiveInfinity, float.PositiveInfinity );
            foreach ( var pos in vertexPositions.Values.ToArray() )
            {
                topLeft.X = Math.Min( topLeft.X, pos.X );
                topLeft.Y = Math.Min( topLeft.Y, pos.Y );
            }

            //translate with the topLeft position
            // (ToArray snapshot allows mutating the dictionary while iterating)
            foreach ( var v in vertexPositions.ToArray())
            {
                var pos = v.Value;
                pos.X -= topLeft.X;
                pos.Y -= topLeft.Y;
                vertexPositions[v.Key] = pos;
            }
        }

        #endregion
    }
}
/* Azure Media Services REST API v2 Function This function returns media analytics from an asset. Input: { "faceRedaction" : { "assetId" : "nb:cid:UUID:88432c30-cb4a-4496-88c2-b2a05ce9033b", // Optional, Id of the source asset that contains media analytics (face redaction) "deleteAsset" : true, // Optional, delete the asset once data has been read from it "copyToContainer" : "jpgfaces" // Optional, to copy the faces (jpg files) to a specific container in the same storage account. Use lowercases as this is the container name and there are restrictions. Used as a prefix, as date is added at the end (yyyyMMdd) "copyToContainerAccountName" : "jhggjgghggkj" // storage account name. optional. if not provided, ams storage account is used "copyToContainerAccountKey" "" // storage account key. }, "motionDetection" : { "assetId" : "nb:cid:UUID:88432c30-cb4a-4496-88c2-b2a05ce9033b", // Optional, Id of the source asset that contains media analytics (motion detection) "deleteAsset" : true, // Optional, delete the asset once data has been read from it }, "ocr" : { "assetId" : "nb:cid:UUID:88432c30-cb4a-4496-88c2-b2a05ce9033b", // Optional, Id of the source asset that contains media analytics (ocr) "deleteAsset" : true, // Optional, delete the asset once data has been read from it }, "videoAnnotation" : { "assetId" : "nb:cid:UUID:88432c30-cb4a-4496-88c2-b2a05ce9033b", // Optional, Id of the source asset that contains the MES thumbnails "deleteAsset" : true, // Optional, delete the asset once data has been read from it }, "contentModeration" : { "assetId" : "nb:cid:UUID:88432c30-cb4a-4496-88c2-b2a05ce9033b", // Optional, Id of the source asset that contains "deleteAsset" : true, // Optional, delete the asset once data has been read from it }, "mesThumbnails" : { "assetId" : "nb:cid:UUID:88432c30-cb4a-4496-88c2-b2a05ce9033b", // Optional, Id of the source asset that contains media analytics (face redaction) "deleteAsset" : true, // Optional, delete the asset once data has been read 
from it "copyToContainer" : "thumbnails" // Optional, to copy the thumbnails (png files) to a specific container in the same storage account. Use lowercases as this is the container name and there are restrictions. Used as a prefix, as date is added at the end (yyyyMMdd) "copyToContainerAccountName" : "jhggjgghggkj" // storage account name. optional. if not provided, ams storage account is used "copyToContainerAccountKey" "" // storage account key }, "timeOffset" :"00:01:00", // optional, offset to add to data from face redaction, ocr, video annotation (used for live analytics) } Output: { "faceRedaction" : { "json" : "", // the serialized json of the face redaction "jsonOffset" : "", // the serialized json of the face redaction with offset "jpgFaces":[ { "id" :24, "fileId": "nb:cid:UUID:a93464ae-cbd5-4e63-9459-a3e2cf869f0e", "fileName": "ArchiveTopBitrate_video_800000_thumb000024.jpg", "url" : "http://xpouyatdemo.streaming.mediaservices.windows.net/903f9261-d745-48aa-8dfe-ebcd6e6128d6/ArchiveTopBitrate_video_800000_thumb000024.jpg" } ] "pathUrl" : "", // the path to the asset if asset is published }, "pngThumbnails":[ { "id" :24, "fileId": "nb:cid:UUID:a93464ae-cbd5-4e63-9459-a3e2cf869f0e", "fileName": "ArchiveTopBitrate_video_800000_thumb000024.jpg", "url" : "http://xpouyatdemo.streaming.mediaservices.windows.net/903f9261-d745-48aa-8dfe-ebcd6e6128d6/ArchiveTopBitrate_video_800000_thumb000024.jpg" } ] "pathUrl" : "", // the path to the asset if asset is published }, "motionDetection": { "json" : "", // the serialized json of the face redaction "jsonOffset" : "" // the serialized json of the face redaction with offset }, "ocr": { "json" : "", // the serialized json of the Ocr "jsonOffset" : "" // the serialized json of Ocr with offset }, "videoAnnotation": { "json" : "", // the serialized json of the Video Annotator "jsonOffset" : "" // the serialized json of Video Annotator with offset }, "contentModeration": { "json" : "", // the serialized json of the Content 
--- Reviewer notes (documentation only) ---
Every analytics section below follows one pattern: resolve the asset by Id, read the
.JSON result file (MediaServicesHelper.ReturnContent), optionally shift every fragment
"start" by the requested timeOffset converted into the document's "timescale" units,
optionally copy generated images (jpg faces / png thumbnails) to a storage container,
and finally delete the source asset when "deleteAsset" is true.
When a deletion is requested while blob copies are still pending, the code polls the
copy status every 3 seconds until all copies finish, then deletes the asset.
NOTE(review): unlike the other sections, "mesThumbnails" deletes its asset without
requiring that any JSON was read - presumably intentional (thumbnails produce no
JSON result file), but confirm against the jobs that feed this function.
-------------------------------------------
Moderation "jsonOffset" : "" // the serialized json of Content Moderation with offset } } */ using System; using System.Net; using System.Net.Http; using Newtonsoft.Json; using Microsoft.WindowsAzure.MediaServices.Client; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.WindowsAzure.Storage.Blob; using Microsoft.Azure.WebJobs; using Newtonsoft.Json.Linq; using Microsoft.Azure.WebJobs.Host; namespace media_functions_for_logic_app { public static class return_analytics { // Field for service context. private static CloudMediaContext _context = null; [FunctionName("return-analytics")] public static async Task<object> Run([HttpTrigger(WebHookType = "genericJson")]HttpRequestMessage req, TraceWriter log) { { log.Info($"Webhook was triggered!"); // Init variables string pathUrl = ""; string jsonFaceRedaction = ""; dynamic jpgFaces = new JArray() as dynamic; dynamic objFaceDetection = new JObject(); dynamic objFaceDetectionOffset = new JObject(); dynamic pngThumbnails = new JArray() as dynamic; string prefixpng = ""; string jsonMotionDetection = ""; dynamic objMotionDetection = new JObject(); dynamic objMotionDetectionOffset = new JObject(); string jsonOcr = ""; dynamic objOcr = new JObject(); dynamic objOcrOffset = new JObject(); string jsonAnnotation = ""; dynamic objAnnotation = new JObject(); dynamic objAnnotationOffset = new JObject(); string jsonModeration = ""; dynamic objModeration = new JObject(); dynamic objModerationOffset = new JObject(); string copyToContainer = ""; string prefixjpg = ""; string targetContainerUri = ""; TimeSpan timeOffset = new TimeSpan(0); string jsonContent = await req.Content.ReadAsStringAsync(); dynamic data = JsonConvert.DeserializeObject(jsonContent); var attachedstoragecred = KeyHelper.ReturnStorageCredentials(); log.Info(jsonContent); MediaServicesCredentials amsCredentials = new MediaServicesCredentials(); log.Info($"Using Azure Media Service Rest API Endpoint : 
{amsCredentials.AmsRestApiEndpoint}"); try { AzureAdTokenCredentials tokenCredentials = new AzureAdTokenCredentials(amsCredentials.AmsAadTenantDomain, new AzureAdClientSymmetricKey(amsCredentials.AmsClientId, amsCredentials.AmsClientSecret), AzureEnvironments.AzureCloudEnvironment); AzureAdTokenProvider tokenProvider = new AzureAdTokenProvider(tokenCredentials); _context = new CloudMediaContext(amsCredentials.AmsRestApiEndpoint, tokenProvider); // Offset value ? if (data.timeOffset != null) // let's store the offset { timeOffset = TimeSpan.Parse((string)data.timeOffset); } // // FACE REDACTION // if (data.faceRedaction != null && data.faceRedaction.assetId != null) { List<CloudBlob> listJPGCopies = new List<CloudBlob>(); // Get the asset string assetid = data.faceRedaction.assetId; var outputAsset = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault(); if (outputAsset == null) { log.Info($"Asset not found {assetid}"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Asset not found" }); } var jsonFile = outputAsset.AssetFiles.Where(a => a.Name.ToUpper().EndsWith(".JSON")).FirstOrDefault(); var jpgFiles = outputAsset.AssetFiles.Where(a => a.Name.ToUpper().EndsWith(".JPG")); Uri publishurl = MediaServicesHelper.GetValidOnDemandPath(_context, outputAsset); if (publishurl != null) { pathUrl = publishurl.ToString(); } else { log.Info($"Asset not published"); } // Let's copy the JPG faces if (data.faceRedaction.copyToContainer != null) { copyToContainer = data.faceRedaction.copyToContainer + DateTime.UtcNow.ToString("yyyyMMdd"); // let's copy JPG to a container prefixjpg = outputAsset.Uri.Segments[1] + "-"; log.Info($"prefixjpg {prefixjpg}"); string storname = amsCredentials.StorageAccountName; string storkey = amsCredentials.StorageAccountKey; if (outputAsset.StorageAccountName != amsCredentials.StorageAccountName) { if (attachedstoragecred.ContainsKey(outputAsset.StorageAccountName)) // asset is using another storage than default but we have 
the key { storname = outputAsset.StorageAccountName; storkey = attachedstoragecred[storname]; } else // we don't have the key for that storage { log.Info($"Face redaction Asset is in {outputAsset.StorageAccountName} and key is not provided in MediaServicesAttachedStorageCredentials application settings"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Storage key is missing" }); } } var sourceContainer = CopyBlobHelpers.GetCloudBlobContainer(storname, storkey, outputAsset.Uri.Segments[1]); CloudBlobContainer targetContainer; if (data.faceRedaction.copyToContainerAccountName != null) { // copy to a specific storage account targetContainer = CopyBlobHelpers.GetCloudBlobContainer((string)data.faceRedaction.copyToContainerAccountName, (string)data.faceRedaction.copyToContainerAccountKey, copyToContainer); } else { // copy to ams storage account targetContainer = CopyBlobHelpers.GetCloudBlobContainer(amsCredentials.StorageAccountName, amsCredentials.StorageAccountKey, copyToContainer); } listJPGCopies = await CopyBlobHelpers.CopyFilesAsync(sourceContainer, targetContainer, prefixjpg, "jpg", log); targetContainerUri = targetContainer.Uri.ToString(); } foreach (IAssetFile file in jpgFiles) { string index = file.Name.Substring(file.Name.Length - 10, 6); int index_i = 0; if (int.TryParse(index, out index_i)) { dynamic entry = new JObject(); entry.id = index_i; entry.fileId = file.Id; entry.fileName = file.Name; if (copyToContainer != "") { entry.url = targetContainerUri + "/" + prefixjpg + file.Name; } else if (!string.IsNullOrEmpty(pathUrl)) { entry.url = pathUrl + file.Name; } jpgFaces.Add(entry); } } if (jsonFile != null) { jsonFaceRedaction = MediaServicesHelper.ReturnContent(jsonFile); objFaceDetection = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonFaceRedaction); objFaceDetectionOffset = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonFaceRedaction); if (timeOffset.Ticks != 0) // Let's add the offset { foreach (var frag in 
objFaceDetectionOffset.fragments) { frag.start = ((long)(frag.start)) + (long)((((double)timeOffset.Ticks / (double)TimeSpan.TicksPerSecond) * (double)objFaceDetectionOffset.timescale)); } } } if (jsonFaceRedaction != "" && data.faceRedaction.deleteAsset != null && ((bool)data.faceRedaction.deleteAsset)) // If asset deletion was asked { // let's wait for the copy to finish before deleting the asset.. if (listJPGCopies.Count > 0) { log.Info("JPG Copy with asset deletion was asked. Checking copy status..."); bool continueLoop = true; while (continueLoop) { listJPGCopies = listJPGCopies.Where(r => r.CopyState.Status == CopyStatus.Pending).ToList(); if (listJPGCopies.Count == 0) { continueLoop = false; } else { log.Info("JPG Copy not finished. Waiting 3s..."); Task.Delay(TimeSpan.FromSeconds(3d)).Wait(); listJPGCopies.ForEach(r => r.FetchAttributes()); } } } outputAsset.Delete(); } } // // MES Thumbnails // if (data.mesThumbnails != null && data.mesThumbnails.assetId != null) { List<CloudBlob> listPNGCopies = new List<CloudBlob>(); // Get the asset string assetid = data.mesThumbnails.assetId; var outputAsset = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault(); if (outputAsset == null) { log.Info($"Asset not found {assetid}"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Asset not found" }); } var pngFiles = outputAsset.AssetFiles.Where(a => a.Name.ToUpper().EndsWith(".PNG")); Uri publishurl = MediaServicesHelper.GetValidOnDemandPath(_context, outputAsset); if (publishurl != null) { pathUrl = publishurl.ToString(); } else { log.Info($"Asset not published"); } // Let's copy the PNG Thumbnails if (data.mesThumbnails.copyToContainer != null) { copyToContainer = data.mesThumbnails.copyToContainer + DateTime.UtcNow.ToString("yyyyMMdd"); // let's copy PNG to a container prefixpng = outputAsset.Uri.Segments[1] + "-"; log.Info($"prefixpng {prefixpng}"); string storname = amsCredentials.StorageAccountName; string storkey = 
amsCredentials.StorageAccountKey; if (outputAsset.StorageAccountName != amsCredentials.StorageAccountName) { if (attachedstoragecred.ContainsKey(outputAsset.StorageAccountName)) // asset is using another storage than default but we have the key { storname = outputAsset.StorageAccountName; storkey = attachedstoragecred[storname]; } else // we don't have the key for that storage { log.Info($"MES Thumbnails Asset is in {outputAsset.StorageAccountName} and key is not provided in MediaServicesAttachedStorageCredentials application settings"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Storage key is missing" }); } } var sourceContainer = CopyBlobHelpers.GetCloudBlobContainer(storname, storkey, outputAsset.Uri.Segments[1]); CloudBlobContainer targetContainer; if (data.mesThumbnails.copyToContainerAccountName != null) { // copy to a specific storage account targetContainer = CopyBlobHelpers.GetCloudBlobContainer((string)data.mesThumbnails.copyToContainerAccountName, (string)data.mesThumbnails.copyToContainerAccountKey, copyToContainer); } else { // copy to ams storage account targetContainer = CopyBlobHelpers.GetCloudBlobContainer(amsCredentials.StorageAccountName, amsCredentials.StorageAccountKey, copyToContainer); } listPNGCopies = await CopyBlobHelpers.CopyFilesAsync(sourceContainer, targetContainer, prefixpng, "png", log); targetContainerUri = targetContainer.Uri.ToString(); } foreach (IAssetFile file in pngFiles) { string index = file.Name.Substring(file.Name.Length - 10, 6); int index_i = 0; if (int.TryParse(index, out index_i)) { dynamic entry = new JObject(); entry.id = index_i; entry.fileId = file.Id; entry.fileName = file.Name; if (copyToContainer != "") { entry.url = targetContainerUri + "/" + prefixpng + file.Name; } else if (!string.IsNullOrEmpty(pathUrl)) { entry.url = pathUrl + file.Name; } pngThumbnails.Add(entry); } } if (data.mesThumbnails.deleteAsset != null && ((bool)data.mesThumbnails.deleteAsset)) { // If asset deletion was 
asked // let's wait for the copy to finish before deleting the asset.. if (listPNGCopies.Count > 0) { log.Info("PNG Copy with asset deletion was asked. Checking copy status..."); bool continueLoop = true; while (continueLoop) { listPNGCopies = listPNGCopies.Where(r => r.CopyState.Status == CopyStatus.Pending).ToList(); if (listPNGCopies.Count == 0) { continueLoop = false; } else { log.Info("PNG Copy not finished. Waiting 3s..."); Task.Delay(TimeSpan.FromSeconds(3d)).Wait(); listPNGCopies.ForEach(r => r.FetchAttributes()); } } } outputAsset.Delete(); } } // // MOTION DETECTION // if (data.motionDetection != null && data.motionDetection.assetId != null) { // Get the asset string assetid = data.motionDetection.assetId; var outputAsset = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault(); if (outputAsset == null) { log.Info($"Asset not found {assetid}"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Asset not found" }); } var jsonFile = outputAsset.AssetFiles.Where(a => a.Name.ToUpper().EndsWith(".JSON")).FirstOrDefault(); if (jsonFile != null) { jsonMotionDetection = MediaServicesHelper.ReturnContent(jsonFile); objMotionDetection = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonMotionDetection); objMotionDetectionOffset = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonMotionDetection); if (timeOffset.Ticks != 0) // Let's add the offset { foreach (var frag in objMotionDetectionOffset.fragments) { frag.start = ((long)(frag.start)) + (long)((((double)timeOffset.Ticks / (double)TimeSpan.TicksPerSecond) * (double)objMotionDetectionOffset.timescale)); } } } if (jsonMotionDetection != "" && data.motionDetection.deleteAsset != null && ((bool)data.motionDetection.deleteAsset)) // If asset deletion was asked { outputAsset.Delete(); } } // // OCR // if (data.ocr != null && data.ocr.assetId != null) { // Get the asset string assetid = data.ocr.assetId; var outputAsset = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault(); if 
(outputAsset == null) { log.Info($"Asset not found {assetid}"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Asset not found" }); } var jsonFile = outputAsset.AssetFiles.Where(a => a.Name.ToUpper().EndsWith(".JSON")).FirstOrDefault(); if (jsonFile != null) { jsonOcr = MediaServicesHelper.ReturnContent(jsonFile); objOcr = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonOcr); objOcrOffset = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonOcr); if (timeOffset.Ticks != 0) // Let's add the offset { foreach (var frag in objOcrOffset.fragments) { frag.start = ((long)(frag.start)) + (long)((((double)timeOffset.Ticks / (double)TimeSpan.TicksPerSecond) * (double)objOcrOffset.timescale)); } } } if (jsonOcr != "" && data.ocr.deleteAsset != null && ((bool)data.ocr.deleteAsset)) // If asset deletion was asked { outputAsset.Delete(); } } // // Video Annotator // if (data.videoAnnotation != null && data.videoAnnotation.assetId != null) { // Get the asset string assetid = data.videoAnnotation.assetId; var outputAsset = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault(); if (outputAsset == null) { log.Info($"Asset not found {assetid}"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Asset not found" }); } var jsonFile = outputAsset.AssetFiles.Where(a => a.Name.ToUpper().EndsWith(".JSON")).FirstOrDefault(); log.Info($"JSON file = {jsonFile}"); if (jsonFile != null) { jsonAnnotation = MediaServicesHelper.ReturnContent(jsonFile); objAnnotation = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonAnnotation); objAnnotationOffset = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonAnnotation); if (timeOffset.Ticks != 0) // Let's add the offset { foreach (var frag in objAnnotationOffset.fragments) { frag.start = ((long)(frag.start)) + (long)((((double)timeOffset.Ticks / (double)TimeSpan.TicksPerSecond) * (double)objAnnotationOffset.timescale)); } } } if (jsonAnnotation != "" && data.videoAnnotation.deleteAsset != null && 
((bool)data.videoAnnotation.deleteAsset)) // If asset deletion was asked { outputAsset.Delete(); } } // // Content Moderation // if (data.contentModeration != null && data.contentModeration.assetId != null) { // Get the asset string assetid = data.contentModeration.assetId; var outputAsset = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault(); if (outputAsset == null) { log.Info($"Asset not found {assetid}"); return req.CreateResponse(HttpStatusCode.BadRequest, new { error = "Asset not found" }); } var jsonFile = outputAsset.AssetFiles.Where(a => a.Name.ToUpper().EndsWith(".JSON")).FirstOrDefault(); log.Info($"JSON file = {jsonFile}"); if (jsonFile != null) { jsonModeration = MediaServicesHelper.ReturnContent(jsonFile); objModeration = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonModeration); objModerationOffset = Newtonsoft.Json.JsonConvert.DeserializeObject(jsonModeration); if (timeOffset.Ticks != 0) // Let's add the offset { foreach (var frag in objModerationOffset.fragments) { frag.start = ((long)(frag.start)) + (long)((((double)timeOffset.Ticks / (double)TimeSpan.TicksPerSecond) * (double)objModerationOffset.timescale)); if (frag.events != null) { for (int i = 0; i < frag.events.Count; i++) { frag.events[i][0].timestamp = ((long)(frag.events[i][0].timestamp)) + (long)((((double)timeOffset.Ticks / (double)TimeSpan.TicksPerSecond) * (double)objModerationOffset.timescale)); } } } } } if (jsonModeration != "" && data.contentModeration.deleteAsset != null && ((bool)data.contentModeration.deleteAsset)) // If asset deletion was asked { outputAsset.Delete(); } } } catch (Exception ex) { string message = ex.Message + ((ex.InnerException != null) ? 
Environment.NewLine + MediaServicesHelper.GetErrorMessage(ex) : ""); log.Info($"ERROR: Exception {message}"); return req.CreateResponse(HttpStatusCode.InternalServerError, new { error = message }); } log.Info($""); return req.CreateResponse(HttpStatusCode.OK, new { faceRedaction = new { json = Newtonsoft.Json.JsonConvert.SerializeObject(objFaceDetection), jsonOffset = Newtonsoft.Json.JsonConvert.SerializeObject(objFaceDetectionOffset), jpgFaces = Newtonsoft.Json.JsonConvert.SerializeObject(jpgFaces) }, mesThumbnail = new { pngThumbnails = Newtonsoft.Json.JsonConvert.SerializeObject(pngThumbnails) }, motionDetection = new { json = Newtonsoft.Json.JsonConvert.SerializeObject(objMotionDetection), jsonOffset = Newtonsoft.Json.JsonConvert.SerializeObject(objMotionDetectionOffset) }, ocr = new { json = Newtonsoft.Json.JsonConvert.SerializeObject(objOcr), jsonOffset = Newtonsoft.Json.JsonConvert.SerializeObject(objOcrOffset) }, videoAnnotation = new { json = Newtonsoft.Json.JsonConvert.SerializeObject(objAnnotation), jsonOffset = Newtonsoft.Json.JsonConvert.SerializeObject(objAnnotationOffset) }, contentModeration = new { json = Newtonsoft.Json.JsonConvert.SerializeObject(objModeration), jsonOffset = Newtonsoft.Json.JsonConvert.SerializeObject(objModerationOffset) } }); } } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. namespace System { using System; using System.Reflection; using System.Runtime; using System.Threading; using System.Runtime.Serialization; using System.Runtime.InteropServices; using System.Runtime.CompilerServices; using System.Runtime.Versioning; using System.Diagnostics; using System.Diagnostics.Contracts; [Serializable] [ClassInterface(ClassInterfaceType.AutoDual)] [System.Runtime.InteropServices.ComVisible(true)] public abstract class Delegate : ICloneable, ISerializable { // _target is the object we will invoke on internal Object _target; // MethodBase, either cached after first request or assigned from a DynamicMethod // For open delegates to collectible types, this may be a LoaderAllocator object internal Object _methodBase; // _methodPtr is a pointer to the method we will invoke // It could be a small thunk if this is a static or UM call internal IntPtr _methodPtr; // In the case of a static method passed to a delegate, this field stores // whatever _methodPtr would have stored: and _methodPtr points to a // small thunk which removes the "this" pointer before going on // to _methodPtrAux. internal IntPtr _methodPtrAux; // This constructor is called from the class generated by the // compiler generated code protected Delegate(Object target,String method) { if (target == null) throw new ArgumentNullException(nameof(target)); if (method == null) throw new ArgumentNullException(nameof(method)); Contract.EndContractBlock(); // This API existed in v1/v1.1 and only expected to create closed // instance delegates. Constrain the call to BindToMethodName to // such and don't allow relaxed signature matching (which could make // the choice of target method ambiguous) for backwards // compatibility. The name matching was case sensitive and we // preserve that as well. 
if (!BindToMethodName(target, (RuntimeType)target.GetType(), method, DelegateBindingFlags.InstanceMethodOnly | DelegateBindingFlags.ClosedDelegateOnly)) throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTargMeth")); } // This constructor is called from a class to generate a // delegate based upon a static method name and the Type object // for the class defining the method. protected unsafe Delegate(Type target,String method) { if (target == null) throw new ArgumentNullException(nameof(target)); if (target.IsGenericType && target.ContainsGenericParameters) throw new ArgumentException(Environment.GetResourceString("Arg_UnboundGenParam"), nameof(target)); if (method == null) throw new ArgumentNullException(nameof(method)); Contract.EndContractBlock(); RuntimeType rtTarget = target as RuntimeType; if (rtTarget == null) throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"), nameof(target)); // This API existed in v1/v1.1 and only expected to create open // static delegates. Constrain the call to BindToMethodName to such // and don't allow relaxed signature matching (which could make the // choice of target method ambiguous) for backwards compatibility. // The name matching was case insensitive (no idea why this is // different from the constructor above) and we preserve that as // well. BindToMethodName(null, rtTarget, method, DelegateBindingFlags.StaticMethodOnly | DelegateBindingFlags.OpenDelegateOnly | DelegateBindingFlags.CaselessMatching); } // Protect the default constructor so you can't build a delegate private Delegate() { } public Object DynamicInvoke(params Object[] args) { // Theoretically we should set up a LookForMyCaller stack mark here and pass that along. // But to maintain backward compatibility we can't switch to calling an // internal overload of DynamicInvokeImpl that takes a stack mark. // Fortunately the stack walker skips all the reflection invocation frames including this one. 
// So this method will never be returned by the stack walker as the caller. // See SystemDomain::CallersMethodCallbackWithStackMark in AppDomain.cpp. return DynamicInvokeImpl(args); } protected virtual object DynamicInvokeImpl(object[] args) { RuntimeMethodHandleInternal method = new RuntimeMethodHandleInternal(GetInvokeMethod()); RuntimeMethodInfo invoke = (RuntimeMethodInfo)RuntimeType.GetMethodBase((RuntimeType)this.GetType(), method); return invoke.UnsafeInvoke(this, BindingFlags.Default, null, args, null); } public override bool Equals(Object obj) { if (obj == null || !InternalEqualTypes(this, obj)) return false; Delegate d = (Delegate) obj; // do an optimistic check first. This is hopefully cheap enough to be worth if (_target == d._target && _methodPtr == d._methodPtr && _methodPtrAux == d._methodPtrAux) return true; // even though the fields were not all equals the delegates may still match // When target carries the delegate itself the 2 targets (delegates) may be different instances // but the delegates are logically the same // It may also happen that the method pointer was not jitted when creating one delegate and jitted in the other // if that's the case the delegates may still be equals but we need to make a more complicated check if (_methodPtrAux.IsNull()) { if (!d._methodPtrAux.IsNull()) return false; // different delegate kind // they are both closed over the first arg if (_target != d._target) return false; // fall through method handle check } else { if (d._methodPtrAux.IsNull()) return false; // different delegate kind // Ignore the target as it will be the delegate instance, though it may be a different one /* if (_methodPtr != d._methodPtr) return false; */ if (_methodPtrAux == d._methodPtrAux) return true; // fall through method handle check } // method ptrs don't match, go down long path // if (_methodBase == null || d._methodBase == null || !(_methodBase is MethodInfo) || !(d._methodBase is MethodInfo)) return 
Delegate.InternalEqualMethodHandles(this, d); else return _methodBase.Equals(d._methodBase); } public override int GetHashCode() { // // this is not right in the face of a method being jitted in one delegate and not in another // in that case the delegate is the same and Equals will return true but GetHashCode returns a // different hashcode which is not true. /* if (_methodPtrAux.IsNull()) return unchecked((int)((long)this._methodPtr)); else return unchecked((int)((long)this._methodPtrAux)); */ return GetType().GetHashCode(); } public static Delegate Combine(Delegate a, Delegate b) { if ((Object)a == null) // cast to object for a more efficient test return b; return a.CombineImpl(b); } [System.Runtime.InteropServices.ComVisible(true)] public static Delegate Combine(params Delegate[] delegates) { if (delegates == null || delegates.Length == 0) return null; Delegate d = delegates[0]; for (int i = 1; i < delegates.Length; i++) d = Combine(d,delegates[i]); return d; } public virtual Delegate[] GetInvocationList() { Delegate[] d = new Delegate[1]; d[0] = this; return d; } // This routine will return the method public MethodInfo Method { get { return GetMethodImpl(); } } protected virtual MethodInfo GetMethodImpl() { if ((_methodBase == null) || !(_methodBase is MethodInfo)) { IRuntimeMethodInfo method = FindMethodHandle(); RuntimeType declaringType = RuntimeMethodHandle.GetDeclaringType(method); // need a proper declaring type instance method on a generic type if (RuntimeTypeHandle.IsGenericTypeDefinition(declaringType) || RuntimeTypeHandle.HasInstantiation(declaringType)) { bool isStatic = (RuntimeMethodHandle.GetAttributes(method) & MethodAttributes.Static) != (MethodAttributes)0; if (!isStatic) { if (_methodPtrAux == (IntPtr)0) { // The target may be of a derived type that doesn't have visibility onto the // target method. 
We don't want to call RuntimeType.GetMethodBase below with that // or reflection can end up generating a MethodInfo where the ReflectedType cannot // see the MethodInfo itself and that breaks an important invariant. But the // target type could include important generic type information we need in order // to work out what the exact instantiation of the method's declaring type is. So // we'll walk up the inheritance chain (which will yield exactly instantiated // types at each step) until we find the declaring type. Since the declaring type // we get from the method is probably shared and those in the hierarchy we're // walking won't be we compare using the generic type definition forms instead. Type currentType = _target.GetType(); Type targetType = declaringType.GetGenericTypeDefinition(); while (currentType != null) { if (currentType.IsGenericType && currentType.GetGenericTypeDefinition() == targetType) { declaringType = currentType as RuntimeType; break; } currentType = currentType.BaseType; } // RCWs don't need to be "strongly-typed" in which case we don't find a base type // that matches the declaring type of the method. This is fine because interop needs // to work with exact methods anyway so declaringType is never shared at this point. 
BCLDebug.Assert(currentType != null || _target.GetType().IsCOMObject, "The class hierarchy should declare the method"); } else { // it's an open one, need to fetch the first arg of the instantiation MethodInfo invoke = this.GetType().GetMethod("Invoke"); declaringType = (RuntimeType)invoke.GetParameters()[0].ParameterType; } } } _methodBase = (MethodInfo)RuntimeType.GetMethodBase(declaringType, method); } return (MethodInfo)_methodBase; } public Object Target { get { return GetTarget(); } } public static Delegate Remove(Delegate source, Delegate value) { if (source == null) return null; if (value == null) return source; if (!InternalEqualTypes(source, value)) throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTypeMis")); return source.RemoveImpl(value); } public static Delegate RemoveAll(Delegate source, Delegate value) { Delegate newDelegate = null; do { newDelegate = source; source = Remove(source, value); } while (newDelegate != source); return newDelegate; } protected virtual Delegate CombineImpl(Delegate d) { throw new MulticastNotSupportedException(Environment.GetResourceString("Multicast_Combine")); } protected virtual Delegate RemoveImpl(Delegate d) { return (d.Equals(this)) ? null : this; } public virtual Object Clone() { return MemberwiseClone(); } // V1 API. public static Delegate CreateDelegate(Type type, Object target, String method) { return CreateDelegate(type, target, method, false, true); } // V1 API. public static Delegate CreateDelegate(Type type, Object target, String method, bool ignoreCase) { return CreateDelegate(type, target, method, ignoreCase, true); } // V1 API. 
public static Delegate CreateDelegate(Type type, Object target, String method, bool ignoreCase, bool throwOnBindFailure) { if (type == null) throw new ArgumentNullException(nameof(type)); if (target == null) throw new ArgumentNullException(nameof(target)); if (method == null) throw new ArgumentNullException(nameof(method)); Contract.EndContractBlock(); RuntimeType rtType = type as RuntimeType; if (rtType == null) throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"), nameof(type)); if (!rtType.IsDelegate()) throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDelegate"),nameof(type)); Delegate d = InternalAlloc(rtType); // This API existed in v1/v1.1 and only expected to create closed // instance delegates. Constrain the call to BindToMethodName to such // and don't allow relaxed signature matching (which could make the // choice of target method ambiguous) for backwards compatibility. // We never generate a closed over null delegate and this is // actually enforced via the check on target above, but we pass // NeverCloseOverNull anyway just for clarity. if (!d.BindToMethodName(target, (RuntimeType)target.GetType(), method, DelegateBindingFlags.InstanceMethodOnly | DelegateBindingFlags.ClosedDelegateOnly | DelegateBindingFlags.NeverCloseOverNull | (ignoreCase ? DelegateBindingFlags.CaselessMatching : 0))) { if (throwOnBindFailure) throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTargMeth")); d = null; } return d; } // V1 API. public static Delegate CreateDelegate(Type type, Type target, String method) { return CreateDelegate(type, target, method, false, true); } // V1 API. public static Delegate CreateDelegate(Type type, Type target, String method, bool ignoreCase) { return CreateDelegate(type, target, method, ignoreCase, true); } // V1 API. 
// V1 API. Creates an open static delegate over the given target type, binding
// by method name. Returns null (or throws, per throwOnBindFailure) on failure.
public static Delegate CreateDelegate(Type type, Type target, String method, bool ignoreCase, bool throwOnBindFailure)
{
    if (type == null)
        throw new ArgumentNullException(nameof(type));
    if (target == null)
        throw new ArgumentNullException(nameof(target));
    // Open generic target types cannot supply a concrete method instantiation.
    if (target.IsGenericType && target.ContainsGenericParameters)
        throw new ArgumentException(Environment.GetResourceString("Arg_UnboundGenParam"), nameof(target));
    if (method == null)
        throw new ArgumentNullException(nameof(method));
    Contract.EndContractBlock();

    RuntimeType rtType = type as RuntimeType;
    RuntimeType rtTarget = target as RuntimeType;
    if (rtType == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"), nameof(type));
    if (rtTarget == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"), nameof(target));
    if (!rtType.IsDelegate())
        throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDelegate"),nameof(type));

    Delegate d = InternalAlloc(rtType);

    // This API existed in v1/v1.1 and only expected to create open
    // static delegates. Constrain the call to BindToMethodName to such
    // and don't allow relaxed signature matching (which could make the
    // choice of target method ambiguous) for backwards compatibility.
    if (!d.BindToMethodName(null, rtTarget, method,
                            DelegateBindingFlags.StaticMethodOnly |
                            DelegateBindingFlags.OpenDelegateOnly |
                            (ignoreCase ? DelegateBindingFlags.CaselessMatching : 0)))
    {
        if (throwOnBindFailure)
            throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTargMeth"));

        d = null;
    }

    return d;
}

// V1 API. Binds a specific MethodInfo as an open delegate.
[MethodImplAttribute(MethodImplOptions.NoInlining)] // Methods containing StackCrawlMark local var has to be marked non-inlineable
public static Delegate CreateDelegate(Type type, MethodInfo method, bool throwOnBindFailure)
{
    // Validate the parameters.
    if (type == null)
        throw new ArgumentNullException(nameof(type));
    if (method == null)
        throw new ArgumentNullException(nameof(method));
    Contract.EndContractBlock();

    RuntimeType rtType = type as RuntimeType;
    if (rtType == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"), nameof(type));

    RuntimeMethodInfo rmi = method as RuntimeMethodInfo;
    if (rmi == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeMethodInfo"), nameof(method));

    if (!rtType.IsDelegate())
        throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDelegate"), nameof(type));

    // This API existed in v1/v1.1 and only expected to create closed
    // instance delegates. Constrain the call to BindToMethodInfo to
    // open delegates only for backwards compatibility. But we'll allow
    // relaxed signature checking and open static delegates because
    // there's no ambiguity there (the caller would have to explicitly
    // pass us a static method or a method with a non-exact signature
    // and the only change in behavior from v1.1 there is that we won't
    // fail the call).
    StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller;
    Delegate d = CreateDelegateInternal(
        rtType,
        rmi,
        null,
        DelegateBindingFlags.OpenDelegateOnly | DelegateBindingFlags.RelaxedSignature,
        ref stackMark);

    if (d == null && throwOnBindFailure)
        throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTargMeth"));

    return d;
}

// V2 API. Delegates to the throwing overload below.
public static Delegate CreateDelegate(Type type, Object firstArgument, MethodInfo method)
{
    return CreateDelegate(type, firstArgument, method, true);
}

// V2 API. Full-flexibility creation over a specific MethodInfo, optionally
// closed over firstArgument (which may be null).
[MethodImplAttribute(MethodImplOptions.NoInlining)] // Methods containing StackCrawlMark local var has to be marked non-inlineable
public static Delegate CreateDelegate(Type type, Object firstArgument, MethodInfo method, bool throwOnBindFailure)
{
    // Validate the parameters.
    if (type == null)
        throw new ArgumentNullException(nameof(type));
    if (method == null)
        throw new ArgumentNullException(nameof(method));
    Contract.EndContractBlock();

    RuntimeType rtType = type as RuntimeType;
    if (rtType == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"), nameof(type));

    RuntimeMethodInfo rmi = method as RuntimeMethodInfo;
    if (rmi == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeMethodInfo"), nameof(method));

    if (!rtType.IsDelegate())
        throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDelegate"), nameof(type));

    // This API is new in Whidbey and allows the full range of delegate
    // flexability (open or closed delegates binding to static or
    // instance methods with relaxed signature checking. The delegate
    // can also be closed over null. There's no ambiguity with all these
    // options since the caller is providing us a specific MethodInfo.
    StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller;
    Delegate d = CreateDelegateInternal(
        rtType,
        rmi,
        firstArgument,
        DelegateBindingFlags.RelaxedSignature,
        ref stackMark);

    if (d == null && throwOnBindFailure)
        throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTargMeth"));

    return d;
}

// Null-safe equality: two null references are equal; otherwise defer to Equals.
public static bool operator ==(Delegate d1, Delegate d2)
{
    if ((Object)d1 == null)
        return (Object)d2 == null;

    return d1.Equals(d2);
}

public static bool operator != (Delegate d1, Delegate d2)
{
    if ((Object)d1 == null)
        return (Object)d2 != null;

    return !d1.Equals(d2);
}

//
// Implementation of ISerializable
//

// Base Delegate is not serializable via this path; derived types supply their own data.
public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
{
    throw new NotSupportedException();
}

//
// internal implementation details (FCALLS and utilities)
//

// V2 internal API.
// This is Critical because it skips the security check when creating the delegate.
// Creates a delegate over a raw RuntimeMethodHandle, skipping security checks.
// Used by dynamic-method wrapper-delegate generation (see comment below).
internal unsafe static Delegate CreateDelegateNoSecurityCheck(Type type, Object target, RuntimeMethodHandle method)
{
    // Validate the parameters.
    if (type == null)
        throw new ArgumentNullException(nameof(type));
    Contract.EndContractBlock();

    if (method.IsNullHandle())
        throw new ArgumentNullException(nameof(method));

    RuntimeType rtType = type as RuntimeType;
    if (rtType == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"), nameof(type));

    if (!rtType.IsDelegate())
        throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDelegate"), nameof(type));

    // Initialize the method...
    Delegate d = InternalAlloc(rtType);
    // This is a new internal API added in Whidbey. Currently it's only
    // used by the dynamic method code to generate a wrapper delegate.
    // Allow flexible binding options since the target method is
    // unambiguously provided to us.

    if (!d.BindToMethodInfo(target,
                            method.GetMethodInfo(),
                            RuntimeMethodHandle.GetDeclaringType(method.GetMethodInfo()),
                            DelegateBindingFlags.RelaxedSignature | DelegateBindingFlags.SkipSecurityChecks))
        throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTargMeth"));

    return d;
}

// Caution: this method is intended for deserialization only, no security checks are performed.
internal static Delegate CreateDelegateNoSecurityCheck(RuntimeType type, Object firstArgument, MethodInfo method)
{
    // Validate the parameters.
    if (type == null)
        throw new ArgumentNullException(nameof(type));
    if (method == null)
        throw new ArgumentNullException(nameof(method));
    Contract.EndContractBlock();

    RuntimeMethodInfo rtMethod = method as RuntimeMethodInfo;
    if (rtMethod == null)
        throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeMethodInfo"), nameof(method));

    if (!type.IsDelegate())
        throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDelegate"), nameof(type));

    // This API is used by the formatters when deserializing a delegate.
    // They pass us the specific target method (that was already the
    // target in a valid delegate) so we should bind with the most
    // relaxed rules available (the result will never be ambiguous, it
    // just increases the chance of success with minor (compatible)
    // signature changes). We explicitly skip security checks here --
    // we're not really constructing a delegate, we're cloning an
    // existing instance which already passed its checks.
    Delegate d = UnsafeCreateDelegate(type, rtMethod, firstArgument,
                                      DelegateBindingFlags.SkipSecurityChecks | DelegateBindingFlags.RelaxedSignature);

    if (d == null)
        throw new ArgumentException(Environment.GetResourceString("Arg_DlgtTargMeth"));

    return d;
}

// V1 API. Open delegate over a MethodInfo; throws on bind failure.
public static Delegate CreateDelegate(Type type, MethodInfo method)
{
    return CreateDelegate(type, method, true);
}

// Security-checked creation path shared by the public CreateDelegate overloads.
// The stackMark identifies the original caller for the AppX reflection check below.
internal static Delegate CreateDelegateInternal(RuntimeType rtType, RuntimeMethodInfo rtMethod, Object firstArgument, DelegateBindingFlags flags, ref StackCrawlMark stackMark)
{
    // Callers that want to skip checks must use UnsafeCreateDelegate directly.
    Debug.Assert((flags & DelegateBindingFlags.SkipSecurityChecks) == 0);

#if FEATURE_APPX
    // In AppX processes, non-Windows-8-Profile framework APIs may only be bound
    // by assemblies that are themselves safe for reflection.
    bool nonW8PMethod = (rtMethod.InvocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API) != 0;
    bool nonW8PType = (rtType.InvocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API) != 0;
    if (nonW8PMethod || nonW8PType)
    {
        RuntimeAssembly caller = RuntimeAssembly.GetExecutingAssembly(ref stackMark);
        if (caller != null && !caller.IsSafeForReflection())
            throw new InvalidOperationException(
                Environment.GetResourceString("InvalidOperation_APIInvalidForCurrentContext",
                                              nonW8PMethod ? rtMethod.FullName : rtType.FullName));
    }
#endif

    return UnsafeCreateDelegate(rtType, rtMethod, firstArgument, flags);
}

// Allocates the delegate object and binds it; returns null when binding fails.
internal static Delegate UnsafeCreateDelegate(RuntimeType rtType, RuntimeMethodInfo rtMethod, Object firstArgument, DelegateBindingFlags flags)
{
    Delegate d = InternalAlloc(rtType);

    if (d.BindToMethodInfo(firstArgument, rtMethod, rtMethod.GetDeclaringTypeInternal(), flags))
        return d;
    else
        return null;
}

//
// internal implementation details (FCALLS and utilities)
//

// Binds this delegate to a method found by name on methodType (implemented in the VM).
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern bool BindToMethodName(Object target, RuntimeType methodType, String method, DelegateBindingFlags flags);

// Binds this delegate to a specific runtime method (implemented in the VM).
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern bool BindToMethodInfo(Object target, IRuntimeMethodInfo method, RuntimeType methodType, DelegateBindingFlags flags);

// Allocates an uninitialized delegate instance of the given runtime type.
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern static MulticastDelegate InternalAlloc(RuntimeType type);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern static MulticastDelegate InternalAllocLike(Delegate d);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern static bool InternalEqualTypes(object a, object b);

// Used by the ctor. Do not call directly.
// The name of this function will appear in managed stacktraces as delegate constructor.
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void DelegateConstruct(Object target, IntPtr slot);

// Returns the invocation stub used by multicast delegates (implemented in the VM).
[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern IntPtr GetMulticastInvoke();

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern IntPtr GetInvokeMethod();

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern IRuntimeMethodInfo FindMethodHandle();

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern static bool InternalEqualMethodHandles(Delegate left, Delegate right);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern IntPtr AdjustTarget(Object target, IntPtr methodPtr);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern IntPtr GetCallStub(IntPtr methodPtr);

// For closed delegates (_methodPtrAux is null) the bound object is _target;
// open/static delegates report no target.
internal virtual Object GetTarget()
{
    return (_methodPtrAux.IsNull()) ? _target : null;
}

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern static bool CompareUnmanagedFunctionPtrs (Delegate d1, Delegate d2);
}

// These flags effect the way BindToMethodInfo and BindToMethodName are allowed to bind a delegate to a target method. Their
// values must be kept in sync with the definition in vm\comdelegate.h.
internal enum DelegateBindingFlags
{
    StaticMethodOnly = 0x00000001, // Can only bind to static target methods
    InstanceMethodOnly = 0x00000002, // Can only bind to instance (including virtual) methods
    OpenDelegateOnly = 0x00000004, // Only allow the creation of delegates open over the 1st argument
    ClosedDelegateOnly = 0x00000008, // Only allow the creation of delegates closed over the 1st argument
    NeverCloseOverNull = 0x00000010, // A null target will never been considered as a possible null 1st argument
    CaselessMatching = 0x00000020, // Use case insensitive lookup for methods matched by name
    SkipSecurityChecks = 0x00000040, // Skip security checks (visibility, link demand etc.)
    RelaxedSignature = 0x00000080, // Allow relaxed signature matching (co/contra variance)
}
}
// Copyright (c) Microsoft Open Technologies, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Immutable;
using System.Diagnostics;
using System.Threading;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
using System.Collections.Generic;
using System;

namespace Microsoft.CodeAnalysis.CSharp.Symbols
{
    /// <summary>
    /// Base class for the members synthesized for a delegate declaration:
    /// the constructor, Invoke, BeginInvoke and EndInvoke (see CLI spec 13.6).
    /// </summary>
    internal abstract class SourceDelegateMethodSymbol : SourceMethodSymbol
    {
        private ImmutableArray<ParameterSymbol> parameters;
        private readonly TypeSymbol returnType;

        protected SourceDelegateMethodSymbol(
            SourceMemberContainerTypeSymbol delegateType,
            TypeSymbol returnType,
            DelegateDeclarationSyntax syntax,
            MethodKind methodKind,
            DeclarationModifiers declarationModifiers)
            : base(delegateType, syntax.GetReference(), bodySyntaxReferenceOpt: null, location: syntax.Identifier.GetLocation())
        {
            this.returnType = returnType;
            this.MakeFlags(methodKind, declarationModifiers, this.returnType.SpecialType == SpecialType.System_Void, isExtensionMethod: false);
        }

        private sealed class BeginInvokeMethod : SourceDelegateMethodSymbol
        {
            // FIX: this member had been mangled into an ordinary method named
            // "BeginInvokeMethod1" carrying a ": base(...)" initializer (invalid C#),
            // which also broke "new BeginInvokeMethod(...)" in AddDelegateMembers.
            // Restored as the constructor. The trailing "object" state parameter is
            // added here as well — it uses the constructor locals (parameters,
            // paramCount) and objectType, so it cannot live in
            // GetReturnTypeAttributeDeclarations where it had been misplaced.
            internal BeginInvokeMethod(
                InvokeMethod invoke,
                TypeSymbol iAsyncResultType,
                TypeSymbol objectType,
                TypeSymbol asyncCallbackType,
                DelegateDeclarationSyntax syntax)
                : base((SourceNamedTypeSymbol)invoke.ContainingType, iAsyncResultType, syntax, MethodKind.Ordinary, DeclarationModifiers.Virtual | DeclarationModifiers.Public)
            {
                // BeginInvoke's parameters are Invoke's parameters followed by
                // an AsyncCallback and an object state argument.
                var parameters = ArrayBuilder<ParameterSymbol>.GetInstance();
                foreach (SourceParameterSymbol p in invoke.Parameters)
                {
                    var synthesizedParam = new SourceClonedParameterSymbol(originalParam: p, newOwner: this, newOrdinal: p.Ordinal, suppressOptional: true);
                    parameters.Add(synthesizedParam);
                }

                int paramCount = invoke.ParameterCount;
                parameters.Add(new SynthesizedParameterSymbol(this, asyncCallbackType, paramCount, RefKind.None, "callback"));
                parameters.Add(new SynthesizedParameterSymbol(this, objectType, paramCount + 1, RefKind.None, "object"));
                InitializeParameters(parameters.ToImmutableAndFree());
            }

            public override string Name
            {
                get { return WellKnownMemberNames.DelegateBeginInvokeName; }
            }

            internal override OneOrMany<SyntaxList<AttributeListSyntax>> GetReturnTypeAttributeDeclarations()
            {
                // BeginInvoke method doesn't have return type attributes
                // because it doesn't inherit Delegate declaration's return type.
                // It has a special return type: SpecialType.System.IAsyncResult.
                return OneOrMany.Create(default(SyntaxList<AttributeListSyntax>));
            }
        }

        /// <summary>Sets the parameter list exactly once.</summary>
        protected void InitializeParameters(ImmutableArray<ParameterSymbol> parameters)
        {
            Debug.Assert(this.parameters.IsDefault);
            this.parameters = parameters;
        }

        /// <summary>
        /// Creates all synthesized members for a delegate declaration and adds
        /// them to <paramref name="symbols"/>, reporting accessibility and
        /// return-type diagnostics on the way.
        /// </summary>
        internal static void AddDelegateMembers(
            SourceMemberContainerTypeSymbol delegateType,
            ArrayBuilder<Symbol> symbols,
            DelegateDeclarationSyntax syntax,
            DiagnosticBag diagnostics)
        {
            Binder binder = delegateType.GetBinder(syntax.ParameterList);
            TypeSymbol returnType = binder.BindType(syntax.ReturnType, diagnostics);

            // reuse types to avoid reporting duplicate errors if missing:
            var voidType = binder.GetSpecialType(SpecialType.System_Void, diagnostics, syntax);
            var objectType = binder.GetSpecialType(SpecialType.System_Object, diagnostics, syntax);
            var intPtrType = binder.GetSpecialType(SpecialType.System_IntPtr, diagnostics, syntax);

            if (returnType.IsRestrictedType())
            {
                // Method or delegate cannot return type '{0}'
                diagnostics.Add(ErrorCode.ERR_MethodReturnCantBeRefAny, syntax.ReturnType.Location, returnType);
            }

            // A delegate has the following members: (see CLI spec 13.6)
            // (1) a method named Invoke with the specified signature
            var invoke = new InvokeMethod(delegateType, returnType, syntax, binder, diagnostics);
            invoke.CheckDelegateVarianceSafety(diagnostics);
            symbols.Add(invoke);

            // (2) a constructor with argument types (object, System.IntPtr)
            symbols.Add(new Constructor(delegateType, voidType, objectType, intPtrType, syntax));

            if (binder.Compilation.GetSpecialType(SpecialType.System_IAsyncResult).TypeKind != TypeKind.Error &&
                binder.Compilation.GetSpecialType(SpecialType.System_AsyncCallback).TypeKind != TypeKind.Error &&
                // WinRT delegates don't have Begin/EndInvoke methods
                !delegateType.IsCompilationOutputWinMdObj())
            {
                var iAsyncResultType = binder.GetSpecialType(SpecialType.System_IAsyncResult, diagnostics, syntax);
                var asyncCallbackType = binder.GetSpecialType(SpecialType.System_AsyncCallback, diagnostics, syntax);

                // (3) BeginInvoke
                symbols.Add(new BeginInvokeMethod(invoke, iAsyncResultType, objectType, asyncCallbackType, syntax));

                // and (4) EndInvoke methods
                symbols.Add(new EndInvokeMethod(invoke, iAsyncResultType, syntax));
            }

            if (delegateType.DeclaredAccessibility <= Accessibility.Private)
            {
                return;
            }

            HashSet<DiagnosticInfo> useSiteDiagnostics = null;

            if (!delegateType.IsNoMoreVisibleThan(invoke.ReturnType, ref useSiteDiagnostics))
            {
                // Inconsistent accessibility: return type '{1}' is less accessible than delegate '{0}'
                diagnostics.Add(ErrorCode.ERR_BadVisDelegateReturn, delegateType.Locations[0], delegateType, invoke.ReturnType);
            }

            foreach (var parameter in invoke.Parameters)
            {
                if (!parameter.Type.IsAtLeastAsVisibleAs(delegateType, ref useSiteDiagnostics))
                {
                    // Inconsistent accessibility: parameter type '{1}' is less accessible than delegate '{0}'
                    diagnostics.Add(ErrorCode.ERR_BadVisDelegateParam, delegateType.Locations[0], delegateType, parameter.Type);
                }
            }

            diagnostics.Add(delegateType.Locations[0], useSiteDiagnostics);
        }

        protected override void MethodChecks(DiagnosticBag diagnostics)
        {
            // TODO: move more functionality into here, making these symbols more lazy
        }

        public sealed override bool IsVararg
        {
            get { return false; }
        }

        public sealed override ImmutableArray<ParameterSymbol> Parameters
        {
            get { return this.parameters; }
        }

        public override ImmutableArray<TypeParameterSymbol> TypeParameters
        {
            get { return ImmutableArray<TypeParameterSymbol>.Empty; }
        }

        public sealed override TypeSymbol ReturnType
        {
            get { return this.returnType; }
        }

        public sealed override bool IsImplicitlyDeclared
        {
            get { return true; }
        }

        internal override bool IsExpressionBodied
        {
            get { return false; }
        }

        internal override bool GenerateDebugInfo
        {
            get { return false; }
        }

        protected sealed override IAttributeTargetSymbol AttributeOwner
        {
            get { return (SourceNamedTypeSymbol)ContainingSymbol; }
        }

        internal sealed override System.Reflection.MethodImplAttributes ImplementationAttributes
        {
            // Delegate members have runtime-supplied implementations.
            get { return System.Reflection.MethodImplAttributes.Runtime; }
        }

        internal sealed override OneOrMany<SyntaxList<AttributeListSyntax>> GetAttributeDeclarations()
        {
            // TODO: This implementation looks strange. It might make sense for the Invoke method, but
            // not for constructor and other methods.
            return OneOrMany.Create(((SourceNamedTypeSymbol)ContainingSymbol).GetAttributeDeclarations());
        }

        internal sealed override System.AttributeTargets GetAttributeTarget()
        {
            return System.AttributeTargets.Delegate;
        }

        private sealed class Constructor : SourceDelegateMethodSymbol
        {
            internal Constructor(
                SourceMemberContainerTypeSymbol delegateType,
                TypeSymbol voidType,
                TypeSymbol objectType,
                TypeSymbol intPtrType,
                DelegateDeclarationSyntax syntax)
                : base(delegateType, voidType, syntax, MethodKind.Constructor, DeclarationModifiers.Public)
            {
                InitializeParameters(ImmutableArray.Create<ParameterSymbol>(
                    new SynthesizedParameterSymbol(this, objectType, 0, RefKind.None, "object"),
                    new SynthesizedParameterSymbol(this, intPtrType, 1, RefKind.None, "method")));
            }

            public override string Name
            {
                get { return WellKnownMemberNames.InstanceConstructorName; }
            }

            internal override OneOrMany<SyntaxList<AttributeListSyntax>> GetReturnTypeAttributeDeclarations()
            {
                // Constructors don't have return type attributes
                return OneOrMany.Create(default(SyntaxList<AttributeListSyntax>));
            }

            internal override LexicalSortKey GetLexicalSortKey()
            {
                // associate "Invoke and .ctor" with whole delegate declaration for the sorting purposes
                // other methods will be associated with delegate's identifier
                // we want this just to keep the order of synthesized methods the same as in Dev12
                // Dev12 order is not strictly alphabetical - .ctor and Invoke go before other members.
                // there are no real reasons for emitting the members in one order or another,
                // so we will keep them the same.
                return new LexicalSortKey(this.syntaxReferenceOpt.GetLocation(), this.DeclaringCompilation);
            }
        }

        private sealed class InvokeMethod : SourceDelegateMethodSymbol
        {
            internal InvokeMethod(
                SourceMemberContainerTypeSymbol delegateType,
                TypeSymbol returnType,
                DelegateDeclarationSyntax syntax,
                Binder binder,
                DiagnosticBag diagnostics)
                : base(delegateType, returnType, syntax, MethodKind.DelegateInvoke, DeclarationModifiers.Virtual | DeclarationModifiers.Public)
            {
                SyntaxToken arglistToken;
                var parameters = ParameterHelpers.MakeParameters(binder, this, syntax.ParameterList, true, out arglistToken, diagnostics);

                if (arglistToken.Kind() == SyntaxKind.ArgListKeyword)
                {
                    // This is a parse-time error in the native compiler; it is a semantic analysis error in Roslyn.
                    // error CS1669: __arglist is not valid in this context
                    diagnostics.Add(ErrorCode.ERR_IllegalVarArgs, new SourceLocation(arglistToken));
                }

                InitializeParameters(parameters);
            }

            public override string Name
            {
                get { return WellKnownMemberNames.DelegateInvokeName; }
            }

            internal override LexicalSortKey GetLexicalSortKey()
            {
                // associate "Invoke and .ctor" with whole delegate declaration for the sorting purposes
                // other methods will be associated with delegate's identifier
                // we want this just to keep the order of synthesized methods the same as in Dev12
                // Dev12 order is not strictly alphabetical - .ctor and Invoke go before other members.
                // there are no real reasons for emitting the members in one order or another,
                // so we will keep them the same.
                return new LexicalSortKey(this.syntaxReferenceOpt.GetLocation(), this.DeclaringCompilation);
            }
        }

        private sealed class EndInvokeMethod : SourceDelegateMethodSymbol
        {
            internal EndInvokeMethod(
                InvokeMethod invoke,
                TypeSymbol iAsyncResultType,
                DelegateDeclarationSyntax syntax)
                : base((SourceNamedTypeSymbol)invoke.ContainingType, invoke.ReturnType, syntax, MethodKind.Ordinary, DeclarationModifiers.Virtual | DeclarationModifiers.Public)
            {
                // EndInvoke takes only Invoke's by-reference parameters,
                // followed by the IAsyncResult produced by BeginInvoke.
                var parameters = ArrayBuilder<ParameterSymbol>.GetInstance();
                int ordinal = 0;

                foreach (SourceParameterSymbol p in invoke.Parameters)
                {
                    if (p.RefKind != RefKind.None)
                    {
                        var synthesizedParam = new SourceClonedParameterSymbol(originalParam: p, newOwner: this, newOrdinal: ordinal++, suppressOptional: true);
                        parameters.Add(synthesizedParam);
                    }
                }

                parameters.Add(new SynthesizedParameterSymbol(this, iAsyncResultType, ordinal++, RefKind.None, "__result"));
                InitializeParameters(parameters.ToImmutableAndFree());
            }

            protected override SourceMethodSymbol BoundAttributesSource
            {
                get
                {
                    // copy return attributes from InvokeMethod
                    return (SourceMethodSymbol)((SourceNamedTypeSymbol)this.ContainingSymbol).DelegateInvokeMethod;
                }
            }

            public override string Name
            {
                get { return WellKnownMemberNames.DelegateEndInvokeName; }
            }
        }
    }
}
/* * Copyright (c) Contributors, http://aurora-sim.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Threading;
using System.Windows.Forms;
using Nini.Config;
using Aurora.DataManager;
using Aurora.Framework;
using Aurora.Management;
using OpenSim.Region.Framework.Scenes;

namespace OpenSim.ApplicationPlugins.RegionLoaderPlugin
{
    /// <summary>
    /// Region loader that stores region definitions in the database (via
    /// IRegionInfoConnector) rather than in .ini/.xml files, migrating any
    /// legacy file-based regions on first load.
    /// </summary>
    public class RegionLoaderDataBaseSystem : IRegionLoader
    {
        private ISimulationBase m_openSim;
        private IConfigSource m_configSource;
        private bool m_enabled = false;
        // True when this loader is configured as the "Default" region loader.
        private bool m_default = false;
        // True when the [Startup] config disables GUI usage (console-only hosts).
        private bool m_noGUI = false;

        public bool Enabled
        {
            get { return m_enabled; }
        }

        public bool Default
        {
            get { return m_default; }
        }

        // Reads [RegionStartup] / [Startup] config, registers console commands,
        // and exposes this loader through the application registry.
        public void Initialise(IConfigSource configSource, ISimulationBase openSim)
        {
            m_configSource = configSource;
            m_openSim = openSim;
            IConfig config = configSource.Configs["RegionStartup"];
            if (config != null)
            {
                m_enabled = config.GetBoolean(GetType().Name + "_Enabled", m_enabled);
                if (!m_enabled)
                    return;
                if (MainConsole.Instance != null)
                    MainConsole.Instance.Commands.AddCommand("open region manager", "open region manager", "Opens the region manager", OpenRegionManager);
                m_default = config.GetString("Default") == GetType().Name;
                //Add the console command if it is the default
                if (m_default)
                    if (MainConsole.Instance != null)
                        MainConsole.Instance.Commands.AddCommand ("create region", "create region", "Create a new region.", AddRegion);
            }

            IConfig startupconfig = configSource.Configs["Startup"];
            if (startupconfig != null)
                m_noGUI = startupconfig.GetBoolean("NoGUI", false);

            m_openSim.ApplicationRegistry.StackModuleInterface<IRegionLoader>(this);
        }

        public void Close()
        {
        }

        public string Name
        {
            get { return "Database Plugin"; }
        }

        // Loads all region infos from the database connector; returns null when
        // the connector is unavailable or no regions exist.
        public RegionInfo[] LoadRegions()
        {
            //Grab old region files
            if(m_default)
                FindOldRegionFiles();

            IRegionInfoConnector conn = DataManager.RequestPlugin<IRegionInfoConnector>();
            if (conn == null)
                return null;
            RegionInfo[] infos = conn.GetRegionInfos(true);
            return infos.Length == 0 ? null : infos;
        }

        /// <summary>
        /// Creates a new region based on the parameters specified. This will ask the user questions on the console
        /// </summary>
        /// <param name="cmd">0,1,region name, region XML file</param>
        public void AddRegion(string[] cmd)
        {
            // Falls back to the console-driven file loader when there is no GUI
            // (configured via NoGUI, or when WinForms is unavailable at runtime).
            try
            {
                if(m_noGUI)
                {
                    RegionLoaderFileSystem system = new RegionLoaderFileSystem ();
                    system.Initialise (m_configSource, m_openSim);
                    system.AddRegion (new string[0]);
                }
                else
                {
                    RegionManager.StartSynchronously(false, RegionManagerPage.CreateRegion, m_openSim.ConfigSource, m_openSim.ApplicationRegistry.RequestModuleInterface<IRegionManagement>());
                }
            }
            catch
            {
                //Probably no winforms
                RegionLoaderFileSystem system = new RegionLoaderFileSystem ();
                system.Initialise (m_configSource, m_openSim);
                system.AddRegion (new string[0]);
            }
        }

        // Same flow as AddRegion, but starts the region manager in "killOnExit"
        // mode (first bool argument true) — NOTE(review): confirm the semantics
        // of that flag against RegionManager.StartSynchronously.
        public void CreateRegion()
        {
            try
            {
                if (m_noGUI)
                {
                    RegionLoaderFileSystem system = new RegionLoaderFileSystem();
                    system.Initialise(m_configSource, m_openSim);
                    system.AddRegion(new string[0]);
                }
                else
                {
                    RegionManager.StartSynchronously(true, RegionManagerPage.CreateRegion, m_openSim.ConfigSource, m_openSim.ApplicationRegistry.RequestModuleInterface<IRegionManagement>());
                }
            }
            catch
            {
                //Probably no winforms
                RegionLoaderFileSystem system = new RegionLoaderFileSystem();
                system.Initialise(m_configSource, m_openSim);
                system.AddRegion(new string[0]);
            }
        }

        // Console command handler: opens the region-manager GUI without blocking.
        protected void OpenRegionManager(string[] cmdparams)
        {
            RegionManager.StartAsynchronously(false, RegionManagerPage.ViewRegions, m_openSim.ConfigSource, m_openSim.ApplicationRegistry.RequestModuleInterface<IRegionManagement>());
        }

        // One-time migration: imports regions from legacy .ini/.xml files into
        // the database, then (best-effort) offers to delete the old files.
        private void FindOldRegionFiles()
        {
            try
            {
                //Load the file loader and set it up and make sure that we pull any regions from it
                RegionLoaderFileSystem system = new RegionLoaderFileSystem();
                system.Initialise(m_configSource, m_openSim);
                RegionInfo[] regionsToConvert = system.InternalLoadRegions(true);
                if (regionsToConvert == null)
                    return;

                bool changed = false;
                //Now load all the regions into the database
                IRegionInfoConnector conn = DataManager.RequestPlugin<IRegionInfoConnector>();
                foreach (RegionInfo info in regionsToConvert)
                {
                    RegionInfo alreadyExists;
                    if ((alreadyExists = conn.GetRegionInfo (info.RegionID)) == null)
                    {
                        changed = true;
                        if (!info.UDPPorts.Contains (info.InternalEndPoint.Port))
                            info.UDPPorts.Add (info.InternalEndPoint.Port);
                        info.Disabled = false;
                        conn.UpdateRegionInfo (info);
                    }
                    else
                    {
                        //Update some attributes...
                        alreadyExists.RegionName = info.RegionName;
                        alreadyExists.RegionLocX = info.RegionLocX;
                        alreadyExists.RegionLocY = info.RegionLocY;
                        alreadyExists.RegionSizeX = info.RegionSizeX;
                        alreadyExists.RegionSizeY = info.RegionSizeY;
                        alreadyExists.Disabled = false;
                        if (!alreadyExists.UDPPorts.Contains (info.InternalEndPoint.Port))
                            alreadyExists.UDPPorts.Add (info.InternalEndPoint.Port);
                        conn.UpdateRegionInfo (alreadyExists);
                    }
                }

                //Make sure all the regions got saved
                bool foundAll = true;
                foreach (RegionInfo info in regionsToConvert)
                {
                    if (conn.GetRegionInfo(info.RegionID) == null)
                        foundAll = false;
                }
                //We found some new ones, they are all loaded
                if (foundAll && regionsToConvert.Length != 0 && changed)
                {
                    // WinForms may be missing (console-only / no Mono-Winforms),
                    // so the MessageBox path is wrapped and falls back to console output.
                    try
                    {
                        MessageBox.Show ("All region .ini and .xml files have been successfully converted to the new region loader style.");
                        MessageBox.Show ("To change your region settings, type 'open region manager' on the console, and a GUI will pop up for you to use.");
                        DialogResult t = Utilities.InputBox ("Remove .ini files", "Do you want to remove your old .ini files?");
                        if (t == DialogResult.OK)
                            system.DeleteAllRegionFiles ();
                    }
                    catch
                    {
                        //For people who only have consoles, no winforms
                        MainConsole.Instance.Output ("All region .ini and .xml files have been successfully converted to the new region loader style.");
                        MainConsole.Instance.Output ("To change your region settings, well, you don't have Mono-Winforms installed. Get that, stick with just modifying the .ini files, or get something to modify the region database that isn't a GUI.");
                    }
                }
            }
            catch
            {
                // Deliberate best-effort: migration failures must not prevent startup.
            }
        }

        public void DeleteRegion(RegionInfo regionInfo)
        {
            IRegionInfoConnector connector = DataManager.RequestPlugin<IRegionInfoConnector>();
            if (connector != null)
            {
                connector.Delete(regionInfo);
            }
        }

        // Reports a validation failure; tries the GUI first, then the console.
        // Always returns true.
        public bool FailedToStartRegions(string reason)
        {
            try
            {
                //Open the region manager for them
                MessageBox.Show (reason, "Startup failed, regions did not validate!");
                RegionManager.StartSynchronously(false, RegionManagerPage.ViewRegions, m_openSim.ConfigSource, m_openSim.ApplicationRegistry.RequestModuleInterface<IRegionManagement>());
            }
            catch
            {
                MainConsole.Instance.Output(string.Format("Startup failed, regions did not validate - {0}!", reason));
            }
            return true;
        }

        public void Dispose()
        {
        }

        // Renames a region: deletes the entry stored under oldName (via an exact
        // copy carrying the old name) and writes the updated info.
        public void UpdateRegionInfo(string oldName, RegionInfo regionInfo)
        {
            IRegionInfoConnector connector = DataManager.RequestPlugin<IRegionInfoConnector>();
            if (connector != null)
            {
                //Make sure we have this region in the database
                if (connector.GetRegionInfo(oldName) == null)
                    return;
                RegionInfo copy = new RegionInfo();
                //Make an exact copy
                copy.UnpackRegionInfoData(regionInfo.PackRegionInfoData(true));
                //Fix the name of the region so we can delete the old one
                copy.RegionName = oldName;
                DeleteRegion(copy);
                //Now add the new one
                connector.UpdateRegionInfo(regionInfo);
            }
        }
    }
}
//-----------------------------------------------------------------------
// <copyright file="TestSubscriber.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using Akka.Actor;
using Akka.Event;
using Akka.Streams.Actors;
using Akka.TestKit;
using Reactive.Streams;

namespace Akka.Streams.TestKit
{
    public static class TestSubscriber
    {
        #region messages

        /// <summary>
        /// Marker interface for every event a test subscriber probe records.
        /// Marked with <see cref="IDeadLetterSuppression"/> so events that arrive after
        /// the probe is gone do not flood the dead-letter log.
        /// </summary>
        public interface ISubscriberEvent : INoSerializationVerificationNeeded, IDeadLetterSuppression { }

        /// <summary>
        /// Recorded when the upstream publisher invokes <see cref="ISubscriber{T}.OnSubscribe"/>.
        /// </summary>
        public struct OnSubscribe : ISubscriberEvent
        {
            // The subscription handed over by the upstream publisher.
            public readonly ISubscription Subscription;

            public OnSubscribe(ISubscription subscription)
            {
                Subscription = subscription;
            }

            public override string ToString() => $"TestSubscriber.OnSubscribe({Subscription})";
        }

        /// <summary>
        /// Recorded for every stream element signalled to the probe.
        /// </summary>
        public struct OnNext<T> : ISubscriberEvent
        {
            // The element carried by this OnNext signal.
            public readonly T Element;

            public OnNext(T element)
            {
                Element = element;
            }

            public override string ToString() => $"TestSubscriber.OnNext({Element})";
        }

        /// <summary>
        /// Recorded when the stream completes normally. Stateless, hence a singleton.
        /// </summary>
        public sealed class OnComplete: ISubscriberEvent
        {
            public static readonly OnComplete Instance = new OnComplete();
            private OnComplete() { }

            public override string ToString() => "TestSubscriber.OnComplete";
        }

        /// <summary>
        /// Recorded when the stream fails with an error.
        /// </summary>
        public struct OnError : ISubscriberEvent
        {
            // The exception that terminated the stream.
            public readonly Exception Cause;

            public OnError(Exception cause)
            {
                Cause = cause;
            }

            public override string ToString() => $"TestSubscriber.OnError({Cause.Message})";
        }

        #endregion

        /// <summary>
        /// Implementation of <see cref="ISubscriber{T}"/> that allows various assertions. All timeouts are dilated automatically,
        /// for more details about time dilation refer to <see cref="TestKit"/>.
/// </summary>
public class ManualProbe<T> : ISubscriber<T>
{
    private readonly TestKitBase _testKit;
    private readonly TestProbe _probe;

    internal ManualProbe(TestKitBase testKit)
    {
        _testKit = testKit;
        _probe = testKit.CreateTestProbe();
    }

    // Last subscription obtained via ExpectSubscription. Volatile because
    // reactive-streams signals may arrive on a different thread than the
    // test thread that later reads it (e.g. in ToStrict).
    private volatile ISubscription _subscription;

    // ISubscriber<T> implementation: every signal is forwarded to the
    // internal TestProbe as a message so the test thread can assert on it.
    public void OnSubscribe(ISubscription subscription) => _probe.Ref.Tell(new OnSubscribe(subscription));

    public void OnError(Exception cause) => _probe.Ref.Tell(new OnError(cause));

    public void OnComplete() => _probe.Ref.Tell(TestSubscriber.OnComplete.Instance);

    public void OnNext(T element) => _probe.Ref.Tell(new OnNext<T>(element));

    /// <summary>
    /// Expects and returns <see cref="ISubscription"/>.
    /// </summary>
    public ISubscription ExpectSubscription()
    {
        _subscription = _probe.ExpectMsg<OnSubscribe>().Subscription;
        return _subscription;
    }

    /// <summary>
    /// Expect and return <see cref="ISubscriberEvent"/> (any of: <see cref="OnSubscribe"/>, <see cref="OnNext"/>, <see cref="OnError"/> or <see cref="OnComplete"/>).
    /// </summary>
    public ISubscriberEvent ExpectEvent() => _probe.ExpectMsg<ISubscriberEvent>();

    /// <summary>
    /// Expect and return <see cref="ISubscriberEvent"/> (any of: <see cref="OnSubscribe"/>, <see cref="OnNext"/>, <see cref="OnError"/> or <see cref="OnComplete"/>).
    /// </summary>
    public ISubscriberEvent ExpectEvent(TimeSpan max) => _probe.ExpectMsg<ISubscriberEvent>(max);

    /// <summary>
    /// Fluent DSL. Expect and return <see cref="ISubscriberEvent"/> (any of: <see cref="OnSubscribe"/>, <see cref="OnNext"/>, <see cref="OnError"/> or <see cref="OnComplete"/>).
    /// </summary>
    public ManualProbe<T> ExpectEvent(ISubscriberEvent e)
    {
        _probe.ExpectMsg(e);
        return this;
    }

    /// <summary>
    /// Expect and return a stream element.
    /// </summary>
    public T ExpectNext()
    {
        // Uses the TestKit's dilated default single-expect timeout.
        return ExpectNext(_testKit.Dilated(_probe.TestKitSettings.SingleExpectDefault));
    }

    /// <summary>
    /// Expect and return a stream element during specified time or timeout.
    /// </summary>
    public T ExpectNext(TimeSpan timeout)
    {
        var t = _probe.RemainingOrDilated(timeout);
        // ReceiveOne returns null on timeout; anything other than OnNext<T> is a failure.
        switch (_probe.ReceiveOne(t))
        {
            case null: throw new Exception($"Expected OnNext(_), yet no element signaled during {timeout}");
            case OnNext<T> message: return message.Element;
            case var other: throw new Exception($"expected OnNext, found {other}");
        }
    }

    /// <summary>
    /// Fluent DSL. Expect a stream element.
    /// </summary>
    public ManualProbe<T> ExpectNext(T element)
    {
        _probe.ExpectMsg<OnNext<T>>(x => AssertEquals(x.Element, element, "Expected '{0}', but got '{1}'", element, x.Element));
        return this;
    }

    /// <summary>
    /// Fluent DSL. Expect a stream element during specified time or timeout.
    /// </summary>
    public ManualProbe<T> ExpectNext(TimeSpan timeout, T element)
    {
        _probe.ExpectMsg<OnNext<T>>(x => AssertEquals(x.Element, element, "Expected '{0}', but got '{1}'", element, x.Element), timeout);
        return this;
    }

    /// <summary>
    /// Fluent DSL. Expect a stream element during specified timeout.
    /// </summary>
    public ManualProbe<T> ExpectNext(T element, TimeSpan timeout)
    {
        _probe.ExpectMsg<OnNext<T>>(x => AssertEquals(x.Element, element, "Expected '{0}', but got '{1}'", element, x.Element), timeout);
        return this;
    }

    /// <summary>
    /// Fluent DSL. Expect multiple stream elements.
    /// </summary>
    public ManualProbe<T> ExpectNext(T e1, T e2, params T[] elems)
    {
        // Drain exactly the expected number of elements, then compare positionally.
        var len = elems.Length + 2;
        var e = ExpectNextN(len).ToArray();
        AssertEquals(e.Length, len, "expected to get {0} events, but got {1}", len, e.Length);
        AssertEquals(e[0], e1, "expected [0] element to be {0} but found {1}", e1, e[0]);
        AssertEquals(e[1], e2, "expected [1] element to be {0} but found {1}", e2, e[1]);
        for (var i = 0; i < elems.Length; i++)
        {
            var j = i + 2;
            AssertEquals(e[j], elems[i], "expected [{2}] element to be {0} but found {1}", elems[i], e[j], j);
        }
        return this;
    }

    /// <summary>
    /// FluentDSL. Expect multiple stream elements in arbitrary order.
/// </summary>
public ManualProbe<T> ExpectNextUnordered(T e1, T e2, params T[] elems)
{
    // Drain the expected number of elements, then compare as a set.
    var len = elems.Length + 2;
    var e = ExpectNextN(len).ToArray();
    AssertEquals(e.Length, len, "expected to get {0} events, but got {1}", len, e.Length);

    var expectedSet = new HashSet<T>(elems) { e1, e2 };
    expectedSet.ExceptWith(e);

    Assert(expectedSet.Count == 0, "unexpected elements [{0}] found in the result", string.Join(", ", expectedSet));
    return this;
}

/// <summary>
/// Expect and return the next <paramref name="n"/> stream elements.
/// </summary>
public IEnumerable<T> ExpectNextN(long n)
{
    var res = new List<T>((int)n);
    for (int i = 0; i < n; i++)
    {
        var next = _probe.ExpectMsg<OnNext<T>>();
        res.Add(next.Element);
    }

    return res;
}

/// <summary>
/// Fluent DSL. Expect the given elements to be signalled in order.
/// </summary>
public ManualProbe<T> ExpectNextN(IEnumerable<T> all)
{
    // Materialize once: "all" was previously re-enumerated for every expected
    // element (both for the loop and for the error-message hint).
    var expected = all.ToList();
    foreach (var x in expected)
        _probe.ExpectMsg<OnNext<T>>(y => AssertEquals(y.Element, x, "Expected one of ({0}), but got '{1}'", string.Join(", ", expected), y.Element));

    return this;
}

/// <summary>
/// Fluent DSL. Expect the given elements to be signalled in any order.
/// </summary>
public ManualProbe<T> ExpectNextUnorderedN(IEnumerable<T> all)
{
    var collection = new HashSet<T>(all);
    while (collection.Count > 0)
    {
        var next = ExpectNext();
        // BUGFIX: the hint previously read "(${string.Join(...)})" — the stray '$'
        // inside an already-interpolated string rendered a literal '$' in the message.
        Assert(collection.Contains(next), $"expected one of ({string.Join(", ", collection)}), but received {next}");
        collection.Remove(next);
    }

    return this;
}

/// <summary>
/// Fluent DSL. Expect completion.
/// </summary>
public ManualProbe<T> ExpectComplete()
{
    _probe.ExpectMsg<OnComplete>();
    return this;
}

/// <summary>
/// Expect and return the signalled <see cref="Exception"/>.
/// </summary>
public Exception ExpectError() => _probe.ExpectMsg<OnError>().Cause;

/// <summary>
/// Expect subscription to be followed immediately by an error signal. By default single demand will be signaled in order to wake up a possibly lazy upstream.
/// <seealso cref="ExpectSubscriptionAndError(bool)"/>
/// </summary>
public Exception ExpectSubscriptionAndError() => ExpectSubscriptionAndError(true);

/// <summary>
/// Expect subscription to be followed immediately by an error signal. Depending on the `signalDemand` parameter demand may be signaled
/// immediately after obtaining the subscription in order to wake up a possibly lazy upstream. You can disable this by setting the `signalDemand` parameter to `false`.
/// <seealso cref="ExpectSubscriptionAndError()"/>
/// </summary>
public Exception ExpectSubscriptionAndError(bool signalDemand)
{
    var sub = ExpectSubscription();
    if(signalDemand)
        sub.Request(1);

    return ExpectError();
}

/// <summary>
/// Fluent DSL. Expect subscription followed by immediate stream completion. By default single demand will be signaled in order to wake up a possibly lazy upstream
/// </summary>
/// <seealso cref="ExpectSubscriptionAndComplete(bool)"/>
public ManualProbe<T> ExpectSubscriptionAndComplete() => ExpectSubscriptionAndComplete(true);

/// <summary>
/// Fluent DSL. Expect subscription followed by immediate stream completion. Depending on the `signalDemand` parameter
/// demand may be signaled immediately after obtaining the subscription in order to wake up a possibly lazy upstream.
/// You can disable this by setting the `signalDemand` parameter to `false`.
/// </summary>
/// <seealso cref="ExpectSubscriptionAndComplete()"/>
public ManualProbe<T> ExpectSubscriptionAndComplete(bool signalDemand)
{
    var sub = ExpectSubscription();
    if (signalDemand)
        sub.Request(1);

    ExpectComplete();
    return this;
}

/// <summary>
/// Expect given next element or error signal, returning whichever was signaled.
/// </summary>
public object ExpectNextOrError()
{
    // Fish past unrelated messages until either an element or an error arrives.
    var message = _probe.FishForMessage(m => m is OnNext<T> || m is OnError, hint: "OnNext(_) or error");
    if (message is OnNext<T> next)
        return next.Element;

    return ((OnError) message).Cause;
}

/// <summary>
/// Fluent DSL. Expect given next element or error signal.
/// </summary>
public ManualProbe<T> ExpectNextOrError(T element, Exception cause)
{
    _probe.FishForMessage(
        m => m is OnNext<T> next && next.Element.Equals(element) || m is OnError error && error.Cause.Equals(cause),
        hint: $"OnNext({element}) or {cause.GetType().Name}");
    return this;
}

/// <summary>
/// Expect given next element or stream completion, returning whichever was signaled.
/// </summary>
public object ExpectNextOrComplete()
{
    var message = _probe.FishForMessage(m => m is OnNext<T> || m is OnComplete, hint: "OnNext(_) or OnComplete");
    if (message is OnNext<T> next)
        return next.Element;

    // On completion the OnComplete message itself is returned.
    return message;
}

/// <summary>
/// Fluent DSL. Expect given next element or stream completion.
/// </summary>
public ManualProbe<T> ExpectNextOrComplete(T element)
{
    _probe.FishForMessage(
        m => m is OnNext<T> next && next.Element.Equals(element) || m is OnComplete,
        hint: $"OnNext({element}) or OnComplete");
    return this;
}

/// <summary>
/// Fluent DSL. Same as <see cref="ExpectNoMsg(TimeSpan)"/>, but correctly treating the timeFactor.
/// </summary>
public ManualProbe<T> ExpectNoMsg()
{
    _probe.ExpectNoMsg();
    return this;
}

/// <summary>
/// Fluent DSL. Assert that no message is received for the specified time.
/// </summary> public ManualProbe<T> ExpectNoMsg(TimeSpan remaining) { _probe.ExpectNoMsg(remaining); return this; } /// <summary> /// Expect next element and test it with the <paramref name="predicate"/> /// </summary> /// <typeparam name="TOther">The <see cref="Type"/> of the expected message</typeparam> /// <param name="predicate">The <see cref="Predicate{T}"/> that is applied to the message</param> /// <returns>The next element</returns> public TOther ExpectNext<TOther>(Predicate<TOther> predicate) => _probe.ExpectMsg<OnNext<TOther>>(x => predicate(x.Element)).Element; /// <summary> /// Expect next element and test it with the <paramref name="predicate"/> /// </summary> /// <typeparam name="TOther">The <see cref="Type"/> of the expected message</typeparam> /// <param name="predicate">The <see cref="Predicate{T}"/> that is applied to the message</param> /// <returns>this</returns> public ManualProbe<T> MatchNext<TOther>(Predicate<TOther> predicate) { _probe.ExpectMsg<OnNext<TOther>>(x => predicate(x.Element)); return this; } public TOther ExpectEvent<TOther>(Func<ISubscriberEvent, TOther> func) => func(_probe.ExpectMsg<ISubscriberEvent>(hint: "message matching function")); /// <summary> /// Receive messages for a given duration or until one does not match a given partial function. /// </summary> public IEnumerable<TOther> ReceiveWhile<TOther>(TimeSpan? max = null, TimeSpan? idle = null, Func<object, TOther> filter = null, int msgs = int.MaxValue) where TOther : class { return _probe.ReceiveWhile(max, idle, filter, msgs); } /// <summary> /// Drains a given number of messages /// </summary> public IEnumerable<TOther> ReceiveWithin<TOther>(TimeSpan max, int messages = int.MaxValue) where TOther : class { return _probe.ReceiveWhile(max, max, msg => (msg as OnNext)?.Element as TOther, messages); } /// <summary> /// Execute code block while bounding its execution time between <paramref name="min"/> and /// <paramref name="max"/>. 
<see cref="Within{TOther}(TimeSpan,TimeSpan,Func{TOther})"/> blocks may be nested. /// All methods in this class which take maximum wait times are available in a version which implicitly uses /// the remaining time governed by the innermost enclosing <see cref="Within{TOther}(TimeSpan,TimeSpan,Func{TOther})"/> block. /// /// <para /> /// /// Note that the timeout is scaled using <see cref="TestKitBase.Dilated"/>, which uses the /// configuration entry "akka.test.timefactor", while the min Duration is not. /// /// <![CDATA[ /// var ret = probe.Within(Timespan.FromMilliseconds(50), () => /// { /// test.Tell("ping"); /// return ExpectMsg<string>(); /// }); /// ]]> /// </summary> /// <param name="min"></param> /// <param name="max"></param> /// <param name="execute"></param> /// <returns></returns> public TOther Within<TOther>(TimeSpan min, TimeSpan max, Func<TOther> execute) => _probe.Within(min, max, execute); /// <summary> /// Sane as calling Within(TimeSpan.Zero, max, function). /// </summary> public TOther Within<TOther>(TimeSpan max, Func<TOther> execute) => _probe.Within(max, execute); /// <summary> /// Attempt to drain the stream into a strict collection (by requesting <see cref="long.MaxValue"/> elements). /// </summary> /// <remarks> /// Use with caution: Be warned that this may not be a good idea if the stream is infinite or its elements are very large! /// </remarks> public IList<T> ToStrict(TimeSpan atMost) { var deadline = DateTime.UtcNow + atMost; // if no subscription was obtained yet, we expect it if (_subscription == null) ExpectSubscription(); _subscription.Request(long.MaxValue); var result = new List<T>(); while (true) { var e = ExpectEvent(TimeSpan.FromTicks(Math.Max(deadline.Ticks - DateTime.UtcNow.Ticks, 0))); if (e is OnError error) throw new ArgumentException( $"ToStrict received OnError while draining stream! 
Accumulated elements: ${string.Join(", ", result)}", error.Cause); if (e is OnComplete) break; if (e is OnNext<T> next) result.Add(next.Element); } return result; } private void Assert(bool predicate, string format, params object[] args) { if (!predicate) throw new Exception(string.Format(format, args)); } private void AssertEquals<T1, T2>(T1 x, T2 y, string format, params object[] args) { if (!Equals(x, y)) throw new Exception(string.Format(format, args)); } } /// <summary> /// Single subscription tracking for <see cref="ManualProbe{T}"/>. /// </summary> public class Probe<T> : ManualProbe<T> { private readonly Lazy<ISubscription> _subscription; internal Probe(TestKitBase testKit) : base(testKit) { _subscription = new Lazy<ISubscription>(ExpectSubscription); } /// <summary> /// Asserts that a subscription has been received or will be received /// </summary> public Probe<T> EnsureSubscription() { var _ = _subscription.Value; // initializes lazy val return this; } public Probe<T> Request(long n) { _subscription.Value.Request(n); return this; } public Probe<T> RequestNext(T element) { _subscription.Value.Request(1); ExpectNext(element); return this; } public Probe<T> Cancel() { _subscription.Value.Cancel(); return this; } /// <summary> /// Request and expect a stream element. /// </summary> public T RequestNext() { _subscription.Value.Request(1); return ExpectNext(); } /// <summary> /// Request and expect a stream element during the specified time or timeout. /// </summary> public T RequestNext(TimeSpan timeout) { _subscription.Value.Request(1); return ExpectNext(timeout); } } public static ManualProbe<T> CreateManualSubscriberProbe<T>(this TestKitBase testKit) { return new ManualProbe<T>(testKit); } public static Probe<T> CreateSubscriberProbe<T>(this TestKitBase testKit) { return new Probe<T>(testKit); } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Diagnostics; namespace System.Net.Sockets { public partial class SocketAsyncEventArgs : EventArgs, IDisposable { private IntPtr _acceptedFileDescriptor; private int _socketAddressSize; private SocketFlags _receivedFlags; internal int? SendPacketsDescriptorCount { get { return null; } } private void InitializeInternals() { // No-op for *nix. } private void FreeInternals(bool calledFromFinalizer) { // No-op for *nix. } private void SetupSingleBuffer() { // No-op for *nix. } private void SetupMultipleBuffers() { // No-op for *nix. } private void SetupSendPacketsElements() { // No-op for *nix. } private void InnerComplete() { // No-op for *nix. } private void InnerStartOperationAccept(bool userSuppliedBuffer) { _acceptedFileDescriptor = (IntPtr)(-1); } private void AcceptCompletionCallback(IntPtr acceptedFileDescriptor, byte[] socketAddress, int socketAddressSize, SocketError socketError) { _acceptedFileDescriptor = acceptedFileDescriptor; Debug.Assert(socketAddress == null || socketAddress == _acceptBuffer, $"Unexpected socketAddress: {socketAddress}"); _acceptAddressBufferCount = socketAddressSize; CompletionCallback(0, socketError); } internal unsafe SocketError DoOperationAccept(Socket socket, SafeCloseSocket handle, SafeCloseSocket acceptHandle, out int bytesTransferred) { if (_buffer != null) { throw new PlatformNotSupportedException(SR.net_sockets_accept_receive_notsupported); } Debug.Assert(acceptHandle == null, $"Unexpected acceptHandle: {acceptHandle}"); bytesTransferred = 0; return handle.AsyncContext.AcceptAsync(_acceptBuffer, _acceptAddressBufferCount / 2, AcceptCompletionCallback); } private void InnerStartOperationConnect() { // No-op for *nix. 
} private void ConnectCompletionCallback(SocketError socketError) { CompletionCallback(0, socketError); } internal unsafe SocketError DoOperationConnect(Socket socket, SafeCloseSocket handle, out int bytesTransferred) { bytesTransferred = 0; return handle.AsyncContext.ConnectAsync(_socketAddress.Buffer, _socketAddress.Size, ConnectCompletionCallback); } private void InnerStartOperationDisconnect() { throw new PlatformNotSupportedException(); } private void TransferCompletionCallback(int bytesTransferred, byte[] socketAddress, int socketAddressSize, SocketFlags receivedFlags, SocketError socketError) { Debug.Assert(socketAddress == null || socketAddress == _socketAddress.Buffer, $"Unexpected socketAddress: {socketAddress}"); _socketAddressSize = socketAddressSize; _receivedFlags = receivedFlags; CompletionCallback(bytesTransferred, socketError); } private void InnerStartOperationReceive() { _receivedFlags = System.Net.Sockets.SocketFlags.None; _socketAddressSize = 0; } internal unsafe SocketError DoOperationReceive(SafeCloseSocket handle, out SocketFlags flags, out int bytesTransferred) { SocketError errorCode; if (_buffer != null) { errorCode = handle.AsyncContext.ReceiveAsync(_buffer, _offset, _count, _socketFlags, TransferCompletionCallback); } else { errorCode = handle.AsyncContext.ReceiveAsync(_bufferList, _socketFlags, TransferCompletionCallback); } flags = _socketFlags; bytesTransferred = 0; return errorCode; } private void InnerStartOperationReceiveFrom() { _receivedFlags = System.Net.Sockets.SocketFlags.None; _socketAddressSize = 0; } internal unsafe SocketError DoOperationReceiveFrom(SafeCloseSocket handle, out SocketFlags flags, out int bytesTransferred) { SocketError errorCode; if (_buffer != null) { errorCode = handle.AsyncContext.ReceiveFromAsync(_buffer, _offset, _count, _socketFlags, _socketAddress.Buffer, _socketAddress.Size, TransferCompletionCallback); } else { errorCode = handle.AsyncContext.ReceiveFromAsync(_bufferList, _socketFlags, 
_socketAddress.Buffer, _socketAddress.Size, TransferCompletionCallback); } flags = _socketFlags; bytesTransferred = 0; return errorCode; } private void InnerStartOperationReceiveMessageFrom() { _receiveMessageFromPacketInfo = default(IPPacketInformation); _receivedFlags = System.Net.Sockets.SocketFlags.None; _socketAddressSize = 0; } private void ReceiveMessageFromCompletionCallback(int bytesTransferred, byte[] socketAddress, int socketAddressSize, SocketFlags receivedFlags, IPPacketInformation ipPacketInformation, SocketError errorCode) { Debug.Assert(_socketAddress != null, "Expected non-null _socketAddress"); Debug.Assert(socketAddress == null || _socketAddress.Buffer == socketAddress, $"Unexpected socketAddress: {socketAddress}"); _socketAddressSize = socketAddressSize; _receivedFlags = receivedFlags; _receiveMessageFromPacketInfo = ipPacketInformation; CompletionCallback(bytesTransferred, errorCode); } internal unsafe SocketError DoOperationReceiveMessageFrom(Socket socket, SafeCloseSocket handle, out int bytesTransferred) { bool isIPv4, isIPv6; Socket.GetIPProtocolInformation(socket.AddressFamily, _socketAddress, out isIPv4, out isIPv6); bytesTransferred = 0; return handle.AsyncContext.ReceiveMessageFromAsync(_buffer, _offset, _count, _socketFlags, _socketAddress.Buffer, _socketAddress.Size, isIPv4, isIPv6, ReceiveMessageFromCompletionCallback); } private void InnerStartOperationSend() { _receivedFlags = System.Net.Sockets.SocketFlags.None; _socketAddressSize = 0; } internal unsafe SocketError DoOperationSend(SafeCloseSocket handle, out int bytesTransferred) { SocketError errorCode; if (_buffer != null) { errorCode = handle.AsyncContext.SendAsync(_buffer, _offset, _count, _socketFlags, TransferCompletionCallback); } else { errorCode = handle.AsyncContext.SendAsync(_bufferList, _socketFlags, TransferCompletionCallback); } bytesTransferred = 0; return errorCode; } private void InnerStartOperationSendPackets() { throw new PlatformNotSupportedException(); } 
internal SocketError DoOperationSendPackets(Socket socket, SafeCloseSocket handle) { throw new PlatformNotSupportedException(); } private void InnerStartOperationSendTo() { _receivedFlags = System.Net.Sockets.SocketFlags.None; _socketAddressSize = 0; } internal SocketError DoOperationSendTo(SafeCloseSocket handle, out int bytesTransferred) { SocketError errorCode; if (_buffer != null) { errorCode = handle.AsyncContext.SendToAsync(_buffer, _offset, _count, _socketFlags, _socketAddress.Buffer, _socketAddress.Size, TransferCompletionCallback); } else { errorCode = handle.AsyncContext.SendToAsync(_bufferList, _socketFlags, _socketAddress.Buffer, _socketAddress.Size, TransferCompletionCallback); } bytesTransferred = 0; return errorCode; } internal void LogBuffer(int size) { if (_buffer != null) { SocketsEventSource.Dump(_buffer, _offset, size); } else if (_acceptBuffer != null) { SocketsEventSource.Dump(_acceptBuffer, 0, size); } } internal void LogSendPacketsBuffers(int size) { throw new PlatformNotSupportedException(); } private SocketError FinishOperationAccept(Internals.SocketAddress remoteSocketAddress) { System.Buffer.BlockCopy(_acceptBuffer, 0, remoteSocketAddress.Buffer, 0, _acceptAddressBufferCount); _acceptSocket = _currentSocket.CreateAcceptSocket( SafeCloseSocket.CreateSocket(_acceptedFileDescriptor), _currentSocket._rightEndPoint.Create(remoteSocketAddress)); return SocketError.Success; } private SocketError FinishOperationConnect() { // No-op for *nix. return SocketError.Success; } private unsafe int GetSocketAddressSize() { return _socketAddressSize; } private unsafe void FinishOperationReceiveMessageFrom() { // No-op for *nix. 
}

// SendPackets maps to the Windows-only TransmitPackets API; not supported on *nix.
private void FinishOperationSendPackets()
{
    throw new PlatformNotSupportedException();
}

// Common completion path for all async socket operations started on this instance.
private void CompletionCallback(int bytesTransferred, SocketError socketError)
{
    if (socketError == SocketError.Success)
    {
        FinishOperationSuccess(socketError, bytesTransferred, _receivedFlags);
    }
    else
    {
        // If the socket was disposed while the operation was in flight,
        // report OperationAborted instead of the raw error.
        if (_currentSocket.CleanedUp)
        {
            socketError = SocketError.OperationAborted;
        }

        FinishOperationAsyncFailure(socketError, bytesTransferred, _receivedFlags);
    }
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Globalization;
using System.Reflection;
using Xunit;

namespace System.ComponentModel.Tests
{
    public class ComponentResourceManagerTests
    {
        [Fact]
        public void Ctor_Default()
        {
            var resourceManager = new ComponentResourceManager();
            Assert.Empty(resourceManager.BaseName);
            Assert.False(resourceManager.IgnoreCase);
            Assert.NotNull(resourceManager.ResourceSetType);
        }

        [Theory]
        [InlineData(typeof(int))]
        public void Ctor_Type(Type type)
        {
            var resourceManager = new ComponentResourceManager(type);
            Assert.Equal("Int32", resourceManager.BaseName);
            Assert.False(resourceManager.IgnoreCase);
            Assert.NotNull(resourceManager.ResourceSetType);
        }

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public void ApplyResources_ValueExists_ReturnsExpected(bool ignoreCase)
        {
            var resourceManager = new ComponentResourceManager(typeof(global::Resources.TestResx))
            {
                IgnoreCase = ignoreCase
            };

            var value = new TestValue();
            resourceManager.ApplyResources(value, "Object");
            // Only public settable properties are populated; read-only and private ones stay null.
            Assert.Equal("ObjectGetSetProperty", value.GetSetProperty);
            Assert.Null(value.GetOnlyProperty);
            Assert.Null(value.GetPrivateProperty());

            if (!PlatformDetection.IsFullFramework) // https://github.com/dotnet/corefx/issues/22444 needs to be ported to netfx
            {
                resourceManager.ApplyResources(value, "Default");
                Assert.Equal("DefaultGetSetProperty", value.GetSetProperty);
                Assert.Null(value.GetOnlyProperty);
                Assert.Null(value.GetPrivateProperty());
            }
        }

        private class TestValue
        {
            public string GetSetProperty { get; set; }
            public string GetOnlyProperty { get; }
            private string PrivateProperty { get; set; }
            public string GetPrivateProperty() => PrivateProperty;
        }

        [Fact]
        public void ApplyResources_AmbiguousWithSameDeclaringType_ThrowsAmbiguousMatchException()
        {
            var resourceManager = new ComponentResourceManager(typeof(global::Resources.TestResx))
            {
                IgnoreCase = true
            };

            // Two properties differing only in case on the same type cannot be disambiguated.
            var value = new MulticasedClass();
            Assert.Throws<AmbiguousMatchException>(() => resourceManager.ApplyResources(value, "Object"));
        }

        private class MulticasedClass
        {
            public string GetSetProperty { get; set; }
            public string getsetproperty { get; set; }
        }

        public static IEnumerable<object[]> AmbiguousWithDifferentDeclaringType_TestData()
        {
            yield return new object[] { new MulticaseSubClass() };
            yield return new object[] { new MulticaseSubSubClass() };
        }

        [Theory]
        [MemberData(nameof(AmbiguousWithDifferentDeclaringType_TestData))]
        public void ApplyResources_AmbiguousWithDifferentDeclaringTypeInValueType_UsesMostDeclaredProperty<T>(T value) where T : MulticaseSubClass
        {
            var resourceManager = new ComponentResourceManager(typeof(global::Resources.TestResx))
            {
                IgnoreCase = true
            };

            // The most-derived declaration wins when the case-insensitive match is ambiguous
            // across the inheritance chain.
            resourceManager.ApplyResources(value, "Object");
            Assert.Null(value.GetSetProperty);
            Assert.Equal("ObjectGetSetProperty", value.getsetproperty);

            if (!PlatformDetection.IsFullFramework) // https://github.com/dotnet/corefx/issues/22444 needs to be ported to netfx
            {
                resourceManager.ApplyResources(value, "Default");
                Assert.Null(value.GetSetProperty);
                Assert.Equal("DefaultGetSetProperty", value.getsetproperty);
            }
        }

        public class MulticaseBaseClass
        {
            public string GetSetProperty { get; set; }
        }

        public class MulticaseSubClass : MulticaseBaseClass
        {
            public string getsetproperty { get; set; }
        }

        public class MulticaseSubSubClass : MulticaseSubClass { }

        [Fact]
        public void ApplyResources_IComponentWithNullSite_Success()
        {
            var resourceManager = new ComponentResourceManager(typeof(global::Resources.TestResx))
            {
                IgnoreCase = true
            };

            var value = new TestComponent();
            resourceManager.ApplyResources(value, "Object");
            Assert.Equal("ObjectGetSetProperty", value.GetSetProperty);

            if (!PlatformDetection.IsFullFramework) // https://github.com/dotnet/corefx/issues/22444 needs to be ported to netfx
            {
                resourceManager.ApplyResources(value, "Default");
                Assert.Equal("DefaultGetSetProperty", value.GetSetProperty);
            }
        }

        [Fact]
        public void ApplyResources_IComponentWithNonDesignModeSite_Success()
        {
            var resourceManager = new ComponentResourceManager(typeof(global::Resources.TestResx))
            {
                IgnoreCase = true
            };

            var value = new TestComponent
            {
                Site = new TestSite { DesignMode = false }
            };
            resourceManager.ApplyResources(value, "Object");
            Assert.Equal("ObjectGetSetProperty", value.GetSetProperty);

            if (!PlatformDetection.IsFullFramework) // https://github.com/dotnet/corefx/issues/22444 needs to be ported to netfx
            {
                resourceManager.ApplyResources(value, "Default");
                Assert.Equal("DefaultGetSetProperty", value.GetSetProperty);
            }
        }

        [Fact]
        public void ApplyResources_IComponentWithDesignModeSite_Success()
        {
            var resourceManager = new ComponentResourceManager(typeof(global::Resources.TestResx))
            {
                IgnoreCase = true
            };

            var value = new TestComponent
            {
                Site = new TestSite { DesignMode = true }
            };
            resourceManager.ApplyResources(value, "Object");
            Assert.Equal("ObjectGetSetProperty", value.GetSetProperty);

            if (!PlatformDetection.IsFullFramework) // https://github.com/dotnet/corefx/issues/22444 needs to be ported to netfx
            {
                resourceManager.ApplyResources(value, "Default");
                Assert.Equal("DefaultGetSetProperty", value.GetSetProperty);
            }
        }

        private class TestSite : ISite
        {
            public bool DesignMode { get; set; }

            public IComponent Component => throw new NotImplementedException();
            public IContainer Container => throw new NotImplementedException();
            public string Name
            {
                get => throw new NotImplementedException();
                set => throw new NotImplementedException();
            }

            public object GetService(Type serviceType) => null;
        }

        private class TestComponent : IComponent
        {
            public ISite Site { get; set; }

#pragma warning disable 0067
            public event EventHandler Disposed;
#pragma warning restore 0067

            public void Dispose() { }

            public string GetSetProperty { get; set; }
        }

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public void ApplyResources_NoSuchValue_Nop(bool ignoreCase)
        {
            var resourceManager = new ComponentResourceManager(typeof(global::Resources.TestResx))
            {
                IgnoreCase = ignoreCase
            };

            // Unknown object names are silently ignored in every overload.
            resourceManager.ApplyResources("Value", "ObjectName");
            resourceManager.ApplyResources("Value", "ObjectName", CultureInfo.CurrentUICulture);
            resourceManager.ApplyResources("Value", "ObjectName", CultureInfo.InvariantCulture);
        }

        [Fact]
        public void ApplyResources_NullValue_ThrowsArgumentNullException()
        {
            var resourceManager = new ComponentResourceManager();
            AssertExtensions.Throws<ArgumentNullException>("value", () => resourceManager.ApplyResources(null, "objectName"));
            AssertExtensions.Throws<ArgumentNullException>("value", () => resourceManager.ApplyResources(null, "objectName", CultureInfo.CurrentCulture));
        }

        [Fact]
        public void ApplyResources_NullObjectName_ThrowsArgumentNullException()
        {
            var resourceManager = new ComponentResourceManager();
            AssertExtensions.Throws<ArgumentNullException>("objectName", () => resourceManager.ApplyResources("value", null));
            AssertExtensions.Throws<ArgumentNullException>("objectName", () => resourceManager.ApplyResources("value", null, CultureInfo.CurrentCulture));
        }
    }
}
#region Copyright
/*Copyright (C) 2015 Wosad Inc

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#endregion

using System;
using System.Collections.Generic;
using System.Windows.Controls;
using Dynamo.Controls;
using Dynamo.Models;
using Dynamo.Wpf;
using ProtoCore.AST.AssociativeAST;
using Wosad.Common.CalculationLogger;
using Wosad.Dynamo.Common;
using System.Xml;
using Dynamo.Nodes;
using Dynamo.Graph;
using Dynamo.Graph.Nodes;

namespace Wosad.Steel.AISC.Combination
{
    /// <summary>
    /// Combined forces member type selection.
    /// UI node that maps the user's element / force-case / section-type choices
    /// to a combined-forces interaction-equation identifier
    /// ("H1", "H2", "H3" for members; "Linear", "Elliptical", "Plastic" for connections).
    /// </summary>
    [NodeName("Combined forces member type selection")]
    [NodeCategory("Wosad.Steel.AISC.Combination")]
    [NodeDescription("Combined forces member type selection")]
    [IsDesignScriptCompatible]
    public class CombinedForcesMemberTypeSelection : UiNodeBase
    {
        public CombinedForcesMemberTypeSelection()
        {
            OutPortData.Add(new PortData("CombinationCaseId", "Defines a type of interaction equation to be used"));
            RegisterAllPorts();
            SetDefaultParameters();
        }

        /// <summary>
        /// Seeds every selection property with its default value.
        /// Setting the display properties below also runs UpdateValuesAndView(),
        /// so CombinationCaseId is recomputed along the way and ends up as "H1"
        /// for the default Member / FlexureAndAxial / DoublyOrSinglySymmetric case.
        /// </summary>
        private void SetDefaultParameters()
        {
            ReportEntry = "";
            CombinationCaseId = "H1";
            MemberForceCase = "FlexureAndAxial";
            MemberSectionType = "DoublyOrSinglySymmetric";
            ElementType = "Member";
            ConnectionCombinationType = "Plastic";
        }

        /// <summary>
        /// Gets the type of this class, to be used in base class for reflection.
        /// </summary>
        protected override Type GetModelType()
        {
            return GetType();
        }

        #region Properties

        #region Input properties
        #endregion

        #region Output properties

        #region CombinationCaseId property

        // Backing field. Kept public (non-idiomatic) to preserve the node's
        // existing external interface; do not access it directly — use the property.
        public string _CombinationCaseId;

        /// <summary>
        /// CombinationCaseId property.
        /// </summary>
        /// <value>Defines a type of interaction equation to be used.</value>
        public string CombinationCaseId
        {
            get { return _CombinationCaseId; }
            set
            {
                _CombinationCaseId = value;
                RaisePropertyChanged("CombinationCaseId");
                // true presumably forces downstream re-execution — TODO confirm
                // against UiNodeBase/Dynamo's OnNodeModified contract.
                OnNodeModified(true);
            }
        }

        #endregion

        #region ReportEntry property

        // Backing field. Kept public (non-idiomatic) to preserve the node's
        // existing external interface; do not access it directly — use the property.
        public string reportEntry;

        /// <summary>
        /// Log property.
        /// </summary>
        /// <value>Calculation entries that can be converted into a report.</value>
        public string ReportEntry
        {
            get { return reportEntry; }
            set
            {
                reportEntry = value;
                RaisePropertyChanged("ReportEntry");
                OnNodeModified(true);
            }
        }

        #endregion

        #endregion

        #endregion

        #region Serialization

        /// <summary>
        /// Saves property values to be retained when opening the node.
        /// NOTE(review): "ReportEntry" is written here but never read back in
        /// DeserializeCore — confirm whether that asymmetry is intentional.
        /// </summary>
        protected override void SerializeCore(XmlElement nodeElement, SaveContext context)
        {
            base.SerializeCore(nodeElement, context);
            nodeElement.SetAttribute("ReportEntry", ReportEntry);
            nodeElement.SetAttribute("MemberForceCase", MemberForceCase);
            nodeElement.SetAttribute("MemberSectionType", MemberSectionType);
            nodeElement.SetAttribute("ElementType", ElementType);
            nodeElement.SetAttribute("ConnectionCombinationType", ConnectionCombinationType);
            nodeElement.SetAttribute("CombinationCaseId", CombinationCaseId);
        }

        /// <summary>
        /// Retrieves property values when opening the node. Missing attributes are
        /// simply skipped, so files saved by older versions still load with the
        /// defaults established in the constructor.
        /// </summary>
        protected override void DeserializeCore(XmlElement nodeElement, SaveContext context)
        {
            base.DeserializeCore(nodeElement, context);

            var MemberForceCase_attrib = nodeElement.Attributes["MemberForceCase"];
            if (MemberForceCase_attrib != null)
            {
                MemberForceCase = MemberForceCase_attrib.Value;
            }

            var MemberSectionType_attrib = nodeElement.Attributes["MemberSectionType"];
            if (MemberSectionType_attrib != null)
            {
                MemberSectionType = MemberSectionType_attrib.Value;
            }

            var ElementType_attrib = nodeElement.Attributes["ElementType"];
            if (ElementType_attrib != null)
            {
                ElementType = ElementType_attrib.Value;
            }

            var ConnectionCombinationType_attrib = nodeElement.Attributes["ConnectionCombinationType"];
            if (ConnectionCombinationType_attrib != null)
            {
                ConnectionCombinationType = ConnectionCombinationType_attrib.Value;
            }

            var attrib = nodeElement.Attributes["CombinationCaseId"];
            if (attrib != null)
            {
                CombinationCaseId = attrib.Value;
            }
        }

        #endregion

        #region Display parameters

        #region IsMember property

        private bool _IsMember;

        /// <summary>
        /// True when the selected element type is "Member"; drives view visibility.
        /// </summary>
        public bool IsMember
        {
            get { return _IsMember; }
            set
            {
                _IsMember = value;
                RaisePropertyChanged("IsMember");
            }
        }

        #endregion

        #region IsAxialAndFlexureMember property

        private bool _IsAxialAndFlexureMember;

        /// <summary>
        /// True when a member is loaded in combined flexure and axial force; drives view visibility.
        /// </summary>
        public bool IsAxialAndFlexureMember
        {
            get { return _IsAxialAndFlexureMember; }
            set
            {
                _IsAxialAndFlexureMember = value;
                RaisePropertyChanged("IsAxialAndFlexureMember");
            }
        }

        #endregion

        #region ElementType property

        private string _ElementType;

        /// <summary>
        /// Element type selection ("Member" or a connection); recomputes the combination case on change.
        /// </summary>
        public string ElementType
        {
            get { return _ElementType; }
            set
            {
                _ElementType = value;
                RaisePropertyChanged("ElementType");
                UpdateValuesAndView();
            }
        }

        #endregion

        #region MemberForceCase property

        private string _MemberForceCase;

        /// <summary>
        /// Member force case selection (e.g. "FlexureAndAxial"); recomputes the combination case on change.
        /// </summary>
        public string MemberForceCase
        {
            get { return _MemberForceCase; }
            set
            {
                _MemberForceCase = value;
                RaisePropertyChanged("MemberForceCase");
                UpdateValuesAndView();
            }
        }

        #endregion

        #region MemberSectionType property

        private string _MemberSectionType;

        /// <summary>
        /// Member section type selection (e.g. "DoublyOrSinglySymmetric"); recomputes the combination case on change.
        /// </summary>
        public string MemberSectionType
        {
            get { return _MemberSectionType; }
            set
            {
                _MemberSectionType = value;
                RaisePropertyChanged("MemberSectionType");
                UpdateValuesAndView();
            }
        }

        #endregion

        #region ConnectionCombinationType property

        private string _ConnectionCombinationType;

        /// <summary>
        /// Connection combination rule selection ("Linear", "Elliptical" or "Plastic");
        /// recomputes the combination case on change.
        /// </summary>
        public string ConnectionCombinationType
        {
            get { return _ConnectionCombinationType; }
            set
            {
                _ConnectionCombinationType = value;
                RaisePropertyChanged("ConnectionCombinationType");
                UpdateValuesAndView();
            }
        }

        #endregion

        /// <summary>
        /// Recomputes CombinationCaseId and the visibility flags from the current selections:
        ///   Member + FlexureAndAxial + DoublyOrSinglySymmetric -> "H1"
        ///   Member + FlexureAndAxial + any other section       -> "H2"
        ///   Member + any other force case                      -> "H3"
        ///   Non-member -> ConnectionCombinationType ("Linear"/"Elliptical"/"Plastic", defaulting to "Linear")
        /// </summary>
        private void UpdateValuesAndView()
        {
            if (ElementType == "Member")
            {
                IsMember = true;
                if (MemberForceCase == "FlexureAndAxial")
                {
                    IsAxialAndFlexureMember = true;
                    if (MemberSectionType == "DoublyOrSinglySymmetric")
                    {
                        CombinationCaseId = "H1";
                    }
                    else
                    {
                        CombinationCaseId = "H2";
                    }
                }
                else
                {
                    IsAxialAndFlexureMember = false;
                    CombinationCaseId = "H3";
                }
            }
            else
            {
                IsMember = false;
                switch (ConnectionCombinationType)
                {
                    case "Linear":
                        CombinationCaseId = "Linear";
                        break;
                    case "Elliptical":
                        CombinationCaseId = "Elliptical";
                        break;
                    case "Plastic":
                        CombinationCaseId = "Plastic";
                        break;
                    default:
                        // Unknown or unset selection: fall back to the linear rule.
                        CombinationCaseId = "Linear";
                        break;
                }
            }
        }

        #endregion

        /// <summary>
        /// Customization of WPF view in Dynamo UI: embeds the CombinationCaseIdView
        /// control into the node and binds it to the model.
        /// </summary>
        public class CombinedForcesMemberTypeSelectionViewCustomization : UiNodeBaseViewCustomization,
            INodeViewCustomization<CombinedForcesMemberTypeSelection>
        {
            public void CustomizeView(CombinedForcesMemberTypeSelection model, NodeView nodeView)
            {
                base.CustomizeView(model, nodeView);

                CombinationCaseIdView control = new CombinationCaseIdView();
                control.DataContext = model;
                nodeView.inputGrid.Children.Add(control);
                // Fix: base.CustomizeView(model, nodeView) was previously invoked a
                // second time here, repeating the base customization; one call suffices.
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;

namespace System.Xml
{
    // Helper that implements XmlReader's ReadContentAsBase64 / ReadContentAsBinHex /
    // ReadElementContentAsBase64 / ReadElementContentAsBinHex on top of the reader's
    // textual content. It is a small state machine: State.None between calls,
    // State.InReadContent while decoding attribute/text content in place, and
    // State.InReadElementContent while decoding the content of an element.
    // The two decoder instances are created lazily and reused across calls;
    // mixing Base64 and BinHex calls mid-stream is rejected.
    internal partial class ReadContentAsBinaryHelper
    {
        // Private enums
        private enum State
        {
            None,
            InReadContent,
            InReadElementContent,
        }

        // Fields
        private XmlReader _reader;
        private State _state;
        private int _valueOffset;     // position within the current value (chunk or string) already fed to the decoder
        private bool _isEnd;          // true once the last content node has been consumed

        private bool _canReadValueChunk;  // cached reader.CanReadValueChunk; selects the streaming path
        private char[] _valueChunk;       // scratch buffer for ReadValueChunk (only when _canReadValueChunk)
        private int _valueChunkLength;    // number of valid chars currently in _valueChunk

        private IncrementalReadDecoder _decoder;   // the active decoder; identity-compared against the two below
        private Base64Decoder _base64Decoder;
        private BinHexDecoder _binHexDecoder;

        // Constants
        private const int ChunkSize = 256;

        // Constructor
        internal ReadContentAsBinaryHelper(XmlReader reader)
        {
            _reader = reader;
            _canReadValueChunk = reader.CanReadValueChunk;

            if (_canReadValueChunk)
            {
                _valueChunk = new char[ChunkSize];
            }
        }

        // Static methods

        // Returns a helper bound to the given reader, reusing (and resetting) an
        // existing instance when one is supplied.
        internal static ReadContentAsBinaryHelper CreateOrReset(ReadContentAsBinaryHelper helper, XmlReader reader)
        {
            if (helper == null)
            {
                return new ReadContentAsBinaryHelper(reader);
            }
            else
            {
                helper.Reset();
                return helper;
            }
        }

        // Internal methods

        // Decodes up to count Base64 bytes of the current content into buffer[index..].
        // Returns the number of bytes written; 0 signals the end of the content.
        internal int ReadContentAsBase64(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(index));
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            switch (_state)
            {
                case State.None:
                    if (!_reader.CanReadContentAs())
                    {
                        throw _reader.CreateReadContentAsException("ReadContentAsBase64");
                    }
                    if (!Init())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    // if we have a correct decoder, go read
                    if (_decoder == _base64Decoder)
                    {
                        // read more binary data
                        return ReadContentAsBinary(buffer, index, count);
                    }
                    break;
                case State.InReadElementContent:
                    // element-content and plain-content binary reads must not be interleaved
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                default:
                    Debug.Assert(false);
                    return 0;
            }

            Debug.Assert(_state == State.InReadContent);

            // setup base64 decoder
            InitBase64Decoder();

            // read more binary data
            return ReadContentAsBinary(buffer, index, count);
        }

        // Decodes up to count BinHex bytes of the current content into buffer[index..].
        // Mirrors ReadContentAsBase64 with the BinHex decoder.
        internal int ReadContentAsBinHex(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(index));
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            switch (_state)
            {
                case State.None:
                    if (!_reader.CanReadContentAs())
                    {
                        throw _reader.CreateReadContentAsException("ReadContentAsBinHex");
                    }
                    if (!Init())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    // if we have a correct decoder, go read
                    if (_decoder == _binHexDecoder)
                    {
                        // read more binary data
                        return ReadContentAsBinary(buffer, index, count);
                    }
                    break;
                case State.InReadElementContent:
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                default:
                    Debug.Assert(false);
                    return 0;
            }

            Debug.Assert(_state == State.InReadContent);

            // setup binhex decoder
            InitBinHexDecoder();

            // read more binary data
            return ReadContentAsBinary(buffer, index, count);
        }

        // Decodes up to count Base64 bytes of the current element's content.
        // Must start positioned on an Element node; returns 0 at end of content.
        internal int ReadElementContentAsBase64(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(index));
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            switch (_state)
            {
                case State.None:
                    if (_reader.NodeType != XmlNodeType.Element)
                    {
                        throw _reader.CreateReadElementContentAsException("ReadElementContentAsBase64");
                    }
                    if (!InitOnElement())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                case State.InReadElementContent:
                    // if we have a correct decoder, go read
                    if (_decoder == _base64Decoder)
                    {
                        // read more binary data
                        return ReadElementContentAsBinary(buffer, index, count);
                    }
                    break;
                default:
                    Debug.Assert(false);
                    return 0;
            }

            Debug.Assert(_state == State.InReadElementContent);

            // setup base64 decoder
            InitBase64Decoder();

            // read more binary data
            return ReadElementContentAsBinary(buffer, index, count);
        }

        // Decodes up to count BinHex bytes of the current element's content.
        // Mirrors ReadElementContentAsBase64 with the BinHex decoder.
        internal int ReadElementContentAsBinHex(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(index));
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            switch (_state)
            {
                case State.None:
                    if (_reader.NodeType != XmlNodeType.Element)
                    {
                        throw _reader.CreateReadElementContentAsException("ReadElementContentAsBinHex");
                    }
                    if (!InitOnElement())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                case State.InReadElementContent:
                    // if we have a correct decoder, go read
                    if (_decoder == _binHexDecoder)
                    {
                        // read more binary data
                        return ReadElementContentAsBinary(buffer, index, count);
                    }
                    break;
                default:
                    Debug.Assert(false);
                    return 0;
            }

            Debug.Assert(_state == State.InReadElementContent);

            // setup binhex decoder
            InitBinHexDecoder();

            // read more binary data
            return ReadElementContentAsBinary(buffer, index, count);
        }

        // Abandons an in-progress binary read: skips any remaining content nodes,
        // moves past the closing EndElement when in element-content mode, and resets.
        internal void Finish()
        {
            if (_state != State.None)
            {
                while (MoveToNextContentNode(true))
                {
                }
                if (_state == State.InReadElementContent)
                {
                    if (_reader.NodeType != XmlNodeType.EndElement)
                    {
                        throw new XmlException(SR.Xml_InvalidNodeType, _reader.NodeType.ToString(), _reader as IXmlLineInfo);
                    }
                    // move off the EndElement
                    _reader.Read();
                }
            }

            Reset();
        }

        // Returns the helper to its idle state. Note: the decoder instances are
        // deliberately kept; they are reset lazily by InitBase64Decoder/InitBinHexDecoder
        // on the next read.
        internal void Reset()
        {
            _state = State.None;
            _isEnd = false;
            _valueOffset = 0;
        }

        // Private methods

        // Positions the reader on a content node and enters InReadContent.
        // Returns false when no content node is available.
        private bool Init()
        {
            // make sure we are on a content node
            if (!MoveToNextContentNode(false))
            {
                return false;
            }

            _state = State.InReadContent;
            _isEnd = false;
            return true;
        }

        // Moves from an Element node into its content and enters InReadElementContent.
        // Returns false for an empty element or an element with no textual content.
        private bool InitOnElement()
        {
            Debug.Assert(_reader.NodeType == XmlNodeType.Element);
            bool isEmpty = _reader.IsEmptyElement;

            // move to content or off the empty element
            _reader.Read();
            if (isEmpty)
            {
                return false;
            }

            // make sure we are on a content node
            if (!MoveToNextContentNode(false))
            {
                if (_reader.NodeType != XmlNodeType.EndElement)
                {
                    throw new XmlException(SR.Xml_InvalidNodeType, _reader.NodeType.ToString(), _reader as IXmlLineInfo);
                }
                // move off end element
                _reader.Read();
                return false;
            }

            _state = State.InReadElementContent;
            _isEnd = false;
            return true;
        }

        // Makes the (lazily created, reusable) Base64 decoder the active decoder.
        private void InitBase64Decoder()
        {
            if (_base64Decoder == null)
            {
                _base64Decoder = new Base64Decoder();
            }
            else
            {
                _base64Decoder.Reset();
            }

            _decoder = _base64Decoder;
        }

        // Makes the (lazily created, reusable) BinHex decoder the active decoder.
        private void InitBinHexDecoder()
        {
            if (_binHexDecoder == null)
            {
                _binHexDecoder = new BinHexDecoder();
            }
            else
            {
                _binHexDecoder.Reset();
            }

            _decoder = _binHexDecoder;
        }

        // Core decode loop: feeds textual content into the active decoder until the
        // caller's buffer is full or the content is exhausted. _valueOffset tracks how
        // much of the current value has already been consumed, so a partially decoded
        // node is resumed correctly on the next call.
        private int ReadContentAsBinary(byte[] buffer, int index, int count)
        {
            Debug.Assert(_decoder != null);

            if (_isEnd)
            {
                Reset();
                return 0;
            }

            _decoder.SetNextOutputBuffer(buffer, index, count);

            for (; ;)
            {
                // use streaming ReadValueChunk if the reader supports it
                if (_canReadValueChunk)
                {
                    for (; ;)
                    {
                        if (_valueOffset < _valueChunkLength)
                        {
                            int decodedCharsCount = _decoder.Decode(_valueChunk, _valueOffset, _valueChunkLength - _valueOffset);
                            _valueOffset += decodedCharsCount;
                        }
                        if (_decoder.IsFull)
                        {
                            return _decoder.DecodedCount;
                        }
                        Debug.Assert(_valueOffset == _valueChunkLength);
                        if ((_valueChunkLength = _reader.ReadValueChunk(_valueChunk, 0, ChunkSize)) == 0)
                        {
                            break;
                        }
                        _valueOffset = 0;
                    }
                }
                else
                {
                    // read what is reader.Value
                    string value = _reader.Value;
                    int decodedCharsCount = _decoder.Decode(value, _valueOffset, value.Length - _valueOffset);
                    _valueOffset += decodedCharsCount;

                    if (_decoder.IsFull)
                    {
                        return _decoder.DecodedCount;
                    }
                }

                _valueOffset = 0;

                // move to next textual node in the element content; throw on sub elements
                if (!MoveToNextContentNode(true))
                {
                    _isEnd = true;
                    return _decoder.DecodedCount;
                }
            }
        }

        // Element-content wrapper around ReadContentAsBinary: once the content is
        // exhausted it insists on a closing EndElement, steps past it, and returns
        // to State.None so a new read can begin.
        private int ReadElementContentAsBinary(byte[] buffer, int index, int count)
        {
            if (count == 0)
            {
                return 0;
            }
            // read binary
            int decoded = ReadContentAsBinary(buffer, index, count);
            if (decoded > 0)
            {
                return decoded;
            }

            // if 0 bytes returned check if we are on a closing EndElement, throw exception if not
            if (_reader.NodeType != XmlNodeType.EndElement)
            {
                throw new XmlException(SR.Xml_InvalidNodeType, _reader.NodeType.ToString(), _reader as IXmlLineInfo);
            }

            // move off the EndElement
            _reader.Read();
            _state = State.None;
            return 0;
        }

        // Advances to the next textual content node (Text/Whitespace/SignificantWhitespace/CDATA),
        // skipping comments, PIs and end-entity nodes and expanding entity references when possible.
        // When moveIfOnContentNode is false, standing on a content node counts as success without
        // moving; when true, the current content node is consumed first. Returns false on any
        // non-content node (e.g. a sub-element or EndElement) or at end of input.
        private bool MoveToNextContentNode(bool moveIfOnContentNode)
        {
            do
            {
                switch (_reader.NodeType)
                {
                    case XmlNodeType.Attribute:
                        // attribute value reads never navigate; succeed only when not asked to move
                        return !moveIfOnContentNode;
                    case XmlNodeType.Text:
                    case XmlNodeType.Whitespace:
                    case XmlNodeType.SignificantWhitespace:
                    case XmlNodeType.CDATA:
                        if (!moveIfOnContentNode)
                        {
                            return true;
                        }
                        break;
                    case XmlNodeType.ProcessingInstruction:
                    case XmlNodeType.Comment:
                    case XmlNodeType.EndEntity:
                        // skip comments, pis and end entity nodes
                        break;
                    case XmlNodeType.EntityReference:
                        if (_reader.CanResolveEntity)
                        {
                            _reader.ResolveEntity();
                            break;
                        }
                        goto default;
                    default:
                        return false;
                }
                moveIfOnContentNode = false;
            } while (_reader.Read());
            return false;
        }
    }
}
// ==++== // // Copyright (c) Microsoft Corporation. All rights reserved. // // ==--== /*============================================================ ** ** Class: List ** ** Purpose: Implements a generic, dynamically sized list as an ** array. ** ** ===========================================================*/ namespace System.Collections.Generic { using System; using System.Diagnostics; using System.Collections.ObjectModel; ////using System.Security.Permissions; // Implements a variable-size List that uses an array of objects to store the // elements. A List has a capacity, which is the allocated length // of the internal array. As elements are added to a List, the capacity // of the List is automatically increased as required by reallocating the // internal array. // ////[DebuggerTypeProxy( typeof( Mscorlib_CollectionDebugView<> ) )] ////[DebuggerDisplay( "Count = {Count}" )] [Serializable] public class List<T> : IList<T>, System.Collections.IList { private const int cDefaultCapacity = 4; static readonly T[] sEmptyArray = new T[0]; private T[] m_items; private int m_size; private int m_version; [NonSerialized] private Object m_syncRoot; // Constructs a List. The list is initially empty and has a capacity // of zero. Upon adding the first element to the list the capacity is // increased to 16, and then increased in multiples of two as required. public List() { m_items = sEmptyArray; } // Constructs a List with a given initial capacity. The list is // initially empty, but will have room for the given number of elements // before any reallocations are required. // public List( int capacity ) { if(capacity < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.capacity, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum ); } m_items = new T[capacity]; } // Constructs a List, copying the contents of the given collection. The // size and capacity of the new list will both be equal to the size of the // given collection. 
// public List( IEnumerable<T> collection ) { if(collection == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.collection ); } ICollection<T> c = collection as ICollection<T>; if(c != null) { int count = c.Count; m_items = new T[count]; c.CopyTo( m_items, 0 ); m_size = count; } else { m_size = 0; m_items = new T[cDefaultCapacity]; using(IEnumerator<T> en = collection.GetEnumerator()) { while(en.MoveNext()) { Add( en.Current ); } } } } // Gets and sets the capacity of this list. The capacity is the size of // the internal array used to hold items. When set, the internal // array of the list is reallocated to the given capacity. // public int Capacity { get { return m_items.Length; } set { if(value != m_items.Length) { if(value < m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.value, ExceptionResource.ArgumentOutOfRange_SmallCapacity ); } if(value > 0) { T[] newItems = new T[value]; if(m_size > 0) { Array.Copy( m_items, 0, newItems, 0, m_size ); } m_items = newItems; } else { m_items = sEmptyArray; } } } } // Read-only property describing how many elements are in the List. public int Count { get { return m_size; } } bool System.Collections.IList.IsFixedSize { get { return false; } } // Is this List read-only? bool ICollection<T>.IsReadOnly { get { return false; } } bool System.Collections.IList.IsReadOnly { get { return false; } } // Is this List synchronized (thread-safe)? bool System.Collections.ICollection.IsSynchronized { get { return false; } } // Synchronization root for this object. Object System.Collections.ICollection.SyncRoot { get { if(m_syncRoot == null) { System.Threading.Interlocked.CompareExchange( ref m_syncRoot, new Object(), null ); } return m_syncRoot; } } // Sets or Gets the element at the given index. 
// public T this[int index] { get { // Fllowing trick can reduce the range check by one if((uint)index >= (uint)m_size) { ThrowHelper.ThrowArgumentOutOfRangeException(); } return m_items[index]; } set { if((uint)index >= (uint)m_size) { ThrowHelper.ThrowArgumentOutOfRangeException(); } m_items[index] = value; m_version++; } } private static bool IsCompatibleObject( object value ) { // Non-null values are fine. Only accept nulls if T is a class or Nullable<U>. // Note that default(T) is not equal to null for value types except when T is Nullable<U>. return ((value is T) || (value == null && default( T ) == null)); } Object System.Collections.IList.this[int index] { get { return this[index]; } set { ThrowHelper.IfNullAndNullsAreIllegalThenThrow<T>( value, ExceptionArgument.value ); try { this[index] = (T)value; } catch(InvalidCastException) { ThrowHelper.ThrowWrongValueTypeArgumentException( value, typeof( T ) ); } } } // Adds the given object to the end of this list. The size of the list is // increased by one. If required, the capacity of the list is doubled // before adding the new element. // public void Add( T item ) { if(m_size == m_items.Length) { EnsureCapacity( m_size + 1 ); } m_items[m_size++] = item; m_version++; } int System.Collections.IList.Add( Object item ) { ThrowHelper.IfNullAndNullsAreIllegalThenThrow<T>( item, ExceptionArgument.item ); try { Add( (T)item ); } catch(InvalidCastException) { ThrowHelper.ThrowWrongValueTypeArgumentException( item, typeof( T ) ); } return Count - 1; } // Adds the elements of the given collection to the end of this list. If // required, the capacity of the list is increased to twice the previous // capacity or the new size, whichever is larger. // public void AddRange( IEnumerable<T> collection ) { InsertRange( m_size, collection ); } public ReadOnlyCollection<T> AsReadOnly() { return new ReadOnlyCollection<T>( this ); } // Searches a section of the list for a given element using a binary search // algorithm. 
Elements of the list are compared to the search value using // the given IComparer interface. If comparer is null, elements of // the list are compared to the search value using the IComparable // interface, which in that case must be implemented by all elements of the // list and the given search value. This method assumes that the given // section of the list is already sorted; if this is not the case, the // result will be incorrect. // // The method returns the index of the given value in the list. If the // list does not contain the given value, the method returns a negative // integer. The bitwise complement operator (~) can be applied to a // negative result to produce the index of the first element (if any) that // is larger than the given search value. This is also the index at which // the search value should be inserted into the list in order for the list // to remain sorted. // // The method uses the Array.BinarySearch method to perform the // search. // public int BinarySearch( int index, int count, T item, IComparer<T> comparer ) { if(index < 0 || count < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( (index < 0 ? ExceptionArgument.index : ExceptionArgument.count), ExceptionResource.ArgumentOutOfRange_NeedNonNegNum ); } if(m_size - index < count) { ThrowHelper.ThrowArgumentException( ExceptionResource.Argument_InvalidOffLen ); } return Array.BinarySearch<T>( m_items, index, count, item, comparer ); } public int BinarySearch( T item ) { return BinarySearch( 0, Count, item, null ); } public int BinarySearch( T item, IComparer<T> comparer ) { return BinarySearch( 0, Count, item, comparer ); } // Clears the contents of List. public void Clear() { if(m_size > 0) { Array.Clear( m_items, 0, m_size ); // Don't need to doc this but we clear the elements so that the gc can reclaim the references. m_size = 0; } m_version++; } // Contains returns true if the specified element is in the List. // It does a linear, O(n) search. 
Equality is determined by calling // item.Equals(). // public bool Contains( T item ) { if((Object)item == null) { for(int i = 0; i < m_size; i++) { if((Object)m_items[i] == null) { return true; } } return false; } else { EqualityComparer<T> c = EqualityComparer<T>.Default; for(int i = 0; i < m_size; i++) { if(c.Equals( m_items[i], item )) return true; } return false; } } bool System.Collections.IList.Contains( Object item ) { if(IsCompatibleObject( item )) { return Contains( (T)item ); } return false; } public List<TOutput> ConvertAll<TOutput>( Converter<T, TOutput> converter ) { if(converter == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.converter ); } List<TOutput> list = new List<TOutput>( m_size ); for(int i = 0; i < m_size; i++) { list.m_items[i] = converter( m_items[i] ); } list.m_size = m_size; return list; } // Copies this List into array, which must be of a // compatible array type. // public void CopyTo( T[] array ) { CopyTo( array, 0 ); } // Copies this List into array, which must be of a // compatible array type. // void System.Collections.ICollection.CopyTo( Array array, int arrayIndex ) { if((array != null) && (array.Rank != 1)) { ThrowHelper.ThrowArgumentException( ExceptionResource.Arg_RankMultiDimNotSupported ); } try { // Array.Copy will check for NULL. Array.Copy( m_items, 0, array, arrayIndex, m_size ); } catch(ArrayTypeMismatchException) { ThrowHelper.ThrowArgumentException( ExceptionResource.Argument_InvalidArrayType ); } } // Copies a section of this list to the given array at the given index. // // The method uses the Array.Copy method to copy the elements. // public void CopyTo( int index, T[] array, int arrayIndex, int count ) { if(m_size - index < count) { ThrowHelper.ThrowArgumentException( ExceptionResource.Argument_InvalidOffLen ); } // Delegate rest of error checking to Array.Copy. 
Array.Copy( m_items, index, array, arrayIndex, count ); } public void CopyTo( T[] array, int arrayIndex ) { // Delegate rest of error checking to Array.Copy. Array.Copy( m_items, 0, array, arrayIndex, m_size ); } // Ensures that the capacity of this list is at least the given minimum // value. If the currect capacity of the list is less than min, the // capacity is increased to twice the current capacity or to min, // whichever is larger. private void EnsureCapacity( int min ) { if(m_items.Length < min) { int newCapacity = m_items.Length == 0 ? cDefaultCapacity : m_items.Length * 2; if(newCapacity < min) newCapacity = min; Capacity = newCapacity; } } public bool Exists( Predicate<T> match ) { return FindIndex( match ) != -1; } public T Find( Predicate<T> match ) { if(match == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } for(int i = 0; i < m_size; i++) { if(match( m_items[i] )) { return m_items[i]; } } return default( T ); } public List<T> FindAll( Predicate<T> match ) { if(match == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } List<T> list = new List<T>(); for(int i = 0; i < m_size; i++) { if(match( m_items[i] )) { list.Add( m_items[i] ); } } return list; } public int FindIndex( Predicate<T> match ) { return FindIndex( 0, m_size, match ); } public int FindIndex( int startIndex, Predicate<T> match ) { return FindIndex( startIndex, m_size - startIndex, match ); } public int FindIndex( int startIndex, int count, Predicate<T> match ) { if((uint)startIndex > (uint)m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index ); } if(count < 0 || startIndex > m_size - count) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count ); } if(match == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } int endIndex = startIndex + count; for(int i = startIndex; i < 
endIndex; i++) { if(match( m_items[i] )) return i; } return -1; } public T FindLast( Predicate<T> match ) { if(match == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } for(int i = m_size - 1; i >= 0; i--) { if(match( m_items[i] )) { return m_items[i]; } } return default( T ); } public int FindLastIndex( Predicate<T> match ) { return FindLastIndex( m_size - 1, m_size, match ); } public int FindLastIndex( int startIndex, Predicate<T> match ) { return FindLastIndex( startIndex, startIndex + 1, match ); } public int FindLastIndex( int startIndex, int count, Predicate<T> match ) { if(match == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } if(m_size == 0) { // Special case for 0 length List if(startIndex != -1) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index ); } } else { // Make sure we're not out of range if((uint)startIndex >= (uint)m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index ); } } // 2nd have of this also catches when startIndex == MAXINT, so MAXINT - 0 + 1 == -1, which is < 0. if(count < 0 || startIndex - count + 1 < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count ); } int endIndex = startIndex - count; for(int i = startIndex; i > endIndex; i--) { if(match( m_items[i] )) { return i; } } return -1; } public void ForEach( Action<T> action ) { if(action == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } for(int i = 0; i < m_size; i++) { action( m_items[i] ); } } // Returns an enumerator for this list with the given // permission for removal of elements. If modifications made to the list // while an enumeration is in progress, the MoveNext and // GetObject methods of the enumerator will throw an exception. 
// public Enumerator GetEnumerator() { return new Enumerator( this ); } /// <internalonly/> IEnumerator<T> IEnumerable<T>.GetEnumerator() { return new Enumerator( this ); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return new Enumerator( this ); } public List<T> GetRange( int index, int count ) { if(index < 0 || count < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( (index < 0 ? ExceptionArgument.index : ExceptionArgument.count), ExceptionResource.ArgumentOutOfRange_NeedNonNegNum ); } if(m_size - index < count) { ThrowHelper.ThrowArgumentException( ExceptionResource.Argument_InvalidOffLen ); } List<T> list = new List<T>( count ); Array.Copy( m_items, index, list.m_items, 0, count ); list.m_size = count; return list; } // Returns the index of the first occurrence of a given value in a range of // this list. The list is searched forwards from beginning to end. // The elements of the list are compared to the given value using the // Object.Equals method. // // This method uses the Array.IndexOf method to perform the // search. // public int IndexOf( T item ) { return Array.IndexOf( m_items, item, 0, m_size ); } int System.Collections.IList.IndexOf( Object item ) { if(IsCompatibleObject( item )) { return IndexOf( (T)item ); } return -1; } // Returns the index of the first occurrence of a given value in a range of // this list. The list is searched forwards, starting at index // index and ending at count number of elements. The // elements of the list are compared to the given value using the // Object.Equals method. // // This method uses the Array.IndexOf method to perform the // search. // public int IndexOf( T item, int index ) { if(index > m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index ); } return Array.IndexOf( m_items, item, index, m_size - index ); } // Returns the index of the first occurrence of a given value in a range of // this list. 
The list is searched forwards, starting at index // index and upto count number of elements. The // elements of the list are compared to the given value using the // Object.Equals method. // // This method uses the Array.IndexOf method to perform the // search. // public int IndexOf( T item, int index, int count ) { if(index > m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index ); } if(count < 0 || index > m_size - count) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count ); } return Array.IndexOf( m_items, item, index, count ); } // Inserts an element into this list at a given index. The size of the list // is increased by one. If required, the capacity of the list is doubled // before inserting the new element. // public void Insert( int index, T item ) { // Note that insertions at the end are legal. if((uint)index > (uint)m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_ListInsert ); } if(m_size == m_items.Length) { EnsureCapacity( m_size + 1 ); } if(index < m_size) { Array.Copy( m_items, index, m_items, index + 1, m_size - index ); } m_items[index] = item; m_size++; m_version++; } void System.Collections.IList.Insert( int index, Object item ) { ThrowHelper.IfNullAndNullsAreIllegalThenThrow<T>( item, ExceptionArgument.item ); try { Insert( index, (T)item ); } catch(InvalidCastException) { ThrowHelper.ThrowWrongValueTypeArgumentException( item, typeof( T ) ); } } // Inserts the elements of the given collection at a given index. If // required, the capacity of the list is increased to twice the previous // capacity or the new size, whichever is larger. Ranges may be added // to the end of the list by setting index to the List's size. 
// public void InsertRange( int index, IEnumerable<T> collection ) { if(collection == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.collection ); } if((uint)index > (uint)m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index ); } ICollection<T> c = collection as ICollection<T>; if(c != null) { // if collection is ICollection<T> int count = c.Count; if(count > 0) { EnsureCapacity( m_size + count ); if(index < m_size) { Array.Copy( m_items, index, m_items, index + count, m_size - index ); } // If we're inserting a List into itself, we want to be able to deal with that. if(this == c) { // Copy first part of _items to insert location Array.Copy( m_items, 0 , m_items, index , index ); // Copy last part of _items back to inserted location Array.Copy( m_items, index + count, m_items, index * 2, m_size - index ); } else { T[] itemsToInsert = new T[count]; c.CopyTo( itemsToInsert, 0 ); itemsToInsert.CopyTo( m_items, index ); } m_size += count; } } else { using(IEnumerator<T> en = collection.GetEnumerator()) { while(en.MoveNext()) { Insert( index++, en.Current ); } } } m_version++; } // Returns the index of the last occurrence of a given value in a range of // this list. The list is searched backwards, starting at the end // and ending at the first element in the list. The elements of the list // are compared to the given value using the Object.Equals method. // // This method uses the Array.LastIndexOf method to perform the // search. // public int LastIndexOf( T item ) { return LastIndexOf( item, m_size - 1, m_size ); } // Returns the index of the last occurrence of a given value in a range of // this list. The list is searched backwards, starting at index // index and ending at the first element in the list. The // elements of the list are compared to the given value using the // Object.Equals method. // // This method uses the Array.LastIndexOf method to perform the // search. 
// public int LastIndexOf( T item, int index ) { if(index >= m_size) { ThrowHelper.ThrowArgumentOutOfRangeException( ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index ); } return LastIndexOf( item, index, index + 1 ); } // Returns the index of the last occurrence of a given value in a range of // this list. The list is searched backwards, starting at index // index and upto count elements. The elements of // the list are compared to the given value using the Object.Equals // method. // // This method uses the Array.LastIndexOf method to perform the // search. // public int LastIndexOf( T item, int index, int count ) { if(m_size == 0) { return -1; } if(index < 0 || count < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( (index < 0 ? ExceptionArgument.index : ExceptionArgument.count), ExceptionResource.ArgumentOutOfRange_NeedNonNegNum ); } if(index >= m_size || count > index + 1) { ThrowHelper.ThrowArgumentOutOfRangeException( (index >= m_size ? ExceptionArgument.index : ExceptionArgument.count), ExceptionResource.ArgumentOutOfRange_BiggerThanCollection ); } return Array.LastIndexOf( m_items, item, index, count ); } // Removes the element at the given index. The size of the list is // decreased by one. // public bool Remove( T item ) { int index = IndexOf( item ); if(index >= 0) { RemoveAt( index ); return true; } return false; } void System.Collections.IList.Remove( Object item ) { if(IsCompatibleObject( item )) { Remove( (T)item ); } } // This method removes all items which matches the predicate. // The complexity is O(n). public int RemoveAll( Predicate<T> match ) { if(match == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } int freeIndex = 0; // the first free slot in items array // Find the first item which needs to be removed. 
while(freeIndex < m_size && !match( m_items[freeIndex] )) { freeIndex++; } if(freeIndex >= m_size) { return 0; } int current = freeIndex + 1; while(current < m_size) { // Find the first item which needs to be kept. while(current < m_size && match( m_items[current] )) { current++; } if(current < m_size) { // copy item to the free slot. m_items[freeIndex++] = m_items[current++]; } } Array.Clear( m_items, freeIndex, m_size - freeIndex ); int result = m_size - freeIndex; m_size = freeIndex; m_version++; return result; } // Removes the element at the given index. The size of the list is // decreased by one. // public void RemoveAt( int index ) { if((uint)index >= (uint)m_size) { ThrowHelper.ThrowArgumentOutOfRangeException(); } m_size--; if(index < m_size) { Array.Copy( m_items, index + 1, m_items, index, m_size - index ); } m_items[m_size] = default( T ); m_version++; } // Removes a range of elements from this list. // public void RemoveRange( int index, int count ) { if(index < 0 || count < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( (index < 0 ? ExceptionArgument.index : ExceptionArgument.count), ExceptionResource.ArgumentOutOfRange_NeedNonNegNum ); } if(m_size - index < count) { ThrowHelper.ThrowArgumentException( ExceptionResource.Argument_InvalidOffLen ); } if(count > 0) { int i = m_size; m_size -= count; if(index < m_size) { Array.Copy( m_items, index + count, m_items, index, m_size - index ); } Array.Clear( m_items, m_size, count ); m_version++; } } // Reverses the elements in this list. public void Reverse() { Reverse( 0, Count ); } // Reverses the elements in a range of this list. Following a call to this // method, an element in the range given by index and count // which was previously located at index i will now be located at // index index + (index + count - i - 1). // // This method uses the Array.Reverse method to reverse the // elements. 
// public void Reverse( int index, int count ) { if(index < 0 || count < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( (index < 0 ? ExceptionArgument.index : ExceptionArgument.count), ExceptionResource.ArgumentOutOfRange_NeedNonNegNum ); } if(m_size - index < count) { ThrowHelper.ThrowArgumentException( ExceptionResource.Argument_InvalidOffLen ); } Array.Reverse( m_items, index, count ); m_version++; } // Sorts the elements in this list. Uses the default comparer and // Array.Sort. public void Sort() { Sort( 0, Count, null ); } // Sorts the elements in this list. Uses Array.Sort with the // provided comparer. public void Sort( IComparer<T> comparer ) { Sort( 0, Count, comparer ); } // Sorts the elements in a section of this list. The sort compares the // elements to each other using the given IComparer interface. If // comparer is null, the elements are compared to each other using // the IComparable interface, which in that case must be implemented by all // elements of the list. // // This method uses the Array.Sort method to sort the elements. // public void Sort( int index, int count, IComparer<T> comparer ) { if(index < 0 || count < 0) { ThrowHelper.ThrowArgumentOutOfRangeException( (index < 0 ? ExceptionArgument.index : ExceptionArgument.count), ExceptionResource.ArgumentOutOfRange_NeedNonNegNum ); } if(m_size - index < count) { ThrowHelper.ThrowArgumentException( ExceptionResource.Argument_InvalidOffLen ); } Array.Sort<T>( m_items, index, count, comparer ); m_version++; } public void Sort( Comparison<T> comparison ) { if(comparison == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } if(m_size > 0) { IComparer<T> comparer = new Array.FunctorComparer<T>( comparison ); Array.Sort( m_items, 0, m_size, comparer ); } } // ToArray returns a new Object array containing the contents of the List. // This requires copying the List, which is an O(n) operation. 
public T[] ToArray() { T[] array = new T[m_size]; Array.Copy( m_items, 0, array, 0, m_size ); return array; } // Sets the capacity of this list to the size of the list. This method can // be used to minimize a list's memory overhead once it is known that no // new elements will be added to the list. To completely clear a list and // release all memory referenced by the list, execute the following // statements: // // list.Clear(); // list.TrimExcess(); // public void TrimExcess() { int threshold = (int)(((double)m_items.Length) * 0.9); if(m_size < threshold) { Capacity = m_size; } } public bool TrueForAll( Predicate<T> match ) { if(match == null) { ThrowHelper.ThrowArgumentNullException( ExceptionArgument.match ); } for(int i = 0; i < m_size; i++) { if(!match( m_items[i] )) { return false; } } return true; } [Serializable] public struct Enumerator : IEnumerator<T>, System.Collections.IEnumerator { private List<T> m_list; private int m_index; private int m_version; private T m_current; internal Enumerator( List<T> list ) { m_list = list; m_index = 0; m_version = list.m_version; m_current = default( T ); } public void Dispose() { } public bool MoveNext() { if(m_version != m_list.m_version) { ThrowHelper.ThrowInvalidOperationException( ExceptionResource.InvalidOperation_EnumFailedVersion ); } if((uint)m_index < (uint)m_list.m_size) { m_current = m_list.m_items[m_index]; m_index++; return true; } m_index = m_list.m_size + 1; m_current = default( T ); return false; } public T Current { get { return m_current; } } Object System.Collections.IEnumerator.Current { get { if(m_index == 0 || m_index == m_list.m_size + 1) { ThrowHelper.ThrowInvalidOperationException( ExceptionResource.InvalidOperation_EnumOpCantHappen ); } return Current; } } void System.Collections.IEnumerator.Reset() { if(m_version != m_list.m_version) { ThrowHelper.ThrowInvalidOperationException( ExceptionResource.InvalidOperation_EnumFailedVersion ); } m_index = 0; m_current = default( T ); } } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Runtime;

using Internal.Runtime.Augments;

namespace Internal.Runtime.CompilerServices
{
    // This structure is used to resolve an instance method given an object instance. To use this type
    // 1) New up an instance using one of the constructors below.
    // 2) Use the ToIntPtr() method to get the interned instance of this type. This will permanently allocate
    //    a block of memory that can be used to represent a virtual method resolution. This memory is interned
    //    so that repeated allocation of the same resolver will not leak.
    // 3) Use the ResolveMethod function to do the virtual lookup. This function takes advantage of
    //    a lockless cache so the resolution is very fast for repeated lookups.
    [ReflectionBlocked]
    public struct OpenMethodResolver : IEquatable<OpenMethodResolver>
    {
        // Resolution strategies, stored in _resolveType. They determine how
        // _methodHandleOrSlotOrCodePointer is interpreted (see ResolveMethod below).
        public const short DispatchResolve = 0;                       // virtual/interface dispatch via a slot number
        public const short GVMResolve = 1;                            // generic virtual method lookup via a RuntimeMethodHandle
        public const short OpenNonVirtualResolve = 2;                 // direct (non-virtual) invoke through a code pointer
        public const short OpenNonVirtualResolveLookthruUnboxing = 3; // as above, but the code pointer is an unboxing stub that is unwrapped first

        private readonly short _resolveType;
        private readonly GCHandle _readerGCHandle;
        private readonly int _handle;
        // Overloaded meaning depending on _resolveType: a boxed slot number (DispatchResolve),
        // the bit pattern of a RuntimeMethodHandle (GVMResolve), or a code pointer (the open non-virtual cases).
        private readonly IntPtr _methodHandleOrSlotOrCodePointer;
        // Non-zero only for the open non-virtual cases; allows the static ResolveMethod
        // to short-circuit without consulting the lookup cache.
        private readonly IntPtr _nonVirtualOpenInvokeCodePointer;
        private readonly EETypePtr _declaringType;

        // Constructs a DispatchResolve resolver: the target is found by virtual/interface
        // dispatch on the declaring type's given slot number.
        public OpenMethodResolver(RuntimeTypeHandle declaringTypeOfSlot, int slot, GCHandle readerGCHandle, int handle)
        {
            _resolveType = DispatchResolve;
            _declaringType = declaringTypeOfSlot.ToEETypePtr();
            _methodHandleOrSlotOrCodePointer = new IntPtr(slot);
            _handle = handle;
            _readerGCHandle = readerGCHandle;
            _nonVirtualOpenInvokeCodePointer = IntPtr.Zero;
        }

        // Constructs a GVMResolve resolver: the target is a generic virtual method
        // identified by gvmSlot. The handle's bits are stored verbatim in an IntPtr
        // (reversed by the GVMMethodHandle property).
        public unsafe OpenMethodResolver(RuntimeTypeHandle declaringTypeOfSlot, RuntimeMethodHandle gvmSlot, GCHandle readerGCHandle, int handle)
        {
            _resolveType = GVMResolve;
            _methodHandleOrSlotOrCodePointer = *(IntPtr*)&gvmSlot; // reinterpret the handle's bit pattern as an IntPtr
            _declaringType = declaringTypeOfSlot.ToEETypePtr();
            _handle = handle;
            _readerGCHandle = readerGCHandle;
            _nonVirtualOpenInvokeCodePointer = IntPtr.Zero;
        }

        // Constructs an OpenNonVirtualResolve resolver: the target is exactly codePointer,
        // so no lookup is needed at invoke time.
        public OpenMethodResolver(RuntimeTypeHandle declaringType, IntPtr codePointer, GCHandle readerGCHandle, int handle)
        {
            _resolveType = OpenNonVirtualResolve;
            _nonVirtualOpenInvokeCodePointer = _methodHandleOrSlotOrCodePointer = codePointer;
            _declaringType = declaringType.ToEETypePtr();
            _handle = handle;
            _readerGCHandle = readerGCHandle;
        }

        // Constructs either open non-virtual flavor, selected by resolveType.
        // For the lookthru-unboxing case the stored invoke pointer is the underlying
        // non-unboxing entry point obtained from the type loader.
        // Throws NotSupportedException for any other resolveType value.
        public OpenMethodResolver(RuntimeTypeHandle declaringType, IntPtr codePointer, GCHandle readerGCHandle, int handle, short resolveType)
        {
            _resolveType = resolveType;
            _methodHandleOrSlotOrCodePointer = codePointer;
            _declaringType = declaringType.ToEETypePtr();
            _handle = handle;
            _readerGCHandle = readerGCHandle;
            if (resolveType == OpenNonVirtualResolve)
                _nonVirtualOpenInvokeCodePointer = codePointer;
            else if (resolveType == OpenNonVirtualResolveLookthruUnboxing)
                _nonVirtualOpenInvokeCodePointer = RuntimeAugments.TypeLoaderCallbacks.ConvertUnboxingFunctionPointerToUnderlyingNonUnboxingPointer(codePointer, declaringType);
            else
                throw new NotSupportedException();
        }

        // One of the resolve-type constants declared above.
        public short ResolverType
        {
            get
            {
                return _resolveType;
            }
        }

        public RuntimeTypeHandle DeclaringType
        {
            get
            {
                return new RuntimeTypeHandle(_declaringType);
            }
        }

        // Reconstructs the RuntimeMethodHandle whose bit pattern was stored by the
        // GVM constructor (inverse of the *(IntPtr*)&gvmSlot reinterpretation).
        public unsafe RuntimeMethodHandle GVMMethodHandle
        {
            get
            {
                IntPtr localIntPtr = _methodHandleOrSlotOrCodePointer;
                IntPtr* pMethodHandle = &localIntPtr;
                return *(RuntimeMethodHandle*)pMethodHandle;
            }
        }

        // True for both open non-virtual flavors (plain and lookthru-unboxing).
        public bool IsOpenNonVirtualResolve
        {
            get
            {
                switch (_resolveType)
                {
                    case OpenNonVirtualResolve:
                    case OpenNonVirtualResolveLookthruUnboxing:
                        return true;

                    default:
                        return false;
                }
            }
        }

        // NOTE(review): raw union field — only meaningful as a code pointer for the
        // open non-virtual cases; for other resolve types it holds a slot or handle.
        public IntPtr CodePointer
        {
            get
            {
                return _methodHandleOrSlotOrCodePointer;
            }
        }

        public object Reader
        {
            get
            {
                return _readerGCHandle.Target;
            }
        }

        public int Handle
        {
            get
            {
                return _handle;
            }
        }

        // Performs the actual virtual lookup for the dispatch/GVM cases. The open
        // non-virtual cases never reach here (they are short-circuited in the static
        // ResolveMethod below), hence the NotSupportedException fallback.
        unsafe private IntPtr ResolveMethod(object thisObject)
        {
            if (_resolveType == DispatchResolve)
            {
                return RuntimeImports.RhResolveDispatch(thisObject, _declaringType, (ushort)_methodHandleOrSlotOrCodePointer.ToInt32());
            }
            else if (_resolveType == GVMResolve)
            {
                return TypeLoaderExports.GVMLookupForSlot(thisObject, GVMMethodHandle);
            }
            else
            {
                throw new NotSupportedException(); // Should never happen, in this case, the dispatch should be resolved in the other ResolveMethod function
            }
        }

        // Cache-miss path invoked by TypeLoaderExports: 'resolver' is an interned
        // pointer produced by ToIntPtr().
        unsafe internal static IntPtr ResolveMethodWorker(IntPtr resolver, object thisObject)
        {
            return ((OpenMethodResolver*)resolver)->ResolveMethod(thisObject);
        }

        // Main entry point: resolves the target code pointer for thisObject.
        // Open non-virtual resolvers answer immediately from the precomputed pointer;
        // everything else goes through the type loader's (cached) lookup.
        unsafe public static IntPtr ResolveMethod(IntPtr resolver, object thisObject)
        {
            IntPtr nonVirtualOpenInvokeCodePointer = ((OpenMethodResolver*)resolver)->_nonVirtualOpenInvokeCodePointer;
            if (nonVirtualOpenInvokeCodePointer != IntPtr.Zero)
                return nonVirtualOpenInvokeCodePointer;

            return TypeLoaderExports.OpenInstanceMethodLookup(resolver, thisObject);
        }

        // Rotate-left on the 32-bit value (unsigned rotation, reinterpreted as int).
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static int _rotl(int value, int shift)
        {
            return (int)(((uint)value << shift) | ((uint)value >> (32 - shift)));
        }

        // Mixes four component hash codes into one, alternating two accumulators
        // with rotate-and-xor rounds and a final rotate-and-add avalanche step.
        private static int CalcHashCode(int hashCode1, int hashCode2, int hashCode3, int hashCode4)
        {
            int length = 4;
            int hash1 = 0x449b3ad6;
            int hash2 = (length << 3) + 0x55399219;

            hash1 = (hash1 + _rotl(hash1, 5)) ^ hashCode1;
            hash2 = (hash2 + _rotl(hash2, 5)) ^ hashCode2;
            hash1 = (hash1 + _rotl(hash1, 5)) ^ hashCode3;
            hash2 = (hash2 + _rotl(hash2, 5)) ^ hashCode4;

            hash1 += _rotl(hash1, 8);
            hash2 += _rotl(hash2, 8);

            return hash1 ^ hash2;
        }

        public override int GetHashCode()
        {
            return CalcHashCode(_resolveType, _handle, _methodHandleOrSlotOrCodePointer.GetHashCode(), _declaringType.IsNull ? 0 : _declaringType.GetHashCode());
        }

        // Equality over the identifying fields only. _nonVirtualOpenInvokeCodePointer is
        // computed from these fields in every constructor, so it does not participate;
        // _readerGCHandle likewise does not participate in equality or hashing.
        public bool Equals(OpenMethodResolver other)
        {
            if (other._resolveType != _resolveType)
                return false;

            if (other._handle != _handle)
                return false;

            if (other._methodHandleOrSlotOrCodePointer != _methodHandleOrSlotOrCodePointer)
                return false;

            return other._declaringType.Equals(_declaringType);
        }

        public override bool Equals(object obj)
        {
            if (!(obj is OpenMethodResolver))
            {
                return false;
            }

            return ((OpenMethodResolver)obj).Equals(this);
        }

        // Interning table mapping a resolver value to its permanently-allocated copy.
        // Entries are never removed, so repeated ToIntPtr calls for equal resolvers
        // return the same pointer instead of leaking new allocations.
        private static LowLevelDictionary<OpenMethodResolver, IntPtr> s_internedResolverHash = new LowLevelDictionary<OpenMethodResolver, IntPtr>();

        // Returns the interned native-memory copy of this resolver, allocating (via
        // Interop.MemAlloc, intentionally never freed) and registering it on first use.
        // The returned pointer is what the static ResolveMethod expects.
        unsafe public IntPtr ToIntPtr()
        {
            lock (s_internedResolverHash)
            {
                IntPtr returnValue;
                if (s_internedResolverHash.TryGetValue(this, out returnValue))
                    return returnValue;
                returnValue = Interop.MemAlloc(new UIntPtr((uint)sizeof(OpenMethodResolver)));
                *((OpenMethodResolver*)returnValue) = this;
                s_internedResolverHash.Add(this, returnValue);
                return returnValue;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Diagnostics.Contracts; using System.Linq; namespace System.Collections.Immutable { /// <summary> /// An immutable unordered hash set implementation. /// </summary> /// <typeparam name="T">The type of elements in the set.</typeparam> [DebuggerDisplay("Count = {Count}")] [DebuggerTypeProxy(typeof(ImmutableHashSetDebuggerProxy<>))] public sealed partial class ImmutableHashSet<T> : IImmutableSet<T>, IHashKeyCollection<T>, IReadOnlyCollection<T>, ICollection<T>, ISet<T>, ICollection, IStrongEnumerable<T, ImmutableHashSet<T>.Enumerator> { /// <summary> /// An empty immutable hash set with the default comparer for <typeparamref name="T"/>. /// </summary> [SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes")] public static readonly ImmutableHashSet<T> Empty = new ImmutableHashSet<T>(SortedInt32KeyNode<HashBucket>.EmptyNode, EqualityComparer<T>.Default, 0); /// <summary> /// The singleton delegate that freezes the contents of hash buckets when the root of the data structure is frozen. /// </summary> private static readonly Action<KeyValuePair<int, HashBucket>> s_FreezeBucketAction = (kv) => kv.Value.Freeze(); /// <summary> /// The equality comparer used to hash the elements in the collection. /// </summary> private readonly IEqualityComparer<T> _equalityComparer; /// <summary> /// The number of elements in this collection. /// </summary> private readonly int _count; /// <summary> /// The sorted dictionary that this hash set wraps. The key is the hash code and the value is the bucket of all items that hashed to it. 
/// </summary> private readonly SortedInt32KeyNode<HashBucket> _root; /// <summary> /// Initializes a new instance of the <see cref="ImmutableHashSet{T}"/> class. /// </summary> /// <param name="equalityComparer">The equality comparer.</param> internal ImmutableHashSet(IEqualityComparer<T> equalityComparer) : this(SortedInt32KeyNode<HashBucket>.EmptyNode, equalityComparer, 0) { } /// <summary> /// Initializes a new instance of the <see cref="ImmutableHashSet{T}"/> class. /// </summary> /// <param name="root">The sorted set that this set wraps.</param> /// <param name="equalityComparer">The equality comparer used by this instance.</param> /// <param name="count">The number of elements in this collection.</param> private ImmutableHashSet(SortedInt32KeyNode<HashBucket> root, IEqualityComparer<T> equalityComparer, int count) { Requires.NotNull(root, "root"); Requires.NotNull(equalityComparer, "equalityComparer"); root.Freeze(s_FreezeBucketAction); _root = root; _count = count; _equalityComparer = equalityComparer; } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> public ImmutableHashSet<T> Clear() { Contract.Ensures(Contract.Result<ImmutableHashSet<T>>() != null); Contract.Ensures(Contract.Result<ImmutableHashSet<T>>().IsEmpty); return this.IsEmpty ? this : ImmutableHashSet<T>.Empty.WithComparer(_equalityComparer); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> public int Count { get { return _count; } } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> public bool IsEmpty { get { return this.Count == 0; } } #region IHashKeyCollection<T> Properties /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> public IEqualityComparer<T> KeyComparer { get { return _equalityComparer; } } #endregion #region IImmutableSet<T> Properties /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. 
/// </summary> [ExcludeFromCodeCoverage] IImmutableSet<T> IImmutableSet<T>.Clear() { return this.Clear(); } #endregion #region ICollection Properties /// <summary> /// See <see cref="ICollection"/>. /// </summary> [DebuggerBrowsable(DebuggerBrowsableState.Never)] object ICollection.SyncRoot { get { return this; } } /// <summary> /// See the <see cref="ICollection"/> interface. /// </summary> [DebuggerBrowsable(DebuggerBrowsableState.Never)] bool ICollection.IsSynchronized { get { // This is immutable, so it is always thread-safe. return true; } } #endregion /// <summary> /// Gets the root node (for testing purposes). /// </summary> internal IBinaryTree Root { get { return _root; } } /// <summary> /// Gets a data structure that captures the current state of this map, as an input into a query or mutating function. /// </summary> private MutationInput Origin { get { return new MutationInput(this); } } #region Public methods /// <summary> /// Creates a collection with the same contents as this collection that /// can be efficiently mutated across multiple operations using standard /// mutable interfaces. /// </summary> /// <remarks> /// This is an O(1) operation and results in only a single (small) memory allocation. /// The mutable collection that is returned is *not* thread-safe. /// </remarks> [Pure] public Builder ToBuilder() { // We must not cache the instance created here and return it to various callers. // Those who request a mutable collection must get references to the collection // that version independently of each other. return new Builder(this); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> [Pure] public ImmutableHashSet<T> Add(T item) { Requires.NotNullAllowStructs(item, "item"); Contract.Ensures(Contract.Result<ImmutableHashSet<T>>() != null); var result = Add(item, this.Origin); return result.Finalize(this); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. 
/// </summary> public ImmutableHashSet<T> Remove(T item) { Requires.NotNullAllowStructs(item, "item"); Contract.Ensures(Contract.Result<ImmutableHashSet<T>>() != null); var result = Remove(item, this.Origin); return result.Finalize(this); } /// <summary> /// Searches the set for a given value and returns the equal value it finds, if any. /// </summary> /// <param name="equalValue">The value to search for.</param> /// <param name="actualValue">The value from the set that the search found, or the original value if the search yielded no match.</param> /// <returns>A value indicating whether the search was successful.</returns> /// <remarks> /// This can be useful when you want to reuse a previously stored reference instead of /// a newly constructed one (so that more sharing of references can occur) or to look up /// a value that has more complete data than the value you currently have, although their /// comparer functions indicate they are equal. /// </remarks> [Pure] public bool TryGetValue(T equalValue, out T actualValue) { Requires.NotNullAllowStructs(equalValue, "value"); int hashCode = _equalityComparer.GetHashCode(equalValue); HashBucket bucket; if (_root.TryGetValue(hashCode, out bucket)) { return bucket.TryExchange(equalValue, _equalityComparer, out actualValue); } actualValue = equalValue; return false; } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> [Pure] public ImmutableHashSet<T> Union(IEnumerable<T> other) { Requires.NotNull(other, "other"); Contract.Ensures(Contract.Result<ImmutableHashSet<T>>() != null); return this.Union(other, avoidWithComparer: false); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. 
/// </summary> [Pure] public ImmutableHashSet<T> Intersect(IEnumerable<T> other) { Requires.NotNull(other, "other"); Contract.Ensures(Contract.Result<ImmutableHashSet<T>>() != null); var result = Intersect(other, this.Origin); return result.Finalize(this); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> public ImmutableHashSet<T> Except(IEnumerable<T> other) { Requires.NotNull(other, "other"); var result = Except(other, _equalityComparer, _root); return result.Finalize(this); } /// <summary> /// Produces a set that contains elements either in this set or a given sequence, but not both. /// </summary> /// <param name="other">The other sequence of items.</param> /// <returns>The new set.</returns> [Pure] public ImmutableHashSet<T> SymmetricExcept(IEnumerable<T> other) { Requires.NotNull(other, "other"); Contract.Ensures(Contract.Result<IImmutableSet<T>>() != null); var result = SymmetricExcept(other, this.Origin); return result.Finalize(this); } /// <summary> /// Checks whether a given sequence of items entirely describe the contents of this set. /// </summary> /// <param name="other">The sequence of items to check against this set.</param> /// <returns>A value indicating whether the sets are equal.</returns> [Pure] public bool SetEquals(IEnumerable<T> other) { Requires.NotNull(other, "other"); if (object.ReferenceEquals(this, other)) { return true; } return SetEquals(other, this.Origin); } /// <summary> /// Determines whether the current set is a property (strict) subset of a specified collection. 
/// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a correct subset of <paramref name="other"/>; otherwise, false.</returns> [Pure] public bool IsProperSubsetOf(IEnumerable<T> other) { Requires.NotNull(other, "other"); return IsProperSubsetOf(other, this.Origin); } /// <summary> /// Determines whether the current set is a correct superset of a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a correct superset of <paramref name="other"/>; otherwise, false.</returns> [Pure] public bool IsProperSupersetOf(IEnumerable<T> other) { Requires.NotNull(other, "other"); return IsProperSupersetOf(other, this.Origin); } /// <summary> /// Determines whether a set is a subset of a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a subset of <paramref name="other"/>; otherwise, false.</returns> [Pure] public bool IsSubsetOf(IEnumerable<T> other) { Requires.NotNull(other, "other"); return IsSubsetOf(other, this.Origin); } /// <summary> /// Determines whether the current set is a superset of a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a superset of <paramref name="other"/>; otherwise, false.</returns> [Pure] public bool IsSupersetOf(IEnumerable<T> other) { Requires.NotNull(other, "other"); return IsSupersetOf(other, this.Origin); } /// <summary> /// Determines whether the current set overlaps with the specified collection. 
/// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set and <paramref name="other"/> share at least one common element; otherwise, false.</returns> [Pure] public bool Overlaps(IEnumerable<T> other) { Requires.NotNull(other, "other"); return Overlaps(other, this.Origin); } #endregion #region IImmutableSet<T> Methods /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> [ExcludeFromCodeCoverage] IImmutableSet<T> IImmutableSet<T>.Add(T item) { return this.Add(item); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> [ExcludeFromCodeCoverage] IImmutableSet<T> IImmutableSet<T>.Remove(T item) { return this.Remove(item); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> [ExcludeFromCodeCoverage] IImmutableSet<T> IImmutableSet<T>.Union(IEnumerable<T> other) { return this.Union(other); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> [ExcludeFromCodeCoverage] IImmutableSet<T> IImmutableSet<T>.Intersect(IEnumerable<T> other) { return this.Intersect(other); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> [ExcludeFromCodeCoverage] IImmutableSet<T> IImmutableSet<T>.Except(IEnumerable<T> other) { return this.Except(other); } /// <summary> /// Produces a set that contains elements either in this set or a given sequence, but not both. /// </summary> /// <param name="other">The other sequence of items.</param> /// <returns>The new set.</returns> [ExcludeFromCodeCoverage] IImmutableSet<T> IImmutableSet<T>.SymmetricExcept(IEnumerable<T> other) { return this.SymmetricExcept(other); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. /// </summary> public bool Contains(T item) { Requires.NotNullAllowStructs(item, "item"); return Contains(item, this.Origin); } /// <summary> /// See the <see cref="IImmutableSet{T}"/> interface. 
/// </summary>
[Pure]
public ImmutableHashSet<T> WithComparer(IEqualityComparer<T> equalityComparer)
{
    Contract.Ensures(Contract.Result<ImmutableHashSet<T>>() != null);
    if (equalityComparer == null)
    {
        equalityComparer = EqualityComparer<T>.Default;
    }

    if (equalityComparer == _equalityComparer)
    {
        // Same comparer instance: every hash code is unchanged, so reuse this set.
        return this;
    }
    else
    {
        // A different comparer changes hashing/equality, so rebuild the set from
        // scratch. avoidWithComparer: true stops Union from calling back into
        // WithComparer, which would otherwise recurse.
        var result = new ImmutableHashSet<T>(equalityComparer);
        result = result.Union(this, avoidWithComparer: true);
        return result;
    }
}

#endregion

#region ISet<T> Members

// The mutating ISet<T> members are unsupported by design: this collection is
// immutable, so in-place mutation cannot be expressed. Callers should use the
// methods that return a new ImmutableHashSet<T> instead.

/// <summary>
/// See <see cref="ISet{T}"/>
/// </summary>
bool ISet<T>.Add(T item)
{
    throw new NotSupportedException();
}

/// <summary>
/// See <see cref="ISet{T}"/>
/// </summary>
void ISet<T>.ExceptWith(IEnumerable<T> other)
{
    throw new NotSupportedException();
}

/// <summary>
/// See <see cref="ISet{T}"/>
/// </summary>
void ISet<T>.IntersectWith(IEnumerable<T> other)
{
    throw new NotSupportedException();
}

/// <summary>
/// See <see cref="ISet{T}"/>
/// </summary>
void ISet<T>.SymmetricExceptWith(IEnumerable<T> other)
{
    throw new NotSupportedException();
}

/// <summary>
/// See <see cref="ISet{T}"/>
/// </summary>
void ISet<T>.UnionWith(IEnumerable<T> other)
{
    throw new NotSupportedException();
}

#endregion

#region ICollection<T> members

/// <summary>
/// See the <see cref="ICollection{T}"/> interface.
/// </summary>
bool ICollection<T>.IsReadOnly
{
    get { return true; }
}

/// <summary>
/// See the <see cref="ICollection{T}"/> interface.
/// </summary>
void ICollection<T>.CopyTo(T[] array, int arrayIndex)
{
    Requires.NotNull(array, "array");
    Requires.Range(arrayIndex >= 0, "arrayIndex");
    // Ensure the destination has room for all elements starting at arrayIndex.
    Requires.Range(array.Length >= arrayIndex + this.Count, "arrayIndex");

    foreach (T item in this)
    {
        array[arrayIndex++] = item;
    }
}

/// <summary>
/// See the <see cref="IList{T}"/> interface.
/// </summary>
void ICollection<T>.Add(T item)
{
    throw new NotSupportedException();
}

/// <summary>
/// See the <see cref="ICollection{T}"/> interface.
/// </summary>
void ICollection<T>.Clear()
{
    throw new NotSupportedException();
}

/// <summary>
/// See the <see cref="IList{T}"/> interface.
/// </summary>
bool ICollection<T>.Remove(T item)
{
    throw new NotSupportedException();
}

#endregion

#region ICollection Methods

/// <summary>
/// Copies the elements of the <see cref="ICollection"/> to an <see cref="Array"/>, starting at a particular <see cref="Array"/> index.
/// </summary>
/// <param name="array">The one-dimensional <see cref="Array"/> that is the destination of the elements copied from <see cref="ICollection"/>. The <see cref="Array"/> must have zero-based indexing.</param>
/// <param name="arrayIndex">The zero-based index in <paramref name="array"/> at which copying begins.</param>
void ICollection.CopyTo(Array array, int arrayIndex)
{
    Requires.NotNull(array, "array");
    Requires.Range(arrayIndex >= 0, "arrayIndex");
    Requires.Range(array.Length >= arrayIndex + this.Count, "arrayIndex");

    foreach (T item in this)
    {
        // SetValue supports the non-generic Array, including arrays of a
        // compatible element type.
        array.SetValue(item, arrayIndex++);
    }
}

#endregion

#region IEnumerable<T> Members

/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>
/// A <see cref="IEnumerator{T}"/> that can be used to iterate through the collection.
/// </returns>
public Enumerator GetEnumerator()
{
    // Returns the struct enumerator directly so foreach over the concrete type
    // avoids a boxing allocation.
    return new Enumerator(_root);
}

/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
IEnumerator<T> IEnumerable<T>.GetEnumerator()
{
    return this.GetEnumerator();
}

#endregion

#region IEnumerable Members

/// <summary>
/// Returns an enumerator that iterates through a collection.
/// </summary>
/// <returns>
/// An <see cref="IEnumerator"/> object that can be used to iterate through the collection.
/// </returns>
IEnumerator IEnumerable.GetEnumerator()
{
    return this.GetEnumerator();
}

#endregion

#region Static query and manipulator methods

// The following static workers implement the set algebra over a raw
// (root, comparer, count) triple so they can be shared by the immutable
// collection and its mutable builder.

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static bool IsSupersetOf(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    // A superset must contain every element of the other sequence.
    foreach (T item in other.GetEnumerableDisposable<T, Enumerator>())
    {
        if (!Contains(item, origin))
        {
            return false;
        }
    }

    return true;
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static MutationResult Add(T item, MutationInput origin)
{
    Requires.NotNullAllowStructs(item, "item");

    OperationResult result;
    int hashCode = origin.EqualityComparer.GetHashCode(item);
    // Each hash code maps to a bucket of items that share it (hash collisions).
    HashBucket bucket = origin.Root.GetValueOrDefault(hashCode);
    var newBucket = bucket.Add(item, origin.EqualityComparer, out result);
    if (result == OperationResult.NoChangeRequired)
    {
        // Item already present: hand back the unchanged root with a count delta of 0.
        return new MutationResult(origin.Root, 0);
    }

    var newRoot = UpdateRoot(origin.Root, hashCode, newBucket);
    Debug.Assert(result == OperationResult.SizeChanged);
    return new MutationResult(newRoot, 1 /*result == OperationResult.SizeChanged ? 1 : 0*/);
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static MutationResult Remove(T item, MutationInput origin)
{
    Requires.NotNullAllowStructs(item, "item");

    var result = OperationResult.NoChangeRequired;
    int hashCode = origin.EqualityComparer.GetHashCode(item);
    HashBucket bucket;
    var newRoot = origin.Root;
    if (origin.Root.TryGetValue(hashCode, out bucket))
    {
        var newBucket = bucket.Remove(item, origin.EqualityComparer, out result);
        if (result == OperationResult.NoChangeRequired)
        {
            // Hash code found but the item was not in the bucket: no change.
            return new MutationResult(origin.Root, 0);
        }

        newRoot = UpdateRoot(origin.Root, hashCode, newBucket);
    }

    // If the hash code was absent entirely, result stays NoChangeRequired and
    // the original root is returned with a delta of 0.
    return new MutationResult(newRoot, result == OperationResult.SizeChanged ? -1 : 0);
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static bool Contains(T item, MutationInput origin)
{
    int hashCode = origin.EqualityComparer.GetHashCode(item);
    HashBucket bucket;
    if (origin.Root.TryGetValue(hashCode, out bucket))
    {
        // Same hash code found; the bucket resolves actual equality.
        return bucket.Contains(item, origin.EqualityComparer);
    }

    return false;
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static MutationResult Union(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    int count = 0;
    var newRoot = origin.Root;
    foreach (var item in other.GetEnumerableDisposable<T, Enumerator>())
    {
        int hashCode = origin.EqualityComparer.GetHashCode(item);
        HashBucket bucket = newRoot.GetValueOrDefault(hashCode);
        OperationResult result;
        var newBucket = bucket.Add(item, origin.EqualityComparer, out result);
        if (result == OperationResult.SizeChanged)
        {
            newRoot = UpdateRoot(newRoot, hashCode, newBucket);
            count++; // Track only net additions; duplicates leave count alone.
        }
    }

    return new MutationResult(newRoot, count);
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static bool Overlaps(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    if (origin.Root.IsEmpty)
    {
        // An empty set overlaps with nothing.
        return false;
    }

    foreach (T item in other.GetEnumerableDisposable<T, Enumerator>())
    {
        if (Contains(item, origin))
        {
            return true;
        }
    }

    return false;
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static bool SetEquals(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    // Materialize the other sequence as a set (using our comparer) so duplicates
    // in the sequence do not skew the count comparison.
    var otherSet = new HashSet<T>(other, origin.EqualityComparer);
    if (origin.Count != otherSet.Count)
    {
        return false;
    }

    int matches = 0;
    foreach (T item in otherSet)
    {
        if (!Contains(item, origin))
        {
            return false;
        }

        matches++;
    }

    return matches == origin.Count;
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static SortedInt32KeyNode<HashBucket> UpdateRoot(SortedInt32KeyNode<HashBucket> root, int hashCode, HashBucket newBucket)
{
    bool mutated;
    if (newBucket.IsEmpty)
    {
        // An empty bucket is pruned from the tree rather than stored.
        return root.Remove(hashCode, out mutated);
    }
    else
    {
        bool replacedExistingValue;
        return root.SetItem(hashCode, newBucket, EqualityComparer<HashBucket>.Default, out replacedExistingValue, out mutated);
    }
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static MutationResult Intersect(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    // Build the intersection from scratch by keeping only items of `other`
    // that are also in this set; Add de-duplicates repeats in `other`.
    var newSet = SortedInt32KeyNode<HashBucket>.EmptyNode;
    int count = 0;
    foreach (var item in other.GetEnumerableDisposable<T, Enumerator>())
    {
        if (Contains(item, origin))
        {
            var result = Add(item, new MutationInput(newSet, origin.EqualityComparer, count));
            newSet = result.Root;
            count += result.Count;
        }
    }

    // count is the absolute size of the new set, not a delta.
    return new MutationResult(newSet, count, CountType.FinalValue);
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static MutationResult Except(IEnumerable<T> other, IEqualityComparer<T> equalityComparer, SortedInt32KeyNode<HashBucket> root)
{
    Requires.NotNull(other, "other");
    Requires.NotNull(equalityComparer, "equalityComparer");
    Requires.NotNull(root, "root");

    int count = 0;
    var newRoot = root;
    foreach (var item in other.GetEnumerableDisposable<T, Enumerator>())
    {
        int hashCode = equalityComparer.GetHashCode(item);
        HashBucket bucket;
        if (newRoot.TryGetValue(hashCode, out bucket))
        {
            OperationResult result;
            HashBucket newBucket = bucket.Remove(item, equalityComparer, out result);
            if (result == OperationResult.SizeChanged)
            {
                count--; // Negative delta: elements removed from the original.
                newRoot = UpdateRoot(newRoot, hashCode, newBucket);
            }
        }
    }

    return new MutationResult(newRoot, count);
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
[Pure]
private static MutationResult SymmetricExcept(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    // Materialize `other` as an immutable set with our comparer so membership
    // checks below are cheap and duplicates collapse.
    var otherAsSet = ImmutableHashSet.CreateRange(origin.EqualityComparer, other);

    int count = 0;
    var result = SortedInt32KeyNode<HashBucket>.EmptyNode;

    // Pass 1: items in this set but not in `other`.
    foreach (T item in new NodeEnumerable(origin.Root))
    {
        if (!otherAsSet.Contains(item))
        {
            var mutationResult = Add(item, new MutationInput(result, origin.EqualityComparer, count));
            result = mutationResult.Root;
            count += mutationResult.Count;
        }
    }

    // Pass 2: items in `other` but not in this set.
    foreach (T item in otherAsSet)
    {
        if (!Contains(item, origin))
        {
            var mutationResult = Add(item, new MutationInput(result, origin.EqualityComparer, count));
            result = mutationResult.Root;
            count += mutationResult.Count;
        }
    }

    // count is the absolute size of the new set, not a delta.
    return new MutationResult(result, count, CountType.FinalValue);
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static bool IsProperSubsetOf(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    if (origin.Root.IsEmpty)
    {
        // The empty set is a proper subset of any non-empty sequence.
        return other.Any();
    }

    // To determine whether everything we have is also in another sequence,
    // we enumerate the sequence and "tag" whether it's in this collection,
    // then consider whether every element in this collection was tagged.
    // Since this collection is immutable we cannot directly tag.  So instead
    // we simply count how many "hits" we have and ensure it's equal to the
    // size of this collection.  Of course for this to work we need to ensure
    // the uniqueness of items in the given sequence, so we create a set based
    // on the sequence first.
    var otherSet = new HashSet<T>(other, origin.EqualityComparer);
    if (origin.Count >= otherSet.Count)
    {
        // A proper subset must be strictly smaller.
        return false;
    }

    int matches = 0;
    bool extraFound = false;
    foreach (T item in otherSet)
    {
        if (Contains(item, origin))
        {
            matches++;
        }
        else
        {
            extraFound = true;
        }

        // Short-circuit once every element of this set was seen AND at least
        // one extra element exists in the other sequence.
        if (matches == origin.Count && extraFound)
        {
            return true;
        }
    }

    return false;
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static bool IsProperSupersetOf(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    if (origin.Root.IsEmpty)
    {
        // An empty set cannot be a proper superset of anything.
        return false;
    }

    int matchCount = 0;
    foreach (T item in other.GetEnumerableDisposable<T, Enumerator>())
    {
        matchCount++;
        if (!Contains(item, origin))
        {
            return false;
        }
    }

    // NOTE(review): matchCount counts every enumerated element, so a sequence
    // containing duplicates of a member (e.g. [x, x] against {x, y}) can make
    // this comparison return false even though the set is a proper superset of
    // the distinct elements — confirm against the intended ISet<T> semantics.
    return origin.Count > matchCount;
}

/// <summary>
/// Performs the set operation on a given data structure.
/// </summary>
private static bool IsSubsetOf(IEnumerable<T> other, MutationInput origin)
{
    Requires.NotNull(other, "other");

    if (origin.Root.IsEmpty)
    {
        // The empty set is a subset of everything.
        return true;
    }

    // To determine whether everything we have is also in another sequence,
    // we enumerate the sequence and "tag" whether it's in this collection,
    // then consider whether every element in this collection was tagged.
    // Since this collection is immutable we cannot directly tag.  So instead
    // we simply count how many "hits" we have and ensure it's equal to the
    // size of this collection.  Of course for this to work we need to ensure
    // the uniqueness of items in the given sequence, so we create a set based
    // on the sequence first.
    var otherSet = new HashSet<T>(other, origin.EqualityComparer);
    int matches = 0;
    foreach (T item in otherSet)
    {
        if (Contains(item, origin))
        {
            matches++;
        }
    }

    return matches == origin.Count;
}

#endregion

/// <summary>
/// Wraps the specified data structure with an immutable collection wrapper.
/// </summary>
/// <param name="root">The root of the data structure.</param>
/// <param name="equalityComparer">The equality comparer.</param>
/// <param name="count">The number of elements in the data structure.</param>
/// <returns>The immutable collection.</returns>
private static ImmutableHashSet<T> Wrap(SortedInt32KeyNode<HashBucket> root, IEqualityComparer<T> equalityComparer, int count)
{
    Requires.NotNull(root, "root");
    Requires.NotNull(equalityComparer, "equalityComparer");
    Requires.Range(count >= 0, "count");
    return new ImmutableHashSet<T>(root, equalityComparer, count);
}

/// <summary>
/// Wraps the specified data structure with an immutable collection wrapper.
/// </summary>
/// <param name="root">The root of the data structure.</param>
/// <param name="adjustedCountIfDifferentRoot">The adjusted count if the root has changed.</param>
/// <returns>The immutable collection.</returns>
private ImmutableHashSet<T> Wrap(SortedInt32KeyNode<HashBucket> root, int adjustedCountIfDifferentRoot)
{
    // If the root is reference-equal, no mutation happened; reuse this instance.
    return (root != _root) ? new ImmutableHashSet<T>(root, _equalityComparer, adjustedCountIfDifferentRoot) : this;
}

/// <summary>
/// Bulk adds entries to the set.
/// </summary>
/// <param name="items">The entries to add.</param>
/// <param name="avoidWithComparer"><c>true</c> when being called from <see cref="WithComparer"/> to avoid <see cref="T:System.StackOverflowException"/>.</param>
[Pure]
private ImmutableHashSet<T> Union(IEnumerable<T> items, bool avoidWithComparer)
{
    Requires.NotNull(items, "items");
    Contract.Ensures(Contract.Result<ImmutableHashSet<T>>() != null);

    // Some optimizations may apply if we're an empty set.
    if (this.IsEmpty && !avoidWithComparer)
    {
        // If the items being added actually come from an ImmutableHashSet<T>,
        // reuse that instance if possible.
        var other = items as ImmutableHashSet<T>;
        if (other != null)
        {
            return other.WithComparer(this.KeyComparer);
        }
    }

    var result = Union(items, this.Origin);
    return result.Finalize(this);
}
}
}
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;

namespace SelfLoad.Business.ERLevel
{
    /// <summary>
    /// C07Level1111Child (editable child object).<br/>
    /// This is a generated base class of <see cref="C07Level1111Child"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="C06Level111"/> collection.
    /// Generated code: customization belongs in the other half of this partial
    /// class and in the partial "On*" hook methods declared below.
    /// </remarks>
    [Serializable]
    public partial class C07Level1111Child : BusinessBase<C07Level1111Child>
    {
        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="Level_1_1_1_1_Child_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> Level_1_1_1_1_Child_NameProperty = RegisterProperty<string>(p => p.Level_1_1_1_1_Child_Name, "Level_1_1_1_1 Child Name");
        /// <summary>
        /// Gets or sets the Level_1_1_1_1 Child Name.
        /// </summary>
        /// <value>The Level_1_1_1_1 Child Name.</value>
        public string Level_1_1_1_1_Child_Name
        {
            get { return GetProperty(Level_1_1_1_1_Child_NameProperty); }
            set { SetProperty(Level_1_1_1_1_Child_NameProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="C07Level1111Child"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="C07Level1111Child"/> object.</returns>
        internal static C07Level1111Child NewC07Level1111Child()
        {
            return DataPortal.CreateChild<C07Level1111Child>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="C07Level1111Child"/> object, based on given parameters.
        /// </summary>
        /// <param name="cLarentID1">The CLarentID1 parameter of the C07Level1111Child to fetch.</param>
        /// <returns>A reference to the fetched <see cref="C07Level1111Child"/> object.</returns>
        internal static C07Level1111Child GetC07Level1111Child(int cLarentID1)
        {
            return DataPortal.FetchChild<C07Level1111Child>(cLarentID1);
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="C07Level1111Child"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        private C07Level1111Child()
        {
            // Prevent direct creation

            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="C07Level1111Child"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // Fire the customization hook before the base implementation marks
            // the object as created.
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="C07Level1111Child"/> object from the database, based on given criteria.
        /// </summary>
        /// <param name="cLarentID1">The CLarent ID1.</param>
        protected void Child_Fetch(int cLarentID1)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("GetC07Level1111Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    // DbType set explicitly to avoid AddWithValue's type inference.
                    cmd.Parameters.AddWithValue("@CLarentID1", cLarentID1).DbType = DbType.Int32;
                    var args = new DataPortalHookArgs(cmd, cLarentID1);
                    OnFetchPre(args);
                    Fetch(cmd);
                    OnFetchPost(args);
                }
            }
        }

        // Executes the fetch command and hydrates this instance from the first row, if any.
        private void Fetch(SqlCommand cmd)
        {
            using (var dr = new SafeDataReader(cmd.ExecuteReader()))
            {
                if (dr.Read())
                {
                    Fetch(dr);
                }
            }
        }

        /// <summary>
        /// Loads a <see cref="C07Level1111Child"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties
            // LoadProperty bypasses authorization and business rules during hydration.
            LoadProperty(Level_1_1_1_1_Child_NameProperty, dr.GetString("Level_1_1_1_1_Child_Name"));
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }

        /// <summary>
        /// Inserts a new <see cref="C07Level1111Child"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(C06Level111 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("AddC07Level1111Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    // The child row is keyed by the parent's identifier.
                    cmd.Parameters.AddWithValue("@Level_1_1_1_ID", parent.Level_1_1_1_ID).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_Child_Name", ReadProperty(Level_1_1_1_1_Child_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnInsertPre(args);
                    cmd.ExecuteNonQuery();
                    OnInsertPost(args);
                }
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="C07Level1111Child"/> object.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(C06Level111 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("UpdateC07Level1111Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_ID", parent.Level_1_1_1_ID).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_Child_Name", ReadProperty(Level_1_1_1_1_Child_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnUpdatePre(args);
                    cmd.ExecuteNonQuery();
                    OnUpdatePost(args);
                }
            }
        }

        /// <summary>
        /// Self deletes the <see cref="C07Level1111Child"/> object from database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(C06Level111 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("DeleteC07Level1111Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_ID", parent.Level_1_1_1_ID).DbType = DbType.Int32;
                    var args = new DataPortalHookArgs(cmd);
                    OnDeletePre(args);
                    cmd.ExecuteNonQuery();
                    OnDeletePost(args);
                }
            }
        }

        #endregion

        #region Pseudo Events

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
using System;
using System.Collections.Generic;
using System.Reflection;
using NQuery.Runtime;

namespace NQuery
{
    /// <summary>
    /// A collection of scalar function bindings, indexed by (case-handled) name,
    /// supporting overloads that differ by parameter types. Functions can be added
    /// individually, from a delegate, or discovered via reflection over a
    /// "container" type or instance whose methods carry
    /// <see cref="FunctionBindingAttribute"/>.
    /// </summary>
    public sealed class FunctionCollection : BindingCollection<FunctionBinding>
    {
        // Secondary index: function name -> all overloads registered under that name.
        // Kept in sync with the base collection via BeforeInsert/AfterRemove/ClearItems.
        private Dictionary<string, List<FunctionBinding>> _functionTable = new Dictionary<string, List<FunctionBinding>>();

        internal FunctionCollection()
        {
        }

        /// <summary>
        /// Creates bindings for all static methods of <paramref name="containerType"/>
        /// that are annotated with exactly one <see cref="FunctionBindingAttribute"/>.
        /// </summary>
        private static IEnumerable<ReflectionFunctionBinding> CreateBindingsFromContainer(Type containerType)
        {
            List<ReflectionFunctionBinding> bindings = new List<ReflectionFunctionBinding>();

            foreach (MethodInfo methodInfo in containerType.GetMethods())
            {
                // Only static methods are usable without an instance.
                if (methodInfo.IsStatic)
                {
                    FunctionBindingAttribute[] functionBindingAttributes = (FunctionBindingAttribute[])methodInfo.GetCustomAttributes(typeof(FunctionBindingAttribute), false);

                    if (functionBindingAttributes.Length == 1)
                    {
                        string functionName = functionBindingAttributes[0].Name;
                        bool isDeterministic = functionBindingAttributes[0].IsDeterministic;
                        ReflectionFunctionBinding reflectionFunctionBinding = new ReflectionFunctionBinding(functionName, methodInfo, null, isDeterministic);
                        bindings.Add(reflectionFunctionBinding);
                    }
                }
            }

            return bindings;
        }

        /// <summary>
        /// Creates bindings for all annotated methods of <paramref name="container"/>'s
        /// type. Instance methods are bound to <paramref name="container"/>; static
        /// methods are bound with a null instance.
        /// </summary>
        private static IEnumerable<ReflectionFunctionBinding> CreateBindingsFromContainer(object container)
        {
            List<ReflectionFunctionBinding> bindings = new List<ReflectionFunctionBinding>();

            foreach (MethodInfo methodInfo in container.GetType().GetMethods())
            {
                object instance;
                if (methodInfo.IsStatic)
                    instance = null;
                else
                    instance = container;

                FunctionBindingAttribute[] functionBindingAttributes = (FunctionBindingAttribute[])methodInfo.GetCustomAttributes(typeof(FunctionBindingAttribute), false);

                if (functionBindingAttributes.Length == 1)
                {
                    string functionName = functionBindingAttributes[0].Name;
                    bool isDeterministic = functionBindingAttributes[0].IsDeterministic;
                    ReflectionFunctionBinding reflectionFunctionBinding = new ReflectionFunctionBinding(functionName, methodInfo, instance, isDeterministic);
                    bindings.Add(reflectionFunctionBinding);
                }
            }

            return bindings;
        }

        /// <summary>
        /// Maintains the name index and rejects a binding whose name AND exact
        /// parameter-type list collide with an already registered overload.
        /// </summary>
        protected override void BeforeInsert(FunctionBinding binding)
        {
            // TODO: Ensure function does not have VOID return type.
            // TODO: Ensure function does not have REF or OUT parameter.

            List<FunctionBinding> functions;
            if (!_functionTable.TryGetValue(binding.Name, out functions))
            {
                // First overload under this name: create the bucket.
                functions = new List<FunctionBinding>();
                _functionTable.Add(binding.Name, functions);
            }
            else
            {
                // Check that no functions with the same parameter types already exists.
                Type[] newParameterTypes = binding.GetParameterTypes();

                foreach (FunctionBinding existingFunctionBinding in functions)
                {
                    Type[] existingParameterTypes = existingFunctionBinding.GetParameterTypes();

                    if (newParameterTypes.Length == existingParameterTypes.Length)
                    {
                        // Assume they are the same
                        bool isSame = true;

                        // Check if any parameter type is different.
                        for (int i = 0; i < newParameterTypes.Length; i++)
                        {
                            if (newParameterTypes[i] != existingParameterTypes[i])
                            {
                                isSame = false;
                                break;
                            }
                        }

                        if (isSame)
                            throw ExceptionBuilder.FunctionWithSameNameAndSignatureAlreadyInCollection("binding", binding);
                    }
                }
            }

            functions.Add(binding);
        }

        /// <summary>
        /// Keeps the name index in sync after a binding is removed from the
        /// collection; empty buckets are dropped.
        /// </summary>
        protected override void AfterRemove(FunctionBinding binding)
        {
            List<FunctionBinding> functions;
            if (_functionTable.TryGetValue(binding.Name, out functions))
            {
                functions.Remove(binding);

                if (functions.Count == 0)
                    _functionTable.Remove(binding.Name);
            }
        }

        /// <summary>
        /// Clears the collection and the name index together.
        /// </summary>
        protected override void ClearItems()
        {
            base.ClearItems();

            // Clear hashtable
            _functionTable.Clear();
        }

        /// <summary>
        /// Registers the built-in function library.
        /// </summary>
        public void AddDefaults()
        {
            AddFromContainer(typeof(BuiltInFunctions));
        }

        /// <summary>
        /// Registers a function backed by the given delegate under the given name.
        /// </summary>
        /// <param name="functionName">Name the function will be callable by in queries.</param>
        /// <param name="functionDelegate">Delegate implementing the function; must not be void and must not use ref/out, optional, or array parameters.</param>
        /// <returns>The binding that was added.</returns>
        public FunctionBinding Add(string functionName, Delegate functionDelegate)
        {
            if (functionName == null)
                throw ExceptionBuilder.ArgumentNull("functionName");

            if (functionDelegate == null)
                throw ExceptionBuilder.ArgumentNull("functionDelegate");

            // Check return type
            if (functionDelegate.Method.ReturnType == typeof(void))
                throw ExceptionBuilder.FunctionMustNotBeVoid(functionDelegate);

            // Check parameters
            ParameterInfo[] parameters = functionDelegate.Method.GetParameters();
            foreach (ParameterInfo param in parameters)
            {
                if (param.IsOut || param.ParameterType.IsByRef)
                    throw ExceptionBuilder.FunctionMustNotHaveRefOrOutParams(functionDelegate, param);

                if (param.IsOptional)
                    throw ExceptionBuilder.FunctionMustNotHaveOptionalParams(functionDelegate, param);

                if (param.ParameterType.IsArray)
                    throw ExceptionBuilder.FunctionMustNotHaveArrayParams(functionDelegate, param);
            }

            // Ok, everything seems to be fine.
            // Delegate-registered functions are treated as non-deterministic
            // (last constructor argument is false).
            ReflectionFunctionBinding reflectionFunctionBinding = new ReflectionFunctionBinding(functionName, functionDelegate.Method, functionDelegate.Target, false);
            Add(reflectionFunctionBinding);
            return reflectionFunctionBinding;
        }

        /// <summary>
        /// Registers all annotated static methods of the given container type.
        /// </summary>
        public void AddFromContainer(Type containerType)
        {
            if (containerType == null)
                throw ExceptionBuilder.ArgumentNull("containerType");

            IEnumerable<ReflectionFunctionBinding> bindings = CreateBindingsFromContainer(containerType);
            foreach (ReflectionFunctionBinding binding in bindings)
                Add(binding);
        }

        /// <summary>
        /// Registers all annotated methods (static and instance) of the given container instance.
        /// </summary>
        public void AddFromContainer(object container)
        {
            if (container == null)
                throw ExceptionBuilder.ArgumentNull("container");

            IEnumerable<ReflectionFunctionBinding> bindings = CreateBindingsFromContainer(container);
            foreach (ReflectionFunctionBinding binding in bindings)
                Add(binding);
        }

        /// <summary>
        /// Removes every registered reflection binding whose underlying
        /// <see cref="MethodInfo"/> matches one of the container's bindings.
        /// Matching bindings are collected first so the collection is not
        /// mutated while being enumerated.
        /// </summary>
        private void RemoveFromContainer(IEnumerable<ReflectionFunctionBinding> containerBindings)
        {
            List<FunctionBinding> bindigsToRemove = new List<FunctionBinding>();

            foreach (FunctionBinding functionBinding in this)
            {
                ReflectionFunctionBinding reflectionFunctionBinding = functionBinding as ReflectionFunctionBinding;

                if (reflectionFunctionBinding != null)
                {
                    foreach (ReflectionFunctionBinding containerBinding in containerBindings)
                    {
                        if (reflectionFunctionBinding.Method == containerBinding.Method)
                        {
                            bindigsToRemove.Add(functionBinding);
                            break;
                        }
                    }
                }
            }

            foreach (FunctionBinding binding in bindigsToRemove)
                Remove(binding);
        }

        /// <summary>
        /// Removes all functions that were registered from the given container type.
        /// </summary>
        public void RemoveFromContainer(Type containerType)
        {
            if (containerType == null)
                throw ExceptionBuilder.ArgumentNull("containerType");

            IEnumerable<ReflectionFunctionBinding> containerBindings = CreateBindingsFromContainer(containerType);
            RemoveFromContainer(containerBindings);
        }

        /// <summary>
        /// Removes all functions that were registered from the given container instance.
        /// </summary>
        public void RemoveFromContainer(object container)
        {
            if (container == null)
                throw ExceptionBuilder.ArgumentNull("container");

            IEnumerable<ReflectionFunctionBinding> containerBindings = CreateBindingsFromContainer(container);
            RemoveFromContainer(containerBindings);
        }

        /// <summary>
        /// Returns all overloads of every registered function whose name matches
        /// the given identifier (the identifier decides case sensitivity).
        /// </summary>
        public override FunctionBinding[] Find(Identifier identifier)
        {
            if (identifier == null)
                throw ExceptionBuilder.ArgumentNull("identifier");

            List<FunctionBinding> result = new List<FunctionBinding>();

            foreach (string exitingsFunctionName in _functionTable.Keys)
            {
                if (identifier.Matches(exitingsFunctionName))
                {
                    List<FunctionBinding> functions = _functionTable[exitingsFunctionName];
                    result.AddRange(functions);
                }
            }

            return result.ToArray();
        }
    }
}
/* * DocuSign REST API * * The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. * * OpenAPI spec version: v2 * Contact: devcenter@docusign.com * Generated by: https://github.com/swagger-api/swagger-codegen.git */ using System; using System.Linq; using System.IO; using System.Text; using System.Text.RegularExpressions; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Converters; using System.ComponentModel.DataAnnotations; namespace DocuSign.eSign.Model { /// <summary> /// RecipientViewRequest /// </summary> [DataContract] public partial class RecipientViewRequest : IEquatable<RecipientViewRequest>, IValidatableObject { public RecipientViewRequest() { // Empty Constructor } /// <summary> /// Initializes a new instance of the <see cref="RecipientViewRequest" /> class. /// </summary> /// <param name="AssertionId">A unique identifier of the authentication event executed by the client application..</param> /// <param name="AuthenticationInstant">A sender generated value that indicates the date/time that the signer was authenticated..</param> /// <param name="AuthenticationMethod">A sender created value that indicates the convention used to authenticate the signer. This information is included in the Certificate of Completion..</param> /// <param name="ClientUserId">A sender created value that shows the recipient is embedded (captive). Maximum length: 100 characters..</param> /// <param name="Email">Specifies the email of the recipient. You can use either email and userName or userId to identify the recipient..</param> /// <param name="PingFrequency">Only used if pingUrl is specified. This is the interval, in seconds, between pings on the pingUrl. The default is 300 seconds. 
Valid values are 60-1200 seconds..</param> /// <param name="PingUrl">A client Url to be pinged by the DocuSign Signing experience to indicate to the client that Signing is active. An HTTP Get is executed against the client. The response from the client is ignored. The intent is for the client to reset it&#39;s session timer when the request is received..</param> /// <param name="RecipientId">Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document..</param> /// <param name="ReturnUrl">The url the recipient is redirected to after the signing session has ended. DocuSign redirects to the url and includes an event parameter that can be used by your application. Possible event parameter values: * cancel (recipient canceled the signing operation) * decline (recipient declined to sign) * exception (an exception occurred) * fax_pending (recipient has a fax pending) * session_timeout (session timed out) * signing_complete (signer completed the signing ceremony) * ttl_expired (the TTL, time to live, timer expired) * viewing_complete (recipient completed viewing the envelope) ###### Note: Include https:// in the URL or the redirect might not succeed on some browsers. .</param> /// <param name="SecurityDomain">The domain in which the user authenticated..</param> /// <param name="UserId">Specifies the user ID of the recipient. You can use with user ID or email and user name to identify the recipient. If user ID is used and a client user ID is provided, the value in the &#x60;userId&#x60; property must match a recipient ID (which can be retrieved with a GET recipients call) for the envelope. If a user ID is used and a clientUser ID is not provided, the user ID match the user ID of the authenticating user..</param> /// <param name="UserName">Specifies the username of the recipient. 
You can use either email and userName or userId to identify the recipient..</param> /// <param name="XFrameOptions">.</param> /// <param name="XFrameOptionsAllowFromUrl">.</param> public RecipientViewRequest(string AssertionId = default(string), string AuthenticationInstant = default(string), string AuthenticationMethod = default(string), string ClientUserId = default(string), string Email = default(string), string PingFrequency = default(string), string PingUrl = default(string), string RecipientId = default(string), string ReturnUrl = default(string), string SecurityDomain = default(string), string UserId = default(string), string UserName = default(string), string XFrameOptions = default(string), string XFrameOptionsAllowFromUrl = default(string)) { this.AssertionId = AssertionId; this.AuthenticationInstant = AuthenticationInstant; this.AuthenticationMethod = AuthenticationMethod; this.ClientUserId = ClientUserId; this.Email = Email; this.PingFrequency = PingFrequency; this.PingUrl = PingUrl; this.RecipientId = RecipientId; this.ReturnUrl = ReturnUrl; this.SecurityDomain = SecurityDomain; this.UserId = UserId; this.UserName = UserName; this.XFrameOptions = XFrameOptions; this.XFrameOptionsAllowFromUrl = XFrameOptionsAllowFromUrl; } /// <summary> /// A unique identifier of the authentication event executed by the client application. /// </summary> /// <value>A unique identifier of the authentication event executed by the client application.</value> [DataMember(Name="assertionId", EmitDefaultValue=false)] public string AssertionId { get; set; } /// <summary> /// A sender generated value that indicates the date/time that the signer was authenticated. 
/// </summary> /// <value>A sender generated value that indicates the date/time that the signer was authenticated.</value> [DataMember(Name="authenticationInstant", EmitDefaultValue=false)] public string AuthenticationInstant { get; set; } /// <summary> /// A sender created value that indicates the convention used to authenticate the signer. This information is included in the Certificate of Completion. /// </summary> /// <value>A sender created value that indicates the convention used to authenticate the signer. This information is included in the Certificate of Completion.</value> [DataMember(Name="authenticationMethod", EmitDefaultValue=false)] public string AuthenticationMethod { get; set; } /// <summary> /// A sender created value that shows the recipient is embedded (captive). Maximum length: 100 characters. /// </summary> /// <value>A sender created value that shows the recipient is embedded (captive). Maximum length: 100 characters.</value> [DataMember(Name="clientUserId", EmitDefaultValue=false)] public string ClientUserId { get; set; } /// <summary> /// Specifies the email of the recipient. You can use either email and userName or userId to identify the recipient. /// </summary> /// <value>Specifies the email of the recipient. You can use either email and userName or userId to identify the recipient.</value> [DataMember(Name="email", EmitDefaultValue=false)] public string Email { get; set; } /// <summary> /// Only used if pingUrl is specified. This is the interval, in seconds, between pings on the pingUrl. The default is 300 seconds. Valid values are 60-1200 seconds. /// </summary> /// <value>Only used if pingUrl is specified. This is the interval, in seconds, between pings on the pingUrl. The default is 300 seconds. 
Valid values are 60-1200 seconds.</value> [DataMember(Name="pingFrequency", EmitDefaultValue=false)] public string PingFrequency { get; set; } /// <summary> /// A client Url to be pinged by the DocuSign Signing experience to indicate to the client that Signing is active. An HTTP Get is executed against the client. The response from the client is ignored. The intent is for the client to reset it&#39;s session timer when the request is received. /// </summary> /// <value>A client Url to be pinged by the DocuSign Signing experience to indicate to the client that Signing is active. An HTTP Get is executed against the client. The response from the client is ignored. The intent is for the client to reset it&#39;s session timer when the request is received.</value> [DataMember(Name="pingUrl", EmitDefaultValue=false)] public string PingUrl { get; set; } /// <summary> /// Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document. /// </summary> /// <value>Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document.</value> [DataMember(Name="recipientId", EmitDefaultValue=false)] public string RecipientId { get; set; } /// <summary> /// The url the recipient is redirected to after the signing session has ended. DocuSign redirects to the url and includes an event parameter that can be used by your application. Possible event parameter values: * cancel (recipient canceled the signing operation) * decline (recipient declined to sign) * exception (an exception occurred) * fax_pending (recipient has a fax pending) * session_timeout (session timed out) * signing_complete (signer completed the signing ceremony) * ttl_expired (the TTL, time to live, timer expired) * viewing_complete (recipient completed viewing the envelope) ###### Note: Include https:// in the URL or the redirect might not succeed on some browsers. 
/// </summary> /// <value>The url the recipient is redirected to after the signing session has ended. DocuSign redirects to the url and includes an event parameter that can be used by your application. Possible event parameter values: * cancel (recipient canceled the signing operation) * decline (recipient declined to sign) * exception (an exception occurred) * fax_pending (recipient has a fax pending) * session_timeout (session timed out) * signing_complete (signer completed the signing ceremony) * ttl_expired (the TTL, time to live, timer expired) * viewing_complete (recipient completed viewing the envelope) ###### Note: Include https:// in the URL or the redirect might not succeed on some browsers. </value> [DataMember(Name="returnUrl", EmitDefaultValue=false)] public string ReturnUrl { get; set; } /// <summary> /// The domain in which the user authenticated. /// </summary> /// <value>The domain in which the user authenticated.</value> [DataMember(Name="securityDomain", EmitDefaultValue=false)] public string SecurityDomain { get; set; } /// <summary> /// Specifies the user ID of the recipient. You can use with user ID or email and user name to identify the recipient. If user ID is used and a client user ID is provided, the value in the &#x60;userId&#x60; property must match a recipient ID (which can be retrieved with a GET recipients call) for the envelope. If a user ID is used and a clientUser ID is not provided, the user ID match the user ID of the authenticating user. /// </summary> /// <value>Specifies the user ID of the recipient. You can use with user ID or email and user name to identify the recipient. If user ID is used and a client user ID is provided, the value in the &#x60;userId&#x60; property must match a recipient ID (which can be retrieved with a GET recipients call) for the envelope. 
If a user ID is used and a clientUser ID is not provided, the user ID match the user ID of the authenticating user.</value> [DataMember(Name="userId", EmitDefaultValue=false)] public string UserId { get; set; } /// <summary> /// Specifies the username of the recipient. You can use either email and userName or userId to identify the recipient. /// </summary> /// <value>Specifies the username of the recipient. You can use either email and userName or userId to identify the recipient.</value> [DataMember(Name="userName", EmitDefaultValue=false)] public string UserName { get; set; } /// <summary> /// /// </summary> /// <value></value> [DataMember(Name="xFrameOptions", EmitDefaultValue=false)] public string XFrameOptions { get; set; } /// <summary> /// /// </summary> /// <value></value> [DataMember(Name="xFrameOptionsAllowFromUrl", EmitDefaultValue=false)] public string XFrameOptionsAllowFromUrl { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class RecipientViewRequest {\n"); sb.Append(" AssertionId: ").Append(AssertionId).Append("\n"); sb.Append(" AuthenticationInstant: ").Append(AuthenticationInstant).Append("\n"); sb.Append(" AuthenticationMethod: ").Append(AuthenticationMethod).Append("\n"); sb.Append(" ClientUserId: ").Append(ClientUserId).Append("\n"); sb.Append(" Email: ").Append(Email).Append("\n"); sb.Append(" PingFrequency: ").Append(PingFrequency).Append("\n"); sb.Append(" PingUrl: ").Append(PingUrl).Append("\n"); sb.Append(" RecipientId: ").Append(RecipientId).Append("\n"); sb.Append(" ReturnUrl: ").Append(ReturnUrl).Append("\n"); sb.Append(" SecurityDomain: ").Append(SecurityDomain).Append("\n"); sb.Append(" UserId: ").Append(UserId).Append("\n"); sb.Append(" UserName: ").Append(UserName).Append("\n"); sb.Append(" XFrameOptions: ").Append(XFrameOptions).Append("\n"); sb.Append(" 
XFrameOptionsAllowFromUrl: ").Append(XFrameOptionsAllowFromUrl).Append("\n");
            sb.Append("}\n");
            return sb.ToString();
        }
  
        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public string ToJson()
        {
            // Indented output is for human readability only; the content is unchanged.
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="obj">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object obj)
        {
            // credit: http://stackoverflow.com/a/10454552/677735
            // 'as' yields null for a non-RecipientViewRequest, which the typed
            // Equals overload below treats as "not equal".
            return this.Equals(obj as RecipientViewRequest);
        }

        /// <summary>
        /// Returns true if RecipientViewRequest instances are equal
        /// </summary>
        /// <param name="other">Instance of RecipientViewRequest to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(RecipientViewRequest other)
        {
            // credit: http://stackoverflow.com/a/10454552/677735
            if (other == null)
                return false;

            // Value equality over all 14 properties. Each clause reads as
            // (a == b) || (a != null && a.Equals(b)) because && binds tighter
            // than ||: equal references (incl. both null) OR non-null value match.
            return 
                (
                    this.AssertionId == other.AssertionId ||
                    this.AssertionId != null &&
                    this.AssertionId.Equals(other.AssertionId)
                ) && 
                (
                    this.AuthenticationInstant == other.AuthenticationInstant ||
                    this.AuthenticationInstant != null &&
                    this.AuthenticationInstant.Equals(other.AuthenticationInstant)
                ) && 
                (
                    this.AuthenticationMethod == other.AuthenticationMethod ||
                    this.AuthenticationMethod != null &&
                    this.AuthenticationMethod.Equals(other.AuthenticationMethod)
                ) && 
                (
                    this.ClientUserId == other.ClientUserId ||
                    this.ClientUserId != null &&
                    this.ClientUserId.Equals(other.ClientUserId)
                ) && 
                (
                    this.Email == other.Email ||
                    this.Email != null &&
                    this.Email.Equals(other.Email)
                ) && 
                (
                    this.PingFrequency == other.PingFrequency ||
                    this.PingFrequency != null &&
                    this.PingFrequency.Equals(other.PingFrequency)
                ) && 
                (
                    this.PingUrl == other.PingUrl ||
                    this.PingUrl != null &&
                    this.PingUrl.Equals(other.PingUrl)
                ) && 
                (
                    this.RecipientId == other.RecipientId ||
                    this.RecipientId != null &&
                    this.RecipientId.Equals(other.RecipientId)
                ) && 
                (
                    this.ReturnUrl == other.ReturnUrl ||
                    this.ReturnUrl != null &&
                    this.ReturnUrl.Equals(other.ReturnUrl)
                ) && 
                (
                    this.SecurityDomain == other.SecurityDomain ||
                    this.SecurityDomain != null &&
                    this.SecurityDomain.Equals(other.SecurityDomain)
                ) && 
                (
                    this.UserId == other.UserId ||
                    this.UserId != null &&
                    this.UserId.Equals(other.UserId)
                ) && 
                (
                    this.UserName == other.UserName ||
                    this.UserName != null &&
                    this.UserName.Equals(other.UserName)
                ) && 
                (
                    this.XFrameOptions == other.XFrameOptions ||
                    this.XFrameOptions != null &&
                    this.XFrameOptions.Equals(other.XFrameOptions)
                ) && 
                (
                    this.XFrameOptionsAllowFromUrl == other.XFrameOptionsAllowFromUrl ||
                    this.XFrameOptionsAllowFromUrl != null &&
                    this.XFrameOptionsAllowFromUrl.Equals(other.XFrameOptionsAllowFromUrl)
                );
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            // credit: http://stackoverflow.com/a/263416/677735
            // Standard prime-multiplier hash (seed 41, factor 59) combining the
            // same 14 properties used by Equals, so Equals/GetHashCode agree.
            unchecked // Overflow is fine, just wrap
            {
                int hash = 41;
                // Suitable nullity checks etc, of course :)
                if (this.AssertionId != null)
                    hash = hash * 59 + this.AssertionId.GetHashCode();
                if (this.AuthenticationInstant != null)
                    hash = hash * 59 + this.AuthenticationInstant.GetHashCode();
                if (this.AuthenticationMethod != null)
                    hash = hash * 59 + this.AuthenticationMethod.GetHashCode();
                if (this.ClientUserId != null)
                    hash = hash * 59 + this.ClientUserId.GetHashCode();
                if (this.Email != null)
                    hash = hash * 59 + this.Email.GetHashCode();
                if (this.PingFrequency != null)
                    hash = hash * 59 + this.PingFrequency.GetHashCode();
                if (this.PingUrl != null)
                    hash = hash * 59 + this.PingUrl.GetHashCode();
                if (this.RecipientId != null)
                    hash = hash * 59 + this.RecipientId.GetHashCode();
                if (this.ReturnUrl != null)
                    hash = hash * 59 + this.ReturnUrl.GetHashCode();
                if (this.SecurityDomain != null)
                    hash = hash * 59 + this.SecurityDomain.GetHashCode();
                if (this.UserId != null)
                    hash = hash * 59 + this.UserId.GetHashCode();
                if (this.UserName != null)
                    hash = hash * 59 + this.UserName.GetHashCode();
                if (this.XFrameOptions != null)
                    hash = hash * 59 + this.XFrameOptions.GetHashCode();
                if (this.XFrameOptionsAllowFromUrl != null)
                    hash = hash * 59 + this.XFrameOptionsAllowFromUrl.GetHashCode();
                return hash;
            }
        }

        /// <summary>
        /// Validation hook matching the IValidatableObject.Validate signature
        /// (class header is outside this chunk — NOTE(review): confirm the class
        /// declares IValidatableObject). This model defines no cross-property
        /// rules, so no validation results are produced.
        /// </summary>
        public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
        {
            yield break;
        }
    }
}
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using System.Collections.Immutable;
using Microsoft.CodeAnalysis.CodeFixes;
using System.Threading.Tasks;
using System.Linq;

namespace RefactoringEssentials.CSharp.Diagnostics
{
    /// <summary>
    /// Analyzer scaffolding for CS0618 ("member is obsolete") usages.
    /// Not yet ported: <see cref="Initialize"/> registers no actions, so the
    /// analyzer is currently inert.
    /// </summary>
    [DiagnosticAnalyzer(LanguageNames.CSharp)]
    [NotPortedYet]
    public class CS0618UsageOfObsoleteMemberAnalyzer : DiagnosticAnalyzer
    {
        internal const string DiagnosticId = "CS0618UsageOfObsoleteMemberAnalyzer";
        const string Description = "CS0618: Member is obsolete";
        // FIX: was "" — an empty message format makes every reported diagnostic
        // render with an empty message in the IDE/error list. Use the member
        // name placeholder instead (filled in by Diagnostic.Create message args).
        const string MessageFormat = "'{0}' is obsolete";
        const string Category = DiagnosticAnalyzerCategories.CompilerWarnings;

        static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(DiagnosticId, Description, MessageFormat, Category, DiagnosticSeverity.Warning, true, "CS0618: Member is obsolete");

        /// <summary>The single descriptor this analyzer can report.</summary>
        public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics
        {
            get { return ImmutableArray.Create(Rule); }
        }

        /// <summary>
        /// No syntax-node actions are registered yet; porting TODO: resolve the
        /// referenced member at each usage site, check for [Obsolete], and report.
        /// </summary>
        public override void Initialize(AnalysisContext context)
        {
            //context.RegisterSyntaxNodeAction(
            //    (nodeContext) => {
            //        Diagnostic diagnostic;
            //        if (TryGetDiagnostic(nodeContext, out diagnostic))
            //            nodeContext.ReportDiagnostic(diagnostic);
            //    },
            //    new SyntaxKind[] { SyntaxKind.None }
            //);
        }

        /// <summary>
        /// Placeholder for the diagnostic check; always returns false until ported.
        /// </summary>
        static bool TryGetDiagnostic(SyntaxNodeAnalysisContext nodeContext, out Diagnostic diagnostic)
        {
            diagnostic = default(Diagnostic);
            if (nodeContext.IsFromGeneratedCode())
                return false;
            // TODO: inspect nodeContext.Node, create the diagnostic and return true.
            return false;
        }
    }

    /// <summary>
    /// Code-fix provider paired with <see cref="CS0618UsageOfObsoleteMemberAnalyzer"/>.
    /// Offers removal of the node the diagnostic is reported on.
    /// </summary>
    [ExportCodeFixProvider(LanguageNames.CSharp), System.Composition.Shared]
    [NotPortedYet]
    public class CS0618UsageOfObsoleteMemberFixProvider : CodeFixProvider
    {
        public override ImmutableArray<string> FixableDiagnosticIds
        {
            get { return ImmutableArray.Create(CS0618UsageOfObsoleteMemberAnalyzer.DiagnosticId); }
        }

        public override FixAllProvider GetFixAllProvider()
        {
            return WellKnownFixAllProviders.BatchFixer;
        }

        public async override Task RegisterCodeFixesAsync(CodeFixContext context)
        {
            var document = context.Document;
            var cancellationToken = context.CancellationToken;
            var diagnostics = context.Diagnostics;
            // ConfigureAwait(false): library code, no need to resume on the original context.
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var diagnostic = diagnostics.First();
            var node = root.FindNode(context.Span);
            // Fix = remove the offending node entirely, dropping its trivia.
            var newRoot = root.RemoveNode(node, SyntaxRemoveOptions.KeepNoTrivia);
            context.RegisterCodeFix(CodeActionFactory.Create(node.Span, diagnostic.Severity, diagnostic.GetMessage(), document.WithSyntaxRoot(newRoot)), diagnostic);
        }
    }
}
// // Copyright (c) 2004-2017 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // using NLog.Common; namespace NLog.Conditions { using System; using System.Collections.Generic; using System.Globalization; using NLog.Config; using NLog.Internal; using NLog.Layouts; /// <summary> /// Condition parser. Turns a string representation of condition expression /// into an expression tree. 
/// </summary> public class ConditionParser { private readonly ConditionTokenizer _tokenizer; private readonly ConfigurationItemFactory _configurationItemFactory; /// <summary> /// Initializes a new instance of the <see cref="ConditionParser"/> class. /// </summary> /// <param name="stringReader">The string reader.</param> /// <param name="configurationItemFactory">Instance of <see cref="ConfigurationItemFactory"/> used to resolve references to condition methods and layout renderers.</param> private ConditionParser(SimpleStringReader stringReader, ConfigurationItemFactory configurationItemFactory) { _configurationItemFactory = configurationItemFactory; _tokenizer = new ConditionTokenizer(stringReader); } /// <summary> /// Parses the specified condition string and turns it into /// <see cref="ConditionExpression"/> tree. /// </summary> /// <param name="expressionText">The expression to be parsed.</param> /// <returns>The root of the expression syntax tree which can be used to get the value of the condition in a specified context.</returns> public static ConditionExpression ParseExpression(string expressionText) { return ParseExpression(expressionText, ConfigurationItemFactory.Default); } /// <summary> /// Parses the specified condition string and turns it into /// <see cref="ConditionExpression"/> tree. 
/// </summary> /// <param name="expressionText">The expression to be parsed.</param> /// <param name="configurationItemFactories">Instance of <see cref="ConfigurationItemFactory"/> used to resolve references to condition methods and layout renderers.</param> /// <returns>The root of the expression syntax tree which can be used to get the value of the condition in a specified context.</returns> public static ConditionExpression ParseExpression(string expressionText, ConfigurationItemFactory configurationItemFactories) { if (expressionText == null) { return null; } var parser = new ConditionParser(new SimpleStringReader(expressionText), configurationItemFactories); ConditionExpression expression = parser.ParseExpression(); if (!parser._tokenizer.IsEOF()) { throw new ConditionParseException($"Unexpected token: {parser._tokenizer.TokenValue}"); } return expression; } /// <summary> /// Parses the specified condition string and turns it into /// <see cref="ConditionExpression"/> tree. /// </summary> /// <param name="stringReader">The string reader.</param> /// <param name="configurationItemFactories">Instance of <see cref="ConfigurationItemFactory"/> used to resolve references to condition methods and layout renderers.</param> /// <returns> /// The root of the expression syntax tree which can be used to get the value of the condition in a specified context. 
/// </returns> internal static ConditionExpression ParseExpression(SimpleStringReader stringReader, ConfigurationItemFactory configurationItemFactories) { var parser = new ConditionParser(stringReader, configurationItemFactories); ConditionExpression expression = parser.ParseExpression(); return expression; } private ConditionMethodExpression ParsePredicate(string functionName) { var par = new List<ConditionExpression>(); while (!_tokenizer.IsEOF() && _tokenizer.TokenType != ConditionTokenType.RightParen) { par.Add(ParseExpression()); if (_tokenizer.TokenType != ConditionTokenType.Comma) { break; } _tokenizer.GetNextToken(); } _tokenizer.Expect(ConditionTokenType.RightParen); try { var methodInfo = _configurationItemFactory.ConditionMethods.CreateInstance(functionName); return new ConditionMethodExpression(functionName, methodInfo, par); } catch (Exception exception) { InternalLogger.Warn(exception, "Cannot resolve function '{0}'", functionName); if (exception.MustBeRethrownImmediately()) { throw; } throw new ConditionParseException($"Cannot resolve function '{functionName}'", exception); } } private ConditionExpression ParseLiteralExpression() { if (_tokenizer.IsToken(ConditionTokenType.LeftParen)) { _tokenizer.GetNextToken(); ConditionExpression e = ParseExpression(); _tokenizer.Expect(ConditionTokenType.RightParen); return e; } if (_tokenizer.IsToken(ConditionTokenType.Minus)) { _tokenizer.GetNextToken(); if (!_tokenizer.IsNumber()) { throw new ConditionParseException($"Number expected, got {_tokenizer.TokenType}"); } string numberString = _tokenizer.TokenValue; _tokenizer.GetNextToken(); if (numberString.IndexOf('.') >= 0) { return new ConditionLiteralExpression(-double.Parse(numberString, CultureInfo.InvariantCulture)); } return new ConditionLiteralExpression(-int.Parse(numberString, CultureInfo.InvariantCulture)); } if (_tokenizer.IsNumber()) { string numberString = _tokenizer.TokenValue; _tokenizer.GetNextToken(); if (numberString.IndexOf('.') >= 0) { return 
new ConditionLiteralExpression(double.Parse(numberString, CultureInfo.InvariantCulture)); } return new ConditionLiteralExpression(int.Parse(numberString, CultureInfo.InvariantCulture)); } if (_tokenizer.TokenType == ConditionTokenType.String) { ConditionExpression e = new ConditionLayoutExpression(Layout.FromString(_tokenizer.StringTokenValue, _configurationItemFactory)); _tokenizer.GetNextToken(); return e; } if (_tokenizer.TokenType == ConditionTokenType.Keyword) { string keyword = _tokenizer.EatKeyword(); if (0 == string.Compare(keyword, "level", StringComparison.OrdinalIgnoreCase)) { return new ConditionLevelExpression(); } if (0 == string.Compare(keyword, "logger", StringComparison.OrdinalIgnoreCase)) { return new ConditionLoggerNameExpression(); } if (0 == string.Compare(keyword, "message", StringComparison.OrdinalIgnoreCase)) { return new ConditionMessageExpression(); } if (0 == string.Compare(keyword, "loglevel", StringComparison.OrdinalIgnoreCase)) { _tokenizer.Expect(ConditionTokenType.Dot); return new ConditionLiteralExpression(LogLevel.FromString(_tokenizer.EatKeyword())); } if (0 == string.Compare(keyword, "true", StringComparison.OrdinalIgnoreCase)) { return new ConditionLiteralExpression(true); } if (0 == string.Compare(keyword, "false", StringComparison.OrdinalIgnoreCase)) { return new ConditionLiteralExpression(false); } if (0 == string.Compare(keyword, "null", StringComparison.OrdinalIgnoreCase)) { return new ConditionLiteralExpression(null); } if (_tokenizer.TokenType == ConditionTokenType.LeftParen) { _tokenizer.GetNextToken(); ConditionMethodExpression predicateExpression = ParsePredicate(keyword); return predicateExpression; } } throw new ConditionParseException("Unexpected token: " + _tokenizer.TokenValue); } private ConditionExpression ParseBooleanRelation() { ConditionExpression e = ParseLiteralExpression(); if (_tokenizer.IsToken(ConditionTokenType.EqualTo)) { _tokenizer.GetNextToken(); return new ConditionRelationalExpression(e, 
ParseLiteralExpression(), ConditionRelationalOperator.Equal); } if (_tokenizer.IsToken(ConditionTokenType.NotEqual)) { _tokenizer.GetNextToken(); return new ConditionRelationalExpression(e, ParseLiteralExpression(), ConditionRelationalOperator.NotEqual); } if (_tokenizer.IsToken(ConditionTokenType.LessThan)) { _tokenizer.GetNextToken(); return new ConditionRelationalExpression(e, ParseLiteralExpression(), ConditionRelationalOperator.Less); } if (_tokenizer.IsToken(ConditionTokenType.GreaterThan)) { _tokenizer.GetNextToken(); return new ConditionRelationalExpression(e, ParseLiteralExpression(), ConditionRelationalOperator.Greater); } if (_tokenizer.IsToken(ConditionTokenType.LessThanOrEqualTo)) { _tokenizer.GetNextToken(); return new ConditionRelationalExpression(e, ParseLiteralExpression(), ConditionRelationalOperator.LessOrEqual); } if (_tokenizer.IsToken(ConditionTokenType.GreaterThanOrEqualTo)) { _tokenizer.GetNextToken(); return new ConditionRelationalExpression(e, ParseLiteralExpression(), ConditionRelationalOperator.GreaterOrEqual); } return e; } private ConditionExpression ParseBooleanPredicate() { if (_tokenizer.IsKeyword("not") || _tokenizer.IsToken(ConditionTokenType.Not)) { _tokenizer.GetNextToken(); return new ConditionNotExpression(ParseBooleanPredicate()); } return ParseBooleanRelation(); } private ConditionExpression ParseBooleanAnd() { ConditionExpression expression = ParseBooleanPredicate(); while (_tokenizer.IsKeyword("and") || _tokenizer.IsToken(ConditionTokenType.And)) { _tokenizer.GetNextToken(); expression = new ConditionAndExpression(expression, ParseBooleanPredicate()); } return expression; } private ConditionExpression ParseBooleanOr() { ConditionExpression expression = ParseBooleanAnd(); while (_tokenizer.IsKeyword("or") || _tokenizer.IsToken(ConditionTokenType.Or)) { _tokenizer.GetNextToken(); expression = new ConditionOrExpression(expression, ParseBooleanAnd()); } return expression; } private ConditionExpression ParseBooleanExpression() 
{ return ParseBooleanOr(); } private ConditionExpression ParseExpression() { return ParseBooleanExpression(); } } }
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Nethereum.ABI.FunctionEncoding;
using Nethereum.Hex.HexTypes;
using Nethereum.RPC.Eth.DTOs;

namespace Nethereum.Contracts
{
    /// <summary>
    /// Non-generic wrapper around a single contract function. Every member either
    /// builds an eth_call / transaction input through the underlying
    /// <see cref="FunctionBuilder"/>, or forwards an already-built input to the
    /// corresponding FunctionBase operation. No member adds behavior of its own.
    /// </summary>
    public class Function : FunctionBase
    {
        public Function(Contract contract, FunctionBuilder functionBuilder)
            : base(contract, functionBuilder)
        {
        }

        /// <summary>The builder used to ABI-encode this function's inputs.</summary>
        protected FunctionBuilder FunctionBuilder => (FunctionBuilder) FunctionBuilderBase;

        // ---------- Input construction (no RPC interaction) ----------

        /// <summary>Builds an eth_call input encoding the given function arguments.</summary>
        public CallInput CreateCallInput(params object[] functionInput)
            => FunctionBuilder.CreateCallInput(functionInput);

        /// <summary>Builds an eth_call input with explicit sender, gas and value.</summary>
        public CallInput CreateCallInput(string from, HexBigInteger gas,
                                         HexBigInteger value,
                                         params object[] functionInput)
            => FunctionBuilder.CreateCallInput(from, gas, value, functionInput);

        /// <summary>Returns the ABI-encoded call data for the given arguments.</summary>
        public string GetData(params object[] functionInput)
            => FunctionBuilder.GetData(functionInput);

        public TransactionInput CreateTransactionInput(string from, params object[] functionInput)
            => FunctionBuilder.CreateTransactionInput(from, functionInput);

        public TransactionInput CreateTransactionInput(string from, HexBigInteger gas,
                                                       HexBigInteger value,
                                                       params object[] functionInput)
            => FunctionBuilder.CreateTransactionInput(from, gas, value, functionInput);

        public TransactionInput CreateTransactionInput(string from, HexBigInteger gas,
                                                       HexBigInteger gasPrice, HexBigInteger value,
                                                       params object[] functionInput)
            => FunctionBuilder.CreateTransactionInput(from, gas, gasPrice, value, functionInput);

        /// <summary>Encodes the function arguments into an existing transaction input.</summary>
        public TransactionInput CreateTransactionInput(TransactionInput input, params object[] functionInput)
            => FunctionBuilder.CreateTransactionInput(input, functionInput);

        /// <summary>EIP-1559 style input: explicit transaction type plus fee caps.</summary>
        public TransactionInput CreateTransactionInput(HexBigInteger type, string from, HexBigInteger gas,
                                                       HexBigInteger value, HexBigInteger maxFeePerGas,
                                                       HexBigInteger maxPriorityFeePerGas,
                                                       params object[] functionInput)
            => FunctionBuilder.CreateTransactionInput(type, from, gas, value, maxFeePerGas, maxPriorityFeePerGas,
                functionInput);

        /// <summary>EIP-1559 style input without an explicit transaction type.</summary>
        public TransactionInput CreateTransactionInput(string from, HexBigInteger gas,
                                                       HexBigInteger value, HexBigInteger maxFeePerGas,
                                                       HexBigInteger maxPriorityFeePerGas,
                                                       params object[] functionInput)
            => FunctionBuilder.CreateTransactionInput(from, gas, value, maxFeePerGas, maxPriorityFeePerGas,
                functionInput);

#if !DOTNET35
        // ---------- eth_call queries ----------

        /// <summary>Calls the function and decodes the output using the default output parameters.</summary>
        public Task<List<ParameterOutput>> CallDecodingToDefaultAsync(params object[] functionInput)
            => base.CallDecodingToDefaultAsync(CreateCallInput(functionInput));

        public Task<List<ParameterOutput>> CallDecodingToDefaultAsync(string from, HexBigInteger gas,
                                                                      HexBigInteger value,
                                                                      params object[] functionInput)
            => base.CallDecodingToDefaultAsync(CreateCallInput(from, gas, value, functionInput));

        public Task<List<ParameterOutput>> CallDecodingToDefaultAsync(string from, HexBigInteger gas,
                                                                      HexBigInteger value, BlockParameter block,
                                                                      params object[] functionInput)
            => base.CallDecodingToDefaultAsync(CreateCallInput(from, gas, value, functionInput), block);

        public Task<List<ParameterOutput>> CallDecodingToDefaultAsync(BlockParameter block,
                                                                      params object[] functionInput)
            => base.CallDecodingToDefaultAsync(CreateCallInput(functionInput), block);

        /// <summary>Calls the function and decodes the single output value as <typeparamref name="TReturn"/>.</summary>
        public Task<TReturn> CallAsync<TReturn>(params object[] functionInput)
            => base.CallAsync<TReturn>(CreateCallInput(functionInput));

        public Task<TReturn> CallAsync<TReturn>(string from, HexBigInteger gas,
                                                HexBigInteger value,
                                                params object[] functionInput)
            => base.CallAsync<TReturn>(CreateCallInput(from, gas, value, functionInput));

        public Task<TReturn> CallAsync<TReturn>(string from, HexBigInteger gas,
                                                HexBigInteger value, BlockParameter block,
                                                params object[] functionInput)
            => base.CallAsync<TReturn>(CreateCallInput(from, gas, value, functionInput), block);

        public Task<TReturn> CallAsync<TReturn>(BlockParameter block, params object[] functionInput)
            => base.CallAsync<TReturn>(CreateCallInput(functionInput), block);

        /// <summary>Calls the function and deserializes the decoded outputs into a new <typeparamref name="TReturn"/>.</summary>
        public Task<TReturn> CallDeserializingToObjectAsync<TReturn>(params object[] functionInput)
            where TReturn : new()
            => base.CallAsync(new TReturn(), CreateCallInput(functionInput));

        public Task<TReturn> CallDeserializingToObjectAsync<TReturn>(string from, HexBigInteger gas,
                                                                     HexBigInteger value,
                                                                     params object[] functionInput)
            where TReturn : new()
            => base.CallAsync(new TReturn(), CreateCallInput(from, gas, value, functionInput));

        public Task<TReturn> CallDeserializingToObjectAsync<TReturn>(string from, HexBigInteger gas,
                                                                     HexBigInteger value, BlockParameter block,
                                                                     params object[] functionInput)
            where TReturn : new()
            => base.CallAsync(new TReturn(), CreateCallInput(from, gas, value, functionInput), block);

        public Task<TReturn> CallDeserializingToObjectAsync<TReturn>(BlockParameter blockParameter,
                                                                     params object[] functionInput)
            where TReturn : new()
            => base.CallAsync(new TReturn(), CreateCallInput(functionInput), blockParameter);

        // ---------- Gas estimation ----------

        public Task<HexBigInteger> EstimateGasAsync(params object[] functionInput)
            => EstimateGasFromEncAsync(CreateCallInput(functionInput));

        public Task<HexBigInteger> EstimateGasAsync(string from, HexBigInteger gas,
                                                    HexBigInteger value,
                                                    params object[] functionInput)
            => EstimateGasFromEncAsync(CreateCallInput(from, gas, value, functionInput));

        // ---------- Transaction submission ----------

        /// <summary>Sends a transaction invoking this function; returns the transaction hash.</summary>
        public Task<string> SendTransactionAsync(string from, params object[] functionInput)
            => base.SendTransactionAsync(CreateTransactionInput(from, functionInput));

        public Task<string> SendTransactionAsync(string from, HexBigInteger gas,
                                                 HexBigInteger value,
                                                 params object[] functionInput)
            => base.SendTransactionAsync(CreateTransactionInput(from, gas, value, functionInput));

        public Task<string> SendTransactionAsync(HexBigInteger type, string from, HexBigInteger gas,
                                                 HexBigInteger value, HexBigInteger maxFeePerGas,
                                                 HexBigInteger maxPriorityFeePerGas,
                                                 params object[] functionInput)
            => base.SendTransactionAsync(CreateTransactionInput(type, from, gas, value, maxFeePerGas,
                maxPriorityFeePerGas, functionInput));

        public Task<string> SendTransactionAsync(string from, HexBigInteger gas,
                                                 HexBigInteger value, HexBigInteger maxFeePerGas,
                                                 HexBigInteger maxPriorityFeePerGas,
                                                 params object[] functionInput)
            => base.SendTransactionAsync(CreateTransactionInput(from, gas, value, maxFeePerGas,
                maxPriorityFeePerGas, functionInput));

        public Task<string> SendTransactionAsync(string from, HexBigInteger gas, HexBigInteger gasPrice,
                                                 HexBigInteger value,
                                                 params object[] functionInput)
            => base.SendTransactionAsync(CreateTransactionInput(from, gas, gasPrice, value, functionInput));

        public Task<string> SendTransactionAsync(TransactionInput input, params object[] functionInput)
            => base.SendTransactionAsync(CreateTransactionInput(input, functionInput));

        // ---------- Transaction submission with receipt polling ----------

        /// <summary>Sends the transaction and polls until its receipt is available (or cancellation is requested).</summary>
        public Task<TransactionReceipt> SendTransactionAndWaitForReceiptAsync(string from,
            CancellationTokenSource receiptRequestCancellationToken = null,
            params object[] functionInput)
            => base.SendTransactionAndWaitForReceiptAsync(CreateTransactionInput(from, functionInput),
                receiptRequestCancellationToken);

        public Task<TransactionReceipt> SendTransactionAndWaitForReceiptAsync(string from, HexBigInteger gas,
            HexBigInteger value,
            CancellationTokenSource receiptRequestCancellationToken = null,
            params object[] functionInput)
            => base.SendTransactionAndWaitForReceiptAsync(CreateTransactionInput(from, gas, value, functionInput),
                receiptRequestCancellationToken);

        public Task<TransactionReceipt> SendTransactionAndWaitForReceiptAsync(string from, HexBigInteger gas,
            HexBigInteger gasPrice, HexBigInteger value,
            CancellationTokenSource receiptRequestCancellationToken = null,
            params object[] functionInput)
            => base.SendTransactionAndWaitForReceiptAsync(
                CreateTransactionInput(from, gas, gasPrice, value, functionInput),
                receiptRequestCancellationToken);

        public Task<TransactionReceipt> SendTransactionAndWaitForReceiptAsync(TransactionInput input,
            CancellationTokenSource receiptRequestCancellationToken = null,
            params object[] functionInput)
            => base.SendTransactionAndWaitForReceiptAsync(CreateTransactionInput(input, functionInput),
                receiptRequestCancellationToken);

        public Task<TransactionReceipt> SendTransactionAndWaitForReceiptAsync(HexBigInteger type, string from,
            HexBigInteger gas, HexBigInteger value, HexBigInteger maxFeePerGas,
            HexBigInteger maxPriorityFeePerGas,
            params object[] functionInput)
            => base.SendTransactionAndWaitForReceiptAsync(CreateTransactionInput(type, from, gas, value,
                maxFeePerGas, maxPriorityFeePerGas, functionInput));

        public Task<TransactionReceipt> SendTransactionAndWaitForReceiptAsync(string from, HexBigInteger gas,
            HexBigInteger value, HexBigInteger maxFeePerGas, HexBigInteger maxPriorityFeePerGas,
            params object[] functionInput)
            => base.SendTransactionAndWaitForReceiptAsync(CreateTransactionInput(from, gas, value,
                maxFeePerGas, maxPriorityFeePerGas, functionInput));
#endif
    }
}
//#define ASTARDEBUG //"BBTree Debug" If enables, some queries to the tree will show debug lines. Turn off multithreading when using this since DrawLine calls cannot be called from a different thread

using System;
using UnityEngine;

namespace Pathfinding {
    using Pathfinding;

    /** Axis Aligned Bounding Box Tree.
     * Holds a bounding box tree of triangles.
     *
     * \astarpro */
    public class BBTree {
        /** Holds an Axis Aligned Bounding Box Tree used for faster node lookups.
         * \astarpro */
        // Flat array storage: each element is a tree node; children are referenced by
        // index (BBTreeBox.left/right). Index 0 is the root (see SearchBox calls below).
        BBTreeBox[] arr = new BBTreeBox[6];
        // Number of used entries in #arr.
        int count;

        // World-space (float) bounds of the whole tree; converts the root's integer
        // rect back to floats via Int3.PrecisionFactor.
        public Rect Size {
            get {
                if (count == 0) {
                    return new Rect(0, 0, 0, 0);
                } else {
                    var rect = arr[0].rect;
                    return Rect.MinMaxRect(rect.xmin*Int3.PrecisionFactor, rect.ymin*Int3.PrecisionFactor, rect.xmax*Int3.PrecisionFactor, rect.ymax*Int3.PrecisionFactor);
                }
            }
        }

        /** Clear the tree.
         * Note that references to old nodes will still be intact so the GC cannot immediately collect them. */
        public void Clear () {
            count = 0;
        }

        // Grows #arr to at least c entries (growth factor 1.5), copying the used prefix.
        void EnsureCapacity (int c) {
            if (arr.Length < c) {
                var narr = new BBTreeBox[Math.Max(c, (int)(arr.Length*1.5f))];
                for (int i = 0; i < count; i++) {
                    narr[i] = arr[i];
                }
                arr = narr;
            }
        }

        // Allocates a new leaf box wrapping \a node; returns its index in #arr.
        int GetBox (MeshNode node) {
            if (count >= arr.Length) EnsureCapacity(count+1);

            arr[count] = new BBTreeBox(node);
            count++;
            return count-1;
        }

        // Allocates a new interior box with the given bounds; returns its index in #arr.
        int GetBox (IntRect rect) {
            if (count >= arr.Length) EnsureCapacity(count+1);

            arr[count] = new BBTreeBox(rect);
            count++;
            return count-1;
        }

        /** Rebuilds the tree using the specified nodes.
         * This is faster and gives better quality results compared to calling Insert with all nodes */
        public void RebuildFrom (MeshNode[] nodes) {
            Clear();

            if (nodes.Length == 0) {
                return;
            }

            if (nodes.Length == 1) {
                GetBox(nodes[0]);
                return;
            }

            // We will use approximately 2N tree nodes
            EnsureCapacity(Mathf.CeilToInt(nodes.Length * 2.1f));

            // Make a copy of the nodes array since we will be modifying it
            var nodeCopies = new MeshNode[nodes.Length];
            for (int i = 0; i < nodes.Length; i++) nodeCopies[i] = nodes[i];

            RebuildFromInternal(nodeCopies, 0, nodes.Length, false);
        }

        // Partitions nodes[from..to) so nodes with position.x <= divider come first;
        // returns the split index. O(n) single pass with swap-to-end.
        static int SplitByX (MeshNode[] nodes, int from, int to, int divider) {
            int mx = to;

            for (int i = from; i < mx; i++) {
                if (nodes[i].position.x > divider) {
                    // swap with mx
                    mx--;
                    var tmp = nodes[mx];
                    nodes[mx] = nodes[i];
                    nodes[i] = tmp;
                    i--;   // re-examine the element just swapped into slot i
                }
            }
            return mx;
        }

        // Same as SplitByX but partitions on position.z.
        static int SplitByZ (MeshNode[] nodes, int from, int to, int divider) {
            int mx = to;

            for (int i = from; i < mx; i++) {
                if (nodes[i].position.z > divider) {
                    // swap with mx
                    mx--;
                    var tmp = nodes[mx];
                    nodes[mx] = nodes[i];
                    nodes[i] = tmp;
                    i--;   // re-examine the element just swapped into slot i
                }
            }
            return mx;
        }

        // Recursive top-down build over nodes[from..to); \a odd alternates the split
        // axis per level (X vs Z). Returns the index of the subtree's root box.
        int RebuildFromInternal (MeshNode[] nodes, int from, int to, bool odd) {
            if (to - from <= 0) throw new ArgumentException();

            if (to - from == 1) {
                return GetBox(nodes[from]);
            }

            var rect = NodeBounds(nodes, from, to);
            int box = GetBox(rect);

            // Performance optimization for a common case
            if (to - from == 2) {
                arr[box].left = GetBox(nodes[from]);
                arr[box].right = GetBox(nodes[from+1]);
                return box;
            }

            int mx;
            if (odd) {
                // X
                int divider = (rect.xmin + rect.xmax)/2;
                mx = SplitByX(nodes, from, to, divider);
            } else {
                // Y/Z
                int divider = (rect.ymin + rect.ymax)/2;
                mx = SplitByZ(nodes, from, to, divider);
            }

            if (mx == from || mx == to) {
                // All nodes were on one side of the divider
                // Try to split along the other axis
                if (!odd) {
                    // X
                    int divider = (rect.xmin + rect.xmax)/2;
                    mx = SplitByX(nodes, from, to, divider);
                } else {
                    // Y/Z
                    int divider = (rect.ymin + rect.ymax)/2;
                    mx = SplitByZ(nodes, from, to, divider);
                }

                if (mx == from || mx == to) {
                    // All nodes were on one side of the divider
                    // Just pick one half
                    mx = (from+to)/2;
                }
            }

            arr[box].left = RebuildFromInternal(nodes, from, mx, !odd);
            arr[box].right = RebuildFromInternal(nodes, mx, to, !odd);
            return box;
        }

        /** Calculates the bounding box in XZ space of all nodes between \a from (inclusive) and \a to (exclusive) */
        static IntRect NodeBounds (MeshNode[] nodes, int from, int to) {
            if (to - from <= 0) throw new ArgumentException();

            var first = nodes[from].GetVertex(0);
            var min = new Int2(first.x, first.z);
            Int2 max = min;

            for (int j = from; j < to; j++) {
                var node = nodes[j];
                var nverts = node.GetVertexCount();
                for (int i = 0; i < nverts; i++) {
                    var p = node.GetVertex(i);
                    // Int2.y stores the world Z coordinate throughout this class.
                    min.x = Math.Min(min.x, p.x);
                    min.y = Math.Min(min.y, p.z);
                    max.x = Math.Max(max.x, p.x);
                    max.y = Math.Max(max.y, p.z);
                }
            }

            return new IntRect(min.x, min.y, max.x, max.y);
        }

        /** Inserts a mesh node in the tree */
        // Classic R-tree style insert: walk down choosing the child whose rect needs
        // the least expansion, splitting a leaf into (new leaf, old leaf) on arrival.
        public void Insert (MeshNode node) {
            int boxi = GetBox(node);

            // Was set to root
            if (boxi == 0) {
                return;
            }

            BBTreeBox box = arr[boxi];

            //int depth = 0;

            int c = 0;
            while (true) {
                BBTreeBox cb = arr[c];

                cb.rect = ExpandToContain(cb.rect, box.rect);
                if (cb.node != null) {
                    //Is Leaf
                    cb.left = boxi;

                    int box2 = GetBox(cb.node);
                    //BBTreeBox box2 = new BBTreeBox (this,c.node);

                    //Console.WriteLine ("Inserted "+box.node+", rect "+box.rect.ToString ());
                    cb.right = box2;
                    cb.node = null;
                    //cb.depth++;
                    //c.rect = c.rect.
                    arr[c] = cb;
                    //Debug.Log (depth);
                    return;
                } else {
                    //depth++;
                    //cb.depth++;
                    arr[c] = cb;

                    int e1 = ExpansionRequired(arr[cb.left].rect, box.rect);// * arr[cb.left].depth;
                    int e2 = ExpansionRequired(arr[cb.right].rect, box.rect);// * arr[cb.left].depth;

                    //Choose the rect requiring the least expansion to contain box.rect
                    if (e1 < e2) {
                        c = cb.left;
                    } else if (e2 < e1) {
                        c = cb.right;
                    } else {
                        //Equal, Choose the one with the smallest area
                        c = RectArea(arr[cb.left].rect) < RectArea(arr[cb.right].rect) ? cb.left : cb.right;
                    }
                }
            }
        }

        // Point query: finds the node(s) containing \a p (see SearchBox).
        // NOTE(review): unlike QueryCircle this uses `new NNInfo()` rather than
        // `new NNInfo(null)` for the accumulator — confirm the two are equivalent.
        public NNInfo Query (Vector3 p, NNConstraint constraint) {
            if (count == 0) return new NNInfo(null);

            var nnInfo = new NNInfo();

            SearchBox(0, p, constraint, ref nnInfo);

            nnInfo.UpdateInfo();

            return nnInfo;
        }

        /** Queries the tree for the best node, searching within a circle around \a p with the specified radius.
         * Will fill in both the constrained node and the not constrained node in the NNInfo.
         *
         * \see QueryClosest
         */
        public NNInfo QueryCircle (Vector3 p, float radius, NNConstraint constraint) {
            if (count == 0) return new NNInfo(null);

            var nnInfo = new NNInfo(null);

            SearchBoxCircle(0, p, radius, constraint, ref nnInfo);

            nnInfo.UpdateInfo();

            return nnInfo;
        }

        /** Queries the tree for the closest node to \a p constrained by the NNConstraint.
         * Note that this function will, unlike QueryCircle, only fill in the constrained node.
         * If you want a node not constrained by any NNConstraint, do an additional search with constraint = NNConstraint.None
         *
         * \see QueryCircle
         */
        public NNInfo QueryClosest (Vector3 p, NNConstraint constraint, out float distance) {
            distance = float.PositiveInfinity;
            return QueryClosest(p, constraint, ref distance, new NNInfo(null));
        }

        /** Queries the tree for the closest node to \a p constrained by the NNConstraint trying to improve an existing solution.
         * Note that this function will, unlike QueryCircle, only fill in the constrained node.
         * If you want a node not constrained by any NNConstraint, do an additional search with constraint = NNConstraint.None
         *
         * This method will completely ignore any Y-axis differences in positions.
         *
         * \param p Point to search around
         * \param constraint Optionally set to constrain which nodes to return
         * \param distance The best distance for the \a previous solution. Will be updated with the best distance
         *      after this search. Will be positive infinity if no node could be found.
         *      Set to positive infinity if there was no previous solution.
         * \param previous This search will start from the \a previous NNInfo and improve it if possible.
         *      Even if the search fails on this call, the solution will never be worse than \a previous.
         *
         * \see QueryCircle
         */
        public NNInfo QueryClosestXZ (Vector3 p, NNConstraint constraint, ref float distance, NNInfo previous) {
            if (count == 0) {
                return previous;
            }

            SearchBoxClosestXZ(0, p, ref distance, constraint, ref previous);

            return previous;
        }

        // Branch-and-bound descent for QueryClosestXZ: children are only visited when
        // their rect intersects the current best-distance circle, shrinking as we go.
        void SearchBoxClosestXZ (int boxi, Vector3 p, ref float closestDist, NNConstraint constraint, ref NNInfo nnInfo) {
            BBTreeBox box = arr[boxi];

            if (box.node != null) {
                //Leaf node
                //Update the NNInfo
#if ASTARDEBUG
                Debug.DrawLine((Vector3)box.node.GetVertex(1) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(2) + Vector3.up*0.2f, Color.red);
                Debug.DrawLine((Vector3)box.node.GetVertex(0) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(1) + Vector3.up*0.2f, Color.red);
                Debug.DrawLine((Vector3)box.node.GetVertex(2) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(0) + Vector3.up*0.2f, Color.red);
#endif

                Vector3 closest = box.node.ClosestPointOnNodeXZ(p);

                if (constraint == null || constraint.Suitable(box.node)) {
                    // XZ distance
                    float dist = (closest.x-p.x)*(closest.x-p.x)+(closest.z-p.z)*(closest.z-p.z);

                    if (nnInfo.constrainedNode == null) {
                        nnInfo.constrainedNode = box.node;
                        nnInfo.constClampedPosition = closest;
                        closestDist = (float)Math.Sqrt(dist);
                    } else if (dist < closestDist*closestDist) {
                        nnInfo.constrainedNode = box.node;
                        nnInfo.constClampedPosition = closest;
                        closestDist = (float)Math.Sqrt(dist);
                    }
                }

#if ASTARDEBUG
                Debug.DrawLine((Vector3)box.node.GetVertex(0), (Vector3)box.node.GetVertex(1), Color.blue);
                Debug.DrawLine((Vector3)box.node.GetVertex(1), (Vector3)box.node.GetVertex(2), Color.blue);
                Debug.DrawLine((Vector3)box.node.GetVertex(2), (Vector3)box.node.GetVertex(0), Color.blue);
#endif
            } else {
#if ASTARDEBUG
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMin), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMax), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMin, 0, box.rect.yMax), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMax, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
#endif

                //Search children
                if (RectIntersectsCircle(arr[box.left].rect, p, closestDist)) {
                    SearchBoxClosestXZ(box.left, p, ref closestDist, constraint, ref nnInfo);
                }

                if (RectIntersectsCircle(arr[box.right].rect, p, closestDist)) {
                    SearchBoxClosestXZ(box.right, p, ref closestDist, constraint, ref nnInfo);
                }
            }
        }

        /** Queries the tree for the closest node to \a p constrained by the NNConstraint trying to improve an existing solution.
         * Note that this function will, unlike QueryCircle, only fill in the constrained node.
         * If you want a node not constrained by any NNConstraint, do an additional search with constraint = NNConstraint.None
         *
         * \param p Point to search around
         * \param constraint Optionally set to constrain which nodes to return
         * \param distance The best distance for the \a previous solution. Will be updated with the best distance
         *      after this search. Will be positive infinity if no node could be found.
         *      Set to positive infinity if there was no previous solution.
         * \param previous This search will start from the \a previous NNInfo and improve it if possible.
         *      Even if the search fails on this call, the solution will never be worse than \a previous.
         *
         * \see QueryCircle
         */
        public NNInfo QueryClosest (Vector3 p, NNConstraint constraint, ref float distance, NNInfo previous) {
            if (count == 0) return previous;

            SearchBoxClosest(0, p, ref distance, constraint, ref previous);

            return previous;
        }

        // 3D variant of SearchBoxClosestXZ: distance is full squared magnitude, and a
        // leaf is only examined if its triangle intersects the best-distance sphere.
        void SearchBoxClosest (int boxi, Vector3 p, ref float closestDist, NNConstraint constraint, ref NNInfo nnInfo) {
            BBTreeBox box = arr[boxi];

            if (box.node != null) {
                //Leaf node
                if (NodeIntersectsCircle(box.node, p, closestDist)) {
                    //Update the NNInfo
#if ASTARDEBUG
                    Debug.DrawLine((Vector3)box.node.GetVertex(1) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(2) + Vector3.up*0.2f, Color.red);
                    Debug.DrawLine((Vector3)box.node.GetVertex(0) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(1) + Vector3.up*0.2f, Color.red);
                    Debug.DrawLine((Vector3)box.node.GetVertex(2) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(0) + Vector3.up*0.2f, Color.red);
#endif

                    Vector3 closest = box.node.ClosestPointOnNode(p);

                    if (constraint == null || constraint.Suitable(box.node)) {
                        float dist = (closest-p).sqrMagnitude;

                        if (nnInfo.constrainedNode == null) {
                            nnInfo.constrainedNode = box.node;
                            nnInfo.constClampedPosition = closest;
                            closestDist = (float)Math.Sqrt(dist);
                        } else if (dist < closestDist*closestDist) {
                            nnInfo.constrainedNode = box.node;
                            nnInfo.constClampedPosition = closest;
                            closestDist = (float)Math.Sqrt(dist);
                        }
                    }
                } else {
#if ASTARDEBUG
                    Debug.DrawLine((Vector3)box.node.GetVertex(0), (Vector3)box.node.GetVertex(1), Color.blue);
                    Debug.DrawLine((Vector3)box.node.GetVertex(1), (Vector3)box.node.GetVertex(2), Color.blue);
                    Debug.DrawLine((Vector3)box.node.GetVertex(2), (Vector3)box.node.GetVertex(0), Color.blue);
#endif
                }
            } else {
#if ASTARDEBUG
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMin), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMax), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMin, 0, box.rect.yMax), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMax, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
#endif

                //Search children
                if (RectIntersectsCircle(arr[box.left].rect, p, closestDist)) {
                    SearchBoxClosest(box.left, p, ref closestDist, constraint, ref nnInfo);
                }

                if (RectIntersectsCircle(arr[box.right].rect, p, closestDist)) {
                    SearchBoxClosest(box.right, p, ref closestDist, constraint, ref nnInfo);
                }
            }
        }

        // Returns the first suitable node whose triangle contains \a p, or null.
        public MeshNode QueryInside (Vector3 p, NNConstraint constraint) {
            return count != 0 ? SearchBoxInside(0, p, constraint) : null;
        }

        MeshNode SearchBoxInside (int boxi, Vector3 p, NNConstraint constraint) {
            BBTreeBox box = arr[boxi];

            if (box.node != null) {
                if (box.node.ContainsPoint((Int3)p)) {
                    //Update the NNInfo

#if ASTARDEBUG
                    Debug.DrawLine((Vector3)box.node.GetVertex(1) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(2) + Vector3.up*0.2f, Color.red);
                    Debug.DrawLine((Vector3)box.node.GetVertex(0) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(1) + Vector3.up*0.2f, Color.red);
                    Debug.DrawLine((Vector3)box.node.GetVertex(2) + Vector3.up*0.2f, (Vector3)box.node.GetVertex(0) + Vector3.up*0.2f, Color.red);
#endif

                    if (constraint == null || constraint.Suitable(box.node)) {
                        return box.node;
                    }
                } else {
#if ASTARDEBUG
                    Debug.DrawLine((Vector3)box.node.GetVertex(0), (Vector3)box.node.GetVertex(1), Color.blue);
                    Debug.DrawLine((Vector3)box.node.GetVertex(1), (Vector3)box.node.GetVertex(2), Color.blue);
                    Debug.DrawLine((Vector3)box.node.GetVertex(2), (Vector3)box.node.GetVertex(0), Color.blue);
#endif
                }
            } else {
#if ASTARDEBUG
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMin), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMax), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMin, 0, box.rect.yMax), Color.white);
                Debug.DrawLine(new Vector3(box.rect.xMax, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
#endif

                //Search children
                MeshNode g;
                if (arr[box.left].Contains(p)) {
                    g = SearchBoxInside(box.left, p, constraint);
                    if (g != null) return g;
                }

                if (arr[box.right].Contains(p)) {
                    g = SearchBoxInside(box.right, p, constraint);
                    if (g != null) return g;
                }
            }

            return null;
        }

        // Fixed-radius search used by QueryCircle; fills both the unconstrained and
        // constrained best nodes in nnInfo.
        void SearchBoxCircle (int boxi, Vector3 p, float radius, NNConstraint constraint, ref NNInfo nnInfo) {//, int intendentLevel = 0) {
            BBTreeBox box = arr[boxi];

            if (box.node != null) {
                //Leaf node
                if (NodeIntersectsCircle(box.node, p, radius)) {
                    //Update the NNInfo

#if ASTARDEBUG
                    Debug.DrawLine((Vector3)box.node.GetVertex(0), (Vector3)box.node.GetVertex(1), Color.red);
                    Debug.DrawLine((Vector3)box.node.GetVertex(1), (Vector3)box.node.GetVertex(2), Color.red);
                    Debug.DrawLine((Vector3)box.node.GetVertex(2), (Vector3)box.node.GetVertex(0), Color.red);
#endif

                    Vector3 closest = box.node.ClosestPointOnNode(p); //NavMeshGraph.ClosestPointOnNode (box.node,graph.vertices,p);
                    float dist = (closest-p).sqrMagnitude;

                    if (nnInfo.node == null) {
                        nnInfo.node = box.node;
                        nnInfo.clampedPosition = closest;
                    } else if (dist < (nnInfo.clampedPosition - p).sqrMagnitude) {
                        nnInfo.node = box.node;
                        nnInfo.clampedPosition = closest;
                    }
                    if (constraint == null || constraint.Suitable(box.node)) {
                        if (nnInfo.constrainedNode == null) {
                            nnInfo.constrainedNode = box.node;
                            nnInfo.constClampedPosition = closest;
                        } else if (dist < (nnInfo.constClampedPosition - p).sqrMagnitude) {
                            nnInfo.constrainedNode = box.node;
                            nnInfo.constClampedPosition = closest;
                        }
                    }
                } else {
#if ASTARDEBUG
                    Debug.DrawLine((Vector3)box.node.GetVertex(0), (Vector3)box.node.GetVertex(1), Color.blue);
                    Debug.DrawLine((Vector3)box.node.GetVertex(1), (Vector3)box.node.GetVertex(2), Color.blue);
                    Debug.DrawLine((Vector3)box.node.GetVertex(2), (Vector3)box.node.GetVertex(0), Color.blue);
#endif
                }
                return;
            }

#if ASTARDEBUG
            Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMin), Color.white);
            Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMax), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
            Debug.DrawLine(new Vector3(box.rect.xMin, 0, box.rect.yMin), new Vector3(box.rect.xMin, 0, box.rect.yMax), Color.white);
            Debug.DrawLine(new Vector3(box.rect.xMax, 0, box.rect.yMin), new Vector3(box.rect.xMax, 0, box.rect.yMax), Color.white);
#endif

            //Search children
            if (RectIntersectsCircle(arr[box.left].rect, p, radius)) {
                SearchBoxCircle(box.left, p, radius, constraint, ref nnInfo);
            }

            if (RectIntersectsCircle(arr[box.right].rect, p, radius)) {
                SearchBoxCircle(box.right, p, radius, constraint, ref nnInfo);
            }
        }

        // Containment search used by Query: among nodes containing p, prefers the one
        // with the smallest |Y - p.y|.
        // NOTE(review): unlike the other search methods, `constraint` is dereferenced
        // here without a null check — Query(p, null) would throw; confirm callers
        // always pass a non-null constraint (e.g. NNConstraint.None).
        void SearchBox (int boxi, Vector3 p, NNConstraint constraint, ref NNInfo nnInfo) {//, int intendentLevel = 0) {
            BBTreeBox box = arr[boxi];

            if (box.node != null) {
                //Leaf node
                if (box.node.ContainsPoint((Int3)p)) {
                    //Update the NNInfo

                    if (nnInfo.node == null) {
                        nnInfo.node = box.node;
                    } else if (Mathf.Abs(((Vector3)box.node.position).y - p.y) < Mathf.Abs(((Vector3)nnInfo.node.position).y - p.y)) {
                        nnInfo.node = box.node;
                    }
                    if (constraint.Suitable(box.node)) {
                        if (nnInfo.constrainedNode == null) {
                            nnInfo.constrainedNode = box.node;
                        } else if (Mathf.Abs(box.node.position.y - p.y) < Mathf.Abs(nnInfo.constrainedNode.position.y - p.y)) {
                            nnInfo.constrainedNode = box.node;
                        }
                    }
                }
                return;
            }

            //Search children
            if (arr[box.left].Contains(p)) {
                SearchBox(box.left, p, constraint, ref nnInfo);
            }

            if (arr[box.right].Contains(p)) {
                SearchBox(box.right, p, constraint, ref nnInfo);
            }
        }

        // One tree node: either a leaf (node != null, left/right unused) or an
        // interior box (node == null, left/right are child indices into #arr).
        struct BBTreeBox {
            public IntRect rect;
            public MeshNode node;

            public int left, right;

            public bool IsLeaf {
                get {
                    return node != null;
                }
            }

            public BBTreeBox (IntRect rect) {
                node = null;
                this.rect = rect;
                left = right = -1;
            }

            // Leaf constructor: bounds are the XZ AABB of the node's vertices.
            public BBTreeBox (MeshNode node) {
                this.node = node;
                var first = node.GetVertex(0);
                var min = new Int2(first.x, first.z);
                Int2 max = min;

                for (int i = 1; i < node.GetVertexCount(); i++) {
                    var p = node.GetVertex(i);
                    min.x = Math.Min(min.x, p.x);
                    min.y = Math.Min(min.y, p.z);

                    max.x = Math.Max(max.x, p.x);
                    max.y = Math.Max(max.y, p.z);
                }

                rect = new IntRect(min.x, min.y, max.x, max.y);
                left = right = -1;
            }

            // XZ containment test against this box's integer rect.
            public bool Contains (Vector3 p) {
                var pi = (Int3)p;

                return rect.Contains(pi.x, pi.z);
            }
        }

        public void OnDrawGizmos () {
            Gizmos.color = new Color(1, 1, 1, 0.5F);
            if (count == 0) return;
            OnDrawGizmos(0, 0);
        }

        // Draws each tree level as a stack of colored boxes, offset upwards by depth.
        void OnDrawGizmos (int boxi, int depth) {
            BBTreeBox box = arr[boxi];

            var min = (Vector3) new Int3(box.rect.xmin, 0, box.rect.ymin);
            var max = (Vector3) new Int3(box.rect.xmax, 0, box.rect.ymax);

            Vector3 center = (min+max)*0.5F;
            Vector3 size = (max-center)*2;

            size = new Vector3(size.x, 1, size.z);
            center.y += depth * 2;

            Gizmos.color = AstarMath.IntToColor(depth, 1f); //new Color (0,0,0,0.2F);
            Gizmos.DrawCube(center, size);

            if (box.node != null) {
            } else {
                OnDrawGizmos(box.left, depth + 1);
                OnDrawGizmos(box.right, depth + 1);
            }
        }

        // True if the node's closest surface point lies within \a radius of \a p.
        static bool NodeIntersectsCircle (MeshNode node, Vector3 p, float radius) {
            if (float.IsPositiveInfinity(radius)) return true;

            /** \bug Is not correct on the Y axis */
            return (p - node.ClosestPointOnNode(p)).sqrMagnitude < radius*radius;
        }

        /** Returns true if \a p is within \a radius from \a r.
         * Correctly handles cases where \a radius is positive infinity.
         */
        static bool RectIntersectsCircle (IntRect r, Vector3 p, float radius) {
            if (float.IsPositiveInfinity(radius)) return true;

            Vector3 po = p;
            // Clamp p onto the rect (converted to world floats), then compare the
            // clamped-to-original XZ distance against the radius.
            p.x = Math.Max(p.x, r.xmin*Int3.PrecisionFactor);
            p.x = Math.Min(p.x, r.xmax*Int3.PrecisionFactor);
            p.z = Math.Max(p.z, r.ymin*Int3.PrecisionFactor);
            p.z = Math.Min(p.z, r.ymax*Int3.PrecisionFactor);

            // XZ squared magnitude comparison
            return (p.x-po.x)*(p.x-po.x) + (p.z-po.z)*(p.z-po.z) < radius*radius;
        }

        /** Returns the difference in area between \a r and \a r expanded to contain \a r2 */
        static int ExpansionRequired (IntRect r, IntRect r2) {
            int xMin = Math.Min(r.xmin, r2.xmin);
            int xMax = Math.Max(r.xmax, r2.xmax);
            int yMin = Math.Min(r.ymin, r2.ymin);
            int yMax = Math.Max(r.ymax, r2.ymax);

            return (xMax-xMin)*(yMax-yMin)-RectArea(r);
        }

        /** Returns a new rect which contains both \a r and \a r2 */
        static IntRect ExpandToContain (IntRect r, IntRect r2) {
            return IntRect.Union(r, r2);
        }

        /** Returns the area of a rect */
        static int RectArea (IntRect r) {
            return r.Width*r.Height;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
// Copyright (C) 2004 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

using System.Collections;
using System.IO;
using System.Xml;
using Xunit;

namespace System.Data.Tests
{
    /// <summary>
    /// Shared assertion helpers for DataSet/DataTable XML (de)serialization tests.
    /// The <c>label</c> parameters are kept for signature compatibility with the
    /// original NUnit-era tests; the xunit asserts here do not use them.
    /// </summary>
    public class DataSetAssertion
    {
        /// <summary>
        /// Returns <paramref name="source"/> (an XML schema document) re-serialized with
        /// attributes sorted and comments stripped, so two schemas can be compared as strings
        /// independently of attribute order.
        /// </summary>
        public string GetNormalizedSchema(string source)
        {
            /*
            // Due to the implementation difference, we must have
            // one more step to reorder attributes. Here, read
            // schema document into XmlSchema once, and compare
            // output string with those emission from Write().
            XmlSchema xs = XmlSchema.Read (new XmlTextReader ( new StringReader (source)), null);
            StringWriter writer = new StringWriter ();
            xs.Write (writer);
            return writer.ToString ();
            */
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(source);
            SortAttributes(doc.DocumentElement);
            StringWriter writer = new StringWriter();
            doc.Save(writer);
            return writer.ToString();
        }

        // Recursively sorts attributes on el and all descendant elements,
        // and removes comment nodes (they are irrelevant for schema comparison).
        private void SortAttributes(XmlElement el)
        {
            SortAttributesAttributes(el);
            ArrayList al = new ArrayList();
            foreach (XmlNode n in el.ChildNodes)
            {
                if (n.NodeType == XmlNodeType.Element)
                    SortAttributes(n as XmlElement);
                if (n.NodeType == XmlNodeType.Comment)
                    al.Add(n);
            }
            // Removal is deferred until after enumeration: modifying ChildNodes
            // while iterating it would break the foreach above.
            foreach (XmlNode n in al)
                el.RemoveChild(n);
        }

        // Sorts the attributes of a single element by name (detach all, re-attach in order).
        private void SortAttributesAttributes(XmlElement el)
        {
            ArrayList al = new ArrayList();
            foreach (XmlAttribute a in el.Attributes)
                al.Add(a.Name);
            al.Sort();
            string[] names = (string[])al.ToArray(typeof(string));
            al.Clear();
            foreach (string name in names)
                al.Add(el.RemoveAttributeNode(el.GetAttributeNode(name)));
            foreach (XmlAttribute a in al)
                // Exclude xmlns="" here.
                if (a.Name != "xmlns")// || a.Value != String.Empty)
                    el.SetAttributeNode(a);
        }

        /// <summary>Asserts dataset name, table count, and (when relCount &gt;= 0) relation count.</summary>
        public void AssertDataSet(string label, DataSet ds, string name, int tableCount, int relCount)
        {
            Assert.Equal(name, ds.DataSetName);
            Assert.Equal(tableCount, ds.Tables.Count);
            // A negative relCount means "don't check relations".
            if (relCount >= 0)
                Assert.Equal(relCount, ds.Relations.Count);
        }

        /// <summary>Asserts the basic shape of a DataTable: name, counts of columns/rows/relations/constraints and primary-key length.</summary>
        public void AssertDataTable(string label, DataTable dt, string name, int columnCount, int rowCount, int parentRelationCount, int childRelationCount, int constraintCount, int primaryKeyLength)
        {
            Assert.Equal(name, dt.TableName);
            Assert.Equal(columnCount, dt.Columns.Count);
            Assert.Equal(rowCount, dt.Rows.Count);
            Assert.Equal(parentRelationCount, dt.ParentRelations.Count);
            Assert.Equal(childRelationCount, dt.ChildRelations.Count);
            Assert.Equal(constraintCount, dt.Constraints.Count);
            Assert.Equal(primaryKeyLength, dt.PrimaryKey.Length);
        }

        /// <summary>Convenience overload: expects the reader to end at EndOfFile and skips the reader-position checks.</summary>
        public void AssertReadXml(DataSet ds, string label, string xml, XmlReadMode readMode, XmlReadMode resultMode, string datasetName, int tableCount)
        {
            AssertReadXml(ds, label, xml, readMode, resultMode, datasetName, tableCount, ReadState.EndOfFile, null, null);
        }

        /// <summary>Convenience overload: checks the reader's final ReadState but skips the reader-position checks.</summary>
        public void AssertReadXml(DataSet ds, string label, string xml, XmlReadMode readMode, XmlReadMode resultMode, string datasetName, int tableCount, ReadState state)
        {
            AssertReadXml(ds, label, xml, readMode, resultMode, datasetName, tableCount, state, null, null);
        }

        // a bit detailed version
        /// <summary>
        /// Reads <paramref name="xml"/> into <paramref name="ds"/> with <paramref name="readMode"/>,
        /// asserting the returned XmlReadMode, the resulting dataset shape, and where the reader stopped.
        /// readerLocalName/readerNS are only checked when non-null.
        /// </summary>
        public void AssertReadXml(DataSet ds, string label, string xml, XmlReadMode readMode, XmlReadMode resultMode, string datasetName, int tableCount, ReadState state, string readerLocalName, string readerNS)
        {
            XmlReader xtr = new XmlTextReader(xml, XmlNodeType.Element, null);
            Assert.Equal(resultMode, ds.ReadXml(xtr, readMode));
            AssertDataSet(label + ".dataset", ds, datasetName, tableCount, -1);
            Assert.Equal(state, xtr.ReadState);
            if (readerLocalName != null)
                Assert.Equal(readerLocalName, xtr.LocalName);
            if (readerNS != null)
                Assert.Equal(readerNS, xtr.NamespaceURI);
        }

        /// <summary>Asserts a DataRelation's name, nesting, parent/child column lists, and presence of its key constraints.</summary>
        public void AssertDataRelation(string label, DataRelation rel, string name, bool nested, string[] parentColNames, string[] childColNames, bool existsUK, bool existsFK)
        {
            Assert.Equal(name, rel.RelationName);
            Assert.Equal(nested, rel.Nested);
            for (int i = 0; i < parentColNames.Length; i++)
                Assert.Equal(parentColNames[i], rel.ParentColumns[i].ColumnName);
            Assert.Equal(parentColNames.Length, rel.ParentColumns.Length);
            for (int i = 0; i < childColNames.Length; i++)
                Assert.Equal(childColNames[i], rel.ChildColumns[i].ColumnName);
            Assert.Equal(childColNames.Length, rel.ChildColumns.Length);
            if (existsUK)
                Assert.NotNull(rel.ParentKeyConstraint);
            else
                Assert.Null(rel.ParentKeyConstraint);
            if (existsFK)
                Assert.NotNull(rel.ChildKeyConstraint);
            else
                Assert.Null(rel.ChildKeyConstraint);
        }

        /// <summary>Asserts a UniqueConstraint's name, primary-key flag, and exact column list.</summary>
        public void AssertUniqueConstraint(string label, UniqueConstraint uc, string name, bool isPrimaryKey, string[] colNames)
        {
            Assert.Equal(name, uc.ConstraintName);
            Assert.Equal(isPrimaryKey, uc.IsPrimaryKey);
            for (int i = 0; i < colNames.Length; i++)
                Assert.Equal(colNames[i], uc.Columns[i].ColumnName);
            Assert.Equal(colNames.Length, uc.Columns.Length);
        }

        /// <summary>Asserts a ForeignKeyConstraint's name, rules, and both column lists (own and related).</summary>
        public void AssertForeignKeyConstraint(string label, ForeignKeyConstraint fk, string name, AcceptRejectRule acceptRejectRule, Rule delRule, Rule updateRule, string[] colNames, string[] relColNames)
        {
            Assert.Equal(name, fk.ConstraintName);
            Assert.Equal(acceptRejectRule, fk.AcceptRejectRule);
            Assert.Equal(delRule, fk.DeleteRule);
            Assert.Equal(updateRule, fk.UpdateRule);
            for (int i = 0; i < colNames.Length; i++)
                Assert.Equal(colNames[i], fk.Columns[i].ColumnName);
            Assert.Equal(colNames.Length, fk.Columns.Length);
            for (int i = 0; i < relColNames.Length; i++)
                Assert.Equal(relColNames[i], fk.RelatedColumns[i].ColumnName);
            Assert.Equal(relColNames.Length, fk.RelatedColumns.Length);
        }

        /// <summary>
        /// Asserts every externally visible property of a DataColumn.
        /// A negative <paramref name="ordinal"/> skips the ordinal check.
        /// </summary>
        public void AssertDataColumn(string label, DataColumn col, string colName, bool allowDBNull, bool autoIncr, int autoIncrSeed, int autoIncrStep, string caption, MappingType colMap, Type type, object defaultValue, string expression, int maxLength, string ns, int ordinal, string prefix, bool readOnly, bool unique)
        {
            Assert.Equal(colName, col.ColumnName);
            Assert.Equal(allowDBNull, col.AllowDBNull);
            Assert.Equal(autoIncr, col.AutoIncrement);
            Assert.Equal(autoIncrSeed, col.AutoIncrementSeed);
            Assert.Equal(autoIncrStep, col.AutoIncrementStep);
            Assert.Equal(caption, col.Caption);
            Assert.Equal(colMap, col.ColumnMapping);
            Assert.Equal(type, col.DataType);
            Assert.Equal(defaultValue, col.DefaultValue);
            Assert.Equal(expression, col.Expression);
            Assert.Equal(maxLength, col.MaxLength);
            Assert.Equal(ns, col.Namespace);
            if (ordinal >= 0)
                Assert.Equal(ordinal, col.Ordinal);
            Assert.Equal(prefix, col.Prefix);
            Assert.Equal(readOnly, col.ReadOnly);
            Assert.Equal(unique, col.Unique);
        }
    }
}
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Insights;
using Microsoft.Azure.Insights.Models;
using Microsoft.WindowsAzure;
using Microsoft.WindowsAzure.Common;
using Microsoft.WindowsAzure.Common.Internals;
using Newtonsoft.Json.Linq;

namespace Microsoft.Azure.Insights
{
    /// <summary>
    /// Operations for metric definitions. Generated client code: builds the REST
    /// request by hand and deserializes the JSON response field by field.
    /// </summary>
    internal partial class MetricDefinitionOperations : IServiceOperations<InsightsClient>, IMetricDefinitionOperations
    {
        /// <summary>
        /// Initializes a new instance of the MetricDefinitionOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        internal MetricDefinitionOperations(InsightsClient client)
        {
            this._client = client;
        }

        private InsightsClient _client;

        /// <summary>
        /// Gets a reference to the Microsoft.Azure.Insights.InsightsClient.
        /// </summary>
        public InsightsClient Client
        {
            get { return this._client; }
        }

        /// <summary>
        /// The List Metric Definitions operation lists the metric definitions
        /// for the resource.
        /// </summary>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the target resource to get
        /// metrics for.
        /// </param>
        /// <param name='filterString'>
        /// Optional. An OData $filter expression that supports querying by the
        /// name of the metric definition. For example, "name.value eq
        /// 'Percentage CPU'". Name is optional, meaning the expression may be
        /// "".
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The List Metric Definitions operation response.
        /// </returns>
        public async Task<MetricDefinitionListResponse> GetMetricDefinitionsAsync(string resourceUri, string filterString, CancellationToken cancellationToken)
        {
            // Validate
            if (resourceUri == null)
            {
                throw new ArgumentNullException("resourceUri");
            }

            // Tracing
            bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = Tracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceUri", resourceUri);
                tracingParameters.Add("filterString", filterString);
                Tracing.Enter(invocationId, this, "GetMetricDefinitionsAsync", tracingParameters);
            }

            // Construct URL
            // NOTE(review): the inner "filterString != null" ternary is redundant inside
            // this if-block; generator artifact, harmless.
            string url = "/" + resourceUri.Trim() + "/metricDefinitions?";
            url = url + "api-version=2014-04-01";
            if (filterString != null)
            {
                url = url + "&$filter=" + Uri.EscapeDataString(filterString != null ? filterString.Trim() : "");
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Get;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                httpRequest.Headers.Add("Accept", "application/json");
                httpRequest.Headers.Add("x-ms-version", "2014-04-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        Tracing.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        Tracing.ReceiveResponse(invocationId, httpResponse);
                    }
                    // Any status other than 200 OK is surfaced as a CloudException built
                    // from the response body.
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            Tracing.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    MetricDefinitionListResponse result = null;
                    // Deserialize Response
                    cancellationToken.ThrowIfCancellationRequested();
                    string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    result = new MetricDefinitionListResponse();
                    JToken responseDoc = null;
                    if (string.IsNullOrEmpty(responseContent) == false)
                    {
                        responseDoc = JToken.Parse(responseContent);
                    }

                    if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                    {
                        // Every JSON field below is optional: each is read defensively
                        // (null/JTokenType.Null checked) before assignment.
                        MetricDefinitionCollection metricDefinitionCollectionInstance = new MetricDefinitionCollection();
                        result.MetricDefinitionCollection = metricDefinitionCollectionInstance;

                        JToken valueArray = responseDoc["value"];
                        if (valueArray != null && valueArray.Type != JTokenType.Null)
                        {
                            foreach (JToken valueValue in ((JArray)valueArray))
                            {
                                MetricDefinition metricDefinitionInstance = new MetricDefinition();
                                metricDefinitionCollectionInstance.Value.Add(metricDefinitionInstance);

                                // "name" is a localizable string pair { value, localizedValue }.
                                JToken nameValue = valueValue["name"];
                                if (nameValue != null && nameValue.Type != JTokenType.Null)
                                {
                                    LocalizableString nameInstance = new LocalizableString();
                                    metricDefinitionInstance.Name = nameInstance;

                                    JToken valueValue2 = nameValue["value"];
                                    if (valueValue2 != null && valueValue2.Type != JTokenType.Null)
                                    {
                                        string valueInstance = ((string)valueValue2);
                                        nameInstance.Value = valueInstance;
                                    }

                                    JToken localizedValueValue = nameValue["localizedValue"];
                                    if (localizedValueValue != null && localizedValueValue.Type != JTokenType.Null)
                                    {
                                        string localizedValueInstance = ((string)localizedValueValue);
                                        nameInstance.LocalizedValue = localizedValueInstance;
                                    }
                                }

                                // Enum fields are parsed case-insensitively from their string form.
                                JToken unitValue = valueValue["unit"];
                                if (unitValue != null && unitValue.Type != JTokenType.Null)
                                {
                                    Unit unitInstance = ((Unit)Enum.Parse(typeof(Unit), ((string)unitValue), true));
                                    metricDefinitionInstance.Unit = unitInstance;
                                }

                                JToken primaryAggregationTypeValue = valueValue["primaryAggregationType"];
                                if (primaryAggregationTypeValue != null && primaryAggregationTypeValue.Type != JTokenType.Null)
                                {
                                    AggregationType primaryAggregationTypeInstance = ((AggregationType)Enum.Parse(typeof(AggregationType), ((string)primaryAggregationTypeValue), true));
                                    metricDefinitionInstance.PrimaryAggregationType = primaryAggregationTypeInstance;
                                }

                                JToken resourceUriValue = valueValue["resourceUri"];
                                if (resourceUriValue != null && resourceUriValue.Type != JTokenType.Null)
                                {
                                    string resourceUriInstance = ((string)resourceUriValue);
                                    metricDefinitionInstance.ResourceUri = resourceUriInstance;
                                }

                                // Nested array: each availability has a time grain, retention,
                                // and an optional storage location with table info entries.
                                JToken metricAvailabilitiesArray = valueValue["metricAvailabilities"];
                                if (metricAvailabilitiesArray != null && metricAvailabilitiesArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken metricAvailabilitiesValue in ((JArray)metricAvailabilitiesArray))
                                    {
                                        MetricAvailability metricAvailabilityInstance = new MetricAvailability();
                                        metricDefinitionInstance.MetricAvailabilities.Add(metricAvailabilityInstance);

                                        JToken timeGrainValue = metricAvailabilitiesValue["timeGrain"];
                                        if (timeGrainValue != null && timeGrainValue.Type != JTokenType.Null)
                                        {
                                            // Durations come over the wire as ISO 8601 strings (e.g. "PT1M").
                                            TimeSpan timeGrainInstance = TypeConversion.From8601TimeSpan(((string)timeGrainValue));
                                            metricAvailabilityInstance.TimeGrain = timeGrainInstance;
                                        }

                                        JToken retentionValue = metricAvailabilitiesValue["retention"];
                                        if (retentionValue != null && retentionValue.Type != JTokenType.Null)
                                        {
                                            TimeSpan retentionInstance = TypeConversion.From8601TimeSpan(((string)retentionValue));
                                            metricAvailabilityInstance.Retention = retentionInstance;
                                        }

                                        JToken locationValue = metricAvailabilitiesValue["location"];
                                        if (locationValue != null && locationValue.Type != JTokenType.Null)
                                        {
                                            MetricLocation locationInstance = new MetricLocation();
                                            metricAvailabilityInstance.Location = locationInstance;

                                            JToken tableEndpointValue = locationValue["tableEndpoint"];
                                            if (tableEndpointValue != null && tableEndpointValue.Type != JTokenType.Null)
                                            {
                                                string tableEndpointInstance = ((string)tableEndpointValue);
                                                locationInstance.TableEndpoint = tableEndpointInstance;
                                            }

                                            JToken tableInfoArray = locationValue["tableInfo"];
                                            if (tableInfoArray != null && tableInfoArray.Type != JTokenType.Null)
                                            {
                                                foreach (JToken tableInfoValue in ((JArray)tableInfoArray))
                                                {
                                                    MetricTableInfo metricTableInfoInstance = new MetricTableInfo();
                                                    locationInstance.TableInfo.Add(metricTableInfoInstance);

                                                    JToken tableNameValue = tableInfoValue["tableName"];
                                                    if (tableNameValue != null && tableNameValue.Type != JTokenType.Null)
                                                    {
                                                        string tableNameInstance = ((string)tableNameValue);
                                                        metricTableInfoInstance.TableName = tableNameInstance;
                                                    }

                                                    JToken startTimeValue = tableInfoValue["startTime"];
                                                    if (startTimeValue != null && startTimeValue.Type != JTokenType.Null)
                                                    {
                                                        DateTime startTimeInstance = ((DateTime)startTimeValue);
                                                        metricTableInfoInstance.StartTime = startTimeInstance;
                                                    }

                                                    JToken endTimeValue = tableInfoValue["endTime"];
                                                    if (endTimeValue != null && endTimeValue.Type != JTokenType.Null)
                                                    {
                                                        DateTime endTimeInstance = ((DateTime)endTimeValue);
                                                        metricTableInfoInstance.EndTime = endTimeInstance;
                                                    }

                                                    JToken sasTokenValue = tableInfoValue["sasToken"];
                                                    if (sasTokenValue != null && sasTokenValue.Type != JTokenType.Null)
                                                    {
                                                        string sasTokenInstance = ((string)sasTokenValue);
                                                        metricTableInfoInstance.SasToken = sasTokenInstance;
                                                    }

                                                    JToken sasTokenExpirationTimeValue = tableInfoValue["sasTokenExpirationTime"];
                                                    if (sasTokenExpirationTimeValue != null && sasTokenExpirationTimeValue.Type != JTokenType.Null)
                                                    {
                                                        DateTime sasTokenExpirationTimeInstance = ((DateTime)sasTokenExpirationTimeValue);
                                                        metricTableInfoInstance.SasTokenExpirationTime = sasTokenExpirationTimeInstance;
                                                    }
                                                }
                                            }

                                            JToken partitionKeyValue = locationValue["partitionKey"];
                                            if (partitionKeyValue != null && partitionKeyValue.Type != JTokenType.Null)
                                            {
                                                string partitionKeyInstance = ((string)partitionKeyValue);
                                                locationInstance.PartitionKey = partitionKeyInstance;
                                            }
                                        }
                                    }
                                }

                                // "properties" is a free-form string-to-string bag.
                                JToken propertiesSequenceElement = ((JToken)valueValue["properties"]);
                                if (propertiesSequenceElement != null && propertiesSequenceElement.Type != JTokenType.Null)
                                {
                                    foreach (JProperty property in propertiesSequenceElement)
                                    {
                                        string propertiesKey = ((string)property.Name);
                                        string propertiesValue = ((string)property.Value);
                                        metricDefinitionInstance.Properties.Add(propertiesKey, propertiesValue);
                                    }
                                }
                            }
                        }
                    }

                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        Tracing.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    // Dispose response even on exception/cancellation paths.
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                // Dispose request even on exception/cancellation paths.
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
    }
}
using System;
using System.Collections;
using AutoyaFramework.Purchase;
using UnityEngine;

namespace AutoyaFramework
{
    /// <summary>
    /// Lifecycle states of the purchase feature. Ready is the only state in
    /// which purchases can actually be started.
    /// </summary>
    public enum PurchaseFeatureState
    {
        Loading,
        Ready,
        ReadyRetry,
        ReadyFailed,
        Closing,
        Closed
    }

    public partial class Autoya
    {
        /*
            Purchase implementation.
        */
        private PurchaseRouter _purchaseRouter;
        private PurchaseFeatureState purchaseState;

        // Latest AutoyaStatus error captured by the http response handling
        // delegate; reset to a fresh status on reload and on each Purchase call.
        private static AutoyaStatus purchaseErrorStatus = new AutoyaStatus();

        // Re-creates the PurchaseRouter, wiring the http header/response delegates
        // and the ready/ready-failed callbacks that drive purchaseState.
        private void ReloadPurchasability()
        {
            purchaseState = PurchaseFeatureState.Loading;
            purchaseErrorStatus = new AutoyaStatus();

            PurchaseRouter.HttpRequestHeaderDelegate httpRequestHeaderDel = (p1, p2, p3, p4) =>
            {
                return httpRequestHeaderDelegate(p1, p2, p3, p4);
            };

            PurchaseRouter.HttpResponseHandlingDelegate httpResponseHandlingDel = (p1, p2, p3, p4, p5, p6, p7) =>
            {
                Action<string, int, string, AutoyaStatus> p7dash = (q1, q2, q3, status) =>
                {
                    // set autoyaStatus error if exist.
                    if (status.HasError())
                    {
                        purchaseErrorStatus = status;
                    }
                    p7(q1, q2, q3);
                };
                httpResponseHandlingDelegate(p1, p2, p3, p4, p5, p6, p7dash);
            };

            _purchaseRouter = new PurchaseRouter(
                mainthreadDispatcher.Commit,
                GetProductInfosAs,
                productSourceData => OnLoadProductsResponse(productSourceData),
                productId => OnTicketRequest(productId),
                ticketData => OnTicketResponse(ticketData),
                () =>
                {
                    purchaseState = PurchaseFeatureState.Ready;
                    OnPurchaseReady();
                },
                (err, code, reason) =>
                {
                    purchaseState = PurchaseFeatureState.ReadyFailed;
                    var cor = OnPurchaseReadyFailed(err, code, reason, purchaseErrorStatus);
                    mainthreadDispatcher.Commit(cor);
                },
                OnPurchaseDeployRequest,
                OnPurchaseDeployRequestForAlreadyPaid,
                OnPurchaseFailedRequest,
                onPaidPurchaseDoneInBackground,
                httpRequestHeaderDel,
                httpResponseHandlingDel
            );
        }

        /*
            public apis.
        */

        /// <summary>
        /// True only when Autoya is initialized, authenticated, in the Ready state,
        /// and the router itself reports purchase readiness.
        /// </summary>
        public static bool Purchase_IsReady()
        {
            if (autoya == null)
            {
                return false;
            }
            if (!Autoya.Auth_IsAuthenticated())
            {
                return false;
            }
            if (autoya.purchaseState != PurchaseFeatureState.Ready)
            {
                return false;
            }
            if (!autoya._purchaseRouter.IsPurchaseReady())
            {
                return false;
            }
            return true;
        }

        /// <summary>
        /// True when purchase setup previously failed and a retry
        /// (Purchase_AttemptReadyPurchase) would be meaningful.
        /// </summary>
        public static bool Purchase_NeedAttemptReadyPurchase()
        {
            // FIX: guard against an uninitialized singleton, consistent with the
            // other public purchase apis (previously threw NullReferenceException).
            if (autoya == null)
            {
                return false;
            }
            return autoya.purchaseState == PurchaseFeatureState.ReadyFailed;
        }

        /// <summary>
        /// Returns the loaded product infos, or an empty array when the purchase
        /// feature is not fully ready.
        /// </summary>
        public static ProductInfo[] Purchase_ProductInfos()
        {
            if (autoya == null)
            {
                return new ProductInfo[] { };
            }
            if (!Autoya.Auth_IsAuthenticated())
            {
                return new ProductInfo[] { };
            }
            if (autoya.purchaseState != PurchaseFeatureState.Ready)
            {
                return new ProductInfo[] { };
            }
            if (!autoya._purchaseRouter.IsPurchaseReady())
            {
                return new ProductInfo[] { };
            }
            return autoya._purchaseRouter.ProductInfos();
        }

        /// <summary>
        /// Retries purchase setup after a ReadyFailed state.
        /// NOTE: misspelled name ("Purcase") kept for backward compatibility;
        /// prefer <see cref="Purchase_AttemptReadyPurchase"/>.
        /// </summary>
        public static void Purchase_AttemptReadyPurcase()
        {
            // FIX: guard against an uninitialized singleton, consistent with the
            // other public purchase apis (previously threw NullReferenceException).
            if (autoya == null)
            {
                return;
            }
            if (autoya.purchaseState == PurchaseFeatureState.ReadyFailed)
            {
                autoya.ReloadPurchasability();
            }
        }

        /// <summary>
        /// Correctly spelled alias of <see cref="Purchase_AttemptReadyPurcase"/>.
        /// </summary>
        public static void Purchase_AttemptReadyPurchase()
        {
            Purchase_AttemptReadyPurcase();
        }

        /// <summary>
        /// Pair of the purchase-level error code and the underlying http error code
        /// delivered to the Purchase failed handler.
        /// </summary>
        public struct ErrorCodes
        {
            public PurchaseRouter.PurchaseError purchaseErrorCode;
            public int httpErrorCode;

            public ErrorCodes(PurchaseRouter.PurchaseError purchaseErrorCode, int httpErrorCode)
            {
                this.purchaseErrorCode = purchaseErrorCode;
                this.httpErrorCode = httpErrorCode;
            }
        }

        /**
            purchase item asynchronously.

            string purchaseId: the id for this purchase. this param will back in done or failed handler.
            string productId: platform-shard product id string.
            Action<string> done: fire when purchase is done in succeessful. string is purchaseId.
            Action<string, PurchaseRouter.PurchaseError, string> failed: fire when purchase is failed. 1st string is purchaseId.
        */
        public static void Purchase(string purchaseId, string productId, Action<string> done, Action<string, ErrorCodes, string, AutoyaStatus> failed)
        {
            // Silently-return guards below mirror Purchase_IsReady; each logs which
            // precondition was not met.
            if (autoya == null)
            {
                Debug.LogWarning("not yet. 1");
                return;
            }
            if (!Autoya.Auth_IsAuthenticated())
            {
                Debug.LogWarning("not yet. 2");
                return;
            }
            if (autoya.purchaseState != PurchaseFeatureState.Ready)
            {
                Debug.LogWarning("not yet. 3");
                return;
            }
            if (!autoya._purchaseRouter.IsPurchaseReady())
            {
                Debug.LogWarning("not yet. 4");
                return;
            }

            purchaseErrorStatus = new AutoyaStatus();

            // Adapt the router's 4-arg failure callback into the public handler,
            // bundling the purchase + http error codes and the captured status.
            Action<string, PurchaseRouter.PurchaseError, int, string> _failed = (p1, p2, p3, p4) =>
            {
                failed(p1, new ErrorCodes(p2, p3), p4, purchaseErrorStatus);
            };

            autoya.mainthreadDispatcher.Commit(
                autoya._purchaseRouter.PurchaseAsync(purchaseId, productId, done, _failed)
            );
        }

        /**
            do not use this method in actual use.
            this method is only for testing.
        */
        public static void Purchase_DEBUG_Shutdown()
        {
            if (autoya == null)
            {
                return;
            }
            autoya._purchaseRouter = null;
        }

        public static void Purchase_DEBUG_Reload()
        {
            if (autoya == null)
            {
                return;
            }
            autoya.ReloadPurchasability();
        }
    }
}
using System; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.Drawing; using System.Globalization; using System.IO; using System.Reflection; using System.Text; using System.Windows.Forms; using System.Xml; using ActiproSoftware.SyntaxEditor; namespace NQuery.UI { public partial class ActiproLink : Component { private SyntaxEditor _syntaxEditor; private Evaluatable _evaluatable; private bool _enableAutoReplace; private bool _enableAutoPopupAfterCharacter; private bool _enableAutoPopupAfterDot; private bool _enableAutoPopupAfterParenthesis; private bool _bound; private SourceLocation _parameterInfoLocation; [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1805:DoNotInitializeUnnecessarily")] public ActiproLink() { InitializeComponent(); Setup(); } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1805:DoNotInitializeUnnecessarily")] public ActiproLink(IContainer container) { if (container == null) throw new ArgumentNullException("container"); container.Add(this); InitializeComponent(); Setup(); } #region Helpers private void Setup() { _enableAutoReplace = true; _enableAutoPopupAfterDot = true; _enableAutoPopupAfterParenthesis = true; _enableAutoPopupAfterCharacter = true; } private bool CanBind { get { return _syntaxEditor != null && _evaluatable != null && _evaluatable.DataContext != null && !_bound; } } private void EnsureBound() { if (_syntaxEditor == null) throw new InvalidOperationException("The SyntaxEditor property has not been initialized."); if (_evaluatable == null) throw new InvalidOperationException("The Evaluatable property has not been initialized."); if (!_bound) Bind(); } private ScopeInfo GetScopeInfo() { string oldText = _evaluatable.Text; _evaluatable.Text = String.Empty; try { ScopeInfoBuilder builder = new ScopeInfoBuilder(); ICodeAssistanceContextProvider codeAssistanceContextProvider = 
_evaluatable.GetCodeAssistanceContextProvider(); IMemberCompletionContext context = codeAssistanceContextProvider.ProvideMemberCompletionContext(SourceLocation.Empty); context.Enumerate(builder); return builder.GetScopeInfo(); } finally { _evaluatable.Text = oldText; } } private void Bind() { if (!_bound) { SetupLanguage(); if (_enableAutoReplace) SetupAutoReplacement(); _syntaxEditor.TriggerActivated += syntaxEditor_TriggerActivated; _syntaxEditor.KeyTyped += syntaxEditor_KeyTyped; _syntaxEditor.KeyDown += syntaxEditor_KeyDown; _syntaxEditor.SelectionChanged += syntaxEditor_SelectionChanged; _bound = true; } } private void SetupLanguage() { _syntaxEditor.Document.ResetLanguage(); using (Stream languageFileStream = GetLanguageFileStream()) { _syntaxEditor.Document.LoadLanguageFromXml(languageFileStream, 0); } } private Stream GetLanguageFileStream() { XmlDocument document; using (Stream languageFileStream = Assembly.GetExecutingAssembly().GetManifestResourceStream(typeof(ActiproLink), "ActiproSoftware.SQL.xml")) { document = new XmlDocument(); document.Load(languageFileStream); } XmlNamespaceManager namespaceManager = new XmlNamespaceManager(document.NameTable); namespaceManager.AddNamespace("ns", "http://ActiproSoftware/SyntaxEditor/4.0/LanguageDefinition"); ScopeInfo scopeInfo = GetScopeInfo(); string reservedWordsNodeXPath = "/ns:SyntaxLanguage/ns:States/ns:State[@Key='DefaultState']/ns:PatternGroups/ns:ExplicitPatternGroup[@TokenKey='ReservedWordToken']/ns:ExplicitPatterns"; XmlNode reservedWordsNode = document.SelectSingleNode(reservedWordsNodeXPath, namespaceManager); reservedWordsNode.InnerText = scopeInfo.GetKeywords(); string functionTokensNodeXPath = "/ns:SyntaxLanguage/ns:States/ns:State/ns:PatternGroups/ns:ExplicitPatternGroup[@TokenKey='FunctionToken']/ns:ExplicitPatterns"; XmlNode functionTokensNode = document.SelectSingleNode(functionTokensNodeXPath, namespaceManager); functionTokensNode.InnerText = scopeInfo.GetFunctionsAndAggregates(); MemoryStream 
memoryStream = new MemoryStream(); try { document.Save(memoryStream); memoryStream.Position = 0; return memoryStream; } catch { memoryStream.Close(); throw; } } private void Unbind() { if (_bound) { if (_enableAutoReplace) _syntaxEditor.AutoReplace.Clear(); if (_syntaxEditor.Document != null) _syntaxEditor.Document.ResetLanguage(); _syntaxEditor.TriggerActivated -= syntaxEditor_TriggerActivated; _syntaxEditor.KeyTyped -= syntaxEditor_KeyTyped; _syntaxEditor.KeyDown -= syntaxEditor_KeyDown; _syntaxEditor.SelectionChanged -= syntaxEditor_SelectionChanged; _bound = false; } } private SourceLocation CurrentLocation { get { return new SourceLocation(_syntaxEditor.Caret.DocumentPosition.Character, _syntaxEditor.Caret.DocumentPosition.Line); } } [SuppressMessage("Microsoft.Globalization", "CA1308:NormalizeStringsToUppercase")] private void SetupAutoReplacement() { ScopeInfo scopeInfo = GetScopeInfo(); _syntaxEditor.AutoReplace.Clear(); foreach (string reservedWord in scopeInfo.ReservedWords.Keys) { string lowerCaseReservedWord = reservedWord.ToLowerInvariant(); string upperCaseReservedWord = reservedWord.ToUpperInvariant(); _syntaxEditor.AutoReplace.Add(new AutoReplaceEntry(lowerCaseReservedWord, upperCaseReservedWord)); } } private void ShowErrorQuickInfo(Exception exception) { // Calculate the point of the line above the current one and // display an info tip with the error message. 
DocumentPosition pos = _syntaxEditor.Caret.DocumentPosition; if (pos.Line > 0) pos = new DocumentPosition(pos.Line - 1, pos.Character); else pos = new DocumentPosition(pos.Line + 1, pos.Character); Point tipLocation; if (pos.Line >= 0 && pos.Line < _syntaxEditor.Document.Lines.Count) { int offset = _syntaxEditor.Document.PositionToOffset(pos); tipLocation = _syntaxEditor.SelectedView.GetCharacterBounds(offset).Location; } else { tipLocation = new Point(0, -20); } StringBuilder sb = new StringBuilder(); using (StringWriter sw = new StringWriter(sb, CultureInfo.CurrentCulture)) { XmlWriter writer = new XmlTextWriter(sw); writer.WriteStartElement("b"); writer.WriteString("Error: "); writer.WriteEndElement(); writer.WriteString(exception.Message); } string markup = sb.ToString(); _syntaxEditor.IntelliPrompt.QuickInfo.Hide(); _syntaxEditor.IntelliPrompt.QuickInfo.Show(tipLocation, markup); } public TextRange GetTextRange(SourceRange sourceRange) { EnsureBound(); DocumentPosition startPosition = new DocumentPosition(sourceRange.StartLine, sourceRange.StartColumn); DocumentPosition endPosition = new DocumentPosition(sourceRange.EndLine, sourceRange.EndColumn); int startOffset = _syntaxEditor.Document.PositionToOffset(startPosition); int endOffset = _syntaxEditor.Document.PositionToOffset(endPosition) + 1; if (startOffset > _syntaxEditor.Document.Length) startOffset = _syntaxEditor.Document.Length - 1; if (endOffset > _syntaxEditor.Document.Length) endOffset = _syntaxEditor.Document.Length - 1; return new TextRange(startOffset, endOffset); } private void LoadErrors(IEnumerable<CompilationError> errors) { _syntaxEditor.Document.SpanIndicatorLayers.Clear(); if (errors != null) { SpanIndicatorLayer spanIndicatorLayer = new SpanIndicatorLayer(null, 0); _syntaxEditor.Document.SpanIndicatorLayers.Add(spanIndicatorLayer); foreach (CompilationError error in errors) { if (error.SourceRange != SourceRange.None) { TextRange textRange = GetTextRange(error.SourceRange); SpanIndicator[] 
existingIndicators = spanIndicatorLayer.GetIndicatorsForTextRange(textRange); if (existingIndicators.Length == 0) { SyntaxErrorSpanIndicator errorIndicator = new SyntaxErrorSpanIndicator(); errorIndicator.Tag = error; spanIndicatorLayer.Add(errorIndicator, textRange); } } } } } #endregion #region Events Subscriptions private void evaluatable_DataContextChanged(object sender, EventArgs e) { Unbind(); if (CanBind) Bind(); } private void evaluatable_CompilationFailed(object sender, CompilationFailedEventArgs e) { LoadErrors(e.CompilationErrors); } private void evaluatable_CompilationSucceeded(object sender, EventArgs e) { LoadErrors(null); } private void syntaxEditor_TriggerActivated(object sender, TriggerEventArgs e) { switch (e.Trigger.Key) { case "MemberListTrigger": if (_enableAutoPopupAfterDot) ListMembers(); break; case "ShowParameterInfoTrigger": case "UpdateParameterInfoTrigger": if (_enableAutoPopupAfterParenthesis) ShowParameterInfoPopup(); break; case "HideParameterInfoTrigger": if (_enableAutoPopupAfterParenthesis) HideParameterInfoPopup(); break; } } private void syntaxEditor_KeyTyped(object sender, KeyTypedEventArgs e) { if (ShouldAutoPopupListMembers(e.KeyChar)) ListMembers(); } private void syntaxEditor_KeyDown(object sender, KeyEventArgs e) { if (_enableAutoReplace && e.KeyData == Keys.Tab && _syntaxEditor.IntelliPrompt.MemberList.Visible) _syntaxEditor.IntelliPrompt.MemberList.Complete(); } private void syntaxEditor_SelectionChanged(object sender, SelectionEventArgs e) { UpdateParameterInfo(); } #endregion #region Member Completion private void ShowMemberListPopup(bool autoComplete) { IntelliPromptMemberList memberList = _syntaxEditor.IntelliPrompt.MemberList; memberList.Clear(); memberList.ImageList = memberImageList; memberList.Sorted = false; try { _evaluatable.Text = _syntaxEditor.Text; ICodeAssistanceContextProvider codeAssistanceContextProvider = _evaluatable.GetCodeAssistanceContextProvider(); IMemberCompletionContext completionContext = 
codeAssistanceContextProvider.ProvideMemberCompletionContext(CurrentLocation); MemberAcceptor acceptor = new MemberAcceptor(memberList); completionContext.Enumerate(acceptor); acceptor.FlushBuffer(); if (memberList.Count > 0) { memberList.Sorted = true; if (completionContext.RemainingPart == null) { memberList.Show(_syntaxEditor.Caret.Offset, 0); } else { IToken t = _syntaxEditor.SelectedView.GetCurrentToken(); string tokenText = _syntaxEditor.Document.GetTokenText(t); string remainingText = completionContext.RemainingPart.Text.ToUpper(CultureInfo.InvariantCulture); if (t.StartOffset >= _syntaxEditor.Text.Length || !tokenText.ToUpper(CultureInfo.InvariantCulture).StartsWith(remainingText, StringComparison.OrdinalIgnoreCase)) t = _syntaxEditor.SelectedView.GetPreviousToken(); if (t != null) { if (autoComplete) memberList.CompleteWord(t.StartOffset, t.Length); else memberList.Show(t.StartOffset, t.Length); } } } } catch (NQueryException ex) { ShowErrorQuickInfo(ex); } } private void HideMemberListPopup() { IntelliPromptMemberList memberList = _syntaxEditor.IntelliPrompt.MemberList; memberList.Abort(); } private static void FindPosition(TokenStream tokenStream, int offset) { tokenStream.Position = 0; if (offset < 0) return; while (tokenStream.Position < tokenStream.Length) { IToken currentToken = tokenStream.Peek(); if (currentToken.StartOffset <= offset && offset < currentToken.StartOffset + currentToken.Length) break; else tokenStream.Read(); } } private static IToken GetFirstNonWhitespaceToken(TokenStream tokenStream) { if (!tokenStream.FindNonWhitespace(false)) return null; return tokenStream.Peek(); } private bool IsAsKeyword(IToken token) { string tokenText = _syntaxEditor.Document.GetTokenText(token); return String.Compare(tokenText, "AS", StringComparison.OrdinalIgnoreCase) == 0; } private bool ShouldAutoPopupListMembers(char keyChar) { if (_enableAutoPopupAfterCharacter && !_syntaxEditor.IntelliPrompt.MemberList.Visible && Char.IsLetter(keyChar)) { IToken 
tokenAfterCursor = _syntaxEditor.SelectedView.GetCurrentToken(); TokenStream tokenStream = _syntaxEditor.Document.GetTokenStream(0); FindPosition(tokenStream, tokenAfterCursor.StartOffset - 1); IToken token0BeforeCursor = tokenStream.Peek(); IToken token1BeforeCursor = GetFirstNonWhitespaceToken(tokenStream); IToken token2BeforeCursor = GetFirstNonWhitespaceToken(tokenStream); if (token0BeforeCursor.Key != "IdentifierToken") return false; if (token1BeforeCursor == null) return true; switch (token1BeforeCursor.Key) { case "ReservedWordToken": return !IsAsKeyword(token1BeforeCursor); case "IdentifierToken": if (token2BeforeCursor != null && (token2BeforeCursor.Key == "IdentifierToken" || token2BeforeCursor.Key == "ReservedWordToken")) return true; return false; case "DefaultToken": case "OperatorToken": case "OpenParenthesisToken": return true; } } return false; } #endregion #region Parameter Info private void ShowParameterInfoPopup() { _parameterInfoLocation = CurrentLocation; _evaluatable.Text = _syntaxEditor.Text; IntelliPromptParameterInfo infoTip = _syntaxEditor.IntelliPrompt.ParameterInfo; int lastSelectedFunction = infoTip.SelectedIndex; infoTip.Info.Clear(); try { ICodeAssistanceContextProvider codeAssistanceContextProvider = _evaluatable.GetCodeAssistanceContextProvider(); IParameterInfoContext parameterInfoContext = codeAssistanceContextProvider.ProvideParameterInfoContext(CurrentLocation); ParameterInfoAcceptor acceptor = new ParameterInfoAcceptor(infoTip, parameterInfoContext.ParameterIndex); parameterInfoContext.Enumerate(acceptor); if (infoTip.Info.Count == 0) { infoTip.Hide(); } else { infoTip.SelectedIndex = lastSelectedFunction; infoTip.Show(_syntaxEditor.Caret.Offset); } } catch (NQueryException ex) { ShowErrorQuickInfo(ex); } } private void HideParameterInfoPopup() { if (_syntaxEditor.IntelliPrompt.ParameterInfo.Visible) _syntaxEditor.IntelliPrompt.ParameterInfo.Hide(); } private void UpdateParameterInfo() { if (CurrentLocation.Line != 
_parameterInfoLocation.Line) HideParameterInfoPopup(); else if (_syntaxEditor.IntelliPrompt.ParameterInfo.Visible) ShowParameterInfoPopup(); } #endregion #region Public API [DefaultValue(true)] public bool EnableAutoReplace { get { return _enableAutoReplace; } set { _enableAutoReplace = value; if (_bound) { if (value) SetupAutoReplacement(); else _syntaxEditor.AutoReplace.Clear(); } } } [DefaultValue(true)] public bool EnableAutoPopupAfterDot { get { return _enableAutoPopupAfterDot; } set { _enableAutoPopupAfterDot = value; } } [DefaultValue(true)] public bool EnableAutoPopupAfterParenthesis { get { return _enableAutoPopupAfterParenthesis; } set { _enableAutoPopupAfterParenthesis = value; } } [DefaultValue(true)] public bool EnableAutoPopupAfterCharacter { get { return _enableAutoPopupAfterCharacter; } set { _enableAutoPopupAfterCharacter = value; } } public SyntaxEditor SyntaxEditor { get { return _syntaxEditor; } set { Unbind(); _syntaxEditor = value; if (CanBind) Bind(); } } public Evaluatable Evaluatable { get { return _evaluatable; } set { if (_evaluatable != null) { _evaluatable.DataContextChanged -= evaluatable_DataContextChanged; _evaluatable.CompilationFailed -= evaluatable_CompilationFailed; _evaluatable.CompilationSucceeded -= evaluatable_CompilationSucceeded; } Unbind(); _evaluatable = value; if (_evaluatable != null) { _evaluatable.DataContextChanged += evaluatable_DataContextChanged; _evaluatable.CompilationFailed += evaluatable_CompilationFailed; _evaluatable.CompilationSucceeded += evaluatable_CompilationSucceeded; } if (CanBind) Bind(); } } public void ListMembers() { EnsureBound(); IntelliPromptMemberList memberList = _syntaxEditor.IntelliPrompt.MemberList; if (memberList.Visible) HideMemberListPopup(); else ShowMemberListPopup(false); } public void CompleteWord() { EnsureBound(); IntelliPromptMemberList memberList = _syntaxEditor.IntelliPrompt.MemberList; if (memberList.Visible) HideMemberListPopup(); else ShowMemberListPopup(true); } public void 
ParameterInfo() { EnsureBound(); IntelliPromptParameterInfo infoTip = _syntaxEditor.IntelliPrompt.ParameterInfo; if (infoTip.Visible) HideParameterInfoPopup(); else ShowParameterInfoPopup(); } #endregion } }
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion

using Amazon.SimpleEmail;
using Amazon.SimpleEmail.Model;
using log4net.Core;
using System;
using System.IO;
using System.Linq;

namespace log4net.Appender
{
    /// <summary>
    /// Send an e-mail via Amazon Simple Email Service (SES) when a specific logging event occurs
    /// </summary>
    /// <remarks>
    /// <para>
    /// The number of logging events delivered in this e-mail depend on
    /// the value of <see cref="BufferingAppenderSkeleton.BufferSize"/> option. The
    /// <see cref="AmazonSesAppender"/> keeps only the last
    /// <see cref="BufferingAppenderSkeleton.BufferSize"/> logging events in its
    /// cyclic buffer. This keeps memory requirements at a reasonable level while
    /// still delivering useful application context.
    /// </para>
    /// <para>
    /// Authentication is supported by setting the <see cref="AccessKey"/>
    /// and <see cref="SecretKey"/> properties.
    /// </para>
    /// <para>
    /// Use <see cref="From"/> to set the sender e-mail address. The AWS region is
    /// currently fixed to EU West 1 (see <see cref="SendEmail"/>).
    /// </para>
    /// </remarks>
    /// <author>Luke Briner</author>
    public class AmazonSesAppender : BufferingAppenderSkeleton
    {
        #region Public Instance Constructors

        /// <summary>
        /// Default constructor
        /// </summary>
        /// <remarks>
        /// <para>
        /// Default constructor
        /// </para>
        /// </remarks>
        public AmazonSesAppender()
        {
        }

        #endregion // Public Instance Constructors

        #region Public Instance Properties

        /// <summary>
        /// Gets or sets a comma- or semicolon-delimited list of recipient e-mail addresses (comma is the preferred delimiter).
        /// </summary>
        public string To
        {
            get { return m_to; }
            set { m_to = MaybeTrimSeparators(value); }
        }

        /// <summary>
        /// Gets or sets a comma- or semicolon-delimited list of recipient e-mail addresses
        /// that will be carbon copied (comma is the preferred delimiter).
        /// </summary>
        public string Cc
        {
            get { return m_cc; }
            set { m_cc = MaybeTrimSeparators(value); }
        }

        /// <summary>
        /// Gets or sets a comma- or semicolon-delimited list of recipient e-mail addresses
        /// that will be blind carbon copied.
        /// </summary>
        public string Bcc
        {
            get { return m_bcc; }
            set { m_bcc = MaybeTrimSeparators(value); }
        }

        /// <summary>
        /// Gets or sets the e-mail address of the sender.
        /// </summary>
        public string From { get; set; }

        /// <summary>
        /// Gets or sets the subject line of the e-mail message.
        /// </summary>
        public string Subject { get; set; }

        /// <summary>
        /// The Access Key to use to authenticate with the Amazon SES service
        /// </summary>
        public string AccessKey { get; set; }

        /// <summary>
        /// The Secret Key to use to authenticate with the Amazon SES service
        /// </summary>
        public string SecretKey { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the mail message body is in HTML.
        /// </summary>
        virtual public bool IsBodyHTML { get; set; }

        #endregion

        #region Override implementation of BufferingAppenderSkeleton

        /// <summary>
        /// Sends the contents of the cyclic buffer as an e-mail message.
        /// </summary>
        /// <param name="events">The logging events to send.</param>
        override protected void SendBuffer(LoggingEvent[] events)
        {
            // Note: this code already owns the monitor for this
            // appender. This frees us from needing to synchronize again.
            try
            {
                SendEmail(GetMailMessageBody(events));
            }
            catch (Exception e)
            {
                ErrorHandler.Error("Error occurred while sending e-mail notification.", e);
            }
        }

        #endregion // Override implementation of BufferingAppenderSkeleton

        #region Override implementation of AppenderSkeleton

        /// <summary>
        /// This appender requires a <see cref="Layout"/> to be set.
        /// </summary>
        /// <value><c>true</c></value>
        /// <remarks>
        /// <para>
        /// This appender requires a <see cref="Layout"/> to be set.
        /// </para>
        /// </remarks>
        override protected bool RequiresLayout
        {
            get { return true; }
        }

        #endregion // Override implementation of AppenderSkeleton

        #region Protected Methods

        /// <summary>
        /// Creates the body of the message to send: layout header, each
        /// rendered event, then the layout footer.
        /// </summary>
        /// <param name="events">The buffered logging events to render.</param>
        /// <returns>The rendered message body.</returns>
        virtual protected string GetMailMessageBody(LoggingEvent[] events)
        {
            // InvariantCulture keeps the rendered output machine-stable
            // regardless of the host's locale.
            using (StringWriter writer = new StringWriter(System.Globalization.CultureInfo.InvariantCulture))
            {
                string t = Layout.Header;
                if (t != null)
                {
                    writer.Write(t);
                }

                for (int i = 0; i < events.Length; i++)
                {
                    // Render the event and append the text to the buffer
                    RenderLoggingEvent(writer, events[i]);
                }

                t = Layout.Footer;
                if (t != null)
                {
                    writer.Write(t);
                }

                return writer.ToString();
            }
        }

        /// <summary>
        /// Send the email message
        /// </summary>
        /// <param name="messageBody">the body text to include in the mail</param>
        virtual protected void SendEmail(string messageBody)
        {
            // Create the email object first, then add the properties.
            var subject = new Content(Subject);
            Body body = new Body();
            if (IsBodyHTML)
            {
                body.Html = new Content(messageBody);
            }
            else
            {
                body.Text = new Content(messageBody);
            }

            var destination = new Destination();
            var msg = new Message(subject, body);

            destination.ToAddresses = m_to.Split(ADDRESS_DELIMITERS).ToList();
            if (!String.IsNullOrWhiteSpace(m_cc))
            {
                destination.CcAddresses = m_cc.Split(ADDRESS_DELIMITERS).ToList();
            }
            if (!String.IsNullOrWhiteSpace(m_bcc))
            {
                destination.BccAddresses = m_bcc.Split(ADDRESS_DELIMITERS).ToList();
            }

            var request = new SendEmailRequest(From, destination, msg);
            var credentials = new Amazon.Runtime.BasicAWSCredentials(AccessKey, SecretKey);

            // The client is IDisposable; the previous implementation leaked it.
            // NOTE(review): the region is hard-coded to EUWest1 — consider making
            // it configurable.
            using (var client = new AmazonSimpleEmailServiceClient(credentials, Amazon.RegionEndpoint.EUWest1))
            {
                try
                {
                    // Block until the send completes. The previous fire-and-forget
                    // call meant any exception thrown by the task was never observed,
                    // so the catch block below could never fire.
                    client.SendEmailAsync(request).GetAwaiter().GetResult();
                }
                catch (Exception ex)
                {
                    ErrorHandler.Error("Error occurred while sending e-mail notification.", ex);
                }
            }
        }

        #endregion // Protected Methods

        #region Private Instance Fields

        private string m_to;
        private string m_cc;
        private string m_bcc;

        #endregion // Private Instance Fields

        // Accepted delimiters between addresses in To/Cc/Bcc lists.
        private static readonly char[] ADDRESS_DELIMITERS = new char[] { ',', ';' };

        /// <summary>
        /// trims leading and trailing commas or semicolons
        /// </summary>
        private static string MaybeTrimSeparators(string s)
        {
            return string.IsNullOrEmpty(s) ? s : s.Trim(ADDRESS_DELIMITERS);
        }
    }
}
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Reflection;
using Medivh.Json.Utilities;
using System.Collections;
#if NET20
using Medivh.Json.Utilities.LinqBridge;
#endif

namespace Medivh.Json.Serialization
{
    /// <summary>
    /// Contract details for a <see cref="System.Type"/> used by the <see cref="JsonSerializer"/>.
    /// </summary>
    public class JsonDictionaryContract : JsonContainerContract
    {
        /// <summary>
        /// Gets or sets the property name resolver.
        /// </summary>
        /// <value>The property name resolver.</value>
        [Obsolete("PropertyNameResolver is obsolete. Use DictionaryKeyResolver instead.")]
        public Func<string, string> PropertyNameResolver
        {
            // Forwards to DictionaryKeyResolver so legacy callers keep working.
            get { return DictionaryKeyResolver; }
            set { DictionaryKeyResolver = value; }
        }

        /// <summary>
        /// Gets or sets the dictionary key resolver.
        /// </summary>
        /// <value>The dictionary key resolver.</value>
        public Func<string, string> DictionaryKeyResolver { get; set; }

        /// <summary>
        /// Gets the <see cref="System.Type"/> of the dictionary keys.
        /// </summary>
        /// <value>The <see cref="System.Type"/> of the dictionary keys.</value>
        public Type DictionaryKeyType { get; private set; }

        /// <summary>
        /// Gets the <see cref="System.Type"/> of the dictionary values.
        /// </summary>
        /// <value>The <see cref="System.Type"/> of the dictionary values.</value>
        public Type DictionaryValueType { get; private set; }

        // Contract used when serializing/deserializing the dictionary keys.
        internal JsonContract KeyContract { get; set; }

        // The closed IDictionary<,>/IReadOnlyDictionary<,> interface the
        // underlying type implements (set by the constructor), if any.
        private readonly Type _genericCollectionDefinitionType;

        // Lazily-created wrapper machinery — see CreateWrapper.
        private Type _genericWrapperType;
        private ObjectConstructor<object> _genericWrapperCreator;
        private Func<object> _genericTemporaryDictionaryCreator;

        // True when CreatedType does not implement non-generic IDictionary and
        // must be wrapped to be used through that interface.
        internal bool ShouldCreateWrapper { get; private set; }

        // Constructor taking IEnumerable<KeyValuePair<K,V>>, if the created type has one.
        private readonly ConstructorInfo _parameterizedConstructor;

        private ObjectConstructor<object> _overrideCreator;
        private ObjectConstructor<object> _parameterizedCreator;

        // Lazily compiles a delegate around _parameterizedConstructor.
        internal ObjectConstructor<object> ParameterizedCreator
        {
            get
            {
                if (_parameterizedCreator == null)
                {
                    _parameterizedCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateParameterizedConstructor(_parameterizedConstructor);
                }

                return _parameterizedCreator;
            }
        }

        /// <summary>
        /// Gets or sets the function used to create the object. When set this function will override <see cref="JsonContract.DefaultCreator"/>.
        /// </summary>
        /// <value>The function used to create the object.</value>
        public ObjectConstructor<object> OverrideCreator
        {
            get { return _overrideCreator; }
            set { _overrideCreator = value; }
        }

        /// <summary>
        /// Gets a value indicating whether the creator has a parameter with the dictionary values.
        /// </summary>
        /// <value><c>true</c> if the creator has a parameter with the dictionary values; otherwise, <c>false</c>.</value>
        public bool HasParameterizedCreator { get; set; }

        // True when any parameterized construction path is available
        // (explicit flag, compiled delegate, or reflected constructor).
        internal bool HasParameterizedCreatorInternal
        {
            get { return (HasParameterizedCreator || _parameterizedCreator != null || _parameterizedConstructor != null); }
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="JsonDictionaryContract"/> class.
        /// </summary>
        /// <param name="underlyingType">The underlying type for the contract.</param>
        public JsonDictionaryContract(Type underlyingType)
            : base(underlyingType)
        {
            ContractType = JsonContractType.Dictionary;

            Type keyType;
            Type valueType;

            // Resolve key/value types, preferring IDictionary<,> over
            // IReadOnlyDictionary<,> over the non-generic fallback.
            if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(IDictionary<,>), out _genericCollectionDefinitionType))
            {
                keyType = _genericCollectionDefinitionType.GetGenericArguments()[0];
                valueType = _genericCollectionDefinitionType.GetGenericArguments()[1];

                // A bare IDictionary<K,V> is materialized as Dictionary<K,V>.
                if (ReflectionUtils.IsGenericDefinition(UnderlyingType, typeof(IDictionary<,>)))
                {
                    CreatedType = typeof(Dictionary<,>).MakeGenericType(keyType, valueType);
                }

#if !(NET40 || NET35 || NET20 || PORTABLE40)
                IsReadOnlyOrFixedSize = ReflectionUtils.InheritsGenericDefinition(underlyingType, typeof(ReadOnlyDictionary<,>));
#endif
            }
#if !(NET40 || NET35 || NET20 || PORTABLE40)
            else if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(IReadOnlyDictionary<,>), out _genericCollectionDefinitionType))
            {
                keyType = _genericCollectionDefinitionType.GetGenericArguments()[0];
                valueType = _genericCollectionDefinitionType.GetGenericArguments()[1];

                // A bare IReadOnlyDictionary<K,V> is materialized as ReadOnlyDictionary<K,V>.
                if (ReflectionUtils.IsGenericDefinition(UnderlyingType, typeof(IReadOnlyDictionary<,>)))
                {
                    CreatedType = typeof(ReadOnlyDictionary<,>).MakeGenericType(keyType, valueType);
                }

                IsReadOnlyOrFixedSize = true;
            }
#endif
            else
            {
                // Non-generic dictionary: best-effort key/value type discovery.
                ReflectionUtils.GetDictionaryKeyValueTypes(UnderlyingType, out keyType, out valueType);

                if (UnderlyingType == typeof(IDictionary))
                {
                    CreatedType = typeof(Dictionary<object, object>);
                }
            }

            if (keyType != null && valueType != null)
            {
                // Look for a constructor taking IEnumerable<KeyValuePair<K,V>>.
                _parameterizedConstructor = CollectionUtils.ResolveEnumerableCollectionConstructor(CreatedType, typeof(KeyValuePair<,>).MakeGenericType(keyType, valueType));

#if !(NET35 || NET20)
                // F# Map has no such public constructor; use the F# helper instead.
                if (!HasParameterizedCreatorInternal && underlyingType.Name == FSharpUtils.FSharpMapTypeName)
                {
                    FSharpUtils.EnsureInitialized(underlyingType.Assembly());
                    _parameterizedCreator = FSharpUtils.CreateMap(keyType, valueType);
                }
#endif
            }

            ShouldCreateWrapper = !typeof(IDictionary).IsAssignableFrom(CreatedType);

            DictionaryKeyType = keyType;
            DictionaryValueType = valueType;

#if (NET20 || NET35)
            if (DictionaryValueType != null && ReflectionUtils.IsNullableType(DictionaryValueType))
            {
                Type tempDictioanryType;

                // bug in .NET 2.0 & 3.5 that Dictionary<TKey, Nullable<TValue>> throws an error when adding null via IDictionary[key] = object
                // wrapper will handle calling Add(T) instead
                if (ReflectionUtils.InheritsGenericDefinition(CreatedType, typeof(Dictionary<,>), out tempDictioanryType))
                {
                    ShouldCreateWrapper = true;
                }
            }
#endif

#if !(NET20 || NET35 || NET40)
            // Immutable collections (e.g. ImmutableDictionary<,>) are built via a
            // dedicated creator and are always read-only.
            Type immutableCreatedType;
            ObjectConstructor<object> immutableParameterizedCreator;
            if (ImmutableCollectionsUtils.TryBuildImmutableForDictionaryContract(underlyingType, DictionaryKeyType, DictionaryValueType, out immutableCreatedType, out immutableParameterizedCreator))
            {
                CreatedType = immutableCreatedType;
                _parameterizedCreator = immutableParameterizedCreator;
                IsReadOnlyOrFixedSize = true;
            }
#endif
        }

        // Wraps a generic dictionary so it can be used through the non-generic
        // IDictionary interface. The wrapper creator is built once and cached.
        internal IWrappedDictionary CreateWrapper(object dictionary)
        {
            if (_genericWrapperCreator == null)
            {
                _genericWrapperType = typeof(DictionaryWrapper<,>).MakeGenericType(DictionaryKeyType, DictionaryValueType);

                ConstructorInfo genericWrapperConstructor = _genericWrapperType.GetConstructor(new[] { _genericCollectionDefinitionType });
                _genericWrapperCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateParameterizedConstructor(genericWrapperConstructor);
            }

            return (IWrappedDictionary)_genericWrapperCreator(dictionary);
        }

        // Creates a mutable Dictionary<K,V> used to accumulate values before a
        // read-only/immutable target is constructed. Key/value types default to
        // object when unknown. The default-constructor delegate is cached.
        internal IDictionary CreateTemporaryDictionary()
        {
            if (_genericTemporaryDictionaryCreator == null)
            {
                Type temporaryDictionaryType = typeof(Dictionary<,>).MakeGenericType(DictionaryKeyType ?? typeof(object), DictionaryValueType ?? typeof(object));

                _genericTemporaryDictionaryCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateDefaultConstructor<object>(temporaryDictionaryType);
            }

            return (IDictionary)_genericTemporaryDictionaryCreator();
        }
    }
}
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using DotSpatial.Serialization;

namespace DotSpatial.Symbology.Forms
{
    /// <summary>
    /// SymbolSizeChooser: a control that renders a row (or column) of boxes, each
    /// previewing the symbol at a different size, and lets the user pick one.
    /// </summary>
    [DefaultEvent("SelectedSizeChanged")]
    [ToolboxItem(false)]
    public class SymbolSizeChooser : Control, ISupportInitialize
    {
        #region Fields

        private Color _boxBackColor;
        private List<SizeBox> _boxes;
        private Color _boxSelectionColor;
        private Size _boxSize;
        private bool _isInitializing; // suppresses RefreshBoxes between BeginInit/EndInit
        private Size2D _maxSize;
        private Size2D _minSize;
        private int _numBoxes;
        private Orientation _orientation;
        private int _roundingRadius;
        private Size2D _selectedSize;
        private ISymbol _symbol;

        #endregion

        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="SymbolSizeChooser"/> class.
        /// </summary>
        public SymbolSizeChooser()
        {
            Configure();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SymbolSizeChooser"/> class.
        /// </summary>
        /// <param name="symbol">The symbol to draw.</param>
        public SymbolSizeChooser(ISymbol symbol)
        {
            Configure();
            _symbol = symbol;
        }

        #endregion

        #region Events

        /// <summary>
        /// Occurs when the selected size has changed
        /// </summary>
        public event EventHandler SelectedSizeChanged;

        #endregion

        #region Properties

        /// <summary>
        /// Gets or sets the normal background color for the boxes.
        /// </summary>
        [Description("Gets or sets the normal background color for the boxes.")]
        public Color BoxBackColor
        {
            get { return _boxBackColor; }
            set
            {
                _boxBackColor = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the box selection color
        /// </summary>
        [Description("Gets or sets the box selection color")]
        public Color BoxSelectionColor
        {
            get { return _boxSelectionColor; }
            set
            {
                _boxSelectionColor = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the rectangular extent for all the boxes. This is not the size of the symbol.
        /// </summary>
        [Description("Gets or sets the rectangular extent for all the boxes. This is not the size of the symbol.")]
        public Size BoxSize
        {
            get { return _boxSize; }
            set
            {
                _boxSize = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the maximum symbol size.
        /// </summary>
        [Description("Gets or sets the maximum symbol size.")]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public Size2D MaximumSymbolSize
        {
            get { return _maxSize; }
            set
            {
                _maxSize = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the minimum symbol size
        /// </summary>
        [Description("Gets or sets the minimum symbol size")]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public Size2D MinimumSymbolSize
        {
            get { return _minSize; }
            set
            {
                _minSize = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the number of boxes
        /// </summary>
        [Description("Gets or sets the number of boxes")]
        public int NumBoxes
        {
            get { return _numBoxes; }
            set
            {
                _numBoxes = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets whether the boxes are drawn horizontally or vertically.
        /// </summary>
        [Description("Gets or sets whether the boxes are drawn horizontally or vertically.")]
        public Orientation Orientation
        {
            get { return _orientation; }
            set
            {
                _orientation = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the rounding radius for the boxes
        /// </summary>
        [Description("Gets or sets the rounding radius for the boxes")]
        public int RoundingRadius
        {
            get { return _roundingRadius; }
            set
            {
                _roundingRadius = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the currently selected size.
        /// </summary>
        [Description("Gets or sets the currently selected size.")]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public Size2D SelectedSize
        {
            get { return _selectedSize; }
            set
            {
                _selectedSize = value;
                if (!_isInitializing) Invalidate();
            }
        }

        /// <summary>
        /// Gets or sets the symbol to use for this control.
        /// </summary>
        [Browsable(false)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        public ISymbol Symbol
        {
            get { return _symbol; }
            set
            {
                _symbol = value;

                // Mark as selected whichever box matches the new symbol's size.
                foreach (SizeBox sb in _boxes)
                {
                    sb.IsSelected = _symbol.Size == sb.Size;
                }

                if (!_isInitializing) Invalidate();
            }
        }

        #endregion

        #region Methods

        /// <summary>
        /// Prevent redundant updates to the boxes every time a property is changed
        /// </summary>
        public void BeginInit()
        {
            _isInitializing = true;
        }

        /// <summary>
        /// Enable live updating so that from now on, changes rebuild boxes.
        /// </summary>
        public void EndInit()
        {
            _isInitializing = false;
            RefreshBoxes();
        }

        /// <summary>
        /// Forces the box sub-categories to refresh given the new content.
        /// </summary>
        public virtual void RefreshBoxes()
        {
            _boxes = new List<SizeBox>();
            for (int i = 0; i < NumBoxes; i++)
            {
                CreateBox(i);
            }

            Invalidate();
        }

        /// <summary>
        /// Occurs during drawing but is overridable by subclasses.
        /// </summary>
        /// <param name="g">The graphics object used for drawing.</param>
        /// <param name="clip">The clip rectangle.</param>
        protected virtual void OnDraw(Graphics g, Rectangle clip)
        {
            foreach (SizeBox sb in _boxes)
            {
                sb.Draw(g, clip, _symbol);
            }
        }

        /// <summary>
        /// Handles the mouse up situation: selects the clicked box (if any),
        /// copies its size into the symbol, and raises SelectedSizeChanged.
        /// </summary>
        /// <param name="e">The event args.</param>
        protected override void OnMouseUp(MouseEventArgs e)
        {
            bool changed = false;
            foreach (SizeBox sb in _boxes)
            {
                if (sb.Bounds.Contains(e.Location))
                {
                    if (!sb.IsSelected)
                    {
                        sb.IsSelected = true;
                        _selectedSize = sb.Size.Copy();
                        _symbol.Size.Height = sb.Size.Height;
                        _symbol.Size.Width = sb.Size.Width;
                        changed = true;
                    }
                }
                else
                {
                    if (sb.IsSelected)
                    {
                        sb.IsSelected = false;
                    }
                }
            }

            Invalidate();
            if (changed) OnSelectedSizeChanged();
            base.OnMouseUp(e);
        }

        /// <summary>
        /// Occurs as the SymbolSizeChooser control is being drawn. Renders into an
        /// off-screen bitmap and blits it, to avoid flicker.
        /// </summary>
        /// <param name="e">The event args.</param>
        protected override void OnPaint(PaintEventArgs e)
        {
            Rectangle clip = e.ClipRectangle;
            if (clip.IsEmpty) clip = ClientRectangle;

            // Bitmap's constructor throws on non-positive dimensions, which can
            // happen while the control is collapsed.
            if (clip.Width <= 0 || clip.Height <= 0) return;

            // Both the bitmap and the graphics hold GDI+ handles; the previous
            // implementation never disposed the bitmap, leaking a handle per paint.
            using (Bitmap bmp = new Bitmap(clip.Width, clip.Height))
            {
                using (Graphics g = Graphics.FromImage(bmp))
                {
                    g.TranslateTransform(-clip.X, -clip.Y);
                    g.Clip = new Region(clip);
                    g.Clear(BackColor);
                    g.SmoothingMode = SmoothingMode.AntiAlias;
                    OnDraw(g, clip);
                }

                e.Graphics.DrawImage(bmp, clip, new Rectangle(0, 0, clip.Width, clip.Height), GraphicsUnit.Pixel);
            }
        }

        /// <summary>
        /// Prevent flicker
        /// </summary>
        /// <param name="e">The event args.</param>
        protected override void OnPaintBackground(PaintEventArgs e)
        {
            // Intentionally empty: all painting happens in OnPaint's buffer.
            // base.OnPaintBackground(pevent);
        }

        /// <summary>
        /// Fires the selected size changed event.
        /// </summary>
        protected virtual void OnSelectedSizeChanged()
        {
            SelectedSizeChanged?.Invoke(this, EventArgs.Empty);
        }

        // Establishes the control's default state (4 boxes from 4x4 to 30x30,
        // horizontal, system colors) and builds the initial boxes.
        private void Configure()
        {
            _boxes = new List<SizeBox>();
            _numBoxes = 4;
            _minSize = new Size2D(4, 4);
            _maxSize = new Size2D(30, 30);
            _selectedSize = _minSize.Copy();
            _boxSize = new Size(36, 36);
            _boxBackColor = SystemColors.Control;
            _boxSelectionColor = SystemColors.Highlight;
            _symbol = new SimpleSymbol();
            _orientation = Orientation.Horizontal;
            _roundingRadius = 6;
            RefreshBoxes();
        }

        // Creates box i: positions it along the chosen orientation and assigns it
        // a symbol size — the minimum for the first box, the maximum for the last,
        // and a linear interpolation between them for the boxes in between.
        private void CreateBox(int i)
        {
            SizeBox sb = new SizeBox();
            int x = 1;
            int y = 1;
            if (_orientation == Orientation.Horizontal)
            {
                x = ((_boxSize.Width + 2) * i) + 1;
            }
            else
            {
                y = ((_boxSize.Height + 2) * i) + 1;
            }

            sb.Bounds = new Rectangle(x, y, _boxSize.Width, _boxSize.Height);
            sb.BackColor = _boxBackColor;
            sb.SelectionColor = _boxSelectionColor;
            sb.RoundingRadius = _roundingRadius;
            if (i == 0)
            {
                sb.Size = _minSize != null ? _minSize.Copy() : new Size2D(4, 4);
            }
            else if (i == _numBoxes - 1)
            {
                sb.Size = _maxSize != null ? _maxSize.Copy() : new Size2D(32, 32);
            }
            else
            {
                if (_minSize != null && _maxSize != null)
                {
                    // Because of the elses, the number of boxes must be greater
                    // than 2 and the current item is neither the min nor the max.
                    // Interpolate linearly between min and max.
                    double cw = (_maxSize.Width - _minSize.Width) / _numBoxes;
                    double ch = (_maxSize.Height - _minSize.Height) / _numBoxes;
                    sb.Size = new Size2D(_minSize.Width + (cw * i), _minSize.Height + (ch * i));
                }
                else
                {
                    sb.Size = new Size2D(16, 16);
                }
            }

            _boxes.Add(sb);
        }

        #endregion
    }
}
/* * Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; using System.IO; using Amazon.Runtime; using Amazon.Runtime.Internal; namespace Amazon.SecurityToken.Model { /// <summary> /// Container for the parameters to the AssumeRole operation. /// <para> Returns a set of temporary security credentials (consisting of an access key ID, a secret access key, and a security token) that you /// can use to access AWS resources that you might not normally have access to. Typically, you use <c>AssumeRole</c> for cross-account access or /// federation. </para> <para> For cross-account access, imagine that you own multiple accounts and need to access resources in each account. /// You could create long-term credentials in each account to access those resources. However, managing all those credentials and remembering /// which one can access which account can be time consuming. Instead, you can create one set of long-term credentials in one account and then /// use temporary security credentials to access all the other accounts by assuming roles in those accounts. For more information about roles, /// see Roles in <i>Using IAM</i> . /// </para> <para> For federation, you can, for example, grant single sign-on access to the AWS Management Console. 
If you already have an /// identity and authentication system in your corporate network, you don't have to recreate user identities in AWS in order to grant those user /// identities access to AWS. Instead, after a user has been authenticated, you call <c>AssumeRole</c> (and specify the role with the /// appropriate permissions) to get temporary security credentials for that user. With those temporary security credentials, you construct a /// sign-in URL that users can use to access the console. For more information, see Scenarios for Granting Temporary Access in <i>AWS Security /// Token Service</i> . /// </para> <para> The temporary security credentials are valid for the duration that you specified when calling <c>AssumeRole</c> , which /// can be from 900 seconds (15 minutes) to 3600 seconds (1 hour). The default is 1 hour. </para> <para> The temporary security credentials that /// are returned from the <c>AssumeRoleWithWebIdentity</c> response have the permissions that are associated with the access policy of the role /// being assumed and any policies that are associated with the AWS resource being accessed. You can further restrict the permissions of the /// temporary security credentials by passing a policy in the request. The resulting permissions are an intersection of the role's access policy /// and the policy that you passed. These policies and any applicable resource-based policies are evaluated when calls to AWS service APIs are /// made using the temporary security credentials. </para> <para> To assume a role, your AWS account must be trusted by the role. The trust /// relationship is defined in the role's trust policy when the IAM role is created. You must also have a policy that allows you to call /// <c>sts:AssumeRole</c> . /// </para> <para> <b>Important:</b> You cannot call <c>Assumerole</c> by using AWS account credentials; access will be denied. You must use /// IAM user credentials or temporary security credentials to call <c>AssumeRole</c> . 
/// </para> /// </summary> /// <seealso cref="Amazon.SecurityToken.AmazonSecurityTokenService.AssumeRole"/> public class AssumeRoleRequest : AmazonWebServiceRequest { private string roleArn; private string roleSessionName; private string policy; private int? durationSeconds; private string externalId; /// <summary> /// The Amazon Resource Name (ARN) of the role that the caller is assuming. /// /// <para> /// <b>Constraints:</b> /// <list type="definition"> /// <item> /// <term>Length</term> /// <description>20 - 2048</description> /// </item> /// </list> /// </para> /// </summary> public string RoleArn { get { return this.roleArn; } set { this.roleArn = value; } } /// <summary> /// Sets the RoleArn property /// </summary> /// <param name="roleArn">The value to set for the RoleArn property </param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public AssumeRoleRequest WithRoleArn(string roleArn) { this.roleArn = roleArn; return this; } // Check to see if RoleArn property is set internal bool IsSetRoleArn() { return this.roleArn != null; } /// <summary> /// An identifier for the assumed role session. The session name is included as part of the <c>AssumedRoleUser</c>. 
/// /// <para> /// <b>Constraints:</b> /// <list type="definition"> /// <item> /// <term>Length</term> /// <description>2 - 32</description> /// </item> /// <item> /// <term>Pattern</term> /// <description>[\w+=,.@-]*</description> /// </item> /// </list> /// </para> /// </summary> public string RoleSessionName { get { return this.roleSessionName; } set { this.roleSessionName = value; } } /// <summary> /// Sets the RoleSessionName property /// </summary> /// <param name="roleSessionName">The value to set for the RoleSessionName property </param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public AssumeRoleRequest WithRoleSessionName(string roleSessionName) { this.roleSessionName = roleSessionName; return this; } // Check to see if RoleSessionName property is set internal bool IsSetRoleSessionName() { return this.roleSessionName != null; } /// <summary> /// A supplemental policy that is associated with the temporary security credentials from the <c>AssumeRole</c> call. The resulting permissions /// of the temporary security credentials are an intersection of this policy and the access policy that is associated with the role. Use this /// policy to further restrict the permissions of the temporary security credentials. 
/// /// <para> /// <b>Constraints:</b> /// <list type="definition"> /// <item> /// <term>Length</term> /// <description>1 - 2048</description> /// </item> /// <item> /// <term>Pattern</term> /// <description>[\u0009\u000A\u000D\u0020-\u00FF]+</description> /// </item> /// </list> /// </para> /// </summary> public string Policy { get { return this.policy; } set { this.policy = value; } } /// <summary> /// Sets the Policy property /// </summary> /// <param name="policy">The value to set for the Policy property </param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public AssumeRoleRequest WithPolicy(string policy) { this.policy = policy; return this; } // Check to see if Policy property is set internal bool IsSetPolicy() { return this.policy != null; } /// <summary> /// The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) to 3600 seconds (1 hour). By default, the /// value is set to 3600 seconds. /// /// <para> /// <b>Constraints:</b> /// <list type="definition"> /// <item> /// <term>Range</term> /// <description>900 - 3600</description> /// </item> /// </list> /// </para> /// </summary> public int DurationSeconds { get { return this.durationSeconds ?? default(int); } set { this.durationSeconds = value; } } /// <summary> /// Sets the DurationSeconds property /// </summary> /// <param name="durationSeconds">The value to set for the DurationSeconds property </param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. 
See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public AssumeRoleRequest WithDurationSeconds(int durationSeconds) { this.durationSeconds = durationSeconds; return this; } // Check to see if DurationSeconds property is set internal bool IsSetDurationSeconds() { return this.durationSeconds.HasValue; } /// <summary> /// A unique identifier that is used by third parties to assume a role in their customers' accounts. For each role that the third party can /// assume, they should instruct their customers to create a role with the external ID that the third party generated. Each time the third party /// assumes the role, they must pass the customer's external ID. The external ID is useful in order to help third parties bind a role to the /// customer who created it. For more information about the external ID, see <a /// href="http://docs.aws.amazon.com/STS/latest/UsingSTS/sts-delegating-externalid.html" target="_blank">About the External ID</a> in <i>Using /// Temporary Security Credentials</i>. /// /// <para> /// <b>Constraints:</b> /// <list type="definition"> /// <item> /// <term>Length</term> /// <description>2 - 96</description> /// </item> /// <item> /// <term>Pattern</term> /// <description>[\w+=,.@:-]*</description> /// </item> /// </list> /// </para> /// </summary> public string ExternalId { get { return this.externalId; } set { this.externalId = value; } } /// <summary> /// Sets the ExternalId property /// </summary> /// <param name="externalId">The value to set for the ExternalId property </param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public AssumeRoleRequest WithExternalId(string externalId) { this.externalId = externalId; return this; } // Check to see if ExternalId property is set internal bool IsSetExternalId() { return this.externalId != null; } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
**
**
**
**
** Purpose: List for exceptions.
**
** ===========================================================*/

using System.Diagnostics.Contracts;

namespace System.Collections
{
    /// This is a simple implementation of IDictionary using a singly linked list. This
    /// will be smaller and faster than a Hashtable if the number of elements is 10 or less.
    /// This should not be used if performance is important for large numbers of elements.
    [Serializable]
    [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
    internal class ListDictionaryInternal : IDictionary
    {
        private DictionaryNode head; // Do not rename (binary serialization)
        private int version;         // Do not rename (binary serialization); bumped on every mutation to invalidate live enumerators
        private int count;           // Do not rename (binary serialization)
        [NonSerialized]
        private Object _syncRoot;

        public ListDictionaryInternal()
        {
        }

        /// <summary>
        /// Gets or sets the value for <paramref name="key"/>. The getter walks the
        /// list and returns null when the key is absent; the setter replaces an
        /// existing entry in place, or appends a new node at the tail.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="key"/> is null.</exception>
        public Object this[Object key]
        {
            get
            {
                if (key == null)
                {
                    throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
                }
                Contract.EndContractBlock();
                DictionaryNode node = head;
                while (node != null)
                {
                    if (node.key.Equals(key))
                    {
                        return node.value;
                    }
                    node = node.next;
                }
                return null;
            }
            set
            {
                if (key == null)
                {
                    throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
                }
                Contract.EndContractBlock();
                version++;
                DictionaryNode last = null;
                DictionaryNode node;
                for (node = head; node != null; node = node.next)
                {
                    if (node.key.Equals(key))
                    {
                        break;
                    }
                    last = node;
                }
                if (node != null)
                {
                    // Found it: overwrite the existing value.
                    node.value = value;
                    return;
                }
                // Not found, so add a new one at the tail (or as the new head).
                DictionaryNode newNode = new DictionaryNode();
                newNode.key = key;
                newNode.value = value;
                if (last != null)
                {
                    last.next = newNode;
                }
                else
                {
                    head = newNode;
                }
                count++;
            }
        }

        public int Count
        {
            get { return count; }
        }

        public ICollection Keys
        {
            get { return new NodeKeyValueCollection(this, true); }
        }

        public bool IsReadOnly
        {
            get { return false; }
        }

        public bool IsFixedSize
        {
            get { return false; }
        }

        public bool IsSynchronized
        {
            get { return false; }
        }

        public Object SyncRoot
        {
            get
            {
                // Lazily create the sync object; CompareExchange guarantees all
                // callers observe the same instance even under races.
                if (_syncRoot == null)
                {
                    System.Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new Object(), null);
                }
                return _syncRoot;
            }
        }

        public ICollection Values
        {
            get { return new NodeKeyValueCollection(this, false); }
        }

        /// <summary>
        /// Appends a new key/value pair at the tail of the list.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="key"/> is null.</exception>
        /// <exception cref="ArgumentException">The key is already present.</exception>
        public void Add(Object key, Object value)
        {
            if (key == null)
            {
                throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
            }
            Contract.EndContractBlock();
            version++;
            DictionaryNode last = null;
            for (DictionaryNode node = head; node != null; node = node.next)
            {
                if (node.key.Equals(key))
                {
                    throw new ArgumentException(SR.Format(SR.Argument_AddingDuplicate__, node.key, key));
                }
                last = node;
            }
            // The loop above either threw on a duplicate key or ran off the end of
            // the list, so the key is definitely absent: append a new node. (A
            // previously present dead "found it" branch here could never execute.)
            DictionaryNode newNode = new DictionaryNode();
            newNode.key = key;
            newNode.value = value;
            if (last != null)
            {
                last.next = newNode;
            }
            else
            {
                head = newNode;
            }
            count++;
        }

        public void Clear()
        {
            count = 0;
            head = null;
            version++;
        }

        /// <summary>Returns true if <paramref name="key"/> is present.</summary>
        /// <exception cref="ArgumentNullException"><paramref name="key"/> is null.</exception>
        public bool Contains(Object key)
        {
            if (key == null)
            {
                throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
            }
            Contract.EndContractBlock();
            for (DictionaryNode node = head; node != null; node = node.next)
            {
                if (node.key.Equals(key))
                {
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        /// Copies the entries, as <see cref="DictionaryEntry"/> values, into
        /// <paramref name="array"/> starting at <paramref name="index"/>.
        /// </summary>
        public void CopyTo(Array array, int index)
        {
            if (array == null)
                throw new ArgumentNullException(nameof(array));
            if (array.Rank != 1)
                throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);
            if (index < 0)
                throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
            if (array.Length - index < this.Count)
                throw new ArgumentException(SR.ArgumentOutOfRange_Index, nameof(index));
            Contract.EndContractBlock();
            for (DictionaryNode node = head; node != null; node = node.next)
            {
                array.SetValue(new DictionaryEntry(node.key, node.value), index);
                index++;
            }
        }

        public IDictionaryEnumerator GetEnumerator()
        {
            return new NodeEnumerator(this);
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return new NodeEnumerator(this);
        }

        /// <summary>
        /// Removes the entry for <paramref name="key"/> if present; no-op otherwise.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="key"/> is null.</exception>
        public void Remove(Object key)
        {
            if (key == null)
            {
                throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
            }
            Contract.EndContractBlock();
            version++;
            DictionaryNode last = null;
            DictionaryNode node;
            for (node = head; node != null; node = node.next)
            {
                if (node.key.Equals(key))
                {
                    break;
                }
                last = node;
            }
            if (node == null)
            {
                return;
            }
            if (node == head)
            {
                head = node.next;
            }
            else
            {
                last.next = node.next;
            }
            count--;
        }

        // Fail-fast enumerator over the node list; throws if the dictionary is
        // mutated (version changes) during enumeration.
        private class NodeEnumerator : IDictionaryEnumerator
        {
            private ListDictionaryInternal list;
            private DictionaryNode current;
            private int version;  // snapshot taken at construction
            private bool start;   // true until the first MoveNext

            public NodeEnumerator(ListDictionaryInternal list)
            {
                this.list = list;
                version = list.version;
                start = true;
                current = null;
            }

            public Object Current
            {
                get { return Entry; }
            }

            public DictionaryEntry Entry
            {
                get
                {
                    if (current == null)
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }
                    return new DictionaryEntry(current.key, current.value);
                }
            }

            public Object Key
            {
                get
                {
                    if (current == null)
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }
                    return current.key;
                }
            }

            public Object Value
            {
                get
                {
                    if (current == null)
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }
                    return current.value;
                }
            }

            public bool MoveNext()
            {
                if (version != list.version)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                }
                if (start)
                {
                    current = list.head;
                    start = false;
                }
                else
                {
                    if (current != null)
                    {
                        current = current.next;
                    }
                }
                return (current != null);
            }

            public void Reset()
            {
                if (version != list.version)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                }
                start = true;
                current = null;
            }
        }

        // Live view over either the keys or the values (selected by isKeys),
        // backing the Keys and Values properties.
        private class NodeKeyValueCollection : ICollection
        {
            private ListDictionaryInternal list;
            private bool isKeys;

            public NodeKeyValueCollection(ListDictionaryInternal list, bool isKeys)
            {
                this.list = list;
                this.isKeys = isKeys;
            }

            void ICollection.CopyTo(Array array, int index)
            {
                if (array == null)
                    throw new ArgumentNullException(nameof(array));
                if (array.Rank != 1)
                    throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);
                if (index < 0)
                    throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
                Contract.EndContractBlock();
                if (array.Length - index < list.Count)
                    throw new ArgumentException(SR.ArgumentOutOfRange_Index, nameof(index));
                for (DictionaryNode node = list.head; node != null; node = node.next)
                {
                    array.SetValue(isKeys ? node.key : node.value, index);
                    index++;
                }
            }

            int ICollection.Count
            {
                get
                {
                    // O(n) walk; not cached because the view is live.
                    int count = 0;
                    for (DictionaryNode node = list.head; node != null; node = node.next)
                    {
                        count++;
                    }
                    return count;
                }
            }

            bool ICollection.IsSynchronized
            {
                get { return false; }
            }

            Object ICollection.SyncRoot
            {
                get { return list.SyncRoot; }
            }

            IEnumerator IEnumerable.GetEnumerator()
            {
                return new NodeKeyValueEnumerator(list, isKeys);
            }

            // Fail-fast enumerator over keys or values, mirroring NodeEnumerator.
            private class NodeKeyValueEnumerator : IEnumerator
            {
                private ListDictionaryInternal list;
                private DictionaryNode current;
                private int version;
                private bool isKeys;
                private bool start;

                public NodeKeyValueEnumerator(ListDictionaryInternal list, bool isKeys)
                {
                    this.list = list;
                    this.isKeys = isKeys;
                    version = list.version;
                    start = true;
                    current = null;
                }

                public Object Current
                {
                    get
                    {
                        if (current == null)
                        {
                            throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                        }
                        return isKeys ? current.key : current.value;
                    }
                }

                public bool MoveNext()
                {
                    if (version != list.version)
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                    }
                    if (start)
                    {
                        current = list.head;
                        start = false;
                    }
                    else
                    {
                        if (current != null)
                        {
                            current = current.next;
                        }
                    }
                    return (current != null);
                }

                public void Reset()
                {
                    if (version != list.version)
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                    }
                    start = true;
                    current = null;
                }
            }
        }

        // Single link in the list. Serializable as part of the dictionary's
        // binary serialization format.
        [Serializable]
        private class DictionaryNode
        {
            public Object key;
            public Object value;
            public DictionaryNode next;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Diagnostics
{
    using System;
    using System.Collections;
    using System.Text;
    using System.Threading;
    using System.Security;
    using System.Security.Permissions;
    using System.IO;
    using System.Reflection;
    using System.Runtime.InteropServices;
    using System.Runtime.CompilerServices;
    using System.Globalization;
    using System.Runtime.Serialization;
    using System.Runtime.Versioning;
    using System.Diagnostics.Contracts;

    // READ ME:
    // Modifying the order or fields of this object may require other changes
    // to the unmanaged definition of the StackFrameHelper class, in
    // VM\DebugDebugger.h. The binder will catch some of these layout problems.
    //
    // Container that the runtime fills in with per-frame stack data (method
    // handles, offsets, source info) via GetStackFramesInternal; consumed by
    // StackTrace. Field order/layout is mirrored by the VM — do not reorder.
    [Serializable]
    internal class StackFrameHelper : IDisposable
    {
        // Thread whose stack is being captured (null means current thread —
        // NOTE(review): inferred from the deprecated StackTrace(Thread, ...) ctor; confirm).
        [NonSerialized]
        private Thread targetThread;
        private int[] rgiOffset;    // per-frame native offsets
        private int[] rgiILOffset;  // per-frame IL offsets
        // this field is here only for backwards compatibility of serialization format
        private MethodBase[] rgMethodBase;
#pragma warning disable 414
        // dynamicMethods is an array of System.Resolver objects, used to keep
        // DynamicMethodDescs alive for the lifetime of StackFrameHelper.
        private Object dynamicMethods; // Field is not used from managed.
        [NonSerialized]
        private IntPtr[] rgMethodHandle;  // runtime method handles; converted to/from rgMethodBase for serialization
        // Per-frame module/PDB data used to resolve source file/line info.
        private String[] rgAssemblyPath;
        private IntPtr[] rgLoadedPeAddress;
        private int[] rgiLoadedPeSize;
        private IntPtr[] rgInMemoryPdbAddress;
        private int[] rgiInMemoryPdbSize;
        // if rgiMethodToken[i] == 0, then don't attempt to get the portable PDB source/info
        private int[] rgiMethodToken;
        // Resolved source info (may stay null when file info was not requested).
        private String[] rgFilename;
        private int[] rgiLineNumber;
        private int[] rgiColumnNumber;
#if FEATURE_EXCEPTIONDISPATCHINFO
        [OptionalField]
        private bool[] rgiLastFrameFromForeignExceptionStackTrace;
#endif // FEATURE_EXCEPTIONDISPATCHINFO
        // Bound lazily in InitializeSourceInfo to the portable-PDB reader's
        // GetSourceLineInfo method; its target is disposed in Dispose.
        private GetSourceLineInfoDelegate getSourceLineInfo;
        private int iFrameCount;
#pragma warning restore 414

        private delegate void GetSourceLineInfoDelegate(string assemblyPath, IntPtr loadedPeAddress, int loadedPeSize,
            IntPtr inMemoryPdbAddress, int inMemoryPdbSize, int methodToken, int ilOffset,
            out string sourceFile, out int sourceLine, out int sourceColumn);

#if FEATURE_CORECLR
        // Cached reflection info for System.Diagnostics.StackTraceSymbols,
        // resolved once per process.
        private static Type s_symbolsType = null;
        private static MethodInfo s_symbolsMethodInfo = null;
        // Guards against reentering InitializeSourceInfo when the symbol-reading
        // code below itself faults and captures a stack trace.
        [ThreadStatic]
        private static int t_reentrancy = 0;
#endif

        /// <summary>
        /// Creates an empty helper for <paramref name="target"/>; all per-frame
        /// arrays start null and are populated by the runtime.
        /// </summary>
        public StackFrameHelper(Thread target)
        {
            targetThread = target;
            rgMethodBase = null;
            rgMethodHandle = null;
            rgiMethodToken = null;
            rgiOffset = null;
            rgiILOffset = null;
            rgAssemblyPath = null;
            rgLoadedPeAddress = null;
            rgiLoadedPeSize = null;
            rgInMemoryPdbAddress = null;
            rgiInMemoryPdbSize = null;
            dynamicMethods = null;
            rgFilename = null;
            rgiLineNumber = null;
            rgiColumnNumber = null;
            getSourceLineInfo = null;
#if FEATURE_EXCEPTIONDISPATCHINFO
            rgiLastFrameFromForeignExceptionStackTrace = null;
#endif // FEATURE_EXCEPTIONDISPATCHINFO

            // 0 means capture all frames. For StackTraces from an Exception, the EE always
            // captures all frames. For other uses of StackTraces, we can abort stack walking after
            // some limit if we want to by setting this to a non-zero value. In Whidbey this was
            // hard-coded to 512, but some customers complained. There shouldn't be any need to limit
            // this as memory/CPU is no longer allocated up front. If there is some reason to provide a
            // limit in the future, then we should expose it in the managed API so applications can
            // override it.
            iFrameCount = 0;
        }

        //
        // Initializes the stack trace helper. If fNeedFileInfo is true, initializes rgFilename,
        // rgiLineNumber and rgiColumnNumber fields using the portable PDB reader if not already
        // done by GetStackFramesInternal (on Windows for old PDB format).
        //
        internal void InitializeSourceInfo(int iSkip, bool fNeedFileInfo, Exception exception)
        {
            StackTrace.GetStackFramesInternal(this, iSkip, fNeedFileInfo, exception);
#if FEATURE_CORECLR
            if (!fNeedFileInfo)
                return;

            // Check if this function is being reentered because of an exception in the code below
            if (t_reentrancy > 0)
                return;

            t_reentrancy++;
            try
            {
                if (s_symbolsMethodInfo == null)
                {
                    // Load the optional portable-PDB symbol reader by name; if the
                    // assembly isn't present, silently skip source info.
                    s_symbolsType = Type.GetType(
                        "System.Diagnostics.StackTraceSymbols, System.Diagnostics.StackTrace, Version=1.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a",
                        throwOnError: false);

                    if (s_symbolsType == null)
                        return;

                    s_symbolsMethodInfo = s_symbolsType.GetMethod("GetSourceLineInfo");
                    if (s_symbolsMethodInfo == null)
                        return;
                }

                if (getSourceLineInfo == null)
                {
                    // Create an instance of System.Diagnostics.Stacktrace.Symbols
                    object target = Activator.CreateInstance(s_symbolsType);

                    // Create an instance delegate for the GetSourceLineInfo method
                    getSourceLineInfo = (GetSourceLineInfoDelegate)s_symbolsMethodInfo.CreateDelegate(typeof(GetSourceLineInfoDelegate), target);
                }

                for (int index = 0; index < iFrameCount; index++)
                {
                    // If there was some reason not to try to get the symbols from the portable PDB reader like the module was
                    // ENC or the source/line info was already retrieved, the method token is 0.
                    if (rgiMethodToken[index] != 0)
                    {
                        getSourceLineInfo(rgAssemblyPath[index], rgLoadedPeAddress[index], rgiLoadedPeSize[index],
                            rgInMemoryPdbAddress[index], rgiInMemoryPdbSize[index], rgiMethodToken[index],
                            rgiILOffset[index], out rgFilename[index], out rgiLineNumber[index], out rgiColumnNumber[index]);
                    }
                }
            }
            catch
            {
                // Best-effort: source info is cosmetic, so any failure in the
                // symbol reader leaves the frames without file/line data.
            }
            finally
            {
                t_reentrancy--;
            }
#endif
        }

        // Disposes the symbol-reader instance held by the delegate, if any.
        void IDisposable.Dispose()
        {
#if FEATURE_CORECLR
            if (getSourceLineInfo != null)
            {
                IDisposable disposable = getSourceLineInfo.Target as IDisposable;
                if (disposable != null)
                {
                    disposable.Dispose();
                }
            }
#endif
        }

        /// <summary>
        /// Resolves frame <paramref name="i"/>'s runtime method handle to a
        /// <see cref="MethodBase"/>; returns null for a null handle.
        /// </summary>
        [System.Security.SecuritySafeCritical]
        public virtual MethodBase GetMethodBase(int i)
        {
            // There may be a better way to do this.
            // we got RuntimeMethodHandles here and we need to go to MethodBase
            // but we don't know whether the reflection info has been initialized
            // or not. So we call GetMethods and GetConstructors on the type
            // and then we fetch the proper MethodBase!!
            IntPtr mh = rgMethodHandle[i];

            if (mh.IsNull())
                return null;

            IRuntimeMethodInfo mhReal = RuntimeMethodHandle.GetTypicalMethodDefinition(new RuntimeMethodInfoStub(mh, this));

            return RuntimeType.GetMethodBase(mhReal);
        }

        // Per-frame accessors. The source-info getters return null/0 when file
        // info was never populated (fNeedFileInfo false or no PDB found).
        public virtual int GetOffset(int i) { return rgiOffset[i]; }
        public virtual int GetILOffset(int i) { return rgiILOffset[i]; }
        public virtual String GetFilename(int i) { return rgFilename == null ? null : rgFilename[i]; }
        public virtual int GetLineNumber(int i) { return rgiLineNumber == null ? 0 : rgiLineNumber[i]; }
        public virtual int GetColumnNumber(int i) { return rgiColumnNumber == null ? 0 : rgiColumnNumber[i]; }

#if FEATURE_EXCEPTIONDISPATCHINFO
        // False when the optional array was never populated (e.g. deserialized
        // from an older format, hence [OptionalField]).
        public virtual bool IsLastFrameFromForeignExceptionStackTrace(int i)
        {
            return (rgiLastFrameFromForeignExceptionStackTrace == null) ? false : rgiLastFrameFromForeignExceptionStackTrace[i];
        }
#endif // FEATURE_EXCEPTIONDISPATCHINFO

        public virtual int GetNumberOfFrames() { return iFrameCount; }
        public virtual void SetNumberOfFrames(int i) { iFrameCount = i; }

        //
        // serialization implementation
        //
        [OnSerializing]
        [SecuritySafeCritical]
        void OnSerializing(StreamingContext context)
        {
            // this is called in the process of serializing this object.
            // For compatibility with Everett we need to assign the rgMethodBase field as that is the field
            // that will be serialized
            rgMethodBase = (rgMethodHandle == null) ? null : new MethodBase[rgMethodHandle.Length];
            if (rgMethodHandle != null)
            {
                for (int i = 0; i < rgMethodHandle.Length; i++)
                {
                    if (!rgMethodHandle[i].IsNull())
                        rgMethodBase[i] = RuntimeType.GetMethodBase(new RuntimeMethodInfoStub(rgMethodHandle[i], this));
                }
            }
        }

        [OnSerialized]
        void OnSerialized(StreamingContext context)
        {
            // after we are done serializing null the rgMethodBase field
            rgMethodBase = null;
        }

        [OnDeserialized]
        [SecuritySafeCritical]
        void OnDeserialized(StreamingContext context)
        {
            // after we are done deserializing we need to transform the rgMethodBase in rgMethodHandle
            rgMethodHandle = (rgMethodBase == null) ? null : new IntPtr[rgMethodBase.Length];
            if (rgMethodBase != null)
            {
                for (int i = 0; i < rgMethodBase.Length; i++)
                {
                    if (rgMethodBase[i] != null)
                        rgMethodHandle[i] = rgMethodBase[i].MethodHandle.Value;
                }
            }
            rgMethodBase = null;
        }
    }

    // Class which represents a description of a stack trace
    // There is no good reason for the methods of this class to be virtual.
    // In order to ensure trusted code can trust the data it gets from a
    // StackTrace, we use an InheritanceDemand to prevent partially-trusted
    // subclasses.
#if !FEATURE_CORECLR [SecurityPermission(SecurityAction.InheritanceDemand, UnmanagedCode=true)] #endif [Serializable] [System.Runtime.InteropServices.ComVisible(true)] public class StackTrace { private StackFrame[] frames; private int m_iNumOfFrames; public const int METHODS_TO_SKIP = 0; private int m_iMethodsToSkip; // Constructs a stack trace from the current location. #if FEATURE_CORECLR [System.Security.SecuritySafeCritical] #endif public StackTrace() { m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(METHODS_TO_SKIP, false, null, null); } // Constructs a stack trace from the current location. // #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif public StackTrace(bool fNeedFileInfo) { m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(METHODS_TO_SKIP, fNeedFileInfo, null, null); } // Constructs a stack trace from the current location, in a caller's // frame // #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif public StackTrace(int skipFrames) { if (skipFrames < 0) throw new ArgumentOutOfRangeException("skipFrames", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); Contract.EndContractBlock(); m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(skipFrames+METHODS_TO_SKIP, false, null, null); } // Constructs a stack trace from the current location, in a caller's // frame // #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif public StackTrace(int skipFrames, bool fNeedFileInfo) { if (skipFrames < 0) throw new ArgumentOutOfRangeException("skipFrames", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); Contract.EndContractBlock(); m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(skipFrames+METHODS_TO_SKIP, fNeedFileInfo, null, null); } // Constructs a stack trace from the current location. 
public StackTrace(Exception e) { if (e == null) throw new ArgumentNullException("e"); Contract.EndContractBlock(); m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(METHODS_TO_SKIP, false, null, e); } // Constructs a stack trace from the current location. // #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif public StackTrace(Exception e, bool fNeedFileInfo) { if (e == null) throw new ArgumentNullException("e"); Contract.EndContractBlock(); m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(METHODS_TO_SKIP, fNeedFileInfo, null, e); } // Constructs a stack trace from the current location, in a caller's // frame // #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif public StackTrace(Exception e, int skipFrames) { if (e == null) throw new ArgumentNullException("e"); if (skipFrames < 0) throw new ArgumentOutOfRangeException("skipFrames", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); Contract.EndContractBlock(); m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(skipFrames+METHODS_TO_SKIP, false, null, e); } // Constructs a stack trace from the current location, in a caller's // frame // #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif public StackTrace(Exception e, int skipFrames, bool fNeedFileInfo) { if (e == null) throw new ArgumentNullException("e"); if (skipFrames < 0) throw new ArgumentOutOfRangeException("skipFrames", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); Contract.EndContractBlock(); m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(skipFrames+METHODS_TO_SKIP, fNeedFileInfo, null, e); } // Constructs a "fake" stack trace, just containing a single frame. // Does not have the overhead of a full stack trace. 
// public StackTrace(StackFrame frame) { frames = new StackFrame[1]; frames[0] = frame; m_iMethodsToSkip = 0; m_iNumOfFrames = 1; } // Constructs a stack trace for the given thread // #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif [Obsolete("This constructor has been deprecated. Please use a constructor that does not require a Thread parameter. http://go.microsoft.com/fwlink/?linkid=14202")] public StackTrace(Thread targetThread, bool needFileInfo) { m_iNumOfFrames = 0; m_iMethodsToSkip = 0; CaptureStackTrace(METHODS_TO_SKIP, needFileInfo, targetThread, null); } [System.Security.SecuritySafeCritical] [MethodImplAttribute(MethodImplOptions.InternalCall)] internal static extern void GetStackFramesInternal(StackFrameHelper sfh, int iSkip, bool fNeedFileInfo, Exception e); internal static int CalculateFramesToSkip(StackFrameHelper StackF, int iNumFrames) { int iRetVal = 0; String PackageName = "System.Diagnostics"; // Check if this method is part of the System.Diagnostics // package. If so, increment counter keeping track of // System.Diagnostics functions for (int i = 0; i < iNumFrames; i++) { MethodBase mb = StackF.GetMethodBase(i); if (mb != null) { Type t = mb.DeclaringType; if (t == null) break; String ns = t.Namespace; if (ns == null) break; if (String.Compare(ns, PackageName, StringComparison.Ordinal) != 0) break; } iRetVal++; } return iRetVal; } // Retrieves an object with stack trace information encoded. // It leaves out the first "iSkip" lines of the stacktrace. 
// private void CaptureStackTrace(int iSkip, bool fNeedFileInfo, Thread targetThread, Exception e) { m_iMethodsToSkip += iSkip; using (StackFrameHelper StackF = new StackFrameHelper(targetThread)) { StackF.InitializeSourceInfo(0, fNeedFileInfo, e); m_iNumOfFrames = StackF.GetNumberOfFrames(); if (m_iMethodsToSkip > m_iNumOfFrames) m_iMethodsToSkip = m_iNumOfFrames; if (m_iNumOfFrames != 0) { frames = new StackFrame[m_iNumOfFrames]; for (int i = 0; i < m_iNumOfFrames; i++) { bool fDummy1 = true; bool fDummy2 = true; StackFrame sfTemp = new StackFrame(fDummy1, fDummy2); sfTemp.SetMethodBase(StackF.GetMethodBase(i)); sfTemp.SetOffset(StackF.GetOffset(i)); sfTemp.SetILOffset(StackF.GetILOffset(i)); #if FEATURE_EXCEPTIONDISPATCHINFO sfTemp.SetIsLastFrameFromForeignExceptionStackTrace(StackF.IsLastFrameFromForeignExceptionStackTrace(i)); #endif // FEATURE_EXCEPTIONDISPATCHINFO if (fNeedFileInfo) { sfTemp.SetFileName(StackF.GetFilename(i)); sfTemp.SetLineNumber(StackF.GetLineNumber(i)); sfTemp.SetColumnNumber(StackF.GetColumnNumber(i)); } frames[i] = sfTemp; } // CalculateFramesToSkip skips all frames in the System.Diagnostics namespace, // but this is not desired if building a stack trace from an exception. if (e == null) m_iMethodsToSkip += CalculateFramesToSkip(StackF, m_iNumOfFrames); m_iNumOfFrames -= m_iMethodsToSkip; if (m_iNumOfFrames < 0) { m_iNumOfFrames = 0; } } // In case this is the same object being re-used, set frames to null else frames = null; } } // Property to get the number of frames in the stack trace // public virtual int FrameCount { get { return m_iNumOfFrames;} } // Returns a given stack frame. Stack frames are numbered starting at // zero, which is the last stack frame pushed. // public virtual StackFrame GetFrame(int index) { if ((frames != null) && (index < m_iNumOfFrames) && (index >= 0)) return frames[index+m_iMethodsToSkip]; return null; } // Returns an array of all stack frames for this stacktrace. 
// The array is ordered and sized such that GetFrames()[i] == GetFrame(i) // The nth element of this array is the same as GetFrame(n). // The length of the array is the same as FrameCount. // [ComVisible(false)] public virtual StackFrame [] GetFrames() { if (frames == null || m_iNumOfFrames <= 0) return null; // We have to return a subset of the array. Unfortunately this // means we have to allocate a new array and copy over. StackFrame [] array = new StackFrame[m_iNumOfFrames]; Array.Copy(frames, m_iMethodsToSkip, array, 0, m_iNumOfFrames); return array; } // Builds a readable representation of the stack trace // #if FEATURE_CORECLR [System.Security.SecuritySafeCritical] #endif public override String ToString() { // Include a trailing newline for backwards compatibility return ToString(TraceFormat.TrailingNewLine); } // TraceFormat is Used to specify options for how the // string-representation of a StackTrace should be generated. internal enum TraceFormat { Normal, TrailingNewLine, // include a trailing new line character NoResourceLookup // to prevent infinite resource recusion } // Builds a readable representation of the stack trace, specifying // the format for backwards compatibility. 
#if FEATURE_CORECLR
[System.Security.SecurityCritical]  // auto-generated
#endif
// Formats the captured frames as the familiar "at Type.Method(args) in file:line N"
// text.  traceFormat controls the trailing newline and whether localized resource
// strings may be looked up (NoResourceLookup avoids recursing into resource code
// while formatting an exception raised *by* resource lookup).
internal String ToString(TraceFormat traceFormat)
{
    bool displayFilenames = true;   // we'll try, but demand may fail
    String word_At = "at";
    String inFileLineNum = "in {0}:line {1}";

    if(traceFormat != TraceFormat.NoResourceLookup)
    {
        // Localized forms of "at" / "in {file}:line {line}".
        word_At = Environment.GetResourceString("Word_At");
        inFileLineNum = Environment.GetResourceString("StackTrace_InFileLineNumber");
    }

    bool fFirstFrame = true;
    StringBuilder sb = new StringBuilder(255);
    for (int iFrameIndex = 0; iFrameIndex < m_iNumOfFrames; iFrameIndex++)
    {
        StackFrame sf = GetFrame(iFrameIndex);
        MethodBase mb = sf.GetMethod();
        if (mb != null)
        {
            // We want a newline at the end of every line except for the last
            if (fFirstFrame)
                fFirstFrame = false;
            else
                sb.Append(Environment.NewLine);

            sb.AppendFormat(CultureInfo.InvariantCulture, " {0} ", word_At);

            Type t = mb.DeclaringType;
            // if there is a type (non global method) print it
            if (t != null)
            {
                // Append t.FullName, replacing '+' (nested-type separator) with '.'
                string fullName = t.FullName;
                for (int i = 0; i < fullName.Length; i++)
                {
                    char ch = fullName[i];
                    sb.Append(ch == '+' ? '.' : ch);
                }
                sb.Append('.');
            }
            sb.Append(mb.Name);

            // deal with the generic portion of the method, e.g. "Foo[T,U]"
            if (mb is MethodInfo && ((MethodInfo)mb).IsGenericMethod)
            {
                Type[] typars = ((MethodInfo)mb).GetGenericArguments();
                sb.Append('[');
                int k=0;
                bool fFirstTyParam = true;
                while (k < typars.Length)
                {
                    if (fFirstTyParam == false)
                        sb.Append(',');
                    else
                        fFirstTyParam = false;

                    sb.Append(typars[k].Name);
                    k++;
                }
                sb.Append(']');
            }

            ParameterInfo[] pi = null;
#if FEATURE_CORECLR
            try
            {
#endif
                pi = mb.GetParameters();
#if FEATURE_CORECLR
            }
            catch
            {
                // The parameter info cannot be loaded, so we don't
                // append the parameter list.
            }
#endif
            if (pi != null)
            {
                // arguments printing: "(Type1 name1, Type2 name2)"
                sb.Append('(');
                bool fFirstParam = true;
                for (int j = 0; j < pi.Length; j++)
                {
                    if (fFirstParam == false)
                        sb.Append(", ");
                    else
                        fFirstParam = false;

                    String typeName = "<UnknownType>";
                    if (pi[j].ParameterType != null)
                        typeName = pi[j].ParameterType.Name;
                    sb.Append(typeName);
                    sb.Append(' ');
                    sb.Append(pi[j].Name);
                }
                sb.Append(')');
            }

            // source location printing (only when an IL offset was resolved)
            if (displayFilenames && (sf.GetILOffset() != -1))
            {
                // If we don't have a PDB or PDB-reading is disabled for the module,
                // then the file name will be null.
                String fileName = null;

                // Getting the filename from a StackFrame is a privileged operation - we won't want
                // to disclose full path names to arbitrarily untrusted code.  Rather than just omit
                // this we could probably trim to just the filename so it's still mostly useful.
                try
                {
                    fileName = sf.GetFileName();
                }
#if FEATURE_CAS_POLICY
                catch (NotSupportedException)
                {
                    // Having a deprecated stack modifier on the callstack (such as Deny) will cause
                    // a NotSupportedException to be thrown.  Since we don't know if the app can
                    // access the file names, we'll conservatively hide them.
                    displayFilenames = false;
                }
#endif // FEATURE_CAS_POLICY
                catch (SecurityException)
                {
                    // If the demand for displaying filenames fails, then it won't
                    // succeed later in the loop.  Avoid repeated exceptions by not trying again.
                    displayFilenames = false;
                }

                if (fileName != null)
                {
                    // tack on " in c:\tmp\MyFile.cs:line 5"
                    sb.Append(' ');
                    sb.AppendFormat(CultureInfo.InvariantCulture, inFileLineNum, fileName, sf.GetFileLineNumber());
                }
            }

#if FEATURE_EXCEPTIONDISPATCHINFO
            // Marks the boundary between the captured (foreign) exception trace
            // and the frames added after a rethrow via ExceptionDispatchInfo.
            if (sf.GetIsLastFrameFromForeignExceptionStackTrace())
            {
                sb.Append(Environment.NewLine);
                sb.Append(Environment.GetResourceString("Exception_EndStackTraceFromPreviousThrow"));
            }
#endif // FEATURE_EXCEPTIONDISPATCHINFO
        }
    }

    if(traceFormat == TraceFormat.TrailingNewLine)
        sb.Append(Environment.NewLine);

    return sb.ToString();
}

// This helper is called from within the EE to construct a string representation
// of the current stack trace.
#if FEATURE_CORECLR
[System.Security.SecurityCritical]  // auto-generated
#endif
private static String GetManagedStackTraceStringHelper(bool fNeedFileInfo)
{
    // Note all the frames in System.Diagnostics will be skipped when capturing
    // a normal stack trace (not from an exception) so we don't need to explicitly
    // skip the GetManagedStackTraceStringHelper frame.
    StackTrace st = new StackTrace(0, fNeedFileInfo);
    return st.ToString();
}
}
}
/*
 * MindTouch DekiScript - embeddable web-oriented scripting runtime
 * Copyright (c) 2006-2010 MindTouch Inc.
 * www.mindtouch.com  oss@mindtouch.com
 *
 * For community documentation and downloads visit wiki.developer.mindtouch.com;
 * please review the licensing section.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using MindTouch.Deki.Script.Expr;
using MindTouch.Deki.Script.Runtime;
using MindTouch.Deki.Script.Runtime.Library;
using MindTouch.Tasking;
using MindTouch.Xml;

namespace MindTouch.Deki.Script.Compiler {

    /// <summary>
    /// Compiles JEM (JavaScript Event Model) source into plain JavaScript:
    /// expands embedded {{ dekiscript }} blocks, '@channel' pub/sub references,
    /// '#id' element references and 'when(...)' event/message handlers.
    /// </summary>
    public static class DekiJemProcessor {

        // How much input ParseExpression should consume before returning.
        private enum ParseMode {
            ALL,        // consume the entire remaining input
            STATEMENT,  // stop after one complete statement (';' or '}')
            EXPRESSION  // stop when the opening '('/'{' nesting closes
        }

        //--- Constants ---
        private const string WHEN = "when";
        private static readonly char[] NEWLINE_CHARS = new[] { '\r', '\n' };

        // Matches "$this.event" or "#id.event"; the optional 'tail' group ( '(', '.' or '[' )
        // disqualifies a match as an event reference (it is a call/member access instead).
        private static readonly Regex EVENT_PATTERN = new Regex(@"((?<sink>(\$this|#[a-zA-Z0-9_]+))\.(?<event>[a-zA-Z_][a-zA-Z_0-9]*))(?<tail>\s*(\(|\.|\[))?", RegexOptions.Compiled | RegexOptions.CultureInvariant);

        //--- Class Methods ---

        /// <summary>
        /// Converts JEM <paramref name="code"/> into JavaScript. If <paramref name="id"/>
        /// is non-empty, the output is wrapped in a jQuery each() over "#id" with
        /// $this bound to the element.
        /// </summary>
        public static string Parse(string code, string id, DekiScriptEnv env, DekiScriptRuntime runtime) {
            StringBuilder result = new StringBuilder();

            // check if code is attached to an id'ed element
            if(!string.IsNullOrEmpty(id)) {
                result.AppendLine("$(\"#" + id + "\").each(function() {");
                result.AppendLine("var $this = $(this);");
            }

            // convert JEM code into regular javascript
            int i = 0;
            result.Append(ParseExpression(code, id, ParseMode.ALL, false, env, runtime, new Dictionary<string, string>(), ref i));

            // check if code is attached to an id'ed element
            if(!string.IsNullOrEmpty(id)) {
                result.AppendLine("});");
            }
            return result.ToString();
        }

        // True for identifier start characters (JS-style: letters, '_' and '$').
        private static bool IsAlpha(char c) {
            return ((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) || (c == '_') || (c == '$');
        }

        // True for identifier continuation characters (IsAlpha plus digits).
        private static bool IsAlphaNum(char c) {
            return IsAlpha(c) || ((c >= '0') && (c <= '9'));
        }

        // Advances i past a quoted string starting at i (i points at the opening
        // quote); honors backslash escapes.  On return, i is one past the closing
        // quote (or code.Length if unterminated).
        private static void ScanString(string code, char endChar, ref int i) {
            ++i;
            for(; (i < code.Length); ++i) {
                char c = code[i];
                if(c == '\\') {
                    ++i;
                } else if(c == endChar) {
                    ++i;
                    break;
                }
            }
        }

        // If code[i] == '/' starts a '//' or '/*' comment, advances i past the
        // comment and returns true; otherwise leaves i unchanged and returns false.
        private static bool TryScanComment(string code, ref int i) {
            char next = ((i + 1) < code.Length) ? code[i + 1] : (char)0;
            if(next == '/') {

                // NOTE (steveb): this is a comment line (i.e. //)
                i = code.IndexOfAny(NEWLINE_CHARS, i + 2);
                if(i < 0) {
                    i = code.Length;
                } else {

                    // consume a CRLF pair as a single newline
                    next = ((i + 1) < code.Length) ? code[i + 1] : (char)0;
                    if((code[i] == '\r') && (next == '\n')) {
                        ++i;
                    }
                }
                return true;
            }
            if(next == '*') {

                // NOTE (steveb): this is a comment block (i.e. /* */)
                i = code.IndexOf("*/", i + 2);
                if(i < 0) {
                    i = code.Length;
                } else {
                    i += 2;
                }
                return true;
            }
            return false;
        }

        // Advances i past an identifier; assumes code[i] is a valid start character.
        private static void ScanId(string code, ref int i) {
            ++i;
            for(; (i < code.Length) && IsAlphaNum(code[i]); ++i) { }
        }

        // Advances i past any whitespace.
        private static void ScanWhitespace(string code, ref int i) {
            while((i < code.Length) && char.IsWhiteSpace(code[i])) {
                ++i;
            }
        }

        /// <summary>
        /// Core character-level translator.  Consumes input starting at
        /// <paramref name="i"/> (shared cursor, also used by recursive calls) until
        /// the end condition implied by <paramref name="mode"/> is met, appending
        /// translated JavaScript to the result.
        ///   whenCondition - true while parsing the condition of a 'when()'; disables
        ///                   comment emission, '#id' expansion and nested 'when()'.
        ///   channels      - accumulates channels read in this scope (name -> channel
        ///                   expression); null disables '@channel'/'#id' processing.
        /// </summary>
        private static string ParseExpression(string code, string id, ParseMode mode, bool whenCondition, DekiScriptEnv env, DekiScriptRuntime runtime, Dictionary<string, string> channels, ref int i) {
            StringBuilder result = new StringBuilder();
            int nesting = 0;
            for(; i < code.Length; ++i) {
                int start;
                switch(code[i]) {
                case '"':
                case '\'':

                    // process strings (copied through verbatim)
                    start = i;
                    ScanString(code, code[i], ref i);
                    result.Append(code, start, i - start);
                    --i;
                    break;
                case '/':

                    // check if / denotes the beginning of a comment, if so process it
                    start = i;
                    if(TryScanComment(code, ref i)) {

                        // NOTE: remove comments in when-condition
                        if(!whenCondition) {
                            result.Append(code, start, i - start);
                            result.Append("\n");
                        }
                        --i;
                    } else {
                        result.Append(code[i]);
                    }
                    break;
                case '\\':

                    // backslash (\) always appends the next character
                    result.Append(code[i++]);
                    if(i < code.Length) {
                        result.Append(code[i]);
                    }
                    break;
                case '(':

                    // increase nesting level
                    result.Append(code[i]);
                    ++nesting;
                    break;
                case '{':

                    // check if this is the beginning of a dekiscript block {{ }}
                    if(((i + 1) < code.Length) && (code[i + 1] == '{')) {
                        ++i;
                        string value;
                        start = i;
                        if(TryParseDekiScriptExpression(code, env, runtime, ref i, out value)) {
                            result.Append(value);
                        } else {

                            // not a valid dekiscript block; treat as ordinary braces
                            ++nesting;
                            result.Append('{');
                            result.Append(code, start, i - start);
                            --i;
                        }
                    } else {
                        ++nesting;
                        result.Append(code[i]);
                    }
                    break;
                case ')':
                case '}':

                    // decrease nesting level and check if this is the end of the sought expression
                    result.Append(code[i]);
                    --nesting;

                    // NOTE: only exit if
                    // 1) we don't have to read all of the code
                    // 2) there are no open parentheses or curly braces
                    // 3) we don't stop on a complete statement or the current character is a closing curly brace
                    if((mode != ParseMode.ALL) && (nesting <= 0) && ((mode != ParseMode.STATEMENT) || (code[i] == '}'))) {

                        // found the end of the expression
                        ++i;
                        return result.ToString();
                    }
                    break;
                case ';':

                    // check if the statement is the end of the sought expression
                    result.Append(code[i]);

                    // NOTE: only exit if
                    // 1) we don't have to read all of the code
                    // 2) there are no open parentheses or curly braces
                    // 3) we stop on a complete statement
                    if((nesting <= 0) && (mode == ParseMode.STATEMENT)) {

                        // found the end of the expression
                        ++i;
                        return result.ToString();
                    }
                    break;
                case '@':

                    // channel name
                    if(channels != null) {
                        ++i;
                        start = i;
                        string channel;
                        string name;
                        if((i < code.Length) && ((code[i] == '"') || (code[i] == '\''))) {

                            // process: @"channel_name" or @'channel_name'
                            ScanString(code, code[i], ref i);
                            channel = code.Substring(start, i - start);
                            name = channel.Substring(1, channel.Length - 2).UnescapeString();
                        } else {

                            // process: @channel_magic_id
                            ScanId(code, ref i);
                            name = code.Substring(start, i - start);
                            if(!channels.TryGetValue(name, out channel)) {
                                channel = env.GetMagicId(name).ToString();
                            }
                        }
                        start = i;
                        ScanWhitespace(code, ref i);
                        if((i < code.Length) && (code[i] == '(')) {

                            // process: @channel ( ... )  -->  Deki.publish(channel[, message])
                            string message = ParseExpression(code, id, ParseMode.EXPRESSION, false, env, runtime, channels, ref i);
                            message = message.Substring(1, message.Length - 2).Trim();
                            if(message.Length == 0) {
                                result.AppendFormat("Deki.publish({0})", channel);
                            } else {
                                result.AppendFormat("Deki.publish({0}, {1})", channel, message);
                            }
                        } else {

                            // channel is used for reading; add it to the channel set to read on activation
                            channels[name] = channel;

                            // convert channel name and add whitespace
                            result.AppendFormat("$channels[{0}]", name.QuoteString());
                            result.Append(code, start, i - start);
                        }
                        --i;
                    } else {
                        result.Append(code[i]);
                    }
                    break;
                case '#':

                    // NOTE: don't process #id in the when-condition

                    // element name
                    if(!whenCondition && (channels != null)) {
                        ++i;
                        start = i;

                        // process: #id  -->  $("#id")
                        ScanId(code, ref i);
                        string name = code.Substring(start, i - start);
                        result.Append("$(\"#" + name + "\")");
                        --i;
                    } else {
                        result.Append(code[i]);
                    }
                    break;
                default:

                    // NOTE: don't process when() in the when-condition

                    // check if this is the beginning of an identifier
                    if(!whenCondition && IsAlpha(code[i])) {
                        start = i;
                        ScanId(code, ref i);
                        int j = i;
                        ScanWhitespace(code, ref j);

                        // check if scanned identifier is the keyword 'when'
                        if(((i - start) == WHEN.Length) && (string.Compare(code, start, WHEN, 0, WHEN.Length, StringComparison.Ordinal) == 0) && (j < code.Length) && (code[j] == '(')) {
                            i = j;
                            Dictionary<string, string> subChannels = new Dictionary<string, string>();

                            // parse the condition of the 'when()' statement
                            string condition = ParseExpression(code, id, ParseMode.EXPRESSION, true, env, runtime, subChannels, ref i);

                            // parse the body of the 'when()' expression
                            string body = ParseExpression(code, id, ParseMode.STATEMENT, false, env, runtime, subChannels, ref i);
                            BuildWhenStatement(condition.Trim(), id, body.Trim(), result, env, subChannels);
                        } else {
                            result.Append(code, start, i - start);
                        }
                        --i;
                    } else {
                        result.Append(code[i]);
                    }
                    break;
                }
            }
            return result.ToString();
        }

        // Attempts to evaluate a {{ dekiscript }} block starting just after the
        // second '{' (cursor i).  On success, 'value' receives the JSON-emitted
        // evaluation result (or an alert() call if evaluation threw) and true is
        // returned; if the block is not terminated by '}', returns false.
        private static bool TryParseDekiScriptExpression(string ctor, DekiScriptEnv env, DekiScriptRuntime runtime, ref int i, out string value) {
            string source = ParseExpression(ctor, null, ParseMode.EXPRESSION, false, env, runtime, null, ref i);
            if((i >= ctor.Length) || (ctor[i] != '}')) {
                value = null;
                return false;
            }

            // try to parse and execute the dekiscript fragment
            try {
                source = source.Substring(1, source.Length - 2);
                DekiScriptExpression dekiscript = DekiScriptParser.Parse(new Location("jem"), source);
                DekiScriptLiteral result = runtime.Evaluate(dekiscript, DekiScriptEvalMode.EvaluateSafeMode, env);
                value = DekiScriptLibrary.JsonEmit(result.NativeValue);
            } catch(Exception e) {

                // execution failed; convert exception into a javascript alert
                value = "alert(\"ERROR in DekiScript expression:\\n---------------------------\\n\\n\" + " + e.GetCoroutineStackTrace().QuoteString() + ")";
            }
            return true;
        }

        /// <summary>
        /// Emits the JavaScript for one 'when(expr) body' construct into
        /// <paramref name="head"/>: a generated handler function, bound to the
        /// events referenced in the condition and subscribed to the channels in
        /// <paramref name="channels"/>.
        /// </summary>
        private static void BuildWhenStatement(string expr, string id, string body, StringBuilder head, DekiScriptEnv env, Dictionary<string, string> channels) {

            // remove the optional outer () and {}; the expression is already scoped, so we don't need them anymore
            if(expr.StartsWith("(") && expr.EndsWith(")")) {
                expr = expr.Substring(1, expr.Length - 2).Trim();
            }

            // gather all events from 'when()' condition expression into a map { sink1: [ event1, event2, ... ], sink2: [ event1, event2, ... ], ... }
            Dictionary<string, Dictionary<string, string>> sinks = new Dictionary<string, Dictionary<string, string>>();
            string condition = EVENT_PATTERN.Replace(expr, delegate(Match match) {

                // check if a tail element was matched, which disqualifies this match
                Group tail = match.Groups["tail"];
                if(!tail.Success) {

                    // check for event match
                    Group group = match.Groups["event"];
                    if(group.Success) {
                        string sink = match.Groups["sink"].Value;
                        Dictionary<string, string> events;
                        if(!sinks.TryGetValue(sink, out events)) {
                            events = new Dictionary<string, string>();
                            sinks[sink] = events;
                        }
                        events[group.Value] = group.Value;

                        // TODO (steveb): we should also check that the event source is what we expect it to be
                        return string.Format("($event.type == {0})", StringUtil.QuoteString(group.Value));
                    }
                } else {
                    string sink = match.Groups["sink"].Value;
                    if(sink.StartsWith("#")) {

                        // '#id.member(...' etc. is an ordinary member access on the element
                        return string.Format("$(\"{0}\").{1}{2}", sink, match.Groups["event"].Value, tail.Value);
                    }
                }
                return match.Value;
            });

            // create stub function; $event is only set when invoked for events, not for messages
            string function = "_" + StringUtil.CreateAlphaNumericKey(8);
            if(sinks.Count > 0) {
                head.Append("var " + function + " = function($event) { $event = $event||{}; ");
            } else {
                head.Append("var " + function + " = function() { ");
            }

            // read channel state for all channels that are read from; including 'when()' condition and body
            if(channels.Count > 0) {
                bool first = true;
                head.Append("var $channels = {");
                foreach(KeyValuePair<string, string> channel in channels) {
                    if(!first) {
                        head.Append(", ");
                    }
                    first = false;
                    head.AppendFormat("{0}: Deki.query({1})||{{}}", channel.Key.QuoteString(), channel.Value);
                }
                head.Append(" }; ");
            }

            // add optional condition
            if(!string.IsNullOrEmpty(condition)) {
                head.Append("if(" + condition + ") ");
            }

            // append body of 'when()' statement and close the function
            if(string.IsNullOrEmpty(body)) {
                head.Append(";");
            } else {
                head.Append(body);
            }
            head.AppendLine(" };");

            // register function for event handlers
            if(sinks.Count > 0) {
                foreach(KeyValuePair<string, Dictionary<string, string>> events in sinks) {
                    string bind = string.Join(" ", new List<string>(events.Value.Values).ToArray()).QuoteString();
                    if(events.Key.EqualsInvariant("$this")) {
                        head.AppendLine("$this.bind(" + bind + ", " + function + ");");
                    } else {
                        head.AppendLine("$(" + events.Key.QuoteString() + ").bind(" + bind + ", " + function + ");");
                    }
                }
            }

            // register function for message handlers
            foreach(KeyValuePair<string, string> channel in channels) {
                head.AppendLine("Deki.subscribe(" + channel.Value + ", " + (string.IsNullOrEmpty(id) ? "null" : "this") + ", " + function + ");");
            }
        }
    }
}
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // File System.ServiceModel.Dispatcher.MessageFilterTable_1.cs // Automatically generated contract file. using System.Collections.Generic; using System.IO; using System.Text; using System.Diagnostics.Contracts; using System; // Disable the "this variable is not used" warning as every field would imply it. #pragma warning disable 0414 // Disable the "this variable is never assigned to". #pragma warning disable 0067 // Disable the "this event is never assigned to". #pragma warning disable 0649 // Disable the "this variable is never used". #pragma warning disable 0169 // Disable the "new keyword not required" warning. #pragma warning disable 0109 // Disable the "extern without DllImport" warning. #pragma warning disable 0626 // Disable the "could hide other member" warning, can happen on certain properties. 
#pragma warning disable 0108

namespace System.ServiceModel.Dispatcher
{
  // NOTE: auto-generated Code Contracts reference stub for
  // System.ServiceModel.Dispatcher.MessageFilterTable<TFilterData>.
  // Method bodies are placeholders (return default(...)); only the signatures
  // and explicit Contract.* calls carry meaning for the static checker.
  // Do not add real logic here — regeneration would discard it.
  public partial class MessageFilterTable<TFilterData> : IMessageFilterTable<TFilterData>, IDictionary<MessageFilter, TFilterData>, ICollection<KeyValuePair<MessageFilter, TFilterData>>, IEnumerable<KeyValuePair<MessageFilter, TFilterData>>, System.Collections.IEnumerable
  {
    #region Methods and constructors
    public void Add(MessageFilter filter, TFilterData data)
    {
    }

    public void Add(MessageFilter filter, TFilterData data, int priority)
    {
    }

    public void Add(KeyValuePair<MessageFilter, TFilterData> item)
    {
    }

    public void Clear()
    {
    }

    public bool Contains(KeyValuePair<MessageFilter, TFilterData> item)
    {
      return default(bool);
    }

    public bool ContainsKey(MessageFilter filter)
    {
      return default(bool);
    }

    public void CopyTo(KeyValuePair<MessageFilter, TFilterData>[] array, int arrayIndex)
    {
    }

    // Contract: callers must supply a non-null filter.
    protected virtual new IMessageFilterTable<TFilterData> CreateFilterTable(MessageFilter filter)
    {
      Contract.Requires(filter != null);

      return default(IMessageFilterTable<TFilterData>);
    }

    public IEnumerator<KeyValuePair<MessageFilter, TFilterData>> GetEnumerator()
    {
      return default(IEnumerator<KeyValuePair<MessageFilter, TFilterData>>);
    }

    public bool GetMatchingFilter(System.ServiceModel.Channels.Message message, out MessageFilter filter)
    {
      filter = default(MessageFilter);

      return default(bool);
    }

    public bool GetMatchingFilter(System.ServiceModel.Channels.MessageBuffer buffer, out MessageFilter filter)
    {
      filter = default(MessageFilter);

      return default(bool);
    }

    public bool GetMatchingFilters(System.ServiceModel.Channels.Message message, ICollection<MessageFilter> results)
    {
      return default(bool);
    }

    public bool GetMatchingFilters(System.ServiceModel.Channels.MessageBuffer buffer, ICollection<MessageFilter> results)
    {
      return default(bool);
    }

    public bool GetMatchingValue(System.ServiceModel.Channels.Message message, out TFilterData data)
    {
      data = default(TFilterData);

      return default(bool);
    }

    public bool GetMatchingValue(System.ServiceModel.Channels.MessageBuffer buffer, out TFilterData data)
    {
      data = default(TFilterData);

      return default(bool);
    }

    public bool GetMatchingValues(System.ServiceModel.Channels.Message message, ICollection<TFilterData> results)
    {
      return default(bool);
    }

    public bool GetMatchingValues(System.ServiceModel.Channels.MessageBuffer buffer, ICollection<TFilterData> results)
    {
      return default(bool);
    }

    public int GetPriority(MessageFilter filter)
    {
      return default(int);
    }

    public MessageFilterTable(int defaultPriority)
    {
    }

    public MessageFilterTable()
    {
    }

    public bool Remove(MessageFilter filter)
    {
      return default(bool);
    }

    public bool Remove(KeyValuePair<MessageFilter, TFilterData> item)
    {
      return default(bool);
    }

    System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
    {
      return default(System.Collections.IEnumerator);
    }

    public bool TryGetValue(MessageFilter filter, out TFilterData data)
    {
      data = default(TFilterData);

      return default(bool);
    }
    #endregion

    #region Properties and indexers
    public int Count
    {
      get
      {
        return default(int);
      }
    }

    public int DefaultPriority
    {
      get
      {
        return default(int);
      }
      set
      {
      }
    }

    public bool IsReadOnly
    {
      get
      {
        return default(bool);
      }
    }

    public TFilterData this [MessageFilter filter]
    {
      get
      {
        return default(TFilterData);
      }
      set
      {
      }
    }

    public ICollection<MessageFilter> Keys
    {
      get
      {
        return default(ICollection<MessageFilter>);
      }
    }

    public ICollection<TFilterData> Values
    {
      get
      {
        return default(ICollection<TFilterData>);
      }
    }
    #endregion
  }
}
// ============================================================================ // FileName: SIPRegistrarBinding.cs // // Description: // SIP Registrar that strives to be RFC3822 compliant. // // Author(s): // Aaron Clauson // // History: // 23 Aug 2008 Aaron Clauson Created, refactored from RegistrarCore. // // License: // This software is licensed under the BSD License http://www.opensource.org/licenses/bsd-license.php // // Copyright (c) 2008 Aaron Clauson (aaron@sipsorcery.com), SIP Sorcery PTY LTD, Hobart, Australia (www.sipsorcery.com) // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that // the following conditions are met: // // Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of SIP Sorcery PTY LTD. // nor the names of its contributors may be used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, // BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, // OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, // OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. // ============================================================================ using System; using System.Collections.Generic; using System.ComponentModel; using System.Runtime.Serialization; using System.Text.RegularExpressions; using SIPSorcery.Sys; using log4net; #if !SILVERLIGHT using System.Data; using System.Data.Linq.Mapping; #endif namespace SIPSorcery.SIP.App { public enum SIPBindingRemovalReason { Unknown = 0, ClientExpiredSpecific = 1, ClientExpiredAll = 2, ExceededPerUserLimit = 3, OptionsTimedOut = 4, OptionsErrorResponse = 5, MaxLifetimeReached = 6, Administrative = 7, } /// <summary> /// The SIPAddressBinding represents a single registered contact uri for a user. A user can have multiple registered contact uri's. /// </summary> [Table(Name = "sipregistrarbindings")] [DataContract] public class SIPRegistrarBinding : INotifyPropertyChanged, ISIPAsset { public const string XML_DOCUMENT_ELEMENT_NAME = "sipregistrarbindings"; public const string XML_ELEMENT_NAME = "sipregistrarbinding"; public const int MAX_BINDING_LIFETIME = 3600; // Bindings are currently not being expired once the expires time is reached and this is the maximum amount of time // a binding can stay valid for with probing before it is removed and the binding must be freshed with a REGISTER. 
//public static readonly string SelectBindingsQuery = "select * from sipregistrarbindings where sipaccountid = ?1"; //public static readonly string SelectExpiredBindingsQuery = "select * from sipregistrarbindings where expirytime < ?1"; private static string m_newLine = AppState.NewLine; private static ILog logger = AppState.GetLogger("sipregistrar"); public static int TimeZoneOffsetMinutes; private static Dictionary<string, int> m_userAgentExpirys = new Dictionary<string, int>(); // Result of parsing user agent expiry values from the App.Config Xml Node. [Column(Name = "id", DbType = "varchar(36)", IsPrimaryKey = true, CanBeNull = false, UpdateCheck = UpdateCheck.Never)] [DataMember] public Guid Id { get; set; } [Column(Name = "sipaccountid", DbType = "varchar(36)", CanBeNull = false, UpdateCheck = UpdateCheck.Never)] public Guid SIPAccountId { get; set; } [Column(Name = "sipaccountname", DbType = "varchar(160)", CanBeNull = false, UpdateCheck = UpdateCheck.Never)] [DataMember] public string SIPAccountName { get; set; } // Used for informational purposes only, no matching done against it and should not be relied on. Use SIPAccountId instead. [Column(Name = "owner", DbType = "varchar(32)", CanBeNull = false, UpdateCheck = UpdateCheck.Never)] [DataMember] public string Owner { get; set; } [Column(Name = "adminmemberid", DbType = "varchar(32)", CanBeNull = true, UpdateCheck = UpdateCheck.Never)] public string AdminMemberId { get; private set; } // If set it designates this asset as a belonging to a user with the matching adminid. [Column(Name = "useragent", DbType = "varchar(1024)", CanBeNull = true, UpdateCheck = UpdateCheck.Never)] [DataMember] public string UserAgent { get; set; } private SIPURI m_contactURI; [Column(Name = "contacturi", DbType = "varchar(767)", CanBeNull = false, UpdateCheck = UpdateCheck.Never)] [DataMember] public string ContactURI { get { return (m_contactURI != null) ? 
m_contactURI.ToString() : null; } set { m_contactURI = (!value.IsNullOrBlank()) ? SIPURI.ParseSIPURI(value) : null; } } public SIPURI ContactSIPURI { get { return m_contactURI; } set { m_contactURI = value; } } private SIPURI m_mangledContactURI; [Column(Name = "mangledcontacturi", DbType = "varchar(767)", CanBeNull = false, UpdateCheck = UpdateCheck.Never)] [DataMember] public string MangledContactURI { get { return (m_mangledContactURI != null) ? m_mangledContactURI.ToString() : null; } set { m_mangledContactURI = (!value.IsNullOrBlank()) ? SIPURI.ParseSIPURI(value) : null; } } public SIPURI MangledContactSIPURI { get { return m_mangledContactURI; } set { m_mangledContactURI = value; } } private DateTimeOffset m_lastUpdate; [Column(Name = "lastupdate", DbType = "datetimeoffset", CanBeNull = false, UpdateCheck = UpdateCheck.Never)] [DataMember] public DateTimeOffset LastUpdate { get { return m_lastUpdate; } set { m_lastUpdate = value.ToUniversalTime(); } } public DateTimeOffset LastUpdateLocal { get { return LastUpdate.AddMinutes(TimeZoneOffsetMinutes); } } [IgnoreDataMember] public SIPEndPoint RemoteSIPEndPoint; // The socket the REGISTER request the binding was received on. [Column(Name = "remotesipsocket", DbType = "varchar(64)", CanBeNull = false, UpdateCheck = UpdateCheck.Never)] [DataMember] public string RemoteSIPSocket { get { return (RemoteSIPEndPoint != null) ? RemoteSIPEndPoint.ToString() : null; } set { if (value.IsNullOrBlank()) { RemoteSIPEndPoint = null; } else { RemoteSIPEndPoint = SIPEndPoint.ParseSIPEndPoint(value); } } } [IgnoreDataMember] public string CallId; [IgnoreDataMember] public int CSeq; private int m_expiry = 0; // The expiry time in seconds for the binding. 
// Requested binding lifetime in seconds, as supplied by the user agent's REGISTER request.
[Column(Name = "expiry", DbType = "int", CanBeNull = false, UpdateCheck = UpdateCheck.Never)]
[DataMember]
public int Expiry
{
    get { return m_expiry; }
    set { m_expiry = value; }
}

// Absolute expiry instant, derived from the last update time plus the expiry period.
[Column(Name = "expirytime", DbType = "datetimeoffset", CanBeNull = false, UpdateCheck = UpdateCheck.Never)]
public DateTimeOffset ExpiryTime
{
    get { return m_lastUpdate.AddSeconds(m_expiry); }
    set { } // The expiry time is stored in the database for info. It is calculated from expiry and lastupdate and does not need a setter.
}

[DataMember]
public string Q // The Q value on the Contact header to indicate relative priority among bindings for the same address of record.
{
    get
    {
        if (m_contactURI.Parameters != null)
        {
            return m_contactURI.Parameters.Get(SIPContactHeader.QVALUE_PARAMETER_KEY);
        }
        else
        {
            return null;
        }
    }
    set { m_contactURI.Parameters.Set(SIPContactHeader.QVALUE_PARAMETER_KEY, value); } // NOTE(review): setter does not null-check Parameters the way the getter does — confirm Parameters is always non-null when set.
}

private SIPEndPoint m_proxySIPEndPoint;

[IgnoreDataMember]
public SIPEndPoint ProxySIPEndPoint // This is the socket the request was received from and assumes that the prior SIP element was a SIP proxy.
{
    get { return m_proxySIPEndPoint; }
    set { m_proxySIPEndPoint = value; }
}

// String form of the proxy end point for persistence; parses back to ProxySIPEndPoint on set.
[Column(Name = "proxysipsocket", DbType = "varchar(64)", CanBeNull = true, UpdateCheck = UpdateCheck.Never)]
[DataMember]
public string ProxySIPSocket
{
    get { return (m_proxySIPEndPoint != null) ? m_proxySIPEndPoint.ToString() : null; }
    set
    {
        if (value.IsNullOrBlank())
        {
            ProxySIPEndPoint = null;
        }
        else
        {
            ProxySIPEndPoint = SIPEndPoint.ParseSIPEndPoint(value);
        }
    }
}

private SIPEndPoint m_registrarSIPEndPoint;

// The registrar socket the REGISTER request was received on. Read-only; set via RegistrarSIPSocket.
[IgnoreDataMember]
public SIPEndPoint RegistrarSIPEndPoint
{
    get { return m_registrarSIPEndPoint; }
}

// String form of the registrar end point for persistence; parses back to the field on set.
[Column(Name = "registrarsipsocket", DbType = "varchar(64)", CanBeNull = false, UpdateCheck = UpdateCheck.Never)]
[DataMember]
public string RegistrarSIPSocket
{
    get { return (m_registrarSIPEndPoint != null) ? m_registrarSIPEndPoint.ToString() : null; }
    set
    {
        if (value.IsNullOrBlank())
        {
            m_registrarSIPEndPoint = null;
        }
        else
        {
            m_registrarSIPEndPoint = SIPEndPoint.ParseSIPEndPoint(value);
        }
    }
}

// Reason the binding was (or will be) removed; Unknown while the binding is active.
public SIPBindingRemovalReason RemovalReason = SIPBindingRemovalReason.Unknown;

public event PropertyChangedEventHandler PropertyChanged;

public SIPRegistrarBinding()
{ }

/// <summary>
/// Creates a new binding from a successful REGISTER request, copying the contact URI and,
/// unless the account opts out, mangling the contact host to work around NAT.
/// </summary>
/// <remarks>
/// If the remote end point is non-null the contact header host is mangled based on the public
/// socket the REGISTER request was deemed to have originated from, rather than relying on the
/// contact value received from the UAC.
/// </remarks>
public SIPRegistrarBinding(
    SIPAccount sipAccount,
    SIPURI bindingURI,
    string callId,
    int cseq,
    string userAgent,
    SIPEndPoint remoteSIPEndPoint,
    SIPEndPoint proxySIPEndPoint,
    SIPEndPoint registrarSIPEndPoint,
    int expiry)
{
    Id = Guid.NewGuid();
    LastUpdate = DateTime.UtcNow;
    SIPAccountId = sipAccount.Id;
    SIPAccountName = sipAccount.SIPUsername + "@" + sipAccount.SIPDomain;
    Owner = sipAccount.Owner;
    AdminMemberId = sipAccount.AdminMemberId;
    // Keep both the original contact URI and a separate copy that may get its host mangled below.
    m_contactURI = bindingURI.CopyOf();
    m_mangledContactURI = m_contactURI.CopyOf();
    CallId = callId;
    CSeq = cseq;
    UserAgent = userAgent;
    RemoteSIPEndPoint = remoteSIPEndPoint;
    m_proxySIPEndPoint = proxySIPEndPoint;
    m_registrarSIPEndPoint = registrarSIPEndPoint;

    //if (SIPTransport.IsPrivateAddress(sipRequest.Header.Contact[0].ContactURI.Host) && m_mangleUACContact)
    // Only mangle when the account allows it and the contact host is a dotted-quad IPv4 address.
    if (!sipAccount.DontMangleEnabled && Regex.Match(m_mangledContactURI.Host, @"(\d+\.){3}\d+").Success)
    {
        // The Contact URI Host is used by registrars as the contact socket for the user so it needs to be changed to reflect the socket
        // the initial request was received on in order to work around NAT. It's no good just relying on private addresses as a lot of User Agents
        // determine their public IP but NOT their public port so they send the wrong port in the Contact header.
        //logger.Debug("Mangling contact header from " + m_mangledContactURI.Host + " to " + IPSocket.GetSocketString(uacRecvdEndPoint) + ".");
        m_mangledContactURI.Host = remoteSIPEndPoint.GetIPEndPoint().ToString();
    }

    m_expiry = expiry;
}

#if !SILVERLIGHT

// Hydrates a binding from a persisted database row.
public SIPRegistrarBinding(DataRow row)
{
    Load(row);
}

/// <summary>
/// Builds an empty DataTable whose columns mirror the persisted binding fields, for use by the asset persistor.
/// </summary>
public DataTable GetTable()
{
    DataTable table = new DataTable();
    table.Columns.Add(new DataColumn("id", typeof(String)));
    table.Columns.Add(new DataColumn("sipaccountid", typeof(String)));
    table.Columns.Add(new DataColumn("owner", typeof(String)));
    table.Columns.Add(new DataColumn("adminmemberid", typeof(String)));
    table.Columns.Add(new DataColumn("sipaccountname", typeof(String)));
    table.Columns.Add(new DataColumn("useragent", typeof(String)));
    table.Columns.Add(new DataColumn("contacturi", typeof(String)));
    table.Columns.Add(new DataColumn("mangledcontacturi", typeof(String)));
    table.Columns.Add(new DataColumn("expiry", typeof(Int32)));
    table.Columns.Add(new DataColumn("expirytime", typeof(DateTimeOffset)));
    table.Columns.Add(new DataColumn("remotesipsocket", typeof(String)));
    table.Columns.Add(new DataColumn("proxysipsocket", typeof(String)));
    table.Columns.Add(new DataColumn("registrarsipsocket", typeof(String)));
    table.Columns.Add(new DataColumn("lastupdate", typeof(DateTimeOffset)));
    return table;
}

/// <summary>
/// Populates this binding from a database row. Optional socket/URI columns that are blank map to null.
/// Logs and rethrows on any parse failure.
/// </summary>
public void Load(DataRow row)
{
    try
    {
        Id = new Guid(row["id"] as string);
        SIPAccountId = new Guid(row["sipaccountid"] as string);
        SIPAccountName = row["sipaccountname"] as string;
        Owner = row["owner"] as string;
        AdminMemberId = row["adminmemberid"] as string;
        UserAgent = row["useragent"] as string;
        m_contactURI = SIPURI.ParseSIPURI(row["contacturi"] as string);
        m_mangledContactURI = (!(row["mangledcontacturi"] as string).IsNullOrBlank()) ? SIPURI.ParseSIPURI(row["mangledcontacturi"] as string) : null;
        Expiry = Convert.ToInt32(row["expiry"]);
        RemoteSIPEndPoint = (!(row["remotesipsocket"] as string).IsNullOrBlank()) ? SIPEndPoint.ParseSIPEndPoint(row["remotesipsocket"] as string) : null;
        m_proxySIPEndPoint = (!(row["proxysipsocket"] as string).IsNullOrBlank()) ? SIPEndPoint.ParseSIPEndPoint(row["proxysipsocket"] as string) : null;
        m_registrarSIPEndPoint = (!(row["registrarsipsocket"] as string).IsNullOrBlank()) ? SIPEndPoint.ParseSIPEndPoint(row["registrarsipsocket"] as string) : null;
        LastUpdate = DateTimeOffset.Parse(row["lastupdate"] as string);
    }
    catch (Exception excp)
    {
        logger.Error("Exception SIPRegistrarBinding Load. " + excp.Message);
        throw;
    }
}

//public Dictionary<Guid, object> Load(XmlDocument dom)
//{
//    return SIPAssetXMLPersistor<SIPRegistrarBinding>.LoadAssetsFromXMLRecordSet(dom);
//}

#endif

/// <summary>
/// Refreshes a binding when the remote network information of the remote or proxy end point has changed.
/// Resets the removal reason and restarts the expiry period from now.
/// </summary>
public void RefreshBinding(int expiry, SIPEndPoint remoteSIPEndPoint, SIPEndPoint proxySIPEndPoint, SIPEndPoint registrarSIPEndPoint, bool dontMangle)
{
    LastUpdate = DateTimeOffset.UtcNow;
    RemoteSIPEndPoint = remoteSIPEndPoint;
    m_proxySIPEndPoint = proxySIPEndPoint;
    m_registrarSIPEndPoint = registrarSIPEndPoint;
    RemovalReason = SIPBindingRemovalReason.Unknown;
    m_expiry = expiry;

    //if (SIPTransport.IsPrivateAddress(sipRequest.Header.Contact[0].ContactURI.Host) && m_mangleUACContact)
    // Same NAT work-around as the constructor: rewrite a dotted-quad contact host to the socket the request arrived from.
    if (!dontMangle && Regex.Match(m_mangledContactURI.Host, @"(\d+\.){3}\d+").Success)
    {
        // The Contact URI Host is used by registrars as the contact socket for the user so it needs to be changed to reflect the socket
        // the initial request was received on in order to work around NAT. It's no good just relying on private addresses as a lot of User Agents
        // determine their public IP but NOT their public port so they send the wrong port in the Contact header.
        //logger.Debug("Mangling contact header from " + m_mangledContactURI.Host + " to " + IPSocket.GetSocketString(uacRecvdEndPoint) + ".");
        m_mangledContactURI.Host = remoteSIPEndPoint.GetIPEndPoint().ToString();
    }
}

// Renders the original (unmangled) contact URI as a Contact header value with the remaining expiry seconds.
public string ToContactString()
{
    int secondsRemaining = Convert.ToInt32(ExpiryTime.Subtract(DateTime.UtcNow).TotalSeconds % Int32.MaxValue);
    return "<" + m_contactURI.ToString() + ">;" + SIPContactHeader.EXPIRES_PARAMETER_KEY + "=" + secondsRemaining;
}

// Renders the mangled contact URI as a Contact header value with the remaining expiry seconds.
public string ToMangledContactString()
{
    int secondsRemaining = Convert.ToInt32(ExpiryTime.Subtract(DateTime.UtcNow).TotalSeconds % Int32.MaxValue);
    return "<" + m_mangledContactURI.ToString() + ">;" + SIPContactHeader.EXPIRES_PARAMETER_KEY + "=" + secondsRemaining;
}

// Serialises the binding as an XML fragment wrapped in its parent element.
public string ToXML()
{
    string providerXML = " <" + XML_ELEMENT_NAME + ">" + m_newLine +
        ToXMLNoParent() +
        " </" + XML_ELEMENT_NAME + ">" + m_newLine;

    return providerXML;
}

// Serialises the binding's fields as XML child elements without the enclosing parent element.
public string ToXMLNoParent()
{
    string contactURIStr = (m_contactURI != null) ? m_contactURI.ToString() : null;
    string mangledContactURIStr = (m_mangledContactURI != null) ? m_mangledContactURI.ToString() : null;

    string registrarBindingXML = " <id>" + Id + "</id>" + m_newLine +
        " <sipaccountid>" + SIPAccountId + "</sipaccountid>" + m_newLine +
        " <sipaccountname>" + SIPAccountName + "</sipaccountname>" + m_newLine +
        " <owner>" + Owner + "</owner>" + m_newLine +
        " <adminmemberid>" + AdminMemberId + "</adminmemberid>" + m_newLine +
        " <contacturi>" + contactURIStr + "</contacturi>" + m_newLine +
        " <mangledcontacturi>" + mangledContactURIStr + "</mangledcontacturi>" + m_newLine +
        " <expiry>" + Expiry + "</expiry>" + m_newLine +
        " <useragent>" + SafeXML.MakeSafeXML(UserAgent) + "</useragent>" + m_newLine +
        " <remotesipsocket>" + RemoteSIPSocket + "</remotesipsocket>" + m_newLine +
        " <proxysipsocket>" + ProxySIPSocket + "</proxysipsocket>" + m_newLine +
        " <registrarsipsocket>" + RegistrarSIPSocket + "</registrarsipsocket>" + m_newLine +
        " <lastupdate>" + m_lastUpdate.ToString("o") + "</lastupdate>" + m_newLine +
        " <expirytime>" + ExpiryTime.ToString("o") + "</expirytime>" + m_newLine;

    return registrarBindingXML;
}

public string GetXMLElementName()
{
    return XML_ELEMENT_NAME;
}

public string GetXMLDocumentElementName()
{
    return XML_DOCUMENT_ELEMENT_NAME;
}

// Raises PropertyChanged for data-binding consumers, if anyone is subscribed.
private void NotifyPropertyChanged(string propertyName)
{
    if (PropertyChanged != null)
    {
        PropertyChanged(this, new PropertyChangedEventArgs(propertyName));
    }
}
}
}
using System;
using Marten.Patching;
using Marten.Schema;
using Marten.Services;
using Marten.Storage;
using NSubstitute;
using Shouldly;
using Xunit;

namespace Marten.Testing.Patching
{
    /// <summary>
    /// Unit tests asserting that each PatchExpression operation (set, increment, append,
    /// insert, rename, remove, delete, duplicate) produces the expected patch dictionary
    /// entries (type/path/value/etc.) that the server-side patching function consumes.
    /// </summary>
    public class PatchExpressionTests
    {
        private readonly PatchExpression<Target> _expression;
        private readonly ITenant _schema = Substitute.For<ITenant>();

        public PatchExpressionTests()
        {
            // Stub the document mapping chain so the expression can resolve Target without a real schema.
            var queryable = Substitute.For<IQueryableDocument>();
            queryable.DocumentType.Returns(typeof(Target));

            var mapping = Substitute.For<IDocumentMapping>();
            mapping.ToQueryableDocument().Returns(queryable);

            _schema.MappingFor(typeof(Target)).Returns(mapping);

            var store = TestingDocumentStore.Basic();

            _expression = new PatchExpression<Target>(null, _schema, new UnitOfWork(store, store.Tenancy.Default), new JsonNetSerializer());
        }

        // --- "set" patches: path is the (possibly nested) member path, value is the new value ---

        [Fact]
        public void builds_patch_for_set_name()
        {
            _expression.Set("Float", 7.7f);

            _expression.Patch["path"].ShouldBe("Float");
            _expression.Patch["type"].ShouldBe("set");
            _expression.Patch["value"].ShouldBe(7.7f);
        }

        [Fact]
        public void builds_patch_for_set_name_deep()
        {
            _expression.Set("Double", x => x.Inner, 99.9d);

            _expression.Patch["path"].ShouldBe("Inner.Double");
            _expression.Patch["type"].ShouldBe("set");
            _expression.Patch["value"].ShouldBe(99.9d);
        }

        [Fact]
        public void builds_patch_for_set_shallow()
        {
            _expression.Set(x => x.Number, 5);

            _expression.Patch["path"].ShouldBe("Number");
            _expression.Patch["type"].ShouldBe("set");
            _expression.Patch["value"].ShouldBe(5);
        }

        [Fact]
        public void builds_patch_for_set_2_deep()
        {
            _expression.Set(x => x.Inner.Number, 5);

            _expression.Patch["path"].ShouldBe("Inner.Number");
            _expression.Patch["type"].ShouldBe("set");
            _expression.Patch["value"].ShouldBe(5);
        }

        [Fact]
        public void builds_patch_for_set_3_deep()
        {
            _expression.Set(x => x.Inner.Inner.Number, 5);

            _expression.Patch["path"].ShouldBe("Inner.Inner.Number");
            _expression.Patch["type"].ShouldBe("set");
            _expression.Patch["value"].ShouldBe(5);
        }

        // --- "increment" patches: integral members use type "increment", floating point use "increment_float" ---

        [Fact]
        public void increment_int_with_default()
        {
            _expression.Increment(x => x.Number);

            _expression.Patch["path"].ShouldBe("Number");
            _expression.Patch["type"].ShouldBe("increment");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_int_with_default_deep()
        {
            _expression.Increment(x => x.Inner.Inner.Number);

            _expression.Patch["path"].ShouldBe("Inner.Inner.Number");
            _expression.Patch["type"].ShouldBe("increment");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_int_with_explicit_interval()
        {
            _expression.Increment(x => x.Number, 5);

            _expression.Patch["path"].ShouldBe("Number");
            _expression.Patch["type"].ShouldBe("increment");
            _expression.Patch["increment"].ShouldBe(5);
        }

        [Fact]
        public void increment_long_with_default()
        {
            _expression.Increment(x => x.Long);

            _expression.Patch["path"].ShouldBe("Long");
            _expression.Patch["type"].ShouldBe("increment");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_long_with_default_deep()
        {
            _expression.Increment(x => x.Inner.Inner.Long);

            _expression.Patch["path"].ShouldBe("Inner.Inner.Long");
            _expression.Patch["type"].ShouldBe("increment");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_long_with_explicit_interval()
        {
            _expression.Increment(x => x.Long, 5);

            _expression.Patch["path"].ShouldBe("Long");
            _expression.Patch["type"].ShouldBe("increment");
            _expression.Patch["increment"].ShouldBe(5);
        }

        [Fact]
        public void increment_double_with_default()
        {
            _expression.Increment(x => x.Double);

            _expression.Patch["path"].ShouldBe("Double");
            _expression.Patch["type"].ShouldBe("increment_float");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_double_with_default_deep()
        {
            _expression.Increment(x => x.Inner.Inner.Double);

            _expression.Patch["path"].ShouldBe("Inner.Inner.Double");
            _expression.Patch["type"].ShouldBe("increment_float");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_double_with_explicit_interval()
        {
            _expression.Increment(x => x.Double, 5);

            _expression.Patch["path"].ShouldBe("Double");
            _expression.Patch["type"].ShouldBe("increment_float");
            _expression.Patch["increment"].ShouldBe(5);
        }

        [Fact]
        public void increment_float_with_default()
        {
            _expression.Increment(x => x.Float);

            _expression.Patch["path"].ShouldBe("Float");
            _expression.Patch["type"].ShouldBe("increment_float");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_float_with_default_deep()
        {
            _expression.Increment(x => x.Inner.Inner.Float);

            _expression.Patch["path"].ShouldBe("Inner.Inner.Float");
            _expression.Patch["type"].ShouldBe("increment_float");
            _expression.Patch["increment"].ShouldBe(1);
        }

        [Fact]
        public void increment_float_with_explicit_interval()
        {
            _expression.Increment(x => x.Float, 5);

            _expression.Patch["path"].ShouldBe("Float");
            _expression.Patch["type"].ShouldBe("increment_float");
            _expression.Patch["increment"].ShouldBe(5);
        }

        // --- collection patches: append / append_if_not_exists ---

        [Fact]
        public void append_shallow()
        {
            _expression.Append(x => x.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("NumberArray");
            _expression.Patch["type"].ShouldBe("append");
            _expression.Patch["value"].ShouldBe(5);
        }

        [Fact]
        public void append_if_not_exists_shallow()
        {
            _expression.AppendIfNotExists(x => x.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("NumberArray");
            _expression.Patch["type"].ShouldBe("append_if_not_exists");
            _expression.Patch["value"].ShouldBe(5);
        }

        [Fact]
        public void append_deep()
        {
            _expression.Append(x => x.Inner.Inner.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("Inner.Inner.NumberArray");
            _expression.Patch["type"].ShouldBe("append");
            _expression.Patch["value"].ShouldBe(5);
        }

        [Fact]
        public void append_if_not_exists_deep()
        {
            _expression.AppendIfNotExists(x => x.Inner.Inner.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("Inner.Inner.NumberArray");
            _expression.Patch["type"].ShouldBe("append_if_not_exists");
            _expression.Patch["value"].ShouldBe(5);
        }

        // --- collection patches: insert / insert_if_not_exists (default index is 0) ---

        [Fact]
        public void insert_shallow()
        {
            _expression.Insert(x => x.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("NumberArray");
            _expression.Patch["type"].ShouldBe("insert");
            _expression.Patch["value"].ShouldBe(5);
            _expression.Patch["index"].ShouldBe(0);
        }

        [Fact]
        public void insert_if_not_exists_shallow()
        {
            _expression.InsertIfNotExists(x => x.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("NumberArray");
            _expression.Patch["type"].ShouldBe("insert_if_not_exists");
            _expression.Patch["value"].ShouldBe(5);
            _expression.Patch["index"].ShouldBe(0);
        }

        [Fact]
        public void insert_deep()
        {
            _expression.Insert(x => x.Inner.Inner.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("Inner.Inner.NumberArray");
            _expression.Patch["type"].ShouldBe("insert");
            _expression.Patch["value"].ShouldBe(5);
            _expression.Patch["index"].ShouldBe(0);
        }

        [Fact]
        public void insert_if_not_exists_deep()
        {
            _expression.InsertIfNotExists(x => x.Inner.Inner.NumberArray, 5);

            _expression.Patch["path"].ShouldBe("Inner.Inner.NumberArray");
            _expression.Patch["type"].ShouldBe("insert_if_not_exists");
            _expression.Patch["value"].ShouldBe(5);
            _expression.Patch["index"].ShouldBe(0);
        }

        [Fact]
        public void insert_at_a_nonzero_index()
        {
            _expression.Insert(x => x.NumberArray, 5, 2);

            _expression.Patch["path"].ShouldBe("NumberArray");
            _expression.Patch["type"].ShouldBe("insert");
            _expression.Patch["value"].ShouldBe(5);
            _expression.Patch["index"].ShouldBe(2);
        }

        // --- "rename" patches: path is the OLD name at the member's depth, "to" is the new leaf name ---

        [Fact]
        public void rename_shallow()
        {
            _expression.Rename("Old", x => x.Double);

            _expression.Patch["type"].ShouldBe("rename");
            _expression.Patch["to"].ShouldBe("Double");
            _expression.Patch["path"].ShouldBe("Old");
        }

        [Fact]
        public void rename_2_deep()
        {
            _expression.Rename("Old", x => x.Inner.Double);

            _expression.Patch["type"].ShouldBe("rename");
            _expression.Patch["to"].ShouldBe("Double");
            _expression.Patch["path"].ShouldBe("Inner.Old");
        }

        [Fact]
        public void rename_3_deep()
        {
            _expression.Rename("Old", x => x.Inner.Inner.Double);

            _expression.Patch["type"].ShouldBe("rename");
            _expression.Patch["to"].ShouldBe("Double");
            _expression.Patch["path"].ShouldBe("Inner.Inner.Old");
        }

        // --- "remove" patches: action selects first-match-only vs all-matches removal ---

        [Fact]
        public void remove_first()
        {
            _expression.Remove(x => x.NumberArray, 5);

            _expression.Patch["type"].ShouldBe("remove");
            _expression.Patch["value"].ShouldBe(5);
            _expression.Patch["path"].ShouldBe("NumberArray");
            _expression.Patch["action"].ShouldBe((int)RemoveAction.RemoveFirst);
        }

        [Fact]
        public void remove_all()
        {
            _expression.Remove(x => x.NumberArray, 5, RemoveAction.RemoveAll);

            _expression.Patch["type"].ShouldBe("remove");
            _expression.Patch["value"].ShouldBe(5);
            _expression.Patch["path"].ShouldBe("NumberArray");
            _expression.Patch["action"].ShouldBe((int) RemoveAction.RemoveAll);
        }

        // --- "delete" patches ---

        [Fact]
        public void delete_name()
        {
            _expression.Delete("Foo");

            _expression.Patch["type"].ShouldBe("delete");
            _expression.Patch["path"].ShouldBe("Foo");
        }

        [Fact]
        public void delete_nested_name()
        {
            _expression.Delete("Foo", x => x.Inner.Inner);

            _expression.Patch["type"].ShouldBe("delete");
            _expression.Patch["path"].ShouldBe("Inner.Inner.Foo");
        }

        [Fact]
        public void delete_nested_property()
        {
            _expression.Delete(x => x.NumberArray);

            _expression.Patch["type"].ShouldBe("delete");
            _expression.Patch["path"].ShouldBe("NumberArray");
        }

        // --- "duplicate" patches: "targets" carries one or more destination paths ---

        [Fact]
        public void duplicate_property()
        {
            _expression.Duplicate(x => x.String, x => x.AnotherString);

            _expression.Patch["type"].ShouldBe("duplicate");
            _expression.Patch["path"].ShouldBe("String");
            ((string[]) _expression.Patch["targets"]).ShouldHaveTheSameElementsAs("AnotherString");
        }

        [Fact]
        public void duplicate_property_to_multiple_targets()
        {
            _expression.Duplicate(x => x.String, x => x.AnotherString, x => x.Inner.String, x => x.Inner.AnotherString);

            _expression.Patch["type"].ShouldBe("duplicate");
            _expression.Patch["path"].ShouldBe("String");
            ((string[])_expression.Patch["targets"]).ShouldHaveTheSameElementsAs("AnotherString", "Inner.String", "Inner.AnotherString");
        }

        [Fact]
        public void duplicate_property_no_target()
        {
            // Calling Duplicate with no destinations is invalid and must throw.
            Assert.Throws<ArgumentException>(() => _expression.Duplicate(x => x.String))
                .Message.ShouldContain("At least one destination must be given");
        }
    }
}
// Amplify Shader Editor - Visual Shader Editing Tool
// Copyright (c) Amplify Creations, Lda <info@amplify.pt>

using UnityEngine;
using UnityEditor;
using System;

namespace AmplifyShaderEditor
{
    /// <summary>
    /// Graph node that reads the value of a variable previously registered by a
    /// RegisterLocalVarNode. It mirrors the registered node's output type and preview,
    /// and delegates shader code generation to the registered node.
    /// </summary>
    [Serializable]
    [NodeAttributes( "Get Local Var", "Misc", "Use a registered local variable" )]
    public class GetLocalVarNode : ParentNode
    {
        // Index into the UI list of registered local variables (-1 = nothing selected).
        [SerializeField]
        private int m_referenceId = -1;

        // Last seen width of the referenced node; used to detect size changes.
        [SerializeField]
        private float m_referenceWidth = -1;

        // Unique id of the referenced RegisterLocalVarNode (persisted from shader version > 15).
        [SerializeField]
        private int m_nodeId = -1;

        [SerializeField]
        private RegisterLocalVarNode m_currentSelected = null;

        // Set after deserialization so Draw() re-resolves the referenced node once.
        private bool m_forceNodeUpdate = false;

        // Cached Shader.PropertyToID result for the preview material's "_A" texture slot.
        private int m_cachedPropertyId = -1;

        protected override void CommonInit( int uniqueId )
        {
            base.CommonInit( uniqueId );
            AddOutputPort( WirePortDataType.OBJECT, Constants.EmptyPortValue );
            m_textLabelWidth = 80;
            m_autoWrapProperties = true;
            m_previewShaderGUID = "f21a6e44c7d7b8543afacd19751d24c6";
        }

        // Feeds the referenced node's preview texture into this node's preview material.
        public override void SetPreviewInputs()
        {
            base.SetPreviewInputs();

            if( m_currentSelected != null )
            {
                m_drawPreviewAsSphere = m_currentSelected.SpherePreview;
                CheckSpherePreview();
                if ( m_cachedPropertyId == -1 )
                    m_cachedPropertyId = Shader.PropertyToID( "_A" );

                PreviewMaterial.SetTexture( m_cachedPropertyId, m_currentSelected.OutputPorts[ 0 ].OutputPreviewTexture );
            }
        }

        // Callback fired by the referenced node when its preview settings change.
        private void OnRegisterUpdate()
        {
            //MarkForPreviewUpdate();
            m_drawPreviewAsSphere = m_currentSelected.SpherePreview;
            CheckSpherePreview();
        }

        public override void DrawProperties()
        {
            base.DrawProperties();
            EditorGUI.BeginChangeCheck();
            m_referenceId = EditorGUILayoutPopup( Constants.AvailableReferenceStr, m_referenceId, UIUtils.LocalVarNodeArr() );
            if ( EditorGUI.EndChangeCheck() )
            {
                // Detach from the previously selected register node before re-resolving.
                if ( m_currentSelected != null )
                    m_currentSelected.OnPropagatePreviewChange -= OnRegisterUpdate;

                m_currentSelected = UIUtils.GetLocalVarNode( m_referenceId );
                if ( m_currentSelected != null )
                {
                    m_nodeId = m_currentSelected.UniqueId;
                    m_outputPorts[ 0 ].ChangeType( m_currentSelected.OutputPorts[ 0 ].DataType, false );
                    m_drawPreviewAsSphere = m_currentSelected.SpherePreview;
                    CheckSpherePreview();
                    // Unsubscribe before subscribing to guarantee a single registration.
                    m_currentSelected.OnPropagatePreviewChange -= OnRegisterUpdate;
                    m_currentSelected.OnPropagatePreviewChange += OnRegisterUpdate;
                }

                m_sizeIsDirty = true;
                m_isDirty = true;
            }
        }

        public override void Destroy()
        {
            base.Destroy();
            // Drop the event subscription so the destroyed node cannot leak via the register node's event.
            if( m_currentSelected != null )
                m_currentSelected.OnPropagatePreviewChange -= OnRegisterUpdate;
            m_currentSelected = null;
        }

        public override void Draw( DrawInfo drawInfo )
        {
            base.Draw( drawInfo );
            if ( m_forceNodeUpdate )
            {
                // One-shot re-resolution of the referenced node after ReadFromString().
                m_forceNodeUpdate = false;
                if ( UIUtils.CurrentShaderVersion() > 15 )
                {
                    // Newer shader versions persist the node's unique id.
                    m_currentSelected = UIUtils.GetNode( m_nodeId ) as RegisterLocalVarNode;
                    m_referenceId = UIUtils.GetLocalVarNodeRegisterId( m_nodeId );
                    if( m_currentSelected != null )
                    {
                        m_currentSelected.OnPropagatePreviewChange -= OnRegisterUpdate;
                        m_currentSelected.OnPropagatePreviewChange += OnRegisterUpdate;
                    }
                }
                else
                {
                    // Legacy shader versions persisted the register list index instead.
                    m_currentSelected = UIUtils.GetLocalVarNode( m_referenceId );
                    if ( m_currentSelected != null )
                    {
                        m_currentSelected.OnPropagatePreviewChange -= OnRegisterUpdate;
                        m_currentSelected.OnPropagatePreviewChange += OnRegisterUpdate;
                        m_nodeId = m_currentSelected.UniqueId;
                    }
                }

                if ( m_currentSelected != null )
                {
                    m_outputPorts[ 0 ].ChangeType( m_currentSelected.OutputPorts[ 0 ].DataType, false );
                }
            }

            UpdateLocalVar();
        }

        // Keeps this node's output type, label and size in sync with the referenced register node;
        // resets the selection if the referenced node no longer exists.
        void UpdateLocalVar()
        {
            if ( m_referenceId > -1 )
            {
                m_currentSelected = UIUtils.GetLocalVarNode( m_referenceId );
                if ( m_currentSelected != null )
                {
                    if ( m_currentSelected.OutputPorts[ 0 ].DataType != m_outputPorts[ 0 ].DataType )
                    {
                        m_outputPorts[ 0 ].ChangeType( m_currentSelected.OutputPorts[ 0 ].DataType, false );
                    }
                    m_additionalContent.text = string.Format( Constants.PropertyValueLabel, m_currentSelected.DataToArray );
                    if ( m_referenceWidth != m_currentSelected.Position.width )
                    {
                        m_referenceWidth = m_currentSelected.Position.width;
                        m_sizeIsDirty = true;
                    }
                    m_currentSelected.OnPropagatePreviewChange -= OnRegisterUpdate;
                    m_currentSelected.OnPropagatePreviewChange += OnRegisterUpdate;
                }
                else
                {
                    // NOTE(review): m_currentSelected was just assigned null above, so this guard/unsubscribe is dead code.
                    if ( m_currentSelected != null )
                        m_currentSelected.OnPropagatePreviewChange -= OnRegisterUpdate;
                    m_referenceId = -1;
                    m_referenceWidth = -1;
                    m_additionalContent.text = string.Empty;
                }
            }
        }

        // Delegates code generation to the referenced node; emits "0" (with an error) when unresolved.
        // NOTE(review): "inexistant" in the log message is a typo for "nonexistent" (runtime string, left untouched here).
        public override string GenerateShaderForOutput( int outputId, ref MasterNodeDataCollector dataCollector, bool ignoreLocalvar )
        {
            if ( m_currentSelected != null )
            {
                return m_currentSelected.GenerateShaderForOutput( outputId, ref dataCollector, ignoreLocalvar );
            }
            else
            {
                Debug.LogError( "Attempting to access inexistant local variable" );
                return "0";
            }
        }

        public override void PropagateNodeData( NodeData nodeData )
        {
            base.PropagateNodeData( nodeData );
            if ( m_currentSelected != null )
            {
                m_currentSelected.PropagateNodeData( nodeData );
            }
        }

        // Reads either the referenced node id (version > 15) or the legacy register index,
        // then defers actual resolution to the next Draw() via m_forceNodeUpdate.
        public override void ReadFromString( ref string[] nodeParams )
        {
            base.ReadFromString( ref nodeParams );
            if ( UIUtils.CurrentShaderVersion() > 15 )
            {
                m_nodeId = Convert.ToInt32( GetCurrentParam( ref nodeParams ) );
            }
            else
            {
                m_referenceId = Convert.ToInt32( GetCurrentParam( ref nodeParams ) );
            }

            m_forceNodeUpdate = true;
        }

        // Persists the referenced node's unique id, or -1 when nothing is selected.
        public override void WriteToString( ref string nodeInfo, ref string connectionsInfo )
        {
            base.WriteToString( ref nodeInfo, ref connectionsInfo );
            IOUtils.AddFieldValueToString( ref nodeInfo, ( m_currentSelected != null ? m_currentSelected.UniqueId : -1 ) );
        }

        // Double-clicking jumps the canvas focus to the referenced register node.
        public override void OnNodeDoubleClicked( Vector2 currentMousePos2D )
        {
            if ( m_currentSelected != null )
            {
                UIUtils.FocusOnNode( m_currentSelected, 0, true );
            }
        }
    }
}
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.

using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using OpenLiveWriter.Controls;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.Localization;
using OpenLiveWriter.Localization.Bidi;
using OpenLiveWriter.CoreServices.Layout;

namespace OpenLiveWriter.PostEditor
{
    /// <summary>
    /// Dialog shown when image files cannot be published because the weblog does not
    /// support image publishing. Lists the affected files and asks (Yes/No) whether the
    /// user wants to configure an FTP account for image publishing instead.
    /// </summary>
    public class FileUploadFailedForm : ApplicationDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.Container components = null;
        private System.Windows.Forms.Button buttonNo;
        private System.Windows.Forms.Button buttonYes;
        private System.Windows.Forms.ListView listViewFiles;
        private System.Windows.Forms.ColumnHeader columnHeaderFile;
        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.PictureBox pictureBox1;
        private System.Windows.Forms.Label labelFTP;
        private System.Windows.Forms.Label labelFTP2;

        /// <summary>
        /// Shows the dialog modally for the given list of failed files and returns the user's
        /// choice (DialogResult.Yes = configure FTP now, DialogResult.No = skip).
        /// </summary>
        public static DialogResult Show(IWin32Window owner, string[] files)
        {
            using (FileUploadFailedForm form = new FileUploadFailedForm(files))
            {
                return form.ShowDialog(owner);
            }
        }

        protected override void OnLoad(EventArgs e)
        {
            base.OnLoad(e);

            // Re-flow the localized text vertically and grow the form to fit before first paint.
            using (new AutoGrow(this, AnchorStyles.Bottom, false))
            {
                LayoutHelper.NaturalizeHeightAndDistribute(8, label1, listViewFiles, labelFTP, labelFTP2, buttonYes);
                buttonNo.Top = buttonYes.Top;
            }
        }

        private FileUploadFailedForm(string[] images)
        {
            //
            // Required for Windows Form Designer support
            //
            InitializeComponent();

            // Replace the designer's English placeholder text with localized resources.
            this.buttonYes.Text = Res.Get(StringId.YesButton);
            this.buttonNo.Text = Res.Get(StringId.NoButton);
            this.label1.Text = Res.Get(StringId.FileUploadFailedCaption);
            this.labelFTP.Text = Res.Get(StringId.FileUploadFailedFTP);
            this.labelFTP2.Text = Res.Get(StringId.FileUploadFailedFTP2);
            this.Text = Res.Get(StringId.FileUploadFailedTitle);

            // Populate the list of files that could not be published.
            listViewFiles.BeginUpdate();
            listViewFiles.Items.Clear();
            foreach (string image in images)
                listViewFiles.Items.Add(image);
            listViewFiles.EndUpdate();
        }

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(FileUploadFailedForm));
            this.buttonYes = new System.Windows.Forms.Button();
            this.buttonNo = new System.Windows.Forms.Button();
            this.listViewFiles = new System.Windows.Forms.ListView();
            this.columnHeaderFile = new System.Windows.Forms.ColumnHeader();
            this.label1 = new System.Windows.Forms.Label();
            this.pictureBox1 = new System.Windows.Forms.PictureBox();
            this.labelFTP = new System.Windows.Forms.Label();
            this.labelFTP2 = new System.Windows.Forms.Label();
            this.SuspendLayout();
            //
            // buttonYes
            //
            this.buttonYes.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
            this.buttonYes.DialogResult = System.Windows.Forms.DialogResult.Yes;
            this.buttonYes.FlatStyle = System.Windows.Forms.FlatStyle.System;
            this.buttonYes.Location = new System.Drawing.Point(192, 224);
            this.buttonYes.Name = "buttonYes";
            this.buttonYes.TabIndex = 0;
            this.buttonYes.Text = "&Yes";
            //
            // buttonNo
            //
            this.buttonNo.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
            this.buttonNo.DialogResult = System.Windows.Forms.DialogResult.No;
            this.buttonNo.FlatStyle = System.Windows.Forms.FlatStyle.System;
            this.buttonNo.Location = new System.Drawing.Point(272, 224);
            this.buttonNo.Name = "buttonNo";
            this.buttonNo.TabIndex = 1;
            this.buttonNo.Text = "&No";
            //
            // listViewFiles
            //
            this.listViewFiles.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
                                                                                            this.columnHeaderFile});
            this.listViewFiles.HeaderStyle = System.Windows.Forms.ColumnHeaderStyle.None;
            this.listViewFiles.Location = new System.Drawing.Point(56, 40);
            this.listViewFiles.Name = "listViewFiles";
            this.listViewFiles.RightToLeftLayout = BidiHelper.IsRightToLeft;
            this.listViewFiles.Size = new System.Drawing.Size(280, 96);
            this.listViewFiles.TabIndex = 5;
            this.listViewFiles.View = System.Windows.Forms.View.List;
            //
            // columnHeaderFile
            //
            this.columnHeaderFile.Text = "";
            this.columnHeaderFile.Width = 328;
            //
            // label1
            //
            this.label1.FlatStyle = System.Windows.Forms.FlatStyle.System;
            this.label1.Location = new System.Drawing.Point(56, 8);
            this.label1.Name = "label1";
            this.label1.Size = new System.Drawing.Size(288, 28);
            this.label1.TabIndex = 4;
            this.label1.Text = "The following images cannot be published because the weblog does not support imag" +
                "e publishing:";
            //
            // pictureBox1
            //
            this.pictureBox1.Image = ((System.Drawing.Image)(resources.GetObject("pictureBox1.Image")));
            this.pictureBox1.Location = new System.Drawing.Point(8, 8);
            this.pictureBox1.Name = "pictureBox1";
            this.pictureBox1.Size = new System.Drawing.Size(44, 40);
            this.pictureBox1.TabIndex = 3;
            this.pictureBox1.TabStop = false;
            //
            // labelFTP
            //
            this.labelFTP.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
            this.labelFTP.FlatStyle = System.Windows.Forms.FlatStyle.System;
            this.labelFTP.Location = new System.Drawing.Point(56, 144);
            this.labelFTP.Name = "labelFTP";
            this.labelFTP.Size = new System.Drawing.Size(288, 32);
            this.labelFTP.TabIndex = 7;
            this.labelFTP.Text = "Open Live Writer can be configured to publish images to an FTP account.";
            //
            // labelFTP2
            //
            this.labelFTP2.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
            this.labelFTP2.FlatStyle = System.Windows.Forms.FlatStyle.System;
            this.labelFTP2.Location = new System.Drawing.Point(56, 176);
            this.labelFTP2.Name = "labelFTP2";
            this.labelFTP2.Size = new System.Drawing.Size(296, 32);
            this.labelFTP2.TabIndex = 8;
            this.labelFTP2.Text = "Do you want to configure an FTP account for image publishing now?";
            //
            // FileUploadFailedForm
            //
            this.AcceptButton = this.buttonYes;
            this.AutoScaleBaseSize = new System.Drawing.Size(5, 14);
            this.CancelButton = this.buttonNo;
            this.ClientSize = new System.Drawing.Size(362, 256);
            this.Controls.Add(this.labelFTP2);
            this.Controls.Add(this.labelFTP);
            this.Controls.Add(this.listViewFiles);
            this.Controls.Add(this.label1);
            this.Controls.Add(this.pictureBox1);
            this.Controls.Add(this.buttonNo);
            this.Controls.Add(this.buttonYes);
            this.Location = new System.Drawing.Point(0, 0);
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "FileUploadFailedForm";
            this.Text = "Image Upload Not Supported By Weblog";
            this.ResumeLayout(false);

        }
        #endregion
    }
}
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): this file is GAPIC-generated; fix issues in the generator/template,
// not here, or the change will be lost on the next regeneration.

using Google.Api.Gax;
using Google.Api.Gax.Grpc;
using Google.LongRunning;
using Google.Protobuf.WellKnownTypes;
using Grpc.Core;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Threading;
using System.Threading.Tasks;

namespace Google.Cloud.VideoIntelligence.V1Beta1
{
    /// <summary>
    /// Settings for a <see cref="VideoIntelligenceServiceClient"/>.
    /// </summary>
    public sealed partial class VideoIntelligenceServiceSettings : ServiceSettingsBase
    {
        /// <summary>
        /// Get a new instance of the default <see cref="VideoIntelligenceServiceSettings"/>.
        /// </summary>
        /// <returns>
        /// A new instance of the default <see cref="VideoIntelligenceServiceSettings"/>.
        /// </returns>
        public static VideoIntelligenceServiceSettings GetDefault() => new VideoIntelligenceServiceSettings();

        /// <summary>
        /// Constructs a new <see cref="VideoIntelligenceServiceSettings"/> object with default settings.
        /// </summary>
        public VideoIntelligenceServiceSettings() { }

        // Copy constructor backing Clone(); copies each per-RPC setting so the clone
        // can be mutated independently of the original.
        private VideoIntelligenceServiceSettings(VideoIntelligenceServiceSettings existing) : base(existing)
        {
            GaxPreconditions.CheckNotNull(existing, nameof(existing));
            AnnotateVideoSettings = existing.AnnotateVideoSettings;
            LongRunningOperationsSettings = existing.LongRunningOperationsSettings?.Clone();
            OnCopy(existing);
        }

        // Extension point allowing hand-written partial classes to copy extra state.
        partial void OnCopy(VideoIntelligenceServiceSettings existing);

        /// <summary>
        /// The filter specifying which RPC <see cref="StatusCode"/>s are eligible for retry
        /// for "Idempotent" <see cref="VideoIntelligenceServiceClient"/> RPC methods.
        /// </summary>
        /// <remarks>
        /// The eligible RPC <see cref="StatusCode"/>s for retry for "Idempotent" RPC methods are:
        /// <list type="bullet">
        /// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
        /// <item><description><see cref="StatusCode.Unavailable"/></description></item>
        /// </list>
        /// </remarks>
        public static Predicate<RpcException> IdempotentRetryFilter { get; } =
            RetrySettings.FilterForStatusCodes(StatusCode.DeadlineExceeded, StatusCode.Unavailable);

        /// <summary>
        /// The filter specifying which RPC <see cref="StatusCode"/>s are eligible for retry
        /// for "NonIdempotent" <see cref="VideoIntelligenceServiceClient"/> RPC methods.
        /// </summary>
        /// <remarks>
        /// The eligible RPC <see cref="StatusCode"/>s for retry for "NonIdempotent" RPC methods are:
        /// <list type="bullet">
        /// <item><description><see cref="StatusCode.Unavailable"/></description></item>
        /// </list>
        /// </remarks>
        public static Predicate<RpcException> NonIdempotentRetryFilter { get; } =
            RetrySettings.FilterForStatusCodes(StatusCode.Unavailable);

        /// <summary>
        /// "Default" retry backoff for <see cref="VideoIntelligenceServiceClient"/> RPC methods.
        /// </summary>
        /// <returns>
        /// The "Default" retry backoff for <see cref="VideoIntelligenceServiceClient"/> RPC methods.
        /// </returns>
        /// <remarks>
        /// The "Default" retry backoff for <see cref="VideoIntelligenceServiceClient"/> RPC methods is defined as:
        /// <list type="bullet">
        /// <item><description>Initial delay: 1000 milliseconds</description></item>
        /// <item><description>Maximum delay: 120000 milliseconds</description></item>
        /// <item><description>Delay multiplier: 2.5</description></item>
        /// </list>
        /// </remarks>
        public static BackoffSettings GetDefaultRetryBackoff() => new BackoffSettings(
            delay: TimeSpan.FromMilliseconds(1000),
            maxDelay: TimeSpan.FromMilliseconds(120000),
            delayMultiplier: 2.5
        );

        /// <summary>
        /// "Default" timeout backoff for <see cref="VideoIntelligenceServiceClient"/> RPC methods.
        /// </summary>
        /// <returns>
        /// The "Default" timeout backoff for <see cref="VideoIntelligenceServiceClient"/> RPC methods.
        /// </returns>
        /// <remarks>
        /// The "Default" timeout backoff for <see cref="VideoIntelligenceServiceClient"/> RPC methods is defined as:
        /// <list type="bullet">
        /// <item><description>Initial timeout: 120000 milliseconds</description></item>
        /// <item><description>Timeout multiplier: 1.0</description></item>
        /// <item><description>Maximum timeout: 120000 milliseconds</description></item>
        /// </list>
        /// </remarks>
        public static BackoffSettings GetDefaultTimeoutBackoff() => new BackoffSettings(
            delay: TimeSpan.FromMilliseconds(120000),
            maxDelay: TimeSpan.FromMilliseconds(120000),
            delayMultiplier: 1.0
        );

        /// <summary>
        /// <see cref="CallSettings"/> for synchronous and asynchronous calls to
        /// <c>VideoIntelligenceServiceClient.AnnotateVideo</c> and <c>VideoIntelligenceServiceClient.AnnotateVideoAsync</c>.
        /// </summary>
        /// <remarks>
        /// The default <c>VideoIntelligenceServiceClient.AnnotateVideo</c> and
        /// <c>VideoIntelligenceServiceClient.AnnotateVideoAsync</c> <see cref="RetrySettings"/> are:
        /// <list type="bullet">
        /// <item><description>Initial retry delay: 1000 milliseconds</description></item>
        /// <item><description>Retry delay multiplier: 2.5</description></item>
        /// <item><description>Retry maximum delay: 120000 milliseconds</description></item>
        /// <item><description>Initial timeout: 120000 milliseconds</description></item>
        /// <item><description>Timeout multiplier: 1.0</description></item>
        /// <item><description>Timeout maximum delay: 120000 milliseconds</description></item>
        /// </list>
        /// Retry will be attempted on the following response status codes:
        /// <list>
        /// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
        /// <item><description><see cref="StatusCode.Unavailable"/></description></item>
        /// </list>
        /// Default RPC expiration is 600000 milliseconds.
        /// </remarks>
        public CallSettings AnnotateVideoSettings { get; set; } = CallSettings.FromCallTiming(
            CallTiming.FromRetry(new RetrySettings(
                retryBackoff: GetDefaultRetryBackoff(),
                timeoutBackoff: GetDefaultTimeoutBackoff(),
                totalExpiration: Expiration.FromTimeout(TimeSpan.FromMilliseconds(600000)),
                retryFilter: IdempotentRetryFilter
            )));

        /// <summary>
        /// Settings used for long running operations.
        /// </summary>
        public OperationsSettings LongRunningOperationsSettings { get; set; }

        /// <summary>
        /// Creates a deep clone of this object, with all the same property values.
        /// </summary>
        /// <returns>A deep clone of this <see cref="VideoIntelligenceServiceSettings"/> object.</returns>
        public VideoIntelligenceServiceSettings Clone() => new VideoIntelligenceServiceSettings(this);
    }

    /// <summary>
    /// VideoIntelligenceService client wrapper, for convenient use.
    /// </summary>
    public abstract partial class VideoIntelligenceServiceClient
    {
        /// <summary>
        /// The default endpoint for the VideoIntelligenceService service, which is a host of "videointelligence.googleapis.com" and a port of 443.
        /// </summary>
        public static ServiceEndpoint DefaultEndpoint { get; } = new ServiceEndpoint("videointelligence.googleapis.com", 443);

        /// <summary>
        /// The default VideoIntelligenceService scopes.
        /// </summary>
        /// <remarks>
        /// The default VideoIntelligenceService scopes are:
        /// <list type="bullet">
        /// <item><description>"https://www.googleapis.com/auth/cloud-platform"</description></item>
        /// </list>
        /// </remarks>
        public static IReadOnlyList<string> DefaultScopes { get; } = new ReadOnlyCollection<string>(new string[] {
            "https://www.googleapis.com/auth/cloud-platform",
        });

        // Shared pool of gRPC channels (one per endpoint), created with DefaultScopes;
        // used by the Create/CreateAsync overloads below and drained by ShutdownDefaultChannelsAsync.
        private static readonly ChannelPool s_channelPool = new ChannelPool(DefaultScopes);

        // Note: we could have parameterless overloads of Create and CreateAsync,
        // documented to just use the default endpoint, settings and credentials.
        // Pros:
        // - Might be more reassuring on first use
        // - Allows method group conversions
        // Con: overloads!

        /// <summary>
        /// Asynchronously creates a <see cref="VideoIntelligenceServiceClient"/>, applying defaults for all unspecified settings,
        /// and creating a channel connecting to the given endpoint with application default credentials where
        /// necessary.
        /// </summary>
        /// <param name="endpoint">Optional <see cref="ServiceEndpoint"/>.</param>
        /// <param name="settings">Optional <see cref="VideoIntelligenceServiceSettings"/>.</param>
        /// <returns>The task representing the created <see cref="VideoIntelligenceServiceClient"/>.</returns>
        public static async Task<VideoIntelligenceServiceClient> CreateAsync(ServiceEndpoint endpoint = null, VideoIntelligenceServiceSettings settings = null)
        {
            Channel channel = await s_channelPool.GetChannelAsync(endpoint ?? DefaultEndpoint).ConfigureAwait(false);
            return Create(channel, settings);
        }

        /// <summary>
        /// Synchronously creates a <see cref="VideoIntelligenceServiceClient"/>, applying defaults for all unspecified settings,
        /// and creating a channel connecting to the given endpoint with application default credentials where
        /// necessary.
        /// </summary>
        /// <param name="endpoint">Optional <see cref="ServiceEndpoint"/>.</param>
        /// <param name="settings">Optional <see cref="VideoIntelligenceServiceSettings"/>.</param>
        /// <returns>The created <see cref="VideoIntelligenceServiceClient"/>.</returns>
        public static VideoIntelligenceServiceClient Create(ServiceEndpoint endpoint = null, VideoIntelligenceServiceSettings settings = null)
        {
            Channel channel = s_channelPool.GetChannel(endpoint ?? DefaultEndpoint);
            return Create(channel, settings);
        }

        /// <summary>
        /// Creates a <see cref="VideoIntelligenceServiceClient"/> which uses the specified channel for remote operations.
        /// </summary>
        /// <param name="channel">The <see cref="Channel"/> for remote operations. Must not be null.</param>
        /// <param name="settings">Optional <see cref="VideoIntelligenceServiceSettings"/>.</param>
        /// <returns>The created <see cref="VideoIntelligenceServiceClient"/>.</returns>
        public static VideoIntelligenceServiceClient Create(Channel channel, VideoIntelligenceServiceSettings settings = null)
        {
            GaxPreconditions.CheckNotNull(channel, nameof(channel));
            VideoIntelligenceService.VideoIntelligenceServiceClient grpcClient = new VideoIntelligenceService.VideoIntelligenceServiceClient(channel);
            return new VideoIntelligenceServiceClientImpl(grpcClient, settings);
        }

        /// <summary>
        /// Shuts down any channels automatically created by <see cref="Create(ServiceEndpoint, VideoIntelligenceServiceSettings)"/>
        /// and <see cref="CreateAsync(ServiceEndpoint, VideoIntelligenceServiceSettings)"/>. Channels which weren't automatically
        /// created are not affected.
        /// </summary>
        /// <remarks>After calling this method, further calls to <see cref="Create(ServiceEndpoint, VideoIntelligenceServiceSettings)"/>
        /// and <see cref="CreateAsync(ServiceEndpoint, VideoIntelligenceServiceSettings)"/> will create new channels, which could
        /// in turn be shut down by another call to this method.</remarks>
        /// <returns>A task representing the asynchronous shutdown operation.</returns>
        public static Task ShutdownDefaultChannelsAsync() => s_channelPool.ShutdownChannelsAsync();

        /// <summary>
        /// The underlying gRPC VideoIntelligenceService client.
        /// </summary>
        public virtual VideoIntelligenceService.VideoIntelligenceServiceClient GrpcClient
        {
            get { throw new NotImplementedException(); }
        }

        /// <summary>
        /// The client for long-running operations.
        /// </summary>
        public virtual OperationsClient LongRunningOperationsClient
        {
            get { throw new NotImplementedException(); }
        }

        /// <summary>
        /// Performs asynchronous video annotation. Progress and results can be
        /// retrieved through the `google.longrunning.Operations` interface.
        /// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
        /// `Operation.response` contains `AnnotateVideoResponse` (results).
        /// </summary>
        /// <param name="inputUri">
        /// Input video location. Currently, only
        /// [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
        /// supported, which must be specified in the following format:
        /// `gs://bucket-id/object-id` (other URI formats return
        /// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
        /// [Request URIs](/storage/docs/reference-uris).
        /// A video URI may include wildcards in `object-id`, and thus identify
        /// multiple videos. Supported wildcards: '*' to match 0 or more characters;
        /// '?' to match 1 character. If unset, the input video should be embedded
        /// in the request as `input_content`. If set, `input_content` should be unset.
        /// </param>
        /// <param name="features">
        /// Requested video annotation features.
        /// </param>
        /// <param name="videoContext">
        /// Additional video context and/or feature-specific parameters.
        /// </param>
        /// <param name="outputUri">
        /// Optional location where the output (in JSON format) should be stored.
        /// Currently, only [Google Cloud Storage](https://cloud.google.com/storage/)
        /// URIs are supported, which must be specified in the following format:
        /// `gs://bucket-id/object-id` (other URI formats return
        /// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
        /// [Request URIs](/storage/docs/reference-uris).
        /// </param>
        /// <param name="locationId">
        /// Optional cloud region where annotation should take place. Supported cloud
        /// regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region
        /// is specified, a region will be determined based on video file location.
        /// </param>
        /// <param name="callSettings">
        /// If not null, applies overrides to this RPC call.
        /// </param>
        /// <returns>
        /// A Task containing the RPC response.
        /// </returns>
        public virtual Task<Operation<AnnotateVideoResponse, AnnotateVideoProgress>> AnnotateVideoAsync(
            string inputUri,
            IEnumerable<Feature> features,
            VideoContext videoContext,
            string outputUri,
            string locationId,
            CallSettings callSettings = null) => AnnotateVideoAsync(
                new AnnotateVideoRequest
                {
                    InputUri = GaxPreconditions.CheckNotNullOrEmpty(inputUri, nameof(inputUri)),
                    Features = { GaxPreconditions.CheckNotNull(features, nameof(features)) },
                    VideoContext = videoContext, // Optional
                    OutputUri = outputUri ?? "", // Optional
                    LocationId = locationId ?? "", // Optional
                },
                callSettings);

        /// <summary>
        /// Performs asynchronous video annotation. Progress and results can be
        /// retrieved through the `google.longrunning.Operations` interface.
        /// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
        /// `Operation.response` contains `AnnotateVideoResponse` (results).
        /// </summary>
        /// <param name="inputUri">
        /// Input video location. Currently, only
        /// [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
        /// supported, which must be specified in the following format:
        /// `gs://bucket-id/object-id` (other URI formats return
        /// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
        /// [Request URIs](/storage/docs/reference-uris).
        /// A video URI may include wildcards in `object-id`, and thus identify
        /// multiple videos. Supported wildcards: '*' to match 0 or more characters;
        /// '?' to match 1 character. If unset, the input video should be embedded
        /// in the request as `input_content`. If set, `input_content` should be unset.
        /// </param>
        /// <param name="features">
        /// Requested video annotation features.
        /// </param>
        /// <param name="videoContext">
        /// Additional video context and/or feature-specific parameters.
        /// </param>
        /// <param name="outputUri">
        /// Optional location where the output (in JSON format) should be stored.
        /// Currently, only [Google Cloud Storage](https://cloud.google.com/storage/)
        /// URIs are supported, which must be specified in the following format:
        /// `gs://bucket-id/object-id` (other URI formats return
        /// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
        /// [Request URIs](/storage/docs/reference-uris).
        /// </param>
        /// <param name="locationId">
        /// Optional cloud region where annotation should take place. Supported cloud
        /// regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region
        /// is specified, a region will be determined based on video file location.
        /// </param>
        /// <param name="cancellationToken">
        /// A <see cref="CancellationToken"/> to use for this RPC.
        /// </param>
        /// <returns>
        /// A Task containing the RPC response.
        /// </returns>
        public virtual Task<Operation<AnnotateVideoResponse, AnnotateVideoProgress>> AnnotateVideoAsync(
            string inputUri,
            IEnumerable<Feature> features,
            VideoContext videoContext,
            string outputUri,
            string locationId,
            CancellationToken cancellationToken) => AnnotateVideoAsync(
                inputUri,
                features,
                videoContext,
                outputUri,
                locationId,
                CallSettings.FromCancellationToken(cancellationToken));

        /// <summary>
        /// Performs asynchronous video annotation. Progress and results can be
        /// retrieved through the `google.longrunning.Operations` interface.
        /// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
        /// `Operation.response` contains `AnnotateVideoResponse` (results).
        /// </summary>
        /// <param name="inputUri">
        /// Input video location. Currently, only
        /// [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
        /// supported, which must be specified in the following format:
        /// `gs://bucket-id/object-id` (other URI formats return
        /// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
        /// [Request URIs](/storage/docs/reference-uris).
        /// A video URI may include wildcards in `object-id`, and thus identify
        /// multiple videos. Supported wildcards: '*' to match 0 or more characters;
        /// '?' to match 1 character. If unset, the input video should be embedded
        /// in the request as `input_content`. If set, `input_content` should be unset.
        /// </param>
        /// <param name="features">
        /// Requested video annotation features.
        /// </param>
        /// <param name="videoContext">
        /// Additional video context and/or feature-specific parameters.
        /// </param>
        /// <param name="outputUri">
        /// Optional location where the output (in JSON format) should be stored.
        /// Currently, only [Google Cloud Storage](https://cloud.google.com/storage/)
        /// URIs are supported, which must be specified in the following format:
        /// `gs://bucket-id/object-id` (other URI formats return
        /// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
        /// [Request URIs](/storage/docs/reference-uris).
        /// </param>
        /// <param name="locationId">
        /// Optional cloud region where annotation should take place. Supported cloud
        /// regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region
        /// is specified, a region will be determined based on video file location.
        /// </param>
        /// <param name="callSettings">
        /// If not null, applies overrides to this RPC call.
        /// </param>
        /// <returns>
        /// The RPC response.
        /// </returns>
        public virtual Operation<AnnotateVideoResponse, AnnotateVideoProgress> AnnotateVideo(
            string inputUri,
            IEnumerable<Feature> features,
            VideoContext videoContext,
            string outputUri,
            string locationId,
            CallSettings callSettings = null) => AnnotateVideo(
                new AnnotateVideoRequest
                {
                    InputUri = GaxPreconditions.CheckNotNullOrEmpty(inputUri, nameof(inputUri)),
                    Features = { GaxPreconditions.CheckNotNull(features, nameof(features)) },
                    VideoContext = videoContext, // Optional
                    OutputUri = outputUri ?? "", // Optional
                    LocationId = locationId ?? "", // Optional
                },
                callSettings);

        /// <summary>
        /// Performs asynchronous video annotation. Progress and results can be
        /// retrieved through the `google.longrunning.Operations` interface.
        /// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
        /// `Operation.response` contains `AnnotateVideoResponse` (results).
        /// </summary>
        /// <param name="request">
        /// The request object containing all of the parameters for the API call.
        /// </param>
        /// <param name="callSettings">
        /// If not null, applies overrides to this RPC call.
        /// </param>
        /// <returns>
        /// A Task containing the RPC response.
        /// </returns>
        public virtual Task<Operation<AnnotateVideoResponse, AnnotateVideoProgress>> AnnotateVideoAsync(
            AnnotateVideoRequest request,
            CallSettings callSettings = null)
        {
            // Overridden by VideoIntelligenceServiceClientImpl; the abstract base only
            // exists to make flattening overloads and mocking possible.
            throw new NotImplementedException();
        }

        /// <summary>
        /// Asynchronously poll an operation once, using an <c>operationName</c> from a previous invocation of <c>AnnotateVideoAsync</c>.
        /// </summary>
        /// <param name="operationName">The name of a previously invoked operation. Must not be <c>null</c> or empty.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A task representing the result of polling the operation.</returns>
        public virtual Task<Operation<AnnotateVideoResponse, AnnotateVideoProgress>> PollOnceAnnotateVideoAsync(
            string operationName,
            CallSettings callSettings = null) => Operation<AnnotateVideoResponse, AnnotateVideoProgress>.PollOnceFromNameAsync(
                GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)),
                LongRunningOperationsClient,
                callSettings);

        /// <summary>
        /// Performs asynchronous video annotation. Progress and results can be
        /// retrieved through the `google.longrunning.Operations` interface.
        /// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
        /// `Operation.response` contains `AnnotateVideoResponse` (results).
        /// </summary>
        /// <param name="request">
        /// The request object containing all of the parameters for the API call.
        /// </param>
        /// <param name="callSettings">
        /// If not null, applies overrides to this RPC call.
        /// </param>
        /// <returns>
        /// The RPC response.
        /// </returns>
        public virtual Operation<AnnotateVideoResponse, AnnotateVideoProgress> AnnotateVideo(
            AnnotateVideoRequest request,
            CallSettings callSettings = null)
        {
            // Overridden by VideoIntelligenceServiceClientImpl (see above).
            throw new NotImplementedException();
        }

        /// <summary>
        /// Poll an operation once, using an <c>operationName</c> from a previous invocation of <c>AnnotateVideo</c>.
        /// </summary>
        /// <param name="operationName">The name of a previously invoked operation. Must not be <c>null</c> or empty.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The result of polling the operation.</returns>
        public virtual Operation<AnnotateVideoResponse, AnnotateVideoProgress> PollOnceAnnotateVideo(
            string operationName,
            CallSettings callSettings = null) => Operation<AnnotateVideoResponse, AnnotateVideoProgress>.PollOnceFromName(
                GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)),
                LongRunningOperationsClient,
                callSettings);
    }

    /// <summary>
    /// VideoIntelligenceService client wrapper implementation, for convenient use.
    /// </summary>
    public sealed partial class VideoIntelligenceServiceClientImpl : VideoIntelligenceServiceClient
    {
        private readonly ApiCall<AnnotateVideoRequest, Operation> _callAnnotateVideo;

        /// <summary>
        /// Constructs a client wrapper for the VideoIntelligenceService service, with the specified gRPC client and settings.
        /// </summary>
        /// <param name="grpcClient">The underlying gRPC client.</param>
        /// <param name="settings">The base <see cref="VideoIntelligenceServiceSettings"/> used within this client </param>
        public VideoIntelligenceServiceClientImpl(VideoIntelligenceService.VideoIntelligenceServiceClient grpcClient, VideoIntelligenceServiceSettings settings)
        {
            this.GrpcClient = grpcClient;
            // Fall back to default settings when the caller supplied none.
            VideoIntelligenceServiceSettings effectiveSettings = settings ?? VideoIntelligenceServiceSettings.GetDefault();
            LongRunningOperationsClient = new OperationsClientImpl(
                grpcClient.CreateOperationsClient(), effectiveSettings.LongRunningOperationsSettings);
            ClientHelper clientHelper = new ClientHelper(effectiveSettings);
            _callAnnotateVideo = clientHelper.BuildApiCall<AnnotateVideoRequest, Operation>(
                GrpcClient.AnnotateVideoAsync, GrpcClient.AnnotateVideo, effectiveSettings.AnnotateVideoSettings);
            OnConstruction(grpcClient, effectiveSettings, clientHelper);
        }

        // Extension point for hand-written partial classes to hook construction.
        partial void OnConstruction(VideoIntelligenceService.VideoIntelligenceServiceClient grpcClient, VideoIntelligenceServiceSettings effectiveSettings, ClientHelper clientHelper);

        /// <summary>
        /// The underlying gRPC VideoIntelligenceService client.
        /// </summary>
        public override VideoIntelligenceService.VideoIntelligenceServiceClient GrpcClient { get; }

        /// <summary>
        /// The client for long-running operations.
        /// </summary>
        public override OperationsClient LongRunningOperationsClient { get; }

        // Partial modifier methods contain '_' to ensure no name conflicts with RPC methods.
        partial void Modify_AnnotateVideoRequest(ref AnnotateVideoRequest request, ref CallSettings settings);

        /// <summary>
        /// Performs asynchronous video annotation. Progress and results can be
        /// retrieved through the `google.longrunning.Operations` interface.
        /// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
        /// `Operation.response` contains `AnnotateVideoResponse` (results).
        /// </summary>
        /// <param name="request">
        /// The request object containing all of the parameters for the API call.
        /// </param>
        /// <param name="callSettings">
        /// If not null, applies overrides to this RPC call.
        /// </param>
        /// <returns>
        /// A Task containing the RPC response.
        /// </returns>
        public override async Task<Operation<AnnotateVideoResponse, AnnotateVideoProgress>> AnnotateVideoAsync(
            AnnotateVideoRequest request,
            CallSettings callSettings = null)
        {
            Modify_AnnotateVideoRequest(ref request, ref callSettings);
            return new Operation<AnnotateVideoResponse, AnnotateVideoProgress>(
                await _callAnnotateVideo.Async(request, callSettings).ConfigureAwait(false), LongRunningOperationsClient);
        }

        /// <summary>
        /// Performs asynchronous video annotation. Progress and results can be
        /// retrieved through the `google.longrunning.Operations` interface.
        /// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
        /// `Operation.response` contains `AnnotateVideoResponse` (results).
        /// </summary>
        /// <param name="request">
        /// The request object containing all of the parameters for the API call.
        /// </param>
        /// <param name="callSettings">
        /// If not null, applies overrides to this RPC call.
        /// </param>
        /// <returns>
        /// The RPC response.
        /// </returns>
        public override Operation<AnnotateVideoResponse, AnnotateVideoProgress> AnnotateVideo(
            AnnotateVideoRequest request,
            CallSettings callSettings = null)
        {
            Modify_AnnotateVideoRequest(ref request, ref callSettings);
            return new Operation<AnnotateVideoResponse, AnnotateVideoProgress>(
                _callAnnotateVideo.Sync(request, callSettings), LongRunningOperationsClient);
        }
    }

    // Partial classes to enable page-streaming
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Driver for the Sse2.AndNot(Vector128<Int16>, Vector128<Int16>) scenarios below.
        private static void AndNotInt16()
        {
            var test = new SimpleBinaryOpTest__AndNotInt16();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                // Validates basic functionality works, using Load
                test.RunBasicScenario_Load();

                // Validates basic functionality works, using LoadAligned
                test.RunBasicScenario_LoadAligned();

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                // Validates calling via reflection works, using Load
                test.RunReflectionScenario_Load();

                // Validates calling via reflection works, using LoadAligned
                test.RunReflectionScenario_LoadAligned();

                // Validates passing a static member works
                test.RunClsVarScenario();

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                // Validates passing a local works, using Load
                test.RunLclVarScenario_Load();

                // Validates passing a local works, using LoadAligned
                test.RunLclVarScenario_LoadAligned();

                // Validates passing the field of a local works
                test.RunLclFldScenario();

                // Validates passing an instance member works
                test.RunFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    public sealed unsafe class SimpleBinaryOpTest__AndNotInt16
    {
        // A Vector128 is 128 bits = 16 bytes.
        private const int VectorSize = 16;

        // Number of Int16 lanes in one vector.
        private const int ElementCount = VectorSize / sizeof(Int16);

        // NOTE(review): _data1/_data2 are static but are re-randomized by every
        // instance constructor, so all instances share (and overwrite) the same
        // backing arrays. This matches the generated template's pattern.
        private static Int16[] _data1 = new Int16[ElementCount];
        private static Int16[] _data2 = new Int16[ElementCount];

        private static Vector128<Int16> _clsVar1;
        private static Vector128<Int16> _clsVar2;

        private Vector128<Int16> _fld1;
        private Vector128<Int16> _fld2;

        private SimpleBinaryOpTest__DataTable<Int16> _dataTable;

        static SimpleBinaryOpTest__AndNotInt16()
        {
            var random = new Random();

            for (var i = 0; i < ElementCount; i++)
            {
                _data1[i] = (short)(random.Next(short.MinValue, short.MaxValue));
                _data2[i] = (short)(random.Next(short.MinValue, short.MaxValue));
            }

            // Note the swap: _clsVar1 is loaded from _data2 and _clsVar2 from _data1.
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar1), ref Unsafe.As<Int16, byte>(ref _data2[0]), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar2), ref Unsafe.As<Int16, byte>(ref _data1[0]), VectorSize);
        }

        public SimpleBinaryOpTest__AndNotInt16()
        {
            Succeeded = true;

            var random = new Random();

            // First fill: snapshot into the instance fields used by Run(Lcl)FldScenario.
            for (var i = 0; i < ElementCount; i++)
            {
                _data1[i] = (short)(random.Next(short.MinValue, short.MaxValue));
                _data2[i] = (short)(random.Next(short.MinValue, short.MaxValue));
            }

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld1), ref Unsafe.As<Int16, byte>(ref _data1[0]), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld2), ref Unsafe.As<Int16, byte>(ref _data2[0]), VectorSize);

            // Second fill: independent random data for the pointer-based data table.
            for (var i = 0; i < ElementCount; i++)
            {
                _data1[i] = (short)(random.Next(short.MinValue, short.MaxValue));
                _data2[i] = (short)(random.Next(short.MinValue, short.MaxValue));
            }

            _dataTable = new SimpleBinaryOpTest__DataTable<Int16>(_data1, _data2, new Int16[ElementCount], VectorSize);
        }

        public bool IsSupported => Sse2.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            var result = Sse2.AndNot(
                Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_Load()
        {
            var result = Sse2.AndNot(
                Sse2.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
                Sse2.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_LoadAligned()
        {
            var result = Sse2.AndNot(
                Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray1Ptr)),
                Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            var result = typeof(Sse2).GetMethod(nameof(Sse2.AndNot), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) })
                                     .Invoke(null, new object[] {
                                        Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
                                        Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_Load()
        {
            var result = typeof(Sse2).GetMethod(nameof(Sse2.AndNot), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) })
                                     .Invoke(null, new object[] {
                                        Sse2.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
                                        Sse2.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_LoadAligned()
        {
            var result = typeof(Sse2).GetMethod(nameof(Sse2.AndNot), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) })
                                     .Invoke(null, new object[] {
                                        Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray1Ptr)),
                                        Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario()
        {
            var result = Sse2.AndNot(
                _clsVar1,
                _clsVar2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            var left = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr);
            var right = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr);
            var result = Sse2.AndNot(left, right);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_Load()
        {
            var left = Sse2.LoadVector128((Int16*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadVector128((Int16*)(_dataTable.inArray2Ptr));
            var result = Sse2.AndNot(left, right);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            var left = Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray2Ptr));
            var result = Sse2.AndNot(left, right);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclFldScenario()
        {
            // Uses the fields of a freshly constructed local instance
            // (the constructor also re-randomizes the shared static arrays).
            var test = new SimpleBinaryOpTest__AndNotInt16();
            var result = Sse2.AndNot(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunFldScenario()
        {
            var result = Sse2.AndNot(_fld1, _fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
        }

        public void RunUnsupportedScenario()
        {
            // On unsupported hardware the intrinsic must throw
            // PlatformNotSupportedException; anything else is a failure.
            Succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                Succeeded = true;
            }
        }

        // Validation overload taking vector operands and a pointer to the result buffer.
        private void ValidateResult(Vector128<Int16> left, Vector128<Int16> right, void* result, [CallerMemberName] string method = "")
        {
            Int16[] inArray1 = new Int16[ElementCount];
            Int16[] inArray2 = new Int16[ElementCount];
            Int16[] outArray = new Int16[ElementCount];

            Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
            Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Validation overload taking raw pointers to both operands and the result.
        private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
        {
            Int16[] inArray1 = new Int16[ElementCount];
            Int16[] inArray2 = new Int16[ElementCount];
            Int16[] outArray = new Int16[ElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Element-wise check: AndNot computes (~left & right) per lane (PANDN semantics).
        private void ValidateResult(Int16[] left, Int16[] right, Int16[] result, [CallerMemberName] string method = "")
        {
            if ((short)(~left[0] & right[0]) != result[0])
            {
                Succeeded = false;
            }
            else
            {
                for (var i = 1; i < left.Length; i++)
                {
                    if ((short)(~left[i] & right[i]) != result[i])
                    {
                        Succeeded = false;
                        break;
                    }
                }
            }

            if (!Succeeded)
            {
                Console.WriteLine($"{nameof(Sse2)}.{nameof(Sse2.AndNot)}<Int16>: {method} failed:");
                Console.WriteLine($"    left: ({string.Join(", ", left)})");
                Console.WriteLine($"   right: ({string.Join(", ", right)})");
                Console.WriteLine($"  result: ({string.Join(", ", result)})");
                Console.WriteLine();
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

// NOTE(review): each namespace below is a self-contained compiler-conformance
// test mixing `dynamic` with unsafe types.  The //<Expects ...> comments appear
// to encode expected compiler diagnostics (e.g. CS0649 at a line/column of the
// original file layout) matched by the test harness -- confirm with the harness
// before any reformatting, since the positions refer to the original layout.

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.anonytype01.anonytype01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.anonytype01.anonytype01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // anonymous type
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class UC
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Anonymous type whose property holds an instance of an unsafe class.
            dynamic x = new { P = new UC() };
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array01.array01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array01.array01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // array initializer : unsafe array initializer with dynamic
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d1 = new US();
            dynamic d2 = new US();
            // Unsafe-typed array initialized from dynamic elements.
            US[] array = { d1, d2, new US() };
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array02.array02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array02.array02;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // array initializer : dynamic array initializer with unsafe
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // dynamic[] initialized from unsafe-typed elements.
            dynamic[] array = { new US(), new US() };
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.attribute01.attribute01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.attribute01.attribute01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // attribute
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(20,17\).*CS0649</Expects>
    using System;

    [AttributeUsage(AttributeTargets.Class)]
    public class MyAttr : System.Attribute
    {
    }

    // An attributed unsafe class, assigned to and from dynamic.
    [MyAttr]
    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d1 = new US();
            US u = d1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection01.collection01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection01.collection01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // collection initializer : dynamic collection initializer with unsafe
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System.Collections.Generic;

    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            List<dynamic> col = new List<dynamic> { new US(), new US() };
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection02.collection02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection02.collection02;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // collection initializer : unsafe type collection initializer with dynamic
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System.Collections.Generic;

    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new US();
            List<US> col = new List<US> { d, d };
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection03.collection03
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection03.collection03;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // collection initializer : unsafe type collection initializer with dynamic
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(17,17\).*CS0649</Expects>
    using System;
    using System.Collections.Generic;
    using Microsoft.CSharp.RuntimeBinder;

    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Negative case: elements whose runtime types cannot convert to US
            // must fail binding of List<US>.Add at run time.
            dynamic d1 = 1;
            dynamic d2 = "hi";
            try
            {
                List<US> col = new List<US> { d1, d2 };
            }
            catch (RuntimeBinderException ex)
            {
                bool ret = ErrorVerifier.Verify(ErrorMessageId.BadArgTypes, ex.Message, "System.Collections.Generic.List<US>.Add(US)");
                if (ret)
                    return 0;
            }
            return 1;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor01.ctor01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor01.ctor01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // ctor - pointer as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe class C
    {
        public int* p;
        public C(int* q)
        {
            p = q;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // NOTE(review): the pointer-argument scenario is commented out in
            // the original source; the test currently only verifies compilation.
            //int num = 5;
            //int* p = &num;
            //dynamic d = new C(p);
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor02.ctor02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor02.ctor02;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // ctor - dynamic as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe class C
    {
        public dynamic p;
        public C(dynamic q)
        {
            p = q;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            int num = 5;
            dynamic d = new C(num);
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor03.ctor03
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor03.ctor03;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // ctor - mixed dynamic and pointer as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe class C
    {
        public dynamic d;
        public int* p;
        public C(dynamic x, int* y)
        {
            d = x;
            p = y;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            int num = 5;
            int* p = &num;
            dynamic d = new C(num, p);
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dlgate01.dlgate01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dlgate01.dlgate01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type</Title>
    // <Description>
    // delegate
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    internal unsafe delegate void Foo(int* p);

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Delegate over a pointer-taking method, stored in a dynamic.
            dynamic d = (Foo)Test.Bar;
            return 0;
        }

        public static void Bar(int* q)
        {
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dtor01.dtor01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dtor01.dtor01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // dtor
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        // Finalizer containing unsafe code.
        unsafe ~C()
        {
            int num = 5;
            int* ptr = &num;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new C();
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.evnt01.evnt01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.evnt01.evnt01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type</Title>
    // <Description>
    // delegate
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    internal unsafe delegate void Foo(int* p);

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = (Foo)Test.Bar;
            return 0;
        }

        public static void Bar(int* q)
        {
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.explicit01.explicit01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.explicit01.explicit01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // explicit conversion
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
        public static explicit operator int (US u)
        {
            return 1;
        }

        public static explicit operator US(int i)
        {
            return new US();
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            US u = new US();
            // Explicit user conversion applied statically and via dynamic.
            dynamic d = (int)u;
            dynamic x = u;
            int i = (int)x;
            if (i != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.expressiontree01.expressiontree01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.expressiontree01.expressiontree01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // expression tree
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(15,17\).*CS0649</Expects>
    using System;
    using System.Linq.Expressions;

    public unsafe struct UC
    {
        public int* p;
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Expression tree with a dynamic parameter and an unsafe result type.
            Expression<Func<dynamic, UC>> f = x => new UC();
            dynamic dyn = 10;
            f.Compile()(dyn);
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.field01.field01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.field01.field01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // filed (static & non-static)
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        public unsafe int* p;
        public unsafe static char* q;
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new C();
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.fieldinit01.fieldinit01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.fieldinit01.fieldinit01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title>unsafe context</Title>
    // <Description>
    // dynamic in field initializer
    // </Description>
    // <RelatedBug></RelatedBug>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        public dynamic field = 10;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            C c = new C();
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.implicit01.implicit01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.implicit01.implicit01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // implicit conversion
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
        public static implicit operator int (US u)
        {
            return 1;
        }

        public static implicit operator US(int i)
        {
            return new US();
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            US u = new US();
            dynamic x = u;
            // Implicit user conversion applied through dynamic binding.
            int i = x;
            if (i != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer02.indexer02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer02.indexer02;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // indexer - dynamic as index
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        public const int field = 10;
        // Indexer that pins the index array and returns a pointer into it.
        unsafe public int* this[int[] index]
        {
            get
            {
                fixed (int* p = index)
                {
                    return p;
                }
            }
        }
    }

    static public class D
    {
        public static int field = 1;
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            C d = new C();
            int[] array = new[] { 1, 2, 3 };
            int* x = ((C)d)[array];
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer04.indexer04
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer04.indexer04;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // indexer - pointer as return value
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class Unsafe
    {
        unsafe public int* this[int index]
        {
            get
            {
                int temp = 10;
                return &temp;
            }
            set
            {
            }
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Negative case: dynamically invoking a pointer-returning indexer
            // must fail binding with an "unsafe context required" error.
            dynamic d = new Unsafe();
            bool ret = true;
            try
            {
                var p = d[1];
            }
            catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException ex)
            {
                ret = ErrorVerifier.Verify(ErrorMessageId.UnsafeNeeded, ex.Message);
                if (ret)
                    return 0;
            }
            return 1;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.integeregererface02.integeregererface02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.integeregererface02.integeregererface02;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type</Title>
    // <Description>
    // interface - method with dynamic
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe interface IF
    {
        void Foo(dynamic p);
    }

    unsafe public class C : IF
    {
        public void Foo(dynamic p)
        {
        }
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            IF i = new C();
            dynamic d = i;
            d.Foo(i);
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda01.lambda01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda01.lambda01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // lambda expression
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System;

    unsafe public class UC
    {
        public int* p;
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Lambdas converting between dynamic and an unsafe class type.
            Func<dynamic, UC> f1 = x => new UC();
            f1(1);
            Func<UC, dynamic> f2 = x => x;
            f2(new UC());
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda02.lambda02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda02.lambda02;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // lambda expression
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System;

    unsafe public class UC
    {
        public int* p;
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Lambda body uses pointer operations; invoked with a dynamic arg.
            Func<int, int> f = x =>
            {
                int* p = &x;
                return *p;
            };
            dynamic dyn = 10;
            int result = f(dyn);
            if (result == 10)
                return 0;
            return 1;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method02.method02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method02.method02;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // method (static & non-static) - dynamic as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        unsafe public int Foo(dynamic p)
        {
            return 1;
        }

        unsafe public static int Bar(dynamic p)
        {
            return 2;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new C();
            int num = 5;
            // Foo returns 1, Bar returns 2; the sum must be 3.
            int result = d.Foo(num) + C.Bar(d);
            if (result != 3)
                return 1;
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method05.method05
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method05.method05;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // method (static & non-static) - dynamic as return type
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        public static int field = 10;
        unsafe public dynamic Foo()
        {
            return 1;
        }

        unsafe public static dynamic Bar()
        {
            return 2;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new C();
            d.Foo();
            C.Bar();
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method07.method07
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method07.method07;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // method - unsafe type as arg : extension method
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* ptr;
    }

    // Extension method on the unsafe type taking a dynamic parameter.
    static public class Ext
    {
        public static void Foo(this US u, dynamic d)
        {
        }
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            US u = new US();
            u.Foo(u);
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method08.method08
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method08.method08;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // method - OPTIONAL param
    // </Description>
    // <RelatedBug></RelatedBug>
    //<Expects Status=success></Expects>
    // <Code>
    using System;
    using Microsoft.CSharp.RuntimeBinder;

    public unsafe class Test
    {
        public void Foo(void* ptr = null)
        {
        }

        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Negative case: dynamically invoking a method whose optional
            // parameter is a pointer must fail with "unsafe context required".
            dynamic d = new Test();
            bool ret = true;
            try
            {
                d.Foo();
            }
            catch (RuntimeBinderException ex)
            {
                ret = ErrorVerifier.Verify(ErrorMessageId.UnsafeNeeded, ex.Message);
                if (ret)
                    return 0;
            }
            return 1;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.objinit01.objinit01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.objinit01.objinit01;

    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // object initializer
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    unsafe public class US
    {
        public int* p;
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            int num = 10;
            // Object initializer assigning a pointer field, result held in dynamic.
            dynamic u = new US { p = &num };
            return 0;
        }
    }
    // </Code>
}
using System;
using System.Collections.Generic;
using System.Linq;
using Signum.Utilities.Reflection;
using System.Linq.Expressions;
using System.Reflection;
using Signum.Utilities;
using Signum.Entities.Reflection;
using Signum.Utilities.ExpressionTrees;
using System.ComponentModel;
using System.Collections.Concurrent;

namespace Signum.Entities.DynamicQuery
{
    /// <summary>
    /// Base class for a node in a dynamic-query token path (e.g. "Entity.Name").
    /// Concrete subclasses describe a navigable step (property, collection element,
    /// date part, ...) and know how to build the LINQ expression for it.
    /// Equality is by concrete type + QueryName + FullKey.
    /// </summary>
    [Serializable]
    public abstract class QueryToken : IEquatable<QueryToken>
    {
        // Higher priority sorts earlier in SubTokensInternal results.
        public int Priority = 0;

        public abstract override string ToString();
        public abstract string NiceName();
        public abstract string? Format { get; }
        public abstract string? Unit { get; }
        public abstract Type Type { get; }
        public abstract string Key { get; }

        /// <summary>
        /// Whether this token can be used as a group-by key. DateTime tokens are
        /// groupable only when they are Date-typed or validated to day precision.
        /// </summary>
        public virtual bool IsGroupable
        {
            get
            {
                switch (QueryUtils.TryGetFilterType(this.Type))
                {
                    case FilterType.Boolean:
                    case FilterType.Enum:
                    case FilterType.Guid:
                    case FilterType.Integer:
                    case FilterType.Lite:
                    case FilterType.String:
                        return true;

                    case FilterType.Decimal:
                    case FilterType.Embedded:
                        return false;

                    case FilterType.DateTime:
                    {
                        PropertyRoute? route = this.GetPropertyRoute();

                        if (route != null && route.PropertyRouteType == PropertyRouteType.FieldOrProperty)
                        {
                            if (route.Type == typeof(Date))
                                return true;

                            var pp = Validator.TryGetPropertyValidator(route);
                            if (pp != null)
                            {
                                DateTimePrecisionValidatorAttribute? datetimePrecision = pp.Validators.OfType<DateTimePrecisionValidatorAttribute>().SingleOrDefaultEx();

                                if (datetimePrecision != null && datetimePrecision.Precision == DateTimePrecision.Days)
                                    return true;
                            }
                        }

                        return false;
                    }
                }

                return false;
            }
        }

        protected abstract List<QueryToken> SubTokensOverride(SubTokensOptions options);

        // Root tokens override this; intermediate tokens inherit the parent's query.
        public virtual object QueryName => this.Parent!.QueryName;

        /// <summary>Compiles this token's expression into a typed accessor delegate.</summary>
        public Func<object, T> GetAccessor<T>(BuildExpressionContext context)
        {
            return Expression.Lambda<Func<object, T>>(this.BuildExpression(context), context.Parameter).Compile();
        }

        /// <summary>
        /// Returns the expression for this token, preferring a pre-computed
        /// replacement registered in the context over rebuilding it.
        /// </summary>
        public Expression BuildExpression(BuildExpressionContext context)
        {
            if (context.Replacemens != null && context.Replacemens.TryGetValue(this, out var result))
                return result;

            return BuildExpressionInternal(context);
        }

        protected abstract Expression BuildExpressionInternal(BuildExpressionContext context);

        public abstract PropertyRoute? GetPropertyRoute();

        /// <summary>
        /// Extends this token's property route with <paramref name="pi"/>, starting
        /// a fresh root for ModelEntity types and unwrapping Lite types first.
        /// </summary>
        internal PropertyRoute? AddPropertyRoute(PropertyInfo pi)
        {
            if (typeof(ModelEntity).IsAssignableFrom(Type))
                return PropertyRoute.Root(Type).Add(pi);

            Type? type = Lite.Extract(Type); //Because Add doesn't work with lites
            if (type != null)
                return PropertyRoute.Root(type).Add(pi);

            PropertyRoute? pr = GetPropertyRoute();
            if (pr == null)
                return null;

            return pr.Add(pi);
        }

        public abstract Implementations? GetImplementations();

        // Returns null when access is allowed; otherwise a reason string.
        public abstract string? IsAllowed();

        public abstract QueryToken Clone();

        public abstract QueryToken? Parent { get; }

        public QueryToken() { }

        // Cache of SubTokensOverride results keyed by (token, options).
        static ConcurrentDictionary<(QueryToken, SubTokensOptions), Dictionary<string, QueryToken>> subTokensOverrideCache = new ConcurrentDictionary<(QueryToken, SubTokensOptions), Dictionary<string, QueryToken>>();

        /// <summary>
        /// Resolves a child token by key (cached sub-tokens first, then entity
        /// extensions). Returns null if not found; throws if found but not allowed.
        /// </summary>
        public QueryToken? SubTokenInternal(string key, SubTokensOptions options)
        {
            var result = CachedSubTokensOverride(options).TryGetC(key) ?? OnEntityExtension(this).SingleOrDefaultEx(a => a.Key == key);

            if (result == null)
                return null;

            string? allowed = result.IsAllowed();
            if (allowed != null)
                throw new UnauthorizedAccessException($"Access to token '{this.FullKey()}.{key}' in query '{QueryUtils.GetKey(this.QueryName)}' is not allowed because: {allowed}");

            return result;
        }

        /// <summary>All allowed child tokens, ordered by Priority desc, then name.</summary>
        public List<QueryToken> SubTokensInternal(SubTokensOptions options)
        {
            return CachedSubTokensOverride(options).Values
                .Concat(OnEntityExtension(this))
                .Where(t => t.IsAllowed() == null)
                .OrderByDescending(a => a.Priority)
                .ThenBy(a => a.ToString())
                .ToList();
        }

        Dictionary<string, QueryToken> CachedSubTokensOverride(SubTokensOptions options)
        {
            return subTokensOverrideCache.GetOrAdd((this, options), (tup) => tup.Item1.SubTokensOverride(tup.Item2).ToDictionaryEx(a => a.Key, "subtokens for " + this.Key));
        }

        // Hook installed elsewhere (e.g. by the server layer) to expand
        // ImplementedByAll references; throws until it is set.
        public static Func<QueryToken, Type, SubTokensOptions, List<QueryToken>> ImplementedByAllSubTokens = (quetyToken, type, options) => throw new NotImplementedException("QueryToken.ImplementedByAllSubTokens not set");

        public static Func<Type, bool> IsSystemVersioned = t => false;

        /// <summary>
        /// Produces the default child tokens for a value of <paramref name="type"/>:
        /// date parts, numeric steps, string tokens, entity properties, collection
        /// tokens, etc., each augmented with a HasValue token.
        /// </summary>
        protected List<QueryToken> SubTokensBase(Type type, SubTokensOptions options, Implementations? implementations)
        {
            var ut = type.UnNullify();
            if (ut == typeof(DateTime))
                return DateTimeProperties(this, DateTimePrecision.Milliseconds).AndHasValue(this);

            if (ut == typeof(Date))
                return DateProperties(this).AndHasValue(this);

            if (ut == typeof(float) || ut == typeof(double) || ut == typeof(decimal))
                return StepTokens(this, 4).AndHasValue(this);

            if (ut == typeof(int) || ut == typeof(long) || ut == typeof(short))
                return StepTokens(this, 0).AndModuloTokens(this).AndHasValue(this);

            if (ut == typeof(string))
                return StringTokens().AndHasValue(this);

            Type cleanType = type.CleanType();
            if (cleanType.IsIEntity())
            {
                if (implementations!.Value.IsByAll)
                    return ImplementedByAllSubTokens(this, type, options); // new[] { EntityPropertyToken.IdProperty(this) };

                var onlyType = implementations.Value.Types.Only();

                if (onlyType != null && onlyType == cleanType)
                    return new[]
                    {
                        EntityPropertyToken.IdProperty(this),
                        new EntityToStringToken(this),
                        IsSystemVersioned(onlyType) ? new SystemTimeToken(this, SystemTimeProperty.SystemValidFrom): null,
                        IsSystemVersioned(onlyType) ? new SystemTimeToken(this, SystemTimeProperty.SystemValidTo): null,
                    }
                    .NotNull()
                    .Concat(EntityProperties(onlyType)).ToList().AndHasValue(this);

                // Multiple implementations: offer an "as type" cast token per type.
                return implementations.Value.Types.Select(t => (QueryToken)new AsTypeToken(this, t)).ToList().AndHasValue(this);
            }

            if (type.IsEmbeddedEntity() || type.IsModelEntity())
            {
                return EntityProperties(type).OrderBy(a => a.ToString()).ToList().AndHasValue(this);
            }

            if (IsCollection(type))
            {
                return CollectionProperties(this, options).AndHasValue(this);
            }

            return new List<QueryToken>();
        }

        public List<QueryToken> StringTokens()
        {
            return new List<QueryToken>
            {
                new NetPropertyToken(this, ReflectionTools.GetPropertyInfo((string str) => str.Length), ()=>QueryTokenMessage.Length.NiceToString())
            };
        }

        public static IEnumerable<QueryToken> OnEntityExtension(QueryToken parent)
        {
            if (EntityExtensions == null)
                // NOTE(review): "QuertToken" is a typo in the original message text,
                // preserved here because it is a runtime string.
                throw new InvalidOperationException("QuertToken.EntityExtensions function not set");

            return EntityExtensions(parent);
        }

        // Hook for registering extra tokens on entities; must be set at startup.
        public static Func<QueryToken, IEnumerable<QueryToken>>? EntityExtensions;

        /// <summary>
        /// Child tokens for a DateTime value: calendar parts plus time parts down
        /// to the given precision. A "Utc - " prefix is shown in UTC mode.
        /// </summary>
        public static List<QueryToken> DateTimeProperties(QueryToken parent, DateTimePrecision precision)
        {
            string utc = TimeZoneManager.Mode == TimeZoneMode.Utc ? "Utc - " : "";

            return new List<QueryToken?>
            {
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.Year), () => utc + QueryTokenMessage.Year.NiceToString()),
                new NetPropertyToken(parent, ReflectionTools.GetMethodInfo((DateTime dt ) => dt.Quarter()), ()=> utc + QueryTokenMessage.Quarter.NiceToString()),
                new DatePartStartToken(parent, QueryTokenMessage.QuarterStart),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.Month),() => utc + QueryTokenMessage.Month.NiceToString()),
                new DatePartStartToken(parent, QueryTokenMessage.MonthStart),
                new NetPropertyToken(parent, ReflectionTools.GetMethodInfo((DateTime dt ) => dt.WeekNumber()), ()=> utc + QueryTokenMessage.WeekNumber.NiceToString()),
                new DatePartStartToken(parent, QueryTokenMessage.WeekStart),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.Day), () => utc + QueryTokenMessage.Day.NiceToString()),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.DayOfYear), () => utc + QueryTokenMessage.DayOfYear.NiceToString()),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.DayOfWeek), () => utc + QueryTokenMessage.DayOfWeek.NiceToString()),
                new DateToken(parent),
                // Time-part tokens below are included only when the declared
                // precision reaches them (null entries are filtered by NotNull()).
                precision < DateTimePrecision.Hours ? null: new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.Hour), () => utc + QueryTokenMessage.Hour.NiceToString()),
                precision < DateTimePrecision.Hours ? null: new DatePartStartToken(parent, QueryTokenMessage.HourStart),
                precision < DateTimePrecision.Minutes ? null: new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.Minute), () => utc + QueryTokenMessage.Minute.NiceToString()),
                precision < DateTimePrecision.Minutes ? null: new DatePartStartToken(parent, QueryTokenMessage.MinuteStart),
                precision < DateTimePrecision.Seconds ? null: new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.Second), () => utc + QueryTokenMessage.Second.NiceToString()),
                precision < DateTimePrecision.Seconds ? null: new DatePartStartToken(parent, QueryTokenMessage.SecondStart),
                precision < DateTimePrecision.Milliseconds? null: new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((DateTime dt)=>dt.Millisecond), () => utc + QueryTokenMessage.Millisecond.NiceToString()),
            }.NotNull().ToList();
        }

        /// <summary>Child tokens for a Date value: calendar parts only (no time).</summary>
        public static List<QueryToken> DateProperties(QueryToken parent)
        {
            string utc = TimeZoneManager.Mode == TimeZoneMode.Utc ? "Utc - " : "";

            return new List<QueryToken?>
            {
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((Date dt)=>dt.Year), () => utc + QueryTokenMessage.Year.NiceToString()),
                new NetPropertyToken(parent, ReflectionTools.GetMethodInfo((Date dt ) => dt.Quarter()), ()=> utc + QueryTokenMessage.Quarter.NiceToString()),
                new DatePartStartToken(parent, QueryTokenMessage.QuarterStart),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((Date dt)=>dt.Month),() => utc + QueryTokenMessage.Month.NiceToString()),
                new DatePartStartToken(parent, QueryTokenMessage.MonthStart),
                new NetPropertyToken(parent, ReflectionTools.GetMethodInfo((Date dt ) => dt.WeekNumber()), ()=> utc + QueryTokenMessage.WeekNumber.NiceToString()),
                new DatePartStartToken(parent, QueryTokenMessage.WeekStart),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((Date dt)=>dt.Day), () => utc + QueryTokenMessage.Day.NiceToString()),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((Date dt)=>dt.DayOfYear), () => utc + QueryTokenMessage.DayOfYear.NiceToString()),
                new NetPropertyToken(parent, ReflectionTools.GetPropertyInfo((Date dt)=>dt.DayOfWeek), () => utc + QueryTokenMessage.DayOfWeek.NiceToString()),
            }.NotNull().ToList();
        }

        /// <summary>
        /// Rounding-step tokens for numeric values: fractional steps down to
        /// 10^-decimals plus fixed steps 1..1,000,000.
        /// </summary>
        public static List<QueryToken> StepTokens(QueryToken parent, int decimals)
        {
            return new List<QueryToken?>
            {
                decimals >= 4? new StepToken(parent, 0.0001m): null,
                decimals >= 3? new StepToken(parent, 0.001m) : null,
                decimals >= 2? new StepToken(parent, 0.01m) : null,
                decimals >= 1? new StepToken(parent, 0.1m) : null,
                new StepToken(parent, 1m),
                new StepToken(parent, 10m),
                new StepToken(parent, 100m),
                new StepToken(parent, 1000m),
                new StepToken(parent, 10000m),
                new StepToken(parent, 100000m),
                new StepToken(parent, 1000000m),
            }.NotNull().ToList();
        }

        /// <summary>
        /// Collection child tokens: Count, plus element/any-all tokens as the
        /// options allow. Element tokens are suppressed under an All/Any ancestor.
        /// </summary>
        public static List<QueryToken> CollectionProperties(QueryToken parent, SubTokensOptions options)
        {
            if (parent.HasAllOrAny())
                options = options & ~SubTokensOptions.CanElement;

            List<QueryToken> tokens = new List<QueryToken>() { new CountToken(parent) };

            if ((options & SubTokensOptions.CanElement) == SubTokensOptions.CanElement)
                tokens.AddRange(EnumExtensions.GetValues<CollectionElementType>().Select(cet => new CollectionElementToken(parent, cet)));

            if ((options & SubTokensOptions.CanAnyAll) == SubTokensOptions.CanAnyAll)
                tokens.AddRange(EnumExtensions.GetValues<CollectionAnyAllType>().Select(caat => new CollectionAnyAllToken(parent, caat)));

            return tokens;
        }

        // True when any ancestor token is a collection All/Any token.
        public virtual bool HasAllOrAny()
        {
            return Parent != null && Parent.HasAllOrAny();
        }

        // True when any ancestor token is a collection element token.
        public virtual bool HasElement()
        {
            return Parent != null && Parent.HasElement();
        }

        /// <summary>
        /// Property tokens for an entity/embedded type; for entities, also
        /// includes properties of declared mixins.
        /// </summary>
        IEnumerable<QueryToken> EntityProperties(Type type)
        {
            var result = from p in Reflector.PublicInstancePropertiesInOrder(type)
                         where Reflector.QueryableProperty(type, p)
                         select (QueryToken)new EntityPropertyToken(this, p, this.AddPropertyRoute(p)!);

            if (!type.IsEntity())
                return result;

            var mixinProperties = from mt in MixinDeclarations.GetMixinDeclarations(type)
                                  from p in Reflector.PublicInstancePropertiesInOrder(mt)
                                  where Reflector.QueryableProperty(mt, p)
                                  select (QueryToken)new EntityPropertyToken(this, p, PropertyRoute.Root(type).Add(mt).Add(p));

            return result.Concat(mixinProperties);
        }

        /// <summary>Dot-separated path of keys from the root token to this one.</summary>
        public string FullKey()
        {
            if (Parent == null)
                return Key;

            return Parent.FullKey() + "." + Key;
        }

        public override bool Equals(object? obj)
        {
            return obj is QueryToken && obj.GetType() == this.GetType() && Equals((QueryToken)obj);
        }

        // NOTE(review): parameter is declared non-nullable yet null-checked;
        // IEquatable<QueryToken>.Equals may still receive null from callers.
        public bool Equals(QueryToken other)
        {
            return other != null && other.QueryName.Equals(this.QueryName) && other.FullKey() == this.FullKey();
        }

        public override int GetHashCode()
        {
            return this.FullKey().GetHashCode() ^ this.QueryName.GetHashCode();
        }

        /// <summary>UI color associated with this token's filter type.</summary>
        public virtual string TypeColor
        {
            get
            {
                if (IsCollection(Type))
                    return "#CE6700";

                switch (QueryUtils.TryGetFilterType(Type))
                {
                    case FilterType.Integer:
                    case FilterType.Decimal:
                    case FilterType.String:
                    case FilterType.Guid:
                    case FilterType.Boolean:
                        return "#000000";
                    case FilterType.DateTime:
                        return "#5100A1";
                    case FilterType.Enum:
                        return "#800046";
                    case FilterType.Lite:
                        return "#2B91AF";
                    case FilterType.Embedded:
                        return "#156F8A";
                    default:
                        return "#7D7D7D";
                }
            }
        }

        /// <summary>Localized display name of this token's type ("List of X" for collections).</summary>
        public string NiceTypeName
        {
            get
            {
                Type type = Type.CleanType();
                if (IsCollection(type))
                {
                    return QueryTokenMessage.ListOf0.NiceToString().FormatWith(GetNiceTypeName(Type.ElementType()!, GetElementImplementations()));
                }

                return GetNiceTypeName(Type, GetImplementations());
            }
        }

        protected internal virtual Implementations? GetElementImplementations()
        {
            var pr = GetPropertyRoute();
            if (pr != null)
                return pr.Add("Item").TryGetImplementations();

            return null;
        }

        // string and byte[] have an element type but are not treated as collections.
        public static bool IsCollection(Type type)
        {
            return type != typeof(string) && type != typeof(byte[]) && type.ElementType() != null;
        }

        static string GetNiceTypeName(Type type, Implementations? implementations)
        {
            switch (QueryUtils.TryGetFilterType(type))
            {
                case FilterType.Integer: return QueryTokenMessage.Number.NiceToString();
                case FilterType.Decimal: return QueryTokenMessage.DecimalNumber.NiceToString();
                case FilterType.String: return QueryTokenMessage.Text.NiceToString();
                case FilterType.DateTime:
                    if (type.UnNullify() == typeof(Date))
                        return QueryTokenMessage.Date.NiceToString();
                    return QueryTokenMessage.DateTime.NiceToString();
                case FilterType.Boolean: return QueryTokenMessage.Check.NiceToString();
                case FilterType.Guid: return QueryTokenMessage.GlobalUniqueIdentifier.NiceToString();
                case FilterType.Enum: return type.UnNullify().NiceName();
                case FilterType.Lite:
                {
                    var cleanType = type.CleanType();
                    var imp = implementations!.Value;

                    if (imp.IsByAll)
                        return QueryTokenMessage.AnyEntity.NiceToString();

                    return imp.Types.CommaOr(t => t.NiceName());
                }
                case FilterType.Embedded: return QueryTokenMessage.Embedded0.NiceToString().FormatWith(type.NiceName());
                default: return type.TypeName();
            }
        }

        public bool ContainsKey(string key)
        {
            return this.Key == key || this.Parent != null && this.Parent.ContainsKey(key);
        }
    }

    /// <summary>
    /// Carries the lambda parameter, result tuple type, and pre-computed token
    /// expressions used while translating tokens into LINQ expressions.
    /// </summary>
    public class BuildExpressionContext
    {
        public BuildExpressionContext(Type tupleType, ParameterExpression parameter, Dictionary<QueryToken, Expression> replacemens)
        {
            this.TupleType = tupleType;
            this.Parameter = parameter;
            this.Replacemens = replacemens;
        }

        public readonly Type TupleType;
        public readonly ParameterExpression Parameter;
        public readonly Dictionary<QueryToken, Expression> Replacemens;

        /// <summary>Selector for the replacement registered under the "Entity" token.</summary>
        public Expression<Func<object, Lite<Entity>>> GetEntitySelector()
        {
            return Expression.Lambda<Func<object, Lite<Entity>>>(Replacemens.Single(a=>a.Key.FullKey() == "Entity").Value, Parameter);
        }
    }

    public enum QueryTokenMessage
    {
        [Description("({0} as {1})")]
        _0As1,
        [Description(" and ")]
        And,
        [Description("any entity")]
        AnyEntity,
        [Description("As {0}")]
        As0,
        [Description("check")]
        Check,
        [Description("Column {0} not found")]
        Column0NotFound,
        Count,
        Date,
[Description("date and time")] DateTime, [Description("date and time with time zone")] DateTimeOffset, Day, DayOfWeek, DayOfYear, [Description("decimal number")] DecimalNumber, [Description("embedded {0}")] Embedded0, [Description("global unique identifier")] GlobalUniqueIdentifier, Hour, [Description("list of {0}")] ListOf0, Millisecond, Minute, Month, [Description("Month Start")] MonthStart, [Description("Quarter")] Quarter, [Description("Quarter Start")] QuarterStart, [Description("Week Start")] WeekStart, [Description("Hour Start")] HourStart, [Description("Minute Start")] MinuteStart, [Description("Second Start")] SecondStart, [Description("More than one column named {0}")] MoreThanOneColumnNamed0, [Description("number")] Number, [Description(" of ")] Of, Second, [Description("text")] Text, Year, WeekNumber, [Description("{0} step {1}")] _0Steps1, [Description("Step {0}")] Step0, Length, [Description("{0} has value")] _0HasValue, [Description("Has value")] HasValue, [Description("Modulo {0}")] Modulo0, [Description("{0} mod {1}")] _0Mod1, Null, Not, Distinct } }
// // Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net> // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
//
namespace NLog.UnitTests.Targets.Wrappers
{
    using System;
    using System.Threading;
    using NLog.Common;
    using NLog.Targets;
    using NLog.Targets.Wrappers;
    using System.Collections.Generic;
    using Xunit;

    /// <summary>
    /// Tests for <see cref="AsyncTargetWrapper"/>: construction defaults, synchronous and
    /// asynchronous write paths, exception propagation through continuations, flushing,
    /// closing, and internal-log error reporting.
    /// </summary>
    public class AsyncTargetWrapperTests : NLogTestBase
    {
        /// <summary>Constructor arguments (queue limit, overflow action) are applied; other settings keep their defaults.</summary>
        [Fact]
        public void AsyncTargetWrapperInitTest()
        {
            var myTarget = new MyTarget();
            var targetWrapper = new AsyncTargetWrapper(myTarget, 300, AsyncTargetWrapperOverflowAction.Grow);
            Assert.Equal(AsyncTargetWrapperOverflowAction.Grow, targetWrapper.OverflowAction);
            Assert.Equal(300, targetWrapper.QueueLimit);
            Assert.Equal(50, targetWrapper.TimeToSleepBetweenBatches);
            Assert.Equal(100, targetWrapper.BatchSize);
        }

        /// <summary>Default construction uses Discard overflow action and a 10000-event queue limit.</summary>
        [Fact]
        public void AsyncTargetWrapperInitTest2()
        {
            var myTarget = new MyTarget();
            var targetWrapper = new AsyncTargetWrapper() { WrappedTarget = myTarget, };
            Assert.Equal(AsyncTargetWrapperOverflowAction.Discard, targetWrapper.OverflowAction);
            Assert.Equal(10000, targetWrapper.QueueLimit);
            Assert.Equal(50, targetWrapper.TimeToSleepBetweenBatches);
            Assert.Equal(100, targetWrapper.BatchSize);
        }

        /// <summary>
        /// Writes through the wrapper to a synchronous target and verifies the continuation
        /// runs on a different thread (the lazy-writer thread), with no exception.
        /// </summary>
        [Fact]
        public void AsyncTargetWrapperSyncTest1()
        {
            var myTarget = new MyTarget();
            var targetWrapper = new AsyncTargetWrapper { WrappedTarget = myTarget, };
            targetWrapper.Initialize(null);
            myTarget.Initialize(null);

            var logEvent = new LogEventInfo();
            Exception lastException = null;
            ManualResetEvent continuationHit = new ManualResetEvent(false);
            Thread continuationThread = null;
            AsyncContinuation continuation = ex =>
            {
                lastException = ex;
                continuationThread = Thread.CurrentThread;
                continuationHit.Set();
            };

            targetWrapper.WriteAsyncLogEvent(logEvent.WithContinuation(continuation));
            // Wait up to 2s for the background writer to invoke the continuation.
            Assert.True(continuationHit.WaitOne(2000));
            Assert.NotSame(continuationThread, Thread.CurrentThread);
            Assert.Null(lastException);
            Assert.Equal(1, myTarget.WriteCount);

            continuationHit.Reset();
            targetWrapper.WriteAsyncLogEvent(logEvent.WithContinuation(continuation));
            continuationHit.WaitOne();
            Assert.NotSame(continuationThread, Thread.CurrentThread);
            Assert.Null(lastException);
            Assert.Equal(2, myTarget.WriteCount);
        }

        /// <summary>Writes through the wrapper to a target that completes asynchronously; both writes must be counted.</summary>
        [Fact]
        public void AsyncTargetWrapperAsyncTest1()
        {
            var myTarget = new MyAsyncTarget();
            var targetWrapper = new AsyncTargetWrapper(myTarget);
            targetWrapper.Initialize(null);
            myTarget.Initialize(null);

            var logEvent = new LogEventInfo();
            Exception lastException = null;
            var continuationHit = new ManualResetEvent(false);
            AsyncContinuation continuation = ex =>
            {
                lastException = ex;
                continuationHit.Set();
            };

            targetWrapper.WriteAsyncLogEvent(logEvent.WithContinuation(continuation));
            Assert.True(continuationHit.WaitOne());
            Assert.Null(lastException);
            Assert.Equal(1, myTarget.WriteCount);

            continuationHit.Reset();
            targetWrapper.WriteAsyncLogEvent(logEvent.WithContinuation(continuation));
            continuationHit.WaitOne();
            Assert.Null(lastException);
            Assert.Equal(2, myTarget.WriteCount);
        }

        /// <summary>
        /// When the wrapped target reports an exception via its continuation, the exception
        /// must surface through the wrapper's continuation, and no flush must occur.
        /// </summary>
        [Fact]
        public void AsyncTargetWrapperAsyncWithExceptionTest1()
        {
            var myTarget = new MyAsyncTarget
            {
                ThrowExceptions = true,
            };

            var targetWrapper = new AsyncTargetWrapper(myTarget);
            targetWrapper.Initialize(null);
            myTarget.Initialize(null);

            var logEvent = new LogEventInfo();
            Exception lastException = null;
            var continuationHit = new ManualResetEvent(false);
            AsyncContinuation continuation = ex =>
            {
                lastException = ex;
                continuationHit.Set();
            };

            targetWrapper.WriteAsyncLogEvent(logEvent.WithContinuation(continuation));
            Assert.True(continuationHit.WaitOne());
            Assert.NotNull(lastException);
            Assert.IsType(typeof(InvalidOperationException), lastException);

            // no flush on exception
            Assert.Equal(0, myTarget.FlushCount);
            Assert.Equal(1, myTarget.WriteCount);

            continuationHit.Reset();
            lastException = null;
            targetWrapper.WriteAsyncLogEvent(logEvent.WithContinuation(continuation));
            continuationHit.WaitOne();
            Assert.NotNull(lastException);
            Assert.IsType(typeof(InvalidOperationException), lastException);
            Assert.Equal(0, myTarget.FlushCount);
            Assert.Equal(2, myTarget.WriteCount);
        }

        /// <summary>
        /// Queues 5000 events and flushes; by the time the flush continuation runs, every
        /// write continuation must have completed and exactly one target flush occurred.
        /// </summary>
        [Fact]
        public void AsyncTargetWrapperFlushTest()
        {
            var myTarget = new MyAsyncTarget
            {
                ThrowExceptions = true,
            };

            var targetWrapper = new AsyncTargetWrapper(myTarget)
            {
                OverflowAction = AsyncTargetWrapperOverflowAction.Grow,
            };

            targetWrapper.Initialize(null);
            myTarget.Initialize(null);

            List<Exception> exceptions = new List<Exception>();

            int eventCount = 5000;

            for (int i = 0; i < eventCount; ++i)
            {
                targetWrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(
                    ex =>
                    {
                        lock (exceptions)
                        {
                            exceptions.Add(ex);
                        }
                    }));
            }

            Exception lastException = null;
            ManualResetEvent mre = new ManualResetEvent(false);

            string internalLog = RunAndCaptureInternalLog(
                () =>
                {
                    targetWrapper.Flush(
                        cont =>
                        {
                            try
                            {
                                // by this time all continuations should be completed
                                Assert.Equal(eventCount, exceptions.Count);

                                // with just 1 flush of the target
                                Assert.Equal(1, myTarget.FlushCount);

                                // and all writes should be accounted for
                                Assert.Equal(eventCount, myTarget.WriteCount);
                            }
                            catch (Exception ex)
                            {
                                lastException = ex;
                            }
                            finally
                            {
                                mre.Set();
                            }
                        });
                    Assert.True(mre.WaitOne());
                },
                LogLevel.Trace);

            if (lastException != null)
            {
                // Fail with both the captured assertion failure and the internal log for diagnosis.
                Assert.True(false, lastException.ToString() + "\r\n" + internalLog);
            }
        }

        /// <summary>Closing the wrapper immediately after a write (before the batch timer fires) must not throw.</summary>
        [Fact]
        public void AsyncTargetWrapperCloseTest()
        {
            var myTarget = new MyAsyncTarget
            {
                ThrowExceptions = true,
            };

            var targetWrapper = new AsyncTargetWrapper(myTarget)
            {
                OverflowAction = AsyncTargetWrapperOverflowAction.Grow,
                TimeToSleepBetweenBatches = 1000,
            };

            targetWrapper.Initialize(null);
            myTarget.Initialize(null);

            targetWrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(ex => { }));

            // quickly close the target before the timer elapses
            targetWrapper.Close();
        }

        /// <summary>
        /// An exception thrown on the lazy-writer timer thread (here a NullReferenceException
        /// from a nulled WrappedTarget) must be reported in the internal log.
        /// </summary>
        [Fact]
        public void AsyncTargetWrapperExceptionTest()
        {
            var targetWrapper = new AsyncTargetWrapper
            {
                OverflowAction = AsyncTargetWrapperOverflowAction.Grow,
                TimeToSleepBetweenBatches = 500,
                WrappedTarget = new DebugTarget(),
            };

            targetWrapper.Initialize(null);

            // null out wrapped target - will cause exception on the timer thread
            targetWrapper.WrappedTarget = null;

            string internalLog = RunAndCaptureInternalLog(
                () =>
                {
                    targetWrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(ex => { }));
                    targetWrapper.Close();
                },
                LogLevel.Trace);

            Assert.True(internalLog.StartsWith("Error Error in lazy writer timer procedure: System.NullReferenceException", StringComparison.Ordinal), internalLog);
        }

        /// <summary>Two overlapping Flush calls must each get their continuation invoked.</summary>
        [Fact]
        public void FlushingMultipleTimesSimultaneous()
        {
            var asyncTarget = new AsyncTargetWrapper
            {
                TimeToSleepBetweenBatches = 2000,
                WrappedTarget = new DebugTarget(),
            };
            asyncTarget.Initialize(null);

            asyncTarget.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(ex => { }));

            var firstContinuationCalled = false;
            var secondContinuationCalled = false;
            var firstContinuationResetEvent = new ManualResetEvent(false);
            var secondContinuationResetEvent = new ManualResetEvent(false);
            asyncTarget.Flush(ex =>
            {
                firstContinuationCalled = true;
                firstContinuationResetEvent.Set();
            });
            asyncTarget.Flush(ex =>
            {
                secondContinuationCalled = true;
                secondContinuationResetEvent.Set();
            });

            firstContinuationResetEvent.WaitOne();
            secondContinuationResetEvent.WaitOne();
            Assert.True(firstContinuationCalled);
            Assert.True(secondContinuationCalled);
        }

        /// <summary>
        /// Test double whose async Write completes its continuation on a thread-pool thread.
        /// When <see cref="ThrowExceptions"/> is set, it reports an InvalidOperationException instead.
        /// </summary>
        class MyAsyncTarget : Target
        {
            public int FlushCount;   // incremented (interlocked) per FlushAsync
            public int WriteCount;   // incremented (interlocked) per async Write

            protected override void Write(LogEventInfo logEvent)
            {
                // Only the AsyncLogEventInfo overload is expected to be used.
                throw new NotSupportedException();
            }

            protected override void Write(AsyncLogEventInfo logEvent)
            {
                Assert.True(this.FlushCount <= this.WriteCount);
                Interlocked.Increment(ref this.WriteCount);
                ThreadPool.QueueUserWorkItem(
                    s =>
                    {
                        if (this.ThrowExceptions)
                        {
                            // NOTE(review): the continuation is deliberately invoked twice in
                            // both branches — presumably exercising the wrapper's protection
                            // against duplicate continuation calls; confirm before changing.
                            logEvent.Continuation(new InvalidOperationException("Some problem!"));
                            logEvent.Continuation(new InvalidOperationException("Some problem!"));
                        }
                        else
                        {
                            logEvent.Continuation(null);
                            logEvent.Continuation(null);
                        }
                    });
            }

            protected override void FlushAsync(AsyncContinuation asyncContinuation)
            {
                Interlocked.Increment(ref this.FlushCount);
                ThreadPool.QueueUserWorkItem(
                    s => asyncContinuation(null));
            }

            public bool ThrowExceptions { get; set; }
        }

        /// <summary>Test double that writes and flushes synchronously, counting each call.</summary>
        class MyTarget : Target
        {
            public int FlushCount { get; set; }
            public int WriteCount { get; set; }

            protected override void Write(LogEventInfo logEvent)
            {
                Assert.True(this.FlushCount <= this.WriteCount);
                this.WriteCount++;
            }

            protected override void FlushAsync(AsyncContinuation asyncContinuation)
            {
                this.FlushCount++;
                asyncContinuation(null);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text;

namespace System.IO
{
    /// <summary>Unix-specific portion of the <see cref="Path"/> partial class.</summary>
    public static partial class Path
    {
        // On Unix only NUL and '/' are invalid in a file name; only NUL is invalid in a path.
        public static char[] GetInvalidFileNameChars() => new char[] { '\0', '/' };

        public static char[] GetInvalidPathChars() => new char[] { '\0' };

        internal static int MaxPath => Interop.Sys.MaxPath;

        // macOS (HFS+/APFS default) is case-insensitive; other Unixes are treated as case-sensitive.
        private static readonly bool s_isMac = Interop.Sys.GetUnixName() == "OSX";

        /// <summary>Expands the given path to a fully qualified path.</summary>
        /// <param name="path">The path to expand; must be non-null and non-empty.</param>
        /// <exception cref="ArgumentNullException"><paramref name="path"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="path"/> is empty or contains invalid characters.</exception>
        /// <exception cref="PathTooLongException">The resulting path exceeds the system maximum.</exception>
        public static string GetFullPath(string path)
        {
            if (path == null)
                throw new ArgumentNullException(nameof(path));

            if (path.Length == 0)
                throw new ArgumentException(SR.Arg_PathIllegal);

            PathInternal.CheckInvalidPathChars(path);

            // Expand with current directory if necessary
            if (!IsPathRooted(path))
            {
                path = Combine(Interop.Sys.GetCwd(), path);
            }

            // We would ideally use realpath to do this, but it resolves symlinks, requires that the file actually exist,
            // and turns it into a full path, which we only want if fullCheck is true.
            string collapsedString = RemoveRelativeSegments(path);

            Debug.Assert(collapsedString.Length < path.Length || collapsedString == path,
                "Either we've removed characters, or the string should be unmodified from the input path.");

            if (collapsedString.Length > Interop.Sys.MaxPath)
            {
                throw new PathTooLongException(SR.IO_PathTooLong);
            }

            // Collapsing "/.." segments can empty the string; an empty result means the root.
            string result = collapsedString.Length == 0 ? PathInternal.DirectorySeparatorCharAsString : collapsedString;

            return result;
        }

        /// <summary>
        /// Try to remove relative segments from the given path (without combining with a root):
        /// collapses "//", "/./", and "/../" and normalizes alternate separators.
        /// </summary>
        /// <param name="path">The path to normalize.</param>
        /// <param name="skip">Skip the specified number of characters before evaluating.</param>
        /// <exception cref="PathTooLongException">A single path component exceeds the system maximum name length.</exception>
        private static string RemoveRelativeSegments(string path, int skip = 0)
        {
            bool flippedSeparator = false;

            // Remove "//", "/./", and "/../" from the path by copying each character to the output,
            // except the ones we're removing, such that the builder contains the normalized path
            // at the end.
            var sb = StringBuilderCache.Acquire(path.Length);
            if (skip > 0)
            {
                sb.Append(path, 0, skip);
            }

            int componentCharCount = 0;
            for (int i = skip; i < path.Length; i++)
            {
                char c = path[i];

                if (PathInternal.IsDirectorySeparator(c) && i + 1 < path.Length)
                {
                    componentCharCount = 0;

                    // Skip this character if it's a directory separator and if the next character is, too,
                    // e.g. "parent//child" => "parent/child"
                    if (PathInternal.IsDirectorySeparator(path[i + 1]))
                    {
                        continue;
                    }

                    // Skip this character and the next if it's referring to the current directory,
                    // e.g. "parent/./child" => "parent/child"
                    if ((i + 2 == path.Length || PathInternal.IsDirectorySeparator(path[i + 2])) &&
                        path[i + 1] == '.')
                    {
                        i++;
                        continue;
                    }

                    // Skip this character and the next two if it's referring to the parent directory,
                    // e.g. "parent/child/../grandchild" => "parent/grandchild"
                    if (i + 2 < path.Length &&
                        (i + 3 == path.Length || PathInternal.IsDirectorySeparator(path[i + 3])) &&
                        path[i + 1] == '.' && path[i + 2] == '.')
                    {
                        // Unwind back to the last slash (and if there isn't one, clear out everything).
                        int s;
                        for (s = sb.Length - 1; s >= 0; s--)
                        {
                            if (PathInternal.IsDirectorySeparator(sb[s]))
                            {
                                sb.Length = s;
                                break;
                            }
                        }
                        if (s < 0)
                        {
                            sb.Length = 0;
                        }

                        i += 2;
                        continue;
                    }
                }

                if (++componentCharCount > Interop.Sys.MaxName)
                {
                    throw new PathTooLongException(SR.IO_PathTooLong);
                }

                // Normalize the directory separator if needed
                if (c != PathInternal.DirectorySeparatorChar && c == PathInternal.AltDirectorySeparatorChar)
                {
                    c = PathInternal.DirectorySeparatorChar;
                    flippedSeparator = true;
                }

                sb.Append(c);
            }

            if (flippedSeparator || sb.Length != path.Length)
            {
                return StringBuilderCache.GetStringAndRelease(sb);
            }
            else
            {
                // We haven't changed the source path, return the original
                StringBuilderCache.Release(sb);
                return path;
            }
        }

        private static string RemoveLongPathPrefix(string path)
        {
            return path; // nop. There's nothing special about "long" paths on Unix.
        }

        /// <summary>Gets the temp directory from TMPDIR (defaulting to "/tmp/"), always ending with a separator.</summary>
        public static string GetTempPath()
        {
            const string TempEnvVar = "TMPDIR";
            const string DefaultTempPath = "/tmp/";

            // Get the temp path from the TMPDIR environment variable.
            // If it's not set, just return the default path.
            // If it is, return it, ensuring it ends with a slash.
            string path = Environment.GetEnvironmentVariable(TempEnvVar);
            return string.IsNullOrEmpty(path) ? DefaultTempPath :
                PathInternal.IsDirectorySeparator(path[path.Length - 1]) ? path :
                path + PathInternal.DirectorySeparatorChar;
        }

        /// <summary>Creates a uniquely-named, empty ".tmp" file and returns its full path.</summary>
        public static string GetTempFileName()
        {
            const string Suffix = ".tmp";
            const int SuffixByteLength = 4;

            // mkstemps takes a char* and overwrites the XXXXXX with six characters
            // that'll result in a unique file name.
            string template = GetTempPath() + "tmpXXXXXX" + Suffix + "\0";
            byte[] name = Encoding.UTF8.GetBytes(template);

            // Create, open, and close the temp file.
            IntPtr fd = Interop.CheckIo(Interop.Sys.MksTemps(name, SuffixByteLength));
            Interop.Sys.Close(fd); // ignore any errors from close; nothing to do if cleanup isn't possible

            // 'name' is now the name of the file
            Debug.Assert(name[name.Length - 1] == '\0');
            return Encoding.UTF8.GetString(name, 0, name.Length - 1); // trim off the trailing '\0'
        }

        /// <summary>A Unix path is rooted iff it begins with '/'. Returns false for null.</summary>
        public static bool IsPathRooted(string path)
        {
            if (path == null)
                return false;

            PathInternal.CheckInvalidPathChars(path);
            return path.Length > 0 && path[0] == PathInternal.DirectorySeparatorChar;
        }

        /// <summary>Returns "/" for rooted paths, "" otherwise, and null for null input.</summary>
        public static string GetPathRoot(string path)
        {
            if (path == null) return null;
            return IsPathRooted(path) ? PathInternal.DirectorySeparatorCharAsString : String.Empty;
        }

        /// <summary>Fills <paramref name="bytes"/> with <paramref name="byteCount"/> cryptographically strong random bytes.</summary>
        private static unsafe void GetCryptoRandomBytes(byte* bytes, int byteCount)
        {
            // We want to avoid dependencies on the Crypto library when compiling in CoreCLR. This
            // will use the existing PAL implementation.
            // Fix: the buffer, the fill, and the copy are all sized by byteCount. Previously this
            // used an unrelated KeyLength constant, ignoring the caller's requested count and
            // overrunning the destination whenever KeyLength > byteCount.
            byte[] buffer = new byte[byteCount];
            Microsoft.Win32.Win32Native.Random(bStrong: true, buffer: buffer, length: byteCount);
            Runtime.InteropServices.Marshal.Copy(buffer, 0, (IntPtr)bytes, byteCount);
        }

        /// <summary>Gets whether the system is case-sensitive.</summary>
        internal static bool IsCaseSensitive
        {
            get
            {
                return !s_isMac;
            }
        }
    }
}
namespace XenAdmin.Dialogs
{
    // Designer-generated half of the CopyVMDialog form (clone/copy a VM, optionally
    // selecting a target SR). Layout comes from the .resx via ApplyResources; avoid
    // hand-editing InitializeComponent.
    partial class CopyVMDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(CopyVMDialog));
            this.srPicker1 = new XenAdmin.Controls.SrPicker();
            this.CloseButton = new System.Windows.Forms.Button();
            this.MoveButton = new System.Windows.Forms.Button();
            this.label2 = new System.Windows.Forms.Label();
            this.NameTextBox = new System.Windows.Forms.TextBox();
            this.CloneRadioButton = new System.Windows.Forms.RadioButton();
            this.CopyRadioButton = new System.Windows.Forms.RadioButton();
            this.DescriptionTextBox = new System.Windows.Forms.TextBox();
            this.label1 = new System.Windows.Forms.Label();
            this.FastCloneDescription = new System.Windows.Forms.Label();
            this.groupBox1 = new XenAdmin.Controls.DecentGroupBox();
            this.tableLayoutPanelSrPicker = new System.Windows.Forms.TableLayoutPanel();
            this.labelSrHint = new System.Windows.Forms.Label();
            this.toolTipContainer1 = new XenAdmin.Controls.ToolTipContainer();
            this.FastClonePanel = new System.Windows.Forms.Panel();
            this.groupBox1.SuspendLayout();
            this.tableLayoutPanelSrPicker.SuspendLayout();
            this.toolTipContainer1.SuspendLayout();
            this.FastClonePanel.SuspendLayout();
            this.SuspendLayout();
            // 
            // srPicker1
            // 
            resources.ApplyResources(this.srPicker1, "srPicker1");
            this.srPicker1.Connection = null;
            this.srPicker1.Name = "srPicker1";
            this.srPicker1.SelectedIndexChanged += new System.EventHandler(this.srPicker1_SelectedIndexChanged);
            // 
            // CloseButton
            // 
            resources.ApplyResources(this.CloseButton, "CloseButton");
            this.CloseButton.DialogResult = System.Windows.Forms.DialogResult.Cancel;
            this.CloseButton.Name = "CloseButton";
            this.CloseButton.UseVisualStyleBackColor = true;
            this.CloseButton.Click += new System.EventHandler(this.CloseButton_Click);
            // 
            // MoveButton
            // 
            resources.ApplyResources(this.MoveButton, "MoveButton");
            this.MoveButton.DialogResult = System.Windows.Forms.DialogResult.OK;
            this.MoveButton.Name = "MoveButton";
            this.MoveButton.UseVisualStyleBackColor = true;
            this.MoveButton.Click += new System.EventHandler(this.MoveButton_Click);
            // 
            // label2
            // 
            resources.ApplyResources(this.label2, "label2");
            this.label2.Name = "label2";
            // 
            // NameTextBox
            // 
            resources.ApplyResources(this.NameTextBox, "NameTextBox");
            this.NameTextBox.Name = "NameTextBox";
            this.NameTextBox.TextChanged += new System.EventHandler(this.NameTextBox_TextChanged);
            // 
            // CloneRadioButton
            // 
            resources.ApplyResources(this.CloneRadioButton, "CloneRadioButton");
            this.CloneRadioButton.Checked = true;
            this.CloneRadioButton.Name = "CloneRadioButton";
            this.CloneRadioButton.TabStop = true;
            this.CloneRadioButton.UseVisualStyleBackColor = true;
            this.CloneRadioButton.CheckedChanged += new System.EventHandler(this.CloneRadioButton_CheckedChanged);
            // 
            // CopyRadioButton
            // 
            resources.ApplyResources(this.CopyRadioButton, "CopyRadioButton");
            this.CopyRadioButton.Name = "CopyRadioButton";
            this.CopyRadioButton.UseVisualStyleBackColor = true;
            this.CopyRadioButton.CheckedChanged += new System.EventHandler(this.radioButton2_CheckedChanged);
            // 
            // DescriptionTextBox
            // 
            resources.ApplyResources(this.DescriptionTextBox, "DescriptionTextBox");
            this.DescriptionTextBox.Name = "DescriptionTextBox";
            this.DescriptionTextBox.TextChanged += new System.EventHandler(this.NameTextBox_TextChanged);
            // 
            // label1
            // 
            resources.ApplyResources(this.label1, "label1");
            this.label1.Name = "label1";
            // 
            // FastCloneDescription
            // 
            resources.ApplyResources(this.FastCloneDescription, "FastCloneDescription");
            this.FastCloneDescription.AutoEllipsis = true;
            this.FastCloneDescription.Name = "FastCloneDescription";
            // 
            // groupBox1
            // 
            resources.ApplyResources(this.groupBox1, "groupBox1");
            this.groupBox1.Controls.Add(this.tableLayoutPanelSrPicker);
            this.groupBox1.Controls.Add(this.toolTipContainer1);
            this.groupBox1.Controls.Add(this.CopyRadioButton);
            this.groupBox1.Name = "groupBox1";
            this.groupBox1.TabStop = false;
            // 
            // tableLayoutPanelSrPicker
            // 
            resources.ApplyResources(this.tableLayoutPanelSrPicker, "tableLayoutPanelSrPicker");
            this.tableLayoutPanelSrPicker.Controls.Add(this.srPicker1, 0, 1);
            this.tableLayoutPanelSrPicker.Controls.Add(this.labelSrHint, 0, 0);
            this.tableLayoutPanelSrPicker.Name = "tableLayoutPanelSrPicker";
            // 
            // labelSrHint
            // 
            resources.ApplyResources(this.labelSrHint, "labelSrHint");
            this.labelSrHint.Name = "labelSrHint";
            // 
            // toolTipContainer1
            // 
            resources.ApplyResources(this.toolTipContainer1, "toolTipContainer1");
            this.toolTipContainer1.Controls.Add(this.FastClonePanel);
            this.toolTipContainer1.Name = "toolTipContainer1";
            // 
            // FastClonePanel
            // 
            this.FastClonePanel.Controls.Add(this.CloneRadioButton);
            this.FastClonePanel.Controls.Add(this.FastCloneDescription);
            resources.ApplyResources(this.FastClonePanel, "FastClonePanel");
            this.FastClonePanel.Name = "FastClonePanel";
            // 
            // CopyVMDialog
            // 
            this.AcceptButton = this.MoveButton;
            resources.ApplyResources(this, "$this");
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
            this.CancelButton = this.CloseButton;
            this.Controls.Add(this.groupBox1);
            this.Controls.Add(this.DescriptionTextBox);
            this.Controls.Add(this.NameTextBox);
            this.Controls.Add(this.label1);
            this.Controls.Add(this.label2);
            this.Controls.Add(this.MoveButton);
            this.Controls.Add(this.CloseButton);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.Sizable;
            this.Name = "CopyVMDialog";
            this.SizeGripStyle = System.Windows.Forms.SizeGripStyle.Show;
            this.Shown += new System.EventHandler(this.CopyVMDialog_Shown);
            this.groupBox1.ResumeLayout(false);
            this.groupBox1.PerformLayout();
            this.tableLayoutPanelSrPicker.ResumeLayout(false);
            this.tableLayoutPanelSrPicker.PerformLayout();
            this.toolTipContainer1.ResumeLayout(false);
            this.FastClonePanel.ResumeLayout(false);
            this.FastClonePanel.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Control fields wired up by InitializeComponent above.
        private XenAdmin.Controls.SrPicker srPicker1;
        private System.Windows.Forms.Button CloseButton;
        private System.Windows.Forms.Button MoveButton;
        private System.Windows.Forms.Label label2;
        private System.Windows.Forms.TextBox NameTextBox;
        private System.Windows.Forms.RadioButton CloneRadioButton;
        private System.Windows.Forms.RadioButton CopyRadioButton;
        private System.Windows.Forms.TextBox DescriptionTextBox;
        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.Label FastCloneDescription;
        private XenAdmin.Controls.DecentGroupBox groupBox1;
        private XenAdmin.Controls.ToolTipContainer toolTipContainer1;
        private System.Windows.Forms.Panel FastClonePanel;
        private System.Windows.Forms.TableLayoutPanel tableLayoutPanelSrPicker;
        private System.Windows.Forms.Label labelSrHint;
    }
}
using System;
using System.Reflection;
using System.Collections;
using Server;
using Server.Network;
using Server.Commands;
using Server.Commands.Generic;

namespace Server.Gumps
{
    /// <summary>
    /// In-game gump for editing a TimeSpan property via reflection. Offers "Zero",
    /// a combined H:M:S entry, and individual hour/minute/second entries; on success
    /// the change is logged and the XmlPropertiesGump is re-sent.
    /// </summary>
    public class XmlSetTimeSpanGump : Gump
    {
        private PropertyInfo m_Property;   // the TimeSpan property being edited
        private Mobile m_Mobile;           // the player editing the property
        private object m_Object;           // the object that owns the property
        private Stack m_Stack;             // navigation stack for the properties gump
        private int m_Page;                // page to return to in the properties gump
        private ArrayList m_List;          // property list for the properties gump

        // Layout metrics mirrored from PropsConfig so this gump matches the rest of the props UI.
        public static readonly bool OldStyle = PropsConfig.OldStyle;

        public static readonly int GumpOffsetX = PropsConfig.GumpOffsetX;
        public static readonly int GumpOffsetY = PropsConfig.GumpOffsetY;

        public static readonly int TextHue = PropsConfig.TextHue;
        public static readonly int TextOffsetX = PropsConfig.TextOffsetX;

        public static readonly int OffsetGumpID = PropsConfig.OffsetGumpID;
        public static readonly int HeaderGumpID = PropsConfig.HeaderGumpID;
        public static readonly int EntryGumpID = PropsConfig.EntryGumpID;
        public static readonly int BackGumpID = PropsConfig.BackGumpID;
        public static readonly int SetGumpID = PropsConfig.SetGumpID;

        public static readonly int SetWidth = PropsConfig.SetWidth;
        public static readonly int SetOffsetX = PropsConfig.SetOffsetX, SetOffsetY = PropsConfig.SetOffsetY;
        public static readonly int SetButtonID1 = PropsConfig.SetButtonID1;
        public static readonly int SetButtonID2 = PropsConfig.SetButtonID2;

        public static readonly int PrevWidth = PropsConfig.PrevWidth;
        public static readonly int PrevOffsetX = PropsConfig.PrevOffsetX, PrevOffsetY = PropsConfig.PrevOffsetY;
        public static readonly int PrevButtonID1 = PropsConfig.PrevButtonID1;
        public static readonly int PrevButtonID2 = PropsConfig.PrevButtonID2;

        public static readonly int NextWidth = PropsConfig.NextWidth;
        public static readonly int NextOffsetX = PropsConfig.NextOffsetX, NextOffsetY = PropsConfig.NextOffsetY;
        public static readonly int NextButtonID1 = PropsConfig.NextButtonID1;
        public static readonly int NextButtonID2 = PropsConfig.NextButtonID2;

        public static readonly int OffsetSize = PropsConfig.OffsetSize;

        public static readonly int EntryHeight = PropsConfig.EntryHeight;
        public static readonly int BorderSize = PropsConfig.BorderSize;

        private static readonly int EntryWidth = 212;

        private static readonly int TotalWidth = OffsetSize + EntryWidth + OffsetSize + SetWidth + OffsetSize;
        // 7 rows: name, current value, Zero, H:M:S, H, M, S.
        private static readonly int TotalHeight = OffsetSize + (7 * (EntryHeight + OffsetSize));

        private static readonly int BackWidth = BorderSize + TotalWidth + BorderSize;
        private static readonly int BackHeight = BorderSize + TotalHeight + BorderSize;

        public XmlSetTimeSpanGump( PropertyInfo prop, Mobile mobile, object o, Stack stack, int page, ArrayList list ) : base( GumpOffsetX, GumpOffsetY )
        {
            m_Property = prop;
            m_Mobile = mobile;
            m_Object = o;
            m_Stack = stack;
            m_Page = page;
            m_List = list;

            TimeSpan ts = (TimeSpan)prop.GetValue( o, null );

            AddPage( 0 );

            AddBackground( 0, 0, BackWidth, BackHeight, BackGumpID );
            AddImageTiled( BorderSize, BorderSize, TotalWidth - (OldStyle ? SetWidth + OffsetSize : 0), TotalHeight, OffsetGumpID );

            // AddRect(row, label, buttonID, textEntryID):
            // buttons 1..5 correspond to the OnResponse cases; text entries 0..2 are H, M, S.
            AddRect( 0, prop.Name, 0, -1 );
            AddRect( 1, ts.ToString(), 0, -1 );
            AddRect( 2, "Zero", 1, -1 );
            AddRect( 3, "From H:M:S", 2, -1 );
            AddRect( 4, "H:", 3, 0 );
            AddRect( 5, "M:", 4, 1 );
            AddRect( 6, "S:", 5, 2 );
        }

        /// <summary>
        /// Adds one row: a tiled background, a cropped label, an optional text entry
        /// (when <paramref name="text"/> != -1), and an optional apply button
        /// (when <paramref name="button"/> != 0).
        /// </summary>
        private void AddRect( int index, string str, int button, int text )
        {
            int x = BorderSize + OffsetSize;
            int y = BorderSize + OffsetSize + (index * (EntryHeight + OffsetSize));

            AddImageTiled( x, y, EntryWidth, EntryHeight, EntryGumpID );
            AddLabelCropped( x + TextOffsetX, y, EntryWidth - TextOffsetX, EntryHeight, TextHue, str );

            if ( text != -1 )
                AddTextEntry( x + 16 + TextOffsetX, y, EntryWidth - TextOffsetX - 16, EntryHeight, TextHue, text, "" );

            x += EntryWidth + OffsetSize;

            if ( SetGumpID != 0 )
                AddImageTiled( x, y, SetWidth, EntryHeight, SetGumpID );

            if ( button != 0 )
                AddButton( x + SetOffsetX, y + SetOffsetY, SetButtonID1, SetButtonID2, button, GumpButtonType.Reply, 0 );
        }

        /// <summary>
        /// Handles a button press: parses the requested TimeSpan (zero, combined H:M:S,
        /// or a single component), applies it via reflection when valid, and re-sends
        /// the properties gump when appropriate.
        /// </summary>
        public override void OnResponse( NetState sender, RelayInfo info )
        {
            TimeSpan toSet;
            bool shouldSet, shouldSend;

            TextRelay h = info.GetTextEntry( 0 );
            TextRelay m = info.GetTextEntry( 1 );
            TextRelay s = info.GetTextEntry( 2 );

            switch ( info.ButtonID )
            {
                case 1: // Zero
                {
                    toSet = TimeSpan.Zero;
                    shouldSet = true;
                    shouldSend = true;
                    break;
                }
                case 2: // From H:M:S
                {
                    if ( h != null && m != null && s != null )
                    {
                        try
                        {
                            toSet = TimeSpan.Parse( h.Text + ":" + m.Text + ":" + s.Text );
                            shouldSet = true;
                            shouldSend = true;
                            break;
                        }
                        catch
                        {
                            // fall through to the failure path below
                        }
                    }

                    // NOTE(review): on parse failure shouldSend is false, so the properties
                    // gump is NOT re-sent and the player is left without a gump — confirm
                    // whether this is intended or should send the gump again.
                    toSet = TimeSpan.Zero;
                    shouldSet = false;
                    shouldSend = false;
                    break;
                }
                case 3: // From H
                {
                    if ( h != null )
                    {
                        try
                        {
                            toSet = TimeSpan.FromHours( Utility.ToDouble( h.Text ) );
                            shouldSet = true;
                            shouldSend = true;
                            break;
                        }
                        catch
                        {
                            // fall through to the failure path below
                        }
                    }

                    toSet = TimeSpan.Zero;
                    shouldSet = false;
                    shouldSend = false;
                    break;
                }
                case 4: // From M
                {
                    if ( m != null )
                    {
                        try
                        {
                            toSet = TimeSpan.FromMinutes( Utility.ToDouble( m.Text ) );
                            shouldSet = true;
                            shouldSend = true;
                            break;
                        }
                        catch
                        {
                            // fall through to the failure path below
                        }
                    }

                    toSet = TimeSpan.Zero;
                    shouldSet = false;
                    shouldSend = false;
                    break;
                }
                case 5: // From S
                {
                    if ( s != null )
                    {
                        try
                        {
                            toSet = TimeSpan.FromSeconds( Utility.ToDouble( s.Text ) );
                            shouldSet = true;
                            shouldSend = true;
                            break;
                        }
                        catch
                        {
                            // fall through to the failure path below
                        }
                    }

                    toSet = TimeSpan.Zero;
                    shouldSet = false;
                    shouldSend = false;
                    break;
                }
                default: // cancel / unknown button: change nothing, but return to the properties gump
                {
                    toSet = TimeSpan.Zero;
                    shouldSet = false;
                    shouldSend = true;
                    break;
                }
            }

            if ( shouldSet )
            {
                try
                {
                    // Log before applying so staff property changes are auditable.
                    CommandLogging.LogChangeProperty( m_Mobile, m_Object, m_Property.Name, toSet.ToString() );
                    m_Property.SetValue( m_Object, toSet, null );
                }
                catch
                {
                    m_Mobile.SendMessage( "An exception was caught. The property may not have changed." );
                }
            }

            if ( shouldSend )
                m_Mobile.SendGump( new XmlPropertiesGump( m_Mobile, m_Object, m_Stack, m_List, m_Page ) );
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Reflection;
using System.Xml;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Monitoring;
using OpenSim.Framework.Serialization;
using OpenSim.Framework.Serialization.External;
using OpenSim.Region.CoreModules.World.Archiver;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using Ionic.Zlib;
using GZipStream = Ionic.Zlib.GZipStream;
using CompressionMode = Ionic.Zlib.CompressionMode;
using CompressionLevel = Ionic.Zlib.CompressionLevel;
using PermissionMask = OpenSim.Framework.PermissionMask;

namespace OpenSim.Region.CoreModules.Avatar.Inventory.Archiver
{
    /// <summary>
    /// Builds an inventory archive (IAR): walks the requested inventory
    /// folder/item tree, writes serialized folders and items into a tar
    /// stream, then asynchronously gathers and appends the referenced assets.
    /// </summary>
    public class InventoryArchiveWriteRequest
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>
        /// Determine whether this archive will save assets. Default is true.
        /// </summary>
        public bool SaveAssets { get; set; }

        /// <summary>
        /// Determines which items will be included in the archive, according to their permissions.
        /// Default is null, meaning no permission checks.
        /// </summary>
        public string FilterContent { get; set; }

        /// <summary>
        /// Counter for inventory items saved to archive for passing to completion event
        /// </summary>
        public int CountItems { get; set; }

        /// <summary>
        /// Counter for inventory items skipped due to permission filter option for passing to completion event
        /// </summary>
        public int CountFiltered { get; set; }

        /// <value>
        /// Used to select all inventory nodes in a folder but not the folder itself
        /// </value>
        private const string STAR_WILDCARD = "*";

        private InventoryArchiverModule m_module;
        private UserAccount m_userInfo;
        private string m_invPath;
        protected TarArchiveWriter m_archiveWriter;
        protected UuidGatherer m_assetGatherer;

        /// <value>
        /// We only use this to request modules
        /// </value>
        protected Scene m_scene;

        /// <value>
        /// ID of this request
        /// </value>
        protected UUID m_id;

        /// <value>
        /// Used to collect the uuids of the users that we need to save into the archive
        /// </value>
        protected Dictionary<UUID, int> m_userUuids = new Dictionary<UUID, int>();

        /// <value>
        /// The stream to which the inventory archive will be saved.
        /// </value>
        private Stream m_saveStream;

        /// <summary>
        /// Constructor
        /// </summary>
        // Convenience overload: wraps the target file in a best-compression
        // gzip stream and delegates to the stream-based constructor.
        public InventoryArchiveWriteRequest(
            UUID id, InventoryArchiverModule module, Scene scene,
            UserAccount userInfo, string invPath, string savePath)
            : this(
                id,
                module,
                scene,
                userInfo,
                invPath,
                new GZipStream(new FileStream(savePath, FileMode.Create), CompressionMode.Compress, CompressionLevel.BestCompression))
        {
        }

        /// <summary>
        /// Constructor
        /// </summary>
        public InventoryArchiveWriteRequest(
            UUID id, InventoryArchiverModule module, Scene scene,
            UserAccount userInfo, string invPath, Stream saveStream)
        {
            m_id = id;
            m_module = module;
            m_scene = scene;
            m_userInfo = userInfo;
            m_invPath = invPath;
            m_saveStream = saveStream;
            m_assetGatherer = new UuidGatherer(m_scene.AssetService);

            SaveAssets = true;
            FilterContent = null;
        }

        // Callback invoked by AssetsRequest when every gathered asset has been
        // written (or the request timed out). Closes the tar writer and the
        // underlying save stream, then raises the archive-saved event with the
        // final success/failure status and item counters.
        protected void ReceivedAllAssets(ICollection<UUID> assetsFoundUuids, ICollection<UUID> assetsNotFoundUuids, bool timedOut)
        {
            Exception reportedException = null;
            bool succeeded = true;

            try
            {
                // Closing the writer flushes the tar end-of-archive blocks; the
                // stream itself is closed in finally even if that fails.
                m_archiveWriter.Close();
            }
            catch (Exception e)
            {
                reportedException = e;
                succeeded = false;
            }
            finally
            {
                m_saveStream.Close();
            }

            if (timedOut)
            {
                succeeded = false;
                reportedException = new Exception("Loading assets timed out");
            }

            m_module.TriggerInventoryArchiveSaved(
                m_id, succeeded, m_userInfo, m_invPath, m_saveStream, reportedException, CountItems, CountFiltered);
        }

        // Serializes a single inventory item into the archive at the given
        // path, honoring the "exclude" option list and the permission filter.
        // Also queues the item's asset for later saving (unless it is a link).
        protected void SaveInvItem(InventoryItemBase inventoryItem, string path, Dictionary<string, object> options, IUserAccountService userAccountService)
        {
            if (options.ContainsKey("exclude"))
            {
                // Items may be excluded by name or by item ID.
                if (((List<String>)options["exclude"]).Contains(inventoryItem.Name) ||
                    ((List<String>)options["exclude"]).Contains(inventoryItem.ID.ToString()))
                {
                    if (options.ContainsKey("verbose"))
                    {
                        m_log.InfoFormat(
                            "[INVENTORY ARCHIVER]: Skipping inventory item {0} {1} at {2}",
                            inventoryItem.Name, inventoryItem.ID, path);
                    }

                    CountFiltered++;

                    return;
                }
            }

            // Check For Permissions Filter Flags
            if (!CanUserArchiveObject(m_userInfo.PrincipalID, inventoryItem))
            {
                m_log.InfoFormat(
                    "[INVENTORY ARCHIVER]: Insufficient permissions, skipping inventory item {0} {1} at {2}",
                    inventoryItem.Name, inventoryItem.ID, path);

                // Count Items Excluded
                CountFiltered++;

                return;
            }

            if (options.ContainsKey("verbose"))
                m_log.InfoFormat(
                    "[INVENTORY ARCHIVER]: Saving item {0} {1} (asset UUID {2})",
                    inventoryItem.ID, inventoryItem.Name, inventoryItem.AssetID);

            string filename = path + CreateArchiveItemName(inventoryItem);

            // Record the creator of this item for user record purposes (which might go away soon)
            m_userUuids[inventoryItem.CreatorIdAsUuid] = 1;

            string serialization = UserInventoryItemSerializer.Serialize(inventoryItem, options, userAccountService);
            m_archiveWriter.WriteFile(filename, serialization);

            AssetType itemAssetType = (AssetType)inventoryItem.AssetType;

            // Count inventory items (different to asset count)
            CountItems++;

            // Don't chase down link asset items as they actually point to their target item IDs rather than an asset
            if (SaveAssets && itemAssetType != AssetType.Link && itemAssetType != AssetType.LinkFolder)
                m_assetGatherer.AddForInspection(inventoryItem.AssetID);
        }

        /// <summary>
        /// Save an inventory folder
        /// </summary>
        /// <param name="inventoryFolder">The inventory folder to save</param>
        /// <param name="path">The path to which the folder should be saved</param>
        /// <param name="saveThisFolderItself">If true, save this folder itself. If false, only saves contents</param>
        /// <param name="options"></param>
        /// <param name="userAccountService"></param>
        protected void SaveInvFolder(
            InventoryFolderBase inventoryFolder, string path, bool saveThisFolderItself,
            Dictionary<string, object> options, IUserAccountService userAccountService)
        {
            if (options.ContainsKey("excludefolders"))
            {
                // Folders may be excluded by name or by folder ID; an excluded
                // folder is skipped along with its entire subtree.
                if (((List<String>)options["excludefolders"]).Contains(inventoryFolder.Name) ||
                    ((List<String>)options["excludefolders"]).Contains(inventoryFolder.ID.ToString()))
                {
                    if (options.ContainsKey("verbose"))
                    {
                        m_log.InfoFormat(
                            "[INVENTORY ARCHIVER]: Skipping folder {0} at {1}",
                            inventoryFolder.Name, path);
                    }

                    return;
                }
            }

            if (options.ContainsKey("verbose"))
                m_log.InfoFormat("[INVENTORY ARCHIVER]: Saving folder {0}", inventoryFolder.Name);

            if (saveThisFolderItself)
            {
                path += CreateArchiveFolderName(inventoryFolder);

                // We need to make sure that we record empty folders
                m_archiveWriter.WriteDir(path);
            }

            InventoryCollection contents
                = m_scene.InventoryService.GetFolderContent(inventoryFolder.Owner, inventoryFolder.ID);

            // Recurse into subfolders first, then serialize this folder's items.
            foreach (InventoryFolderBase childFolder in contents.Folders)
            {
                SaveInvFolder(childFolder, path, true, options, userAccountService);
            }

            foreach (InventoryItemBase item in contents.Items)
            {
                SaveInvItem(item, path, options, userAccountService);
            }
        }

        /// <summary>
        /// Checks whether the user has permission to export an inventory item to an IAR.
        /// </summary>
        /// <param name="UserID">The user</param>
        /// <param name="InvItem">The inventory item</param>
        /// <returns>Whether the user is allowed to export the object to an IAR</returns>
        // FilterContent is a string of flag letters: "C" requires Copy, "T"
        // requires Transfer, "M" requires Modify. Null disables filtering.
        private bool CanUserArchiveObject(UUID UserID, InventoryItemBase InvItem)
        {
            if (FilterContent == null)
                return true;// Default To Allow Export

            bool permitted = true;

            bool canCopy = (InvItem.CurrentPermissions & (uint)PermissionMask.Copy) != 0;
            bool canTransfer = (InvItem.CurrentPermissions & (uint)PermissionMask.Transfer) != 0;
            bool canMod = (InvItem.CurrentPermissions & (uint)PermissionMask.Modify) != 0;

            if (FilterContent.Contains("C") && !canCopy)
                permitted = false;

            if (FilterContent.Contains("T") && !canTransfer)
                permitted = false;

            if (FilterContent.Contains("M") && !canMod)
                permitted = false;

            return permitted;
        }

        /// <summary>
        /// Execute the inventory write request
        /// </summary>
        // Resolves m_invPath to a folder or item, writes the control file and
        // inventory tree into the tar stream, then kicks off the asynchronous
        // asset save (which completes via ReceivedAllAssets).
        public void Execute(Dictionary<string, object> options, IUserAccountService userAccountService)
        {
            if (options.ContainsKey("noassets") && (bool)options["noassets"])
                SaveAssets = false;

            // Set Permission filter if flag is set
            if (options.ContainsKey("checkPermissions"))
            {
                Object temp;
                if (options.TryGetValue("checkPermissions", out temp))
                    FilterContent = temp.ToString().ToUpper();
            }

            try
            {
                InventoryFolderBase inventoryFolder = null;
                InventoryItemBase inventoryItem = null;
                InventoryFolderBase rootFolder = m_scene.InventoryService.GetRootFolder(m_userInfo.PrincipalID);

                bool saveFolderContentsOnly = false;

                // Eliminate double slashes and any leading / on the path.
                string[] components
                    = m_invPath.Split(
                        new string[] { InventoryFolderImpl.PATH_DELIMITER }, StringSplitOptions.RemoveEmptyEntries);

                int maxComponentIndex = components.Length - 1;

                // If the path terminates with a STAR then later on we want to archive all nodes in the folder but not the
                // folder itself. This may get more sophisticated later on
                if (maxComponentIndex >= 0 && components[maxComponentIndex] == STAR_WILDCARD)
                {
                    saveFolderContentsOnly = true;
                    maxComponentIndex--;
                }
                else if (maxComponentIndex == -1)
                {
                    // If the user has just specified "/", then don't save the root "My Inventory" folder. This is
                    // more intuitive then requiring the user to specify "/*" for this.
                    saveFolderContentsOnly = true;
                }

                // Rebuild a normalized path from the surviving components.
                m_invPath = String.Empty;
                for (int i = 0; i <= maxComponentIndex; i++)
                {
                    m_invPath += components[i] + InventoryFolderImpl.PATH_DELIMITER;
                }

                // Annoyingly Split actually returns the original string if the input string consists only of delimiters
                // Therefore if we still start with a / after the split, then we need the root folder
                if (m_invPath.Length == 0)
                {
                    inventoryFolder = rootFolder;
                }
                else
                {
                    m_invPath = m_invPath.Remove(m_invPath.LastIndexOf(InventoryFolderImpl.PATH_DELIMITER));
                    List<InventoryFolderBase> candidateFolders
                        = InventoryArchiveUtils.FindFoldersByPath(m_scene.InventoryService, rootFolder, m_invPath);
                    if (candidateFolders.Count > 0)
                        inventoryFolder = candidateFolders[0];
                }

                // The path may point to an item instead
                if (inventoryFolder == null)
                    inventoryItem = InventoryArchiveUtils.FindItemByPath(m_scene.InventoryService, rootFolder, m_invPath);

                if (null == inventoryFolder && null == inventoryItem)
                {
                    // We couldn't find the path indicated
                    string errorMessage = string.Format("Aborted save. Could not find inventory path {0}", m_invPath);
                    Exception e = new InventoryArchiverException(errorMessage);
                    m_module.TriggerInventoryArchiveSaved(m_id, false, m_userInfo, m_invPath, m_saveStream, e, 0, 0);
                    throw e;
                }

                m_archiveWriter = new TarArchiveWriter(m_saveStream);

                m_log.InfoFormat("[INVENTORY ARCHIVER]: Adding control file to archive.");

                // Write out control file. This has to be done first so that subsequent loaders will see this file first
                // XXX: I know this is a weak way of doing it since external non-OAR aware tar executables will not do this
                // not sure how to fix this though, short of going with a completely different file format.
                m_archiveWriter.WriteFile(ArchiveConstants.CONTROL_FILE_PATH, CreateControlFile(options));

                if (inventoryFolder != null)
                {
                    m_log.DebugFormat(
                        "[INVENTORY ARCHIVER]: Found folder {0} {1} at {2}",
                        inventoryFolder.Name,
                        inventoryFolder.ID,
                        m_invPath == String.Empty ? InventoryFolderImpl.PATH_DELIMITER : m_invPath);

                    //recurse through all dirs getting dirs and files
                    SaveInvFolder(inventoryFolder, ArchiveConstants.INVENTORY_PATH, !saveFolderContentsOnly, options, userAccountService);
                }
                else if (inventoryItem != null)
                {
                    m_log.DebugFormat(
                        "[INVENTORY ARCHIVER]: Found item {0} {1} at {2}",
                        inventoryItem.Name, inventoryItem.ID, m_invPath);

                    SaveInvItem(inventoryItem, ArchiveConstants.INVENTORY_PATH, options, userAccountService);
                }

                // Don't put all this profile information into the archive right now.
                //SaveUsers();

                if (SaveAssets)
                {
                    m_assetGatherer.GatherAll();

                    m_log.DebugFormat(
                        "[INVENTORY ARCHIVER]: Saving {0} assets for items", m_assetGatherer.GatheredUuids.Count);

                    // Assets are fetched and written on a worker thread;
                    // ReceivedAllAssets finalizes the archive when done.
                    AssetsRequest ar
                        = new AssetsRequest(
                            new AssetsArchiver(m_archiveWriter),
                            m_assetGatherer.GatheredUuids, m_scene.AssetService,
                            m_scene.UserAccountService, m_scene.RegionInfo.ScopeID,
                            options, ReceivedAllAssets);

                    WorkManager.RunInThread(o => ar.Execute(), null, string.Format("AssetsRequest ({0})", m_scene.Name));
                }
                else
                {
                    m_log.DebugFormat("[INVENTORY ARCHIVER]: Not saving assets since --noassets was specified");

                    // No assets to wait for: finalize the archive immediately.
                    ReceivedAllAssets(new List<UUID>(), new List<UUID>(), false);
                }
            }
            catch (Exception)
            {
                m_saveStream.Close();
                throw;
            }
        }

        /// <summary>
        /// Save information for the users that we've collected.
        /// </summary>
        // NOTE(review): currently unused — the call in Execute is commented out.
        protected void SaveUsers()
        {
            m_log.InfoFormat("[INVENTORY ARCHIVER]: Saving user information for {0} users", m_userUuids.Count);

            foreach (UUID creatorId in m_userUuids.Keys)
            {
                // Record the creator of this item
                UserAccount creator = m_scene.UserAccountService.GetUserAccount(m_scene.RegionInfo.ScopeID, creatorId);

                if (creator != null)
                {
                    m_archiveWriter.WriteFile(
                        ArchiveConstants.USERS_PATH + creator.FirstName + " " + creator.LastName + ".xml",
                        UserProfileSerializer.Serialize(creator.PrincipalID, creator.FirstName, creator.LastName));
                }
                else
                {
                    m_log.WarnFormat("[INVENTORY ARCHIVER]: Failed to get creator profile for {0}", creatorId);
                }
            }
        }

        /// <summary>
        /// Create the archive name for a particular folder.
        /// </summary>
        ///
        /// These names are prepended with an inventory folder's UUID so that more than one folder can have the
        /// same name
        ///
        /// <param name="folder"></param>
        /// <returns></returns>
        public static string CreateArchiveFolderName(InventoryFolderBase folder)
        {
            return CreateArchiveFolderName(folder.Name, folder.ID);
        }

        /// <summary>
        /// Create the archive name for a particular item.
        /// </summary>
        ///
        /// These names are prepended with an inventory item's UUID so that more than one item can have the
        /// same name
        ///
        /// <param name="item"></param>
        /// <returns></returns>
        public static string CreateArchiveItemName(InventoryItemBase item)
        {
            return CreateArchiveItemName(item.Name, item.ID);
        }

        /// <summary>
        /// Create an archive folder name given its constituent components
        /// </summary>
        /// <param name="name"></param>
        /// <param name="id"></param>
        /// <returns></returns>
        public static string CreateArchiveFolderName(string name, UUID id)
        {
            return string.Format(
                "{0}{1}{2}/",
                InventoryArchiveUtils.EscapeArchivePath(name),
                ArchiveConstants.INVENTORY_NODE_NAME_COMPONENT_SEPARATOR,
                id);
        }

        /// <summary>
        /// Create an archive item name given its constituent components
        /// </summary>
        /// <param name="name"></param>
        /// <param name="id"></param>
        /// <returns></returns>
        public static string CreateArchiveItemName(string name, UUID id)
        {
            return string.Format(
                "{0}{1}{2}.xml",
                InventoryArchiveUtils.EscapeArchivePath(name),
                ArchiveConstants.INVENTORY_NODE_NAME_COMPONENT_SEPARATOR,
                id);
        }

        /// <summary>
        /// Create the control file for the archive
        /// </summary>
        /// <param name="options"></param>
        /// <returns></returns>
        // Archive format version 1.2 is used when "home" info is requested,
        // otherwise 0.3; loaders key off this to decide what they accept.
        public string CreateControlFile(Dictionary<string, object> options)
        {
            int majorVersion, minorVersion;

            if (options.ContainsKey("home"))
            {
                majorVersion = 1;
                minorVersion = 2;
            }
            else
            {
                majorVersion = 0;
                minorVersion = 3;
            }

            m_log.InfoFormat("[INVENTORY ARCHIVER]: Creating version {0}.{1} IAR", majorVersion, minorVersion);

            StringWriter sw = new StringWriter();
            XmlTextWriter xtw = new XmlTextWriter(sw);
            xtw.Formatting = Formatting.Indented;
            xtw.WriteStartDocument();
            xtw.WriteStartElement("archive");
            xtw.WriteAttributeString("major_version", majorVersion.ToString());
            xtw.WriteAttributeString("minor_version", minorVersion.ToString());

            xtw.WriteElementString("assets_included", SaveAssets.ToString());

            xtw.WriteEndElement();

            xtw.Flush();
            xtw.Close();

            String s = sw.ToString();
            sw.Close();

            return s;
        }
    }
}
using System;
using System.IO;
using System.Threading;
using Htc.Vita.Core.Log;
using Htc.Vita.Core.Util;

namespace Htc.Vita.Core.Crypto
{
    /// <summary>
    /// Class Sha1. Template for SHA-1 checksum generation and validation,
    /// with pluggable implementations registered through TypeRegistry.
    /// </summary>
    public abstract partial class Sha1
    {
        /// <summary>
        /// Gets the Base64 form length.
        /// </summary>
        /// <value>The Base64 form length.</value>
        public static int Base64FormLength => 28; // "2jmj7l5rSw0yVb/vlWAYkK/YBwk="
        /// <summary>
        /// Gets the hexadecimal form length.
        /// </summary>
        /// <value>The hexadecimal form length.</value>
        public static int HexFormLength => 40; // "da39a3ee5e6b4b0d3255bfef95601890afd80709"

        static Sha1()
        {
            TypeRegistry.RegisterDefault<Sha1, DefaultSha1>();
        }

        /// <summary>
        /// Registers the instance type.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        public static void Register<T>()
                where T : Sha1, new()
        {
            TypeRegistry.Register<Sha1, T>();
        }

        /// <summary>
        /// Gets the instance.
        /// </summary>
        /// <returns>Sha1.</returns>
        public static Sha1 GetInstance()
        {
            return TypeRegistry.GetInstance<Sha1>();
        }

        /// <summary>
        /// Gets the instance.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <returns>Sha1.</returns>
        public static Sha1 GetInstance<T>()
                where T : Sha1, new()
        {
            return TypeRegistry.GetInstance<Sha1, T>();
        }

        /// <summary>
        /// Generates the checksum value in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <returns>System.String.</returns>
        public string GenerateInBase64(FileInfo file)
        {
            return GenerateInBase64(
                    file,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Generates the checksum value in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String. Empty on a missing file, cancellation or error.</returns>
        public string GenerateInBase64(
                FileInfo file,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists)
            {
                return string.Empty;
            }

            try
            {
                return OnGenerateInBase64(
                        file,
                        cancellationToken
                );
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha1)).Warn("Generating checksum in base64 cancelled");
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Generating checksum in base64 error: {e}");
            }
            return string.Empty;
        }

        /// <summary>
        /// Generates the checksum value in Base64 form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String. Empty on null content or error.</returns>
        public string GenerateInBase64(string content)
        {
            if (content == null)
            {
                return string.Empty;
            }

            try
            {
                return OnGenerateInBase64(content);
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Generating checksum in base64 error: {e}");
            }
            return string.Empty;
        }

        /// <summary>
        /// Generates the checksum value in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <returns>System.String.</returns>
        public string GenerateInHex(FileInfo file)
        {
            return GenerateInHex(
                    file,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Generates the checksum value in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String. Empty on a missing file, cancellation or error.</returns>
        public string GenerateInHex(
                FileInfo file,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists)
            {
                return string.Empty;
            }

            try
            {
                return OnGenerateInHex(
                        file,
                        cancellationToken
                );
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha1)).Warn("Generating checksum in hex cancelled");
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Generating checksum in hex error: {e}");
            }
            return string.Empty;
        }

        /// <summary>
        /// Generates the checksum value in hexadecimal form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String. Empty on null content or error.</returns>
        public string GenerateInHex(string content)
        {
            if (content == null)
            {
                return string.Empty;
            }

            try
            {
                return OnGenerateInHex(content);
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Generating checksum in hex error: {e}");
            }
            return string.Empty;
        }

        /// <summary>
        /// Validates the file in all checksum form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInAll(
                FileInfo file,
                string checksum)
        {
            return ValidateInAll(
                    file,
                    checksum,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Validates the file in all checksum form.
        /// The checksum form is picked by its length: 40 chars means hex,
        /// 28 chars means Base64, anything else is rejected.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInAll(
                FileInfo file,
                string checksum,
                CancellationToken cancellationToken)
        {
            if (string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            if (checksum.Length == HexFormLength)
            {
                return ValidateInHex(
                        file,
                        checksum,
                        cancellationToken
                );
            }
            if (checksum.Length == Base64FormLength)
            {
                return ValidateInBase64(
                        file,
                        checksum,
                        cancellationToken
                );
            }
            return false;
        }

        /// <summary>
        /// Validates the content in all checksum form.
        /// A 40-char checksum is treated as hex; everything else is tried as Base64.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInAll(
                string content,
                string checksum)
        {
            if (string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            return checksum.Length == HexFormLength
                    ? ValidateInHex(
                            content,
                            checksum
                    )
                    : ValidateInBase64(
                            content,
                            checksum
                    );
        }

        /// <summary>
        /// Validates the file in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInBase64(
                FileInfo file,
                string checksum)
        {
            return ValidateInBase64(
                    file,
                    checksum,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Validates the file in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInBase64(
                FileInfo file,
                string checksum,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            try
            {
                var generated = OnGenerateInBase64(
                        file,
                        cancellationToken
                );
                return checksum.Equals(generated);
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha1)).Warn("Validating checksum in base64 cancelled");
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Validating checksum in base64 error: {e}");
            }
            return false;
        }

        /// <summary>
        /// Validates the content in Base64 form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInBase64(
                string content,
                string checksum)
        {
            if (content == null || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            try
            {
                var generated = OnGenerateInBase64(content);
                return checksum.Equals(generated);
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Validating checksum in base64 error: {e}");
            }
            return false;
        }

        /// <summary>
        /// Validates the file in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInHex(
                FileInfo file,
                string checksum)
        {
            return ValidateInHex(
                    file,
                    checksum,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Validates the file in hexadecimal form. The comparison is
        /// case-insensitive on the supplied checksum.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInHex(
                FileInfo file,
                string checksum,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            try
            {
                var generated = OnGenerateInHex(
                        file,
                        cancellationToken
                );
                return checksum.ToLowerInvariant().Equals(generated);
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha1)).Warn("Validating checksum in hex cancelled");
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Validating checksum in hex error: {e}");
            }
            return false;
        }

        /// <summary>
        /// Validates the content in hexadecimal form. The comparison is
        /// case-insensitive on the supplied checksum.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInHex(
                string content,
                string checksum)
        {
            if (content == null || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            try
            {
                var generated = OnGenerateInHex(content);
                return checksum.ToLowerInvariant().Equals(generated);
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha1)).Fatal($"Validating checksum in hex error: {e}");
            }
            return false;
        }

        /// <summary>
        /// Called when generating the checksum in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInBase64(
                FileInfo file,
                CancellationToken cancellationToken
        );

        /// <summary>
        /// Called when generating the checksum in Base64 form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInBase64(string content);

        /// <summary>
        /// Called when generating the checksum in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInHex(
                FileInfo file,
                CancellationToken cancellationToken
        );

        /// <summary>
        /// Called when generating the checksum in hexadecimal form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInHex(string content);
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! using pb = global::Google.ProtocolBuffers; using pbc = global::Google.ProtocolBuffers.Collections; using pbd = global::Google.ProtocolBuffers.Descriptors; using scg = global::System.Collections.Generic; namespace Sirikata.Protocol._PBJ_Internal { public static partial class MessageHeader { #region Extension registration public static void RegisterAllExtensions(pb::ExtensionRegistry registry) { } #endregion #region Static variables internal static pbd::MessageDescriptor internal__static_Sirikata_Protocol__PBJ_Internal_Header__Descriptor; internal static pb::FieldAccess.FieldAccessorTable<global::Sirikata.Protocol._PBJ_Internal.Header, global::Sirikata.Protocol._PBJ_Internal.Header.Builder> internal__static_Sirikata_Protocol__PBJ_Internal_Header__FieldAccessorTable; #endregion #region Descriptor public static pbd::FileDescriptor Descriptor { get { return descriptor; } } private static pbd::FileDescriptor descriptor; static MessageHeader() { byte[] descriptorData = global::System.Convert.FromBase64String( "ChNNZXNzYWdlSGVhZGVyLnByb3RvEh9TaXJpa2F0YS5Qcm90b2NvbC5fUEJK" + "X0ludGVybmFsIooDCgZIZWFkZXISFQoNc291cmNlX29iamVjdBgBIAEoDBIT" + "Cgtzb3VyY2VfcG9ydBgDIAEoDRIVCgxzb3VyY2Vfc3BhY2UYgAwgASgMEhoK" + "EmRlc3RpbmF0aW9uX29iamVjdBgCIAEoDBIYChBkZXN0aW5hdGlvbl9wb3J0" + "GAQgASgNEhoKEWRlc3RpbmF0aW9uX3NwYWNlGIEMIAEoDBIKCgJpZBgHIAEo" + "AxIQCghyZXBseV9pZBgIIAEoAxJMCg1yZXR1cm5fc3RhdHVzGIAOIAEoDjI0" + "LlNpcmlrYXRhLlByb3RvY29sLl9QQkpfSW50ZXJuYWwuSGVhZGVyLlJldHVy" + "blN0YXR1cyJ/CgxSZXR1cm5TdGF0dXMSCwoHU1VDQ0VTUxAAEhMKD05FVFdP" + "UktfRkFJTFVSRRABEhMKD1RJTUVPVVRfRkFJTFVSRRADEhIKDlBST1RPQ09M" + "X0VSUk9SEAQSEAoMUE9SVF9GQUlMVVJFEAUSEgoOVU5LTk9XTl9PQkpFQ1QQ" + "Bg=="); pbd::FileDescriptor.InternalDescriptorAssigner assigner = delegate(pbd::FileDescriptor root) { descriptor = root; internal__static_Sirikata_Protocol__PBJ_Internal_Header__Descriptor = Descriptor.MessageTypes[0]; 
internal__static_Sirikata_Protocol__PBJ_Internal_Header__FieldAccessorTable = new pb::FieldAccess.FieldAccessorTable<global::Sirikata.Protocol._PBJ_Internal.Header, global::Sirikata.Protocol._PBJ_Internal.Header.Builder>(internal__static_Sirikata_Protocol__PBJ_Internal_Header__Descriptor, new string[] { "SourceObject", "SourcePort", "SourceSpace", "DestinationObject", "DestinationPort", "DestinationSpace", "Id", "ReplyId", "ReturnStatus", }); return null; }; pbd::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData, new pbd::FileDescriptor[] { }, assigner); } #endregion } #region Messages public sealed partial class Header : pb::GeneratedMessage<Header, Header.Builder> { private static readonly Header defaultInstance = new Builder().BuildPartial(); public static Header DefaultInstance { get { return defaultInstance; } } public override Header DefaultInstanceForType { get { return defaultInstance; } } protected override Header ThisMessage { get { return this; } } public static pbd::MessageDescriptor Descriptor { get { return global::Sirikata.Protocol._PBJ_Internal.MessageHeader.internal__static_Sirikata_Protocol__PBJ_Internal_Header__Descriptor; } } protected override pb::FieldAccess.FieldAccessorTable<Header, Header.Builder> InternalFieldAccessors { get { return global::Sirikata.Protocol._PBJ_Internal.MessageHeader.internal__static_Sirikata_Protocol__PBJ_Internal_Header__FieldAccessorTable; } } #region Nested types public static class Types { public enum ReturnStatus { SUCCESS = 0, NETWORK_FAILURE = 1, TIMEOUT_FAILURE = 3, PROTOCOL_ERROR = 4, PORT_FAILURE = 5, UNKNOWN_OBJECT = 6, } } #endregion public const int SourceObjectFieldNumber = 1; private bool hasSourceObject; private pb::ByteString sourceObject_ = pb::ByteString.Empty; public bool HasSourceObject { get { return hasSourceObject; } } public pb::ByteString SourceObject { get { return sourceObject_; } } public const int SourcePortFieldNumber = 3; private bool hasSourcePort; private uint sourcePort_ = 
0; public bool HasSourcePort { get { return hasSourcePort; } } [global::System.CLSCompliant(false)] public uint SourcePort { get { return sourcePort_; } } public const int SourceSpaceFieldNumber = 1536; private bool hasSourceSpace; private pb::ByteString sourceSpace_ = pb::ByteString.Empty; public bool HasSourceSpace { get { return hasSourceSpace; } } public pb::ByteString SourceSpace { get { return sourceSpace_; } } public const int DestinationObjectFieldNumber = 2; private bool hasDestinationObject; private pb::ByteString destinationObject_ = pb::ByteString.Empty; public bool HasDestinationObject { get { return hasDestinationObject; } } public pb::ByteString DestinationObject { get { return destinationObject_; } } public const int DestinationPortFieldNumber = 4; private bool hasDestinationPort; private uint destinationPort_ = 0; public bool HasDestinationPort { get { return hasDestinationPort; } } [global::System.CLSCompliant(false)] public uint DestinationPort { get { return destinationPort_; } } public const int DestinationSpaceFieldNumber = 1537; private bool hasDestinationSpace; private pb::ByteString destinationSpace_ = pb::ByteString.Empty; public bool HasDestinationSpace { get { return hasDestinationSpace; } } public pb::ByteString DestinationSpace { get { return destinationSpace_; } } public const int IdFieldNumber = 7; private bool hasId; private long id_ = 0L; public bool HasId { get { return hasId; } } public long Id { get { return id_; } } public const int ReplyIdFieldNumber = 8; private bool hasReplyId; private long replyId_ = 0L; public bool HasReplyId { get { return hasReplyId; } } public long ReplyId { get { return replyId_; } } public const int ReturnStatusFieldNumber = 1792; private bool hasReturnStatus; private global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus returnStatus_ = global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus.SUCCESS; public bool HasReturnStatus { get { return hasReturnStatus; } } public 
global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus ReturnStatus { get { return returnStatus_; } } public override bool IsInitialized { get { return true; } } public override void WriteTo(pb::CodedOutputStream output) { if (HasSourceObject) { output.WriteBytes(1, SourceObject); } if (HasDestinationObject) { output.WriteBytes(2, DestinationObject); } if (HasSourcePort) { output.WriteUInt32(3, SourcePort); } if (HasDestinationPort) { output.WriteUInt32(4, DestinationPort); } if (HasId) { output.WriteInt64(7, Id); } if (HasReplyId) { output.WriteInt64(8, ReplyId); } if (HasSourceSpace) { output.WriteBytes(1536, SourceSpace); } if (HasDestinationSpace) { output.WriteBytes(1537, DestinationSpace); } if (HasReturnStatus) { output.WriteEnum(1792, (int) ReturnStatus); } UnknownFields.WriteTo(output); } private int memoizedSerializedSize = -1; public override int SerializedSize { get { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (HasSourceObject) { size += pb::CodedOutputStream.ComputeBytesSize(1, SourceObject); } if (HasSourcePort) { size += pb::CodedOutputStream.ComputeUInt32Size(3, SourcePort); } if (HasSourceSpace) { size += pb::CodedOutputStream.ComputeBytesSize(1536, SourceSpace); } if (HasDestinationObject) { size += pb::CodedOutputStream.ComputeBytesSize(2, DestinationObject); } if (HasDestinationPort) { size += pb::CodedOutputStream.ComputeUInt32Size(4, DestinationPort); } if (HasDestinationSpace) { size += pb::CodedOutputStream.ComputeBytesSize(1537, DestinationSpace); } if (HasId) { size += pb::CodedOutputStream.ComputeInt64Size(7, Id); } if (HasReplyId) { size += pb::CodedOutputStream.ComputeInt64Size(8, ReplyId); } if (HasReturnStatus) { size += pb::CodedOutputStream.ComputeEnumSize(1792, (int) ReturnStatus); } size += UnknownFields.SerializedSize; memoizedSerializedSize = size; return size; } } public static Header ParseFrom(pb::ByteString data) { return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed(); } 
public static Header ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed(); } public static Header ParseFrom(byte[] data) { return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed(); } public static Header ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed(); } public static Header ParseFrom(global::System.IO.Stream input) { return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed(); } public static Header ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed(); } public static Header ParseDelimitedFrom(global::System.IO.Stream input) { return CreateBuilder().MergeDelimitedFrom(input).BuildParsed(); } public static Header ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) { return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed(); } public static Header ParseFrom(pb::CodedInputStream input) { return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed(); } public static Header ParseFrom(pb::CodedInputStream input, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed(); } public static Builder CreateBuilder() { return new Builder(); } public override Builder ToBuilder() { return CreateBuilder(this); } public override Builder CreateBuilderForType() { return new Builder(); } public static Builder CreateBuilder(Header prototype) { return (Builder) new Builder().MergeFrom(prototype); } public sealed partial class Builder : pb::GeneratedBuilder<Header, Builder> { protected override Builder ThisBuilder { get { return this; } } public Builder() {} Header result = new Header(); protected override Header 
MessageBeingBuilt { get { return result; } } public override Builder Clear() { result = new Header(); return this; } public override Builder Clone() { return new Builder().MergeFrom(result); } public override pbd::MessageDescriptor DescriptorForType { get { return global::Sirikata.Protocol._PBJ_Internal.Header.Descriptor; } } public override Header DefaultInstanceForType { get { return global::Sirikata.Protocol._PBJ_Internal.Header.DefaultInstance; } } public override Header BuildPartial() { if (result == null) { throw new global::System.InvalidOperationException("build() has already been called on this Builder"); } Header returnMe = result; result = null; return returnMe; } public override Builder MergeFrom(pb::IMessage other) { if (other is Header) { return MergeFrom((Header) other); } else { base.MergeFrom(other); return this; } } public override Builder MergeFrom(Header other) { if (other == global::Sirikata.Protocol._PBJ_Internal.Header.DefaultInstance) return this; if (other.HasSourceObject) { SourceObject = other.SourceObject; } if (other.HasSourcePort) { SourcePort = other.SourcePort; } if (other.HasSourceSpace) { SourceSpace = other.SourceSpace; } if (other.HasDestinationObject) { DestinationObject = other.DestinationObject; } if (other.HasDestinationPort) { DestinationPort = other.DestinationPort; } if (other.HasDestinationSpace) { DestinationSpace = other.DestinationSpace; } if (other.HasId) { Id = other.Id; } if (other.HasReplyId) { ReplyId = other.ReplyId; } if (other.HasReturnStatus) { ReturnStatus = other.ReturnStatus; } this.MergeUnknownFields(other.UnknownFields); return this; } public override Builder MergeFrom(pb::CodedInputStream input) { return MergeFrom(input, pb::ExtensionRegistry.Empty); } public override Builder MergeFrom(pb::CodedInputStream input, pb::ExtensionRegistry extensionRegistry) { pb::UnknownFieldSet.Builder unknownFields = null; while (true) { uint tag = input.ReadTag(); switch (tag) { case 0: { if (unknownFields != null) { 
this.UnknownFields = unknownFields.Build(); } return this; } default: { if (pb::WireFormat.IsEndGroupTag(tag)) { if (unknownFields != null) { this.UnknownFields = unknownFields.Build(); } return this; } if (unknownFields == null) { unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields); } ParseUnknownField(input, unknownFields, extensionRegistry, tag); break; } case 10: { SourceObject = input.ReadBytes(); break; } case 18: { DestinationObject = input.ReadBytes(); break; } case 24: { SourcePort = input.ReadUInt32(); break; } case 32: { DestinationPort = input.ReadUInt32(); break; } case 56: { Id = input.ReadInt64(); break; } case 64: { ReplyId = input.ReadInt64(); break; } case 12290: { SourceSpace = input.ReadBytes(); break; } case 12298: { DestinationSpace = input.ReadBytes(); break; } case 14336: { int rawValue = input.ReadEnum(); if (!global::System.Enum.IsDefined(typeof(global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus), rawValue)) { if (unknownFields == null) { unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields); } unknownFields.MergeVarintField(1792, (ulong) rawValue); } else { ReturnStatus = (global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus) rawValue; } break; } } } } public bool HasSourceObject { get { return result.HasSourceObject; } } public pb::ByteString SourceObject { get { return result.SourceObject; } set { SetSourceObject(value); } } public Builder SetSourceObject(pb::ByteString value) { pb::ThrowHelper.ThrowIfNull(value, "value"); result.hasSourceObject = true; result.sourceObject_ = value; return this; } public Builder ClearSourceObject() { result.hasSourceObject = false; result.sourceObject_ = pb::ByteString.Empty; return this; } public bool HasSourcePort { get { return result.HasSourcePort; } } [global::System.CLSCompliant(false)] public uint SourcePort { get { return result.SourcePort; } set { SetSourcePort(value); } } [global::System.CLSCompliant(false)] public Builder 
SetSourcePort(uint value) { result.hasSourcePort = true; result.sourcePort_ = value; return this; } public Builder ClearSourcePort() { result.hasSourcePort = false; result.sourcePort_ = 0; return this; } public bool HasSourceSpace { get { return result.HasSourceSpace; } } public pb::ByteString SourceSpace { get { return result.SourceSpace; } set { SetSourceSpace(value); } } public Builder SetSourceSpace(pb::ByteString value) { pb::ThrowHelper.ThrowIfNull(value, "value"); result.hasSourceSpace = true; result.sourceSpace_ = value; return this; } public Builder ClearSourceSpace() { result.hasSourceSpace = false; result.sourceSpace_ = pb::ByteString.Empty; return this; } public bool HasDestinationObject { get { return result.HasDestinationObject; } } public pb::ByteString DestinationObject { get { return result.DestinationObject; } set { SetDestinationObject(value); } } public Builder SetDestinationObject(pb::ByteString value) { pb::ThrowHelper.ThrowIfNull(value, "value"); result.hasDestinationObject = true; result.destinationObject_ = value; return this; } public Builder ClearDestinationObject() { result.hasDestinationObject = false; result.destinationObject_ = pb::ByteString.Empty; return this; } public bool HasDestinationPort { get { return result.HasDestinationPort; } } [global::System.CLSCompliant(false)] public uint DestinationPort { get { return result.DestinationPort; } set { SetDestinationPort(value); } } [global::System.CLSCompliant(false)] public Builder SetDestinationPort(uint value) { result.hasDestinationPort = true; result.destinationPort_ = value; return this; } public Builder ClearDestinationPort() { result.hasDestinationPort = false; result.destinationPort_ = 0; return this; } public bool HasDestinationSpace { get { return result.HasDestinationSpace; } } public pb::ByteString DestinationSpace { get { return result.DestinationSpace; } set { SetDestinationSpace(value); } } public Builder SetDestinationSpace(pb::ByteString value) { 
pb::ThrowHelper.ThrowIfNull(value, "value"); result.hasDestinationSpace = true; result.destinationSpace_ = value; return this; } public Builder ClearDestinationSpace() { result.hasDestinationSpace = false; result.destinationSpace_ = pb::ByteString.Empty; return this; } public bool HasId { get { return result.HasId; } } public long Id { get { return result.Id; } set { SetId(value); } } public Builder SetId(long value) { result.hasId = true; result.id_ = value; return this; } public Builder ClearId() { result.hasId = false; result.id_ = 0L; return this; } public bool HasReplyId { get { return result.HasReplyId; } } public long ReplyId { get { return result.ReplyId; } set { SetReplyId(value); } } public Builder SetReplyId(long value) { result.hasReplyId = true; result.replyId_ = value; return this; } public Builder ClearReplyId() { result.hasReplyId = false; result.replyId_ = 0L; return this; } public bool HasReturnStatus { get { return result.HasReturnStatus; } } public global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus ReturnStatus { get { return result.ReturnStatus; } set { SetReturnStatus(value); } } public Builder SetReturnStatus(global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus value) { result.hasReturnStatus = true; result.returnStatus_ = value; return this; } public Builder ClearReturnStatus() { result.hasReturnStatus = false; result.returnStatus_ = global::Sirikata.Protocol._PBJ_Internal.Header.Types.ReturnStatus.SUCCESS; return this; } } static Header() { object.ReferenceEquals(global::Sirikata.Protocol._PBJ_Internal.MessageHeader.Descriptor, null); } } #endregion }
using System;
using System.Data;
using System.Data.OleDb;
using System.Drawing.Printing;
using System.Reflection;
using C1.Win.C1Preview;
using C1.C1Report;
using PCSUtils.Framework.ReportFrame;
using PCSUtils.Utils;
using C1PrintPreviewDialog = PCSUtils.Framework.ReportFrame.C1PrintPreviewDialog;

namespace SuperviseReport
{
    /// <summary>
    /// Stock-taking supervise report: compares book (on-hand) quantities with
    /// actual counted quantities per location/bin/product for one stock-taking
    /// period and renders the result through the SuperviseReport.xml C1Report
    /// layout in a print-preview dialog.
    /// </summary>
    public class SuperviseReport : MarshalByRefObject, IDynamicReport
    {
        #region IDynamicReport Members

        private string mConnectionString;
        /// <summary>
        /// ConnectionString, provided for the Dynamic Report.
        /// Allows the Dynamic Report to access the PCS database.
        /// </summary>
        public string PCSConnectionString
        {
            get { return mConnectionString; }
            set { mConnectionString = value; }
        }

        private ReportBuilder mReportBuilder;
        /// <summary>
        /// Report Builder utility object.
        /// The Dynamic Report can use this object to render, modify and lay out the report.
        /// </summary>
        public ReportBuilder PCSReportBuilder
        {
            get { return mReportBuilder; }
            set { mReportBuilder = value; }
        }

        private C1PrintPreviewControl mViewer;
        /// <summary>
        /// ReportViewer object, provided for the Dynamic Report.
        /// Allows the Dynamic Report to manipulate the report viewer and
        /// modify the report after rendering if needed.
        /// </summary>
        public C1PrintPreviewControl PCSReportViewer
        {
            get { return mViewer; }
            set { mViewer = value; }
        }

        private object mResult;
        /// <summary>
        /// Stores other results if any.
        /// Usually the returned DataTable is stored here to display on the ReportViewer form's grid.
        /// </summary>
        public object Result
        {
            get { return mResult; }
            set { mResult = value; }
        }

        private bool mUseEngine;
        /// <summary>
        /// Notifies PCS whether the report rendering process is run by this
        /// IDynamicReport or by the ReportViewer engine (in the ReportViewer form).
        /// </summary>
        public bool UseReportViewerRenderEngine
        {
            get { return mUseEngine; }
            set { mUseEngine = value; }
        }

        private string mReportFolder;
        /// <summary>
        /// Informs the external process where to find the report layout
        /// (the PCS report-definition folder path).
        /// </summary>
        public string ReportDefinitionFolder
        {
            get { return mReportFolder; }
            set { mReportFolder = value; }
        }

        private string mLayoutFile;
        /// <summary>
        /// Informs the external process about the layout file PCS instructs it to use.
        /// (PCS assigns this property while the ReportViewer form executes, using
        /// the layout file from the report configuration entry.)
        /// </summary>
        public string ReportLayoutFile
        {
            get { return mLayoutFile; }
            set { mLayoutFile = value; }
        }

        /// <summary>
        /// Invokes a method declared in this dynamic report by name.
        /// </summary>
        /// <param name="pstrMethod">Name of the method to call (declared in this dynamic report).</param>
        /// <param name="pobjParameters">Array of parameters for the method named <paramref name="pstrMethod"/>.</param>
        /// <returns>The invoked method's return value.</returns>
        public object Invoke(string pstrMethod, object[] pobjParameters)
        {
            return this.GetType().InvokeMember(pstrMethod, BindingFlags.InvokeMethod, null, this, pobjParameters);
        }

        #endregion

        // Names of the computed columns added to the report table, and of the
        // columns/fields read from the stock-taking data and the layout.
        const string DIFPOS_COL = "DifPos";
        const string DIFNEV_COL = "DifNev";
        const string METHOD_COL = "Method";
        const string OH_COL = "OHQuantity";
        const string ACTUAL_COL = "Actual";
        const string SLIPCODE_COL = "SlipCode";
        const string STOCKTAKINGDATE_FLD = "fldStockTakingDate";

        /// <summary>
        /// Builds and displays the supervise report for one stock-taking period.
        /// </summary>
        /// <param name="pstrPeriodID">Stock-taking period ID.</param>
        /// <param name="pstrMasterLocationID">Master location ID (report header only).</param>
        /// <param name="pstrLocationID">Comma-separated location ID list filter; may be null or empty.</param>
        /// <param name="pstrBinID">Comma-separated bin ID list filter; may be null or empty.</param>
        /// <returns>The report data table (also shown on the viewer form's grid).</returns>
        public DataTable ExecuteReport(string pstrPeriodID, string pstrMasterLocationID, string pstrLocationID, string pstrBinID)
        {
            DataTable dtbData = GetReportData(pstrPeriodID, pstrLocationID, pstrBinID);
            DataTable dtbStockTaking = GetStockTakingData(pstrPeriodID, pstrLocationID, pstrBinID);

            dtbData.Columns.Add(new DataColumn(DIFPOS_COL, typeof(decimal)));
            dtbData.Columns.Add(new DataColumn(DIFNEV_COL, typeof(decimal)));
            dtbData.Columns.Add(new DataColumn(METHOD_COL, typeof(string)));
            dtbData.Columns.Add(new DataColumn(SLIPCODE_COL, typeof(string)));

            int intCountPos = 0, intCountNev = 0;
            decimal decQtyCheck = 0, intNumPos = 0, intNumNev = 0;
            // Calculate the positive/negative differences and fill in the
            // counting method and slip codes for every row.
            foreach (DataRow drowData in dtbData.Rows)
            {
                string strLocationID = drowData["LocationID"].ToString();
                string strBinID = drowData["BinID"].ToString();
                string strProductID = drowData["ProductID"].ToString();
                string strSlipCode;
                string strMethod = GetCountingMethod(strLocationID, strBinID, strProductID, dtbStockTaking, out strSlipCode);
                drowData[METHOD_COL] = strMethod;
                drowData[SLIPCODE_COL] = strSlipCode;

                decimal decOHQuantity = 0, decActual = 0;
                // Best effort: DBNull/non-numeric values count as zero.
                try { decOHQuantity = Convert.ToDecimal(drowData[OH_COL]); } catch {}
                try { decActual = Convert.ToDecimal(drowData[ACTUAL_COL]); } catch {}

                decimal decDif = decActual - decOHQuantity;
                if (decDif > 0)
                {
                    drowData[DIFPOS_COL] = decDif;
                    intCountPos++;
                    intNumPos += decDif;
                }
                else if (decDif < 0)
                {
                    drowData[DIFNEV_COL] = decDif;
                    intCountNev++;
                    intNumNev += decDif;
                }
                decQtyCheck += decOHQuantity;
            }

            C1Report rptReport = new C1Report();
            mLayoutFile = "SuperviseReport.xml";
            string strLayoutPath = mReportFolder + "\\" + mLayoutFile;
            rptReport.Load(strLayoutPath, rptReport.GetReportInfo(strLayoutPath)[0]);
            rptReport.Layout.PaperSize = PaperKind.A3;

            #region Report constant
            const string PARAM_PERIOD = "fldParamPeriod";
            const string PARAM_MASLOC = "fldParamMasLoc";
            const string PARAM_LOC = "fldParamLocation";
            const string PARAM_BIN = "fldParamBin";
            const string COUNTPOS_FLD = "fldA";
            const string COUNTNEV_FLD = "fldB";
            const string NUMPOS_FLD = "fldC";
            const string NUMNEV_FLD = "fldD";
            const string NUMCHECK_FLD = "fldNumCheck";
            const string QTYCHECK_FLD = "fldQtyCheck";
            const string RATECOUNT_FLD = "fldRateCount";
            const string RATEQTY_FLD = "fldRateQuantity";
            #endregion

            // Each field assignment is deliberately best effort: a field missing
            // from the layout must not abort the whole report.
            DataRow drowPeriodInfo = GetPeriod(pstrPeriodID);
            try { rptReport.Fields[PARAM_PERIOD].Text = drowPeriodInfo["Description"].ToString(); } catch {}
            try { rptReport.Fields[STOCKTAKINGDATE_FLD].Text = Convert.ToDateTime(drowPeriodInfo["FromDate"]).ToString("dd-MM-yyyy"); } catch {}
            try { rptReport.Fields[PARAM_MASLOC].Text = GetMasLoc(pstrMasterLocationID); } catch {}
            try { rptReport.Fields[PARAM_LOC].Text = GetLocation(pstrLocationID); } catch {}
            try { rptReport.Fields[PARAM_BIN].Text = GetBin(pstrBinID); } catch {}
            try { rptReport.Fields[COUNTPOS_FLD].Text = intCountPos.ToString(); } catch {}
            try { rptReport.Fields[COUNTNEV_FLD].Text = intCountNev.ToString(); } catch {}
            try { rptReport.Fields[NUMPOS_FLD].Text = intNumPos.ToString(); } catch {}
            try { rptReport.Fields[NUMNEV_FLD].Text = intNumNev.ToString(); } catch {}
            try { rptReport.Fields[NUMCHECK_FLD].Text = dtbData.Rows.Count.ToString(); } catch {}
            try { rptReport.Fields[QTYCHECK_FLD].Text = decQtyCheck.ToString(); } catch {}
            try
            {
                // rate count = num dif/num check (field expression text, evaluated by the report engine)
                if (dtbData.Rows.Count > 0)
                    rptReport.Fields[RATECOUNT_FLD].Text = "fldNumDif / fldNumCheck";
            }
            catch {}
            try
            {
                if (decQtyCheck != 0)
                    rptReport.Fields[RATEQTY_FLD].Text = "fldQtyDif / fldQtyCheck";
            }
            catch {}

            // Set the datasource object that provides data to the report, render
            // it, and show the rendered document in the preview dialog.
            rptReport.DataSource.Recordset = dtbData;
            rptReport.Render();
            C1PrintPreviewDialog ppvViewer = new C1PrintPreviewDialog();
            ppvViewer.FormTitle = "Supervise Report";
            ppvViewer.ReportViewer.Document = rptReport.Document;
            ppvViewer.Show();

            return dtbData;
        }

        /// <summary>
        /// Loads the aggregated book/actual quantities per location, bin and
        /// product for the given stock-taking period.
        /// </summary>
        private DataTable GetReportData(string pstrPeriodID, string pstrLocationID, string pstrBinID)
        {
            // NOTE(review): the period/location/bin IDs are concatenated straight
            // into the SQL text. The location/bin values are comma-separated ID
            // lists used in IN (...) clauses, which OleDb parameters cannot
            // express directly, but this remains an SQL-injection risk if any of
            // the values can ever come from user-editable input — confirm callers
            // only pass internally generated numeric IDs.
            string strSql = "SELECT L.LocationID, L.Code AS Location, B.BinID, B.Code AS Bin, P.ProductID,"
                + " P.Code AS PartNo, P.Description AS PartName, P.Revision AS Model,"
                + " C.Code AS Category, U.Code AS UM,"
                + " SUM(ISNULL(OHQuantity,0)) OHQuantity, SUM(ISNULL(Actual,0)) Actual"
                + " FROM ITM_Product P"
                + " LEFT JOIN "
                + " (SELECT LocationID, BinID, ProductID, SUM(ISNULL(BookQuantity,0)) AS OHQuantity, SUM(ISNULL(Quantity,0)) AS Actual"
                + " FROM IV_StockTakingMaster SM JOIN IV_StockTaking ST"
                + " ON SM.StockTakingMasterID = ST.StockTakingMasterID"
                + " WHERE SM.StockTakingPeriodID = " + pstrPeriodID
                + " GROUP BY LocationID, BinID, ProductID) AS A"
                + " ON P.ProductID = A.ProductID"
                + " LEFT JOIN ITM_Category C ON P.CategoryID = C.CategoryID"
                + " JOIN MST_UnitOfMeasure U ON P.StockUMID = U.UnitOfMeasureID"
                + " JOIN MST_Location L ON A.LocationID = L.LocationID"
                + " JOIN MST_Bin B ON A.BinID = B.BinID"
                + " WHERE 1=1";
            if (pstrLocationID != null && pstrLocationID.Length > 0)
                strSql += " AND A.LocationID IN (" + pstrLocationID + ")";
            if (pstrBinID != null && pstrBinID.Length > 0)
                strSql += " AND A.BinID IN (" + pstrBinID + ")";
            strSql += " GROUP BY L.LocationID, L.Code, B.BinID, B.Code, C.Code, "
                + " P.ProductID, P.Code, P.Description, P.Revision, U.Code"
                + " ORDER BY L.Code, B.Code, C.Code, "
                + " P.Code, P.Description, P.Revision, U.Code";

            // using blocks replace the old try/finally + manual Close(): the
            // connection, command and adapter are always disposed, and any
            // exception now propagates with its original stack trace instead of
            // being rethrown via "throw ex".
            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
            using (OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS))
            {
                oconPCS.Open();
                DataTable dtbData = new DataTable();
                odadPCS.Fill(dtbData);
                return dtbData;
            }
        }

        /// <summary>
        /// Loads the distinct slip/counting-method rows recorded for the given
        /// stock-taking period (used to annotate each report row).
        /// </summary>
        private DataTable GetStockTakingData(string pstrPeriodID, string pstrLocationID, string pstrBinID)
        {
            // NOTE(review): same string-built SQL / IN-list injection caveat as
            // GetReportData — confirm callers only pass internally generated IDs.
            string strSql = "SELECT DISTINCT D.SlipCode, M.LocationID, M.BinID, D.ProductID,"
                + " D.CountingMethodID, C.Code AS Method"
                + " FROM IV_StockTaking D JOIN IV_StockTakingMaster M"
                + " ON D.StockTakingMasterID = M.StockTakingMasteriD"
                + " JOIN IV_CoutingMethod C ON D.CountingMethodID = C.CountingMethodID"
                + " WHERE StockTakingPeriodID = " + pstrPeriodID;
            if (pstrLocationID != null && pstrLocationID.Length > 0)
                strSql += " AND M.LocationID IN (" + pstrLocationID + ")";
            if (pstrBinID != null && pstrBinID.Length > 0)
                strSql += " AND M.BinID IN (" + pstrBinID + ")";

            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
            using (OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS))
            {
                oconPCS.Open();
                DataTable dtbData = new DataTable();
                odadPCS.Fill(dtbData);
                return dtbData;
            }
        }

        /// <summary>
        /// Returns the description/from-date row of the given stock-taking period.
        /// Throws if the period does not exist (matching the original Rows[0] behavior).
        /// </summary>
        private DataRow GetPeriod(string pstrPeriodID)
        {
            // Parameterized with an OleDb positional "?" placeholder instead of
            // concatenating the ID into the SQL text.
            string strSql = "SELECT Description, FromDate FROM IV_StockTakingPeriod WHERE StockTakingPeriodID = ?";
            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
            using (OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS))
            {
                ocmdPCS.Parameters.AddWithValue("@StockTakingPeriodID", pstrPeriodID);
                oconPCS.Open();
                DataTable dtbData = new DataTable();
                odadPCS.Fill(dtbData);
                return dtbData.Rows[0];
            }
        }

        /// <summary>
        /// Returns "Code (Name)" of the given master location, or an empty
        /// string when it does not exist.
        /// </summary>
        private string GetMasLoc(string pstrMasLocID)
        {
            string strSql = "SELECT Code + ' (' + Name + ')' FROM MST_MasterLocation"
                + " WHERE MasterLocationID = ?";
            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
            {
                ocmdPCS.Parameters.AddWithValue("@MasterLocationID", pstrMasLocID);
                oconPCS.Open();
                object objResult = ocmdPCS.ExecuteScalar();
                // The original dereferenced a possibly-null scalar (NullReference
                // swallowed by the caller); an empty string gives the same visible
                // result without the exception.
                return objResult == null ? string.Empty : objResult.ToString();
            }
        }

        /// <summary>
        /// Returns "Code (Name)" of the given location, or an empty string when
        /// it does not exist.
        /// </summary>
        private string GetLocation(string pstrLocID)
        {
            string strSql = "SELECT Code + ' (' + Name + ')' FROM MST_Location"
                + " WHERE LocationID = ?";
            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
            {
                ocmdPCS.Parameters.AddWithValue("@LocationID", pstrLocID);
                oconPCS.Open();
                object objResult = ocmdPCS.ExecuteScalar();
                return objResult == null ? string.Empty : objResult.ToString();
            }
        }

        /// <summary>
        /// Returns "Code (Name)" of the given bin, or an empty string when it
        /// does not exist.
        /// </summary>
        private string GetBin(string pstrBinID)
        {
            string strSql = "SELECT Code + ' (' + Name + ')' FROM MST_Bin"
                + " WHERE BinID = ?";
            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
            {
                ocmdPCS.Parameters.AddWithValue("@BinID", pstrBinID);
                oconPCS.Open();
                object objResult = ocmdPCS.ExecuteScalar();
                return objResult == null ? string.Empty : objResult.ToString();
            }
        }

        /// <summary>
        /// Looks up the counting method for one location/bin/product row and
        /// returns the matching slip codes joined with '+' via the out parameter.
        /// </summary>
        private string GetCountingMethod(string pstrLocationID, string pstrBinID, string pstrProductID, DataTable pdtbStockTakingData, out string strSlipCode)
        {
            strSlipCode = string.Empty;
            string strMethod = string.Empty;
            // In-memory DataTable.Select filter; IDs come from the report rows.
            string strFilter = "LocationID = " + pstrLocationID
                + " AND BinID = " + pstrBinID
                + " AND ProductID = " + pstrProductID;
            DataRow[] drowMethod = pdtbStockTakingData.Select(strFilter);
            if (drowMethod.Length > 0)
            {
                strMethod = drowMethod[0][METHOD_COL].ToString();
                foreach (DataRow drowSlip in drowMethod)
                    strSlipCode += drowSlip[SLIPCODE_COL].ToString() + "+";
                // Trim the trailing '+' separator appended by the loop above.
                strSlipCode = strSlipCode.Remove(strSlipCode.Length - 1, 1);
            }
            return strMethod;
        }
    }
}
// // ItunesPlayerImportSource.cs // // Authors: // Scott Peterson <lunchtimemama@gmail.com> // Alexander Kojevnikov <alexander@kojevnikov.com> // // Copyright (C) 2007 Scott Peterson // Copyright (C) 2009 Alexander Kojevnikov // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml;

using Mono.Unix;
using Gtk;

using Banshee.Base;
using Banshee.Collection;
using Banshee.Collection.Database;
using Banshee.IO;
using Banshee.Library;
using Banshee.Playlist;
using Banshee.ServiceStack;
using Banshee.Sources;
using Banshee.Widgets;

using Hyena.Data.Sqlite;

namespace Banshee.PlayerMigration
{
    // Imports an "iTunes Music Library.xml" file (tracks, ratings, play statistics
    // and playlists) into the Banshee music library on a worker thread.
    public sealed class ItunesPlayerImportSource : ThreadPoolImportSource
    {
        // This is its own class so that we don't always load this stuff into memory
        private class ItunesImportData
        {
            public string library_uri, default_query, local_prefix, fallback_dir;
            public string[] query_dirs;
            public bool get_ratings, get_stats, get_playlists, user_provided_prefix, empty_library;
            public int total_songs, total_processed;
            public Dictionary<int, int> track_ids = new Dictionary<int, int> (); // key=itunes_id, value=banshee_id
        }

        // Guards the worker-thread <-> main-(GTK)-thread hand-off in
        // ProcessMusicFolderPath; `ok` carries the dialog result back.
        private readonly object mutex = new object ();
        private volatile bool ok;

        public const string LibraryFilename = "iTunes Music Library.xml";

        public override string Name {
            get { return Catalog.GetString ("iTunes Media Player"); }
        }

        public override bool CanImport {
            get { return true; }
        }

        // Per-import state; reset to null when an import finishes or is cancelled.
        private ItunesImportData data;

        // Shows the import dialog (only when no import is already configured) and
        // records the user's choices. Returns false when the user cancels.
        protected override bool ConfirmImport ()
        {
            if (data == null) {
                data = new ItunesImportData ();
                var dialog = new ItunesImportDialog ();
                if (!HandleImportDialog (dialog, delegate { data.library_uri = dialog.LibraryUri; })) {
                    data = null;
                    return false;
                }
            }
            return true;
        }

        private delegate void ImportDialogHandler (ItunesImportDialog dialog);

        // Runs the import dialog, copies its settings into `data`, then scans the
        // library's "Major Version"/"Minor Version" keys, prompting the user when
        // the version is not the supported "1".
        private bool HandleImportDialog (ItunesImportDialog dialog, ImportDialogHandler code)
        {
            try {
                if (dialog.Run () == (int)ResponseType.Ok) {
                    if (code != null) {
                        code (dialog);
                    }
                    data.get_ratings = dialog.Ratings;
                    data.get_stats = dialog.Stats;
                    // NOTE(review): "Playliststs" is the property name as declared on
                    // ItunesImportDialog — presumably a typo there; verify before renaming.
                    data.get_playlists = dialog.Playliststs;
                } else {
                    return false;
                }
            } finally {
                dialog.Destroy ();
                dialog.Dispose ();
            }

            if (String.IsNullOrEmpty (data.library_uri)) {
                return false;
            }

            // Make sure the library version is supported (version 1.1)
            string message = null;
            bool prompt = false;
            using (var xml_reader = new XmlTextReader (data.library_uri)) {
                xml_reader.ReadToFollowing ("key");
                do {
                    xml_reader.Read ();
                    string key = xml_reader.ReadContentAsString ();
                    if (key == "Major Version" || key == "Minor Version") {
                        xml_reader.Read ();
                        xml_reader.Read ();
                        if (xml_reader.ReadContentAsString () != "1") {
                            message = Catalog.GetString (
                                "Banshee is not familiar with this version of the iTunes library format." +
                                " Importing may or may not work as expected, or at all. Would you like to attempt to import anyway?");
                            prompt = true;
                            break;
                        }
                    }
                } while (xml_reader.ReadToNextSibling ("key"));
            }

            if (prompt) {
                bool proceed = false;
                using (var message_dialog = new MessageDialog (null, 0, MessageType.Question, ButtonsType.YesNo, message)) {
                    if (message_dialog.Run () == (int)ResponseType.Yes) {
                        proceed = true;
                    }
                    message_dialog.Destroy ();
                }
                if (!proceed) {
                    LogError (data.library_uri, "Unsupported version");
                    return false;
                }
            }
            return true;
        }

        // Worker-thread entry point: counts the songs for progress reporting,
        // then streams the XML and imports tracks/playlists.
        protected override void ImportCore ()
        {
            try {
                CountSongs ();
                data.empty_library = ServiceManager.SourceManager.MusicLibrary.TrackModel.Count == 0;
                var import_manager = ServiceManager.Get<LibraryImportManager> ();
                using (var xml_reader = new XmlTextReader (data.library_uri)) {
                    ProcessLibraryXml (import_manager, xml_reader);
                }
                import_manager.NotifyAllSources ();
            } finally {
                data = null; // forces ConfirmImport to show a fresh dialog next time
            }
        }

        // First pass over the XML: counts <dict> entries three levels deep
        // (appears to be plist -> library dict -> Tracks dict) for progress totals.
        private void CountSongs ()
        {
            using (var xml_reader = new XmlTextReader (data.library_uri)) {
                xml_reader.ReadToDescendant ("dict");
                xml_reader.ReadToDescendant ("dict");
                xml_reader.ReadToDescendant ("dict");
                do {
                    data.total_songs++;
                } while (xml_reader.ReadToNextSibling ("dict"));
            }
        }

        // Dispatches the top-level library keys: Music Folder, Tracks, Playlists.
        private void ProcessLibraryXml (LibraryImportManager import_manager, XmlReader xml_reader)
        {
            while (xml_reader.ReadToFollowing ("key") && !CheckForCanceled ()) {
                xml_reader.Read ();
                string key = xml_reader.ReadContentAsString ();
                xml_reader.Read ();
                xml_reader.Read ();
                switch (key) {
                case "Music Folder":
                    // Bail out of the whole import when the music folder cannot be mapped.
                    if (!ProcessMusicFolderPath (xml_reader.ReadContentAsString ())) {
                        return;
                    }
                    break;
                case "Tracks":
                    ProcessSongs (import_manager, xml_reader.ReadSubtree ());
                    break;
                case "Playlists":
                    if (data.get_playlists) {
                        ProcessPlaylists (xml_reader.ReadSubtree ());
                    }
                    break;
                }
            }
        }

        // Maps the iTunes "Music Folder" URI onto a local directory, filling in
        // data.local_prefix / data.query_dirs / data.default_query / data.fallback_dir.
        // When no common directory can be found, asks the user (on the main thread)
        // to point at their music directory. Returns false when mapping fails.
        private bool ProcessMusicFolderPath (string path)
        {
            string[] itunes_music_uri_parts = ConvertToLocalUriFormat (path).Split (Path.DirectorySeparatorChar);
            string[] library_uri_parts = Path.GetDirectoryName (data.library_uri).Split (Path.DirectorySeparatorChar);
            string itunes_dir_name = library_uri_parts[library_uri_parts.Length - 1];

            // Search the music-folder URI (right to left) for the directory that
            // contains the library XML.
            int i = 0;
            bool found = false;
            for (i = itunes_music_uri_parts.Length - 1; i >= 0; i--) {
                if (itunes_music_uri_parts[i] == itunes_dir_name) {
                    found = true;
                    break;
                }
            }

            if (!found) {
                // No overlap: rebuild the path (dropping the scheme/host parts,
                // hence starting at index 3) and ask the user where the music lives.
                var builder = new StringBuilder (path.Length - 17);
                for (int j = 3; j < itunes_music_uri_parts.Length; j++) {
                    string part = itunes_music_uri_parts[j];
                    builder.Append (part);
                    if (part.Length > 0) {
                        builder.Append (Path.DirectorySeparatorChar);
                    }
                }
                string local_path = builder.ToString ();

                // Block this worker thread until the dialog on the main thread
                // pulses the mutex with the user's answer in `ok`.
                System.Threading.Monitor.Enter (mutex);
                ThreadAssist.ProxyToMain (delegate {
                    System.Threading.Monitor.Enter (mutex);
                    using (var dialog = new ItunesMusicDirectoryDialog (local_path)) {
                        if (dialog.Run () == (int)ResponseType.Ok) {
                            data.local_prefix = dialog.UserMusicDirectory;
                            data.user_provided_prefix = true;
                            data.default_query = local_path;
                            ok = true;
                        } else {
                            ok = false;
                        }
                        dialog.Destroy ();
                        System.Threading.Monitor.Pulse (mutex);
                        System.Threading.Monitor.Exit (mutex);
                    }
                });
                System.Threading.Monitor.Wait (mutex);
                System.Threading.Monitor.Exit (mutex);

                if (ok) {
                    return true;
                } else {
                    LogError (data.library_uri, "Unable to locate iTunes directory from iTunes URI");
                    return false;
                }
            }

            // Walk upwards from the matched directory until a path that exists
            // locally is found; record each traversed component in tmp_query_dirs.
            string[] tmp_query_dirs = new string[itunes_music_uri_parts.Length];
            string upstream_uri;
            string tmp_upstream_uri = null;
            int step = 0;
            string root = Path.GetPathRoot (data.library_uri);
            bool same_root = library_uri_parts[0] == root.Split (Path.DirectorySeparatorChar)[0];
            do {
                upstream_uri = tmp_upstream_uri;
                tmp_upstream_uri = root;
                for (int j = same_root ? 1 : 0; j < library_uri_parts.Length - step - 1; j++) {
                    tmp_upstream_uri = Path.Combine (tmp_upstream_uri, library_uri_parts[j]);
                }
                tmp_upstream_uri = Path.Combine (tmp_upstream_uri, itunes_music_uri_parts[i - step]);
                data.fallback_dir = tmp_query_dirs[step] = itunes_music_uri_parts[i - step];
                step++;
            } while (Banshee.IO.Directory.Exists (tmp_upstream_uri));

            if (upstream_uri == null) {
                LogError (data.library_uri, "Unable to resolve iTunes URIs to local URIs");
                return false;
            }

            // Build the query-dir list (reversed) and the default query string.
            data.query_dirs = new string[step - 2];
            data.default_query = string.Empty;
            for (int j = step - 2; j >= 0; j--) {
                if (j > 0) {
                    data.query_dirs[j - 1] = tmp_query_dirs[j];
                }
                data.default_query += tmp_query_dirs[j] + Path.DirectorySeparatorChar;
            }

            // Local prefix = the part of the library path above the matched directory.
            data.local_prefix = string.Empty;
            for (int j = 0; j <= library_uri_parts.Length - step; j++) {
                data.local_prefix += library_uri_parts[j] + Path.DirectorySeparatorChar;
            }
            return true;
        }

        // Iterates the per-track <dict> elements inside the Tracks section.
        private void ProcessSongs (LibraryImportManager import_manager, XmlReader xml_reader)
        {
            using (xml_reader) {
                xml_reader.ReadToFollowing ("dict"); // skip the enclosing Tracks dict
                while (xml_reader.ReadToFollowing ("dict") && !CheckForCanceled ()) {
                    ProcessSong (import_manager, xml_reader.ReadSubtree ());
                }
            }
        }

        // Iterates the per-playlist <dict> elements inside the Playlists section.
        private void ProcessPlaylists (XmlReader xml_reader)
        {
            using (xml_reader) {
                while (xml_reader.ReadToFollowing ("dict") && !CheckForCanceled ()) {
                    ProcessPlaylist (xml_reader.ReadSubtree ());
                }
            }
        }

        // Reads one track's key/value pairs, imports the file into the library and
        // applies the metadata/rating/statistics the user asked for. Tracks that
        // cannot be mapped to a local file are skipped silently.
        private void ProcessSong (LibraryImportManager import_manager, XmlReader xml_reader)
        {
            data.total_processed++;

            var itunes_id = 0;
            var title = String.Empty;
            var title_sort = String.Empty;
            var genre = String.Empty;
            var artist = String.Empty;
            var artist_sort = String.Empty;
            var album_artist = String.Empty;
            var album_artist_sort = String.Empty;
            var composer = String.Empty;
            var album = String.Empty;
            var album_sort = String.Empty;
            var grouping = String.Empty;
            var year = 0;
            var rating = 0;
            var play_count = 0;
            var track_number = 0;
            var date_added = DateTime.Now;
            var last_played = DateTime.MinValue;
            SafeUri uri = null;

            using (xml_reader) {
                while (xml_reader.ReadToFollowing ("key")) {
                    xml_reader.Read ();
                    string key = xml_reader.ReadContentAsString ();
                    xml_reader.Read ();
                    xml_reader.Read ();
                    // A malformed value only skips that one field, not the track.
                    try {
                        switch (key) {
                        case "Track ID":
                            itunes_id = Int32.Parse (xml_reader.ReadContentAsString ());
                            break;
                        case "Name":
                            title = xml_reader.ReadContentAsString ();
                            break;
                        case "Sort Name":
                            title_sort = xml_reader.ReadContentAsString ();
                            break;
                        case "Genre":
                            genre = xml_reader.ReadContentAsString ();
                            break;
                        case "Artist":
                            artist = xml_reader.ReadContentAsString ();
                            break;
                        case "Sort Artist":
                            artist_sort = xml_reader.ReadContentAsString ();
                            break;
                        case "Album Artist":
                            album_artist = xml_reader.ReadContentAsString ();
                            break;
                        case "Sort Album Artist":
                            album_artist_sort = xml_reader.ReadContentAsString ();
                            break;
                        case "Composer":
                            composer = xml_reader.ReadContentAsString ();
                            break;
                        case "Album":
                            album = xml_reader.ReadContentAsString ();
                            break;
                        case "Sort Album":
                            album_sort = xml_reader.ReadContentAsString ();
                            break;
                        case "Grouping":
                            grouping = xml_reader.ReadContentAsString ();
                            break;
                        case "Year":
                            year = Int32.Parse (xml_reader.ReadContentAsString ());
                            break;
                        case "Rating":
                            // iTunes stores ratings 0-100; Banshee uses 0-5 stars.
                            rating = Int32.Parse (xml_reader.ReadContentAsString ()) / 20;
                            break;
                        case "Play Count":
                            play_count = Int32.Parse (xml_reader.ReadContentAsString ());
                            break;
                        case "Track Number":
                            track_number = Int32.Parse (xml_reader.ReadContentAsString ());
                            break;
                        case "Date Added":
                            date_added = DateTime.Parse (xml_reader.ReadContentAsString (),
                                DateTimeFormatInfo.InvariantInfo, DateTimeStyles.AssumeUniversal);
                            break;
                        case "Play Date UTC":
                            last_played = DateTime.Parse (xml_reader.ReadContentAsString (),
                                DateTimeFormatInfo.InvariantInfo, DateTimeStyles.AssumeUniversal);
                            break;
                        case "Location":
                            uri = ConvertToLocalUri (xml_reader.ReadContentAsString ());
                            break;
                        }
                    } catch {
                    }
                }
            }

            if (uri == null) {
                return;
            }

            UpdateUserJob (data.total_processed, data.total_songs, artist, title);

            try {
                DatabaseTrackInfo track = import_manager.ImportTrack (uri);
                if (track == null) {
                    LogError (SafeUri.UriToFilename (uri), Catalog.GetString ("Unable to import song."));
                    return;
                }

                // Prefer the XML metadata over whatever the importer read from tags.
                if (!String.IsNullOrEmpty (title)) {
                    track.TrackTitle = title;
                }
                if (!String.IsNullOrEmpty (title_sort)) {
                    track.TrackTitleSort = title_sort;
                }
                if (!String.IsNullOrEmpty (artist)) {
                    track.ArtistName = artist;
                }
                if (!String.IsNullOrEmpty (artist_sort)) {
                    track.ArtistNameSort = artist_sort;
                }
                if (!String.IsNullOrEmpty (genre)) {
                    track.Genre = genre;
                }
                if (!String.IsNullOrEmpty (album_artist)) {
                    track.AlbumArtist = album_artist;
                }
                if (!String.IsNullOrEmpty (album_artist_sort)) {
                    track.AlbumArtistSort = album_artist_sort;
                }
                if (!String.IsNullOrEmpty (composer)) {
                    track.Composer = composer;
                }
                if (!String.IsNullOrEmpty (album)) {
                    track.AlbumTitle = album;
                }
                if (!String.IsNullOrEmpty (album_sort)) {
                    track.AlbumTitleSort = album_sort;
                }
                if (!String.IsNullOrEmpty (grouping)) {
                    track.Grouping = grouping;
                }
                if (year > 0) {
                    track.Year = year;
                }
                if (data.get_ratings && rating > 0 && rating <= 5) {
                    track.Rating = rating;
                }
                if (data.get_stats && play_count > 0) {
                    track.PlayCount = play_count;
                }
                if (track_number > 0) {
                    track.TrackNumber = track_number;
                }
                if (data.get_stats) {
                    track.DateAdded = date_added;
                }
                if (data.get_stats && last_played > DateTime.MinValue) {
                    track.LastPlayed = last_played;
                }

                // Remember the iTunes->Banshee ID mapping for playlist import.
                data.track_ids.Add (itunes_id, track.TrackId);
                track.Save (false);
            } catch (Exception e) {
                LogError (SafeUri.UriToFilename (uri), e);
            }
        }

        // Reads one playlist <dict>, skipping the built-in/smart playlists,
        // and forwards real playlists (including empty ones) for creation.
        private void ProcessPlaylist (XmlReader xml_reader)
        {
            string name = string.Empty;
            bool skip = false;
            bool processed = false;
            using (xml_reader) {
                while (xml_reader.ReadToFollowing ("key")) {
                    xml_reader.Read ();
                    string key = xml_reader.ReadContentAsString ();
                    xml_reader.Read ();
                    switch (key) {
                    case "Name":
                        xml_reader.Read ();
                        name = xml_reader.ReadContentAsString ();
                        // Built-in iTunes lists that should not become Banshee playlists.
                        // ("Party Shuffle" is listed twice in the original condition.)
                        if (name == "Library" ||
                            name == "Music Videos" ||
                            name == "Audiobooks" ||
                            name == "Music" ||
                            name == "Movies" ||
                            name == "Party Shuffle" ||
                            name == "Podcasts" ||
                            name == "Party Shuffle" ||
                            name == "Purchased Music" ||
                            name == "Genius" ||
                            name == "TV Shows") {
                            skip = true;
                        }
                        break;
                    case "Smart Info":
                        skip = true;
                        break;
                    case "Smart Criteria":
                        skip = true;
                        break;
                    case "Playlist Items":
                        xml_reader.Read ();
                        if (!skip) {
                            ProcessPlaylist (name, xml_reader.ReadSubtree ());
                            processed = true;
                        }
                        break;
                    }
                }
            }
            // Empty playlist
            if (!processed && !skip) {
                ProcessPlaylist (name, null);
            }
        }

        // Updates the progress job for playlist import and creates the playlist;
        // xml_reader is null for an empty playlist.
        private void ProcessPlaylist (string name, XmlReader xml_reader)
        {
            UpdateUserJob (1, 1, Catalog.GetString ("Playlists"), name);
            ProcessRegularPlaylist (name, xml_reader);
            if (xml_reader != null) {
                xml_reader.Close ();
            }
        }

        // Creates a Banshee playlist and fills it with the tracks mapped earlier
        // in data.track_ids (iTunes Track ID -> Banshee TrackId).
        private void ProcessRegularPlaylist (string name, XmlReader xml_reader)
        {
            var playlist_source = new PlaylistSource (name, ServiceManager.SourceManager.MusicLibrary);
            playlist_source.Save ();
            ServiceManager.SourceManager.MusicLibrary.AddChildSource (playlist_source);

            // Get the songs in the playlists
            if (xml_reader != null) {
                while (xml_reader.ReadToFollowing ("integer") && !CheckForCanceled ()) {
                    xml_reader.Read ();
                    int itunes_id = Int32.Parse (xml_reader.ReadContentAsString ());
                    int track_id;
                    if (data.track_ids.TryGetValue (itunes_id, out track_id)) {
                        // Best-effort insert; a failed entry should not abort the playlist.
                        try {
                            ServiceManager.DbConnection.Execute (
                                "INSERT INTO CorePlaylistEntries (PlaylistID, TrackID) VALUES (?, ?)",
                                playlist_source.DbId, track_id);
                        } catch {
                        }
                    }
                }
                playlist_source.Reload ();
                playlist_source.NotifyUser ();
            }
        }

        // Maps an iTunes track URI to a local SafeUri using the prefixes computed by
        // ProcessMusicFolderPath; falls back to a case-insensitive suffix search in
        // the CoreTracks table. Returns null when no local file can be found.
        private SafeUri ConvertToLocalUri (string raw_uri)
        {
            if (raw_uri == null) {
                return null;
            }

            string uri = ConvertToLocalUriFormat (raw_uri);
            int index = uri.IndexOf (data.default_query);
            if (data.user_provided_prefix && index != -1) {
                index += data.default_query.Length;
            } else if (index == -1 && data.query_dirs.Length > 0) {
                // Try progressively shorter suffixes of the known query directories.
                int count = 0;
                string path = data.query_dirs[data.query_dirs.Length - 1];
                do {
                    for (int k = data.query_dirs.Length - 2; k >= count; k--) {
                        path = Path.Combine (path, data.query_dirs[k]);
                    }
                    index = uri.IndexOf (path);
                    count++;
                } while (index == -1 && count < data.query_dirs.Length);
                if (index == -1) {
                    index = uri.IndexOf (data.fallback_dir);
                    if (index != -1) {
                        index += data.fallback_dir.Length + 1;
                    }
                }
            }

            if (index == -1) {
                if (data.empty_library) {
                    LogError (uri, "Unable to map iTunes URI to local URI");
                }
                return null;
            }

            SafeUri safe_uri = CreateSafeUri (
                Path.Combine (data.local_prefix, uri.Substring (index, uri.Length - index)),
                data.empty_library);

            if (safe_uri == null && !data.empty_library) {
                // Last resort: look for an existing library track whose URI ends with
                // an ever-shorter suffix of this path; accept only a unique match.
                string local_uri = string.Empty;
                string lower_uri = raw_uri.ToLower (CultureInfo.InvariantCulture);
                int i = lower_uri.Length;
                while (true) {
                    i = lower_uri.LastIndexOf (Path.DirectorySeparatorChar, i - 1);
                    if (i == -1) {
                        break;
                    }
                    try {
                        using (var reader = ServiceManager.DbConnection.Query (String.Format (
                            @"SELECT Uri FROM CoreTracks WHERE lower(Uri) LIKE ""%{0}""",
                            lower_uri.Substring (i + 1)))) {
                            bool found = false;
                            local_uri = string.Empty;
                            while (reader.Read ()) {
                                if (found) {
                                    // More than one match: ambiguous, keep searching.
                                    local_uri = string.Empty;
                                    break;
                                }
                                found = true;
                                local_uri = (string)reader[0];
                            }
                            if (!found || local_uri.Length > 0) {
                                break;
                            }
                        }
                    } catch {
                        break;
                    }
                }
                if (local_uri.Length > 0) {
                    safe_uri = CreateSafeUri (local_uri, true);
                } else {
                    LogError (uri, "Unable to map iTunes URI to local URI");
                }
            }

            return safe_uri;
        }

        // Wraps a path in a SafeUri and resolves it to an existing file via FindFile;
        // logs errors only when `complain` is set. Returns null on failure.
        private SafeUri CreateSafeUri (string uri, bool complain)
        {
            SafeUri safe_uri;
            try {
                safe_uri = new SafeUri (uri);
            } catch {
                if (complain) {
                    LogError (uri, "URI is not a local file path");
                }
                return null;
            }
            safe_uri = FindFile (safe_uri);
            if (safe_uri == null) {
                if (complain) {
                    LogError (uri, "File does not exist");
                }
                return null;
            }
            return safe_uri;
        }

        // URIs are UTF-8 percent-encoded. Decoding with System.Web.HttpServerUtility
        // involves too much overhead, so we do it cheap here.
// Decodes a percent-encoded (UTF-8) iTunes URI into a local-format path string,
// converting '/' to the platform directory separator.
private static string ConvertToLocalUriFormat (string input)
{
    var builder = new StringBuilder (input.Length);
    // Pending bytes of a percent-encoded UTF-8 multi-byte sequence (allocated lazily).
    // Using a list fixes two defects in the original fixed-size buffer: a lone queued
    // byte was decoded together with a stale 0x00 second byte, and the exact byte
    // count is now tracked instead of the buffer capacity.
    List<byte> utf8_bytes = null;

    for (int i = 0; i < input.Length; i++) {
        // If it's a '%', treat the two subsequent characters as a UTF-8 byte in hex.
        if (input[i] == '%') {
            byte code = Byte.Parse (input.Substring (i + 1, 2), System.Globalization.NumberStyles.HexNumber);

            if ((utf8_bytes != null && utf8_bytes.Count > 0) || (code & 0x80) != 0) {
                // Non-ASCII byte, or continuation of a pending sequence: queue it
                // for UTF-8 decoding.
                if (utf8_bytes == null) {
                    utf8_bytes = new List<byte> ();
                }
                utf8_bytes.Add (code);
            } else {
                // A lone ASCII byte needs no UTF-8 decoding.
                builder.Append ((char)code);
            }
            i += 2;
        } else {
            // Flush any pending UTF-8 sequence before emitting a regular character.
            if (utf8_bytes != null && utf8_bytes.Count > 0) {
                builder.Append (Encoding.UTF8.GetString (utf8_bytes.ToArray ()));
                utf8_bytes.Clear ();
            }
            // Convert URI separators to the local directory separator char.
            builder.Append (input[i] == '/' ? Path.DirectorySeparatorChar : input[i]);
        }
    }

    // Fix: the original never flushed a sequence that ended the string, silently
    // dropping a trailing non-ASCII character.
    if (utf8_bytes != null && utf8_bytes.Count > 0) {
        builder.Append (Encoding.UTF8.GetString (utf8_bytes.ToArray ()));
    }

    return builder.ToString ();
}

// URIs kept by iTunes often contain characters in a case different from the actual
// files and directories. This method tries to find the real file URI.
private static SafeUri FindFile (SafeUri uri)
{
    if (Banshee.IO.File.Exists (uri)) {
        return uri;
    }

    string path = uri.AbsolutePath;
    string file = Path.GetFileName (path);
    string directory = Path.GetDirectoryName (path);

    directory = FindDirectory (directory);
    if (directory == null) {
        return null;
    }

    uri = new SafeUri (Path.Combine (directory, file), false);
    if (Banshee.IO.File.Exists (uri)) {
        return uri;
    }

    // Case-insensitive scan of the resolved directory. Ordinal comparison avoids
    // culture-specific casing surprises (e.g. the Turkish dotless 'I') that the
    // original culture-sensitive String.Compare(..., true) was exposed to.
    foreach (string item in Banshee.IO.Directory.GetFiles (directory)) {
        string name = Path.GetFileName (item);
        if (!String.Equals (file, name, StringComparison.OrdinalIgnoreCase)) {
            continue;
        }
        return new SafeUri (Path.Combine (directory, name), false);
    }
    return null;
}

// Resolves a directory path case-insensitively, recursing upwards until an
// existing ancestor is found. Returns null when no match exists.
private static string FindDirectory (string directory)
{
    if (Banshee.IO.Directory.Exists (directory)) {
        return directory;
    }

    string current = Path.GetFileName (directory);
    directory = Path.GetDirectoryName (directory);
    if (String.IsNullOrEmpty (directory)) {
        return null;
    }

    directory = FindDirectory (directory);
    if (String.IsNullOrEmpty (directory)) {
        return null;
    }

    // Same ordinal, case-insensitive matching rationale as in FindFile.
    foreach (string item in Banshee.IO.Directory.GetDirectories (directory)) {
        string name = Path.GetFileName (item);
        if (!String.Equals (current, name, StringComparison.OrdinalIgnoreCase)) {
            continue;
        }
        return Path.Combine (directory, name);
    }
    return null;
}

public override string [] IconNames {
    get { return new string [] { "itunes", "system-search" }; }
}

public override int SortOrder {
    get { return 40; }
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Reflection;
using System.Text.Encodings.Web;
using System.Text.Unicode;
using Xunit;

namespace Microsoft.Framework.WebEncoders
{
    // Verifies that each named property on UnicodeRanges covers exactly its
    // documented Unicode block (first and last code point).
    public class UnicodeRangesTests
    {
        [Fact]
        public void Range_None()
        {
            UnicodeRange range = UnicodeRanges.None;
            Assert.NotNull(range);

            // Test 1: the range should be empty
            Assert.Equal(0, range.FirstCodePoint);
            Assert.Equal(0, range.Length);
        }

        [Fact]
        public void Range_All()
        {
            // "All" spans the entire Basic Multilingual Plane.
            Range_Unicode('\u0000', '\uFFFF', "All");
        }

        // One row per Unicode block: first code point, last code point, and the
        // name of the corresponding UnicodeRanges property (resolved by reflection).
        [Theory]
        [InlineData('\u0000', '\u007F', "BasicLatin")]
        [InlineData('\u0080', '\u00FF', "Latin1Supplement")]
        [InlineData('\u0100', '\u017F', "LatinExtendedA")]
        [InlineData('\u0180', '\u024F', "LatinExtendedB")]
        [InlineData('\u0250', '\u02AF', "IpaExtensions")]
        [InlineData('\u02B0', '\u02FF', "SpacingModifierLetters")]
        [InlineData('\u0300', '\u036F', "CombiningDiacriticalMarks")]
        [InlineData('\u0370', '\u03FF', "GreekandCoptic")]
        [InlineData('\u0400', '\u04FF', "Cyrillic")]
        [InlineData('\u0500', '\u052F', "CyrillicSupplement")]
        [InlineData('\u0530', '\u058F', "Armenian")]
        [InlineData('\u0590', '\u05FF', "Hebrew")]
        [InlineData('\u0600', '\u06FF', "Arabic")]
        [InlineData('\u0700', '\u074F', "Syriac")]
        [InlineData('\u0750', '\u077F', "ArabicSupplement")]
        [InlineData('\u0780', '\u07BF', "Thaana")]
        [InlineData('\u07C0', '\u07FF', "NKo")]
        [InlineData('\u0800', '\u083F', "Samaritan")]
        [InlineData('\u0840', '\u085F', "Mandaic")]
        [InlineData('\u08A0', '\u08FF', "ArabicExtendedA")]
        [InlineData('\u0900', '\u097F', "Devanagari")]
        [InlineData('\u0980', '\u09FF', "Bengali")]
        [InlineData('\u0A00', '\u0A7F', "Gurmukhi")]
        [InlineData('\u0A80', '\u0AFF', "Gujarati")]
        [InlineData('\u0B00', '\u0B7F', "Oriya")]
        [InlineData('\u0B80', '\u0BFF', "Tamil")]
        [InlineData('\u0C00', '\u0C7F', "Telugu")]
        [InlineData('\u0C80', '\u0CFF', "Kannada")]
        [InlineData('\u0D00', '\u0D7F', "Malayalam")]
        [InlineData('\u0D80', '\u0DFF', "Sinhala")]
        [InlineData('\u0E00', '\u0E7F', "Thai")]
        [InlineData('\u0E80', '\u0EFF', "Lao")]
        [InlineData('\u0F00', '\u0FFF', "Tibetan")]
        [InlineData('\u1000', '\u109F', "Myanmar")]
        [InlineData('\u10A0', '\u10FF', "Georgian")]
        [InlineData('\u1100', '\u11FF', "HangulJamo")]
        [InlineData('\u1200', '\u137F', "Ethiopic")]
        [InlineData('\u1380', '\u139F', "EthiopicSupplement")]
        [InlineData('\u13A0', '\u13FF', "Cherokee")]
        [InlineData('\u1400', '\u167F', "UnifiedCanadianAboriginalSyllabics")]
        [InlineData('\u1680', '\u169F', "Ogham")]
        [InlineData('\u16A0', '\u16FF', "Runic")]
        [InlineData('\u1700', '\u171F', "Tagalog")]
        [InlineData('\u1720', '\u173F', "Hanunoo")]
        [InlineData('\u1740', '\u175F', "Buhid")]
        [InlineData('\u1760', '\u177F', "Tagbanwa")]
        [InlineData('\u1780', '\u17FF', "Khmer")]
        [InlineData('\u1800', '\u18AF', "Mongolian")]
        [InlineData('\u18B0', '\u18FF', "UnifiedCanadianAboriginalSyllabicsExtended")]
        [InlineData('\u1900', '\u194F', "Limbu")]
        [InlineData('\u1950', '\u197F', "TaiLe")]
        [InlineData('\u1980', '\u19DF', "NewTaiLue")]
        [InlineData('\u19E0', '\u19FF', "KhmerSymbols")]
        [InlineData('\u1A00', '\u1A1F', "Buginese")]
        [InlineData('\u1A20', '\u1AAF', "TaiTham")]
        [InlineData('\u1AB0', '\u1AFF', "CombiningDiacriticalMarksExtended")]
        [InlineData('\u1B00', '\u1B7F', "Balinese")]
        [InlineData('\u1B80', '\u1BBF', "Sundanese")]
        [InlineData('\u1BC0', '\u1BFF', "Batak")]
        [InlineData('\u1C00', '\u1C4F', "Lepcha")]
        [InlineData('\u1C50', '\u1C7F', "OlChiki")]
        [InlineData('\u1CC0', '\u1CCF', "SundaneseSupplement")]
        [InlineData('\u1CD0', '\u1CFF', "VedicExtensions")]
        [InlineData('\u1D00', '\u1D7F', "PhoneticExtensions")]
        [InlineData('\u1D80', '\u1DBF', "PhoneticExtensionsSupplement")]
        [InlineData('\u1DC0', '\u1DFF', "CombiningDiacriticalMarksSupplement")]
        [InlineData('\u1E00', '\u1EFF', "LatinExtendedAdditional")]
        [InlineData('\u1F00', '\u1FFF', "GreekExtended")]
        [InlineData('\u2000', '\u206F', "GeneralPunctuation")]
        [InlineData('\u2070', '\u209F', "SuperscriptsandSubscripts")]
        [InlineData('\u20A0', '\u20CF', "CurrencySymbols")]
        [InlineData('\u20D0', '\u20FF', "CombiningDiacriticalMarksforSymbols")]
        [InlineData('\u2100', '\u214F', "LetterlikeSymbols")]
        [InlineData('\u2150', '\u218F', "NumberForms")]
        [InlineData('\u2190', '\u21FF', "Arrows")]
        [InlineData('\u2200', '\u22FF', "MathematicalOperators")]
        [InlineData('\u2300', '\u23FF', "MiscellaneousTechnical")]
        [InlineData('\u2400', '\u243F', "ControlPictures")]
        [InlineData('\u2440', '\u245F', "OpticalCharacterRecognition")]
        [InlineData('\u2460', '\u24FF', "EnclosedAlphanumerics")]
        [InlineData('\u2500', '\u257F', "BoxDrawing")]
        [InlineData('\u2580', '\u259F', "BlockElements")]
        [InlineData('\u25A0', '\u25FF', "GeometricShapes")]
        [InlineData('\u2600', '\u26FF', "MiscellaneousSymbols")]
        [InlineData('\u2700', '\u27BF', "Dingbats")]
        [InlineData('\u27C0', '\u27EF', "MiscellaneousMathematicalSymbolsA")]
        [InlineData('\u27F0', '\u27FF', "SupplementalArrowsA")]
        [InlineData('\u2800', '\u28FF', "BraillePatterns")]
        [InlineData('\u2900', '\u297F', "SupplementalArrowsB")]
        [InlineData('\u2980', '\u29FF', "MiscellaneousMathematicalSymbolsB")]
        [InlineData('\u2A00', '\u2AFF', "SupplementalMathematicalOperators")]
        [InlineData('\u2B00', '\u2BFF', "MiscellaneousSymbolsandArrows")]
        [InlineData('\u2C00', '\u2C5F', "Glagolitic")]
        [InlineData('\u2C60', '\u2C7F', "LatinExtendedC")]
        [InlineData('\u2C80', '\u2CFF', "Coptic")]
        [InlineData('\u2D00', '\u2D2F', "GeorgianSupplement")]
        [InlineData('\u2D30', '\u2D7F', "Tifinagh")]
        [InlineData('\u2D80', '\u2DDF', "EthiopicExtended")]
        [InlineData('\u2DE0', '\u2DFF', "CyrillicExtendedA")]
        [InlineData('\u2E00', '\u2E7F', "SupplementalPunctuation")]
        [InlineData('\u2E80', '\u2EFF', "CjkRadicalsSupplement")]
        [InlineData('\u2F00', '\u2FDF', "KangxiRadicals")]
        [InlineData('\u2FF0', '\u2FFF', "IdeographicDescriptionCharacters")]
        [InlineData('\u3000', '\u303F', "CjkSymbolsandPunctuation")]
        [InlineData('\u3040', '\u309F', "Hiragana")]
        [InlineData('\u30A0', '\u30FF', "Katakana")]
        [InlineData('\u3100', '\u312F', "Bopomofo")]
        [InlineData('\u3130', '\u318F', "HangulCompatibilityJamo")]
        [InlineData('\u3190', '\u319F', "Kanbun")]
        [InlineData('\u31A0', '\u31BF', "BopomofoExtended")]
        [InlineData('\u31C0', '\u31EF', "CjkStrokes")]
        [InlineData('\u31F0', '\u31FF', "KatakanaPhoneticExtensions")]
        [InlineData('\u3200', '\u32FF', "EnclosedCjkLettersandMonths")]
        [InlineData('\u3300', '\u33FF', "CjkCompatibility")]
        [InlineData('\u3400', '\u4DBF', "CjkUnifiedIdeographsExtensionA")]
        [InlineData('\u4DC0', '\u4DFF', "YijingHexagramSymbols")]
        [InlineData('\u4E00', '\u9FFF', "CjkUnifiedIdeographs")]
        [InlineData('\uA000', '\uA48F', "YiSyllables")]
        [InlineData('\uA490', '\uA4CF', "YiRadicals")]
        [InlineData('\uA4D0', '\uA4FF', "Lisu")]
        [InlineData('\uA500', '\uA63F', "Vai")]
        [InlineData('\uA640', '\uA69F', "CyrillicExtendedB")]
        [InlineData('\uA6A0', '\uA6FF', "Bamum")]
        [InlineData('\uA700', '\uA71F', "ModifierToneLetters")]
        [InlineData('\uA720', '\uA7FF', "LatinExtendedD")]
        [InlineData('\uA800', '\uA82F', "SylotiNagri")]
        [InlineData('\uA830', '\uA83F', "CommonIndicNumberForms")]
        [InlineData('\uA840', '\uA87F', "Phagspa")]
        [InlineData('\uA880', '\uA8DF', "Saurashtra")]
        [InlineData('\uA8E0', '\uA8FF', "DevanagariExtended")]
        [InlineData('\uA900', '\uA92F', "KayahLi")]
        [InlineData('\uA930', '\uA95F', "Rejang")]
        [InlineData('\uA960', '\uA97F', "HangulJamoExtendedA")]
        [InlineData('\uA980', '\uA9DF', "Javanese")]
        [InlineData('\uA9E0', '\uA9FF', "MyanmarExtendedB")]
        [InlineData('\uAA00', '\uAA5F', "Cham")]
        [InlineData('\uAA60', '\uAA7F', "MyanmarExtendedA")]
        [InlineData('\uAA80', '\uAADF', "TaiViet")]
        [InlineData('\uAAE0', '\uAAFF', "MeeteiMayekExtensions")]
        [InlineData('\uAB00', '\uAB2F', "EthiopicExtendedA")]
        [InlineData('\uAB30', '\uAB6F', "LatinExtendedE")]
        [InlineData('\uAB70', '\uABBF', "CherokeeSupplement")]
        [InlineData('\uABC0', '\uABFF', "MeeteiMayek")]
        [InlineData('\uAC00', '\uD7AF', "HangulSyllables")]
        [InlineData('\uD7B0', '\uD7FF', "HangulJamoExtendedB")]
        [InlineData('\uF900', '\uFAFF', "CjkCompatibilityIdeographs")]
        [InlineData('\uFB00', '\uFB4F', "AlphabeticPresentationForms")]
        [InlineData('\uFB50', '\uFDFF', "ArabicPresentationFormsA")]
        [InlineData('\uFE00', '\uFE0F', "VariationSelectors")]
        [InlineData('\uFE10', '\uFE1F', "VerticalForms")]
        [InlineData('\uFE20', '\uFE2F', "CombiningHalfMarks")]
        [InlineData('\uFE30', '\uFE4F', "CjkCompatibilityForms")]
        [InlineData('\uFE50', '\uFE6F', "SmallFormVariants")]
        [InlineData('\uFE70', '\uFEFF', "ArabicPresentationFormsB")]
        [InlineData('\uFF00', '\uFFEF', "HalfwidthandFullwidthForms")]
        [InlineData('\uFFF0', '\uFFFF', "Specials")]
        public void Range_Unicode(ushort first, ushort last, string blockName)
        {
            Assert.Equal(0x0, first & 0xF); // first char in any block should be U+nnn0
            Assert.Equal(0xF, last & 0xF); // last char in any block should be U+nnnF
            Assert.True(first < last); // code point ranges should be ordered

            // Resolve the static UnicodeRanges property named after the block.
            var propInfo = typeof(UnicodeRanges).GetRuntimeProperty(blockName);
            Assert.NotNull(propInfo);

            UnicodeRange range = (UnicodeRange)propInfo.GetValue(null);
            Assert.NotNull(range);

            // Test 1: the range should span the range first..last
            Assert.Equal(first, range.FirstCodePoint);
            Assert.Equal(last, range.FirstCodePoint + range.Length - 1);
        }
    }
}
#region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. 
#endregion

// JsonSchemaGenerator and the JsonSchema type family are obsolete in Json.NET;
// this warning is disabled for the whole test file and restored at the bottom.
#pragma warning disable 618

using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Serialization;
using Newtonsoft.Json.Tests.TestObjects;
using Newtonsoft.Json.Utilities;
#if NETFX_CORE
using Microsoft.VisualStudio.TestPlatform.UnitTestFramework;
using TestFixture = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestClassAttribute;
using Test = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestMethodAttribute;
#elif ASPNETCORE50
using Xunit;
using Test = Xunit.FactAttribute;
using Assert = Newtonsoft.Json.Tests.XUnitAssert;
#else
using NUnit.Framework;
#endif
using Newtonsoft.Json.Schema;
using System.IO;
using Newtonsoft.Json.Linq;
using System.Text;
using Extensions = Newtonsoft.Json.Schema.Extensions;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif

namespace Newtonsoft.Json.Tests.Schema
{
    // Tests for the (obsolete) JsonSchemaGenerator: verifies the JSON Schema text
    // produced for a variety of CLR types, circular-reference handling, schema-id
    // generation, and validation of serialized instances against generated schemas.
    [TestFixture]
    public class JsonSchemaGeneratorTests : TestFixtureBase
    {
        // Generic dictionary maps to an "object" schema with additionalProperties
        // describing the value type; a serialized instance must validate against it.
        [Test]
        public void Generate_GenericDictionary()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            JsonSchema schema = generator.Generate(typeof(Dictionary<string, List<string>>));

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""type"": ""object"", ""additionalProperties"": { ""type"": [ ""array"", ""null"" ], ""items"": { ""type"": [ ""string"", ""null"" ] } } }", json);

            Dictionary<string, List<string>> value = new Dictionary<string, List<string>> { { "HasValue", new List<string>() { "first", "second", null } }, { "NoValue", null } };

            string valueJson = JsonConvert.SerializeObject(value, Formatting.Indented);

            JObject o = JObject.Parse(valueJson);

            Assert.IsTrue(o.IsValid(schema));
        }

#if !(NETFX_CORE || PORTABLE || ASPNETCORE50 || PORTABLE40)
        // [DefaultValue] attributes on members surface as "default" entries in the schema.
        [Test]
        public void Generate_DefaultValueAttributeTestClass()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            JsonSchema schema = generator.Generate(typeof(DefaultValueAttributeTestClass));

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""description"": ""DefaultValueAttributeTestClass description!"", ""type"": ""object"", ""additionalProperties"": false, ""properties"": { ""TestField1"": { ""required"": true, ""type"": ""integer"", ""default"": 21 }, ""TestProperty1"": { ""required"": true, ""type"": [ ""string"", ""null"" ], ""default"": ""TestProperty1Value"" } } }", json);
        }
#endif

        // [JsonObject] attribute metadata (Id/Title/Description) is copied into the schema.
        [Test]
        public void Generate_Person()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            JsonSchema schema = generator.Generate(typeof(Person));

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""id"": ""Person"", ""title"": ""Title!"", ""description"": ""JsonObjectAttribute description!"", ""type"": ""object"", ""properties"": { ""Name"": { ""required"": true, ""type"": [ ""string"", ""null"" ] }, ""BirthDate"": { ""required"": true, ""type"": ""string"" }, ""LastModified"": { ""required"": true, ""type"": ""string"" } } }", json);
        }

        // Nullable value types map to a ["<type>", "null"] union in the schema.
        [Test]
        public void Generate_UserNullable()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            JsonSchema schema = generator.Generate(typeof(UserNullable));

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""type"": ""object"", ""properties"": { ""Id"": { ""required"": true, ""type"": ""string"" }, ""FName"": { ""required"": true, ""type"": [ ""string"", ""null"" ] }, ""LName"": { ""required"": true, ""type"": [ ""string"", ""null"" ] }, ""RoleId"": { ""required"": true, ""type"": ""integer"" }, ""NullableRoleId"": { ""required"": true, ""type"": [ ""integer"", ""null"" ] }, ""NullRoleId"": { ""required"": true, ""type"": [ ""integer"", ""null"" ] }, ""Active"": { ""required"": true, ""type"": [ ""boolean"", ""null"" ] } } }", json);
        }

        // [JsonProperty(Required = ...)] settings control whether "null" is allowed in the type union.
        [Test]
        public void Generate_RequiredMembersClass()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            JsonSchema schema = generator.Generate(typeof(RequiredMembersClass));

            Assert.AreEqual(JsonSchemaType.String, schema.Properties["FirstName"].Type);
            Assert.AreEqual(JsonSchemaType.String | JsonSchemaType.Null, schema.Properties["MiddleName"].Type);
            Assert.AreEqual(JsonSchemaType.String | JsonSchemaType.Null, schema.Properties["LastName"].Type);
            Assert.AreEqual(JsonSchemaType.String, schema.Properties["BirthDate"].Type);
        }

        // Nested collection properties produce nested item schemas.
        [Test]
        public void Generate_Store()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            JsonSchema schema = generator.Generate(typeof(Store));

            Assert.AreEqual(11, schema.Properties.Count);

            JsonSchema productArraySchema = schema.Properties["product"];
            JsonSchema productSchema = productArraySchema.Items[0];

            Assert.AreEqual(4, productSchema.Properties.Count);
        }

        // UndefinedSchemaIdHandling controls how ids are synthesized for types
        // without an explicit schema id: none, FullName, or AssemblyQualifiedName.
        [Test]
        public void MissingSchemaIdHandlingTest()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();

            JsonSchema schema = generator.Generate(typeof(Store));
            Assert.AreEqual(null, schema.Id);

            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;
            schema = generator.Generate(typeof(Store));
            Assert.AreEqual(typeof(Store).FullName, schema.Id);

            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseAssemblyQualifiedName;
            schema = generator.Generate(typeof(Store));
            Assert.AreEqual(typeof(Store).AssemblyQualifiedName, schema.Id);
        }

        // A self-referencing type with no schema id and no id-generation policy
        // cannot be expressed and must fail with a descriptive message.
        [Test]
        public void CircularReferenceError()
        {
            ExceptionAssert.Throws<Exception>(() =>
            {
                JsonSchemaGenerator generator = new JsonSchemaGenerator();
                generator.Generate(typeof(CircularReferenceClass));
            }, @"Unresolved circular reference for type 'Newtonsoft.Json.Tests.TestObjects.CircularReferenceClass'. 
Explicitly define an Id for the type using a JsonObject/JsonArray attribute or automatically generate a type Id using the UndefinedSchemaIdHandling property.");
        }

        // With UseTypeName, the circular "Child" property resolves to the schema itself.
        [Test]
        public void CircularReferenceWithTypeNameId()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;

            JsonSchema schema = generator.Generate(typeof(CircularReferenceClass), true);

            Assert.AreEqual(JsonSchemaType.String, schema.Properties["Name"].Type);
            Assert.AreEqual(typeof(CircularReferenceClass).FullName, schema.Id);
            Assert.AreEqual(JsonSchemaType.Object | JsonSchemaType.Null, schema.Properties["Child"].Type);
            Assert.AreEqual(schema, schema.Properties["Child"]);
        }

        // An explicit [JsonObject(Id = ...)] id also resolves the circular reference.
        [Test]
        public void CircularReferenceWithExplicitId()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();

            JsonSchema schema = generator.Generate(typeof(CircularReferenceWithIdClass));

            Assert.AreEqual(JsonSchemaType.String | JsonSchemaType.Null, schema.Properties["Name"].Type);
            Assert.AreEqual("MyExplicitId", schema.Id);
            Assert.AreEqual(JsonSchemaType.Object | JsonSchemaType.Null, schema.Properties["Child"].Type);
            Assert.AreEqual(schema, schema.Properties["Child"]);
        }

        // System.Type is serialized as a string, so its schema type is String.
        [Test]
        public void GenerateSchemaForType()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;

            JsonSchema schema = generator.Generate(typeof(Type));

            Assert.AreEqual(JsonSchemaType.String, schema.Type);

            string json = JsonConvert.SerializeObject(typeof(Version), Formatting.Indented);

            JValue v = new JValue(json);
            Assert.IsTrue(v.IsValid(schema));
        }

#if !(NETFX_CORE || PORTABLE || ASPNETCORE50 || PORTABLE40)
        // ISerializable types generate an open object schema (no fixed properties).
        [Test]
        public void GenerateSchemaForISerializable()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;

            JsonSchema schema = generator.Generate(typeof(Exception));

            Assert.AreEqual(JsonSchemaType.Object, schema.Type);
            Assert.AreEqual(true, schema.AllowAdditionalProperties);
            Assert.AreEqual(null, schema.Properties);
        }
#endif

#if !(NETFX_CORE || PORTABLE || ASPNETCORE50 || PORTABLE40)
        // DBNull maps to the JSON "null" schema type.
        [Test]
        public void GenerateSchemaForDBNull()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;

            JsonSchema schema = generator.Generate(typeof(DBNull));

            Assert.AreEqual(JsonSchemaType.Null, schema.Type);
        }

        // Contract resolver that forces DirectoryInfo to be treated as a plain object
        // contract and drops the "Root" property (which would recurse endlessly).
        public class CustomDirectoryInfoMapper : DefaultContractResolver
        {
            public CustomDirectoryInfoMapper()
                : base(true)
            {
            }

            protected override JsonContract CreateContract(Type objectType)
            {
                if (objectType == typeof(DirectoryInfo))
                    return base.CreateObjectContract(objectType);

                return base.CreateContract(objectType);
            }

            protected override IList<JsonProperty> CreateProperties(Type type, MemberSerialization memberSerialization)
            {
                IList<JsonProperty> properties = base.CreateProperties(type, memberSerialization);

                // Exclude "Root" to avoid the self-referential DirectoryInfo.Root chain.
                JsonPropertyCollection c = new JsonPropertyCollection(type);
                c.AddRange(properties.Where(m => m.PropertyName != "Root"));

                return c;
            }
        }

        // End-to-end: generate a schema for DirectoryInfo via the custom resolver,
        // then serialize a real DirectoryInfo and validate it against that schema.
        [Test]
        public void GenerateSchemaForDirectoryInfo()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;
            generator.ContractResolver = new CustomDirectoryInfoMapper
            {
#if !(NETFX_CORE || PORTABLE || ASPNETCORE50)
                IgnoreSerializableAttribute = true
#endif
            };

            JsonSchema schema = generator.Generate(typeof(DirectoryInfo), true);

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""id"": ""System.IO.DirectoryInfo"", ""required"": true, ""type"": [ ""object"", ""null"" ], ""additionalProperties"": false, ""properties"": { ""Name"": { ""required"": true, ""type"": [ ""string"", ""null"" ] }, ""Parent"": { ""$ref"": ""System.IO.DirectoryInfo"" }, ""Exists"": { ""required"": true, ""type"": ""boolean"" }, ""FullName"": { ""required"": true, ""type"": [ ""string"", ""null"" ] }, ""Extension"": { 
""required"": true, ""type"": [ ""string"", ""null"" ] }, ""CreationTime"": { ""required"": true, ""type"": ""string"" }, ""CreationTimeUtc"": { ""required"": true, ""type"": ""string"" }, ""LastAccessTime"": { ""required"": true, ""type"": ""string"" }, ""LastAccessTimeUtc"": { ""required"": true, ""type"": ""string"" }, ""LastWriteTime"": { ""required"": true, ""type"": ""string"" }, ""LastWriteTimeUtc"": { ""required"": true, ""type"": ""string"" }, ""Attributes"": { ""required"": true, ""type"": ""integer"" } } }", json);

            DirectoryInfo temp = new DirectoryInfo(@"c:\temp");

            JTokenWriter jsonWriter = new JTokenWriter();
            JsonSerializer serializer = new JsonSerializer();
            serializer.Converters.Add(new IsoDateTimeConverter());
            serializer.ContractResolver = new CustomDirectoryInfoMapper
            {
#if !(NETFX_CORE || PORTABLE || ASPNETCORE50)
                IgnoreSerializableInterface = true
#endif
            };
            serializer.Serialize(jsonWriter, temp);

            List<string> errors = new List<string>();
            jsonWriter.Token.Validate(schema, (sender, args) => errors.Add(args.Message));

            Assert.AreEqual(0, errors.Count);
        }
#endif

        // CamelCasePropertyNamesContractResolver lower-cases the first letter of
        // every property name in the generated schema.
        [Test]
        public void GenerateSchemaCamelCase()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;
            generator.ContractResolver = new CamelCasePropertyNamesContractResolver()
            {
#if !(NETFX_CORE || PORTABLE || ASPNETCORE50 || PORTABLE40)
                IgnoreSerializableAttribute = true
#endif
            };

            JsonSchema schema = generator.Generate(typeof(Version), true);

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""id"": ""System.Version"", ""type"": [ ""object"", ""null"" ], ""additionalProperties"": false, ""properties"": { ""major"": { ""required"": true, ""type"": ""integer"" }, ""minor"": { ""required"": true, ""type"": ""integer"" }, ""build"": { ""required"": true, ""type"": ""integer"" }, ""revision"": { ""required"": true, ""type"": ""integer"" }, ""majorRevision"": { ""required"": true, ""type"": ""integer"" }, 
""minorRevision"": { ""required"": true, ""type"": ""integer"" } } }", json);
        }

#if !(NETFX_CORE || PORTABLE || ASPNETCORE50 || PORTABLE40)
        // With IgnoreSerializableAttribute = false, [Serializable] types expose their
        // private fields (e.g. Version's _Major/_Minor/...) and round-trip correctly.
        [Test]
        public void GenerateSchemaSerializable()
        {
            JsonSchemaGenerator generator = new JsonSchemaGenerator();
            generator.ContractResolver = new DefaultContractResolver
            {
                IgnoreSerializableAttribute = false
            };
            generator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;

            JsonSchema schema = generator.Generate(typeof(Version), true);

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""id"": ""System.Version"", ""type"": [ ""object"", ""null"" ], ""additionalProperties"": false, ""properties"": { ""_Major"": { ""required"": true, ""type"": ""integer"" }, ""_Minor"": { ""required"": true, ""type"": ""integer"" }, ""_Build"": { ""required"": true, ""type"": ""integer"" }, ""_Revision"": { ""required"": true, ""type"": ""integer"" } } }", json);

            JTokenWriter jsonWriter = new JTokenWriter();
            JsonSerializer serializer = new JsonSerializer();
            serializer.ContractResolver = new DefaultContractResolver
            {
                IgnoreSerializableAttribute = false
            };
            serializer.Serialize(jsonWriter, new Version(1, 2, 3, 4));

            List<string> errors = new List<string>();
            jsonWriter.Token.Validate(schema, (sender, args) => errors.Add(args.Message));

            Assert.AreEqual(0, errors.Count);

            StringAssert.AreEqual(@"{ ""_Major"": 1, ""_Minor"": 2, ""_Build"": 3, ""_Revision"": 4 }", jsonWriter.Token.ToString());

            Version version = jsonWriter.Token.ToObject<Version>(serializer);
            Assert.AreEqual(1, version.Major);
            Assert.AreEqual(2, version.Minor);
            Assert.AreEqual(3, version.Build);
            Assert.AreEqual(4, version.Revision);
        }
#endif

        // Enum with a negative member — used to check "enum" value emission below.
        public enum SortTypeFlag
        {
            No = 0,
            Asc = 1,
            Desc = -1
        }

        public class X
        {
            public SortTypeFlag x;
        }

        // Negative enum values must appear verbatim in the schema's "enum" array.
        [Test]
        public void GenerateSchemaWithNegativeEnum()
        {
            JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator();
            JsonSchema schema = jsonSchemaGenerator.Generate(typeof(X));

            string json = schema.ToString();

            StringAssert.AreEqual(@"{ ""type"": ""object"", ""properties"": { ""x"": { ""required"": true, ""type"": ""integer"", ""enum"": [ 0, 1, -1 ] } } }", json);
        }

        // Mutually-referencing collection types (Workspace DSL below) must not
        // trigger a circular-reference failure when ids are auto-generated.
        [Test]
        public void CircularCollectionReferences()
        {
            Type type = typeof(Workspace);
            JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator();

            jsonSchemaGenerator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;
            JsonSchema jsonSchema = jsonSchemaGenerator.Generate(type);

            // should succeed
            Assert.IsNotNull(jsonSchema);
        }

        // Self-reference plus mixed Required settings: "Child" becomes a $ref back
        // to the type's own schema id.
        [Test]
        public void CircularReferenceWithMixedRequires()
        {
            JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator();

            jsonSchemaGenerator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;
            JsonSchema jsonSchema = jsonSchemaGenerator.Generate(typeof(CircularReferenceClass));
            string json = jsonSchema.ToString();

            StringAssert.AreEqual(@"{ ""id"": ""Newtonsoft.Json.Tests.TestObjects.CircularReferenceClass"", ""type"": [ ""object"", ""null"" ], ""properties"": { ""Name"": { ""required"": true, ""type"": ""string"" }, ""Child"": { ""$ref"": ""Newtonsoft.Json.Tests.TestObjects.CircularReferenceClass"" } } }", json);
        }

        // [JsonProperty] DefaultValueHandling/NullValueHandling/ReferenceLoopHandling
        // settings all leave their mark on the generated schema.
        [Test]
        public void JsonPropertyWithHandlingValues()
        {
            JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator();

            jsonSchemaGenerator.UndefinedSchemaIdHandling = UndefinedSchemaIdHandling.UseTypeName;
            JsonSchema jsonSchema = jsonSchemaGenerator.Generate(typeof(JsonPropertyWithHandlingValues));
            string json = jsonSchema.ToString();

            StringAssert.AreEqual(@"{ ""id"": ""Newtonsoft.Json.Tests.TestObjects.JsonPropertyWithHandlingValues"", ""required"": true, ""type"": [ ""object"", ""null"" ], ""properties"": { ""DefaultValueHandlingIgnoreProperty"": { ""type"": [ ""string"", ""null"" ], ""default"": ""Default!"" }, ""DefaultValueHandlingIncludeProperty"": { ""required"": true, ""type"": [ ""string"", ""null"" ], ""default"": ""Default!"" }, ""DefaultValueHandlingPopulateProperty"": { ""required"": true, ""type"": [ ""string"", 
""null"" ], ""default"": ""Default!"" }, ""DefaultValueHandlingIgnoreAndPopulateProperty"": { ""type"": [ ""string"", ""null"" ], ""default"": ""Default!"" }, ""NullValueHandlingIgnoreProperty"": { ""type"": [ ""string"", ""null"" ] }, ""NullValueHandlingIncludeProperty"": { ""required"": true, ""type"": [ ""string"", ""null"" ] }, ""ReferenceLoopHandlingErrorProperty"": { ""$ref"": ""Newtonsoft.Json.Tests.TestObjects.JsonPropertyWithHandlingValues"" }, ""ReferenceLoopHandlingIgnoreProperty"": { ""$ref"": ""Newtonsoft.Json.Tests.TestObjects.JsonPropertyWithHandlingValues"" }, ""ReferenceLoopHandlingSerializeProperty"": { ""$ref"": ""Newtonsoft.Json.Tests.TestObjects.JsonPropertyWithHandlingValues"" } } }", json);
        }

        // Nullable<int> property maps to ["integer", "null"].
        [Test]
        public void GenerateForNullableInt32()
        {
            JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator();

            JsonSchema jsonSchema = jsonSchemaGenerator.Generate(typeof(NullableInt32TestClass));
            string json = jsonSchema.ToString();

            StringAssert.AreEqual(@"{ ""type"": ""object"", ""properties"": { ""Value"": { ""required"": true, ""type"": [ ""integer"", ""null"" ] } } }", json);
        }

        // Same negative-valued enum, but serialized as a string via StringEnumConverter.
        [JsonConverter(typeof(StringEnumConverter))]
        public enum SortTypeFlagAsString
        {
            No = 0,
            Asc = 1,
            Desc = -1
        }

        public class Y
        {
            public SortTypeFlagAsString y;
        }
    }

    // --- Fixture types used by the tests above ---

    public class NullableInt32TestClass
    {
        public int? Value { get; set; }
    }

    // Base type for the mutually-referencing "DSL" hierarchy exercised by
    // CircularCollectionReferences.
    public class DMDSLBase
    {
        public String Comment;
    }

    public class Workspace : DMDSLBase
    {
        public ControlFlowItemCollection Jobs = new ControlFlowItemCollection();
    }

    public class ControlFlowItemBase : DMDSLBase
    {
        public String Name;
    }

    public class ControlFlowItem : ControlFlowItemBase //A Job
    {
        public TaskCollection Tasks = new TaskCollection();
        public ContainerCollection Containers = new ContainerCollection();
    }

    public class ControlFlowItemCollection : List<ControlFlowItem>
    {
    }

    public class Task : ControlFlowItemBase
    {
        public DataFlowTaskCollection DataFlowTasks = new DataFlowTaskCollection();
        public BulkInsertTaskCollection BulkInsertTask = new BulkInsertTaskCollection();
    }

    public class TaskCollection : List<Task>
    {
    }

    public class Container : ControlFlowItemBase
    {
        public ControlFlowItemCollection ContainerJobs = new ControlFlowItemCollection();
    }

    public class ContainerCollection : List<Container>
    {
    }

    public class DataFlowTask_DSL : ControlFlowItemBase
    {
    }

    public class DataFlowTaskCollection : List<DataFlowTask_DSL>
    {
    }

    public class SequenceContainer_DSL : Container
    {
    }

    public class BulkInsertTaskCollection : List<BulkInsertTask_DSL>
    {
    }

    public class BulkInsertTask_DSL
    {
    }
}

#pragma warning restore 618
// // Options.cs // // Authors: // Jonathan Pryor <jpryor@novell.com> // // Copyright (C) 2008 Novell (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // // Compile With: // gmcs -debug+ -r:System.Core Options.cs -o:NDesk.Options.dll // gmcs -debug+ -d:LINQ -r:System.Core Options.cs -o:NDesk.Options.dll // // The LINQ version just changes the implementation of // OptionSet.Parse(IEnumerable<string>), and confers no semantic changes. // // A Getopt::Long-inspired option parsing library for C#. // // NDesk.Options.OptionSet is built upon a key/value table, where the // key is a option format string and the value is a delegate that is // invoked when the format string is matched. // // Option format strings: // Regex-like BNF Grammar: // name: .+ // type: [=:] // sep: ( [^{}]+ | '{' .+ '}' )? // aliases: ( name type sep ) ( '|' name type sep )* // // Each '|'-delimited name is an alias for the associated action. 
If the // format string ends in a '=', it has a required value. If the format // string ends in a ':', it has an optional value. If neither '=' or ':' // is present, no value is supported. `=' or `:' need only be defined on one // alias, but if they are provided on more than one they must be consistent. // // Each alias portion may also end with a "key/value separator", which is used // to split option values if the option accepts > 1 value. If not specified, // it defaults to '=' and ':'. If specified, it can be any character except // '{' and '}' OR the *string* between '{' and '}'. If no separator should be // used (i.e. the separate values should be distinct arguments), then "{}" // should be used as the separator. // // Options are extracted either from the current option by looking for // the option name followed by an '=' or ':', or is taken from the // following option IFF: // - The current option does not contain a '=' or a ':' // - The current option requires a value (i.e. not a Option type of ':') // // The `name' used in the option format string does NOT include any leading // option indicator, such as '-', '--', or '/'. All three of these are // permitted/required on any named option. // // Option bundling is permitted so long as: // - '-' is used to start the option group // - all of the bundled options are a single character // - at most one of the bundled options accepts a value, and the value // provided starts from the next character to the end of the string. // // This allows specifying '-a -b -c' as '-abc', and specifying '-D name=value' // as '-Dname=value'. // // Option processing is disabled by specifying "--". All options after "--" // are returned by OptionSet.Parse() unchanged and unprocessed. // // Unprocessed options are returned from OptionSet.Parse(). 
//
// Examples:
//  int verbose = 0;
//  OptionSet p = new OptionSet ()
//    .Add ("v", v => ++verbose)
//    .Add ("name=|value=", v => Console.WriteLine (v));
//  p.Parse (new string[]{"-v", "--v", "/v", "-name=A", "/name", "B", "extra"});
//
// The above would parse the argument string array, and would invoke the
// lambda expression three times, setting `verbose' to 3 when complete.
// It would also print out "A" and "B" to standard output.
// The returned array would contain the string "extra".
//
// C# 3.0 collection initializers are supported and encouraged:
//  var p = new OptionSet () {
//    { "h|?|help", v => ShowHelp () },
//  };
//
// System.ComponentModel.TypeConverter is also supported, allowing the use of
// custom data types in the callback type; TypeConverter.ConvertFromString()
// is used to convert the value option to an instance of the specified
// type:
//
//  var p = new OptionSet () {
//    { "foo=", (Foo f) => Console.WriteLine (f.ToString ()) },
//  };
//
// Random other tidbits:
//  - Boolean options (those w/o '=' or ':' in the option format string)
//    are explicitly enabled if they are followed with '+', and explicitly
//    disabled if they are followed with '-':
//      string a = null;
//      var p = new OptionSet () {
//        { "a", s => a = s },
//      };
//      p.Parse (new string[]{"-a"});   // sets v != null
//      p.Parse (new string[]{"-a+"});  // sets v != null
//      p.Parse (new string[]{"-a-"});  // sets v == null
//

// ReSharper disable All
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Security.Permissions;
using System.Text;
using System.Text.RegularExpressions;

namespace Mono.Options
{
    // The values captured for the Option currently being parsed. Implements both
    // IList and IList<string> by delegating everything to an internal List<string>;
    // the indexer getter additionally enforces, via AssertValid, that a required
    // value was actually supplied.
    public class OptionValueCollection : IList, IList<string>
    {
        List<string> values = new List<string>();
        OptionContext c; // owning parse context; provides Option/OptionName for validation

        internal OptionValueCollection(OptionContext c)
        {
            this.c = c;
        }

        #region ICollection
        void ICollection.CopyTo(Array array, int index) { (values as ICollection).CopyTo(array, index); }
        bool ICollection.IsSynchronized { get { return (values as ICollection).IsSynchronized; } }
        object ICollection.SyncRoot { get { return (values as ICollection).SyncRoot; } }
        #endregion

        #region ICollection<T>
        public void Add(string item) { values.Add(item); }
        public void Clear() { values.Clear(); }
        public bool Contains(string item) { return values.Contains(item); }
        public void CopyTo(string[] array, int arrayIndex) { values.CopyTo(array, arrayIndex); }
        public bool Remove(string item) { return values.Remove(item); }
        public int Count { get { return values.Count; } }
        public bool IsReadOnly { get { return false; } }
        #endregion

        #region IEnumerable
        IEnumerator IEnumerable.GetEnumerator() { return values.GetEnumerator(); }
        #endregion

        #region IEnumerable<T>
        public IEnumerator<string> GetEnumerator() { return values.GetEnumerator(); }
        #endregion

        #region IList
        int IList.Add(object value) { return (values as IList).Add(value); }
        bool IList.Contains(object value) { return (values as IList).Contains(value); }
        int IList.IndexOf(object value) { return (values as IList).IndexOf(value); }
        void IList.Insert(int index, object value) { (values as IList).Insert(index, value); }
        void IList.Remove(object value) { (values as IList).Remove(value); }
        void IList.RemoveAt(int index) { (values as IList).RemoveAt(index); }
        bool IList.IsFixedSize { get { return false; } }
        object IList.this[int index] { get { return this[index]; } set { (values as IList)[index] = value; } }
        #endregion

        #region IList<T>
        public int IndexOf(string item) { return values.IndexOf(item); }
        public void Insert(int index, string item) { values.Insert(index, item); }
        public void RemoveAt(int index) { values.RemoveAt(index); }

        // Validates an indexer read: the context must have a current Option, the
        // index must be within the option's declared value count, and a Required
        // option must actually have a value at that index (otherwise an
        // OptionException with the localized "missing value" message is thrown).
        private void AssertValid(int index)
        {
            if (c.Option == null)
                throw new InvalidOperationException("OptionContext.Option is null.");
            if (index >= c.Option.MaxValueCount)
                throw new ArgumentOutOfRangeException("index");
            if (c.Option.OptionValueType == OptionValueType.Required && index >= values.Count)
                throw new OptionException(string.Format(
                    c.OptionSet.MessageLocalizer("Missing required value for option '{0}'."), c.OptionName),
                    c.OptionName);
        }

        // Optional values may be absent: reads past Count return null (after
        // AssertValid has ruled out the Required case).
        public string this[int index]
        {
            get
            {
                AssertValid(index);
                return index >= values.Count ? null : values[index];
            }
            set { values[index] = value; }
        }
        #endregion

        public List<string> ToList() { return new List<string>(values); }
        public string[] ToArray() { return values.ToArray(); }
        public override string ToString() { return string.Join(", ", values.ToArray()); }
    }

    // Mutable state threaded through a single parse: the option currently being
    // handled, the name it was matched under, its argument index, the owning
    // OptionSet, and the values collected so far.
    public class OptionContext
    {
        private Option option;
        private string name;
        private int index;
        private OptionSet set;
        private OptionValueCollection c;

        public OptionContext(OptionSet set)
        {
            this.set = set;
            this.c = new OptionValueCollection(this);
        }

        public Option Option { get { return option; } set { option = value; } }
        public string OptionName { get { return name; } set { name = value; } }
        public int OptionIndex { get { return index; } set { index = value; } }
        public OptionSet OptionSet { get { return set; } }
        public OptionValueCollection OptionValues { get { return c; } }
    }

    // Whether an option takes no value, an optional value (':' suffix in the
    // prototype) or a required value ('=' suffix).
    public enum OptionValueType
    {
        None,
        Optional,
        Required,
    }

    // A single named option: parses its prototype string ("name=|n=" etc.) into
    // aliases, a value type and optional key/value separators, and dispatches to
    // OnParseComplete once all values have been collected.
    public abstract class Option
    {
        string prototype, description;
        string[] names;        // '|'-separated aliases from the prototype, with type chars stripped
        OptionValueType type;
        int count;             // maximum number of values this option accepts
        string[] separators;   // key/value separators for multi-value options; null => no splitting

        protected Option(string prototype, string description)
            : this(prototype, description, 1)
        {
        }

        // Validates the prototype/maxValueCount combination; ParsePrototype both
        // determines the value type and rewrites `names` in place.
        protected Option(string prototype, string description, int maxValueCount)
        {
            if (prototype == null)
                throw new ArgumentNullException("prototype");
            if (prototype.Length == 0)
                throw new ArgumentException("Cannot be the empty string.", "prototype");
            if (maxValueCount < 0)
                throw new ArgumentOutOfRangeException("maxValueCount");

            this.prototype = prototype;
            this.names = prototype.Split('|');
            this.description = description;
            this.count = maxValueCount;
            this.type = ParsePrototype();

            if (this.count == 0 && type != OptionValueType.None)
                throw new ArgumentException(
                    "Cannot provide maxValueCount of 0 for OptionValueType.Required or " +
                    "OptionValueType.Optional.",
                    "maxValueCount");
            if (this.type == OptionValueType.None && maxValueCount > 1)
                throw new ArgumentException(
                    string.Format("Cannot provide maxValueCount of {0} for OptionValueType.None.", maxValueCount),
                    "maxValueCount");
            // "<>" is the default handler alias; it must not demand values.
            if (Array.IndexOf(names, "<>") >= 0 &&
                ((names.Length == 1 && this.type != OptionValueType.None) ||
                 (names.Length > 1 && this.MaxValueCount > 1)))
                throw new ArgumentException(
                    "The default option handler '<>' cannot require values.",
                    "prototype");
        }

        public string Prototype { get { return prototype; } }
        public string Description { get { return description; } }
        public OptionValueType OptionValueType { get { return type; } }
        public int MaxValueCount { get { return count; } }

        // Defensive copies: callers must not mutate internal state.
        public string[] GetNames() { return (string[])names.Clone(); }

        public string[] GetValueSeparators()
        {
            if (separators == null)
                return new string[0];
            return (string[])separators.Clone();
        }

        // Converts a raw string value to T via TypeDescriptor/TypeConverter,
        // unwrapping Nullable<T> so the underlying type's converter is used.
        // Conversion failures are wrapped in a localized OptionException.
        protected static T Parse<T>(string value, OptionContext c)
        {
            Type tt = typeof(T);
            bool nullable = tt.IsValueType && tt.IsGenericType && !tt.IsGenericTypeDefinition &&
                tt.GetGenericTypeDefinition() == typeof(Nullable<>);
            Type targetType = nullable ? tt.GetGenericArguments()[0] : typeof(T);
            TypeConverter conv = TypeDescriptor.GetConverter(targetType);
            T t = default(T);
            try
            {
                if (value != null)
                    t = (T)conv.ConvertFromString(value);
            }
            catch (Exception e)
            {
                throw new OptionException(
                    string.Format(
                        c.OptionSet.MessageLocalizer("Could not convert string `{0}' to type {1} for option `{2}'."),
                        value, targetType.Name, c.OptionName),
                    c.OptionName, e);
            }
            return t;
        }

        internal string[] Names { get { return names; } }
        internal string[] ValueSeparators { get { return separators; } }

        static readonly char[] NameTerminator = new char[] { '=', ':' };

        // Scans each alias for a trailing '=' (required) or ':' (optional) marker,
        // strips it from the stored name, collects any key/value separator spec
        // that follows it, and enforces that all aliases agree on the value type.
        private OptionValueType ParsePrototype()
        {
            char type = '\0';
            List<string> seps = new List<string>();
            for (int i = 0; i < names.Length; ++i)
            {
                string name = names[i];
                if (name.Length == 0)
                    throw new ArgumentException("Empty option names are not supported.", "prototype");

                int end = name.IndexOfAny(NameTerminator);
                if (end == -1)
                    continue;
                names[i] = name.Substring(0, end);
                if (type == '\0' || type == name[end])
                    type = name[end];
                else
                    throw new ArgumentException(
                        string.Format("Conflicting option types: '{0}' vs. '{1}'.", type, name[end]),
                        "prototype");
                AddSeparators(name, end, seps);
            }

            if (type == '\0')
                return OptionValueType.None;

            if (count <= 1 && seps.Count != 0)
                throw new ArgumentException(
                    string.Format("Cannot provide key/value separators for Options taking {0} value(s).", count),
                    "prototype");
            if (count > 1)
            {
                if (seps.Count == 0)
                    this.separators = new string[] { ":", "=" }; // default separators
                else if (seps.Count == 1 && seps[0].Length == 0)
                    this.separators = null; // "{}" => values are distinct arguments
                else
                    this.separators = seps.ToArray();
            }

            return type == '=' ? OptionValueType.Required : OptionValueType.Optional;
        }

        // Parses the separator spec after the type char: single characters are
        // individual separators; "{...}" encloses a multi-character separator.
        // Unbalanced braces are rejected.
        private static void AddSeparators(string name, int end, ICollection<string> seps)
        {
            int start = -1;
            for (int i = end + 1; i < name.Length; ++i)
            {
                switch (name[i])
                {
                    case '{':
                        if (start != -1)
                            throw new ArgumentException(
                                string.Format("Ill-formed name/value separator found in \"{0}\".", name),
                                "prototype");
                        start = i + 1;
                        break;
                    case '}':
                        if (start == -1)
                            throw new ArgumentException(
                                string.Format("Ill-formed name/value separator found in \"{0}\".", name),
                                "prototype");
                        seps.Add(name.Substring(start, i - start));
                        start = -1;
                        break;
                    default:
                        if (start == -1)
                            seps.Add(name[i].ToString());
                        break;
                }
            }
            if (start != -1)
                throw new ArgumentException(
                    string.Format("Ill-formed name/value separator found in \"{0}\".", name),
                    "prototype");
        }

        // Fires the option's callback, then resets the context for the next option.
        public void Invoke(OptionContext c)
        {
            OnParseComplete(c);
            c.OptionName = null;
            c.Option = null;
            c.OptionValues.Clear();
        }

        protected abstract void OnParseComplete(OptionContext c);

        public override string ToString() { return Prototype; }
    }

    // Raised for parse/conversion failures; carries the offending option's name
    // and participates in binary serialization.
    [Serializable]
    public class OptionException : Exception
    {
        private string option;

        public OptionException()
        {
        }

        public OptionException(string message, string optionName)
            : base(message)
        {
            this.option = optionName;
        }

        public OptionException(string message, string optionName, Exception innerException)
            : base(message, innerException)
        {
            this.option = optionName;
        }

        protected OptionException(SerializationInfo info, StreamingContext context)
            : base(info, context)
        {
            this.option = info.GetString("OptionName");
        }

        public string OptionName { get { return this.option; } }

        [SecurityPermission(SecurityAction.LinkDemand, SerializationFormatter = true)]
        public override void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            base.GetObjectData(info, context);
            info.AddValue("OptionName", option);
        }
    }

    public delegate void OptionAction<TKey, TValue>(TKey key, TValue value);

    // NOTE(review): OptionSet continues beyond this chunk of the file; only the
    // start of the declaration is visible here.
    public class OptionSet : KeyedCollection<string, Option>
    {
        public OptionSet() :
this(delegate(string f) { return f; })
{
}

public OptionSet(Converter<string, string> localizer)
{
    this.localizer = localizer;
}

Converter<string, string> localizer;

/// <summary>
/// Localizer used to translate user-visible message strings.
/// </summary>
public Converter<string, string> MessageLocalizer
{
    get { return localizer; }
}

/// <summary>
/// Returns the key (primary name) under which <paramref name="item"/> is indexed.
/// </summary>
protected override string GetKeyForItem(Option item)
{
    if (item == null)
        throw new ArgumentNullException("option");
    if (item.Names != null && item.Names.Length > 0)
        return item.Names[0];
    // This should never happen, as it's invalid for Option to be
    // constructed w/o any names.
    throw new InvalidOperationException("Option has no names!");
}

protected override void InsertItem(int index, Option item)
{
    base.InsertItem(index, item);
    AddImpl(item);
}

protected override void RemoveItem(int index)
{
    // BUG FIX: the option must be captured *before* base.RemoveItem runs.
    // Previously base.RemoveItem(index) was called first, after which
    // Items[index] refers to a *different* option (or is out of range when
    // removing the last item), so the wrong alias names were unregistered.
    Option p = Items[index];
    base.RemoveItem(index);
    // KeyedCollection.RemoveItem() handles the 0th item
    for (int i = 1; i < p.Names.Length; ++i)
    {
        Dictionary.Remove(p.Names[i]);
    }
}

protected override void SetItem(int index, Option item)
{
    base.SetItem(index, item);
    RemoveItem(index);
    AddImpl(item);
}

/// <summary>
/// Registers all alias names (index 1..n) of <paramref name="option"/> in the
/// keyed dictionary; rolls back any partial registration on failure.
/// </summary>
private void AddImpl(Option option)
{
    if (option == null)
        throw new ArgumentNullException("option");
    List<string> added = new List<string>(option.Names.Length);
    try
    {
        // KeyedCollection.InsertItem/SetItem handle the 0th name.
for (int i = 1; i < option.Names.Length; ++i) { Dictionary.Add(option.Names[i], option); added.Add(option.Names[i]); } } catch (Exception) { foreach (string name in added) Dictionary.Remove(name); throw; } } public new OptionSet Add(Option option) { base.Add(option); return this; } sealed class ActionOption : Option { Action<OptionValueCollection> action; public ActionOption(string prototype, string description, int count, Action<OptionValueCollection> action) : base(prototype, description, count) { if (action == null) throw new ArgumentNullException("action"); this.action = action; } protected override void OnParseComplete(OptionContext c) { action(c.OptionValues); } } public OptionSet Add(string prototype, Action<string> action) { return Add(prototype, null, action); } public OptionSet Add(string prototype, string description, Action<string> action) { if (action == null) throw new ArgumentNullException("action"); Option p = new ActionOption(prototype, description, 1, delegate(OptionValueCollection v) { action(v[0]); }); base.Add(p); return this; } public OptionSet Add(string prototype, OptionAction<string, string> action) { return Add(prototype, null, action); } public OptionSet Add(string prototype, string description, OptionAction<string, string> action) { if (action == null) throw new ArgumentNullException("action"); Option p = new ActionOption(prototype, description, 2, delegate(OptionValueCollection v) { action(v[0], v[1]); }); base.Add(p); return this; } sealed class ActionOption<T> : Option { Action<T> action; public ActionOption(string prototype, string description, Action<T> action) : base(prototype, description, 1) { if (action == null) throw new ArgumentNullException("action"); this.action = action; } protected override void OnParseComplete(OptionContext c) { action(Parse<T>(c.OptionValues[0], c)); } } sealed class ActionOption<TKey, TValue> : Option { OptionAction<TKey, TValue> action; public ActionOption(string prototype, string description, 
OptionAction<TKey, TValue> action) : base(prototype, description, 2) { if (action == null) throw new ArgumentNullException("action"); this.action = action; } protected override void OnParseComplete(OptionContext c) { action( Parse<TKey>(c.OptionValues[0], c), Parse<TValue>(c.OptionValues[1], c)); } } public OptionSet Add<T>(string prototype, Action<T> action) { return Add(prototype, null, action); } public OptionSet Add<T>(string prototype, string description, Action<T> action) { return Add(new ActionOption<T>(prototype, description, action)); } public OptionSet Add<TKey, TValue>(string prototype, OptionAction<TKey, TValue> action) { return Add(prototype, null, action); } public OptionSet Add<TKey, TValue>(string prototype, string description, OptionAction<TKey, TValue> action) { return Add(new ActionOption<TKey, TValue>(prototype, description, action)); } protected virtual OptionContext CreateOptionContext() { return new OptionContext(this); } public List<string> Parse(IEnumerable<string> arguments) { OptionContext c = CreateOptionContext(); c.OptionIndex = -1; bool process = true; List<string> unprocessed = new List<string>(); Option def = Contains("<>") ? 
this["<>"] : null; foreach (string argument in arguments) { ++c.OptionIndex; if (argument == "--") { process = false; continue; } if (!process) { Unprocessed(unprocessed, def, c, argument); continue; } if (!Parse(argument, c)) Unprocessed(unprocessed, def, c, argument); } if (c.Option != null) c.Option.Invoke(c); return unprocessed; } private static bool Unprocessed(ICollection<string> extra, Option def, OptionContext c, string argument) { if (def == null) { extra.Add(argument); return false; } c.OptionValues.Add(argument); c.Option = def; c.Option.Invoke(c); return false; } private readonly Regex ValueOption = new Regex( @"^(?<flag>--|-|/)(?<name>[^:=]+)((?<sep>[:=])(?<value>.*))?$"); protected bool GetOptionParts(string argument, out string flag, out string name, out string sep, out string value) { if (argument == null) throw new ArgumentNullException("argument"); flag = name = sep = value = null; Match m = ValueOption.Match(argument); if (!m.Success) { return false; } flag = m.Groups["flag"].Value; name = m.Groups["name"].Value; if (m.Groups["sep"].Success && m.Groups["value"].Success) { sep = m.Groups["sep"].Value; value = m.Groups["value"].Value; } return true; } protected virtual bool Parse(string argument, OptionContext c) { if (c.Option != null) { ParseValue(argument, c); return true; } string f, n, s, v; if (!GetOptionParts(argument, out f, out n, out s, out v)) return false; Option p; if (ContainsKey(n)) { p = GetOptionForKey(n); c.OptionName = f + n; c.Option = p; switch (p.OptionValueType) { case OptionValueType.None: c.OptionValues.Add(n); c.Option.Invoke(c); break; case OptionValueType.Optional: case OptionValueType.Required: ParseValue(v, c); break; } return true; } // no match; is it a bool option? if (ParseBool(argument, n, c)) return true; // is it a bundled option? 
if (ParseBundledValue(f, string.Concat(n + s + v), c)) return true; return false; } private bool ContainsKey(string key) { return this.SelectMany(op => op.Names.Select(n => n.ToLower())).Contains(key.ToLower()); } private Option GetOptionForKey(string key) { return this.SingleOrDefault(op => op.Names.Select(n => n.ToLower()).Contains(key.ToLower())); } private void ParseValue(string option, OptionContext c) { if (option != null) foreach (string o in c.Option.ValueSeparators != null ? option.Split(c.Option.ValueSeparators, StringSplitOptions.None) : new string[] { option }) { c.OptionValues.Add(o); } if (c.OptionValues.Count == c.Option.MaxValueCount || c.Option.OptionValueType == OptionValueType.Optional) c.Option.Invoke(c); else if (c.OptionValues.Count > c.Option.MaxValueCount) { throw new OptionException(localizer(string.Format( "Error: Found {0} option values when expecting {1}.", c.OptionValues.Count, c.Option.MaxValueCount)), c.OptionName); } } private bool ParseBool(string option, string n, OptionContext c) { Option p; string rn; if (n.Length >= 1 && (n[n.Length - 1] == '+' || n[n.Length - 1] == '-') && Contains((rn = n.Substring(0, n.Length - 1)))) { p = this[rn]; string v = n[n.Length - 1] == '+' ? option : null; c.OptionName = option; c.Option = p; c.OptionValues.Add(v); p.Invoke(c); return true; } return false; } private bool ParseBundledValue(string f, string n, OptionContext c) { if (f != "-") return false; for (int i = 0; i < n.Length; ++i) { Option p; string opt = f + n[i].ToString(); string rn = n[i].ToString(); if (!Contains(rn)) { if (i == 0) return false; throw new OptionException(string.Format(localizer( "Cannot bundle unregistered option '{0}'."), opt), opt); } p = this[rn]; switch (p.OptionValueType) { case OptionValueType.None: Invoke(c, opt, n, p); break; case OptionValueType.Optional: case OptionValueType.Required: { string v = n.Substring(i + 1); c.Option = p; c.OptionName = opt; ParseValue(v.Length != 0 ? 
v : null, c); return true; } default: throw new InvalidOperationException("Unknown OptionValueType: " + p.OptionValueType); } } return true; } private static void Invoke(OptionContext c, string name, string value, Option option) { c.OptionName = name; c.Option = option; c.OptionValues.Add(value); option.Invoke(c); } private const int OptionWidth = 29; public void WriteOptionDescriptions(TextWriter o) { foreach (Option p in this) { int written = 0; if (!WriteOptionPrototype(o, p, ref written)) continue; if (written < OptionWidth) o.Write(new string(' ', OptionWidth - written)); else { o.WriteLine(); o.Write(new string(' ', OptionWidth)); } bool indent = false; string prefix = new string(' ', OptionWidth + 2); foreach (string line in GetLines(localizer(GetDescription(p.Description)))) { if (indent) o.Write(prefix); o.WriteLine(line); indent = true; } } } bool WriteOptionPrototype(TextWriter o, Option p, ref int written) { string[] names = p.Names; int i = GetNextOptionIndex(names, 0); if (i == names.Length) return false; if (names[i].Length == 1) { Write(o, ref written, " -"); Write(o, ref written, names[0]); } else { Write(o, ref written, " --"); Write(o, ref written, names[0]); } for (i = GetNextOptionIndex(names, i + 1); i < names.Length; i = GetNextOptionIndex(names, i + 1)) { Write(o, ref written, ", "); Write(o, ref written, names[i].Length == 1 ? "-" : "--"); Write(o, ref written, names[i]); } if (p.OptionValueType == OptionValueType.Optional || p.OptionValueType == OptionValueType.Required) { if (p.OptionValueType == OptionValueType.Optional) { Write(o, ref written, localizer("[")); } Write(o, ref written, localizer("=" + GetArgumentName(0, p.MaxValueCount, p.Description))); string sep = p.ValueSeparators != null && p.ValueSeparators.Length > 0 ? 
p.ValueSeparators[0] : " "; for (int c = 1; c < p.MaxValueCount; ++c) { Write(o, ref written, localizer(sep + GetArgumentName(c, p.MaxValueCount, p.Description))); } if (p.OptionValueType == OptionValueType.Optional) { Write(o, ref written, localizer("]")); } } return true; } static int GetNextOptionIndex(string[] names, int i) { while (i < names.Length && names[i] == "<>") { ++i; } return i; } static void Write(TextWriter o, ref int n, string s) { n += s.Length; o.Write(s); } private static string GetArgumentName(int index, int maxIndex, string description) { if (description == null) return maxIndex == 1 ? "VALUE" : "VALUE" + (index + 1); string[] nameStart; if (maxIndex == 1) nameStart = new string[] { "{0:", "{" }; else nameStart = new string[] { "{" + index + ":" }; for (int i = 0; i < nameStart.Length; ++i) { int start, j = 0; do { start = description.IndexOf(nameStart[i], j); } while (start >= 0 && j != 0 ? description[j++ - 1] == '{' : false); if (start == -1) continue; int end = description.IndexOf("}", start); if (end == -1) continue; return description.Substring(start + nameStart[i].Length, end - start - nameStart[i].Length); } return maxIndex == 1 ? 
"VALUE" : "VALUE" + (index + 1); } private static string GetDescription(string description) { if (description == null) return string.Empty; StringBuilder sb = new StringBuilder(description.Length); int start = -1; for (int i = 0; i < description.Length; ++i) { switch (description[i]) { case '{': if (i == start) { sb.Append('{'); start = -1; } else if (start < 0) start = i + 1; break; case '}': if (start < 0) { if ((i + 1) == description.Length || description[i + 1] != '}') throw new InvalidOperationException("Invalid option description: " + description); ++i; sb.Append("}"); } else { sb.Append(description.Substring(start, i - start)); start = -1; } break; case ':': if (start < 0) goto default; start = i + 1; break; default: if (start < 0) sb.Append(description[i]); break; } } return sb.ToString(); } private static IEnumerable<string> GetLines(string description) { if (string.IsNullOrEmpty(description)) { yield return string.Empty; yield break; } int length = 80 - OptionWidth - 1; int start = 0, end; do { end = GetLineEnd(start, length, description); char c = description[end - 1]; if (char.IsWhiteSpace(c) && end != description.Length) --end; bool writeContinuation = end != description.Length && !IsEolChar(c); string line = description.Substring(start, end - start) + (writeContinuation ? "-" : ""); yield return line; start = end; if (char.IsWhiteSpace(c)) ++start; length = 80 - OptionWidth - 2 - 1; } while (end < description.Length); } private static bool IsEolChar(char c) { return !char.IsLetterOrDigit(c); } private static int GetLineEnd(int start, int length, string description) { int end = System.Math.Min(start + length, description.Length); int sep = -1; for (int i = start + 1; i < end; ++i) { if (description[i] == '\n') return i + 1; if (IsEolChar(description[i])) sep = i + 1; } if (sep == -1 || end == description.Length) return end; return sep; } } }
using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using System.Diagnostics; using Microsoft.Extensions.Logging; namespace Orleans.EventSourcing.Common { /// <summary> /// A general template for constructing log view adaptors that are based on /// a sequentially read and written primary. We use this to construct /// a variety of different log-consistency providers, all following the same basic pattern /// (read and write latest view from/to primary, and send notifications after writing). ///<para> /// Note that the log itself is transient, i.e. not actually saved to storage - only the latest view and some /// metadata (the log position, and write flags) is stored in the primary. /// It is safe to interleave calls to this adaptor (using grain scheduler only, of course). /// </para> ///<para> /// Subclasses override ReadAsync and WriteAsync to read from / write to primary. /// Calls to the primary are serialized, i.e. never interleave. /// </para> /// </summary> /// <typeparam name="TLogView">The user-defined view of the log</typeparam> /// <typeparam name="TLogEntry">The type of the log entries</typeparam> /// <typeparam name="TSubmissionEntry">The type of submission entries stored in pending queue</typeparam> public abstract class PrimaryBasedLogViewAdaptor<TLogView, TLogEntry, TSubmissionEntry> : ILogViewAdaptor<TLogView, TLogEntry> where TLogView : class, new() where TLogEntry : class where TSubmissionEntry : SubmissionEntry<TLogEntry> { /// <summary> /// Set confirmed view the initial value (a view of the empty log) /// </summary> protected abstract void InitializeConfirmedView(TLogView initialstate); /// <summary> /// Read cached global state. /// </summary> protected abstract TLogView LastConfirmedView(); /// <summary> /// Read version of cached global state. /// </summary> protected abstract int GetConfirmedVersion(); /// <summary> /// Read the latest primary state. Must block/retry until successful. 
/// Should not throw exceptions, but record them in <see cref="LastPrimaryIssue"/> /// </summary> /// <returns></returns> protected abstract Task ReadAsync(); /// <summary> /// Apply pending entries to the primary. Must block/retry until successful. /// Should not throw exceptions, but record them in <see cref="LastPrimaryIssue"/> /// </summary> protected abstract Task<int> WriteAsync(); /// <summary> /// Create a submission entry for the submitted log entry. /// Using a type parameter so we can add protocol-specific info to this class. /// </summary> /// <returns></returns> protected abstract TSubmissionEntry MakeSubmissionEntry(TLogEntry entry); /// <summary> /// Whether this cluster supports submitting updates /// </summary> protected virtual bool SupportSubmissions { get { return true; } } /// <summary> /// Handle protocol messages. /// </summary> protected virtual Task<ILogConsistencyProtocolMessage> OnMessageReceived(ILogConsistencyProtocolMessage payload) { // subclasses that define custom protocol messages must override this throw new NotImplementedException(); } public virtual Task<IReadOnlyList<TLogEntry>> RetrieveLogSegment(int fromVersion, int length) { throw new NotSupportedException(); } /// <summary> /// Handle notification messages. Override this to handle notification subtypes. 
/// </summary> protected virtual void OnNotificationReceived(INotificationMessage payload) { var msg = payload as VersionNotificationMessage; if (msg != null) { if (msg.Version > lastVersionNotified) lastVersionNotified = msg.Version; return; } var batchmsg = payload as BatchedNotificationMessage; if (batchmsg != null) { foreach (var bm in batchmsg.Notifications) OnNotificationReceived(bm); return; } // subclass should have handled this in override throw new ProtocolTransportException(string.Format("message type {0} not handled by OnNotificationReceived", payload.GetType().FullName)); } /// <summary> /// The last version we have been notified of /// </summary> private int lastVersionNotified; /// <summary> /// Process stored notifications during worker cycle. Override to handle notification subtypes. /// </summary> protected virtual void ProcessNotifications() { if (lastVersionNotified > this.GetConfirmedVersion()) { Services.Log(LogLevel.Debug, "force refresh because of version notification v{0}", lastVersionNotified); needRefresh = true; } } /// <summary> /// Merge two notification messages, for batching. Override to handle notification subtypes. /// </summary> protected virtual INotificationMessage Merge(INotificationMessage earliermessage, INotificationMessage latermessage) { return new VersionNotificationMessage() { Version = latermessage.Version }; } /// <summary> /// The grain that is using this adaptor. /// </summary> protected ILogViewAdaptorHost<TLogView, TLogEntry> Host { get; private set; } /// <summary> /// The runtime services required for implementing notifications between grain instances in different cluster. /// </summary> protected ILogConsistencyProtocolServices Services { get; private set; } /// <summary> /// Construct an instance, for the given parameters. 
/// </summary> protected PrimaryBasedLogViewAdaptor(ILogViewAdaptorHost<TLogView, TLogEntry> host, TLogView initialstate, ILogConsistencyProtocolServices services) { Debug.Assert(host != null && services != null && initialstate != null); this.Host = host; this.Services = services; InitializeConfirmedView(initialstate); worker = new BatchWorkerFromDelegate(Work); } /// <inheritdoc/> public virtual Task PreOnActivate() { Services.Log(LogLevel.Trace, "PreActivation Started"); // this flag indicates we have not done an initial load from storage yet // we do not act on this yet, but wait until after user OnActivate has run. needInitialRead = true; Services.Log(LogLevel.Trace, "PreActivation Complete"); return Task.CompletedTask; } public virtual Task PostOnActivate() { Services.Log(LogLevel.Trace, "PostActivation Started"); // start worker, if it has not already happened if (needInitialRead) worker.Notify(); Services.Log(LogLevel.Trace, "PostActivation Complete"); return Task.CompletedTask; } /// <inheritdoc/> public virtual async Task PostOnDeactivate() { Services.Log(LogLevel.Trace, "Deactivation Started"); while (!worker.IsIdle()) { await worker.WaitForCurrentWorkToBeServiced(); } Services.Log(LogLevel.Trace, "Deactivation Complete"); } // the currently submitted, unconfirmed entries. private readonly List<TSubmissionEntry> pending = new List<TSubmissionEntry>(); /// called at beginning of WriteAsync to the current tentative state protected TLogView CopyTentativeState() { var state = TentativeView; tentativeStateInternal = null; // to avoid aliasing return state; } /// called at beginning of WriteAsync to the current batch of updates protected TSubmissionEntry[] GetCurrentBatchOfUpdates() { return pending.ToArray(); // must use a copy } /// called at beginning of WriteAsync to get current number of pending updates protected int GetNumberPendingUpdates() { return pending.Count; } /// <summary> /// Tentative State. Represents Stable State + effects of pending updates. 
/// Computed lazily (null if not in use) /// </summary> private TLogView tentativeStateInternal; /// <summary> /// A flag that indicates to the worker that the client wants to refresh the state /// </summary> private bool needRefresh; /// <summary> /// A flag that indicates that we have not read global state at all yet, and should do so /// </summary> private bool needInitialRead; /// <summary> /// Background worker which asynchronously sends operations to the leader /// </summary> private BatchWorker worker; /// statistics gathering. Is null unless stats collection is turned on. protected LogConsistencyStatistics stats = null; /// For use by protocols. Determines if this cluster is part of the configured multicluster. protected bool IsMyClusterJoined() { return true; } /// <summary> /// Block until this cluster is joined to the multicluster. /// </summary> protected async Task EnsureClusterJoinedAsync() { while (!IsMyClusterJoined()) { Services.Log(LogLevel.Debug, "Waiting for join"); await Task.Delay(5000); } } /// <inheritdoc /> public void Submit(TLogEntry logEntry) { if (!SupportSubmissions) throw new InvalidOperationException("provider does not support submissions on cluster " + Services.MyClusterId); if (stats != null) stats.EventCounters["SubmitCalled"]++; Services.Log(LogLevel.Trace, "Submit"); SubmitInternal(DateTime.UtcNow, logEntry); worker.Notify(); } /// <inheritdoc /> public void SubmitRange(IEnumerable<TLogEntry> logEntries) { if (!SupportSubmissions) throw new InvalidOperationException("Provider does not support submissions on cluster " + Services.MyClusterId); if (stats != null) stats.EventCounters["SubmitRangeCalled"]++; Services.Log(LogLevel.Trace, "SubmitRange"); var time = DateTime.UtcNow; foreach (var e in logEntries) SubmitInternal(time, e); worker.Notify(); } /// <inheritdoc /> public Task<bool> TryAppend(TLogEntry logEntry) { if (!SupportSubmissions) throw new InvalidOperationException("Provider does not support submissions on cluster " + 
Services.MyClusterId); if (stats != null) stats.EventCounters["TryAppendCalled"]++; Services.Log(LogLevel.Trace, "TryAppend"); var promise = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously); SubmitInternal(DateTime.UtcNow, logEntry, GetConfirmedVersion() + pending.Count, promise); worker.Notify(); return promise.Task; } /// <inheritdoc /> public Task<bool> TryAppendRange(IEnumerable<TLogEntry> logEntries) { if (!SupportSubmissions) throw new InvalidOperationException("Provider does not support submissions on cluster " + Services.MyClusterId); if (stats != null) stats.EventCounters["TryAppendRangeCalled"]++; Services.Log(LogLevel.Trace, "TryAppendRange"); var promise = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously); var time = DateTime.UtcNow; var pos = GetConfirmedVersion() + pending.Count; bool first = true; foreach (var e in logEntries) { SubmitInternal(time, e, pos++, first ? promise : null); first = false; } worker.Notify(); return promise.Task; } private const int unconditional = -1; private void SubmitInternal(DateTime time, TLogEntry logentry, int conditionalPosition = unconditional, TaskCompletionSource<bool> resultPromise = null) { // create a submission entry var submissionentry = this.MakeSubmissionEntry(logentry); submissionentry.SubmissionTime = time; submissionentry.ResultPromise = resultPromise; submissionentry.ConditionalPosition = conditionalPosition; // add submission to queue pending.Add(submissionentry); // if we have a tentative state in use, update it if (this.tentativeStateInternal != null) { try { Host.UpdateView(this.tentativeStateInternal, logentry); } catch (Exception e) { Services.CaughtUserCodeException("UpdateView", nameof(SubmitInternal), e); } } try { Host.OnViewChanged(true, false); } catch (Exception e) { Services.CaughtUserCodeException("OnViewChanged", nameof(SubmitInternal), e); } } /// <inheritdoc /> public TLogView TentativeView { get { if (stats != null) 
stats.EventCounters["TentativeViewCalled"]++; if (tentativeStateInternal == null) CalculateTentativeState(); return tentativeStateInternal; } } /// <inheritdoc /> public TLogView ConfirmedView { get { if (stats != null) stats.EventCounters["ConfirmedViewCalled"]++; return LastConfirmedView(); } } /// <inheritdoc /> public int ConfirmedVersion { get { if (stats != null) stats.EventCounters["ConfirmedVersionCalled"]++; return GetConfirmedVersion(); } } /// <summary> /// Called from network /// </summary> /// <param name="payLoad"></param> /// <returns></returns> public async Task<ILogConsistencyProtocolMessage> OnProtocolMessageReceived(ILogConsistencyProtocolMessage payLoad) { var notificationMessage = payLoad as INotificationMessage; if (notificationMessage != null) { Services.Log(LogLevel.Debug, "NotificationReceived v{0}", notificationMessage.Version); OnNotificationReceived(notificationMessage); // poke worker so it will process the notifications worker.Notify(); return null; } else { //it's a protocol message return await OnMessageReceived(payLoad); } } /// <summary> /// method is virtual so subclasses can add their own events /// </summary> public virtual void EnableStatsCollection() { stats = new LogConsistencyStatistics() { EventCounters = new Dictionary<string, long>(), StabilizationLatenciesInMsecs = new List<int>() }; stats.EventCounters.Add("TentativeViewCalled", 0); stats.EventCounters.Add("ConfirmedViewCalled", 0); stats.EventCounters.Add("ConfirmedVersionCalled", 0); stats.EventCounters.Add("SubmitCalled", 0); stats.EventCounters.Add("SubmitRangeCalled", 0); stats.EventCounters.Add("TryAppendCalled", 0); stats.EventCounters.Add("TryAppendRangeCalled", 0); stats.EventCounters.Add("ConfirmSubmittedEntriesCalled", 0); stats.EventCounters.Add("SynchronizeNowCalled", 0); stats.EventCounters.Add("WritebackEvents", 0); stats.StabilizationLatenciesInMsecs = new List<int>(); } /// <summary> /// Disable stats collection /// </summary> public void 
DisableStatsCollection() { stats = null; } /// <summary> /// Get states /// </summary> /// <returns></returns> public LogConsistencyStatistics GetStats() { return stats; } private void CalculateTentativeState() { // copy the confirmed view this.tentativeStateInternal = Services.DeepCopy(LastConfirmedView()); // Now apply all operations in pending foreach (var u in this.pending) try { Host.UpdateView(this.tentativeStateInternal, u.Entry); } catch (Exception e) { Services.CaughtUserCodeException("UpdateView", nameof(CalculateTentativeState), e); } } /// <summary> /// batch worker performs reads from and writes to global state. /// only one work cycle is active at any time. /// </summary> internal async Task Work() { Services.Log(LogLevel.Debug, "<1 ProcessNotifications"); var version = GetConfirmedVersion(); ProcessNotifications(); Services.Log(LogLevel.Debug, "<2 NotifyViewChanges"); NotifyViewChanges(ref version); bool haveToWrite = (pending.Count != 0); bool haveToRead = needInitialRead || (needRefresh && !haveToWrite); Services.Log(LogLevel.Debug, "<3 Storage htr={0} htw={1}", haveToRead, haveToWrite); try { if (haveToRead) { needRefresh = needInitialRead = false; // retrieving fresh version await ReadAsync(); NotifyViewChanges(ref version); } if (haveToWrite) { needRefresh = needInitialRead = false; // retrieving fresh version await UpdatePrimary(); if (stats != null) stats.EventCounters["WritebackEvents"]++; } } catch (Exception e) { // this should never happen - we are supposed to catch and store exceptions // in the correct place (LastPrimaryException or notification trackers) Services.ProtocolError($"Exception in Worker Cycle: {e}", true); } Services.Log(LogLevel.Debug, "<4 Done"); } /// <summary> /// This function stores the operations in the pending queue as a batch to the primary. /// Retries until some batch commits or there are no updates left. 
/// </summary>
internal async Task UpdatePrimary()
{
    int version = GetConfirmedVersion();
    while (true)
    {
        try
        {
            // find stale conditional updates, remove them, and notify waiters
            RemoveStaleConditionalUpdates();

            if (pending.Count == 0)
                return; // no updates to write.

            // try to write the updates as a batch
            var writeResult = await WriteAsync();
            NotifyViewChanges(ref version, writeResult);

            // if the batch write failed due to conflicts, retry.
            if (writeResult == 0)
                continue;

            try
            {
                Host.OnViewChanged(false, true);
            }
            catch (Exception e)
            {
                Services.CaughtUserCodeException("OnViewChanged", nameof(UpdatePrimary), e);
            }

            // notify waiting promises of the success of conditional updates
            NotifyPromises(writeResult, true);

            // record stabilization time, for statistics
            if (stats != null)
            {
                var timeNow = DateTime.UtcNow;
                for (int i = 0; i < writeResult; i++)
                {
                    var latency = timeNow - pending[i].SubmissionTime;
                    // BUG FIX: TimeSpan.Milliseconds is only the 0-999 ms
                    // component of the interval; TotalMilliseconds is the full
                    // latency this "LatenciesInMsecs" statistic is meant to hold.
                    stats.StabilizationLatenciesInMsecs.Add((int)latency.TotalMilliseconds);
                }
            }

            // remove completed updates from queue
            pending.RemoveRange(0, writeResult);
            return;
        }
        catch (Exception e)
        {
            // this should never happen - we are supposed to catch and store exceptions
            // in the correct place (LastPrimaryException or notification trackers)
            Services.ProtocolError($"Exception in {nameof(UpdatePrimary)}: {e}", true);
        }
    }
}

/// <summary>
/// Compares the confirmed version against what the caller last observed and,
/// if either the confirmed or tentative view changed, invalidates the cached
/// tentative state and raises <c>Host.OnViewChanged</c>.
/// </summary>
private void NotifyViewChanges(ref int version, int numWritten = 0)
{
    var v = GetConfirmedVersion();
    bool tentativeChanged = (v != version + numWritten);
    bool confirmedChanged = (v != version);
    if (tentativeChanged || confirmedChanged)
    {
        tentativeStateInternal = null; // conservative.
        try
        {
            Host.OnViewChanged(tentativeChanged, confirmedChanged);
        }
        catch (Exception e)
        {
            Services.CaughtUserCodeException("OnViewChanged", nameof(NotifyViewChanges), e);
        }
        version = v;
    }
}

/// <summary>
/// Store the last issue that occurred while reading or updating primary.
/// Is null if successful.
/// </summary>
protected RecordedConnectionIssue LastPrimaryIssue;

/// <inheritdoc />
public async Task Synchronize()
{
    if (stats != null)
        stats.EventCounters["SynchronizeNowCalled"]++;

    Services.Log(LogLevel.Debug, "SynchronizeNowStart");

    needRefresh = true;
    await worker.NotifyAndWaitForWorkToBeServiced();

    Services.Log(LogLevel.Debug, "SynchronizeNowComplete");
}

/// <inheritdoc/>
public IEnumerable<TLogEntry> UnconfirmedSuffix
{
    get { return pending.Select(te => te.Entry); }
}

/// <inheritdoc />
public async Task ConfirmSubmittedEntries()
{
    if (stats != null)
        stats.EventCounters["ConfirmSubmittedEntriesCalled"]++;

    Services.Log(LogLevel.Debug, "ConfirmSubmittedEntriesStart");

    if (pending.Count != 0)
        await worker.WaitForCurrentWorkToBeServiced();

    Services.Log(LogLevel.Debug, "ConfirmSubmittedEntriesEnd");
}

/// <summary>
/// send failure notifications
/// </summary>
protected void NotifyPromises(int count, bool success)
{
    for (int i = 0; i < count; i++)
    {
        var promise = pending[i].ResultPromise;
        if (promise != null)
            promise.SetResult(success);
    }
}

/// <summary>
/// go through updates and remove all the conditional updates that have already failed
/// </summary>
protected void RemoveStaleConditionalUpdates()
{
    int version = GetConfirmedVersion();
    bool foundFailedConditionalUpdates = false;

    // BUG FIX: the loop body previously contained a stray extra `pos++;`
    // which, combined with the for-loop's own increment, advanced the index
    // by two each iteration - every other pending entry was skipped and its
    // stale conditional update could go undetected (and its promise never
    // resolved to false).
    for (int pos = 0; pos < pending.Count; pos++)
    {
        var submissionEntry = pending[pos];
        if (submissionEntry.ConditionalPosition != unconditional
            && (foundFailedConditionalUpdates
                || submissionEntry.ConditionalPosition != (version + pos)))
        {
            foundFailedConditionalUpdates = true;
            if (submissionEntry.ResultPromise != null)
                submissionEntry.ResultPromise.SetResult(false);
        }
    }

    if (foundFailedConditionalUpdates)
    {
        pending.RemoveAll(e => e.ConditionalPosition != unconditional);
        tentativeStateInternal = null;
        try
        {
            Host.OnViewChanged(true, false);
        }
        catch (Exception e)
        {
            Services.CaughtUserCodeException("OnViewChanged", nameof(RemoveStaleConditionalUpdates), e);
        }
    }
}
}

/// <summary> ///
Base class for submission entries stored in pending queue. /// </summary> /// <typeparam name="TLogEntry">The type of entry for this submission</typeparam> public class SubmissionEntry<TLogEntry> { /// <summary> The log entry that is submitted. </summary> public TLogEntry Entry; /// <summary> A timestamp for this submission. </summary> public DateTime SubmissionTime; /// <summary> For conditional updates, a promise that resolves once it is known whether the update was successful or not.</summary> public TaskCompletionSource<bool> ResultPromise; /// <summary> For conditional updates, the log position at which this update is supposed to be applied. </summary> public int ConditionalPosition; } }
using System; using System.Data.Common; using System.Data.SqlClient; using System.IO; using System.Linq; using System.Net.Http; using System.Reflection; using Dazinator.Extensions.FileProviders.GlobPatternFilter; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc.ApplicationModels; using Microsoft.AspNetCore.Mvc.Razor.Compilation; using Microsoft.AspNetCore.Server.Kestrel.Core; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Serilog; using Smidge; using Smidge.Cache; using Smidge.FileProcessors; using Smidge.InMemory; using Smidge.Nuglify; using Umbraco.Cms.Core; using Umbraco.Cms.Core.Cache; using Umbraco.Cms.Core.Composing; using Umbraco.Cms.Core.Configuration.Models; using Umbraco.Cms.Core.DependencyInjection; using Umbraco.Cms.Core.Diagnostics; using Umbraco.Cms.Core.Hosting; using Umbraco.Cms.Core.Logging; using Umbraco.Cms.Core.Macros; using Umbraco.Cms.Core.Net; using Umbraco.Cms.Core.Notifications; using Umbraco.Cms.Core.Security; using Umbraco.Cms.Core.Services; using Umbraco.Cms.Core.Telemetry; using Umbraco.Cms.Core.Templates; using Umbraco.Cms.Core.Web; using Umbraco.Cms.Core.WebAssets; using Umbraco.Cms.Infrastructure.DependencyInjection; using Umbraco.Cms.Infrastructure.HostedServices; using Umbraco.Cms.Infrastructure.HostedServices.ServerRegistration; using Umbraco.Cms.Infrastructure.Migrations.Install; using Umbraco.Cms.Infrastructure.Persistence; using Umbraco.Cms.Infrastructure.Persistence.SqlSyntax; using Umbraco.Cms.Web.Common; using Umbraco.Cms.Web.Common.ApplicationModels; using Umbraco.Cms.Web.Common.AspNetCore; using Umbraco.Cms.Web.Common.Controllers; using Umbraco.Cms.Web.Common.DependencyInjection; using Umbraco.Cms.Web.Common.Localization; using Umbraco.Cms.Web.Common.Macros; 
using Umbraco.Cms.Web.Common.Middleware;
using Umbraco.Cms.Web.Common.ModelBinders;
using Umbraco.Cms.Web.Common.Mvc;
using Umbraco.Cms.Web.Common.Profiler;
using Umbraco.Cms.Web.Common.RuntimeMinification;
using Umbraco.Cms.Web.Common.Security;
using Umbraco.Cms.Web.Common.Templates;
using Umbraco.Cms.Web.Common.UmbracoContext;
using IHostingEnvironment = Umbraco.Cms.Core.Hosting.IHostingEnvironment;

namespace Umbraco.Extensions
{
    // TODO: We could add parameters to configure each of these for flexibility

    /// <summary>
    /// Extension methods for <see cref="IUmbracoBuilder"/> for the common Umbraco functionality
    /// </summary>
    public static partial class UmbracoBuilderExtensions
    {
        /// <summary>
        /// Creates an <see cref="IUmbracoBuilder"/> and registers basic Umbraco services
        /// </summary>
        /// <param name="services">The application's service collection to register into.</param>
        /// <param name="webHostEnvironment">The hosting environment; used to build a temporary Umbraco hosting environment.</param>
        /// <param name="config">The application configuration; must not be null.</param>
        /// <returns>A new <see cref="UmbracoBuilder"/> wrapping the given services and configuration.</returns>
        public static IUmbracoBuilder AddUmbraco(
            this IServiceCollection services,
            IWebHostEnvironment webHostEnvironment,
            IConfiguration config)
        {
            if (services is null)
            {
                throw new ArgumentNullException(nameof(services));
            }

            if (config is null)
            {
                throw new ArgumentNullException(nameof(config));
            }

            // A throwaway hosting environment is needed this early because logging and
            // the TypeLoader are created before the real one is registered (see
            // GetTemporaryHostingEnvironment below).
            IHostingEnvironment tempHostingEnvironment = GetTemporaryHostingEnvironment(webHostEnvironment, config);

            var loggingDir = tempHostingEnvironment.MapPathContentRoot(Constants.SystemDirectories.LogFiles);
            var loggingConfig = new LoggingConfiguration(loggingDir);

            services.AddLogger(tempHostingEnvironment, loggingConfig, config);

            // The DataDirectory is used to resolve database file paths (directly supported by SQL CE and manually replaced for LocalDB)
            AppDomain.CurrentDomain.SetData("DataDirectory", tempHostingEnvironment?.MapPathContentRoot(Constants.SystemDirectories.Data));

            // Manually create and register the HttpContextAccessor. In theory this should not be registered
            // again by the user but if that is the case it's not the end of the world since HttpContextAccessor
            // is just based on AsyncLocal, see https://github.com/dotnet/aspnetcore/blob/main/src/Http/Http/src/HttpContextAccessor.cs
            IHttpContextAccessor httpContextAccessor = new HttpContextAccessor();
            services.AddSingleton(httpContextAccessor);

            var requestCache = new HttpContextRequestAppCache(httpContextAccessor);
            var appCaches = AppCaches.Create(requestCache);

            services.ConfigureOptions<ConfigureKestrelServerOptions>();
            services.ConfigureOptions<ConfigureFormOptions>();

            IProfiler profiler = GetWebProfiler(config);

            // Bridge the already-configured Serilog static logger into MEL.
            ILoggerFactory loggerFactory = LoggerFactory.Create(cfg => cfg.AddSerilog(Log.Logger, false));

            TypeLoader typeLoader = services.AddTypeLoader(
                Assembly.GetEntryAssembly(),
                tempHostingEnvironment,
                loggerFactory,
                appCaches,
                config,
                profiler);

            return new UmbracoBuilder(services, config, typeLoader, loggerFactory, profiler, appCaches, tempHostingEnvironment);
        }

        /// <summary>
        /// Adds core Umbraco services
        /// </summary>
        /// <remarks>
        /// This will not add any composers/components
        /// </remarks>
        public static IUmbracoBuilder AddUmbracoCore(this IUmbracoBuilder builder)
        {
            if (builder is null)
            {
                throw new ArgumentNullException(nameof(builder));
            }

            // Add ASP.NET specific services
            builder.Services.AddUnique<IBackOfficeInfo, AspNetCoreBackOfficeInfo>();
            builder.Services.AddUnique<IHostingEnvironment, AspNetCoreHostingEnvironment>();

            // The Umbraco runtime itself runs as a hosted service.
            builder.Services.AddHostedService(factory => factory.GetRequiredService<IRuntime>());

            // Add supported databases
            builder.AddUmbracoSqlServerSupport();
            builder.AddUmbracoSqlCeSupport();
            builder.Services.AddSingleton<DatabaseSchemaCreatorFactory>();

            // Must be added here because DbProviderFactories is netstandard 2.1 so cannot exist in Infra for now
            builder.Services.AddSingleton<IDbProviderFactoryCreator>(factory => new DbProviderFactoryCreator(
                DbProviderFactories.GetFactory,
                factory.GetServices<ISqlSyntaxProvider>(),
                factory.GetServices<IBulkSqlInsertProvider>(),
                factory.GetServices<IDatabaseCreator>(),
                factory.GetServices<IProviderSpecificMapperFactory>()
            ));

            builder.AddCoreInitialServices();

            // aspnet app lifetime mgmt
            builder.Services.AddUnique<IUmbracoApplicationLifetime, AspNetCoreUmbracoApplicationLifetime>();
            builder.Services.AddUnique<IApplicationShutdownRegistry, AspNetCoreApplicationShutdownRegistry>();

            return builder;
        }

        /// <summary>
        /// Add Umbraco hosted services
        /// </summary>
        public static IUmbracoBuilder AddHostedServices(this IUmbracoBuilder builder)
        {
            // Background/recurring tasks (health checks, cleanup, scheduling, server registration).
            builder.Services.AddHostedService<QueuedHostedService>();
            builder.Services.AddHostedService<HealthCheckNotifier>();
            builder.Services.AddHostedService<KeepAlive>();
            builder.Services.AddHostedService<LogScrubber>();
            builder.Services.AddHostedService<ContentVersionCleanup>();
            builder.Services.AddHostedService<ScheduledPublishing>();
            builder.Services.AddHostedService<TempFileCleanup>();
            builder.Services.AddHostedService<InstructionProcessTask>();
            builder.Services.AddHostedService<TouchServerTask>();
            builder.Services.AddHostedService(provider => new ReportSiteTask(
                provider.GetRequiredService<ILogger<ReportSiteTask>>(),
                provider.GetRequiredService<ITelemetryService>()));
            return builder;
        }

        // Registers the default HttpClient plus a named client that accepts any server
        // certificate (used where certificate errors must be ignored by design).
        private static IUmbracoBuilder AddHttpClients(this IUmbracoBuilder builder)
        {
            builder.Services.AddHttpClient();
            builder.Services.AddHttpClient(Constants.HttpClients.IgnoreCertificateErrors)
                .ConfigurePrimaryHttpMessageHandler(() => new HttpClientHandler
                {
                    ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator
                });
            return builder;
        }

        /// <summary>
        /// Adds the Umbraco request profiler
        /// </summary>
        public static IUmbracoBuilder AddUmbracoProfiler(this IUmbracoBuilder builder)
        {
            builder.Services.AddSingleton<WebProfilerHtml>();

            builder.Services.AddMiniProfiler(options =>
            {
                // WebProfiler determine and start profiling. We should not use the MiniProfilerMiddleware to also profile
                options.ShouldProfile = request => false;

                // this is a default path and by default it performs a 'contains' check which will match our content controller
                // (and probably other requests) and ignore them.
                options.IgnoredPaths.Remove("/content/");
            });

            builder.AddNotificationHandler<UmbracoApplicationStartingNotification, InitializeWebProfiling>();
            return builder;
        }

        /// <summary>
        /// Adds MVC with views and Razor runtime compilation, then lets the caller
        /// customize the resulting <see cref="IMvcBuilder"/>.
        /// </summary>
        /// <param name="builder">The Umbraco builder.</param>
        /// <param name="mvcBuilding">Optional callback invoked with the configured MVC builder.</param>
        public static IUmbracoBuilder AddMvcAndRazor(this IUmbracoBuilder builder, Action<IMvcBuilder> mvcBuilding = null)
        {
            // TODO: We need to figure out if we can work around this because calling AddControllersWithViews modifies the global app and order is very important
            // this will directly affect developers who need to call that themselves.

            // We need to have runtime compilation of views when using umbraco. We could consider having only this when a specific config is set.
            // But as far as I can see, there are still precompiled views, even when this is activated, so maybe it is okay.
            IMvcBuilder mvcBuilder = builder.Services
                .AddControllersWithViews();

            FixForDotnet6Preview1(builder.Services);

            mvcBuilder.AddRazorRuntimeCompilation();

            mvcBuilding?.Invoke(mvcBuilder);

            return builder;
        }

        /// <summary>
        /// This fixes an issue for .NET6 Preview1, that in AddRazorRuntimeCompilation cannot remove the existing IViewCompilerProvider.
        /// </summary>
        /// <remarks>
        /// When running .NET6 Preview1 there is an issue with looks to be fixed when running ASP.NET Core 6.
        /// This issue is because the default implementation of IViewCompilerProvider has changed, so the
        /// AddRazorRuntimeCompilation extension can't remove the default and replace with the runtimeviewcompiler.
        ///
        /// This method basically does the same as the ASP.NET Core 6 version of AddRazorRuntimeCompilation
        /// https://github.com/dotnet/aspnetcore/blob/f7dc5e24af7f9692a1db66741954b90b42d84c3a/src/Mvc/Mvc.Razor.RuntimeCompilation/src/DependencyInjection/RazorRuntimeCompilationMvcCoreBuilderExtensions.cs#L71-L80
        ///
        /// While running .NET5 this does nothing as the ImplementationType has another FullName, and this is handled by the .NET5 version of AddRazorRuntimeCompilation
        /// </remarks>
        private static void FixForDotnet6Preview1(IServiceCollection services)
        {
            // Match the registration by concrete implementation type name so only the
            // framework's default view compiler is removed.
            var compilerProvider = services.FirstOrDefault(f =>
                f.ServiceType == typeof(IViewCompilerProvider) &&
                f.ImplementationType?.Assembly == typeof(IViewCompilerProvider).Assembly &&
                f.ImplementationType.FullName == "Microsoft.AspNetCore.Mvc.Razor.Compilation.DefaultViewCompiler");

            if (compilerProvider != null)
            {
                services.Remove(compilerProvider);
            }
        }

        /// <summary>
        /// Add runtime minifier support for Umbraco
        /// </summary>
        public static IUmbracoBuilder AddRuntimeMinifier(this IUmbracoBuilder builder)
        {
            // Add custom ISmidgeFileProvider to include the additional App_Plugins location
            // to load assets from.
            builder.Services.AddSingleton<ISmidgeFileProvider>(f =>
            {
                IWebHostEnvironment hostEnv = f.GetRequiredService<IWebHostEnvironment>();

                return new SmidgeFileProvider(
                    hostEnv.WebRootFileProvider,
                    new GlobPatternFilterFileProvider(
                        hostEnv.ContentRootFileProvider,
                        // only include js or css files within App_Plugins
                        new[] { "/App_Plugins/**/*.js", "/App_Plugins/**/*.css" }));
            });

            builder.Services.AddUnique<ICacheBuster, UmbracoSmidgeConfigCacheBuster>();
            builder.Services.AddSmidge(builder.Config.GetSection(Constants.Configuration.ConfigRuntimeMinification));

            // Replace the Smidge request helper, in order to discourage the use of brotli since it's super slow
            builder.Services.AddUnique<IRequestHelper, SmidgeRequestHelper>();
            builder.Services.AddSmidgeNuglify();
            builder.Services.AddSmidgeInMemory(false); // it will be enabled based on config/cachebuster
            builder.Services.AddUnique<IRuntimeMinifier, SmidgeRuntimeMinifier>();
            builder.Services.AddSingleton<SmidgeHelperAccessor>();
            builder.Services.AddTransient<IPreProcessor, SmidgeNuglifyJs>();
            builder.Services.ConfigureOptions<SmidgeOptionsSetup>();

            return builder;
        }

        /// <summary>
        /// Adds all web based services required for Umbraco to run
        /// </summary>
        public static IUmbracoBuilder AddWebComponents(this IUmbracoBuilder builder)
        {
            // Add service session
            // This can be overwritten by the user by adding their own call to AddSession
            // since the last call of AddSession take precedence
            builder.Services.AddSession(options =>
            {
                options.Cookie.Name = "UMB_SESSION";
                options.Cookie.HttpOnly = true;
            });

            builder.Services.ConfigureOptions<UmbracoMvcConfigureOptions>();
            builder.Services.ConfigureOptions<UmbracoRequestLocalizationOptions>();

            // Application model providers shape controller conventions for Umbraco's
            // API, back office, and virtual-page controllers.
            builder.Services.TryAddEnumerable(ServiceDescriptor.Transient<IApplicationModelProvider, UmbracoApiBehaviorApplicationModelProvider>());
            builder.Services.TryAddEnumerable(ServiceDescriptor.Transient<IApplicationModelProvider, BackOfficeApplicationModelProvider>());
            builder.Services.TryAddEnumerable(ServiceDescriptor.Transient<IApplicationModelProvider, VirtualPageApplicationModelProvider>());

            builder.AddUmbracoImageSharp();

            // AspNetCore specific services
            builder.Services.AddUnique<IRequestAccessor, AspNetCoreRequestAccessor>();
            builder.AddNotificationHandler<UmbracoRequestBeginNotification, AspNetCoreRequestAccessor>();

            // Password hasher
            builder.Services.AddUnique<IPasswordHasher, AspNetCorePasswordHasher>();

            builder.Services.AddUnique<Cms.Core.Web.ICookieManager, AspNetCoreCookieManager>();
            builder.Services.AddTransient<IIpResolver, AspNetCoreIpResolver>();
            builder.Services.AddUnique<IUserAgentProvider, AspNetCoreUserAgentProvider>();

            builder.Services.AddMultipleUnique<ISessionIdResolver, ISessionManager, AspNetCoreSessionManager>();

            builder.Services.AddUnique<IMarchal, AspNetCoreMarchal>();

            builder.Services.AddUnique<IProfilerHtml, WebProfilerHtml>();

            builder.Services.AddUnique<IMacroRenderer, MacroRenderer>();
            builder.Services.AddSingleton<PartialViewMacroEngine>();

            // register the umbraco context factory
            builder.Services.AddUnique<IUmbracoContextFactory, UmbracoContextFactory>();
            builder.Services.AddUnique<IBackOfficeSecurityAccessor, BackOfficeSecurityAccessor>();

            // NOTE(review): this scans types eagerly; presumably acceptable at startup time.
            var umbracoApiControllerTypes = builder.TypeLoader.GetUmbracoApiControllers().ToList();
            builder.WithCollectionBuilder<UmbracoApiControllerTypeCollectionBuilder>()
                .Add(umbracoApiControllerTypes);

            builder.Services.AddSingleton<UmbracoRequestLoggingMiddleware>();
            builder.Services.AddSingleton<PreviewAuthenticationMiddleware>();
            builder.Services.AddSingleton<UmbracoRequestMiddleware>();
            builder.Services.AddSingleton<BootFailedMiddleware>();

            builder.Services.AddSingleton<UmbracoJsonModelBinder>();

            builder.Services.AddUnique<ITemplateRenderer, TemplateRenderer>();
            builder.Services.AddUnique<IPublicAccessChecker, PublicAccessChecker>();

            builder.Services.AddSingleton<ContentModelBinder>();

            builder.Services.AddSingleton<IUmbracoHelperAccessor, UmbracoHelperAccessor>();
            builder.Services.AddSingleton<IScopedServiceProvider, ScopedServiceProvider>();
            builder.Services.AddScoped<UmbracoHelper>();
            builder.Services.AddScoped<IBackOfficeSecurity, BackOfficeSecurity>();

            builder.AddHttpClients();

            return builder;
        }

        // TODO: Does this need to exist and/or be public?
        public static IUmbracoBuilder AddWebServer(this IUmbracoBuilder builder)
        {
            // TODO: We need to figure out why this is needed and fix those endpoints to not need them, we don't want to change global things
            // If using Kestrel: https://stackoverflow.com/a/55196057
            builder.Services.Configure<KestrelServerOptions>(options =>
            {
                options.AllowSynchronousIO = true;
            });
            builder.Services.Configure<IISServerOptions>(options =>
            {
                options.AllowSynchronousIO = true;
            });

            return builder;
        }

        /// <summary>
        /// Adds SqlCe support for Umbraco
        /// </summary>
        /// <remarks>
        /// The SqlCE assemblies are loaded by reflection from the bin folder so that the
        /// core has no hard dependency on them; if they are absent this is a no-op.
        /// </remarks>
        private static IUmbracoBuilder AddUmbracoSqlCeSupport(this IUmbracoBuilder builder)
        {
            try
            {
                var binFolder = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
                if (binFolder != null)
                {
                    var dllPath = Path.Combine(binFolder, "Umbraco.Persistence.SqlCe.dll");
                    var umbSqlCeAssembly = Assembly.LoadFrom(dllPath);

                    Type sqlCeSyntaxProviderType = umbSqlCeAssembly.GetType("Umbraco.Cms.Persistence.SqlCe.SqlCeSyntaxProvider");
                    Type sqlCeBulkSqlInsertProviderType = umbSqlCeAssembly.GetType("Umbraco.Cms.Persistence.SqlCe.SqlCeBulkSqlInsertProvider");
                    Type sqlCeDatabaseCreatorType = umbSqlCeAssembly.GetType("Umbraco.Cms.Persistence.SqlCe.SqlCeDatabaseCreator");
                    Type sqlCeSpecificMapperFactory = umbSqlCeAssembly.GetType("Umbraco.Cms.Persistence.SqlCe.SqlCeSpecificMapperFactory");

                    // Only register when every expected type was resolved.
                    if (!(sqlCeSyntaxProviderType is null || sqlCeBulkSqlInsertProviderType is null || sqlCeDatabaseCreatorType is null || sqlCeSpecificMapperFactory is null))
                    {
                        builder.Services.AddSingleton(typeof(ISqlSyntaxProvider), sqlCeSyntaxProviderType);
                        builder.Services.AddSingleton(typeof(IBulkSqlInsertProvider), sqlCeBulkSqlInsertProviderType);
                        builder.Services.AddSingleton(typeof(IDatabaseCreator), sqlCeDatabaseCreatorType);
                        builder.Services.AddSingleton(typeof(IProviderSpecificMapperFactory), sqlCeSpecificMapperFactory);
                    }

                    var sqlCeAssembly = Assembly.LoadFrom(Path.Combine(binFolder, "System.Data.SqlServerCe.dll"));

                    var sqlCe = sqlCeAssembly.GetType("System.Data.SqlServerCe.SqlCeProviderFactory");
                    if (!(sqlCe is null))
                    {
                        DbProviderFactories.RegisterFactory(Cms.Core.Constants.DbProviderNames.SqlCe, sqlCe);
                    }
                }
            }
            catch
            {
                // Ignore if SqlCE is not available
            }

            return builder;
        }

        /// <summary>
        /// Adds Sql Server support for Umbraco
        /// </summary>
        private static IUmbracoBuilder AddUmbracoSqlServerSupport(this IUmbracoBuilder builder)
        {
            DbProviderFactories.RegisterFactory(Cms.Core.Constants.DbProviderNames.SqlServer, SqlClientFactory.Instance);

            builder.Services.AddSingleton<ISqlSyntaxProvider, SqlServerSyntaxProvider>();
            builder.Services.AddSingleton<IBulkSqlInsertProvider, SqlServerBulkSqlInsertProvider>();
            builder.Services.AddSingleton<IDatabaseCreator, SqlServerDatabaseCreator>();

            return builder;
        }

        // Returns a real WebProfiler only when hosting debug mode is enabled;
        // otherwise a no-op implementation (our IProfiler abstraction always expects an instance).
        private static IProfiler GetWebProfiler(IConfiguration config)
        {
            var isDebug = config.GetValue<bool>($"{Cms.Core.Constants.Configuration.ConfigHosting}:Debug");

            // create and start asap to profile boot
            if (!isDebug)
            {
                // should let it be null, that's how MiniProfiler is meant to work,
                // but our own IProfiler expects an instance so let's get one
                return new NoopProfiler();
            }

            var webProfiler = new WebProfiler();
            webProfiler.StartBoot();

            return webProfiler;
        }

        /// <summary>
        /// HACK: returns an AspNetCoreHostingEnvironment that doesn't monitor changes to configuration.<br/>
        /// We require this to create a TypeLoader during ConfigureServices.<br/>
        /// Instances returned from this method shouldn't be registered in the service collection.
        /// </summary>
        private static IHostingEnvironment GetTemporaryHostingEnvironment(IWebHostEnvironment webHostEnvironment, IConfiguration config)
        {
            var hostingSettings = config.GetSection(Cms.Core.Constants.Configuration.ConfigHosting).Get<HostingSettings>() ?? new HostingSettings();
            var webRoutingSettings = config.GetSection(Cms.Core.Constants.Configuration.ConfigWebRouting).Get<WebRoutingSettings>() ?? new WebRoutingSettings();

            // Wrap the one-shot settings in static options monitors (no change tracking).
            var wrappedHostingSettings = new OptionsMonitorAdapter<HostingSettings>(hostingSettings);
            var wrappedWebRoutingSettings = new OptionsMonitorAdapter<WebRoutingSettings>(webRoutingSettings);

            // This is needed in order to create a unique Application Id
            var serviceCollection = new ServiceCollection();
            serviceCollection.AddDataProtection();
            serviceCollection.AddSingleton<IHostEnvironment>(s => webHostEnvironment);
            var serviceProvider = serviceCollection.BuildServiceProvider();

            return new AspNetCoreHostingEnvironment(
                serviceProvider,
                wrappedHostingSettings,
                wrappedWebRoutingSettings,
                webHostEnvironment);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void MultiplyAddSubtractSingle() { var test = new AlternatingTernaryOpTest__MultiplyAddSubtractSingle(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); } // Validates passing a static member works test.RunClsVarScenario(); if (Avx.IsSupported) { // Validates passing a static member works, using pinning and Load test.RunClsVarScenario_Load(); } // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates passing a local works, using Load 
test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); } // Validates passing the field of a local class works test.RunClassLclFldScenario(); if (Avx.IsSupported) { // Validates passing the field of a local class works, using pinning and Load test.RunClassLclFldScenario_Load(); } // Validates passing an instance member of a class works test.RunClassFldScenario(); if (Avx.IsSupported) { // Validates passing an instance member of a class works, using pinning and Load test.RunClassFldScenario_Load(); } // Validates passing the field of a local struct works test.RunStructLclFldScenario(); if (Avx.IsSupported) { // Validates passing the field of a local struct works, using pinning and Load test.RunStructLclFldScenario_Load(); } // Validates passing an instance member of a struct works test.RunStructFldScenario(); if (Avx.IsSupported) { // Validates passing an instance member of a struct works, using pinning and Load test.RunStructFldScenario_Load(); } } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class AlternatingTernaryOpTest__MultiplyAddSubtractSingle { private struct DataTable { private byte[] inArray1; private byte[] inArray2; private byte[] inArray3; private byte[] outArray; private GCHandle inHandle1; private GCHandle inHandle2; private GCHandle inHandle3; private GCHandle outHandle; private ulong alignment; public DataTable(Single[] inArray1, Single[] inArray2, Single[] inArray3, Single[] outArray, int alignment) { int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>(); int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>(); int sizeOfinArray3 = inArray3.Length * Unsafe.SizeOf<Single>(); int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>(); if ((alignment != 32 && alignment != 16) || (alignment * 2) < 
sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfinArray3 || (alignment * 2) < sizeOfoutArray) { throw new ArgumentException("Invalid value of alignment"); } this.inArray1 = new byte[alignment * 2]; this.inArray2 = new byte[alignment * 2]; this.inArray3 = new byte[alignment * 2]; this.outArray = new byte[alignment * 2]; this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned); this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned); this.inHandle3 = GCHandle.Alloc(this.inArray3, GCHandleType.Pinned); this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned); this.alignment = (ulong)alignment; Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1); Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2); Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray3Ptr), ref Unsafe.As<Single, byte>(ref inArray3[0]), (uint)sizeOfinArray3); } public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment); public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment); public void* inArray3Ptr => Align((byte*)(inHandle3.AddrOfPinnedObject().ToPointer()), alignment); public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment); public void Dispose() { inHandle1.Free(); inHandle2.Free(); inHandle3.Free(); outHandle.Free(); } private static unsafe void* Align(byte* buffer, ulong expectedAlignment) { return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1)); } } private struct TestStruct { public Vector256<Single> _fld1; public Vector256<Single> _fld2; public Vector256<Single> _fld3; public static TestStruct Create() { var testStruct = new TestStruct(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = 
TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); for (var i = 0; i < Op3ElementCount; i++) { _data3[i] = TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref testStruct._fld3), ref Unsafe.As<Single, byte>(ref _data3[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); return testStruct; } public void RunStructFldScenario(AlternatingTernaryOpTest__MultiplyAddSubtractSingle testClass) { var result = Fma.MultiplyAddSubtract(_fld1, _fld2, _fld3); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, _fld3, testClass._dataTable.outArrayPtr); } public void RunStructFldScenario_Load(AlternatingTernaryOpTest__MultiplyAddSubtractSingle testClass) { fixed (Vector256<Single>* pFld1 = &_fld1) fixed (Vector256<Single>* pFld2 = &_fld2) fixed (Vector256<Single>* pFld3 = &_fld3) { var result = Fma.MultiplyAddSubtract( Avx.LoadVector256((Single*)(pFld1)), Avx.LoadVector256((Single*)(pFld2)), Avx.LoadVector256((Single*)(pFld3)) ); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, _fld3, testClass._dataTable.outArrayPtr); } } } private static readonly int LargestVectorSize = 32; private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single); private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single); private static readonly int Op3ElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single); private static readonly int RetElementCount = 
Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single); private static Single[] _data1 = new Single[Op1ElementCount]; private static Single[] _data2 = new Single[Op2ElementCount]; private static Single[] _data3 = new Single[Op3ElementCount]; private static Vector256<Single> _clsVar1; private static Vector256<Single> _clsVar2; private static Vector256<Single> _clsVar3; private Vector256<Single> _fld1; private Vector256<Single> _fld2; private Vector256<Single> _fld3; private DataTable _dataTable; static AlternatingTernaryOpTest__MultiplyAddSubtractSingle() { for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); for (var i = 0; i < Op3ElementCount; i++) { _data3[i] = TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar3), ref Unsafe.As<Single, byte>(ref _data3[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); } public AlternatingTernaryOpTest__MultiplyAddSubtractSingle() { Succeeded = true; for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); for (var i = 0; i < Op3ElementCount; i++) { _data3[i] = 
TestLibrary.Generator.GetSingle(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld3), ref Unsafe.As<Single, byte>(ref _data3[0]), (uint)Unsafe.SizeOf<Vector256<Single>>()); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); } for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); } for (var i = 0; i < Op3ElementCount; i++) { _data3[i] = TestLibrary.Generator.GetSingle(); } _dataTable = new DataTable(_data1, _data2, _data3, new Single[RetElementCount], LargestVectorSize); } public bool IsSupported => Fma.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead)); var result = Fma.MultiplyAddSubtract( Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr), Unsafe.Read<Vector256<Single>>(_dataTable.inArray3Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load)); var result = Fma.MultiplyAddSubtract( Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr)), Avx.LoadVector256((Single*)(_dataTable.inArray3Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned)); var result = Fma.MultiplyAddSubtract( Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr)), Avx.LoadAlignedVector256((Single*)(_dataTable.inArray3Ptr)) ); 
Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead)); var result = typeof(Fma).GetMethod(nameof(Fma.MultiplyAddSubtract), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>), typeof(Vector256<Single>) }) .Invoke(null, new object[] { Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr), Unsafe.Read<Vector256<Single>>(_dataTable.inArray3Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load)); var result = typeof(Fma).GetMethod(nameof(Fma.MultiplyAddSubtract), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>), typeof(Vector256<Single>) }) .Invoke(null, new object[] { Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr)), Avx.LoadVector256((Single*)(_dataTable.inArray3Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned)); var result = typeof(Fma).GetMethod(nameof(Fma.MultiplyAddSubtract), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>), typeof(Vector256<Single>) }) .Invoke(null, new object[] { Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr)), 
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray3Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario)); var result = Fma.MultiplyAddSubtract( _clsVar1, _clsVar2, _clsVar3 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _clsVar3, _dataTable.outArrayPtr); } public void RunClsVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load)); fixed (Vector256<Single>* pClsVar1 = &_clsVar1) fixed (Vector256<Single>* pClsVar2 = &_clsVar2) fixed (Vector256<Single>* pClsVar3 = &_clsVar3) { var result = Fma.MultiplyAddSubtract( Avx.LoadVector256((Single*)(pClsVar1)), Avx.LoadVector256((Single*)(pClsVar2)), Avx.LoadVector256((Single*)(pClsVar3)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _clsVar3, _dataTable.outArrayPtr); } } public void RunLclVarScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead)); var op1 = Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr); var op2 = Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr); var op3 = Unsafe.Read<Vector256<Single>>(_dataTable.inArray3Ptr); var result = Fma.MultiplyAddSubtract(op1, op2, op3); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, op3, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load)); var op1 = Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr)); var op2 = Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr)); var op3 = Avx.LoadVector256((Single*)(_dataTable.inArray3Ptr)); var result = Fma.MultiplyAddSubtract(op1, op2, op3); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, op3, 
_dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned)); var op1 = Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr)); var op2 = Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr)); var op3 = Avx.LoadAlignedVector256((Single*)(_dataTable.inArray3Ptr)); var result = Fma.MultiplyAddSubtract(op1, op2, op3); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, op3, _dataTable.outArrayPtr); } public void RunClassLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario)); var test = new AlternatingTernaryOpTest__MultiplyAddSubtractSingle(); var result = Fma.MultiplyAddSubtract(test._fld1, test._fld2, test._fld3); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, test._fld3, _dataTable.outArrayPtr); } public void RunClassLclFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load)); var test = new AlternatingTernaryOpTest__MultiplyAddSubtractSingle(); fixed (Vector256<Single>* pFld1 = &test._fld1) fixed (Vector256<Single>* pFld2 = &test._fld2) fixed (Vector256<Single>* pFld3 = &test._fld3) { var result = Fma.MultiplyAddSubtract( Avx.LoadVector256((Single*)(pFld1)), Avx.LoadVector256((Single*)(pFld2)), Avx.LoadVector256((Single*)(pFld3)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, test._fld3, _dataTable.outArrayPtr); } } public void RunClassFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario)); var result = Fma.MultiplyAddSubtract(_fld1, _fld2, _fld3); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _fld3, _dataTable.outArrayPtr); } public void RunClassFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load)); fixed (Vector256<Single>* pFld1 = &_fld1) fixed (Vector256<Single>* pFld2 = &_fld2) fixed 
(Vector256<Single>* pFld3 = &_fld3) { var result = Fma.MultiplyAddSubtract( Avx.LoadVector256((Single*)(pFld1)), Avx.LoadVector256((Single*)(pFld2)), Avx.LoadVector256((Single*)(pFld3)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _fld3, _dataTable.outArrayPtr); } } public void RunStructLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario)); var test = TestStruct.Create(); var result = Fma.MultiplyAddSubtract(test._fld1, test._fld2, test._fld3); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, test._fld3, _dataTable.outArrayPtr); } public void RunStructLclFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load)); var test = TestStruct.Create(); var result = Fma.MultiplyAddSubtract( Avx.LoadVector256((Single*)(&test._fld1)), Avx.LoadVector256((Single*)(&test._fld2)), Avx.LoadVector256((Single*)(&test._fld3)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, test._fld3, _dataTable.outArrayPtr); } public void RunStructFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario)); var test = TestStruct.Create(); test.RunStructFldScenario(this); } public void RunStructFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load)); var test = TestStruct.Create(); test.RunStructFldScenario_Load(this); } public void RunUnsupportedScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario)); bool succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { succeeded = true; } if (!succeeded) { Succeeded = false; } } private void ValidateResult(Vector256<Single> op1, Vector256<Single> op2, Vector256<Single> op3, void* result, [CallerMemberName] string method = "") { Single[] inArray1 = new Single[Op1ElementCount]; Single[] inArray2 = new Single[Op2ElementCount]; Single[] 
inArray3 = new Single[Op3ElementCount]; Single[] outArray = new Single[RetElementCount]; Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1); Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2); Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray3[0]), op3); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Single>>()); ValidateResult(inArray1, inArray2, inArray3, outArray, method); } private void ValidateResult(void* op1, void* op2, void* op3, void* result, [CallerMemberName] string method = "") { Single[] inArray1 = new Single[Op1ElementCount]; Single[] inArray2 = new Single[Op2ElementCount]; Single[] inArray3 = new Single[Op3ElementCount]; Single[] outArray = new Single[RetElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector256<Single>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector256<Single>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray3[0]), ref Unsafe.AsRef<byte>(op3), (uint)Unsafe.SizeOf<Vector256<Single>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Single>>()); ValidateResult(inArray1, inArray2, inArray3, outArray, method); } private void ValidateResult(Single[] firstOp, Single[] secondOp, Single[] thirdOp, Single[] result, [CallerMemberName] string method = "") { bool succeeded = true; for (var i = 0; i < RetElementCount; i += 2) { if (BitConverter.SingleToInt32Bits(MathF.Round((firstOp[i] * secondOp[i]) - thirdOp[i], 3)) != BitConverter.SingleToInt32Bits(MathF.Round(result[i], 3))) { succeeded = false; break; } if (BitConverter.SingleToInt32Bits(MathF.Round((firstOp[i + 1] * secondOp[i + 1]) + thirdOp[i + 1], 3)) != 
BitConverter.SingleToInt32Bits(MathF.Round(result[i + 1], 3))) { succeeded = false; break; } } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"{nameof(Fma)}.{nameof(Fma.MultiplyAddSubtract)}<Single>(Vector256<Single>, Vector256<Single>, Vector256<Single>): {method} failed:"); TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})"); TestLibrary.TestFramework.LogInformation($"secondOp: ({string.Join(", ", secondOp)})"); TestLibrary.TestFramework.LogInformation($" thirdOp: ({string.Join(", ", thirdOp)})"); TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})"); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Runtime.Serialization
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Text;
    using System.Xml;
    using System.Xml.Serialization;
    using System.Security;
    using DataContractDictionary = System.Collections.Generic.Dictionary<System.Xml.XmlQualifiedName, DataContract>;

#if USE_REFEMIT || uapaot
    public class XmlObjectSerializerReadContext : XmlObjectSerializerContext
#else
    internal class XmlObjectSerializerReadContext : XmlObjectSerializerContext
#endif
    {
        internal Attributes attributes;
        private HybridObjectCache _deserializedObjects;
        private XmlSerializableReader _xmlSerializableReader;
        private XmlDocument _xmlDocument;
        // NOTE(review): not referenced in this portion of the file — presumably used further down; verify before removing.
        private Attributes _attributesInXmlData;
        private object _getOnlyCollectionValue;
        private bool _isGetOnlyCollection;

        // Lazily-created id -> object map used to resolve z:Ref references.
        private HybridObjectCache DeserializedObjects
        {
            get
            {
                if (_deserializedObjects == null)
                    _deserializedObjects = new HybridObjectCache();
                return _deserializedObjects;
            }
        }

        // Lazily-created scratch XmlDocument.
        private XmlDocument Document => _xmlDocument ?? (_xmlDocument = new XmlDocument());

        internal override bool IsGetOnlyCollection
        {
            get { return _isGetOnlyCollection; }
            set { _isGetOnlyCollection = value; }
        }

#if USE_REFEMIT
        public object GetCollectionMember()
#else
        internal object GetCollectionMember()
#endif
        {
            return _getOnlyCollectionValue;
        }

        // Stashes a get-only collection instance so generated readers can populate it in place.
#if USE_REFEMIT
        public void StoreCollectionMemberInfo(object collectionMember)
#else
        internal void StoreCollectionMemberInfo(object collectionMember)
#endif
        {
            _getOnlyCollectionValue = collectionMember;
            _isGetOnlyCollection = true;
        }

#if USE_REFEMIT
        public static void ThrowNullValueReturnedForGetOnlyCollectionException(Type type)
#else
        internal static void ThrowNullValueReturnedForGetOnlyCollectionException(Type type)
#endif
        {
            throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.NullValueReturnedForGetOnlyCollection, DataContract.GetClrTypeFullName(type))));
        }

#if USE_REFEMIT
        public static void ThrowArrayExceededSizeException(int arraySize, Type type)
#else
        internal static void ThrowArrayExceededSizeException(int arraySize, Type type)
#endif
        {
            throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ArrayExceededSize, arraySize, DataContract.GetClrTypeFullName(type))));
        }

        // Picks the "complex" context when object references or a surrogate provider are in play.
        internal static XmlObjectSerializerReadContext CreateContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver dataContractResolver)
        {
            return (serializer.PreserveObjectReferences || serializer.SerializationSurrogateProvider != null)
                ? new XmlObjectSerializerReadContextComplex(serializer, rootTypeDataContract, dataContractResolver)
                : new XmlObjectSerializerReadContext(serializer, rootTypeDataContract, dataContractResolver);
        }

        internal XmlObjectSerializerReadContext(XmlObjectSerializer serializer, int maxItemsInObjectGraph, StreamingContext streamingContext, bool ignoreExtensionDataObject)
            : base(serializer, maxItemsInObjectGraph, streamingContext, ignoreExtensionDataObject)
        {
        }

        internal XmlObjectSerializerReadContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver dataContractResolver)
            : base(serializer, rootTypeDataContract, dataContractResolver)
        {
            this.attributes = new Attributes();
        }

#if USE_REFEMIT
        public virtual object InternalDeserialize(XmlReaderDelegator xmlReader, int id, RuntimeTypeHandle declaredTypeHandle, string name, string ns)
#else
        internal virtual object InternalDeserialize(XmlReaderDelegator xmlReader, int id, RuntimeTypeHandle declaredTypeHandle, string name, string ns)
#endif
        {
            DataContract dataContract = GetDataContract(id, declaredTypeHandle);
            return InternalDeserialize(xmlReader, name, ns, ref dataContract);
        }

        internal virtual object InternalDeserialize(XmlReaderDelegator xmlReader, Type declaredType, string name, string ns)
        {
            DataContract dataContract = GetDataContract(declaredType);
            return InternalDeserialize(xmlReader, name, ns, ref dataContract);
        }

        internal virtual object InternalDeserialize(XmlReaderDelegator xmlReader, Type declaredType, DataContract dataContract, string name, string ns)
        {
            if (dataContract == null)
                // Fix: the looked-up contract was previously discarded, leaving dataContract
                // null and causing a NullReferenceException downstream (the ref overload
                // dereferences dataContract.UnderlyingType). Assign the result.
                dataContract = GetDataContract(declaredType);
            return InternalDeserialize(xmlReader, name, ns, ref dataContract);
        }

        // Handles z:Ref (resolve an already-deserialized object) and xsi:nil (null) up front.
        // Returns true when retObj is the final value and the element has been consumed.
        protected bool TryHandleNullOrRef(XmlReaderDelegator reader, Type declaredType, string name, string ns, ref object retObj)
        {
            ReadAttributes(reader);
            if (attributes.Ref != Globals.NewObjectId)
            {
                if (_isGetOnlyCollection)
                {
                    // A get-only collection is populated in place; a reference to a
                    // different instance cannot be honored.
                    throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ErrorDeserializing, SR.Format(SR.ErrorTypeInfo, DataContract.GetClrTypeFullName(declaredType)), SR.Format(SR.XmlStartElementExpected, Globals.RefLocalName))));
                }
                else
                {
                    retObj = GetExistingObject(attributes.Ref, declaredType, name, ns);
                    reader.Skip();
                    return true;
                }
            }
            else if (attributes.XsiNil)
            {
                reader.Skip();
                return true;
            }
            return false;
        }

        // Core deserialization: resolves xsi:type / ISerializable factory-type redirection,
        // pushing/popping known-type scopes as the effective contract changes.
        protected object InternalDeserialize(XmlReaderDelegator reader, string name, string ns, ref DataContract dataContract)
        {
            object retObj = null;
            if (TryHandleNullOrRef(reader, dataContract.UnderlyingType, name, ns, ref retObj))
                return retObj;

            bool knownTypesAddedInCurrentScope = false;
            if (dataContract.KnownDataContracts != null)
            {
                scopedKnownTypes.Push(dataContract.KnownDataContracts);
                knownTypesAddedInCurrentScope = true;
            }

            if (attributes.XsiTypeName != null)
            {
                dataContract = ResolveDataContractFromKnownTypes(attributes.XsiTypeName, attributes.XsiTypeNamespace, dataContract);
                if (dataContract == null)
                {
                    if (DataContractResolver == null)
                    {
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(XmlObjectSerializer.TryAddLineInfo(reader, SR.Format(SR.DcTypeNotFoundOnDeserialize, attributes.XsiTypeNamespace, attributes.XsiTypeName, reader.NamespaceURI, reader.LocalName))));
                    }
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(XmlObjectSerializer.TryAddLineInfo(reader, SR.Format(SR.DcTypeNotResolvedOnDeserialize, attributes.XsiTypeNamespace, attributes.XsiTypeName, reader.NamespaceURI, reader.LocalName))));
                }
                knownTypesAddedInCurrentScope = ReplaceScopedKnownTypesTop(dataContract.KnownDataContracts, knownTypesAddedInCurrentScope);
            }

            if (dataContract.IsISerializable && attributes.FactoryTypeName != null)
            {
                DataContract factoryDataContract = ResolveDataContractFromKnownTypes(attributes.FactoryTypeName, attributes.FactoryTypeNamespace, dataContract);
                if (factoryDataContract != null)
                {
                    if (factoryDataContract.IsISerializable)
                    {
                        dataContract = factoryDataContract;
                        knownTypesAddedInCurrentScope = ReplaceScopedKnownTypesTop(dataContract.KnownDataContracts, knownTypesAddedInCurrentScope);
                    }
                    else
                    {
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.FactoryTypeNotISerializable, DataContract.GetClrTypeFullName(factoryDataContract.UnderlyingType), DataContract.GetClrTypeFullName(dataContract.UnderlyingType))));
                    }
                }
            }

            if (knownTypesAddedInCurrentScope)
            {
                object obj = ReadDataContractValue(dataContract, reader);
                scopedKnownTypes.Pop();
                return obj;
            }
            else
            {
                return ReadDataContractValue(dataContract, reader);
            }
        }

        // Replaces the top-of-stack known-types scope (if this frame pushed one) with the
        // contracts of the newly resolved type. Returns whether a scope is now pushed.
        private bool ReplaceScopedKnownTypesTop(DataContractDictionary knownDataContracts, bool knownTypesAddedInCurrentScope)
        {
            if (knownTypesAddedInCurrentScope)
            {
                scopedKnownTypes.Pop();
                knownTypesAddedInCurrentScope = false;
            }
            if (knownDataContracts != null)
            {
                scopedKnownTypes.Push(knownDataContracts);
                knownTypesAddedInCurrentScope = true;
            }
            return knownTypesAddedInCurrentScope;
        }

#if USE_REFEMIT
        public static bool MoveToNextElement(XmlReaderDelegator xmlReader)
#else
        internal static bool MoveToNextElement(XmlReaderDelegator xmlReader)
#endif
        {
            return (xmlReader.MoveToContent() != XmlNodeType.EndElement);
        }

        // Finds the next member (after memberIndex) matching the current element; members
        // are expected in order. Unknown elements go to the extension-data path.
#if USE_REFEMIT
        public int GetMemberIndex(XmlReaderDelegator xmlReader, XmlDictionaryString[] memberNames, XmlDictionaryString[] memberNamespaces, int memberIndex, ExtensionDataObject extensionData)
#else
        internal int GetMemberIndex(XmlReaderDelegator xmlReader, XmlDictionaryString[] memberNames, XmlDictionaryString[] memberNamespaces, int memberIndex, ExtensionDataObject extensionData)
#endif
        {
            for (int i = memberIndex + 1; i < memberNames.Length; i++)
            {
                if (xmlReader.IsStartElement(memberNames[i], memberNamespaces[i]))
                    return i;
            }
            HandleMemberNotFound(xmlReader, extensionData, memberIndex);
            return memberNames.Length;
        }

        // Like GetMemberIndex, but throws if a required member (index <= requiredIndex) was skipped.
#if USE_REFEMIT
        public int GetMemberIndexWithRequiredMembers(XmlReaderDelegator xmlReader, XmlDictionaryString[] memberNames, XmlDictionaryString[] memberNamespaces, int memberIndex, int requiredIndex, ExtensionDataObject extensionData)
#else
        internal int GetMemberIndexWithRequiredMembers(XmlReaderDelegator xmlReader, XmlDictionaryString[] memberNames, XmlDictionaryString[] memberNamespaces, int memberIndex, int requiredIndex, ExtensionDataObject extensionData)
#endif
        {
            for (int i = memberIndex + 1; i < memberNames.Length; i++)
            {
                if (xmlReader.IsStartElement(memberNames[i], memberNamespaces[i]))
                {
                    if (requiredIndex < i)
                        ThrowRequiredMemberMissingException(xmlReader, memberIndex, requiredIndex, memberNames);
                    return i;
                }
            }
            HandleMemberNotFound(xmlReader, extensionData, memberIndex);
            return memberNames.Length;
        }

#if USE_REFEMIT
        public static void ThrowRequiredMemberMissingException(XmlReaderDelegator xmlReader, int memberIndex, int requiredIndex, XmlDictionaryString[] memberNames)
#else
        internal static void ThrowRequiredMemberMissingException(XmlReaderDelegator xmlReader, int memberIndex, int requiredIndex, XmlDictionaryString[] memberNames)
#endif
        {
            // Build a " | "-separated list of the candidate member names for the error message.
            StringBuilder stringBuilder = new StringBuilder();
            if (requiredIndex == memberNames.Length)
                requiredIndex--;
            for (int i = memberIndex + 1; i <= requiredIndex; i++)
            {
                if (stringBuilder.Length != 0)
                    stringBuilder.Append(" | ");
                stringBuilder.Append(memberNames[i].Value);
            }
            throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(XmlObjectSerializer.TryAddLineInfo(xmlReader, SR.Format(SR.UnexpectedElementExpectingElements, xmlReader.NodeType, xmlReader.LocalName, xmlReader.NamespaceURI, stringBuilder.ToString()))));
        }

#if uapaot
        public static void ThrowMissingRequiredMembers(object obj, XmlDictionaryString[] memberNames, byte[] expectedElements, byte[] requiredElements)
        {
            StringBuilder stringBuilder = new StringBuilder();
            int missingMembersCount = 0;
            for (int i = 0; i < memberNames.Length; i++)
            {
                if (IsBitSet(expectedElements, i) && IsBitSet(requiredElements, i))
                {
                    if (stringBuilder.Length != 0)
                        stringBuilder.Append(", ");
                    stringBuilder.Append(memberNames[i]);
                    missingMembersCount++;
                }
            }

            if (missingMembersCount == 1)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.JsonOneRequiredMemberNotFound, DataContract.GetClrTypeFullName(obj.GetType()), stringBuilder.ToString())));
            }
            else
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.JsonRequiredMembersNotFound, DataContract.GetClrTypeFullName(obj.GetType()), stringBuilder.ToString())));
            }
        }

        public static void ThrowDuplicateMemberException(object obj, XmlDictionaryString[] memberNames, int memberIndex)
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.JsonDuplicateMemberInInput, DataContract.GetClrTypeFullName(obj.GetType()), memberNames[memberIndex])));
        }

        private static bool IsBitSet(byte[] bytes, int bitIndex)
        {
            throw new NotImplementedException();
            //return BitFlagsGenerator.IsBitSet(bytes, bitIndex);
        }
#endif

        // Unknown element: either skip it or capture it as extension data.
        protected void HandleMemberNotFound(XmlReaderDelegator xmlReader, ExtensionDataObject extensionData, int memberIndex)
        {
            xmlReader.MoveToContent();
            if (xmlReader.NodeType != XmlNodeType.Element)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.Element, xmlReader));

            if (IgnoreExtensionDataObject || extensionData == null)
                SkipUnknownElement(xmlReader);
            else
                HandleUnknownElement(xmlReader, extensionData, memberIndex);
        }

        internal void HandleUnknownElement(XmlReaderDelegator xmlReader, ExtensionDataObject extensionData, int memberIndex)
        {
            if (extensionData.Members == null)
                extensionData.Members = new List<ExtensionDataMember>();
            extensionData.Members.Add(ReadExtensionDataMember(xmlReader, memberIndex));
        }

#if USE_REFEMIT
        public void SkipUnknownElement(XmlReaderDelegator xmlReader)
#else
        internal void SkipUnknownElement(XmlReaderDelegator xmlReader)
#endif
        {
            ReadAttributes(xmlReader);
            xmlReader.Skip();
        }

        // Returns the object id to use: a z:Ref id, Globals.NullObjectId for xsi:nil,
        // or Globals.NewObjectId when the element carries a new object (reader not consumed).
#if USE_REFEMIT
        public string ReadIfNullOrRef(XmlReaderDelegator xmlReader, Type memberType, bool isMemberTypeSerializable)
#else
        internal string ReadIfNullOrRef(XmlReaderDelegator xmlReader, Type memberType, bool isMemberTypeSerializable)
#endif
        {
            if (attributes.Ref != Globals.NewObjectId)
            {
                CheckIfTypeSerializable(memberType, isMemberTypeSerializable);
                xmlReader.Skip();
                return attributes.Ref;
            }
            else if (attributes.XsiNil)
            {
                CheckIfTypeSerializable(memberType, isMemberTypeSerializable);
                xmlReader.Skip();
                return Globals.NullObjectId;
            }
            return Globals.NewObjectId;
        }

#if USE_REFEMIT
        public virtual void ReadAttributes(XmlReaderDelegator xmlReader)
#else
        internal virtual void ReadAttributes(XmlReaderDelegator xmlReader)
#endif
        {
            if (attributes == null)
                attributes = new Attributes();
            attributes.Read(xmlReader);
        }

#if USE_REFEMIT
        public void ResetAttributes()
#else
        internal void ResetAttributes()
#endif
        {
            if (attributes != null)
                attributes.Reset();
        }

#if USE_REFEMIT
        public string GetObjectId()
#else
        internal string GetObjectId()
#endif
        {
            return attributes.Id;
        }

        // -1 means "size unknown"; overridden by contexts that can read a declared size.
#if USE_REFEMIT
        public virtual int GetArraySize()
#else
        internal virtual int GetArraySize()
#endif
        {
            return -1;
        }

#if USE_REFEMIT
        public void AddNewObject(object obj)
#else
        internal void AddNewObject(object obj)
#endif
        {
            AddNewObjectWithId(attributes.Id, obj);
        }

#if USE_REFEMIT
        public void AddNewObjectWithId(string id, object obj)
#else
        internal void AddNewObjectWithId(string id, object obj)
#endif
        {
            if (id != Globals.NewObjectId)
                DeserializedObjects.Add(id, obj);
        }

        public void ReplaceDeserializedObject(string id, object oldObj, object newObj)
        {
            if (object.ReferenceEquals(oldObj, newObj))
                return;

            if (id != Globals.NewObjectId)
            {
                // In certain cases (IObjectReference, SerializationSurrogate or DataContractSurrogate),
                // an object can be replaced with a different object once it is deserialized. If the
                // object happens to be referenced from within itself, that reference needs to be updated
                // with the new instance. BinaryFormatter supports this by fixing up such references later.
                // These XmlObjectSerializer implementations do not currently support fix-ups. Hence we
                // throw in such cases to allow us add fix-up support in the future if we need to.
                if (DeserializedObjects.IsObjectReferenced(id))
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.FactoryObjectContainsSelfReference, DataContract.GetClrTypeFullName(oldObj.GetType()), DataContract.GetClrTypeFullName(newObj.GetType()), id)));
                DeserializedObjects.Remove(id);
                DeserializedObjects.Add(id, newObj);
            }
        }

#if USE_REFEMIT
        public object GetExistingObject(string id, Type type, string name, string ns)
#else
        internal object GetExistingObject(string id, Type type, string name, string ns)
#endif
        {
            object retObj = DeserializedObjects.GetObject(id);
            if (retObj == null)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.DeserializedObjectWithIdNotFound, id)));
            return retObj;
        }

        private object GetExistingObjectOrExtensionData(string id)
        {
            object retObj = DeserializedObjects.GetObject(id);
            if (retObj == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                    XmlObjectSerializer.CreateSerializationException(SR.Format(SR.DeserializedObjectWithIdNotFound, id)));
            }
            return retObj;
        }

        public object GetRealObject(IObjectReference obj, string id)
        {
            object realObj = SurrogateDataContract.GetRealObject(obj, this.GetStreamingContext());
            // If GetRealObject returns null, it indicates that the object could not resolve itself because
            // it is missing information. This may occur in a case where multiple IObjectReference instances
            // depend on each other. BinaryFormatter supports this by fixing up the references later. These
            // XmlObjectSerializer implementations do not support fix-ups since the format does not contain
            // forward references. However, we throw for this case since it allows us to add fix-up support
            // in the future if we need to.
            if (realObj == null)
                // NOTE(review): literal "error" looks like a placeholder message — consider a proper SR resource.
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException("error"));

            ReplaceDeserializedObject(id, obj, realObj);
            return realObj;
        }

#if USE_REFEMIT
        public static void Read(XmlReaderDelegator xmlReader)
#else
        internal static void Read(XmlReaderDelegator xmlReader)
#endif
        {
            if (!xmlReader.Read())
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.UnexpectedEndOfFile)));
        }

        // Splits "prefix:name" and resolves the prefix's namespace against the reader's scope.
        internal static void ParseQualifiedName(string qname, XmlReaderDelegator xmlReader, out string name, out string ns, out string prefix)
        {
            int colon = qname.IndexOf(':');
            prefix = "";
            if (colon >= 0)
                prefix = qname.Substring(0, colon);
            name = qname.Substring(colon + 1);
            ns = xmlReader.LookupNamespace(prefix);
        }

        // Grows the array (doubling, capped at Int32.MaxValue) so that index is in range.
#if USE_REFEMIT
        public static T[] EnsureArraySize<T>(T[] array, int index)
#else
        internal static T[] EnsureArraySize<T>(T[] array, int index)
#endif
        {
            if (array.Length <= index)
            {
                if (index == Int32.MaxValue)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                        XmlObjectSerializer.CreateSerializationException(
                        SR.Format(SR.MaxArrayLengthExceeded, Int32.MaxValue, DataContract.GetClrTypeFullName(typeof(T)))));
                }
                int newSize = (index < Int32.MaxValue / 2) ? index * 2 : Int32.MaxValue;
                T[] newArray = new T[newSize];
                Array.Copy(array, 0, newArray, 0, array.Length);
                array = newArray;
            }
            return array;
        }

#if USE_REFEMIT
        public static T[] TrimArraySize<T>(T[] array, int size)
#else
        internal static T[] TrimArraySize<T>(T[] array, int size)
#endif
        {
            if (size != array.Length)
            {
                T[] newArray = new T[size];
                Array.Copy(array, 0, newArray, 0, size);
                array = newArray;
            }
            return array;
        }

        // After reading a declared-size array, verifies no extra items follow; extra items
        // with the array's item name mean the declared size was exceeded.
#if USE_REFEMIT
        public void CheckEndOfArray(XmlReaderDelegator xmlReader, int arraySize, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
#else
        internal void CheckEndOfArray(XmlReaderDelegator xmlReader, int arraySize, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
#endif
        {
            if (xmlReader.NodeType == XmlNodeType.EndElement)
                return;
            while (xmlReader.IsStartElement())
            {
                if (xmlReader.IsStartElement(itemName, itemNamespace))
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ArrayExceededSizeAttribute, arraySize, itemName.Value, itemNamespace.Value)));
                SkipUnknownElement(xmlReader);
            }
            if (xmlReader.NodeType != XmlNodeType.EndElement)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.EndElement, xmlReader));
        }

        internal object ReadIXmlSerializable(XmlReaderDelegator xmlReader, XmlDataContract xmlDataContract, bool isMemberType)
        {
            if (_xmlSerializableReader == null)
                _xmlSerializableReader = new XmlSerializableReader();
            return ReadIXmlSerializable(_xmlSerializableReader, xmlReader, xmlDataContract, isMemberType);
        }

        internal static object ReadRootIXmlSerializable(XmlReaderDelegator xmlReader, XmlDataContract xmlDataContract, bool isMemberType)
        {
            return ReadIXmlSerializable(new XmlSerializableReader(), xmlReader, xmlDataContract, isMemberType);
        }

        // Dispatches on the contract's underlying type: XmlElement, XmlNode[], or IXmlSerializable.
        internal static object ReadIXmlSerializable(XmlSerializableReader xmlSerializableReader, XmlReaderDelegator xmlReader, XmlDataContract xmlDataContract, bool isMemberType)
        {
            object obj = null;
            xmlSerializableReader.BeginRead(xmlReader);
            if (isMemberType && !xmlDataContract.HasRoot)
            {
                xmlReader.Read();
                xmlReader.MoveToContent();
            }
            if (xmlDataContract.UnderlyingType == Globals.TypeOfXmlElement)
            {
                if (!xmlReader.IsStartElement())
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.Element, xmlReader));
                XmlDocument xmlDoc = new XmlDocument();
                obj = (XmlElement)xmlDoc.ReadNode(xmlSerializableReader);
            }
            else if (xmlDataContract.UnderlyingType == Globals.TypeOfXmlNodeArray)
            {
                obj = XmlSerializableServices.ReadNodes(xmlSerializableReader);
            }
            else
            {
                IXmlSerializable xmlSerializable = xmlDataContract.CreateXmlSerializableDelegate();
                xmlSerializable.ReadXml(xmlSerializableReader);
                obj = xmlSerializable;
            }
            xmlSerializableReader.EndRead();
            return obj;
        }

        // Reads child elements into a SerializationInfo (ISerializable deserialization path);
        // namespaced elements are skipped, refs/nils are honored per element.
        public SerializationInfo ReadSerializationInfo(XmlReaderDelegator xmlReader, Type type)
        {
            var serInfo = new SerializationInfo(type, XmlObjectSerializer.FormatterConverter);
            XmlNodeType nodeType;
            while ((nodeType = xmlReader.MoveToContent()) != XmlNodeType.EndElement)
            {
                if (nodeType != XmlNodeType.Element)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.Element, xmlReader));
                }

                if (xmlReader.NamespaceURI.Length != 0)
                {
                    SkipUnknownElement(xmlReader);
                    continue;
                }

                string name = XmlConvert.DecodeName(xmlReader.LocalName);

                IncrementItemCount(1);
                ReadAttributes(xmlReader);
                object value;
                if (attributes.Ref != Globals.NewObjectId)
                {
                    xmlReader.Skip();
                    value = GetExistingObject(attributes.Ref, null, name, String.Empty);
                }
                else if (attributes.XsiNil)
                {
                    xmlReader.Skip();
                    value = null;
                }
                else
                {
                    value = InternalDeserialize(xmlReader, Globals.TypeOfObject, name, String.Empty);
                }
                serInfo.AddValue(name, value);
            }
            return serInfo;
        }

        protected virtual DataContract ResolveDataContractFromTypeName()
        {
            return (attributes.XsiTypeName == null) ? null : ResolveDataContractFromKnownTypes(attributes.XsiTypeName, attributes.XsiTypeNamespace, null /*memberTypeContract*/);
        }

        private ExtensionDataMember ReadExtensionDataMember(XmlReaderDelegator xmlReader, int memberIndex)
        {
            var member = new ExtensionDataMember
            {
                Name = xmlReader.LocalName,
                Namespace = xmlReader.NamespaceURI,
                MemberIndex = memberIndex
            };

            member.Value = xmlReader.UnderlyingExtensionDataReader != null ?
                xmlReader.UnderlyingExtensionDataReader.GetCurrentNode() :
                ReadExtensionDataValue(xmlReader);
            return member;
        }

        // Captures an unknown element as an IDataNode graph for round-tripping, classifying
        // it (ref / nil / collection / ISerializable / class / known contract) from attributes
        // and reader state.
        public IDataNode ReadExtensionDataValue(XmlReaderDelegator xmlReader)
        {
            ReadAttributes(xmlReader);
            IncrementItemCount(1);
            IDataNode dataNode = null;
            if (attributes.Ref != Globals.NewObjectId)
            {
                xmlReader.Skip();
                object o = GetExistingObjectOrExtensionData(attributes.Ref);
                dataNode = (o is IDataNode) ? (IDataNode)o : new DataNode<object>(o);
                dataNode.Id = attributes.Ref;
            }
            else if (attributes.XsiNil)
            {
                xmlReader.Skip();
                dataNode = null;
            }
            else
            {
                string dataContractName = null;
                string dataContractNamespace = null;
                if (attributes.XsiTypeName != null)
                {
                    dataContractName = attributes.XsiTypeName;
                    dataContractNamespace = attributes.XsiTypeNamespace;
                }
                if (IsReadingCollectionExtensionData(xmlReader))
                {
                    Read(xmlReader);
                    dataNode = ReadUnknownCollectionData(xmlReader, dataContractName, dataContractNamespace);
                }
                else if (attributes.FactoryTypeName != null)
                {
                    Read(xmlReader);
                    dataNode = ReadUnknownISerializableData(xmlReader, dataContractName, dataContractNamespace);
                }
                else if (IsReadingClassExtensionData(xmlReader))
                {
                    Read(xmlReader);
                    dataNode = ReadUnknownClassData(xmlReader, dataContractName, dataContractNamespace);
                }
                else
                {
                    DataContract dataContract = ResolveDataContractFromTypeName();
                    if (dataContract == null)
                        dataNode = ReadExtensionDataValue(xmlReader, dataContractName, dataContractNamespace);
                    else if (dataContract is XmlDataContract)
                        dataNode = ReadUnknownXmlData(xmlReader, dataContractName, dataContractNamespace);
                    else
                    {
                        if (dataContract.IsISerializable)
                        {
                            Read(xmlReader);
                            dataNode = ReadUnknownISerializableData(xmlReader, dataContractName, dataContractNamespace);
                        }
                        else if (dataContract is PrimitiveDataContract)
                        {
                            if (attributes.Id == Globals.NewObjectId)
                            {
                                Read(xmlReader);
                                xmlReader.MoveToContent();
                                dataNode = ReadUnknownPrimitiveData(xmlReader, dataContract.UnderlyingType, dataContractName, dataContractNamespace);
                                xmlReader.ReadEndElement();
                            }
                            else
                            {
                                dataNode = new DataNode<object>(xmlReader.ReadElementContentAsAnyType(dataContract.UnderlyingType));
                                InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace);
                            }
                        }
                        else if (dataContract is EnumDataContract)
                        {
                            dataNode = new DataNode<object>(((EnumDataContract)dataContract).ReadEnumValue(xmlReader));
                            InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace);
                        }
                        else if (dataContract is ClassDataContract)
                        {
                            Read(xmlReader);
                            dataNode = ReadUnknownClassData(xmlReader, dataContractName, dataContractNamespace);
                        }
                        else if (dataContract is CollectionDataContract)
                        {
                            Read(xmlReader);
                            dataNode = ReadUnknownCollectionData(xmlReader, dataContractName, dataContractNamespace);
                        }
                    }
                }
            }
            return dataNode;
        }

        protected virtual void StartReadExtensionDataValue(XmlReaderDelegator xmlReader)
        {
        }

        // Classifies extension data by the reader's node type when no contract could be resolved.
        private IDataNode ReadExtensionDataValue(XmlReaderDelegator xmlReader, string dataContractName, string dataContractNamespace)
        {
            StartReadExtensionDataValue(xmlReader);

            if (attributes.UnrecognizedAttributesFound)
                return ReadUnknownXmlData(xmlReader, dataContractName, dataContractNamespace);

            IDictionary<string, string> namespacesInScope = xmlReader.GetNamespacesInScope(XmlNamespaceScope.ExcludeXml);
            Read(xmlReader);
            xmlReader.MoveToContent();
            switch (xmlReader.NodeType)
            {
                case XmlNodeType.Text:
                    return ReadPrimitiveExtensionDataValue(xmlReader, dataContractName, dataContractNamespace);
                case XmlNodeType.Element:
                    if (xmlReader.NamespaceURI.StartsWith(Globals.DataContractXsdBaseNamespace, StringComparison.Ordinal))
                        return ReadUnknownClassData(xmlReader, dataContractName, dataContractNamespace);
                    else
                        return ReadAndResolveUnknownXmlData(xmlReader, namespacesInScope, dataContractName, dataContractNamespace);
                case XmlNodeType.EndElement:
                    {
                        // NOTE: cannot distinguish between empty class or IXmlSerializable and typeof(object)
                        IDataNode objNode = ReadUnknownPrimitiveData(xmlReader, Globals.TypeOfObject, dataContractName, dataContractNamespace);
                        xmlReader.ReadEndElement();
                        objNode.IsFinalValue = false;
                        return objNode;
                    }
                default:
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.Element, xmlReader));
            }
        }

        protected virtual IDataNode ReadPrimitiveExtensionDataValue(XmlReaderDelegator xmlReader, string dataContractName, string dataContractNamespace)
        {
            Type valueType = xmlReader.ValueType;
            if (valueType == Globals.TypeOfString)
            {
                // NOTE: cannot distinguish other primitives from string (default XmlReader ValueType)
                IDataNode stringNode = new DataNode<object>(xmlReader.ReadContentAsString());
                InitializeExtensionDataNode(stringNode, dataContractName, dataContractNamespace);
                stringNode.IsFinalValue = false;
                xmlReader.ReadEndElement();
                return stringNode;
            }

            IDataNode objNode = ReadUnknownPrimitiveData(xmlReader, valueType, dataContractName, dataContractNamespace);
            xmlReader.ReadEndElement();
            return objNode;
        }

        // Stamps common metadata (contract name/namespace, CLR type info, id) on a new data node
        // and registers it so later z:Ref attributes can resolve to it.
        protected void InitializeExtensionDataNode(IDataNode dataNode, string dataContractName, string dataContractNamespace)
        {
            dataNode.DataContractName = dataContractName;
            dataNode.DataContractNamespace = dataContractNamespace;
            dataNode.ClrAssemblyName = attributes.ClrAssembly;
            dataNode.ClrTypeName = attributes.ClrType;
            AddNewObject(dataNode);
            dataNode.Id = attributes.Id;
        }

        // (Method continues past this view.)
        private IDataNode ReadUnknownPrimitiveData(XmlReaderDelegator xmlReader, Type type, string dataContractName, string dataContractNamespace)
        {
            IDataNode dataNode = xmlReader.ReadExtensionData(type);
            InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace);
return dataNode; } private ClassDataNode ReadUnknownClassData(XmlReaderDelegator xmlReader, string dataContractName, string dataContractNamespace) { var dataNode = new ClassDataNode(); InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace); int memberIndex = 0; XmlNodeType nodeType; while ((nodeType = xmlReader.MoveToContent()) != XmlNodeType.EndElement) { if (nodeType != XmlNodeType.Element) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.Element, xmlReader)); if (dataNode.Members == null) dataNode.Members = new List<ExtensionDataMember>(); dataNode.Members.Add(ReadExtensionDataMember(xmlReader, memberIndex++)); } xmlReader.ReadEndElement(); return dataNode; } private CollectionDataNode ReadUnknownCollectionData(XmlReaderDelegator xmlReader, string dataContractName, string dataContractNamespace) { var dataNode = new CollectionDataNode(); InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace); int arraySize = attributes.ArraySZSize; XmlNodeType nodeType; while ((nodeType = xmlReader.MoveToContent()) != XmlNodeType.EndElement) { if (nodeType != XmlNodeType.Element) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.Element, xmlReader)); if (dataNode.ItemName == null) { dataNode.ItemName = xmlReader.LocalName; dataNode.ItemNamespace = xmlReader.NamespaceURI; } if (xmlReader.IsStartElement(dataNode.ItemName, dataNode.ItemNamespace)) { if (dataNode.Items == null) dataNode.Items = new List<IDataNode>(); dataNode.Items.Add(ReadExtensionDataValue(xmlReader)); } else SkipUnknownElement(xmlReader); } xmlReader.ReadEndElement(); if (arraySize != -1) { dataNode.Size = arraySize; if (dataNode.Items == null) { if (dataNode.Size > 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ArraySizeAttributeIncorrect, arraySize, 0))); } else if (dataNode.Size != 
dataNode.Items.Count) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ArraySizeAttributeIncorrect, arraySize, dataNode.Items.Count))); } else { if (dataNode.Items != null) { dataNode.Size = dataNode.Items.Count; } else { dataNode.Size = 0; } } return dataNode; } private ISerializableDataNode ReadUnknownISerializableData(XmlReaderDelegator xmlReader, string dataContractName, string dataContractNamespace) { var dataNode = new ISerializableDataNode(); InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace); dataNode.FactoryTypeName = attributes.FactoryTypeName; dataNode.FactoryTypeNamespace = attributes.FactoryTypeNamespace; XmlNodeType nodeType; while ((nodeType = xmlReader.MoveToContent()) != XmlNodeType.EndElement) { if (nodeType != XmlNodeType.Element) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateUnexpectedStateException(XmlNodeType.Element, xmlReader)); if (xmlReader.NamespaceURI.Length != 0) { SkipUnknownElement(xmlReader); continue; } var member = new ISerializableDataMember(); member.Name = xmlReader.LocalName; member.Value = ReadExtensionDataValue(xmlReader); if (dataNode.Members == null) dataNode.Members = new List<ISerializableDataMember>(); dataNode.Members.Add(member); } xmlReader.ReadEndElement(); return dataNode; } private IDataNode ReadUnknownXmlData(XmlReaderDelegator xmlReader, string dataContractName, string dataContractNamespace) { XmlDataNode dataNode = new XmlDataNode(); InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace); dataNode.OwnerDocument = Document; if (xmlReader.NodeType == XmlNodeType.EndElement) return dataNode; IList<XmlAttribute> xmlAttributes = null; IList<XmlNode> xmlChildNodes = null; XmlNodeType nodeType = xmlReader.MoveToContent(); if (nodeType != XmlNodeType.Text) { while (xmlReader.MoveToNextAttribute()) { string ns = xmlReader.NamespaceURI; if (ns != Globals.SerializationNamespace && ns 
!= Globals.SchemaInstanceNamespace) { if (xmlAttributes == null) xmlAttributes = new List<XmlAttribute>(); xmlAttributes.Add((XmlAttribute)Document.ReadNode(xmlReader.UnderlyingReader)); } } Read(xmlReader); } while ((nodeType = xmlReader.MoveToContent()) != XmlNodeType.EndElement) { if (xmlReader.EOF) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.UnexpectedEndOfFile))); if (xmlChildNodes == null) xmlChildNodes = new List<XmlNode>(); xmlChildNodes.Add(Document.ReadNode(xmlReader.UnderlyingReader)); } xmlReader.ReadEndElement(); dataNode.XmlAttributes = xmlAttributes; dataNode.XmlChildNodes = xmlChildNodes; return dataNode; } // Pattern-recognition logic: the method reads XML elements into DOM. To recognize as an array, it requires that // all items have the same name and namespace. To recognize as an ISerializable type, it requires that all // items be unqualified. If the XML only contains elements (no attributes or other nodes) is recognized as a // class/class hierarchy. Otherwise it is deserialized as XML. 
private IDataNode ReadAndResolveUnknownXmlData(XmlReaderDelegator xmlReader, IDictionary<string, string> namespaces,
            string dataContractName, string dataContractNamespace)
        {
            // See the pattern-recognition comment above: buffer the element's children into
            // DOM while tracking whether the content could be a collection (all items share
            // name+namespace), ISerializable data (all items unqualified), or a class
            // (elements only); then replay the buffered nodes through the matching reader.
            bool couldBeISerializableData = true;
            bool couldBeCollectionData = true;
            bool couldBeClassData = true;
            string elementNs = null, elementName = null;
            var xmlChildNodes = new List<XmlNode>();
            IList<XmlAttribute> xmlAttributes = null;
            if (namespaces != null)
            {
                // Re-declare the namespaces that were in scope at the original location so
                // the buffered fragment stays self-contained.
                xmlAttributes = new List<XmlAttribute>();
                foreach (KeyValuePair<string, string> prefixNsPair in namespaces)
                {
                    xmlAttributes.Add(AddNamespaceDeclaration(prefixNsPair.Key, prefixNsPair.Value));
                }
            }
            XmlNodeType nodeType;
            while ((nodeType = xmlReader.NodeType) != XmlNodeType.EndElement)
            {
                if (nodeType == XmlNodeType.Element)
                {
                    string ns = xmlReader.NamespaceURI;
                    string name = xmlReader.LocalName;
                    if (couldBeISerializableData)
                        couldBeISerializableData = (ns.Length == 0);
                    if (couldBeCollectionData)
                    {
                        if (elementName == null)
                        {
                            // First item fixes the expected collection item name/namespace.
                            elementName = name;
                            elementNs = ns;
                        }
                        else
                            couldBeCollectionData = (String.CompareOrdinal(elementName, name) == 0) &&
                                (String.CompareOrdinal(elementNs, ns) == 0);
                    }
                }
                else if (xmlReader.EOF)
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.UnexpectedEndOfFile)));
                else if (IsContentNode(xmlReader.NodeType))
                    couldBeClassData = couldBeISerializableData = couldBeCollectionData = false;

                // Lazily allocated scratch Attributes instance; reused across children.
                if (_attributesInXmlData == null) _attributesInXmlData = new Attributes();
                _attributesInXmlData.Read(xmlReader);

                XmlNode childNode = Document.ReadNode(xmlReader.UnderlyingReader);
                xmlChildNodes.Add(childNode);

                if (namespaces == null)
                {
                    // No ambient namespace map: pin down any xsi:type / factory-type prefixes
                    // used by this child so they still resolve inside the buffered fragment.
                    if (_attributesInXmlData.XsiTypeName != null)
                        childNode.Attributes.Append(AddNamespaceDeclaration(_attributesInXmlData.XsiTypePrefix, _attributesInXmlData.XsiTypeNamespace));
                    if (_attributesInXmlData.FactoryTypeName != null)
                        childNode.Attributes.Append(AddNamespaceDeclaration(_attributesInXmlData.FactoryTypePrefix, _attributesInXmlData.FactoryTypeNamespace));
                }
            }
            xmlReader.ReadEndElement();

            if (elementName != null && couldBeCollectionData)
                return ReadUnknownCollectionData(CreateReaderOverChildNodes(xmlAttributes, xmlChildNodes), dataContractName, dataContractNamespace);
            else if (couldBeISerializableData)
                return ReadUnknownISerializableData(CreateReaderOverChildNodes(xmlAttributes, xmlChildNodes), dataContractName, dataContractNamespace);
            else if (couldBeClassData)
                return ReadUnknownClassData(CreateReaderOverChildNodes(xmlAttributes, xmlChildNodes), dataContractName, dataContractNamespace);
            else
            {
                // No structure recognized: keep the raw DOM.
                XmlDataNode dataNode = new XmlDataNode();
                InitializeExtensionDataNode(dataNode, dataContractName, dataContractNamespace);
                dataNode.OwnerDocument = Document;
                dataNode.XmlChildNodes = xmlChildNodes;
                dataNode.XmlAttributes = xmlAttributes;
                return dataNode;
            }
        }

        // Returns true for node types that carry content (text, CDATA, entities, ...);
        // whitespace, comments, PIs and doctype nodes do not disqualify class/collection shapes.
        private bool IsContentNode(XmlNodeType nodeType)
        {
            switch (nodeType)
            {
                case XmlNodeType.Whitespace:
                case XmlNodeType.SignificantWhitespace:
                case XmlNodeType.Comment:
                case XmlNodeType.ProcessingInstruction:
                case XmlNodeType.DocumentType:
                    return false;
                default:
                    return true;
            }
        }

        // Wraps buffered attributes/children in a synthetic element and returns a reader
        // positioned just inside it, ready for the ReadUnknown* methods.
        internal XmlReaderDelegator CreateReaderOverChildNodes(IList<XmlAttribute> xmlAttributes, IList<XmlNode> xmlChildNodes)
        {
            XmlNode wrapperElement = CreateWrapperXmlElement(Document, xmlAttributes, xmlChildNodes, null, null, null);
            XmlReaderDelegator nodeReader = CreateReaderDelegatorForReader(new XmlNodeReader(wrapperElement));
            nodeReader.MoveToContent();
            Read(nodeReader);
            return nodeReader;
        }

        // Builds a single wrapper element (default name "wrapper") carrying the given
        // attributes and child nodes; used to replay buffered DOM fragments.
        internal static XmlNode CreateWrapperXmlElement(XmlDocument document, IList<XmlAttribute> xmlAttributes, IList<XmlNode> xmlChildNodes, string prefix, string localName, string ns)
        {
            localName = localName ?? "wrapper";
            ns = ns ?? string.Empty;
            XmlNode wrapperElement = document.CreateElement(prefix, localName, ns);
            if (xmlAttributes != null)
            {
                for (int i = 0; i < xmlAttributes.Count; i++)
                {
                    wrapperElement.Attributes.Append((XmlAttribute)xmlAttributes[i]);
                }
            }
            if (xmlChildNodes != null)
            {
                for (int i = 0; i < xmlChildNodes.Count; i++)
                {
                    wrapperElement.AppendChild(xmlChildNodes[i]);
                }
            }
            return wrapperElement;
        }

        // Creates an xmlns declaration attribute: xmlns="ns" when prefix is empty,
        // otherwise xmlns:prefix="ns".
        private XmlAttribute AddNamespaceDeclaration(string prefix, string ns)
        {
            XmlAttribute attribute = (prefix == null || prefix.Length == 0) ?
                Document.CreateAttribute(null, Globals.XmlnsPrefix, Globals.XmlnsNamespace) :
                Document.CreateAttribute(Globals.XmlnsPrefix, prefix, Globals.XmlnsNamespace);
            attribute.Value = ns;
            return attribute;
        }

#if USE_REFEMIT
        public static Exception CreateUnexpectedStateException(XmlNodeType expectedState, XmlReaderDelegator xmlReader)
#else
        internal static Exception CreateUnexpectedStateException(XmlNodeType expectedState, XmlReaderDelegator xmlReader)
#endif
        {
            return XmlObjectSerializer.CreateSerializationExceptionWithReaderDetails(SR.Format(SR.ExpectingState, expectedState), xmlReader);
        }

        //Silverlight only helper function to create SerializationException
#if USE_REFEMIT
        public static Exception CreateSerializationException(string message)
#else
        internal static Exception CreateSerializationException(string message)
#endif
        {
            return XmlObjectSerializer.CreateSerializationException(message);
        }

        // Deserializes a value through its data contract; virtual so derived contexts can
        // intercept (e.g. for surrogates).
        protected virtual object ReadDataContractValue(DataContract dataContract, XmlReaderDelegator reader)
        {
            return dataContract.ReadXmlValue(reader, this);
        }

        protected virtual XmlReaderDelegator CreateReaderDelegatorForReader(XmlReader xmlReader)
        {
            return new XmlReaderDelegator(xmlReader);
        }

        // A z:Size attribute (ArraySZSize != -1) marks the element as collection data.
        protected virtual bool IsReadingCollectionExtensionData(XmlReaderDelegator xmlReader)
        {
            return (attributes.ArraySZSize != -1);
        }

        protected virtual bool IsReadingClassExtensionData(XmlReaderDelegator xmlReader)
        {
            return false;
        }
    }
}
/* ***************************************************************************
 * This file is part of SharpNEAT - Evolution of Neural Networks.
 * 
 * Copyright 2004-2016 Colin Green (sharpneat@gmail.com)
 *
 * SharpNEAT is free software; you can redistribute it and/or modify
 * it under the terms of The MIT License (MIT).
 *
 * You should have received a copy of the MIT License
 * along with SharpNEAT; if not, see https://opensource.org/licenses/MIT.
 */
using System;
using System.Collections.Generic;
using System.Diagnostics;
using SharpNeat.Core;

namespace SharpNeat.SpeciationStrategies
{
    // ENHANCEMENT: k-means++ seeks to choose better starting clusters. (http://en.wikipedia.org/wiki/K-means_clustering)
    // ENHANCEMENT: The filtering algorithm uses kd-trees to speed up each k-means step[9]. (http://en.wikipedia.org/wiki/K-means_clustering)
    // ENHANCEMENT: Euclidean squared distance metric is equivalent for k-means and faster than euclidean (http://www.improvedoutcomes.com/docs/WebSiteDocs/Clustering/Clustering_Parameters/Euclidean_and_Euclidean_Squared_Distance_Metrics.htm)
    /// <summary>
    /// An ISpeciationStrategy that speciates genomes using the k-means clustering method.
    /// k-means requires a distance metric and as such this class requires an IDistanceMetric to be provided at
    /// construction time. Different distance metrics can be used including NeatDistanceMetric which is
    /// equivalent to the metric used in the standard NEAT method albeit with a different clustering/speciation
    /// algorithm (Standard NEAT does not use k-means).
    /// </summary>
    /// <typeparam name="TGenome">The genome type to apply clustering to.</typeparam>
    public class KMeansClusteringStrategy<TGenome> : ISpeciationStrategy<TGenome>
        where TGenome : class, IGenome<TGenome>
    {
        // Upper bound on k-means iterations; clusters are generally stable well before this.
        const int __MAX_KMEANS_LOOPS = 5;
        readonly IDistanceMetric _distanceMetric;

        #region Constructor

        /// <summary>
        /// Constructor that accepts an IDistanceMetric to be used for the k-means method.
        /// </summary>
        public KMeansClusteringStrategy(IDistanceMetric distanceMetric)
        {
            _distanceMetric = distanceMetric;
        }

        #endregion

        #region ISpeciationStrategy<TGenome> Members

        /// <summary>
        /// Speciates the genomes in genomeList into the number of species specified by specieCount
        /// and returns a newly constructed list of Specie objects containing the speciated genomes.
        /// </summary>
        public IList<Specie<TGenome>> InitializeSpeciation(IList<TGenome> genomeList, int specieCount)
        {
            // Create empty specieList.
            // Use an initial specieList capacity that will limit the need for memory reallocation but that isn't
            // too wasteful of memory.
            int initSpeciesCapacity = (genomeList.Count * 2) / specieCount;
            List<Specie<TGenome>> specieList = new List<Specie<TGenome>>(specieCount);
            for(int i=0; i<specieCount; i++) {
                specieList.Add(new Specie<TGenome>((uint)i, i, initSpeciesCapacity));
            }

            // Speciate genomes into the empty species.
            SpeciateGenomes(genomeList, specieList);
            return specieList;
        }

        /// <summary>
        /// Speciates the genomes in genomeList into the provided specieList. It is assumed that
        /// the genomeList represents all of the required genomes and that the species are currently empty.
        /// 
        /// This method can be used for initialization or completely re-speciating an existing genome population.
        /// </summary>
        public void SpeciateGenomes(IList<TGenome> genomeList, IList<Specie<TGenome>> specieList)
        {
            Debug.Assert(SpeciationUtils.TestEmptySpecies(specieList), "SpeciateGenomes(IList<TGenome>,IList<Species<TGenome>>) called with non-empty species");
            Debug.Assert(genomeList.Count >= specieList.Count, $"SpeciateGenomes(IList<TGenome>,IList<Species<TGenome>>). Species count [{specieList.Count}] is greater than genome count [{genomeList.Count}].");

            // Randomly allocate the first k genomes to their own specie. Because there is only one genome in these
            // species each genome effectively represents a specie centroid. This is necessary to ensure we get k specieList.
            // If we randomly assign all genomes to species from the outset and then calculate centroids then typically some
            // of the species become empty.
            // This approach ensures that each species will have at least one genome - because that genome is the specie 
            // centroid and therefore has distance of zero from the centroid (itself).
            int specieCount = specieList.Count;
            for(int i=0; i<specieCount; i++)
            {
                Specie<TGenome> specie = specieList[i];
                genomeList[i].SpecieIdx = specie.Idx;
                specie.GenomeList.Add(genomeList[i]);

                // Just set the specie centroid directly.
                specie.Centroid = genomeList[i].Position;
            }

            // Now allocate the remaining genomes based on their distance from the centroids.
            int genomeCount = genomeList.Count;
            for(int i=specieCount; i<genomeCount; i++)
            {
                TGenome genome = genomeList[i];
                Specie<TGenome> closestSpecie = FindClosestSpecie(genome, specieList);
                genome.SpecieIdx = closestSpecie.Idx;
                closestSpecie.GenomeList.Add(genome);
            }

            // Recalculate each specie's centroid.
            foreach(Specie<TGenome> specie in specieList) {
                specie.Centroid = CalculateSpecieCentroid(specie);
            }

            // Perform the main k-means loop until convergence.
            SpeciateUntilConvergence(genomeList, specieList);

            Debug.Assert(SpeciationUtils.PerformIntegrityCheck(specieList));
        }

        /// <summary>
        /// Speciates the offspring genomes in offspringList into the provided specieList. In contrast to
        /// SpeciateGenomes() offspringList is taken to be a list of new genomes (offspring) that should be 
        /// added to existing species. That is, the species contain genomes that are not in offspringList
        /// that we wish to keep; typically these would be elite genomes that are the parents of the
        /// offspring.
        /// </summary>
        public void SpeciateOffspring(IList<TGenome> offspringList, IList<Specie<TGenome>> specieList)
        {
            // Each specie should contain at least one genome. We need at least one existing genome per specie to act
            // as a specie centroid in order to define where the specie is within the encoding space.
            Debug.Assert(SpeciationUtils.TestPopulatedSpecies(specieList), "SpeciateOffspring(IList<TGenome>,IList<Species<TGenome>>) called with an empty specie.");

            // Update the centroid of each specie. If we're adding offspring this means that old genomes 
            // have been removed from the population and therefore the centroids are out-of-date.
            foreach(Specie<TGenome> specie in specieList) {
                specie.Centroid = CalculateSpecieCentroid(specie);
            }

            // Allocate each offspring genome to the specie it is closest to. 
            foreach(TGenome genome in offspringList)
            {
                Specie<TGenome> closestSpecie = FindClosestSpecie(genome, specieList);
                closestSpecie.GenomeList.Add(genome);
                genome.SpecieIdx = closestSpecie.Idx;
            }

            // Recalculate each specie's centroid now that we have additional genomes in the specieList.
            foreach(Specie<TGenome> specie in specieList) {
                specie.Centroid = CalculateSpecieCentroid(specie);
            }

            // Accumulate *all* genomes into a flat genome list.
            int genomeCount = 0;
            foreach(Specie<TGenome> specie in specieList) {
                genomeCount += specie.GenomeList.Count;
            }

            List<TGenome> genomeList = new List<TGenome>(genomeCount);
            foreach(Specie<TGenome> specie in specieList) {
                genomeList.AddRange(specie.GenomeList);
            }

            // Perform the main k-means loop until convergence.
            SpeciateUntilConvergence(genomeList, specieList);

            Debug.Assert(SpeciationUtils.PerformIntegrityCheck(specieList));
        }

        #endregion

        #region Private Methods [k-means]

        /// <summary>
        /// Perform the main k-means loop until no genome reallocations occur or some maximum number of loops
        /// has been performed. Theoretically a small number of reallocations may occur for a great many loops 
        /// therefore we require the additional max loops threshold exit strategy - the clusters should be pretty
        /// stable and well defined after a few loops even if the algorithm hasn't converged completely.
        /// </summary>
        private void SpeciateUntilConvergence(IList<TGenome> genomeList, IList<Specie<TGenome>> specieList)
        {
            List<Specie<TGenome>> emptySpecieList = new List<Specie<TGenome>>();
            int specieCount = specieList.Count;

            // Array of flags that indicate if a specie was modified (had genomes allocated to and/or from it).
            bool[] specieModArr = new bool[specieCount];

            // Main k-means loop.
            for(int loops=0; loops<__MAX_KMEANS_LOOPS; loops++)
            {
                // Track number of reallocations made on each loop.
                int reallocations = 0;

                // Loop over genomes. For each one find the specie it is closest to; if it is not the specie
                // it is currently in then reallocate it.
                foreach(TGenome genome in genomeList)
                {
                    Specie<TGenome> closestSpecie = FindClosestSpecie(genome, specieList);
                    if(genome.SpecieIdx != closestSpecie.Idx) 
                    {
                        // Track which species have been modified.
                        specieModArr[genome.SpecieIdx] = true;
                        specieModArr[closestSpecie.Idx] = true;

                        // Add the genome to its new specie and set its speciesIdx accordingly.
                        // For now we leave the genome in its original species; It's more efficient to determine
                        // all reallocations and then remove reallocated genomes from their origin specie all together;
                        // This is because we can shuffle down the remaining genomes in a specie to fill the gaps made by
                        // the removed genomes - and do so in one round of shuffling instead of shuffling to fill a gap on
                        // each remove.
                        closestSpecie.GenomeList.Add(genome);
                        genome.SpecieIdx = closestSpecie.Idx;
                        reallocations++;
                    }
                }

                // Complete the reallocations.
                for(int i=0; i<specieCount; i++)
                {
                    if(!specieModArr[i]) 
                    {   // Specie not changed. Skip.
                        continue;
                    }

                    // Reset flag.
                    specieModArr[i] = false;

                    // Remove the genomes that have been allocated to other species. We fill the resulting 
                    // gaps by shuffling down the remaining genomes.
                    Specie<TGenome> specie = specieList[i];
                    specie.GenomeList.RemoveAll(delegate(TGenome genome)
                    {
                        return genome.SpecieIdx != specie.Idx;
                    });

                    // Track empty species. We will allocate genomes to them after this loop.
                    // This is necessary as some distance metrics can result in empty species occurring.
                    if(0 == specie.GenomeList.Count) {
                        emptySpecieList.Add(specie);
                    } 
                    else
                    {   // Recalc the specie centroid now that it contains a different set of genomes.
                        specie.Centroid = CalculateSpecieCentroid(specie);
                    }
                }

                // Check for empty species. We need to reallocate some genomes into the empty specieList to maintain the 
                // required number of species.
                if(0 != emptySpecieList.Count)
                {
                    // We find the genomes in the population as a whole that are furthest from their containing specie's 
                    // centroid genome - we call these outlier genomes. We then move these genomes into the empty species to
                    // act as the sole member and centroid of those species; These act as specie seeds for the next k-means loop.
                    TGenome[] genomeByDistanceArr = GetGenomesByDistanceFromSpecie(genomeList, specieList);

                    // Reallocate each of the outlier genomes from their current specie to an empty specie.
                    int emptySpecieCount = emptySpecieList.Count;
                    int outlierIdx = 0;
                    for(int i=0; i<emptySpecieCount; i++)
                    {
                        // Find the next outlier genome that can be re-allocated. Skip genomes that are the
                        // only member of a specie - that would just create another empty specie.
                        TGenome genome;
                        Specie<TGenome> sourceSpecie;
                        do
                        {
                            genome = genomeByDistanceArr[outlierIdx++];
                            sourceSpecie = specieList[genome.SpecieIdx];
                        }
                        while(sourceSpecie.GenomeList.Count == 1 && outlierIdx < genomeByDistanceArr.Length);

                        // BUGFIX: test whether the last candidate examined is actually usable rather than
                        // whether the search index reached the end of the array. The previous check
                        // (outlierIdx == genomeByDistanceArr.Length) incorrectly threw when the final
                        // element of genomeByDistanceArr was a valid outlier, because the do-while also
                        // exits with outlierIdx == Length in that success case.
                        if(sourceSpecie.GenomeList.Count == 1)
                        {   // Theoretically impossible. We do the test so that we get an easy to trace error message if it does happen.
                            throw new SharpNeatException("Error finding outlier genome. No outliers could be found in any specie with more than 1 genome.");
                        }

                        // Get ref to the empty specie and register both source and target specie with specieModArr.
                        Specie<TGenome> emptySpecie = emptySpecieList[i];
                        specieModArr[emptySpecie.Idx] = true;
                        specieModArr[sourceSpecie.Idx] = true;

                        // Reallocate the genome. Here we do the remove operation right away; We aren't expecting to deal with many empty
                        // species, usually it will be one or two at most; Any more and there's probably something wrong with the distance
                        // metric, e.g. maybe it doesn't satisfy the triangle inequality (see wikipedia).
                        // Another reason to remove right away is to eliminate the possibility of removing multiple outlier genomes from the
                        // same specie and potentially leaving it empty; The test in the do-while loop above only takes genomes from
                        // currently non-empty species.
                        sourceSpecie.GenomeList.Remove(genome);
                        emptySpecie.GenomeList.Add(genome);
                        genome.SpecieIdx = emptySpecie.Idx;
                        reallocations++;
                    }

                    // Recalculate centroid for all affected species.
                    for(int i=0; i<specieCount; i++)
                    {
                        if(specieModArr[i])
                        {   // Reset flag while we're here. Do this first to help maintain CPU cache coherency (we just tested it).
                            specieModArr[i] = false;
                            specieList[i].Centroid = CalculateSpecieCentroid(specieList[i]);
                        }
                    }

                    // Clear emptySpecieList after using it. Otherwise we are holding old references and thus creating
                    // work for the garbage collector.
                    emptySpecieList.Clear();
                }

                // Exit the loop if no genome reallocations have occurred. The species are stable, speciation is completed.
                if(0==reallocations) {
                    break;
                }
            }
        }

        /// <summary>
        /// Recalculate the specie centroid based on the genomes currently in the specie.
        /// </summary>
        private CoordinateVector CalculateSpecieCentroid(Specie<TGenome> specie)
        {
            // Special case - 1 genome in specie (its position *is* the specie centroid).
            if(1 == specie.GenomeList.Count) {
                return new CoordinateVector(specie.GenomeList[0].Position.CoordArray);
            }

            // Create a temp list containing all of the genome positions.
            List<TGenome> genomeList = specie.GenomeList;
            int count = genomeList.Count;
            List<CoordinateVector> coordList = new List<CoordinateVector>(count);
            for(int i=0; i<count; i++) {
                coordList.Add(genomeList[i].Position);
            }

            // The centroid calculation is a function of the distance metric.
            return _distanceMetric.CalculateCentroid(coordList);
        }

        // ENHANCEMENT: Optimization candidate.
        /// <summary>
        /// Gets an array of all genomes ordered by their distance from their current specie.
        /// </summary>
        private TGenome[] GetGenomesByDistanceFromSpecie(IList<TGenome> genomeList, IList<Specie<TGenome>> specieList)
        {
            // Build a list of all genomes paired with their distance from their centroid.
            int genomeCount = genomeList.Count;
            GenomeDistancePair<TGenome>[] genomeDistanceArr = new GenomeDistancePair<TGenome>[genomeCount];
            for(int i=0; i<genomeCount; i++)
            {
                TGenome genome = genomeList[i];
                double distance = _distanceMetric.GetDistance(genome.Position, specieList[genome.SpecieIdx].Centroid);
                genomeDistanceArr[i] = new GenomeDistancePair<TGenome>(distance, genome);
            }

            // Sort list. Longest distance first.
            Array.Sort(genomeDistanceArr);

            // Put the sorted genomes in an array and return it.
            TGenome[] genomeArr = new TGenome[genomeCount];
            for(int i=0; i<genomeCount; i++) {
                genomeArr[i] = genomeDistanceArr[i]._genome;
            }

            return genomeArr;
        }

        /// <summary>
        /// Find the specie that a genome is closest to as determined by the distance metric.
        /// </summary>
        private Specie<TGenome> FindClosestSpecie(TGenome genome, IList<Specie<TGenome>> specieList)
        {
            // Measure distance to first specie's centroid.
            Specie<TGenome> closestSpecie = specieList[0];
            double closestDistance = _distanceMetric.GetDistance(genome.Position, closestSpecie.Centroid);

            // Measure distance to all remaining species.
            int speciesCount = specieList.Count;
            for(int i=1; i<speciesCount; i++)
            {
                double distance = _distanceMetric.GetDistance(genome.Position, specieList[i].Centroid);
                if(distance < closestDistance)
                {
                    closestDistance = distance;
                    closestSpecie = specieList[i];
                }
            }
            return closestSpecie;
        }

        #endregion
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using QuantConnect.Algorithm.Framework.Portfolio;
using QuantConnect.Orders;

namespace QuantConnect.Securities
{
    /// <summary>
    /// SecurityHolding is a base class for purchasing and holding a market item which manages the asset portfolio
    /// </summary>
    public class SecurityHolding
    {
        //Working Variables
        private decimal _averagePrice;      // Average fill price of the current position.
        private decimal _quantity;          // Signed position size (negative = short).
        private decimal _price;             // Last known market price.
        private decimal _totalSaleVolume;   // Cumulative transaction volume since algorithm start.
        private decimal _profit;            // Cumulative realized profit.
        private decimal _lastTradeProfit;   // Realized profit of the most recent closing trade.
        private decimal _totalFees;         // Cumulative fees since algorithm start.
        private readonly Security _security;

        /// <summary>
        /// Create a new holding class instance setting the initial properties to $0.
        /// </summary>
        /// <param name="security">The security being held</param>
        public SecurityHolding(Security security)
        {
            _security = security;
            //Total Sales Volume for the day
            _totalSaleVolume = 0;
            _lastTradeProfit = 0;
        }

        /// <summary>
        /// Create a new holding class instance copying the initial properties
        /// </summary>
        /// <param name="holding">The security being held</param>
        protected SecurityHolding(SecurityHolding holding)
        {
            _security = holding._security;
            _averagePrice = holding._averagePrice;
            _quantity = holding._quantity;
            _price = holding._price;
            _totalSaleVolume = holding._totalSaleVolume;
            _profit = holding._profit;
            _lastTradeProfit = holding._lastTradeProfit;
            _totalFees = holding._totalFees;
        }

        /// <summary>
        /// The security being held
        /// </summary>
        protected Security Security
        {
            get
            {
                return _security;
            }
        }

        /// <summary>
        /// Gets the current target holdings for this security
        /// </summary>
        public IPortfolioTarget Target
        {
            get; internal set;
        }

        /// <summary>
        /// Average price of the security holdings.
        /// </summary>
        public decimal AveragePrice
        {
            get
            {
                return _averagePrice;
            }
            protected set
            {
                _averagePrice = value;
            }
        }

        /// <summary>
        /// Quantity of the security held.
        /// </summary>
        /// <remarks>Positive indicates long holdings, negative quantity indicates a short holding</remarks>
        /// <seealso cref="AbsoluteQuantity"/>
        public decimal Quantity
        {
            get
            {
                return _quantity;
            }
            protected set
            {
                _quantity = value;
            }
        }

        /// <summary>
        /// Symbol identifier of the underlying security.
        /// </summary>
        public Symbol Symbol
        {
            get
            {
                return _security.Symbol;
            }
        }

        /// <summary>
        /// The security type of the symbol
        /// </summary>
        public SecurityType Type
        {
            get
            {
                return _security.Type;
            }
        }

        /// <summary>
        /// Leverage of the underlying security.
        /// </summary>
        public virtual decimal Leverage
        {
            get
            {
                return _security.BuyingPowerModel.GetLeverage(_security);
            }
        }

        /// <summary>
        /// Acquisition cost of the security total holdings.
        /// </summary>
        public virtual decimal HoldingsCost
        {
            get
            {
                // Converted to the account currency and scaled by the contract multiplier.
                return AveragePrice * Convert.ToDecimal(Quantity) * _security.QuoteCurrency.ConversionRate * _security.SymbolProperties.ContractMultiplier;
            }
        }

        /// <summary>
        /// Unlevered Acquisition cost of the security total holdings.
        /// </summary>
        public virtual decimal UnleveredHoldingsCost
        {
            get { return HoldingsCost/Leverage; }
        }

        /// <summary>
        /// Current market price of the security.
        /// </summary>
        public virtual decimal Price
        {
            get
            {
                return _price;
            }
            protected set
            {
                _price = value;
            }
        }

        /// <summary>
        /// Absolute holdings cost for current holdings in units of the account's currency
        /// </summary>
        /// <seealso cref="HoldingsCost"/>
        public virtual decimal AbsoluteHoldingsCost
        {
            get
            {
                return Math.Abs(HoldingsCost);
            }
        }

        /// <summary>
        /// Unlevered absolute acquisition cost of the security total holdings.
        /// </summary>
        public virtual decimal UnleveredAbsoluteHoldingsCost
        {
            get
            {
                return Math.Abs(UnleveredHoldingsCost);
            }
        }

        /// <summary>
        /// Market value of our holdings.
        /// </summary>
        public virtual decimal HoldingsValue
        {
            get
            {
                // Converted to the account currency and scaled by the contract multiplier.
                return _price * Quantity * _security.QuoteCurrency.ConversionRate * _security.SymbolProperties.ContractMultiplier;
            }
        }

        /// <summary>
        /// Absolute of the market value of our holdings.
        /// </summary>
        /// <seealso cref="HoldingsValue"/>
        public virtual decimal AbsoluteHoldingsValue
        {
            get { return Math.Abs(HoldingsValue); }
        }

        /// <summary>
        /// Boolean flag indicating if we hold any of the security
        /// </summary>
        public virtual bool HoldStock
        {
            get
            {
                return (AbsoluteQuantity > 0);
            }
        }

        /// <summary>
        /// Boolean flag indicating if we hold any of the security
        /// </summary>
        /// <remarks>Alias of HoldStock</remarks>
        /// <seealso cref="HoldStock"/>
        public virtual bool Invested
        {
            get
            {
                return HoldStock;
            }
        }

        /// <summary>
        /// The total transaction volume for this security since the algorithm started.
        /// </summary>
        public virtual decimal TotalSaleVolume
        {
            get { return _totalSaleVolume; }
        }

        /// <summary>
        /// Total fees for this company since the algorithm started.
        /// </summary>
        public virtual decimal TotalFees
        {
            get { return _totalFees; }
        }

        /// <summary>
        /// Boolean flag indicating we have a net positive holding of the security.
        /// </summary>
        /// <seealso cref="IsShort"/>
        public virtual bool IsLong
        {
            get
            {
                return Quantity > 0;
            }
        }

        /// <summary>
        /// Boolean flag indicating we have a net negative holding of the security.
        /// </summary>
        /// <seealso cref="IsLong"/>
        public virtual bool IsShort
        {
            get
            {
                return Quantity < 0;
            }
        }

        /// <summary>
        /// Absolute quantity of holdings of this security
        /// </summary>
        /// <seealso cref="Quantity"/>
        public virtual decimal AbsoluteQuantity
        {
            get
            {
                return Math.Abs(Quantity);
            }
        }

        /// <summary>
        /// Record of the closing profit from the last trade conducted.
        /// </summary>
        public virtual decimal LastTradeProfit
        {
            get
            {
                return _lastTradeProfit;
            }
        }

        /// <summary>
        /// Calculate the total profit for this security.
        /// </summary>
        /// <seealso cref="NetProfit"/>
        public virtual decimal Profit
        {
            get { return _profit; }
        }

        /// <summary>
        /// Return the net for this company measured by the profit less fees.
        /// </summary>
        /// <seealso cref="Profit"/>
        /// <seealso cref="TotalFees"/>
        public virtual decimal NetProfit
        {
            get
            {
                return Profit - TotalFees;
            }
        }

        /// <summary>
        /// Gets the unrealized profit as a percentage of holdings cost
        /// </summary>
        public decimal UnrealizedProfitPercent
        {
            get
            {
                // Guard against division by zero when the position is flat.
                if (AbsoluteHoldingsCost == 0) return 0m;
                return UnrealizedProfit/AbsoluteHoldingsCost;
            }
        }

        /// <summary>
        /// Unrealized profit of this security when absolute quantity held is more than zero.
        /// </summary>
        public virtual decimal UnrealizedProfit
        {
            get { return TotalCloseProfit(); }
        }

        /// <summary>
        /// Adds a fee to the running total of total fees.
/// </summary> /// <param name="newFee"></param> public void AddNewFee(decimal newFee) { _totalFees += newFee; } /// <summary> /// Adds a profit record to the running total of profit. /// </summary> /// <param name="profitLoss">The cash change in portfolio from closing a position</param> public void AddNewProfit(decimal profitLoss) { _profit += profitLoss; } /// <summary> /// Adds a new sale value to the running total trading volume in terms of the account currency /// </summary> /// <param name="saleValue"></param> public void AddNewSale(decimal saleValue) { _totalSaleVolume += saleValue; } /// <summary> /// Set the last trade profit for this security from a Portfolio.ProcessFill call. /// </summary> /// <param name="lastTradeProfit">Value of the last trade profit</param> public void SetLastTradeProfit(decimal lastTradeProfit) { _lastTradeProfit = lastTradeProfit; } /// <summary> /// Set the quantity of holdings and their average price after processing a portfolio fill. /// </summary> public virtual void SetHoldings(decimal averagePrice, int quantity) { _averagePrice = averagePrice; _quantity = quantity; } /// <summary> /// Set the quantity of holdings and their average price after processing a portfolio fill. /// </summary> public virtual void SetHoldings(decimal averagePrice, decimal quantity) { _averagePrice = averagePrice; _quantity = quantity; } /// <summary> /// Update local copy of closing price value. /// </summary> /// <param name="closingPrice">Price of the underlying asset to be used for calculating market price / portfolio value</param> public virtual void UpdateMarketPrice(decimal closingPrice) { _price = closingPrice; } /// <summary> /// Profit if we closed the holdings right now including the approximate fees. 
/// </summary> /// <remarks>Does not use the transaction model for market fills but should.</remarks> public virtual decimal TotalCloseProfit() { if (AbsoluteQuantity == 0) { return 0; } // this is in the account currency var marketOrder = new MarketOrder(_security.Symbol, -Quantity, _security.LocalTime.ConvertToUtc(_security.Exchange.TimeZone)); var orderFee = _security.FeeModel.GetOrderFee(_security, marketOrder); var price = marketOrder.Direction == OrderDirection.Sell ? _security.BidPrice : _security.AskPrice; return (price - AveragePrice)*Quantity*_security.QuoteCurrency.ConversionRate*_security.SymbolProperties.ContractMultiplier - orderFee; } } }
using System;
using System.Linq;
using System.Collections.Generic;
using System.Runtime.CompilerServices;

namespace lanterna.gui2
{
    /**
     * This emulates the behaviour of the GridLayout in SWT (as opposed to the one in AWT/Swing). I originally ported the
     * SWT class itself but due to licensing concerns (the eclipse license is not compatible with LGPL) I was advised not to
     * do that. This is a partial implementation and some of the semantics have changed, but in general it works the same
     * way so the SWT documentation will generally match.
     * <p>
     * You use the {@code GridLayout} by specifying a number of columns you want your grid to have and then when you add
     * components, you assign {@code LayoutData} to these components using the different static methods in this class
     * ({@code createLayoutData(..)}). You can set components to span both rows and columns, as well as defining how to
     * distribute the available space.
     */
    public class GridLayout : LayoutManager
    {
        /**
         * The enum is used to specify where in a grid cell a component should be placed, in the case that the preferred
         * size of the component is smaller than the space in the cell. This class will generally use two alignments, one
         * for horizontal and one for vertical.
         */
        public enum Alignment
        {
            /**
             * Place the component at the start of the cell (horizontally or vertically) and leave whatever space is left
             * after the preferred size empty.
             */
            BEGINNING,
            /**
             * Place the component at the middle of the cell (horizontally or vertically) and leave the space before and
             * after empty.
             */
            CENTER,
            /**
             * Place the component at the end of the cell (horizontally or vertically) and leave whatever space is left
             * before the preferred size empty.
             */
            END,
            /**
             * Force the component to be the same size as the table cell
             */
            FILL
        }

        /**
         * Layout data attached to each component: alignment in both axes, whether the component wants any surplus
         * space, and how many grid cells it spans.
         */
        class GridLayoutData : LayoutData
        {
            internal GridLayout.Alignment horizontalAlignment;
            internal GridLayout.Alignment verticalAlignment;
            internal bool grabExtraHorizontalSpace;
            internal bool grabExtraVerticalSpace;
            internal int horizontalSpan;
            internal int verticalSpan;

            public GridLayoutData(
                GridLayout.Alignment horizontalAlignment,
                GridLayout.Alignment verticalAlignment,
                bool grabExtraHorizontalSpace,
                bool grabExtraVerticalSpace,
                int horizontalSpan,
                int verticalSpan)
            {
                if (horizontalSpan < 1 || verticalSpan < 1)
                {
                    throw new ArgumentException("Horizontal/Vertical span must be 1 or greater");
                }

                this.horizontalAlignment = horizontalAlignment;
                this.verticalAlignment = verticalAlignment;
                this.grabExtraHorizontalSpace = grabExtraHorizontalSpace;
                this.grabExtraVerticalSpace = grabExtraVerticalSpace;
                this.horizontalSpan = horizontalSpan;
                this.verticalSpan = verticalSpan;
            }
        }

        // Default layout data used for any component without an explicit GridLayoutData attached.
        private static readonly GridLayout.GridLayoutData DEFAULT = new GridLayoutData(
            horizontalAlignment: GridLayout.Alignment.BEGINNING,
            verticalAlignment: GridLayout.Alignment.BEGINNING,
            grabExtraHorizontalSpace: false,
            grabExtraVerticalSpace: false,
            verticalSpan: 1,
            horizontalSpan: 1);

        /**
         * Creates a layout data object for {@code GridLayout}:s that specify the horizontal and vertical alignment for the
         * component in case the cell space is larger than the preferred size of the component
         * @param horizontalAlignment Horizontal alignment strategy
         * @param verticalAlignment Vertical alignment strategy
         * @return The layout data object containing the specified alignments
         */
        public static LayoutData createLayoutData(GridLayout.Alignment horizontalAlignment, GridLayout.Alignment verticalAlignment)
        {
            return GridLayout.createLayoutData(horizontalAlignment, verticalAlignment, false, false);
        }

        /**
         * Creates a layout data object for {@code GridLayout}:s that specify the horizontal and vertical alignment for the
         * component in case the cell space is larger than the preferred size of the component. This method also has fields
         * for indicating that the component would like to take more space if available to the container. For example, if
         * the container is assigned an area of 50x15, but all the child components in the grid together only
         * asks for 40x10, the remaining 10 columns and 5 rows will be empty. If just a single component asks for extra
         * space horizontally and/or vertically, the grid will expand out to fill the entire area and the extra space will be
         * assigned to the component that asked for it.
         *
         * @param horizontalAlignment Horizontal alignment strategy
         * @param verticalAlignment Vertical alignment strategy
         * @param grabExtraHorizontalSpace If set to {@code true}, this component will ask to be assigned extra horizontal
         *                                 space if there is any to assign
         * @param grabExtraVerticalSpace If set to {@code true}, this component will ask to be assigned extra vertical
         *                               space if there is any to assign
         * @return The layout data object containing the specified alignments and size requirements
         */
        public static LayoutData createLayoutData(
            GridLayout.Alignment horizontalAlignment,
            GridLayout.Alignment verticalAlignment,
            bool grabExtraHorizontalSpace,
            bool grabExtraVerticalSpace)
        {
            return GridLayout.createLayoutData(horizontalAlignment, verticalAlignment, grabExtraHorizontalSpace, grabExtraVerticalSpace, 1, 1);
        }

        /**
         * Creates a layout data object for {@code GridLayout}:s that specify the horizontal and vertical alignment for the
         * component in case the cell space is larger than the preferred size of the component. This method also has fields
         * for indicating that the component would like to take more space if available to the container. For example, if
         * the container is assigned an area of 50x15, but all the child components in the grid together only
         * asks for 40x10, the remaining 10 columns and 5 rows will be empty. If just a single component asks for extra
         * space horizontally and/or vertically, the grid will expand out to fill the entire area and the extra space will be
         * assigned to the component that asked for it. It also puts in data on how many rows and/or columns the component
         * should span.
         *
         * @param horizontalAlignment Horizontal alignment strategy
         * @param verticalAlignment Vertical alignment strategy
         * @param grabExtraHorizontalSpace If set to {@code true}, this component will ask to be assigned extra horizontal
         *                                 space if there is any to assign
         * @param grabExtraVerticalSpace If set to {@code true}, this component will ask to be assigned extra vertical
         *                               space if there is any to assign
         * @param horizontalSpan How many "cells" this component wants to span horizontally
         * @param verticalSpan How many "cells" this component wants to span vertically
         * @return The layout data object containing the specified alignments, size requirements and cell spanning
         */
        public static LayoutData createLayoutData(
            GridLayout.Alignment horizontalAlignment,
            GridLayout.Alignment verticalAlignment,
            bool grabExtraHorizontalSpace,
            bool grabExtraVerticalSpace,
            int horizontalSpan,
            int verticalSpan)
        {
            return new GridLayout.GridLayoutData(
                horizontalAlignment,
                verticalAlignment,
                grabExtraHorizontalSpace,
                grabExtraVerticalSpace,
                horizontalSpan,
                verticalSpan);
        }

        /**
         * This is a shortcut method that will create a grid layout data object that will expand its cell as much as it can
         * horizontally and make the component occupy the whole area horizontally and center it vertically
         * @param horizontalSpan How many cells to span horizontally
         * @return Layout data object with the specified span and horizontally expanding as much as it can
         */
        public static LayoutData createHorizontallyFilledLayoutData(int horizontalSpan)
        {
            return GridLayout.createLayoutData(
                GridLayout.Alignment.FILL,
                GridLayout.Alignment.CENTER,
                true,
                false,
                horizontalSpan,
                1);
        }

        /**
         * This is a shortcut method that will create a grid layout data object that will expand its cell as much as it can
         * horizontally, align the component at the end (right side) of that cell and center it vertically
         * @param horizontalSpan How many cells to span horizontally
         * @return Layout data object with the specified span, horizontally end-aligned and grabbing extra horizontal space
         */
        public static LayoutData createHorizontallyEndAlignedLayoutData(int horizontalSpan)
        {
            return GridLayout.createLayoutData(
                GridLayout.Alignment.END,
                GridLayout.Alignment.CENTER,
                true,
                false,
                horizontalSpan,
                1);
        }

        private int numberOfColumns;
        private int horizontalSpacing;
        private int verticalSpacing;
        private int topMarginSize;
        private int bottomMarginSize;
        private int leftMarginSize;
        private int rightMarginSize;

        // Set whenever a spacing/margin setter runs; cleared after a successful doLayout().
        private bool changed;

        /**
         * Creates a new {@code GridLayout} with the specified number of columns. Initially, this layout will have a
         * horizontal spacing of 1 and vertical spacing of 0, with a left and right margin of 1.
         * @param numberOfColumns Number of columns in this grid
         */
        public GridLayout(int numberOfColumns)
        {
            this.numberOfColumns = numberOfColumns;
            this.horizontalSpacing = 1;
            this.verticalSpacing = 0;
            this.topMarginSize = 0;
            this.bottomMarginSize = 0;
            this.leftMarginSize = 1;
            this.rightMarginSize = 1;
            this.changed = true;
        }

        /**
         * Returns the horizontal spacing, i.e. the number of empty columns between each cell
         * @return Horizontal spacing
         */
        public virtual int getHorizontalSpacing()
        {
            return this.horizontalSpacing;
        }

        /**
         * Sets the horizontal spacing, i.e. the number of empty columns between each cell
         * @param horizontalSpacing New horizontal spacing
         * @return Itself
         */
        public virtual GridLayout setHorizontalSpacing(int horizontalSpacing)
        {
            if (horizontalSpacing < 0)
            {
                throw new ArgumentException("Horizontal spacing cannot be less than 0");
            }
            this.horizontalSpacing = horizontalSpacing;
            this.changed = true;
            return this;
        }

        /**
         * Returns the vertical spacing, i.e. the number of empty rows between each row
         * @return Vertical spacing
         */
        public virtual int getVerticalSpacing()
        {
            return this.verticalSpacing;
        }

        /**
         * Sets the vertical spacing, i.e. the number of empty rows between each row
         * @param verticalSpacing New vertical spacing
         * @return Itself
         */
        public virtual GridLayout setVerticalSpacing(int verticalSpacing)
        {
            if (verticalSpacing < 0)
            {
                throw new ArgumentException("Vertical spacing cannot be less than 0");
            }
            this.verticalSpacing = verticalSpacing;
            this.changed = true;
            return this;
        }

        /**
         * Returns the top margin, i.e. number of empty rows above the first row in the grid
         * @return Top margin, in number of rows
         */
        public virtual int getTopMarginSize()
        {
            return this.topMarginSize;
        }

        /**
         * Sets the top margin, i.e. number of empty rows above the first row in the grid
         * @param topMarginSize Top margin, in number of rows
         * @return Itself
         */
        public virtual GridLayout setTopMarginSize(int topMarginSize)
        {
            if (topMarginSize < 0)
            {
                throw new ArgumentException("Top margin size cannot be less than 0");
            }
            this.topMarginSize = topMarginSize;
            this.changed = true;
            return this;
        }

        /**
         * Returns the bottom margin, i.e. number of empty rows below the last row in the grid
         * @return Bottom margin, in number of rows
         */
        public virtual int getBottomMarginSize()
        {
            return this.bottomMarginSize;
        }

        /**
         * Sets the bottom margin, i.e. number of empty rows below the last row in the grid
         * @param bottomMarginSize Bottom margin, in number of rows
         * @return Itself
         */
        public virtual GridLayout setBottomMarginSize(int bottomMarginSize)
        {
            if (bottomMarginSize < 0)
            {
                throw new ArgumentException("Bottom margin size cannot be less than 0");
            }
            this.bottomMarginSize = bottomMarginSize;
            this.changed = true;
            return this;
        }

        /**
         * Returns the left margin, i.e. number of empty columns left of the first column in the grid
         * @return Left margin, in number of columns
         */
        public virtual int getLeftMarginSize()
        {
            return this.leftMarginSize;
        }

        /**
         * Sets the left margin, i.e. number of empty columns left of the first column in the grid
         * @param leftMarginSize Left margin, in number of columns
         * @return Itself
         */
        public virtual GridLayout setLeftMarginSize(int leftMarginSize)
        {
            if (leftMarginSize < 0)
            {
                throw new ArgumentException("Left margin size cannot be less than 0");
            }
            this.leftMarginSize = leftMarginSize;
            this.changed = true;
            return this;
        }

        /**
         * Returns the right margin, i.e. number of empty columns right of the last column in the grid
         * @return Right margin, in number of columns
         */
        public virtual int getRightMarginSize()
        {
            return this.rightMarginSize;
        }

        /**
         * Sets the right margin, i.e. number of empty columns right of the last column in the grid
         * @param rightMarginSize Right margin, in number of columns
         * @return Itself
         */
        public virtual GridLayout setRightMarginSize(int rightMarginSize)
        {
            if (rightMarginSize < 0)
            {
                throw new ArgumentException("Right margin size cannot be less than 0");
            }
            this.rightMarginSize = rightMarginSize;
            this.changed = true;
            return this;
        }

        /**
         * Returns {@code true} if any spacing or margin has changed since the last layout pass.
         */
        public virtual bool hasChanged()
        {
            return this.changed;
        }

        /**
         * Computes the preferred size of the grid: the sum of the preferred column widths and row heights,
         * plus inter-cell spacing and the four margins.
         */
        public virtual TerminalSize getPreferredSize(List<Component> components)
        {
            TerminalSize preferredSize = TerminalSize.ZERO;
            if (components.Count == 0)
            {
                // An empty grid still occupies its margins
                return preferredSize.withRelative(
                    this.leftMarginSize + this.rightMarginSize,
                    this.topMarginSize + this.bottomMarginSize);
            }

            Component[][] table = this.buildTable(components);
            table = this.eliminateUnusedRowsAndColumns(table);

            //Figure out each column first, this can be done independently of the row heights
            int preferredWidth = 0;
            int preferredHeight = 0;
            foreach (var width in this.getPreferredColumnWidths(table))
            {
                preferredWidth += width;
            }
            foreach (var height in this.getPreferredRowHeights(table))
            {
                preferredHeight += height;
            }
            preferredSize = preferredSize.withRelative(preferredWidth, preferredHeight);
            preferredSize = preferredSize.withRelativeColumns(this.leftMarginSize + this.rightMarginSize + (table[0].Length - 1) * this.horizontalSpacing);
            preferredSize = preferredSize.withRelativeRows(this.topMarginSize + this.bottomMarginSize + (table.Length - 1) * this.verticalSpacing);
            return preferredSize;
        }

        /**
         * Lays out the components inside {@code area}: builds the cell table, computes/shrinks/expands
         * column widths and row heights, then positions and sizes each component according to its alignment.
         */
        public virtual void doLayout(TerminalSize area, List<Component> components)
        {
            //Sanity check, if the area is way too small, just return
            Component[][] table = this.buildTable(components);
            table = this.eliminateUnusedRowsAndColumns(table);

            if (area.Equals(TerminalSize.ZERO) ||
                table.Length == 0 ||
                area.getColumns() <= this.leftMarginSize + this.rightMarginSize + (table[0].Length - 1) * this.horizontalSpacing ||
                area.getRows() <= this.bottomMarginSize + this.topMarginSize + (table.Length - 1) * this.verticalSpacing)
            {
                return;
            }

            //Adjust area to the margins
            area = area.withRelative(-this.leftMarginSize - this.rightMarginSize, -this.topMarginSize - this.bottomMarginSize);

            var sizeMap = new Dictionary<Component, TerminalSize>();
            var positionMap = new Dictionary<Component, TerminalPosition>();

            //Figure out each column first, this can be done independently of the row heights
            int[] columnWidths = this.getPreferredColumnWidths(table);

            //Take notes of which columns we can expand if the usable area is larger than what the components want
            var expandableColumns = this.getExpandableColumns(table);

            //Next, start shrinking to make sure it fits the size of the area we are trying to lay out on.
            //Notice we subtract the horizontalSpacing to take the space between components into account
            TerminalSize areaWithoutHorizontalSpacing = area.withRelativeColumns(-this.horizontalSpacing * (table[0].Length - 1));
            int totalWidth = this.shrinkWidthToFitArea(areaWithoutHorizontalSpacing, columnWidths);

            //Finally, if there is extra space, make the expandable columns larger
            while (areaWithoutHorizontalSpacing.getColumns() > totalWidth && expandableColumns.Count > 0)
            {
                totalWidth = this.grabExtraHorizontalSpace(areaWithoutHorizontalSpacing, columnWidths, expandableColumns, totalWidth);
            }

            //Now repeat for rows
            int[] rowHeights = this.getPreferredRowHeights(table);
            var expandableRows = this.getExpandableRows(table);
            TerminalSize areaWithoutVerticalSpacing = area.withRelativeRows(-this.verticalSpacing * (table.Length - 1));
            int totalHeight = this.shrinkHeightToFitArea(areaWithoutVerticalSpacing, rowHeights);
            while (areaWithoutVerticalSpacing.getRows() > totalHeight && expandableRows.Count > 0)
            {
                totalHeight = this.grabExtraVerticalSpace(areaWithoutVerticalSpacing, rowHeights, expandableRows, totalHeight);
            }

            //Ok, all constraints are in place, we can start placing out components. To simplify, do it horizontally first
            //and vertically after
            TerminalPosition tableCellTopLeft = TerminalPosition.TOP_LEFT_CORNER;
            for (int y = 0; y < table.Length; y++)
            {
                tableCellTopLeft = tableCellTopLeft.withColumn(0);
                for (int x = 0; x < table[y].Length; x++)
                {
                    Component component = table[y][x];
                    if (component != null && !positionMap.ContainsKey(component))
                    {
                        GridLayout.GridLayoutData layoutData = this.getLayoutData(component);
                        TerminalSize size = component.getPreferredSize();
                        TerminalPosition position = tableCellTopLeft;

                        //Total usable space across all cells spanned by this component (spacing included)
                        int availableHorizontalSpace = 0;
                        int availableVerticalSpace = 0;
                        for (int i = 0; i < layoutData.horizontalSpan; i++)
                        {
                            availableHorizontalSpace += columnWidths[x + i] + ((i <= 0) ? 0 : this.horizontalSpacing);
                        }
                        for (int j = 0; j < layoutData.verticalSpan; j++)
                        {
                            availableVerticalSpace += rowHeights[y + j] + ((j <= 0) ? 0 : this.verticalSpacing);
                        }

                        //Make sure to obey the size restrictions
                        size = size.withColumns(Math.Min(size.getColumns(), availableHorizontalSpace));
                        size = size.withRows(Math.Min(size.getRows(), availableVerticalSpace));

                        //BUGFIX: the original switch assigned END's behaviour to BEGINNING and FILL's behaviour
                        //to END (and did nothing for FILL), contradicting the Alignment documentation above.
                        //BEGINNING means "place at the start of the cell", which is the unmodified position.
                        switch (layoutData.horizontalAlignment)
                        {
                            case Alignment.CENTER:
                                position = position.withRelativeColumn((availableHorizontalSpace - size.getColumns()) / 2);
                                break;
                            case Alignment.END:
                                position = position.withRelativeColumn(availableHorizontalSpace - size.getColumns());
                                break;
                            case Alignment.FILL:
                                size = size.withColumns(availableHorizontalSpace);
                                break;
                            default:
                                //BEGINNING: keep the component at the cell's start
                                break;
                        }
                        switch (layoutData.verticalAlignment)
                        {
                            case Alignment.CENTER:
                                position = position.withRelativeRow((availableVerticalSpace - size.getRows()) / 2);
                                break;
                            case Alignment.END:
                                position = position.withRelativeRow(availableVerticalSpace - size.getRows());
                                break;
                            case Alignment.FILL:
                                size = size.withRows(availableVerticalSpace);
                                break;
                            default:
                                //BEGINNING: keep the component at the cell's start
                                break;
                        }

                        sizeMap.Add(component, size);
                        positionMap.Add(component, position);
                    }
                    tableCellTopLeft = tableCellTopLeft.withRelativeColumn(columnWidths[x] + this.horizontalSpacing);
                }
                tableCellTopLeft = tableCellTopLeft.withRelativeRow(rowHeights[y] + this.verticalSpacing);
            }

            //Apply the margins here
            foreach (var component2 in components)
            {
                component2.setPosition((positionMap[component2]).withRelative(this.leftMarginSize, this.topMarginSize));
                component2.setSize(sizeMap[component2]);
            }
            this.changed = false;
        }

        /**
         * Computes the preferred width of each column: span-1 components claim their preferred width first,
         * then wider-spanning components enlarge their columns round-robin until they fit.
         */
        private int[] getPreferredColumnWidths(Component[][] table)
        {
            //actualNumberOfColumns may be different from this.numberOfColumns since some columns may have been eliminated
            int actualNumberOfColumns = table[0].Length;
            int[] columnWidths = new int[actualNumberOfColumns];

            //Start by letting all span = 1 columns take what they need
            foreach (Component[] row in table)
            {
                for (int i = 0; i < actualNumberOfColumns; i++)
                {
                    Component component = row[i];
                    if (component != null)
                    {
                        GridLayout.GridLayoutData layoutData = this.getLayoutData(component);
                        if (layoutData.horizontalSpan == 1)
                        {
                            columnWidths[i] = Math.Max(columnWidths[i], component.getPreferredSize().getColumns());
                        }
                    }
                }
            }

            //Next, do span > 1 and enlarge if necessary
            foreach (Component[] row2 in table)
            {
                int j = 0;
                while (j < actualNumberOfColumns)
                {
                    Component component2 = row2[j];
                    if (component2 == null)
                    {
                        j++;
                        continue;
                    }
                    GridLayout.GridLayoutData layoutData2 = this.getLayoutData(component2);
                    if (layoutData2.horizontalSpan > 1)
                    {
                        int accumWidth = 0;
                        for (int k = j; k < j + layoutData2.horizontalSpan; k++)
                        {
                            accumWidth += columnWidths[k];
                        }
                        int preferredWidth = component2.getPreferredSize().getColumns();
                        if (preferredWidth > accumWidth)
                        {
                            //Distribute the deficit one column at a time, cycling through the spanned columns
                            int columnOffset = 0;
                            do
                            {
                                columnWidths[j + columnOffset]++;
                                columnOffset++;
                                accumWidth++;
                                if (columnOffset == layoutData2.horizontalSpan)
                                {
                                    columnOffset = 0;
                                }
                            }
                            while (preferredWidth > accumWidth);
                        }
                    }
                    j += layoutData2.horizontalSpan;
                }
            }
            return columnWidths;
        }

        /**
         * Computes the preferred height of each row, mirroring {@link #getPreferredColumnWidths} for the
         * vertical axis.
         */
        private int[] getPreferredRowHeights(Component[][] table)
        {
            int numberOfRows = table.Length;
            int[] rowHeights = new int[numberOfRows];

            //Start by letting all span = 1 rows take what they need
            int rowIndex = 0;
            foreach (Component[] row in table)
            {
                for (int i = 0; i < row.Length; i++)
                {
                    Component component = row[i];
                    if (component != null)
                    {
                        GridLayout.GridLayoutData layoutData = this.getLayoutData(component);
                        if (layoutData.verticalSpan == 1)
                        {
                            rowHeights[rowIndex] = Math.Max(rowHeights[rowIndex], component.getPreferredSize().getRows());
                        }
                    }
                }
                rowIndex++;
            }

            //Next, do span > 1 and enlarge if necessary
            for (int x = 0; x < this.numberOfColumns; x++)
            {
                int y = 0;
                while (y < numberOfRows && y < table.Length)
                {
                    if (x >= table[y].Length)
                    {
                        y++;
                        continue;
                    }
                    Component component2 = table[y][x];
                    if (component2 == null)
                    {
                        y++;
                        continue;
                    }
                    GridLayout.GridLayoutData layoutData2 = this.getLayoutData(component2);
                    if (layoutData2.verticalSpan > 1)
                    {
                        int accumulatedHeight = 0;
                        for (int j = y; j < y + layoutData2.verticalSpan; j++)
                        {
                            accumulatedHeight += rowHeights[j];
                        }
                        int preferredHeight = component2.getPreferredSize().getRows();
                        if (preferredHeight > accumulatedHeight)
                        {
                            //Distribute the deficit one row at a time, cycling through the spanned rows
                            int rowOffset = 0;
                            do
                            {
                                rowHeights[y + rowOffset]++;
                                rowOffset++;
                                accumulatedHeight++;
                                if (rowOffset == layoutData2.verticalSpan)
                                {
                                    rowOffset = 0;
                                }
                            }
                            while (preferredHeight > accumulatedHeight);
                        }
                    }
                    y += layoutData2.verticalSpan;
                }
            }
            return rowHeights;
        }

        /**
         * Returns the indexes of all columns containing at least one component that asked to grab extra
         * horizontal space.
         */
        private List<int> getExpandableColumns(Component[][] table)
        {
            var expandableColumns = new List<int>();
            foreach (var row in table)
            {
                for (int i = 0; i < row.Length; i++)
                {
                    if (row[i] != null)
                    {
                        GridLayout.GridLayoutData layoutData = this.getLayoutData(row[i]);
                        if (layoutData.grabExtraHorizontalSpace)
                        {
                            expandableColumns.Add(i);
                        }
                    }
                }
            }
            return expandableColumns;
        }

        /**
         * Returns the indexes of all rows containing at least one component that asked to grab extra
         * vertical space.
         */
        private List<int> getExpandableRows(Component[][] table)
        {
            List<int> expandableRows = new List<int>();
            for (int rowIndex = 0; rowIndex < table.Length; rowIndex++)
            {
                Component[] row = table[rowIndex];
                for (int columnIndex = 0; columnIndex < row.Length; columnIndex++)
                {
                    if (row[columnIndex] != null)
                    {
                        GridLayout.GridLayoutData layoutData = this.getLayoutData(row[columnIndex]);
                        if (layoutData.grabExtraVerticalSpace)
                        {
                            expandableRows.Add(rowIndex);
                        }
                    }
                }
            }
            return expandableRows;
        }

        /**
         * Shrinks column widths round-robin (never below 0) until their sum fits within {@code area}.
         * @return The resulting total width
         */
        private int shrinkWidthToFitArea(TerminalSize area, int[] columnWidths)
        {
            int totalWidth = 0;
            foreach (int width in columnWidths)
            {
                totalWidth += width;
            }
            if (totalWidth > area.getColumns())
            {
                int columnOffset = 0;
                do
                {
                    if (columnWidths[columnOffset] > 0)
                    {
                        columnWidths[columnOffset]--;
                        totalWidth--;
                    }
                    columnOffset++;
                    //BUGFIX: wrap on the array length, not this.numberOfColumns; the table may have fewer
                    //columns after eliminateUnusedRowsAndColumns, which made the old code index out of range.
                    if (columnOffset == columnWidths.Length)
                    {
                        columnOffset = 0;
                    }
                }
                while (totalWidth > area.getColumns());
            }
            return totalWidth;
        }

        /**
         * Shrinks row heights round-robin (never below 0) until their sum fits within {@code area}.
         * @return The resulting total height
         */
        private int shrinkHeightToFitArea(TerminalSize area, int[] rowHeights)
        {
            int totalHeight = 0;
            foreach (int height in rowHeights)
            {
                totalHeight += height;
            }
            if (totalHeight > area.getRows())
            {
                int rowOffset = 0;
                do
                {
                    if (rowHeights[rowOffset] > 0)
                    {
                        rowHeights[rowOffset]--;
                        totalHeight--;
                    }
                    rowOffset++;
                    if (rowOffset == rowHeights.Length)
                    {
                        rowOffset = 0;
                    }
                }
                while (totalHeight > area.getRows());
            }
            return totalHeight;
        }

        /**
         * Distributes one extra terminal column to each expandable column until the area is filled.
         * @return The resulting total width
         */
        private int grabExtraHorizontalSpace(TerminalSize area, int[] columnWidths, List<int> expandableColumns, int totalWidth)
        {
            foreach (int columnIndex in expandableColumns)
            {
                columnWidths[columnIndex]++;
                totalWidth++;
                if (area.getColumns() == totalWidth)
                {
                    break;
                }
            }
            return totalWidth;
        }

        /**
         * Distributes one extra terminal row to each expandable row until the area is filled.
         * @return The resulting total height
         */
        private int grabExtraVerticalSpace(TerminalSize area, int[] rowHeights, List<int> expandableRows, int totalHeight)
        {
            foreach (int rowIndex in expandableRows)
            {
                rowHeights[rowIndex]++;
                totalHeight++;
                //BUGFIX: compare the accumulated height against the area's rows, not its columns
                //(copy-paste error from grabExtraHorizontalSpace that could over- or under-expand rows)
                if (area.getRows() == totalHeight)
                {
                    break;
                }
            }
            return totalHeight;
        }

        /**
         * Places the components into a 2-D cell table, honoring horizontal and vertical spans by duplicating
         * the component reference into every cell it covers.
         */
        private Component[][] buildTable(List<Component> components)
        {
            var rows = new List<Component[]>();
            var hspans = new List<int[]>();
            var vspans = new List<int[]>();
            int rowCount = 0;
            int rowsExtent = 1;
            var toBePlaced = new LinkedList<Component>(components);
            while (toBePlaced.Count > 0 || rowCount < rowsExtent)
            {
                //Start new row
                Component[] row = new Component[this.numberOfColumns];
                int[] hspan = new int[this.numberOfColumns];
                int[] vspan = new int[this.numberOfColumns];

                for (int i = 0; i < this.numberOfColumns; i++)
                {
                    if (i > 0 && hspan[i - 1] > 1)
                    {
                        //Continuation of a horizontally spanning component from the cell to the left
                        row[i] = row[i - 1];
                        hspan[i] = hspan[i - 1] - 1;
                        vspan[i] = vspan[i - 1];
                    }
                    else if (rowCount > 0 && vspans[rowCount - 1][i] > 1)
                    {
                        //Continuation of a vertically spanning component from the row above
                        row[i] = rows[rowCount - 1][i];
                        hspan[i] = hspans[rowCount - 1][i];
                        vspan[i] = vspans[rowCount - 1][i] - 1;
                    }
                    else if (toBePlaced.Count > 0)
                    {
                        Component component = toBePlaced.First();
                        toBePlaced.RemoveFirst();

                        GridLayout.GridLayoutData gridLayoutData = this.getLayoutData(component);
                        row[i] = component;
                        hspan[i] = gridLayoutData.horizontalSpan;
                        vspan[i] = gridLayoutData.verticalSpan;
                        rowsExtent = Math.Max(rowsExtent, rowCount + gridLayoutData.verticalSpan);
                    }
                    else
                    {
                        //No more components: pad the rest of the row with empty cells
                        row[i] = null;
                        hspan[i] = 1;
                        vspan[i] = 1;
                    }
                }

                rows.Add(row);
                hspans.Add(hspan);
                vspans.Add(vspan);
                rowCount++;
            }
            return rows.ToArray();
        }

        /**
         * Removes columns/rows that are exact duplicates of their left/upper neighbour (artifacts of
         * span expansion), so the sizing passes don't count the same component twice.
         */
        private Component[][] eliminateUnusedRowsAndColumns(Component[][] table)
        {
            if (table.Length == 0)
            {
                return table;
            }
            //Could make this into a Set, but I doubt there will be any real gain in performance as these are probably going
            //to be very small.
            var rowsToRemove = new List<int>();
            var columnsToRemove = new List<int>();

            int tableRows = table.Length;
            int tableColumns = table[0].Length;

            //Scan for unnecessary columns.
            //BUGFIX: the original used `goto` to bail out of the whole scan on the first non-duplicate
            //column/row; the Java original uses a labeled `continue`, which only skips that one column/row.
            for (int column = tableColumns - 1; column > 0; column--)
            {
                bool columnIsDuplicate = true;
                foreach (Component[] row in table)
                {
                    if (row[column] != row[column - 1])
                    {
                        columnIsDuplicate = false;
                        break;
                    }
                }
                if (columnIsDuplicate)
                {
                    columnsToRemove.Add(column);
                }
            }

            //Scan for unnecessary rows
            for (int row = tableRows - 1; row > 0; row--)
            {
                bool rowIsDuplicate = true;
                for (int column = 0; column < tableColumns; column++)
                {
                    if (table[row][column] != table[row - 1][column])
                    {
                        rowIsDuplicate = false;
                        break;
                    }
                }
                if (rowIsDuplicate)
                {
                    rowsToRemove.Add(row);
                }
            }

            //If there's nothing to remove, just return the same
            if (rowsToRemove.Count == 0 && columnsToRemove.Count == 0)
            {
                return table;
            }

            //Build a new table with rows & columns eliminated
            Component[][] newTable = new Component[tableRows - rowsToRemove.Count][];
            int insertedRowCounter = 0;
            for (int row3 = 0; row3 < tableRows; row3++)
            {
                //BUGFIX: removed rows must be skipped; the original copied every row, overrunning the
                //shrunken newTable array whenever rowsToRemove was non-empty.
                if (rowsToRemove.Contains(row3))
                {
                    continue;
                }
                Component[] newColumn = new Component[tableColumns - columnsToRemove.Count];
                int insertedColumnCounter = 0;
                for (int column3 = 0; column3 < tableColumns; column3++)
                {
                    if (!columnsToRemove.Contains(column3))
                    {
                        newColumn[insertedColumnCounter] = table[row3][column3];
                        insertedColumnCounter++;
                    }
                }
                newTable[insertedRowCounter] = newColumn;
                insertedRowCounter++;
            }
            return newTable;
        }

        /**
         * Returns the component's attached {@code GridLayoutData}, or the shared DEFAULT if the component
         * has no layout data or data of a different type.
         */
        private GridLayout.GridLayoutData getLayoutData(Component component)
        {
            LayoutData layoutData = component.getLayoutData();
            if (layoutData == null || !(layoutData is GridLayout.GridLayoutData))
            {
                return GridLayout.DEFAULT;
            }
            return (GridLayout.GridLayoutData)layoutData;
        }
    }
}
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;

namespace UnityEditor.XCodeEditor
{
    // In-memory representation of an Xcode project.pbxproj file, parsed into
    // PBX* dictionaries that can be modified and written back.
    public partial class XCProject : System.IDisposable
    {
        private PBXDictionary _datastore;
        public PBXDictionary _objects;
        //private PBXDictionary _configurations;

        private PBXGroup _rootGroup;
        //private string _defaultConfigurationName;
        private string _rootObjectKey;

        public string projectRootPath { get; private set; }
        private FileInfo projectFileInfo;

        public string filePath { get; private set; }
        //private string sourcePathRoot;
        private bool modified = false;

        #region Data

        // Objects — lazily materialized typed views over _objects (see Properties).
        private PBXSortedDictionary<PBXBuildFile> _buildFiles;
        private PBXSortedDictionary<PBXGroup> _groups;
        private PBXSortedDictionary<PBXFileReference> _fileReferences;
        private PBXDictionary<PBXNativeTarget> _nativeTargets;

        private PBXDictionary<PBXFrameworksBuildPhase> _frameworkBuildPhases;
        private PBXDictionary<PBXResourcesBuildPhase> _resourcesBuildPhases;
        private PBXDictionary<PBXShellScriptBuildPhase> _shellScriptBuildPhases;
        private PBXDictionary<PBXSourcesBuildPhase> _sourcesBuildPhases;
        private PBXDictionary<PBXCopyFilesBuildPhase> _copyBuildPhases;

        private PBXDictionary<PBXVariantGroup> _variantGroups;
        private PBXDictionary<XCBuildConfiguration> _buildConfigurations;
        private PBXSortedDictionary<XCConfigurationList> _configurationLists;

        private PBXProject _project;

        #endregion

        #region Constructor

        public XCProject()
        {
        }

        // Loads the project at filePath, which may be either the .xcodeproj
        // bundle itself or a directory containing exactly one .xcodeproj.
        public XCProject( string filePath ) : this()
        {
            if( !System.IO.Directory.Exists( filePath ) ) {
                Debug.LogWarning( "XCode project path does not exist: " + filePath );
                return;
            }

            if( filePath.EndsWith( ".xcodeproj" ) ) {
                Debug.Log( "Opening project " + filePath );
                this.projectRootPath = Path.GetDirectoryName( filePath );
                this.filePath = filePath;
            } else {
                Debug.Log( "Looking for xcodeproj files in " + filePath );
                string[] projects = System.IO.Directory.GetDirectories( filePath, "*.xcodeproj" );
                if( projects.Length == 0 ) {
                    Debug.LogWarning( "Error: missing xcodeproj file" );
                    return;
                }

                this.projectRootPath = filePath;
                //if the path is relative to the project, we need to make it absolute
                if (!System.IO.Path.IsPathRooted(projectRootPath))
                    projectRootPath = Application.dataPath.Replace("Assets", "") + projectRootPath;
                //Debug.Log ("projectRootPath adjusted to " + projectRootPath);
                this.filePath = projects[ 0 ];
            }

            projectFileInfo = new FileInfo( Path.Combine( this.filePath, "project.pbxproj" ) );
            // FIX: dispose the StreamReader returned by OpenText(); the
            // original leaked it (file handle stayed open until finalization).
            string contents;
            using( StreamReader reader = projectFileInfo.OpenText() ) {
                contents = reader.ReadToEnd();
            }

            PBXParser parser = new PBXParser();
            _datastore = parser.Decode( contents );
            if( _datastore == null ) {
                throw new System.Exception( "Project file not found at file path " + filePath );
            }

            if( !_datastore.ContainsKey( "objects" ) ) {
                Debug.Log( "Errore " + _datastore.Count );
                return;
            }

            _objects = (PBXDictionary)_datastore["objects"];
            modified = false;

            _rootObjectKey = (string)_datastore["rootObject"];
            if( !string.IsNullOrEmpty( _rootObjectKey ) ) {
                _project = new PBXProject( _rootObjectKey, (PBXDictionary)_objects[ _rootObjectKey ] );
                _rootGroup = new PBXGroup( _rootObjectKey, (PBXDictionary)_objects[ _project.mainGroupID ] );
            }
            else {
                Debug.LogWarning( "error: project has no root object" );
                _project = null;
                _rootGroup = null;
            }
        }

        #endregion

        #region Properties

        public PBXProject project {
            get { return _project; }
        }

        public PBXGroup rootGroup {
            get { return _rootGroup; }
        }

        // Each property below lazily wraps _objects in a typed dictionary view
        // on first access.
        public PBXSortedDictionary<PBXBuildFile> buildFiles {
            get {
                if( _buildFiles == null ) {
                    _buildFiles = new PBXSortedDictionary<PBXBuildFile>( _objects );
                }
                return _buildFiles;
            }
        }

        public PBXSortedDictionary<PBXGroup> groups {
            get {
                if( _groups == null ) {
                    _groups = new PBXSortedDictionary<PBXGroup>( _objects );
                }
                return _groups;
            }
        }

        public PBXSortedDictionary<PBXFileReference> fileReferences {
            get {
                if( _fileReferences == null ) {
                    _fileReferences = new PBXSortedDictionary<PBXFileReference>( _objects );
                }
                return _fileReferences;
            }
        }

        public PBXDictionary<PBXVariantGroup> variantGroups {
            get {
                if( _variantGroups == null ) {
                    _variantGroups = new PBXDictionary<PBXVariantGroup>( _objects );
                }
                return _variantGroups;
            }
        }

        public PBXDictionary<PBXNativeTarget> nativeTargets {
            get {
                if( _nativeTargets == null ) {
                    _nativeTargets = new PBXDictionary<PBXNativeTarget>( _objects );
                }
                return _nativeTargets;
            }
        }

        public PBXDictionary<XCBuildConfiguration> buildConfigurations {
            get {
                if( _buildConfigurations == null ) {
                    _buildConfigurations = new PBXDictionary<XCBuildConfiguration>( _objects );
                }
                return _buildConfigurations;
            }
        }

        public PBXSortedDictionary<XCConfigurationList> configurationLists {
            get {
                if( _configurationLists == null ) {
                    _configurationLists = new PBXSortedDictionary<XCConfigurationList>( _objects );
                }
                return _configurationLists;
            }
        }

        public PBXDictionary<PBXFrameworksBuildPhase> frameworkBuildPhases {
            get {
                if( _frameworkBuildPhases == null ) {
                    _frameworkBuildPhases = new PBXDictionary<PBXFrameworksBuildPhase>( _objects );
                }
                return _frameworkBuildPhases;
            }
        }

        public PBXDictionary<PBXResourcesBuildPhase> resourcesBuildPhases {
            get {
                if( _resourcesBuildPhases == null ) {
                    _resourcesBuildPhases = new PBXDictionary<PBXResourcesBuildPhase>( _objects );
                }
                return _resourcesBuildPhases;
            }
        }

        public PBXDictionary<PBXShellScriptBuildPhase> shellScriptBuildPhases {
            get {
                if( _shellScriptBuildPhases == null ) {
                    _shellScriptBuildPhases = new PBXDictionary<PBXShellScriptBuildPhase>( _objects );
                }
                return _shellScriptBuildPhases;
            }
        }

        public PBXDictionary<PBXSourcesBuildPhase> sourcesBuildPhases {
            get {
                if( _sourcesBuildPhases == null ) {
                    _sourcesBuildPhases = new PBXDictionary<PBXSourcesBuildPhase>( _objects );
                }
                return _sourcesBuildPhases;
            }
        }

        public PBXDictionary<PBXCopyFilesBuildPhase> copyBuildPhases {
            get {
                if( _copyBuildPhases == null ) {
                    _copyBuildPhases = new PBXDictionary<PBXCopyFilesBuildPhase>( _objects );
                }
                return _copyBuildPhases;
            }
        }

        #endregion

        #region PBXMOD

        public bool AddOtherCFlags( string flag ) {
return AddOtherCFlags( new PBXList( flag ) );
        }

        // Appends OTHER_CFLAGS entries to every build configuration.
        public bool AddOtherCFlags( PBXList flags )
        {
            foreach( KeyValuePair<string, XCBuildConfiguration> buildConfig in buildConfigurations ) {
                buildConfig.Value.AddOtherCFlags( flags );
            }
            modified = true;
            return modified;
        }

        public bool AddOtherLinkerFlags( string flag )
        {
            return AddOtherLinkerFlags( new PBXList( flag ) );
        }

        // Appends OTHER_LDFLAGS entries to every build configuration.
        public bool AddOtherLinkerFlags( PBXList flags )
        {
            foreach( KeyValuePair<string, XCBuildConfiguration> buildConfig in buildConfigurations ) {
                buildConfig.Value.AddOtherLinkerFlags( flags );
            }
            modified = true;
            return modified;
        }

        // Replaces settingName with newValue in the named build configuration,
        // or in all configurations when buildConfigName is "all" (the default).
        public bool overwriteBuildSetting( string settingName, string newValue, string buildConfigName = "all")
        {
            Debug.Log("overwriteBuildSetting " + settingName + " " + newValue + " " + buildConfigName);

            foreach( KeyValuePair<string, XCBuildConfiguration> buildConfig in buildConfigurations ) {
                //Debug.Log ("build config " + buildConfig);
                XCBuildConfiguration b = buildConfig.Value;
                if ( (string)b.data["name"] == buildConfigName || (string)buildConfigName == "all") {
                    //Debug.Log ("found " + b.data["name"] + " config");
                    buildConfig.Value.overwriteBuildSetting(settingName, newValue);
                    modified = true;
                } else {
                    //Debug.LogWarning ("skipping " + buildConfigName + " config " + (string)b.data["name"]);
                }
            }

            return modified;
        }

        public bool AddHeaderSearchPaths( string path )
        {
            return AddHeaderSearchPaths( new PBXList( path ) );
        }

        // Appends HEADER_SEARCH_PATHS entries to every build configuration.
        public bool AddHeaderSearchPaths( PBXList paths )
        {
            Debug.Log ("AddHeaderSearchPaths " + paths);
            foreach( KeyValuePair<string, XCBuildConfiguration> buildConfig in buildConfigurations ) {
                buildConfig.Value.AddHeaderSearchPaths( paths );
            }
            modified = true;
            return modified;
        }

        public bool AddLibrarySearchPaths( string path )
        {
            return AddLibrarySearchPaths( new PBXList( path ) );
        }

        // Appends LIBRARY_SEARCH_PATHS entries to every build configuration.
        public bool AddLibrarySearchPaths( PBXList paths )
        {
            Debug.Log ("AddLibrarySearchPaths " + paths);
            foreach( KeyValuePair<string, XCBuildConfiguration> buildConfig in buildConfigurations ) {
                buildConfig.Value.AddLibrarySearchPaths( paths );
            }
            modified = true;
            return modified;
        }

        public bool AddFrameworkSearchPaths( string path )
        {
            return AddFrameworkSearchPaths( new PBXList( path ) );
        }

        // Appends FRAMEWORK_SEARCH_PATHS entries to every build configuration.
        public bool AddFrameworkSearchPaths( PBXList paths )
        {
            foreach( KeyValuePair<string, XCBuildConfiguration> buildConfig in buildConfigurations ) {
                buildConfig.Value.AddFrameworkSearchPaths( paths );
            }
            modified = true;
            return modified;
        }

        // Raw lookup of any PBX object by its GUID.
        public object GetObject( string guid )
        {
            return _objects[guid];
        }

        // Adds a file reference to the project under `parent`, relative to
        // `tree` (SOURCE_ROOT / SDKROOT / GROUP), optionally creating build
        // files in the matching build phase. Returns the created references,
        // an empty dictionary on a missing/invalid path, or null when the file
        // already exists or its build phase is unsupported.
        public PBXDictionary AddFile( string filePath, PBXGroup parent = null, string tree = "SOURCE_ROOT", bool createBuildFiles = true, bool weak = false )
        {
            //Debug.Log("AddFile " + filePath + ", " + parent + ", " + tree + ", " + (createBuildFiles? "TRUE":"FALSE") + ", " + (weak? "TRUE":"FALSE") );
            PBXDictionary results = new PBXDictionary();
            if (filePath == null) {
                Debug.LogError ("AddFile called with null filePath");
                return results;
            }

            string absPath = string.Empty;

            if( Path.IsPathRooted( filePath ) ) {
                Debug.Log( "Path is Rooted" );
                absPath = filePath;
            }
            else if( tree.CompareTo( "SDKROOT" ) != 0) {
                absPath = Path.Combine( Application.dataPath, filePath );
            }

            if( !( File.Exists( absPath ) || Directory.Exists( absPath ) ) && tree.CompareTo( "SDKROOT" ) != 0 ) {
                Debug.Log( "Missing file: " + filePath );
                return results;
            }
            else if( tree.CompareTo( "SOURCE_ROOT" ) == 0 ) {
                Debug.Log( "Source Root File" );
                System.Uri fileURI = new System.Uri( absPath );
                // Append "/." so MakeRelativeUri treats the root as a directory.
                System.Uri rootURI = new System.Uri( ( projectRootPath + "/."
) );
                filePath = rootURI.MakeRelativeUri( fileURI ).ToString();
            }
            else if( tree.CompareTo("GROUP") == 0) {
                Debug.Log( "Group File" );
                filePath = System.IO.Path.GetFileName( filePath );
            }

            if( parent == null ) {
                parent = _rootGroup;
            }

            //Check if there is already a file
            PBXFileReference fileReference = GetFile( System.IO.Path.GetFileName( filePath ) );
            if( fileReference != null ) {
                Debug.Log("File already exists: " + filePath); //not a warning, because this is normal for most builds!
                return null;
            }

            fileReference = new PBXFileReference( filePath, (TreeEnum)System.Enum.Parse( typeof(TreeEnum), tree ) );
            parent.AddChild( fileReference );
            fileReferences.Add( fileReference );
            results.Add( fileReference.guid, fileReference );

            //Create a build file for reference
            if( !string.IsNullOrEmpty( fileReference.buildPhase ) && createBuildFiles ) {
                // Dispatch on the build phase the file-reference type implies.
                switch( fileReference.buildPhase ) {
                    case "PBXFrameworksBuildPhase":
                        foreach( KeyValuePair<string, PBXFrameworksBuildPhase> currentObject in frameworkBuildPhases ) {
                            BuildAddFile(fileReference,currentObject,weak);
                        }
                        // Local frameworks/libraries also need a search path
                        // so the linker can find them.
                        if ( !string.IsNullOrEmpty( absPath ) && ( tree.CompareTo( "SOURCE_ROOT" ) == 0 )) {
                            string libraryPath = Path.Combine( "$(SRCROOT)", Path.GetDirectoryName( filePath ) );
                            if (File.Exists(absPath)) {
                                this.AddLibrarySearchPaths( new PBXList( libraryPath ) );
                            } else {
                                this.AddFrameworkSearchPaths( new PBXList( libraryPath ) );
                            }
                        }
                        break;
                    case "PBXResourcesBuildPhase":
                        foreach( KeyValuePair<string, PBXResourcesBuildPhase> currentObject in resourcesBuildPhases ) {
                            Debug.Log( "Adding Resources Build File" );
                            BuildAddFile(fileReference,currentObject,weak);
                        }
                        break;
                    case "PBXShellScriptBuildPhase":
                        foreach( KeyValuePair<string, PBXShellScriptBuildPhase> currentObject in shellScriptBuildPhases ) {
                            Debug.Log( "Adding Script Build File" );
                            BuildAddFile(fileReference,currentObject,weak);
                        }
                        break;
                    case "PBXSourcesBuildPhase":
                        foreach( KeyValuePair<string, PBXSourcesBuildPhase> currentObject in sourcesBuildPhases ) {
                            Debug.Log( "Adding Source Build File" );
                            BuildAddFile(fileReference,currentObject,weak);
                        }
                        break;
                    case "PBXCopyFilesBuildPhase":
                        foreach( KeyValuePair<string, PBXCopyFilesBuildPhase> currentObject in copyBuildPhases ) {
                            Debug.Log( "Adding Copy Files Build Phase" );
                            BuildAddFile(fileReference,currentObject,weak);
                        }
                        break;
                    case null:
                        // NOTE(review): unreachable — buildPhase was checked
                        // with !string.IsNullOrEmpty above.
                        Debug.LogWarning( "File Not Supported: " + filePath );
                        break;
                    default:
                        Debug.LogWarning( "File Not Supported." );
                        return null;
                }
            }
            return results;
        }

        // Finds the native target with the given name, or null.
        public PBXNativeTarget GetNativeTarget( string name )
        {
            PBXNativeTarget naviTarget = null;
            foreach( KeyValuePair<string, PBXNativeTarget> currentObject in nativeTargets ) {
                string targetName = (string)currentObject.Value.data["name"];
                if (targetName == name) {
                    naviTarget = currentObject.Value;
                    break;
                }
            }
            return naviTarget;
        }

        // Returns the buildActionMask of the first copy-files build phase,
        // or 0 when there is none.
        public int GetBuildActionMask()
        {
            int buildActionMask = 0;
            foreach( var currentObject in copyBuildPhases ) {
                buildActionMask = (int)currentObject.Value.data["buildActionMask"];
                break;
            }
            return buildActionMask;
        }

        // Returns the existing "Embed Frameworks" copy-files phase of the
        // Unity-iPhone target, creating it when absent; null if the target
        // itself is missing.
        public PBXCopyFilesBuildPhase AddEmbedFrameworkBuildPhase()
        {
            PBXCopyFilesBuildPhase phase = null;
            PBXNativeTarget naviTarget = GetNativeTarget("Unity-iPhone");
            if (naviTarget == null) {
                Debug.Log("Not found Correct NativeTarget.");
                return phase;
            }
            //check if embed framework buildPhase exist
            foreach( var currentObject in copyBuildPhases ) {
                object nameObj = null;
                if (currentObject.Value.data.TryGetValue("name", out nameObj)) {
                    string name = (string)nameObj;
                    if (name == "Embed Frameworks")
                        return currentObject.Value;
                }
            }
            int buildActionMask = this.GetBuildActionMask();
            phase = new PBXCopyFilesBuildPhase(buildActionMask);
            var buildPhases = (ArrayList)naviTarget.data["buildPhases"];
            buildPhases.Add(phase.guid);//add build phase
            copyBuildPhases.Add(phase);
            return phase;
        }

        // Adds an already-referenced framework to the embed phase with the
        // code-sign-on-copy attribute set.
        public void AddEmbedFramework( string fileName)
        {
            Debug.Log( "Add Embed Framework: " + fileName );
            //Check if there is already a file
            PBXFileReference fileReference = GetFile( System.IO.Path.GetFileName( fileName ) );
            if(
fileReference == null ) { Debug.Log("Embed Framework must added already: " + fileName); return; } var embedPhase = this.AddEmbedFrameworkBuildPhase(); if (embedPhase == null) { Debug.Log("AddEmbedFrameworkBuildPhase Failed."); return; } //create a build file PBXBuildFile buildFile = new PBXBuildFile( fileReference ); buildFile.AddCodeSignOnCopy(); buildFiles.Add( buildFile ); embedPhase.AddBuildFile(buildFile); } private void BuildAddFile (PBXFileReference fileReference, KeyValuePair<string, PBXFrameworksBuildPhase> currentObject,bool weak) { PBXBuildFile buildFile = new PBXBuildFile( fileReference, weak ); buildFiles.Add( buildFile ); currentObject.Value.AddBuildFile( buildFile ); } private void BuildAddFile (PBXFileReference fileReference, KeyValuePair<string, PBXResourcesBuildPhase> currentObject,bool weak) { PBXBuildFile buildFile = new PBXBuildFile( fileReference, weak ); buildFiles.Add( buildFile ); currentObject.Value.AddBuildFile( buildFile ); } private void BuildAddFile (PBXFileReference fileReference, KeyValuePair<string, PBXShellScriptBuildPhase> currentObject,bool weak) { PBXBuildFile buildFile = new PBXBuildFile( fileReference, weak ); buildFiles.Add( buildFile ); currentObject.Value.AddBuildFile( buildFile ); } private void BuildAddFile (PBXFileReference fileReference, KeyValuePair<string, PBXSourcesBuildPhase> currentObject,bool weak) { PBXBuildFile buildFile = new PBXBuildFile( fileReference, weak ); buildFiles.Add( buildFile ); currentObject.Value.AddBuildFile( buildFile ); } private void BuildAddFile (PBXFileReference fileReference, KeyValuePair<string, PBXCopyFilesBuildPhase> currentObject,bool weak) { PBXBuildFile buildFile = new PBXBuildFile( fileReference, weak ); buildFiles.Add( buildFile ); currentObject.Value.AddBuildFile( buildFile ); } public bool AddFolder( string folderPath, PBXGroup parent = null, string[] exclude = null, bool recursive = true, bool createBuildFile = true ) { Debug.Log("Folder PATH: "+folderPath); if( 
!Directory.Exists( folderPath ) ){
                Debug.Log("Directory doesn't exist?");
                return false;
            }

            if (folderPath.EndsWith(".lproj")){
                Debug.Log("Ended with .lproj");
                return AddLocFolder(folderPath, parent, exclude, createBuildFile);
            }

            DirectoryInfo sourceDirectoryInfo = new DirectoryInfo( folderPath );

            if( exclude == null ){
                Debug.Log("Exclude was null");
                exclude = new string[] {};
            }

            if( parent == null ){
                Debug.Log("Parent was null");
                parent = rootGroup;
            }

            // Create group
            PBXGroup newGroup = GetGroup( sourceDirectoryInfo.Name, null /*relative path*/, parent );
            Debug.Log("New Group created");

            foreach( string directory in Directory.GetDirectories( folderPath ) )
            {
                Debug.Log( "DIR: " + directory );
                if( directory.EndsWith( ".bundle" ) ) {
                    // Treat it like a file and copy even if not recursive
                    // TODO also for .xcdatamodeld?
                    Debug.LogWarning( "This is a special folder: " + directory );
                    AddFile( directory, newGroup, "SOURCE_ROOT", createBuildFile );
                    continue;
                }

                if( recursive ) {
                    Debug.Log( "recursive" );
                    AddFolder( directory, newGroup, exclude, recursive, createBuildFile );
                }
            }

            // Adding files.
            string regexExclude = string.Format( @"{0}", string.Join( "|", exclude ) );
            foreach( string file in Directory.GetFiles( folderPath ) ) {
                // BUG FIX: with an empty exclude list the joined pattern is ""
                // and Regex.IsMatch(file, "") matches EVERY path, silently
                // skipping all files. Only apply the filter when patterns exist.
                if( exclude.Length > 0 && Regex.IsMatch( file, regexExclude ) ) {
                    continue;
                }
                Debug.Log("Adding Files for Folder");
                AddFile( file, newGroup, "SOURCE_ROOT", createBuildFile );
            }

            modified = true;
            return modified;
        }

        // We support neither recursing into nor bundles contained inside loc folders
        public bool AddLocFolder( string folderPath, PBXGroup parent = null, string[] exclude = null, bool createBuildFile = true)
        {
            DirectoryInfo sourceDirectoryInfo = new DirectoryInfo( folderPath );

            if( exclude == null )
                exclude = new string[] {};

            if( parent == null )
                parent = rootGroup;

            // Create group as needed
            System.Uri projectFolderURI = new System.Uri( projectFileInfo.DirectoryName );
            System.Uri locFolderURI = new System.Uri( folderPath );
            var relativePath = projectFolderURI.MakeRelativeUri( locFolderURI ).ToString();
            PBXGroup newGroup = GetGroup( sourceDirectoryInfo.Name, relativePath, parent );

            // Add loc region to project
            string nom = sourceDirectoryInfo.Name;
            string region = nom.Substring(0, nom.Length - ".lproj".Length);
            project.AddRegion(region);

            // Adding files.
            string regexExclude = string.Format( @"{0}", string.Join( "|", exclude ) );
            foreach( string file in Directory.GetFiles( folderPath ) ) {
                // BUG FIX: same empty-pattern guard as in AddFolder above.
                if( exclude.Length > 0 && Regex.IsMatch( file, regexExclude ) ) {
                    continue;
                }

                // Add a variant group for the language as well
                var variant = new PBXVariantGroup(System.IO.Path.GetFileName( file ), null, "GROUP");
                variantGroups.Add(variant);

                // The group gets a reference to the variant, not to the file itself
                newGroup.AddChild(variant);

                AddFile( file, variant, "GROUP", createBuildFile );
            }

            modified = true;
            return modified;
        }

        #endregion

        #region Getters

        // Finds a file reference by its display name, or null.
        public PBXFileReference GetFile( string name )
        {
            if( string.IsNullOrEmpty( name ) ) {
                return null;
            }

            foreach( KeyValuePair<string, PBXFileReference> current in fileReferences ) {
                if( !string.IsNullOrEmpty( current.Value.name ) && current.Value.name.CompareTo( name ) == 0 ) {
                    return current.Value;
                }
            }

            return null;
        }

        // Finds a child group of `parent` by name (or path, for unnamed
        // groups), creating and attaching a new group when none exists.
        public PBXGroup GetGroup( string name, string path = null, PBXGroup parent = null )
        {
            if( string.IsNullOrEmpty( name ) )
                return null;

            if( parent == null )
                parent = rootGroup;

            foreach( KeyValuePair<string, PBXGroup> current in groups ) {
                if( string.IsNullOrEmpty( current.Value.name ) ) {
                    if( current.Value.path.CompareTo( name ) == 0 && parent.HasChild( current.Key ) ) {
                        return current.Value;
                    }
                } else if( current.Value.name.CompareTo( name ) == 0 && parent.HasChild( current.Key ) ) {
                    return current.Value;
                }
            }

            PBXGroup result = new PBXGroup( name, path );
            groups.Add( result );
            parent.AddChild( result );

            modified = true;
            return result;
        }

        #endregion

        #region Mods

        // Loads a .projmods file from disk and applies it.
        public void ApplyMod( string pbxmod )
        {
            XCMod mod = new XCMod( pbxmod );
            foreach(var lib in mod.libs){
                Debug.Log("Library: "+lib);
            }
            ApplyMod( mod );
        }

        // Applies a parsed mod: libraries, frameworks, files, embed binaries,
        // folders, search paths, flags and Info.plist entries.
        public void ApplyMod( XCMod mod )
        {
            PBXGroup modGroup = this.GetGroup( mod.group );

            Debug.Log( "Adding libraries..."
);
            foreach( XCModFile libRef in mod.libs ) {
                string completeLibPath = System.IO.Path.Combine( "usr/lib", libRef.filePath );
                Debug.Log ("Adding library " + completeLibPath);
                this.AddFile( completeLibPath, modGroup, "SDKROOT", true, libRef.isWeak );
            }

            Debug.Log( "Adding frameworks..." );
            PBXGroup frameworkGroup = this.GetGroup( "Frameworks" );
            foreach( string framework in mod.frameworks ) {
                // "Name.framework:weak" marks the framework as weakly linked.
                string[] filename = framework.Split( ':' );
                bool isWeak = ( filename.Length > 1 ) ? true : false;
                string completePath = System.IO.Path.Combine( "System/Library/Frameworks", filename[0] );
                this.AddFile( completePath, frameworkGroup, "SDKROOT", true, isWeak );
            }

            Debug.Log( "Adding files..." );
            foreach( string filePath in mod.files ) {
                string absoluteFilePath = System.IO.Path.Combine( mod.path, filePath );
                this.AddFile( absoluteFilePath, modGroup );
            }

            Debug.Log( "Adding embed binaries..." );
            if (mod.embed_binaries != null)
            {
                //1. Add LD_RUNPATH_SEARCH_PATHS for embed framework
                this.overwriteBuildSetting("LD_RUNPATH_SEARCH_PATHS", "$(inherited) @executable_path/Frameworks", "Release");
                this.overwriteBuildSetting("LD_RUNPATH_SEARCH_PATHS", "$(inherited) @executable_path/Frameworks", "Debug");

                foreach( string binary in mod.embed_binaries ) {
                    string absoluteFilePath = System.IO.Path.Combine( mod.path, binary );
                    this.AddEmbedFramework(absoluteFilePath);
                }
            }

            Debug.Log( "Adding folders..." );
            foreach( string folderPath in mod.folders ) {
                string absoluteFolderPath = System.IO.Path.Combine( Application.dataPath, folderPath );
                Debug.Log ("Adding folder " + absoluteFolderPath);
                this.AddFolder( absoluteFolderPath, modGroup, (string[])mod.excludes.ToArray( typeof(string) ) );
            }

            Debug.Log( "Adding headerpaths..." );
            foreach( string headerpath in mod.headerpaths ) {
                if (headerpath.Contains("$(inherited)")) {
                    Debug.Log ("not prepending a path to " + headerpath);
                    this.AddHeaderSearchPaths( headerpath );
                } else {
                    string absoluteHeaderPath = System.IO.Path.Combine( mod.path, headerpath );
                    this.AddHeaderSearchPaths( absoluteHeaderPath );
                }
            }

            Debug.Log( "Adding compiler flags..." );
            foreach( string flag in mod.compiler_flags ) {
                this.AddOtherCFlags( flag );
            }

            Debug.Log( "Adding linker flags..." );
            foreach( string flag in mod.linker_flags ) {
                this.AddOtherLinkerFlags( flag );
            }

            Debug.Log ("Adding plist items...");
            string plistPath = this.projectRootPath + "/Info.plist";
            XCPlist plist = new XCPlist (plistPath);
            plist.Process(mod.plist);

            this.Consolidate();
        }

        #endregion

        #region Savings

        // Rebuilds _objects from the typed views so that lazily-created
        // objects are included before the project is written back.
        public void Consolidate()
        {
            PBXDictionary consolidated = new PBXDictionary();
            consolidated.Append<PBXBuildFile>( this.buildFiles );//sort!
            consolidated.Append<PBXCopyFilesBuildPhase>( this.copyBuildPhases );
            consolidated.Append<PBXFileReference>( this.fileReferences );//sort!
            consolidated.Append<PBXFrameworksBuildPhase>( this.frameworkBuildPhases );
            consolidated.Append<PBXGroup>( this.groups );//sort!
            consolidated.Append<PBXNativeTarget>( this.nativeTargets );
            consolidated.Add( project.guid, project.data );//TODO this should be named PBXProject?
consolidated.Append<PBXResourcesBuildPhase>( this.resourcesBuildPhases ); consolidated.Append<PBXShellScriptBuildPhase>( this.shellScriptBuildPhases ); consolidated.Append<PBXSourcesBuildPhase>( this.sourcesBuildPhases ); consolidated.Append<PBXVariantGroup>( this.variantGroups ); consolidated.Append<XCBuildConfiguration>( this.buildConfigurations ); consolidated.Append<XCConfigurationList>( this.configurationLists ); _objects = consolidated; consolidated = null; } public void Backup() { string backupPath = Path.Combine( this.filePath, "project.backup.pbxproj" ); // Delete previous backup file if( File.Exists( backupPath ) ) File.Delete( backupPath ); // Backup original pbxproj file first File.Copy( System.IO.Path.Combine( this.filePath, "project.pbxproj" ), backupPath ); } private void DeleteExisting(string path) { // Delete old project file if( File.Exists( path )) File.Delete( path ); } private void CreateNewProject(PBXDictionary result, string path) { PBXParser parser = new PBXParser(); StreamWriter saveFile = File.CreateText( path ); saveFile.Write( parser.Encode( result, true ) ); saveFile.Close(); } /// <summary> /// Saves a project after editing. /// </summary> public void Save() { PBXDictionary result = new PBXDictionary(); result.Add( "archiveVersion", 1 ); result.Add( "classes", new PBXDictionary() ); result.Add( "objectVersion", 46 ); Consolidate(); result.Add( "objects", _objects ); result.Add( "rootObject", _rootObjectKey ); string projectPath = Path.Combine( this.filePath, "project.pbxproj" ); // Delete old project file, in case of an IOException 'Sharing violation on path Error' DeleteExisting(projectPath); // Parse result object directly into file CreateNewProject(result,projectPath); } /** * Raw project data. */ public Dictionary<string, object> objects { get { return null; } } #endregion public void Dispose() { } } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

// See CompactBinary.cs for a description of the CompactBinary protocol.
namespace Bond.Protocols
{
    using System;
    using System.Runtime.CompilerServices;
    using System.Text;
    using System.Collections.Generic;
    using Bond.IO;

    /// <summary>
    /// Length-calculator for Bond CompactBinary protocol V2
    /// </summary>
    /// <remarks>
    /// Mirrors the writer API but only accumulates byte counts; the computed
    /// per-struct lengths are appended to the caller-supplied linked list.
    /// </remarks>
    [Reader(typeof(CompactBinaryReader<>))]
    public struct CompactBinaryCounter : IProtocolWriter
    {
        // One frame per nested struct being counted; lengthSlot is where the
        // struct's final length is stored when the struct ends.
        private class CounterStackFrame
        {
            public readonly LinkedListNode<UInt32> lengthSlot;
            public int currentLength;

            public CounterStackFrame(LinkedListNode<UInt32> slot)
            {
                lengthSlot = slot;
            }
        }

        readonly LinkedList<UInt32> lengths;
        readonly Stack<CounterStackFrame> counterStack;

        /// <summary>
        /// Create an instance of CompactBinaryCounter
        /// </summary>
        public CompactBinaryCounter(LinkedList<UInt32> lengthsOut)
        {
            lengths = lengthsOut;
            counterStack = new Stack<CounterStackFrame>();
        }

        private CounterStackFrame GetCurrentStackFrame()
        {
            return counterStack.Peek();
        }

        private void AddBytes(int count)
        {
            GetCurrentStackFrame().currentLength += count;
        }

        private void AddVarUInt16(ushort value)
        {
            AddBytes(IntegerHelper.GetVarUInt16Length(value));
        }

        private void AddVarUInt32(uint value)
        {
            AddBytes(IntegerHelper.GetVarUInt32Length(value));
        }

        private void AddVarUInt64(ulong value)
        {
            AddBytes(IntegerHelper.GetVarUInt64Length(value));
        }

        /// <summary>
        /// Write protocol magic number and version
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteVersion()
        {
        }

        #region Complex types

        /// <summary>
        /// Start writing a struct
        /// </summary>
        /// <param name="metadata">Schema metadata</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteStructBegin(Metadata metadata)
        {
            // Reserve a slot now so lengths come out in begin-order.
            LinkedListNode<UInt32> frameNode = lengths.AddLast(0);
            counterStack.Push(new CounterStackFrame(frameNode));
        }

        /// <summary>
        /// End writing a struct
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteStructEnd()
        {
            CounterStackFrame frame = counterStack.Peek();
            uint structLength = (uint)frame.currentLength + 1; // +1 for the BT_STOP byte
            frame.lengthSlot.Value = structLength;
            counterStack.Pop();

            if (counterStack.Count > 0)
            {
                // A nested struct contributes its length prefix plus its body
                // to the enclosing struct's count.
                AddVarUInt32(structLength);
                AddBytes((int)structLength);
            }
        }

        /// <summary>
        /// Start writing a base struct
        /// </summary>
        /// <param name="metadata">Base schema metadata</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteBaseBegin(Metadata metadata)
        {
        }

        /// <summary>
        /// End writing a base struct
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteBaseEnd()
        {
            AddBytes(1);
        }

        /// <summary>
        /// Start writing a field
        /// </summary>
        /// <param name="type">Type of the field</param>
        /// <param name="id">Identifier of the field</param>
        /// <param name="metadata">Metadata of the field</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteFieldBegin(BondDataType type, ushort id, Metadata metadata)
        {
            // Field header size depends on how the id is encoded alongside the
            // type byte: small ids pack into 1 byte, then 2, then 3.
            if (id <= 5)
            {
                AddBytes(1);
            }
            else if (id <= 0xFF)
            {
                AddBytes(2);
            }
            else
            {
                AddBytes(3);
            }
        }

        /// <summary>
        /// Indicate that field was omitted because it was set to its default value
        /// </summary>
        /// <param name="dataType">Type of the field</param>
        /// <param name="id">Identifier of the field</param>
        /// <param name="metadata">Metadata of the field</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteFieldOmitted(BondDataType dataType, ushort id, Metadata metadata)
        {
        }

        /// <summary>
        /// End writing a field
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteFieldEnd()
        {
        }

        /// <summary>
        /// Start writing a list or set container
        /// </summary>
        /// <param name="count">Number of elements in the container</param>
        /// <param name="elementType">Type of the elements</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteContainerBegin(int count, BondDataType elementType)
        {
            // Small counts (< 7) fit in the single header byte; larger counts
            // need an additional varint (see CompactBinary v2 spec).
            if (count < 7)
            {
                AddBytes(1);
            }
            else
            {
                AddBytes(1);
                AddVarUInt32((uint)count);
            }
        }

        /// <summary>
        /// Start writing a map container
        /// </summary>
        /// <param name="count">Number of elements in the container</param>
        /// <param name="keyType">Type of the keys</param>
        /// <param name="valueType">Type of the values</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteContainerBegin(int count, BondDataType keyType, BondDataType valueType)
        {
            AddBytes(2);
            AddVarUInt32((uint)count);
        }

        /// <summary>
        /// End writing a container
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteContainerEnd()
        {
        }

        /// <summary>
        /// Write array of bytes verbatim
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteBytes(ArraySegment<byte> data)
        {
            AddBytes(data.Count);
        }

        #endregion

        #region Primitive types

        /// <summary>
        /// Write an UInt8
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteUInt8(byte value)
        {
            AddBytes(1);
        }

        /// <summary>
        /// Write an UInt16
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteUInt16(UInt16 value)
        {
            AddVarUInt16(value);
        }

        /// <summary>
        /// Write an UInt32
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteUInt32(UInt32 value)
        {
            AddVarUInt32(value);
        }

        /// <summary>
        /// Write an UInt64
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteUInt64(UInt64 value)
        {
            AddVarUInt64(value);
        }

        /// <summary>
        /// Write an Int8
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteInt8(SByte value)
        {
            AddBytes(1);
        }

        /// <summary>
        /// Write an Int16
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteInt16(Int16 value)
        {
            AddVarUInt16(IntegerHelper.EncodeZigzag16(value));
        }

        /// <summary>
        /// Write an Int32
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteInt32(Int32 value)
        {
            AddVarUInt32(IntegerHelper.EncodeZigzag32(value));
        }

        /// <summary>
        /// Write an Int64
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteInt64(Int64 value)
        {
            AddVarUInt64(IntegerHelper.EncodeZigzag64(value));
        }

        /// <summary>
        /// Write a float
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteFloat(float value)
        {
            AddBytes(4);
        }

        /// <summary>
        /// Write a double
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteDouble(double value)
        {
            AddBytes(8);
        }

        /// <summary>
        /// Write a bool
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteBool(bool value)
        {
            AddBytes(1);
        }

        /// <summary>
        /// Write a UTF-8 string
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteString(string value)
        {
            int size = Encoding.UTF8.GetByteCount(value);
            AddVarUInt32((uint)size);
            AddBytes(size);
        }

        /// <summary>
        /// Write a UTF-16 string
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void WriteWString(string value)
        {
            // Length prefix is the number of UTF-16 code units; payload is
            // 2 bytes per code unit.
            AddVarUInt32((uint)value.Length);
            AddBytes(value.Length * 2);
        }
        #endregion
    }
}
// Copyright 2015, Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Author: api.anash@gmail.com (Anash P. Oommen)

using Google.Api.Ads.Common.Util;
using Google.Api.Ads.Dfp.Lib;
using Google.Api.Ads.Dfp.Util.v201505;
using Google.Api.Ads.Dfp.v201505;

using NUnit.Framework;

using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Threading;
using System.Configuration;
using System.Collections;

namespace Google.Api.Ads.Dfp.Tests.v201505 {
  /// <summary>
  /// A utility class to assist the testing of v201505 services.
  /// </summary>
  public class TestUtils {
    /// <summary>
    /// Gets the user whose email is configured in the application config.
    /// </summary>
    public User GetCurrentUser(DfpUser user) {
      return GetUserByEmail(user, new DfpAppConfig().Email);
    }

    /// <summary>
    /// Gets the user for the trafficker test account.
    /// </summary>
    public User GetTrafficker(DfpUser user) {
      return GetUserByEmail(user, "dfp.api.trafficker@gmail.com");
    }

    /// <summary>
    /// Gets the user for the salesperson test account.
    /// </summary>
    public User GetSalesperson(DfpUser user) {
      return GetUserByEmail(user, "dfp.api.salesperson@gmail.com");
    }

    /// <summary>
    /// Looks up a user by email address, or returns null if none matches.
    /// </summary>
    public User GetUserByEmail(DfpUser user, string email) {
      UserService userService = (UserService) user.GetService(DfpService.v201505.UserService);

      // Create a statement to select at most one user with the given email.
      Statement statement = new Statement();
      statement.query = string.Format("where email = '{0}' LIMIT 1", email);

      UserPage page = userService.getUsersByStatement(statement);
      if (page.results != null && page.results.Length > 0) {
        return page.results[0];
      } else {
        return null;
      }
    }

    /// <summary>
    /// Finds a role by name, or returns null if no role matches.
    /// </summary>
    public Role GetRole(DfpUser user, string roleName) {
      UserService userService = (UserService)user.GetService(DfpService.v201505.UserService);
      // Get all roles and scan for a name match.
      Role[] roles = userService.getAllRoles();
      foreach (Role role in roles) {
        if (role.name == roleName) {
          return role;
        }
      }
      return null;
    }

    /// <summary>
    /// Create a test company for running further tests.
    /// </summary>
    /// <returns>A test company for running further tests.</returns>
    public Company CreateCompany(DfpUser user, CompanyType companyType) {
      CompanyService companyService = (CompanyService) user.GetService(
          DfpService.v201505.CompanyService);
      Company company = new Company();
      // GetTimeStamp() keeps the generated name unique across runs.
      company.name = string.Format("Company #{0}", GetTimeStamp());
      company.type = companyType;
      return companyService.createCompanies(new Company[] {company})[0];
    }

    /// <summary>
    /// Creates a test order owned by the given advertiser, salesperson and
    /// trafficker.
    /// </summary>
    public Order CreateOrder(DfpUser user, long advertiserId, long salespersonId,
        long traffickerId) {
      // Get the OrderService.
      OrderService orderService = (OrderService) user.GetService(DfpService.v201505.OrderService);
      Order order = new Order();
      order.name = string.Format("Order #{0}", GetTimeStamp());
      order.advertiserId = advertiserId;
      order.salespersonId = salespersonId;
      order.traffickerId = traffickerId;
      return orderService.createOrders(new Order[] {order})[0];
    }

    /// <summary>
    /// Creates a fully-targeted test line item under the given order,
    /// targeting a placement built from the given ad unit.
    /// </summary>
    public LineItem CreateLineItem(DfpUser user, long orderId, string adUnitId) {
      LineItemService lineItemService =
          (LineItemService) user.GetService(DfpService.v201505.LineItemService);
      long placementId = CreatePlacement(user, new string[] {adUnitId}).id;

      // Create inventory targeting.
      InventoryTargeting inventoryTargeting = new InventoryTargeting();
      inventoryTargeting.targetedPlacementIds = new long[] {placementId};

      // Create geographical targeting.
      GeoTargeting geoTargeting = new GeoTargeting();

      // Include the US and Quebec, Canada.
      Location countryLocation = new Location();
      countryLocation.id = 2840L;
      Location regionLocation = new Location();
      regionLocation.id = 20123L;
      geoTargeting.targetedLocations = new Location[] {countryLocation, regionLocation};

      // Exclude Chicago and the New York metro area.
      Location cityLocation = new Location();
      cityLocation.id = 1016367L;
      Location metroLocation = new Location();
      metroLocation.id = 200501L;
      geoTargeting.excludedLocations = new Location[] {cityLocation, metroLocation};

      // Exclude domains that are not under the network's control.
      UserDomainTargeting userDomainTargeting = new UserDomainTargeting();
      userDomainTargeting.domains = new String[] {"usa.gov"};
      userDomainTargeting.targeted = false;

      // Create day-part targeting.
      DayPartTargeting dayPartTargeting = new DayPartTargeting();
      dayPartTargeting.timeZone = DeliveryTimeZone.BROWSER;

      // Target only the weekend in the browser's timezone.
      DayPart saturdayDayPart = new DayPart();
      saturdayDayPart.dayOfWeek = Google.Api.Ads.Dfp.v201505.DayOfWeek.SATURDAY;

      saturdayDayPart.startTime = new TimeOfDay();
      saturdayDayPart.startTime.hour = 0;
      saturdayDayPart.startTime.minute = MinuteOfHour.ZERO;

      saturdayDayPart.endTime = new TimeOfDay();
      saturdayDayPart.endTime.hour = 24;
      saturdayDayPart.endTime.minute = MinuteOfHour.ZERO;

      DayPart sundayDayPart = new DayPart();
      sundayDayPart.dayOfWeek = Google.Api.Ads.Dfp.v201505.DayOfWeek.SUNDAY;

      sundayDayPart.startTime = new TimeOfDay();
      sundayDayPart.startTime.hour = 0;
      sundayDayPart.startTime.minute = MinuteOfHour.ZERO;

      sundayDayPart.endTime = new TimeOfDay();
      sundayDayPart.endTime.hour = 24;
      sundayDayPart.endTime.minute = MinuteOfHour.ZERO;

      dayPartTargeting.dayParts = new DayPart[] {saturdayDayPart, sundayDayPart};

      // Create technology targeting.
      TechnologyTargeting technologyTargeting = new TechnologyTargeting();

      // Create browser targeting.
      BrowserTargeting browserTargeting = new BrowserTargeting();
      browserTargeting.isTargeted = true;

      // Target just the Chrome browser.
      Technology browserTechnology = new Technology();
      browserTechnology.id = 500072L;
      browserTargeting.browsers = new Technology[] {browserTechnology};
      technologyTargeting.browserTargeting = browserTargeting;

      LineItem lineItem = new LineItem();
      lineItem.name = "Line item #" + new TestUtils().GetTimeStamp();
      lineItem.orderId = orderId;
      lineItem.targeting = new Targeting();
      lineItem.targeting.inventoryTargeting = inventoryTargeting;
      lineItem.targeting.geoTargeting = geoTargeting;
      lineItem.targeting.userDomainTargeting = userDomainTargeting;
      lineItem.targeting.dayPartTargeting = dayPartTargeting;
      lineItem.targeting.technologyTargeting = technologyTargeting;
      lineItem.lineItemType = LineItemType.STANDARD;
      lineItem.allowOverbook = true;

      // Set the creative rotation type to even.
      lineItem.creativeRotationType = CreativeRotationType.EVEN;

      // Set the size of creatives that can be associated with this line item.
      Size size = new Size();
      size.width = 300;
      size.height = 250;
      size.isAspectRatio = false;

      // Create the creative placeholder.
      CreativePlaceholder creativePlaceholder = new CreativePlaceholder();
      creativePlaceholder.size = size;

      lineItem.creativePlaceholders = new CreativePlaceholder[] {creativePlaceholder};

      // Set the line item to run for one month.
      lineItem.startDateTimeType = StartDateTimeType.IMMEDIATELY;
      lineItem.endDateTime =
          DateTimeUtilities.FromDateTime(System.DateTime.Today.AddMonths(1), "America/New_York");

      // Set the cost per unit to $2.
      lineItem.costType = CostType.CPM;
      lineItem.costPerUnit = new Money();
      lineItem.costPerUnit.currencyCode = "USD";
      lineItem.costPerUnit.microAmount = 2000000L;

      // Set the number of units bought to 500,000 so that the budget is
      // $1,000.
      Goal goal = new Goal();
      goal.units = 500000L;
      goal.unitType = UnitType.IMPRESSIONS;
      lineItem.primaryGoal = goal;

      return lineItemService.createLineItems(new LineItem[] {lineItem})[0];
    }

    /// <summary>
    /// Create a test image creative for running further tests.
    /// </summary>
    /// <returns>A creative for running further tests.</returns>
    public Creative CreateCreative(DfpUser user, long advertiserId) {
      CreativeService creativeService = (CreativeService)user.GetService(
          DfpService.v201505.CreativeService);

      // Create creative size.
      Size size = new Size();
      size.width = 300;
      size.height = 250;

      // Create an image creative.
      ImageCreative imageCreative = new ImageCreative();
      imageCreative.name = string.Format("Image creative #{0}", GetTimeStamp());
      imageCreative.advertiserId = advertiserId;
      imageCreative.destinationUrl = "http://www.google.com";
      imageCreative.size = size;

      // Create image asset.
      CreativeAsset creativeAsset = new CreativeAsset();
      creativeAsset.fileName = "image.jpg";
      creativeAsset.assetByteArray = MediaUtilities.GetAssetDataFromUrl(
          "http://www.google.com/intl/en/adwords/select/images/samples/inline.jpg");
      creativeAsset.size = size;
      imageCreative.primaryImageAsset = creativeAsset;

      return creativeService.createCreatives(new Creative[] {imageCreative})[0];
    }

    /// <summary>
    /// Associates the given creative with the given line item.
    /// </summary>
    public LineItemCreativeAssociation CreateLica(DfpUser user, long lineItemId, long creativeId) {
      LineItemCreativeAssociationService licaService =
          (LineItemCreativeAssociationService)user.GetService(
              DfpService.v201505.LineItemCreativeAssociationService);

      LineItemCreativeAssociation lica = new LineItemCreativeAssociation();
      lica.creativeId = creativeId;
      lica.lineItemId = lineItemId;

      return licaService.createLineItemCreativeAssociations(
          new LineItemCreativeAssociation[] {lica})[0];
    }

    /// <summary>
    /// Creates a test ad unit under the root ad unit.
    /// </summary>
    public AdUnit CreateAdUnit(DfpUser user) {
      InventoryService inventoryService =
          (InventoryService) user.GetService(DfpService.v201505.InventoryService);
      AdUnit adUnit = new AdUnit();
      adUnit.name = string.Format("Ad_Unit_{0}", GetTimeStamp());
      adUnit.parentId = FindRootAdUnit(user).id;

      // Set the size of possible creatives that can match this ad unit.
      Size size = new Size();
      size.width = 300;
      size.height = 250;

      // Create ad unit size.
      AdUnitSize adUnitSize = new AdUnitSize();
      adUnitSize.size = size;
      adUnitSize.environmentType = EnvironmentType.BROWSER;

      adUnit.adUnitSizes = new AdUnitSize[] {adUnitSize};
      return inventoryService.createAdUnits(new AdUnit[] {adUnit})[0];
    }

    /// <summary>
    /// Finds the network's root ad unit (the ad unit with no parent).
    /// </summary>
    public AdUnit FindRootAdUnit(DfpUser user) {
      // Get InventoryService.
      InventoryService inventoryService =
          (InventoryService)user.GetService(DfpService.v201505.InventoryService);

      // Create a Statement to only select the root ad unit.
      Statement statement = new Statement();
      statement.query = "WHERE parentId IS NULL LIMIT 500";

      // Get ad units by Statement.
      AdUnitPage page = inventoryService.getAdUnitsByStatement(statement);
      return page.results[0];
    }

    /// <summary>
    /// Creates a test placement that targets the given ad units.
    /// </summary>
    public Placement CreatePlacement(DfpUser user, string[] targetedAdUnitIds) {
      // Get PlacementService.
      PlacementService placementService =
          (PlacementService) user.GetService(DfpService.v201505.PlacementService);

      Placement placement = new Placement();
      placement.name = string.Format("Test placement #{0}", this.GetTimeStamp());
      placement.description = "Test placement";
      placement.targetedAdUnitIds = targetedAdUnitIds;

      return placementService.createPlacements(new Placement[] {placement})[0];
    }

    /// <summary>
    /// Runs a last-month order report job and returns it for polling.
    /// </summary>
    public ReportJob CreateReport(DfpUser user) {
      // Get ReportService.
      ReportService reportService = (ReportService) user.GetService(DfpService.v201505.ReportService);

      ReportJob reportJob = new ReportJob();

      reportJob.reportQuery = new ReportQuery();
      reportJob.reportQuery.dimensions = new Dimension[] {Dimension.ORDER_ID, Dimension.ORDER_NAME};
      reportJob.reportQuery.columns = new Column[] {Column.AD_SERVER_IMPRESSIONS,
          Column.AD_SERVER_CLICKS, Column.AD_SERVER_CTR, Column.AD_SERVER_CPM_AND_CPC_REVENUE,
          Column.AD_SERVER_WITHOUT_CPD_AVERAGE_ECPM};
      reportJob.reportQuery.dateRangeType = DateRangeType.LAST_MONTH;

      return reportService.runReportJob(reportJob);
    }

    /// <summary>
    /// Gets the current timestamp as a string.
    /// </summary>
    /// <returns>The current timestamp as a string.</returns>
    public string GetTimeStamp() {
      // The sleep guarantees successive calls return distinct tick values, so
      // generated entity names are unique within a test run.
      Thread.Sleep(500);
      return (System.DateTime.UtcNow - new System.DateTime(1970, 1, 1)).Ticks.ToString();
    }
  }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition
{
    using Microsoft.Azure.Management.Redis.Fluent.Models;
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core;
    using System;
    using System.Collections.Generic;

    /// <summary>
    /// A Redis Cache definition with Premium Sku specific functionality.
    /// </summary>
    public interface IWithPremiumSkuCreate :
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate,
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithPremiumSkuCreateBeta
    {
        /// <summary>
        /// The number of shards to be created on a Premium Cluster Cache.
        /// </summary>
        /// <param name="shardCount">The shard count value to set.</param>
        /// <return>The next stage of Redis Cache with Premium SKU definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithPremiumSkuCreate WithShardCount(int shardCount);
    }

    /// <summary>
    /// Container interface for all the definitions that need to be implemented.
    /// </summary>
    public interface IDefinition :
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IBlank,
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithGroup,
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithSku,
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate,
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithPremiumSkuCreate
    {
    }

    /// <summary>
    /// A Redis Cache definition allowing the sku to be set.
    /// </summary>
    public interface IWithSku
    {
        /// <summary>
        /// Specifies the Basic sku of the Redis Cache.
        /// </summary>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithBasicSku();

        /// <summary>
        /// Specifies the Basic sku of the Redis Cache.
        /// </summary>
        /// <param name="capacity">Specifies what size of Redis Cache to deploy for Basic sku with C family (0, 1, 2, 3, 4, 5, 6).</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithBasicSku(int capacity);

        /// <summary>
        /// Specifies the Premium sku of the Redis Cache.
        /// </summary>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithPremiumSkuCreate WithPremiumSku();

        /// <summary>
        /// Specifies the Premium sku of the Redis Cache.
        /// </summary>
        /// <param name="capacity">Specifies what size of Redis Cache to deploy for Premium sku with P family (1, 2, 3, 4).</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithPremiumSkuCreate WithPremiumSku(int capacity);

        /// <summary>
        /// Specifies the Standard Sku of the Redis Cache.
        /// </summary>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithStandardSku();

        /// <summary>
        /// Specifies the Standard sku of the Redis Cache.
        /// </summary>
        /// <param name="capacity">Specifies what size of Redis Cache to deploy for Standard sku with C family (0, 1, 2, 3, 4, 5, 6).</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithStandardSku(int capacity);
    }

    /// <summary>
    /// A Redis Cache definition allowing resource group to be set.
    /// </summary>
    public interface IWithGroup :
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.GroupableResource.Definition.IWithGroup<Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithSku>
    {
    }

    /// <summary>
    /// The first stage of the Redis Cache definition.
    /// </summary>
    public interface IBlank :
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithRegion<Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithGroup>
    {
    }

    /// <summary>
    /// A Redis Cache definition with sufficient inputs to create a new
    /// Redis Cache in the cloud, but exposing additional optional inputs to
    /// specify.
    /// </summary>
    public interface IWithCreate :
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions.ICreatable<Microsoft.Azure.Management.Redis.Fluent.IRedisCache>,
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithTags<Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate>,
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreateBeta
    {
        /// <summary>
        /// Enables non-ssl Redis server port (6379).
        /// </summary>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithNonSslPort();

        /// <summary>
        /// Patch schedule on a Premium Cluster Cache.
        /// </summary>
        /// <param name="dayOfWeek">Day of week when cache can be patched.</param>
        /// <param name="startHourUtc">Start hour after which cache patching can start.</param>
        /// <return>The next stage of Redis Cache with Premium SKU definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithPatchSchedule(Microsoft.Azure.Management.Redis.Fluent.Models.DayOfWeek dayOfWeek, int startHourUtc);

        /// <summary>
        /// Patch schedule on a Premium Cluster Cache.
        /// </summary>
        /// <param name="dayOfWeek">Day of week when cache can be patched.</param>
        /// <param name="startHourUtc">Start hour after which cache patching can start.</param>
        /// <param name="maintenanceWindow">ISO8601 timespan specifying how much time cache patching can take.</param>
        /// <return>The next stage of Redis Cache with Premium SKU definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithPatchSchedule(Microsoft.Azure.Management.Redis.Fluent.Models.DayOfWeek dayOfWeek, int startHourUtc, TimeSpan maintenanceWindow);

        /// <summary>
        /// Patch schedule on a Premium Cluster Cache.
        /// </summary>
        /// <param name="scheduleEntry">Patch schedule entry for Premium Redis Cache.</param>
        /// <return>The next stage of Redis Cache with Premium SKU definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithPatchSchedule(ScheduleEntry scheduleEntry);

        /// <summary>
        /// Patch schedule on a Premium Cluster Cache.
        /// </summary>
        /// <param name="scheduleEntry">List of patch schedule entries for Premium Redis Cache.</param>
        /// <return>The next stage of Redis Cache with Premium SKU definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithPatchSchedule(IList<Microsoft.Azure.Management.Redis.Fluent.Models.ScheduleEntry> scheduleEntry);

        /// <summary>
        /// All Redis Settings. Few possible keys:
        /// rdb-backup-enabled, rdb-storage-connection-string, rdb-backup-frequency, maxmemory-delta, maxmemory-policy,
        /// notify-keyspace-events, maxmemory-samples, slowlog-log-slower-than, slowlog-max-len, list-max-ziplist-entries,
        /// list-max-ziplist-value, hash-max-ziplist-entries, hash-max-ziplist-value, set-max-intset-entries,
        /// zset-max-ziplist-entries, zset-max-ziplist-value etc.
        /// </summary>
        /// <param name="redisConfiguration">Configuration of Redis Cache as a map indexed by configuration name.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithRedisConfiguration(IDictionary<string,string> redisConfiguration);

        /// <summary>
        /// Specifies Redis Setting. Few possible keys:
        /// rdb-backup-enabled, rdb-storage-connection-string, rdb-backup-frequency, maxmemory-delta, maxmemory-policy,
        /// notify-keyspace-events, maxmemory-samples, slowlog-log-slower-than, slowlog-max-len, list-max-ziplist-entries,
        /// list-max-ziplist-value, hash-max-ziplist-entries, hash-max-ziplist-value, set-max-intset-entries,
        /// zset-max-ziplist-entries, zset-max-ziplist-value etc.
        /// </summary>
        /// <param name="key">Redis configuration name.</param>
        /// <param name="value">Redis configuration value.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithRedisConfiguration(string key, string value);
    }

    /// <summary>
    /// A Redis Cache definition with Premium Sku specific functionality.
    /// </summary>
    public interface IWithPremiumSkuCreateBeta :
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.IBeta
    {
        /// <summary>
        /// Sets Redis Cache static IP. Required when deploying a Redis Cache inside an existing Azure Virtual Network.
        /// </summary>
        /// <param name="staticIP">The static IP value to set.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithStaticIP(string staticIP);

        /// <summary>
        /// Assigns the specified subnet to this instance of Redis Cache.
        /// </summary>
        /// <param name="network">Instance of Network object.</param>
        /// <param name="subnetName">The name of the subnet.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithSubnet(IHasId network, string subnetName);

        /// <summary>
        /// Assigns the specified subnet to this instance of Redis Cache.
        /// </summary>
        /// <param name="subnetId">Resource id of subnet.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithSubnet(string subnetId);
    }

    /// <summary>
    /// A Redis Cache definition with sufficient inputs to create a new
    /// Redis Cache in the cloud, but exposing additional optional inputs to
    /// specify.
    /// </summary>
    public interface IWithCreateBeta :
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.IBeta
    {
        /// <summary>
        /// Creates Redis cache firewall rule with range of IP addresses permitted to connect to the cache.
        /// </summary>
        /// <param name="name">Name of the rule.</param>
        /// <param name="lowestIp">Lowest IP address included in the range.</param>
        /// <param name="highestIp">Highest IP address included in the range.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithFirewallRule(string name, string lowestIp, string highestIp);

        /// <summary>
        /// Creates Redis cache firewall rule with range of IP addresses permitted to connect to the cache.
        /// </summary>
        /// <param name="rule">Firewall rule that specifies name, lowest and highest IP address included in the range of permitted IP addresses.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithFirewallRule(IRedisFirewallRule rule);

        /// <summary>
        /// Requires clients to use a specified TLS version (or higher) to connect (e.g. '1.0', '1.1', '1.2').
        /// </summary>
        /// <param name="tlsVersion">Minimum TLS version.</param>
        /// <return>The next stage of Redis Cache definition.</return>
        Microsoft.Azure.Management.Redis.Fluent.RedisCache.Definition.IWithCreate WithMinimumTlsVersion(TlsVersion tlsVersion);
    }
}
#region License //----------------------------------------------------------------------- // <copyright> // The MIT License (MIT) // // Copyright (c) 2014 Kirk S Woll // // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
// </copyright> //----------------------------------------------------------------------- #endregion using System; using System.Collections.Generic; using System.Linq; using Microsoft.CodeAnalysis; namespace WootzJs.Compiler { public class Context { [ThreadStatic] private static Context instance; public static Context Instance { get { return instance; } } public Solution Solution { get; private set; } public Project Project { get; private set; } public Compilation Compilation { get; private set; } public ReflectionCache ReflectionCache { get; private set; } public INamedTypeSymbol Exception { get; private set; } public INamedTypeSymbol SpecialFunctions { get; private set; } public IMethodSymbol DefaultOf { get; private set; } public IMethodSymbol InternalInit { get; private set; } public INamedTypeSymbol Assembly { get; private set; } public IMethodSymbol AssemblyConstructor { get; private set; } public INamedTypeSymbol JsAttributeType { get; private set; } public INamedTypeSymbol JsCompilerOptionsAttribute { get; private set; } public INamedTypeSymbol PrecedesAttribute { get; private set; } public INamedTypeSymbol ObjectType { get; private set; } public new IMethodSymbol GetType { get; private set; } public IMethodSymbol ObjectReferenceEquals { get; private set; } public IMethodSymbol ObjectCast { get; private set; } public IMethodSymbol ObjectCreateDelegate { get; private set; } public INamedTypeSymbol TypeType { get; private set; } public IArrayTypeSymbol TypeArray { get; private set; } public IMethodSymbol TypeConstructor { get; private set; } public IMethodSymbol TypeInit { get; private set; } public IMethodSymbol CreateTypeParameter { get; private set; } public IMethodSymbol TypeIsInstanceOfType { get; private set; } public IMethodSymbol Type_GetTypeFromTypeFunc { get; private set; } public IMethodSymbol GetField { get; private set; } public IMethodSymbol GetProperty { get; private set; } public IMethodSymbol GetMethod { get; private set; } public 
IMethodSymbol GetConstructor { get; private set; } public INamedTypeSymbol AsExtensionType { get; private set; } public INamedTypeSymbol JsniType { get; private set; } public INamedTypeSymbol EnumType { get; private set; } public IMethodSymbol EnumGetValue { get; private set; } public IMethodSymbol EnumInternalToObject { get; private set; } public INamedTypeSymbol Enumerable { get; private set; } public INamedTypeSymbol EnumerableGeneric { get; private set; } public INamedTypeSymbol Enumerator { get; private set; } public IMethodSymbol EnumerableGetEnumerator { get; private set; } public IPropertySymbol EnumeratorCurrent { get; private set; } public IMethodSymbol EnumeratorMoveNext { get; private set; } public INamedTypeSymbol DelegateType { get; private set; } public IMethodSymbol DelegateTypeConstructor { get; private set; } public IPropertySymbol DelegateTarget { get; private set; } public IMethodSymbol DelegateCombine { get; private set; } public IMethodSymbol DelegateRemove { get; private set; } public INamedTypeSymbol MulticastDelegateType { get; private set; } public IMethodSymbol MulticastDelegateConstructor { get; private set; } public INamedTypeSymbol NullableType { get; private set; } public IPropertySymbol NullableHasValue { get; private set; } public IPropertySymbol NullableValue { get; private set; } public IMethodSymbol NullableGetValueOrDefault { get; private set; } public INamedTypeSymbol FieldInfo { get; private set; } public INamedTypeSymbol MethodInfo { get; private set; } public INamedTypeSymbol MemberInfo { get; private set; } public INamedTypeSymbol ParameterInfo { get; private set; } public INamedTypeSymbol ConstructorInfo { get; private set; } public INamedTypeSymbol PropertyInfo { get; private set; } public INamedTypeSymbol EventInfo { get; private set; } public INamedTypeSymbol Attribute { get; private set; } public IMethodSymbol FieldInfoConstructor { get; private set; } public IMethodSymbol MethodInfoConstructor { get; private set; } 
public IMethodSymbol ParameterInfoConstructor { get; private set; } public IMethodSymbol ConstructorInfoConstructor { get; private set; } public IMethodSymbol PropertyInfoConstructor { get; private set; } public IMethodSymbol EventInfoConstructor { get; private set; } public INamedTypeSymbol TypeAttributes { get; private set; } public IFieldSymbol TypeAttributesPublic { get; private set; } public IFieldSymbol TypeAttributesNotPublic { get; private set; } public IFieldSymbol TypeAttributesNestedPublic { get; private set; } public IFieldSymbol TypeAttributesNestedPrivate { get; private set; } public IFieldSymbol TypeAttributesNestedFamily { get; private set; } public IFieldSymbol TypeAttributesNestedAssembly { get; private set; } public IFieldSymbol TypeAttributesNestedFamANDAssem { get; private set; } public IFieldSymbol TypeAttributesNestedFamORAssem { get; private set; } public IFieldSymbol TypeAttributesAbstract { get; private set; } public IFieldSymbol TypeAttributesSealed { get; private set; } public IFieldSymbol TypeAttributesClass { get; private set; } public IFieldSymbol TypeAttributesInterface { get; private set; } public INamedTypeSymbol FieldAttributes { get; private set; } public IFieldSymbol FieldAttributesPublic { get; private set; } public IFieldSymbol FieldAttributesPrivate { get; private set; } public IFieldSymbol FieldAttributesFamily { get; private set; } public IFieldSymbol FieldAttributesAssembly { get; private set; } public IFieldSymbol FieldAttributesFamORAssem { get; private set; } public IFieldSymbol FieldAttributesStatic { get; private set; } public IFieldSymbol FieldAttributesInitOnly { get; private set; } public IFieldSymbol FieldAttributesLiteral { get; private set; } public INamedTypeSymbol MethodAttributes { get; private set; } public IFieldSymbol MethodAttributesPublic { get; private set; } public IFieldSymbol MethodAttributesPrivate { get; private set; } public IFieldSymbol MethodAttributesFamily { get; private set; } public 
IFieldSymbol MethodAttributesAssembly { get; private set; } public IFieldSymbol MethodAttributesFamORAssem { get; private set; } public IFieldSymbol MethodAttributesStatic { get; private set; } public INamedTypeSymbol ParameterAttributes { get; private set; } public IFieldSymbol ParameterAttributesOut { get; private set; } public IFieldSymbol ParameterAttributesHasDefault { get; private set; } public IFieldSymbol ParameterAttributesNone { get; private set; } public INamedTypeSymbol JsFunction { get; private set; } public INamedTypeSymbol IDisposable { get; private set; } public IMethodSymbol IDisposableDispose { get; private set; } public INamedTypeSymbol Expression { get; private set; } public INamedTypeSymbol ExpressionGeneric { get; private set; } public IArrayTypeSymbol ExpressionArray { get; private set; } public INamedTypeSymbol ExpressionType { get; private set; } public INamedTypeSymbol ExpressionLambda { get; private set; } public INamedTypeSymbol ParameterExpression { get; private set; } public IArrayTypeSymbol ParameterExpressionArray { get; private set; } public INamedTypeSymbol NewExpression { get; private set; } public INamedTypeSymbol MemberBinding { get; private set; } public IArrayTypeSymbol MemberBindingArray { get; private set; } public INamedTypeSymbol ElementInit { get; private set; } public IArrayTypeSymbol ElementInitArray { get; private set; } public INamedTypeSymbol String { get; private set; } public IMethodSymbol ObjectToString { get; private set; } public INamedTypeSymbol Char { get; private set; } public INamedTypeSymbol CharNullable { get; private set; } public INamedTypeSymbol Int64 { get; private set; } public INamedTypeSymbol Int32 { get; private set; } public INamedTypeSymbol Int16 { get; private set; } public INamedTypeSymbol UInt64 { get; private set; } public INamedTypeSymbol UInt32 { get; private set; } public INamedTypeSymbol UInt16 { get; private set; } public INamedTypeSymbol Byte { get; private set; } public 
INamedTypeSymbol SByte { get; private set; } public INamedTypeSymbol Single { get; private set; } public INamedTypeSymbol Double { get; private set; } public INamedTypeSymbol Decimal { get; private set; } public INamedTypeSymbol Constant { get; private set; } public INamedTypeSymbol Action { get; private set; } public INamedTypeSymbol ActionT { get; private set; } public INamedTypeSymbol Func { get; private set; } public INamedTypeSymbol JsObject { get; private set; } public INamedTypeSymbol JsString { get; private set; } public IMethodSymbol SafeToString { get; private set; } public INamedTypeSymbol Array { get; private set; } public INamedTypeSymbol INotifyPropertyChanged { get; private set; } public INamedTypeSymbol InvalidOperationException { get; private set; } public IMethodSymbol InvalidOperationExceptionStringConstructor { get; private set; } public INamedTypeSymbol IAsyncStateMachine { get; private set; } public IMethodSymbol IAsyncStateMachineMoveNext { get; private set; } public IMethodSymbol IAsyncStateMachineSetStateMachine { get; private set; } public INamedTypeSymbol IEnumerator { get; private set; } public INamedTypeSymbol IEnumeratorT { get; private set; } public INamedTypeSymbol IEnumerable { get; private set; } public INamedTypeSymbol IEnumerableT { get; private set; } public IPropertySymbol IEnumeratorCurrent { get; private set; } public IMethodSymbol IEnumeratorMoveNext { get; private set; } public IMethodSymbol IEnumeratorReset { get; private set; } public IMethodSymbol IEnumerableGetEnumerator { get; private set; } public IPropertySymbol IEnumeratorTCurrent { get; private set; } public IMethodSymbol IEnumerableTGetEnumerator { get; private set; } public INamedTypeSymbol YieldIterator { get; set; } public IMethodSymbol YieldIteratorClone { get; set; } public IMethodSymbol YieldIteratorDoMoveNext { get; set; } public IMethodSymbol Nop { get; private set; } public INamedTypeSymbol Task { get; private set; } public INamedTypeSymbol TaskT { get; 
private set; } public INamedTypeSymbol TaskAwaiter { get; private set; } public INamedTypeSymbol TaskAwaiterT { get; private set; } public INamedTypeSymbol AsyncVoidMethodBuilder { get; private set; } public IMethodSymbol AsyncVoidMethodBuilderCreate { get; private set; } public IMethodSymbol AsyncVoidMethodBuilderStart { get; private set; } public INamedTypeSymbol AsyncTaskMethodBuilder { get; private set; } public IMethodSymbol AsyncTaskMethodBuilderCreate { get; private set; } public IMethodSymbol AsyncTaskMethodBuilderStart { get; private set; } public INamedTypeSymbol AsyncTaskTMethodBuilder { get; private set; } public IMethodSymbol AsyncTaskTMethodBuilderCreate { get; private set; } public IMethodSymbol AsyncTaskTMethodBuilderStart { get; private set; } public INamedTypeSymbol CallerMemberNameAttribute { get; private set; } public INamedTypeSymbol CultureInfo { get; private set; } public IMethodSymbol NullPropagation { get; private set; } // public NamedTypeSymbol IAutoNotifyPropertyChanged { get; private set; } // public MethodSymbol NotifyPropertyChanged { get; private set; } public static void Update(Solution solution, Project project, Compilation compilation, ReflectionCache reflectionCache) { Profiler.Time("Updating Context", () => { instance = new Context(); instance.UpdateContext(solution, project, compilation, reflectionCache); }); } private void UpdateContext(Solution solution, Project project, Compilation compilation, ReflectionCache reflectionCache) { Solution = solution; Project = project; Compilation = compilation; ReflectionCache = reflectionCache; // var diagnostics = compilation.GetDiagnostics(); // var mscorlib = compilation.GetReferencedAssemblySymbol(project.MetadataReferences.First()); // var typeNames = mscorlib.TypeNames.OrderBy(x => x).ToArray(); ObjectToString = compilation.ObjectType.GetMembers("ToString").OfType<IMethodSymbol>().Single(); String = compilation.FindType("System.String"); SpecialFunctions = 
compilation.FindType("System.Runtime.WootzJs.SpecialFunctions"); DefaultOf = SpecialFunctions.GetMethod("DefaultOf"); Char = compilation.FindType("System.Char"); Byte = compilation.FindType("System.Byte"); Int16 = compilation.FindType("System.Int16"); Int32 = compilation.FindType("System.Int32"); Int64 = compilation.FindType("System.Int64"); SByte = compilation.FindType("System.SByte"); UInt16 = compilation.FindType("System.UInt16"); UInt32 = compilation.FindType("System.UInt32"); UInt64 = compilation.FindType("System.UInt64"); Single = compilation.FindType("System.Single"); Double = compilation.FindType("System.Double"); Decimal = compilation.FindType("System.Decimal"); Exception = compilation.FindType("System.Exception"); InternalInit = Exception.GetMethodByName("InternalInit"); Assembly = compilation.FindType("System.Reflection.Assembly"); AssemblyConstructor = Assembly.InstanceConstructors.Single(); JsAttributeType = compilation.FindType("System.Runtime.WootzJs.JsAttribute"); JsCompilerOptionsAttribute = compilation.FindType("System.Runtime.WootzJs.JsCompilerOptionsAttribute"); PrecedesAttribute = compilation.FindType("System.Runtime.WootzJs.DependsOnAttribute"); ObjectType = compilation.FindType("System.Object"); GetType = ObjectType.GetMethod("GetType"); ObjectReferenceEquals = (IMethodSymbol)ObjectType.GetMembers("ReferenceEquals").Single(); ObjectCast = (IMethodSymbol)SpecialFunctions.GetMembers("ObjectCast").Single(); ObjectCreateDelegate = (IMethodSymbol)SpecialFunctions.GetMembers("CreateDelegate").Single(); TypeType = compilation.FindType("System.Type"); TypeArray = compilation.CreateArrayTypeSymbol(TypeType); TypeConstructor = TypeType.InstanceConstructors.Single(); TypeInit = (IMethodSymbol)TypeType.GetMembers("Init").Single(); CreateTypeParameter = (IMethodSymbol)TypeType.GetMembers("CreateTypeParameter").Single(); TypeIsInstanceOfType = (IMethodSymbol)TypeType.GetMembers("IsInstanceOfType").Single(); Type_GetTypeFromTypeFunc = 
(IMethodSymbol)TypeType.GetMembers("_GetTypeFromTypeFunc").Single(); GetField = TypeType.GetMethod("GetField", String); GetProperty = TypeType.GetMethod("GetProperty", String); GetMethod = TypeType.GetMethod("GetMethod", String, TypeArray); GetConstructor = TypeType.GetMethod("GetConstructor", TypeArray); AsExtensionType = compilation.FindType("System.Runtime.WootzJs.AsExtension"); JsniType = compilation.FindType("System.Runtime.WootzJs.Jsni"); EnumType = compilation.FindType("System.Enum"); EnumGetValue = EnumType.GetMethod("GetValue"); EnumInternalToObject = EnumType.GetMethod("InternalToObject"); Enumerable = compilation.FindType("System.Collections.IEnumerable"); EnumerableGeneric = compilation.FindType("System.Collections.Generic.IEnumerable`1"); Enumerator = compilation.FindType("System.Collections.IEnumerator"); EnumerableGetEnumerator = (IMethodSymbol)Enumerable.GetMembers("GetEnumerator").Single(); EnumeratorCurrent = (IPropertySymbol)Enumerator.GetMembers("Current").Single(); EnumeratorMoveNext = (IMethodSymbol)Enumerator.GetMembers("MoveNext").Single(); DelegateType = compilation.FindType("System.Delegate"); DelegateTypeConstructor = DelegateType.InstanceConstructors.Single(); DelegateTarget = (IPropertySymbol)DelegateType.GetMembers("Target").Single(); DelegateCombine = DelegateType.GetMembers("Combine").OfType<IMethodSymbol>().Single(x => x.Parameters.Count() == 2 && x.Parameters.All(y => Equals(y.Type, DelegateType))); DelegateRemove = DelegateType.GetMembers("Remove").OfType<IMethodSymbol>().Single(x => x.Parameters.Count() == 2 && x.Parameters.All(y => Equals(y.Type, DelegateType))); MulticastDelegateType = compilation.FindType("System.MulticastDelegate"); MulticastDelegateConstructor = MulticastDelegateType.InstanceConstructors.Single(x => x.Parameters.Last().Type.TypeKind == TypeKind.Array); NullableType = compilation.FindType("System.Nullable`1"); CharNullable = NullableType.Construct(Char); NullableHasValue = 
(IPropertySymbol)NullableType.GetMembers("HasValue").Single(); NullableValue = (IPropertySymbol)NullableType.GetMembers("Value").Single(); NullableGetValueOrDefault = (IMethodSymbol)NullableType.GetMembers("GetValueOrDefault").Single(); FieldInfo = compilation.FindType("System.Reflection.FieldInfo"); MethodInfo = compilation.FindType("System.Reflection.MethodInfo"); MemberInfo = compilation.FindType("System.Reflection.MemberInfo"); PropertyInfo = compilation.FindType("System.Reflection.PropertyInfo"); EventInfo = compilation.FindType("System.Reflection.EventInfo"); ConstructorInfo = compilation.FindType("System.Reflection.ConstructorInfo"); Attribute = compilation.FindType("System.Attribute"); FieldInfoConstructor = FieldInfo.InstanceConstructors.Single(); MethodInfoConstructor = MethodInfo.InstanceConstructors.Single(); ParameterInfo = compilation.FindType("System.Reflection.ParameterInfo"); ParameterInfoConstructor = ParameterInfo.InstanceConstructors.Single(); PropertyInfoConstructor = PropertyInfo.InstanceConstructors.Single(); EventInfoConstructor = EventInfo.InstanceConstructors.Single(); ConstructorInfoConstructor = ConstructorInfo.InstanceConstructors.Single(); TypeAttributes = compilation.FindType("System.Reflection.TypeAttributes"); TypeAttributesPublic = (IFieldSymbol)TypeAttributes.GetMembers("Public").Single(); TypeAttributesNotPublic = (IFieldSymbol)TypeAttributes.GetMembers("NotPublic").Single(); TypeAttributesNestedPublic = (IFieldSymbol)TypeAttributes.GetMembers("NestedPublic").Single(); TypeAttributesNestedPrivate = (IFieldSymbol)TypeAttributes.GetMembers("NestedPrivate").Single(); TypeAttributesNestedFamily = (IFieldSymbol)TypeAttributes.GetMembers("NestedFamily").Single(); TypeAttributesNestedAssembly = (IFieldSymbol)TypeAttributes.GetMembers("NestedAssembly").Single(); TypeAttributesNestedFamANDAssem = (IFieldSymbol)TypeAttributes.GetMembers("NestedFamANDAssem").Single(); TypeAttributesNestedFamORAssem = 
(IFieldSymbol)TypeAttributes.GetMembers("NestedFamORAssem").Single(); TypeAttributesAbstract = (IFieldSymbol)TypeAttributes.GetMembers("Abstract").Single(); TypeAttributesSealed = (IFieldSymbol)TypeAttributes.GetMembers("Sealed").Single(); TypeAttributesClass = (IFieldSymbol)TypeAttributes.GetMembers("Class").Single(); TypeAttributesInterface = (IFieldSymbol)TypeAttributes.GetMembers("Interface").Single(); FieldAttributes = compilation.FindType("System.Reflection.FieldAttributes"); FieldAttributesPublic = (IFieldSymbol)FieldAttributes.GetMembers("Public").Single(); FieldAttributesPrivate = (IFieldSymbol)FieldAttributes.GetMembers("Private").Single(); FieldAttributesFamily = (IFieldSymbol)FieldAttributes.GetMembers("Family").Single(); FieldAttributesAssembly = (IFieldSymbol)FieldAttributes.GetMembers("Assembly").Single(); FieldAttributesFamORAssem = (IFieldSymbol)FieldAttributes.GetMembers("FamORAssem").Single(); FieldAttributesStatic = (IFieldSymbol)FieldAttributes.GetMembers("Static").Single(); FieldAttributesInitOnly = (IFieldSymbol)FieldAttributes.GetMembers("InitOnly").Single(); FieldAttributesLiteral = (IFieldSymbol)FieldAttributes.GetMembers("Literal").Single(); MethodAttributes = compilation.FindType("System.Reflection.MethodAttributes"); MethodAttributesPublic = (IFieldSymbol)MethodAttributes.GetMembers("Public").Single(); MethodAttributesPrivate = (IFieldSymbol)MethodAttributes.GetMembers("Private").Single(); MethodAttributesFamily = (IFieldSymbol)MethodAttributes.GetMembers("Family").Single(); MethodAttributesAssembly = (IFieldSymbol)MethodAttributes.GetMembers("Assembly").Single(); MethodAttributesFamORAssem = (IFieldSymbol)MethodAttributes.GetMembers("FamORAssem").Single(); MethodAttributesStatic = (IFieldSymbol)MethodAttributes.GetMembers("Static").Single(); ParameterAttributes = compilation.FindType("System.Reflection.ParameterAttributes"); ParameterAttributesOut = (IFieldSymbol)ParameterAttributes.GetMembers("Out").Single(); 
ParameterAttributesHasDefault = (IFieldSymbol)ParameterAttributes.GetMembers("HasDefault").Single(); ParameterAttributesNone = (IFieldSymbol)ParameterAttributes.GetMembers("None").Single(); JsFunction = compilation.FindType("System.Runtime.WootzJs.JsFunction"); IDisposable = compilation.FindType("System.IDisposable"); IDisposableDispose = (IMethodSymbol)IDisposable.GetMembers("Dispose").Single(); Expression = compilation.FindType("System.Linq.Expressions.Expression"); ExpressionGeneric = compilation.FindType("System.Linq.Expressions.Expression`1"); ExpressionArray = compilation.CreateArrayTypeSymbol(Expression); ExpressionType = compilation.FindType("System.Linq.Expressions.ExpressionType"); ExpressionLambda = compilation.FindType("System.Linq.Expressions.Expression`1"); ParameterExpression = compilation.FindType("System.Linq.Expressions.ParameterExpression"); ParameterExpressionArray = compilation.CreateArrayTypeSymbol(ParameterExpression); NewExpression = compilation.FindType("System.Linq.Expressions.NewExpression"); MemberBinding = compilation.FindType("System.Linq.Expressions.MemberBinding"); MemberBindingArray = compilation.CreateArrayTypeSymbol(MemberBinding); ElementInit = compilation.FindType("System.Linq.Expressions.ElementInit"); ElementInitArray = compilation.CreateArrayTypeSymbol(ElementInit); Constant = compilation.FindType("System.Linq.Expressions.ConstantExpression"); Action = compilation.FindType("System.Action"); ActionT = compilation.FindType("System.Action`1"); Func = compilation.FindType("System.Func`1"); JsObject = compilation.FindType("System.Runtime.WootzJs.JsObject"); JsString = compilation.FindType("System.Runtime.WootzJs.JsString"); SafeToString = SpecialFunctions.GetMembers("SafeToString").OfType<IMethodSymbol>().Single(); Array = compilation.FindType("System.Array"); INotifyPropertyChanged = compilation.FindType("System.ComponentModel.INotifyPropertyChanged"); Task = compilation.FindType("System.Threading.Tasks.Task"); TaskT = 
compilation.FindType("System.Threading.Tasks.Task`1"); TaskAwaiter = compilation.FindType("System.Runtime.CompilerServices.TaskAwaiter"); TaskAwaiterT = compilation.FindType("System.Runtime.CompilerServices.TaskAwaiter`1"); Nop = compilation.FindType("System.Runtime.CompilerServices.Op").GetMethod("Nothing"); AsyncVoidMethodBuilder = compilation.FindType("System.Runtime.CompilerServices.AsyncVoidMethodBuilder"); AsyncVoidMethodBuilderCreate = AsyncVoidMethodBuilder.GetMethod("Create"); AsyncVoidMethodBuilderStart = AsyncVoidMethodBuilder.GetMethod("Start"); AsyncTaskMethodBuilder = compilation.FindType("System.Runtime.CompilerServices.AsyncTaskMethodBuilder"); AsyncTaskMethodBuilderCreate = AsyncTaskMethodBuilder.GetMethod("Create"); AsyncTaskMethodBuilderStart = AsyncTaskMethodBuilder.GetMethod("Start"); AsyncTaskTMethodBuilder = compilation.FindType("System.Runtime.CompilerServices.AsyncTaskMethodBuilder`1"); AsyncTaskTMethodBuilderCreate = AsyncTaskTMethodBuilder.GetMethod("Create"); AsyncTaskTMethodBuilderStart = AsyncTaskTMethodBuilder.GetMethod("Start"); CallerMemberNameAttribute = compilation.FindType("System.Runtime.CompilerServices.CallerMemberNameAttribute"); CultureInfo = compilation.FindType("System.Globalization.CultureInfo"); InvalidOperationException = compilation.FindType("System.InvalidOperationException"); InvalidOperationExceptionStringConstructor = InvalidOperationException.Constructors.Single(x => x.Parameters.Count() == 1 && x.Parameters.First().Type == String); IAsyncStateMachine = compilation.FindType("System.Runtime.CompilerServices.IAsyncStateMachine"); IAsyncStateMachineMoveNext = IAsyncStateMachine.GetMethodByName("MoveNext"); IAsyncStateMachineSetStateMachine = IAsyncStateMachine.GetMethodByName("SetStateMachine"); IEnumerator = compilation.FindType("System.Collections.IEnumerator"); IEnumerable = compilation.FindType("System.Collections.IEnumerable"); IEnumeratorT = compilation.FindType("System.Collections.Generic.IEnumerator`1"); 
IEnumerableT = compilation.FindType("System.Collections.Generic.IEnumerable`1"); IEnumeratorCurrent = (IPropertySymbol)IEnumerator.GetMembers("Current").Single(); IEnumeratorMoveNext = (IMethodSymbol)IEnumerator.GetMembers("MoveNext").Single(); IEnumeratorReset = (IMethodSymbol)IEnumerator.GetMembers("Reset").Single(); IEnumerableGetEnumerator = (IMethodSymbol)IEnumerable.GetMembers("GetEnumerator").Single(); IEnumeratorTCurrent = (IPropertySymbol)IEnumeratorT.GetMembers("Current").Single(); IEnumerableTGetEnumerator = (IMethodSymbol)IEnumerableT.GetMembers("GetEnumerator").Single(); YieldIterator = compilation.FindType("System.YieldIterator`1"); YieldIteratorClone = YieldIterator.GetMethodByName("Clone"); YieldIteratorDoMoveNext = YieldIterator.GetMethodByName("DoMoveNext"); NullPropagation = SpecialFunctions.GetMethod("NullPropagation"); // IAutoNotifyPropertyChanged = compilation.FindType("System.Runtime.WootzJs.IAutoNotifyPropertyChanged"); // NotifyPropertyChanged = IAutoNotifyPropertyChanged.GetMethod("NotifyPropertyChanged"); } } }
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ namespace NPOI.HSSF.Util { using System; using NPOI.HSSF.UserModel; using NPOI.SS.Util; /// <summary> /// Various utility functions that make working with a region of cells easier. 
/// @author Eric Pugh epugh@upstate.com
/// </summary>
public class HSSFRegionUtil
{
    private HSSFRegionUtil()
    {
        // no instances of this class
    }

    /// <summary>
    /// For setting the same property on many cells to the same value
    /// </summary>
    private class CellPropertySetter
    {
        private HSSFWorkbook _workbook;
        private String _propertyName;
        private short _propertyValue;

        public CellPropertySetter(HSSFWorkbook workbook, String propertyName, int value)
        {
            _workbook = workbook;
            _propertyName = propertyName;
            _propertyValue = (short)value;
        }

        /// <summary>
        /// Applies the stored style property to the cell at the given column of the row.
        /// </summary>
        public void SetProperty(NPOI.SS.UserModel.IRow row, int column)
        {
            NPOI.SS.UserModel.ICell cell = HSSFCellUtil.GetCell(row, column);
            HSSFCellUtil.SetCellStyleProperty(cell, _workbook, _propertyName, _propertyValue);
        }
    }

    // Applies the property down a single fixed column, once per row of the region
    // (used for the left and right edges of the region).
    private static void ApplyToColumn(CellPropertySetter cps, CellRangeAddress region, HSSFSheet sheet, int column)
    {
        int rowStart = region.FirstRow;
        int rowEnd = region.LastRow;
        for (int i = rowStart; i <= rowEnd; i++)
        {
            cps.SetProperty(HSSFCellUtil.GetRow(i, sheet), column);
        }
    }

    // Applies the property across a single fixed row, once per column of the region
    // (used for the top and bottom edges of the region).
    private static void ApplyToRow(CellPropertySetter cps, CellRangeAddress region, HSSFSheet sheet, int rowIndex)
    {
        int colStart = region.FirstColumn;
        int colEnd = region.LastColumn;
        NPOI.SS.UserModel.IRow row = HSSFCellUtil.GetRow(rowIndex, sheet);
        for (int i = colStart; i <= colEnd; i++)
        {
            cps.SetProperty(row, i);
        }
    }

    /// <summary>
    /// Sets the left border for a region of cells by manipulating the cell style
    /// of the individual cells on the left
    /// </summary>
    /// <param name="border">The new border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetBorderLeft(NPOI.SS.UserModel.BorderStyle border, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.BORDER_LEFT, (int)border);
        ApplyToColumn(cps, region, sheet, region.FirstColumn);
    }

    /// <summary>
    /// Sets the leftBorderColor attribute of the HSSFRegionUtil object
    /// </summary>
    /// <param name="color">The color of the border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetLeftBorderColor(int color, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.LEFT_BORDER_COLOR, color);
        ApplyToColumn(cps, region, sheet, region.FirstColumn);
    }

    /// <summary>
    /// Sets the borderRight attribute of the HSSFRegionUtil object
    /// </summary>
    /// <param name="border">The new border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetBorderRight(NPOI.SS.UserModel.BorderStyle border, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.BORDER_RIGHT, (int)border);
        ApplyToColumn(cps, region, sheet, region.LastColumn);
    }

    /// <summary>
    /// Sets the rightBorderColor attribute of the HSSFRegionUtil object
    /// </summary>
    /// <param name="color">The color of the border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetRightBorderColor(int color, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.RIGHT_BORDER_COLOR, color);
        ApplyToColumn(cps, region, sheet, region.LastColumn);
    }

    /// <summary>
    /// Sets the borderBottom attribute of the HSSFRegionUtil object
    /// </summary>
    /// <param name="border">The new border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetBorderBottom(NPOI.SS.UserModel.BorderStyle border, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.BORDER_BOTTOM, (int)border);
        ApplyToRow(cps, region, sheet, region.LastRow);
    }

    /// <summary>
    /// Sets the bottomBorderColor attribute of the HSSFRegionUtil object
    /// </summary>
    /// <param name="color">The color of the border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetBottomBorderColor(int color, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.BOTTOM_BORDER_COLOR, color);
        ApplyToRow(cps, region, sheet, region.LastRow);
    }

    /// <summary>
    /// Sets the borderTop attribute of the HSSFRegionUtil object
    /// </summary>
    /// <param name="border">The new border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetBorderTop(NPOI.SS.UserModel.BorderStyle border, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.BORDER_TOP, (int)border);
        ApplyToRow(cps, region, sheet, region.FirstRow);
    }

    /// <summary>
    /// Sets the topBorderColor attribute of the HSSFRegionUtil object
    /// </summary>
    /// <param name="color">The color of the border</param>
    /// <param name="region">The region that should have the border</param>
    /// <param name="sheet">The sheet that the region is on.</param>
    /// <param name="workbook">The workbook that the region is on.</param>
    public static void SetTopBorderColor(int color, CellRangeAddress region,
        HSSFSheet sheet, HSSFWorkbook workbook)
    {
        CellPropertySetter cps = new CellPropertySetter(workbook, CellUtil.TOP_BORDER_COLOR, color);
        ApplyToRow(cps, region, sheet, region.FirstRow);
    }
}
}
/*******************************************************************************
Copyright (C) 2012 Gamieon, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
******************************************************************************/
using UnityEngine;
using System.Collections;

// Camera behaviors available to the player.
public enum CameraMode
{
    CycleFaceForward = 0,
    CycleOrbit = 1,
    CycleTopDown = 2,
    Freeze = 3
};

// Typed accessors over Unity's PlayerPrefs store. Every Get* reads a single
// preference key (falling back to the documented default) and every Set*
// writes the matching key back; no state is held in this class itself.
static public class ConfigurationDirector
{
    // Hard engine ceiling on players, independent of the user-configured limit.
    static public int GetAbsoluteMaxPlayerCount()
    {
        // TODO: Increase this number after proving the engine can handle 16 players
        return 16;
    }

    static public GameRuleTypes GetGameRules()
    {
        int rules = PlayerPrefs.GetInt("GameRules", 1);
        return (GameRuleTypes)rules;
    }

    // TODO: When we introduce new modes of player (Team DM, Soccer), we should
    // be accessing this from the single player and network host setup menus.
    static public void SetGameRules(GameRuleTypes value)
    {
        PlayerPrefs.SetInt("GameRules", (int)value);
    }

    static public float GetCycleHue()
    {
        return PlayerPrefs.GetFloat("CycleHue", 0.56f);
    }

    // Derived value: the stored hue converted to an RGB color.
    static public Color GetCycleColor()
    {
        return ColorDirector.H2RGB(GetCycleHue());
    }

    static public void SetCycleHue(float value)
    {
        PlayerPrefs.SetFloat("CycleHue", value);
    }

    static public string GetPlayerName()
    {
        return PlayerPrefs.GetString("PlayerName", "Player");
    }

    static public void SetPlayerName(string value)
    {
        PlayerPrefs.SetString("PlayerName", value);
    }

    static public string GetGameName()
    {
        return PlayerPrefs.GetString("GameName", "Cycles3D Game");
    }

    static public void SetGameName(string value)
    {
        PlayerPrefs.SetString("GameName", value);
    }

    static public string GetGameDescription()
    {
        return PlayerPrefs.GetString("GameDescription", "");
    }

    static public void SetGameDescription(string value)
    {
        PlayerPrefs.SetString("GameDescription", value);
    }

    // Port to host on, kept as a string because it is edited via a text field.
    static public string GetGamePort()
    {
        return PlayerPrefs.GetString("GamePort", "19384");
    }

    static public void SetGamePort(string value)
    {
        PlayerPrefs.SetString("GamePort", value);
    }

    static public string GetGamePassword()
    {
        return PlayerPrefs.GetString("GamePassword", "");
    }

    static public void SetGamePassword(string value)
    {
        PlayerPrefs.SetString("GamePassword", value);
    }

    // Booleans are persisted as 0/1 ints since PlayerPrefs has no bool type.
    static public bool GetGameDedicatedServer()
    {
        return PlayerPrefs.GetInt("GameDedicatedServer", 0) != 0;
    }

    static public void SetGameDedicatedServer(bool value)
    {
        PlayerPrefs.SetInt("GameDedicatedServer", value ? 1 : 0);
    }

    static public int GetGameMaxConnections()
    {
        return PlayerPrefs.GetInt("GameMaxConnections", 2);
    }

    static public void SetGameMaxConnections(int value)
    {
        PlayerPrefs.SetInt("GameMaxConnections", value);
    }

    static public bool GetGamePrivate()
    {
        return PlayerPrefs.GetInt("Private", 0) != 0;
    }

    static public void SetGamePrivate(bool value)
    {
        PlayerPrefs.SetInt("Private", value ? 1 : 0);
    }

    static public string GetJoinIPAddress()
    {
        return PlayerPrefs.GetString("JoinIPAddress", "");
    }

    static public void SetJoinIPAddress(string value)
    {
        PlayerPrefs.SetString("JoinIPAddress", value);
    }

    static public string GetJoinPort()
    {
        return PlayerPrefs.GetString("JoinPort", "19384");
    }

    static public void SetJoinPort(string value)
    {
        PlayerPrefs.SetString("JoinPort", value);
    }

    static public CameraMode GetCameraMode()
    {
        int mode = PlayerPrefs.GetInt("CameraMode", 0);
        return (CameraMode)mode;
    }

    static public void SetCameraMode(CameraMode value)
    {
        PlayerPrefs.SetInt("CameraMode", (int)value);
    }

    static public int GetEnemyCount()
    {
        return PlayerPrefs.GetInt("EnemyCount", 5);
    }

    static public void SetEnemyCount(int value)
    {
        PlayerPrefs.SetInt("EnemyCount", value);
    }

    static public float GetSFXVolume()
    {
        return PlayerPrefs.GetFloat("SFXVolume", 0.8f);
    }

    static public void SetSFXVolume(float value)
    {
        PlayerPrefs.SetFloat("SFXVolume", value);
    }

    static public float GetCycleHumVolume()
    {
        return PlayerPrefs.GetFloat("CycleHumVolume", 0.8f);
    }

    static public void SetCycleHumVolume(float value)
    {
        PlayerPrefs.SetFloat("CycleHumVolume", value);
    }

    static public string GetCycleModelName()
    {
        return PlayerPrefs.GetString("CycleModelName", GetDefaultCycleModelName());
    }

    static public void SetCycleModelName(string value)
    {
        PlayerPrefs.SetString("CycleModelName", value);
    }

    // Sentinel name meaning "use the built-in cycle model".
    static public string GetDefaultCycleModelName()
    {
        return "{Default}";
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Xml;
using System.IO;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using OpenSim.Framework.Console;

namespace OpenSim.Framework
{
    /// <summary>
    /// Minimal, serializable description of a region: grid coordinates, the
    /// internal (UDP) endpoint, the external host name, and the remoting/http
    /// ports needed to reach it. Used when a full <see cref="RegionInfo"/>
    /// (with estate/region settings and config plumbing) is not required.
    /// </summary>
    [Serializable]
    public class SimpleRegionInfo
    {
        // private static readonly log4net.ILog m_log
        //     = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>
        /// The port by which http communication occurs with the region (most noticeably, CAPS communication)
        /// </summary>
        public uint HttpPort
        {
            get { return m_httpPort; }
            set { m_httpPort = value; }
        }
        protected uint m_httpPort;

        /// <summary>
        /// A well-formed URI for the host region server (namely "http://" + ExternalHostName)
        /// </summary>
        public string ServerURI
        {
            get { return m_serverURI; }
            set { m_serverURI = value; }
        }
        protected string m_serverURI;

        public string RegionName
        {
            get { return m_regionName; }
            set { m_regionName = value; }
        }
        protected string m_regionName = String.Empty;

        // NOTE(review): two alternate-ports flags exist; only the public
        // m_allow_alternate_ports field is read/written by the code visible
        // here — Allow_Alternate_Ports appears unused in this file.
        protected bool Allow_Alternate_Ports;
        public bool m_allow_alternate_ports;
        protected string m_externalHostName;

        // Endpoint the simulator listens on for incoming UDP client traffic.
        protected IPEndPoint m_internalEndPoint;
        // Grid location, in region cells (nullable: unset until configured).
        protected uint? m_regionLocX;
        protected uint? m_regionLocY;
        protected uint m_remotingPort;
        public UUID RegionID = UUID.Zero;
        public string RemotingAddress;
        // Scope for multi-tenancy grouping of regions (UUID.Zero = default scope).
        public UUID ScopeID = UUID.Zero;

        public SimpleRegionInfo()
        {
        }

        /// <summary>
        /// Build from an explicit grid location, internal endpoint and external host name.
        /// </summary>
        public SimpleRegionInfo(uint regionLocX, uint regionLocY, IPEndPoint internalEndPoint, string externalUri)
        {
            m_regionLocX = regionLocX;
            m_regionLocY = regionLocY;

            m_internalEndPoint = internalEndPoint;
            m_externalHostName = externalUri;
        }

        /// <summary>
        /// Build from a grid location, external host name and port; the internal
        /// endpoint is bound to the wildcard address 0.0.0.0 on that port.
        /// </summary>
        public SimpleRegionInfo(uint regionLocX, uint regionLocY, string externalUri, uint port)
        {
            m_regionLocX = regionLocX;
            m_regionLocY = regionLocY;

            m_externalHostName = externalUri;

            m_internalEndPoint = new IPEndPoint(IPAddress.Parse("0.0.0.0"), (int) port);
        }

        /// <summary>
        /// Copy-construct from a full <see cref="RegionInfo"/>. Note that
        /// RegionID is deliberately reset to UUID.Zero rather than copied.
        /// </summary>
        public SimpleRegionInfo(RegionInfo ConvertFrom)
        {
            m_regionName = ConvertFrom.RegionName;
            m_regionLocX = ConvertFrom.RegionLocX;
            m_regionLocY = ConvertFrom.RegionLocY;
            m_internalEndPoint = ConvertFrom.InternalEndPoint;
            m_externalHostName = ConvertFrom.ExternalHostName;
            m_remotingPort = ConvertFrom.RemotingPort;
            m_httpPort = ConvertFrom.HttpPort;
            m_allow_alternate_ports = ConvertFrom.m_allow_alternate_ports;
            RemotingAddress = ConvertFrom.RemotingAddress;
            RegionID = UUID.Zero;
            ServerURI = ConvertFrom.ServerURI;
        }

        public uint RemotingPort
        {
            get { return m_remotingPort; }
            set { m_remotingPort = value; }
        }

        /// <value>
        /// This accessor can throw all the exceptions that Dns.GetHostAddresses can throw.
        ///
        /// XXX Isn't this really doing too much to be a simple getter, rather than an explict method?
        /// </value>
        public IPEndPoint ExternalEndPoint
        {
            get
            {
                // Old one defaults to IPv6
                //return new IPEndPoint(Dns.GetHostAddresses(m_externalHostName)[0], m_internalEndPoint.Port);

                IPAddress ia = null;
                // If it is already an IP, don't resolve it - just return directly
                if (IPAddress.TryParse(m_externalHostName, out ia))
                    return new IPEndPoint(ia, m_internalEndPoint.Port);

                // Reset for next check
                ia = null;
                try
                {
                    // Prefer the first IPv4 (InterNetwork) address; otherwise
                    // fall back to whatever address was returned first.
                    foreach (IPAddress Adr in Dns.GetHostAddresses(m_externalHostName))
                    {
                        if (ia == null)
                            ia = Adr;

                        if (Adr.AddressFamily == AddressFamily.InterNetwork)
                        {
                            ia = Adr;
                            break;
                        }
                    }
                }
                catch (SocketException e)
                {
                    // Re-wrap so callers see which host name failed to resolve.
                    throw new Exception(
                        "Unable to resolve local hostname " + m_externalHostName + " innerException of type '" +
                        e + "' attached to this exception", e);
                }

                return new IPEndPoint(ia, m_internalEndPoint.Port);
            }

            set { m_externalHostName = value.ToString(); }
        }

        public string ExternalHostName
        {
            get { return m_externalHostName; }
            set { m_externalHostName = value; }
        }

        public IPEndPoint InternalEndPoint
        {
            get { return m_internalEndPoint; }
            set { m_internalEndPoint = value; }
        }

        // NOTE(review): throws InvalidOperationException if the nullable
        // backing field was never assigned (getter calls .Value).
        public uint RegionLocX
        {
            get { return m_regionLocX.Value; }
            set { m_regionLocX = value; }
        }

        public uint RegionLocY
        {
            get { return m_regionLocY.Value; }
            set { m_regionLocY = value; }
        }

        /// <summary>
        /// Packed handle combining the region's world coordinates
        /// (grid location scaled by Constants.RegionSize) into one ulong.
        /// </summary>
        public ulong RegionHandle
        {
            get { return Util.UIntsToLong((RegionLocX * (uint) Constants.RegionSize), (RegionLocY * (uint) Constants.RegionSize)); }
        }

        public int getInternalEndPointPort()
        {
            return m_internalEndPoint.Port;
        }

        /// <summary>
        /// Flatten this instance into a string-keyed dictionary (all values as
        /// strings) suitable for transport/serialization. The inverse of the
        /// dictionary constructor below.
        /// </summary>
        public Dictionary<string, object> ToKeyValuePairs()
        {
            Dictionary<string, object> kvp = new Dictionary<string, object>();
            kvp["uuid"] = RegionID.ToString();
            kvp["locX"] = RegionLocX.ToString();
            kvp["locY"] = RegionLocY.ToString();
            // Reading ExternalEndPoint here may trigger a DNS lookup (see getter above).
            kvp["external_ip_address"] = ExternalEndPoint.Address.ToString();
            kvp["external_port"] = ExternalEndPoint.Port.ToString();
            kvp["external_host_name"] = ExternalHostName;
            kvp["http_port"] = HttpPort.ToString();
            kvp["internal_ip_address"] = InternalEndPoint.Address.ToString();
            kvp["internal_port"] = InternalEndPoint.Port.ToString();
            kvp["alternate_ports"] = m_allow_alternate_ports.ToString();
            kvp["server_uri"] = ServerURI;

            return kvp;
        }

        /// <summary>
        /// Rebuild from the dictionary produced by <see cref="ToKeyValuePairs"/>.
        /// Endpoints default to 0.0.0.0:0 when the corresponding keys are null.
        /// NOTE(review): the indexer lookups assume every key is present; a
        /// missing key would throw KeyNotFoundException before the null check —
        /// verify against the producers of these dictionaries.
        /// </summary>
        public SimpleRegionInfo(Dictionary<string, object> kvp)
        {
            if ((kvp["external_ip_address"] != null) && (kvp["external_port"] != null))
            {
                int port = 0;
                Int32.TryParse((string)kvp["external_port"], out port);
                IPEndPoint ep = new IPEndPoint(IPAddress.Parse((string)kvp["external_ip_address"]), port);
                ExternalEndPoint = ep;
            }
            else
                ExternalEndPoint = new IPEndPoint(IPAddress.Parse("0.0.0.0"), 0);

            if (kvp["external_host_name"] != null)
                ExternalHostName = (string)kvp["external_host_name"];

            if (kvp["http_port"] != null)
            {
                UInt32 port = 0;
                UInt32.TryParse((string)kvp["http_port"], out port);
                HttpPort = port;
            }

            if ((kvp["internal_ip_address"] != null) && (kvp["internal_port"] != null))
            {
                int port = 0;
                Int32.TryParse((string)kvp["internal_port"], out port);
                IPEndPoint ep = new IPEndPoint(IPAddress.Parse((string)kvp["internal_ip_address"]), port);
                InternalEndPoint = ep;
            }
            else
                InternalEndPoint = new IPEndPoint(IPAddress.Parse("0.0.0.0"), 0);

            if (kvp["alternate_ports"] != null)
            {
                bool alts = false;
                Boolean.TryParse((string)kvp["alternate_ports"], out alts);
                m_allow_alternate_ports = alts;
            }

            if (kvp["server_uri"] != null)
                ServerURI = (string)kvp["server_uri"];
        }
    }

    /// <summary>
    /// Full region description: extends <see cref="SimpleRegionInfo"/> with
    /// estate/region settings, master-avatar details, prim limits, and the
    /// machinery to load/save itself from Nini ini/xml config files, legacy
    /// ConfigurationMember config, and OSD maps.
    /// </summary>
    public class RegionInfo : SimpleRegionInfo
    {
        // private static readonly log4net.ILog m_log
        //     = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

        public bool commFailTF = false;
        // Legacy console-driven configuration helper; null when the region was
        // loaded from a Nini ini/xml source instead.
        public ConfigurationMember configMember;
        public string DataStore = String.Empty;
        // Path of the config file this region was loaded from / saved to.
        public string RegionFile = String.Empty;
        public bool isSandbox = false;
        public bool Persistent = true;

        private EstateSettings m_estateSettings;
        private RegionSettings m_regionSettings;
        // private IConfigSource m_configSource = null;

        public UUID MasterAvatarAssignedUUID = UUID.Zero;
        public string MasterAvatarFirstName = String.Empty;
        public string MasterAvatarLastName = String.Empty;
        public string MasterAvatarSandboxPassword = String.Empty;
        public UUID originRegionID = UUID.Zero;
        public string proxyUrl = "";
        public int ProxyOffset = 0;
        // Fresh random secret per instance.
        public string regionSecret = UUID.Random().ToString();
        public string osSecret;

        public UUID lastMapUUID = UUID.Zero;
        public string lastMapRefresh = "0";

        // Prim limits; 0 means "not configured" (WriteNiniConfig skips zeros).
        private int m_nonphysPrimMax = 0;
        private int m_physPrimMax = 0;
        private bool m_clampPrimSize = false;
        private int m_objectCapacity = 0;
        private string m_regionType = String.Empty;

        // Apparently, we're applying the same estatesettings regardless of whether it's local or remote.
        // MT: Yes. Estates can't span trust boundaries. Therefore, it can be
        // assumed that all instances belonging to one estate are able to
        // access the same database server. Since estate settings are loaded
        // from there, that should be sufficient for full remote administration

        // File based loading
        //
        public RegionInfo(string description, string filename, bool skipConsoleConfig, IConfigSource configSource) : this(description, filename, skipConsoleConfig, configSource, String.Empty)
        {
        }

        /// <summary>
        /// Load (or interactively create) a region from an ini file, a Nini XML
        /// file, or — as a last resort — the legacy ConfigurationMember format.
        /// </summary>
        public RegionInfo(string description, string filename, bool skipConsoleConfig, IConfigSource configSource, string configName)
        {
            // m_configSource = configSource;

            if (filename.ToLower().EndsWith(".ini"))
            {
                if (!File.Exists(filename)) // New region config request
                {
                    IniConfigSource newFile = new IniConfigSource();
                    ReadNiniConfig(newFile, String.Empty);

                    newFile.Save(filename);

                    RegionFile = filename;

                    return;
                }

                IniConfigSource source = new IniConfigSource(filename);

                bool saveFile = false;
                if (source.Configs[configName] == null)
                    saveFile = true;

                ReadNiniConfig(source, configName);

                // Persist the file if a named section had to be created.
                if (configName != String.Empty && saveFile)
                    source.Save(filename);

                RegionFile = filename;

                return;
            }

            try
            {
                // This will throw if it's not legal Nini XML format
                // and thereby toss it to the legacy loader
                //
                IConfigSource xmlsource = new XmlConfigSource(filename);

                ReadNiniConfig(xmlsource, configName);

                RegionFile = filename;

                return;
            }
            catch (Exception)
            {
                // Deliberate best-effort fall-through to the legacy loader below.
            }

            configMember =
                new ConfigurationMember(filename, description, loadConfigurationOptions, handleIncomingConfiguration, !skipConsoleConfig);
            configMember.performConfigurationRetrieve();
            RegionFile = filename;
        }

        // The web loader uses this
        //
        public RegionInfo(string description, XmlNode xmlNode, bool skipConsoleConfig, IConfigSource configSource)
        {
            // m_configSource = configSource;
            configMember =
                new ConfigurationMember(xmlNode, description, loadConfigurationOptions, handleIncomingConfiguration, !skipConsoleConfig);
            configMember.performConfigurationRetrieve();
        }

        public RegionInfo(uint regionLocX, uint regionLocY, IPEndPoint internalEndPoint, string externalUri) :
            base(regionLocX, regionLocY, internalEndPoint, externalUri)
        {
        }

        public RegionInfo()
        {
        }

        /// <summary>
        /// Copy-construct from a SerializableRegionInfo; RegionID is reset to
        /// UUID.Zero, matching the other conversion constructors.
        /// </summary>
        public RegionInfo(SerializableRegionInfo ConvertFrom)
        {
            m_regionLocX = ConvertFrom.RegionLocX;
            m_regionLocY = ConvertFrom.RegionLocY;
            m_internalEndPoint = ConvertFrom.InternalEndPoint;
            m_externalHostName = ConvertFrom.ExternalHostName;
            m_remotingPort = ConvertFrom.RemotingPort;
            m_allow_alternate_ports = ConvertFrom.m_allow_alternate_ports;
            RemotingAddress = ConvertFrom.RemotingAddress;
            RegionID = UUID.Zero;
            proxyUrl = ConvertFrom.ProxyUrl;
            originRegionID = ConvertFrom.OriginRegionID;
            RegionName = ConvertFrom.RegionName;
            ServerURI = ConvertFrom.ServerURI;
        }

        /// <summary>
        /// Copy-construct from a SimpleRegionInfo. NOTE(review): unlike the
        /// SerializableRegionInfo overload, HttpPort is not copied here.
        /// </summary>
        public RegionInfo(SimpleRegionInfo ConvertFrom)
        {
            m_regionLocX = ConvertFrom.RegionLocX;
            m_regionLocY = ConvertFrom.RegionLocY;
            m_internalEndPoint = ConvertFrom.InternalEndPoint;
            m_externalHostName = ConvertFrom.ExternalHostName;
            m_remotingPort = ConvertFrom.RemotingPort;
            m_allow_alternate_ports = ConvertFrom.m_allow_alternate_ports;
            RemotingAddress = ConvertFrom.RemotingAddress;
            RegionID = UUID.Zero;
            ServerURI = ConvertFrom.ServerURI;
        }

        // Lazily created on first access; never returns null.
        public EstateSettings EstateSettings
        {
            get
            {
                if (m_estateSettings == null)
                {
                    m_estateSettings = new EstateSettings();
                }

                return m_estateSettings;
            }

            set { m_estateSettings = value; }
        }

        // Lazily created on first access; never returns null.
        public RegionSettings RegionSettings
        {
            get
            {
                if (m_regionSettings == null)
                {
                    m_regionSettings = new RegionSettings();
                }

                return m_regionSettings;
            }

            set { m_regionSettings = value; }
        }

        public int NonphysPrimMax
        {
            get { return m_nonphysPrimMax; }
        }

        public int PhysPrimMax
        {
            get { return m_physPrimMax; }
        }

        public bool ClampPrimSize
        {
            get { return m_clampPrimSize; }
        }

        public int ObjectCapacity
        {
            get { return m_objectCapacity; }
        }

        // Access level derived from the region's maturity rating.
        public byte AccessLevel
        {
            get { return (byte)Util.ConvertMaturityToAccessLevel((uint)RegionSettings.Maturity); }
        }

        public string RegionType
        {
            get { return m_regionType; }
        }

        /// <summary>
        /// Replace the internal (UDP) endpoint with the given address and port.
        /// </summary>
        public void SetEndPoint(string ipaddr, int port)
        {
            IPAddress tmpIP = IPAddress.Parse(ipaddr);
            IPEndPoint tmpEPE = new IPEndPoint(tmpIP, port);
            m_internalEndPoint = tmpEPE;
        }

        /// <summary>
        /// Populate this region from a Nini config source, prompting on the
        /// console for any values that are missing (and writing the answers
        /// back into the config so they can be saved by the caller).
        /// </summary>
        private void ReadNiniConfig(IConfigSource source, string name)
        {
            bool creatingNew = false;

            if (source.Configs.Count == 0)
            {
                MainConsole.Instance.Output("=====================================\n");
                MainConsole.Instance.Output("We are now going to ask a couple of questions about your region.\n");
                MainConsole.Instance.Output("You can press 'enter' without typing anything to use the default\n");
                MainConsole.Instance.Output("the default is displayed between [ ] brackets.\n");
                MainConsole.Instance.Output("=====================================\n");

                if (name == String.Empty)
                    name = MainConsole.Instance.CmdPrompt("New region name", name);
                if (name == String.Empty)
                    throw new Exception("Cannot interactively create region with no name");

                source.AddConfig(name);
                creatingNew = true;
            }

            if (name == String.Empty)
                name = source.Configs[0].Name;

            if (source.Configs[name] == null)
            {
                source.AddConfig(name);
                creatingNew = true;
            }

            IConfig config = source.Configs[name];

            // UUID
            //
            string regionUUID = config.GetString("RegionUUID", string.Empty);

            if (regionUUID == String.Empty)
            {
                UUID newID = UUID.Random();

                regionUUID = MainConsole.Instance.CmdPrompt("Region UUID", newID.ToString());
                config.Set("RegionUUID", regionUUID);
            }

            RegionID = new UUID(regionUUID);
            originRegionID = RegionID; // What IS this?!

            // Region name
            //
            RegionName = name;

            // Region location
            //
            string location = config.GetString("Location", String.Empty);

            if (location == String.Empty)
            {
                location = MainConsole.Instance.CmdPrompt("Region Location", "1000,1000");
                config.Set("Location", location);
            }

            // NOTE(review): throws if the location string is not "X,Y" with
            // two parseable unsigned integers.
            string[] locationElements = location.Split(new char[] {','});

            m_regionLocX = Convert.ToUInt32(locationElements[0]);
            m_regionLocY = Convert.ToUInt32(locationElements[1]);

            // Datastore (is this implemented? Omitted from example!)
            //
            DataStore = config.GetString("Datastore", String.Empty);

            // Internal IP
            //
            IPAddress address;

            if (config.Contains("InternalAddress"))
            {
                address = IPAddress.Parse(config.GetString("InternalAddress", String.Empty));
            }
            else
            {
                address = IPAddress.Parse(MainConsole.Instance.CmdPrompt("Internal IP address", "0.0.0.0"));
                config.Set("InternalAddress", address.ToString());
            }

            int port;

            if (config.Contains("InternalPort"))
            {
                port = config.GetInt("InternalPort", 9000);
            }
            else
            {
                port = Convert.ToInt32(MainConsole.Instance.CmdPrompt("Internal port", "9000"));
                config.Set("InternalPort", port);
            }

            m_internalEndPoint = new IPEndPoint(address, port);

            if (config.Contains("AllowAlternatePorts"))
            {
                m_allow_alternate_ports = config.GetBoolean("AllowAlternatePorts", true);
            }
            else
            {
                m_allow_alternate_ports = Convert.ToBoolean(MainConsole.Instance.CmdPrompt("Allow alternate ports", "False"));

                config.Set("AllowAlternatePorts", m_allow_alternate_ports.ToString());
            }

            // External IP
            //
            string externalName;

            if (config.Contains("ExternalHostName"))
            {
                externalName = config.GetString("ExternalHostName", "SYSTEMIP");
            }
            else
            {
                externalName = MainConsole.Instance.CmdPrompt("External host name", "SYSTEMIP");
                config.Set("ExternalHostName", externalName);
            }

            // "SYSTEMIP" is a sentinel meaning "use this machine's local address".
            if (externalName == "SYSTEMIP")
                m_externalHostName = Util.GetLocalHost().ToString();
            else
                m_externalHostName = externalName;

            // Master avatar cruft
            //
            string masterAvatarUUID;
            if (!creatingNew)
            {
                masterAvatarUUID = config.GetString("MasterAvatarUUID", UUID.Zero.ToString());
                MasterAvatarFirstName = config.GetString("MasterAvatarFirstName", String.Empty);
                MasterAvatarLastName = config.GetString("MasterAvatarLastName", String.Empty);
                MasterAvatarSandboxPassword = config.GetString("MasterAvatarSandboxPassword", String.Empty);
            }
            else
            {
                masterAvatarUUID = MainConsole.Instance.CmdPrompt("Master Avatar UUID", UUID.Zero.ToString());
                if (masterAvatarUUID != UUID.Zero.ToString())
                {
                    config.Set("MasterAvatarUUID", masterAvatarUUID);
                }
                else
                {
                    // No UUID supplied: fall back to asking for a name/password pair.
                    MasterAvatarFirstName = MainConsole.Instance.CmdPrompt("Master Avatar first name (enter for no master avatar)", String.Empty);
                    if (MasterAvatarFirstName != String.Empty)
                    {
                        MasterAvatarLastName = MainConsole.Instance.CmdPrompt("Master Avatar last name", String.Empty);
                        MasterAvatarSandboxPassword = MainConsole.Instance.CmdPrompt("Master Avatar sandbox password", String.Empty);

                        config.Set("MasterAvatarFirstName", MasterAvatarFirstName);
                        config.Set("MasterAvatarLastName", MasterAvatarLastName);
                        config.Set("MasterAvatarSandboxPassword", MasterAvatarSandboxPassword);
                    }
                }
            }

            MasterAvatarAssignedUUID = new UUID(masterAvatarUUID);

            m_regionType = config.GetString("RegionType", String.Empty);

            // Prim stuff
            //
            m_nonphysPrimMax = config.GetInt("NonphysicalPrimMax", 256);
            m_physPrimMax = config.GetInt("PhysicalPrimMax", 10);
            m_clampPrimSize = config.GetBoolean("ClampPrimSize", false);
            m_objectCapacity = config.GetInt("MaxPrims", 15000);

            // Multi-tenancy
            //
            ScopeID = new UUID(config.GetString("ScopeID", UUID.Zero.ToString()));
        }

        /// <summary>
        /// Write this region's settings into the config source, replacing any
        /// existing section named after the region. Optional values (prim
        /// limits, scope, region type, master avatar) are only written when
        /// set to something other than their zero/empty defaults.
        /// </summary>
        private void WriteNiniConfig(IConfigSource source)
        {
            IConfig config = source.Configs[RegionName];

            if (config != null)
                source.Configs.Remove(RegionName);

            config = source.AddConfig(RegionName);

            config.Set("RegionUUID", RegionID.ToString());

            string location = String.Format("{0},{1}", m_regionLocX, m_regionLocY);
            config.Set("Location", location);

            if (DataStore != String.Empty)
                config.Set("Datastore", DataStore);

            config.Set("InternalAddress", m_internalEndPoint.Address.ToString());
            config.Set("InternalPort", m_internalEndPoint.Port);

            config.Set("AllowAlternatePorts", m_allow_alternate_ports.ToString());

            config.Set("ExternalHostName", m_externalHostName);

            if (MasterAvatarAssignedUUID != UUID.Zero)
            {
                config.Set("MasterAvatarUUID", MasterAvatarAssignedUUID.ToString());
            }
            else if (MasterAvatarFirstName != String.Empty && MasterAvatarLastName != String.Empty)
            {
                config.Set("MasterAvatarFirstName", MasterAvatarFirstName);
                config.Set("MasterAvatarLastName", MasterAvatarLastName);
            }
            if (MasterAvatarSandboxPassword != String.Empty)
            {
                config.Set("MasterAvatarSandboxPassword", MasterAvatarSandboxPassword);
            }

            if (m_nonphysPrimMax != 0)
                config.Set("NonphysicalPrimMax", m_nonphysPrimMax);
            if (m_physPrimMax != 0)
                config.Set("PhysicalPrimMax", m_physPrimMax);
            config.Set("ClampPrimSize", m_clampPrimSize.ToString());
            if (m_objectCapacity != 0)
                config.Set("MaxPrims", m_objectCapacity);

            if (ScopeID != UUID.Zero)
                config.Set("ScopeID", ScopeID.ToString());

            if (RegionType != String.Empty)
                config.Set("RegionType", RegionType);
        }

        // ConfigurationMember callback that accepts everything without
        // applying it; used by SaveRegionToFile's legacy save path.
        public bool ignoreIncomingConfiguration(string configuration_key, object configuration_result)
        {
            return true;
        }

        /// <summary>
        /// Persist the region: to an ini file via WriteNiniConfig when the
        /// filename ends in ".ini", otherwise via the legacy
        /// ConfigurationMember mechanism.
        /// </summary>
        public void SaveRegionToFile(string description, string filename)
        {
            if (filename.ToLower().EndsWith(".ini"))
            {
                IniConfigSource source = new IniConfigSource();
                try
                {
                    source = new IniConfigSource(filename); // Load if it exists
                }
                catch (Exception)
                {
                    // Best-effort: a missing/unreadable file just means we
                    // start from the empty source created above.
                }

                WriteNiniConfig(source);

                source.Save(filename);

                return;
            }

            configMember = new ConfigurationMember(filename, description, loadConfigurationOptionsFromMe,
                                                   ignoreIncomingConfiguration, false);
            configMember.performConfigurationRetrieve();
            RegionFile = filename;
        }

        /// <summary>
        /// Register configuration options seeded from this instance's current
        /// values (used when saving via ConfigurationMember).
        /// </summary>
        public void loadConfigurationOptionsFromMe()
        {
            configMember.addConfigurationOption("sim_UUID", ConfigurationOption.ConfigurationTypes.TYPE_UUID_NULL_FREE,
                                                "UUID of Region (Default is recommended, random UUID)",
                                                RegionID.ToString(), true);
            configMember.addConfigurationOption("sim_name", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "Region Name", RegionName, true);
            configMember.addConfigurationOption("sim_location_x", ConfigurationOption.ConfigurationTypes.TYPE_UINT32,
                                                "Grid Location (X Axis)", m_regionLocX.ToString(), true);
            configMember.addConfigurationOption("sim_location_y", ConfigurationOption.ConfigurationTypes.TYPE_UINT32,
                                                "Grid Location (Y Axis)", m_regionLocY.ToString(), true);
            //m_configMember.addConfigurationOption("datastore", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY, "Filename for local storage", "OpenSim.db", false);
            configMember.addConfigurationOption("internal_ip_address", ConfigurationOption.ConfigurationTypes.TYPE_IP_ADDRESS,
                                                "Internal IP Address for incoming UDP client connections",
                                                m_internalEndPoint.Address.ToString(), true);
            configMember.addConfigurationOption("internal_ip_port", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Internal IP Port for incoming UDP client connections",
                                                m_internalEndPoint.Port.ToString(), true);
            configMember.addConfigurationOption("allow_alternate_ports", ConfigurationOption.ConfigurationTypes.TYPE_BOOLEAN,
                                                "Allow sim to find alternate UDP ports when ports are in use?",
                                                m_allow_alternate_ports.ToString(), true);
            configMember.addConfigurationOption("external_host_name", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "External Host Name", m_externalHostName, true);
            configMember.addConfigurationOption("master_avatar_uuid", ConfigurationOption.ConfigurationTypes.TYPE_UUID,
                                                "Master Avatar UUID", MasterAvatarAssignedUUID.ToString(), true);
            configMember.addConfigurationOption("master_avatar_first", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "First Name of Master Avatar", MasterAvatarFirstName, true);
            configMember.addConfigurationOption("master_avatar_last", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "Last Name of Master Avatar", MasterAvatarLastName, true);
            configMember.addConfigurationOption("master_avatar_pass", ConfigurationOption.ConfigurationTypes.TYPE_STRING,
                                                "(Sandbox Mode Only)Password for Master Avatar account",
                                                MasterAvatarSandboxPassword, true);
            configMember.addConfigurationOption("lastmap_uuid", ConfigurationOption.ConfigurationTypes.TYPE_UUID,
                                                "Last Map UUID", lastMapUUID.ToString(), true);
            configMember.addConfigurationOption("lastmap_refresh", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "Last Map Refresh", Util.UnixTimeSinceEpoch().ToString(), true);
            configMember.addConfigurationOption("nonphysical_prim_max", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Maximum size for nonphysical prims", m_nonphysPrimMax.ToString(), true);
            configMember.addConfigurationOption("physical_prim_max", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Maximum size for physical prims", m_physPrimMax.ToString(), true);
            configMember.addConfigurationOption("clamp_prim_size", ConfigurationOption.ConfigurationTypes.TYPE_BOOLEAN,
                                                "Clamp prims to max size", m_clampPrimSize.ToString(), true);
            configMember.addConfigurationOption("object_capacity", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Max objects this sim will hold", m_objectCapacity.ToString(), true);
            configMember.addConfigurationOption("scope_id", ConfigurationOption.ConfigurationTypes.TYPE_UUID,
                                                "Scope ID for this region", ScopeID.ToString(), true);
            configMember.addConfigurationOption("region_type", ConfigurationOption.ConfigurationTypes.TYPE_STRING,
                                                "Free form string describing the type of region", String.Empty, true);
        }

        /// <summary>
        /// Register configuration options with hard-coded defaults (used when
        /// loading a region via ConfigurationMember, possibly interactively).
        /// </summary>
        public void loadConfigurationOptions()
        {
            configMember.addConfigurationOption("sim_UUID", ConfigurationOption.ConfigurationTypes.TYPE_UUID,
                                                "UUID of Region (Default is recommended, random UUID)",
                                                UUID.Random().ToString(), true);
            configMember.addConfigurationOption("sim_name", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "Region Name", "OpenSim Test", false);
            configMember.addConfigurationOption("sim_location_x", ConfigurationOption.ConfigurationTypes.TYPE_UINT32,
                                                "Grid Location (X Axis)", "1000", false);
            configMember.addConfigurationOption("sim_location_y", ConfigurationOption.ConfigurationTypes.TYPE_UINT32,
                                                "Grid Location (Y Axis)", "1000", false);
            //m_configMember.addConfigurationOption("datastore", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY, "Filename for local storage", "OpenSim.db", false);
            configMember.addConfigurationOption("internal_ip_address", ConfigurationOption.ConfigurationTypes.TYPE_IP_ADDRESS,
                                                "Internal IP Address for incoming UDP client connections",
                                                "0.0.0.0", false);
            configMember.addConfigurationOption("internal_ip_port", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Internal IP Port for incoming UDP client connections",
                                                ConfigSettings.DefaultRegionHttpPort.ToString(), false);
            configMember.addConfigurationOption("allow_alternate_ports", ConfigurationOption.ConfigurationTypes.TYPE_BOOLEAN,
                                                "Allow sim to find alternate UDP ports when ports are in use?",
                                                "false", true);
            configMember.addConfigurationOption("external_host_name", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "External Host Name", "127.0.0.1", false);
            configMember.addConfigurationOption("master_avatar_uuid", ConfigurationOption.ConfigurationTypes.TYPE_UUID,
                                                "Master Avatar UUID", UUID.Zero.ToString(), true);
            // Master avatar name/password are only asked when no master UUID is
            // assigned (see shouldMasterAvatarDetailsBeAsked).
            configMember.addConfigurationOption("master_avatar_first", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "First Name of Master Avatar", "Test", false,
                                                (ConfigurationOption.ConfigurationOptionShouldBeAsked)
                                                shouldMasterAvatarDetailsBeAsked);
            configMember.addConfigurationOption("master_avatar_last", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "Last Name of Master Avatar", "User", false,
                                                (ConfigurationOption.ConfigurationOptionShouldBeAsked)
                                                shouldMasterAvatarDetailsBeAsked);
            configMember.addConfigurationOption("master_avatar_pass", ConfigurationOption.ConfigurationTypes.TYPE_STRING,
                                                "(Sandbox Mode Only)Password for Master Avatar account", "test", false,
                                                (ConfigurationOption.ConfigurationOptionShouldBeAsked)
                                                shouldMasterAvatarDetailsBeAsked);
            configMember.addConfigurationOption("lastmap_uuid", ConfigurationOption.ConfigurationTypes.TYPE_UUID,
                                                "Last Map UUID", lastMapUUID.ToString(), true);
            configMember.addConfigurationOption("lastmap_refresh", ConfigurationOption.ConfigurationTypes.TYPE_STRING_NOT_EMPTY,
                                                "Last Map Refresh", Util.UnixTimeSinceEpoch().ToString(), true);
            configMember.addConfigurationOption("nonphysical_prim_max", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Maximum size for nonphysical prims", "0", true);
            configMember.addConfigurationOption("physical_prim_max", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Maximum size for physical prims", "0", true);
            configMember.addConfigurationOption("clamp_prim_size", ConfigurationOption.ConfigurationTypes.TYPE_BOOLEAN,
                                                "Clamp prims to max size", "false", true);
            configMember.addConfigurationOption("object_capacity", ConfigurationOption.ConfigurationTypes.TYPE_INT32,
                                                "Max objects this sim will hold", "0", true);
            configMember.addConfigurationOption("scope_id", ConfigurationOption.ConfigurationTypes.TYPE_UUID,
                                                "Scope ID for this region", UUID.Zero.ToString(), true);
            configMember.addConfigurationOption("region_type", ConfigurationOption.ConfigurationTypes.TYPE_STRING,
                                                "Region Type", String.Empty, true);
        }

        // Only prompt for master avatar name/password when no master UUID has
        // been assigned yet.
        public bool shouldMasterAvatarDetailsBeAsked(string configuration_key)
        {
            return MasterAvatarAssignedUUID == UUID.Zero;
        }

        /// <summary>
        /// ConfigurationMember callback that applies a single configuration
        /// value to the matching field/property; unknown keys are silently
        /// ignored. Always returns true (value accepted).
        /// </summary>
        public bool handleIncomingConfiguration(string configuration_key, object configuration_result)
        {
            switch (configuration_key)
            {
                case "sim_UUID":
                    RegionID = (UUID) configuration_result;
                    originRegionID = (UUID) configuration_result;
                    break;
                case "sim_name":
                    RegionName = (string) configuration_result;
                    break;
                case "sim_location_x":
                    m_regionLocX = (uint) configuration_result;
                    break;
                case "sim_location_y":
                    m_regionLocY = (uint) configuration_result;
                    break;
                case "datastore":
                    DataStore = (string) configuration_result;
                    break;
                case "internal_ip_address":
                    // Port is set separately by the "internal_ip_port" key.
                    IPAddress address = (IPAddress) configuration_result;
                    m_internalEndPoint = new IPEndPoint(address, 0);
                    break;
                case "internal_ip_port":
                    m_internalEndPoint.Port = (int) configuration_result;
                    break;
                case "allow_alternate_ports":
                    m_allow_alternate_ports = (bool) configuration_result;
                    break;
                case "external_host_name":
                    if ((string) configuration_result != "SYSTEMIP")
                    {
                        m_externalHostName = (string) configuration_result;
                    }
                    else
                    {
                        m_externalHostName = Util.GetLocalHost().ToString();
                    }
                    break;
                case "master_avatar_uuid":
                    MasterAvatarAssignedUUID = (UUID) configuration_result;
                    break;
                case "master_avatar_first":
                    MasterAvatarFirstName = (string) configuration_result;
                    break;
                case "master_avatar_last":
                    MasterAvatarLastName = (string) configuration_result;
                    break;
                case "master_avatar_pass":
                    MasterAvatarSandboxPassword = (string)configuration_result;
                    break;
                case "lastmap_uuid":
                    lastMapUUID = (UUID)configuration_result;
                    break;
                case "lastmap_refresh":
                    lastMapRefresh = (string)configuration_result;
                    break;
                case "nonphysical_prim_max":
                    m_nonphysPrimMax = (int)configuration_result;
                    break;
                case "physical_prim_max":
                    m_physPrimMax = (int)configuration_result;
                    break;
                case "clamp_prim_size":
                    m_clampPrimSize = (bool)configuration_result;
                    break;
                case "object_capacity":
                    m_objectCapacity = (int)configuration_result;
                    break;
                case "scope_id":
                    ScopeID = (UUID)configuration_result;
                    break;
                case "region_type":
                    m_regionType = (string)configuration_result;
                    break;
            }

            return true;
        }

        /// <summary>
        /// Record the most recent map tile UUID and refresh timestamp, and
        /// push them into the legacy config (when one is attached).
        /// </summary>
        public void SaveLastMapUUID(UUID mapUUID)
        {
            lastMapUUID = mapUUID;
            lastMapRefresh = Util.UnixTimeSinceEpoch().ToString();

            if (configMember == null)
                return;

            configMember.forceSetConfigurationOption("lastmap_uuid", mapUUID.ToString());
            configMember.forceSetConfigurationOption("lastmap_refresh", lastMapRefresh);
        }

        /// <summary>
        /// Serialize this region into an OSDMap (all values as OSD strings,
        /// except the boolean alt-ports flag). Empty/null optional fields are
        /// omitted. Inverse of <see cref="UnpackRegionInfoData"/>.
        /// </summary>
        public OSDMap PackRegionInfoData()
        {
            OSDMap args = new OSDMap();
            args["region_id"] = OSD.FromUUID(RegionID);
            if ((RegionName != null) && !RegionName.Equals(""))
                args["region_name"] = OSD.FromString(RegionName);
            args["external_host_name"] = OSD.FromString(ExternalHostName);
            args["http_port"] = OSD.FromString(HttpPort.ToString());
            args["server_uri"] = OSD.FromString(ServerURI);
            args["region_xloc"] = OSD.FromString(RegionLocX.ToString());
            args["region_yloc"] = OSD.FromString(RegionLocY.ToString());
            args["internal_ep_address"] = OSD.FromString(InternalEndPoint.Address.ToString());
            args["internal_ep_port"] = OSD.FromString(InternalEndPoint.Port.ToString());
            if ((RemotingAddress != null) && !RemotingAddress.Equals(""))
                args["remoting_address"] = OSD.FromString(RemotingAddress);
            args["remoting_port"] = OSD.FromString(RemotingPort.ToString());
            args["allow_alt_ports"] = OSD.FromBoolean(m_allow_alternate_ports);
            if ((proxyUrl != null) && !proxyUrl.Equals(""))
                args["proxy_url"] = OSD.FromString(proxyUrl);
            if (RegionType != String.Empty)
                args["region_type"] = OSD.FromString(RegionType);

            return args;
        }

        /// <summary>
        /// Populate this region from an OSDMap produced by
        /// <see cref="PackRegionInfoData"/>; keys that are absent leave the
        /// corresponding fields untouched.
        /// </summary>
        public void UnpackRegionInfoData(OSDMap args)
        {
            if (args["region_id"] != null)
                RegionID = args["region_id"].AsUUID();
            if (args["region_name"] != null)
                RegionName = args["region_name"].AsString();
            if (args["external_host_name"] != null)
                ExternalHostName = args["external_host_name"].AsString();
            if (args["http_port"] != null)
                UInt32.TryParse(args["http_port"].AsString(), out m_httpPort);
            if (args["server_uri"] != null)
                ServerURI = args["server_uri"].AsString();
            if (args["region_xloc"] != null)
            {
                uint locx;
                UInt32.TryParse(args["region_xloc"].AsString(), out locx);
                RegionLocX = locx;
            }
            if (args["region_yloc"] != null)
            {
                uint locy;
                UInt32.TryParse(args["region_yloc"].AsString(), out locy);
                RegionLocY = locy;
            }
            IPAddress ip_addr = null;
            if (args["internal_ep_address"] != null)
            {
                IPAddress.TryParse(args["internal_ep_address"].AsString(), out ip_addr);
            }
            int port = 0;
            if (args["internal_ep_port"] != null)
            {
                Int32.TryParse(args["internal_ep_port"].AsString(), out port);
            }
            // NOTE(review): if "internal_ep_address" is absent or unparseable,
            // ip_addr stays null and this constructor call will throw.
            InternalEndPoint = new IPEndPoint(ip_addr, port);
            if (args["remoting_address"] != null)
                RemotingAddress = args["remoting_address"].AsString();
            if (args["remoting_port"] != null)
                UInt32.TryParse(args["remoting_port"].AsString(), out m_remotingPort);
            if (args["allow_alt_ports"] != null)
                m_allow_alternate_ports = args["allow_alt_ports"].AsBoolean();
            if (args["proxy_url"] != null)
                proxyUrl = args["proxy_url"].AsString();
            if (args["region_type"] != null)
                m_regionType = args["region_type"].AsString();
        }

        /// <summary>
        /// Convenience factory: build a RegionInfo for a (typically
        /// neighbouring) region, resolving the external host name via DNS to
        /// form the internal endpoint.
        /// </summary>
        public static RegionInfo Create(UUID regionID, string regionName, uint regX, uint regY, string externalHostName, uint httpPort, uint simPort, uint remotingPort, string serverURI)
        {
            RegionInfo regionInfo;
            IPEndPoint neighbourInternalEndPoint = new IPEndPoint(Util.GetHostFromDNS(externalHostName), (int)simPort);
            regionInfo = new RegionInfo(regX, regY, neighbourInternalEndPoint, externalHostName);
            regionInfo.RemotingPort = remotingPort;
            regionInfo.RemotingAddress = externalHostName;
            regionInfo.HttpPort = httpPort;
            regionInfo.RegionID = regionID;
            regionInfo.RegionName = regionName;
            regionInfo.ServerURI = serverURI;
            return regionInfo;
        }
    }
}
using System;
using System.IO;
using System.Text;

namespace ICSharpCode.SharpZipLib.Tar
{
    /// <summary>
    /// The TarInputStream reads a UNIX tar archive as an InputStream.
    /// Methods are provided to position at each successive entry in
    /// the archive, and then read each entry as a normal input stream
    /// using Read().
    /// </summary>
    public class TarInputStream : Stream
    {
        #region Constructors

        /// <summary>
        /// Construct a TarInputStream with default block factor
        /// </summary>
        /// <param name="inputStream">stream to source data from</param>
        public TarInputStream(Stream inputStream)
            : this(inputStream, TarBuffer.DefaultBlockFactor)
        {
        }

        /// <summary>
        /// Construct a TarInputStream with user specified block factor
        /// </summary>
        /// <param name="inputStream">stream to source data from</param>
        /// <param name="blockFactor">block factor to apply to archive</param>
        public TarInputStream(Stream inputStream, int blockFactor)
        {
            this.inputStream = inputStream;
            tarBuffer = TarBuffer.CreateInputTarBuffer(inputStream, blockFactor);
        }

        #endregion

        /// <summary>
        /// Gets or sets a flag indicating ownership of underlying stream.
        /// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
        /// </summary>
        /// <remarks>The default value is true.</remarks>
        public bool IsStreamOwner
        {
            get { return tarBuffer.IsStreamOwner; }
            set { tarBuffer.IsStreamOwner = value; }
        }

        #region Stream Overrides

        /// <summary>
        /// Gets a value indicating whether the current stream supports reading
        /// </summary>
        public override bool CanRead
        {
            get { return inputStream.CanRead; }
        }

        /// <summary>
        /// Gets a value indicating whether the current stream supports seeking
        /// This property always returns false.
        /// </summary>
        public override bool CanSeek
        {
            get { return false; }
        }

        /// <summary>
        /// Gets a value indicating if the stream supports writing.
        /// This property always returns false.
        /// </summary>
        public override bool CanWrite
        {
            get { return false; }
        }

        /// <summary>
        /// The length in bytes of the stream
        /// </summary>
        public override long Length
        {
            get { return inputStream.Length; }
        }

        /// <summary>
        /// Gets or sets the position within the stream.
        /// Setting the Position is not supported and throws a NotSupportedException.
        /// </summary>
        /// <exception cref="NotSupportedException">Any attempt to set position</exception>
        public override long Position
        {
            get { return inputStream.Position; }
            set { throw new NotSupportedException("TarInputStream Seek not supported"); }
        }

        /// <summary>
        /// Flushes the baseInputStream
        /// </summary>
        public override void Flush()
        {
            inputStream.Flush();
        }

        /// <summary>
        /// Set the streams position. This operation is not supported and will throw a NotSupportedException
        /// </summary>
        /// <param name="offset">The offset relative to the origin to seek to.</param>
        /// <param name="origin">The <see cref="SeekOrigin"/> to start seeking from.</param>
        /// <returns>The new position in the stream.</returns>
        /// <exception cref="NotSupportedException">Any access</exception>
        public override long Seek(long offset, SeekOrigin origin)
        {
            throw new NotSupportedException("TarInputStream Seek not supported");
        }

        /// <summary>
        /// Sets the length of the stream
        /// This operation is not supported and will throw a NotSupportedException
        /// </summary>
        /// <param name="value">The new stream length.</param>
        /// <exception cref="NotSupportedException">Any access</exception>
        public override void SetLength(long value)
        {
            throw new NotSupportedException("TarInputStream SetLength not supported");
        }

        /// <summary>
        /// Writes a block of bytes to this stream using data from a buffer.
        /// This operation is not supported and will throw a NotSupportedException
        /// </summary>
        /// <param name="buffer">The buffer containing bytes to write.</param>
        /// <param name="offset">The offset in the buffer of the first byte to write.</param>
        /// <param name="count">The number of bytes to write.</param>
        /// <exception cref="NotSupportedException">Any access</exception>
        public override void Write(byte[] buffer, int offset, int count)
        {
            throw new NotSupportedException("TarInputStream Write not supported");
        }

        /// <summary>
        /// Writes a byte to the current position in the file stream.
        /// This operation is not supported and will throw a NotSupportedException
        /// </summary>
        /// <param name="value">The byte value to write.</param>
        /// <exception cref="NotSupportedException">Any access</exception>
        public override void WriteByte(byte value)
        {
            throw new NotSupportedException("TarInputStream WriteByte not supported");
        }

        /// <summary>
        /// Reads a byte from the current tar archive entry.
        /// </summary>
        /// <returns>A byte cast to an int; -1 if at the end of the stream.</returns>
        public override int ReadByte()
        {
            byte[] oneByteBuffer = new byte[1];
            int num = Read(oneByteBuffer, 0, 1);
            if (num <= 0)
            {
                // return -1 to indicate that no byte was read.
                return -1;
            }
            return oneByteBuffer[0];
        }

        /// <summary>
        /// Reads bytes from the current tar archive entry.
        ///
        /// This method is aware of the boundaries of the current
        /// entry in the archive and will deal with them appropriately
        /// </summary>
        /// <param name="buffer">
        /// The buffer into which to place bytes read.
        /// </param>
        /// <param name="offset">
        /// The offset at which to place bytes read.
        /// </param>
        /// <param name="count">
        /// The number of bytes to read.
        /// </param>
        /// <returns>
        /// The number of bytes read, or 0 at end of stream/EOF.
        /// </returns>
        /// <exception cref="ArgumentNullException">buffer is null</exception>
        /// <exception cref="ArgumentOutOfRangeException">offset or count is invalid for buffer</exception>
        /// <exception cref="TarException">the archive ends before the entry's declared size is satisfied</exception>
        public override int Read(byte[] buffer, int offset, int count)
        {
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }

            // Standard Stream contract validation (previously missing; invalid values
            // would otherwise surface as exceptions from Array.Copy deep inside the loop).
            if (offset < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(offset), "Cannot be negative");
            }

            if (count < 0 || offset > buffer.Length - count)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            int totalRead = 0;

            // Never read past the logical end of the current entry.
            if (entryOffset >= entrySize)
            {
                return 0;
            }

            long numToRead = count;

            if ((numToRead + entryOffset) > entrySize)
            {
                numToRead = entrySize - entryOffset;
            }

            // First drain any bytes left over from a previous partially-consumed block.
            if (readBuffer != null)
            {
                int sz = (numToRead > readBuffer.Length) ? readBuffer.Length : (int)numToRead;

                Array.Copy(readBuffer, 0, buffer, offset, sz);

                if (sz >= readBuffer.Length)
                {
                    readBuffer = null;
                }
                else
                {
                    // Keep the unconsumed tail of the buffered block for the next call.
                    int newLen = readBuffer.Length - sz;
                    byte[] newBuf = new byte[newLen];
                    Array.Copy(readBuffer, sz, newBuf, 0, newLen);
                    readBuffer = newBuf;
                }

                totalRead += sz;
                numToRead -= sz;
                offset += sz;
            }

            // Read whole blocks from the tar buffer, stashing any surplus in readBuffer.
            while (numToRead > 0)
            {
                byte[] rec = tarBuffer.ReadBlock();
                if (rec == null)
                {
                    // Unexpected EOF!
                    throw new TarException("unexpected EOF with " + numToRead + " bytes unread");
                }

                var sz = (int)numToRead;
                int recLen = rec.Length;

                if (recLen > sz)
                {
                    Array.Copy(rec, 0, buffer, offset, sz);
                    readBuffer = new byte[recLen - sz];
                    Array.Copy(rec, sz, readBuffer, 0, recLen - sz);
                }
                else
                {
                    sz = recLen;
                    Array.Copy(rec, 0, buffer, offset, recLen);
                }

                totalRead += sz;
                numToRead -= sz;
                offset += sz;
            }

            entryOffset += totalRead;

            return totalRead;
        }

        /// <summary>
        /// Closes this stream. Calls the TarBuffer's close() method.
        /// The underlying stream is closed by the TarBuffer.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                tarBuffer.Close();
            }

            // Chain to the base class so Stream's own dispose bookkeeping runs
            // (previously omitted).
            base.Dispose(disposing);
        }

        #endregion

        /// <summary>
        /// Set the entry factory for this instance.
        /// </summary>
        /// <param name="factory">The factory for creating new entries</param>
        public void SetEntryFactory(IEntryFactory factory)
        {
            entryFactory = factory;
        }

        /// <summary>
        /// Get the record size being used by this stream's TarBuffer.
        /// </summary>
        public int RecordSize
        {
            get { return tarBuffer.RecordSize; }
        }

        /// <summary>
        /// Get the record size being used by this stream's TarBuffer.
        /// </summary>
        /// <returns>
        /// TarBuffer record size.
        /// </returns>
        [Obsolete("Use RecordSize property instead")]
        public int GetRecordSize()
        {
            return tarBuffer.RecordSize;
        }

        /// <summary>
        /// Get the available data that can be read from the current
        /// entry in the archive. This does not indicate how much data
        /// is left in the entire archive, only in the current entry.
        /// This value is determined from the entry's size header field
        /// and the amount of data already read from the current entry.
        /// </summary>
        /// <returns>
        /// The number of available bytes for the current entry.
        /// </returns>
        public long Available
        {
            get { return entrySize - entryOffset; }
        }

        /// <summary>
        /// Skip bytes in the input buffer. This skips bytes in the
        /// current entry's data, not the entire archive, and will
        /// stop at the end of the current entry's data if the number
        /// to skip extends beyond that point.
        /// </summary>
        /// <param name="skipCount">
        /// The number of bytes to skip.
        /// </param>
        public void Skip(long skipCount)
        {
            // TODO: REVIEW efficiency of TarInputStream.Skip
            // This is horribly inefficient, but it ensures that we
            // properly skip over bytes via the TarBuffer...
            //
            byte[] skipBuf = new byte[8 * 1024];

            for (long num = skipCount; num > 0;)
            {
                int toRead = num > skipBuf.Length ? skipBuf.Length : (int)num;
                int numRead = Read(skipBuf, 0, toRead);

                // BUGFIX: Read returns 0 at end of stream, never -1; the previous
                // check for -1 could spin forever once the entry was exhausted.
                if (numRead <= 0)
                {
                    break;
                }

                num -= numRead;
            }
        }

        /// <summary>
        /// Return a value of true if marking is supported; false otherwise.
        /// </summary>
        /// <remarks>Currently marking is not supported, the return value is always false.</remarks>
        public bool IsMarkSupported
        {
            get { return false; }
        }

        /// <summary>
        /// Since we do not support marking just yet, we do nothing.
        /// </summary>
        /// <param name ="markLimit">
        /// The limit to mark.
        /// </param>
        public void Mark(int markLimit)
        {
        }

        /// <summary>
        /// Since we do not support marking just yet, we do nothing.
        /// </summary>
        public void Reset()
        {
        }

        /// <summary>
        /// Get the next entry in this tar archive. This will skip
        /// over any remaining data in the current entry, if there
        /// is one, and place the input stream at the header of the
        /// next entry, and read the header and instantiate a new
        /// TarEntry from the header bytes and return that entry.
        /// If there are no more entries in the archive, null will
        /// be returned to indicate that the end of the archive has
        /// been reached.
        /// </summary>
        /// <returns>
        /// The next TarEntry in the archive, or null.
        /// </returns>
        public TarEntry GetNextEntry()
        {
            if (hasHitEOF)
            {
                return null;
            }

            if (currentEntry != null)
            {
                SkipToNextEntry();
            }

            byte[] headerBuf = tarBuffer.ReadBlock();

            if (headerBuf == null)
            {
                hasHitEOF = true;
            }
            else
            {
                hasHitEOF |= TarBuffer.IsEndOfArchiveBlock(headerBuf);
            }

            if (hasHitEOF)
            {
                currentEntry = null;
            }
            else
            {
                try
                {
                    var header = new TarHeader();
                    header.ParseBuffer(headerBuf);
                    if (!header.IsChecksumValid)
                    {
                        throw new TarException("Header checksum is invalid");
                    }

                    this.entryOffset = 0;
                    this.entrySize = header.Size;

                    StringBuilder longName = null;

                    if (header.TypeFlag == TarHeader.LF_GNU_LONGNAME)
                    {
                        // GNU long-name entry: the entry's data is the real name of
                        // the entry that follows it.
                        byte[] nameBuffer = new byte[TarBuffer.BlockSize];
                        long numToRead = this.entrySize;

                        longName = new StringBuilder();

                        while (numToRead > 0)
                        {
                            int numRead = this.Read(nameBuffer, 0,
                                (numToRead > nameBuffer.Length ? nameBuffer.Length : (int)numToRead));

                            // BUGFIX: Read returns 0 at end of stream, never -1; a
                            // truncated archive previously looped forever here.
                            if (numRead <= 0)
                            {
                                throw new InvalidHeaderException("Failed to read long name entry");
                            }

                            longName.Append(TarHeader.ParseName(nameBuffer, 0, numRead).ToString());
                            numToRead -= numRead;
                        }

                        SkipToNextEntry();
                        headerBuf = this.tarBuffer.ReadBlock();
                    }
                    else if (header.TypeFlag == TarHeader.LF_GHDR)
                    {
                        // POSIX global extended header
                        // Ignore things we dont understand completely for now
                        SkipToNextEntry();
                        headerBuf = this.tarBuffer.ReadBlock();
                    }
                    else if (header.TypeFlag == TarHeader.LF_XHDR)
                    {
                        // POSIX extended header
                        // Ignore things we dont understand completely for now
                        SkipToNextEntry();
                        headerBuf = this.tarBuffer.ReadBlock();
                    }
                    else if (header.TypeFlag == TarHeader.LF_GNU_VOLHDR)
                    {
                        // TODO: could show volume name when verbose
                        SkipToNextEntry();
                        headerBuf = this.tarBuffer.ReadBlock();
                    }
                    else if (header.TypeFlag != TarHeader.LF_NORMAL &&
                             header.TypeFlag != TarHeader.LF_OLDNORM &&
                             header.TypeFlag != TarHeader.LF_LINK &&
                             header.TypeFlag != TarHeader.LF_SYMLINK &&
                             header.TypeFlag != TarHeader.LF_DIR)
                    {
                        // Ignore things we dont understand completely for now
                        SkipToNextEntry();
                        headerBuf = tarBuffer.ReadBlock();
                    }

                    // NOTE(review): headerBuf read after a skipped meta-entry is not
                    // re-checked for null/EOF; a truncated archive would surface as a
                    // failure in TarEntry construction. TODO confirm intended handling.
                    if (entryFactory == null)
                    {
                        currentEntry = new TarEntry(headerBuf);
                        if (longName != null)
                        {
                            currentEntry.Name = longName.ToString();
                        }
                    }
                    else
                    {
                        currentEntry = entryFactory.CreateEntry(headerBuf);
                    }

                    // Magic was checked here for 'ustar' but there are multiple valid possibilities
                    // so this is not done anymore.

                    entryOffset = 0;

                    // TODO: Review How do we resolve this discrepancy?!
                    entrySize = this.currentEntry.Size;
                }
                catch (InvalidHeaderException ex)
                {
                    entrySize = 0;
                    entryOffset = 0;
                    currentEntry = null;
                    string errorText = string.Format("Bad header in record {0} block {1} {2}",
                        tarBuffer.CurrentRecord, tarBuffer.CurrentBlock, ex.Message);
                    throw new InvalidHeaderException(errorText);
                }
            }

            return currentEntry;
        }

        /// <summary>
        /// Copies the contents of the current tar archive entry directly into
        /// an output stream.
        /// </summary>
        /// <param name="outputStream">
        /// The OutputStream into which to write the entry's data.
        /// </param>
        public void CopyEntryContents(Stream outputStream)
        {
            byte[] tempBuffer = new byte[32 * 1024];

            while (true)
            {
                int numRead = Read(tempBuffer, 0, tempBuffer.Length);
                if (numRead <= 0)
                {
                    break;
                }
                outputStream.Write(tempBuffer, 0, numRead);
            }
        }

        // Consume any remaining data in the current entry and drop buffered bytes.
        void SkipToNextEntry()
        {
            long numToSkip = entrySize - entryOffset;

            if (numToSkip > 0)
            {
                Skip(numToSkip);
            }

            readBuffer = null;
        }

        /// <summary>
        /// This interface is provided, along with the method <see cref="SetEntryFactory"/>, to allow
        /// the programmer to have their own <see cref="TarEntry"/> subclass instantiated for the
        /// entries return from <see cref="GetNextEntry"/>.
        /// </summary>
        public interface IEntryFactory
        {
            /// <summary>
            /// Create an entry based on name alone
            /// </summary>
            /// <param name="name">
            /// Name of the new entry to create
            /// </param>
            /// <returns>created TarEntry or descendant class</returns>
            TarEntry CreateEntry(string name);

            /// <summary>
            /// Create an instance based on an actual file
            /// </summary>
            /// <param name="fileName">
            /// Name of file to represent in the entry
            /// </param>
            /// <returns>
            /// Created TarEntry or descendant class
            /// </returns>
            TarEntry CreateEntryFromFile(string fileName);

            /// <summary>
            /// Create a tar entry based on the header information passed
            /// </summary>
            /// <param name="headerBuffer">
            /// Buffer containing header information to create an entry from.
            /// </param>
            /// <returns>
            /// Created TarEntry or descendant class
            /// </returns>
            TarEntry CreateEntry(byte[] headerBuffer);
        }

        /// <summary>
        /// Standard entry factory class creating instances of the class TarEntry
        /// </summary>
        public class EntryFactoryAdapter : IEntryFactory
        {
            /// <summary>
            /// Create a <see cref="TarEntry"/> based on a name
            /// </summary>
            /// <param name="name">The name to use for the entry</param>
            /// <returns>A new <see cref="TarEntry"/></returns>
            public TarEntry CreateEntry(string name)
            {
                return TarEntry.CreateTarEntry(name);
            }

            /// <summary>
            /// Create a tar entry with details obtained from <paramref name="fileName">file</paramref>
            /// </summary>
            /// <param name="fileName">The name of the file to retrieve details from.</param>
            /// <returns>A new <see cref="TarEntry"/></returns>
            public TarEntry CreateEntryFromFile(string fileName)
            {
                return TarEntry.CreateEntryFromFile(fileName);
            }

            /// <summary>
            /// Create an entry based on details in <paramref name="headerBuffer">header</paramref>
            /// </summary>
            /// <param name="headerBuffer">The buffer containing entry details.</param>
            /// <returns>A new <see cref="TarEntry"/></returns>
            public TarEntry CreateEntry(byte[] headerBuffer)
            {
                return new TarEntry(headerBuffer);
            }
        }

        #region Instance Fields

        /// <summary>
        /// Flag set when last block has been read
        /// </summary>
        protected bool hasHitEOF;

        /// <summary>
        /// Size of this entry as recorded in header
        /// </summary>
        protected long entrySize;

        /// <summary>
        /// Number of bytes read for this entry so far
        /// </summary>
        protected long entryOffset;

        /// <summary>
        /// Buffer used with calls to <code>Read()</code>
        /// </summary>
        protected byte[] readBuffer;

        /// <summary>
        /// Working buffer
        /// </summary>
        protected TarBuffer tarBuffer;

        /// <summary>
        /// Current entry being read
        /// </summary>
        TarEntry currentEntry;

        /// <summary>
        /// Factory used to create TarEntry or descendant class instance
        /// </summary>
        protected IEntryFactory entryFactory;

        /// <summary>
        /// Stream used as the source of input data.
        /// </summary>
        readonly Stream inputStream;

        #endregion
    }
}
/*****************************************************************************
 * Air.cs
 * Copyright (c) 2010 Dustin Firchow
 *
 * Inspired by a 2007 post on http://forums.xna.com/forums/p/2189/11345.aspx
 * by SwampThingTom.
 *****************************************************************************/

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Media;

// Aliases are needed because Air declares its own GamePad and Mouse classes.
using XnaKeyboard = Microsoft.Xna.Framework.Input.Keyboard;
using XnaGamePad = Microsoft.Xna.Framework.Input.GamePad;
using XnaMouse = Microsoft.Xna.Framework.Input.Mouse;

namespace AnotherInputRectifier
{
    /// <summary>
    /// This is a game component that implements IUpdateable.
    /// It snapshots keyboard, mouse and gamepad state once per frame and
    /// exposes the current and previous frame's state through static members.
    /// </summary>
    public class Air : Microsoft.Xna.Framework.GameComponent
    {
        #region Private Constants

        // Slot numbers into the gamepad state arrays, one per player.
        private const short PlayerOne = 0;
        private const short PlayerTwo = 1;
        private const short PlayerThree = 2;
        private const short PlayerFour = 3;

        #endregion

        #region GamePad and Mouse static classes

        public static class GamePad
        {
            public enum Thumbsticks
            {
                Left,
                Right
            }

            public enum Triggers
            {
                Left,
                Right
            }

            public enum Buttons
            {
                Back,
                BigButton,
                Start,
                A,
                B,
                X,
                Y,
                LeftShoulder,
                RightShoulder,
                LeftStick,
                RightStick
            }

            public enum DPad
            {
                Left,
                Right,
                Up,
                Down
            }

            /// <summary>
            /// Returns a digital direction control for a player's dpad.
            /// </summary>
            /// <param name="player">player index for a gamepad</param>
            /// <returns>control for a dpad</returns>
            public static IDigitalDirectionControl DPadControl(PlayerIndex player)
            {
                return new Details.GamePadDPadControl(player);
            }

            /// <summary>
            /// Returns a digital direction control for player one's dpad.
            /// </summary>
            /// <returns>control for a dpad</returns>
            public static IDigitalDirectionControl DPadControl()
            {
                return new Details.GamePadDPadControl(PlayerIndex.One);
            }
        }

#if !XBOX360
        public static class Mouse
        {
            public enum Buttons
            {
                Left,
                Right,
                Middle,
                X1,
                X2
            }

            /// <summary>
            /// Gets mouse position as an
            /// analog direction control.
            /// </summary>
            public static IAnalogDirectionControl Position
            {
                get { return new Details.MousePositionControl(); }
            }
        }
#endif

        #endregion

        #region Constructor and base overrides

        public Air(Game game)
            : base(game)
        {
            // Prime the "current" state snapshots so the first Update has
            // something sensible to copy into the "previous" slots.
#if !XBOX360
            mouseState = XnaMouse.GetState();
#endif
            keyboardState = Keyboard.GetState();
            UpdateGamePadStates();
        }

        /// <summary>
        /// Allows the game component to perform any initialization it needs to before starting
        /// to run. This is where it can query for any required services and load content.
        /// </summary>
        public override void Initialize()
        {
            base.Initialize();
        }

        /// <summary>
        /// Allows the game component to update itself.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        public override void Update(GameTime gameTime)
        {
            UpdateStates();
            base.Update(gameTime);
        }

        // Roll every "current" snapshot into the "previous" slot, then capture
        // fresh device state for this frame.
        private void UpdateStates()
        {
#if !XBOX360
            previousMouseState = mouseState;
            mouseState = XnaMouse.GetState();
#endif
            previousKeyboardState = keyboardState;
            keyboardState = Keyboard.GetState();

            for (int pad = PlayerOne; pad <= PlayerFour; pad++)
            {
                previousGamePadState[pad] = gamePadState[pad];
            }

            UpdateGamePadStates();
        }

        // Capture the current state of all four gamepads.
        private void UpdateGamePadStates()
        {
            PlayerIndex[] players =
            {
                PlayerIndex.One, PlayerIndex.Two, PlayerIndex.Three, PlayerIndex.Four
            };

            for (int pad = 0; pad < players.Length; pad++)
            {
                gamePadState[pad] = XnaGamePad.GetState(players[pad]);
            }
        }

        #endregion

        // Anything related to the mouse.
        // Not built if platform is XBOX360.
        #region Mouse
#if !XBOX360
        static MouseState mouseState;
        static MouseState previousMouseState;

        public static MouseState MouseState
        {
            get { return mouseState; }
        }

        public static MouseState PreviousMouseState
        {
            get { return previousMouseState; }
        }

        public static IAnalogDirectionControl CreateMouseControl()
        {
            return new Details.MousePositionControl();
        }
#endif
        #endregion

        // Anything related to the keyboard.
        #region Keyboard

        static KeyboardState keyboardState;
        static KeyboardState previousKeyboardState;

        public static KeyboardState KeyboardState
        {
            get { return keyboardState; }
        }

        public static KeyboardState PreviousKeyboardState
        {
            get { return previousKeyboardState; }
        }

        public static IDigitalControl CreateKeyboardControl(Keys key)
        {
            return new Details.KeyControl(key);
        }

        #endregion

        // Anything related to a game pad.
        #region Gamepad

        static GamePadState[] gamePadState = new GamePadState[4];
        static GamePadState[] previousGamePadState = new GamePadState[4];

        /// <summary>
        /// Gets the gamepad state for the specified player.
        /// </summary>
        /// <param name="player">The player index.</param>
        /// <returns>GamePadState for specified player.</returns>
        public static GamePadState GamePadState(PlayerIndex player)
        {
            return gamePadState[IndexOf(player)];
        }

        /// <summary>
        /// Gets the gamepad state for player one.
        /// </summary>
        /// <returns>GamePadState for PlayerIndex.One</returns>
        public static GamePadState GamePadState()
        {
            return gamePadState[PlayerOne];
        }

        /// <summary>
        /// Gets the gamepad state from the previous frame for the
        /// specified player.
        /// </summary>
        /// <param name="player">The player index.</param>
        /// <returns>
        /// Specified player's previous gamepad state.
        /// </returns>
        public static GamePadState PreviousGamePadState(PlayerIndex player)
        {
            return previousGamePadState[IndexOf(player)];
        }

        /// <summary>
        /// Gets the gamepad state from the previous frame for
        /// player one.
        /// </summary>
        /// <returns>Player one's previous gamepad state.</returns>
        public static GamePadState PreviousGamePadState()
        {
            return previousGamePadState[PlayerOne];
        }

        /// <summary>
        /// Checks to see if the specified player's gamepad is
        /// currently connected.
        /// </summary>
        /// <returns>
        /// True if gamepad is connected, otherwise returns false.
        /// </returns>
        public static bool IsConnected(PlayerIndex player)
        {
            return gamePadState[IndexOf(player)].IsConnected;
        }

        /// <summary>
        /// Checks to see if player one's gamepad is currently connected.
        /// </summary>
        /// <returns>
        /// True if gamepad is connected, otherwise returns false.
        /// </returns>
        public static bool IsConnected()
        {
            return gamePadState[PlayerOne].IsConnected;
        }

        /// <summary>
        /// Get index for gamePadState[] from a PlayerIndex
        /// </summary>
        /// <param name="player">The PlayerIndex for the game pad you want.</param>
        /// <returns>The correct index for the gamePadState[].
        /// Or -1 if something goes horribly wrong.</returns>
        private static int IndexOf(PlayerIndex player)
        {
            if (player == PlayerIndex.One)
            {
                return PlayerOne;
            }
            if (player == PlayerIndex.Two)
            {
                return PlayerTwo;
            }
            if (player == PlayerIndex.Three)
            {
                return PlayerThree;
            }
            if (player == PlayerIndex.Four)
            {
                return PlayerFour;
            }

            return -1; // Return bad value
        }

        #endregion
    }
}
using System; using System.Collections; using System.IO; using Org.BouncyCastle.Asn1; using Org.BouncyCastle.Asn1.Cms; using Org.BouncyCastle.Asn1.Kisa; using Org.BouncyCastle.Asn1.Nist; using Org.BouncyCastle.Asn1.Ntt; using Org.BouncyCastle.Asn1.Pkcs; using Org.BouncyCastle.Asn1.X509; using Org.BouncyCastle.Asn1.X9; using Org.BouncyCastle.Crypto; using Org.BouncyCastle.Crypto.Parameters; using Org.BouncyCastle.Math; using Org.BouncyCastle.Security; using Org.BouncyCastle.X509; namespace Org.BouncyCastle.Cms { /** * General class for generating a CMS enveloped-data message. * * A simple example of usage. * * <pre> * CMSEnvelopedDataGenerator fact = new CMSEnvelopedDataGenerator(); * * fact.addKeyTransRecipient(cert); * * CMSEnvelopedData data = fact.generate(content, algorithm, "BC"); * </pre> */ public class CmsEnvelopedGenerator { internal static readonly short[] rc2Table = { 0xbd, 0x56, 0xea, 0xf2, 0xa2, 0xf1, 0xac, 0x2a, 0xb0, 0x93, 0xd1, 0x9c, 0x1b, 0x33, 0xfd, 0xd0, 0x30, 0x04, 0xb6, 0xdc, 0x7d, 0xdf, 0x32, 0x4b, 0xf7, 0xcb, 0x45, 0x9b, 0x31, 0xbb, 0x21, 0x5a, 0x41, 0x9f, 0xe1, 0xd9, 0x4a, 0x4d, 0x9e, 0xda, 0xa0, 0x68, 0x2c, 0xc3, 0x27, 0x5f, 0x80, 0x36, 0x3e, 0xee, 0xfb, 0x95, 0x1a, 0xfe, 0xce, 0xa8, 0x34, 0xa9, 0x13, 0xf0, 0xa6, 0x3f, 0xd8, 0x0c, 0x78, 0x24, 0xaf, 0x23, 0x52, 0xc1, 0x67, 0x17, 0xf5, 0x66, 0x90, 0xe7, 0xe8, 0x07, 0xb8, 0x60, 0x48, 0xe6, 0x1e, 0x53, 0xf3, 0x92, 0xa4, 0x72, 0x8c, 0x08, 0x15, 0x6e, 0x86, 0x00, 0x84, 0xfa, 0xf4, 0x7f, 0x8a, 0x42, 0x19, 0xf6, 0xdb, 0xcd, 0x14, 0x8d, 0x50, 0x12, 0xba, 0x3c, 0x06, 0x4e, 0xec, 0xb3, 0x35, 0x11, 0xa1, 0x88, 0x8e, 0x2b, 0x94, 0x99, 0xb7, 0x71, 0x74, 0xd3, 0xe4, 0xbf, 0x3a, 0xde, 0x96, 0x0e, 0xbc, 0x0a, 0xed, 0x77, 0xfc, 0x37, 0x6b, 0x03, 0x79, 0x89, 0x62, 0xc6, 0xd7, 0xc0, 0xd2, 0x7c, 0x6a, 0x8b, 0x22, 0xa3, 0x5b, 0x05, 0x5d, 0x02, 0x75, 0xd5, 0x61, 0xe3, 0x18, 0x8f, 0x55, 0x51, 0xad, 0x1f, 0x0b, 0x5e, 0x85, 0xe5, 0xc2, 0x57, 0x63, 0xca, 0x3d, 0x6c, 0xb4, 0xc5, 0xcc, 0x70, 0xb2, 0x91, 0x59, 0x0d, 
0x47, 0x20, 0xc8, 0x4f, 0x58, 0xe0, 0x01, 0xe2, 0x16, 0x38, 0xc4, 0x6f, 0x3b, 0x0f, 0x65, 0x46, 0xbe, 0x7e, 0x2d, 0x7b, 0x82, 0xf9, 0x40, 0xb5, 0x1d, 0x73, 0xf8, 0xeb, 0x26, 0xc7, 0x87, 0x97, 0x25, 0x54, 0xb1, 0x28, 0xaa, 0x98, 0x9d, 0xa5, 0x64, 0x6d, 0x7a, 0xd4, 0x10, 0x81, 0x44, 0xef, 0x49, 0xd6, 0xae, 0x2e, 0xdd, 0x76, 0x5c, 0x2f, 0xa7, 0x1c, 0xc9, 0x09, 0x69, 0x9a, 0x83, 0xcf, 0x29, 0x39, 0xb9, 0xe9, 0x4c, 0xff, 0x43, 0xab }; internal static readonly short[] rc2Ekb = { 0x5d, 0xbe, 0x9b, 0x8b, 0x11, 0x99, 0x6e, 0x4d, 0x59, 0xf3, 0x85, 0xa6, 0x3f, 0xb7, 0x83, 0xc5, 0xe4, 0x73, 0x6b, 0x3a, 0x68, 0x5a, 0xc0, 0x47, 0xa0, 0x64, 0x34, 0x0c, 0xf1, 0xd0, 0x52, 0xa5, 0xb9, 0x1e, 0x96, 0x43, 0x41, 0xd8, 0xd4, 0x2c, 0xdb, 0xf8, 0x07, 0x77, 0x2a, 0xca, 0xeb, 0xef, 0x10, 0x1c, 0x16, 0x0d, 0x38, 0x72, 0x2f, 0x89, 0xc1, 0xf9, 0x80, 0xc4, 0x6d, 0xae, 0x30, 0x3d, 0xce, 0x20, 0x63, 0xfe, 0xe6, 0x1a, 0xc7, 0xb8, 0x50, 0xe8, 0x24, 0x17, 0xfc, 0x25, 0x6f, 0xbb, 0x6a, 0xa3, 0x44, 0x53, 0xd9, 0xa2, 0x01, 0xab, 0xbc, 0xb6, 0x1f, 0x98, 0xee, 0x9a, 0xa7, 0x2d, 0x4f, 0x9e, 0x8e, 0xac, 0xe0, 0xc6, 0x49, 0x46, 0x29, 0xf4, 0x94, 0x8a, 0xaf, 0xe1, 0x5b, 0xc3, 0xb3, 0x7b, 0x57, 0xd1, 0x7c, 0x9c, 0xed, 0x87, 0x40, 0x8c, 0xe2, 0xcb, 0x93, 0x14, 0xc9, 0x61, 0x2e, 0xe5, 0xcc, 0xf6, 0x5e, 0xa8, 0x5c, 0xd6, 0x75, 0x8d, 0x62, 0x95, 0x58, 0x69, 0x76, 0xa1, 0x4a, 0xb5, 0x55, 0x09, 0x78, 0x33, 0x82, 0xd7, 0xdd, 0x79, 0xf5, 0x1b, 0x0b, 0xde, 0x26, 0x21, 0x28, 0x74, 0x04, 0x97, 0x56, 0xdf, 0x3c, 0xf0, 0x37, 0x39, 0xdc, 0xff, 0x06, 0xa4, 0xea, 0x42, 0x08, 0xda, 0xb4, 0x71, 0xb0, 0xcf, 0x12, 0x7a, 0x4e, 0xfa, 0x6c, 0x1d, 0x84, 0x00, 0xc8, 0x7f, 0x91, 0x45, 0xaa, 0x2b, 0xc2, 0xb1, 0x8f, 0xd5, 0xba, 0xf2, 0xad, 0x19, 0xb2, 0x67, 0x36, 0xf7, 0x0f, 0x0a, 0x92, 0x7d, 0xe3, 0x9d, 0xe9, 0x90, 0x3e, 0x23, 0x27, 0x66, 0x13, 0xec, 0x81, 0x15, 0xbd, 0x22, 0xbf, 0x9f, 0x7e, 0xa9, 0x51, 0x4b, 0x4c, 0xfb, 0x02, 0xd3, 0x70, 0x86, 0x31, 0xe7, 0x3b, 0x05, 0x03, 0x54, 0x60, 0x48, 0x65, 0x18, 0xd2, 0xcd, 0x5f, 0x32, 0x88, 
0x0e, 0x35, 0xfd }; // TODO Create named constants for all of these public static readonly string DesEde3Cbc = PkcsObjectIdentifiers.DesEde3Cbc.Id; public static readonly string RC2Cbc = PkcsObjectIdentifiers.RC2Cbc.Id; public const string IdeaCbc = "1.3.6.1.4.1.188.7.1.1.2"; public const string Cast5Cbc = "1.2.840.113533.7.66.10"; public static readonly string Aes128Cbc = NistObjectIdentifiers.IdAes128Cbc.Id; public static readonly string Aes192Cbc = NistObjectIdentifiers.IdAes192Cbc.Id; public static readonly string Aes256Cbc = NistObjectIdentifiers.IdAes256Cbc.Id; public static readonly string Camellia128Cbc = NttObjectIdentifiers.IdCamellia128Cbc.Id; public static readonly string Camellia192Cbc = NttObjectIdentifiers.IdCamellia192Cbc.Id; public static readonly string Camellia256Cbc = NttObjectIdentifiers.IdCamellia256Cbc.Id; public static readonly string SeedCbc = KisaObjectIdentifiers.IdSeedCbc.Id; public static readonly string DesEde3Wrap = PkcsObjectIdentifiers.IdAlgCms3DesWrap.Id; public static readonly string Aes128Wrap = NistObjectIdentifiers.IdAes128Wrap.Id; public static readonly string Aes192Wrap = NistObjectIdentifiers.IdAes192Wrap.Id; public static readonly string Aes256Wrap = NistObjectIdentifiers.IdAes256Wrap.Id; public static readonly string Camellia128Wrap = NttObjectIdentifiers.IdCamellia128Wrap.Id; public static readonly string Camellia192Wrap = NttObjectIdentifiers.IdCamellia192Wrap.Id; public static readonly string Camellia256Wrap = NttObjectIdentifiers.IdCamellia256Wrap.Id; public static readonly string SeedWrap = KisaObjectIdentifiers.IdNpkiAppCmsSeedWrap.Id; public static readonly string ECDHSha1Kdf = X9ObjectIdentifiers.DHSinglePassStdDHSha1KdfScheme.Id; internal static readonly CmsEnvelopedHelper Helper = CmsEnvelopedHelper.Instance; internal readonly IList recipientInfs = new ArrayList(); internal readonly SecureRandom rand; protected class RecipientInf { private readonly X509Certificate cert; private AlgorithmIdentifier keyEncAlg; 
private readonly AsymmetricKeyParameter pubKey; private readonly Asn1OctetString subKeyId; private readonly string secKeyAlgorithm; private readonly KeyParameter secKey; private readonly KekIdentifier secKeyId; private readonly OriginatorIdentifierOrKey originator; private const Asn1OctetString ukm = null; private readonly AlgorithmIdentifier derivationAlg; internal RecipientInf( X509Certificate cert) { this.cert = cert; this.pubKey = cert.GetPublicKey(); try { TbsCertificateStructure tbs = TbsCertificateStructure.GetInstance( Asn1Object.FromByteArray(cert.GetTbsCertificate())); keyEncAlg = tbs.SubjectPublicKeyInfo.AlgorithmID; } // catch (IOException e) catch (Exception) { throw new ArgumentException("can't extract key algorithm from this cert"); } // catch (CertificateEncodingException) // { // throw new ArgumentException("can't extract tbs structure from this cert"); // } } internal RecipientInf( AsymmetricKeyParameter pubKey, Asn1OctetString subKeyId) { this.pubKey = pubKey; this.subKeyId = subKeyId; try { SubjectPublicKeyInfo info = SubjectPublicKeyInfoFactory.CreateSubjectPublicKeyInfo(pubKey); keyEncAlg = info.AlgorithmID; } catch (IOException) { throw new ArgumentException("can't extract key algorithm from this key"); } } internal RecipientInf( string secKeyAlgorithm, // TODO Can get this from secKey? 
KeyParameter secKey, KekIdentifier secKeyId) { this.secKeyAlgorithm = secKeyAlgorithm; this.secKey = secKey; this.secKeyId = secKeyId; if (secKeyAlgorithm.StartsWith("DES")) { keyEncAlg = new AlgorithmIdentifier( PkcsObjectIdentifiers.IdAlgCms3DesWrap, DerNull.Instance); } else if (secKeyAlgorithm.StartsWith("RC2")) { keyEncAlg = new AlgorithmIdentifier( PkcsObjectIdentifiers.IdAlgCmsRC2Wrap, new DerInteger(58)); } else if (secKeyAlgorithm.StartsWith("AES")) { int length = secKey.GetKey().Length * 8; DerObjectIdentifier wrapOid; if (length == 128) { wrapOid = NistObjectIdentifiers.IdAes128Wrap; } else if (length == 192) { wrapOid = NistObjectIdentifiers.IdAes192Wrap; } else if (length == 256) { wrapOid = NistObjectIdentifiers.IdAes256Wrap; } else { throw new ArgumentException("illegal keysize in AES"); } keyEncAlg = new AlgorithmIdentifier(wrapOid); // parameters absent } else if (secKeyAlgorithm.StartsWith("SEED")) { // parameters absent keyEncAlg = new AlgorithmIdentifier(KisaObjectIdentifiers.IdNpkiAppCmsSeedWrap); } else if (secKeyAlgorithm.StartsWith("CAMELLIA")) { int length = secKey.GetKey().Length * 8; DerObjectIdentifier wrapOid; if (length == 128) { wrapOid = NttObjectIdentifiers.IdCamellia128Wrap; } else if (length == 192) { wrapOid = NttObjectIdentifiers.IdCamellia192Wrap; } else if (length == 256) { wrapOid = NttObjectIdentifiers.IdCamellia256Wrap; } else { throw new ArgumentException("illegal keysize in Camellia"); } keyEncAlg = new AlgorithmIdentifier(wrapOid); // parameters must be absent } else { throw new ArgumentException("unknown algorithm"); } } public RecipientInf( string secKeyAlgorithm, // TODO Can get this from secKey? 
KeyParameter secKey, string algorithm, string wrapOid, OriginatorIdentifierOrKey originator, X509Certificate cert)
{
    // Build the key-encryption AlgorithmIdentifier as { agreementAlg, SEQ { wrapOid, NULL } }
    // so the wrap OID can be recovered later in ToRecipientInfo for the key-agreement path.
    DerSequence paramSeq = new DerSequence(
        new DerObjectIdentifier(wrapOid),
        DerNull.Instance);

    this.secKeyAlgorithm = secKeyAlgorithm;
    this.secKey = secKey;
    this.keyEncAlg = new AlgorithmIdentifier(new DerObjectIdentifier(algorithm), paramSeq);
    this.originator = originator;
    this.cert = cert;
}

// Password/derived-key recipient: stores the derivation AlgorithmIdentifier (e.g. PBKDF2)
// so ToRecipientInfo can emit a PasswordRecipientInfo.
public RecipientInf(
    string secKeyAlgorithm, // TODO Can get this from secKey?
    KeyParameter secKey,
    AlgorithmIdentifier derivationAlg)
{
    this.secKeyAlgorithm = secKeyAlgorithm;
    this.secKey = secKey;
    this.derivationAlg = derivationAlg;
}

// Wraps the freshly generated content-encryption key for this recipient and builds the
// matching CMS RecipientInfo structure. Dispatch is on which fields were populated by the
// constructor used:
//   pubKey != null        -> KeyTransRecipientInfo (RSA-style key transport)
//   originator != null    -> KeyAgreeRecipientInfo (key agreement, wrap OID from keyEncAlg params)
//   derivationAlg != null -> PasswordRecipientInfo (RFC 3211 key wrap with random IV)
//   otherwise             -> KekRecipientInfo (pre-shared KEK)
internal RecipientInfo ToRecipientInfo(
    KeyParameter key,
    SecureRandom random)
{
    byte[] keyBytes = key.GetKey();

    if (pubKey != null)
    {
        IWrapper keyWrapper = Helper.CreateWrapper(keyEncAlg.ObjectID.Id);
        keyWrapper.Init(true, new ParametersWithRandom(pubKey, random));

        Asn1OctetString encKey = new DerOctetString(
            keyWrapper.Wrap(keyBytes, 0, keyBytes.Length));

        // Identify the recipient by issuer+serial when a certificate is available,
        // otherwise by the subject key identifier supplied to the constructor.
        RecipientIdentifier recipId;
        if (cert != null)
        {
            TbsCertificateStructure tbs = TbsCertificateStructure.GetInstance(
                Asn1Object.FromByteArray(cert.GetTbsCertificate()));

            Asn1.Cms.IssuerAndSerialNumber encSid = new Asn1.Cms.IssuerAndSerialNumber(
                tbs.Issuer, tbs.SerialNumber.Value);

            recipId = new RecipientIdentifier(encSid);
        }
        else
        {
            recipId = new RecipientIdentifier(subKeyId);
        }

        return new RecipientInfo(new KeyTransRecipientInfo(recipId, keyEncAlg, encKey));
    }
    else if (originator != null)
    {
        // The wrap OID was stored as the first element of keyEncAlg's parameter sequence
        // by the key-agreement constructor; recover it to pick the wrapper.
        IWrapper keyWrapper = Helper.CreateWrapper(
            DerObjectIdentifier.GetInstance(
                Asn1Sequence.GetInstance(keyEncAlg.Parameters)[0]).Id);
        keyWrapper.Init(true, new ParametersWithRandom(secKey, random));

        Asn1OctetString encKey = new DerOctetString(
            keyWrapper.Wrap(keyBytes, 0, keyBytes.Length));

        RecipientEncryptedKey rKey = new RecipientEncryptedKey(
            new KeyAgreeRecipientIdentifier(
                new Asn1.Cms.IssuerAndSerialNumber(
                    PrincipalUtilities.GetIssuerX509Principal(cert),
                    cert.SerialNumber)),
            encKey);

        return new RecipientInfo(
            new KeyAgreeRecipientInfo(originator, ukm, keyEncAlg, new DerSequence(rKey)));
    }
    else if (derivationAlg != null)
    {
        string rfc3211WrapperName = Helper.GetRfc3211WrapperName(secKeyAlgorithm);
        IWrapper keyWrapper = Helper.CreateWrapper(rfc3211WrapperName);

        // Note: In Java build, the IV is automatically generated in JCE layer
        // Here the IV is generated explicitly: 8 bytes for DESede, 16 otherwise.
        int ivLength = rfc3211WrapperName.StartsWith("DESEDE") ? 8 : 16;
        byte[] iv = new byte[ivLength];
        random.NextBytes(iv);

        ICipherParameters parameters = new ParametersWithIV(secKey, iv);
        keyWrapper.Init(true, new ParametersWithRandom(parameters, random));

        Asn1OctetString encKey = new DerOctetString(
            keyWrapper.Wrap(keyBytes, 0, keyBytes.Length));

//				byte[] iv = keyWrapper.GetIV();

        DerSequence seq = new DerSequence(
            new DerObjectIdentifier(secKeyAlgorithm),
            new DerOctetString(iv));

        // NOTE(review): keyEncAlg is overwritten here with the pwri-KEK identifier;
        // this instance appears single-use so the mutation looks intentional — confirm.
        keyEncAlg = new AlgorithmIdentifier(PkcsObjectIdentifiers.IdAlgPwriKek, seq);

        return new RecipientInfo(new PasswordRecipientInfo(derivationAlg, keyEncAlg, encKey));
    }
    else
    {
        // Pre-shared KEK recipient: wrap with the stored secret key directly.
        IWrapper keyWrapper = Helper.CreateWrapper(keyEncAlg.ObjectID.Id);
        keyWrapper.Init(true, new ParametersWithRandom(secKey, random));

        Asn1OctetString encKey = new DerOctetString(
            keyWrapper.Wrap(keyBytes, 0, keyBytes.Length));

        return new RecipientInfo(new KekRecipientInfo(secKeyId, keyEncAlg, encKey));
    }
}
}

public CmsEnvelopedGenerator()
    : this(new SecureRandom())
{
}

/// <summary>Constructor allowing specific source of randomness</summary>
/// <param name="rand">Instance of <c>SecureRandom</c> to use.</param>
public CmsEnvelopedGenerator(
    SecureRandom rand)
{
    this.rand = rand;
}

/**
* add a recipient.
*
* @param cert recipient's public key certificate
* @exception ArgumentException if there is a problem with the certificate
*/
public void AddKeyTransRecipient(
    X509Certificate cert)
{
    recipientInfs.Add(new RecipientInf(cert));
}

/**
* add a recipient
*
* @param key the public key used by the recipient
* @param subKeyId the identifier for the recipient's public key
* @exception ArgumentException if there is a problem with the key
*/
public void AddKeyTransRecipient(
    AsymmetricKeyParameter pubKey,
    byte[] subKeyId)
{
    recipientInfs.Add(new CmsEnvelopedGenerator.RecipientInf(pubKey, new DerOctetString(subKeyId)));
}

/**
* add a KEK recipient.
* @param key the secret key to use for wrapping
* @param keyIdentifier the byte string that identifies the key
*/
public void AddKekRecipient(
    string keyAlgorithm, // TODO Remove need for this parameter
    KeyParameter key,
    byte[] keyIdentifier)
{
    recipientInfs.Add(new RecipientInf(keyAlgorithm, key,
        new KekIdentifier(keyIdentifier, null, null)));
}

// Adds a password-based recipient: derives the KEK parameters (PBKDF2 salt/iterations)
// from the supplied CmsPbeKey and records the derived secret key for wrapping.
public void AddPasswordRecipient(
    CmsPbeKey pbeKey,
    string kekAlgorithmOid)
{
    Pbkdf2Params p = new Pbkdf2Params(pbeKey.Salt, pbeKey.IterationCount);

    KeyParameter secretKey = pbeKey.GetEncoded(kekAlgorithmOid);

    recipientInfs.Add(new RecipientInf(kekAlgorithmOid, secretKey,
        new AlgorithmIdentifier(PkcsObjectIdentifiers.IdPbkdf2, p)));
}

/**
* Add a key agreement based recipient.
*
* @param agreementAlgorithm key agreement algorithm to use.
* @param senderPrivateKey private key to initialise sender side of agreement with.
* @param senderPublicKey sender public key to include with message.
* @param recipientCert recipient's public key certificate.
* @param cekWrapAlgorithm OID for key wrapping algorithm to use.
* @exception SecurityUtilityException if the algorithm requested cannot be found
* @exception InvalidKeyException if the keys are inappropriate for the algorithm specified
*/
public void AddKeyAgreementRecipient(
    string agreementAlgorithm,
    AsymmetricKeyParameter senderPrivateKey,
    AsymmetricKeyParameter senderPublicKey,
    X509Certificate recipientCert,
    string cekWrapAlgorithm)
{
    if (!senderPrivateKey.IsPrivate)
        throw new ArgumentException("Expected private key", "senderPrivateKey");
    if (senderPublicKey.IsPrivate)
        throw new ArgumentException("Expected public key", "senderPublicKey");

    // Run the (KDF-augmented) agreement now to derive the key-wrapping secret.
    IBasicAgreement agreement = AgreementUtilities.GetBasicAgreementWithKdf(
        agreementAlgorithm, cekWrapAlgorithm);
    agreement.Init(new ParametersWithRandom(senderPrivateKey, rand));

    BigInteger secretNum = agreement.CalculateAgreement(recipientCert.GetPublicKey());

    try
    {
        SubjectPublicKeyInfo oPubKeyInfo =
            SubjectPublicKeyInfoFactory.CreateSubjectPublicKeyInfo(senderPublicKey);

        // The sender's ephemeral/static public key is carried in the message so the
        // recipient can re-derive the shared secret.
        OriginatorIdentifierOrKey originator = new OriginatorIdentifierOrKey(
            new OriginatorPublicKey(
                new AlgorithmIdentifier(oPubKeyInfo.AlgorithmID.ObjectID, DerNull.Instance),
                oPubKeyInfo.PublicKeyData.GetBytes()));

        // TODO Fix the way bytes are derived from the secret
        byte[] secretBytes = secretNum.ToByteArrayUnsigned();
        KeyParameter secret = ParameterUtilities.CreateKeyParameter(
            cekWrapAlgorithm, secretBytes);

        recipientInfs.Add(
            new RecipientInf(cekWrapAlgorithm, secret, agreementAlgorithm, cekWrapAlgorithm,
                originator, recipientCert));
    }
    catch (IOException e)
    {
        throw new InvalidKeyException("cannot extract originator public key: " + e);
    }
}

// Builds the content-encryption AlgorithmIdentifier for the given OID and also produces
// the matching cipher parameters. When asn1Params is null the identifier carries NULL
// parameters and the raw key is used directly as the cipher parameters.
protected internal virtual AlgorithmIdentifier GetAlgorithmIdentifier(
    string encryptionOid,
    KeyParameter encKey,
    Asn1Encodable asn1Params,
    out ICipherParameters cipherParameters)
{
    Asn1Object asn1Object;
    if (asn1Params != null)
    {
        asn1Object = asn1Params.ToAsn1Object();
        cipherParameters = ParameterUtilities.GetCipherParameters(
            encryptionOid, encKey, asn1Object);
    }
    else
    {
        asn1Object = DerNull.Instance;
        cipherParameters = encKey;
    }

    return new AlgorithmIdentifier(
        new DerObjectIdentifier(encryptionOid),
        asn1Object);
}

// Generates algorithm parameters (typically an IV) for the content-encryption algorithm.
// RC2/CBC gets special handling: its parameter encodes an effective-key-bits "version"
// looked up via rc2Table (declared elsewhere in this class) for key sizes below 256 bits.
// Returns null when the algorithm takes no parameters.
protected internal virtual Asn1Encodable GenerateAsn1Parameters(
    string encryptionOid,
    byte[] encKeyBytes)
{
    Asn1Encodable asn1Params = null;

    try
    {
        if (encryptionOid.Equals(RC2Cbc))
        {
            byte[] iv = new byte[8];
            rand.NextBytes(iv);

            // TODO Is this detailed repeat of Java version really necessary?
            int effKeyBits = encKeyBytes.Length * 8;
            int parameterVersion;

            if (effKeyBits < 256)
            {
                parameterVersion = rc2Table[effKeyBits];
            }
            else
            {
                parameterVersion = effKeyBits;
            }

            asn1Params = new RC2CbcParameter(parameterVersion, iv);
        }
        else
        {
            asn1Params = ParameterUtilities.GenerateParameters(encryptionOid, rand);
        }
    }
    catch (SecurityUtilityException)
    {
        // No problem... no parameters generated
    }

    return asn1Params;
}
}
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): comments below were added for review only; regeneration will drop them.

using gaxgrpc = Google.Api.Gax.Grpc;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;

namespace Google.Area120.Tables.V1Alpha1.Tests
{
    /// <summary>Generated unit tests.</summary>
    public sealed class GeneratedTablesServiceClientTest
    {
        // Each RPC gets up to six generated tests: sync/async variants of
        // (a) full request object, (b) flattened string-name overload ("request.Name"),
        // and (c) flattened resource-name overload. All use a strict Moq mock of the
        // underlying gRPC client, so VerifyAll confirms exactly the expected call happened.

        // --- GetTable RPC ---

        [xunit::FactAttribute]
        public void GetTableRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetTableRequest request = new GetTableRequest
            {
                TableName = TableName.FromTable("[TABLE]"),
            };
            Table expectedResponse = new Table
            {
                TableName = TableName.FromTable("[TABLE]"),
                DisplayName = "display_name137f65c2",
                Columns = { new ColumnDescription(), },
            };
            mockGrpcClient.Setup(x => x.GetTable(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Table response = client.GetTable(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetTableRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetTableRequest request = new GetTableRequest
            {
                TableName = TableName.FromTable("[TABLE]"),
            };
            Table expectedResponse = new Table
            {
                TableName = TableName.FromTable("[TABLE]"),
                DisplayName = "display_name137f65c2",
                Columns = { new ColumnDescription(), },
            };
            mockGrpcClient.Setup(x => x.GetTableAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Table>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            // Async tests exercise both overloads: CallSettings and raw CancellationToken.
            Table responseCallSettings = await client.GetTableAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Table responseCancellationToken = await client.GetTableAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetTable()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetTableRequest request = new GetTableRequest
            {
                TableName = TableName.FromTable("[TABLE]"),
            };
            Table expectedResponse = new Table
            {
                TableName = TableName.FromTable("[TABLE]"),
                DisplayName = "display_name137f65c2",
                Columns = { new ColumnDescription(), },
            };
            mockGrpcClient.Setup(x => x.GetTable(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Table response = client.GetTable(request.Name);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetTableAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetTableRequest request = new GetTableRequest
            {
                TableName = TableName.FromTable("[TABLE]"),
            };
            Table expectedResponse = new Table
            {
                TableName = TableName.FromTable("[TABLE]"),
                DisplayName = "display_name137f65c2",
                Columns = { new ColumnDescription(), },
            };
            mockGrpcClient.Setup(x => x.GetTableAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Table>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Table responseCallSettings = await client.GetTableAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Table responseCancellationToken = await client.GetTableAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetTableResourceNames()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetTableRequest request = new GetTableRequest
            {
                TableName = TableName.FromTable("[TABLE]"),
            };
            Table expectedResponse = new Table
            {
                TableName = TableName.FromTable("[TABLE]"),
                DisplayName = "display_name137f65c2",
                Columns = { new ColumnDescription(), },
            };
            mockGrpcClient.Setup(x => x.GetTable(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Table response = client.GetTable(request.TableName);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetTableResourceNamesAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetTableRequest request = new GetTableRequest
            {
                TableName = TableName.FromTable("[TABLE]"),
            };
            Table expectedResponse = new Table
            {
                TableName = TableName.FromTable("[TABLE]"),
                DisplayName = "display_name137f65c2",
                Columns = { new ColumnDescription(), },
            };
            mockGrpcClient.Setup(x => x.GetTableAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Table>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Table responseCallSettings = await client.GetTableAsync(request.TableName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Table responseCancellationToken = await client.GetTableAsync(request.TableName, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // --- GetWorkspace RPC ---

        [xunit::FactAttribute]
        public void GetWorkspaceRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetWorkspaceRequest request = new GetWorkspaceRequest
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
            };
            Workspace expectedResponse = new Workspace
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
                DisplayName = "display_name137f65c2",
                Tables = { new Table(), },
            };
            mockGrpcClient.Setup(x => x.GetWorkspace(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Workspace response = client.GetWorkspace(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetWorkspaceRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetWorkspaceRequest request = new GetWorkspaceRequest
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
            };
            Workspace expectedResponse = new Workspace
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
                DisplayName = "display_name137f65c2",
                Tables = { new Table(), },
            };
            mockGrpcClient.Setup(x => x.GetWorkspaceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Workspace>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Workspace responseCallSettings = await client.GetWorkspaceAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Workspace responseCancellationToken = await client.GetWorkspaceAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetWorkspace()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetWorkspaceRequest request = new GetWorkspaceRequest
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
            };
            Workspace expectedResponse = new Workspace
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
                DisplayName = "display_name137f65c2",
                Tables = { new Table(), },
            };
            mockGrpcClient.Setup(x => x.GetWorkspace(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Workspace response = client.GetWorkspace(request.Name);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetWorkspaceAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetWorkspaceRequest request = new GetWorkspaceRequest
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
            };
            Workspace expectedResponse = new Workspace
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
                DisplayName = "display_name137f65c2",
                Tables = { new Table(), },
            };
            mockGrpcClient.Setup(x => x.GetWorkspaceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Workspace>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Workspace responseCallSettings = await client.GetWorkspaceAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Workspace responseCancellationToken = await client.GetWorkspaceAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetWorkspaceResourceNames()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetWorkspaceRequest request = new GetWorkspaceRequest
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
            };
            Workspace expectedResponse = new Workspace
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
                DisplayName = "display_name137f65c2",
                Tables = { new Table(), },
            };
            mockGrpcClient.Setup(x => x.GetWorkspace(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Workspace response = client.GetWorkspace(request.WorkspaceName);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetWorkspaceResourceNamesAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetWorkspaceRequest request = new GetWorkspaceRequest
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
            };
            Workspace expectedResponse = new Workspace
            {
                WorkspaceName = WorkspaceName.FromWorkspace("[WORKSPACE]"),
                DisplayName = "display_name137f65c2",
                Tables = { new Table(), },
            };
            mockGrpcClient.Setup(x => x.GetWorkspaceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Workspace>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Workspace responseCallSettings = await client.GetWorkspaceAsync(request.WorkspaceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Workspace responseCancellationToken = await client.GetWorkspaceAsync(request.WorkspaceName, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // --- GetRow RPC (request-object variants also set the View field) ---

        [xunit::FactAttribute]
        public void GetRowRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetRowRequest request = new GetRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                View = View.ColumnIdView,
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.GetRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row response = client.GetRow(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetRowRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetRowRequest request = new GetRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                View = View.ColumnIdView,
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.GetRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Row>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row responseCallSettings = await client.GetRowAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Row responseCancellationToken = await client.GetRowAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetRow()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetRowRequest request = new GetRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.GetRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row response = client.GetRow(request.Name);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetRowAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetRowRequest request = new GetRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.GetRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Row>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row responseCallSettings = await client.GetRowAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Row responseCancellationToken = await client.GetRowAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetRowResourceNames()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetRowRequest request = new GetRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.GetRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row response = client.GetRow(request.RowName);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetRowResourceNamesAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            GetRowRequest request = new GetRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.GetRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Row>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row responseCallSettings = await client.GetRowAsync(request.RowName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Row responseCancellationToken = await client.GetRowAsync(request.RowName, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // --- CreateRow RPC ---

        [xunit::FactAttribute]
        public void CreateRowRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            CreateRowRequest request = new CreateRowRequest
            {
                Parent = "parent7858e4d0",
                Row = new Row(),
                View = View.ColumnIdView,
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.CreateRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row response = client.CreateRow(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task CreateRowRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            CreateRowRequest request = new CreateRowRequest
            {
                Parent = "parent7858e4d0",
                Row = new Row(),
                View = View.ColumnIdView,
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.CreateRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Row>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row responseCallSettings = await client.CreateRowAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Row responseCancellationToken = await client.CreateRowAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void CreateRow()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            CreateRowRequest request = new CreateRowRequest
            {
                Parent = "parent7858e4d0",
                Row = new Row(),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.CreateRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row response = client.CreateRow(request.Parent, request.Row);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task CreateRowAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            CreateRowRequest request = new CreateRowRequest
            {
                Parent = "parent7858e4d0",
                Row = new Row(),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.CreateRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Row>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row responseCallSettings = await client.CreateRowAsync(request.Parent, request.Row, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Row responseCancellationToken = await client.CreateRowAsync(request.Parent, request.Row, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // --- BatchCreateRows RPC (request-object variants only) ---

        [xunit::FactAttribute]
        public void BatchCreateRowsRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            BatchCreateRowsRequest request = new BatchCreateRowsRequest
            {
                Parent = "parent7858e4d0",
                Requests = { new CreateRowRequest(), },
            };
            BatchCreateRowsResponse expectedResponse = new BatchCreateRowsResponse
            {
                Rows = { new Row(), },
            };
            mockGrpcClient.Setup(x => x.BatchCreateRows(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            BatchCreateRowsResponse response = client.BatchCreateRows(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task BatchCreateRowsRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            BatchCreateRowsRequest request = new BatchCreateRowsRequest
            {
                Parent = "parent7858e4d0",
                Requests = { new CreateRowRequest(), },
            };
            BatchCreateRowsResponse expectedResponse = new BatchCreateRowsResponse
            {
                Rows = { new Row(), },
            };
            mockGrpcClient.Setup(x => x.BatchCreateRowsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<BatchCreateRowsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            BatchCreateRowsResponse responseCallSettings = await client.BatchCreateRowsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            BatchCreateRowsResponse responseCancellationToken = await client.BatchCreateRowsAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // --- UpdateRow RPC ---

        [xunit::FactAttribute]
        public void UpdateRowRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            UpdateRowRequest request = new UpdateRowRequest
            {
                Row = new Row(),
                UpdateMask = new wkt::FieldMask(),
                View = View.ColumnIdView,
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.UpdateRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row response = client.UpdateRow(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task UpdateRowRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            UpdateRowRequest request = new UpdateRowRequest
            {
                Row = new Row(),
                UpdateMask = new wkt::FieldMask(),
                View = View.ColumnIdView,
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.UpdateRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Row>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row responseCallSettings = await client.UpdateRowAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Row responseCancellationToken = await client.UpdateRowAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public void UpdateRow()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            UpdateRowRequest request = new UpdateRowRequest
            {
                Row = new Row(),
                UpdateMask = new wkt::FieldMask(),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.UpdateRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row response = client.UpdateRow(request.Row, request.UpdateMask);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task UpdateRowAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            UpdateRowRequest request = new UpdateRowRequest
            {
                Row = new Row(),
                UpdateMask = new wkt::FieldMask(),
            };
            Row expectedResponse = new Row
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
                Values = { { "key8a0b6e3c", new wkt::Value() }, },
            };
            mockGrpcClient.Setup(x => x.UpdateRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Row>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            Row responseCallSettings = await client.UpdateRowAsync(request.Row, request.UpdateMask, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Row responseCancellationToken = await client.UpdateRowAsync(request.Row, request.UpdateMask, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // --- BatchUpdateRows RPC (request-object variants only) ---

        [xunit::FactAttribute]
        public void BatchUpdateRowsRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            BatchUpdateRowsRequest request = new BatchUpdateRowsRequest
            {
                Parent = "parent7858e4d0",
                Requests = { new UpdateRowRequest(), },
            };
            BatchUpdateRowsResponse expectedResponse = new BatchUpdateRowsResponse
            {
                Rows = { new Row(), },
            };
            mockGrpcClient.Setup(x => x.BatchUpdateRows(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            BatchUpdateRowsResponse response = client.BatchUpdateRows(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task BatchUpdateRowsRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            BatchUpdateRowsRequest request = new BatchUpdateRowsRequest
            {
                Parent = "parent7858e4d0",
                Requests = { new UpdateRowRequest(), },
            };
            BatchUpdateRowsResponse expectedResponse = new BatchUpdateRowsResponse
            {
                Rows = { new Row(), },
            };
            mockGrpcClient.Setup(x => x.BatchUpdateRowsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<BatchUpdateRowsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            BatchUpdateRowsResponse responseCallSettings = await client.BatchUpdateRowsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            BatchUpdateRowsResponse responseCancellationToken = await client.BatchUpdateRowsAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // --- DeleteRow RPC (returns Empty, so no response assertion) ---

        [xunit::FactAttribute]
        public void DeleteRowRequestObject()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            DeleteRowRequest request = new DeleteRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
            };
            wkt::Empty expectedResponse = new wkt::Empty { };
            mockGrpcClient.Setup(x => x.DeleteRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            client.DeleteRow(request);
            mockGrpcClient.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task DeleteRowRequestObjectAsync()
        {
            moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict);
            DeleteRowRequest request = new DeleteRowRequest
            {
                RowName = RowName.FromTableRow("[TABLE]", "[ROW]"),
            };
            wkt::Empty expectedResponse = new wkt::Empty { };
            mockGrpcClient.Setup(x => x.DeleteRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null);
            await client.DeleteRowAsync(request,
gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.DeleteRowAsync(request, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void DeleteRow() { moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict); DeleteRowRequest request = new DeleteRowRequest { RowName = RowName.FromTableRow("[TABLE]", "[ROW]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null); client.DeleteRow(request.Name); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task DeleteRowAsync() { moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict); DeleteRowRequest request = new DeleteRowRequest { RowName = RowName.FromTableRow("[TABLE]", "[ROW]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null)); TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null); await client.DeleteRowAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.DeleteRowAsync(request.Name, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void DeleteRowResourceNames() { moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict); DeleteRowRequest request = new DeleteRowRequest { RowName = RowName.FromTableRow("[TABLE]", "[ROW]"), }; wkt::Empty expectedResponse = new wkt::Empty { 
}; mockGrpcClient.Setup(x => x.DeleteRow(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null); client.DeleteRow(request.RowName); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task DeleteRowResourceNamesAsync() { moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict); DeleteRowRequest request = new DeleteRowRequest { RowName = RowName.FromTableRow("[TABLE]", "[ROW]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteRowAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null)); TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null); await client.DeleteRowAsync(request.RowName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.DeleteRowAsync(request.RowName, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void BatchDeleteRowsRequestObject() { moq::Mock<TablesService.TablesServiceClient> mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict); BatchDeleteRowsRequest request = new BatchDeleteRowsRequest { ParentAsTableName = TableName.FromTable("[TABLE]"), RowNames = { RowName.FromTableRow("[TABLE]", "[ROW]"), }, }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.BatchDeleteRows(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null); client.BatchDeleteRows(request); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task BatchDeleteRowsRequestObjectAsync() { moq::Mock<TablesService.TablesServiceClient> 
mockGrpcClient = new moq::Mock<TablesService.TablesServiceClient>(moq::MockBehavior.Strict); BatchDeleteRowsRequest request = new BatchDeleteRowsRequest { ParentAsTableName = TableName.FromTable("[TABLE]"), RowNames = { RowName.FromTableRow("[TABLE]", "[ROW]"), }, }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.BatchDeleteRowsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null)); TablesServiceClient client = new TablesServiceClientImpl(mockGrpcClient.Object, null); await client.BatchDeleteRowsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.BatchDeleteRowsAsync(request, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } } }
/*
 * UCS4Encoding.cs - Implementation of the "System.Xml.UCS4Encoding" class.
 *
 * Copyright (C) 2003 Southern Storm Software, Pty Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

namespace I18N.Common
{

using System;
using System.Text;

// Encoding for 4-byte (UCS-4 / UTF-32) characters in any of the four
// possible byte orders.  Surrogate pairs in the incoming UTF-16 data are
// combined into a single 4-byte value; a U+FEFF value anywhere in the
// byte stream is silently dropped on decode.
public class UCS4Encoding : Encoding
{
	// Byte ordering for UCS-4 streams.  The digits name the position of
	// each byte of the 32-bit value: 1234 = big-endian, 4321 = little-endian,
	// 3412 and 2143 are the two mixed ("middle-endian") orders.
	public enum ByteOrder
	{
		Order_1234,
		Order_4321,
		Order_3412,
		Order_2143

	}; // enum ByteOrder

	// Internal state.
	private ByteOrder byteOrder;

	// Constructors.
	public UCS4Encoding(int codePage, ByteOrder order)
			: base(codePage)
			{
				byteOrder = order;
			}

	// Get the number of bytes needed to encode a character buffer.
	// Every character (or surrogate pair) encodes to exactly 4 bytes.
	public override int GetByteCount(char[] chars, int index, int count)
			{
				if(chars == null)
				{
					throw new ArgumentNullException("chars");
				}
				if(index < 0 || index > chars.Length)
				{
					throw new ArgumentOutOfRangeException
						("index", Strings.GetString("ArgRange_Array"));
				}
				if(count < 0 || count > (chars.Length - index))
				{
					throw new ArgumentOutOfRangeException
						("count", Strings.GetString("ArgRange_Array"));
				}
				int bytes = 0;
				char ch;
				while(count > 0)
				{
					ch = chars[index++];
					--count;
					if(ch >= '\uD800' && ch <= '\uDBFF')
					{
						// Possibly the start of a surrogate pair: a following
						// low surrogate is consumed together with the high one.
						if(count > 0)
						{
							ch = chars[index];
							if(ch >= '\uDC00' && ch <= '\uDFFF')
							{
								++index;
								--count;
							}
						}
					}
					bytes += 4;
				}
				return bytes;
			}

	// Get the bytes that result from encoding a character buffer.
	// The four switch cases are identical except for the order in which the
	// four bytes of each 32-bit value are written.  Unpaired surrogates are
	// written through as bare 16-bit values (high bytes zero).
	public override int GetBytes(char[] chars, int charIndex, int charCount,
								 byte[] bytes, int byteIndex)
			{
				if(chars == null)
				{
					throw new ArgumentNullException("chars");
				}
				if(bytes == null)
				{
					throw new ArgumentNullException("bytes");
				}
				if(charIndex < 0 || charIndex > chars.Length)
				{
					throw new ArgumentOutOfRangeException
						("charIndex", Strings.GetString("ArgRange_Array"));
				}
				if(charCount < 0 || charCount > (chars.Length - charIndex))
				{
					throw new ArgumentOutOfRangeException
						("charCount", Strings.GetString("ArgRange_Array"));
				}
				if(byteIndex < 0 || byteIndex > bytes.Length)
				{
					throw new ArgumentOutOfRangeException
						("byteIndex", Strings.GetString("ArgRange_Array"));
				}
				int posn = byteIndex;
				int left = bytes.Length - byteIndex;
				uint pair;
				char ch;
				switch(byteOrder)
				{
					case ByteOrder.Order_1234:
					{
						while(charCount-- > 0)
						{
							if(left < 4)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							ch = chars[charIndex++];
							if(ch >= '\uD800' && ch <= '\uDBFF' &&
							   charCount > 0 &&
							   chars[charIndex] >= '\uDC00' &&
							   chars[charIndex] <= '\uDFFF')
							{
								// Combine the surrogate pair into one code point.
								pair = ((uint)(ch - '\uD800')) << 10;
								ch = chars[charIndex++];
								--charCount;
								pair += ((uint)(ch - '\uDC00')) + (uint)0x10000;
								bytes[posn++] = (byte)(pair >> 24);
								bytes[posn++] = (byte)(pair >> 16);
								bytes[posn++] = (byte)(pair >> 8);
								bytes[posn++] = (byte)pair;
							}
							else
							{
								bytes[posn++] = (byte)0;
								bytes[posn++] = (byte)0;
								bytes[posn++] = (byte)(ch >> 8);
								bytes[posn++] = (byte)ch;
							}
							left -= 4;
						}
					}
					break;

					case ByteOrder.Order_4321:
					{
						while(charCount-- > 0)
						{
							if(left < 4)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							ch = chars[charIndex++];
							if(ch >= '\uD800' && ch <= '\uDBFF' &&
							   charCount > 0 &&
							   chars[charIndex] >= '\uDC00' &&
							   chars[charIndex] <= '\uDFFF')
							{
								pair = ((uint)(ch - '\uD800')) << 10;
								ch = chars[charIndex++];
								--charCount;
								pair += ((uint)(ch - '\uDC00')) + (uint)0x10000;
								bytes[posn++] = (byte)pair;
								bytes[posn++] = (byte)(pair >> 8);
								bytes[posn++] = (byte)(pair >> 16);
								bytes[posn++] = (byte)(pair >> 24);
							}
							else
							{
								bytes[posn++] = (byte)ch;
								bytes[posn++] = (byte)(ch >> 8);
								bytes[posn++] = (byte)0;
								bytes[posn++] = (byte)0;
							}
							left -= 4;
						}
					}
					break;

					case ByteOrder.Order_3412:
					{
						while(charCount-- > 0)
						{
							if(left < 4)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							ch = chars[charIndex++];
							if(ch >= '\uD800' && ch <= '\uDBFF' &&
							   charCount > 0 &&
							   chars[charIndex] >= '\uDC00' &&
							   chars[charIndex] <= '\uDFFF')
							{
								pair = ((uint)(ch - '\uD800')) << 10;
								ch = chars[charIndex++];
								--charCount;
								pair += ((uint)(ch - '\uDC00')) + (uint)0x10000;
								bytes[posn++] = (byte)(pair >> 8);
								bytes[posn++] = (byte)pair;
								bytes[posn++] = (byte)(pair >> 24);
								bytes[posn++] = (byte)(pair >> 16);
							}
							else
							{
								bytes[posn++] = (byte)(ch >> 8);
								bytes[posn++] = (byte)ch;
								bytes[posn++] = (byte)0;
								bytes[posn++] = (byte)0;
							}
							left -= 4;
						}
					}
					break;

					case ByteOrder.Order_2143:
					{
						while(charCount-- > 0)
						{
							if(left < 4)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							ch = chars[charIndex++];
							if(ch >= '\uD800' && ch <= '\uDBFF' &&
							   charCount > 0 &&
							   chars[charIndex] >= '\uDC00' &&
							   chars[charIndex] <= '\uDFFF')
							{
								pair = ((uint)(ch - '\uD800')) << 10;
								ch = chars[charIndex++];
								--charCount;
								pair += ((uint)(ch - '\uDC00')) + (uint)0x10000;
								bytes[posn++] = (byte)(pair >> 16);
								bytes[posn++] = (byte)(pair >> 24);
								bytes[posn++] = (byte)pair;
								bytes[posn++] = (byte)(pair >> 8);
							}
							else
							{
								bytes[posn++] = (byte)0;
								bytes[posn++] = (byte)0;
								bytes[posn++] = (byte)ch;
								bytes[posn++] = (byte)(ch >> 8);
							}
							left -= 4;
						}
					}
					break;
				}
				return posn - byteIndex;
			}

	// Read a 4-byte character from an array, honouring the byte order.
	// Caller must guarantee at least 4 bytes are available at "index".
	private static uint ReadChar(ByteOrder byteOrder, byte[] array, int index)
			{
				switch(byteOrder)
				{
					case ByteOrder.Order_1234:
					{
						return (((uint)(array[index])) << 24) |
							   (((uint)(array[index + 1])) << 16) |
							   (((uint)(array[index + 2])) << 8) |
							    ((uint)(array[index + 3]));
					}
					// Not reached.

					case ByteOrder.Order_4321:
					{
						return (((uint)(array[index + 3])) << 24) |
							   (((uint)(array[index + 2])) << 16) |
							   (((uint)(array[index + 1])) << 8) |
							    ((uint)(array[index]));
					}
					// Not reached.

					case ByteOrder.Order_3412:
					{
						return (((uint)(array[index + 2])) << 24) |
							   (((uint)(array[index + 3])) << 16) |
							   (((uint)(array[index])) << 8) |
							    ((uint)(array[index + 1]));
					}
					// Not reached.

					case ByteOrder.Order_2143:
					{
						return (((uint)(array[index + 1])) << 24) |
							   (((uint)(array[index])) << 16) |
							   (((uint)(array[index + 3])) << 8) |
							    ((uint)(array[index + 2]));
					}
					// Not reached.
				}
				return 0;
			}

	// Internal version of "GetCharCount", shared with UCS4Decoder.
	// "leftOver"/"leftOverLen" carry a partial (<4 byte) character from a
	// previous call; pass null/0 for the stateless case.  U+FEFF values are
	// not counted (dropped as byte-order marks); values above 0xFFFF count
	// as two UTF-16 chars (a surrogate pair).
	private static int InternalGetCharCount
				(ByteOrder byteOrder, byte[] leftOver, int leftOverLen,
				 byte[] bytes, int index, int count)
			{
				// Validate the parameters.
				if(bytes == null)
				{
					throw new ArgumentNullException("bytes");
				}
				if(index < 0 || index > bytes.Length)
				{
					throw new ArgumentOutOfRangeException
						("index", Strings.GetString("ArgRange_Array"));
				}
				if(count < 0 || count > (bytes.Length - index))
				{
					throw new ArgumentOutOfRangeException
						("count", Strings.GetString("ArgRange_Array"));
				}

				// Handle the left-over buffer.
				int chars = 0;
				uint value;
				if(leftOverLen > 0)
				{
					if((leftOverLen + count) < 4)
					{
						// Still not enough bytes for a full character.
						return 0;
					}
					Array.Copy(bytes, index, leftOver, leftOverLen,
							   4 - leftOverLen);
					value = ReadChar(byteOrder, leftOver, 0);
					if(value != (uint)0x0000FEFF)
					{
						if(value > (uint)0x0000FFFF)
						{
							chars += 2;
						}
						else
						{
							++chars;
						}
					}
					index += 4 - leftOverLen;
					count -= 4 - leftOverLen;
				}

				// Handle the main buffer contents.
				while(count >= 4)
				{
					value = ReadChar(byteOrder, bytes, index);
					if(value != (uint)0x0000FEFF)
					{
						if(value > (uint)0x0000FFFF)
						{
							chars += 2;
						}
						else
						{
							++chars;
						}
					}
					index += 4;
					count -= 4;
				}
				return chars;
			}

	// Get the number of characters needed to decode a byte buffer.
	public override int GetCharCount(byte[] bytes, int index, int count)
			{
				return InternalGetCharCount
					(byteOrder, null, 0, bytes, index, count);
			}

	// Internal version of "GetChars", shared with UCS4Decoder.  The
	// "leftOver"/"leftOverLen" pair carries partial characters across calls;
	// trailing partial bytes are stored back into "leftOver" for next time.
	private static int InternalGetChars(ByteOrder byteOrder, byte[] leftOver,
										ref int leftOverLen,
										byte[] bytes, int byteIndex,
										int byteCount, char[] chars,
										int charIndex)
			{
				// Validate the parameters.
				if(bytes == null)
				{
					throw new ArgumentNullException("bytes");
				}
				if(chars == null)
				{
					throw new ArgumentNullException("chars");
				}
				if(byteIndex < 0 || byteIndex > bytes.Length)
				{
					throw new ArgumentOutOfRangeException
						("byteIndex", Strings.GetString("ArgRange_Array"));
				}
				if(byteCount < 0 || byteCount > (bytes.Length - byteIndex))
				{
					throw new ArgumentOutOfRangeException
						("byteCount", Strings.GetString("ArgRange_Array"));
				}
				if(charIndex < 0 || charIndex > chars.Length)
				{
					throw new ArgumentOutOfRangeException
						("charIndex", Strings.GetString("ArgRange_Array"));
				}

				// Handle the left-over buffer.
				uint value;
				int start = charIndex;
				int charCount = chars.Length - charIndex;
				if(leftOverLen > 0)
				{
					if((leftOverLen + byteCount) < 4)
					{
						// Not enough for a full character yet; stash and exit.
						Array.Copy(bytes, byteIndex, leftOver,
								   leftOverLen, byteCount);
						leftOverLen += byteCount;
						return 0;
					}
					Array.Copy(bytes, byteIndex, leftOver, leftOverLen,
							   4 - leftOverLen);
					value = ReadChar(byteOrder, leftOver, 0);
					if(value != (uint)0x0000FEFF)
					{
						if(value > (uint)0x0000FFFF)
						{
							// Needs a surrogate pair in the output.
							if(charCount < 2)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							value -= (uint)0x10000;
							chars[charIndex++] =
								(char)((value >> 10) + (uint)0xD800);
							chars[charIndex++] =
								(char)((value & (uint)0x03FF) + (uint)0xDC00);
							charCount -= 2;
						}
						else
						{
							if(charCount < 1)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							chars[charIndex++] = (char)value;
							--charCount;
						}
					}
					byteIndex += 4 - leftOverLen;
					byteCount -= 4 - leftOverLen;
					leftOverLen = 0;
				}

				// Handle the main buffer contents.
				while(byteCount >= 4)
				{
					value = ReadChar(byteOrder, bytes, byteIndex);
					if(value != (uint)0x0000FEFF)
					{
						if(value > (uint)0x0000FFFF)
						{
							if(charCount < 2)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							value -= (uint)0x10000;
							chars[charIndex++] =
								(char)((value >> 10) + (uint)0xD800);
							chars[charIndex++] =
								(char)((value & (uint)0x03FF) + (uint)0xDC00);
							charCount -= 2;
						}
						else
						{
							if(charCount < 1)
							{
								throw new ArgumentException
									(Strings.GetString("Arg_InsufficientSpace"));
							}
							chars[charIndex++] = (char)value;
							--charCount;
						}
					}
					byteIndex += 4;
					byteCount -= 4;
				}
				// NOTE(review): when called via the stateless GetChars below,
				// "leftOver" is null; a byteCount that is not a multiple of 4
				// reaches this Array.Copy and throws NullReferenceException
				// instead of a meaningful error - confirm and guard.
				if(byteCount > 0)
				{
					Array.Copy(bytes, byteIndex, leftOver, 0, byteCount);
					leftOverLen = byteCount;
				}
				return charIndex - start;
			}

	// Get the characters that result from decoding a byte buffer.
	// Stateless: no left-over buffer is supplied (see NOTE above).
	public override int GetChars(byte[] bytes, int byteIndex, int byteCount,
								 char[] chars, int charIndex)
			{
				int leftOverLen = 0;
				return InternalGetChars(byteOrder, null, ref leftOverLen,
										bytes, byteIndex, byteCount,
										chars, charIndex);
			}

	// Get the maximum number of bytes needed to encode a
	// specified number of characters.
	public override int GetMaxByteCount(int charCount)
			{
				if(charCount < 0)
				{
					throw new ArgumentOutOfRangeException
						("charCount",
						 Strings.GetString("ArgRange_NonNegative"));
				}
				return charCount * 4;
			}

	// Get the maximum number of characters needed to decode a
	// specified number of bytes.
	public override int GetMaxCharCount(int byteCount)
			{
				if(byteCount < 0)
				{
					throw new ArgumentOutOfRangeException
						("byteCount",
						 Strings.GetString("ArgRange_NonNegative"));
				}
				// We may need to account for surrogate pairs,
				// so use / 2 rather than / 4.
				return byteCount / 2;
			}

	// Get a UCS4-specific decoder that is attached to this instance.
	// Unlike GetChars, the decoder keeps partial characters across calls.
	public override Decoder GetDecoder()
			{
				return new UCS4Decoder(byteOrder);
			}

	// Get the UCS4 preamble: the byte-order mark U+FEFF written in this
	// instance's byte order.
	public override byte[] GetPreamble()
			{
				byte[] preamble = new byte[4];
				switch(byteOrder)
				{
					case ByteOrder.Order_1234:
					{
						preamble[0] = (byte)0x00;
						preamble[1] = (byte)0x00;
						preamble[2] = (byte)0xFE;
						preamble[3] = (byte)0xFF;
					}
					break;

					case ByteOrder.Order_4321:
					{
						preamble[0] = (byte)0xFF;
						preamble[1] = (byte)0xFE;
						preamble[2] = (byte)0x00;
						preamble[3] = (byte)0x00;
					}
					break;

					case ByteOrder.Order_3412:
					{
						preamble[0] = (byte)0xFE;
						preamble[1] = (byte)0xFF;
						preamble[2] = (byte)0x00;
						preamble[3] = (byte)0x00;
					}
					break;

					case ByteOrder.Order_2143:
					{
						preamble[0] = (byte)0x00;
						preamble[1] = (byte)0x00;
						preamble[2] = (byte)0xFF;
						preamble[3] = (byte)0xFE;
					}
					break;
				}
				return preamble;
			}

	// Determine if this object is equal to another.
	// NOTE(review): equality is by byteOrder, but GetHashCode below falls
	// back to the base implementation, so equal instances are not guaranteed
	// equal hash codes - confirm whether this matters to any caller.
	public override bool Equals(Object value)
			{
				UCS4Encoding enc = (value as UCS4Encoding);
				if(enc != null)
				{
					return (byteOrder == enc.byteOrder);
				}
				else
				{
					return false;
				}
			}

	// Get the hash code for this object.
	public override int GetHashCode()
			{
				return base.GetHashCode();
			}

#if !ECMA_COMPAT

	// Get the mail body name for this encoding.
	public override String BodyName
			{
				get
				{
					switch(byteOrder)
					{
						case ByteOrder.Order_1234:	return "ucs-4-be";
						case ByteOrder.Order_4321:	return "ucs-4";
						case ByteOrder.Order_3412:	return "ucs-4-3412";
						case ByteOrder.Order_2143:	return "ucs-4-2143";
					}
					return null;
				}
			}

	// Get the human-readable name for this encoding.
	public override String EncodingName
			{
				get
				{
					switch(byteOrder)
					{
						case ByteOrder.Order_1234:
							return "Unicode (UCS-4 Big-Endian)";
						case ByteOrder.Order_4321:
							return "Unicode (UCS-4)";
						case ByteOrder.Order_3412:
							return "Unicode (UCS-4 Order 3412)";
						case ByteOrder.Order_2143:
							return "Unicode (UCS-4 Order 2143)";
					}
					return null;
				}
			}

	// Get the mail agent header name for this encoding.
	public override String HeaderName
			{
				get
				{
					return BodyName;
				}
			}

	// Determine if this encoding can be displayed in a Web browser.
	public override bool IsBrowserDisplay
			{
				get
				{
					return false;
				}
			}

	// Determine if this encoding can be saved from a Web browser.
	public override bool IsBrowserSave
			{
				get
				{
					return false;
				}
			}

	// Determine if this encoding can be displayed in a mail/news agent.
	public override bool IsMailNewsDisplay
			{
				get
				{
					return false;
				}
			}

	// Determine if this encoding can be saved from a mail/news agent.
	public override bool IsMailNewsSave
			{
				get
				{
					return false;
				}
			}

	// Get the IANA-preferred Web name for this encoding.
	public override String WebName
			{
				get
				{
					return BodyName;
				}
			}

	// Get the Windows code page represented by this object.
	public override int WindowsCodePage
			{
				get
				{
					// Use UCS-2 as the underlying "real" code page.
					if(byteOrder == ByteOrder.Order_1234)
					{
						return 1201;
					}
					else
					{
						return 1200;
					}
				}
			}

#endif // !ECMA_COMPAT

	// UCS4 decoder implementation: carries up to 3 left-over bytes of a
	// partial character between calls via "buffer"/"bufferUsed".
	private sealed class UCS4Decoder : Decoder
	{
		// Internal state.
		private ByteOrder byteOrder;
		private byte[] buffer;
		private int bufferUsed;

		// Constructor.
		public UCS4Decoder(ByteOrder order)
				{
					byteOrder = order;
					buffer = new byte [4];
					bufferUsed = 0;
				}

		// Override inherited methods.
		public override int GetCharCount(byte[] bytes, int index, int count)
				{
					return InternalGetCharCount
						(byteOrder, buffer, bufferUsed, bytes, index, count);
				}
		public override int GetChars(byte[] bytes, int byteIndex,
									 int byteCount, char[] chars,
									 int charIndex)
				{
					return InternalGetChars(byteOrder, buffer, ref bufferUsed,
											bytes, byteIndex, byteCount,
											chars, charIndex);
				}

	} // class UCS4Decoder

}; // class UCS4Encoding

// Wrap the above in I18N-aware classes.

// Code page 12000: UCS-4 little-endian.
public class CP12000 : UCS4Encoding
{
	public CP12000() : base(12000, ByteOrder.Order_4321) {}

}; // class CP12000

public class ENCucs_4 : CP12000
{
	public ENCucs_4() : base() {}

}; // class ENCucs_4

public class ENCucs_4_le : CP12000
{
	public ENCucs_4_le() : base() {}

}; // class ENCucs_4_le

// Code page 12001: UCS-4 big-endian.
public class CP12001 : UCS4Encoding
{
	public CP12001() : base(12001, ByteOrder.Order_1234) {}

}; // class CP12001

public class ENCucs_4_be : CP12001
{
	public ENCucs_4_be() : base() {}

}; // class ENCucs_4_be

}; // namespace I18N.Common
#if MONO
extern alias Mono;
#else
extern alias Xna;
#endif

namespace Mentula.GuiItems.Containers
{
#if MONO
    using Mono::Microsoft.Xna.Framework;
    using Mono::Microsoft.Xna.Framework.Graphics;
    using Mono::Microsoft.Xna.Framework.Input;
#else
    using Xna::Microsoft.Xna.Framework;
    using Xna::Microsoft.Xna.Framework.Graphics;
    using Xna::Microsoft.Xna.Framework.Input;
#endif
    using Core;
    using Core.EventHandlers;
    using Items;
    using System.Linq;
    using static Core.GuiItem;
    using static Utilities;
    using DeJong.Utilities.Logging;

    /// <summary>
    /// A class for grouping <see cref="GuiItems"/>.
    /// </summary>
    /// <typeparam name="T"> The type of <see cref="Game"/> to reference. </typeparam>
    /// <remarks>
    /// This object is used to handle default operations when dealing with GuiItems.
    /// Properties like <see cref="ScreenWidthMiddle"/> are also added for extra support when adding GuiItems.
    /// By default the <see cref="Menu{T}"/> will also suppress refresh calls to controls whilst they are being initialized,
    /// making the process of initialization faster when using a <see cref="Menu{T}"/>.
    /// </remarks>
    /// <example>
    /// In this example a main menu is created.
    /// The two properties are public for use in other menus.
    /// Line 9 makes sure that when the menu becomes visible the mouse will be usable.
    ///
    /// In initialize the default font option is used so we don't have to specify one every time we make a button.
    /// A background is loaded and four buttons are made.
    ///
    /// <code>
    /// public sealed class MainMenu : <![CDATA[Menu<MainGame>]]>
    /// {
    ///     public static readonly Color ButtonBackColor = new Color(150, 150, 130, 150);
    ///     public static readonly int TxtW = 150, TxtH = 25;
    ///
    ///     public MainMenu(MainGame game)
    ///         : base(game)
    ///     {
    ///         VisibleChanged += (s, e) => { if (Visible) { Game.IsMouseVisible = true; } };
    ///     }
    ///
    ///     public override void Initialize()
    ///     {
    ///         SetDefaultFont("MenuFont");
    ///         int txtHM = TxtH >> 1;
    ///
    ///         GuiItem bg = AddGuiItem();
    ///         bg.BackgroundImage = <![CDATA[Game.Content.Load<Texture2D>("Utilities\\MainBackground");]]>
    ///         bg.Enabled = false;
    ///
    ///         Button btnSingleplayer = AddDefButton();
    ///         btnSingleplayer.MoveRelative(Anchor.MiddleWidth, y: ScreenHeightMiddle + txtHM * 4);
    ///         btnSingleplayer.Text = "Singleplayer";
    ///
    ///         Button btnMultiplayer = AddDefButton();
    ///         btnMultiplayer.MoveRelative(Anchor.MiddleWidth, y: ScreenHeightMiddle + txtHM * 8);
    ///         btnMultiplayer.Text = "Multiplayer";
    ///
    ///         Button btnOptions = AddDefButton();
    ///         btnOptions.MoveRelative(Anchor.MiddleWidth, y: ScreenHeightMiddle + txtHM * 12);
    ///         btnOptions.Text = "Options";
    ///
    ///         Button btnQuit = AddButton();
    ///         btnQuit.MoveRelative(Anchor.MiddleWidth, y: ScreenHeightMiddle + txtHM * 16);
    ///         btnQuit.Text = "Quit";
    ///
    ///         btnSingleplayer.LeftClick += (sender, args) => Game.SetState(GameState.SingleplayerMenu);
    ///         btnMultiplayer.LeftClick += (sender, args) => Game.SetState(GameState.MultiplayerMenu);
    ///         btnOptions.LeftClick += (sender, args) => Game.SetState(GameState.OptionsMenu);
    ///         btnQuit.LeftClick += (sender, args) => Game.Exit();
    ///
    ///         base.Initialize();
    ///     }
    ///
    ///     private Button AddDefButton()
    ///     {
    ///         Button result = AddButton();
    ///         result.Width = TxtW;
    ///         result.Height = TxtH;
    ///         result.BackColor = ButtonBackColor;
    ///         return result;
    ///     }
    /// }
    /// </code>
    /// </example>
#if !DEBUG
    [System.Diagnostics.DebuggerStepThrough]
#endif
    public partial class Menu<T> : DrawableMentulaGameComponent<T>
        where T : Game
    {
        /// <summary>
        /// Indicates whether the <see cref="Menu{T}"/> should handle textbox focusing.
        /// Default value = <see langword="true"/>.
        /// </summary>
        protected bool autoFocusTextbox;
        /// <summary>
        /// Indicates whether the <see cref="Menu{T}"/> should handle dropdown focusing.
        /// Default value = <see langword="true"/>.
        /// </summary>
        protected bool autoFocusDropDown;
        /// <summary>
        /// The underlying <see cref="GuiItems"/> of the <see cref="Menu{T}"/>.
        /// </summary>
        protected GuiItemCollection controlls;
        /// <summary>
        /// The default <see cref="SpriteFont"/> to use if none is specified.
        /// </summary>
        protected SpriteFont font;
        /// <summary>
        /// The default <see cref="SpriteBatch"/> to use while drawing the controls.
        /// </summary>
        protected SpriteBatch batch;

        /// <summary>
        /// Initializes a new instance of the <see cref="Menu{T}"/> class.
        /// </summary>
        /// <param name="game"> The game to associate with this <see cref="Menu{T}"/>. </param>
        public Menu(T game)
            : this(game, true)
        { }

        /// <summary>
        /// Initializes a new instance of the <see cref="Menu{T}"/> class.
        /// </summary>
        /// <param name="game"> The game to associate with this <see cref="Menu{T}"/>. </param>
        /// <param name="allowRefreshSuppression">
        /// Whether the <see cref="Menu{T}"/> is allowed to suppress refresh calls
        /// when the controls are being initialized.
        /// </param>
        public Menu(T game, bool allowRefreshSuppression)
            : base(game)
        {
            controlls = new GuiItemCollection(null);
            batch = new SpriteBatch(Game.GraphicsDevice);
            autoFocusTextbox = true;
            autoFocusDropDown = true;
            if (allowRefreshSuppression) suppressRefresh = true;
        }

        /// <summary>
        /// Initializes the <see cref="SpriteBatch"/> used for drawing.
        /// Override to initialize <see cref="GuiItems"/>.
        /// </summary>
        public override void Initialize()
        {
            // Suppression ends here: refresh everything that was added while it was on.
            suppressRefresh = false;
            Log.Info(nameof(Menu<T>), $"Refreshing {controlls.Count} controll(s)");

            for (int i = 0; i < controlls.Count; i++)
            {
                controlls[i].Refresh();
            }
            base.Initialize();
        }

        /// <summary>
        /// Release the unmanaged and managed resources used by the <see cref="Menu{T}"/>.
        /// </summary>
        /// <param name="disposing">
        /// true to release both managed and unmanaged resources; false to release only unmanaged
        /// resources.
        /// </param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                Log.Verbose(nameof(Menu<T>), $"Disposing {controlls.Count} controll(s)");
                for (int i = 0; i < controlls.Count; i++)
                {
                    Log.Verbose(nameof(Menu<T>), $"Disposing: {controlls[i]}");
                    controlls[i].Dispose();
                }
            }
            // The collection is emptied even on the finalizer path.
            controlls.Clear();
            base.Dispose(disposing);
        }

        /// <summary>
        /// Gets a control with a specified <see cref="GuiItem.Name"/> as a specified <see cref="GuiItem"/>.
        /// </summary>
        /// <typeparam name="TControl"> The <see cref="GuiItem"/> to cast to. </typeparam>
        /// <param name="name"> The specified <see cref="GuiItem.Name"/> to search for. </param>
        /// <returns> The <see cref="GuiItem"/> that was found as type TControl; otherwise null. </returns>
        public TControl FindControl<TControl>(string name)
            where TControl : GuiItem
        {
            // Hard cast: throws InvalidCastException if the named control is not a TControl.
            return (TControl)FindControl(name);
        }

        /// <summary>
        /// Gets a control with a specified name.
        /// </summary>
        /// <param name="name"> The specified <see cref="GuiItem.Name"/> to search for. </param>
        /// <returns> The <see cref="GuiItem"/> that was found; otherwise null. </returns>
        public GuiItem FindControl(string name)
        {
            return controlls.FirstOrDefault(c => c.Name == name);
        }

        /// <summary>
        /// Disables and hides the <see cref="Menu{T}"/>.
        /// </summary>
        public void Hide()
        {
            Enabled = false;
            Visible = false;
        }

        /// <summary>
        /// Enables and shows the <see cref="Menu{T}"/>.
        /// </summary>
        public void Show()
        {
            Enabled = true;
            Visible = true;
        }

        /// <summary>
        /// Updates the <see cref="Menu{T}"/> and its controls.
        /// </summary>
        /// <param name="gameTime"> Time elapsed since the last call to Update. </param>
        public override void Update(GameTime gameTime)
        {
            if (Enabled)
            {
                // NOTE(review): mState and kState are read but never used in this
                // partial - confirm whether they can be removed.
                MouseState mState = Mouse.GetState();
                KeyboardState kState = Keyboard.GetState();
                float delta = (float)gameTime.ElapsedGameTime.TotalSeconds;

                for (int i = 0; i < controlls.Count; i++)
                {
                    GuiItem control = controlls[i];
                    // SuppressUpdate is a one-shot flag: consume it and skip this frame.
                    if (control.SuppressUpdate)
                    {
                        control.SuppressUpdate = false;
                        continue;
                    }
                    control.Update(delta);
                }
            }
            base.Update(gameTime);
        }

        /// <summary>
        /// Draws the <see cref="Menu{T}"/> and its controls.
        /// </summary>
        /// <param name="gameTime"> Time elapsed since the last call to Draw. </param>
        public override void Draw(GameTime gameTime)
        {
            if (Visible)
            {
                batch.Begin();
                for (int i = 0; i < controlls.Count; i++)
                {
                    // SuppressDraw is a one-shot flag: consume it and skip this frame.
                    if (controlls[i].SuppressDraw)
                    {
                        controlls[i].SuppressDraw = false;
                        continue;
                    }
                    controlls[i].Draw(batch);
                }
                batch.End();
            }
            base.Draw(gameTime);
        }

        // Gives focus to the clicked textbox and removes it from every other
        // textbox (only when autoFocusTextbox is enabled).
        private void TextBox_Click(GuiItem sender, MouseEventArgs e)
        {
            if (autoFocusTextbox)
            {
                for (int i = 0; i < controlls.Count; i++)
                {
                    TextBox txt = controlls[i] as TextBox;
                    // Controls are matched by Name, so focus follows the clicked control's name.
                    if (txt != null) txt.Focused = txt.Name == sender.Name;
                }
            }
        }

        // When a dropdown becomes visible, hides every other dropdown and
        // refreshes all of them (only when autoFocusDropDown is enabled).
        private void DropDown_VisibilityChanged(GuiItem sender, ValueChangedEventArgs<bool> e)
        {
            if (autoFocusDropDown && e.NewValue)
            {
                for (int i = 0; i < controlls.Count; i++)
                {
                    DropDown dd;
                    if ((dd = controlls[i] as DropDown) != null)
                    {
                        if (dd.Name != sender.Name) dd.Hide();
                        dd.Refresh();
                    }
                }
            }
        }
    }
}
using System;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Design;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using System.Windows.Forms.Design;

namespace SIL.Windows.Forms.SuperToolTip
{
    /// <summary>
    /// Design-time dialog for editing a <see cref="SuperToolTipInfoWrapper"/>:
    /// header/body/footer text, fonts, images and the background gradient.
    /// </summary>
    partial class dlgSuperToolTipEditor : Form
    {
        /// <summary>
        /// A named triple of gradient colors used to populate the "predefined" combo box.
        /// </summary>
        class ColorNameCouple
        {
            private Color cBegin;

            public Color ColorBegin
            {
                get { return cBegin; }
                set { cBegin = value; }
            }

            private Color cMiddle;

            public Color ColorMiddle
            {
                get { return cMiddle; }
                set { cMiddle = value; }
            }

            private Color cEnd;

            public Color ColorEnd
            {
                get { return cEnd; }
                set { cEnd = value; }
            }

            private string name;

            public string Name
            {
                get { return name; }
                set { name = value; }
            }
        }

        protected override void OnLoad(EventArgs e)
        {
            base.OnLoad(e);
            LoadPredefinedColors();
        }

        /// <summary>
        /// Fills the predefined-gradients combo box with the built-in color schemes.
        /// </summary>
        private void LoadPredefinedColors()
        {
            cmbPredefined.DisplayMember = "Name";
            ColorNameCouple cnc;

            #region Office2007
            cnc = new ColorNameCouple();
            cnc.Name = "Office 2007";
            cnc.ColorBegin = Color.FromArgb(255, 255, 255);
            cnc.ColorMiddle = Color.FromArgb(242, 246, 251);
            cnc.ColorEnd = Color.FromArgb(202, 218, 239);
            cmbPredefined.Items.Add(cnc);
            #endregion

            #region Pinkish
            cnc = new ColorNameCouple();
            cnc.Name = "Pinkish";
            cnc.ColorBegin = Color.FromArgb(26, 6, 12);
            cnc.ColorMiddle = Color.FromArgb(148, 39, 72);
            cnc.ColorEnd = Color.FromArgb(238, 176, 193);
            cmbPredefined.Items.Add(cnc);
            #endregion

            #region Grayish
            cnc = new ColorNameCouple();
            cnc.Name = "Grayish";
            cnc.ColorBegin = Color.FromArgb(0, 0, 0);
            cnc.ColorMiddle = Color.FromArgb(112, 112, 112);
            cnc.ColorEnd = Color.FromArgb(245, 245, 245);
            cmbPredefined.Items.Add(cnc);
            #endregion

            #region Blueish
            cnc = new ColorNameCouple();
            cnc.Name = "Blueish";
            cnc.ColorBegin = Color.FromArgb(12, 35, 148);
            cnc.ColorMiddle = Color.FromArgb(63, 94, 239);
            cnc.ColorEnd = Color.FromArgb(245, 245, 245);
            cmbPredefined.Items.Add(cnc);
            #endregion

            #region Greenish
            cnc = new ColorNameCouple();
            cnc.Name = "Greenish";
            cnc.ColorBegin = Color.FromArgb(85, 138, 26);
            cnc.ColorMiddle = Color.FromArgb(205, 215, 21);
            cnc.ColorEnd = Color.FromArgb(68, 71, 7);
            cmbPredefined.Items.Add(cnc);
            #endregion

            #region Dark Green
            cnc = new ColorNameCouple();
            cnc.Name = "Dark Green";
            cnc.ColorBegin = Color.FromArgb(28, 116, 12);
            cnc.ColorMiddle = Color.FromArgb(10, 36, 4);
            cnc.ColorEnd = Color.FromArgb(28, 116, 12);
            cmbPredefined.Items.Add(cnc);
            #endregion

            #region Fuchia
            cnc = new ColorNameCouple();
            cnc.Name = "Fuchia";
            cnc.ColorBegin = Color.FromArgb(202, 23, 166);
            cnc.ColorMiddle = Color.FromArgb(0, 0, 0);
            cnc.ColorEnd = Color.FromArgb(183, 43, 131);
            cmbPredefined.Items.Add(cnc);
            #endregion
        }

        private SuperToolTipInfoWrapper _sttinfo;

        /// <summary>
        /// The tooltip info being edited. Mutated in place as the user changes settings.
        /// </summary>
        public SuperToolTipInfoWrapper SuperToolTipInfoWrapper
        {
            get { return _sttinfo; }
            set { _sttinfo = value; }
        }

        /// <summary>
        /// Creates the editor pre-populated from an existing wrapper and wires the
        /// live preview on the preview button.
        /// </summary>
        public dlgSuperToolTipEditor(SuperToolTipInfoWrapper sttiw) : this()
        {
            this.superToolTip1.SetSuperStuff(btnPreview, sttiw);
            this._sttinfo = sttiw;

            this.chkShowHeaderSeparator.Checked = this._sttinfo.SuperToolTipInfo.ShowHeaderSeparator;
            this.chkShowFooterSeparator.Checked = this._sttinfo.SuperToolTipInfo.ShowFooterSeparator;

            this.picBodyImage.Image = this._sttinfo.SuperToolTipInfo.BodyImage;
            this.txtBody.Text = this._sttinfo.SuperToolTipInfo.BodyText;
            this.txtBody.ForeColor = this._sttinfo.SuperToolTipInfo.BodyForeColor;
            this.txtBody.Font = this._sttinfo.SuperToolTipInfo.BodyFont;

            this.picFooterImage.Image = this._sttinfo.SuperToolTipInfo.FooterImage;
            this.txtFooter.Text = this._sttinfo.SuperToolTipInfo.FooterText;
            this.txtFooter.ForeColor = this._sttinfo.SuperToolTipInfo.FooterForeColor;
            this.txtFooter.Font = this._sttinfo.SuperToolTipInfo.FooterFont;
            this.chkFooter.Checked = this._sttinfo.SuperToolTipInfo.ShowFooter;

            this.txtHeader.Text = this._sttinfo.SuperToolTipInfo.HeaderText;
            this.txtHeader.ForeColor = this._sttinfo.SuperToolTipInfo.HeaderForeColor;
            this.txtHeader.Font = this._sttinfo.SuperToolTipInfo.HeaderFont;
            this.chkHeader.Checked = this._sttinfo.SuperToolTipInfo.ShowHeader;

            this.btnBegin.BackColor = this._sttinfo.SuperToolTipInfo.BackgroundGradientBegin;
            this.btnMiddle.BackColor = this._sttinfo.SuperToolTipInfo.BackgroundGradientMiddle;
            this.btnEnd.BackColor = this._sttinfo.SuperToolTipInfo.BackgroundGradientEnd;

            this.grpHeader.Enabled = this._sttinfo.SuperToolTipInfo.ShowHeader;
            this.grpFooter.Enabled = this._sttinfo.SuperToolTipInfo.ShowFooter;
        }

        public dlgSuperToolTipEditor()
        {
            InitializeComponent();
            // The font buttons carry their target text box in Tag; see GetFont.
            btnHeaderFont.Tag = txtHeader;
            btnBodyFont.Tag = txtBody;
            btnFooterFont.Tag = txtFooter;
        }

        /// <summary>
        /// Shared click handler for the three gradient color buttons: picks a color,
        /// stores it on the button and in the matching gradient slot, and repaints
        /// the preview panel.
        /// </summary>
        private void GetColor(object sender, EventArgs e)
        {
            dlgColorPicker.Color = ((Button)sender).BackColor;
            if (dlgColorPicker.ShowDialog() == DialogResult.OK)
            {
                ((Button)sender).BackColor = dlgColorPicker.Color;
                pnlBackColorPreview.Invalidate();
                switch (((Button)sender).Name)
                {
                    case "btnBegin":
                        this._sttinfo.SuperToolTipInfo.BackgroundGradientBegin = dlgColorPicker.Color;
                        break;
                    case "btnMiddle":
                        this._sttinfo.SuperToolTipInfo.BackgroundGradientMiddle = dlgColorPicker.Color;
                        break;
                    case "btnEnd":
                        this._sttinfo.SuperToolTipInfo.BackgroundGradientEnd = dlgColorPicker.Color;
                        break;
                }
            }
        }

        /// <summary>
        /// Shared click handler for the three font buttons: applies the chosen font
        /// and color to the text box stored in the button's Tag.
        /// </summary>
        private void GetFont(object sender, EventArgs e)
        {
            RichTextBox txt = ((Button)sender).Tag as RichTextBox;
            if (dlgFontPicker.ShowDialog() == DialogResult.OK)
            {
                txt.ForeColor = dlgFontPicker.Color;
                txt.Font = dlgFontPicker.Font;
                OnTextSettingsChanged(txt, EventArgs.Empty);
            }
        }

        private void pnlBackColorPreview_Paint(object sender, PaintEventArgs e)
        {
            MakeColorPreview(e.Graphics);
        }

        /// <summary>
        /// Paints the two-stage vertical gradient preview: Begin→Middle on the top
        /// half, Middle→End on the bottom half.
        /// </summary>
        private void MakeColorPreview(Graphics graphics)
        {
            Rectangle rect1 = new Rectangle(0, pnlBackColorPreview.Height / 2, pnlBackColorPreview.Width, pnlBackColorPreview.Height / 2);
            Rectangle rect2 = new Rectangle(0, 0, pnlBackColorPreview.Width, pnlBackColorPreview.Height / 2);
            // The bottom brush starts one pixel higher than its fill rectangle to
            // avoid a visible seam between the two gradients.
            using (LinearGradientBrush b2 = new LinearGradientBrush(
                new Rectangle(0, pnlBackColorPreview.Height / 2 - 1, pnlBackColorPreview.Width, pnlBackColorPreview.Height / 2),
                _sttinfo.SuperToolTipInfo.BackgroundGradientMiddle,
                _sttinfo.SuperToolTipInfo.BackgroundGradientEnd,
                LinearGradientMode.Vertical))
            using (LinearGradientBrush b1 = new LinearGradientBrush(
                new Rectangle(0, 0, pnlBackColorPreview.Width, pnlBackColorPreview.Height / 2),
                _sttinfo.SuperToolTipInfo.BackgroundGradientBegin,
                _sttinfo.SuperToolTipInfo.BackgroundGradientMiddle,
                LinearGradientMode.Vertical))
            {
                graphics.FillRectangle(b2, rect1);
                graphics.FillRectangle(b1, rect2);
            }
        }

        // Toggles between the "custom" and "predefined" gradient groups.
        private void OnBackgroundColorSettingChanged(object sender, EventArgs e)
        {
            grpCustom.Enabled = rdbtnCustom.Checked;
            grpPredefined.Enabled = rdbtnPredefined.Checked;
        }

        private void btnClearBodyImage_Click(object sender, EventArgs e)
        {
            picBodyImage.Image = null;
            _sttinfo.SuperToolTipInfo.BodyImage = null;
        }

        private void btnBrowseBodyImage_Click(object sender, EventArgs e)
        {
            if (dlgImagePicker.ShowDialog() == DialogResult.OK)
            {
                picBodyImage.Image = Bitmap.FromFile(dlgImagePicker.FileName);
                _sttinfo.SuperToolTipInfo.BodyImage = picBodyImage.Image as Bitmap;
            }
        }

        /// <summary>
        /// Pushes text, font and color changes from one of the three rich text boxes
        /// back into the tooltip info, dispatched on the control's name.
        /// </summary>
        private void OnTextSettingsChanged(object sender, EventArgs e)
        {
            RichTextBox txt = (RichTextBox)sender;
            switch (txt.Name)
            {
                case "txtHeader":
                    _sttinfo.SuperToolTipInfo.HeaderFont = txt.Font;
                    _sttinfo.SuperToolTipInfo.HeaderForeColor = txt.ForeColor;
                    _sttinfo.SuperToolTipInfo.HeaderText = txt.Text;
                    break;
                case "txtBody":
                    // Normalized to use the local 'txt' like the other cases
                    // (identical behavior: txt is txtBody in this branch).
                    _sttinfo.SuperToolTipInfo.BodyFont = txt.Font;
                    _sttinfo.SuperToolTipInfo.BodyForeColor = txt.ForeColor;
                    _sttinfo.SuperToolTipInfo.BodyText = txt.Text;
                    break;
                case "txtFooter":
                    _sttinfo.SuperToolTipInfo.FooterFont = txt.Font;
                    _sttinfo.SuperToolTipInfo.FooterForeColor = txt.ForeColor;
                    _sttinfo.SuperToolTipInfo.FooterText = txt.Text;
                    break;
            }
        }

        // Applies a predefined gradient to both the info object and the custom buttons.
        private void cmbPredefined_SelectedIndexChanged(object sender, EventArgs e)
        {
            ColorNameCouple cn = cmbPredefined.SelectedItem as ColorNameCouple;
            _sttinfo.SuperToolTipInfo.BackgroundGradientBegin = cn.ColorBegin;
            _sttinfo.SuperToolTipInfo.BackgroundGradientMiddle = cn.ColorMiddle;
            _sttinfo.SuperToolTipInfo.BackgroundGradientEnd = cn.ColorEnd;
            btnBegin.BackColor = cn.ColorBegin;
            btnMiddle.BackColor = cn.ColorMiddle;
            btnEnd.BackColor = cn.ColorEnd;
            pnlBackColorPreview.Invalidate();
        }

        private void btnBrowseFooterImage_Click(object sender, EventArgs e)
        {
            if (dlgImagePicker.ShowDialog() == DialogResult.OK)
            {
                picFooterImage.Image = Bitmap.FromFile(dlgImagePicker.FileName);
                _sttinfo.SuperToolTipInfo.FooterImage = picFooterImage.Image as Bitmap;
            }
        }

        private void btnClearFooterImage_Click(object sender, EventArgs e)
        {
            picFooterImage.Image = null;
            // FIX: this previously cleared BodyImage (copy/paste from the body
            // handler); clearing the footer image must reset FooterImage.
            _sttinfo.SuperToolTipInfo.FooterImage = null;
        }

        private void chkShowFooterSeparator_CheckedChanged(object sender, EventArgs e)
        {
            _sttinfo.SuperToolTipInfo.ShowFooterSeparator = chkShowFooterSeparator.Checked;
        }

        private void chkShowHeaderSeparator_CheckedChanged(object sender, EventArgs e)
        {
            _sttinfo.SuperToolTipInfo.ShowHeaderSeparator = chkShowHeaderSeparator.Checked;
        }

        private void chkFooter_CheckedChanged(object sender, EventArgs e)
        {
            _sttinfo.SuperToolTipInfo.ShowFooter = chkFooter.Checked;
            grpFooter.Enabled = chkFooter.Checked;
        }

        private void chkHeader_CheckedChanged(object sender, EventArgs e)
        {
            _sttinfo.SuperToolTipInfo.ShowHeader = chkHeader.Checked;
            grpHeader.Enabled = chkHeader.Checked;
        }
    }

    /// <summary>
    /// <see cref="UITypeEditor"/> that opens <see cref="dlgSuperToolTipEditor"/> as a
    /// modal dialog from the property grid.
    /// </summary>
    public class SuperToolTipEditor : UITypeEditor
    {
        public override UITypeEditorEditStyle GetEditStyle(ITypeDescriptorContext context)
        {
            return UITypeEditorEditStyle.Modal;
        }

        public override object EditValue(ITypeDescriptorContext context, IServiceProvider provider, object value)
        {
            // FIX: per the UITypeEditor contract, return the incoming value unchanged
            // when editing cannot proceed (the original returned null here, which
            // discarded the existing property value at design time).
            if (provider == null)
            {
                return value;
            }
            IWindowsFormsEditorService iwefs =
                (IWindowsFormsEditorService)provider.GetService(typeof(IWindowsFormsEditorService));
            if (iwefs == null)
            {
                return value;
            }
            SuperToolTipInfoWrapper info = (SuperToolTipInfoWrapper)value;
            dlgSuperToolTipEditor dlg = new dlgSuperToolTipEditor(info);
            if (iwefs.ShowDialog(dlg) == DialogResult.OK)
            {
                return dlg.SuperToolTipInfoWrapper;
            }
            return value;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Xml.Serialization;
using Anthem;
using BVSoftware.Bvc5.Core.Catalog;
using BVSoftware.Bvc5.Core.Contacts;
using BVSoftware.Bvc5.Core.Content;
using BVSoftware.Bvc5.Core.Orders;
using BVSoftware.Bvc5.Core.Shipping;
using StructuredSolutions.Bvc5.Shipping;
using StructuredSolutions.Bvc5.Shipping.PostalCodes;
using StructuredSolutions.Bvc5.Shipping.Providers.Controls;
using StructuredSolutions.Bvc5.Shipping.Providers.Settings;
using ASPNET = System.Web.UI.WebControls;
using DropDownList = System.Web.UI.WebControls.DropDownList;
using GridView = System.Web.UI.WebControls.GridView;
using Label = System.Web.UI.WebControls.Label;
using ListItem = System.Web.UI.WebControls.ListItem;
using TextBox = System.Web.UI.WebControls.TextBox;

/// <summary>
/// Admin editor for package shipping-cost rules: grid-based CRUD and ordering of
/// <see cref="PackageRule"/>s, import/export of rule files, and a "sample costs"
/// preview that replays the rules against previously placed orders.
/// </summary>
public partial class BVModules_Shipping_Package_Rules_ShippingCostRules : UserControl
{
    #region Overrides

    /// <summary>
    /// Add script to the page that hides the import validation message when
    /// the dialog is closed.
    /// Also add script to perform client-side validation of the file import
    /// field (built in RequiredFieldValidator does not work).
    /// </summary>
    /// <param name="e"></param>
    protected override void OnPreRender(EventArgs e)
    {
        base.OnPreRender(e);

        // NOTE(review): the script text doubles as the registration key, so each
        // distinct script is registered at most once per page.
        String script = String.Format(
            @"function hideImportCostDialogValidation(type, args) {{ el = document.getElementById('{0}'); if (typeof(el) != 'undefined' && el != null) {{ el.style.display = 'none'; }} }} ",
            ImportCostUploadRequired.ClientID);
        Page.ClientScript.RegisterStartupScript(GetType(), script, script, true);

        script = String.Format(
            @"function validateImportCostUpload(source, args) {{ el = document.getElementById('{0}'); if (typeof(el) != 'undefined' && el != null) {{ args.IsValid = (el.value != null && el.value.length > 0); el = document.getElementById(source.id); el.style.display = args.IsValid ? 'none' : 'inline'; }} }} ",
            ImportCostUpload.ClientID);
        Page.ClientScript.RegisterStartupScript(GetType(), script, script, true);
    }

    #endregion

    #region Properties

    /// <summary>
    /// The provider settings owned by the enclosing <see cref="PackageRulesEditor"/>.
    /// </summary>
    protected PackageRuleProviderSettings Settings
    {
        get { return ((PackageRulesEditor) NamingContainer).Settings; }
    }

    #endregion

    #region Event Handlers

    /// <summary>
    /// Streams the current costing rules to the browser as a .rules XML download.
    /// </summary>
    protected void Export_Click(object sender, EventArgs e)
    {
        Response.Clear();
        Response.ContentType = "text/xml";
        Response.AddHeader("Content-Disposition", "attachment;filename=PackageCost.rules");
        XmlSerializer serializer = new XmlSerializer(typeof (PackageRuleList));
        serializer.Serialize(Response.OutputStream, Settings.CostingRules);
        Response.End();
    }

    /// <summary>
    /// Replaces all costing rules with the contents of an uploaded rules file.
    /// A PackageRule[] serializer reads the PackageRuleList export because both
    /// share the same XML root element name.
    /// </summary>
    protected void Import_Click(object sender, EventArgs e)
    {
        if (Page.IsValid)
        {
            XmlSerializer serializer = new XmlSerializer(typeof (PackageRule[]));
            try
            {
                PackageRule[] rules = serializer.Deserialize(ImportCostUpload.FileContent) as PackageRule[];
                if (rules != null)
                {
                    Settings.CostingRules.Clear();
                    foreach (PackageRule rule in rules)
                        Settings.CostingRules.Add(rule);
                    Settings.CostingRules.Save();
                    DataBind();
                }
            }
            catch
            {
                // Any parse/deserialization failure is reported back through the
                // upload validator and the dialog is re-shown.
                ImportCostUploadRequired.Text = "<br />Invalid file format<br />";
                ImportCostUploadRequired.IsValid = false;
                RedisplayImportCostDialog();
            }
        }
    }

    /// <summary>
    /// Server-side check that a rules file was actually selected for import.
    /// </summary>
    protected void ImportCostUploadRequired_ServerValidate(object source, ServerValidateEventArgs e)
    {
        e.IsValid = ImportCostUpload.HasFile;
        if (!e.IsValid)
        {
            ImportCostUploadRequired.Text = "<br />Please select a rules file<br />";
            RedisplayImportCostDialog();
        }
    }

    protected void RuleDataSource_Deleting(object sender, ObjectDataSourceMethodEventArgs e)
    {
        e.InputParameters["settings"] = Settings;
    }

    protected void RuleDataSource_Selecting(object sender, ObjectDataSourceSelectingEventArgs e)
    {
        e.InputParameters["settings"] = Settings;
    }

    protected void RuleDataSource_Updating(object sender, ObjectDataSourceMethodEventArgs e)
    {
        if (Page.IsValid)
        {
            e.InputParameters["settings"] = Settings;
            // "matches" is stashed on Page.Items by the Update branch of Rules_RowCommand.
            e.InputParameters["matches"] = Page.Items["matches"];
        }
        else
        {
            e.Cancel = true;
        }
    }

    /// <summary>
    /// Handles the grid's custom commands: New (insert before the clicked rule),
    /// MoveUp/MoveDown (reorder; the last rule is the pinned default), Update
    /// (capture matches and warn about Distance without postal data) and View
    /// (build the sample-costs preview grid).
    /// </summary>
    protected void Rules_RowCommand(object sender, GridViewCommandEventArgs e)
    {
        if (e.CommandName == "New")
        {
            String id = e.CommandArgument.ToString();
            Int32 index = Settings.CostingRules.IndexOf(id);
            PackageRule rule = new PackageRule();
            rule.Matches.Add(new PackageMatch());
            Settings.CostingRules.Insert(index, rule);
            Settings.CostingRules.Save();
            Rules.DataBind();
            Rules.EditIndex = index;
        }
        else if (e.CommandName == "MoveUp")
        {
            String id = e.CommandArgument.ToString();
            Int32 index = Settings.CostingRules.IndexOf(id);
            if (index > 0)
            {
                PackageRule rule = Settings.CostingRules[index];
                Settings.CostingRules.RemoveAt(index);
                Settings.CostingRules.Insert(index - 1, rule);
                Settings.CostingRules.Save();
                Rules.DataBind();
            }
        }
        else if (e.CommandName == "MoveDown")
        {
            String id = e.CommandArgument.ToString();
            Int32 index = Settings.CostingRules.IndexOf(id);
            // Count - 1 is the pinned default rule, so the lowest movable target
            // position is Count - 2.
            if (index < (Settings.CostingRules.Count - 2))
            {
                PackageRule rule = Settings.CostingRules[index];
                Settings.CostingRules.RemoveAt(index);
                Settings.CostingRules.Insert(index + 1, rule);
                Settings.CostingRules.Save();
                Rules.DataBind();
            }
        }
        else if (e.CommandName == "Update")
        {
            if (Page.IsValid)
            {
                GridViewRow row = Rules.Rows[Rules.EditIndex];
                if (row != null)
                {
                    DropDownList packagePropertyList =
                        (DropDownList) row.FindControl("ValuePackagePropertyField");
                    PackageProperties packageProperty =
                        (PackageProperties) Enum.Parse(typeof (PackageProperties), packagePropertyList.SelectedValue);
                    if (packageProperty == PackageProperties.Distance)
                    {
                        // FIX: warn when postal data is NOT installed; the original
                        // condition was inverted and only warned when data existed.
                        if (!PostalCode.IsPostalDataInstalled())
                        {
                            Manager.AddScriptForClientSideEval(
                                "alert('No postal code data has been installed. The Distance property will always return -1.');");
                        }
                    }
                    else
                    {
                        PackageMatchList matches =
                            ((BVModules_Shipping_Package_Rules_PackageMatchEditor) row.FindControl("PackageMatchEditor"))
                                .GetMatches();
                        foreach (PackageMatch match in matches)
                        {
                            if (match.PackageProperty == PackageProperties.Distance)
                            {
                                // FIX: same inverted check as above.
                                if (!PostalCode.IsPostalDataInstalled())
                                {
                                    Manager.AddScriptForClientSideEval(
                                        "alert('No postal code data has been installed. The Distance property will always return -1.');");
                                    break;
                                }
                            }
                        }
                    }
                    // Hand the edited matches to RuleDataSource_Updating via Page.Items.
                    Page.Items["matches"] =
                        ((BVModules_Shipping_Package_Rules_PackageMatchEditor) row.FindControl("PackageMatchEditor")).
                            GetMatches();
                }
            }
        }
        else if (e.CommandName == "View")
        {
            if (Page.IsValid)
            {
                GridViewRow row = Rules.Rows[Rules.EditIndex];
                if (row != null)
                {
                    GridView grid = row.FindControl("SampleShippingCosts") as GridView;
                    if (grid != null)
                    {
                        Int32 count;
                        grid.Visible = true;
                        // Cache the sample data in session so paging can rebind it.
                        Session["SampleShippingCostsData"] = GetSampleOrders(row, out count);
                        grid.DataSource = Session["SampleShippingCostsData"];
                        grid.DataBind();
                        if (count > grid.PageSize * 5)
                        {
                            grid.Caption = string.Format("{0}+ matching packages", grid.PageSize * 5);
                        }
                        else
                        {
                            grid.Caption = string.Format("{0} matching package", count);
                            if (count == 0 || count > 1) grid.Caption += "s";
                        }
                    }
                }
            }
        }
    }

    /// <summary>
    /// Per-row setup: hides move/delete buttons where they do not apply (first row,
    /// last two rows, pinned default rule) and, for the row in edit mode, populates
    /// the property drop-downs and the value/custom-property fields.
    /// </summary>
    protected void Rules_RowCreated(object sender, GridViewRowEventArgs e)
    {
        if (e.Row.RowType == DataControlRowType.DataRow)
        {
            String id = Rules.DataKeys[e.Row.RowIndex].Value.ToString();
            PackageRule rule = Settings.CostingRules[id];
            Int32 rowIndex = (Rules.PageIndex * Rules.PageSize) + e.Row.RowIndex;
            if (e.Row.RowState == DataControlRowState.Normal ||
                e.Row.RowState == DataControlRowState.Alternate)
            {
                if (rowIndex == Settings.CostingRules.Count - 1)
                {
                    // The default rule cannot be deleted or moved.
                    e.Row.FindControl("DeleteRow").Visible = false;
                    e.Row.FindControl("MoveRuleUp").Visible = false;
                    e.Row.FindControl("MoveRuleDown").Visible = false;
                }
                if (rowIndex == Settings.CostingRules.Count - 2)
                {
                    e.Row.FindControl("MoveRuleDown").Visible = false;
                }
                if (rowIndex == 0)
                {
                    e.Row.FindControl("MoveRuleUp").Visible = false;
                }
            }
            else if ((e.Row.RowState & DataControlRowState.Edit) == DataControlRowState.Edit)
            {
                DropDownList packagePropertyList = (DropDownList) e.Row.FindControl("ValuePackagePropertyField");
                DropDownList itemPropertyList = (DropDownList) e.Row.FindControl("ValueItemPropertyField");
                DropDownList customPropertyList = (DropDownList) e.Row.FindControl("ValueCustomPropertyField");
                HelpLabel customPropertyLabel = (HelpLabel) e.Row.FindControl("ValueCustomPropertyLabel");
                Label multiplierLabel = (Label) e.Row.FindControl("ValueMultiplierLabel");
                HelpLabel valueLabel = (HelpLabel) e.Row.FindControl("ValueLabel");
                TextBox valueField = (TextBox) e.Row.FindControl("ValueField");
                BaseValidator valueRequired = (BaseValidator) e.Row.FindControl("ValueRequired");
                BaseValidator valueNumeric = (BaseValidator) e.Row.FindControl("ValueNumeric");

                packagePropertyList.Items.Clear();
                packagePropertyList.Items.AddRange(GetPackageProperties());
                itemPropertyList.Items.Clear();
                itemPropertyList.Items.AddRange(GetItemProperties());
                itemPropertyList.Visible = false;
                if (rule.ValuePackageProperty == PackageProperties.ItemProperty)
                {
                    itemPropertyList.Visible = true;
                    PrepareCustomPropertyField(customPropertyLabel, customPropertyList, rule.ValueItemProperty);
                    PrepareValueField(multiplierLabel, valueLabel, valueField, valueRequired, valueNumeric,
                                      rule.ValueItemProperty);
                }
                else
                {
                    PrepareCustomPropertyField(customPropertyLabel, customPropertyList, rule.ValuePackageProperty);
                    PrepareValueField(multiplierLabel, valueLabel, valueField, valueRequired, valueNumeric,
                                      rule.ValuePackageProperty);
                }
                // Guarantee the list can echo the rule's stored custom property, and
                // fall back to the first entry when the stored value is no longer valid.
                if (customPropertyList.Items.Count == 0)
                    customPropertyList.Items.Add(new ListItem("", rule.ValueCustomProperty));
                if (customPropertyList.Items.FindByValue(rule.ValueCustomProperty) == null)
                    rule.ValueCustomProperty = customPropertyList.Items[0].Value;
            }
        }
    }

    /// <summary>
    /// Pages the sample-costs preview grid using the session-cached data.
    /// </summary>
    protected void SampleShippingCosts_PageIndexChanging(object sender, GridViewPageEventArgs e)
    {
        GridView grid = (GridView) sender;
        grid.DataSource = Session["SampleShippingCostsData"];
        grid.PageIndex = e.NewPageIndex;
        grid.DataBind();
    }

    /// <summary>
    /// Reconfigures the custom-property and value fields when the item property changes.
    /// </summary>
    protected void ValueItemPropertyField_SelectedIndexChanged(object sender, EventArgs e)
    {
        if (sender != null)
        {
            DropDownList itemPropertyField = (DropDownList) sender;
            ItemProperties itemProperty =
                (ItemProperties) Enum.Parse(typeof (ItemProperties), itemPropertyField.SelectedValue);
            DropDownList customPropertyField =
                (DropDownList) itemPropertyField.NamingContainer.FindControl("ValueCustomPropertyField");
            HelpLabel customPropertyLabel =
                (HelpLabel) itemPropertyField.NamingContainer.FindControl("ValueCustomPropertyLabel");
            Label multiplierLabel =
                (Label) itemPropertyField.NamingContainer.FindControl("ValueMultiplierLabel");
            HelpLabel valueLabel =
                (HelpLabel) itemPropertyField.NamingContainer.FindControl("ValueLabel");
            TextBox valueField =
                (TextBox) itemPropertyField.NamingContainer.FindControl("ValueField");
            BaseValidator valueRequired =
                (BaseValidator) itemPropertyField.NamingContainer.FindControl("ValueRequired");
            BaseValidator valueNumeric =
                (BaseValidator) itemPropertyField.NamingContainer.FindControl("ValueNumeric");
            PrepareCustomPropertyField(customPropertyLabel, customPropertyField, itemProperty);
            PrepareValueField(multiplierLabel, valueLabel, valueField, valueRequired, valueNumeric, itemProperty);
        }
    }

    /// <summary>
    /// Reconfigures the dependent fields when the package property changes; shows
    /// the item-property list only for PackageProperties.ItemProperty.
    /// </summary>
    protected void ValuePackagePropertyField_SelectedIndexChanged(object sender, EventArgs e)
    {
        if (sender != null)
        {
            DropDownList packagePropertyField = (DropDownList) sender;
            PackageProperties packageProperty =
                (PackageProperties) Enum.Parse(typeof (PackageProperties), packagePropertyField.SelectedValue);
            DropDownList itemPropertyField =
                (DropDownList) packagePropertyField.NamingContainer.FindControl("ValueItemPropertyField");
            ItemProperties itemProperty =
                (ItemProperties) Enum.Parse(typeof (ItemProperties), itemPropertyField.SelectedValue);
            DropDownList customPropertyField =
                (DropDownList) packagePropertyField.NamingContainer.FindControl("ValueCustomPropertyField");
            HelpLabel customPropertyLabel =
                (HelpLabel) packagePropertyField.NamingContainer.FindControl("ValueCustomPropertyLabel");
            Label multiplierLabel =
                (Label) packagePropertyField.NamingContainer.FindControl("ValueMultiplierLabel");
            HelpLabel valueLabel =
                (HelpLabel) packagePropertyField.NamingContainer.FindControl("ValueLabel");
            TextBox valueField =
                (TextBox) packagePropertyField.NamingContainer.FindControl("ValueField");
            BaseValidator valueRequired =
                (BaseValidator) packagePropertyField.NamingContainer.FindControl("ValueRequired");
            BaseValidator valueNumeric =
                (BaseValidator) packagePropertyField.NamingContainer.FindControl("ValueNumeric");
            if (packageProperty == PackageProperties.ItemProperty)
            {
                itemPropertyField.Visible = true;
                PrepareCustomPropertyField(customPropertyLabel, customPropertyField, itemProperty);
                PrepareValueField(multiplierLabel, valueLabel, valueField, valueRequired, valueNumeric, itemProperty);
            }
            else
            {
                itemPropertyField.Visible = false;
                customPropertyField.Visible = false;
                PrepareValueField(multiplierLabel, valueLabel, valueField, valueRequired, valueNumeric, packageProperty);
                // Validate before repopulating the custom list (preserved ordering
                // from the original implementation).
                Page.Validate("RuleGroup");
                if (Page.IsValid)
                {
                    PrepareCustomPropertyField(customPropertyLabel, customPropertyField, packageProperty);
                }
            }
        }
    }

    #endregion

    #region Methods

    // Property names excluded from the various pick lists.
    private readonly Regex hiddenAddressProperties =
        new Regex("bvin|lastupdated", RegexOptions.IgnoreCase | RegexOptions.Compiled);

    private readonly Regex hiddenItemProperties =
        new Regex("FixedAmountOne", RegexOptions.IgnoreCase | RegexOptions.Compiled);

    // Built-in shipping providers that cannot compute per-package costs.
    private readonly Regex hiddenOrderProviders =
        new Regex("By Item Count|62219D9E-83BF-4c30-BC6B-7FB8650B27D6|By Order Total|C6F61920-1FBF-4bf4-8F17-8859EC719ECB|By Weight|0ECAEE72-19C8-427f-A301-DFE042EB2A28|Per Item|41B590A7-003C-48d1-8446-EAE93C156AA1|Per Order|C7321896-3F3C-47d0-B535-B1510AC75EBD|Order Rules|E3005436-C8BA-4D19-ACB4-A9391342E1A7",
                  RegexOptions.IgnoreCase | RegexOptions.Compiled);

    private readonly Regex hiddenVendorManufacturerProperties =
        new Regex("address|bvin|lastupdated|dropshipemailtemplateid", RegexOptions.IgnoreCase | RegexOptions.Compiled);

    // Public, non-hidden Address properties as sorted list items.
    private ListItem[] GetAddressProperties()
    {
        List<ListItem> properties = new List<ListItem>();
        foreach (PropertyInfo property in typeof (Address).GetProperties(BindingFlags.Instance | BindingFlags.Public))
        {
            if (!hiddenAddressProperties.IsMatch(property.Name))
                properties.Add(new ListItem(property.Name, property.Name));
        }
        properties.Sort(delegate(ListItem item1, ListItem item2) { return string.Compare(item1.Text, item2.Text); });
        return properties.ToArray();
    }

    // Non-hidden, non-alphabetic ItemProperties values as list items.
    private ListItem[] GetItemProperties()
    {
        List<ListItem> properties = new List<ListItem>();
        foreach (ItemProperties property in Enum.GetValues(typeof (ItemProperties)))
        {
            if (!hiddenItemProperties.IsMatch(property.ToString()))
            {
                if (ItemPropertiesHelper.GetPropertyType(property) != PropertyTypes.Alphabetic)
                {
                    properties.Add(new ListItem(ItemPropertiesHelper.GetDisplayName(property), property.ToString()));
                }
            }
        }
        return properties.ToArray();
    }

    // Non-alphabetic PackageProperties values as list items.
    private static ListItem[] GetPackageProperties()
    {
        List<ListItem> properties = new List<ListItem>();
        foreach (PackageProperties property in Enum.GetValues(typeof (PackageProperties)))
            if (PackagePropertiesHelper.GetPropertyType(property) != PropertyTypes.Alphabetic)
            {
                properties.Add(new ListItem(PackagePropertiesHelper.GetDisplayName(property), property.ToString()));
            }
        return properties.ToArray();
    }

    // Text/currency custom product properties; "- n/a -" placeholder when none exist.
    private static ListItem[] GetPropertyTypes()
    {
        List<ListItem> propertyTypes = new List<ListItem>();
        Collection<ProductProperty> properties = ProductProperty.FindAll();
        foreach (ProductProperty property in properties)
        {
            if (property.TypeCode == ProductPropertyType.TextField ||
                property.TypeCode == ProductPropertyType.CurrencyField)
            {
                propertyTypes.Add(new ListItem(property.DisplayName, property.Bvin));
            }
        }
        if (propertyTypes.Count == 0)
        {
            propertyTypes.Add(new ListItem("- n/a -", string.Empty));
        }
        return propertyTypes.ToArray();
    }

    /// <summary>
    /// Human-readable description of a rule, used from the grid markup.
    /// </summary>
    protected String GetRuleAsString(String id)
    {
        PackageRule rule = Settings.CostingRules[id];
        return rule.ToString();
    }

    /// <summary>
    /// Builds preview rows by replaying the rule currently being edited against all
    /// placed orders. Orders containing deleted or non-shipping items are skipped.
    /// Stops counting a group scan once 5 pages of results have been collected.
    /// </summary>
    private List<SampleOrderResult> GetSampleOrders(GridViewRow row, out int count)
    {
        GridView grid = (GridView) row.FindControl("SampleShippingCosts");
        PackageRule rule = new PackageRule(Rules.DataKeys[row.RowIndex].Value.ToString());
        rule.Matches.AddRange(
            ((BVModules_Shipping_Package_Rules_PackageMatchEditor) row.FindControl("PackageMatchEditor")).GetMatches());
        rule.Value = Decimal.Parse(((TextBox) row.FindControl("ValueField")).Text);
        rule.ValueCustomProperty = ((DropDownList) row.FindControl("ValueCustomPropertyField")).SelectedValue;
        rule.ValueItemPropertyAsString = ((DropDownList) row.FindControl("ValueItemPropertyField")).SelectedValue;
        rule.ValuePropertyAsString = ((DropDownList) row.FindControl("ValuePackagePropertyField")).SelectedValue;

        count = 0;

        // Scan all placed orders
        List<SampleOrderResult> results = new List<SampleOrderResult>();
        foreach (Order order in Order.FindByCriteria(new OrderSearchCriteria()))
        {
            Order heavyOrder = Order.FindByBvin(order.Bvin);

            // "Unship" all of the items so that the samples look like they
            // were just placed. Skip any orders with deleted items.
            bool skipOrder = false;
            foreach (LineItem lineitem in heavyOrder.Items)
            {
                if (lineitem.AssociatedProduct == null ||
                    lineitem.AssociatedProduct.ShippingMode == ShippingMode.None)
                    skipOrder = true;
                else
                    lineitem.QuantityShipped = 0;
            }
            // FIX: skip just this order; the original 'break' aborted the whole
            // scan as soon as one order contained a deleted/non-shipping item.
            if (skipOrder) continue;

            int groupNumber = 0;
            foreach (ExtendedShippingGroup group in Settings.PackagingRules.Apply(heavyOrder.GetShippingGroups()))
            {
                groupNumber += 1;
                if (rule.IsMatch(group))
                {
                    count += 1;
                    if (count > grid.PageSize * 5) break;

                    SampleOrderResult result = new SampleOrderResult();
                    result.OrderNumber = order.OrderNumber;
                    result.OrderDisplay = string.Format("<a href=\"{0}\" target=\"order\">{1}</a> / {2}",
                                                        Page.ResolveUrl(
                                                            string.Format("~/BVAdmin/Orders/ViewOrder.aspx?id={0}",
                                                                          order.Bvin)),
                                                        order.OrderNumber,
                                                        string.IsNullOrEmpty(group.Name) ? "(default)" : group.Name);

                    List<string> matchValues = new List<string>();
                    List<string> limitValues = new List<string>();
                    if (rule.IsDefaultRule)
                    {
                        matchValues.Add("n/a");
                        limitValues.Add("n/a");
                    }
                    else
                    {
                        for (int index = 0; index < rule.Matches.Count; index++)
                        {
                            PackageMatch match = rule.Matches[index];

                            string matchValue =
                                PackagePropertiesHelper.GetPackagePropertyValue(group, match.PackageProperty,
                                                                                match.ItemProperty,
                                                                                match.CustomProperty, "1").ToString();
                            if (string.IsNullOrEmpty(matchValue)) matchValue = "(empty)";
                            matchValues.Add(matchValue);

                            string limitValue =
                                PackagePropertiesHelper.GetPackagePropertyValue(group, match.LimitPackageProperty,
                                                                                match.LimitItemProperty,
                                                                                match.LimitCustomProperty, match.Limit).
                                    ToString();
                            if (string.IsNullOrEmpty(limitValue)) limitValue = "(empty)";
                            limitValues.Add(limitValue);
                        }
                    }
                    result.MatchValues = string.Join(", ", matchValues.ToArray());
                    result.LimitValues = string.Join(", ", limitValues.ToArray());

                    object value =
                        PackagePropertiesHelper.GetPackagePropertyValue(group, rule.ValuePackageProperty,
                                                                        rule.ValueItemProperty,
                                                                        rule.ValueCustomProperty, "1");
                    result.Value = value == null ? "n/a" : value.ToString();
                    if (String.IsNullOrEmpty(result.Value)) result.Value = "(empty)";

                    ShippingRate rate = new ShippingRate(((PackageRulesEditor) NamingContainer).NameFieldText,
                                                         string.Empty, string.Empty, 0, string.Empty);
                    decimal? cost = rule.GetCost(group);
                    if (cost.HasValue)
                    {
                        rate.Rate = cost.Value;
                        result.RateDisplay = rate.RateAndNameForDisplay;
                    }
                    else
                    {
                        // A null cost means the rule hides this rate from the customer.
                        result.RateDisplay = "Hidden";
                    }

                    results.Add(result);
                }
            }
        }
        results.Sort();
        return results;
    }

    /// <summary>
    /// Return a list of shipping methods that use a ShippingProvider that implements
    /// the GetFilteredRates(Collection&lt;ShippingGroup&gt; g, ShippingMethod m) method.
    /// </summary>
    private ListItem[] GetShippingMethods()
    {
        List<ListItem> methods = new List<ListItem>();
        methods.Add(new ListItem("-n/a-", ""));
        foreach (ShippingMethod method in ShippingMethod.FindAll())
        {
            // Never offer the method currently being edited (would recurse).
            if (String.Compare(method.Bvin, ((BVShippingModule) NamingContainer).ShippingMethod.Bvin, true) == 0)
                continue;
            ShippingProvider provider = AvailableProviders.FindProviderById(method.ShippingProviderId);
            if (provider == null ||
                !provider.ProviderId.Equals(method.ShippingProviderId, StringComparison.InvariantCultureIgnoreCase))
                continue;
            if (hiddenOrderProviders.IsMatch(provider.ProviderId)) continue;
            methods.Add(new ListItem(method.Name, method.Bvin));
        }
        return methods.ToArray();
    }

    // Public, non-hidden vendor/manufacturer properties plus address properties, sorted.
    private ListItem[] GetVendorManufacturerProperties()
    {
        List<ListItem> properties = new List<ListItem>();
        foreach (PropertyInfo property in
            typeof (VendorManufacturerBase).GetProperties(BindingFlags.Instance | BindingFlags.Public))
        {
            if (!hiddenVendorManufacturerProperties.IsMatch(property.Name))
                properties.Add(new ListItem(property.Name, property.Name));
        }
        properties.AddRange(GetAddressProperties());
        properties.Sort(delegate(ListItem item1, ListItem item2) { return string.Compare(item1.Text, item2.Text); });
        return properties.ToArray();
    }

    // Configures the custom-property drop-down for an item property selection.
    private void PrepareCustomPropertyField(WebControl label, ListControl list, ItemProperties property)
    {
        list.Items.Clear();
        if (property == ItemProperties.CustomProperty)
        {
            label.ToolTip = "<p>Select the custom property to use.</p>";
            list.Items.AddRange(GetPropertyTypes());
            list.Visible = true;
        }
        else if (property == ItemProperties.Manufacturer || property == ItemProperties.Vendor)
        {
            label.ToolTip = string.Format("<p>Select the {0} property to use.</p>", property.ToString().ToLower());
            list.Items.AddRange(GetVendorManufacturerProperties());
            list.Visible = true;
        }
        else
        {
            label.ToolTip = "";
            list.Items.Add(new ListItem("n/a", ""));
            list.Visible = false;
        }
    }

    // Configures the custom-property drop-down for a package property selection.
    private void PrepareCustomPropertyField(WebControl label, ListControl list, PackageProperties property)
    {
        list.Items.Clear();
        if (property == PackageProperties.DestinationAddress || property == PackageProperties.SourceAddress)
        {
            label.ToolTip = "<p>Select the address property to use.</p>";
            list.Items.AddRange(GetAddressProperties());
            list.Visible = true;
        }
        else if (property == PackageProperties.UseMethod)
        {
            label.ToolTip =
                "<p>Select the shipping method to use.</p><p>Note that only shipping methods that can calculate package shipping costs are shown.</p>";
            list.Items.AddRange(GetShippingMethods());
            list.Visible = true;
        }
        else
        {
            label.ToolTip = "";
            list.Items.Add(new ListItem("n/a", ""));
            list.Visible = false;
        }
    }

    private static void PrepareValueField(Control multiplier, Label label, Control field,
                                          WebControl requiredValidator, WebControl numericValidator,
                                          ItemProperties property)
    {
        PropertyTypes propertyType = ItemPropertiesHelper.GetPropertyType(property);
        PrepareValueField(multiplier, label, field, requiredValidator, numericValidator, propertyType);
    }

    private static void PrepareValueField(Control multiplier, Label label, Control field,
                                          WebControl requiredValidator, WebControl numericValidator,
                                          PackageProperties property)
    {
        PropertyTypes propertyType = PackagePropertiesHelper.GetPropertyType(property);
        PrepareValueField(multiplier, label, field, requiredValidator, numericValidator, propertyType);
    }

    // Shows/hides the value entry field and its validators based on the property type.
    private static void PrepareValueField(Control multiplier, Label label, Control field,
                                          WebControl requiredValidator, WebControl numericValidator,
                                          PropertyTypes propertyType)
    {
        if (propertyType == PropertyTypes.Numeric)
        {
            multiplier.Visible = true;
            field.Visible = true;
            label.Text = "Multiplier";
            label.ToolTip =
                "<p>Enter the multiplier. If the final cost is less than 0, the rate will not be displayed to the customer.</p>";
            requiredValidator.Enabled = true;
            numericValidator.Enabled = true;
        }
        else if (propertyType == PropertyTypes.Fixed)
        {
            multiplier.Visible = false;
            field.Visible = true;
            label.Text = "Value";
            label.ToolTip = "<p>Enter the fixed cost.</p>";
            requiredValidator.Enabled = true;
            numericValidator.Enabled = true;
        }
        else
        {
            multiplier.Visible = false;
            field.Visible = false;
            requiredValidator.Enabled = false;
            numericValidator.Enabled = false;
        }
    }

    // Re-opens the import dialog on the client after a failed server-side validation.
    private void RedisplayImportCostDialog()
    {
        Page.ClientScript.RegisterStartupScript(GetType(), "showImportCostDialog",
                                                "YAHOO.util.Event.addListener(window, \"load\", showImportCostDialog);",
                                                true);
    }

    #endregion

    #region Private Classes

    /// <summary>
    /// One row of the sample-costs preview grid; sorts by order number.
    /// Serializable so it can live in session state.
    /// </summary>
    [Serializable]
    private class SampleOrderResult : IComparable
    {
        private String _limitValues = String.Empty;
        private String _matchValues = String.Empty;
        private String _orderDisplay = String.Empty;
        private String _orderNumber = String.Empty;
        private String _rateDisplay = String.Empty;
        private String _value = String.Empty;

        public String OrderDisplay
        {
            get { return _orderDisplay; }
            set { _orderDisplay = value; }
        }

        public String OrderNumber
        {
            get { return _orderNumber; }
            set { _orderNumber = value; }
        }

        public String LimitValues
        {
            get { return _limitValues; }
            set { _limitValues = value; }
        }

        public String MatchValues
        {
            get { return _matchValues; }
            set { _matchValues = value; }
        }

        public String Value
        {
            get { return _value; }
            set { _value = value; }
        }

        public String RateDisplay
        {
            get { return _rateDisplay; }
            set { _rateDisplay = value; }
        }

        #region IComparable Members

        public int CompareTo(object obj)
        {
            if (obj == null) throw new ArgumentNullException("obj");
            SampleOrderResult result = obj as SampleOrderResult;
            if (result == null) throw new ArgumentException("obj must be a SampleOrderResult.");
            return String.Compare(OrderNumber, result.OrderNumber, true);
        }

        #endregion
    }

    #endregion
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using Avalonia.Controls;
using Avalonia.Layout;
using Avalonia.Media;
using Xunit;

// The namespace selects which rendering backend's baseline images are used.
#if AVALONIA_CAIRO
namespace Avalonia.Cairo.RenderTests.Controls
#elif AVALONIA_SKIA
namespace Avalonia.Skia.RenderTests
#else
namespace Avalonia.Direct2D1.RenderTests.Controls
#endif
{
    /// <summary>
    /// Render tests for <see cref="Border"/>: each test renders a small visual
    /// tree to a file and compares it against a checked-in baseline image.
    /// </summary>
    public class BorderTests : TestBase
    {
        public BorderTests()
            : base(@"Controls\Border")
        {
        }

        [Fact]
        public void Border_1px_Border()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 1,
                }
            };

            RenderToFile(target);
            CompareImages();
        }

        [Fact]
        public void Border_2px_Border()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                }
            };

            RenderToFile(target);
            CompareImages();
        }

        [Fact]
        public void Border_Fill()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    Background = Brushes.Red,
                }
            };

            RenderToFile(target);
            CompareImages();
        }

        // The border stroke itself should inset the child content.
        [Fact]
        public void Border_Brush_Offsets_Content()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new Border
                    {
                        Background = Brushes.Red,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

        [Fact]
        public void Border_Padding_Offsets_Content()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Padding = new Thickness(2),
                    Child = new Border
                    {
                        Background = Brushes.Red,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

        [Fact]
        public void Border_Margin_Offsets_Content()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new Border
                    {
                        Background = Brushes.Red,
                        Margin = new Thickness(2),
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#else
        [Fact]
#endif
        public void Border_Centers_Content_Horizontally()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        HorizontalAlignment = HorizontalAlignment.Center,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#elif AVALONIA_SKIA_SKIP_FAIL
        [Fact(Skip = "Waiting for new FormattedText")]
#else
        [Fact]
#endif
        public void Border_Centers_Content_Vertically()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        VerticalAlignment = VerticalAlignment.Center,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#else
        [Fact]
#endif
        public void Border_Stretches_Content_Horizontally()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        HorizontalAlignment = HorizontalAlignment.Stretch,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#else
        [Fact]
#endif
        public void Border_Stretches_Content_Vertically()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        VerticalAlignment = VerticalAlignment.Stretch,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#else
        [Fact]
#endif
        public void Border_Left_Aligns_Content()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        HorizontalAlignment = HorizontalAlignment.Left,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#else
        [Fact]
#endif
        public void Border_Right_Aligns_Content()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        HorizontalAlignment = HorizontalAlignment.Right,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#elif AVALONIA_SKIA_SKIP_FAIL
        [Fact(Skip = "Waiting for new FormattedText")]
#else
        [Fact]
#endif
        public void Border_Top_Aligns_Content()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        VerticalAlignment = VerticalAlignment.Top,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

#if AVALONIA_CAIRO
        [Fact(Skip = "Font scaling currently broken on cairo")]
#elif AVALONIA_SKIA_SKIP_FAIL
        [Fact(Skip = "Waiting for new FormattedText")]
#else
        [Fact]
#endif
        public void Border_Bottom_Aligns_Content()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    BorderBrush = Brushes.Black,
                    BorderThickness = 2,
                    Child = new TextBlock
                    {
                        Text = "Foo",
                        Background = Brushes.Red,
                        FontFamily = "Segoe UI",
                        FontSize = 12,
                        VerticalAlignment = VerticalAlignment.Bottom,
                    }
                }
            };

            RenderToFile(target);
            CompareImages();
        }

        // Verifies a RenderTransform on the outer border also rotates the
        // nested child border.
        [Fact]
        public void Border_Nested_Rotate()
        {
            Decorator target = new Decorator
            {
                Padding = new Thickness(8),
                Width = 200,
                Height = 200,
                Child = new Border
                {
                    Background = Brushes.Coral,
                    Width = 100,
                    Height = 100,
                    HorizontalAlignment = HorizontalAlignment.Center,
                    VerticalAlignment = VerticalAlignment.Center,
                    Child = new Border
                    {
                        Margin = new Thickness(25),
                        Background = Brushes.Chocolate,
                    },
                    RenderTransform = new RotateTransform(45),
                }
            };

            RenderToFile(target);
            CompareImages();
        }
    }
}
/*
 * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the ec2-2015-04-15.normal.json service model.
 */
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml.Serialization;

using Amazon.EC2.Model;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
using Amazon.Runtime.Internal.Transform;
using Amazon.Runtime.Internal.Util;

namespace Amazon.EC2.Model.Internal.MarshallTransformations
{
    /// <summary>
    /// RunInstances Request Marshaller
    /// </summary>
    /// <remarks>
    /// NOTE(review): generated code — any comments below describe the query-string
    /// encoding only; regeneration from the service model will discard them.
    /// Flattens the request object into EC2 Query API parameters; nested members
    /// and lists use dotted, 1-based keys (e.g. BlockDeviceMapping.1.Ebs.Iops).
    /// </remarks>
    public class RunInstancesRequestMarshaller : IMarshaller<IRequest, RunInstancesRequest>, IMarshaller<IRequest, AmazonWebServiceRequest>
    {
        /// <summary>
        /// Marshaller the request object to the HTTP request.
        /// </summary>
        /// <param name="input"></param>
        /// <returns></returns>
        public IRequest Marshall(AmazonWebServiceRequest input)
        {
            return this.Marshall((RunInstancesRequest)input);
        }

        /// <summary>
        /// Marshaller the request object to the HTTP request.
        /// </summary>
        /// <param name="publicRequest"></param>
        /// <returns></returns>
        public IRequest Marshall(RunInstancesRequest publicRequest)
        {
            IRequest request = new DefaultRequest(publicRequest, "Amazon.EC2");
            request.Parameters.Add("Action", "RunInstances");
            request.Parameters.Add("Version", "2015-04-15");

            if(publicRequest != null)
            {
                if(publicRequest.IsSetAdditionalInfo())
                {
                    request.Parameters.Add("AdditionalInfo", StringUtils.FromString(publicRequest.AdditionalInfo));
                }
                // BlockDeviceMapping.N.* — one dotted group per mapping, 1-based.
                if(publicRequest.IsSetBlockDeviceMappings())
                {
                    int publicRequestlistValueIndex = 1;
                    foreach(var publicRequestlistValue in publicRequest.BlockDeviceMappings)
                    {
                        if(publicRequestlistValue.IsSetDeviceName())
                        {
                            request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "DeviceName", StringUtils.FromString(publicRequestlistValue.DeviceName));
                        }
                        if(publicRequestlistValue.IsSetEbs())
                        {
                            if(publicRequestlistValue.Ebs.IsSetDeleteOnTermination())
                            {
                                request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "Ebs" + "." + "DeleteOnTermination", StringUtils.FromBool(publicRequestlistValue.Ebs.DeleteOnTermination));
                            }
                            if(publicRequestlistValue.Ebs.IsSetEncrypted())
                            {
                                request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "Ebs" + "." + "Encrypted", StringUtils.FromBool(publicRequestlistValue.Ebs.Encrypted));
                            }
                            if(publicRequestlistValue.Ebs.IsSetIops())
                            {
                                request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "Ebs" + "." + "Iops", StringUtils.FromInt(publicRequestlistValue.Ebs.Iops));
                            }
                            if(publicRequestlistValue.Ebs.IsSetSnapshotId())
                            {
                                request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "Ebs" + "." + "SnapshotId", StringUtils.FromString(publicRequestlistValue.Ebs.SnapshotId));
                            }
                            if(publicRequestlistValue.Ebs.IsSetVolumeSize())
                            {
                                request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "Ebs" + "." + "VolumeSize", StringUtils.FromInt(publicRequestlistValue.Ebs.VolumeSize));
                            }
                            if(publicRequestlistValue.Ebs.IsSetVolumeType())
                            {
                                request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "Ebs" + "." + "VolumeType", StringUtils.FromString(publicRequestlistValue.Ebs.VolumeType));
                            }
                        }
                        if(publicRequestlistValue.IsSetNoDevice())
                        {
                            request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "NoDevice", StringUtils.FromString(publicRequestlistValue.NoDevice));
                        }
                        if(publicRequestlistValue.IsSetVirtualName())
                        {
                            request.Parameters.Add("BlockDeviceMapping" + "." + publicRequestlistValueIndex + "." + "VirtualName", StringUtils.FromString(publicRequestlistValue.VirtualName));
                        }
                        publicRequestlistValueIndex++;
                    }
                }
                if(publicRequest.IsSetClientToken())
                {
                    request.Parameters.Add("ClientToken", StringUtils.FromString(publicRequest.ClientToken));
                }
                if(publicRequest.IsSetDisableApiTermination())
                {
                    request.Parameters.Add("DisableApiTermination", StringUtils.FromBool(publicRequest.DisableApiTermination));
                }
                if(publicRequest.IsSetEbsOptimized())
                {
                    request.Parameters.Add("EbsOptimized", StringUtils.FromBool(publicRequest.EbsOptimized));
                }
                if(publicRequest.IsSetIamInstanceProfile())
                {
                    if(publicRequest.IamInstanceProfile.IsSetArn())
                    {
                        request.Parameters.Add("IamInstanceProfile" + "." + "Arn", StringUtils.FromString(publicRequest.IamInstanceProfile.Arn));
                    }
                    if(publicRequest.IamInstanceProfile.IsSetName())
                    {
                        request.Parameters.Add("IamInstanceProfile" + "." + "Name", StringUtils.FromString(publicRequest.IamInstanceProfile.Name));
                    }
                }
                if(publicRequest.IsSetImageId())
                {
                    request.Parameters.Add("ImageId", StringUtils.FromString(publicRequest.ImageId));
                }
                if(publicRequest.IsSetInstanceInitiatedShutdownBehavior())
                {
                    request.Parameters.Add("InstanceInitiatedShutdownBehavior", StringUtils.FromString(publicRequest.InstanceInitiatedShutdownBehavior));
                }
                if(publicRequest.IsSetInstanceType())
                {
                    request.Parameters.Add("InstanceType", StringUtils.FromString(publicRequest.InstanceType));
                }
                if(publicRequest.IsSetKernelId())
                {
                    request.Parameters.Add("KernelId", StringUtils.FromString(publicRequest.KernelId));
                }
                if(publicRequest.IsSetKeyName())
                {
                    request.Parameters.Add("KeyName", StringUtils.FromString(publicRequest.KeyName));
                }
                if(publicRequest.IsSetMaxCount())
                {
                    request.Parameters.Add("MaxCount", StringUtils.FromInt(publicRequest.MaxCount));
                }
                if(publicRequest.IsSetMinCount())
                {
                    request.Parameters.Add("MinCount", StringUtils.FromInt(publicRequest.MinCount));
                }
                // The boolean Monitoring flag maps to the nested Monitoring.Enabled key.
                if(publicRequest.IsSetMonitoring())
                {
                    request.Parameters.Add("Monitoring.Enabled", StringUtils.FromBool(publicRequest.Monitoring));
                }
                // NetworkInterface.N.* — note the wire names differ from the model
                // names for Groups (SecurityGroupId) and PrivateIpAddresses
                // (PrivateIpAddressesSet).
                if(publicRequest.IsSetNetworkInterfaces())
                {
                    int publicRequestlistValueIndex = 1;
                    foreach(var publicRequestlistValue in publicRequest.NetworkInterfaces)
                    {
                        if(publicRequestlistValue.IsSetAssociatePublicIpAddress())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "AssociatePublicIpAddress", StringUtils.FromBool(publicRequestlistValue.AssociatePublicIpAddress));
                        }
                        if(publicRequestlistValue.IsSetDeleteOnTermination())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "DeleteOnTermination", StringUtils.FromBool(publicRequestlistValue.DeleteOnTermination));
                        }
                        if(publicRequestlistValue.IsSetDescription())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "Description", StringUtils.FromString(publicRequestlistValue.Description));
                        }
                        if(publicRequestlistValue.IsSetDeviceIndex())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "DeviceIndex", StringUtils.FromInt(publicRequestlistValue.DeviceIndex));
                        }
                        if(publicRequestlistValue.IsSetGroups())
                        {
                            int publicRequestlistValuelistValueIndex = 1;
                            foreach(var publicRequestlistValuelistValue in publicRequestlistValue.Groups)
                            {
                                request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "SecurityGroupId" + "." + publicRequestlistValuelistValueIndex, StringUtils.FromString(publicRequestlistValuelistValue));
                                publicRequestlistValuelistValueIndex++;
                            }
                        }
                        if(publicRequestlistValue.IsSetNetworkInterfaceId())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "NetworkInterfaceId", StringUtils.FromString(publicRequestlistValue.NetworkInterfaceId));
                        }
                        if(publicRequestlistValue.IsSetPrivateIpAddress())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "PrivateIpAddress", StringUtils.FromString(publicRequestlistValue.PrivateIpAddress));
                        }
                        if(publicRequestlistValue.IsSetPrivateIpAddresses())
                        {
                            int publicRequestlistValuelistValueIndex = 1;
                            foreach(var publicRequestlistValuelistValue in publicRequestlistValue.PrivateIpAddresses)
                            {
                                if(publicRequestlistValuelistValue.IsSetPrimary())
                                {
                                    request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "PrivateIpAddressesSet" + "." + publicRequestlistValuelistValueIndex + "." + "Primary", StringUtils.FromBool(publicRequestlistValuelistValue.Primary));
                                }
                                if(publicRequestlistValuelistValue.IsSetPrivateIpAddress())
                                {
                                    request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "PrivateIpAddressesSet" + "." + publicRequestlistValuelistValueIndex + "." + "PrivateIpAddress", StringUtils.FromString(publicRequestlistValuelistValue.PrivateIpAddress));
                                }
                                publicRequestlistValuelistValueIndex++;
                            }
                        }
                        if(publicRequestlistValue.IsSetSecondaryPrivateIpAddressCount())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "SecondaryPrivateIpAddressCount", StringUtils.FromInt(publicRequestlistValue.SecondaryPrivateIpAddressCount));
                        }
                        if(publicRequestlistValue.IsSetSubnetId())
                        {
                            request.Parameters.Add("NetworkInterface" + "." + publicRequestlistValueIndex + "." + "SubnetId", StringUtils.FromString(publicRequestlistValue.SubnetId));
                        }
                        publicRequestlistValueIndex++;
                    }
                }
                if(publicRequest.IsSetPlacement())
                {
                    if(publicRequest.Placement.IsSetAvailabilityZone())
                    {
                        request.Parameters.Add("Placement" + "." + "AvailabilityZone", StringUtils.FromString(publicRequest.Placement.AvailabilityZone));
                    }
                    if(publicRequest.Placement.IsSetGroupName())
                    {
                        request.Parameters.Add("Placement" + "." + "GroupName", StringUtils.FromString(publicRequest.Placement.GroupName));
                    }
                    if(publicRequest.Placement.IsSetTenancy())
                    {
                        request.Parameters.Add("Placement" + "." + "Tenancy", StringUtils.FromString(publicRequest.Placement.Tenancy));
                    }
                }
                if(publicRequest.IsSetPrivateIpAddress())
                {
                    request.Parameters.Add("PrivateIpAddress", StringUtils.FromString(publicRequest.PrivateIpAddress));
                }
                if(publicRequest.IsSetRamdiskId())
                {
                    request.Parameters.Add("RamdiskId", StringUtils.FromString(publicRequest.RamdiskId));
                }
                if(publicRequest.IsSetSecurityGroupIds())
                {
                    int publicRequestlistValueIndex = 1;
                    foreach(var publicRequestlistValue in publicRequest.SecurityGroupIds)
                    {
                        request.Parameters.Add("SecurityGroupId" + "." + publicRequestlistValueIndex, StringUtils.FromString(publicRequestlistValue));
                        publicRequestlistValueIndex++;
                    }
                }
                if(publicRequest.IsSetSecurityGroups())
                {
                    int publicRequestlistValueIndex = 1;
                    foreach(var publicRequestlistValue in publicRequest.SecurityGroups)
                    {
                        request.Parameters.Add("SecurityGroup" + "." + publicRequestlistValueIndex, StringUtils.FromString(publicRequestlistValue));
                        publicRequestlistValueIndex++;
                    }
                }
                if(publicRequest.IsSetSubnetId())
                {
                    request.Parameters.Add("SubnetId", StringUtils.FromString(publicRequest.SubnetId));
                }
                if(publicRequest.IsSetUserData())
                {
                    request.Parameters.Add("UserData", StringUtils.FromString(publicRequest.UserData));
                }
            }
            return request;
        }
    }
}
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using Microsoft.Build.Construction;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudioTools.Project;

namespace Microsoft.PythonTools.Project {
    /// <summary>
    /// Creates Python Projects and upgrades project files produced by earlier
    /// PTVS releases (GUID-based interpreter IDs, 2.1 Beta / 2.2 targets files,
    /// missing ToolsVersion / WebBrowserUrl / mscorlib reference).
    /// </summary>
    [Guid(PythonConstants.ProjectFactoryGuid)]
    class PythonProjectFactory : ProjectFactory {
        // We don't want to create projects with these GUIDs because they are
        // either incompatible or don't really exist (e.g. telemetry markers).
        private static readonly HashSet<Guid> IgnoredProjectTypeGuids = new HashSet<Guid> {
            new Guid("{789894C7-04A9-4A11-A6B5-3F4435165112}"), // Flask Web Project marker
            new Guid("{E614C764-6D9E-4607-9337-B7073809A0BD}"), // Bottle Web Project marker
            new Guid("{725071E1-96AE-4405-9303-1BA64EFF6EBD}"), // Worker Role Project marker
            new Guid("{A41C8EA1-112A-4A2D-9F91-29557995525F}"), // ML Classifier template marker
            new Guid("{8267E218-6B96-4D5D-A9DF-50CEDF58D05F}"), // ML Clustering template marker
            new Guid("{6C0EFAFA-1A04-41B6-A6D7-511B90951B5B}"), // ML Regression template marker
            // Reserved for future use
            new Guid("{C6BB79BC-0657-4BB5-8732-4FFE9EB5352D}"),
            new Guid("{C966CC89-2BC8-4036-85D1-478A085253AD}"),
            new Guid("{D848A2D7-0C4D-4A6A-9048-2B62DC103475}"),
            new Guid("{74DCBC5F-E288-431D-A7A0-B7CD4BE4B611}"),
            new Guid("{2BAC7739-571D-41CB-953C-7101995EBD9E}"),
            new Guid("{B452423D-5304-416F-975E-351476E8705C}"),
            new Guid("{587EF8DD-BE2D-4792-AE5F-8AE0A49AC1A5}")
        };

        internal const string UwpProjectGuid = @"{2b557614-1a2b-4903-b9df-ed20d7b63f3a}";

        // These targets files existed in PTVS 2.1 Beta but were removed. We
        // want to replace them with some properties and Web.targets.
        // Some intermediate builds of PTVS have different paths that will not
        // be upgraded automatically.
        private const string Ptvs21BetaBottleTargets = @"$(VSToolsPath)\Python Tools\Microsoft.PythonTools.Bottle.targets";
        private const string Ptvs21BetaFlaskTargets = @"$(VSToolsPath)\Python Tools\Microsoft.PythonTools.Flask.targets";

        // These targets files existed in early PTVS versions but are no longer
        // suitable and need to be replaced with our own targets file.
        internal const string CommonTargets = @"$(MSBuildToolsPath)\Microsoft.Common.targets";
        internal const string CommonProps = @"$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props";

        internal const string PtvsTargets = @"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets";
        internal const string WebTargets = @"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.Web.targets";
        internal const string UwpTargets = @"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.Uwp.targets";

#if DEV15
        internal const string ToolsVersion = "15.1";
#else
        internal const string ToolsVersion = "4.0";
#endif

        // These GUIDs were used for well-known interpreter IDs; format strings
        // are filled in with the interpreter version during upgrade.
        private static readonly Dictionary<Guid, string> InterpreterIdMap = new Dictionary<Guid, string> {
            { new Guid("{2AF0F10D-7135-4994-9156-5D01C9C11B7E}"), "Global|PythonCore|{0}-32" },
            { new Guid("{9A7A9026-48C1-4688-9D5D-E5699D47D074}"), "Global|PythonCore|{0}" },
            { new Guid("{80659AB7-4D53-4E0C-8588-A766116CBD46}"), "IronPython|{0}-32" },
            { new Guid("{FCC291AA-427C-498C-A4D7-4502D6449B8C}"), "IronPython|{0}-64" },
            { new Guid("{86767848-40B4-4007-8BCC-A3835EDF0E69}"), "PythonUwpIoT|{0}|$(MSBuildProjectFullPath)" },
        };

        public PythonProjectFactory(IServiceProvider/*!*/ package)
            : base(package) {
        }

        internal override ProjectNode/*!*/ CreateProject() {
            // Ensure our package is properly loaded
            var pyService = Site.GetPythonToolsService();

            return new PythonProjectNode(Site);
        }

        /// <summary>
        /// Filters <see cref="IgnoredProjectTypeGuids"/> out of the project's
        /// type GUID list so we never instantiate a marker-only project type.
        /// </summary>
        protected override string ProjectTypeGuids(string file) {
            var guids = base.ProjectTypeGuids(file);

            // Exclude GUIDs from IgnoredProjectTypeGuids so we don't try and
            // create projects from them.
            return string.Join(";", guids
                .Split(';')
                .Where(s => {
                    Guid g;
                    return Guid.TryParse(s, out g) && !IgnoredProjectTypeGuids.Contains(g);
                })
            );
        }

        // True if the property value parses as a GUID.
        private static bool IsGuidValue(ProjectPropertyElement e) {
            Guid g;
            return Guid.TryParse(e.Value, out g);
        }

        // True if the metadata value parses as a GUID.
        private static bool IsGuidValue(ProjectMetadataElement e) {
            Guid g;
            return Guid.TryParse(e.Value, out g);
        }

        // True if any path segment of the item's Include parses as a GUID.
        private static bool IsGuidValue(ProjectItemElement e) {
            Guid g;
            foreach (var i in (e.Include?.Split('/', '\\')).MaybeEnumerate()) {
                if (Guid.TryParse(i?.Trim() ?? "", out g)) {
                    return true;
                }
            }
            return false;
        }

        // True if the item's Include is a parseable assembly name.
        private static bool IsAssemblyReference(ProjectItemElement e) {
            try {
                new AssemblyName(e.Include);
            } catch (Exception) {
                return false;
            }
            return true;
        }

        // True if the item's Include resolves to the mscorlib assembly name.
        private static bool IsMscorlibReference(ProjectItemElement e) {
            try {
                return (new AssemblyName(e.Include)).Name == "mscorlib";
            } catch (Exception) {
                return false;
            }
        }

        /// <summary>
        /// Classifies the project file into an upgrade state: OneWayUpgrade for
        /// legacy GUID interpreter references or Common-only imports, SafeRepair
        /// for everything we can fix mechanically, otherwise NotNeeded.
        /// </summary>
        protected override ProjectUpgradeState UpgradeProjectCheck(
            ProjectRootElement projectXml,
            ProjectRootElement userProjectXml,
            Action<__VSUL_ERRORLEVEL, string> log,
            ref Guid projectFactory,
            ref __VSPPROJECTUPGRADEVIAFACTORYFLAGS backupSupport
        ) {
            Version version;

            // Referencing an interpreter by GUID
            if (projectXml.Properties.Where(p => p.Name == "InterpreterId").Any(IsGuidValue) ||
                projectXml.ItemGroups.SelectMany(g => g.Items)
                    .Where(i => i.ItemType == "InterpreterReference")
                    .Any(IsGuidValue) ||
                projectXml.ItemGroups.SelectMany(g => g.Items)
                    .Where(i => i.ItemType == "Interpreter")
                    .SelectMany(i => i.Metadata.Where(m => m.Name == "BaseInterpreter"))
                    .Any(IsGuidValue)
            ) {
                return ProjectUpgradeState.OneWayUpgrade;
            }

            var imports = new HashSet<string>(projectXml.Imports.Select(p => p.Project), StringComparer.OrdinalIgnoreCase);
            // Only importing the Common targets and/or props.
            // NOTE(review): '&&' binds tighter than '||', so this evaluates as
            // CommonProps || (CommonTargets && Count == 1). If the intent was
            // "(props or targets) and nothing else", the grouping is wrong —
            // confirm against the original intent before changing.
            if (imports.Contains(CommonProps) || imports.Contains(CommonTargets) && imports.Count == 1) {
                return ProjectUpgradeState.OneWayUpgrade;
            }

            // Includes imports from PTVS 2.2
            if (projectXml.Properties.Any(IsPtvsTargetsFileProperty)) {
                return ProjectUpgradeState.SafeRepair;
            }

            // Uses web or Django launcher and has no WebBrowserUrl property
            if (projectXml.Properties.Where(p => p.Name == "LaunchProvider")
                    .Any(p => p.Value == "Web launcher" || p.Value == "Django launcher") &&
                !projectXml.Properties.Any(p => p.Name == "WebBrowserUrl")) {
                return ProjectUpgradeState.SafeRepair;
            }

            // Importing a targets file from 2.1 Beta
            if (imports.Contains(Ptvs21BetaBottleTargets) || imports.Contains(Ptvs21BetaFlaskTargets)) {
                return ProjectUpgradeState.SafeRepair;
            }

            // ToolsVersion less than 4.0 (or unspecified) is not supported, so
            // set it to 4.0.
            if (!Version.TryParse(projectXml.ToolsVersion, out version) ||
                version < new Version(4, 0)) {
                return ProjectUpgradeState.SafeRepair;
            }

            // Referencing .NET assemblies but not mscorlib
            var references = projectXml.ItemGroups.SelectMany(g => g.Items)
                .Where(i => i.ItemType == ProjectFileConstants.Reference).ToArray();
            if (references.Any(IsAssemblyReference) && !references.Any(IsMscorlibReference)) {
                return ProjectUpgradeState.SafeRepair;
            }

            return ProjectUpgradeState.NotNeeded;
        }

        /// <summary>
        /// Applies each mechanical repair in sequence; every Process* helper is
        /// a no-op when its condition does not apply.
        /// </summary>
        protected override void UpgradeProject(
            ref ProjectRootElement projectXml,
            ref ProjectRootElement userProjectXml,
            Action<__VSUL_ERRORLEVEL, string> log
        ) {
            Version version;

            // ToolsVersion less than 4.0 (or unspecified) is not supported, so
            // set it to the latest.
            if (!Version.TryParse(projectXml.ToolsVersion, out version) ||
                version < new Version(4, 0)) {
                projectXml.ToolsVersion = ToolsVersion;
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedToolsVersion);
            }

            // Referencing an interpreter by GUID
            ProcessInterpreterIdsFrom22(projectXml, log);

            // Importing a targets file from 2.2
            ProcessImportsFrom22(projectXml, log);

            // Importing a targets file from 2.1 Beta
            ProcessImportsFrom21b(projectXml, log);

            // Add missing WebBrowserUrl property
            ProcessMissingWebBrowserUrl(projectXml, log);

            // Referencing .NET assemblies but not mscorlib
            ProcessMissingMscorlibReference(projectXml, log);
        }

        private static bool IsPtvsTargetsFileProperty(ProjectPropertyElement p) {
            return p.Name == "PtvsTargetsFile";
        }

        // Adds a default WebBrowserUrl next to the last LaunchProvider property
        // when the project uses the web or Django launcher.
        private static void ProcessMissingWebBrowserUrl(ProjectRootElement projectXml, Action<__VSUL_ERRORLEVEL, string> log) {
            foreach (var g in projectXml.PropertyGroupsReversed) {
                var launcher = g.PropertiesReversed.FirstOrDefault(p => p.Name == "LaunchProvider");
                if (launcher == null) {
                    continue;
                }
                if (launcher.Value != "Web launcher" && launcher.Value != "Django launcher") {
                    return;
                }

                // <WebBrowserUrl>http://localhost</WebBrowserUrl>
                g.AddProperty("WebBrowserUrl", "http://localhost");
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedWebBrowserUrlProperty);
                return;
            }
        }

        // Removes the 2.2-era PtvsTargetsFile property and conditional imports,
        // replacing them with a direct import of our targets (UWP variant when
        // the project carries the UWP project type GUID).
        private static void ProcessImportsFrom22(ProjectRootElement projectXml, Action<__VSUL_ERRORLEVEL, string> log) {
            bool anyUpdated = false;
            // NOTE(review): propValue is assigned from the removed property but
            // never read afterwards — looks like dead code; confirm.
            var propValue = PtvsTargets;
            foreach (var p in projectXml.Properties.Where(IsPtvsTargetsFileProperty).ToArray()) {
                propValue = p.Value;
                p.Parent.RemoveChild(p);
                anyUpdated = true;
            }

            // Replace:
            //  <Import Condition="Exists($(PtvsTargetsFile))" Project="$(PtvsTargetsFile)" />
            //  <Import Condition="!Exists($(PtvsTargetsFile))" Project="$(MSBuildToolsPath)\Microsoft.Common.targets" />
            //
            // With:
            //  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets" />
            foreach (var p in projectXml.Imports.Where(i => i.Condition.Contains("$(PtvsTargetsFile)") || i.Project.Equals("$(PtvsTargetsFile)")).ToArray()) {
                p.Parent.RemoveChild(p);
                anyUpdated = true;
            }

            string targets = PtvsTargets;
            if (ContainsProjectTypeGuid(projectXml, UwpProjectGuid)) {
                targets = UwpTargets;
            }

            if (!projectXml.Imports.Any(p => targets.Equals(p.Project, StringComparison.OrdinalIgnoreCase))) {
                projectXml.AddImport(targets);
                anyUpdated = true;
            }

            if (anyUpdated) {
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedImportsFor30);
            }
        }

        // Redirects 2.1 Beta Bottle/Flask targets to Web.targets, seeds the
        // properties those targets used to define, and drops redundant
        // Common.props/Common.targets imports.
        private static void ProcessImportsFrom21b(ProjectRootElement projectXml, Action<__VSUL_ERRORLEVEL, string> log) {
            var bottleImports = projectXml.Imports.Where(p => p.Project.Equals(Ptvs21BetaBottleTargets, StringComparison.OrdinalIgnoreCase)).ToList();
            var flaskImports = projectXml.Imports.Where(p => p.Project.Equals(Ptvs21BetaFlaskTargets, StringComparison.OrdinalIgnoreCase)).ToList();

            foreach (var import in bottleImports.Concat(flaskImports)) {
                import.Project = WebTargets;
            }

            if (bottleImports.Any()) {
                var globals = projectXml.PropertyGroups.FirstOrDefault() ?? projectXml.AddPropertyGroup();
                AddOrSetProperty(globals, "PythonDebugWebServerCommandArguments", "--debug $(CommandLineArguments)");
                AddOrSetProperty(globals, "PythonWsgiHandler", "{StartupModule}.wsgi_app()");
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedBottleImports);
            }

            if (flaskImports.Any()) {
                var globals = projectXml.PropertyGroups.FirstOrDefault() ?? projectXml.AddPropertyGroup();
                AddOrSetProperty(globals, "PythonWsgiHandler", "{StartupModule}.wsgi_app");
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedFlaskImports);
            }

            var commonPropsImports = projectXml.Imports.Where(p => p.Project.Equals(CommonProps, StringComparison.OrdinalIgnoreCase)).ToList();
            foreach (var p in commonPropsImports) {
                projectXml.RemoveChild(p);
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedRemoveCommonProps);
            }

            if (projectXml.Imports.Count == 1 && projectXml.Imports.First().Project.Equals(CommonTargets, StringComparison.OrdinalIgnoreCase)) {
                projectXml.RemoveChild(projectXml.Imports.First());
                projectXml.AddImport(PtvsTargets);
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedRemoveCommonTargets);
            }
        }

        // Rewrites GUID-based interpreter IDs (pre-3.0) into the new string ID
        // format, dropping references that cannot be mapped.
        private static void ProcessInterpreterIdsFrom22(ProjectRootElement projectXml, Action<__VSUL_ERRORLEVEL, string> log) {
            bool interpreterChanged = false, interpreterRemoved = false;

            var msbuildInterpreters = new Dictionary<Guid, string>();

            foreach (var i in projectXml.ItemGroups.SelectMany(g => g.Items).Where(i => i.ItemType == "Interpreter")) {
                var id = i.Metadata.LastOrDefault(m => m.Name == "Id");
                if (id != null) {
                    Guid guid;
                    if (Guid.TryParse(id.Value, out guid)) {
                        msbuildInterpreters[guid] = i.Include?.Trim('/', '\\');
                    }
                }

                var mdBase = i.Metadata.LastOrDefault(m => m.Name == "BaseInterpreter");
                if (mdBase != null) {
                    // BaseInterpreter value is now unused, so just remove it
                    mdBase.Parent.RemoveChild(mdBase);
                }

                var mdVer = i.Metadata.LastOrDefault(m => m.Name == "Version");
                if (mdVer == null) {
                    log(__VSUL_ERRORLEVEL.VSUL_ERROR, Strings.UpgradedInterpreterReferenceFailed);
                    continue;
                }
            }

            var interpreterId = projectXml.Properties.LastOrDefault(p => p.Name == "InterpreterId");
            var interpreterVersion = projectXml.Properties.LastOrDefault(p => p.Name == "InterpreterVersion");
            if (interpreterId != null && interpreterVersion != null) {
                var newId = MapInterpreterId(interpreterId.Value, interpreterVersion.Value, msbuildInterpreters);
                if (newId != null) {
                    interpreterId.Value = newId;
                    // UWP projects still consume InterpreterVersion, so keep it.
                    if (!ContainsProjectTypeGuid(projectXml, UwpProjectGuid)) {
                        interpreterVersion.Parent.RemoveChild(interpreterVersion);
                    }
                    interpreterChanged = true;
                } else {
                    interpreterId.Parent.RemoveChild(interpreterId);
                    interpreterVersion.Parent.RemoveChild(interpreterVersion);
                    interpreterRemoved = true;
                }
            }

            foreach (var i in projectXml.ItemGroups.SelectMany(g => g.Items).Where(i => i.ItemType == "InterpreterReference").ToList()) {
                var newId = MapInterpreterId(i.Include, null, null);
                if (newId != null) {
                    i.Include = newId;
                    interpreterChanged = true;
                } else {
                    i.Parent.RemoveChild(i);
                    interpreterRemoved = true;
                }
            }

            if (interpreterRemoved) {
                log(__VSUL_ERRORLEVEL.VSUL_WARNING, Strings.UpgradedInterpreterReferenceRemoved);
            } else if (interpreterChanged) {
                log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedInterpreterReference);
            }
        }

        // Adds a non-private mscorlib reference when the project references
        // .NET assemblies but not mscorlib itself.
        private static void ProcessMissingMscorlibReference(ProjectRootElement projectXml, Action<__VSUL_ERRORLEVEL, string> log) {
            var references = projectXml.ItemGroups.SelectMany(g => g.Items)
                .Where(i => i.ItemType == ProjectFileConstants.Reference).ToArray();
            if (!references.Any(IsAssemblyReference) || references.Any(IsMscorlibReference)) {
                return;
            }

            // Prefer the item group that already holds the most references.
            var group = projectXml.ItemGroups.OrderByDescending(g => g.Items.Count(i => i.ItemType == ProjectFileConstants.Reference)).FirstOrDefault() ??
                projectXml.AddItemGroup();

            group.AddItem(ProjectFileConstants.Reference, "mscorlib", new Dictionary<string, string> {
                ["Name"] = "mscorlib",
                ["Private"] = "False"
            });

            log(__VSUL_ERRORLEVEL.VSUL_INFORMATIONAL, Strings.UpgradedMscorlibReference);
        }

        // Overwrites every existing property of the given name, or appends one
        // if none exists.
        private static void AddOrSetProperty(ProjectPropertyGroupElement group, string name, string value) {
            bool anySet = false;
            foreach (var prop in group.Properties.Where(p => p.Name == name)) {
                prop.Value = value;
                anySet = true;
            }

            if (!anySet) {
                group.AddProperty(name, value);
            }
        }

        /// <summary>
        /// Maps a legacy GUID interpreter ID (optionally "guid\version") to the
        /// new string format via <see cref="InterpreterIdMap"/>, falling back to
        /// an MSBuild-relative ID for project-local interpreters. Returns null
        /// when the ID cannot be mapped.
        /// </summary>
        private static string MapInterpreterId(string idStr, string versionStr, IDictionary<Guid, string> msBuildInterpreters) {
            int splitter = idStr.IndexOfAny(new[] { '/', '\\' });
            if (splitter > 0) {
                versionStr = idStr.Substring(splitter + 1);
                idStr = idStr.Remove(splitter);
            }

            Guid id;
            Version version;
            if (string.IsNullOrEmpty(idStr) || !Guid.TryParse(idStr, out id)) {
                return null;
            }

            string fmt;
            if (InterpreterIdMap.TryGetValue(id, out fmt)) {
                if (string.IsNullOrEmpty(versionStr) || !Version.TryParse(versionStr, out version)) {
                    return null;
                }

                return fmt.FormatInvariant(version.ToString());
            }

            string msbuildId = null;
            if ((msBuildInterpreters?.TryGetValue(id, out msbuildId) ?? false) && !string.IsNullOrEmpty(msbuildId)) {
                return "MSBuild|{0}|$(MSBuildProjectFullPath)".FormatInvariant(msbuildId);
            }

            return null;
        }

        // True if any ProjectTypeGuids property value contains the given GUID
        // string (substring match, matching the stored casing).
        private static bool ContainsProjectTypeGuid(ProjectRootElement projectXml, string guid) {
            return projectXml.Properties.Where(p => p.Name == ProjectFileConstants.ProjectTypeGuids).Any(p => p.Value.Contains(guid));
        }
    }
}
using De.Osthus.Ambeth.Util;
using System;
using System.Text;

namespace De.Osthus.Ambeth.Collections
{
    /**
     * This special kind of HashMap is intended to be used in high-performance concurrent scenarios with many reads and only some single occurences of write
     * accesses. To allow extremely high concurrency there is NO lock in read access scenarios. The design pattern to synchronize the reads with the indeed
     * synchronized write accesses is that the internal table-structure will be REPLACED on each write.
     *
     * Because of this the existing internal object graph will NEVER be modified allowing unsynchronized read access of any amount without performance loss.
     *
     * @param <K>
     * @param <V>
     */
    public class WeakSmartCopyMap<K, V> : WeakHashMap<K, V> where V : class
    {
        // Guards all mutating operations; readers never take it.
        private readonly Object writeLock = new Object();

        // When true, CreateCopy() drops entries whose value is a WeakReference
        // whose target has already been collected, purging dead values on every write.
        public bool AutoCleanupNullValue { get; set; }

        public WeakSmartCopyMap()
            : base()
        {
            // intended blank
        }

        public WeakSmartCopyMap(float loadFactor)
            : base(loadFactor)
        {
            // intended blank
        }

        public WeakSmartCopyMap(int initialCapacity, float loadFactor)
            : base(initialCapacity, loadFactor)
        {
            // intended blank
        }

        public WeakSmartCopyMap(int initialCapacity)
            : base(initialCapacity)
        {
            // intended blank
        }

        // Exposes the write lock so external code can batch several mutations atomically.
        public Object GetWriteLock()
        {
            return writeLock;
        }

        // Creates an empty temporary map that delegates entry handling back to this
        // instance, so subclass overrides (CreateEntry, EqualKeys, ...) stay in effect.
        protected virtual TempHashMap<IMapEntry<K, V>, K, V> CreateEmptyInstance()
        {
            WeakSmartCopyMap<K, V> This = this;
            return new TempHashMap<IMapEntry<K, V>, K, V>(table.Length, this.loadFactor, delegate(int hash, K key, V value, IMapEntry<K, V> nextEntry)
            {
                return This.CreateEntry(hash, key, value, nextEntry);
            }, delegate(K key, IMapEntry<K, V> entry)
            {
                return This.EqualKeys(key, entry);
            }, delegate(K key)
            {
                return This.ExtractHash(key);
            }, delegate(IMapEntry<K, V> entry, IMapEntry<K, V> nextEntry)
            {
                This.SetNextEntry(entry, nextEntry);
            }, delegate(IMapEntry<K, V> entry, V value)
            {
                return This.SetValueForEntry(entry, value);
            });
        }

        // Copies all live entries into a fully new structure; callers mutate the copy
        // and then publish it via SaveCopy(). Must be called under the write lock.
        protected TempHashMap<IMapEntry<K, V>, K, V> CreateCopy()
        {
            // Copy existing data in FULLY NEW STRUCTURE
            IMapEntry<K, V>[] table = this.table;
            TempHashMap<IMapEntry<K, V>, K, V> backupMap = CreateEmptyInstance();
            if (AutoCleanupNullValue)
            {
                for (int a = table.Length; a-- > 0; )
                {
                    IMapEntry<K, V> entry = table[a];
                    while (entry != null)
                    {
                        K key = entry.Key;
                        if (key != null)
                        {
                            V value = entry.Value;
                            WeakReference valueAsRef = value as WeakReference;
                            // FIX: the original dereferenced valueAsRef.Target without a null
                            // check and threw a NullReferenceException whenever a stored value
                            // was not a WeakReference. Non-WeakReference values are now always
                            // copied; WeakReference values are copied only while alive.
                            if (valueAsRef == null || valueAsRef.Target != null)
                            {
                                // Only copy the entry if the value content is still valid
                                backupMap.Put(CloneKey(key), CloneValue(value));
                            }
                        }
                        entry = entry.NextEntry;
                    }
                }
            }
            else
            {
                for (int a = table.Length; a-- > 0; )
                {
                    IMapEntry<K, V> entry = table[a];
                    while (entry != null)
                    {
                        K key = entry.Key;
                        if (key != null)
                        {
                            V value = entry.Value;
                            backupMap.Put(CloneKey(key), CloneValue(value));
                        }
                        entry = entry.NextEntry;
                    }
                }
            }
            return backupMap;
        }

        // Publishes the mutated copy by retargeting this map's internal state.
        protected void SaveCopy(TempHashMap<IMapEntry<K, V>, K, V> copy)
        {
            // Now the structure contains all necessary data, so we "retarget" the existing table
            table = copy.GetTable();
            threshold = copy.GetThreshold();
            size = copy.Count;
        }

        // Hook for subclasses that need deep copies of keys during CreateCopy().
        protected virtual K CloneKey(K key)
        {
            return key;
        }

        // Hook for subclasses that need deep copies of values during CreateCopy().
        protected virtual V CloneValue(V value)
        {
            return value;
        }

        public override void Clear()
        {
            Object writeLock = GetWriteLock();
            lock (writeLock)
            {
                if (Count == 0)
                {
                    return;
                }
                TempHashMap<IMapEntry<K, V>, K, V> backupMap = CreateCopy();
                // Write new data in the copied structure
                backupMap.Clear();
                SaveCopy(backupMap);
            }
        }

        public override V Put(K key, V value)
        {
            Object writeLock = GetWriteLock();
            lock (writeLock)
            {
                TempHashMap<IMapEntry<K, V>, K, V> backupMap = CreateCopy();
                // Write new data in the copied structure
                V existingValue = backupMap.Put(key, value);
                SaveCopy(backupMap);
                return existingValue;
            }
        }

        public override void PutAll(IMap<K, V> map)
        {
            Object writeLock = GetWriteLock();
            lock (writeLock)
            {
                TempHashMap<IMapEntry<K, V>, K, V> backupMap = CreateCopy();
                // Write new data in the copied structure
                backupMap.PutAll(map);
                SaveCopy(backupMap);
            }
        }

        public override bool PutIfNotExists(K key, V value)
        {
            Object writeLock = GetWriteLock();
            lock (writeLock)
            {
                TempHashMap<IMapEntry<K, V>, K, V> backupMap = CreateCopy();
                // Write new data in the copied structure
                if (!backupMap.PutIfNotExists(key, value))
                {
                    // Nothing changed, so the copy is discarded without being published.
                    return false;
                }
                SaveCopy(backupMap);
                return true;
            }
        }

        public override V Remove(K key)
        {
            Object writeLock = GetWriteLock();
            lock (writeLock)
            {
                TempHashMap<IMapEntry<K, V>, K, V> backupMap = CreateCopy();
                // Write new data in the copied structure
                V existingValue = backupMap.Remove(key);
                SaveCopy(backupMap);
                return existingValue;
            }
        }

        public override bool RemoveIfValue(K key, V value)
        {
            Object writeLock = GetWriteLock();
            lock (writeLock)
            {
                TempHashMap<IMapEntry<K, V>, K, V> backupMap = CreateCopy();
                // Write new data in the copied structure
                if (!backupMap.RemoveIfValue(key, value))
                {
                    // Nothing changed, so the copy is discarded without being published.
                    return false;
                }
                SaveCopy(backupMap);
                return true;
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Text;

namespace Medo.Security.Cryptography.PasswordSafe {
    /// <summary>
    /// Password policy definition.
    /// </summary>
    public class PasswordPolicy {

        /// <summary>
        /// Create a new policy.
        /// </summary>
        /// <param name="passwordLength">Password length.</param>
        public PasswordPolicy(int passwordLength) {
            TotalPasswordLength = passwordLength;
        }

        internal PasswordPolicy(RecordCollection records) {
            if (records.Contains(RecordType.PasswordPolicy)) {
                var text = records[RecordType.PasswordPolicy].Text;
                FillPolicy(new StringBuilder(text));
            }
            if (records.Contains(RecordType.OwnSymbolsForPassword)) {
                var text = records[RecordType.OwnSymbolsForPassword].Text;
                SetSpecialSymbolSet(text.ToCharArray());
            }
            // Assigned last on purpose: while parsing above, Records is still null,
            // so the property setters' MarkPolicyAsChanged() calls are no-ops and
            // the records are not rewritten during load.
            Records = records;
        }

        private readonly RecordCollection Records;

        /// <summary>
        /// Used to mark document as changed.
        /// </summary>
        protected void MarkPolicyAsChanged() {
            if (Records != null) {
                var record = Records[RecordType.PasswordPolicy];
                // Serialized format: 4 hex digits of style flags followed by five
                // 3-hex-digit counts (total length, lower, upper, digit, symbol).
                var sb = new StringBuilder();
                sb.Append(((ushort)Style).ToString("X4", CultureInfo.InvariantCulture));
                sb.Append(TotalPasswordLength.ToString("X3", CultureInfo.InvariantCulture));
                sb.Append(MinimumLowercaseCount.ToString("X3", CultureInfo.InvariantCulture));
                sb.Append(MinimumUppercaseCount.ToString("X3", CultureInfo.InvariantCulture));
                sb.Append(MinimumDigitCount.ToString("X3", CultureInfo.InvariantCulture));
                sb.Append(MinimumSymbolCount.ToString("X3", CultureInfo.InvariantCulture));
                record.Text = sb.ToString();
            }
        }

        /// <summary>
        /// Used to mark document as changed.
        /// </summary>
        protected void MarkSymbolsAsChanged() {
            if (Records != null) {
                var record = Records[RecordType.OwnSymbolsForPassword];
                record.Text = new string(GetSpecialSymbolSet());
            }
        }

        private PasswordPolicyStyle _style;
        /// <summary>
        /// Gets/sets style of password policy.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">Value cannot be wider than 16-bit.</exception>
        public PasswordPolicyStyle Style {
            get { return _style; }
            set {
                if (((int)value & 0xFFFF0000) != 0) { throw new ArgumentOutOfRangeException(nameof(value), "Value cannot be wider than 16-bit."); }
                if ((value & PasswordPolicyStyle.UseHexDigits) == 0) {
                    _style = value;
                } else { //force hex values only
                    _style = PasswordPolicyStyle.UseHexDigits;
                }
                MarkPolicyAsChanged();
            }
        }

        private int _totalPasswordLength;
        /// <summary>
        /// Gets/sets total password length.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">Length must be between 1 and 4095.</exception>
        public int TotalPasswordLength {
            get { return _totalPasswordLength; }
            set {
                // Upper bound is 4095 because the value is stored as 3 hex digits.
                if ((value < 1) || (value > 4095)) { throw new ArgumentOutOfRangeException(nameof(value), "Length must be between 1 and 4095."); }
                _totalPasswordLength = value;
                MarkPolicyAsChanged();
            }
        }

        private int _minimumLowercaseCount;
        /// <summary>
        /// Gets/sets minimum lowercase count.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">Count must be between 0 and 4095.</exception>
        public int MinimumLowercaseCount {
            get { return _minimumLowercaseCount; }
            set {
                if ((value < 0) || (value > 4095)) { throw new ArgumentOutOfRangeException(nameof(value), "Count must be between 0 and 4095."); }
                _minimumLowercaseCount = value;
                MarkPolicyAsChanged();
            }
        }

        private int _minimumUppercaseCount;
        /// <summary>
        /// Gets/sets minimum uppercase count.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">Count must be between 0 and 4095.</exception>
        public int MinimumUppercaseCount {
            get { return _minimumUppercaseCount; }
            set {
                if ((value < 0) || (value > 4095)) { throw new ArgumentOutOfRangeException(nameof(value), "Count must be between 0 and 4095."); }
                _minimumUppercaseCount = value;
                MarkPolicyAsChanged();
            }
        }

        private int _minimumDigitCount;
        /// <summary>
        /// Gets/sets minimum digit count.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">Count must be between 0 and 4095.</exception>
        public int MinimumDigitCount {
            get { return _minimumDigitCount; }
            set {
                if ((value < 0) || (value > 4095)) { throw new ArgumentOutOfRangeException(nameof(value), "Count must be between 0 and 4095."); }
                _minimumDigitCount = value;
                MarkPolicyAsChanged();
            }
        }

        private int _minimumSymbolCount;
        /// <summary>
        /// Gets/sets minimum symbol count.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">Count must be between 0 and 4095.</exception>
        public int MinimumSymbolCount {
            get { return _minimumSymbolCount; }
            set {
                if ((value < 0) || (value > 4095)) { throw new ArgumentOutOfRangeException(nameof(value), "Count must be between 0 and 4095."); }
                _minimumSymbolCount = value;
                MarkPolicyAsChanged();
            }
        }

        private char[] _specialSymbolSet = new char[] { };

        /// <summary>
        /// Returns special symbols that are allowed in the password.
        /// </summary>
        public char[] GetSpecialSymbolSet() {
            return _specialSymbolSet;
        }

        /// <summary>
        /// Sets which special characters are allowed. Duplicates are removed and
        /// the set is stored sorted.
        /// </summary>
        /// <param name="specialSymbols">Allowed special characters.</param>
        /// <exception cref="ArgumentNullException">Value cannot be null.</exception>
        public void SetSpecialSymbolSet(params char[] specialSymbols) {
            if (specialSymbols == null) { throw new ArgumentNullException(nameof(specialSymbols), "Value cannot be null."); }

            //filter the same
            var symbols = new List<char>(specialSymbols);
            if (symbols.Count > 1) {
                symbols.Sort();
                var prevCh = symbols[symbols.Count - 1];
                for (var i = symbols.Count - 2; i >= 0; i--) {
                    var currCh = symbols[i];
                    if (currCh == prevCh) {
                        symbols.RemoveAt(i);
                    } else {
                        prevCh = currCh;
                    }
                }
            }

            _specialSymbolSet = symbols.ToArray();
            MarkSymbolsAsChanged();
        }

        /// <summary>
        /// Returns true if both objects describe the same password policy.
        /// </summary>
        /// <param name="obj">Other object.</param>
        public override bool Equals(object obj) {
            // FIX: the original matched against NamedPasswordPolicy (a different
            // type) and compared ToString() results, which this class does not
            // override — so two identical PasswordPolicy instances never compared
            // equal. Compare the policy state directly instead.
            if (obj is PasswordPolicy other) {
                return (Style == other.Style)
                    && (TotalPasswordLength == other.TotalPasswordLength)
                    && (MinimumLowercaseCount == other.MinimumLowercaseCount)
                    && (MinimumUppercaseCount == other.MinimumUppercaseCount)
                    && (MinimumDigitCount == other.MinimumDigitCount)
                    && (MinimumSymbolCount == other.MinimumSymbolCount)
                    && string.Equals(new string(GetSpecialSymbolSet()), new string(other.GetSpecialSymbolSet()), StringComparison.Ordinal);
            }
            return false;
        }

        /// <summary>
        /// Returns hash code.
        /// </summary>
        public override int GetHashCode() {
            // Style alone keeps the contract: objects equal per Equals() above
            // necessarily share the same Style and thus the same hash code.
            return Style.GetHashCode();
        }

        // Parses the serialized policy text (see MarkPolicyAsChanged for the
        // layout) and assigns the parsed fields via the validating setters.
        // Parsing stops silently at the first malformed or missing field.
        private void FillPolicy(StringBuilder text) {
            if ((text.Length < 4) || !int.TryParse(text.ToString(0, 4), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var styleFlags)) { return; }
            text.Remove(0, 4);
            Style = (PasswordPolicyStyle)styleFlags;

            if ((text.Length < 3) || !int.TryParse(text.ToString(0, 3), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var totalPasswordLength)) { return; }
            text.Remove(0, 3);
            TotalPasswordLength = totalPasswordLength;

            if ((text.Length < 3) || !int.TryParse(text.ToString(0, 3), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var minimumLowercase)) { return; }
            text.Remove(0, 3);
            MinimumLowercaseCount = minimumLowercase;

            if ((text.Length < 3) || !int.TryParse(text.ToString(0, 3), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var minimumUppercase)) { return; }
            text.Remove(0, 3);
            MinimumUppercaseCount = minimumUppercase;

            if ((text.Length < 3) || !int.TryParse(text.ToString(0, 3), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var minimumDigits)) { return; }
            text.Remove(0, 3);
            MinimumDigitCount = minimumDigits;

            if ((text.Length < 3) || !int.TryParse(text.ToString(0, 3), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var minimumSymbols)) { return; }
            text.Remove(0, 3);
            MinimumSymbolCount = minimumSymbols;
        }

    }
}
// ==++==
//
//   Copyright (c) Microsoft Corporation.  All rights reserved.
//
// ==--==
// <OWNER>[....]</OWNER>
//
// NOTE(review): reference-source code. It depends on runtime internals
// (RuntimeType, RuntimeTypeHandle, JIT enum-cast intrinsics) and on the
// preprocessor structure (#if MONO / MOBILE / FEATURE_*); only comments were
// added below — all code tokens are unchanged.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Security;

namespace System.Collections.Generic
{
    using System.Globalization;
    using System.Runtime;
    using System.Runtime.CompilerServices;
    using System.Diagnostics.Contracts;

    // Base class for all equality comparers; Default lazily creates and caches a
    // comparer specialized for T (see CreateComparer).
    [Serializable]
    [TypeDependencyAttribute("System.Collections.Generic.ObjectEqualityComparer`1")]
    public abstract class EqualityComparer<T> : IEqualityComparer, IEqualityComparer<T>
    {
        // volatile: Default may race on first access; worst case two comparers are
        // created and one wins, which is benign because they are stateless.
        static volatile EqualityComparer<T> defaultComparer;

        public static EqualityComparer<T> Default {
#if !FEATURE_CORECLR
            [TargetedPatchingOptOut("Performance critical to inline across NGen image boundaries")]
#endif
            get {
                Contract.Ensures(Contract.Result<EqualityComparer<T>>() != null);
                EqualityComparer<T> comparer = defaultComparer;
                if (comparer == null) {
                    comparer = CreateComparer();
                    defaultComparer = comparer;
                }
                return comparer;
            }
        }

        //
        // Note that logic in this method is replicated in vm\compile.cpp to ensure that NGen
        // saves the right instantiations
        //
        [System.Security.SecuritySafeCritical]  // auto-generated
        private static EqualityComparer<T> CreateComparer() {
            Contract.Ensures(Contract.Result<EqualityComparer<T>>() != null);

            RuntimeType t = (RuntimeType)typeof(T);
            // Specialize type byte for performance reasons
            if (t == typeof(byte)) {
                return (EqualityComparer<T>)(object)(new ByteEqualityComparer());
            }

#if MOBILE
            // Breaks .net serialization compatibility
            if (t == typeof (string))
                return (EqualityComparer<T>)(object)new InternalStringComparer ();
#endif

            // If T implements IEquatable<T> return a GenericEqualityComparer<T>
            if (typeof(IEquatable<T>).IsAssignableFrom(t)) {
#if MONO
                return (EqualityComparer<T>)RuntimeType.CreateInstanceForAnotherGenericParameter (typeof(GenericEqualityComparer<>), t);
#else
                return (EqualityComparer<T>)RuntimeTypeHandle.CreateInstanceForAnotherGenericParameter((RuntimeType)typeof(GenericEqualityComparer<int>), t);
#endif
            }

            // If T is a Nullable<U> where U implements IEquatable<U> return a NullableEqualityComparer<U>
            if (t.IsGenericType && t.GetGenericTypeDefinition() == typeof(Nullable<>)) {
                RuntimeType u = (RuntimeType)t.GetGenericArguments()[0];
                if (typeof(IEquatable<>).MakeGenericType(u).IsAssignableFrom(u)) {
#if MONO
                    return (EqualityComparer<T>)RuntimeType.CreateInstanceForAnotherGenericParameter (typeof(NullableEqualityComparer<>), u);
#else
                    return (EqualityComparer<T>)RuntimeTypeHandle.CreateInstanceForAnotherGenericParameter((RuntimeType)typeof(NullableEqualityComparer<int>), u);
#endif
                }
            }

            // If T is an int-based Enum, return an EnumEqualityComparer<T>
            // See the METHOD__JIT_HELPERS__UNSAFE_ENUM_CAST and METHOD__JIT_HELPERS__UNSAFE_ENUM_CAST_LONG cases in getILIntrinsicImplementation
            if (t.IsEnum) {
                TypeCode underlyingTypeCode = Type.GetTypeCode(Enum.GetUnderlyingType(t));

                // Depending on the enum type, we need to special case the comparers so that we avoid boxing
                // Note: We have different comparers for Short and SByte because for those types we need to make sure we call GetHashCode on the actual underlying type as the
                // implementation of GetHashCode is more complex than for the other types.
                switch (underlyingTypeCode) {
                    case TypeCode.Int16: // short
#if MONO
                        return (EqualityComparer<T>)RuntimeType.CreateInstanceForAnotherGenericParameter (typeof(ShortEnumEqualityComparer<>), t);
#else
                        return (EqualityComparer<T>)RuntimeTypeHandle.CreateInstanceForAnotherGenericParameter((RuntimeType)typeof(ShortEnumEqualityComparer<short>), t);
#endif
                    case TypeCode.SByte:
#if MONO
                        return (EqualityComparer<T>)RuntimeType.CreateInstanceForAnotherGenericParameter (typeof(SByteEnumEqualityComparer<>), t);
#else
                        return (EqualityComparer<T>)RuntimeTypeHandle.CreateInstanceForAnotherGenericParameter((RuntimeType)typeof(SByteEnumEqualityComparer<sbyte>), t);
#endif
                    case TypeCode.Int32:
                    case TypeCode.UInt32:
                    case TypeCode.Byte:
                    case TypeCode.UInt16: //ushort
#if MONO
                        return (EqualityComparer<T>)RuntimeType.CreateInstanceForAnotherGenericParameter (typeof(EnumEqualityComparer<>), t);
#else
                        return (EqualityComparer<T>)RuntimeTypeHandle.CreateInstanceForAnotherGenericParameter((RuntimeType)typeof(EnumEqualityComparer<int>), t);
#endif
                    case TypeCode.Int64:
                    case TypeCode.UInt64:
#if MONO
                        return (EqualityComparer<T>)RuntimeType.CreateInstanceForAnotherGenericParameter (typeof(LongEnumEqualityComparer<>), t);
#else
                        return (EqualityComparer<T>)RuntimeTypeHandle.CreateInstanceForAnotherGenericParameter((RuntimeType)typeof(LongEnumEqualityComparer<long>), t);
#endif
                }
            }

            // Otherwise return an ObjectEqualityComparer<T>
            return new ObjectEqualityComparer<T>();
        }

        [Pure]
        public abstract bool Equals(T x, T y);
        [Pure]
        public abstract int GetHashCode(T obj);

        // Linear scan over [startIndex, startIndex + count); returns -1 when not found.
        internal virtual int IndexOf(T[] array, T value, int startIndex, int count) {
            int endIndex = startIndex + count;
            for (int i = startIndex; i < endIndex; i++) {
                if (Equals(array[i], value)) return i;
            }
            return -1;
        }

        // Backwards linear scan over the count elements ending at startIndex.
        internal virtual int LastIndexOf(T[] array, T value, int startIndex, int count) {
            int endIndex = startIndex - count + 1;
            for (int i = startIndex; i >= endIndex; i--) {
                if (Equals(array[i], value)) return i;
            }
            return -1;
        }

        // Non-generic bridge: null hashes to 0; a non-T argument is rejected.
        int IEqualityComparer.GetHashCode(object obj) {
            if (obj == null) return 0;
            if (obj is T) return GetHashCode((T)obj);
            ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArgumentForComparison);
            return 0;
        }

        // Non-generic bridge: reference-equal or both-null short circuits; mixed
        // null / non-T arguments are rejected.
        bool IEqualityComparer.Equals(object x, object y) {
            if (x == y) return true;
            if (x == null || y == null) return false;
            if ((x is T) && (y is T)) return Equals((T)x, (T)y);
            ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArgumentForComparison);
            return false;
        }
    }

    // The methods in this class look identical to the inherited methods, but the calls
    // to Equal bind to IEquatable<T>.Equals(T) instead of Object.Equals(Object)
    [Serializable]
    internal class GenericEqualityComparer<T>: EqualityComparer<T> where T: IEquatable<T>
    {
        [Pure]
        public override bool Equals(T x, T y) {
            if (x != null) {
                if (y != null) return x.Equals(y);
                return false;
            }
            if (y != null) return false;
            return true;
        }

        [Pure]
#if !FEATURE_CORECLR
        [TargetedPatchingOptOut("Performance critical to inline across NGen image boundaries")]
#endif
        public override int GetHashCode(T obj) {
            if (obj == null) return 0;
            return obj.GetHashCode();
        }

        // The null check is hoisted out of the loop so the non-null path avoids a
        // per-element comparison against null.
        internal override int IndexOf(T[] array, T value, int startIndex, int count) {
            int endIndex = startIndex + count;
            if (value == null) {
                for (int i = startIndex; i < endIndex; i++) {
                    if (array[i] == null) return i;
                }
            }
            else {
                for (int i = startIndex; i < endIndex; i++) {
                    if (array[i] != null && array[i].Equals(value)) return i;
                }
            }
            return -1;
        }

        internal override int LastIndexOf(T[] array, T value, int startIndex, int count) {
            int endIndex = startIndex - count + 1;
            if (value == null) {
                for (int i = startIndex; i >= endIndex; i--) {
                    if (array[i] == null) return i;
                }
            }
            else {
                for (int i = startIndex; i >= endIndex; i--) {
                    if (array[i] != null && array[i].Equals(value)) return i;
                }
            }
            return -1;
        }

        // Equals method for the comparer itself. Comparers are stateless, so any
        // two instances of the same closed generic type are interchangeable.
        public override bool Equals(Object obj){
            GenericEqualityComparer<T> comparer = obj as GenericEqualityComparer<T>;
            return comparer != null;
        }

        public override int GetHashCode() {
            return this.GetType().Name.GetHashCode();
        }
    }

    // Comparer for Nullable<T>; accesses the internal "value" field directly to
    // avoid going through Value/GetValueOrDefault.
    [Serializable]
    internal class NullableEqualityComparer<T> : EqualityComparer<Nullable<T>> where T : struct, IEquatable<T>
    {
        [Pure]
        public override bool Equals(Nullable<T> x, Nullable<T> y) {
            if (x.HasValue) {
                if (y.HasValue) return x.value.Equals(y.value);
                return false;
            }
            if (y.HasValue) return false;
            return true;
        }

        [Pure]
        public override int GetHashCode(Nullable<T> obj) {
            return obj.GetHashCode();
        }

        internal override int IndexOf(Nullable<T>[] array, Nullable<T> value, int startIndex, int count) {
            int endIndex = startIndex + count;
            if (!value.HasValue) {
                for (int i = startIndex; i < endIndex; i++) {
                    if (!array[i].HasValue) return i;
                }
            }
            else {
                for (int i = startIndex; i < endIndex; i++) {
                    if (array[i].HasValue && array[i].value.Equals(value.value)) return i;
                }
            }
            return -1;
        }

        internal override int LastIndexOf(Nullable<T>[] array, Nullable<T> value, int startIndex, int count) {
            int endIndex = startIndex - count + 1;
            if (!value.HasValue) {
                for (int i = startIndex; i >= endIndex; i--) {
                    if (!array[i].HasValue) return i;
                }
            }
            else {
                for (int i = startIndex; i >= endIndex; i--) {
                    if (array[i].HasValue && array[i].value.Equals(value.value)) return i;
                }
            }
            return -1;
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj){
            NullableEqualityComparer<T> comparer = obj as NullableEqualityComparer<T>;
            return comparer != null;
        }

        public override int GetHashCode() {
            return this.GetType().Name.GetHashCode();
        }
    }

    // Fallback comparer: dispatches to Object.Equals / Object.GetHashCode.
    [Serializable]
    internal class ObjectEqualityComparer<T>: EqualityComparer<T>
    {
        [Pure]
        public override bool Equals(T x, T y) {
            if (x != null) {
                if (y != null) return x.Equals(y);
                return false;
            }
            if (y != null) return false;
            return true;
        }

        [Pure]
#if !FEATURE_CORECLR
        [TargetedPatchingOptOut("Performance critical to inline across NGen image boundaries")]
#endif
        public override int GetHashCode(T obj) {
            if (obj == null) return 0;
            return obj.GetHashCode();
        }

        internal override int IndexOf(T[] array, T value, int startIndex, int count) {
            int endIndex = startIndex + count;
            if (value == null) {
                for (int i = startIndex; i < endIndex; i++) {
                    if (array[i] == null) return i;
                }
            }
            else {
                for (int i = startIndex; i < endIndex; i++) {
                    if (array[i] != null && array[i].Equals(value)) return i;
                }
            }
            return -1;
        }

        internal override int LastIndexOf(T[] array, T value, int startIndex, int count) {
            int endIndex = startIndex - count + 1;
            if (value == null) {
                for (int i = startIndex; i >= endIndex; i--) {
                    if (array[i] == null) return i;
                }
            }
            else {
                for (int i = startIndex; i >= endIndex; i--) {
                    if (array[i] != null && array[i].Equals(value)) return i;
                }
            }
            return -1;
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj){
            ObjectEqualityComparer<T> comparer = obj as ObjectEqualityComparer<T>;
            return comparer != null;
        }

        public override int GetHashCode() {
            return this.GetType().Name.GetHashCode();
        }
    }

    // Performance of IndexOf on byte array is very important for some scenarios.
    // We will call the C runtime function memchr, which is optimized.
    [Serializable]
    internal class ByteEqualityComparer: EqualityComparer<byte>
    {
        [Pure]
        public override bool Equals(byte x, byte y) {
            return x == y;
        }

        [Pure]
        public override int GetHashCode(byte b) {
            return b.GetHashCode();
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        internal unsafe override int IndexOf(byte[] array, byte value, int startIndex, int count) {
            if (array==null)
                throw new ArgumentNullException("array");
            if (startIndex < 0)
                throw new ArgumentOutOfRangeException("startIndex", Environment.GetResourceString("ArgumentOutOfRange_Index"));
            if (count < 0)
                throw new ArgumentOutOfRangeException("count", Environment.GetResourceString("ArgumentOutOfRange_Count"));
            if (count > array.Length - startIndex)
                throw new ArgumentException(Environment.GetResourceString("Argument_InvalidOffLen"));
            Contract.EndContractBlock();
            if (count == 0) return -1;
            // Pin the array and delegate to the optimized native scan.
            fixed (byte* pbytes = array) {
                return Buffer.IndexOfByte(pbytes, value, startIndex, count);
            }
        }

        internal override int LastIndexOf(byte[] array, byte value, int startIndex, int count) {
            int endIndex = startIndex - count + 1;
            for (int i = startIndex; i >= endIndex; i--) {
                if (array[i] == value) return i;
            }
            return -1;
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj){
            ByteEqualityComparer comparer = obj as ByteEqualityComparer;
            return comparer != null;
        }

        public override int GetHashCode() {
            return this.GetType().Name.GetHashCode();
        }
    }

    // Comparer for enums whose underlying type fits in an int; the JIT intrinsic
    // UnsafeEnumCast avoids boxing the enum value.
    [Serializable]
    internal sealed class EnumEqualityComparer<T>: EqualityComparer<T> where T : struct
    {
        [Pure]
        public override bool Equals(T x, T y) {
            int x_final = System.Runtime.CompilerServices.JitHelpers.UnsafeEnumCast(x);
            int y_final = System.Runtime.CompilerServices.JitHelpers.UnsafeEnumCast(y);
            return x_final == y_final;
        }

        [Pure]
        public override int GetHashCode(T obj) {
            int x_final = System.Runtime.CompilerServices.JitHelpers.UnsafeEnumCast(obj);
            return x_final.GetHashCode();
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj){
            EnumEqualityComparer<T> comparer = obj as EnumEqualityComparer<T>;
            return comparer != null;
        }

        public override int GetHashCode() {
            return this.GetType().Name.GetHashCode();
        }
    }

    // Comparer for enums with a 64-bit underlying type.
    [Serializable]
    internal sealed class LongEnumEqualityComparer<T>: EqualityComparer<T> where T : struct
    {
        [Pure]
        public override bool Equals(T x, T y) {
            long x_final = System.Runtime.CompilerServices.JitHelpers.UnsafeEnumCastLong(x);
            long y_final = System.Runtime.CompilerServices.JitHelpers.UnsafeEnumCastLong(y);
            return x_final == y_final;
        }

        [Pure]
        public override int GetHashCode(T obj) {
            long x_final = System.Runtime.CompilerServices.JitHelpers.UnsafeEnumCastLong(obj);
            return x_final.GetHashCode();
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj){
            LongEnumEqualityComparer<T> comparer = obj as LongEnumEqualityComparer<T>;
            return comparer != null;
        }

        public override int GetHashCode() {
            return this.GetType().Name.GetHashCode();
        }
    }

    // Mono-only string comparer (see the MOBILE branch of CreateComparer).
    [Serializable]
    sealed class InternalStringComparer : EqualityComparer<string> {

        public override int GetHashCode (string obj)
        {
            if (obj == null)
                return 0;
            return obj.GetHashCode ();
        }

        public override bool Equals (string x, string y)
        {
            if (x == null)
                return y == null;

            if ((object) x == (object) y)
                return true;

            return x.Equals (y);
        }

        internal override int IndexOf (string[] array, string value, int startIndex, int count)
        {
            int endIndex = startIndex + count;
            for (int i = startIndex; i < endIndex; ++i) {
                if (Array.UnsafeLoad (array, i) == value)
                    return i;
            }
            return -1;
        }
    }

#if FEATURE_RANDOMIZED_STRING_HASHING
    // This type is not serializeable by design. It does not exist in previous versions and will be removed
    // Once we move the framework to using secure hashing by default.
    internal sealed class RandomizedStringEqualityComparer : IEqualityComparer<String>, IEqualityComparer, IWellKnownStringEqualityComparer
    {
        // Per-instance entropy makes the string hash unpredictable across comparers.
        private long _entropy;

        public RandomizedStringEqualityComparer() {
            _entropy = HashHelpers.GetEntropy();
        }

        // 'new' (not override): hides Object.Equals for the non-generic interface path.
        public new bool Equals(object x, object y) {
            if (x == y) return true;
            if (x == null || y == null) return false;
            if ((x is string) && (y is string)) return Equals((string)x, (string)y);
            ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArgumentForComparison);
            return false;
        }

        [Pure]
        public bool Equals(string x, string y) {
            if (x != null) {
                if (y != null) return x.Equals(y);
                return false;
            }
            if (y != null) return false;
            return true;
        }

        [Pure]
        [SecuritySafeCritical]
        public int GetHashCode(String obj) {
            if(obj == null) return 0;
            return String.InternalMarvin32HashString(obj, obj.Length, _entropy);
        }

        [Pure]
        [SecuritySafeCritical]
        public int GetHashCode(Object obj) {
            if(obj == null) return 0;

            string sObj = obj as string;
            if(sObj != null) return String.InternalMarvin32HashString(sObj, sObj.Length, _entropy);

            return obj.GetHashCode();
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj) {
            RandomizedStringEqualityComparer comparer = obj as RandomizedStringEqualityComparer;
            return (comparer != null) && (this._entropy == comparer._entropy);
        }

        public override int GetHashCode() {
            return (this.GetType().Name.GetHashCode() ^ ((int) (_entropy & 0x7FFFFFFF)));
        }

        IEqualityComparer IWellKnownStringEqualityComparer.GetRandomizedEqualityComparer() {
            return new RandomizedStringEqualityComparer();
        }

        // We want to serialize the old comparer.
        IEqualityComparer IWellKnownStringEqualityComparer.GetEqualityComparerForSerialization() {
            return EqualityComparer<string>.Default;
        }
    }

    // This type is not serializeable by design. It does not exist in previous versions and will be removed
    // Once we move the framework to using secure hashing by default.
    internal sealed class RandomizedObjectEqualityComparer : IEqualityComparer, IWellKnownStringEqualityComparer
    {
        private long _entropy;

        public RandomizedObjectEqualityComparer() {
            _entropy = HashHelpers.GetEntropy();
        }

        [Pure]
        public new bool Equals(Object x, Object y) {
            if (x != null) {
                if (y != null) return x.Equals(y);
                return false;
            }
            if (y != null) return false;
            return true;
        }

        [Pure]
        [SecuritySafeCritical]
        public int GetHashCode(Object obj) {
            if(obj == null) return 0;

            string sObj = obj as string;
            // Strings get the randomized Marvin32 hash; everything else falls back
            // to the object's own hash code.
            if(sObj != null) return String.InternalMarvin32HashString(sObj, sObj.Length, _entropy);

            return obj.GetHashCode();
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj){
            RandomizedObjectEqualityComparer comparer = obj as RandomizedObjectEqualityComparer;
            return (comparer != null) && (this._entropy == comparer._entropy);
        }

        public override int GetHashCode() {
            return (this.GetType().Name.GetHashCode() ^ ((int) (_entropy & 0x7FFFFFFF)));
        }

        IEqualityComparer IWellKnownStringEqualityComparer.GetRandomizedEqualityComparer() {
            return new RandomizedObjectEqualityComparer();
        }

        // We want to serialize the old comparer, which in this case was null.
        IEqualityComparer IWellKnownStringEqualityComparer.GetEqualityComparerForSerialization() {
            return null;
        }
    }
#endif
}
/* FluorineFx open source library Copyright (C) 2007 Zoltan Csibi, zoltan@TheSilentGroup.com, FluorineFx.com This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ using System; using System.Collections; using System.Collections.Generic; using System.Text; using System.IO; #if !SILVERLIGHT using log4net; #endif using FluorineFx.Util; using FluorineFx.IO; namespace FluorineFx.IO.Mp4 { /// <summary> /// This reader is used to read the contents of an MP4 file. 
/// </summary>
class Mp4Reader : ITagReader
{
#if !SILVERLIGHT
    private static readonly ILog log = LogManager.GetLogger(typeof(Mp4Reader));
#endif
    /// <summary>
    /// Audio packet prefix (FLV AAC raw-frame tag header: 0xaf 0x01).
    /// </summary>
    public static byte[] PREFIX_AUDIO_FRAME = new byte[]{(byte) 0xaf, (byte) 0x01};
    /// <summary>
    /// Audio config aac main
    /// </summary>
    public static byte[] AUDIO_CONFIG_FRAME_AAC_MAIN = new byte[]{(byte) 0x0a, (byte) 0x10};
    /// <summary>
    /// Audio config aac lc
    /// </summary>
    public static byte[] AUDIO_CONFIG_FRAME_AAC_LC = new byte[]{(byte) 0x12, (byte) 0x10};
    /// <summary>
    /// Audio config sbr
    /// </summary>
    public static byte[] AUDIO_CONFIG_FRAME_SBR = new byte[]{(byte) 0x13, (byte) 0x90, (byte) 0x56, (byte) 0xe5, (byte) 0xa5, (byte) 0x48, (byte) 0x00};
    /// <summary>
    /// Video packet prefix for the decoder frame (AVC sequence header tag).
    /// </summary>
    public static byte[] PREFIX_VIDEO_CONFIG_FRAME = new byte[]{(byte) 0x17, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00};
    /// <summary>
    /// Video packet prefix for key frames
    /// </summary>
    public static byte[] PREFIX_VIDEO_KEYFRAME = new byte[]{(byte) 0x17, (byte) 0x01, (byte) 0, (byte) 0, (byte) 0};
    /// <summary>
    /// Video packet prefix for standard frames (interframe)
    /// </summary>
    public static byte[] PREFIX_VIDEO_FRAME = new byte[]{(byte) 0x27, (byte) 0x01, (byte) 0, (byte) 0, (byte) 0};
    // Guards ReadTag against concurrent callers; exposed via SyncRoot.
    object _syncLock = new object();
    //private FileInfo _file;
    private Stream _stream;
    private Mp4DataStream _inputStream;
    /// <summary>
    /// Container for metadata and any other tags that should be sent prior to media data.
    /// </summary>
    private LinkedList<ITag> _firstTags = new LinkedList<ITag>();
    /// <summary>
    /// Container for seek points in the video. These are the time stamps for the key frames.
    /// Remains null when the file has no sync-sample (stss) atom.
    /// </summary>
    private LinkedList<int> _seekPoints;
    /// <summary>
    /// Mapping between file position and timestamp in ms.
    /// </summary>
    private Dictionary<int, long> _timePosMap;
    // Maps sample number -> file offset; populated by AnalyzeFrames.
    private Dictionary<int, long> _samplePosMap;
    /// <summary>
    /// Whether or not the clip contains a video track.
    /// </summary>
    private bool _hasVideo = false;
    /// <summary>
    /// Whether or not the clip contains an audio track.
    /// </summary>
    private bool _hasAudio = false;
    /// <summary>
    /// Default video codec.
    /// </summary>
    private String _videoCodecId = "avc1";
    /// <summary>
    /// Default audio codec.
    /// </summary>
    private String _audioCodecId = "mp4a";
    /// <summary>
    /// Decoder bytes / configs.
    /// </summary>
    private byte[] _audioDecoderBytes;
    private byte[] _videoDecoderBytes;
    /// <summary>
    /// Duration in milliseconds.
    /// NOTE(review): DecodeHeader stores this in movie-timescale units; it is only
    /// converted to milliseconds at the end of CreateFileMeta — confirm before reuse.
    /// </summary>
    private long _duration;
    /// <summary>
    /// Movie time scale.
    /// </summary>
    private int _timeScale;
    private int _width;
    private int _height;
    /// <summary>
    /// Audio sample rate kHz.
    /// NOTE(review): the values read from mdhd/mp4a (e.g. 44100) look like Hz, not kHz.
    /// </summary>
    private double _audioTimeScale;
    private int _audioChannels;
    /// <summary>
    /// Default to aac lc (aottype: 0 = AAC Main, 1 = AAC LC, 2 = SBR)
    /// </summary>
    private int _audioCodecType = 1;
    private int _videoSampleCount;
    private double _fps;
    private double _videoTimeScale;
    private int _avcLevel;
    private int _avcProfile;
    private String _formattedDuration;
    // Byte offsets of the moov/mdat payloads; DecodeHeader adds 8 for the atom header.
    private long _moovOffset;
    private long _mdatOffset;
    /// <summary>
    /// Samples to chunk mappings.
    /// </summary>
    private List<Mp4Atom.Record> _videoSamplesToChunks;
    private List<Mp4Atom.Record> _audioSamplesToChunks;
    /// <summary>
    /// Keyframe - sample numbers.
    /// </summary>
    private List<int> _syncSamples;
    /// <summary>
    /// Samples.
    /// </summary>
    private List<int> _videoSamples;
    private List<int> _audioSamples;
    /// <summary>
    /// Chunk offsets.
    /// </summary>
    private List<long> _videoChunkOffsets;
    private List<long> _audioChunkOffsets;
    /// <summary>
    /// Sample duration (in track-timescale ticks; overwritten from the stts atom).
    /// </summary>
    private int _videoSampleDuration = 125;
    private int _audioSampleDuration = 1024;
    /// <summary>
    /// Keeps track of current frame / sample.
    /// </summary>
    private int _currentFrame = 0;
    private int _prevFrameSize = 0;
    private List<Mp4Frame> _frames = new List<Mp4Frame>();
    private long _audioCount;
    private long _videoCount;
    /// <summary>
    /// Creates MP4 reader from file input stream, sets up metadata generation flag.
    /// Parses the whole header eagerly: atoms, frame analysis, metadata and decoder-config tags.
    /// </summary>
    /// <param name="file">The MP4 file to open and parse.</param>
    public Mp4Reader(FileInfo file)
    {
        //_file = file;
        // NOTE(review): opens read-write/no-share by default; FileAccess.Read +
        // FileShare.Read would be safer for concurrent playback — confirm callers.
        _stream = new FileStream(file.FullName, FileMode.Open);
        _inputStream = new Mp4DataStream(_stream);
        //Decode all the info that we want from the atoms
        DecodeHeader();
        //Analyze the samples/chunks and build the keyframe meta data
        AnalyzeFrames();
        //Add meta data
        _firstTags.AddLast(CreateFileMeta());
        //Create / add the pre-streaming (decoder config) tags
        CreatePreStreamingTags();
    }
    // NOTE(review): this overload does NOT decode the header or build frames;
    // presumably the caller drives DecodeHeader/AnalyzeFrames itself — confirm.
    public Mp4Reader(Stream stream)
    {
        _stream = stream;
        _inputStream = new Mp4DataStream(_stream);
    }
    /// <summary>
    /// Gets an object that can be used to synchronize access.
    /// </summary>
    public object SyncRoot
    {
        get { return _syncLock; }
    }
    // Current absolute read offset within the underlying MP4 stream.
    private long GetCurrentPosition()
    {
        return _inputStream.Offset;
    }
    #region ITagReader Members
    // Not backed by a file abstraction in this implementation.
    public IStreamableFile File
    {
        get { return null; }
    }
    public int Offset
    {
        get { return (int)_inputStream.Offset; }
    }
    public long BytesRead
    {
        get { return GetCurrentPosition(); }
    }
    public long Duration
    {
        get { return _duration; }
    }
    /// <summary>
    /// This handles the moov atom being at the beginning or end of the file, so the mdat may also be before or after the moov atom.
/// </summary> public void DecodeHeader() { try { // The first atom will/should be the type Mp4Atom type = Mp4Atom.CreateAtom(_inputStream); // Expect ftyp #if !SILVERLIGHT log.Debug(string.Format("Type {0}", type)); #endif //log.debug("Atom int types - free={} wide={}", MP4Atom.typeToInt("free"), MP4Atom.typeToInt("wide")); // keep a running count of the number of atoms found at the "top" levels int topAtoms = 0; // we want a moov and an mdat, anything else throw the invalid file type error while (topAtoms < 2) { Mp4Atom atom = Mp4Atom.CreateAtom(_inputStream); switch (atom.Type) { case 1836019574: //moov topAtoms++; Mp4Atom moov = atom; // expect moov #if !SILVERLIGHT log.Debug(string.Format("Type {0}", moov)); #endif //log.Debug("moov children: {}", moov.getChildren()); _moovOffset = _inputStream.Offset - moov.Size; Mp4Atom mvhd = moov.Lookup(Mp4Atom.TypeToInt("mvhd"), 0); if (mvhd != null) { #if !SILVERLIGHT log.Debug("Movie header atom found"); #endif //get the initial timescale _timeScale = mvhd.TimeScale; _duration = mvhd.Duration; #if !SILVERLIGHT log.Debug(string.Format("Time scale {0} Duration {1}", _timeScale, _duration)); #endif } /* nothing needed here yet MP4Atom meta = moov.lookup(MP4Atom.typeToInt("meta"), 0); if (meta != null) { log.debug("Meta atom found"); log.debug("{}", ToStringBuilder.reflectionToString(meta)); } */ //two tracks or bust int i = 0; while (i < 2) { Mp4Atom trak = moov.Lookup(Mp4Atom.TypeToInt("trak"), i); if (trak != null) { #if !SILVERLIGHT log.Debug("Track atom found"); #endif //log.debug("trak children: {}", trak.getChildren()); // trak: tkhd, edts, mdia Mp4Atom tkhd = trak.Lookup(Mp4Atom.TypeToInt("tkhd"), 0); if (tkhd != null) { #if !SILVERLIGHT log.Debug("Track header atom found"); #endif //log.debug("tkhd children: {}", tkhd.getChildren()); if (tkhd.Width > 0) { _width = tkhd.Width; _height = tkhd.Height; #if !SILVERLIGHT log.Debug(string.Format("Width {0} x Height {1}", _width, _height)); #endif } } Mp4Atom edts = 
trak.Lookup(Mp4Atom.TypeToInt("edts"), 0); if (edts != null) { #if !SILVERLIGHT log.Debug("Edit atom found"); #endif //log.debug("edts children: {}", edts.getChildren()); //log.debug("Width {} x Height {}", edts.getWidth(), edts.getHeight()); } Mp4Atom mdia = trak.Lookup(Mp4Atom.TypeToInt("mdia"), 0); if (mdia != null) { #if !SILVERLIGHT log.Debug("Media atom found"); #endif // mdia: mdhd, hdlr, minf int scale = 0; //get the media header atom Mp4Atom mdhd = mdia.Lookup(Mp4Atom.TypeToInt("mdhd"), 0); if (mdhd != null) { #if !SILVERLIGHT log.Debug("Media data header atom found"); #endif //this will be for either video or audio depending media info scale = mdhd.TimeScale; #if !SILVERLIGHT log.Debug(string.Format("Time scale {0}", scale)); #endif } Mp4Atom hdlr = mdia.Lookup(Mp4Atom.TypeToInt("hdlr"), 0); if (hdlr != null) { #if !SILVERLIGHT log.Debug("Handler ref atom found"); // soun or vide log.Debug(string.Format("Handler type: {0}", Mp4Atom.IntToType(hdlr.HandlerType))); #endif String hdlrType = Mp4Atom.IntToType(hdlr.HandlerType); if ("vide".Equals(hdlrType)) { _hasVideo = true; if (scale > 0) { _videoTimeScale = scale * 1.0; #if !SILVERLIGHT log.Debug(string.Format("Video time scale: {0}", _videoTimeScale)); #endif } } else if ("soun".Equals(hdlrType)) { _hasAudio = true; if (scale > 0) { _audioTimeScale = scale * 1.0; #if !SILVERLIGHT log.Debug(string.Format("Audio time scale: {0}", _audioTimeScale)); #endif } } i++; } Mp4Atom minf = mdia.Lookup(Mp4Atom.TypeToInt("minf"), 0); if (minf != null) { #if !SILVERLIGHT log.Debug("Media info atom found"); #endif // minf: (audio) smhd, dinf, stbl / (video) vmhd, // dinf, stbl Mp4Atom smhd = minf.Lookup(Mp4Atom.TypeToInt("smhd"), 0); if (smhd != null) { #if !SILVERLIGHT log.Debug("Sound header atom found"); #endif Mp4Atom dinf = minf.Lookup(Mp4Atom.TypeToInt("dinf"), 0); if (dinf != null) { #if !SILVERLIGHT log.Debug("Data info atom found"); #endif // dinf: dref //log.Debug("Sound dinf children: {}", dinf.getChildren()); 
Mp4Atom dref = dinf.Lookup(Mp4Atom.TypeToInt("dref"), 0); if (dref != null) { #if !SILVERLIGHT log.Debug("Data reference atom found"); #endif } } Mp4Atom stbl = minf.Lookup(Mp4Atom.TypeToInt("stbl"), 0); if (stbl != null) { #if !SILVERLIGHT log.Debug("Sample table atom found"); #endif // stbl: stsd, stts, stss, stsc, stsz, stco, // stsh //log.debug("Sound stbl children: {}", stbl.getChildren()); // stsd - sample description // stts - time to sample // stsc - sample to chunk // stsz - sample size // stco - chunk offset //stsd - has codec child Mp4Atom stsd = stbl.Lookup(Mp4Atom.TypeToInt("stsd"), 0); if (stsd != null) { //stsd: mp4a #if !SILVERLIGHT log.Debug("Sample description atom found"); #endif Mp4Atom mp4a = stsd.Children[0]; //could set the audio codec here SetAudioCodecId(Mp4Atom.IntToType(mp4a.Type)); //log.debug("{}", ToStringBuilder.reflectionToString(mp4a)); #if !SILVERLIGHT log.Debug(string.Format("Sample size: {0}", mp4a.SampleSize)); #endif int ats = mp4a.TimeScale; //skip invalid audio time scale if (ats > 0) { _audioTimeScale = ats * 1.0; } _audioChannels = mp4a.ChannelCount; #if !SILVERLIGHT log.Debug(string.Format("Sample rate (audio time scale): {0}", _audioTimeScale)); log.Debug(string.Format("Channels: {0}", _audioChannels)); #endif //mp4a: esds if (mp4a.Children.Count > 0) { #if !SILVERLIGHT log.Debug("Elementary stream descriptor atom found"); #endif Mp4Atom esds = mp4a.Children[0]; //log.debug("{}", ToStringBuilder.reflectionToString(esds)); Mp4Descriptor descriptor = esds.EsdDescriptor; //log.debug("{}", ToStringBuilder.reflectionToString(descriptor)); if (descriptor != null) { List<Mp4Descriptor> children = descriptor.Children; for (int e = 0; e < children.Count; e++) { Mp4Descriptor descr = children[e]; //log.debug("{}", ToStringBuilder.reflectionToString(descr)); if (descr.Children.Count > 0) { List<Mp4Descriptor> children2 = descr.Children; for (int e2 = 0; e2 < children2.Count; e2++) { Mp4Descriptor descr2 = children2[e2]; 
//log.debug("{}", ToStringBuilder.reflectionToString(descr2)); if (descr2.Type == Mp4Descriptor.MP4DecSpecificInfoDescriptorTag) { //we only want the MP4DecSpecificInfoDescriptorTag _audioDecoderBytes = descr2.DSID; //compare the bytes to get the aacaot/aottype //match first byte switch (_audioDecoderBytes[0]) { case 0x12: default: //AAC LC - 12 10 _audioCodecType = 1; break; case 0x0a: //AAC Main - 0A 10 _audioCodecType = 0; break; case 0x11: case 0x13: //AAC LC SBR - 11 90 & 13 xx _audioCodecType = 2; break; } //we want to break out of top level for loop e = 99; break; } } } } } } } //stsc - has Records Mp4Atom stsc = stbl.Lookup(Mp4Atom.TypeToInt("stsc"), 0); if (stsc != null) { #if !SILVERLIGHT log.Debug("Sample to chunk atom found"); #endif _audioSamplesToChunks = stsc.Records; #if !SILVERLIGHT log.Debug(string.Format("Record count: {0}", _audioSamplesToChunks.Count)); #endif Mp4Atom.Record rec = _audioSamplesToChunks[0]; #if !SILVERLIGHT log.Debug(string.Format("Record data: Description index={0} Samples per chunk={1}", rec.SampleDescriptionIndex, rec.SamplesPerChunk)); #endif } //stsz - has Samples Mp4Atom stsz = stbl.Lookup(Mp4Atom.TypeToInt("stsz"), 0); if (stsz != null) { #if !SILVERLIGHT log.Debug("Sample size atom found"); #endif _audioSamples = stsz.Samples; //vector full of integers #if !SILVERLIGHT log.Debug(string.Format("Sample size: {0}", stsz.SampleSize)); log.Debug(string.Format("Sample count: {0}", _audioSamples.Count)); #endif } //stco - has Chunks Mp4Atom stco = stbl.Lookup(Mp4Atom.TypeToInt("stco"), 0); if (stco != null) { #if !SILVERLIGHT log.Debug("Chunk offset atom found"); #endif //vector full of integers _audioChunkOffsets = stco.Chunks; #if !SILVERLIGHT log.Debug(string.Format("Chunk count: {0}", _audioChunkOffsets.Count)); #endif } //stts - has TimeSampleRecords Mp4Atom stts = stbl.Lookup(Mp4Atom.TypeToInt("stts"), 0); if (stts != null) { #if !SILVERLIGHT log.Debug("Time to sample atom found"); #endif List<Mp4Atom.TimeSampleRecord> 
records = stts.TimeToSamplesRecords; #if !SILVERLIGHT log.Debug(string.Format("Record count: {0}", records.Count)); #endif Mp4Atom.TimeSampleRecord rec = records[0]; #if !SILVERLIGHT log.Debug(string.Format("Record data: Consecutive samples={0} Duration={1}", rec.ConsecutiveSamples, rec.SampleDuration)); #endif //if we have 1 record then all samples have the same duration if (records.Count > 1) { //TODO: handle audio samples with varying durations #if !SILVERLIGHT log.Debug("Audio samples have differing durations, audio playback may fail"); #endif } _audioSampleDuration = rec.SampleDuration; } } } Mp4Atom vmhd = minf.Lookup(Mp4Atom.TypeToInt("vmhd"), 0); if (vmhd != null) { #if !SILVERLIGHT log.Debug("Video header atom found"); #endif Mp4Atom dinf = minf.Lookup(Mp4Atom.TypeToInt("dinf"), 0); if (dinf != null) { #if !SILVERLIGHT log.Debug("Data info atom found"); #endif // dinf: dref //log.debug("Video dinf children: {}", dinf.getChildren()); Mp4Atom dref = dinf.Lookup(Mp4Atom.TypeToInt("dref"), 0); if (dref != null) { #if !SILVERLIGHT log.Debug("Data reference atom found"); #endif } } Mp4Atom stbl = minf.Lookup(Mp4Atom.TypeToInt("stbl"), 0); if (stbl != null) { #if !SILVERLIGHT log.Debug("Sample table atom found"); #endif // stbl: stsd, stts, stss, stsc, stsz, stco, // stsh //log.debug("Video stbl children: {}", stbl.getChildren()); // stsd - sample description // stts - (decoding) time to sample // stsc - sample to chunk // stsz - sample size // stco - chunk offset // ctts - (composition) time to sample // stss - sync sample // sdtp - independent and disposable samples //stsd - has codec child Mp4Atom stsd = stbl.Lookup(Mp4Atom.TypeToInt("stsd"), 0); if (stsd != null) { #if !SILVERLIGHT log.Debug("Sample description atom found"); #endif //log.Debug("Sample description (video) stsd children: {}", stsd.getChildren()); Mp4Atom avc1 = stsd.Lookup(Mp4Atom.TypeToInt("avc1"), 0); if (avc1 != null) { //log.debug("AVC1 children: {}", avc1.getChildren()); //set the video 
codec here - may be avc1 or mp4v SetVideoCodecId(Mp4Atom.IntToType(avc1.Type)); //video decoder config //TODO may need to be generic later Mp4Atom codecChild = avc1.Lookup(Mp4Atom.TypeToInt("avcC"), 0); if (codecChild != null) { _avcLevel = codecChild.AvcLevel; #if !SILVERLIGHT log.Debug(string.Format("AVC level: {0}", _avcLevel)); #endif _avcProfile = codecChild.AvcProfile; #if !SILVERLIGHT log.Debug(string.Format("AVC Profile: {0}", _avcProfile)); log.Debug(string.Format("AVCC size: {0}", codecChild.Size)); #endif _videoDecoderBytes = codecChild.VideoConfigBytes; //log.Debug(string.Format("Video config bytes: {0}", ToStringBuilder.reflectionToString(videoDecoderBytes))); } else { //quicktime and ipods use a pixel aspect atom //since we have no avcC check for this and avcC may //be a child Mp4Atom pasp = avc1.Lookup(Mp4Atom.TypeToInt("pasp"), 0); if (pasp != null) { //log.debug("PASP children: {}", pasp.getChildren()); codecChild = pasp.Lookup(Mp4Atom.TypeToInt("avcC"), 0); if (codecChild != null) { _avcLevel = codecChild.AvcLevel; #if !SILVERLIGHT log.Debug(string.Format("AVC level: {0}", _avcLevel)); #endif _avcProfile = codecChild.AvcProfile; #if !SILVERLIGHT log.Debug(string.Format("AVC Profile: {0}", _avcProfile)); log.Debug(string.Format("AVCC size: {0}", codecChild.Size)); #endif _videoDecoderBytes = codecChild.VideoConfigBytes; //log.debug("Video config bytes: {}", ToStringBuilder.reflectionToString(videoDecoderBytes)); } } } } else { //look for mp4v Mp4Atom mp4v = stsd.Lookup(Mp4Atom.TypeToInt("mp4v"), 0); if (mp4v != null) { //log.debug("MP4V children: {}", mp4v.getChildren()); //set the video codec here - may be avc1 or mp4v SetVideoCodecId(Mp4Atom.IntToType(mp4v.Type)); //look for esds Mp4Atom codecChild = mp4v.Lookup(Mp4Atom.TypeToInt("esds"), 0); if (codecChild != null) { //look for descriptors Mp4Descriptor descriptor = codecChild.EsdDescriptor; //log.debug("{}", ToStringBuilder.reflectionToString(descriptor)); if (descriptor != null) { 
List<Mp4Descriptor> children = descriptor.Children; for (int e = 0; e < children.Count; e++) { Mp4Descriptor descr = children[e]; //log.debug("{}", ToStringBuilder.reflectionToString(descr)); if (descr.Children.Count > 0) { List<Mp4Descriptor> children2 = descr.Children; for (int e2 = 0; e2 < children2.Count; e2++) { Mp4Descriptor descr2 = children2[e2]; //log.debug("{}", ToStringBuilder.reflectionToString(descr2)); if (descr2.Type == Mp4Descriptor.MP4DecSpecificInfoDescriptorTag) { //we only want the MP4DecSpecificInfoDescriptorTag _videoDecoderBytes = new byte[descr2.DSID.Length - 8]; Array.Copy(descr2.DSID, 8, _videoDecoderBytes, 0, _videoDecoderBytes.Length); //log.debug("Video config bytes: {}", ToStringBuilder.reflectionToString(videoDecoderBytes)); //we want to break out of top level for loop e = 99; break; } } } } } } } } //log.debug("{}", ToStringBuilder.reflectionToString(avc1)); } //stsc - has Records Mp4Atom stsc = stbl.Lookup(Mp4Atom.TypeToInt("stsc"), 0); if (stsc != null) { #if !SILVERLIGHT log.Debug("Sample to chunk atom found"); #endif _videoSamplesToChunks = stsc.Records; #if !SILVERLIGHT log.Debug(string.Format("Record count: {0}", _videoSamplesToChunks.Count)); #endif Mp4Atom.Record rec = _videoSamplesToChunks[0]; #if !SILVERLIGHT log.Debug(string.Format("Record data: Description index={0} Samples per chunk={1}", rec.SampleDescriptionIndex, rec.SamplesPerChunk)); #endif } //stsz - has Samples Mp4Atom stsz = stbl.Lookup(Mp4Atom.TypeToInt("stsz"), 0); if (stsz != null) { #if !SILVERLIGHT log.Debug("Sample size atom found"); #endif //vector full of integers _videoSamples = stsz.Samples; //if sample size is 0 then the table must be checked due //to variable sample sizes #if !SILVERLIGHT log.Debug(string.Format("Sample size: {0}", stsz.SampleSize)); #endif _videoSampleCount = _videoSamples.Count; #if !SILVERLIGHT log.Debug(string.Format("Sample count: {0}", _videoSampleCount)); #endif } //stco - has Chunks Mp4Atom stco = 
stbl.Lookup(Mp4Atom.TypeToInt("stco"), 0); if (stco != null) { #if !SILVERLIGHT log.Debug("Chunk offset atom found"); #endif //vector full of integers _videoChunkOffsets = stco.Chunks; #if !SILVERLIGHT log.Debug(string.Format("Chunk count: {0}", _videoChunkOffsets.Count)); #endif } //stss - has Sync - no sync means all samples are keyframes Mp4Atom stss = stbl.Lookup(Mp4Atom.TypeToInt("stss"), 0); if (stss != null) { #if !SILVERLIGHT log.Debug("Sync sample atom found"); #endif //vector full of integers _syncSamples = stss.SyncSamples; #if !SILVERLIGHT log.Debug(string.Format("Keyframes: {0}", _syncSamples.Count)); #endif } //stts - has TimeSampleRecords Mp4Atom stts = stbl.Lookup(Mp4Atom.TypeToInt("stts"), 0); if (stts != null) { #if !SILVERLIGHT log.Debug("Time to sample atom found"); #endif List<Mp4Atom.TimeSampleRecord> records = stts.TimeToSamplesRecords; #if !SILVERLIGHT log.Debug(string.Format("Record count: {0}", records.Count)); #endif Mp4Atom.TimeSampleRecord rec = records[0]; #if !SILVERLIGHT log.Debug(string.Format("Record data: Consecutive samples={0} Duration={1}", rec.ConsecutiveSamples, rec.SampleDuration)); #endif //if we have 1 record then all samples have the same duration if (records.Count > 1) { //TODO: handle video samples with varying durations #if !SILVERLIGHT log.Debug("Video samples have differing durations, video playback may fail"); #endif } _videoSampleDuration = rec.SampleDuration; } } } } } } } //calculate FPS _fps = (_videoSampleCount * _timeScale) / (double)_duration; #if !SILVERLIGHT log.Debug(string.Format("FPS calc: ({0} * {1}) / {2}", _videoSampleCount, _timeScale, _duration)); log.Debug(string.Format("FPS: {0}", _fps)); #endif //real duration StringBuilder sb = new StringBuilder(); double videoTime = ((double)_duration / (double)_timeScale); #if !SILVERLIGHT log.Debug(string.Format("Video time: {0}", videoTime)); #endif int minutes = (int)(videoTime / 60); if (minutes > 0) { sb.Append(minutes); sb.Append('.'); } //formatter for 
seconds / millis //NumberFormat df = DecimalFormat.getInstance(); //df.setMaximumFractionDigits(2); //sb.append(df.format((videoTime % 60))); sb.Append(videoTime % 60); _formattedDuration = sb.ToString(); #if !SILVERLIGHT log.Debug(string.Format("Time: {0}", _formattedDuration)); #endif break; case 1835295092: //mdat topAtoms++; long dataSize = 0L; Mp4Atom mdat = atom; dataSize = mdat.Size; //log.debug("{}", ToStringBuilder.reflectionToString(mdat)); _mdatOffset = _inputStream.Offset - dataSize; //log.Debug(string.Format("File size: {0} mdat size: {1}", _file.Length, dataSize)); #if !SILVERLIGHT log.Debug(string.Format("mdat size: {0}", dataSize)); #endif break; case 1718773093: //free case 2003395685: //wide break; default: #if !SILVERLIGHT log.Warn(string.Format("Unexpected atom: {}", Mp4Atom.IntToType(atom.Type))); #endif break; } } //add the tag name (size) to the offsets _moovOffset += 8; _mdatOffset += 8; #if !SILVERLIGHT log.Debug(string.Format("Offsets moov: {0} mdat: {1}", _moovOffset, _mdatOffset)); #endif } catch(Exception ex) { #if !SILVERLIGHT log.Error("Exception decoding header / atoms", ex); #endif } } public long Position { get { return GetCurrentPosition(); } set { throw new Exception("The method or operation is not implemented."); } } public bool HasMoreTags() { return _currentFrame < _frames.Count; } /// <summary> /// Packages media data for return to providers. 
/// </summary> /// <returns></returns> public ITag ReadTag() { lock (this.SyncRoot) { ITag tag = null; //empty-out the pre-streaming tags first if (_firstTags.Count > 0) { //log.debug("Returning pre-tag"); // Return first tags before media data tag = _firstTags.First.Value; _firstTags.RemoveFirst(); return tag; } //log.debug("Read tag - sample {} prevFrameSize {} audio: {} video: {}", new Object[]{currentSample, prevFrameSize, audioCount, videoCount}); //get the current frame Mp4Frame frame = _frames[_currentFrame]; #if !SILVERLIGHT log.Debug(string.Format("Playback #{0} {1}", _currentFrame, frame)); #endif int sampleSize = frame.Size; int time = (int)Math.Round(frame.Time * 1000.0); //log.debug("Read tag - dst: {} base: {} time: {}", new Object[]{frameTs, baseTs, time}); long samplePos = frame.Offset; //log.debug("Read tag - samplePos {}", samplePos); //determine frame type and packet body padding byte type = frame.Type; //assume video type int pad = 5; if (type == IOConstants.TYPE_AUDIO) { pad = 2; } //create a byte buffer of the size of the sample byte[] data = new byte[sampleSize + pad]; try { //prefix is different for keyframes if (type == IOConstants.TYPE_VIDEO) { if (frame.IsKeyFrame) { //log.debug("Writing keyframe prefix"); Array.Copy(PREFIX_VIDEO_KEYFRAME, data, PREFIX_VIDEO_KEYFRAME.Length); } else { //log.debug("Writing interframe prefix"); Array.Copy(PREFIX_VIDEO_FRAME, data, PREFIX_VIDEO_FRAME.Length); } _videoCount++; } else { //log.debug("Writing audio prefix"); Array.Copy(PREFIX_AUDIO_FRAME, data, PREFIX_AUDIO_FRAME.Length); _audioCount++; } //do we need to add the mdat offset to the sample position? _stream.Position = samplePos; _stream.Read(data, pad, sampleSize); } catch (Exception ex) { #if !SILVERLIGHT log.Error("Error on channel position / read", ex); #endif } //create the tag tag = new Tag(type, time, data.Length, data, _prevFrameSize); //log.debug("Read tag - type: {} body size: {}", (type == TYPE_AUDIO ? 
"Audio" : "Video"), tag.getBodySize()); //increment the frame number _currentFrame++; //set the frame / tag size _prevFrameSize = tag.BodySize; //log.debug("Tag: {}", tag); return tag; } } public void Close() { //_fs.Close(); _inputStream.Close(); } public bool HasVideo() { try { return _hasVideo; } finally { if (_frames != null) { _frames.Clear(); _frames = null; } } } #endregion /// <summary> /// Performs frame analysis and generates metadata for use in seeking. All the frames are analyzed and sorted together based on time and offset. /// </summary> public void AnalyzeFrames() { #if !SILVERLIGHT log.Debug("Analyzing frames"); #endif // Maps positions, samples, timestamps to one another _timePosMap = new Dictionary<int, long>(); _samplePosMap = new Dictionary<int, long>(); // tag == sample int sample = 1; long pos; for (int i = 0; i < _videoSamplesToChunks.Count; i++) { Mp4Atom.Record record = _videoSamplesToChunks[i]; int firstChunk = record.FirstChunk; int lastChunk = _videoChunkOffsets.Count; if (i < _videoSamplesToChunks.Count - 1) { Mp4Atom.Record nextRecord = _videoSamplesToChunks[i + 1]; lastChunk = nextRecord.FirstChunk - 1; } for (int chunk = firstChunk; chunk <= lastChunk; chunk++) { int sampleCount = record.SamplesPerChunk; pos = _videoChunkOffsets[chunk - 1]; while (sampleCount > 0) { //log.debug("Position: {}", pos); _samplePosMap.Add(sample, pos); //calculate ts double ts = (_videoSampleDuration * (sample - 1)) / _videoTimeScale; //check to see if the sample is a keyframe bool keyframe = false; //some files appear not to have sync samples if (_syncSamples != null) { keyframe = _syncSamples.Contains(sample); if (_seekPoints == null) { _seekPoints = new LinkedList<int>(); } int keyframeTs = (int)Math.Round(ts * 1000.0); _seekPoints.AddLast(keyframeTs); _timePosMap.Add(keyframeTs, pos); } //size of the sample int size = _videoSamples[sample - 1]; //create a frame Mp4Frame frame = new Mp4Frame(); frame.IsKeyFrame = keyframe; frame.Offset = pos; 
frame.Size = size; frame.Time = ts; frame.Type = IOConstants.TYPE_VIDEO; _frames.Add(frame); //log.debug("Sample #{} {}", sample, frame); //inc and dec stuff pos += size; sampleCount--; sample++; } } } //log.debug("Sample position map (video): {}", samplePosMap); //add the audio frames / samples / chunks sample = 1; for (int i = 0; i < _audioSamplesToChunks.Count; i++) { Mp4Atom.Record record = _audioSamplesToChunks[i]; int firstChunk = record.FirstChunk; int lastChunk = _audioChunkOffsets.Count; if (i < _audioSamplesToChunks.Count - 1) { Mp4Atom.Record nextRecord = _audioSamplesToChunks[i + 1]; lastChunk = nextRecord.FirstChunk - 1; } for (int chunk = firstChunk; chunk <= lastChunk; chunk++) { int sampleCount = record.SamplesPerChunk; pos = _audioChunkOffsets[chunk - 1]; while (sampleCount > 0) { //calculate ts double ts = (_audioSampleDuration * (sample - 1)) / _audioTimeScale; //sample size int size = _audioSamples[sample - 1]; //create a frame Mp4Frame frame = new Mp4Frame(); frame.Offset = pos; frame.Size = size; frame.Time = ts; frame.Type = IOConstants.TYPE_AUDIO; _frames.Add(frame); //log.debug("Sample #{} {}", sample, frame); //inc and dec stuff pos += size; sampleCount--; sample++; } } } //sort the frames _frames.Sort(); #if !SILVERLIGHT log.Debug(string.Format("Frames count: {0}", _frames.Count)); //log.debug("Frames: {}", frames); #endif //release some memory, since we're done with the vectors _audioChunkOffsets.Clear(); _audioChunkOffsets = null; _audioSamplesToChunks.Clear(); _audioSamplesToChunks = null; _videoChunkOffsets.Clear(); _videoChunkOffsets = null; _videoSamplesToChunks.Clear(); _videoSamplesToChunks = null; _syncSamples.Clear(); _syncSamples = null; } /// <summary> /// Create tag for metadata event. /// /// Info from http://www.kaourantin.net/2007/08/what-just-happened-to-video-on-web_20.html /// <para> /// duration - Obvious. But unlike for FLV files this field will always be present. /// videocodecid - For H.264 we report 'avc1'. 
/// audiocodecid - For AAC we report 'mp4a', for MP3 we report '.mp3'. /// avcprofile - 66, 77, 88, 100, 110, 122 or 144 which corresponds to the H.264 profiles. /// avclevel - A number between 10 and 51. Consult this list to find out more. /// aottype - Either 0, 1 or 2. This corresponds to AAC Main, AAC LC and SBR audio types. /// moovposition - The offset in bytes of the moov atom in a file. /// trackinfo - An array of objects containing various infomation about all the tracks in a file /// ex. /// trackinfo[0].length: 7081 /// trackinfo[0].timescale: 600 /// trackinfo[0].sampledescription.sampletype: avc1 /// trackinfo[0].language: und /// trackinfo[1].length: 525312 /// trackinfo[1].timescale: 44100 /// trackinfo[1].sampledescription.sampletype: mp4a /// trackinfo[1].language: und /// /// chapters - As mentioned above information about chapters in audiobooks. /// seekpoints - As mentioned above times you can directly feed into NetStream.seek(); /// videoframerate - The frame rate of the video if a monotone frame rate is used. Most videos will have a monotone frame rate. /// audiosamplerate - The original sampling rate of the audio track. /// audiochannels - The original number of channels of the audio track. /// tags - As mentioned above ID3 like tag information. /// </para> /// /// <para> /// width: Display width in pixels. /// height: Display height in pixels. /// duration: Duration in seconds. /// avcprofile: AVC profile number such as 55, 77, 100 etc. /// avclevel: AVC IDC level number such as 10, 11, 20, 21 etc. /// aacaot: AAC audio object type; 0, 1 or 2 are supported. /// videoframerate: Frame rate of the video in this MP4. /// seekpoints: Array that lists the available keyframes in a file as time stamps in milliseconds. /// This is optional as the MP4 file might not contain this information. Generally speaking, /// most MP4 files will include this by default. /// videocodecid: Usually a string such as "avc1" or "VP6F." 
/// audiocodecid: Usually a string such as ".mp3" or "mp4a." /// progressivedownloadinfo: Object that provides information from the "pdin" atom. This is optional /// and many files will not have this field. /// trackinfo: Object that provides information on all the tracks in the MP4 file, including their sample description ID. /// tags: Array of key value pairs representing the information present in the "ilst" atom, which is /// the equivalent of ID3 tags for MP4 files. These tags are mostly used by iTunes. /// </para> /// </summary> /// <returns>Metadata event tag.</returns> ITag CreateFileMeta() { #if !SILVERLIGHT log.Debug("Creating onMetaData"); #endif // Create tag for onMetaData event ByteBuffer buf = ByteBuffer.Allocate(1024); buf.AutoExpand = true; AMFWriter output = new AMFWriter(buf); output.WriteString("onMetaData"); Dictionary<string, object> props = new Dictionary<string, object>(); // Duration property props.Add("duration", ((double)_duration / (double)_timeScale)); props.Add("width", _width); props.Add("height", _height); // Video codec id props.Add("videocodecid", _videoCodecId); props.Add("avcprofile", _avcProfile); props.Add("avclevel", _avcLevel); props.Add("videoframerate", _fps); // Audio codec id - watch for mp3 instead of aac props.Add("audiocodecid", _audioCodecId); props.Add("aacaot", _audioCodecType); props.Add("audiosamplerate", _audioTimeScale); props.Add("audiochannels", _audioChannels); props.Add("moovposition", _moovOffset); //props.put("chapters", ""); //this is for f4b - books if (_seekPoints != null) { props.Add("seekpoints", _seekPoints); } //tags will only appear if there is an "ilst" atom in the file //props.put("tags", ""); List<Dictionary<String, Object>> arr = new List<Dictionary<String, Object>>(2); if (_hasAudio) { Dictionary<String, Object> audioMap = new Dictionary<String, Object>(4); audioMap.Add("timescale", _audioTimeScale); audioMap.Add("language", "und"); List<Dictionary<String, String>> desc = new 
List<Dictionary<String, String>>(1); audioMap.Add("sampledescription", desc); Dictionary<String, String> sampleMap = new Dictionary<String, String>(1); sampleMap.Add("sampletype", _audioCodecId); desc.Add(sampleMap); if (_audioSamples != null) { audioMap.Add("length_property", _audioSampleDuration * _audioSamples.Count); //release some memory, since we're done with the vectors _audioSamples.Clear(); _audioSamples = null; } arr.Add(audioMap); } if (_hasVideo) { Dictionary<String, Object> videoMap = new Dictionary<String, Object>(3); videoMap.Add("timescale", _videoTimeScale); videoMap.Add("language", "und"); List<Dictionary<String, String>> desc = new List<Dictionary<String, String>>(1); videoMap.Add("sampledescription", desc); Dictionary<String, String> sampleMap = new Dictionary<String, String>(1); sampleMap.Add("sampletype", _videoCodecId); desc.Add(sampleMap); if (_videoSamples != null) { videoMap.Add("length_property", _videoSampleDuration * _videoSamples.Count); //release some memory, since we're done with the vectors _videoSamples.Clear(); _videoSamples = null; } arr.Add(videoMap); } props.Add("trackinfo", arr.ToArray()); //set this based on existence of seekpoints props.Add("canSeekToEnd", (_seekPoints != null)); output.WriteAssociativeArray(ObjectEncoding.AMF0, props); buf.Flip(); //now that all the meta properties are done, update the duration _duration = (long)Math.Round(_duration * 1000d); ITag result = new Tag(IOConstants.TYPE_METADATA, 0, buf.Limit, buf.ToArray(), 0); return result; } /// <summary> /// Tag sequence /// MetaData, Video config, Audio config, remaining audio and video /// /// Packet prefixes: /// 17 00 00 00 00 = Video extra data (first video packet) /// 17 01 00 00 00 = Video keyframe /// 27 01 00 00 00 = Video interframe /// af 00 ... 
06 = Audio extra data (first audio packet) /// af 01 = Audio frame /// /// Audio extra data(s): /// af 00 = Prefix /// 11 90 4f 14 = AAC Main = aottype 0 /// 12 10 = AAC LC = aottype 1 /// 13 90 56 e5 a5 48 00 = HE-AAC SBR = aottype 2 /// 06 = Suffix /// /// Still not absolutely certain about this order or the bytes - need to verify later /// </summary> private void CreatePreStreamingTags() { #if !SILVERLIGHT log.Debug("Creating pre-streaming tags"); #endif ITag tag = null; //byte[] body = null; ByteBuffer body; if (_hasVideo) { //video tag #1 body = ByteBuffer.Allocate(41); body.AutoExpand = true; body.Put(PREFIX_VIDEO_CONFIG_FRAME); if (_videoDecoderBytes != null) { body.Put(_videoDecoderBytes); } tag = new Tag(IOConstants.TYPE_VIDEO, 0, (int)body.Length, body.ToArray(), 0); //add tag _firstTags.AddLast(tag); } if (_hasAudio) { //audio tag #1 body = ByteBuffer.Allocate(7); body.AutoExpand = true; body.Put(new byte[] { (byte)0xaf, (byte)0 }); //prefix if (_audioDecoderBytes != null) { body.Put(_audioDecoderBytes); } else { //default to aac-lc when the esds doesn't contain descriptor bytes //Array.Copy(AUDIO_CONFIG_FRAME_AAC_LC, 0, body, PREFIX_AUDIO_FRAME.Length, AUDIO_CONFIG_FRAME_AAC_LC.Length); //body[PREFIX_AUDIO_FRAME.Length + AUDIO_CONFIG_FRAME_AAC_LC.Length] = 0x06; //suffix body.Put(AUDIO_CONFIG_FRAME_AAC_LC); } body.Put((byte)0x06); //suffix tag = new Tag(IOConstants.TYPE_AUDIO, 0, (int)body.Length, body.ToArray(), tag.BodySize); //add tag _firstTags.AddLast(tag); } } public void SetVideoCodecId(String videoCodecId) { this._videoCodecId = videoCodecId; } public void SetAudioCodecId(String audioCodecId) { this._audioCodecId = audioCodecId; } } }
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using Encog.MathUtil;
using Encog.MathUtil.Randomize;
using Encog.ML.Data;
using Encog.ML.Data.Specific;
using Encog.Util;

namespace Encog.Neural.Thermal
{
    /// <summary>
    /// Implements a Boltzmann machine: a stochastic thermal network in which each
    /// neuron's state is set probabilistically based on its weighted input and the
    /// current network temperature. Higher temperature means more random behavior;
    /// annealing (lowering the temperature) settles the network toward equilibrium.
    /// </summary>
    [Serializable]
    public class BoltzmannMachine : ThermalNetwork
    {
        /// <summary>
        /// The property for run cycles.
        /// </summary>
        ///
        public const String ParamRunCycles = "runCycles";

        /// <summary>
        /// The property for anneal cycles.
        /// </summary>
        ///
        public const String ParamAnnealCycles = "annealCycles";

        /// <summary>
        /// The number of cycles to anneal for.
        /// </summary>
        ///
        private int _annealCycles;

        /// <summary>
        /// Count used to internally determine if a neuron is "off".
        /// Not serialized; rebuilt lazily by <see cref="EstablishEquilibrium"/>.
        /// </summary>
        [NonSerialized]
        private int[] _off;

        /// <summary>
        /// Count used to internally determine if a neuron is "on".
        /// Not serialized; rebuilt lazily by <see cref="EstablishEquilibrium"/>.
        /// </summary>
        [NonSerialized]
        private int[] _on;

        /// <summary>
        /// The number of cycles to run the network through before annealing.
        /// </summary>
        ///
        private int _runCycles;

        /// <summary>
        /// The current temperature of the neural network. The higher the
        /// temperature, the more random the network will behave.
        /// </summary>
        ///
        private double _temperature;

        /// <summary>
        /// The thresholds (one bias value per neuron, subtracted from the
        /// weighted input sum in <see cref="Run(int)"/>).
        /// </summary>
        ///
        private double[] _threshold;

        /// <summary>
        /// Default constructor. Uses 100 anneal cycles and 1000 run cycles;
        /// note that <see cref="Threshold"/> is left null until assigned.
        /// </summary>
        ///
        public BoltzmannMachine()
        {
            _annealCycles = 100;
            _runCycles = 1000;
        }

        /// <summary>
        /// Construct a Boltzmann machine with the specified number of neurons.
        /// Thresholds are allocated (all zero) and default cycle counts applied.
        /// </summary>
        public BoltzmannMachine(int neuronCount) : base(neuronCount)
        {
            _annealCycles = 100;
            _runCycles = 1000;
            _threshold = new double[neuronCount];
        }

        /// <value>the annealCycles to set</value>
        public int AnnealCycles
        {
            get { return _annealCycles; }
            set { _annealCycles = value; }
        }

        /// <inheritdoc/>
        public override int InputCount
        {
            // Input and output both map directly onto the neuron states.
            get { return NeuronCount; }
        }

        /// <inheritdoc/>
        public override int OutputCount
        {
            get { return NeuronCount; }
        }

        /// <value>the runCycles to set</value>
        public int RunCycles
        {
            get { return _runCycles; }
            set { _runCycles = value; }
        }

        /// <summary>
        /// Set the network temperature. NOTE(review): <see cref="Run(int)"/>
        /// divides by this value, so a temperature of exactly zero produces
        /// a division by zero (NaN/Infinity probability) — confirm callers
        /// never anneal all the way to 0.
        /// </summary>
        public double Temperature
        {
            get { return _temperature; }
            set { _temperature = value; }
        }

        /// <summary>
        /// Set the thresholds.
        /// </summary>
        public double[] Threshold
        {
            get { return _threshold; }
            set { _threshold = value; }
        }

        /// <summary>
        /// Note: for Boltzmann networks, you will usually want to call the "run"
        /// method to compute the output.
        /// This method can be used to copy the input data to the current state. A
        /// single iteration is then run, and the new current state is returned.
        /// </summary>
        ///
        /// <param name="input">The input pattern.</param>
        /// <returns>The new current state.</returns>
        public override sealed IMLData Compute(IMLData input)
        {
            var result = new BiPolarMLData(input.Count);
            // Seed the network state with the input, then run one full pass.
            input.CopyTo(CurrentState.Data, 0, input.Count);
            Run();
            EngineArray.ArrayCopy(CurrentState.Data, result.Data);
            return result;
        }

        /// <summary>
        /// Decrease the temperature by the specified amount.
        /// </summary>
        ///
        /// <param name="d">The amount to decrease by (a multiplicative factor;
        /// e.g. 0.99 lowers the temperature by 1%).</param>
        public void DecreaseTemperature(double d)
        {
            _temperature *= d;
        }

        /// <summary>
        /// Run the network until thermal equilibrium is established.
        /// First runs _runCycles*count random single-neuron updates, then
        /// _annealCycles*count more while tallying on/off votes per neuron;
        /// each neuron's final state is the majority vote.
        /// </summary>
        ///
        public void EstablishEquilibrium()
        {
            int count = NeuronCount;

            // Lazily allocate the vote counters (they are [NonSerialized]).
            if (_on == null)
            {
                _on = new int[count];
                _off = new int[count];
            }

            for (int i = 0; i < count; i++)
            {
                _on[i] = 0;
                _off[i] = 0;
            }

            // NOTE(review): if RangeRandomizer.Randomize(0, count - 1) returns a
            // double in [0, count-1), the (int) cast means index count-1 is never
            // selected here — confirm against RangeRandomizer's contract.
            for (int n = 0; n < _runCycles*count; n++)
            {
                Run((int) RangeRandomizer.Randomize(0, count - 1));
            }

            for (int n = 0; n < _annealCycles*count; n++)
            {
                var i = (int) RangeRandomizer.Randomize(0, count - 1);
                Run(i);
                if (CurrentState.GetBoolean(i))
                {
                    _on[i]++;
                }
                else
                {
                    _off[i]++;
                }
            }

            // Majority vote decides each neuron's settled state.
            for (int i = 0; i < count; i++)
            {
                CurrentState.SetBoolean(i, _on[i] > _off[i]);
            }
        }

        /// <summary>
        /// Run the network for all neurons present (one sequential pass).
        /// </summary>
        ///
        public void Run()
        {
            int count = NeuronCount;
            for (int i = 0; i < count; i++)
            {
                Run(i);
            }
        }

        /// <summary>
        /// Run the network for the specified neuron: compute the weighted sum of
        /// all currently-on neurons, subtract the threshold, and stochastically
        /// set the neuron on with sigmoid probability 1/(1+e^(-sum/T)).
        /// </summary>
        ///
        /// <param name="i">The neuron to run for.</param>
        public void Run(int i)
        {
            int j;
            int count = NeuronCount;

            double sum = 0;
            // NOTE(review): j == i is included, so the diagonal weight
            // contributes to the sum — presumably GetWeight(i, i) is zero;
            // verify against how the weight matrix is trained.
            for (j = 0; j < count; j++)
            {
                sum += GetWeight(i, j)*((CurrentState.GetBoolean(j)) ? 1 : 0);
            }
            sum -= _threshold[i];
            // Sigmoid acceptance probability at the current temperature.
            double probability = 1/(1 + BoundMath.Exp(-sum/_temperature));
            CurrentState.SetBoolean(i, RangeRandomizer.Randomize(0, 1) <= probability);
        }

        /// <summary>
        /// No properties need updating for this network type.
        /// </summary>
        ///
        public override void UpdateProperties()
        {
            // nothing needed here
        }
    }
}
// <copyright file="ConcurrentObservableDictionary{TKey,TValue}.cs" company="Adrian Mos">
// Copyright (c) Adrian Mos with all rights reserved. Part of the IX Framework.
// </copyright>

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.Serialization;
using IX.Observable.DebugAide;
using IX.StandardExtensions.Contracts;
using IX.StandardExtensions.Threading;
using IX.System.Threading;
using JetBrains.Annotations;
using GlobalThreading = System.Threading;

namespace IX.Observable
{
    /// <summary>
    /// A dictionary that broadcasts its changes. Thread safety is provided by a
    /// lazily-created <see cref="ReaderWriterLockSlim"/>; mutation helpers take a
    /// read lock first and upgrade to a write lock only when a change is needed.
    /// </summary>
    /// <typeparam name="TKey">The data key type.</typeparam>
    /// <typeparam name="TValue">The data value type.</typeparam>
    [DebuggerDisplay("ConcurrentObservableDictionary, Count = {" + nameof(Count) + "}")]
    [DebuggerTypeProxy(typeof(DictionaryDebugView<,>))]
    [CollectionDataContract(
        Namespace = Constants.DataContractNamespace,
        Name = "ConcurrentObservable{1}DictionaryBy{0}",
        ItemName = "Entry",
        KeyName = "Key",
        ValueName = "Value")]
    [PublicAPI]
    public partial class ConcurrentObservableDictionary<TKey, TValue> : ObservableDictionary<TKey, TValue>
    {
        // Lazy so that deserialized/undisposed instances only pay for the lock
        // when it is first used; replaced atomically on deserialization/dispose.
        private Lazy<ReaderWriterLockSlim> locker;

        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        public ConcurrentObservableDictionary()
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        public ConcurrentObservableDictionary(int capacity)
            : base(capacity)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        public ConcurrentObservableDictionary(IEqualityComparer<TKey> equalityComparer)
            : base(equalityComparer)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        public ConcurrentObservableDictionary(
            int capacity,
            IEqualityComparer<TKey> equalityComparer)
            : base(
                capacity,
                equalityComparer)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        public ConcurrentObservableDictionary(IDictionary<TKey, TValue> dictionary)
            : base(dictionary)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        /// <param name="comparer">A comparer object to use for equality comparison.</param>
        public ConcurrentObservableDictionary(
            IDictionary<TKey, TValue> dictionary,
            IEqualityComparer<TKey> comparer)
            : base(
                dictionary,
                comparer)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        public ConcurrentObservableDictionary(GlobalThreading.SynchronizationContext context)
            : base(context)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            int capacity)
            : base(
                context,
                capacity)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            IEqualityComparer<TKey> equalityComparer)
            : base(
                context,
                equalityComparer)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            int capacity,
            IEqualityComparer<TKey> equalityComparer)
            : base(
                context,
                capacity,
                equalityComparer)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            IDictionary<TKey, TValue> dictionary)
            : base(
                context,
                dictionary)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        /// <param name="comparer">A comparer object to use for equality comparison.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            IDictionary<TKey, TValue> dictionary,
            IEqualityComparer<TKey> comparer)
            : base(
                context,
                dictionary,
                comparer)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(bool suppressUndoable)
            : base(suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            int capacity,
            bool suppressUndoable)
            : base(
                capacity,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            IEqualityComparer<TKey> equalityComparer,
            bool suppressUndoable)
            : base(
                equalityComparer,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            int capacity,
            IEqualityComparer<TKey> equalityComparer,
            bool suppressUndoable)
            : base(
                capacity,
                equalityComparer,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            IDictionary<TKey, TValue> dictionary,
            bool suppressUndoable)
            : base(
                dictionary,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        /// <param name="comparer">A comparer object to use for equality comparison.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            IDictionary<TKey, TValue> dictionary,
            IEqualityComparer<TKey> comparer,
            bool suppressUndoable)
            : base(
                dictionary,
                comparer,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            bool suppressUndoable)
            : base(
                context,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            int capacity,
            bool suppressUndoable)
            : base(
                context,
                capacity,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            IEqualityComparer<TKey> equalityComparer,
            bool suppressUndoable)
            : base(
                context,
                equalityComparer,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="capacity">The initial capacity of the dictionary.</param>
        /// <param name="equalityComparer">A comparer object to use for equality comparison.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            int capacity,
            IEqualityComparer<TKey> equalityComparer,
            bool suppressUndoable)
            : base(
                context,
                capacity,
                equalityComparer,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            IDictionary<TKey, TValue> dictionary,
            bool suppressUndoable)
            : base(
                context,
                dictionary,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConcurrentObservableDictionary{TKey, TValue}" /> class.
        /// </summary>
        /// <param name="context">The synchronization context to use when posting observable messages.</param>
        /// <param name="dictionary">A dictionary of items to copy from.</param>
        /// <param name="comparer">A comparer object to use for equality comparison.</param>
        /// <param name="suppressUndoable">If set to <see langword="true" />, suppresses undoable capabilities of this collection.</param>
        public ConcurrentObservableDictionary(
            GlobalThreading.SynchronizationContext context,
            IDictionary<TKey, TValue> dictionary,
            IEqualityComparer<TKey> comparer,
            bool suppressUndoable)
            : base(
                context,
                dictionary,
                comparer,
                suppressUndoable)
        {
            this.locker = EnvironmentSettings.GenerateDefaultLocker();
        }

        #endregion

        /// <summary>
        /// Gets a synchronization lock item to be used when trying to synchronize read/write operations between threads.
        /// </summary>
        protected override IReaderWriterLock SynchronizationLock => this.locker.Value;

        /// <summary>
        /// Gets a value from the dictionary, optionally generating one if the key is not found.
        /// </summary>
        /// <param name="key">The key.</param>
        /// <param name="valueGenerator">The value generator.</param>
        /// <returns>The value corresponding to the key, that is guaranteed to exist in the dictionary after this method.</returns>
        /// <remarks>
        /// <para>The <paramref name="valueGenerator" /> method is guaranteed to not be invoked if the key exists.</para>
        /// <para>
        /// When the <paramref name="valueGenerator" /> method is invoked, it will be invoked within the write lock.
        /// Please ensure that no member of the dictionary is called within it.
        /// </para>
        /// </remarks>
        public TValue GetOrAdd(
            TKey key,
            Func<TValue> valueGenerator)
        {
            // PRECONDITIONS

            // Current object not disposed
            this.RequiresNotDisposed();

            // ACTION
            int newIndex;
            TValue value;

            // Under read/write lock
            using (ReadWriteSynchronizationLocker rwl = this.ReadWriteLock())
            {
                if (this.InternalContainer.TryGetValue(
                    key,
                    out value))
                {
                    // Within read lock, if the key is found, return the value.
                    return value;
                }

                rwl.Upgrade();

                if (this.InternalContainer.TryGetValue(
                    key,
                    out value))
                {
                    // Re-check within a write lock, to ensure that something else hasn't already added it.
                    return value;
                }

                // Generate the value
                value = valueGenerator();

                // Add the item
                newIndex = this.InternalContainer.Add(
                    key,
                    value);
            }

            // NOTIFICATIONS (raised outside the lock so handlers cannot deadlock
            // against other dictionary operations)

            // Collection changed
            if (newIndex == -1)
            {
                // If no index could be found for an item (Dictionary add)
                this.RaiseCollectionReset();
            }
            else
            {
                // If index was added at a specific index
                this.RaiseCollectionChangedAdd(
                    new KeyValuePair<TKey, TValue>(
                        key,
                        value),
                    newIndex);
            }

            // Property changed
            this.RaisePropertyChanged(nameof(this.Count));

            // Contents may have changed
            this.ContentsMayHaveChanged();

            return value;
        }

        /// <summary>
        /// Creates an item or changes its state, if one exists.
        /// </summary>
        /// <param name="key">The key.</param>
        /// <param name="valueGenerator">The value generator, invoked (within the write lock) only when the key is absent.</param>
        /// <param name="valueAction">The value action, invoked (while a lock is held) only when the key already exists.</param>
        /// <returns>The created or state-changed item.</returns>
        public TValue CreateOrChangeState(
            TKey key,
            Func<TValue> valueGenerator,
            Action<TValue> valueAction)
        {
            // PRECONDITIONS

            // Current object not disposed
            this.RequiresNotDisposed();

            // ACTION
            int newIndex;
            TValue value;

            // Under read/write lock
            using (ReadWriteSynchronizationLocker rwl = this.ReadWriteLock())
            {
                if (this.InternalContainer.TryGetValue(
                    key,
                    out value))
                {
                    // Within read lock, if the key is found, return the value.
                    valueAction(value);
                    return value;
                }

                rwl.Upgrade();

                if (this.InternalContainer.TryGetValue(
                    key,
                    out value))
                {
                    // Re-check within a write lock, to ensure that something else hasn't already added it.
                    valueAction(value);
                    return value;
                }

                // Generate the value
                value = valueGenerator();

                // Add the item
                newIndex = this.InternalContainer.Add(
                    key,
                    value);
            }

            // NOTIFICATIONS (only reached on the add path; raised outside the lock)

            // Collection changed
            if (newIndex == -1)
            {
                // If no index could be found for an item (Dictionary add)
                this.RaiseCollectionReset();
            }
            else
            {
                // If index was added at a specific index
                this.RaiseCollectionChangedAdd(
                    new KeyValuePair<TKey, TValue>(
                        key,
                        value),
                    newIndex);
            }

            // Property changed
            this.RaisePropertyChanged(nameof(this.Count));

            // Contents may have changed
            this.ContentsMayHaveChanged();

            return value;
        }

        /// <summary>
        /// Removes a key from the dictionary, then acts on its resulting value.
        /// Note that <paramref name="action" /> runs inside the write lock; it
        /// must not call back into this dictionary.
        /// </summary>
        /// <param name="key">The key.</param>
        /// <param name="action">The action.</param>
        /// <returns><see langword="true" /> if the variable was successfully removed, <see langword="false" /> otherwise.</returns>
        public bool RemoveThenAct(
            TKey key,
            Action<TValue> action)
        {
            // PRECONDITIONS

            // Current object not disposed
            this.RequiresNotDisposed();

            // ACTION
            int oldIndex;
            TValue value;

            // Under read/write lock
            using (ReadWriteSynchronizationLocker rwl = this.ReadWriteLock())
            {
                if (this.InternalContainer.TryGetValue(
                    key,
                    out value))
                {
                    rwl.Upgrade();

                    if (this.InternalContainer.TryGetValue(
                        key,
                        out value))
                    {
                        // Re-check within a write lock, to ensure that something else hasn't already removed it.
                        oldIndex = this.InternalContainer.Remove(
                            new KeyValuePair<TKey, TValue>(
                                key,
                                value));

                        action(value);
                    }
                    else
                    {
                        return false;
                    }
                }
                else
                {
                    return false;
                }
            }

            // NOTIFICATIONS (raised outside the lock)

            // Collection changed
            if (oldIndex == -1)
            {
                // If no index could be found for an item (Dictionary remove)
                this.RaiseCollectionReset();
            }
            else
            {
                // If index was added at a specific index
                this.RaiseCollectionChangedRemove(
                    new KeyValuePair<TKey, TValue>(
                        key,
                        value),
                    oldIndex);
            }

            // Property changed
            this.RaisePropertyChanged(nameof(this.Count));

            // Contents may have changed
            this.ContentsMayHaveChanged();

            return true;
        }

        /// <summary>
        /// Called when the object is being deserialized, in order to set the locker to a new value.
        /// </summary>
        /// <param name="context">The streaming context.</param>
        [OnDeserializing]
        internal void OnDeserializingMethod(StreamingContext context) =>
            GlobalThreading.Interlocked.Exchange(
                ref this.locker,
                EnvironmentSettings.GenerateDefaultLocker());

        /// <summary>
        /// Disposes the managed context: atomically detaches the locker and
        /// disposes the underlying lock only if it was ever created.
        /// </summary>
        protected override void DisposeManagedContext()
        {
            Lazy<ReaderWriterLockSlim> l = GlobalThreading.Interlocked.Exchange(
                ref this.locker,
                null);
            if (l?.IsValueCreated ?? false)
            {
                l.Value.Dispose();
            }

            base.DisposeManagedContext();
        }

        /// <summary>
        /// Disposes the general context (clears the locker reference without
        /// disposing it; the managed-dispose path handles actual disposal).
        /// </summary>
        protected override void DisposeGeneralContext()
        {
            GlobalThreading.Interlocked.Exchange(
                ref this.locker,
                null);

            base.DisposeGeneralContext();
        }
    }
}
using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using UnityEngine; using UnityEditor.Graphing; using UnityEditor.Graphing.Util; using Debug = UnityEngine.Debug; using Object = UnityEngine.Object; using UnityEngine.Rendering; using UnityEditor.UIElements; using Edge = UnityEditor.Experimental.GraphView.Edge; using UnityEditor.Experimental.GraphView; using UnityEngine.UIElements; namespace UnityEditor.ShaderGraph.Drawing { class MaterialGraphEditWindow : EditorWindow { [SerializeField] string m_Selected; [SerializeField] GraphObject m_GraphObject; [NonSerialized] bool m_HasError; [NonSerialized] public bool updatePreviewShaders = false; ColorSpace m_ColorSpace; RenderPipelineAsset m_RenderPipelineAsset; bool m_FrameAllAfterLayout; GraphEditorView m_GraphEditorView; MessageManager m_MessageManager; MessageManager messageManager { get { return m_MessageManager ?? (m_MessageManager = new MessageManager()); } } GraphEditorView graphEditorView { get { return m_GraphEditorView; } set { if (m_GraphEditorView != null) { m_GraphEditorView.RemoveFromHierarchy(); m_GraphEditorView.Dispose(); } m_GraphEditorView = value; if (m_GraphEditorView != null) { m_GraphEditorView.saveRequested += UpdateAsset; m_GraphEditorView.convertToSubgraphRequested += ToSubGraph; m_GraphEditorView.showInProjectRequested += PingAsset; m_GraphEditorView.RegisterCallback<GeometryChangedEvent>(OnGeometryChanged); m_FrameAllAfterLayout = true; this.rootVisualElement.Add(graphEditorView); } } } GraphObject graphObject { get { return m_GraphObject; } set { if (m_GraphObject != null) DestroyImmediate(m_GraphObject); m_GraphObject = value; } } public string selectedGuid { get { return m_Selected; } private set { m_Selected = value; } } public string assetName { get { return titleContent.text; } set { titleContent.text = value; graphEditorView.assetName = value; } } void Update() { if (m_HasError) return; if (PlayerSettings.colorSpace != m_ColorSpace) { 
graphEditorView = null; m_ColorSpace = PlayerSettings.colorSpace; } if (GraphicsSettings.renderPipelineAsset != m_RenderPipelineAsset) { graphEditorView = null; m_RenderPipelineAsset = GraphicsSettings.renderPipelineAsset; } try { if (graphObject == null && selectedGuid != null) { var guid = selectedGuid; selectedGuid = null; Initialize(guid); } if (graphObject == null) { Close(); return; } var materialGraph = graphObject.graph as GraphData; if (materialGraph == null) return; if (graphEditorView == null) { messageManager.ClearAll(); materialGraph.messageManager = messageManager; var asset = AssetDatabase.LoadAssetAtPath<Object>(AssetDatabase.GUIDToAssetPath(selectedGuid)); graphEditorView = new GraphEditorView(this, materialGraph, messageManager) { viewDataKey = selectedGuid, assetName = asset.name.Split('/').Last() }; m_ColorSpace = PlayerSettings.colorSpace; m_RenderPipelineAsset = GraphicsSettings.renderPipelineAsset; graphObject.Validate(); } if (graphObject.wasUndoRedoPerformed) { graphEditorView.HandleGraphChanges(); graphObject.graph.ClearChanges(); graphObject.HandleUndoRedo(); } if (updatePreviewShaders) { m_GraphEditorView.UpdatePreviewShaders(); updatePreviewShaders = false; } graphEditorView.HandleGraphChanges(); graphObject.graph.ClearChanges(); } catch (Exception e) { m_HasError = true; m_GraphEditorView = null; graphObject = null; Debug.LogException(e); throw; } } void OnEnable() { this.SetAntiAliasing(4); } void OnDisable() { graphEditorView = null; messageManager.ClearAll(); } void OnDestroy() { if (graphObject != null) { string nameOfFile = AssetDatabase.GUIDToAssetPath(selectedGuid); if (graphObject.isDirty && EditorUtility.DisplayDialog("Shader Graph Has Been Modified", "Do you want to save the changes you made in the Shader Graph?\n" + nameOfFile + "\n\nYour changes will be lost if you don't save them.", "Save", "Don't Save")) UpdateAsset(); Undo.ClearUndo(graphObject); DestroyImmediate(graphObject); } graphEditorView = null; } public void 
// NOTE(review): this chunk starts mid-declaration — the access modifier and return
// type ("public void") of PingAsset sit on the preceding, unseen line.

        // Highlights (pings) the currently open shader graph asset in the Project window.
        PingAsset()
        {
            if (selectedGuid != null)
            {
                var path = AssetDatabase.GUIDToAssetPath(selectedGuid);
                var asset = AssetDatabase.LoadAssetAtPath<Object>(path);
                EditorGUIUtility.PingObject(asset);
            }
        }

        // Saves the current graph back to its asset file, re-imports it, and asks every
        // open MaterialGraphEditWindow to rebuild (so nested sub-graph nodes refresh).
        public void UpdateAsset()
        {
            if (selectedGuid != null && graphObject != null)
            {
                var path = AssetDatabase.GUIDToAssetPath(selectedGuid);
                if (string.IsNullOrEmpty(path) || graphObject == null)
                    return;

                // Check the file out of version control (when VCS is active) before writing.
                bool VCSEnabled = (VersionControl.Provider.enabled && VersionControl.Provider.isActive);
                CheckoutIfValid(path, VCSEnabled);

                UpdateShaderGraphOnDisk(path);
                graphObject.isDirty = false;

                var windows = Resources.FindObjectsOfTypeAll<MaterialGraphEditWindow>();
                foreach (var materialGraphEditWindow in windows)
                {
                    materialGraphEditWindow.Rebuild();
                }
            }
        }

        // Converts the current node selection into a new sub-graph asset (chosen via a
        // save dialog) and replaces the selection in this graph with a single
        // SubGraphNode wired to the same external edges.
        public void ToSubGraph()
        {
            var graphView = graphEditorView.graphView;

            var path = EditorUtility.SaveFilePanelInProject("Save Sub Graph", "New Shader Sub Graph", ShaderSubGraphImporter.Extension, "");
            path = path.Replace(Application.dataPath, "Assets");
            if (path.Length == 0)
                return;

            graphObject.RegisterCompleteObjectUndo("Convert To Subgraph");

            // Selected nodes eligible to live inside a sub-graph; property nodes and
            // the sub-graph output node are handled separately below.
            var nodes = graphView.selection.OfType<IShaderNodeView>().Where(x => !(x.node is PropertyNode || x.node is SubGraphOutputNode)).Select(x => x.node).Where(x => x.allowedInSubGraph).ToArray();

            // Bounding box over the selection's node centres; 'middle' re-centres the
            // copied nodes around the new sub-graph's origin.
            var bounds = Rect.MinMaxRect(float.PositiveInfinity, float.PositiveInfinity, float.NegativeInfinity, float.NegativeInfinity);
            foreach (var node in nodes)
            {
                var center = node.drawState.position.center;
                bounds = Rect.MinMaxRect(
                    Mathf.Min(bounds.xMin, center.x),
                    Mathf.Min(bounds.yMin, center.y),
                    Mathf.Max(bounds.xMax, center.x),
                    Mathf.Max(bounds.yMax, center.y));
            }
            var middle = bounds.center;
            bounds.center = Vector2.zero;

            // Collect the property nodes and get the corresponding properties
            var propertyNodeGuids = graphView.selection.OfType<IShaderNodeView>().Where(x => (x.node is PropertyNode)).Select(x => ((PropertyNode)x.node).propertyGuid);
            var metaProperties = graphView.graph.properties.Where(x => propertyNodeGuids.Contains(x.guid));

            // Round-trip the selection through the copy/paste serializer to obtain an
            // independent, deep-copied set of nodes, edges and properties.
            var copyPasteGraph = new CopyPasteGraph(
                graphView.graph.assetGuid,
                graphView.selection.OfType<ShaderGroup>().Select(x => x.userData),
                graphView.selection.OfType<IShaderNodeView>().Where(x => !(x.node is PropertyNode || x.node is SubGraphOutputNode)).Select(x => x.node).Where(x => x.allowedInSubGraph).ToArray(),
                graphView.selection.OfType<Edge>().Select(x => x.userData as IEdge),
                graphView.selection.OfType<BlackboardField>().Select(x => x.userData as AbstractShaderProperty),
                metaProperties);

            var deserialized = CopyPasteGraph.FromJson(JsonUtility.ToJson(copyPasteGraph, false));
            if (deserialized == null)
                return;

            var subGraph = new GraphData { isSubGraph = true };
            subGraph.path = "Sub Graphs";
            var subGraphOutputNode = new SubGraphOutputNode();
            {
                var drawState = subGraphOutputNode.drawState;
                // Place the output node to the right of the copied selection.
                drawState.position = new Rect(new Vector2(bounds.xMax + 200f, 0f), drawState.position.size);
                subGraphOutputNode.drawState = drawState;
            }
            subGraph.AddNode(subGraphOutputNode);

            // Copy nodes into the sub-graph with fresh guids, remembering
            // old guid -> new guid so edges can be remapped afterwards.
            var nodeGuidMap = new Dictionary<Guid, Guid>();
            foreach (var node in deserialized.GetNodes<AbstractMaterialNode>())
            {
                var oldGuid = node.guid;
                var newGuid = node.RewriteGuid();
                nodeGuidMap[oldGuid] = newGuid;
                var drawState = node.drawState;
                drawState.position = new Rect(drawState.position.position - middle, drawState.position.size);
                node.drawState = drawState;
                subGraph.AddNode(node);
            }

            // figure out what needs remapping
            var externalOutputSlots = new List<IEdge>();
            var externalInputSlots = new List<IEdge>();

            foreach (var edge in deserialized.edges)
            {
                var outputSlot = edge.outputSlot;
                var inputSlot = edge.inputSlot;

                Guid remappedOutputNodeGuid;
                Guid remappedInputNodeGuid;
                var outputSlotExistsInSubgraph = nodeGuidMap.TryGetValue(outputSlot.nodeGuid, out remappedOutputNodeGuid);
                var inputSlotExistsInSubgraph = nodeGuidMap.TryGetValue(inputSlot.nodeGuid, out remappedInputNodeGuid);

                // pasting nice internal links!
                if (outputSlotExistsInSubgraph && inputSlotExistsInSubgraph)
                {
                    var outputSlotRef = new SlotReference(remappedOutputNodeGuid, outputSlot.slotId);
                    var inputSlotRef = new SlotReference(remappedInputNodeGuid, inputSlot.slotId);
                    subGraph.Connect(outputSlotRef, inputSlotRef);
                }
                // one edge needs to go to outside world
                else if (outputSlotExistsInSubgraph)
                {
                    externalInputSlots.Add(edge);
                }
                else if (inputSlotExistsInSubgraph)
                {
                    externalOutputSlots.Add(edge);
                }
            }

            // Find the unique edges coming INTO the graph
            var uniqueIncomingEdges = externalOutputSlots.GroupBy(
                edge => edge.outputSlot,
                edge => edge,
                (key, edges) => new { slotRef = key, edges = edges.ToList() });

            var externalInputNeedingConnection = new List<KeyValuePair<IEdge, AbstractShaderProperty>>();
            foreach (var group in uniqueIncomingEdges)
            {
                var sr = group.slotRef;
                var fromNode = graphObject.graph.GetNodeFromGuid(sr.nodeGuid);
                var fromSlot = fromNode.FindOutputSlot<MaterialSlot>(sr.slotId);

                // Each incoming value becomes a property on the sub-graph, typed to
                // match the slot that feeds it.
                AbstractShaderProperty prop;
                switch (fromSlot.concreteValueType)
                {
                    case ConcreteSlotValueType.Texture2D:
                        prop = new TextureShaderProperty();
                        break;
                    case ConcreteSlotValueType.Texture2DArray:
                        prop = new Texture2DArrayShaderProperty();
                        break;
                    case ConcreteSlotValueType.Texture3D:
                        prop = new Texture3DShaderProperty();
                        break;
                    case ConcreteSlotValueType.Cubemap:
                        prop = new CubemapShaderProperty();
                        break;
                    case ConcreteSlotValueType.Vector4:
                        prop = new Vector4ShaderProperty();
                        break;
                    case ConcreteSlotValueType.Vector3:
                        prop = new Vector3ShaderProperty();
                        break;
                    case ConcreteSlotValueType.Vector2:
                        prop = new Vector2ShaderProperty();
                        break;
                    case ConcreteSlotValueType.Vector1:
                        prop = new Vector1ShaderProperty();
                        break;
                    case ConcreteSlotValueType.Boolean:
                        prop = new BooleanShaderProperty();
                        break;
                    case ConcreteSlotValueType.Matrix2:
                        prop = new Matrix2ShaderProperty();
                        break;
                    case ConcreteSlotValueType.Matrix3:
                        prop = new Matrix3ShaderProperty();
                        break;
                    case ConcreteSlotValueType.Matrix4:
                        prop = new Matrix4ShaderProperty();
                        break;
                    case ConcreteSlotValueType.SamplerState:
                        prop = new SamplerStateShaderProperty();
                        break;
                    case ConcreteSlotValueType.Gradient:
                        prop = new GradientShaderProperty();
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }

                // NOTE(review): 'prop' can never be null here (the default case throws),
                // so this guard is always taken.
                if (prop != null)
                {
                    var materialGraph = (GraphData)graphObject.graph;
                    var fromPropertyNode = fromNode as PropertyNode;
                    // Reuse the original property's display name when the feeding node
                    // is itself a property node.
                    var fromProperty = fromPropertyNode != null ? materialGraph.properties.FirstOrDefault(p => p.guid == fromPropertyNode.propertyGuid) : null;
                    prop.displayName = fromProperty != null ? fromProperty.displayName : fromSlot.concreteValueType.ToString();

                    subGraph.AddShaderProperty(prop);
                    var propNode = new PropertyNode();
                    {
                        var drawState = propNode.drawState;
                        // Place incoming property nodes to the left of the selection.
                        drawState.position = new Rect(new Vector2(bounds.xMin - 300f, 0f), drawState.position.size);
                        propNode.drawState = drawState;
                    }
                    subGraph.AddNode(propNode);
                    propNode.propertyGuid = prop.guid;

                    foreach (var edge in group.edges)
                    {
                        subGraph.Connect(
                            new SlotReference(propNode.guid, PropertyNode.OutputSlotId),
                            new SlotReference(nodeGuidMap[edge.inputSlot.nodeGuid], edge.inputSlot.slotId));
                        externalInputNeedingConnection.Add(new KeyValuePair<IEdge, AbstractShaderProperty>(edge, prop));
                    }
                }
            }

            // Edges leaving the selection are routed through slots added to the
            // sub-graph's output node.
            var uniqueOutgoingEdges = externalInputSlots.GroupBy(
                edge => edge.outputSlot,
                edge => edge,
                (key, edges) => new { slot = key, edges = edges.ToList() });

            var externalOutputsNeedingConnection = new List<KeyValuePair<IEdge, IEdge>>();
            foreach (var group in uniqueOutgoingEdges)
            {
                var outputNode = subGraph.outputNode as SubGraphOutputNode;

                AbstractMaterialNode node = graphView.graph.GetNodeFromGuid(group.edges[0].outputSlot.nodeGuid);
                MaterialSlot slot = node.FindSlot<MaterialSlot>(group.edges[0].outputSlot.slotId);
                var slotId = outputNode.AddSlot(slot.concreteValueType);

                var inputSlotRef = new SlotReference(outputNode.guid, slotId);

                foreach (var edge in group.edges)
                {
                    var newEdge = subGraph.Connect(new SlotReference(nodeGuidMap[edge.outputSlot.nodeGuid], edge.outputSlot.slotId), inputSlotRef);
                    externalOutputsNeedingConnection.Add(new KeyValuePair<IEdge, IEdge>(edge, newEdge));
                }
            }

            // Persist the new sub-graph asset and load it back through the importer.
            File.WriteAllText(path, EditorJsonUtility.ToJson(subGraph));
            AssetDatabase.ImportAsset(path);

            var loadedSubGraph = AssetDatabase.LoadAssetAtPath(path, typeof(SubGraphAsset)) as SubGraphAsset;
            if (loadedSubGraph == null)
                return;

            // Insert a node referencing the new sub-graph where the selection was.
            var subGraphNode = new SubGraphNode();
            var ds = subGraphNode.drawState;
            ds.position = new Rect(middle - new Vector2(100f, 150f), Vector2.zero);
            subGraphNode.drawState = ds;
            graphObject.graph.AddNode(subGraphNode);
            subGraphNode.subGraphAsset = loadedSubGraph;

            // Reconnect the outer graph to the new node's inputs and outputs.
            foreach (var edgeMap in externalInputNeedingConnection)
            {
                graphObject.graph.Connect(edgeMap.Key.outputSlot, new SlotReference(subGraphNode.guid, edgeMap.Value.guid.GetHashCode()));
            }
            foreach (var edgeMap in externalOutputsNeedingConnection)
            {
                graphObject.graph.Connect(new SlotReference(subGraphNode.guid, edgeMap.Value.inputSlot.slotId), edgeMap.Key.inputSlot);
            }

            // Finally remove the now-duplicated originals from this graph.
            graphObject.graph.RemoveElements(
                graphView.selection.OfType<IShaderNodeView>().Select(x => x.node).Where(x => x.allowedInSubGraph),
                Enumerable.Empty<IEdge>(),
                Enumerable.Empty<GroupData>());
            graphObject.graph.ValidateGraph();
        }

        // Serializes the open graph to JSON at 'path' and re-imports the asset so the
        // importer regenerates the shader.
        void UpdateShaderGraphOnDisk(string path)
        {
            File.WriteAllText(path, EditorJsonUtility.ToJson(graphObject.graph, true));
            AssetDatabase.ImportAsset(path);
        }

        // Refreshes the slots of every SubGraphNode in this window's graph, picking up
        // changes made to the referenced sub-graph assets.
        private void Rebuild()
        {
            if (graphObject != null && graphObject.graph != null)
            {
                var subNodes = graphObject.graph.GetNodes<SubGraphNode>();
                foreach (var node in subNodes)
                    node.UpdateSlots();
            }
        }

        // Loads the (sub-)graph asset identified by 'assetGuid' into this window:
        // deserializes its JSON, validates the graph and builds the editor view.
        // No-ops when the asset is missing, not persistent, already open here, or has
        // an unrecognized extension. On any exception the window is flagged as
        // errored, its state is cleared, and the exception is rethrown.
        public void Initialize(string assetGuid)
        {
            try
            {
                m_ColorSpace = PlayerSettings.colorSpace;
                m_RenderPipelineAsset = GraphicsSettings.renderPipelineAsset;

                var asset = AssetDatabase.LoadAssetAtPath<Object>(AssetDatabase.GUIDToAssetPath(assetGuid));
                if (asset == null)
                    return;

                if (!EditorUtility.IsPersistent(asset))
                    return;

                if (selectedGuid == assetGuid)
                    return;

                var path = AssetDatabase.GetAssetPath(asset);
                var extension = Path.GetExtension(path);
                if (extension == null)
                    return;
                // Path.GetExtension returns the extension prefixed with ".", so we remove it. We force lower case such that
                // the comparison will be case-insensitive.
                extension = extension.Substring(1).ToLowerInvariant();
                bool isSubGraph;
                switch (extension)
                {
                    case ShaderGraphImporter.Extension:
                        isSubGraph = false;
                        break;
                    case ShaderSubGraphImporter.Extension:
                        isSubGraph = true;
                        break;
                    default:
                        return;
                }

                selectedGuid = assetGuid;

                var textGraph = File.ReadAllText(path, Encoding.UTF8);
                graphObject = CreateInstance<GraphObject>();
                graphObject.hideFlags = HideFlags.HideAndDontSave;
                graphObject.graph = JsonUtility.FromJson<GraphData>(textGraph);
                graphObject.graph.assetGuid = assetGuid;
                graphObject.graph.isSubGraph = isSubGraph;
                graphObject.graph.messageManager = messageManager;
                graphObject.graph.OnEnable();
                graphObject.graph.ValidateGraph();

                graphEditorView = new GraphEditorView(this, m_GraphObject.graph, messageManager)
                {
                    viewDataKey = selectedGuid,
                    assetName = asset.name.Split('/').Last()
                };

                titleContent = new GUIContent(asset.name.Split('/').Last());

                Repaint();
            }
            catch (Exception)
            {
                m_HasError = true;
                m_GraphEditorView = null;
                graphObject = null;
                throw;
            }
        }

        // One-shot layout callback: unregisters itself, frames the whole graph once
        // the view has valid geometry, then dirties every node so it redraws.
        void OnGeometryChanged(GeometryChangedEvent evt)
        {
            graphEditorView.UnregisterCallback<GeometryChangedEvent>(OnGeometryChanged);
            if (m_FrameAllAfterLayout)
                graphEditorView.graphView.FrameAll();
            m_FrameAllAfterLayout = false;
            foreach (var node in m_GraphObject.graph.GetNodes<AbstractMaterialNode>())
                node.Dirty(ModificationScope.Node);
        }

        // Checks 'path' out of version control when VCS is enabled and checkout is
        // valid for the asset; a failed checkout is logged but not fatal.
        void CheckoutIfValid(string path, bool VCSEnabled)
        {
            if (VCSEnabled)
            {
                var asset = VersionControl.Provider.GetAssetByPath(path);
                if (asset != null)
                {
                    if (VersionControl.Provider.CheckoutIsValid(asset))
                    {
                        var task = VersionControl.Provider.Checkout(asset, VersionControl.CheckoutMode.Both);
                        task.Wait();
                        if (!task.success)
                            Debug.Log(task.text + " " + task.resultCode);
                    }
                }
            }
        }
    }
}
//-----------------------------------------------------------------------
// <copyright file="ActorGraphInterpreter.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using Akka.Actor;
using Akka.Annotations;
using Akka.Event;
using Akka.Pattern;
using Akka.Streams.Stage;
using Reactive.Streams;
using static Akka.Streams.Implementation.Fusing.GraphInterpreter;

// ReSharper disable MemberHidesStaticFromOuterClass
namespace Akka.Streams.Implementation.Fusing
{
    /// <summary>
    /// INTERNAL API
    ///
    /// A module wrapping an already-fused <see cref="GraphAssembly"/> so it can be
    /// executed by a <see cref="GraphInterpreter"/>.
    /// </summary>
    [InternalApi]
    public sealed class GraphModule : AtomicModule
    {
        /// <summary>
        /// Modules contributing materialized values for this graph.
        /// </summary>
        public readonly IModule[] MaterializedValueIds;

        /// <summary>
        /// The fused set of stages and connections executed by the interpreter.
        /// </summary>
        public readonly GraphAssembly Assembly;

        /// <summary>
        /// Creates a graph module around the given fused assembly.
        /// </summary>
        /// <param name="assembly">Fused stages and connections.</param>
        /// <param name="shape">External shape (inlets/outlets) of the module.</param>
        /// <param name="attributes">Attributes attached to this module.</param>
        /// <param name="materializedValueIds">Modules contributing materialized values.</param>
        public GraphModule(GraphAssembly assembly, Shape shape, Attributes attributes, IModule[] materializedValueIds)
        {
            Assembly = assembly;
            Shape = shape;
            Attributes = attributes;
            MaterializedValueIds = materializedValueIds;
        }

        /// <summary>
        /// External shape of this module.
        /// </summary>
        public override Shape Shape { get; }

        /// <summary>
        /// Attributes attached to this module.
        /// </summary>
        public override Attributes Attributes { get; }

        /// <summary>
        /// Returns a copy of this module carrying <paramref name="attributes"/> instead.
        /// </summary>
        /// <param name="attributes">The attributes for the copy.</param>
        /// <returns>A new <see cref="GraphModule"/> with the same assembly and shape.</returns>
        public override IModule WithAttributes(Attributes attributes)
        {
            return new GraphModule(Assembly, Shape, attributes, MaterializedValueIds);
        }

        /// <summary>
        /// Returns a copy of this module with a deep-copied shape and no attributes.
        /// </summary>
        /// <returns>A <see cref="CopiedModule"/> wrapping this module.</returns>
        public override IModule CarbonCopy()
        {
            return new CopiedModule(Shape.DeepCopy(), Attributes.None, this);
        }

        /// <summary>
        /// Returns this module exposed through <paramref name="newShape"/>; wraps it in
        /// a composite only when the shape actually differs.
        /// </summary>
        /// <param name="newShape">The desired external shape.</param>
        /// <returns>This module, or a composite exposing <paramref name="newShape"/>.</returns>
        public override IModule ReplaceShape(Shape newShape)
        {
            if (!newShape.Equals(Shape))
                return CompositeModule.Create(this, newShape);
            return this;
        }

        /// <summary>
        /// Multi-line diagnostic dump of the assembly, shape and attributes.
        /// </summary>
        public override string ToString() =>
            "GraphModule\n" +
            $" {Assembly.ToString().Replace("\n", "\n ")}\n" +
            $" shape={Shape}, attributes={Attributes}\n" +
            $" MaterializedValueIds={string.Join<IModule>("\n ", MaterializedValueIds)}";
    }

    /// <summary>
    /// INTERNAL API
    ///
    /// Hosts one <see cref="GraphInterpreter"/> inside an <see cref="ActorGraphInterpreter"/>
    /// actor: wires the interpreter's upstream/downstream boundaries, feeds it boundary
    /// events, and batches execution for fairness with other actors.
    /// </summary>
    [InternalApi]
    public sealed class GraphInterpreterShell
    {
        private readonly GraphAssembly _assembly;
        private readonly Connection[] _connections;
        private readonly GraphStageLogic[] _logics;
        private readonly Shape _shape;
        private readonly ActorMaterializerSettings _settings;

        /// <summary>
        /// The materializer that created this shell; used for scheduling and logging.
        /// </summary>
        internal readonly ExtendedActorMaterializer Materializer;

        /// <summary>
        /// Limits the number of events processed by the interpreter before scheduling
        /// a self-message for fairness with other actors. The basic assumption here is
        /// to give each input buffer slot a chance to run through the whole pipeline
        /// and back (for the elements).
        ///
        /// Considered use case:
        /// - assume a composite Sink of one expand and one fold
        /// - assume an infinitely fast source of data
        /// - assume maxInputBufferSize == 1
        /// - if the event limit is greater than maxInputBufferSize * (ins + outs) than there will always be expand activity
        /// because no data can enter "fast enough" from the outside
        /// </summary>
        private readonly int _shellEventLimit;

        // Limits the number of events processed by the interpreter on an abort event.
        private readonly int _abortLimit;

        private readonly ActorGraphInterpreter.BatchingActorInputBoundary[] _inputs;
        private readonly ActorGraphInterpreter.IActorOutputBoundary[] _outputs;

        private ILoggingAdapter _log;          // lazily created, see Log
        private GraphInterpreter _interpreter; // lazily created, see Interpreter
        private int _subscribersPending;       // inputs not yet subscribed
        private int _publishersPending;        // outputs not yet exposed
        private bool _resumeScheduled;
        private bool _waitingForShutdown;
        private Action<object> _enqueueToShortCircuit;
        private bool _interpreterCompleted;
        private readonly ActorGraphInterpreter.Resume _resume; // cached self-message

        /// <summary>
        /// Creates a shell for one fused graph.
        /// </summary>
        /// <param name="assembly">Fused stages and connections to interpret.</param>
        /// <param name="connections">Connection slots shared with the interpreter.</param>
        /// <param name="logics">Stage logics in assembly order.</param>
        /// <param name="shape">External shape of the fused graph.</param>
        /// <param name="settings">Materializer settings (buffer sizes, timeouts, fuzzing).</param>
        /// <param name="materializer">The owning materializer.</param>
        public GraphInterpreterShell(GraphAssembly assembly, Connection[] connections, GraphStageLogic[] logics, Shape shape, ActorMaterializerSettings settings, ExtendedActorMaterializer materializer)
        {
            _assembly = assembly;
            _connections = connections;
            _logics = logics;
            _shape = shape;
            _settings = settings;
            Materializer = materializer;

            _inputs = new ActorGraphInterpreter.BatchingActorInputBoundary[shape.Inlets.Count()];
            _outputs = new ActorGraphInterpreter.IActorOutputBoundary[shape.Outlets.Count()];
            _subscribersPending = _inputs.Length;
            _publishersPending = _outputs.Length;
            // One batch gives each input buffer slot a chance through the pipeline.
            _shellEventLimit = settings.MaxInputBufferSize * (assembly.Inlets.Length + assembly.Outlets.Length);
            _abortLimit = _shellEventLimit * 2;

            _resume = new ActorGraphInterpreter.Resume(this);
        }

        /// <summary>
        /// True once <see cref="Init"/> has assigned <see cref="Self"/>.
        /// </summary>
        public bool IsInitialized => Self != null;

        /// <summary>
        /// True when the interpreter has completed and all boundaries are settled.
        /// </summary>
        public bool IsTerminated => _interpreterCompleted && CanShutdown;

        /// <summary>
        /// True when no subscribers or publishers remain pending.
        /// </summary>
        public bool CanShutdown => _subscribersPending + _publishersPending == 0;

        /// <summary>
        /// The hosting actor's reference; set by <see cref="Init"/>.
        /// </summary>
        public IActorRef Self { get; private set; }

        /// <summary>
        /// Lazily created logging adapter for this shell.
        /// </summary>
        public ILoggingAdapter Log => _log ?? (_log = GetLogger());

        /// <summary>
        /// Lazily created interpreter executing this shell's assembly.
        /// </summary>
        public GraphInterpreter Interpreter => _interpreter ?? (_interpreter = GetInterpreter());

        /// <summary>
        /// Attaches all upstream/downstream boundaries, initializes the interpreter
        /// and runs the first batch.
        /// </summary>
        /// <param name="self">The hosting actor.</param>
        /// <param name="subMat">Materializer used for stages materialized inside this shell.</param>
        /// <param name="enqueueToShourtCircuit">Callback used to short-circuit self-events
        /// without a mailbox round-trip. (Name misspelled upstream; kept for compatibility.)</param>
        /// <param name="eventLimit">Actor-level event budget for the initial batch.</param>
        /// <returns>The remaining event budget after the initial batch.</returns>
        public int Init(IActorRef self, SubFusingActorMaterializerImpl subMat, Action<object> enqueueToShourtCircuit, int eventLimit)
        {
            Self = self;
            _enqueueToShortCircuit = enqueueToShourtCircuit;

            for (int i = 0; i < _inputs.Length; i++)
            {
                var input = new ActorGraphInterpreter.BatchingActorInputBoundary(_settings.MaxInputBufferSize, i);
                _inputs[i] = input;
                Interpreter.AttachUpstreamBoundary(_connections[i], input);
            }

            // Downstream boundary connections occupy the tail of the connection array.
            var offset = _assembly.ConnectionCount - _outputs.Length;
            for (int i = 0; i < _outputs.Length; i++)
            {
                // Reflectively instantiate ActorOutputBoundary<T> with the outlet's element type.
                var outputType = _shape.Outlets[i].GetType().GetGenericArguments().First();
                var output = (ActorGraphInterpreter.IActorOutputBoundary) typeof(ActorGraphInterpreter.ActorOutputBoundary<>).Instantiate(outputType, Self, this, i);
                _outputs[i] = output;
                Interpreter.AttachDownstreamBoundary(_connections[i + offset], (DownstreamBoundaryStageLogic) output);
            }

            Interpreter.Init(subMat);
            return RunBatch(eventLimit);
        }

        /// <summary>
        /// Dispatches one boundary event into the interpreter and (for hot-path events)
        /// runs a batch. While waiting for shutdown only completion bookkeeping and the
        /// abort timeout are handled.
        /// </summary>
        /// <param name="e">The boundary event to process.</param>
        /// <param name="eventLimit">Actor-level event budget.</param>
        /// <returns>The remaining event budget.</returns>
        public int Receive(ActorGraphInterpreter.IBoundaryEvent e, int eventLimit)
        {
            _resumeScheduled = false;

            if (_waitingForShutdown)
            {
                switch (e)
                {
                    case ActorGraphInterpreter.ExposedPublisher exposedPublisher:
                        _outputs[exposedPublisher.Id].ExposedPublisher(exposedPublisher.Publisher);
                        _publishersPending--;
                        if (CanShutdown)
                            _interpreterCompleted = true;
                        break;
                    case ActorGraphInterpreter.OnSubscribe onSubscribe:
                        // Already shutting down: cancel the late subscription.
                        ReactiveStreamsCompliance.TryCancel(onSubscribe.Subscription);
                        _subscribersPending--;
                        if (CanShutdown)
                            _interpreterCompleted = true;
                        break;
                    case ActorGraphInterpreter.Abort _:
                        TryAbort(new TimeoutException( $"Streaming actor has been already stopped processing (normally), but not all of its inputs or outputs have been subscribed in [{_settings.SubscriptionTimeoutSettings.Timeout}]. Aborting actor now."));
                        break;
                }
                return eventLimit;
            }

            // Cases that are most likely on the hot path, in decreasing order of frequency
            switch (e)
            {
                case ActorGraphInterpreter.OnNext onNext:
                    if (IsDebug)
                        Console.WriteLine($"{Interpreter.Name} OnNext {onNext.Event} id={onNext.Id}");
                    _inputs[onNext.Id].OnNext(onNext.Event);
                    return RunBatch(eventLimit);
                case ActorGraphInterpreter.RequestMore requestMore:
                    if (IsDebug)
                        Console.WriteLine($"{Interpreter.Name} Request {requestMore.Demand} id={requestMore.Id}");
                    _outputs[requestMore.Id].RequestMore(requestMore.Demand);
                    return RunBatch(eventLimit);
                case ActorGraphInterpreter.Resume _:
                    if (IsDebug)
                        Console.WriteLine($"{Interpreter.Name} Resume");
                    if (Interpreter.IsSuspended)
                        return RunBatch(eventLimit);
                    return eventLimit;
                case ActorGraphInterpreter.AsyncInput asyncInput:
                    Interpreter.RunAsyncInput(asyncInput.Logic, asyncInput.Event, asyncInput.Handler);
                    // Out of budget but still suspended: schedule a mailbox resume.
                    if (eventLimit == 1 && _interpreter.IsSuspended)
                    {
                        SendResume(true);
                        return 0;
                    }
                    return RunBatch(eventLimit - 1);
                case ActorGraphInterpreter.OnError onError:
                    if (IsDebug)
                        Console.WriteLine($"{Interpreter.Name} OnError id={onError.Id}");
                    _inputs[onError.Id].OnError(onError.Cause);
                    return RunBatch(eventLimit);
                case ActorGraphInterpreter.OnComplete onComplete:
                    if (IsDebug)
                        Console.WriteLine($"{Interpreter.Name} OnComplete id={onComplete.Id}");
                    _inputs[onComplete.Id].OnComplete();
                    return RunBatch(eventLimit);
                case ActorGraphInterpreter.OnSubscribe onSubscribe:
                    if (IsDebug)
                        Console.WriteLine($"{Interpreter.Name} OnSubscribe id={onSubscribe.Id}");
                    _subscribersPending--;
                    _inputs[onSubscribe.Id].OnSubscribe(onSubscribe.Subscription);
                    return RunBatch(eventLimit);
                case ActorGraphInterpreter.Cancel cancel:
                    if (IsDebug)
                        Console.WriteLine($"{Interpreter.Name} Cancel id={cancel.Id}");
                    _outputs[cancel.Id].Cancel();
                    return RunBatch(eventLimit);
                case ActorGraphInterpreter.SubscribePending subscribePending:
                    _outputs[subscribePending.Id].SubscribePending();
                    return eventLimit;
                case ActorGraphInterpreter.ExposedPublisher exposedPublisher:
                    _publishersPending--;
                    _outputs[exposedPublisher.Id].ExposedPublisher(exposedPublisher.Publisher);
                    return eventLimit;
            }
            return eventLimit;
        }

        /**
         * Attempts to abort execution, by first propagating the reason given until either
         *  - the interpreter successfully finishes
         *  - the event limit is reached
         *  - a new error is encountered
         */
        /// <summary>
        /// Attempts to abort execution by propagating <paramref name="reason"/> through
        /// the graph (bounded by the abort event limit), then failing all outputs and
        /// cancelling all inputs.
        /// </summary>
        /// <param name="reason">The failure to propagate; spec violations are wrapped in
        /// an <see cref="IllegalStateException"/>.</param>
        public void TryAbort(Exception reason)
        {
            var ex = reason is ISpecViolation ? new IllegalStateException("Shutting down because of violation of the Reactive Streams specification", reason) : reason;

            // This should handle termination while interpreter is running. If the upstream have been closed already this
            // call has no effect and therefore does the right thing: nothing.
            try
            {
                foreach (var input in _inputs)
                    input.OnInternalError(ex);

                Interpreter.Execute(_abortLimit);
                Interpreter.Finish();
            }
            catch (Exception)
            {
                /* swallow? */
            }
            finally
            {
                _interpreterCompleted = true;
                // Will only have an effect if the above call to the interpreter failed to emit a proper failure to the downstream
                // otherwise this will have no effect
                foreach (var output in _outputs)
                    output.Fail(ex);
                foreach (var input in _inputs)
                    input.Cancel();
            }
        }

        // Executes up to min(actorEventLimit, _shellEventLimit) interpreter events,
        // handles completion/suspension bookkeeping, and returns the remaining
        // actor-level budget. Any exception aborts the shell.
        private int RunBatch(int actorEventLimit)
        {
            try
            {
                var usingShellLimit = _shellEventLimit < actorEventLimit;
                var remainingQuota = _interpreter.Execute(Math.Min(actorEventLimit, _shellEventLimit));

                if (Interpreter.IsCompleted)
                {
                    // Cannot stop right away if not completely subscribed
                    if (CanShutdown)
                        _interpreterCompleted = true;
                    else
                    {
                        _waitingForShutdown = true;
                        Materializer.ScheduleOnce(_settings.SubscriptionTimeoutSettings.Timeout, () => Self.Tell(new ActorGraphInterpreter.Abort(this)));
                    }
                }
                else if (Interpreter.IsSuspended && !_resumeScheduled)
                    SendResume(!usingShellLimit);

                return usingShellLimit ? actorEventLimit - _shellEventLimit + remainingQuota : remainingQuota;
            }
            catch (Exception reason)
            {
                TryAbort(reason);
                return actorEventLimit - 1;
            }
        }

        // Schedules a Resume either via the mailbox (sendResume) or via the
        // short-circuit queue.
        private void SendResume(bool sendResume)
        {
            _resumeScheduled = true;
            if (sendResume)
                Self.Tell(_resume);
            else
                _enqueueToShortCircuit(_resume);
        }

        // Builds the interpreter; async callbacks from other threads are routed back
        // to this shell's actor (or short-circuited when already on it).
        private GraphInterpreter GetInterpreter()
        {
            return new GraphInterpreter(_assembly, Materializer, Log, _logics, _connections,
                (logic, @event, handler) =>
                {
                    var asyncInput = new ActorGraphInterpreter.AsyncInput(this, logic, @event, handler);
                    var currentInterpreter = CurrentInterpreterOrNull;
                    if (currentInterpreter == null || !Equals(currentInterpreter.Context, Self))
                        Self.Tell(new ActorGraphInterpreter.AsyncInput(this, logic, @event, handler));
                    else
                        _enqueueToShortCircuit(asyncInput);
                }, _settings.IsFuzzingMode, Self);
        }

        private BusLogging GetLogger()
        {
            return new BusLogging(Materializer.System.EventStream, Self.ToString(), typeof(GraphInterpreterShell), new DefaultLogMessageFormatter());
        }

        /// <summary>
        /// Diagnostic dump of the shell's assembly.
        /// </summary>
        public override string ToString() => $"GraphInterpreterShell\n {_assembly.ToString().Replace("\n", "\n ")}";
    }

    /// <summary>
    /// INTERNAL API
    ///
    /// The actor hosting one or more <see cref="GraphInterpreterShell"/>s; boundary
    /// events defined below are its message protocol.
    /// </summary>
    [InternalApi]
    public class ActorGraphInterpreter : ActorBase
    {
        #region messages

        /// <summary>
        /// A message addressed to a particular <see cref="GraphInterpreterShell"/>
        /// hosted by this actor.
        /// </summary>
        public interface IBoundaryEvent : INoSerializationVerificationNeeded, IDeadLetterSuppression
        {
            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// An upstream failure signal for input boundary <see cref="Id"/>.
        /// </summary>
        public struct OnError : IBoundaryEvent
        {
            /// <summary>
            /// Index of the input boundary this failure targets.
            /// </summary>
            public readonly int Id;

            /// <summary>
            /// The upstream failure.
            /// </summary>
            public readonly Exception Cause;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Input boundary index.</param>
            /// <param name="cause">The upstream failure.</param>
            public OnError(GraphInterpreterShell shell, int id, Exception cause)
            {
                Shell = shell;
                Id = id;
                Cause = cause;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// An upstream completion signal for input boundary <see cref="Id"/>.
        /// </summary>
        public struct OnComplete : IBoundaryEvent
        {
            /// <summary>
            /// Index of the input boundary this completion targets.
            /// </summary>
            public readonly int Id;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Input boundary index.</param>
            public OnComplete(GraphInterpreterShell shell, int id)
            {
                Shell = shell;
                Id = id;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// An upstream element for input boundary <see cref="Id"/>.
        /// </summary>
        public struct OnNext : IBoundaryEvent
        {
            /// <summary>
            /// Index of the input boundary this element targets.
            /// </summary>
            public readonly int Id;

            /// <summary>
            /// The element received from upstream.
            /// </summary>
            public readonly object Event;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Input boundary index.</param>
            /// <param name="event">The element.</param>
            public OnNext(GraphInterpreterShell shell, int id, object @event)
            {
                Shell = shell;
                Id = id;
                Event = @event;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// An upstream subscription arriving for input boundary <see cref="Id"/>.
        /// </summary>
        public struct OnSubscribe : IBoundaryEvent
        {
            ///
// NOTE(review): chunk resumes mid doc-comment of the OnSubscribe struct; its
// header ("public struct OnSubscribe : IBoundaryEvent {") is on the previous chunk.
            /// <summary>
            /// Index of the input boundary the subscription targets.
            /// </summary>
            public readonly int Id;

            /// <summary>
            /// The upstream subscription.
            /// </summary>
            public readonly ISubscription Subscription;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Input boundary index.</param>
            /// <param name="subscription">The upstream subscription.</param>
            public OnSubscribe(GraphInterpreterShell shell, int id, ISubscription subscription)
            {
                Shell = shell;
                Id = id;
                Subscription = subscription;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// A downstream demand signal for output boundary <see cref="Id"/>.
        /// </summary>
        public struct RequestMore : IBoundaryEvent
        {
            /// <summary>
            /// Index of the output boundary the demand targets.
            /// </summary>
            public readonly int Id;

            /// <summary>
            /// Number of additional elements requested by downstream.
            /// </summary>
            public readonly long Demand;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Output boundary index.</param>
            /// <param name="demand">Requested element count.</param>
            public RequestMore(GraphInterpreterShell shell, int id, long demand)
            {
                Shell = shell;
                Id = id;
                Demand = demand;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// A downstream cancellation for output boundary <see cref="Id"/>.
        /// </summary>
        public struct Cancel : IBoundaryEvent
        {
            /// <summary>
            /// Index of the output boundary being cancelled.
            /// </summary>
            public readonly int Id;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Output boundary index.</param>
            public Cancel(GraphInterpreterShell shell, int id)
            {
                Shell = shell;
                Id = id;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// Asks output boundary <see cref="Id"/> to process pending subscribers
        /// (sent as the publisher's wake-up message).
        /// </summary>
        public struct SubscribePending : IBoundaryEvent
        {
            /// <summary>
            /// Index of the output boundary with pending subscribers.
            /// </summary>
            public readonly int Id;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Output boundary index.</param>
            public SubscribePending(GraphInterpreterShell shell, int id)
            {
                Shell = shell;
                Id = id;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// Notifies the shell that the publisher for output boundary <see cref="Id"/>
        /// has been exposed to the outside world.
        /// </summary>
        public struct ExposedPublisher : IBoundaryEvent
        {
            /// <summary>
            /// Index of the output boundary the publisher belongs to.
            /// </summary>
            public readonly int Id;

            /// <summary>
            /// The exposed publisher.
            /// </summary>
            public readonly IActorPublisher Publisher;

            /// <param name="shell">Target shell.</param>
            /// <param name="id">Output boundary index.</param>
            /// <param name="publisher">The exposed publisher.</param>
            public ExposedPublisher(GraphInterpreterShell shell, int id, IActorPublisher publisher)
            {
                Shell = shell;
                Id = id;
                Publisher = publisher;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// An asynchronous callback delivered to a stage logic via
        /// <c>GraphInterpreter.RunAsyncInput</c>.
        /// </summary>
        public struct AsyncInput : IBoundaryEvent
        {
            /// <summary>
            /// The stage logic that registered the callback.
            /// </summary>
            public readonly GraphStageLogic Logic;

            /// <summary>
            /// The payload passed to the handler.
            /// </summary>
            public readonly object Event;

            /// <summary>
            /// The callback to invoke with <see cref="Event"/>.
            /// </summary>
            public readonly Action<object> Handler;

            /// <param name="shell">Target shell.</param>
            /// <param name="logic">The stage logic that registered the callback.</param>
            /// <param name="event">The payload.</param>
            /// <param name="handler">The callback to invoke.</param>
            public AsyncInput(GraphInterpreterShell shell, GraphStageLogic logic, object @event, Action<object> handler)
            {
                Shell = shell;
                Logic = logic;
                Event = @event;
                Handler = handler;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// Self-message asking a suspended shell to continue executing.
        /// </summary>
        public struct Resume : IBoundaryEvent
        {
            /// <param name="shell">Target shell.</param>
            public Resume(GraphInterpreterShell shell)
            {
                Shell = shell;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        /// <summary>
        /// Self-message sent after the subscription timeout to force-abort a shell
        /// that finished processing but never became fully subscribed.
        /// </summary>
        public struct Abort : IBoundaryEvent
        {
            /// <param name="shell">Target shell.</param>
            public Abort(GraphInterpreterShell shell)
            {
                Shell = shell;
            }

            /// <summary>
            /// The shell this event is addressed to.
            /// </summary>
            public GraphInterpreterShell Shell { get; }
        }

        // Singleton marker message; no payload.
        private class ShellRegistered
        {
            public static readonly ShellRegistered Instance = new ShellRegistered();
            private ShellRegistered() { }
        }

        #endregion

        #region internal classes

        /// <summary>
        /// Publisher exposed at an output boundary; wakes the hosting actor with a
        /// <see cref="SubscribePending"/> message when subscribers arrive.
        /// </summary>
        /// <typeparam name="T">Element type of the published stream.</typeparam>
        public sealed class BoundaryPublisher<T> : ActorPublisher<T>
        {
            /// <param name="parent">The hosting interpreter actor.</param>
            /// <param name="shell">Shell owning the output boundary.</param>
            /// <param name="id">Output boundary index.</param>
            public BoundaryPublisher(IActorRef parent, GraphInterpreterShell shell, int id) : base(parent)
            {
                _wakeUpMessage = new SubscribePending(shell, id);
            }

            private readonly SubscribePending _wakeUpMessage;

            /// <summary>
            /// Message sent to the parent actor when this publisher needs attention.
            /// </summary>
            protected override object WakeUpMessage => _wakeUpMessage;
        }

        /// <summary>
        /// Subscription handed to downstream subscribers; forwards request/cancel
        /// signals to the interpreter actor as boundary events.
        /// </summary>
        public sealed class BoundarySubscription : ISubscription
        {
            private readonly IActorRef _parent;
            private readonly GraphInterpreterShell _shell;
            private readonly int _id;

            /// <param name="parent">The hosting interpreter actor.</param>
            /// <param name="shell">Shell owning the output boundary.</param>
            /// <param name="id">Output boundary index.</param>
            public BoundarySubscription(IActorRef parent, GraphInterpreterShell shell, int id)
            {
                _parent = parent;
                _shell = shell;
                _id = id;
            }

            /// <summary>
            /// Forwards downstream demand as a <see cref="RequestMore"/> event.
            /// </summary>
            /// <param name="elements">Number of elements requested.</param>
            public void Request(long elements) => _parent.Tell(new RequestMore(_shell, _id, elements));

            /// <summary>
            /// Forwards downstream cancellation as a <see cref="Cancel"/> event.
            /// </summary>
            public void Cancel() => _parent.Tell(new Cancel(_shell, _id));

            public override string ToString() => $"BoundarySubscription[{_parent}, {_id}]";
        }

        /// <summary>
        /// Subscriber attached to an upstream publisher; validates and forwards the
        /// Reactive Streams signals to the interpreter actor as boundary events.
        /// </summary>
        /// <typeparam name="T">Element type of the subscribed stream.</typeparam>
        public sealed class BoundarySubscriber<T> : ISubscriber<T>
        {
            private readonly IActorRef _parent;
            private readonly GraphInterpreterShell _shell;
            private readonly int _id;

            /// <param name="parent">The hosting interpreter actor.</param>
            /// <param name="shell">Shell owning the input boundary.</param>
            /// <param name="id">Input boundary index.</param>
            public BoundarySubscriber(IActorRef parent, GraphInterpreterShell shell, int id)
            {
                _parent = parent;
                _shell = shell;
                _id = id;
            }

            /// <summary>
            /// Forwards the subscription (after a null check) as an <see cref="OnSubscribe"/> event.
            /// </summary>
            /// <param name="subscription">The upstream subscription.</param>
            public void OnSubscribe(ISubscription subscription)
            {
                ReactiveStreamsCompliance.RequireNonNullSubscription(subscription);
                _parent.Tell(new OnSubscribe(_shell, _id, subscription));
            }

            /// <summary>
            /// Forwards the failure (after a null check) as an <see cref="OnError"/> event.
            /// </summary>
            /// <param name="cause">The upstream failure.</param>
            public void OnError(Exception cause)
            {
                ReactiveStreamsCompliance.RequireNonNullException(cause);
                _parent.Tell(new OnError(_shell, _id, cause));
            }

            /// <summary>
            /// Forwards completion as an <see cref="OnComplete"/> event.
            /// </summary>
            public void OnComplete() => _parent.Tell(new OnComplete(_shell, _id));

            /// <summary>
            /// Forwards an element (after a null check) as an <see cref="OnNext"/> event.
            /// </summary>
            /// <param name="element">The upstream element.</param>
            public void OnNext(T element)
            {
                ReactiveStreamsCompliance.RequireNonNullElement(element);
                _parent.Tell(new OnNext(_shell, _id, element));
            }
        }

        /// <summary>
        /// Upstream boundary stage that buffers incoming elements in a ring buffer
        /// and feeds them into the interpreter on pull.
        /// </summary>
        public class BatchingActorInputBoundary : UpstreamBoundaryStageLogic
        {
            #region OutHandler
            private sealed class OutHandler : Stage.OutHandler
            {
                private readonly BatchingActorInputBoundary _that;

                public OutHandler(BatchingActorInputBoundary that)
                {
                    _that = that;
                }

                // Pushes a buffered element downstream; when the upstream has
                // completed and the last buffered element is pushed, completes the
                // outlet too. With an empty buffer and completed upstream, just
                // completes.
                public override void OnPull()
                {
                    var elementsCount = _that._inputBufferElements;
                    var upstreamCompleted = _that._upstreamCompleted;
                    if (elementsCount > 1)
                        _that.Push(_that.Out, _that.Dequeue());
                    else if (elementsCount == 1)
                    {
                        if (upstreamCompleted)
                        {
                            _that.Push(_that.Out, _that.Dequeue());
                            _that.Complete(_that.Out);
                        }
                        else
                            _that.Push(_that.Out, _that.Dequeue());
                    }
                    else if (upstreamCompleted)
                        _that.Complete(_that.Out);
                }

                public override void OnDownstreamFinish() => _that.Cancel();

                public override string ToString() => _that.ToString();
            }
            #endregion

            private readonly int _size;             // input buffer capacity
            private readonly int _id;               // input boundary index
            private readonly object[] _inputBuffer; // ring buffer of pending elements
            private readonly int _indexMask;        // _size - 1 presumably (power-of-two mask) — TODO confirm on the unseen ctor

            private ISubscription _upstream;
            private int _inputBufferElements;
            private int _nextInputElementCursor;
            private bool _upstreamCompleted;
            private bool _downstreamCanceled;
            // NOTE(review): chunk ends mid-declaration; the rest of this field (and
            // the remainder of the class) continues on the next, unseen chunk.
            private readonly int
_requestBatchSize; private int _batchRemaining; private readonly Outlet _outlet; /// <summary> /// TBD /// </summary> /// <param name="size">TBD</param> /// <param name="id">TBD</param> /// <exception cref="ArgumentException">TBD</exception> public BatchingActorInputBoundary(int size, int id) { if (size <= 0) throw new ArgumentException("Buffer size cannot be zero", nameof(size)); if ((size & (size - 1)) != 0) throw new ArgumentException("Buffer size must be power of two", nameof(size)); _size = size; _id = id; _inputBuffer = new object[size]; _indexMask = size - 1; _requestBatchSize = Math.Max(1, _inputBuffer.Length/2); _batchRemaining = _requestBatchSize; _outlet = new Outlet<object>("UpstreamBoundary" + id) { Id = 0 }; SetHandler(_outlet, new OutHandler(this)); } /// <summary> /// TBD /// </summary> public override Outlet Out => _outlet; // Call this when an error happens that does not come from the usual onError channel // (exceptions while calling RS interfaces, abrupt termination etc) /// <summary> /// TBD /// </summary> /// <param name="reason">TBD</param> public void OnInternalError(Exception reason) { if (!(_upstreamCompleted || _downstreamCanceled) && !ReferenceEquals(_upstream, null)) _upstream.Cancel(); if (!IsClosed(Out)) OnError(reason); } /// <summary> /// TBD /// </summary> /// <param name="reason">TBD</param> public void OnError(Exception reason) { if (!_upstreamCompleted || !_downstreamCanceled) { _upstreamCompleted = true; Clear(); Fail(Out, reason); } } /// <summary> /// TBD /// </summary> public void OnComplete() { if (!_upstreamCompleted) { _upstreamCompleted = true; if (_inputBufferElements == 0) Complete(Out); } } /// <summary> /// TBD /// </summary> /// <param name="subscription">TBD</param> /// <exception cref="ArgumentException">TBD</exception> public void OnSubscribe(ISubscription subscription) { if (subscription == null) throw new ArgumentException("Subscription cannot be null"); if (_upstreamCompleted) 
ReactiveStreamsCompliance.TryCancel(subscription); else if (_downstreamCanceled) { _upstreamCompleted = true; ReactiveStreamsCompliance.TryCancel(subscription); } else { _upstream = subscription; // prefetch ReactiveStreamsCompliance.TryRequest(_upstream, _inputBuffer.Length); } } /// <summary> /// TBD /// </summary> /// <param name="element">TBD</param> /// <exception cref="IllegalStateException">TBD</exception> public void OnNext(object element) { if (!_upstreamCompleted) { if (_inputBufferElements == _size) throw new IllegalStateException("Input buffer overrun"); _inputBuffer[(_nextInputElementCursor + _inputBufferElements) & _indexMask] = element; _inputBufferElements++; if (IsAvailable(Out)) Push(Out, Dequeue()); } } /// <summary> /// TBD /// </summary> public void Cancel() { _downstreamCanceled = true; if (!_upstreamCompleted) { _upstreamCompleted = true; if (!ReferenceEquals(_upstream, null)) ReactiveStreamsCompliance.TryCancel(_upstream); Clear(); } } private object Dequeue() { var element = _inputBuffer[_nextInputElementCursor]; if (element == null) throw new IllegalStateException("Internal queue must never contain a null"); _inputBuffer[_nextInputElementCursor] = null; _batchRemaining--; if (_batchRemaining == 0 && !_upstreamCompleted) { ReactiveStreamsCompliance.TryRequest(_upstream, _requestBatchSize); _batchRemaining = _requestBatchSize; } _inputBufferElements--; _nextInputElementCursor = (_nextInputElementCursor + 1) & _indexMask; return element; } private void Clear() { _inputBuffer.Initialize(); _inputBufferElements = 0; } /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override string ToString() => $"BatchingActorInputBoundary(id={_id}, fill={_inputBufferElements}/{_size}, completed={_upstreamCompleted}, canceled={_downstreamCanceled})"; } /// <summary> /// TBD /// </summary> internal interface IActorOutputBoundary { /// <summary> /// TBD /// </summary> void SubscribePending(); /// <summary> /// TBD /// </summary> /// <param 
name="publisher">TBD</param> void ExposedPublisher(IActorPublisher publisher); /// <summary> /// TBD /// </summary> /// <param name="elements">TBD</param> void RequestMore(long elements); /// <summary> /// TBD /// </summary> void Cancel(); /// <summary> /// TBD /// </summary> /// <param name="reason">TBD</param> void Fail(Exception reason); } /// <summary> /// TBD /// </summary> /// <typeparam name="T">TBD</typeparam> internal class ActorOutputBoundary<T> : DownstreamBoundaryStageLogic, IActorOutputBoundary { #region InHandler private sealed class InHandler : Stage.InHandler { private readonly ActorOutputBoundary<T> _that; public InHandler(ActorOutputBoundary<T> that) { _that = that; } public override void OnPush() { _that.OnNext(_that.Grab<T>(_that.In)); if (_that._downstreamCompleted) _that.Cancel(_that.In); else if (_that._downstreamDemand > 0) _that.Pull(_that.In); } public override void OnUpstreamFinish() => _that.Complete(); public override void OnUpstreamFailure(Exception e) => _that.Fail(e); public override string ToString() => _that.ToString(); } #endregion private readonly IActorRef _actor; private readonly GraphInterpreterShell _shell; private readonly int _id; private ActorPublisher<T> _exposedPublisher; private ISubscriber<T> _subscriber; private long _downstreamDemand; // This flag is only used if complete/fail is called externally since this op turns into a Finished one inside the // interpreter (i.e. 
inside this op this flag has no effects since if it is completed the op will not be invoked) private bool _downstreamCompleted; // when upstream failed before we got the exposed publisher private Exception _upstreamFailed; private bool _upstreamCompleted; private readonly Inlet<T> _inlet; /// <summary> /// TBD /// </summary> /// <param name="actor">TBD</param> /// <param name="shell">TBD</param> /// <param name="id">TBD</param> public ActorOutputBoundary(IActorRef actor, GraphInterpreterShell shell, int id) { _actor = actor; _shell = shell; _id = id; _inlet = new Inlet<T>("UpstreamBoundary" + id) { Id = 0 }; SetHandler(_inlet, new InHandler(this)); } /// <summary> /// TBD /// </summary> public override Inlet In => _inlet; /// <summary> /// TBD /// </summary> /// <param name="elements">TBD</param> public void RequestMore(long elements) { if (elements < 1) { Cancel((Inlet<T>) In); Fail(ReactiveStreamsCompliance.NumberOfElementsInRequestMustBePositiveException); } else { _downstreamDemand += elements; if (_downstreamDemand < 0) _downstreamDemand = long.MaxValue; // Long overflow, Reactive Streams Spec 3:17: effectively unbounded if (!HasBeenPulled(In) && !IsClosed(In)) Pull(In); } } /// <summary> /// TBD /// </summary> public void SubscribePending() { foreach (var subscriber in _exposedPublisher.TakePendingSubscribers()) { if (ReferenceEquals(_subscriber, null)) { _subscriber = subscriber; ReactiveStreamsCompliance.TryOnSubscribe(_subscriber, new BoundarySubscription(_actor, _shell, _id)); if (IsDebug) Console.WriteLine($"{Interpreter.Name} Subscribe subscriber={subscriber}"); } else ReactiveStreamsCompliance.RejectAdditionalSubscriber(subscriber, GetType().FullName); } } void IActorOutputBoundary.ExposedPublisher(IActorPublisher publisher) => ExposedPublisher((ActorPublisher<T>) publisher); /// <summary> /// TBD /// </summary> /// <param name="publisher">TBD</param> public void ExposedPublisher(ActorPublisher<T> publisher) { _exposedPublisher = publisher; if 
(_upstreamFailed != null) publisher.Shutdown(_upstreamFailed); else { if (_upstreamCompleted) publisher.Shutdown(null); } } /// <summary> /// TBD /// </summary> public void Cancel() { _downstreamCompleted = true; _subscriber = null; _exposedPublisher.Shutdown(new NormalShutdownException("UpstreamBoundary")); Cancel(In); } /// <summary> /// TBD /// </summary> /// <param name="reason">TBD</param> public void Fail(Exception reason) { // No need to fail if had already been cancelled, or we closed earlier if (!(_downstreamCompleted || _upstreamCompleted)) { _upstreamCompleted = true; _upstreamFailed = reason; if (!ReferenceEquals(_exposedPublisher, null)) _exposedPublisher.Shutdown(reason); if (!ReferenceEquals(_subscriber, null) && !(reason is ISpecViolation)) ReactiveStreamsCompliance.TryOnError(_subscriber, reason); } } private void OnNext(T element) { _downstreamDemand--; ReactiveStreamsCompliance.TryOnNext(_subscriber, element); } private void Complete() { // No need to complete if had already been cancelled, or we closed earlier if (!(_upstreamCompleted || _downstreamCompleted)) { _upstreamCompleted = true; if (!ReferenceEquals(_exposedPublisher, null)) _exposedPublisher.Shutdown(null); if (!ReferenceEquals(_subscriber, null)) ReactiveStreamsCompliance.TryOnComplete(_subscriber); } } } #endregion /// <summary> /// TBD /// </summary> /// <param name="shell">TBD</param> /// <returns>TBD</returns> public static Props Props(GraphInterpreterShell shell) => Actor.Props.Create(() => new ActorGraphInterpreter(shell)).WithDeploy(Deploy.Local); private ISet<GraphInterpreterShell> _activeInterpreters = new HashSet<GraphInterpreterShell>(); private readonly Queue<GraphInterpreterShell> _newShells = new Queue<GraphInterpreterShell>(); private readonly SubFusingActorMaterializerImpl _subFusingMaterializerImpl; private readonly GraphInterpreterShell _initial; private ILoggingAdapter _log; //this limits number of messages that can be processed synchronously during one actor 
receive. private readonly int _eventLimit; private int _currentLimit; //this is a var in order to save the allocation when no short-circuiting actually happens private Queue<object> _shortCircuitBuffer; /// <summary> /// TBD /// </summary> /// <param name="shell">TBD</param> public ActorGraphInterpreter(GraphInterpreterShell shell) { _initial = shell; _subFusingMaterializerImpl = new SubFusingActorMaterializerImpl(shell.Materializer, RegisterShell); _eventLimit = _initial.Materializer.Settings.SyncProcessingLimit; _currentLimit = _eventLimit; } /// <summary> /// TBD /// </summary> public ILoggingAdapter Log => _log ?? (_log = Context.GetLogger()); private void EnqueueToShortCircuit(object input) { if(_shortCircuitBuffer == null) _shortCircuitBuffer = new Queue<object>(); _shortCircuitBuffer.Enqueue(input); } private bool TryInit(GraphInterpreterShell shell) { try { _currentLimit = shell.Init(Self, _subFusingMaterializerImpl, EnqueueToShortCircuit, _currentLimit); if (IsDebug) Console.WriteLine($"registering new shell in {_initial}\n {shell.ToString().Replace("\n", "\n ")}"); if (shell.IsTerminated) return false; _activeInterpreters.Add(shell); return true; } catch (Exception e) { if (Log.IsErrorEnabled) Log.Error(e, "Initialization of GraphInterpreterShell failed for {0}", shell); return false; } } /// <summary> /// TBD /// </summary> /// <param name="shell">TBD</param> /// <returns>TBD</returns> public IActorRef RegisterShell(GraphInterpreterShell shell) { _newShells.Enqueue(shell); EnqueueToShortCircuit(ShellRegistered.Instance); return Self; } // Avoid performing the initialization (which starts the first RunBatch()) // within RegisterShell in order to avoid unbounded recursion. 
private void FinishShellRegistration() { if (_newShells.Count == 0) { if (_activeInterpreters.Count == 0) Context.Stop(Self); } else { var shell = _newShells.Dequeue(); if (shell.IsInitialized) { // yes, this steals another shell's Resume, but that's okay because extra ones will just not do anything FinishShellRegistration(); } else if (!TryInit(shell)) { if (_activeInterpreters.Count == 0) FinishShellRegistration(); } } } /// <summary> /// TBD /// </summary> protected override void PreStart() { TryInit(_initial); if (_activeInterpreters.Count == 0) Context.Stop(Self); else if (_shortCircuitBuffer != null) ShortCircuitBatch(); } private void ShortCircuitBatch() { while (_shortCircuitBuffer.Count != 0 && _currentLimit > 0 && _activeInterpreters.Count != 0) { var element = _shortCircuitBuffer.Dequeue(); var boundary = element as IBoundaryEvent; if (boundary != null) ProcessEvent(boundary); else if (element is ShellRegistered) FinishShellRegistration(); } if(_shortCircuitBuffer.Count != 0 && _currentLimit == 0) Self.Tell(ShellRegistered.Instance); } private void ProcessEvent(IBoundaryEvent b) { var shell = b.Shell; if (!shell.IsTerminated && (shell.IsInitialized || TryInit(shell))) { try { _currentLimit = shell.Receive(b, _currentLimit); } catch (Exception ex) { shell.TryAbort(ex); } if (shell.IsTerminated) { _activeInterpreters.Remove(shell); if(_activeInterpreters.Count == 0 && _newShells.Count == 0) Context.Stop(Self); } } } /// <summary> /// TBD /// </summary> /// <param name="message">TBD</param> /// <returns>TBD</returns> protected override bool Receive(object message) { switch (message) { case IBoundaryEvent _: _currentLimit = _eventLimit; ProcessEvent((IBoundaryEvent)message); if (_shortCircuitBuffer != null) ShortCircuitBatch(); return true; case ShellRegistered _: _currentLimit = _eventLimit; if (_shortCircuitBuffer != null) ShortCircuitBatch(); return true; case StreamSupervisor.PrintDebugDump print: var builder = new StringBuilder($"activeShells (actor: 
{Self}):\n"); foreach (var shell in _activeInterpreters) { builder.Append(" " + shell.ToString().Replace("\n", "\n ")); builder.Append(shell.Interpreter); } builder.AppendLine("NewShells:\n"); foreach (var shell in _newShells) { builder.Append(" " + shell.ToString().Replace("\n", "\n ")); builder.Append(shell.Interpreter); } Console.WriteLine(builder); return true; default: return false; } } /// <summary> /// TBD /// </summary> protected override void PostStop() { var ex = new AbruptTerminationException(Self); foreach (var shell in _activeInterpreters) shell.TryAbort(ex); _activeInterpreters = new HashSet<GraphInterpreterShell>(); foreach (var shell in _newShells) { if (TryInit(shell)) shell.TryAbort(ex); } } } }
// Copyright (c) Microsoft.  All Rights Reserved.  Licensed under the Apache License, Version 2.0.  See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CodeFixes.Suppression;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.UnitTests.CodeActions;
using Microsoft.CodeAnalysis.Editor.UnitTests.Diagnostics.GenerateType;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.GenerateType;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.UnitTests;
using Microsoft.CodeAnalysis.UnitTests.Diagnostics;
using Microsoft.VisualStudio.Text.Differencing;
using Roslyn.Test.Utilities;
using Roslyn.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.UnitTests.Diagnostics
{
    /// <summary>
    /// Base class for tests that run a diagnostic analyzer plus code-fix provider
    /// against markup source and verify the offered fixes (including fix-all).
    /// </summary>
    public abstract class AbstractUserDiagnosticTest : AbstractCodeActionOrUserDiagnosticTest
    {
        // Language-specific subclasses supply how diagnostics and fixes are produced.
        internal abstract Task<IEnumerable<Tuple<Diagnostic, CodeFixCollection>>> GetDiagnosticAndFixesAsync(TestWorkspace workspace, string fixAllActionEquivalenceKey);
        internal abstract Task<IEnumerable<Diagnostic>> GetDiagnosticsAsync(TestWorkspace workspace);

        // Adapts the first diagnostic's fixes into the base-class code-action pipeline.
        protected override async Task<IList<CodeAction>> GetCodeActionsWorkerAsync(TestWorkspace workspace, string fixAllActionEquivalenceKey)
        {
            var diagnostics = await GetDiagnosticAndFixAsync(workspace, fixAllActionEquivalenceKey);
            return diagnostics?.Item2?.Fixes.Select(f => f.Action).ToList();
        }

        // Convenience: first (diagnostic, fixes) pair, or null when nothing was reported.
        internal async Task<Tuple<Diagnostic, CodeFixCollection>> GetDiagnosticAndFixAsync(TestWorkspace workspace, string fixAllActionEquivalenceKey = null)
        {
            return (await GetDiagnosticAndFixesAsync(workspace, fixAllActionEquivalenceKey)).FirstOrDefault();
        }

        // Returns the single document containing a [|selection|] and its span.
        protected Document GetDocumentAndSelectSpan(TestWorkspace workspace, out TextSpan span)
        {
            var hostDocument = workspace.Documents.Single(d => d.SelectedSpans.Any());
            span = hostDocument.SelectedSpans.Single();
            return workspace.CurrentSolution.GetDocument(hostDocument.Id);
        }

        // Like GetDocumentAndSelectSpan but tolerates markup without a selection.
        protected bool TryGetDocumentAndSelectSpan(TestWorkspace workspace, out Document document, out TextSpan span)
        {
            var hostDocument = workspace.Documents.FirstOrDefault(d => d.SelectedSpans.Any());
            if (hostDocument == null)
            {
                document = null;
                span = default(TextSpan);
                return false;
            }

            span = hostDocument.SelectedSpans.Single();
            document = workspace.CurrentSolution.GetDocument(hostDocument.Id);
            return true;
        }

        // Returns the single document containing a {|Name:...|} annotation, its name and span.
        protected Document GetDocumentAndAnnotatedSpan(TestWorkspace workspace, out string annotation, out TextSpan span)
        {
            var hostDocument = workspace.Documents.Single(d => d.AnnotatedSpans.Any());
            var annotatedSpan = hostDocument.AnnotatedSpans.Single();
            annotation = annotatedSpan.Key;
            span = annotatedSpan.Value.Single();
            return workspace.CurrentSolution.GetDocument(hostDocument.Id);
        }

        // Maps a markup annotation name to a FixAllScope; null annotation means "not a fix-all test".
        protected FixAllScope? GetFixAllScope(string annotation)
        {
            if (annotation == null)
            {
                return null;
            }

            switch (annotation)
            {
                case "FixAllInDocument":
                    return FixAllScope.Document;

                case "FixAllInProject":
                    return FixAllScope.Project;

                case "FixAllInSolution":
                    return FixAllScope.Solution;

                case "FixAllInSelection":
                    return FixAllScope.Custom;
            }

            throw new InvalidProgramException("Incorrect FixAll annotation in test");
        }

        // Entry point used by subclasses: resolves the fix-all scope from the
        // annotation, then delegates to the scope-aware overload below.
        internal async Task<IEnumerable<Tuple<Diagnostic, CodeFixCollection>>> GetDiagnosticAndFixesAsync(
            IEnumerable<Diagnostic> diagnostics,
            DiagnosticAnalyzer provider,
            CodeFixProvider fixer,
            TestDiagnosticAnalyzerDriver testDriver,
            Document document,
            TextSpan span,
            string annotation,
            string fixAllActionId)
        {
            if (diagnostics.IsEmpty())
            {
                return SpecializedCollections.EmptyEnumerable<Tuple<Diagnostic, CodeFixCollection>>();
            }

            FixAllScope? scope = GetFixAllScope(annotation);
            return await GetDiagnosticAndFixesAsync(diagnostics, provider, fixer, testDriver, document, span, scope, fixAllActionId);
        }

        // Pairs each diagnostic with its fixes. Without a scope, each diagnostic is
        // fixed individually; with a scope, a single fix-all action is computed and
        // associated with every diagnostic in scope.
        private async Task<IEnumerable<Tuple<Diagnostic, CodeFixCollection>>> GetDiagnosticAndFixesAsync(
            IEnumerable<Diagnostic> diagnostics,
            DiagnosticAnalyzer provider,
            CodeFixProvider fixer,
            TestDiagnosticAnalyzerDriver testDriver,
            Document document,
            TextSpan span,
            FixAllScope? scope,
            string fixAllActionId)
        {
            Assert.NotEmpty(diagnostics);

            var result = new List<Tuple<Diagnostic, CodeFixCollection>>();
            if (scope == null)
            {
                // Simple code fix.
                foreach (var diagnostic in diagnostics)
                {
                    var fixes = new List<CodeFix>();
                    var context = new CodeFixContext(document, diagnostic, (a, d) => fixes.Add(new CodeFix(document.Project, a, d)), CancellationToken.None);

                    await fixer.RegisterCodeFixesAsync(context);
                    if (fixes.Any())
                    {
                        var codeFix = new CodeFixCollection(fixer, diagnostic.Location.SourceSpan, fixes);
                        result.Add(Tuple.Create(diagnostic, codeFix));
                    }
                }
            }
            else
            {
                // Fix all fix.
                var fixAllProvider = fixer.GetFixAllProvider();
                Assert.NotNull(fixAllProvider);

                var fixAllContext = GetFixAllContext(diagnostics, provider, fixer, testDriver, document, scope.Value, fixAllActionId);
                var fixAllFix = await fixAllProvider.GetFixAsync(fixAllContext);
                if (fixAllFix != null)
                {
                    // Same fix applies to each diagnostic in scope.
                    foreach (var diagnostic in diagnostics)
                    {
                        var diagnosticSpan = diagnostic.Location.IsInSource ? diagnostic.Location.SourceSpan : default(TextSpan);
                        var codeFix = new CodeFixCollection(fixAllProvider, diagnosticSpan, ImmutableArray.Create(new CodeFix(document.Project, fixAllFix, diagnostic)));
                        result.Add(Tuple.Create(diagnostic, codeFix));
                    }
                }
            }

            return result;
        }

        // Builds the FixAllContext for the requested scope. Custom scope maps to a
        // FixMultipleContext over the document's diagnostics; otherwise a standard
        // context is built over the first diagnostic's id with re-query delegates
        // that filter by diagnostic id.
        private static FixAllContext GetFixAllContext(
            IEnumerable<Diagnostic> diagnostics,
            DiagnosticAnalyzer provider,
            CodeFixProvider fixer,
            TestDiagnosticAnalyzerDriver testDriver,
            Document document,
            FixAllScope scope,
            string fixAllActionId)
        {
            Assert.NotEmpty(diagnostics);

            if (scope == FixAllScope.Custom)
            {
                // Bulk fixing diagnostics in selected scope.
                var diagnosticsToFix = ImmutableDictionary.CreateRange(SpecializedCollections.SingletonEnumerable(KeyValuePair.Create(document, diagnostics.ToImmutableArray())));
                return FixMultipleContext.Create(diagnosticsToFix, fixer, fixAllActionId, CancellationToken.None);
            }

            var diagnostic = diagnostics.First();
            Func<Document, ImmutableHashSet<string>, CancellationToken, Task<IEnumerable<Diagnostic>>> getDocumentDiagnosticsAsync =
                async (d, diagIds, c) =>
                {
                    var root = d.GetSyntaxRootAsync().Result;
                    var diags = await testDriver.GetDocumentDiagnosticsAsync(provider, d, root.FullSpan);
                    diags = diags.Where(diag => diagIds.Contains(diag.Id));
                    return diags;
                };

            Func<Project, bool, ImmutableHashSet<string>, CancellationToken, Task<IEnumerable<Diagnostic>>> getProjectDiagnosticsAsync =
                async (p, includeAllDocumentDiagnostics, diagIds, c) =>
                {
                    var diags = includeAllDocumentDiagnostics
                        ? await testDriver.GetAllDiagnosticsAsync(provider, p)
                        : await testDriver.GetProjectDiagnosticsAsync(provider, p);
                    diags = diags.Where(diag => diagIds.Contains(diag.Id));
                    return diags;
                };

            var diagnosticIds = ImmutableHashSet.Create(diagnostic.Id);
            var fixAllDiagnosticProvider = new FixAllCodeActionContext.FixAllDiagnosticProvider(diagnosticIds, getDocumentDiagnosticsAsync, getProjectDiagnosticsAsync);
            return diagnostic.Location.IsInSource
                ? new FixAllContext(document, fixer, scope, fixAllActionId, diagnosticIds, fixAllDiagnosticProvider, CancellationToken.None)
                : new FixAllContext(document.Project, fixer, scope, fixAllActionId, diagnosticIds, fixAllDiagnosticProvider, CancellationToken.None);
        }

        // Asserts the equivalence key of the first offered fix for the markup.
        protected async Task TestEquivalenceKeyAsync(string initialMarkup, string equivalenceKey)
        {
            using (var workspace = await CreateWorkspaceFromFileAsync(initialMarkup, parseOptions: null, compilationOptions: null))
            {
                var diagnosticAndFix = await GetDiagnosticAndFixAsync(workspace);
                Assert.Equal(equivalenceKey, diagnosticAndFix.Item2.Fixes.ElementAt(index: 0).Action.EquivalenceKey);
            }
        }

        // Asserts the total number of fix actions summed across all diagnostics.
        protected async Task TestActionCountInAllFixesAsync(
            string initialMarkup,
            int count,
            ParseOptions parseOptions = null,
            CompilationOptions compilationOptions = null)
        {
            using (var workspace = await CreateWorkspaceFromFileAsync(initialMarkup, parseOptions, compilationOptions))
            {
                var diagnosticAndFix = await GetDiagnosticAndFixesAsync(workspace, null);
                var diagnosticCount = diagnosticAndFix.Select(x => x.Item2.Fixes.Count()).Sum();
                Assert.Equal(count, diagnosticCount);
            }
        }

        // Asserts that reported diagnostic spans exactly match the [|spans|] marked
        // in expectedMarkup; optionally filters by a specific diagnostic id.
        protected async Task TestSpansAsync(
            string initialMarkup,
            string expectedMarkup,
            int index = 0,
            ParseOptions parseOptions = null,
            CompilationOptions compilationOptions = null,
            string diagnosticId = null,
            string fixAllActionEquivalenceId = null)
        {
            IList<TextSpan> spansList;
            string unused;
            MarkupTestFile.GetSpans(expectedMarkup, out unused, out spansList);

            var expectedTextSpans = spansList.ToSet();
            using (var workspace = await CreateWorkspaceFromFileAsync(initialMarkup, parseOptions, compilationOptions))
            {
                ISet<TextSpan> actualTextSpans;
                if (diagnosticId == null)
                {
                    var diagnosticsAndFixes = await GetDiagnosticAndFixesAsync(workspace, fixAllActionEquivalenceId);
                    var diagnostics = diagnosticsAndFixes.Select(t => t.Item1);
                    actualTextSpans = diagnostics.Select(d => d.Location.SourceSpan).ToSet();
                }
                else
                {
                    var diagnostics = await GetDiagnosticsAsync(workspace);
                    actualTextSpans = diagnostics.Where(d => d.Id == diagnosticId).Select(d => d.Location.SourceSpan).ToSet();
                }

                Assert.True(expectedTextSpans.SetEquals(actualTextSpans));
            }
        }

        // Verifies a fix that adds a new document, under both regular and script options.
        protected async Task TestAddDocument(
            string initialMarkup,
            string expectedMarkup,
            IList<string> expectedContainers,
            string expectedDocumentName,
            int index = 0,
            bool compareTokens = true,
            bool isLine = true)
        {
            await TestAddDocument(initialMarkup, expectedMarkup, index, expectedContainers, expectedDocumentName, null, null, compareTokens, isLine);
            await TestAddDocument(initialMarkup, expectedMarkup, index, expectedContainers, expectedDocumentName, GetScriptOptions(), null, compareTokens, isLine);
        }

        // Builds the workspace (from single-file markup or full workspace XML),
        // collects code actions and delegates to the action-based overload.
        private async Task TestAddDocument(
            string initialMarkup,
            string expectedMarkup,
            int index,
            IList<string> expectedContainers,
            string expectedDocumentName,
            ParseOptions parseOptions,
            CompilationOptions compilationOptions,
            bool compareTokens,
            bool isLine)
        {
            using (var workspace = isLine
                ? await CreateWorkspaceFromFileAsync(initialMarkup, parseOptions, compilationOptions)
                : await TestWorkspaceFactory.CreateWorkspaceAsync(initialMarkup))
            {
                var codeActions = await GetCodeActionsAsync(workspace, fixAllActionEquivalenceKey: null);
                await TestAddDocument(workspace, expectedMarkup, index, expectedContainers, expectedDocumentName, codeActions, compareTokens);
            }
        }

        // Applies the selected action's operations and verifies the added document.
        private async Task TestAddDocument(
            TestWorkspace workspace,
            string expectedMarkup,
            int index,
            IList<string> expectedFolders,
            string expectedDocumentName,
            IList<CodeAction> actions,
            bool compareTokens)
        {
            var operations = await VerifyInputsAndGetOperationsAsync(index, actions);
            await TestAddDocument(
                workspace,
                expectedMarkup,
                operations,
                hasProjectChange: false,
                modifiedProjectId: null,
                expectedFolders: expectedFolders,
                expectedDocumentName: expectedDocumentName,
                compareTokens: compareTokens);
        }

        // Core add-document verification: applies operations, locates the added
        // document (in the changed solution, or in a specific project when the fix
        // also changed project references), checks name/folders/content, and walks
        // the preview(s) expecting at least one diff view.
        // Returns the (old, new) solution pair for further assertions.
        private async Task<Tuple<Solution, Solution>> TestAddDocument(
            TestWorkspace workspace,
            string expected,
            IEnumerable<CodeActionOperation> operations,
            bool hasProjectChange,
            ProjectId modifiedProjectId,
            IList<string> expectedFolders,
            string expectedDocumentName,
            bool compareTokens)
        {
            var appliedChanges = ApplyOperationsAndGetSolution(workspace, operations);
            var oldSolution = appliedChanges.Item1;
            var newSolution = appliedChanges.Item2;

            Document addedDocument = null;
            if (!hasProjectChange)
            {
                addedDocument = SolutionUtilities.GetSingleAddedDocument(oldSolution, newSolution);
            }
            else
            {
                Assert.NotNull(modifiedProjectId);
                addedDocument = newSolution.GetProject(modifiedProjectId).Documents.SingleOrDefault(doc => doc.Name == expectedDocumentName);
            }

            Assert.NotNull(addedDocument);

            AssertEx.Equal(expectedFolders, addedDocument.Folders);
            Assert.Equal(expectedDocumentName, addedDocument.Name);
            if (compareTokens)
            {
                TokenUtilities.AssertTokensEqual(
                    expected, addedDocument.GetTextAsync().Result.ToString(), GetLanguage());
            }
            else
            {
                Assert.Equal(expected, addedDocument.GetTextAsync().Result.ToString());
            }

            var editHandler = workspace.ExportProvider.GetExportedValue<ICodeActionEditHandlerService>();
            if (!hasProjectChange)
            {
                // If there is just one document change then we expect the preview to be a WpfTextView
                var content = (await editHandler.GetPreviews(workspace, operations, CancellationToken.None).GetPreviewsAsync())[0];
                var diffView = content as IWpfDifferenceViewer;
                Assert.NotNull(diffView);
                diffView.Close();
            }
            else
            {
                // If there are more changes than just the document we need to browse all the changes and get the document change
                var contents = editHandler.GetPreviews(workspace, operations, CancellationToken.None);
                bool hasPreview = false;
                var previews = await contents.GetPreviewsAsync();
                if (previews != null)
                {
                    foreach (var preview in previews)
                    {
                        if (preview != null)
                        {
                            var diffView = preview as IWpfDifferenceViewer;
                            if (diffView != null)
                            {
                                hasPreview = true;
                                diffView.Close();
                                break;
                            }
                        }
                    }
                }

                Assert.True(hasPreview);
            }

            return Tuple.Create(oldSolution, newSolution);
        }

        // End-to-end Generate Type test driving a mocked generate-type dialog:
        // configures the dialog result, runs the generate-type fix (always the last
        // offered action), verifies the changed/added document, optional usings
        // behavior, cross-project references, and the dialog option calculation.
        internal async Task TestWithMockedGenerateTypeDialog(
            string initial,
            string languageName,
            string typeName,
            string expected = null,
            bool isLine = true,
            bool isMissing = false,
            Accessibility accessibility = Accessibility.NotApplicable,
            TypeKind typeKind = TypeKind.Class,
            string projectName = null,
            bool isNewFile = false,
            string existingFilename = null,
            IList<string> newFileFolderContainers = null,
            string fullFilePath = null,
            string newFileName = null,
            string assertClassName = null,
            bool checkIfUsingsIncluded = false,
            bool checkIfUsingsNotIncluded = false,
            string expectedTextWithUsings = null,
            string defaultNamespace = "",
            bool areFoldersValidIdentifiers = true,
            GenerateTypeDialogOptions assertGenerateTypeDialogOptions = null,
            IList<TypeKindOptions> assertTypeKindPresent = null,
            IList<TypeKindOptions> assertTypeKindAbsent = null,
            bool isCancelled = false)
        {
            using (var testState = await GenerateTypeTestState.CreateAsync(initial, isLine, projectName, typeName, existingFilename, languageName))
            {
                // Initialize the viewModel values
                testState.TestGenerateTypeOptionsService.SetGenerateTypeOptions(
                    accessibility: accessibility,
                    typeKind: typeKind,
                    typeName: testState.TypeName,
                    project: testState.ProjectToBeModified,
                    isNewFile: isNewFile,
                    newFileName: newFileName,
                    folders: newFileFolderContainers,
                    fullFilePath: fullFilePath,
                    existingDocument: testState.ExistingDocument,
                    areFoldersValidIdentifiers: areFoldersValidIdentifiers,
                    isCancelled: isCancelled);

                testState.TestProjectManagementService.SetDefaultNamespace(
                    defaultNamespace: defaultNamespace);

                var diagnosticsAndFixes = await GetDiagnosticAndFixesAsync(testState.Workspace, null);
                var generateTypeDiagFixes = diagnosticsAndFixes.SingleOrDefault(df => GenerateTypeTestState.FixIds.Contains(df.Item1.Id));

                if (isMissing)
                {
                    Assert.Null(generateTypeDiagFixes);
                    return;
                }

                var fixes = generateTypeDiagFixes.Item2.Fixes;
                Assert.NotNull(fixes);

                var fixActions = MassageActions(fixes.Select(f => f.Action).ToList());
                Assert.NotNull(fixActions);

                // Since the dialog option is always fed as the last CodeAction
                var index = fixActions.Count() - 1;
                var action = fixActions.ElementAt(index);
                Assert.Equal(action.Title, FeaturesResources.GenerateNewType);

                var operations = action.GetOperationsAsync(CancellationToken.None).Result;
                Tuple<Solution, Solution> oldSolutionAndNewSolution = null;

                if (!isNewFile)
                {
                    oldSolutionAndNewSolution = await TestOperationsAsync(
                        testState.Workspace, expected, operations,
                        conflictSpans: null, renameSpans: null, warningSpans: null,
                        compareTokens: false, expectedChangedDocumentId: testState.ExistingDocument.Id);
                }
                else
                {
                    oldSolutionAndNewSolution = await TestAddDocument(
                        testState.Workspace,
                        expected,
                        operations,
                        projectName != null,
                        testState.ProjectToBeModified.Id,
                        newFileFolderContainers,
                        newFileName,
                        compareTokens: false);
                }

                if (checkIfUsingsIncluded)
                {
                    Assert.NotNull(expectedTextWithUsings);
                    await TestOperationsAsync(testState.Workspace, expectedTextWithUsings, operations,
                        conflictSpans: null, renameSpans: null, warningSpans: null, compareTokens: false,
                        expectedChangedDocumentId: testState.InvocationDocument.Id);
                }

                if (checkIfUsingsNotIncluded)
                {
                    var oldSolution = oldSolutionAndNewSolution.Item1;
                    var newSolution = oldSolutionAndNewSolution.Item2;
                    var changedDocumentIds = SolutionUtilities.GetChangedDocuments(oldSolution, newSolution);

                    Assert.False(changedDocumentIds.Contains(testState.InvocationDocument.Id));
                }

                // Added into a different project than the triggering project
                if (projectName != null)
                {
                    var appliedChanges = ApplyOperationsAndGetSolution(testState.Workspace, operations);
                    var newSolution = appliedChanges.Item2;
                    var triggeredProject = newSolution.GetProject(testState.TriggeredProject.Id);

                    // Make sure the Project reference is present
                    Assert.True(triggeredProject.ProjectReferences.Any(pr => pr.ProjectId == testState.ProjectToBeModified.Id));
                }

                // Assert Option Calculation
                if (assertClassName != null)
                {
                    Assert.True(assertClassName == testState.TestGenerateTypeOptionsService.ClassName);
                }

                if (assertGenerateTypeDialogOptions != null || assertTypeKindPresent != null || assertTypeKindAbsent != null)
                {
                    var generateTypeDialogOptions = testState.TestGenerateTypeOptionsService.GenerateTypeDialogOptions;

                    if (assertGenerateTypeDialogOptions != null)
                    {
                        Assert.True(assertGenerateTypeDialogOptions.IsPublicOnlyAccessibility == generateTypeDialogOptions.IsPublicOnlyAccessibility);
                        Assert.True(assertGenerateTypeDialogOptions.TypeKindOptions == generateTypeDialogOptions.TypeKindOptions);
                        Assert.True(assertGenerateTypeDialogOptions.IsAttribute == generateTypeDialogOptions.IsAttribute);
                    }

                    if (assertTypeKindPresent != null)
                    {
                        foreach (var typeKindPresentEach in assertTypeKindPresent)
                        {
                            Assert.True((typeKindPresentEach & generateTypeDialogOptions.TypeKindOptions) != 0);
                        }
                    }

                    if (assertTypeKindAbsent != null)
                    {
                        foreach (var typeKindPresentEach in assertTypeKindAbsent)
                        {
                            Assert.True((typeKindPresentEach & generateTypeDialogOptions.TypeKindOptions) == 0);
                        }
                    }
                }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using NDesk.Options;
using Sep.Git.Tfs.Core;
using Sep.Git.Tfs.Util;
using StructureMap;

namespace Sep.Git.Tfs.Commands
{
    /// <summary>
    /// Implements the "git tfs fetch" command: retrieves TFS changesets for one or
    /// more initialized tfs remotes and records them as git commits.
    /// </summary>
    [Pluggable("fetch")]
    [Description("fetch [options] [tfs-remote-id]...")]
    [RequiresValidGitRepository]
    public class Fetch : GitTfsCommand
    {
        private readonly RemoteOptions remoteOptions;
        private readonly TextWriter stdout;
        private readonly Globals globals;
        private readonly ConfigProperties properties;
        // NOTE(review): 'authors' is injected and stored but not used in this class;
        // presumably kept for StructureMap wiring or subclasses — confirm before removing.
        private readonly AuthorsFile authors;
        private readonly Labels labels;

        public Fetch(Globals globals, ConfigProperties properties, TextWriter stdout, RemoteOptions remoteOptions, AuthorsFile authors, Labels labels)
        {
            this.globals = globals;
            this.properties = properties;
            this.stdout = stdout;
            this.remoteOptions = remoteOptions;
            this.authors = authors;
            this.labels = labels;
            this.upToChangeSet = -1; // -1 means "fetch up to the latest changeset"
            // Fixed: was the redundant double assignment
            // "BranchStrategy = BranchStrategy = BranchStrategy.Auto;"
            BranchStrategy = BranchStrategy.Auto;
        }

        bool FetchAll { get; set; }
        bool FetchLabels { get; set; }
        bool FetchParents { get; set; }
        string BareBranch { get; set; }
        bool ForceFetch { get; set; }
        bool ExportMetadatas { get; set; }
        string ExportMetadatasFile { get; set; }
        public BranchStrategy BranchStrategy { get; set; }

        /// <summary>
        /// Write-only adapter for the "--batch-size" option; parses and persists the value.
        /// </summary>
        /// <exception cref="GitTfsException">The value is not an integer.</exception>
        public string BatchSizeOption
        {
            set
            {
                int batchSize;
                if (!int.TryParse(value, out batchSize))
                    throw new GitTfsException("error: batch size parameter should be an integer.");
                properties.BatchSize = batchSize;
            }
        }

        int upToChangeSet { get; set; }

        /// <summary>
        /// Write-only adapter for the "--up-to" option; -1 means up to the maximum changeset.
        /// </summary>
        /// <exception cref="GitTfsException">The value is not an integer.</exception>
        public string UpToChangeSetOption
        {
            set
            {
                int changesetIdParsed;
                if (!int.TryParse(value, out changesetIdParsed))
                    throw new GitTfsException("error: 'up-to' parameter should be an integer.");
                upToChangeSet = changesetIdParsed;
            }
        }

        // Changeset to start cloning from (null = from the beginning).
        protected int? InitialChangeset { get; set; }

        public virtual OptionSet OptionSet
        {
            get
            {
                return new OptionSet
                {
                    { "all|fetch-all", "Fetch TFS changesets of all the initialized tfs remotes", v => FetchAll = v != null },
                    { "parents", "Fetch TFS changesets of the parent(s) initialized tfs remotes", v => FetchParents = v != null },
                    { "l|with-labels|fetch-labels", "Fetch the labels also when fetching TFS changesets", v => FetchLabels = v != null },
                    { "b|bare-branch=", "The name of the branch on which the fetch will be done for a bare repository", v => BareBranch = v },
                    { "force", "Force fetch of tfs changesets when there is ahead commits (ahead commits will be lost!)", v => ForceFetch = v != null },
                    { "x|export", "Export metadatas", v => ExportMetadatas = v != null },
                    { "export-work-item-mapping=", "Path to Work-items mapping export file", v => ExportMetadatasFile = v },
                    { "branches=", "Strategy to manage branches:" + Environment.NewLine
                        + "* none: Ignore branches and merge changesets, fetching only the cloned tfs path" + Environment.NewLine
                        + "* auto:(default) Manage the encountered merged changesets and initialize only the merged branches" + Environment.NewLine
                        + "* all: Manage merged changesets and initialize all the branches during the clone",
                        v =>
                        {
                            BranchStrategy branchStrategy;
                            if (Enum.TryParse(v, true, out branchStrategy))
                                BranchStrategy = branchStrategy;
                            else
                                throw new GitTfsException("error: 'branches' parameter should be of value none/auto/all.");
                        } },
                    // Fixed help-text typo: was "Size of a the batch ..."
                    { "batch-size=", "Size of the batch of tfs changesets fetched (-1 for all in one batch)", v => BatchSizeOption = v },
                    { "c|changeset=", "The changeset to clone from (must be a number)", v => InitialChangeset = Convert.ToInt32(v) },
                    { "t|up-to=", "up-to changeset # (optional, -1 for up to maximum, must be a number, not prefixed with C)", v => UpToChangeSetOption = v }
                }.Merge(remoteOptions.OptionSet);
            }
        }

        /// <summary>Fetches the default remote (globals.RemoteId). Returns 0 on success.</summary>
        public int Run()
        {
            return Run(globals.RemoteId);
        }

        /// <summary>Fetches the default remote, optionally stopping when a merge commit fails.</summary>
        public void Run(bool stopOnFailMergeCommit)
        {
            Run(stopOnFailMergeCommit, globals.RemoteId);
        }

        /// <summary>Fetches the remotes named in <paramref name="args"/>. Returns 0 on success.</summary>
        public int Run(params string[] args)
        {
            return Run(false, args);
        }

        private int Run(bool stopOnFailMergeCommit, params string[] args)
        {
            // When branches are ignored and we're not fetching everything, persist that
            // choice so later operations see the same branch handling.
            if (!FetchAll && BranchStrategy == BranchStrategy.None)
                globals.Repository.SetConfig(GitTfsConstants.IgnoreBranches, true.ToString());

            var remotesToFetch = GetRemotesToFetch(args).ToList();
            foreach (var remote in remotesToFetch)
            {
                FetchRemote(stopOnFailMergeCommit, remote);
            }
            return 0;
        }

        private void FetchRemote(bool stopOnFailMergeCommit, IGitTfsRemote remote)
        {
            stdout.WriteLine("Fetching from TFS remote '{0}'...", remote.Id);
            DoFetch(remote, stopOnFailMergeCommit);
            if (labels != null && FetchLabels)
            {
                stdout.WriteLine("Fetching labels from TFS remote '{0}'...", remote.Id);
                labels.Run(remote);
            }
        }

        /// <summary>
        /// Performs the actual fetch for a single remote: fast-forwards stale tfs refs,
        /// guards bare repositories against losing ahead commits, initializes metadata
        /// export, then fetches (or quick-fetches from an initial changeset).
        /// </summary>
        protected virtual void DoFetch(IGitTfsRemote remote, bool stopOnFailMergeCommit)
        {
            var bareBranch = string.IsNullOrEmpty(BareBranch) ? remote.Id : BareBranch;

            // It is possible that we have outdated refs/remotes/tfs/<id>.
            // E.g. someone already fetched changesets from TFS into another git repository
            // and we've pulled it since; in that case tfs fetch would retrieve the same
            // changes again unnecessarily. To prevent it we scan the tree from HEAD and see
            // if newer changesets from TFS exist (by checking the git-tfs-id mark in commit
            // comments). The process is similar to bootstrapping.
            if (!ForceFetch)
            {
                if (!remote.Repository.IsBare)
                    remote.Repository.MoveTfsRefForwardIfNeeded(remote);
                else
                    remote.Repository.MoveTfsRefForwardIfNeeded(remote, bareBranch);
            }

            // In a bare repository, refuse to fetch over local commits that are ahead of
            // the last fetched TFS changeset unless --force was given.
            if (!ForceFetch && remote.Repository.IsBare
                && remote.Repository.HasRef(GitRepository.ShortToLocalName(bareBranch))
                && remote.MaxCommitHash != remote.Repository.GetCommit(bareBranch).Sha)
            {
                throw new GitTfsException("error : fetch is not allowed when there is ahead commits!",
                    new[] { "Remove ahead commits and retry", "use the --force option (ahead commits will be lost!)" });
            }

            var metadataExportInitializer = new ExportMetadatasInitializer(globals);
            // Export either when requested on the command line or when previously enabled
            // in the repository configuration.
            bool shouldExport = ExportMetadatas || remote.Repository.GetConfig(GitTfsConstants.ExportMetadatasConfigKey) == "true";
            if (ExportMetadatas)
            {
                metadataExportInitializer.InitializeConfig(remote.Repository, ExportMetadatasFile);
            }
            metadataExportInitializer.InitializeRemote(remote, shouldExport);

            try
            {
                if (InitialChangeset.HasValue)
                {
                    // Persist the starting changeset, seed the remote with a quick fetch
                    // of that single changeset, then fetch the rest normally.
                    properties.InitialChangeset = InitialChangeset.Value;
                    properties.PersistAllOverrides();
                    remote.QuickFetch(InitialChangeset.Value);
                    remote.Fetch(stopOnFailMergeCommit);
                }
                else
                {
                    remote.Fetch(stopOnFailMergeCommit, upToChangeSet);
                }
            }
            finally
            {
                // Always clean the workspace, and in a bare repository point the branch
                // ref at whatever was fetched, even if the fetch itself failed part-way.
                Trace.WriteLine("Cleaning...");
                remote.CleanupWorkspaceDirectory();

                if (remote.Repository.IsBare)
                    remote.Repository.UpdateRef(GitRepository.ShortToLocalName(bareBranch), remote.MaxCommitHash);
            }
        }

        // Resolve which remotes to fetch: parents of HEAD, all remotes, or the ones
        // explicitly named on the command line (in that priority order).
        private IEnumerable<IGitTfsRemote> GetRemotesToFetch(IList<string> args)
        {
            IEnumerable<IGitTfsRemote> remotesToFetch;
            if (FetchParents)
                remotesToFetch = globals.Repository.GetLastParentTfsCommits("HEAD").Select(commit => commit.Remote);
            else if (FetchAll)
                remotesToFetch = globals.Repository.ReadAllTfsRemotes();
            else
                remotesToFetch = args.Select(arg => globals.Repository.ReadTfsRemote(arg));
            return remotesToFetch;
        }
    }

    /// <summary>How merged TFS branches are handled during fetch/clone.</summary>
    public enum BranchStrategy
    {
        None,
        Auto,
        All
    }
}
//
//      Copyright (C) DataStax Inc.
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//

using System;
using System.Collections.Generic;
using System.Linq;
using Cassandra.Data.Linq;
using Cassandra.IntegrationTests.Linq.Structures;
using Cassandra.IntegrationTests.TestBase;
using NUnit.Framework;

namespace Cassandra.IntegrationTests.Linq.CqlOperatorTests
{
    /// <summary>
    /// Tests for the LINQ CqlOperator.Append operator against list, array (CQL list)
    /// and dictionary (CQL map) columns. These run against a simulated cluster
    /// (SimulacronTest): each test verifies the CQL generated for the Update, and the
    /// "TableWithAllCollectionTypes" tests additionally prime a SELECT response and
    /// check the round-tripped entity.
    /// </summary>
    public class Append : SimulacronTest
    {
        // Randomized per-fixture table name so primed statements don't collide across runs.
        private readonly string _tableName = "EntityWithListType_" + Randomm.RandomAlphaNum(12);

        /// <summary>
        /// Validate that the a List can be appended to, then validate that the expected data exists in Cassandra
        /// </summary>
        [Test]
        public void Append_ToList()
        {
            var (table, expectedEntities) = EntityWithListType.GetDefaultTable(Session, _tableName);
            var listToAdd = new List<int> { -1, 0, 5, 6 };
            var singleEntity = expectedEntities.First();
            // NOTE(review): expectedEntity is built but never asserted against — presumably
            // a leftover from a live-cluster version of this test; confirm before removing.
            var expectedEntity = singleEntity.Clone();
            expectedEntity.ListType.AddRange(listToAdd);
            // Append the values
            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithListType { ListType = CqlOperator.Append(listToAdd) })
                .Update().Execute();

            // Verify the generated CQL uses "col = col + ?" append semantics.
            VerifyBoundStatement(
                $"UPDATE {_tableName} SET ListType = ListType + ? WHERE Id = ?",
                1,
                listToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that the a List can be appended to, then validate that the expected data exists in Cassandra
        /// </summary>
        [Test]
        public void Append_ToList_StartsOutEmpty()
        {
            var (table, expectedEntities) = EntityWithListType.GetDefaultTable(Session, _tableName);

            // overwrite the row we're querying with empty list
            var singleEntity = expectedEntities.First();
            singleEntity.ListType.Clear();
            var expectedEntity = singleEntity.Clone();
            var listToAdd = new List<int> { -1, 0, 5, 6 };
            expectedEntity.ListType.AddRange(listToAdd);
            // Append the values
            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithListType { ListType = CqlOperator.Append(listToAdd) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET ListType = ListType + ? WHERE Id = ?",
                1,
                listToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that appending an empty list to a list type does not cause any unexpected behavior
        /// </summary>
        [Test]
        public void Append_ToList_AppendEmptyList()
        {
            var (table, expectedEntities) = EntityWithListType.GetDefaultTable(Session, _tableName);
            var listToAdd = new List<int>();
            var singleEntity = expectedEntities.First();

            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithListType { ListType = CqlOperator.Append(listToAdd) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET ListType = ListType + ? WHERE Id = ?",
                1,
                listToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that the a List can be appended to, then validate that the expected data exists in Cassandra
        /// </summary>
        [Test]
        public void Append_ToArray()
        {
            var (table, expectedEntities) = EntityWithArrayType.GetDefaultTable(Session, _tableName);
            var arrToAdd = new string[]
            {
                "random_" + Randomm.RandomAlphaNum(10),
                "random_" + Randomm.RandomAlphaNum(10),
                "random_" + Randomm.RandomAlphaNum(10),
            };
            var singleEntity = expectedEntities.First();
            // NOTE(review): expectedEntity is built but never asserted against — see note
            // in Append_ToList; confirm before removing.
            var expectedEntity = singleEntity.Clone();
            var strValsAsList = new List<string>();
            strValsAsList.AddRange(expectedEntity.ArrayType);
            strValsAsList.AddRange(arrToAdd);
            expectedEntity.ArrayType = strValsAsList.ToArray();
            // Append the values
            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithArrayType { ArrayType = CqlOperator.Append(arrToAdd) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET ArrayType = ArrayType + ? WHERE Id = ?",
                1,
                arrToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that when appending an empty array, the array remains unchanged in C*
        /// </summary>
        [Test]
        public void Append_ToArray_AppendEmptyArray_QueryUsingCql()
        {
            var (table, expectedEntities) = EntityWithArrayType.GetDefaultTable(Session, _tableName);
            var arrToAdd = new string[] { };
            var singleEntity = expectedEntities.First();
            var expectedEntity = singleEntity.Clone();
            var strValsAsList = new List<string>();
            strValsAsList.AddRange(expectedEntity.ArrayType);
            strValsAsList.AddRange(arrToAdd);
            expectedEntity.ArrayType = strValsAsList.ToArray();
            // Append the values
            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithArrayType { ArrayType = CqlOperator.Append(arrToAdd) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET ArrayType = ArrayType + ? WHERE Id = ?",
                1,
                arrToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that the a Dictionary (or Map in C*) can be appended to, then validate that the expected data exists after the Update
        /// </summary>
        [Test]
        public void Append_ToDictionary()
        {
            var (table, expectedEntities) = EntityWithDictionaryType.GetDefaultTable(Session, _tableName);
            var dictToAdd = new Dictionary<string, string>()
            {
                {"randomKey_" + Randomm.RandomAlphaNum(10), "randomVal_" + Randomm.RandomAlphaNum(10)},
                {"randomKey_" + Randomm.RandomAlphaNum(10), "randomVal_" + Randomm.RandomAlphaNum(10)},
            };
            var singleEntity = expectedEntities.First();
            var expectedEntity = singleEntity.Clone();
            foreach (var keyValPair in dictToAdd)
            {
                expectedEntity.DictionaryType.Add(keyValPair.Key, keyValPair.Value);
            }

            // Append the values
            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithDictionaryType { DictionaryType = CqlOperator.Append(dictToAdd) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET DictionaryType = DictionaryType + ? WHERE Id = ?",
                1,
                dictToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that Map data does not change after appending an empty dictionary to that C* value
        /// </summary>
        [Test]
        public void Append_ToDictionary_EmptyDictionary()
        {
            var (table, expectedEntities) = EntityWithDictionaryType.GetDefaultTable(Session, _tableName);
            var dictToAdd = new Dictionary<string, string>() { };
            var singleEntity = expectedEntities.First();
            var expectedEntity = singleEntity.Clone();
            // No-op loop: dictToAdd is empty, so expectedEntity is an unmodified clone.
            foreach (var keyValPair in dictToAdd)
                expectedEntity.DictionaryType.Add(keyValPair.Key, keyValPair.Value);

            // Append the values
            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithDictionaryType { DictionaryType = CqlOperator.Append(dictToAdd) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET DictionaryType = DictionaryType + ? WHERE Id = ?",
                1,
                dictToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that, in a mix of key-value pairs to insert, only non-duplicate keys are inserted.
        /// </summary>
        [Test]
        public void Append_ToDictionary_DuplicateAndNonDuplicateKey()
        {
            var (table, expectedEntities) = EntityWithDictionaryType.GetDefaultTable(Session, _tableName);
            var singleEntity = expectedEntities.First();
            // Two fresh keys plus one key that already exists on the entity.
            var dictToAdd = new Dictionary<string, string>()
            {
                {"randomKey_" + Randomm.RandomAlphaNum(10), "randomVal_" + Randomm.RandomAlphaNum(10)},
                {"randomKey_" + Randomm.RandomAlphaNum(10), "randomVal_" + Randomm.RandomAlphaNum(10)},
                { singleEntity.DictionaryType.First().Key, singleEntity.DictionaryType.First().Value }
            };
            var expectedEntity = singleEntity.Clone();
            foreach (var keyValPair in dictToAdd)
            {
                if (!expectedEntity.DictionaryType.ContainsKey(keyValPair.Key))
                {
                    expectedEntity.DictionaryType.Add(keyValPair.Key, keyValPair.Value);
                }
            }

            // Append the values
            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithDictionaryType { DictionaryType = CqlOperator.Append(dictToAdd) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET DictionaryType = DictionaryType + ? WHERE Id = ?",
                1,
                dictToAdd, singleEntity.Id);
        }

        /// <summary>
        /// Validate that the a List can be appended to and then queried, using a table that contains all collection types
        /// </summary>
        [Test]
        public void Append_ToList_TableWithAllCollectionTypes()
        {
            var (table, expectedEntities) = EntityWithAllCollectionTypes.GetDefaultTable(Session, _tableName);
            var singleEntity = expectedEntities.First();
            var toAppend = new List<int> { 5, 6 };

            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithAllCollectionTypes { ListType = CqlOperator.Append(toAppend) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET ListType = ListType + ? WHERE Id = ?",
                1,
                toAppend, singleEntity.Id);

            // Prime the SELECT so the follow-up query returns the entity with the appended list.
            TestCluster.PrimeFluent(
                b => b.WhenQuery(
                          $"SELECT ArrayType, DictionaryType, Id, ListType FROM {_tableName} WHERE Id = ?",
                          when => when.WithParam(singleEntity.Id))
                      .ThenRowsSuccess(
                          new[] { "ArrayType", "DictionaryType", "Id", "ListType" },
                          r => r.WithRow(
                              singleEntity.ArrayType,
                              singleEntity.DictionaryType,
                              singleEntity.Id,
                              singleEntity.ListType.Concat(toAppend))));

            var entityList = table.Where(m => m.Id == singleEntity.Id).ExecuteAsync().Result.ToList();
            Assert.AreEqual(1, entityList.Count);
            CollectionAssert.AreEqual(singleEntity.ListType.Concat(toAppend), entityList.First().ListType);
        }

        /// <summary>
        /// Validate that the an Array can be appended to and then queried
        /// </summary>
        [Test]
        public void Append_ToArray_TableWithAllCollectionTypes()
        {
            var (table, expectedEntities) = EntityWithAllCollectionTypes.GetDefaultTable(Session, _tableName);
            var singleEntity = expectedEntities.First();
            var toAppend = new string[] { "tag1", "tag2", "tag3" };

            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithAllCollectionTypes { ArrayType = CqlOperator.Append(toAppend) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET ArrayType = ArrayType + ? WHERE Id = ?",
                1,
                toAppend, singleEntity.Id);

            // Prime the SELECT so the follow-up query returns the entity with the appended array.
            TestCluster.PrimeFluent(
                b => b.WhenQuery(
                          $"SELECT ArrayType, DictionaryType, Id, ListType FROM {_tableName} WHERE Id = ?",
                          when => when.WithParam(singleEntity.Id))
                      .ThenRowsSuccess(
                          new[] { "ArrayType", "DictionaryType", "Id", "ListType" },
                          r => r.WithRow(
                              singleEntity.ArrayType.Concat(toAppend),
                              singleEntity.DictionaryType,
                              singleEntity.Id,
                              singleEntity.ListType)));

            var entityList = table.Where(m => m.Id == singleEntity.Id).ExecuteAsync().Result.ToList();
            Assert.AreEqual(1, entityList.Count);
            CollectionAssert.AreEqual(singleEntity.ArrayType.Concat(toAppend), entityList.First().ArrayType);
        }

        /// <summary>
        /// Validate that the a Dictionary can be appended to and then queried
        /// </summary>
        [Test]
        public void Append_ToDictionary_TableWithAllCollectionTypes()
        {
            var (table, expectedEntities) = EntityWithAllCollectionTypes.GetDefaultTable(Session, _tableName);
            var singleEntity = expectedEntities.First();
            var expectedEntity = singleEntity.Clone();
            expectedEntity.DictionaryType.Add("key1", "val1");

            table.Where(t => t.Id == singleEntity.Id)
                .Select(t => new EntityWithAllCollectionTypes { DictionaryType = CqlOperator.Append(expectedEntity.DictionaryType) })
                .Update().Execute();

            VerifyBoundStatement(
                $"UPDATE {_tableName} SET DictionaryType = DictionaryType + ? WHERE Id = ?",
                1,
                expectedEntity.DictionaryType, singleEntity.Id);

            // Prime the SELECT so the follow-up query returns the fully-updated entity.
            TestCluster.PrimeFluent(
                b => b.WhenQuery(
                          $"SELECT ArrayType, DictionaryType, Id, ListType FROM {_tableName} WHERE Id = ?",
                          when => when.WithParam(singleEntity.Id))
                      .ThenRowsSuccess(
                          new[] { "ArrayType", "DictionaryType", "Id", "ListType" },
                          r => r.WithRow(
                              expectedEntity.ArrayType,
                              expectedEntity.DictionaryType,
                              expectedEntity.Id,
                              expectedEntity.ListType)));

            var entityList = table.Where(m => m.Id == singleEntity.Id).ExecuteAsync().Result.ToList();
            Assert.AreEqual(1, entityList.Count);
            CollectionAssert.AreEqual(expectedEntity.ArrayType, singleEntity.ArrayType);
            entityList.First().AssertEquals(expectedEntity);
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using OpenMetaverse;
using Nini.Config;
using System;
using System.IO;
using System.Text;
using System.Xml;
using System.Xml.Serialization;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using OpenSim.Framework;
using OpenSim.Framework.ServiceAuth;
using OpenSim.Framework.Communications;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using Mono.Addins;

namespace OpenSim.Region.CoreModules.Avatar.BakedTextures
{
    /// <summary>
    /// Region module that stores and retrieves avatar baked-texture cache entries
    /// via an external REST service ("XBakes"). Disabled unless an [XBakes] URL is
    /// configured.
    /// </summary>
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "XBakes.Module")]
    public class XBakesModule : INonSharedRegionModule, IBakedTextureModule
    {
        protected Scene m_Scene;
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        // NOTE(review): 'enc' is never referenced in this class — confirm before removing.
        private UTF8Encoding enc = new UTF8Encoding();
        // Base URL of the XBakes service; empty string means the module is inactive.
        private string m_URL = String.Empty;
        private static XmlSerializer m_serializer = new XmlSerializer(typeof(AssetBase));

        private static IServiceAuth m_Auth;

        /// <summary>Reads the [XBakes] config section; without it the module stays disabled.</summary>
        public void Initialise(IConfigSource configSource)
        {
            IConfig config = configSource.Configs["XBakes"];
            if (config == null)
                return;

            m_URL = config.GetString("URL", String.Empty);
            m_Auth = ServiceAuth.Create(configSource, "XBakes");
        }

        public void AddRegion(Scene scene)
        {
            // m_log.InfoFormat("[XBakes]: Enabled for region {0}", scene.RegionInfo.RegionName);
            m_Scene = scene;

            scene.RegisterModuleInterface<IBakedTextureModule>(this);
        }

        public void RegionLoaded(Scene scene)
        {
        }

        public void RemoveRegion(Scene scene)
        {
        }

        public void Close()
        {
        }

        public string Name
        {
            get { return "XBakes.Module"; }
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        /// <summary>
        /// Fetches the cached baked textures for the given avatar id from the service.
        /// </summary>
        /// <returns>
        /// The cache entries, or null when the module is disabled or the service
        /// response is not valid XML.
        /// </returns>
        public WearableCacheItem[] Get(UUID id)
        {
            if (m_URL == String.Empty)
                return null;

            using (RestClient rc = new RestClient(m_URL))
            {
                List<WearableCacheItem> ret = new List<WearableCacheItem>();
                rc.AddResourcePath("bakes");
                rc.AddResourcePath(id.ToString());
                rc.RequestMethod = "GET";

                try
                {
                    // The reader owns the response stream and closes it on dispose.
                    using (Stream s = rc.Request(m_Auth))
                    using (XmlTextReader sr = new XmlTextReader(s))
                    {
                        sr.ReadStartElement("BakedAppearance");
                        while (sr.LocalName == "BakedTexture")
                        {
                            string sTextureIndex = sr.GetAttribute("TextureIndex");
                            int lTextureIndex = Convert.ToInt32(sTextureIndex);
                            string sCacheId = sr.GetAttribute("CacheId");
                            UUID lCacheId = UUID.Zero;
                            if (!(UUID.TryParse(sCacheId, out lCacheId)))
                            {
                                // Unparsable cache id: fall through with UUID.Zero.
                            }

                            sr.ReadStartElement("BakedTexture");
                            AssetBase a = (AssetBase)m_serializer.Deserialize(sr);
                            ret.Add(new WearableCacheItem()
                            {
                                CacheId = lCacheId,
                                TextureIndex = (uint)lTextureIndex,
                                TextureAsset = a,
                                TextureID = a.FullID
                            });
                            sr.ReadEndElement();
                        }

                        m_log.DebugFormat("[XBakes]: read {0} textures for user {1}", ret.Count, id);
                    }

                    return ret.ToArray();
                }
                catch (XmlException)
                {
                    // Malformed/empty response: treat as "no cached bakes".
                    return null;
                }
            }
        }

        /// <summary>
        /// Serializes the given cache entries to XML and POSTs them to the service
        /// asynchronously (fire-and-forget). Null entries in <paramref name="data"/>
        /// are skipped.
        /// </summary>
        public void Store(UUID agentId, WearableCacheItem[] data)
        {
            if (m_URL == String.Empty)
                return;

            // Build the request body up front so the background send only performs I/O.
            MemoryStream reqStream;
            using (MemoryStream bakeStream = new MemoryStream())
            using (XmlTextWriter bakeWriter = new XmlTextWriter(bakeStream, null))
            {
                bakeWriter.WriteStartElement(String.Empty, "BakedAppearance", String.Empty);

                for (int i = 0; i < data.Length; i++)
                {
                    if (data[i] != null)
                    {
                        bakeWriter.WriteStartElement(String.Empty, "BakedTexture", String.Empty);
                        bakeWriter.WriteAttributeString(String.Empty, "TextureIndex", String.Empty, data[i].TextureIndex.ToString());
                        bakeWriter.WriteAttributeString(String.Empty, "CacheId", String.Empty, data[i].CacheId.ToString());

                        if (data[i].TextureAsset != null)
                            m_serializer.Serialize(bakeWriter, data[i].TextureAsset);

                        bakeWriter.WriteEndElement();
                    }
                }

                bakeWriter.WriteEndElement();
                bakeWriter.Flush();

                reqStream = new MemoryStream(bakeStream.ToArray());
            }

            RestClient rc = new RestClient(m_URL);
            rc.AddResourcePath("bakes");
            rc.AddResourcePath(agentId.ToString());
            rc.RequestMethod = "POST";

            Util.FireAndForget(
                delegate
                {
                    // Fix: dispose the client and the request payload once the POST
                    // completes; the previous version leaked both (Get() already
                    // disposes its RestClient via using).
                    using (rc)
                    using (reqStream)
                    {
                        rc.Request(reqStream, m_Auth);
                        m_log.DebugFormat("[XBakes]: stored {0} textures for user {1}", data.Length, agentId);
                    }
                }, null, "XBakesModule.Store"
            );
        }
    }
}
using System; using System.Collections.Generic; using Orleans.Runtime; namespace Orleans.Streams { [Serializable] internal class ImplicitStreamSubscriberTable { private readonly Dictionary<string, HashSet<int>> table; internal ImplicitStreamSubscriberTable() { table = new Dictionary<string, HashSet<int>>(); } /// <summary> /// Initializes any implicit stream subscriptions specified for a grain class type. If the grain class specified does not have any associated namespaces, then nothing is done. /// </summary> /// <param name="grainClass">A grain class type.</param> /// <exception cref="System.ArgumentException"> /// Duplicate specification of namespace "...". /// </exception> internal void InitImplicitStreamSubscribers(IEnumerable<Type> grainClasses) { foreach (var grainClass in grainClasses) { if (!TypeUtils.IsGrainClass(grainClass)) { continue; } // we collect all namespaces that the specified grain class should implicitly subscribe to. ISet<string> namespaces = GetNamespacesFromAttributes(grainClass); if (null == namespaces) continue; if (namespaces.Count > 0) { // the grain class is subscribed to at least one namespace. in order to create a grain reference later, we need a qualifying interface but it doesn't matter which (because we'll be creating references to extensions), so we'll take the first interface in the sequence. AddImplicitSubscriber(grainClass, namespaces); } } } /// <summary> /// Retrieve a map of implicit subscriptionsIds to implicit subscribers, given a stream ID. This method throws an exception if there's no namespace associated with the stream ID. /// </summary> /// <param name="streamId">A stream ID.</param> /// <returns>A set of references to implicitly subscribed grains. 
They are expected to support the streaming consumer extension.</returns> /// <exception cref="System.ArgumentException">The stream ID doesn't have an associated namespace.</exception> /// <exception cref="System.InvalidOperationException">Internal invariant violation.</exception> internal IDictionary<Guid, IStreamConsumerExtension> GetImplicitSubscribers(StreamId streamId) { if (String.IsNullOrWhiteSpace(streamId.Namespace)) { throw new ArgumentException("The stream ID doesn't have an associated namespace.", "streamId"); } HashSet<int> entry; var result = new Dictionary<Guid, IStreamConsumerExtension>(); if (table.TryGetValue(streamId.Namespace, out entry)) { foreach (var i in entry) { IStreamConsumerExtension consumer = MakeConsumerReference(streamId.Guid, i); Guid subscriptionGuid = MakeSubscriptionGuid(i, streamId); if (result.ContainsKey(subscriptionGuid)) { throw new InvalidOperationException(string.Format("Internal invariant violation: generated duplicate subscriber reference: {0}, subscriptionId: {1}", consumer, subscriptionGuid)); } result.Add(subscriptionGuid, consumer); } return result; } return result; } /// <summary> /// Determines whether the specified grain is an implicit subscriber of a given stream. /// </summary> /// <param name="grainId">The grain identifier.</param> /// <param name="streamId">The stream identifier.</param> /// <returns>true if the grain id describes an implicit subscriber of the stream described by the stream id.</returns> internal bool IsImplicitSubscriber(GrainId grainId, StreamId streamId) { return HasImplicitSubscription(streamId.Namespace, grainId.GetTypeCode()); } /// <summary> /// Try to get the implicit subscriptionId. /// If an implicit subscription exists, return a subscription Id that is unique per grain type, grainId, namespace combination. 
/// </summary>
        /// <param name="grainId">The grain that may be implicitly subscribed.</param>
        /// <param name="streamId">The stream in question.</param>
        /// <param name="subscriptionId">Receives the derived subscription id, or Guid.Empty if there is no implicit subscription.</param>
        /// <returns>true if an implicit subscription exists for the grain/stream pair.</returns>
        internal bool TryGetImplicitSubscriptionGuid(GrainId grainId, StreamId streamId, out Guid subscriptionId)
        {
            subscriptionId = Guid.Empty;

            if (!HasImplicitSubscription(streamId.Namespace, grainId.GetTypeCode()))
            {
                return false;
            }

            // make subscriptionId
            subscriptionId = MakeSubscriptionGuid(grainId, streamId);
            return true;
        }

        /// <summary>
        /// Create a subscriptionId that is unique per grainId, grainType, namespace combination.
        /// </summary>
        /// <param name="grainId">The subscribing grain.</param>
        /// <param name="streamId">The stream being subscribed to.</param>
        /// <returns>A deterministic subscription guid.</returns>
        private Guid MakeSubscriptionGuid(GrainId grainId, StreamId streamId)
        {
            // first int in guid is grain type code
            int grainIdTypeCode = grainId.GetTypeCode();
            return MakeSubscriptionGuid(grainIdTypeCode, streamId);
        }

        /// <summary>
        /// Create a subscriptionId that is unique per grainId, grainType, namespace combination.
        /// </summary>
        /// <param name="grainIdTypeCode">Type code of the subscribing grain class.</param>
        /// <param name="streamId">The stream being subscribed to.</param>
        /// <returns>A deterministic subscription guid, marked as an implicit subscription id.</returns>
        private Guid MakeSubscriptionGuid(int grainIdTypeCode, StreamId streamId)
        {
            // next 2 shorts in the guid are from the namespace hash
            int namespaceHash = streamId.Namespace.GetHashCode();
            byte[] namespaceHashByes = BitConverter.GetBytes(namespaceHash);
            short s1 = BitConverter.ToInt16(namespaceHashByes, 0);
            short s2 = BitConverter.ToInt16(namespaceHashByes, 2);

            // Trailing 8 bytes of the guid are from the hash of the streamId Guid and a hash of the full streamId.

            // get streamId guid hash code
            int streamIdGuidHash = streamId.Guid.GetHashCode();
            // get full streamId hash code
            int streamIdHash = streamId.GetHashCode();

            // build guid trailing 8 bytes from grainIdHash and the hash of the full streamId.
            var tail = new List<byte>();
            tail.AddRange(BitConverter.GetBytes(streamIdGuidHash));
            tail.AddRange(BitConverter.GetBytes(streamIdHash));

            // make guid.
            // - First int is grain type
            // - Two shorts from namespace hash
            // - 8 byte tail from streamId Guid and full stream hash.
            return SubscriptionMarker.MarkAsImplictSubscriptionId(new Guid(grainIdTypeCode, s1, s2, tail.ToArray()));
        }

        // Returns true when the namespace has at least one implicit subscription registered
        // for the given grain type code. A null/whitespace namespace never matches.
        private bool HasImplicitSubscription(string streamNamespace, int grainIdTypeCode)
        {
            if (String.IsNullOrWhiteSpace(streamNamespace))
            {
                return false;
            }

            HashSet<int> entry;
            return (table.TryGetValue(streamNamespace, out entry) && // if we don't have implicit subscriptions for this namespace, fail out
                    entry.Contains(grainIdTypeCode)); // if we don't have an implicit subscription for this type of grain on this namespace, fail out
        }

        /// <summary>
        /// Add an implicit subscriber to the table.
        /// </summary>
        /// <param name="grainClass">Type of the grain class whose instances subscribe to the specified namespaces.</param>
        /// <param name="namespaces">Namespaces instances of the grain class should subscribe to.</param>
        /// <exception cref="System.ArgumentException">
        /// No namespaces specified.
        /// or
        /// Duplicate specification of namespace "...".
        /// </exception>
        private void AddImplicitSubscriber(Type grainClass, ISet<string> namespaces)
        {
            // reject an empty namespace set up front.
            if (namespaces.Count == 0)
            {
                throw new ArgumentException("no namespaces specified", "namespaces");
            }

            // we'll need the class type code.
            int implTypeCode = CodeGeneration.GrainInterfaceUtils.GetGrainClassTypeCode(grainClass);

            foreach (string s in namespaces)
            {
                // first, we trim whitespace off of the namespace string. leaving these would lead to misleading log messages.
                string key = s.Trim();

                // if the table already holds the namespace we're looking at, then we don't need to create a new entry.
                // each entry maps a namespace to the set of grain class type codes that implicitly subscribe to it, e.g.:
                //
                //   "namespace0" -> HashSet {implTypeCode.0, implTypeCode.1, ..., implTypeCode.n}
                //
                // each class in the entry used the ImplicitStreamSubscriptionAttribute with the associated namespace.
                // this information will be used later to create grain references on-demand. we must use string
                // representations to ensure that this information is serializable.
                if (table.ContainsKey(key))
                {
                    // an entry already exists. we append a class/interface association to the current set.
                    HashSet<int> entries = table[key];
                    if (!entries.Add(implTypeCode))
                    {
                        throw new InvalidOperationException(String.Format("attempt to initialize implicit subscriber more than once (key={0}, implTypeCode={1}).", key, implTypeCode));
                    }
                }
                else
                {
                    // an entry does not already exist. we create a new one with one class/interface association.
                    table[key] = new HashSet<int> { implTypeCode };
                }
            }
        }

        /// <summary>
        /// Create a reference to a grain that we expect to support the stream consumer extension.
        /// </summary>
        /// <param name="primaryKey">The primary key of the grain.</param>
        /// <param name="implTypeCode">The type code of the grain interface.</param>
        /// <returns>A grain reference cast to the stream consumer extension.</returns>
        private IStreamConsumerExtension MakeConsumerReference(Guid primaryKey, int implTypeCode)
        {
            GrainId grainId = GrainId.GetGrainId(implTypeCode, primaryKey);
            IAddressable addressable = GrainReference.FromGrainId(grainId);
            return addressable.Cast<IStreamConsumerExtension>();
        }

        /// <summary>
        /// Collects the namespaces associated with a grain class type through the use of ImplicitStreamSubscriptionAttribute.
/// </summary>
        /// <param name="grainClass">A grain class type that might have ImplicitStreamSubscriptionAttributes associated with it.</param>
        /// <returns>The set of distinct (trimmed) namespaces declared by the attributes; empty if none are present.</returns>
        /// <exception cref="System.ArgumentException">grainType does not describe a grain class.</exception>
        /// <exception cref="System.InvalidOperationException">duplicate specification of ImplicitConsumerActivationAttribute(...).</exception>
        private static ISet<string> GetNamespacesFromAttributes(Type grainClass)
        {
            if (!TypeUtils.IsGrainClass(grainClass))
            {
                throw new ArgumentException(string.Format("{0} is not a grain class.", grainClass.FullName), "grainClass");
            }

            object[] attribs = grainClass.GetCustomAttributes(typeof(ImplicitStreamSubscriptionAttribute), inherit: false);

            // consider all of the attributes and aggregate the specifications. duplicates will not be permitted.
            var result = new HashSet<string>();
            foreach (var ob in attribs)
            {
                var attrib = (ImplicitStreamSubscriptionAttribute)ob;
                if (string.IsNullOrWhiteSpace(attrib.Namespace))
                {
                    throw new InvalidOperationException("ImplicitConsumerActivationAttribute argument cannot be null nor whitespace");
                }

                // FIX: actually trim the namespace. The variable was already named "trimmed" but never
                // called Trim(), while AddImplicitSubscriber trims its table keys (s.Trim()). Without
                // trimming here, values like " ns" and "ns " would pass duplicate detection in this
                // method and then collide on the trimmed key in AddImplicitSubscriber.
                string trimmed = attrib.Namespace.Trim();
                if (!result.Add(trimmed))
                {
                    throw new InvalidOperationException(string.Format("duplicate specification of attribute ImplicitConsumerActivationAttribute({0}).", attrib.Namespace));
                }
            }

            return result;
        }
    }
}
/* Genuine Channels product.
 *
 * Copyright (c) 2002-2007 Dmitry Belikov. All rights reserved.
 *
 * This source code comes under and must be used and distributed according to the Genuine Channels license agreement.
 */

using System;
using System.IO;
using System.Net.Sockets;
using System.Threading;

using Belikov.GenuineChannels.BufferPooling;
using Belikov.GenuineChannels.Logbook;
using Belikov.GenuineChannels.Messaging;

namespace Belikov.GenuineChannels.GenuineTcp
{
    /// <summary>
    /// Implements a stream reading data from a socket synchronously.
    /// Automatically initiates receiving after the current message is read up entirely.
    /// </summary>
    internal class SyncSocketReadingStream : Stream, IDisposable
    {
        /// <summary>
        /// Constructs an instance of the SyncSocketReadingStream class.
        /// </summary>
        /// <param name="tcpConnectionManager">TCP Connection Manager.</param>
        /// <param name="tcpSocketInfo">The connection.</param>
        /// <param name="receiveTimeout">The moment at which the message must be received entirely.</param>
        /// <param name="automaticallyContinueReading">Indicates whether this instance will automatically initiate reading of the next message from the specified connection.</param>
        public SyncSocketReadingStream(TcpConnectionManager tcpConnectionManager, TcpSocketInfo tcpSocketInfo, int receiveTimeout, bool automaticallyContinueReading)
        {
            this._readBuffer = BufferPool.ObtainBuffer();
            this._tcpConnectionManager = tcpConnectionManager;
            this._tcpSocketInfo = tcpSocketInfo;
            this._receiveTimeout = receiveTimeout;
            this._automaticallyContinueReading = automaticallyContinueReading;

            // first, complete receiving of the first header
            // it may read up the entire message and release the underlying connection
            ReadNextPortion(true);
        }

        private TcpConnectionManager _tcpConnectionManager;
        private TcpSocketInfo _tcpSocketInfo;
        private int _receiveTimeout;
        // Pooled scratch buffer; recycled (set to null) as soon as the message is fully consumed.
        private byte[] _readBuffer;
        private bool _automaticallyContinueReading;

        // Number of valid bytes currently held in _readBuffer.
        private int _validLength;
        // Read cursor within _readBuffer (also reused as a write cursor while reading the header).
        private int _currentPosition;
        // Declared size of the packet currently being received.
        private int _currentPacketSize;
        // How many bytes of the current packet have been received so far.
        private int _currentPacketBytesRead;
        // True when the header of the last-seen packet flagged it as the final packet of the message.
        private bool _messageRead;

        /// <summary>
        /// The unique identifier of the current stream.
        /// </summary>
        public int DbgStreamId
        {
            get
            {
                return this._dbgStreamId;
            }
        }
        private int _dbgStreamId = Interlocked.Increment(ref _totalDbgStreamId);
        private static int _totalDbgStreamId = 0;

        /// <summary>
        /// Reads a sequence of bytes from the current stream and advances the position within the stream by the number of bytes read.
        /// </summary>
        /// <param name="buffer">An array of bytes.</param>
        /// <param name="offset">The zero-based byte offset in buffer at which to begin storing the data read from the current stream.</param>
        /// <param name="count">The maximum number of bytes to be read from the current stream.</param>
        /// <returns>The total number of bytes read into the buffer.</returns>
        public override int Read(byte[] buffer, int offset, int count)
        {
            int size = 0;
            int resultSize = 0;

            for ( ; ; )
            {
                // check whether we have the next portion
                if (this._currentPosition < this._validLength)
                {
                    size = Math.Min(this._validLength - this._currentPosition, count);
                    Buffer.BlockCopy(this._readBuffer, this._currentPosition, buffer, offset, size);

                    this._currentPosition += size;
                    count -= size;
                    resultSize += size;
                    offset += size;
                }

                // recycle the buffer if possible (message complete and buffered bytes fully consumed)
                if (this._readBuffer != null && this._messageRead && this._currentPacketBytesRead >= this._currentPacketSize && this._currentPosition >= this._validLength)
                {
                    BufferPool.RecycleBuffer(this._readBuffer);
                    this._readBuffer = null;
                }

                // done when the caller's request is satisfied or the whole message has been delivered
                if (count <= 0 || (this._messageRead && this._currentPacketBytesRead >= this._currentPacketSize))
                    return resultSize;

                ReadNextPortion(false);
            }
        }

        /// <summary>
        /// Reads a byte from the stream and advances the position within the stream by one byte, or returns -1 if at the end of the stream.
/// </summary>
        /// <returns>The unsigned byte cast to an Int32, or -1 if at the end of the stream.</returns>
        public override int ReadByte()
        {
            try
            {
                // get a byte from the buffered portion if one is available
                if (this._currentPosition < this._validLength)
                    return this._readBuffer[ this._currentPosition++ ];

                // otherwise pull the next portion from the socket and retry once
                ReadNextPortion(false);

                if (this._currentPosition < this._validLength)
                    return this._readBuffer[ this._currentPosition++ ];

                return -1;
            }
            finally
            {
                // recycle the buffer if possible (message complete and buffered bytes fully consumed)
                if (this._readBuffer != null && this._messageRead && this._currentPacketBytesRead >= this._currentPacketSize && this._currentPosition >= this._validLength)
                {
                    BufferPool.RecycleBuffer(this._readBuffer);
                    this._readBuffer = null;
                }
            }
        }

        /// <summary>
        /// Synchronously reads the next network packet if it is available.
        /// A packet starts with a fixed-size header (magic code, 32-bit payload size, final-packet flag)
        /// followed by the payload, which is read into _readBuffer in portions.
        /// </summary>
        /// <param name="deriveHeader">Indicates whether it is necessary to take header from the provided connection.</param>
        private void ReadNextPortion(bool deriveHeader)
        {
            int bytesRead = 0;
            int lengthToRead = 0;

            if (! deriveHeader)
            {
                // try to read the remaining part of the packet
                if (this._currentPacketBytesRead < this._currentPacketSize)
                {
                    // read the next part of the packet
                    lengthToRead = Math.Min(this._currentPacketSize - this._currentPacketBytesRead, this._readBuffer.Length);
                    this._validLength = this.ReadFromSocket(this._readBuffer, 0, lengthToRead);
                    if (this._validLength == 0)
                        throw GenuineExceptions.Get_Receive_Portion();

                    // Fixed in 2.5.9.7
                    //this._tcpSocketInfo.ITransportContext.ConnectionManager.IncreaseBytesReceived(this._validLength);

                    this._currentPacketBytesRead += this._validLength;
                    this._currentPosition = 0;

                    if (this._currentPacketBytesRead == this._currentPacketSize && this._messageRead)
                    {
                        this.ReadingCompleted();
                    }

                    return ;
                }

                // the underlying stream ends
                if (this._messageRead)
                    return ;

                // prepare for reading a header
                this._currentPosition = 0;
            }

            lengthToRead = TcpConnectionManager.HEADER_SIZE;
            if (deriveHeader)
            {
                // header bytes already received by the connection are copied in before reading the rest
                if (this._tcpSocketInfo.ReceivingBufferCurrentPosition > 0)
                    Buffer.BlockCopy(this._tcpSocketInfo.ReceivingHeaderBuffer, 0, this._readBuffer, 0, this._tcpSocketInfo.ReceivingBufferCurrentPosition);
                this._currentPosition = this._tcpSocketInfo.ReceivingBufferCurrentPosition;
            }

            // read the header
            while (this._currentPosition < lengthToRead)
            {
                bytesRead = this.ReadFromSocket(this._readBuffer, this._currentPosition, lengthToRead - this._currentPosition);
                if (bytesRead == 0)
                    throw GenuineExceptions.Get_Receive_Portion();

                // Fixed in 2.5.9.7
                //this._tcpSocketInfo.ITransportContext.ConnectionManager.IncreaseBytesReceived(bytesRead);

                this._currentPosition += bytesRead;
            }

            // parse the header
            if (this._readBuffer[0] != MessageCoder.COMMAND_MAGIC_CODE)
                throw GenuineExceptions.Get_Receive_IncorrectData();
            this._currentPacketSize = MessageCoder.ReadInt32(this._readBuffer, 1);
            this._messageRead = this._readBuffer[5] != 0;
            this._currentPacketBytesRead = 0;

            // and read the part of the packet
            if (this._currentPacketBytesRead < this._currentPacketSize)
            {
                // read the next part of the packet
                lengthToRead = Math.Min(this._currentPacketSize - this._currentPacketBytesRead, this._readBuffer.Length);
                this._validLength = this.ReadFromSocket(this._readBuffer, 0, lengthToRead);
                if (this._validLength == 0)
                    throw GenuineExceptions.Get_Receive_Portion();

                // Fixed in 2.5.9.7
                //this._tcpSocketInfo.ITransportContext.ConnectionManager.IncreaseBytesReceived(this._validLength);

                this._currentPacketBytesRead += this._validLength;
                this._currentPosition = 0;
            }

            if (this._currentPacketBytesRead == this._currentPacketSize && this._messageRead)
            {
                this.ReadingCompleted();
            }
        }

        /// <summary>
        /// Completes reading from the connection: optionally hands the connection back to the
        /// manager so it can start receiving the next message.
        /// </summary>
        private void ReadingCompleted()
        {
            if (this._automaticallyContinueReading)
                this._tcpConnectionManager.LowLevel_HalfSync_StartReceiving(this._tcpSocketInfo);
        }

        /// <summary>
        /// Reads data from the socket.
/// </summary>
        /// <param name="buffer">An array of type Byte that is the storage location for received data.</param>
        /// <param name="offset">The location in buffer to store the received data.</param>
        /// <param name="count">The number of bytes to receive.</param>
        /// <returns>The number of bytes read.</returns>
        public int ReadFromSocket(byte[] buffer, int offset, int count)
        {
            BinaryLogWriter binaryLogWriter = this._tcpConnectionManager.ITransportContext.BinaryLogWriter;

            try
            {
                // fail fast if the overall receive deadline has already expired
                int millisecondsRemained = GenuineUtility.GetMillisecondsLeft(this._receiveTimeout);
                if (millisecondsRemained <= 0)
                    throw GenuineExceptions.Get_Send_ServerDidNotReply();

                // cap the blocking Receive by the remaining time budget
                this._tcpSocketInfo.Socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveTimeout, millisecondsRemained);
                int bytesReceived = this._tcpSocketInfo.Socket.Receive(buffer, offset, count, SocketFlags.None);

                // LOG:
                if ( binaryLogWriter != null && binaryLogWriter[LogCategory.LowLevelTransport] > 0 )
                    binaryLogWriter.WriteTransportContentEvent(LogCategory.LowLevelTransport, "SyncSocketReadingStream.ReadFromSocket",
                        LogMessageType.LowLevelTransport_SyncReceivingCompleted, null, null, this._tcpSocketInfo.Remote,
                        binaryLogWriter[LogCategory.LowLevelTransport] > 1 ? new MemoryStream(buffer, offset, bytesReceived) : null,
                        GenuineUtility.CurrentThreadId, Thread.CurrentThread.Name, this._tcpSocketInfo.DbgConnectionId,
                        bytesReceived, this._tcpSocketInfo.Socket.RemoteEndPoint.ToString(), null, null,
                        "Socket.Receive(). Bytes received: {0}.", bytesReceived);

                this._tcpConnectionManager.IncreaseBytesReceived(bytesReceived);
                return bytesReceived;
            }
            catch (Exception ex)
            {
                // LOG:
                if ( binaryLogWriter != null && binaryLogWriter[LogCategory.LowLevelTransport] > 0 )
                    binaryLogWriter.WriteEvent(LogCategory.LowLevelTransport, "SyncSocketReadingStream.ReadFromSocket",
                        LogMessageType.LowLevelTransport_SyncReceivingCompleted, ex, null, this._tcpSocketInfo.Remote, null,
                        GenuineUtility.CurrentThreadId, Thread.CurrentThread.Name, null, null,
                        this._tcpSocketInfo.DbgConnectionId, 0, 0, 0, null, null, null, null,
                        "Socket.Receive() failed.");

                throw;
            }
        }

        /// <summary>
        /// Skips the remaining part of the message (delegates to Close).
        /// </summary>
        public void Dispose()
        {
            this.Close();
        }

        /// <summary>
        /// Closes the current stream and releases all resources associated with the current stream.
        /// Note: closing drains (skips) the rest of the current message so the connection stays usable.
        /// </summary>
        public override void Close()
        {
            this.SkipMessage();
        }

        /// <summary>
        /// Skips the current message in the transport stream.
        /// </summary>
        public void SkipMessage()
        {
            while (! this.IsReadingFinished)
                ReadNextPortion(false);
        }

        /// <summary>
        /// Gets an indication whether the message reading from the underlying provider was completed.
        /// </summary>
        public bool IsReadingFinished
        {
            get
            {
                return this._currentPacketBytesRead >= this._currentPacketSize && this._messageRead;
            }
        }

        /// <summary>
        /// Gets an indication whether the message has been read from this stream
        /// (received entirely AND consumed entirely by the caller).
        /// </summary>
        public bool IsMessageProcessed
        {
            get
            {
                return this._currentPacketBytesRead >= this._currentPacketSize && this._messageRead && this._currentPosition >= this._validLength;
            }
        }

        #region -- Insignificant stream members ----------------------------------------------------

        /// <summary>
        /// Writes a sequence of bytes to the current stream and advances the current position within this stream by the number of bytes written.
/// </summary>
        /// <param name="buffer">An array of bytes.</param>
        /// <param name="offset">The zero-based byte offset in buffer at which to begin copying bytes to the current stream.</param>
        /// <param name="count">The number of bytes to be written to the current stream.</param>
        public override void Write(byte[] buffer, int offset, int count)
        {
            // This is a read-only stream; writing is never supported.
            throw new NotSupportedException();
        }

        /// <summary>
        /// Gets a value indicating whether the current stream supports reading.
        /// </summary>
        public override bool CanRead
        {
            get
            {
                return true;
            }
        }

        /// <summary>
        /// Gets a value indicating whether the current stream supports seeking.
        /// </summary>
        public override bool CanSeek
        {
            get
            {
                return false;
            }
        }

        /// <summary>
        /// Gets a value indicating whether the current stream supports writing.
        /// Always false: Write, BeginWrite and EndWrite unconditionally throw NotSupportedException.
        /// </summary>
        public override bool CanWrite
        {
            get
            {
                // FIX: previously returned true, contradicting the Stream contract — a stream whose
                // Write throws NotSupportedException must report CanWrite == false so that callers
                // probing the capability do not attempt to write.
                return false;
            }
        }

        /// <summary>
        /// Gets the length in bytes of the stream.
        /// Always fires NotSupportedException exception.
        /// </summary>
        public override long Length
        {
            get
            {
                throw new NotSupportedException();
            }
        }

        /// <summary>
        /// Gets or sets the position within the current stream.
        /// Always fires NotSupportedException exception.
        /// </summary>
        public override long Position
        {
            get
            {
                throw new NotSupportedException();
            }
            set
            {
                throw new NotSupportedException();
            }
        }

        /// <summary>
        /// Begins an asynchronous read operation.
/// </summary>
        /// <param name="buffer">The buffer to read the data into.</param>
        /// <param name="offset">The byte offset in buffer at which to begin writing data read from the stream.</param>
        /// <param name="count">The maximum number of bytes to read.</param>
        /// <param name="callback">An optional asynchronous callback, to be called when the read is complete.</param>
        /// <param name="state">A user-provided object that distinguishes this particular asynchronous read request from other requests.</param>
        /// <returns>An IAsyncResult that represents the asynchronous read, which could still be pending.</returns>
        public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
        {
            // Asynchronous I/O is not supported by this synchronous stream.
            throw new NotSupportedException();
        }

        /// <summary>
        /// Begins an asynchronous write operation.
        /// </summary>
        /// <param name="buffer">The buffer to write data from.</param>
        /// <param name="offset">The byte offset in buffer from which to begin writing.</param>
        /// <param name="count">The maximum number of bytes to write.</param>
        /// <param name="callback">An optional asynchronous callback, to be called when the write is complete.</param>
        /// <param name="state">A user-provided object that distinguishes this particular asynchronous write request from other requests.</param>
        /// <returns>An IAsyncResult that represents the asynchronous write, which could still be pending.</returns>
        public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Waits for the pending asynchronous read to complete.
        /// </summary>
        /// <param name="asyncResult">The reference to the pending asynchronous request to finish.</param>
        /// <returns>The number of bytes read from the stream, between zero (0) and the number of bytes you requested. Streams only return zero (0) at the end of the stream, otherwise, they should block until at least one byte is available.</returns>
        public override int EndRead(IAsyncResult asyncResult)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Ends an asynchronous write operation.
        /// </summary>
        /// <param name="asyncResult">A reference to the outstanding asynchronous I/O request.</param>
        public override void EndWrite(IAsyncResult asyncResult)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Clears all buffers for this stream and causes any buffered data to be written to the underlying device.
        /// </summary>
        public override void Flush()
        {
            // Intentionally a no-op: the stream is read-only, there is nothing to flush.
        }

        /// <summary>
        /// Sets the position within the current stream.
        /// </summary>
        /// <param name="offset">A byte offset relative to the origin parameter.</param>
        /// <param name="origin">A value of type SeekOrigin indicating the reference point used to obtain the new position.</param>
        /// <returns>The new position within the current stream.</returns>
        public override long Seek(long offset, SeekOrigin origin)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Sets the length of the current stream.
        /// </summary>
        /// <param name="val">The desired length of the current stream in bytes.</param>
        public override void SetLength(long val)
        {
            throw new NotSupportedException();
        }

        #endregion
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using NUnit.Framework;

namespace StructureMap.Testing
{
    /// <summary>
    /// Helper for asserting that an action throws exactly the exception type T.
    /// Usage: Exception&lt;InvalidOperationException&gt;.ShouldBeThrownBy(() => ...).
    /// </summary>
    public static class Exception<T> where T : Exception
    {
        /// <summary>
        /// Invokes <paramref name="action"/> and asserts that it throws an exception of type T.
        /// Fails the test if nothing is thrown, or (via ShouldBeOfType) if the wrong type is thrown.
        /// </summary>
        /// <returns>The caught exception, for further assertions on its state.</returns>
        public static T ShouldBeThrownBy(Action action)
        {
            T exception = null;

            try
            {
                action();
            }
            catch (Exception e)
            {
                // ShouldBeOfType asserts the runtime type and casts; a mismatch fails the test here.
                exception = e.ShouldBeOfType<T>();
            }

            if (exception == null) Assert.Fail("An exception was expected, but not thrown by the given action.");

            return exception;
        }
    }

    // Parameterless delegate used by the Type-based ShouldBeThrownBy extension below.
    public delegate void MethodThatThrows();

    /// <summary>
    /// Fluent assertion extensions wrapping the classic NUnit Assert/StringAssert API.
    /// Methods that return a value return it to allow chaining further assertions.
    /// </summary>
    public static class SpecificationExtensions
    {
        /// <summary>
        /// Asserts that two sequences contain equal elements in the same order.
        /// </summary>
        public static void ShouldHaveTheSameElementsAs<T>(this IEnumerable<T> actual, IEnumerable<T> expected)
        {
            // Avoid re-materializing inputs that are already lists.
            IList actualList = (actual is IList) ? (IList) actual : actual.ToList();
            IList expectedList = (expected is IList) ? (IList) expected : expected.ToList();

            ShouldHaveTheSameElementsAs(actualList, expectedList);
        }

        /// <summary>
        /// Params-array convenience overload of the sequence-equality assertion.
        /// </summary>
        public static void ShouldHaveTheSameElementsAs<T>(this IEnumerable<T> actual, params T[] expected)
        {
            IList actualList = (actual is IList) ? (IList) actual : actual.ToList();
            IList expectedList = (expected is IList) ? (IList) expected : expected.ToList();

            ShouldHaveTheSameElementsAs(actualList, expectedList);
        }

        /// <summary>
        /// Asserts element-by-element equality of two lists; on failure, dumps the actual
        /// elements to the debug output before rethrowing the assertion failure.
        /// </summary>
        public static void ShouldHaveTheSameElementsAs(this IList actual, IList expected)
        {
            actual.ShouldNotBeNull();
            expected.ShouldNotBeNull();

            try
            {
                actual.Count.ShouldEqual(expected.Count);

                for (int i = 0; i < actual.Count; i++)
                {
                    actual[i].ShouldEqual(expected[i]);
                }
            }
            catch (Exception)
            {
                // Diagnostic aid: show what the actual sequence contained.
                Debug.WriteLine("ACTUAL:");
                foreach (object o in actual)
                {
                    Debug.WriteLine(o);
                }
                throw;
            }
        }

        // --- boolean assertions ---

        public static void ShouldBeFalse(this bool condition)
        {
            Assert.IsFalse(condition);
        }

        public static void ShouldBeTrue(this bool condition)
        {
            Assert.IsTrue(condition);
        }

        // --- equality / identity assertions (each returns the expected value for chaining) ---

        public static object ShouldEqual(this object actual, object expected)
        {
            Assert.AreEqual(expected, actual);
            return expected;
        }

        public static object ShouldNotEqual(this object actual, object expected)
        {
            Assert.AreNotEqual(expected, actual);
            return expected;
        }

        public static void ShouldBeNull(this object anObject)
        {
            Assert.IsNull(anObject);
        }

        public static void ShouldNotBeNull(this object anObject)
        {
            Assert.IsNotNull(anObject);
        }

        public static object ShouldBeTheSameAs(this object actual, object expected)
        {
            Assert.AreSame(expected, actual);
            return expected;
        }

        /// <summary>
        /// Asserts the runtime type and returns the object cast to T.
        /// </summary>
        public static T IsType<T>(this object actual)
        {
            actual.ShouldBeOfType(typeof (T));
            return (T) actual;
        }

        public static object ShouldNotBeTheSameAs(this object actual, object expected)
        {
            Assert.AreNotSame(expected, actual);
            return expected;
        }

        // --- type assertions ---

        public static void ShouldBeOfType(this object actual, Type expected)
        {
            Assert.IsInstanceOf(expected, actual);
        }

        public static T ShouldBeOfType<T>(this object actual)
        {
            Assert.IsInstanceOf(typeof (T), actual);
            return (T) actual;
        }

        public static void ShouldNotBeOfType(this object actual, Type expected)
        {
            Assert.IsNotInstanceOf(expected, actual);
        }

        public static void ShouldContain(this IList actual, object expected)
        {
            Assert.Contains(expected, actual);
        }

        // --- comparison assertions (return the bound that was compared against) ---

        public static IComparable ShouldBeGreaterThan(this IComparable arg1, IComparable arg2)
        {
            Assert.Greater(arg1, arg2);
            return arg2;
        }

        public static IComparable ShouldBeLessThan(this IComparable arg1, IComparable arg2)
        {
            Assert.Less(arg1, arg2);
            return arg2;
        }

        // --- emptiness assertions ---

        public static void ShouldBeEmpty(this ICollection collection)
        {
            Assert.IsEmpty(collection);
        }

        public static void ShouldBeEmpty(this string aString)
        {
            Assert.IsEmpty(aString);
        }

        public static void ShouldNotBeEmpty(this ICollection collection)
        {
            Assert.IsNotEmpty(collection);
        }

        public static string ShouldNotBeEmpty(this string aString)
        {
            Assert.IsNotEmpty(aString);
            return aString;
        }

        // --- string assertions ---

        public static void ShouldContain(this string actual, string expected)
        {
            StringAssert.Contains(expected, actual);
        }

        public static string ShouldNotContain(this string actual, string expected)
        {
            Assert.IsTrue(!actual.Contains(expected));
            return actual;
        }

        public static string ShouldBeEqualIgnoringCase(this string actual, string expected)
        {
            StringAssert.AreEqualIgnoringCase(expected, actual);
            return expected;
        }

        public static void ShouldEndWith(this string actual, string expected)
        {
            StringAssert.EndsWith(expected, actual);
        }

        public static void ShouldStartWith(this string actual, string expected)
        {
            StringAssert.StartsWith(expected, actual);
        }

        public static void ShouldContainErrorMessage(this Exception exception, string expected)
        {
            StringAssert.Contains(expected, exception.Message);
        }

        /// <summary>
        /// Asserts that invoking <paramref name="method"/> throws exactly <paramref name="exceptionType"/>
        /// (exact type match, not a subtype) and returns the caught exception.
        /// </summary>
        public static Exception ShouldBeThrownBy(this Type exceptionType, MethodThatThrows method)
        {
            Exception exception = null;

            try
            {
                method();
            }
            catch (Exception e)
            {
                Assert.AreEqual(exceptionType, e.GetType());
                exception = e;
            }

            if (exception == null)
            {
                Assert.Fail(String.Format("Expected {0} to be thrown.", exceptionType.FullName));
            }

            return exception;
        }

        /// <summary>
        /// Asserts two DateTimes are equal within 3 milliseconds — tolerates the reduced
        /// precision of SQL Server datetime round-trips.
        /// </summary>
        public static void ShouldEqualSqlDate(this DateTime actual, DateTime expected)
        {
            TimeSpan timeSpan = actual - expected;
            Assert.Less(Math.Abs(timeSpan.TotalMilliseconds), 3);
        }
    }
}
using System; using System.IO; using System.Linq; using NUnit.Framework; using StructureMap.Configuration.DSL; using StructureMap.Graph; using StructureMap.Testing.DocumentationExamples; using StructureMap.Testing.Widget; using StructureMap.Testing.Widget3; using StructureMap.Testing.Widget5; using StructureMap.TypeRules; namespace StructureMap.Testing.Graph { public class TestingRegistry : Registry { public static bool WasUsed; public TestingRegistry() { WasUsed = true; ForRequestedType<Rule>().TheDefault.IsThis(new ColorRule("Green")); } public static void Reset() { WasUsed = false; } } [TestFixture] public class AssemblyScannerTester { #region Setup/Teardown [SetUp] public void SetUp() { TestingRegistry.Reset(); theGraph = null; } #endregion [TestFixtureSetUp] public void FixtureSetUp() { string binFolder = Path.GetDirectoryName(GetType().Assembly.Location); assemblyScanningFolder = Path.Combine(binFolder, "DynamicallyLoaded"); if (!Directory.Exists(assemblyScanningFolder)) Directory.CreateDirectory(assemblyScanningFolder); string assembly1 = typeof (RedGreenRegistry).Assembly.Location; string assembly2 = typeof (IWorker).Assembly.Location; File.Copy(assembly1, Path.Combine(assemblyScanningFolder, Path.GetFileName(assembly1)), true); File.Copy(assembly2, Path.Combine(assemblyScanningFolder, Path.GetFileName(assembly2)), true); } private PluginGraph theGraph; private string assemblyScanningFolder; private void Scan(Action<AssemblyScanner> action) { var scanner = new AssemblyScanner(); action(scanner); theGraph = new PluginGraph(); scanner.ExcludeNamespaceContainingType<ScanningRegistry>(); scanner.ScanForAll(theGraph); theGraph.Log.AssertFailures(); } private void shouldHaveFamily<T>() { theGraph.PluginFamilies.Contains(typeof (T)).ShouldBeTrue(); } private void shouldNotHaveFamily<T>() { theGraph.PluginFamilies.Contains(typeof (T)).ShouldBeFalse(); } private void shouldHaveFamilyWithSameName<T>() { // The Types may not be "Equal" if their assemblies were loaded 
in different load contexts (.LoadFrom) // so we will consider them equal if their names match. theGraph.PluginFamilies.Any(family => family.PluginType.FullName == typeof (T).FullName).ShouldBeTrue(); } private void shouldNotHaveFamilyWithSameName<T>() { theGraph.PluginFamilies.Any(family => family.PluginType.FullName == typeof (T).FullName).ShouldBeFalse(); } [Test] public void AssemblyScanner_will_scan_for_attributes_by_default() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); }); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void is_in_namespace() { GetType().IsInNamespace("blah").ShouldBeFalse(); GetType().IsInNamespace("StructureMap").ShouldBeTrue(); GetType().IsInNamespace("StructureMap.Testing").ShouldBeTrue(); GetType().IsInNamespace("StructureMap.Testing.Graph").ShouldBeTrue(); GetType().IsInNamespace("StructureMap.Testing.Graph.Something").ShouldBeFalse(); } [Test] public void Only_scan_for_registries_ignores_attributes() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.IgnoreStructureMapAttributes(); }); shouldNotHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void scan_all_assemblies_in_a_folder() { Scan(x => x.AssembliesFromPath(assemblyScanningFolder)); shouldHaveFamilyWithSameName<IInterfaceInWidget5>(); shouldHaveFamilyWithSameName<IWorker>(); } [Test, Explicit] public void scan_all_assemblies_in_application_base_directory() { Scan(x => x.AssembliesFromApplicationBaseDirectory()); shouldHaveFamilyWithSameName<IInterfaceInWidget5>(); shouldHaveFamilyWithSameName<IWorker>(); } [Test] public void scan_but_ignore_registries_by_default() { Scan(x => { x.TheCallingAssembly(); }); TestingRegistry.WasUsed.ShouldBeFalse(); } [Test] public void scan_specific_assemblies_in_a_folder() { string assemblyToSpecificallyExclude = typeof (IWorker).Assembly.GetName().Name; Scan( x => x.AssembliesFromPath(assemblyScanningFolder, asm => 
asm.GetName().Name != assemblyToSpecificallyExclude)); shouldHaveFamilyWithSameName<IInterfaceInWidget5>(); shouldNotHaveFamilyWithSameName<IWorker>(); } [Test] public void scan_specific_assemblies_in_application_base_directory() { string assemblyToSpecificallyExclude = typeof (IWorker).Assembly.GetName().Name; Scan( x => x.AssembliesFromPath(assemblyScanningFolder, asm => asm.GetName().Name != assemblyToSpecificallyExclude)); shouldHaveFamilyWithSameName<IInterfaceInWidget5>(); shouldNotHaveFamilyWithSameName<IWorker>(); } [Test] public void Search_for_registries_when_explicitly_told() { Scan(x => { x.TheCallingAssembly(); x.LookForRegistries(); }); TestingRegistry.WasUsed.ShouldBeTrue(); } [Test] public void test_the_family_attribute_scanner() { var scanner = new FamilyAttributeScanner(); var graph = new PluginGraph(); var registry = new Registry(); scanner.Process(typeof (ITypeThatHasAttributeButIsNotInRegistry), registry); registry.ConfigurePluginGraph(graph); graph.PluginFamilies.Contains(typeof (ITypeThatHasAttributeButIsNotInRegistry)).ShouldBeTrue(); graph = new PluginGraph(); registry = new Registry(); scanner.Process(GetType(), registry); registry.ConfigurePluginGraph(graph); graph.PluginFamilies.Contains(GetType()).ShouldBeFalse(); } [Test] public void use_a_dual_exclude() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.Exclude(type => type == typeof (ITypeThatHasAttributeButIsNotInRegistry)); x.Exclude(type => type == typeof (IInterfaceInWidget5)); }); shouldNotHaveFamily<IInterfaceInWidget5>(); shouldNotHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void use_a_dual_exclude2() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.Exclude(type => type == typeof (ITypeThatHasAttributeButIsNotInRegistry)); x.Exclude(type => type == GetType()); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldNotHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public 
void use_a_single_exclude() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.Exclude(type => type == typeof (ITypeThatHasAttributeButIsNotInRegistry)); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldNotHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void use_a_single_exclude_of_type() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.ExcludeType<ITypeThatHasAttributeButIsNotInRegistry>(); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldNotHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void use_a_single_exclude2() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.ExcludeNamespace("StructureMap.Testing.Widget5"); }); shouldNotHaveFamily<IInterfaceInWidget5>(); shouldNotHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void use_a_single_exclude3() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.ExcludeNamespaceContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); }); shouldNotHaveFamily<IInterfaceInWidget5>(); shouldNotHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void Use_a_single_include_predicate() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.Include(type => type == typeof (ITypeThatHasAttributeButIsNotInRegistry)); }); shouldNotHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void Use_a_single_include_predicate_2() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); Scan(x => { 
x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.IncludeNamespace(typeof (ITypeThatHasAttributeButIsNotInRegistry).Namespace); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void Use_a_single_include_predicate_3() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.IncludeNamespaceContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void use_two_predicates_for_includes() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.Include(type => type == typeof (ITypeThatHasAttributeButIsNotInRegistry)); x.Include(type => type == typeof (IInterfaceInWidget5)); }); shouldHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } [Test] public void use_two_predicates_for_includes2() { Scan(x => { x.AssemblyContainingType<ITypeThatHasAttributeButIsNotInRegistry>(); x.Include(type => type == typeof (ITypeThatHasAttributeButIsNotInRegistry)); x.Include(type => type == GetType()); }); shouldNotHaveFamily<IInterfaceInWidget5>(); shouldHaveFamily<ITypeThatHasAttributeButIsNotInRegistry>(); } } public interface IController { } public class AddressController : IController { } public class SiteController : IController { } [TestFixture] public class when_attaching_types_with_naming_pattern { #region Setup/Teardown [SetUp] public void SetUp() { container = new Container(x => { x.Scan(o => { o.TheCallingAssembly(); o.AddAllTypesOf<IController>().NameBy(type => type.Name.Replace("Controller", "")); }); }); } #endregion private IContainer container; [Test] public void 
can_find_objects_later_by_name() { container.GetInstance<IController>("Address") .ShouldBeOfType<AddressController>(); container.GetInstance<IController>("Site") .ShouldBeOfType<SiteController>(); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Globalization;

namespace System.Collections.Immutable
{
    /// <content>
    /// Contains the inner <see cref="ImmutableDictionary{TKey, TValue}.HashBucket"/> struct.
    /// </content>
    public partial class ImmutableDictionary<TKey, TValue>
    {
        /// <summary>
        /// Contains all the key/values in the collection that hash to the same value.
        /// </summary>
        internal readonly struct HashBucket : IEnumerable<KeyValuePair<TKey, TValue>>
        {
            /// <summary>
            /// One of the values in this bucket.
            /// </summary>
            private readonly KeyValuePair<TKey, TValue> _firstValue;

            /// <summary>
            /// Any other elements that hash to the same value.
            /// </summary>
            /// <value>
            /// This is null if and only if the entire bucket is empty (including <see cref="_firstValue"/>).
            /// It's empty if <see cref="_firstValue"/> has an element but no additional elements.
            /// </value>
            /// <remarks>
            /// The null-vs-empty distinction is the struct's "is empty" sentinel: the
            /// parameterless struct default leaves this field null, while the private
            /// constructor always coalesces it to a non-null (possibly empty) node.
            /// <see cref="IsEmpty"/> relies on exactly this invariant.
            /// </remarks>
            private readonly ImmutableList<KeyValuePair<TKey, TValue>>.Node _additionalElements;

            /// <summary>
            /// Initializes a new instance of the <see cref="ImmutableDictionary{TKey, TValue}.HashBucket"/> struct.
            /// </summary>
            /// <param name="firstElement">The first element.</param>
            /// <param name="additionalElements">The additional elements.</param>
            private HashBucket(KeyValuePair<TKey, TValue> firstElement, ImmutableList<KeyValuePair<TKey, TValue>>.Node additionalElements = null)
            {
                _firstValue = firstElement;
                // Never store null here: a constructed bucket is always non-empty.
                _additionalElements = additionalElements ?? ImmutableList<KeyValuePair<TKey, TValue>>.Node.EmptyNode;
            }

            /// <summary>
            /// Gets a value indicating whether this instance is empty.
            /// </summary>
            /// <value>
            /// <c>true</c> if this instance is empty; otherwise, <c>false</c>.
            /// </value>
            internal bool IsEmpty
            {
                // A null node can only come from default(HashBucket); see _additionalElements remarks.
                get { return _additionalElements == null; }
            }

            /// <summary>
            /// Gets the first value in this bucket.
            /// </summary>
            /// <exception cref="InvalidOperationException">Thrown when the bucket is empty.</exception>
            internal KeyValuePair<TKey, TValue> FirstValue
            {
                get
                {
                    if (this.IsEmpty)
                    {
                        throw new InvalidOperationException();
                    }

                    return _firstValue;
                }
            }

            /// <summary>
            /// Gets the list of additional (hash collision) elements.
            /// </summary>
            internal ImmutableList<KeyValuePair<TKey, TValue>>.Node AdditionalElements
            {
                get { return _additionalElements; }
            }

            /// <summary>
            /// Returns an enumerator that iterates through the collection.
            /// </summary>
            public Enumerator GetEnumerator()
            {
                return new Enumerator(this);
            }

            /// <summary>
            /// Returns an enumerator that iterates through the collection.
            /// </summary>
            /// <returns>
            /// A <see cref="IEnumerator{T}"/> that can be used to iterate through the collection.
            /// </returns>
            IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator()
            {
                return this.GetEnumerator();
            }

            /// <summary>
            /// Returns an enumerator that iterates through a collection.
            /// </summary>
            /// <returns>
            /// An <see cref="IEnumerator"/> object that can be used to iterate through the collection.
            /// </returns>
            IEnumerator IEnumerable.GetEnumerator()
            {
                return this.GetEnumerator();
            }

            /// <summary>
            /// Throws an exception to catch any errors in comparing <see cref="HashBucket"/> instances.
            /// </summary>
            public override bool Equals(object obj)
            {
                // This should never be called, as hash buckets don't know how to equate themselves.
                throw new NotSupportedException();
            }

            /// <summary>
            /// Throws an exception to catch any errors in comparing <see cref="HashBucket"/> instances.
            /// </summary>
            public override int GetHashCode()
            {
                // This should never be called, as hash buckets don't know how to hash themselves.
                throw new NotSupportedException();
            }

            /// <summary>
            /// Adds the specified key.
            /// </summary>
            /// <param name="key">The key to add.</param>
            /// <param name="value">The value to add.</param>
            /// <param name="keyOnlyComparer">The key comparer.</param>
            /// <param name="valueComparer">The value comparer.</param>
            /// <param name="behavior">The intended behavior for certain cases that may come up during the operation.</param>
            /// <param name="result">A description of the effect this operation had on the bucket.</param>
            /// <returns>A new <see cref="HashBucket"/> that contains the added value and any values already held by this <see cref="HashBucket"/>.</returns>
            /// <exception cref="ArgumentException">
            /// Thrown on a key collision when <paramref name="behavior"/> is
            /// <see cref="KeyCollisionBehavior.ThrowAlways"/>, or is
            /// <see cref="KeyCollisionBehavior.ThrowIfValueDifferent"/> and the stored value differs.
            /// </exception>
            internal HashBucket Add(TKey key, TValue value, IEqualityComparer<KeyValuePair<TKey, TValue>> keyOnlyComparer, IEqualityComparer<TValue> valueComparer, KeyCollisionBehavior behavior, out OperationResult result)
            {
                var kv = new KeyValuePair<TKey, TValue>(key, value);
                if (this.IsEmpty)
                {
                    result = OperationResult.SizeChanged;
                    return new HashBucket(kv);
                }

                // Collision against the slot stored inline in the struct.
                if (keyOnlyComparer.Equals(kv, _firstValue))
                {
                    switch (behavior)
                    {
                        case KeyCollisionBehavior.SetValue:
                            result = OperationResult.AppliedWithoutSizeChange;
                            return new HashBucket(kv, _additionalElements);
                        case KeyCollisionBehavior.Skip:
                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowIfValueDifferent:
                            if (!valueComparer.Equals(_firstValue.Value, value))
                            {
                                throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                            }

                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowAlways:
                            throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                        default:
                            throw new InvalidOperationException(); // unreachable
                    }
                }

                // Collision (or not) against the overflow list of same-hash entries.
                int keyCollisionIndex = _additionalElements.IndexOf(kv, keyOnlyComparer);
                if (keyCollisionIndex < 0)
                {
                    result = OperationResult.SizeChanged;
                    return new HashBucket(_firstValue, _additionalElements.Add(kv));
                }
                else
                {
                    switch (behavior)
                    {
                        case KeyCollisionBehavior.SetValue:
                            result = OperationResult.AppliedWithoutSizeChange;
                            return new HashBucket(_firstValue, _additionalElements.ReplaceAt(keyCollisionIndex, kv));
                        case KeyCollisionBehavior.Skip:
                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowIfValueDifferent:
#if !NETSTANDARD10
                            // ref readonly avoids copying the pair out of the list node.
                            ref readonly var existingEntry = ref _additionalElements.ItemRef(keyCollisionIndex);
#else
                            var existingEntry = _additionalElements[keyCollisionIndex];
#endif
                            if (!valueComparer.Equals(existingEntry.Value, value))
                            {
                                throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                            }

                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowAlways:
                            throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                        default:
                            throw new InvalidOperationException(); // unreachable
                    }
                }
            }

            /// <summary>
            /// Removes the specified value if it exists in the collection.
            /// </summary>
            /// <param name="key">The key to remove.</param>
            /// <param name="keyOnlyComparer">The equality comparer.</param>
            /// <param name="result">A description of the effect this operation had on the bucket.</param>
            /// <returns>A new <see cref="HashBucket"/> that does not contain the removed value but retains any other values already held by this <see cref="HashBucket"/>.</returns>
            internal HashBucket Remove(TKey key, IEqualityComparer<KeyValuePair<TKey, TValue>> keyOnlyComparer, out OperationResult result)
            {
                if (this.IsEmpty)
                {
                    result = OperationResult.NoChangeRequired;
                    return this;
                }

                // The comparer only inspects keys, so a default value placeholder is fine here.
                var kv = new KeyValuePair<TKey, TValue>(key, default(TValue));
                if (keyOnlyComparer.Equals(_firstValue, kv))
                {
                    if (_additionalElements.IsEmpty)
                    {
                        result = OperationResult.SizeChanged;
                        return new HashBucket();
                    }
                    else
                    {
                        // We can promote any element from the list into the first position, but it's most efficient
                        // to remove the root node in the binary tree that implements the list.
                        int indexOfRootNode = _additionalElements.Left.Count;
                        result = OperationResult.SizeChanged;
                        return new HashBucket(_additionalElements.Key, _additionalElements.RemoveAt(indexOfRootNode));
                    }
                }

                int index = _additionalElements.IndexOf(kv, keyOnlyComparer);
                if (index < 0)
                {
                    result = OperationResult.NoChangeRequired;
                    return this;
                }
                else
                {
                    result = OperationResult.SizeChanged;
                    return new HashBucket(_firstValue, _additionalElements.RemoveAt(index));
                }
            }

            /// <summary>
            /// Gets the value for the given key in the collection if one exists.
            /// </summary>
            /// <param name="key">The key to search for.</param>
            /// <param name="comparers">The comparers.</param>
            /// <param name="value">The value for the given key.</param>
            /// <returns>A value indicating whether the key was found.</returns>
            internal bool TryGetValue(TKey key, Comparers comparers, out TValue value)
            {
                if (this.IsEmpty)
                {
                    value = default(TValue);
                    return false;
                }

                if (comparers.KeyComparer.Equals(_firstValue.Key, key))
                {
                    value = _firstValue.Value;
                    return true;
                }

                var kv = new KeyValuePair<TKey, TValue>(key, default(TValue));
                var index = _additionalElements.IndexOf(kv, comparers.KeyOnlyComparer);
                if (index < 0)
                {
                    value = default(TValue);
                    return false;
                }

#if !NETSTANDARD10
                value = _additionalElements.ItemRef(index).Value;
#else
                value = _additionalElements[index].Value;
#endif
                return true;
            }

            /// <summary>
            /// Searches the dictionary for a given key and returns the equal key it finds, if any.
            /// </summary>
            /// <param name="equalKey">The key to search for.</param>
            /// <param name="comparers">The comparers.</param>
            /// <param name="actualKey">The key from the dictionary that the search found, or <paramref name="equalKey"/> if the search yielded no match.</param>
            /// <returns>A value indicating whether the search was successful.</returns>
            /// <remarks>
            /// This can be useful when you want to reuse a previously stored reference instead of
            /// a newly constructed one (so that more sharing of references can occur) or to look up
            /// the canonical value, or a value that has more complete data than the value you currently have,
            /// although their comparer functions indicate they are equal.
            /// </remarks>
            internal bool TryGetKey(TKey equalKey, Comparers comparers, out TKey actualKey)
            {
                if (this.IsEmpty)
                {
                    actualKey = equalKey;
                    return false;
                }

                if (comparers.KeyComparer.Equals(_firstValue.Key, equalKey))
                {
                    actualKey = _firstValue.Key;
                    return true;
                }

                var kv = new KeyValuePair<TKey, TValue>(equalKey, default(TValue));
                var index = _additionalElements.IndexOf(kv, comparers.KeyOnlyComparer);
                if (index < 0)
                {
                    actualKey = equalKey;
                    return false;
                }

#if !NETSTANDARD10
                actualKey = _additionalElements.ItemRef(index).Key;
#else
                actualKey = _additionalElements[index].Key;
#endif
                return true;
            }

            /// <summary>
            /// Freezes this instance so that any further mutations require new memory allocations.
            /// </summary>
            internal void Freeze()
            {
                // Null when this is default(HashBucket); nothing to freeze in that case.
                if (_additionalElements != null)
                {
                    _additionalElements.Freeze();
                }
            }

            /// <summary>
            /// Enumerates all the elements in this instance.
            /// </summary>
            /// <remarks>
            /// Implemented as a small state machine: yield <see cref="_firstValue"/> first,
            /// then delegate to an inner list enumerator for <see cref="_additionalElements"/>.
            /// </remarks>
            internal struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>, IDisposable
            {
                /// <summary>
                /// The bucket being enumerated.
                /// </summary>
                private readonly HashBucket _bucket;

                /// <summary>
                /// The current position of this enumerator.
                /// </summary>
                private Position _currentPosition;

                /// <summary>
                /// The enumerator that represents the current position over the <see cref="_additionalElements"/> of the <see cref="HashBucket"/>.
                /// </summary>
                private ImmutableList<KeyValuePair<TKey, TValue>>.Enumerator _additionalEnumerator;

                /// <summary>
                /// Initializes a new instance of the <see cref="ImmutableDictionary{TKey, TValue}.HashBucket.Enumerator"/> struct.
                /// </summary>
                /// <param name="bucket">The bucket.</param>
                internal Enumerator(HashBucket bucket)
                {
                    _bucket = bucket;
                    _currentPosition = Position.BeforeFirst;
                    _additionalEnumerator = default(ImmutableList<KeyValuePair<TKey, TValue>>.Enumerator);
                }

                /// <summary>
                /// Describes the positions the enumerator state machine may be in.
                /// </summary>
                private enum Position
                {
                    /// <summary>
                    /// The first element has not yet been moved to.
                    /// </summary>
                    BeforeFirst,

                    /// <summary>
                    /// We're at the <see cref="_firstValue"/> of the containing bucket.
                    /// </summary>
                    First,

                    /// <summary>
                    /// We're enumerating the <see cref="_additionalElements"/> in the bucket.
                    /// </summary>
                    Additional,

                    /// <summary>
                    /// The end of enumeration has been reached.
                    /// </summary>
                    End,
                }

                /// <summary>
                /// Gets the current element.
                /// </summary>
                object IEnumerator.Current
                {
                    get { return this.Current; }
                }

                /// <summary>
                /// Gets the current element.
                /// </summary>
                /// <exception cref="InvalidOperationException">Thrown before the first or after the last <see cref="MoveNext"/>.</exception>
                public KeyValuePair<TKey, TValue> Current
                {
                    get
                    {
                        switch (_currentPosition)
                        {
                            case Position.First:
                                return _bucket._firstValue;
                            case Position.Additional:
                                return _additionalEnumerator.Current;
                            default:
                                throw new InvalidOperationException();
                        }
                    }
                }

                /// <summary>
                /// Advances the enumerator to the next element of the collection.
                /// </summary>
                /// <returns>
                /// true if the enumerator was successfully advanced to the next element; false if the enumerator has passed the end of the collection.
                /// </returns>
                /// <exception cref="InvalidOperationException">The collection was modified after the enumerator was created. </exception>
                public bool MoveNext()
                {
                    if (_bucket.IsEmpty)
                    {
                        _currentPosition = Position.End;
                        return false;
                    }

                    switch (_currentPosition)
                    {
                        case Position.BeforeFirst:
                            _currentPosition = Position.First;
                            return true;
                        case Position.First:
                            if (_bucket._additionalElements.IsEmpty)
                            {
                                _currentPosition = Position.End;
                                return false;
                            }

                            // Lazily create the list enumerator only once we actually reach the overflow entries.
                            _currentPosition = Position.Additional;
                            _additionalEnumerator = new ImmutableList<KeyValuePair<TKey, TValue>>.Enumerator(_bucket._additionalElements);
                            return _additionalEnumerator.MoveNext();
                        case Position.Additional:
                            return _additionalEnumerator.MoveNext();
                        case Position.End:
                            return false;
                        default:
                            throw new InvalidOperationException();
                    }
                }

                /// <summary>
                /// Sets the enumerator to its initial position, which is before the first element in the collection.
                /// </summary>
                /// <exception cref="InvalidOperationException">The collection was modified after the enumerator was created. </exception>
                public void Reset()
                {
                    // We can safely dispose of the additional enumerator because if the client reuses this enumerator
                    // we'll acquire a new one anyway (and so for that matter we should be sure to dispose of this).
                    _additionalEnumerator.Dispose();
                    _currentPosition = Position.BeforeFirst;
                }

                /// <summary>
                /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
                /// </summary>
                public void Dispose()
                {
                    _additionalEnumerator.Dispose();
                }
            }
        }
    }
}
namespace AspNetWebApi.Areas.HelpPage
{
    using System;
    using System.Collections.Generic;
    using System.Collections.ObjectModel;
    using System.Diagnostics.CodeAnalysis;
    using System.Globalization;
    using System.Linq;
    using System.Net.Http.Headers;
    using System.Web.Http;
    using System.Web.Http.Description;
    using AspNetWebApi.Areas.HelpPage.Models;

    /// <summary>
    /// Extension methods on <see cref="HttpConfiguration"/> that configure the help page:
    /// registering a documentation provider, seeding sample requests/responses, and
    /// building (and caching) the per-API help-page model.
    /// </summary>
    public static class HelpPageConfigurationExtensions
    {
        // Prefix for the HttpConfiguration.Properties key under which each generated
        // HelpPageApiModel is cached (suffixed with the ApiDescription's friendly id).
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" acts as a wildcard matching any parameter list for this action.
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily creates and caches a single generator per configuration instance.
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>, or null when no API with the given id exists.
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => string.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                    model = GenerateApiModel(apiDescription, sampleGenerator);
                    // TryAdd: another request may have cached the model concurrently; first writer wins.
                    config.Properties.TryAdd(modelId, model);
                }
            }

            // 'model' remains null when the id matched no ApiDescription.
            return (HelpPageApiModel)model;
        }

        /// <summary>
        /// Builds the help-page model for one API, capturing any sample-generation
        /// failure as an error message rather than letting it propagate.
        /// </summary>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HelpPageSampleGenerator sampleGenerator)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel();
            apiModel.ApiDescription = apiDescription;

            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }

                foreach (var item in sampleGenerator.GetSampleResponses(apiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                apiModel.ErrorMessages.Add(string.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception Message: {0}", e.Message));
            }

            return apiModel;
        }

        /// <summary>
        /// If the sample is an <see cref="InvalidSample"/>, surfaces its error message on the model.
        /// </summary>
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
// Visual Studio Shared Project
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.InteropServices;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;

namespace Microsoft.PythonTools.Infrastructure {
    /// <summary>
    /// Managed wrapper over the Win32 TaskDialogIndirect API, integrated with the
    /// Visual Studio shell: the dialog is owned by the shell's dialog-owner HWND
    /// and modeless state is disabled/restored around the modal call.
    /// Custom push buttons are assigned IDs starting at 1000 and radio buttons
    /// IDs starting at 2000 (see GetButtonId/GetRadioId).
    /// </summary>
    public sealed class TaskDialog {
        private readonly IServiceProvider _provider;
        private readonly List<TaskDialogButton> _buttons;
        private readonly List<TaskDialogButton> _radioButtons;

        public TaskDialog(IServiceProvider provider) {
            _provider = provider;
            _buttons = new List<TaskDialogButton>();
            _radioButtons = new List<TaskDialogButton>();
            // Custom buttons render as command links by default.
            UseCommandLinks = true;
        }

        /// <summary>
        /// Creates a dialog pre-populated for reporting <paramref name="exception"/>.
        /// The full exception text goes into the expandable details area; when
        /// <paramref name="issueTrackerUrl"/> is given, the message includes an
        /// "issuetracker" hyperlink that opens that URL.
        /// </summary>
        public static TaskDialog ForException(
            IServiceProvider provider,
            Exception exception,
            string message = null,
            string issueTrackerUrl = null
        ) {
            string suffix = string.IsNullOrEmpty(issueTrackerUrl) ?
                "Please press Ctrl+C to copy the contents of this dialog and report this error." :
                "Please press Ctrl+C to copy the contents of this dialog and report this error to our <a href=\"issuetracker\">issue tracker</a>.";
            if (string.IsNullOrEmpty(message)) {
                message = suffix;
            } else {
                message += Environment.NewLine + Environment.NewLine + suffix;
            }

            var td = new TaskDialog(provider) {
                MainInstruction = "An unexpected error occurred",
                Content = message,
                EnableHyperlinks = true,
                CollapsedControlText = "Show &details",
                ExpandedControlText = "Hide &details",
                // Markdown-style code fence around the exception text so it can be
                // pasted into an issue report as-is.
                ExpandedInformation = "```{0}{1}{0}```".FormatUI(Environment.NewLine, exception)
            };
            td.Buttons.Add(TaskDialogButton.Close);
            if (!string.IsNullOrEmpty(issueTrackerUrl)) {
                td.HyperlinkClicked += (s, e) => {
                    if (e.Url == "issuetracker") {
                        Process.Start(issueTrackerUrl);
                    }
                };
            }

            return td;
        }

        /// <summary>
        /// Invokes <paramref name="action"/> (passing the 1-based attempt number),
        /// showing a retry/cancel dialog each time it throws. Critical exceptions,
        /// and exceptions rejected by <paramref name="canRetry"/>, are rethrown.
        /// Throws <see cref="OperationCanceledException"/> if the user does not
        /// choose retry.
        /// </summary>
        public static void CallWithRetry(
            Action<int> action,
            IServiceProvider provider,
            string title,
            string failedText,
            string expandControlText,
            string retryButtonText,
            string cancelButtonText,
            Func<Exception, bool> canRetry = null
        ) {
            for (int retryCount = 1; ; ++retryCount) {
                try {
                    action(retryCount);
                    return;
                } catch (Exception ex) {
                    if (ex.IsCriticalException()) {
                        throw;
                    }
                    if (canRetry != null && !canRetry(ex)) {
                        throw;
                    }

                    var td = new TaskDialog(provider) {
                        Title = title,
                        MainInstruction = failedText,
                        Content = ex.Message,
                        CollapsedControlText = expandControlText,
                        ExpandedControlText = expandControlText,
                        ExpandedInformation = ex.ToString()
                    };
                    var retry = new TaskDialogButton(retryButtonText);
                    td.Buttons.Add(retry);
                    td.Buttons.Add(new TaskDialogButton(cancelButtonText));
                    var button = td.ShowModal();
                    // Anything other than an explicit retry (including closing the
                    // dialog) cancels the operation.
                    if (button != retry) {
                        throw new OperationCanceledException();
                    }
                }
            }
        }

        /// <summary>
        /// Func-returning variant of <see cref="CallWithRetry"/>.
        /// NOTE(review): the cancellation condition differs subtly from the Action
        /// overload — here only an explicit cancel click throws, while the overload
        /// above cancels on anything that is not retry. Confirm whether this
        /// asymmetry is intentional.
        /// </summary>
        public static T CallWithRetry<T>(
            Func<int, T> func,
            IServiceProvider provider,
            string title,
            string failedText,
            string expandControlText,
            string retryButtonText,
            string cancelButtonText,
            Func<Exception, bool> canRetry = null
        ) {
            for (int retryCount = 1; ; ++retryCount) {
                try {
                    return func(retryCount);
                } catch (Exception ex) {
                    if (ex.IsCriticalException()) {
                        throw;
                    }
                    if (canRetry != null && !canRetry(ex)) {
                        throw;
                    }

                    var td = new TaskDialog(provider) {
                        Title = title,
                        MainInstruction = failedText,
                        Content = ex.Message,
                        CollapsedControlText = expandControlText,
                        ExpandedControlText = expandControlText,
                        ExpandedInformation = ex.ToString()
                    };
                    var retry = new TaskDialogButton(retryButtonText);
                    var cancel = new TaskDialogButton(cancelButtonText);
                    td.Buttons.Add(retry);
                    td.Buttons.Add(cancel);
                    var button = td.ShowModal();
                    if (button == cancel) {
                        throw new OperationCanceledException();
                    }
                }
            }
        }

        /// <summary>
        /// Shows the dialog modally over the VS shell's dialog-owner window and
        /// blocks until it is dismissed. Populates SelectedButton,
        /// SelectedRadioButton and SelectedVerified from the result, and returns
        /// the selected button (null if the ID maps to no known button).
        /// </summary>
        public TaskDialogButton ShowModal() {
            var config = new NativeMethods.TASKDIALOGCONFIG();
            config.cbSize = (uint)Marshal.SizeOf(typeof(NativeMethods.TASKDIALOGCONFIG));
            config.pButtons = IntPtr.Zero;
            config.pRadioButtons = IntPtr.Zero;

            var uiShell = (IVsUIShell)_provider.GetService(typeof(SVsUIShell));
            uiShell.GetDialogOwnerHwnd(out config.hwndParent);
            // Disable modeless windows for the duration of the dialog; re-enabled
            // in the finally block below.
            uiShell.EnableModeless(0);

            // Split Buttons into native "common" buttons (OK/Cancel/...) expressed
            // as flags, and custom buttons that need their own TASKDIALOG_BUTTON
            // entries in unmanaged memory.
            var customButtons = new List<TaskDialogButton>();
            config.dwCommonButtons = 0;
            foreach (var button in Buttons) {
                var flag = GetButtonFlag(button);
                if (flag != 0) {
                    config.dwCommonButtons |= flag;
                } else {
                    customButtons.Add(button);
                }
            }

            try {
                if (customButtons.Any()) {
                    config.cButtons = (uint)customButtons.Count;
                    // Unmanaged array of TASKDIALOG_BUTTON; freed in finally.
                    var ptr = config.pButtons = Marshal.AllocHGlobal(customButtons.Count * Marshal.SizeOf(typeof(NativeMethods.TASKDIALOG_BUTTON)));
                    for (int i = 0; i < customButtons.Count; ++i) {
                        NativeMethods.TASKDIALOG_BUTTON data;
                        // Custom button IDs are 1000 + index into customButtons.
                        data.nButtonID = GetButtonId(null, null, i);
                        if (string.IsNullOrEmpty(customButtons[i].Subtext)) {
                            data.pszButtonText = customButtons[i].Text;
                        } else {
                            // Command links show text on the first line and the
                            // subtext after the embedded newline.
                            data.pszButtonText = string.Format("{0}\n{1}", customButtons[i].Text, customButtons[i].Subtext);
                        }
                        Marshal.StructureToPtr(data, ptr + i * Marshal.SizeOf(typeof(NativeMethods.TASKDIALOG_BUTTON)), false);
                    }
                } else {
                    config.cButtons = 0;
                    config.pButtons = IntPtr.Zero;
                }

                if (_buttons.Any() && SelectedButton != null) {
                    config.nDefaultButton = GetButtonId(SelectedButton, customButtons);
                } else {
                    config.nDefaultButton = 0;
                }

                if (_radioButtons.Any()) {
                    config.cRadioButtons = (uint)_radioButtons.Count;
                    // Unmanaged array for radio buttons; freed in finally.
                    var ptr = config.pRadioButtons = Marshal.AllocHGlobal(_radioButtons.Count * Marshal.SizeOf(typeof(NativeMethods.TASKDIALOG_BUTTON)));
                    for (int i = 0; i < _radioButtons.Count; ++i) {
                        NativeMethods.TASKDIALOG_BUTTON data;
                        // Radio button IDs are 2000 + index.
                        data.nButtonID = GetRadioId(null, null, i);
                        data.pszButtonText = _radioButtons[i].Text;
                        Marshal.StructureToPtr(data, ptr + i * Marshal.SizeOf(typeof(NativeMethods.TASKDIALOG_BUTTON)), false);
                    }

                    if (SelectedRadioButton != null) {
                        config.nDefaultRadioButton = GetRadioId(SelectedRadioButton, _radioButtons);
                    } else {
                        config.nDefaultRadioButton = 0;
                    }
                }

                config.pszWindowTitle = Title;
                config.pszMainInstruction = MainInstruction;
                config.pszContent = Content;
                config.pszExpandedInformation = ExpandedInformation;
                config.pszExpandedControlText = ExpandedControlText;
                config.pszCollapsedControlText = CollapsedControlText;
                config.pszFooter = Footer;
                config.pszVerificationText = VerificationText;
                config.pfCallback = Callback;
                config.hMainIcon = (IntPtr)GetIconResource(MainIcon);
                config.hFooterIcon = (IntPtr)GetIconResource(FooterIcon);

                if (Width.HasValue) {
                    config.cxWidth = (uint)Width.Value;
                } else {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_SIZE_TO_CONTENT;
                }
                if (EnableHyperlinks) {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_ENABLE_HYPERLINKS;
                }
                if (AllowCancellation) {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_ALLOW_DIALOG_CANCELLATION;
                }
                if (UseCommandLinks && config.cButtons > 0) {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_USE_COMMAND_LINKS;
                }
                if (!ShowExpandedInformationInContent) {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_EXPAND_FOOTER_AREA;
                }
                if (ExpandedByDefault) {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_EXPANDED_BY_DEFAULT;
                }
                if (SelectedVerified) {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_VERIFICATION_FLAG_CHECKED;
                }
                if (CanMinimize) {
                    config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_CAN_BE_MINIMIZED;
                }

                config.dwFlags |= NativeMethods.TASKDIALOG_FLAGS.TDF_POSITION_RELATIVE_TO_WINDOW;

                int selectedButton, selectedRadioButton;
                bool verified;
                ErrorHandler.ThrowOnFailure(NativeMethods.TaskDialogIndirect(
                    ref config,
                    out selectedButton,
                    out selectedRadioButton,
                    out verified
                ));

                SelectedButton = GetButton(selectedButton, customButtons);
                SelectedRadioButton = GetRadio(selectedRadioButton, _radioButtons);
                SelectedVerified = verified;
            } finally {
                uiShell.EnableModeless(1);

                // Release the unmanaged button arrays, destroying each marshaled
                // struct first so the LPWStr button text is freed.
                if (config.pButtons != IntPtr.Zero) {
                    for (int i = 0; i < customButtons.Count; ++i) {
                        Marshal.DestroyStructure(config.pButtons + i * Marshal.SizeOf(typeof(NativeMethods.TASKDIALOG_BUTTON)), typeof(NativeMethods.TASKDIALOG_BUTTON));
                    }
                    Marshal.FreeHGlobal(config.pButtons);
                }

                if (config.pRadioButtons != IntPtr.Zero) {
                    for (int i = 0; i < _radioButtons.Count; ++i) {
                        Marshal.DestroyStructure(config.pRadioButtons + i * Marshal.SizeOf(typeof(NativeMethods.TASKDIALOG_BUTTON)), typeof(NativeMethods.TASKDIALOG_BUTTON));
                    }
                    Marshal.FreeHGlobal(config.pRadioButtons);
                }
            }

            return SelectedButton;
        }

        /// <summary>
        /// Native task dialog callback. Marks elevation-required buttons on
        /// creation, dispatches hyperlink clicks to <see cref="HyperlinkClicked"/>
        /// (or opens the URL externally), and converts non-critical exceptions to
        /// an HRESULT rather than letting them cross the native boundary.
        /// </summary>
        private int Callback(IntPtr hwnd, uint uNotification, UIntPtr wParam, IntPtr lParam, IntPtr lpRefData) {
            try {
                switch ((NativeMethods.TASKDIALOG_NOTIFICATION)uNotification) {
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_CREATED:
                        foreach (var btn in _buttons.Where(b => b.ElevationRequired)) {
                            // NOTE(review): for custom buttons this computes
                            // 1000 + index into _buttons (all buttons), while the
                            // dialog was created with IDs of 1000 + index into
                            // customButtons only. If common and custom buttons are
                            // mixed, these IDs can disagree — verify.
                            NativeMethods.SendMessage(
                                hwnd,
                                (int)NativeMethods.TASKDIALOG_MESSAGE.TDM_SET_BUTTON_ELEVATION_REQUIRED_STATE,
                                new IntPtr(GetButtonId(btn, _buttons)),
                                new IntPtr(1)
                            );
                        }
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_NAVIGATED:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_BUTTON_CLICKED:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_HYPERLINK_CLICKED:
                        var url = Marshal.PtrToStringUni(lParam);
                        var hevt = HyperlinkClicked;
                        if (hevt != null) {
                            hevt(this, new TaskDialogHyperlinkClickedEventArgs(url));
                        } else {
                            // No handler: fall back to the system default handler
                            // for the URL (external browser).
                            Process.Start(new ProcessStartInfo {
                                FileName = url,
                                UseShellExecute = true
                            });
                        }
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_TIMER:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_DESTROYED:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_RADIO_BUTTON_CLICKED:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_DIALOG_CONSTRUCTED:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_VERIFICATION_CLICKED:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_HELP:
                        break;
                    case NativeMethods.TASKDIALOG_NOTIFICATION.TDN_EXPANDO_BUTTON_CLICKED:
                        break;
                    default:
                        break;
                }
                return VSConstants.S_OK;
            } catch (Exception ex) {
                if (ex.IsCriticalException()) {
                    throw;
                }
                return Marshal.GetHRForException(ex);
            }
        }

        public string Title { get; set; }
        public string MainInstruction { get; set; }
        public string Content { get; set; }
        public string VerificationText { get; set; }
        public string ExpandedInformation { get; set; }
        public string Footer { get; set; }

        public bool ExpandedByDefault { get; set; }
        public bool ShowExpandedInformationInContent { get; set; }
        public string ExpandedControlText { get; set; }
        public string CollapsedControlText { get; set; }

        public int? Width { get; set; }
        public bool EnableHyperlinks { get; set; }
        public bool AllowCancellation { get; set; }
        public bool UseCommandLinks { get; set; }
        public bool CanMinimize { get; set; }

        public TaskDialogIcon MainIcon { get; set; }
        public TaskDialogIcon FooterIcon { get; set; }

        /// <summary>
        /// Raised when a hyperlink in the dialog is clicked. If no event
        /// handlers are added, the default behavior is to open an external
        /// browser.
        /// </summary>
        public event EventHandler<TaskDialogHyperlinkClickedEventArgs> HyperlinkClicked;

        public List<TaskDialogButton> Buttons {
            get {
                return _buttons;
            }
        }

        public List<TaskDialogButton> RadioButtons {
            get {
                return _radioButtons;
            }
        }

        // Set before ShowModal to choose defaults; updated with the user's
        // selections after ShowModal returns.
        public TaskDialogButton SelectedButton { get; set; }
        public TaskDialogButton SelectedRadioButton { get; set; }
        public bool SelectedVerified { get; set; }

        // Maps the well-known buttons to their native common-button flags;
        // returns 0 for custom buttons.
        private static NativeMethods.TASKDIALOG_COMMON_BUTTON_FLAGS GetButtonFlag(TaskDialogButton button) {
            if (button == TaskDialogButton.OK) {
                return NativeMethods.TASKDIALOG_COMMON_BUTTON_FLAGS.TDCBF_OK_BUTTON;
            } else if (button == TaskDialogButton.Cancel) {
                return NativeMethods.TASKDIALOG_COMMON_BUTTON_FLAGS.TDCBF_CANCEL_BUTTON;
            } else if (button == TaskDialogButton.Yes) {
                return NativeMethods.TASKDIALOG_COMMON_BUTTON_FLAGS.TDCBF_YES_BUTTON;
            } else if (button == TaskDialogButton.No) {
                return NativeMethods.TASKDIALOG_COMMON_BUTTON_FLAGS.TDCBF_NO_BUTTON;
            } else if (button == TaskDialogButton.Retry) {
                return NativeMethods.TASKDIALOG_COMMON_BUTTON_FLAGS.TDCBF_RETRY_BUTTON;
            } else if (button == TaskDialogButton.Close) {
                return NativeMethods.TASKDIALOG_COMMON_BUTTON_FLAGS.TDCBF_CLOSE_BUTTON;
            } else {
                return 0;
            }
        }

        private static NativeMethods.TASKDIALOG_ICON GetIconResource(TaskDialogIcon icon) {
            switch (icon) {
                case TaskDialogIcon.None:
                    return 0;
                case TaskDialogIcon.Error:
                    return NativeMethods.TASKDIALOG_ICON.TD_ERROR_ICON;
                case TaskDialogIcon.Warning:
                    return NativeMethods.TASKDIALOG_ICON.TD_WARNING_ICON;
                case TaskDialogIcon.Information:
                    return NativeMethods.TASKDIALOG_ICON.TD_INFORMATION_ICON;
                case TaskDialogIcon.Shield:
                    return NativeMethods.TASKDIALOG_ICON.TD_SHIELD_ICON;
                default:
                    throw new ArgumentException("Invalid TaskDialogIcon value", "icon");
            }
        }

        // Returns the native button ID: the standard IDOK/IDCANCEL/... values for
        // common buttons, 1000 + index for custom buttons (indexHint wins when
        // supplied), or -1 if the button is unknown.
        private static int GetButtonId(
            TaskDialogButton button,
            IList<TaskDialogButton> customButtons = null,
            int indexHint = -1
        ) {
            if (indexHint >= 0) {
                return indexHint + 1000;
            }

            if (button == TaskDialogButton.OK) {
                return NativeMethods.IDOK;
            } else if (button == TaskDialogButton.Cancel) {
                return NativeMethods.IDCANCEL;
            } else if (button == TaskDialogButton.Yes) {
                return NativeMethods.IDYES;
            } else if (button == TaskDialogButton.No) {
                return NativeMethods.IDNO;
            } else if (button == TaskDialogButton.Retry) {
                return NativeMethods.IDRETRY;
            } else if (button == TaskDialogButton.Close) {
                return NativeMethods.IDCLOSE;
            } else if (customButtons != null) {
                int i = customButtons.IndexOf(button);
                if (i >= 0) {
                    return i + 1000;
                }
            }

            return -1;
        }

        // Inverse of GetButtonId; returns null for an unrecognized ID.
        private static TaskDialogButton GetButton(int id, IList<TaskDialogButton> customButtons = null) {
            switch (id) {
                case NativeMethods.IDOK:
                    return TaskDialogButton.OK;
                case NativeMethods.IDCANCEL:
                    return TaskDialogButton.Cancel;
                case NativeMethods.IDYES:
                    return TaskDialogButton.Yes;
                case NativeMethods.IDNO:
                    return TaskDialogButton.No;
                case NativeMethods.IDRETRY:
                    return TaskDialogButton.Retry;
                case NativeMethods.IDCLOSE:
                    return TaskDialogButton.Close;
            }
            if (customButtons != null && id >= 1000 && id - 1000 < customButtons.Count) {
                return customButtons[id - 1000];
            }
            return null;
        }

        // Radio button IDs are 2000 + index (indexHint wins when supplied).
        private static int GetRadioId(
            TaskDialogButton button,
            IList<TaskDialogButton> buttons,
            int indexHint = -1
        ) {
            if (indexHint >= 0) {
                return indexHint + 2000;
            }
            return buttons.IndexOf(button) + 2000;
        }

        // Inverse of GetRadioId; returns null for an unrecognized ID.
        private static TaskDialogButton GetRadio(int id, IList<TaskDialogButton> buttons) {
            if (id >= 2000 && id - 2000 < buttons.Count) {
                return buttons[id - 2000];
            }
            return null;
        }

        // P/Invoke declarations for comctl32 TaskDialogIndirect and supporting types.
        private static class NativeMethods {
            internal const int IDOK = 1;
            internal const int IDCANCEL = 2;
            internal const int IDABORT = 3;
            internal const int IDRETRY = 4;
            internal const int IDIGNORE = 5;
            internal const int IDYES = 6;
            internal const int IDNO = 7;
            internal const int IDCLOSE = 8;

            internal enum TASKDIALOG_FLAGS {
                TDF_ENABLE_HYPERLINKS = 0x0001,
                TDF_USE_HICON_MAIN = 0x0002,
                TDF_USE_HICON_FOOTER = 0x0004,
                TDF_ALLOW_DIALOG_CANCELLATION = 0x0008,
                TDF_USE_COMMAND_LINKS = 0x0010,
                TDF_USE_COMMAND_LINKS_NO_ICON = 0x0020,
                TDF_EXPAND_FOOTER_AREA = 0x0040,
                TDF_EXPANDED_BY_DEFAULT = 0x0080,
                TDF_VERIFICATION_FLAG_CHECKED = 0x0100,
                TDF_SHOW_PROGRESS_BAR = 0x0200,
                TDF_SHOW_MARQUEE_PROGRESS_BAR = 0x0400,
                TDF_CALLBACK_TIMER = 0x0800,
                TDF_POSITION_RELATIVE_TO_WINDOW = 0x1000,
                TDF_RTL_LAYOUT = 0x2000,
                TDF_NO_DEFAULT_RADIO_BUTTON = 0x4000,
                TDF_CAN_BE_MINIMIZED = 0x8000,
                TDF_SIZE_TO_CONTENT = 0x01000000
            }

            internal enum TASKDIALOG_COMMON_BUTTON_FLAGS {
                TDCBF_OK_BUTTON = 0x0001,
                TDCBF_YES_BUTTON = 0x0002,
                TDCBF_NO_BUTTON = 0x0004,
                TDCBF_CANCEL_BUTTON = 0x0008,
                TDCBF_RETRY_BUTTON = 0x0010,
                TDCBF_CLOSE_BUTTON = 0x0020
            }

            internal enum TASKDIALOG_NOTIFICATION : uint {
                TDN_CREATED = 0,
                TDN_NAVIGATED = 1,
                TDN_BUTTON_CLICKED = 2,            // wParam = Button ID
                TDN_HYPERLINK_CLICKED = 3,         // lParam = (LPCWSTR)pszHREF
                TDN_TIMER = 4,                     // wParam = Milliseconds since dialog created or timer reset
                TDN_DESTROYED = 5,
                TDN_RADIO_BUTTON_CLICKED = 6,      // wParam = Radio Button ID
                TDN_DIALOG_CONSTRUCTED = 7,
                TDN_VERIFICATION_CLICKED = 8,      // wParam = 1 if checkbox checked, 0 if not, lParam is unused and always 0
                TDN_HELP = 9,
                TDN_EXPANDO_BUTTON_CLICKED = 10    // wParam = 0 (dialog is now collapsed), wParam != 0 (dialog is now expanded)
            };

            internal enum TASKDIALOG_ICON : ushort {
                TD_WARNING_ICON = unchecked((ushort)-1),
                TD_ERROR_ICON = unchecked((ushort)-2),
                TD_INFORMATION_ICON = unchecked((ushort)-3),
                TD_SHIELD_ICON = unchecked((ushort)-4)
            }

            const int WM_USER = 0x0400;

            internal enum TASKDIALOG_MESSAGE : int {
                TDM_NAVIGATE_PAGE = WM_USER + 101,
                TDM_CLICK_BUTTON = WM_USER + 102,                       // wParam = Button ID
                TDM_SET_MARQUEE_PROGRESS_BAR = WM_USER + 103,           // wParam = 0 (nonMarque) wParam != 0 (Marquee)
                TDM_SET_PROGRESS_BAR_STATE = WM_USER + 104,             // wParam = new progress state
                TDM_SET_PROGRESS_BAR_RANGE = WM_USER + 105,             // lParam = MAKELPARAM(nMinRange, nMaxRange)
                TDM_SET_PROGRESS_BAR_POS = WM_USER + 106,               // wParam = new position
                TDM_SET_PROGRESS_BAR_MARQUEE = WM_USER + 107,           // wParam = 0 (stop marquee), wParam != 0 (start marquee), lparam = speed (milliseconds between repaints)
                TDM_SET_ELEMENT_TEXT = WM_USER + 108,                   // wParam = element (TASKDIALOG_ELEMENTS), lParam = new element text (LPCWSTR)
                TDM_CLICK_RADIO_BUTTON = WM_USER + 110,                 // wParam = Radio Button ID
                TDM_ENABLE_BUTTON = WM_USER + 111,                      // lParam = 0 (disable), lParam != 0 (enable), wParam = Button ID
                TDM_ENABLE_RADIO_BUTTON = WM_USER + 112,                // lParam = 0 (disable), lParam != 0 (enable), wParam = Radio Button ID
                TDM_CLICK_VERIFICATION = WM_USER + 113,                 // wParam = 0 (unchecked), 1 (checked), lParam = 1 (set key focus)
                TDM_UPDATE_ELEMENT_TEXT = WM_USER + 114,                // wParam = element (TASKDIALOG_ELEMENTS), lParam = new element text (LPCWSTR)
                TDM_SET_BUTTON_ELEVATION_REQUIRED_STATE = WM_USER + 115, // wParam = Button ID, lParam = 0 (elevation not required), lParam != 0 (elevation required)
                TDM_UPDATE_ICON = WM_USER + 116                         // wParam = icon element (TASKDIALOG_ICON_ELEMENTS), lParam = new icon (hIcon if TDF_USE_HICON_* was set, PCWSTR otherwise)
            }

            [SuppressMessage("Microsoft.Interoperability", "CA1400:PInvokeEntryPointsShouldExist", Justification = "Entry point exists but CA can't find it")]
            [DllImport("comctl32.dll", SetLastError = true)]
            internal static extern int TaskDialogIndirect(
                ref TASKDIALOGCONFIG pTaskConfig,
                out int pnButton,
                out int pnRadioButton,
                [MarshalAs(UnmanagedType.Bool)] out bool pfverificationFlagChecked);

            internal delegate int PFTASKDIALOGCALLBACK(IntPtr hwnd, uint uNotification, UIntPtr wParam, IntPtr lParam, IntPtr lpRefData);

            [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
            internal struct TASKDIALOG_BUTTON {
                public int nButtonID;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszButtonText;
            }

            // Field order and layout must match the native TASKDIALOGCONFIG exactly.
            [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
            internal struct TASKDIALOGCONFIG {
                public uint cbSize;
                public IntPtr hwndParent;
                public IntPtr hInstance;
                public TASKDIALOG_FLAGS dwFlags;
                public TASKDIALOG_COMMON_BUTTON_FLAGS dwCommonButtons;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszWindowTitle;
                public IntPtr hMainIcon;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszMainInstruction;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszContent;
                public uint cButtons;
                public IntPtr pButtons;
                public int nDefaultButton;
                public uint cRadioButtons;
                public IntPtr pRadioButtons;
                public int nDefaultRadioButton;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszVerificationText;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszExpandedInformation;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszExpandedControlText;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszCollapsedControlText;
                public IntPtr hFooterIcon;
                [MarshalAs(UnmanagedType.LPWStr)]
                public string pszFooter;
                public PFTASKDIALOGCALLBACK pfCallback;
                public IntPtr lpCallbackData;
                public uint cxWidth;
            }

            [DllImport("user32.dll")]
            internal static extern IntPtr SendMessage(IntPtr hWnd, int Msg, IntPtr wParam, IntPtr lParam);
        }
    }

    /// <summary>
    /// A button (or radio button) on a <see cref="TaskDialog"/>. The static
    /// members are the well-known common buttons; instances created with text are
    /// custom buttons. A newline in the text splits it into Text and Subtext.
    /// </summary>
    public sealed class TaskDialogButton {
        public TaskDialogButton(string text) {
            int i = text.IndexOfAny(Environment.NewLine.ToCharArray());
            if (i < 0) {
                Text = text;
            } else {
                Text = text.Remove(i);
                Subtext = text.Substring(i).TrimStart();
            }
        }

        public TaskDialogButton(string text, string subtext) {
            Text = text;
            Subtext = subtext;
        }

        public string Text { get; set; }
        public string Subtext { get; set; }
        public bool ElevationRequired { get; set; }

        // Private: reserved for the well-known common-button singletons below,
        // which are identified by reference equality.
        private TaskDialogButton() { }
        public static readonly TaskDialogButton OK = new TaskDialogButton();
        public static readonly TaskDialogButton Cancel = new TaskDialogButton();
        public static readonly TaskDialogButton Yes = new TaskDialogButton();
        public static readonly TaskDialogButton No = new TaskDialogButton();
        public static readonly TaskDialogButton Retry = new TaskDialogButton();
        public static readonly TaskDialogButton Close = new TaskDialogButton();
    }

    /// <summary>
    /// Event arguments for <see cref="TaskDialog.HyperlinkClicked"/>; carries the
    /// clicked hyperlink's href text.
    /// </summary>
    public sealed class TaskDialogHyperlinkClickedEventArgs : EventArgs {
        private readonly string _url;
        public TaskDialogHyperlinkClickedEventArgs(string url) {
            _url = url;
        }

        public string Url {
            get {
                return _url;
            }
        }
    }

    /// <summary>
    /// The icon shown in the main or footer area of a <see cref="TaskDialog"/>.
    /// </summary>
    public enum TaskDialogIcon {
        None,
        Error,
        Warning,
        Information,
        Shield
    }
}