context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// The worker functions in this file was optimized for performance. If you make changes
// you should use care to consider all of the interesting cases.
// The code of all worker functions in this file is written twice: Once as a slow loop, and the
// second time as a fast loop. The slow loops handles all special cases, throws exceptions, etc.
// The fast loops attempts to blaze through as fast as possible with optimistic range checks,
// processing multiple characters at a time, and falling back to the slow loop for all special cases.
using System;
using System.Buffers;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text.Unicode;
namespace System.Text
{
// Encodes text into and out of UTF-8. UTF-8 is a way of writing
// Unicode characters with variable numbers of bytes per character,
// optimized for the lower 127 ASCII characters. It's an efficient way
// of encoding US English in an internationalizable way.
//
// Don't override IsAlwaysNormalized because it is just a Unicode Transformation and could be confused.
//
// The UTF-8 byte order mark is simply the Unicode byte order mark
// (0xFEFF) written in UTF-8 (0xEF 0xBB 0xBF). The byte order mark is
// used mostly to distinguish UTF-8 text from other encodings, and doesn't
// switch the byte orderings.
public class UTF8Encoding : Encoding
{
/*
bytes bits UTF-8 representation
----- ---- -----------------------------------
1 7 0vvvvvvv
2 11 110vvvvv 10vvvvvv
3 16 1110vvvv 10vvvvvv 10vvvvvv
4 21 11110vvv 10vvvvvv 10vvvvvv 10vvvvvv
----- ---- -----------------------------------
Surrogate:
Real Unicode value = (HighSurrogate - 0xD800) * 0x400 + (LowSurrogate - 0xDC00) + 0x10000
*/
// UTF-8 code page identifier (Windows code page 65001).
private const int UTF8_CODEPAGE = 65001;

// Allow for de-virtualization (see https://github.com/dotnet/coreclr/pull/9230)
internal sealed class UTF8EncodingSealed : UTF8Encoding
{
    public UTF8EncodingSealed(bool encoderShouldEmitUTF8Identifier) : base(encoderShouldEmitUTF8Identifier) { }

    // Re-declared on the sealed type so the JIT can de-virtualize the getter.
    public override ReadOnlySpan<byte> Preamble => _emitUTF8Identifier ? PreambleSpan : default;
}

// Used by Encoding.UTF8 for lazy initialization
// The initialization code will not be run until a static member of the class is referenced
internal static readonly UTF8EncodingSealed s_default = new UTF8EncodingSealed(encoderShouldEmitUTF8Identifier: true);

internal static ReadOnlySpan<byte> PreambleSpan => new byte[3] { 0xEF, 0xBB, 0xBF }; // uses C# compiler's optimization for static byte[] data

// Yes, the idea of emitting U+FEFF as a UTF-8 identifier has made it into
// the standard.
internal readonly bool _emitUTF8Identifier = false;

// When true, invalid sequences throw instead of being replaced with U+FFFD.
private readonly bool _isThrowException = false;

// Default: no BOM emitted, invalid sequences replaced rather than thrown.
public UTF8Encoding() : this(false)
{
}

public UTF8Encoding(bool encoderShouldEmitUTF8Identifier) :
    base(UTF8_CODEPAGE)
{
    _emitUTF8Identifier = encoderShouldEmitUTF8Identifier;
}

public UTF8Encoding(bool encoderShouldEmitUTF8Identifier, bool throwOnInvalidBytes) :
    this(encoderShouldEmitUTF8Identifier)
{
    _isThrowException = throwOnInvalidBytes;

    // Encoding's constructor already did this, but it'll be wrong if we're throwing exceptions
    if (_isThrowException)
        SetDefaultFallbacks();
}

// Installs either exception fallbacks (throwOnInvalidBytes) or U+FFFD
// replacement fallbacks for both encode and decode directions.
internal sealed override void SetDefaultFallbacks()
{
    if (_isThrowException)
    {
        this.encoderFallback = EncoderFallback.ExceptionFallback;
        this.decoderFallback = DecoderFallback.ExceptionFallback;
    }
    else
    {
        // Substitute U+FFFD REPLACEMENT CHARACTER for each invalid sequence.
        this.encoderFallback = new EncoderReplacementFallback("\xFFFD");
        this.decoderFallback = new DecoderReplacementFallback("\xFFFD");
    }
}
// WARNING: GetByteCount(string chars)
// WARNING: has different variable names than EncodingNLS.cs, so this can't just be cut & pasted,
// WARNING: otherwise it'll break VB's way of declaring these.
//
// The following methods are copied from EncodingNLS.cs.
// Unfortunately EncodingNLS.cs is internal and we're public, so we have to re-implement them here.
// These should be kept in sync for the following classes:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// Returns the number of bytes required to encode a range of characters in
// a character array.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetByteCount(char[] chars, int index, int count)
{
    // Validate input parameters
    if (chars is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.chars, ExceptionResource.ArgumentNull_Array);
    }
    // (index | count) < 0 is a single-branch test that either value is negative.
    if ((index | count) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException((index < 0) ? ExceptionArgument.index : ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (chars.Length - index < count)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.chars, ExceptionResource.ArgumentOutOfRange_IndexCountBuffer);
    }

    fixed (char* pChars = chars)
    {
        return GetByteCountCommon(pChars + index, count);
    }
}

// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetByteCount(string chars)
{
    // Validate input parameters
    if (chars is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.chars);
    }

    fixed (char* pChars = chars)
    {
        return GetByteCountCommon(pChars, chars.Length);
    }
}

// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public override unsafe int GetByteCount(char* chars, int count)
{
    // Validate Parameters
    if (chars == null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.chars);
    }
    if (count < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }

    return GetByteCountCommon(chars, count);
}

public override unsafe int GetByteCount(ReadOnlySpan<char> chars)
{
    // It's ok for us to pass null pointers down to the workhorse below.
    fixed (char* charsPtr = &MemoryMarshal.GetReference(chars))
    {
        return GetByteCountCommon(charsPtr, chars.Length);
    }
}

[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe int GetByteCountCommon(char* pChars, int charCount)
{
    // Common helper method for all non-EncoderNLS entry points to GetByteCount.
    // A modification of this method should be copied in to each of the supported encodings: ASCII, UTF8, UTF16, UTF32.
    Debug.Assert(charCount >= 0, "Caller shouldn't specify negative length buffer.");
    Debug.Assert(pChars != null || charCount == 0, "Input pointer shouldn't be null if non-zero length specified.");

    // First call into the fast path.
    // Don't bother providing a fallback mechanism; our fast path doesn't use it.
    int totalByteCount = GetByteCountFast(pChars, charCount, fallback: null, out int charsConsumed);
    if (charsConsumed != charCount)
    {
        // If there's still data remaining in the source buffer, go down the fallback path.
        // We need to check for integer overflow since the fallback could change the required
        // output count in unexpected ways.
        totalByteCount += GetByteCountWithFallback(pChars, charCount, charsConsumed);
        if (totalByteCount < 0)
        {
            ThrowConversionOverflow();
        }
    }

    return totalByteCount;
}

[MethodImpl(MethodImplOptions.AggressiveInlining)] // called directly by GetByteCountCommon
private protected sealed override unsafe int GetByteCountFast(char* pChars, int charsLength, EncoderFallback? fallback, out int charsConsumed)
{
    // The number of UTF-8 code units may exceed the number of UTF-16 code units,
    // so we'll need to check for overflow before casting to Int32.
    char* ptrToFirstInvalidChar = Utf16Utility.GetPointerToFirstInvalidChar(pChars, charsLength, out long utf8CodeUnitCountAdjustment, out _);

    // Everything up to the first invalid char was consumed by the fast path.
    int tempCharsConsumed = (int)(ptrToFirstInvalidChar - pChars);
    charsConsumed = tempCharsConsumed;

    long totalUtf8Bytes = tempCharsConsumed + utf8CodeUnitCountAdjustment;
    if ((ulong)totalUtf8Bytes > int.MaxValue)
    {
        ThrowConversionOverflow();
    }

    return (int)totalUtf8Bytes;
}
// Parent method is safe.
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
public override unsafe int GetBytes(string s, int charIndex, int charCount,
    byte[] bytes, int byteIndex)
{
    // Validate Parameters
    if (s is null || bytes is null)
    {
        ThrowHelper.ThrowArgumentNullException(
            argument: (s is null) ? ExceptionArgument.s : ExceptionArgument.bytes,
            resource: ExceptionResource.ArgumentNull_Array);
    }
    // (charIndex | charCount) < 0 is a single-branch "either is negative" test.
    if ((charIndex | charCount) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(
            argument: (charIndex < 0) ? ExceptionArgument.charIndex : ExceptionArgument.charCount,
            resource: ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (s.Length - charIndex < charCount)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.s, ExceptionResource.ArgumentOutOfRange_IndexCount);
    }
    // The (uint) cast also rejects negative byteIndex in a single comparison.
    if ((uint)byteIndex > bytes.Length)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.byteIndex, ExceptionResource.ArgumentOutOfRange_Index);
    }

    fixed (char* pChars = s)
    fixed (byte* pBytes = bytes)
    {
        return GetBytesCommon(pChars + charIndex, charCount, pBytes + byteIndex, bytes.Length - byteIndex);
    }
}

// Encodes a range of characters in a character array into a range of bytes
// in a byte array. An exception occurs if the byte array is not large
// enough to hold the complete encoding of the characters. The
// GetByteCount method can be used to determine the exact number of
// bytes that will be produced for a given range of characters.
// Alternatively, the GetMaxByteCount method can be used to
// determine the maximum number of bytes that will be produced for a given
// number of characters, regardless of the actual character values.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetBytes(char[] chars, int charIndex, int charCount,
    byte[] bytes, int byteIndex)
{
    // Validate parameters
    if (chars is null || bytes is null)
    {
        ThrowHelper.ThrowArgumentNullException(
            argument: (chars is null) ? ExceptionArgument.chars : ExceptionArgument.bytes,
            resource: ExceptionResource.ArgumentNull_Array);
    }
    if ((charIndex | charCount) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(
            argument: (charIndex < 0) ? ExceptionArgument.charIndex : ExceptionArgument.charCount,
            resource: ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (chars.Length - charIndex < charCount)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.chars, ExceptionResource.ArgumentOutOfRange_IndexCount);
    }
    if ((uint)byteIndex > bytes.Length)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.byteIndex, ExceptionResource.ArgumentOutOfRange_Index);
    }

    fixed (char* pChars = chars)
    fixed (byte* pBytes = bytes)
    {
        return GetBytesCommon(pChars + charIndex, charCount, pBytes + byteIndex, bytes.Length - byteIndex);
    }
}

// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public override unsafe int GetBytes(char* chars, int charCount, byte* bytes, int byteCount)
{
    // Validate Parameters
    if (chars == null || bytes == null)
    {
        ThrowHelper.ThrowArgumentNullException(
            argument: (chars is null) ? ExceptionArgument.chars : ExceptionArgument.bytes,
            resource: ExceptionResource.ArgumentNull_Array);
    }
    if ((charCount | byteCount) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(
            argument: (charCount < 0) ? ExceptionArgument.charCount : ExceptionArgument.byteCount,
            resource: ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }

    return GetBytesCommon(chars, charCount, bytes, byteCount);
}

public override unsafe int GetBytes(ReadOnlySpan<char> chars, Span<byte> bytes)
{
    // It's ok for us to operate on null / empty spans.
    fixed (char* charsPtr = &MemoryMarshal.GetReference(chars))
    fixed (byte* bytesPtr = &MemoryMarshal.GetReference(bytes))
    {
        return GetBytesCommon(charsPtr, chars.Length, bytesPtr, bytes.Length);
    }
}

[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe int GetBytesCommon(char* pChars, int charCount, byte* pBytes, int byteCount)
{
    // Common helper method for all non-EncoderNLS entry points to GetBytes.
    // A modification of this method should be copied in to each of the supported encodings: ASCII, UTF8, UTF16, UTF32.
    Debug.Assert(charCount >= 0, "Caller shouldn't specify negative length buffer.");
    Debug.Assert(pChars != null || charCount == 0, "Input pointer shouldn't be null if non-zero length specified.");
    Debug.Assert(byteCount >= 0, "Caller shouldn't specify negative length buffer.");
    Debug.Assert(pBytes != null || byteCount == 0, "Input pointer shouldn't be null if non-zero length specified.");

    // First call into the fast path.
    int bytesWritten = GetBytesFast(pChars, charCount, pBytes, byteCount, out int charsConsumed);
    if (charsConsumed == charCount)
    {
        // All elements converted - return immediately.
        return bytesWritten;
    }
    else
    {
        // Fast path couldn't operate on entire buffer - invoke fallback for the remainder.
        return GetBytesWithFallback(pChars, charCount, pBytes, byteCount, charsConsumed, bytesWritten);
    }
}

[MethodImpl(MethodImplOptions.AggressiveInlining)] // called directly by GetBytesCommon
private protected sealed override unsafe int GetBytesFast(char* pChars, int charsLength, byte* pBytes, int bytesLength, out int charsConsumed)
{
    // We don't care about the exact OperationStatus value returned by the workhorse routine; we only
    // care if the workhorse was able to consume the entire input payload. If we're unable to do so,
    // we'll handle the remainder in the fallback routine.
    Utf8Utility.TranscodeToUtf8(pChars, charsLength, pBytes, bytesLength, out char* pInputBufferRemaining, out byte* pOutputBufferRemaining);

    charsConsumed = (int)(pInputBufferRemaining - pChars);
    return (int)(pOutputBufferRemaining - pBytes);
}
// Returns the number of characters produced by decoding a range of bytes
// in a byte array.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetCharCount(byte[] bytes, int index, int count)
{
    // Validate Parameters
    if (bytes is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.bytes, ExceptionResource.ArgumentNull_Array);
    }
    // (index | count) < 0 tests both values for negativity with one branch.
    if ((index | count) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException((index < 0) ? ExceptionArgument.index : ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (bytes.Length - index < count)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.bytes, ExceptionResource.ArgumentOutOfRange_IndexCountBuffer);
    }

    fixed (byte* pBytes = bytes)
    {
        return GetCharCountCommon(pBytes + index, count);
    }
}
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public override unsafe int GetCharCount(byte* bytes, int count)
{
    // Validate Parameters
    if (bytes == null)
    {
        // A raw pointer is not an array, so throw without the ArgumentNull_Array
        // resource - consistent with the sibling pointer overload GetByteCount(char*, int).
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.bytes);
    }
    if (count < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }

    return GetCharCountCommon(bytes, count);
}
public override unsafe int GetCharCount(ReadOnlySpan<byte> bytes)
{
    // It's ok for us to pass null pointers down to the workhorse routine.
    fixed (byte* bytesPtr = &MemoryMarshal.GetReference(bytes))
    {
        return GetCharCountCommon(bytesPtr, bytes.Length);
    }
}

// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetChars(byte[] bytes, int byteIndex, int byteCount,
    char[] chars, int charIndex)
{
    // Validate Parameters
    if (bytes is null || chars is null)
    {
        ThrowHelper.ThrowArgumentNullException(
            argument: (bytes is null) ? ExceptionArgument.bytes : ExceptionArgument.chars,
            resource: ExceptionResource.ArgumentNull_Array);
    }
    // (byteIndex | byteCount) < 0 is a single-branch "either is negative" test.
    if ((byteIndex | byteCount) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(
            argument: (byteIndex < 0) ? ExceptionArgument.byteIndex : ExceptionArgument.byteCount,
            resource: ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (bytes.Length - byteIndex < byteCount)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.bytes, ExceptionResource.ArgumentOutOfRange_IndexCountBuffer);
    }
    // The (uint) cast also rejects negative charIndex in a single comparison.
    if ((uint)charIndex > (uint)chars.Length)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.charIndex, ExceptionResource.ArgumentOutOfRange_Index);
    }

    fixed (byte* pBytes = bytes)
    fixed (char* pChars = chars)
    {
        return GetCharsCommon(pBytes + byteIndex, byteCount, pChars + charIndex, chars.Length - charIndex);
    }
}

// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public unsafe override int GetChars(byte* bytes, int byteCount, char* chars, int charCount)
{
    // Validate Parameters
    if (bytes is null || chars is null)
    {
        ThrowHelper.ThrowArgumentNullException(
            argument: (bytes is null) ? ExceptionArgument.bytes : ExceptionArgument.chars,
            resource: ExceptionResource.ArgumentNull_Array);
    }
    if ((byteCount | charCount) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(
            argument: (byteCount < 0) ? ExceptionArgument.byteCount : ExceptionArgument.charCount,
            resource: ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }

    return GetCharsCommon(bytes, byteCount, chars, charCount);
}

public override unsafe int GetChars(ReadOnlySpan<byte> bytes, Span<char> chars)
{
    // It's ok for us to pass null pointers down to the workhorse below.
    fixed (byte* bytesPtr = &MemoryMarshal.GetReference(bytes))
    fixed (char* charsPtr = &MemoryMarshal.GetReference(chars))
    {
        return GetCharsCommon(bytesPtr, bytes.Length, charsPtr, chars.Length);
    }
}

// WARNING: If we throw an error, then System.Resources.ResourceReader calls this method.
// So if we're really broken, then that could also throw an error... recursively.
// So try to make sure GetChars can at least process all uses by
// System.Resources.ResourceReader!
//
// Note: We throw exceptions on individually encoded surrogates and other non-shortest forms.
// If exceptions aren't turned on, then we drop all non-shortest &individual surrogates.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe int GetCharsCommon(byte* pBytes, int byteCount, char* pChars, int charCount)
{
    // Common helper method for all non-DecoderNLS entry points to GetChars.
    // A modification of this method should be copied in to each of the supported encodings: ASCII, UTF8, UTF16, UTF32.
    Debug.Assert(byteCount >= 0, "Caller shouldn't specify negative length buffer.");
    Debug.Assert(pBytes != null || byteCount == 0, "Input pointer shouldn't be null if non-zero length specified.");
    Debug.Assert(charCount >= 0, "Caller shouldn't specify negative length buffer.");
    Debug.Assert(pChars != null || charCount == 0, "Input pointer shouldn't be null if non-zero length specified.");

    // First call into the fast path.
    int charsWritten = GetCharsFast(pBytes, byteCount, pChars, charCount, out int bytesConsumed);
    if (bytesConsumed == byteCount)
    {
        // All elements converted - return immediately.
        return charsWritten;
    }
    else
    {
        // Fast path couldn't operate on entire buffer - invoke fallback for the remainder.
        return GetCharsWithFallback(pBytes, byteCount, pChars, charCount, bytesConsumed, charsWritten);
    }
}

[MethodImpl(MethodImplOptions.AggressiveInlining)] // called directly by GetCharsCommon
private protected sealed override unsafe int GetCharsFast(byte* pBytes, int bytesLength, char* pChars, int charsLength, out int bytesConsumed)
{
    // We don't care about the exact OperationStatus value returned by the workhorse routine; we only
    // care if the workhorse was able to consume the entire input payload. If we're unable to do so,
    // we'll handle the remainder in the fallback routine.
    Utf8Utility.TranscodeToUtf16(pBytes, bytesLength, pChars, charsLength, out byte* pInputBufferRemaining, out char* pOutputBufferRemaining);

    bytesConsumed = (int)(pInputBufferRemaining - pBytes);
    return (int)(pOutputBufferRemaining - pChars);
}
private protected sealed override unsafe int GetCharsWithFallback(ReadOnlySpan<byte> bytes, int originalBytesLength, Span<char> chars, int originalCharsLength, DecoderNLS? decoder)
{
    // We special-case DecoderReplacementFallback if it's telling us to write a single U+FFFD char,
    // since we believe this to be relatively common and we can handle it more efficiently than
    // the base implementation.
    if (((decoder is null) ? this.DecoderFallback : decoder.Fallback) is DecoderReplacementFallback replacementFallback
        && replacementFallback.MaxCharCount == 1
        && replacementFallback.DefaultString[0] == UnicodeUtility.ReplacementChar)
    {
        // Don't care about the exact OperationStatus, just how much of the payload we were able
        // to process. A null decoder (or one that must flush) means this is the final block,
        // so trailing partial sequences are treated as invalid rather than buffered.
        Utf8.ToUtf16(bytes, chars, out int bytesRead, out int charsWritten, replaceInvalidSequences: true, isFinalBlock: decoder is null || decoder.MustFlush);

        // Slice off how much we consumed / wrote.
        bytes = bytes.Slice(bytesRead);
        chars = chars.Slice(charsWritten);
    }

    // If we couldn't go through our fast fallback mechanism, or if we still have leftover
    // data because we couldn't consume everything in the loop above, we need to go down the
    // slow fallback path.
    if (bytes.IsEmpty)
    {
        return originalCharsLength - chars.Length; // total number of chars written
    }
    else
    {
        return base.GetCharsWithFallback(bytes, originalBytesLength, chars, originalCharsLength, decoder);
    }
}
// Returns a string containing the decoded representation of a range of
// bytes in a byte array.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe string GetString(byte[] bytes, int index, int count)
{
    // Validate Parameters
    if (bytes is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.bytes, ExceptionResource.ArgumentNull_Array);
    }
    // (index | count) < 0 tests both values for negativity with one branch.
    if ((index | count) < 0)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(
            argument: (index < 0) ? ExceptionArgument.index : ExceptionArgument.count,
            resource: ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (bytes.Length - index < count)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.bytes, ExceptionResource.ArgumentOutOfRange_IndexCountBuffer);
    }

    // Avoid problems with empty input buffer
    if (count == 0)
        return string.Empty;

    fixed (byte* pBytes = bytes)
    {
        // Lets the string class size the allocation exactly and decode in place.
        return string.CreateStringFromEncoding(pBytes + index, count, this);
    }
}
//
// End of standard methods copied from EncodingNLS.cs
//
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe int GetCharCountCommon(byte* pBytes, int byteCount)
{
    // Common helper method for all non-DecoderNLS entry points to GetCharCount.
    // A modification of this method should be copied in to each of the supported encodings: ASCII, UTF8, UTF16, UTF32.
    Debug.Assert(byteCount >= 0, "Caller shouldn't specify negative length buffer.");
    Debug.Assert(pBytes != null || byteCount == 0, "Input pointer shouldn't be null if non-zero length specified.");

    // First call into the fast path.
    // Don't bother providing a fallback mechanism; our fast path doesn't use it.
    int totalCharCount = GetCharCountFast(pBytes, byteCount, fallback: null, out int bytesConsumed);
    if (bytesConsumed != byteCount)
    {
        // If there's still data remaining in the source buffer, go down the fallback path.
        // We need to check for integer overflow since the fallback could change the required
        // output count in unexpected ways.
        totalCharCount += GetCharCountWithFallback(pBytes, byteCount, bytesConsumed);
        if (totalCharCount < 0)
        {
            ThrowConversionOverflow();
        }
    }

    return totalCharCount;
}

[MethodImpl(MethodImplOptions.AggressiveInlining)] // called directly by GetCharCountCommon
private protected sealed override unsafe int GetCharCountFast(byte* pBytes, int bytesLength, DecoderFallback? fallback, out int bytesConsumed)
{
    // The number of UTF-16 code units will never exceed the number of UTF-8 code units,
    // so the addition at the end of this method will not overflow.
    byte* ptrToFirstInvalidByte = Utf8Utility.GetPointerToFirstInvalidByte(pBytes, bytesLength, out int utf16CodeUnitCountAdjustment, out _);

    // Everything up to the first invalid byte was consumed by the fast path.
    int tempBytesConsumed = (int)(ptrToFirstInvalidByte - pBytes);
    bytesConsumed = tempBytesConsumed;

    return tempBytesConsumed + utf16CodeUnitCountAdjustment;
}
/// <summary>Creates a stateful decoder for streaming UTF-8 to UTF-16 conversion.</summary>
public override Decoder GetDecoder() => new DecoderNLS(this);

/// <summary>Creates a stateful encoder for streaming UTF-16 to UTF-8 conversion.</summary>
public override Encoder GetEncoder() => new EncoderNLS(this);
//
// Beginning of methods used by shared fallback logic.
//
// Reports how many UTF-8 code units a scalar value requires; never fails for UTF-8.
internal sealed override bool TryGetByteCount(Rune value, out int byteCount)
{
    // All well-formed Rune instances can be converted to 1..4 UTF-8 code units.
    byteCount = value.Utf8SequenceLength;
    return true;
}

// Encodes a single scalar value to UTF-8 for the shared fallback machinery.
internal sealed override OperationStatus EncodeRune(Rune value, Span<byte> bytes, out int bytesWritten)
{
    // All well-formed Rune instances can be encoded as 1..4 UTF-8 code units.
    // If there's an error, it's because the destination was too small.
    return value.TryEncodeToUtf8(bytes, out bytesWritten) ? OperationStatus.Done : OperationStatus.DestinationTooSmall;
}

// Decodes the first scalar value from a UTF-8 buffer for the shared fallback machinery.
internal sealed override OperationStatus DecodeFirstRune(ReadOnlySpan<byte> bytes, out Rune value, out int bytesConsumed)
{
    return Rune.DecodeFromUtf8(bytes, out value, out bytesConsumed);
}
//
// End of methods used by shared fallback logic.
//
public override int GetMaxByteCount(int charCount)
{
    if (charCount < 0)
        throw new ArgumentOutOfRangeException(nameof(charCount),
            SR.ArgumentOutOfRange_NeedNonNegNum);

    // Characters would be # of characters + 1 in case left over high surrogate is ? * max fallback
    // Intermediate math is done in long to detect Int32 overflow below.
    long byteCount = (long)charCount + 1;

    if (EncoderFallback.MaxCharCount > 1)
        byteCount *= EncoderFallback.MaxCharCount;

    // Max 3 bytes per char. (4 bytes per 2 chars for surrogates)
    byteCount *= 3;

    if (byteCount > 0x7fffffff)
        throw new ArgumentOutOfRangeException(nameof(charCount), SR.ArgumentOutOfRange_GetByteCountOverflow);

    return (int)byteCount;
}

public override int GetMaxCharCount(int byteCount)
{
    if (byteCount < 0)
        throw new ArgumentOutOfRangeException(nameof(byteCount),
            SR.ArgumentOutOfRange_NeedNonNegNum);

    // Figure out our length, 1 char per input byte + 1 char if 1st byte is last byte of 4 byte surrogate pair
    // Intermediate math is done in long to detect Int32 overflow below.
    long charCount = ((long)byteCount + 1);

    // Non-shortest form would fall back, so get max count from fallback.
    // So would 11... followed by 11..., so you could fall back every byte
    if (DecoderFallback.MaxCharCount > 1)
    {
        charCount *= DecoderFallback.MaxCharCount;
    }

    if (charCount > 0x7fffffff)
        throw new ArgumentOutOfRangeException(nameof(byteCount), SR.ArgumentOutOfRange_GetCharCountOverflow);

    return (int)charCount;
}
public override byte[] GetPreamble()
{
    if (_emitUTF8Identifier)
    {
        // Allocate new array to prevent users from modifying it.
        return new byte[3] { 0xEF, 0xBB, 0xBF };
    }
    else
        return Array.Empty<byte>();
}

public override ReadOnlySpan<byte> Preamble =>
    GetType() != typeof(UTF8Encoding) ? new ReadOnlySpan<byte>(GetPreamble()) : // in case a derived UTF8Encoding overrode GetPreamble
    _emitUTF8Identifier ? PreambleSpan :
    default;

// Two UTF8Encoding instances are equal when they agree on BOM emission and
// on both fallback objects.
public override bool Equals(object? value)
{
    if (value is UTF8Encoding that)
    {
        return (_emitUTF8Identifier == that._emitUTF8Identifier) &&
            (EncoderFallback.Equals(that.EncoderFallback)) &&
            (DecoderFallback.Equals(that.DecoderFallback));
    }
    return false;
}

public override int GetHashCode()
{
    //Not great distribution, but this is relatively unlikely to be used as the key in a hashtable.
    return this.EncoderFallback.GetHashCode() + this.DecoderFallback.GetHashCode() +
        UTF8_CODEPAGE + (_emitUTF8Identifier ? 1 : 0);
}
}
}
| |
// Copyright (c) Alexandre Mutel. All rights reserved.
// Licensed under the BSD-Clause 2 license.
// See license.txt file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.CompilerServices;
using Scriban.Functions;
using Scriban.Helpers;
using Scriban.Parsing;
using Scriban.Syntax;
namespace Scriban.Runtime
{
/// <summary>
/// Base runtime object for arrays.
/// </summary>
/// <seealso cref="object" />
/// <seealso cref="System.Collections.IList" />
[DebuggerDisplay("Count = {Count}")]
[DebuggerTypeProxy(typeof(ScriptArray<>.DebugListView))]
public class ScriptArray<T> : IList<T>, IList, IScriptObject, IScriptCustomBinaryOperation, IScriptTransformable
{
// Backing storage for the array elements.
private List<T> _values;
// When true, mutating operations are rejected (see IsReadOnly).
private bool _isReadOnly;
// Attached ScriptObject is only created if needed
private ScriptObject _script;
/// <summary>
/// Initializes a new instance of the <see cref="ScriptArray"/> class with an empty backing list.
/// </summary>
public ScriptArray() => _values = new List<T>();

/// <summary>
/// Initializes a new instance of the <see cref="ScriptArray"/> class with the given initial capacity.
/// </summary>
/// <param name="capacity">The capacity.</param>
public ScriptArray(int capacity) => _values = new List<T>(capacity);

/// <summary>
/// Initializes a new instance of the <see cref="ScriptArray"/> class containing a copy of <paramref name="array"/>.
/// </summary>
/// <param name="array">The array to copy; must not be null.</param>
public ScriptArray(T[] array)
{
    if (array == null) throw new ArgumentNullException(nameof(array));
    // List<T>(IEnumerable<T>) presizes from the array length and copies every element.
    _values = new List<T>(array);
}

/// <summary>
/// Initializes a new instance of the <see cref="ScriptArray"/> class containing a copy of <paramref name="values"/>.
/// </summary>
/// <param name="values">The values.</param>
public ScriptArray(IEnumerable<T> values) => _values = new List<T>(values);

/// <summary>
/// Initializes a new instance from a non-generic sequence; each element is cast to <typeparamref name="T"/>.
/// </summary>
/// <param name="values">The values; each element must be convertible to <typeparamref name="T"/>.</param>
public ScriptArray(IEnumerable values)
{
    _values = new List<T>();
    foreach (object item in values)
    {
        _values.Add((T)item);
    }
}
// Exposes the backing list's capacity so callers can presize and avoid regrowth.
public int Capacity
{
    get => _values.Capacity;
    set => _values.Capacity = value;
}

// Read-only flag; also propagated to the attached ScriptObject when one exists.
public virtual bool IsReadOnly
{
    get => _isReadOnly;
    set
    {
        if (_script != null)
        {
            _script.IsReadOnly = value;
        }
        _isReadOnly = value;
    }
}
/// <summary>
/// Clones this array. When <paramref name="deep"/> is true, elements implementing
/// <see cref="IScriptObject"/> and the attached script object are recursively cloned;
/// otherwise elements and the attached script object are copied shallowly.
/// </summary>
/// <param name="deep">True for a recursive clone, false for a shallow copy.</param>
public virtual IScriptObject Clone(bool deep)
{
    // MemberwiseClone copies _isReadOnly; the list and script object are rebuilt below.
    var clone = (ScriptArray<T>)MemberwiseClone();
    clone._values = new List<T>(_values.Count);
    clone._script = null;

    foreach (var item in _values)
    {
        // Only IScriptObject elements participate in deep cloning; everything else
        // is copied by reference/value exactly as in a shallow clone.
        if (deep && item is IScriptObject scriptItem)
        {
            clone._values.Add((T)scriptItem.Clone(true));
        }
        else
        {
            clone._values.Add(item);
        }
    }

    if (_script != null)
    {
        // The attached ScriptObject follows the same depth as the array itself.
        clone._script = (ScriptObject)_script.Clone(deep);
    }

    return clone;
}
/// <summary>Gets the attached <see cref="ScriptObject"/>, creating it lazily on first access.</summary>
public ScriptObject ScriptObject => _script ?? (_script = new ScriptObject() { IsReadOnly = IsReadOnly });

/// <summary>Gets the number of elements in the array.</summary>
public int Count => _values.Count;

/// <summary>
/// Gets or sets the element at the specified index. Reading an out-of-range index returns
/// the default value instead of throwing; writing past the end auto-expands the array with
/// default values; writes to negative indices are silently ignored.
/// </summary>
public virtual T this[int index]
{
    get
    {
        // Out-of-range reads yield the default value rather than throwing.
        return index < 0 || index >= _values.Count ? default : _values[index];
    }
    set
    {
        if (index < 0)
        {
            return;
        }
        this.AssertNotReadOnly();
        // Auto-expand the array in case of accessing a range outside the current value
        while (_values.Count <= index)
        {
            _values.Add(default);
        }
        _values[index] = value;
    }
}
/// <summary>Appends an item to the end of the array.</summary>
public virtual void Add(T item)
{
    this.AssertNotReadOnly();
    _values.Add(item);
}

/// <summary>Appends every item of the sequence to the end of the array.</summary>
/// <param name="items">The items to append. Must not be null.</param>
public void AddRange(IEnumerable<T> items)
{
    if (items == null) throw new ArgumentNullException(nameof(items));
    foreach (var item in items)
    {
        Add(item);
    }
}

int IList.Add(object value)
{
    // IList.Add is supposed to return the index of the new element;
    // this implementation has always returned 0 (kept for compatibility).
    Add((T)value);
    return 0;
}

bool IList.Contains(object value) => ((IList)_values).Contains(value);

/// <summary>Removes all elements from the array.</summary>
public virtual void Clear()
{
    this.AssertNotReadOnly();
    _values.Clear();
}

int IList.IndexOf(object value) => ((IList)_values).IndexOf(value);

void IList.Insert(int index, object value) => Insert(index, (T)value);
/// <summary>Returns true when the array contains the given item.</summary>
public virtual bool Contains(T item) => _values.Contains(item);

/// <summary>Copies the whole array into <paramref name="array"/> starting at <paramref name="arrayIndex"/>.</summary>
public virtual void CopyTo(T[] array, int arrayIndex) => _values.CopyTo(array, arrayIndex);

/// <summary>Copies <paramref name="count"/> elements starting at <paramref name="index"/> into <paramref name="array"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void CopyTo(int index, T[] array, int arrayIndex, int count) => _values.CopyTo(index, array, arrayIndex, count);

/// <summary>Returns the index of the first occurrence of <paramref name="item"/>, or -1 when absent.</summary>
public virtual int IndexOf(T item) => _values.IndexOf(item);

/// <summary>
/// Inserts an item at the given index, auto-expanding the array with default
/// values when the index lies beyond the current end.
/// </summary>
public virtual void Insert(int index, T item)
{
    this.AssertNotReadOnly();
    // Auto-expand the array in case of accessing a range outside the current value
    while (_values.Count < index)
    {
        _values.Add(default);
    }
    _values.Insert(index, item);
}
void IList.Remove(object value) => Remove((T)value);

/// <summary>Removes the element at <paramref name="index"/>; out-of-range indices are silently ignored.</summary>
public virtual void RemoveAt(int index)
{
    this.AssertNotReadOnly();
    if (index < 0 || index >= _values.Count)
    {
        return;
    }
    _values.RemoveAt(index);
}

object IList.this[int index]
{
    get => this[index];
    set
    {
        if (typeof(T) == typeof(object))
        {
            // No conversion needed when the element type is object.
            this[index] = (T)value;
        }
        else if (value is T typedValue)
        {
            this[index] = typedValue;
        }
        else
        {
            // Fall back to a runtime conversion (may throw for incompatible values).
            this[index] = (T)Convert.ChangeType(value, typeof(T));
        }
    }
}

/// <summary>Removes the first occurrence of <paramref name="item"/>; returns true when an element was removed.</summary>
public virtual bool Remove(T item)
{
    this.AssertNotReadOnly();
    return _values.Remove(item);
}

/// <summary>Returns the non-allocating struct enumerator of the underlying list.</summary>
public List<T>.Enumerator GetEnumerator() => _values.GetEnumerator();
// Explicit interface plumbing: everything simply forwards to the underlying
// List<T> (or to the public IsReadOnly flag).
bool IList.IsFixedSize => ((IList)_values).IsFixedSize;

bool ICollection.IsSynchronized => ((ICollection)_values).IsSynchronized;

object ICollection.SyncRoot => ((ICollection)_values).SyncRoot;

bool IList.IsReadOnly => IsReadOnly;

IEnumerator<T> IEnumerable<T>.GetEnumerator() => _values.GetEnumerator();

IEnumerator IEnumerable.GetEnumerator() => _values.GetEnumerator();

bool ICollection<T>.IsReadOnly => IsReadOnly;

void ICollection.CopyTo(Array array, int index) => ((ICollection)_values).CopyTo(array, index);
/// <summary>
/// Enumerates the member names exposed to the scripting layer: the built-in
/// "size" member plus any members of the attached script object.
/// </summary>
public IEnumerable<string> GetMembers()
{
    yield return "size";
    if (_script != null)
    {
        foreach (var member in _script.GetMembers())
        {
            yield return member;
        }
    }
}

/// <summary>Returns true when the attached script object contains the given member.</summary>
public virtual bool Contains(string member) => ScriptObject.Contains(member);

/// <summary>
/// Tries to read a member value. The built-in "size" member returns <see cref="Count"/>;
/// anything else is delegated to the attached script object.
/// </summary>
public virtual bool TryGetValue(TemplateContext context, SourceSpan span, string member, out object value)
{
    if (member == "size")
    {
        value = Count;
        return true;
    }
    return ScriptObject.TryGetValue(context, span, member, out value);
}

/// <summary>Returns false for the built-in read-only "size" member, otherwise delegates to the script object.</summary>
public virtual bool CanWrite(string member)
{
    if (member == "size")
    {
        return false;
    }
    return ScriptObject.CanWrite(member);
}

/// <summary>Writes a member value through the attached script object.</summary>
public virtual bool TrySetValue(TemplateContext context, SourceSpan span, string member, object value, bool readOnly) => ScriptObject.TrySetValue(context, span, member, value, readOnly);

/// <summary>Removes a member from the attached script object.</summary>
public virtual bool Remove(string member) => ScriptObject.Remove(member);

/// <summary>Marks a member of the attached script object as read-only.</summary>
public virtual void SetReadOnly(string member, bool readOnly) => ScriptObject.SetReadOnly(member, readOnly);
/// <summary>
/// Implements the binary operators supported by arrays: union (BinaryOr), intersection
/// (BinaryAnd), concatenation (Add), comparisons, repetition (Multiply), truncation
/// (Divide/DivideRound), every-nth sampling (Modulus), append (ShiftLeft) and prepend
/// (ShiftRight). Throws a <see cref="ScriptRuntimeException"/> when the operands are not
/// compatible with the operator.
/// </summary>
public bool TryEvaluate(TemplateContext context, SourceSpan span, ScriptBinaryOperator op, SourceSpan leftSpan, object leftValue, SourceSpan rightSpan, object rightValue, out object result)
{
    result = null;
    var leftArray = TryGetArray(leftValue);
    var rightArray = TryGetArray(rightValue);
    int intModifier = 0;
    var intSpan = leftSpan;

    var errorSpan = span;
    string reason = null;
    // First pass: validate the operand kinds for this operator and, for * / // %,
    // extract the integer modifier from the non-array side.
    switch (op)
    {
        case ScriptBinaryOperator.BinaryOr:
        case ScriptBinaryOperator.BinaryAnd:
        case ScriptBinaryOperator.CompareEqual:
        case ScriptBinaryOperator.CompareNotEqual:
        case ScriptBinaryOperator.CompareLessOrEqual:
        case ScriptBinaryOperator.CompareGreaterOrEqual:
        case ScriptBinaryOperator.CompareLess:
        case ScriptBinaryOperator.CompareGreater:
        case ScriptBinaryOperator.Add:
            // These operators require arrays on both sides.
            if (leftArray == null)
            {
                errorSpan = leftSpan;
                reason = " Expecting an array for the left argument.";
            }
            if (rightArray == null)
            {
                errorSpan = rightSpan;
                reason = " Expecting an array for the right argument.";
            }
            break;
        case ScriptBinaryOperator.Multiply:
            // Multiply expects exactly one array and one integer, in either order.
            if (leftArray == null && rightArray == null || leftArray != null && rightArray != null)
            {
                reason = " Expecting only one array for the left or right argument.";
            }
            else
            {
                intModifier = context.ToInt(span, leftArray == null ? leftValue : rightValue);
                if (rightArray == null) intSpan = rightSpan;
            }
            break;
        case ScriptBinaryOperator.Divide:
        case ScriptBinaryOperator.DivideRound:
        case ScriptBinaryOperator.Modulus:
            // Divide/modulus expect the array on the left and an integer on the right.
            if (leftArray == null)
            {
                errorSpan = leftSpan;
                reason = " Expecting an array for the left argument.";
            }
            else
            {
                intModifier = context.ToInt(span, rightValue);
                intSpan = rightSpan;
            }
            break;
        case ScriptBinaryOperator.ShiftLeft:
            if (leftArray == null)
            {
                errorSpan = leftSpan;
                reason = " Expecting an array for the left argument.";
            }
            break;
        case ScriptBinaryOperator.ShiftRight:
            if (rightArray == null)
            {
                errorSpan = rightSpan;
                reason = " Expecting an array for the right argument.";
            }
            break;
        default:
            // Unsupported operator: an empty (non-null) reason still triggers the throw below.
            reason = string.Empty;
            break;
    }

    if (intModifier < 0)
    {
        errorSpan = intSpan;
        reason = $" Integer {intModifier} cannot be negative when multiplying";
    }

    if (reason != null)
    {
        throw new ScriptRuntimeException(errorSpan, $"The operator `{op.ToText()}` is not supported between {context.GetTypeName(leftValue)} and {context.GetTypeName(rightValue)}.{reason}");
    }

    // Second pass: the operands are valid, perform the operation.
    switch (op)
    {
        case ScriptBinaryOperator.BinaryOr:
            // Set union of both arrays.
            result = new ScriptArray<T>(leftArray.Union(rightArray));
            return true;
        case ScriptBinaryOperator.BinaryAnd:
            // Set intersection of both arrays.
            result = new ScriptArray<T>(leftArray.Intersect(rightArray));
            return true;
        case ScriptBinaryOperator.Add:
            result = ArrayFunctions.Concat(leftArray, rightArray);
            return true;
        case ScriptBinaryOperator.CompareEqual:
        case ScriptBinaryOperator.CompareNotEqual:
        case ScriptBinaryOperator.CompareLessOrEqual:
        case ScriptBinaryOperator.CompareGreaterOrEqual:
        case ScriptBinaryOperator.CompareLess:
        case ScriptBinaryOperator.CompareGreater:
            result = CompareTo(context, span, op, leftArray, rightArray);
            return true;
        case ScriptBinaryOperator.Multiply:
        {
            // array with integer
            var array = leftArray ?? rightArray;
            if (intModifier == 0)
            {
                result = new ScriptArray<T>();
                return true;
            }

            // Repeat the array intModifier times.
            var newArray = new ScriptArray<T>(intModifier * array.Count);
            for (int i = 0; i < intModifier; i++)
            {
                newArray.AddRange(array);
            }

            result = newArray;
            return true;
        }
        case ScriptBinaryOperator.Divide:
        case ScriptBinaryOperator.DivideRound:
        {
            // array with integer
            var array = leftArray ?? rightArray;
            if (intModifier == 0) throw new ScriptRuntimeException(intSpan, "Cannot divide by 0");

            // Keep only the first Count/intModifier elements.
            var newLength = array.Count / intModifier;
            var newArray = new ScriptArray<T>(newLength);
            for (int i = 0; i < newLength; i++)
            {
                newArray.Add(array[i]);
            }

            result = newArray;
            return true;
        }
        case ScriptBinaryOperator.Modulus:
        {
            // array with integer
            var array = leftArray ?? rightArray;
            if (intModifier == 0) throw new ScriptRuntimeException(intSpan, "Cannot divide by 0");

            // Keep every intModifier-th element (indices 0, n, 2n, ...).
            var newArray = new ScriptArray<T>(array.Count);
            for (int i = 0; i < array.Count; i++)
            {
                if ((i % intModifier) == 0)
                {
                    newArray.Add(array[i]);
                }
            }

            result = newArray;
            return true;
        }
        case ScriptBinaryOperator.ShiftLeft:
            // array << item: append the right value to a copy of the left array.
            var newLeft = new ScriptArray<T>(leftArray);
            newLeft.Add(typeof(T) == typeof(object) ? (T)rightValue : context.ToObject<T>(rightSpan, rightValue));
            result = newLeft;
            return true;
        case ScriptBinaryOperator.ShiftRight:
            // item >> array: prepend the left value to a copy of the right array.
            var newRight = new ScriptArray<T>(rightArray);
            newRight.Insert(0, typeof(T) == typeof(object) ? (T)leftValue : context.ToObject<T>(leftSpan, leftValue));
            result = newRight;
            return true;
    }
    return false;
}
/// <summary>
/// Tries to view the given value as a <see cref="ScriptArray{T}"/>: the value itself
/// when it already is one, a new array wrapping an <see cref="IList"/> or a non-string
/// <see cref="IEnumerable"/>, or null for anything else.
/// </summary>
private static ScriptArray<T> TryGetArray(object rightValue)
{
    if (rightValue is ScriptArray<T> array)
    {
        return array;
    }
    if (rightValue is IList list)
    {
        return new ScriptArray<T>(list);
    }
    // Strings are enumerable but must not be treated as arrays of characters.
    if (rightValue is IEnumerable enumerable && !(rightValue is string))
    {
        return new ScriptArray<T>(enumerable);
    }
    return null;
}
/// <summary>
/// Compares two arrays with the given comparison operator. Array lengths are compared
/// first; when the lengths are equal the operator is applied element by element.
/// </summary>
private static bool CompareTo(TemplateContext context, SourceSpan span, ScriptBinaryOperator op, ScriptArray<T> left, ScriptArray<T> right)
{
    // Compare the length first
    var compare = left.Count.CompareTo(right.Count);
    switch (op)
    {
        case ScriptBinaryOperator.CompareEqual:
            // Different lengths can never be equal.
            if (compare != 0) return false;
            break;
        case ScriptBinaryOperator.CompareNotEqual:
            // Different lengths are always not-equal; two empty arrays are equal.
            if (compare != 0) return true;
            if (left.Count == 0) return false;
            break;
        case ScriptBinaryOperator.CompareLessOrEqual:
        case ScriptBinaryOperator.CompareLess:
            if (compare < 0) return true;
            if (compare > 0) return false;
            // Two empty arrays: strict "less" fails, "less or equal" falls through (loop is empty -> true).
            if (left.Count == 0 && op == ScriptBinaryOperator.CompareLess) return false;
            break;
        case ScriptBinaryOperator.CompareGreaterOrEqual:
        case ScriptBinaryOperator.CompareGreater:
            if (compare < 0) return false;
            if (compare > 0) return true;
            if (left.Count == 0 && op == ScriptBinaryOperator.CompareGreater) return false;
            break;
        default:
            throw new ScriptRuntimeException(span, $"The operator `{op.ToText()}` is not supported between {context.GetTypeName(left)} and {context.GetTypeName(right)}.");
    }
    // Otherwise we need to compare each element
    // NOTE(review): the loop requires the operator to hold for EVERY element pair;
    // for CompareNotEqual this means ALL pairs must differ, not just one — confirm
    // this is the intended semantics before changing it.
    for (int i = 0; i < left.Count; i++)
    {
        var result = (bool) ScriptBinaryExpression.Evaluate(context, span, op, left[i], right[i]);
        if (!result)
        {
            return false;
        }
    }
    return true;
}
/// <summary>The element type exposed to the transform API (always object for script arrays).</summary>
public Type ElementType => typeof(object);

/// <summary>Script arrays can always be transformed, whatever the requested type.</summary>
public virtual bool CanTransform(Type transformType) => true;

/// <summary>
/// Visits each element in order; stops and returns false as soon as the visitor returns false.
/// </summary>
public virtual bool Visit(TemplateContext context, SourceSpan span, Func<object, bool> visit)
{
    foreach (var item in this)
    {
        if (!visit(item))
        {
            return false;
        }
    }
    return true;
}

/// <summary>
/// Returns a deep clone of this array with <paramref name="apply"/> applied to every element.
/// </summary>
/// <param name="apply">The transformation applied to each element. Must not be null.</param>
public virtual object Transform(TemplateContext context, SourceSpan span, Func<object, object> apply, Type destType)
{
    if (apply == null) throw new ArgumentNullException(nameof(apply));
    var clone = (ScriptArray<T>)Clone(true);
    var values = clone._values;
    if (typeof(T) == typeof(object))
    {
        // No conversion required when elements are plain objects.
        for (int i = 0; i < values.Count; i++)
        {
            values[i] = (T)apply(values[i]);
        }
    }
    else
    {
        // Convert each transformed value back to the element type.
        for (int i = 0; i < values.Count; i++)
        {
            values[i] = context.ToObject<T>(span, apply(values[i]));
        }
    }
    return clone;
}
/// <summary>Debugger proxy that shows the array elements as a flat object list.</summary>
internal class DebugListView
{
    private readonly ScriptArray<T> _collection;

    public DebugListView(ScriptArray<T> collection) => _collection = collection;

    [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
    public object[] Items => _collection._values.Cast<object>().ToArray();
}
}
/// <summary>
/// Non-generic script array, storing its elements as plain objects.
/// </summary>
public class ScriptArray : ScriptArray<object>
{
    /// <summary>Initializes a new, empty instance.</summary>
    public ScriptArray()
    {
    }

    /// <summary>Initializes a new instance with the given initial capacity.</summary>
    public ScriptArray(int capacity) : base(capacity)
    {
    }

    /// <summary>Initializes a new instance from a typed sequence.</summary>
    public ScriptArray(IEnumerable<object> values) : base(values)
    {
    }

    /// <summary>Initializes a new instance from an untyped sequence.</summary>
    public ScriptArray(IEnumerable values) : base(values)
    {
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
using Internal.Runtime.CompilerServices;
namespace System.Buffers.Text
{
// AVX2 version based on https://github.com/aklomp/base64/tree/e516d769a2a432c08404f1981e73b431566057be/lib/arch/avx2
// SSSE3 version based on https://github.com/aklomp/base64/tree/e516d769a2a432c08404f1981e73b431566057be/lib/arch/ssse3
public static partial class Base64
{
/// <summary>
/// Decode the span of UTF-8 encoded text represented as base 64 into binary data.
/// If the input is not a multiple of 4, it will decode as much as it can, to the closest multiple of 4.
/// </summary>
/// <param name="utf8">The input span which contains UTF-8 encoded text in base 64 that needs to be decoded.</param>
/// <param name="bytes">The output span which contains the result of the operation, i.e. the decoded binary data.</param>
/// <param name="bytesConsumed">The number of input bytes consumed during the operation. This can be used to slice the input for subsequent calls, if necessary.</param>
/// <param name="bytesWritten">The number of bytes written into the output span. This can be used to slice the output for subsequent calls, if necessary.</param>
/// <param name="isFinalBlock">True (default) when the input span contains the entire data to decode.
/// Set to false only if it is known that the input span contains partial data with more data to follow.</param>
/// <returns>It returns the OperationStatus enum values:
/// - Done - on successful processing of the entire input span
/// - DestinationTooSmall - if there is not enough space in the output span to fit the decoded input
/// - NeedMoreData - only if isFinalBlock is false and the input is not a multiple of 4, otherwise the partial input would be considered as InvalidData
/// - InvalidData - if the input contains bytes outside of the expected base 64 range, or if it contains invalid/more than two padding characters,
///   or if the input is incomplete (i.e. not a multiple of 4) and isFinalBlock is true.
/// </returns>
public static unsafe OperationStatus DecodeFromUtf8(ReadOnlySpan<byte> utf8, Span<byte> bytes, out int bytesConsumed, out int bytesWritten, bool isFinalBlock = true)
{
    if (utf8.IsEmpty)
    {
        bytesConsumed = 0;
        bytesWritten = 0;
        return OperationStatus.Done;
    }

    fixed (byte* srcBytes = &MemoryMarshal.GetReference(utf8))
    fixed (byte* destBytes = &MemoryMarshal.GetReference(bytes))
    {
        int srcLength = utf8.Length & ~0x3; // only decode input up to the closest multiple of 4.
        int destLength = bytes.Length;
        int maxSrcLength = srcLength;
        int decodedLength = GetMaxDecodedFromUtf8Length(srcLength);

        // max. 2 padding chars
        if (destLength < decodedLength - 2)
        {
            // For overflow see comment below
            maxSrcLength = destLength / 3 * 4;
        }

        byte* src = srcBytes;
        byte* dest = destBytes;
        byte* srcEnd = srcBytes + (uint)srcLength;
        byte* srcMax = srcBytes + (uint)maxSrcLength;

        // Vectorized fast paths: AVX2 consumes 32 input bytes per iteration and needs
        // 45 bytes of headroom; SSSE3 consumes 16 and needs 24. Both stop early on
        // invalid input, letting the scalar loop below do error reporting.
        if (maxSrcLength >= 24)
        {
            byte* end = srcMax - 45;
            if (Avx2.IsSupported && (end >= src))
            {
                Avx2Decode(ref src, ref dest, end, maxSrcLength, destLength, srcBytes, destBytes);
                if (src == srcEnd)
                    goto DoneExit;
            }

            end = srcMax - 24;
            if (Ssse3.IsSupported && (end >= src))
            {
                Ssse3Decode(ref src, ref dest, end, maxSrcLength, destLength, srcBytes, destBytes);
                if (src == srcEnd)
                    goto DoneExit;
            }
        }

        // Last bytes could have padding characters, so process them separately and treat them as valid only if isFinalBlock is true
        // if isFinalBlock is false, padding characters are considered invalid
        int skipLastChunk = isFinalBlock ? 4 : 0;

        if (destLength >= decodedLength)
        {
            maxSrcLength = srcLength - skipLastChunk;
        }
        else
        {
            // This should never overflow since destLength here is less than int.MaxValue / 4 * 3 (i.e. 1610612733)
            // Therefore, (destLength / 3) * 4 will always be less than 2147483641
            Debug.Assert(destLength < (int.MaxValue / 4 * 3));
            maxSrcLength = (destLength / 3) * 4;
        }

        ref sbyte decodingMap = ref MemoryMarshal.GetReference(s_decodingMap);
        srcMax = srcBytes + (uint)maxSrcLength;

        // Scalar loop: decode 4 input characters into 3 output bytes per iteration.
        while (src < srcMax)
        {
            int result = Decode(src, ref decodingMap);
            if (result < 0)
                goto InvalidDataExit;

            WriteThreeLowOrderBytes(dest, result);
            src += 4;
            dest += 3;
        }

        if (maxSrcLength != srcLength - skipLastChunk)
            goto DestinationTooSmallExit;

        // If input is less than 4 bytes, srcLength == sourceIndex == 0
        // If input is not a multiple of 4, sourceIndex == srcLength != 0
        if (src == srcEnd)
        {
            if (isFinalBlock)
                goto InvalidDataExit;
            goto NeedMoreDataExit;
        }

        // if isFinalBlock is false, we will never reach this point

        // Handle last four bytes. There are 0, 1, 2 padding chars.
        uint t0 = srcEnd[-4];
        uint t1 = srcEnd[-3];
        uint t2 = srcEnd[-2];
        uint t3 = srcEnd[-1];

        int i0 = Unsafe.Add(ref decodingMap, (IntPtr)t0);
        int i1 = Unsafe.Add(ref decodingMap, (IntPtr)t1);

        i0 <<= 18;
        i1 <<= 12;

        i0 |= i1;

        byte* destMax = destBytes + (uint)destLength;

        if (t3 != EncodingPad)
        {
            // No padding: all four characters carry data -> 3 output bytes.
            int i2 = Unsafe.Add(ref decodingMap, (IntPtr)t2);
            int i3 = Unsafe.Add(ref decodingMap, (IntPtr)t3);

            i2 <<= 6;

            i0 |= i3;
            i0 |= i2;

            if (i0 < 0)
                goto InvalidDataExit;
            if (dest + 3 > destMax)
                goto DestinationTooSmallExit;

            WriteThreeLowOrderBytes(dest, i0);
            dest += 3;
        }
        else if (t2 != EncodingPad)
        {
            // One padding character: three data characters -> 2 output bytes.
            int i2 = Unsafe.Add(ref decodingMap, (IntPtr)t2);

            i2 <<= 6;

            i0 |= i2;

            if (i0 < 0)
                goto InvalidDataExit;
            if (dest + 2 > destMax)
                goto DestinationTooSmallExit;

            dest[0] = (byte)(i0 >> 16);
            dest[1] = (byte)(i0 >> 8);
            dest += 2;
        }
        else
        {
            // Two padding characters: two data characters -> 1 output byte.
            if (i0 < 0)
                goto InvalidDataExit;
            if (dest + 1 > destMax)
                goto DestinationTooSmallExit;

            dest[0] = (byte)(i0 >> 16);
            dest += 1;
        }

        src += 4;

        if (srcLength != utf8.Length)
            goto InvalidDataExit;

    DoneExit:
        bytesConsumed = (int)(src - srcBytes);
        bytesWritten = (int)(dest - destBytes);
        return OperationStatus.Done;

    DestinationTooSmallExit:
        if (srcLength != utf8.Length && isFinalBlock)
            goto InvalidDataExit; // if input is not a multiple of 4, and there is no more data, return invalid data instead

        bytesConsumed = (int)(src - srcBytes);
        bytesWritten = (int)(dest - destBytes);
        return OperationStatus.DestinationTooSmall;

    NeedMoreDataExit:
        bytesConsumed = (int)(src - srcBytes);
        bytesWritten = (int)(dest - destBytes);
        return OperationStatus.NeedMoreData;

    InvalidDataExit:
        bytesConsumed = (int)(src - srcBytes);
        bytesWritten = (int)(dest - destBytes);
        return OperationStatus.InvalidData;
    }
}
/// <summary>
/// Returns the maximum length (in bytes) of the result if you were to decode base 64 encoded text within a byte span of size "length".
/// </summary>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified <paramref name="length"/> is less than 0.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int GetMaxDecodedFromUtf8Length(int length)
{
    if (length < 0)
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.length);

    // Every complete 4-character quantum decodes to at most 3 bytes;
    // (length >> 2) * 3 cannot overflow for any non-negative int length.
    return (length >> 2) * 3;
}
/// <summary>
/// Decode the span of UTF-8 encoded text in base 64 (in-place) into binary data.
/// The decoded binary output is smaller than the text data contained in the input (the operation deflates the data).
/// If the input is not a multiple of 4, it will not decode any.
/// </summary>
/// <param name="buffer">The input span which contains the base 64 text data that needs to be decoded.</param>
/// <param name="bytesWritten">The number of bytes written into the buffer.</param>
/// <returns>It returns the OperationStatus enum values:
/// - Done - on successful processing of the entire input span
/// - InvalidData - if the input contains bytes outside of the expected base 64 range, or if it contains invalid/more than two padding characters,
///   or if the input is incomplete (i.e. not a multiple of 4).
/// It does not return DestinationTooSmall since that is not possible for base 64 decoding.
/// It does not return NeedMoreData since this method tramples the data in the buffer and
/// hence can only be called once with all the data in the buffer.
/// </returns>
public static unsafe OperationStatus DecodeFromUtf8InPlace(Span<byte> buffer, out int bytesWritten)
{
    if (buffer.IsEmpty)
    {
        bytesWritten = 0;
        return OperationStatus.Done;
    }

    fixed (byte* bufferBytes = &MemoryMarshal.GetReference(buffer))
    {
        int bufferLength = buffer.Length;
        uint sourceIndex = 0;
        uint destIndex = 0;

        // only decode input if it is a multiple of 4
        if (bufferLength != ((bufferLength >> 2) * 4))
            goto InvalidExit;
        if (bufferLength == 0)
            goto DoneExit;

        ref sbyte decodingMap = ref MemoryMarshal.GetReference(s_decodingMap);

        // Decode all complete quanta except the last one, which may contain padding.
        // Writing at destIndex never overtakes reading at sourceIndex (3 < 4).
        while (sourceIndex < bufferLength - 4)
        {
            int result = Decode(bufferBytes + sourceIndex, ref decodingMap);
            if (result < 0)
                goto InvalidExit;
            WriteThreeLowOrderBytes(bufferBytes + destIndex, result);
            destIndex += 3;
            sourceIndex += 4;
        }

        // Handle the final quantum separately: it may end with "=" or "==".
        uint t0 = bufferBytes[bufferLength - 4];
        uint t1 = bufferBytes[bufferLength - 3];
        uint t2 = bufferBytes[bufferLength - 2];
        uint t3 = bufferBytes[bufferLength - 1];

        int i0 = Unsafe.Add(ref decodingMap, (IntPtr)t0);
        int i1 = Unsafe.Add(ref decodingMap, (IntPtr)t1);

        i0 <<= 18;
        i1 <<= 12;

        i0 |= i1;

        if (t3 != EncodingPad)
        {
            // No padding: 3 output bytes.
            int i2 = Unsafe.Add(ref decodingMap, (IntPtr)t2);
            int i3 = Unsafe.Add(ref decodingMap, (IntPtr)t3);

            i2 <<= 6;

            i0 |= i3;
            i0 |= i2;

            if (i0 < 0)
                goto InvalidExit;
            WriteThreeLowOrderBytes(bufferBytes + destIndex, i0);
            destIndex += 3;
        }
        else if (t2 != EncodingPad)
        {
            // One padding character: 2 output bytes.
            int i2 = Unsafe.Add(ref decodingMap, (IntPtr)t2);

            i2 <<= 6;

            i0 |= i2;

            if (i0 < 0)
                goto InvalidExit;
            bufferBytes[destIndex] = (byte)(i0 >> 16);
            bufferBytes[destIndex + 1] = (byte)(i0 >> 8);
            destIndex += 2;
        }
        else
        {
            // Two padding characters: 1 output byte.
            if (i0 < 0)
                goto InvalidExit;
            bufferBytes[destIndex] = (byte)(i0 >> 16);
            destIndex += 1;
        }

    DoneExit:
        bytesWritten = (int)destIndex;
        return OperationStatus.Done;

    InvalidExit:
        bytesWritten = (int)destIndex;
        return OperationStatus.InvalidData;
    }
}
// Vectorized decode: consumes 32 base-64 characters per iteration, producing 24
// output bytes, until srcEnd is passed or an invalid character forces a fallback
// to the scalar loop. Advances srcBytes/destBytes past whatever was processed.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe void Avx2Decode(ref byte* srcBytes, ref byte* destBytes, byte* srcEnd, int sourceLength, int destLength, byte* srcStart, byte* destStart)
{
    // If we have AVX2 support, pick off 32 bytes at a time for as long as we can,
    // but make sure that we quit before seeing any == markers at the end of the
    // string. Also, because we write 8 zeroes at the end of the output, ensure
    // that there are at least 11 valid bytes of input data remaining to close the
    // gap. 32 + 2 + 11 = 45 bytes.

    // See SSSE3-version below for an explanation of how the code works.

    // The JIT won't hoist these "constants", so help it
    Vector256<sbyte> lutHi = ReadVector<Vector256<sbyte>>(s_avxDecodeLutHi);
    Vector256<sbyte> lutLo = ReadVector<Vector256<sbyte>>(s_avxDecodeLutLo);
    Vector256<sbyte> lutShift = ReadVector<Vector256<sbyte>>(s_avxDecodeLutShift);
    Vector256<sbyte> mask2F = Vector256.Create((sbyte)'/');
    Vector256<sbyte> mergeConstant0 = Vector256.Create(0x01400140).AsSByte();
    Vector256<short> mergeConstant1 = Vector256.Create(0x00011000).AsInt16();
    Vector256<sbyte> packBytesInLaneMask = ReadVector<Vector256<sbyte>>(s_avxDecodePackBytesInLaneMask);
    Vector256<int> packLanesControl = ReadVector<Vector256<sbyte>>(s_avxDecodePackLanesControl).AsInt32();

    byte* src = srcBytes;
    byte* dest = destBytes;

    //while (remaining >= 45)
    do
    {
        AssertRead<Vector256<sbyte>>(src, srcStart, sourceLength);
        Vector256<sbyte> str = Avx.LoadVector256(src).AsSByte();

        // Validate all 32 characters via the hi/lo-nibble LUTs: any non-zero AND
        // marks an invalid character, so bail out to the scalar path.
        Vector256<sbyte> hiNibbles = Avx2.And(Avx2.ShiftRightLogical(str.AsInt32(), 4).AsSByte(), mask2F);
        Vector256<sbyte> loNibbles = Avx2.And(str, mask2F);
        Vector256<sbyte> hi = Avx2.Shuffle(lutHi, hiNibbles);
        Vector256<sbyte> lo = Avx2.Shuffle(lutLo, loNibbles);

        if (!Avx.TestZ(lo, hi))
            break;

        // Map each character to its 6-bit value by adding a per-class offset.
        Vector256<sbyte> eq2F = Avx2.CompareEqual(str, mask2F);
        Vector256<sbyte> shift = Avx2.Shuffle(lutShift, Avx2.Add(eq2F, hiNibbles));
        str = Avx2.Add(str, shift);

        // in, lower lane, bits, upper case are most significant bits, lower case are least significant bits:
        // 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ
        // 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG
        // 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD
        // 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA
        Vector256<short> merge_ab_and_bc = Avx2.MultiplyAddAdjacent(str.AsByte(), mergeConstant0);
        // 0000kkkk LLllllll 0000JJJJ JJjjKKKK
        // 0000hhhh IIiiiiii 0000GGGG GGggHHHH
        // 0000eeee FFffffff 0000DDDD DDddEEEE
        // 0000bbbb CCcccccc 0000AAAA AAaaBBBB
        Vector256<int> output = Avx2.MultiplyAddAdjacent(merge_ab_and_bc, mergeConstant1);
        // 00000000 JJJJJJjj KKKKkkkk LLllllll
        // 00000000 GGGGGGgg HHHHhhhh IIiiiiii
        // 00000000 DDDDDDdd EEEEeeee FFffffff
        // 00000000 AAAAAAaa BBBBbbbb CCcccccc

        // Pack bytes together in each lane:
        output = Avx2.Shuffle(output.AsSByte(), packBytesInLaneMask).AsInt32();
        // 00000000 00000000 00000000 00000000
        // LLllllll KKKKkkkk JJJJJJjj IIiiiiii
        // HHHHhhhh GGGGGGgg FFffffff EEEEeeee
        // DDDDDDdd CCcccccc BBBBbbbb AAAAAAaa

        // Pack lanes
        str = Avx2.PermuteVar8x32(output, packLanesControl).AsSByte();

        AssertWrite<Vector256<sbyte>>(dest, destStart, destLength);
        Avx.Store(dest, str.AsByte());

        src += 32;
        dest += 24;
    }
    while (src <= srcEnd);

    srcBytes = src;
    destBytes = dest;
}
// Vectorized decode: consumes 16 base-64 characters per iteration, producing 12
// output bytes, until srcEnd is passed or an invalid character forces a fallback
// to the scalar loop. Advances srcBytes/destBytes past whatever was processed.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe void Ssse3Decode(ref byte* srcBytes, ref byte* destBytes, byte* srcEnd, int sourceLength, int destLength, byte* srcStart, byte* destStart)
{
    // If we have SSSE3 support, pick off 16 bytes at a time for as long as we can,
    // but make sure that we quit before seeing any == markers at the end of the
    // string. Also, because we write four zeroes at the end of the output, ensure
    // that there are at least 6 valid bytes of input data remaining to close the
    // gap. 16 + 2 + 6 = 24 bytes.

    // The input consists of six character sets in the Base64 alphabet,
    // which we need to map back to the 6-bit values they represent.
    // There are three ranges, two singles, and then there's the rest.
    //
    //  #  From       To        Add  Characters
    //  1  [43]       [62]      +19  +
    //  2  [47]       [63]      +16  /
    //  3  [48..57]   [52..61]   +4  0..9
    //  4  [65..90]   [0..25]   -65  A..Z
    //  5  [97..122]  [26..51]  -71  a..z
    // (6) Everything else => invalid input

    // We will use LUTS for character validation & offset computation
    // Remember that 0x2X and 0x0X are the same index for _mm_shuffle_epi8,
    // this allows to mask with 0x2F instead of 0x0F and thus save one constant declaration (register and/or memory access)

    // For offsets:
    // Perfect hash for lut = ((src>>4)&0x2F)+((src==0x2F)?0xFF:0x00)
    // 0000 = garbage
    // 0001 = /
    // 0010 = +
    // 0011 = 0-9
    // 0100 = A-Z
    // 0101 = A-Z
    // 0110 = a-z
    // 0111 = a-z
    // 1000 >= garbage

    // For validation, here's the table.
    // A character is valid if and only if the AND of the 2 lookups equals 0:

    // hi \ lo              0000 0001 0010 0011 0100 0101 0110 0111 1000 1001 1010 1011 1100 1101 1110 1111
    //      LUT             0x15 0x11 0x11 0x11 0x11 0x11 0x11 0x11 0x11 0x11 0x13 0x1A 0x1B 0x1B 0x1B 0x1A

    // 0000 0X10 char        NUL  SOH  STX  ETX  EOT  ENQ  ACK  BEL   BS   HT   LF   VT   FF   CR   SO   SI
    //           andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10

    // 0001 0x10 char        DLE  DC1  DC2  DC3  DC4  NAK  SYN  ETB  CAN   EM  SUB  ESC   FS   GS   RS   US
    //           andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10

    // 0010 0x01 char               !    "    #    $    %    &    '    (    )    *    +    ,    -    .    /
    //           andlut     0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x00 0x01 0x01 0x01 0x00

    // 0011 0x02 char          0    1    2    3    4    5    6    7    8    9    :    ;    <    =    >    ?
    //           andlut     0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x02 0x02 0x02 0x02 0x02 0x02

    // 0100 0x04 char          @    A    B    C    D    E    F    G    H    I    J    K    L    M    N    0
    //           andlut     0x04 0x00 0x00 0x00 0X00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00

    // 0101 0x08 char          P    Q    R    S    T    U    V    W    X    Y    Z    [    \    ]    ^    _
    //           andlut     0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x08 0x08 0x08 0x08 0x08

    // 0110 0x04 char          `    a    b    c    d    e    f    g    h    i    j    k    l    m    n    o
    //           andlut     0x04 0x00 0x00 0x00 0X00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00

    // 0111 0X08 char          p    q    r    s    t    u    v    w    x    y    z    {    |    }    ~
    //           andlut     0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x08 0x08 0x08 0x08 0x08

    // 1000 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10
    // 1001 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10
    // 1010 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10
    // 1011 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10
    // 1100 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10
    // 1101 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10
    // 1110 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10
    // 1111 0x10 andlut     0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10

    // The JIT won't hoist these "constants", so help it
    Vector128<sbyte> lutHi = ReadVector<Vector128<sbyte>>(s_sseDecodeLutHi);
    Vector128<sbyte> lutLo = ReadVector<Vector128<sbyte>>(s_sseDecodeLutLo);
    Vector128<sbyte> lutShift = ReadVector<Vector128<sbyte>>(s_sseDecodeLutShift);
    Vector128<sbyte> mask2F = Vector128.Create((sbyte)'/');
    Vector128<sbyte> mergeConstant0 = Vector128.Create(0x01400140).AsSByte();
    Vector128<short> mergeConstant1 = Vector128.Create(0x00011000).AsInt16();
    Vector128<sbyte> packBytesMask = ReadVector<Vector128<sbyte>>(s_sseDecodePackBytesMask);
    Vector128<sbyte> zero = Vector128<sbyte>.Zero;

    byte* src = srcBytes;
    byte* dest = destBytes;

    //while (remaining >= 24)
    do
    {
        AssertRead<Vector128<sbyte>>(src, srcStart, sourceLength);
        Vector128<sbyte> str = Sse2.LoadVector128(src).AsSByte();

        // lookup
        Vector128<sbyte> hiNibbles = Sse2.And(Sse2.ShiftRightLogical(str.AsInt32(), 4).AsSByte(), mask2F);
        Vector128<sbyte> loNibbles = Sse2.And(str, mask2F);
        Vector128<sbyte> hi = Ssse3.Shuffle(lutHi, hiNibbles);
        Vector128<sbyte> lo = Ssse3.Shuffle(lutLo, loNibbles);

        // Check for invalid input: if any "and" values from lo and hi are not zero,
        // fall back on bytewise code to do error checking and reporting:
        if (Sse2.MoveMask(Sse2.CompareGreaterThan(Sse2.And(lo, hi), zero)) != 0)
            break;

        Vector128<sbyte> eq2F = Sse2.CompareEqual(str, mask2F);
        Vector128<sbyte> shift = Ssse3.Shuffle(lutShift, Sse2.Add(eq2F, hiNibbles));

        // Now simply add the delta values to the input:
        str = Sse2.Add(str, shift);

        // in, bits, upper case are most significant bits, lower case are least significant bits
        // 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ
        // 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG
        // 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD
        // 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA
        Vector128<short> merge_ab_and_bc = Ssse3.MultiplyAddAdjacent(str.AsByte(), mergeConstant0);
        // 0000kkkk LLllllll 0000JJJJ JJjjKKKK
        // 0000hhhh IIiiiiii 0000GGGG GGggHHHH
        // 0000eeee FFffffff 0000DDDD DDddEEEE
        // 0000bbbb CCcccccc 0000AAAA AAaaBBBB
        Vector128<int> output = Sse2.MultiplyAddAdjacent(merge_ab_and_bc, mergeConstant1);
        // 00000000 JJJJJJjj KKKKkkkk LLllllll
        // 00000000 GGGGGGgg HHHHhhhh IIiiiiii
        // 00000000 DDDDDDdd EEEEeeee FFffffff
        // 00000000 AAAAAAaa BBBBbbbb CCcccccc

        // Pack bytes together:
        str = Ssse3.Shuffle(output.AsSByte(), packBytesMask);
        // 00000000 00000000 00000000 00000000
        // LLllllll KKKKkkkk JJJJJJjj IIiiiiii
        // HHHHhhhh GGGGGGgg FFffffff EEEEeeee
        // DDDDDDdd CCcccccc BBBBbbbb AAAAAAaa

        AssertWrite<Vector128<sbyte>>(dest, destStart, destLength);
        Sse2.Store(dest, str.AsByte());

        src += 16;
        dest += 12;
    }
    while (src <= srcEnd);

    srcBytes = src;
    destBytes = dest;
}
// Decodes four base-64 characters into 24 bits packed in the low-order three
// bytes of the result. A negative result signals that at least one input byte
// was not a valid base-64 character: invalid map entries are -1, and the sign
// bit survives the shifts and ORs.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe int Decode(byte* encodedBytes, ref sbyte decodingMap)
{
    uint c0 = encodedBytes[0];
    uint c1 = encodedBytes[1];
    uint c2 = encodedBytes[2];
    uint c3 = encodedBytes[3];

    int v0 = Unsafe.Add(ref decodingMap, (IntPtr)c0);
    int v1 = Unsafe.Add(ref decodingMap, (IntPtr)c1);
    int v2 = Unsafe.Add(ref decodingMap, (IntPtr)c2);
    int v3 = Unsafe.Add(ref decodingMap, (IntPtr)c3);

    // 6 bits per character, first character most significant.
    return (v0 << 18) | (v1 << 12) | (v2 << 6) | v3;
}
// Writes the low-order 24 bits of 'value' to 'destination', most significant
// byte first — i.e. the three decoded bytes of one base-64 quantum.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe void WriteThreeLowOrderBytes(byte* destination, int value)
{
    destination[0] = (byte)(value >> 16); // bits 16-23
    destination[1] = (byte)(value >> 8);  // bits 8-15
    destination[2] = (byte)value;         // bits 0-7
}
// Pre-computing this table using a custom string(s_characters) and GenerateDecodingMapAndVerify (found in tests)
// Maps an input byte to its 6-bit value; -1 (0xFF when viewed as an unsigned byte) marks bytes outside the alphabet.
private static ReadOnlySpan<sbyte> s_decodingMap => new sbyte[] {
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, //62 is placed at index 43 (for +), 63 at index 47 (for /)
    52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1, //52-61 are placed at index 48-57 (for 0-9); note '=' (index 61) maps to -1 — padding is presumably handled before lookup
    -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
    15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, //0-25 are placed at index 65-90 (for A-Z)
    -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
    41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1, //26-51 are placed at index 97-122 (for a-z)
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // Bytes over 122 ('z') are invalid and cannot be decoded
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // Hence, padding the map with -1 (0xFF as a byte), which indicates invalid input
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
};
// PSHUFB control that gathers the 12 decoded payload bytes of a 16-byte vector
// into the low 12 lanes; entries of -1 (high bit set) zero the remaining lanes.
private static ReadOnlySpan<sbyte> s_sseDecodePackBytesMask => new sbyte[] {
    2, 1, 0, 6,
    5, 4, 10, 9,
    8, 14, 13, 12,
    -1, -1, -1, -1
};
// Classification LUT indexed by the low nibble of each input byte; presumably
// combined with s_sseDecodeLutHi earlier in the vectorized loop (not shown here)
// to reject characters outside the alphabet — TODO confirm against the validation code.
private static ReadOnlySpan<sbyte> s_sseDecodeLutLo => new sbyte[] {
    0x15, 0x11, 0x11, 0x11,
    0x11, 0x11, 0x11, 0x11,
    0x11, 0x11, 0x13, 0x1A,
    0x1B, 0x1B, 0x1B, 0x1A
};
// Classification LUT indexed by the high nibble of each input byte.
private static ReadOnlySpan<sbyte> s_sseDecodeLutHi => new sbyte[] {
    0x10, 0x10, 0x01, 0x02,
    0x04, 0x08, 0x04, 0x08,
    0x10, 0x10, 0x10, 0x10,
    0x10, 0x10, 0x10, 0x10
};
// Per-class deltas added to the input bytes to map ASCII to 6-bit values:
// e.g. -65 maps 'A'..'Z' (65..90) to 0..25 and -71 maps 'a'..'z' (97..122) to 26..51.
private static ReadOnlySpan<sbyte> s_sseDecodeLutShift => new sbyte[] {
    0, 16, 19, 4,
    -65, -65, -71, -71,
    0, 0, 0, 0,
    0, 0, 0, 0
};
// AVX2 variant of the SSE pack mask: the same per-128-bit-lane shuffle control,
// duplicated for both lanes of a 256-bit vector.
private static ReadOnlySpan<sbyte> s_avxDecodePackBytesInLaneMask => new sbyte[] {
    2, 1, 0, 6,
    5, 4, 10, 9,
    8, 14, 13, 12,
    -1, -1, -1, -1,
    2, 1, 0, 6,
    5, 4, 10, 9,
    8, 14, 13, 12,
    -1, -1, -1, -1
};
// Appears to be a 32-bit-element permute control (e.g. for VPERMD) that moves the
// packed 12-byte groups from both 128-bit lanes next to each other — TODO confirm
// against the AVX2 decode loop, which is outside this view.
private static ReadOnlySpan<sbyte> s_avxDecodePackLanesControl => new sbyte[] {
    0, 0, 0, 0,
    1, 0, 0, 0,
    2, 0, 0, 0,
    4, 0, 0, 0,
    5, 0, 0, 0,
    6, 0, 0, 0,
    -1, -1, -1, -1,
    -1, -1, -1, -1
};
// Low-nibble classification LUT; same 16 values as s_sseDecodeLutLo,
// duplicated for both 128-bit lanes.
private static ReadOnlySpan<sbyte> s_avxDecodeLutLo => new sbyte[] {
    0x15, 0x11, 0x11, 0x11,
    0x11, 0x11, 0x11, 0x11,
    0x11, 0x11, 0x13, 0x1A,
    0x1B, 0x1B, 0x1B, 0x1A,
    0x15, 0x11, 0x11, 0x11,
    0x11, 0x11, 0x11, 0x11,
    0x11, 0x11, 0x13, 0x1A,
    0x1B, 0x1B, 0x1B, 0x1A
};
// High-nibble classification LUT; same 16 values as s_sseDecodeLutHi,
// duplicated for both 128-bit lanes.
private static ReadOnlySpan<sbyte> s_avxDecodeLutHi => new sbyte[] {
    0x10, 0x10, 0x01, 0x02,
    0x04, 0x08, 0x04, 0x08,
    0x10, 0x10, 0x10, 0x10,
    0x10, 0x10, 0x10, 0x10,
    0x10, 0x10, 0x01, 0x02,
    0x04, 0x08, 0x04, 0x08,
    0x10, 0x10, 0x10, 0x10,
    0x10, 0x10, 0x10, 0x10
};
// Per-class ASCII-to-6-bit deltas; same 16 values as s_sseDecodeLutShift,
// duplicated for both 128-bit lanes.
private static ReadOnlySpan<sbyte> s_avxDecodeLutShift => new sbyte[] {
    0, 16, 19, 4,
    -65, -65, -71, -71,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 16, 19, 4,
    -65, -65, -71, -71,
    0, 0, 0, 0,
    0, 0, 0, 0
};
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Diagnostics.Contracts;
using System.Linq;
using Validation;
using Xunit;
namespace System.Collections.Immutable.Test
{
public abstract class ImmutableDictionaryTestBase : ImmutablesTestBase
{
// Each of these [Fact] methods delegates to a protected helper so that every
// derived test class exercises its own dictionary implementation (via Empty<K, V>()).
[Fact]
public virtual void EmptyTest()
{
    this.EmptyTestHelper(Empty<int, bool>(), 5);
}
[Fact]
public void EnumeratorTest()
{
    this.EnumeratorTestHelper(this.Empty<int, GenericParameterHelper>());
}
[Fact]
public void ContainsTest()
{
    this.ContainsTestHelper(Empty<int, string>(), 5, "foo");
}
[Fact]
public void RemoveTest()
{
    this.RemoveTestHelper(Empty<int, GenericParameterHelper>(), 5);
}
[Fact]
public void KeysTest()
{
    this.KeysTestHelper(Empty<int, bool>(), 5);
}
[Fact]
public void ValuesTest()
{
    this.ValuesTestHelper(Empty<int, bool>(), 5);
}
[Fact]
public void AddAscendingTest()
{
    this.AddAscendingTestHelper(Empty<int, GenericParameterHelper>());
}
[Fact]
public void AddRangeTest()
{
    // Bulk-add 100 entries and verify the keys and tree invariants.
    var map = Empty<int, GenericParameterHelper>();
    map = map.AddRange(Enumerable.Range(1, 100).Select(n => new KeyValuePair<int, GenericParameterHelper>(n, new GenericParameterHelper())));
    CollectionAssertAreEquivalent(map.Select(kv => kv.Key).ToList(), Enumerable.Range(1, 100).ToList());
    this.VerifyAvlTreeState(map);
    Assert.Equal(100, map.Count);
    // Test optimization for empty map: adding a whole map to an empty one
    // should return the added map instance itself, whatever form it takes.
    var map2 = Empty<int, GenericParameterHelper>();
    var jointMap = map2.AddRange(map);
    Assert.Same(map, jointMap);
    jointMap = map2.AddRange(map.ToReadOnlyDictionary());
    Assert.Same(map, jointMap);
    jointMap = map2.AddRange(map.ToBuilder());
    Assert.Same(map, jointMap);
}
[Fact]
public void AddDescendingTest()
{
    this.AddDescendingTestHelper(Empty<int, GenericParameterHelper>());
}
[Fact]
public void AddRemoveRandomDataTest()
{
    this.AddRemoveRandomDataTestHelper(Empty<double, GenericParameterHelper>());
}
[Fact]
public void AddRemoveEnumerableTest()
{
    this.AddRemoveEnumerableTestHelper(Empty<int, int>());
}
[Fact]
public void SetItemTest()
{
    var map = this.Empty<string, int>()
        .SetItem("Microsoft", 100)
        .SetItem("Corporation", 50);
    Assert.Equal(2, map.Count);
    // SetItem on an existing key replaces the value without growing the map.
    map = map.SetItem("Microsoft", 200);
    Assert.Equal(2, map.Count);
    Assert.Equal(200, map["Microsoft"]);
    // Set it to the same thing again and make sure it's all good.
    // (No-op sets must return the same instance.)
    var sameMap = map.SetItem("Microsoft", 200);
    Assert.Same(map, sameMap);
}
[Fact]
public void SetItemsTest()
{
    var template = new Dictionary<string, int>
    {
        { "Microsoft", 100 },
        { "Corporation", 50 },
    };
    var map = this.Empty<string, int>().SetItems(template);
    Assert.Equal(2, map.Count);
    // SetItems both overwrites existing keys and adds new ones.
    var changes = new Dictionary<string, int>
    {
        { "Microsoft", 150 },
        { "Dogs", 90 },
    };
    map = map.SetItems(changes);
    Assert.Equal(3, map.Count);
    Assert.Equal(150, map["Microsoft"]);
    Assert.Equal(50, map["Corporation"]);
    Assert.Equal(90, map["Dogs"]);
    // When the sequence contains duplicate keys, the last entry wins.
    map = map.SetItems(
        new[] {
            new KeyValuePair<string, int>("Microsoft", 80),
            new KeyValuePair<string, int>("Microsoft", 70),
        });
    Assert.Equal(3, map.Count);
    Assert.Equal(70, map["Microsoft"]);
    Assert.Equal(50, map["Corporation"]);
    Assert.Equal(90, map["Dogs"]);
    map = this.Empty<string, int>().SetItems(new[] { // use an array for code coverage
        new KeyValuePair<string, int>("a", 1), new KeyValuePair<string, int>("b", 2),
        new KeyValuePair<string, int>("a", 3),
    });
    Assert.Equal(2, map.Count);
    Assert.Equal(3, map["a"]);
    Assert.Equal(2, map["b"]);
}
[Fact]
public void ContainsKeyTest()
{
    this.ContainsKeyTestHelper(Empty<int, GenericParameterHelper>(), 1, new GenericParameterHelper());
}
[Fact]
public void IndexGetNonExistingKeyThrowsTest()
{
    Assert.Throws<KeyNotFoundException>(() => this.Empty<int, int>()[3]);
}
[Fact]
public void IndexGetTest()
{
    var map = this.Empty<int, int>().Add(3, 5);
    Assert.Equal(5, map[3]);
}
// The ToReadOnlyDictionary() wrapper must reject all IDictionary mutation members.
[Fact]
public void DictionaryRemoveThrowsTest()
{
    IDictionary<int, int> map = this.Empty<int, int>().Add(5, 3).ToReadOnlyDictionary();
    Assert.Throws<NotSupportedException>(() => map.Remove(5));
}
[Fact]
public void DictionaryAddThrowsTest()
{
    IDictionary<int, int> map = this.Empty<int, int>().ToReadOnlyDictionary();
    Assert.Throws<NotSupportedException>(() => map.Add(5, 3));
}
[Fact]
public void DictionaryIndexSetThrowsTest()
{
    IDictionary<int, int> map = this.Empty<int, int>().ToReadOnlyDictionary();
    Assert.Throws<NotSupportedException>(() => map[3] = 5);
}
[Fact]
public void EqualsTest()
{
    Assert.False(Empty<int, int>().Equals(null));
    Assert.False(Empty<int, int>().Equals("hi"));
    Assert.True(Empty<int, int>().Equals(Empty<int, int>()));
    // Equality is reference-based: two maps with identical contents are NOT Equal.
    Assert.False(Empty<int, int>().Add(3, 2).Equals(Empty<int, int>().Add(3, 2)));
    Assert.False(Empty<int, int>().Add(3, 2).Equals(Empty<int, int>().Add(3, 1)));
    Assert.False(Empty<int, int>().Add(5, 1).Equals(Empty<int, int>().Add(3, 1)));
    Assert.False(Empty<int, int>().Add(3, 1).Add(5, 1).Equals(Empty<int, int>().Add(3, 1)));
    Assert.False(Empty<int, int>().Add(3, 1).Equals(Empty<int, int>().Add(3, 1).Add(5, 1)));
    // These pass only if ToReadOnlyDictionary() on the shared empty instance
    // returns that same instance — presumably an identity conversion; verify against implementation.
    Assert.True(Empty<int, int>().ToReadOnlyDictionary().Equals(Empty<int, int>()));
    Assert.True(Empty<int, int>().Equals(Empty<int, int>().ToReadOnlyDictionary()));
    Assert.True(Empty<int, int>().ToReadOnlyDictionary().Equals(Empty<int, int>().ToReadOnlyDictionary()));
    Assert.False(Empty<int, int>().Add(3, 1).ToReadOnlyDictionary().Equals(Empty<int, int>()));
    Assert.False(Empty<int, int>().Equals(Empty<int, int>().Add(3, 1).ToReadOnlyDictionary()));
    Assert.False(Empty<int, int>().ToReadOnlyDictionary().Equals(Empty<int, int>().Add(3, 1).ToReadOnlyDictionary()));
}
/// <summary>
/// Verifies that the GetHashCode method returns the standard one.
/// </summary>
[Fact]
public void GetHashCodeTest()
{
    var dictionary = Empty<string, int>();
    Assert.Equal(EqualityComparer<object>.Default.GetHashCode(dictionary), dictionary.GetHashCode());
}
// The explicit interface implementations must behave as read-only collections:
// queries work, mutation members throw NotSupportedException.
[Fact]
public void ICollectionOfKVMembers()
{
    var dictionary = (ICollection<KeyValuePair<string, int>>)Empty<string, int>();
    Assert.Throws<NotSupportedException>(() => dictionary.Add(new KeyValuePair<string, int>()));
    Assert.Throws<NotSupportedException>(() => dictionary.Remove(new KeyValuePair<string, int>()));
    Assert.Throws<NotSupportedException>(() => dictionary.Clear());
    Assert.True(dictionary.IsReadOnly);
}
[Fact]
public void ICollectionMembers()
{
    ((ICollection)Empty<string, int>()).CopyTo(new object[0], 0);
    var dictionary = (ICollection)Empty<string, int>().Add("a", 1);
    Assert.True(dictionary.IsSynchronized);
    Assert.NotNull(dictionary.SyncRoot);
    Assert.Same(dictionary.SyncRoot, dictionary.SyncRoot);
    // CopyTo must honor the starting index, leaving earlier slots untouched.
    var array = new object[2];
    dictionary.CopyTo(array, 1);
    Assert.Null(array[0]);
    Assert.Equal(new DictionaryEntry("a", 1), (DictionaryEntry)array[1]);
}
[Fact]
public void IDictionaryOfKVMembers()
{
    var dictionary = (IDictionary<string, int>)Empty<string, int>().Add("c", 3);
    Assert.Throws<NotSupportedException>(() => dictionary.Add("a", 1));
    Assert.Throws<NotSupportedException>(() => dictionary.Remove("a"));
    Assert.Throws<NotSupportedException>(() => dictionary["a"] = 2);
    Assert.Throws<KeyNotFoundException>(() => dictionary["a"]);
    Assert.Equal(3, dictionary["c"]);
}
[Fact]
public void IDictionaryMembers()
{
    var dictionary = (IDictionary)Empty<string, int>().Add("c", 3);
    Assert.Throws<NotSupportedException>(() => dictionary.Add("a", 1));
    Assert.Throws<NotSupportedException>(() => dictionary.Remove("a"));
    Assert.Throws<NotSupportedException>(() => dictionary["a"] = 2);
    Assert.Throws<NotSupportedException>(() => dictionary.Clear());
    Assert.False(dictionary.Contains("a"));
    Assert.True(dictionary.Contains("c"));
    Assert.Throws<KeyNotFoundException>(() => dictionary["a"]);
    Assert.Equal(3, dictionary["c"]);
    Assert.True(dictionary.IsFixedSize);
    Assert.True(dictionary.IsReadOnly);
    Assert.Equal(new[] { "c" }, dictionary.Keys.Cast<string>().ToArray());
    Assert.Equal(new[] { 3 }, dictionary.Values.Cast<int>().ToArray());
}
// Verifies the IDictionaryEnumerator contract: Current/Key/Value/Entry throw
// before the first MoveNext, after enumeration ends, and again after Reset —
// and become valid only while positioned on an element.
[Fact]
public void IDictionaryEnumerator()
{
    var dictionary = (IDictionary)Empty<string, int>().Add("a", 1);
    var enumerator = dictionary.GetEnumerator();
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    Assert.Throws<InvalidOperationException>(() => enumerator.Key);
    Assert.Throws<InvalidOperationException>(() => enumerator.Value);
    Assert.Throws<InvalidOperationException>(() => enumerator.Entry);
    Assert.True(enumerator.MoveNext());
    Assert.Equal(enumerator.Entry, enumerator.Current);
    Assert.Equal(enumerator.Key, enumerator.Entry.Key);
    Assert.Equal(enumerator.Value, enumerator.Entry.Value);
    Assert.Equal("a", enumerator.Key);
    Assert.Equal(1, enumerator.Value);
    Assert.False(enumerator.MoveNext());
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    Assert.Throws<InvalidOperationException>(() => enumerator.Key);
    Assert.Throws<InvalidOperationException>(() => enumerator.Value);
    Assert.Throws<InvalidOperationException>(() => enumerator.Entry);
    Assert.False(enumerator.MoveNext());
    // Reset must restore the "before first element" state and allow re-enumeration.
    enumerator.Reset();
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    Assert.Throws<InvalidOperationException>(() => enumerator.Key);
    Assert.Throws<InvalidOperationException>(() => enumerator.Value);
    Assert.Throws<InvalidOperationException>(() => enumerator.Entry);
    Assert.True(enumerator.MoveNext());
    Assert.Equal(enumerator.Key, ((DictionaryEntry)enumerator.Current).Key);
    Assert.Equal(enumerator.Value, ((DictionaryEntry)enumerator.Current).Value);
    Assert.Equal("a", enumerator.Key);
    Assert.Equal(1, enumerator.Value);
    Assert.False(enumerator.MoveNext());
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    Assert.Throws<InvalidOperationException>(() => enumerator.Key);
    Assert.Throws<InvalidOperationException>(() => enumerator.Value);
    Assert.Throws<InvalidOperationException>(() => enumerator.Entry);
    Assert.False(enumerator.MoveNext());
}
// With a case-insensitive comparer, TryGetKey must return the key instance as
// originally stored; on a miss it returns false and echoes back the requested key.
[Fact]
public void TryGetKey()
{
    var dictionary = Empty<int>(StringComparer.OrdinalIgnoreCase)
        .Add("a", 1);
    string actualKey;
    Assert.True(dictionary.TryGetKey("a", out actualKey));
    Assert.Equal("a", actualKey);
    Assert.True(dictionary.TryGetKey("A", out actualKey));
    Assert.Equal("a", actualKey);
    Assert.False(dictionary.TryGetKey("b", out actualKey));
    Assert.Equal("b", actualKey);
}
// Asserts all the invariants an empty dictionary must satisfy.
protected void EmptyTestHelper<K, V>(IImmutableDictionary<K, V> empty, K someKey)
{
    Assert.Same(empty, empty.Clear());
    Assert.Equal(0, empty.Count);
    Assert.Equal(0, empty.Count());
    Assert.Equal(0, empty.Keys.Count());
    Assert.Equal(0, empty.Values.Count());
    Assert.Same(EqualityComparer<V>.Default, GetValueComparer(empty));
    Assert.False(empty.ContainsKey(someKey));
    Assert.False(empty.Contains(new KeyValuePair<K, V>(someKey, default(V))));
    Assert.Equal(default(V), empty.GetValueOrDefault(someKey));
    V value;
    Assert.False(empty.TryGetValue(someKey, out value));
    Assert.Equal(default(V), value);
}
// Adds a key/value pair, asserting that a new instance is produced containing
// the new entry (and that the original is untouched), then returns the new map.
private IImmutableDictionary<TKey, TValue> AddTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key, TValue value) where TKey : IComparable<TKey>
{
    Contract.Requires(map != null);
    Contract.Requires(key != null);
    IImmutableDictionary<TKey, TValue> addedMap = map.Add(key, value);
    Assert.NotSame(map, addedMap);
    ////Assert.Equal(map.Count + 1, addedMap.Count);
    Assert.False(map.ContainsKey(key));
    Assert.True(addedMap.ContainsKey(key));
    AssertAreSame(value, addedMap.GetValueOrDefault(key));
    this.VerifyAvlTreeState(addedMap);
    return addedMap;
}
// Adds keys 0..9 in increasing order, then verifies all ten are present.
protected void AddAscendingTestHelper(IImmutableDictionary<int, GenericParameterHelper> map)
{
    Contract.Requires(map != null);
    foreach (int key in Enumerable.Range(0, 10))
    {
        map = this.AddTestHelper(map, key, new GenericParameterHelper(key));
    }
    Assert.Equal(10, map.Count);
    foreach (int key in Enumerable.Range(0, 10))
    {
        Assert.True(map.ContainsKey(key));
    }
}
// Adds keys 10 down to 1 in decreasing order, then verifies all ten are present.
protected void AddDescendingTestHelper(IImmutableDictionary<int, GenericParameterHelper> map)
{
    foreach (int key in Enumerable.Range(1, 10).Reverse())
    {
        map = this.AddTestHelper(map, key, new GenericParameterHelper(key));
    }
    Assert.Equal(10, map.Count);
    foreach (int key in Enumerable.Range(1, 10).Reverse())
    {
        Assert.True(map.ContainsKey(key));
    }
}
// Adds a batch of random keys, verifies membership, then removes them all,
// checking the AVL tree invariants after every mutation.
protected void AddRemoveRandomDataTestHelper(IImmutableDictionary<double, GenericParameterHelper> map)
{
    Contract.Requires(map != null);
    double[] inputs = GenerateDummyFillData();
    for (int i = 0; i < inputs.Length; i++)
    {
        map = this.AddTestHelper(map, inputs[i], new GenericParameterHelper());
    }
    Assert.Equal(inputs.Length, map.Count);
    for (int i = 0; i < inputs.Length; i++)
    {
        Assert.True(map.ContainsKey(inputs[i]));
    }
    for (int i = 0; i < inputs.Length; i++)
    {
        map = map.Remove(inputs[i]);
        this.VerifyAvlTreeState(map);
    }
    Assert.Equal(0, map.Count);
}
// Exercises AddRange/RemoveRange, including the no-op (empty sequence) fast paths
// which must return the same instance.
protected void AddRemoveEnumerableTestHelper(IImmutableDictionary<int, int> empty)
{
    Contract.Requires(empty != null);
    Assert.Same(empty, empty.RemoveRange(Enumerable.Empty<int>()));
    Assert.Same(empty, empty.AddRange(Enumerable.Empty<KeyValuePair<int, int>>()));
    var list = new List<KeyValuePair<int, int>> { new KeyValuePair<int, int>(3, 5), new KeyValuePair<int, int>(8, 10) };
    var nonEmpty = empty.AddRange(list);
    this.VerifyAvlTreeState(nonEmpty);
    // Removing keys 1..5 should eliminate only key 3, leaving key 8.
    var halfRemoved = nonEmpty.RemoveRange(Enumerable.Range(1, 5));
    Assert.Equal(1, halfRemoved.Count);
    Assert.True(halfRemoved.ContainsKey(8));
    this.VerifyAvlTreeState(halfRemoved);
}
/// <summary>
/// Verifies that adding a key that already exists with an equal value is a no-op
/// that returns the same map instance (for both Add and AddRange).
/// </summary>
protected void AddExistingKeySameValueTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key, TValue value1, TValue value2)
{
    Contract.Requires(map != null);
    Contract.Requires(key != null);
    Contract.Requires(GetValueComparer(map).Equals(value1, value2));
    map = map.Add(key, value1);
    Assert.Same(map, map.Add(key, value2));
    Assert.Same(map, map.AddRange(new[] { new KeyValuePair<TKey, TValue>(key, value2) }));
}
/// <summary>
/// Verifies that adding a key-value pair where the key already is in the map but with a different value throws.
/// </summary>
/// <typeparam name="TKey">The type of key in the map.</typeparam>
/// <typeparam name="TValue">The type of value in the map.</typeparam>
/// <param name="map">The map to manipulate.</param>
/// <param name="key">The key to add.</param>
/// <param name="value1">The first value to add.</param>
/// <param name="value2">The second value to add.</param>
/// <remarks>
/// Adding a key-value pair to a map where that key already exists, but with a different value, cannot fit the
/// semantic of "adding", either by just returning or mutating the value on the existing key. Throwing is the only reasonable response.
/// </remarks>
protected void AddExistingKeyDifferentValueTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key, TValue value1, TValue value2)
{
    Contract.Requires(map != null);
    Contract.Requires(key != null);
    Contract.Requires(!GetValueComparer(map).Equals(value1, value2));
    var map1 = map.Add(key, value1);
    var map2 = map.Add(key, value2);
    Assert.Throws<ArgumentException>(() => map1.Add(key, value2));
    Assert.Throws<ArgumentException>(() => map2.Add(key, value1));
}
// The key must be absent before the add and present afterwards.
protected void ContainsKeyTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key, TValue value)
{
    Assert.False(map.ContainsKey(key));
    var added = map.Add(key, value);
    Assert.True(added.ContainsKey(key));
}
// Both Contains overloads must agree, before and after the pair is added.
protected void ContainsTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key, TValue value)
{
    var pair = new KeyValuePair<TKey, TValue>(key, value);
    Assert.False(map.Contains(pair));
    Assert.False(map.Contains(key, value));
    var added = map.Add(key, value);
    Assert.True(added.Contains(pair));
    Assert.True(added.Contains(key, value));
}
protected void RemoveTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key)
{
    // no-op remove: removing an absent key (or nothing) returns the same instance
    Assert.Same(map, map.Remove(key));
    Assert.Same(map, map.RemoveRange(Enumerable.Empty<TKey>()));
    // substantial remove: a present key yields a new map that lacks the key
    var withKey = map.Add(key, default(TValue));
    var withoutKey = withKey.Remove(key);
    Assert.NotSame(withKey, withoutKey);
    Assert.False(withoutKey.ContainsKey(key));
}
// Verifies the Keys collection on both the immutable map and its read-only wrapper.
protected void KeysTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key)
{
    Assert.Equal(0, map.Keys.Count());
    Assert.Equal(0, map.ToReadOnlyDictionary().Keys.Count());
    var nonEmpty = map.Add(key, default(TValue));
    Assert.Equal(1, nonEmpty.Keys.Count());
    Assert.Equal(1, nonEmpty.ToReadOnlyDictionary().Keys.Count());
    KeysOrValuesTestHelper(((IDictionary<TKey, TValue>)nonEmpty).Keys, key);
}
// Verifies the Values collection on both the immutable map and its read-only wrapper.
protected void ValuesTestHelper<TKey, TValue>(IImmutableDictionary<TKey, TValue> map, TKey key)
{
    Assert.Equal(0, map.Values.Count());
    Assert.Equal(0, map.ToReadOnlyDictionary().Values.Count());
    var nonEmpty = map.Add(key, default(TValue));
    Assert.Equal(1, nonEmpty.Values.Count());
    Assert.Equal(1, nonEmpty.ToReadOnlyDictionary().Values.Count());
    KeysOrValuesTestHelper(((IDictionary<TKey, TValue>)nonEmpty).Values, default(TValue));
}
// Fills a map with keys 0..9 and verifies enumeration order, ToList equivalence,
// Reset behavior, partial enumeration, and Current's failure modes.
protected void EnumeratorTestHelper(IImmutableDictionary<int, GenericParameterHelper> map)
{
    for (int i = 0; i < 10; i++)
    {
        map = this.AddTestHelper(map, i, new GenericParameterHelper(i));
    }
    // Entries are expected to come back in ascending key order.
    int j = 0;
    foreach (KeyValuePair<int, GenericParameterHelper> pair in map)
    {
        Assert.Equal(j, pair.Key);
        Assert.Equal(j, pair.Value.Data);
        j++;
    }
    var list = map.ToList();
    Assert.Equal<KeyValuePair<int, GenericParameterHelper>>(list, ImmutableSetTest.ToListNonGeneric<KeyValuePair<int, GenericParameterHelper>>(map));
    // Apply some less common uses to the enumerator to test its mettle.
    using (var enumerator = map.GetEnumerator())
    {
        enumerator.Reset(); // reset isn't usually called before MoveNext
        ManuallyEnumerateTest(list, enumerator);
        enumerator.Reset();
        ManuallyEnumerateTest(list, enumerator);
        // this time only partially enumerate
        enumerator.Reset();
        enumerator.MoveNext();
        enumerator.Reset();
        ManuallyEnumerateTest(list, enumerator);
    }
    // Current must throw before the first MoveNext and after enumeration completes.
    var manualEnum = map.GetEnumerator();
    Assert.Throws<InvalidOperationException>(() => manualEnum.Current);
    while (manualEnum.MoveNext()) { }
    Assert.False(manualEnum.MoveNext());
    Assert.Throws<InvalidOperationException>(() => manualEnum.Current);
}
// Factory and introspection hooks supplied by each concrete test class.
protected abstract IImmutableDictionary<TKey, TValue> Empty<TKey, TValue>();
protected abstract IImmutableDictionary<string, TValue> Empty<TValue>(StringComparer comparer);
protected abstract IEqualityComparer<TValue> GetValueComparer<TKey, TValue>(IImmutableDictionary<TKey, TValue> dictionary);
internal abstract IBinaryTree GetRootNode<TKey, TValue>(IImmutableDictionary<TKey, TValue> dictionary);
// Common assertions for the Keys/Values collection views: read-only, synchronized,
// and CopyTo honors the starting index.
private static void KeysOrValuesTestHelper<T>(ICollection<T> collection, T containedValue)
{
    Requires.NotNull(collection, "collection");
    Assert.True(collection.Contains(containedValue));
    Assert.Throws<NotSupportedException>(() => collection.Add(default(T)));
    Assert.Throws<NotSupportedException>(() => collection.Clear());
    var nonGeneric = (ICollection)collection;
    Assert.NotNull(nonGeneric.SyncRoot);
    Assert.Same(nonGeneric.SyncRoot, nonGeneric.SyncRoot);
    Assert.True(nonGeneric.IsSynchronized);
    Assert.True(collection.IsReadOnly);
    Assert.Throws<ArgumentNullException>(() => nonGeneric.CopyTo(null, 0));
    var array = new T[collection.Count + 1];
    nonGeneric.CopyTo(array, 1);
    Assert.Equal(default(T), array[0]);
    Assert.Equal(array.Skip(1), nonGeneric.Cast<T>().ToArray());
}
// Asserts the backing AVL tree is balanced and within its expected height bound.
private void VerifyAvlTreeState<TKey, TValue>(IImmutableDictionary<TKey, TValue> dictionary)
{
    var rootNode = this.GetRootNode(dictionary);
    rootNode.VerifyBalanced();
    rootNode.VerifyHeightIsWithinTolerance(dictionary.Count);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
namespace Elasticsearch.Net.Integration.Yaml.IndicesGetWarmer1
{
public partial class IndicesGetWarmer1YamlTests
{
// Shared fixture for the generated warmer tests: creates index test_1 with
// warmer_1/warmer_2 and index test_2 with warmer_2/warmer_3, then waits for
// the cluster to reach at least yellow health.
public class IndicesGetWarmer110BasicYamlBase : YamlTestsBase
{
    public IndicesGetWarmer110BasicYamlBase() : base()
    {
        //do indices.create
        _body = new {
            warmers= new {
                warmer_1= new {
                    source= new {
                        query= new {
                            match_all= new {}
                        }
                    }
                },
                warmer_2= new {
                    source= new {
                        query= new {
                            match_all= new {}
                        }
                    }
                }
            }
        };
        this.Do(()=> _client.IndicesCreate("test_1", _body));
        //do indices.create
        _body = new {
            warmers= new {
                warmer_2= new {
                    source= new {
                        query= new {
                            match_all= new {}
                        }
                    }
                },
                warmer_3= new {
                    source= new {
                        query= new {
                            match_all= new {}
                        }
                    }
                }
            }
        };
        this.Do(()=> _client.IndicesCreate("test_2", _body));
        //do cluster.health
        this.Do(()=> _client.ClusterHealth(nv=>nv
            .AddQueryString("wait_for_status", @"yellow")
        ));
    }
}
// Retrieving warmers for all indices returns every warmer on both test indices.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetWarmer2Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetWarmer2Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmerForAll());
        //match _response.test_1.warmers.warmer_1.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_1.source.query.match_all, new {});
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //match _response.test_2.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_2.warmers.warmer_2.source.query.match_all, new {});
        //match _response.test_2.warmers.warmer_3.source.query.match_all:
        this.IsMatch(_response.test_2.warmers.warmer_3.source.query.match_all, new {});
    }
}
// Scoping the request to index test_1 excludes warmers belonging to test_2.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexWarmer3Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexWarmer3Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("test_1"));
        //match _response.test_1.warmers.warmer_1.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_1.source.query.match_all, new {});
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_2;
        this.IsFalse(_response.test_2);
    }
}
// The "_all" warmer name behaves like no filter at all for the given index.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexWarmerAll4Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexWarmerAll4Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("test_1", "_all"));
        //match _response.test_1.warmers.warmer_1.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_1.source.query.match_all, new {});
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_2;
        this.IsFalse(_response.test_2);
    }
}
// The "*" wildcard warmer name matches every warmer of the given index.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexWarmer5Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexWarmer5Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("test_1", "*"));
        //match _response.test_1.warmers.warmer_1.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_1.source.query.match_all, new {});
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_2;
        this.IsFalse(_response.test_2);
    }
}
// Requesting a single warmer by exact name returns only that warmer.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexWarmerName6Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexWarmerName6Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("test_1", "warmer_1"));
        //match _response.test_1.warmers.warmer_1.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_1.source.query.match_all, new {});
        //is_false _response.test_1.warmers.warmer_2;
        this.IsFalse(_response.test_1.warmers.warmer_2);
        //is_false _response.test_2;
        this.IsFalse(_response.test_2);
    }
}
// A comma-separated warmer-name list returns all the named warmers.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexWarmerNameName7Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexWarmerNameName7Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("test_1", "warmer_1,warmer_2"));
        //match _response.test_1.warmers.warmer_1.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_1.source.query.match_all, new {});
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_2;
        this.IsFalse(_response.test_2);
    }
}
// A wildcard warmer name ("*2") matches by suffix within the given index.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexWarmerName8Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexWarmerName8Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("test_1", "*2"));
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_1.warmers.warmer_1;
        this.IsFalse(_response.test_1.warmers.warmer_1);
        //is_false _response.test_2;
        this.IsFalse(_response.test_2);
    }
}
// A warmer name without an index scope searches every index for that warmer.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetWarmerName9Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetWarmerName9Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmerForAll("warmer_2"));
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //match _response.test_2.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_2.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_1.warmers.warmer_1;
        this.IsFalse(_response.test_1.warmers.warmer_1);
        //is_false _response.test_2.warmers.warmer_3;
        this.IsFalse(_response.test_2.warmers.warmer_3);
    }
}
// "_all" as the index name with a specific warmer name behaves like the
// unscoped request in GetWarmerName9Tests.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetAllWarmerName10Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetAllWarmerName10Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("_all", "warmer_2"));
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //match _response.test_2.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_2.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_1.warmers.warmer_1;
        this.IsFalse(_response.test_1.warmers.warmer_1);
        //is_false _response.test_2.warmers.warmer_3;
        this.IsFalse(_response.test_2.warmers.warmer_3);
    }
}
// "*" as the index name is equivalent to "_all".
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetWarmerName11Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetWarmerName11Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("*", "warmer_2"));
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //match _response.test_2.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_2.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_1.warmers.warmer_1;
        this.IsFalse(_response.test_1.warmers.warmer_1);
        //is_false _response.test_2.warmers.warmer_3;
        this.IsFalse(_response.test_2.warmers.warmer_3);
    }
}
// A comma-separated index list scopes the warmer lookup to those indices.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexIndexWarmerName12Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexIndexWarmerName12Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("test_1,test_2", "warmer_2"));
        //match _response.test_1.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_1.warmers.warmer_2.source.query.match_all, new {});
        //match _response.test_2.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_2.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_2.warmers.warmer_3;
        this.IsFalse(_response.test_2.warmers.warmer_3);
    }
}
// A wildcard index name ("*2") scopes the lookup to matching indices only.
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetIndexWarmerName13Tests : IndicesGetWarmer110BasicYamlBase
{
    [Test]
    public void GetIndexWarmerName13Test()
    {
        //do indices.get_warmer
        this.Do(()=> _client.IndicesGetWarmer("*2", "warmer_2"));
        //match _response.test_2.warmers.warmer_2.source.query.match_all:
        this.IsMatch(_response.test_2.warmers.warmer_2.source.query.match_all, new {});
        //is_false _response.test_1;
        this.IsFalse(_response.test_1);
        //is_false _response.test_2.warmers.warmer_3;
        this.IsFalse(_response.test_2.warmers.warmer_3);
    }
}
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class EmptyResponseWhenNoMatchingWarmer14Tests : IndicesGetWarmer110BasicYamlBase
{
[Test]
public void EmptyResponseWhenNoMatchingWarmer14Test()
{
//do indices.get_warmer
this.Do(()=> _client.IndicesGetWarmer("*", "non_existent"));
//match this._status:
this.IsMatch(this._status, new {});
}
}
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class Throw404OnMissingIndex15Tests : IndicesGetWarmer110BasicYamlBase
{
	// Generated from the indices.get_warmer YAML spec: querying warmers on a
	// non-existent index must fail with a "missing" (404) error.
	[Test]
	public void Throw404OnMissingIndex15Test()
	{
		// do indices.get_warmer for non_existent / *, expecting a missing-index error
		Do(() => _client.IndicesGetWarmer("non_existent", "*"), shouldCatch: @"missing");
	}
}
[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
public class GetWarmerWithLocalFlag16Tests : IndicesGetWarmer110BasicYamlBase
{
	// Generated from the indices.get_warmer YAML spec: fetching all warmers
	// with the local=true query-string flag still returns both test indices.
	[Test]
	public void GetWarmerWithLocalFlag16Test()
	{
		// do indices.get_warmer for all indices with ?local=true
		Do(() => _client.IndicesGetWarmerForAll(nv => nv.AddQueryString("local", @"true")));
		// both indices must be present in the response
		IsTrue(_response.test_1);
		IsTrue(_response.test_2);
	}
}
}
}
// ---- file boundary (concatenation artifact); the following content begins a new source file ----
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// ConcurrentExclusiveSchedulerPair.cs
//
//
// A pair of schedulers that together support concurrent (reader) / exclusive (writer)
// task scheduling. Using just the exclusive scheduler can be used to simulate a serial
// processing queue, and using just the concurrent scheduler with a specified
// MaximumConcurrencyLevel can be used to achieve a MaxDegreeOfParallelism across
// a bunch of tasks, parallel loops, dataflow blocks, etc.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using System.Security;
namespace System.Threading.Tasks
{
/// <summary>
/// Provides concurrent and exclusive task schedulers that coordinate to execute
/// tasks while ensuring that concurrent tasks may run concurrently and exclusive tasks never do.
/// </summary>
[DebuggerDisplay("Concurrent={ConcurrentTaskCountForDebugger}, Exclusive={ExclusiveTaskCountForDebugger}, Mode={ModeForDebugger}")]
[DebuggerTypeProxy(typeof(ConcurrentExclusiveSchedulerPair.DebugView))]
public class ConcurrentExclusiveSchedulerPair
{
/// <summary>A processing mode to denote what kinds of tasks are currently being processed on this thread.</summary>
private readonly ThreadLocal<ProcessingMode> m_threadProcessingMode = new ThreadLocal<ProcessingMode>();
/// <summary>The scheduler used to queue and execute "concurrent" tasks that may run concurrently with other concurrent tasks.</summary>
private readonly ConcurrentExclusiveTaskScheduler m_concurrentTaskScheduler;
/// <summary>The scheduler used to queue and execute "exclusive" tasks that must run exclusively while no other tasks for this pair are running.</summary>
private readonly ConcurrentExclusiveTaskScheduler m_exclusiveTaskScheduler;
/// <summary>The underlying task scheduler to which all work should be scheduled.</summary>
private readonly TaskScheduler m_underlyingTaskScheduler;
/// <summary>
/// The maximum number of tasks allowed to run concurrently. This only applies to concurrent tasks,
/// since exclusive tasks are inherently limited to 1.
/// </summary>
private readonly int m_maxConcurrencyLevel;
/// <summary>The maximum number of tasks we can process before recycling our runner tasks.</summary>
private readonly int m_maxItemsPerTask;
/// <summary>
/// If positive, it represents the number of concurrently running concurrent tasks.
/// If negative, it means an exclusive task has been scheduled.
/// If 0, nothing has been scheduled.
/// </summary>
private int m_processingCount;
/// <summary>Completion state for a task representing the completion of this pair.</summary>
/// <remarks>Lazily-initialized only if the scheduler pair is shutting down or if the Completion is requested.</remarks>
private CompletionState m_completionState;
/// <summary>A constant value used to signal unlimited processing (normalized to Int32.MaxValue in the constructor).</summary>
private const int UNLIMITED_PROCESSING = -1;
/// <summary>Constant used for m_processingCount to indicate that an exclusive task is being processed.</summary>
private const int EXCLUSIVE_PROCESSING_SENTINEL = -1;
/// <summary>Default MaxItemsPerTask to use for processing if none is specified.</summary>
private const int DEFAULT_MAXITEMSPERTASK = UNLIMITED_PROCESSING;
/// <summary>Default MaxConcurrencyLevel is the processor count if not otherwise specified.</summary>
private static Int32 DefaultMaxConcurrencyLevel { get { return Environment.ProcessorCount; } }
/// <summary>Gets the sync obj used to protect all state on this instance.</summary>
private readonly Lock ValueLock = new Lock();
/// <summary>
/// Initializes the ConcurrentExclusiveSchedulerPair.
/// </summary>
/// <remarks>Delegates to the full constructor, targeting TaskScheduler.Default with default limits.</remarks>
public ConcurrentExclusiveSchedulerPair() :
this(TaskScheduler.Default, DefaultMaxConcurrencyLevel, DEFAULT_MAXITEMSPERTASK)
{ }
/// <summary>
/// Initializes the ConcurrentExclusiveSchedulerPair to target the specified scheduler.
/// </summary>
/// <param name="taskScheduler">The target scheduler on which this pair should execute.</param>
/// <remarks>Delegates to the full constructor with default concurrency and items-per-task limits.</remarks>
public ConcurrentExclusiveSchedulerPair(TaskScheduler taskScheduler) :
this(taskScheduler, DefaultMaxConcurrencyLevel, DEFAULT_MAXITEMSPERTASK)
{ }
/// <summary>
/// Initializes the ConcurrentExclusiveSchedulerPair to target the specified scheduler with a maximum concurrency level.
/// </summary>
/// <param name="taskScheduler">The target scheduler on which this pair should execute.</param>
/// <param name="maxConcurrencyLevel">The maximum number of tasks to run concurrently.</param>
/// <remarks>Delegates to the full constructor with the default items-per-task limit.</remarks>
public ConcurrentExclusiveSchedulerPair(TaskScheduler taskScheduler, int maxConcurrencyLevel) :
this(taskScheduler, maxConcurrencyLevel, DEFAULT_MAXITEMSPERTASK)
{ }
/// <summary>
/// Initializes the ConcurrentExclusiveSchedulerPair to target the specified scheduler with a maximum
/// concurrency level and a maximum number of scheduled tasks that may be processed as a unit.
/// </summary>
/// <param name="taskScheduler">The target scheduler on which this pair should execute.</param>
/// <param name="maxConcurrencyLevel">The maximum number of tasks to run concurrently, or -1 for unlimited.</param>
/// <param name="maxItemsPerTask">The maximum number of tasks to process for each underlying scheduled task used by the pair, or -1 for unlimited.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="taskScheduler"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown when a limit is 0 or less than -1.</exception>
public ConcurrentExclusiveSchedulerPair(TaskScheduler taskScheduler, int maxConcurrencyLevel, int maxItemsPerTask)
{
// Validate arguments (nameof keeps the reported parameter names refactor-safe)
if (taskScheduler == null) throw new ArgumentNullException(nameof(taskScheduler));
if (maxConcurrencyLevel == 0 || maxConcurrencyLevel < -1) throw new ArgumentOutOfRangeException(nameof(maxConcurrencyLevel));
if (maxItemsPerTask == 0 || maxItemsPerTask < -1) throw new ArgumentOutOfRangeException(nameof(maxItemsPerTask));
Contract.EndContractBlock();
// Store configuration
m_underlyingTaskScheduler = taskScheduler;
m_maxConcurrencyLevel = maxConcurrencyLevel;
m_maxItemsPerTask = maxItemsPerTask;
// Downgrade to the underlying scheduler's max degree of parallelism if it's lower than the user-supplied level
int mcl = taskScheduler.MaximumConcurrencyLevel;
if (mcl > 0 && mcl < m_maxConcurrencyLevel) m_maxConcurrencyLevel = mcl;
// Treat UNLIMITED_PROCESSING/-1 for both MCL and MIPT as the biggest possible value so that we don't
// have to special case UNLIMITED_PROCESSING later on in processing.
if (m_maxConcurrencyLevel == UNLIMITED_PROCESSING) m_maxConcurrencyLevel = Int32.MaxValue;
if (m_maxItemsPerTask == UNLIMITED_PROCESSING) m_maxItemsPerTask = Int32.MaxValue;
// Create the concurrent/exclusive schedulers for this pair; the exclusive scheduler is inherently limited to 1
m_exclusiveTaskScheduler = new ConcurrentExclusiveTaskScheduler(this, 1, ProcessingMode.ProcessingExclusiveTask);
m_concurrentTaskScheduler = new ConcurrentExclusiveTaskScheduler(this, m_maxConcurrencyLevel, ProcessingMode.ProcessingConcurrentTasks);
}
/// <summary>Informs the scheduler pair that it should not accept any more tasks.</summary>
/// <remarks>
/// Calling <see cref="Complete"/> is optional, and it's only necessary if the <see cref="Completion"/>
/// will be relied on for notification of all processing being completed.
/// </remarks>
public void Complete()
{
lock (ValueLock)
{
// Nothing to do if completion was already requested by a previous call.
if (CompletionRequested) return;
// Mark the pair as completing, and finish right away if it's already quiesced.
RequestCompletion();
CleanupStateIfCompletingAndQuiesced();
}
}
/// <summary>Gets a <see cref="System.Threading.Tasks.Task"/> that will complete when the scheduler has completed processing.</summary>
/// <remarks>Accessing this property forces lazy initialization of the completion state.</remarks>
public Task Completion
{
// ValueLock not needed, but it's ok if it's held
get { return EnsureCompletionStateInitialized().Task; }
}
/// <summary>Gets the lazily-initialized completion state.</summary>
/// <remarks>Thread-safe: LazyInitializer guarantees a single CompletionState instance is published to m_completionState.</remarks>
private CompletionState EnsureCompletionStateInitialized()
{
// ValueLock not needed, but it's ok if it's held
return LazyInitializer.EnsureInitialized(ref m_completionState, () => new CompletionState());
}
/// <summary>Gets whether completion has been requested.</summary>
private bool CompletionRequested
{
// ValueLock not needed, but it's ok if it's held.
// Volatile.Read pairs with the write performed (under the lock) in RequestCompletion.
get { return m_completionState != null && Volatile.Read(ref m_completionState.m_completionRequested); }
}
/// <summary>Sets that completion has been requested.</summary>
/// <remarks>Must be called while holding ValueLock.</remarks>
private void RequestCompletion()
{
ContractAssertMonitorStatus(ValueLock, held: true);
EnsureCompletionStateInitialized().m_completionRequested = true;
}
/// <summary>
/// Cleans up state if and only if there's no processing currently happening
/// and no more to be done later.
/// </summary>
/// <remarks>Must be called while holding ValueLock.</remarks>
private void CleanupStateIfCompletingAndQuiesced()
{
ContractAssertMonitorStatus(ValueLock, held: true);
if (ReadyToComplete) CompleteTaskAsync();
}
/// <summary>Gets whether the pair is ready to complete.</summary>
/// <remarks>Must be read while holding ValueLock.</remarks>
private bool ReadyToComplete
{
get
{
ContractAssertMonitorStatus(ValueLock, held: true);
// We can only complete if completion has been requested and no processing is currently happening.
if (!CompletionRequested || m_processingCount != 0) return false;
// Now, only allow shutdown if an exception occurred or if there are no more tasks to process.
// (Faulting takes precedence: with recorded exceptions we complete even with queued tasks.)
var cs = EnsureCompletionStateInitialized();
return
(cs.m_exceptions != null && cs.m_exceptions.Count > 0) ||
(m_concurrentTaskScheduler.m_tasks.IsEmpty && m_exclusiveTaskScheduler.m_tasks.IsEmpty);
}
}
/// <summary>Completes the completion task asynchronously.</summary>
/// <remarks>Must be called while holding ValueLock; the actual completion runs on the thread pool.</remarks>
private void CompleteTaskAsync()
{
Contract.Requires(ReadyToComplete, "The block must be ready to complete to be here.");
ContractAssertMonitorStatus(ValueLock, held: true);
// Ensure we only try to complete once, then schedule completion
// in order to escape held locks and the caller's context
var cs = EnsureCompletionStateInitialized();
if (!cs.m_completionQueued)
{
cs.m_completionQueued = true;
ThreadPool.QueueUserWorkItem(state =>
{
var localCs = (CompletionState)state; // don't use 'cs', as it'll force a closure
Contract.Assert(!localCs.Task.IsCompleted, "Completion should only happen once.");
// Fault the completion task if any exceptions were recorded; otherwise complete it successfully.
var exceptions = localCs.m_exceptions;
bool success = (exceptions != null && exceptions.Count > 0) ?
localCs.TrySetException(exceptions) :
localCs.TrySetResult(default(VoidTaskResult));
Contract.Assert(success, "Expected to complete completion task.");
}, cs);
}
}
/// <summary>Initiates scheduler shutdown due to a worker task faulting.</summary>
/// <param name="faultedTask">The faulted worker task that's initiating the shutdown.</param>
/// <remarks>Must be called while holding ValueLock.</remarks>
private void FaultWithTask(Task faultedTask)
{
Contract.Requires(faultedTask != null && faultedTask.IsFaulted && faultedTask.Exception.InnerExceptions.Count > 0,
"Needs a task in the faulted state and thus with exceptions.");
ContractAssertMonitorStatus(ValueLock, held: true);
// Store the faulted task's exceptions (lazily creating the list on first fault)
var cs = EnsureCompletionStateInitialized();
if (cs.m_exceptions == null) cs.m_exceptions = new List<Exception>();
cs.m_exceptions.AddRange(faultedTask.Exception.InnerExceptions);
// Now that we're doomed, request completion
RequestCompletion();
}
/// <summary>
/// Gets a TaskScheduler that can be used to schedule tasks to this pair
/// that may run concurrently with other tasks on this pair.
/// </summary>
public TaskScheduler ConcurrentScheduler { get { return m_concurrentTaskScheduler; } }
/// <summary>
/// Gets a TaskScheduler that can be used to schedule tasks to this pair
/// that must run exclusively with regards to other tasks on this pair.
/// </summary>
public TaskScheduler ExclusiveScheduler { get { return m_exclusiveTaskScheduler; } }
/// <summary>Gets the number of tasks waiting to run concurrently.</summary>
/// <remarks>This does not take the necessary lock, as it's only called from under the debugger.</remarks>
[SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
private int ConcurrentTaskCountForDebugger { get { return m_concurrentTaskScheduler.m_tasks.Count; } }
/// <summary>Gets the number of tasks waiting to run exclusively.</summary>
/// <remarks>This does not take the necessary lock, as it's only called from under the debugger.</remarks>
[SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
private int ExclusiveTaskCountForDebugger { get { return m_exclusiveTaskScheduler.m_tasks.Count; } }
/// <summary>Notifies the pair that new work has arrived to be processed.</summary>
/// <param name="fairly">Whether tasks should be scheduled fairly with regards to other tasks.</param>
/// <remarks>Must only be called while holding the lock. Exclusive tasks take priority over concurrent ones.</remarks>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
[SuppressMessage("Microsoft.Performance", "CA1804:RemoveUnusedLocals")]
[SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope")]
private void ProcessAsyncIfNecessary(bool fairly = false)
{
ContractAssertMonitorStatus(ValueLock, held: true);
// If the current processing count is >= 0, we can potentially launch further processing.
// (A negative count means an exclusive task owns processing; see the assert at the bottom.)
if (m_processingCount >= 0)
{
// We snap whether there are any exclusive tasks or concurrent tasks waiting.
// (We grab the concurrent count below only once we know we need it.)
// With processing happening concurrent to this operation, this data may
// immediately be out of date, but it can only go from non-empty
// to empty and not the other way around. As such, this is safe,
// as worst case is we'll schedule an extra task when we didn't
// otherwise need to, and we'll just eat its overhead.
bool exclusiveTasksAreWaiting = !m_exclusiveTaskScheduler.m_tasks.IsEmpty;
// If there's no processing currently happening but there are waiting exclusive tasks,
// let's start processing those exclusive tasks.
Task processingTask = null;
if (m_processingCount == 0 && exclusiveTasksAreWaiting)
{
// Launch exclusive task processing
m_processingCount = EXCLUSIVE_PROCESSING_SENTINEL; // -1
try
{
processingTask = new Task(thisPair => ((ConcurrentExclusiveSchedulerPair)thisPair).ProcessExclusiveTasks(), this,
default(CancellationToken), GetCreationOptionsForTask(fairly));
processingTask.Start(m_underlyingTaskScheduler);
// When we call Start, if the underlying scheduler throws in QueueTask, TPL will fault the task and rethrow
// the exception. To deal with that, we need a reference to the task object, so that we can observe its exception.
// Hence, we separate creation and starting, so that we can store a reference to the task before we attempt QueueTask.
}
catch
{
// Start/QueueTask threw: roll back the sentinel and fault the pair with the (already created) task.
m_processingCount = 0;
FaultWithTask(processingTask);
}
}
// If there are no waiting exclusive tasks, there are concurrent tasks, and we haven't reached our maximum
// concurrency level for processing, let's start processing more concurrent tasks.
else
{
int concurrentTasksWaitingCount = m_concurrentTaskScheduler.m_tasks.Count;
if (concurrentTasksWaitingCount > 0 && !exclusiveTasksAreWaiting && m_processingCount < m_maxConcurrencyLevel)
{
// Launch concurrent task processing, up to the allowed limit
for (int i = 0; i < concurrentTasksWaitingCount && m_processingCount < m_maxConcurrencyLevel; ++i)
{
++m_processingCount;
try
{
processingTask = new Task(thisPair => ((ConcurrentExclusiveSchedulerPair)thisPair).ProcessConcurrentTasks(), this,
default(CancellationToken), GetCreationOptionsForTask(fairly));
processingTask.Start(m_underlyingTaskScheduler); // See above logic for why we use new + Start rather than StartNew
}
catch
{
// Start/QueueTask threw: undo this worker's count and fault the pair with the task.
--m_processingCount;
FaultWithTask(processingTask);
}
}
}
}
// Check to see if all tasks have completed and if completion has been requested.
CleanupStateIfCompletingAndQuiesced();
}
else Contract.Assert(m_processingCount == EXCLUSIVE_PROCESSING_SENTINEL, "The processing count must be the sentinel if it's not >= 0.");
}
/// <summary>
/// Processes exclusive tasks serially until either there are no more to process
/// or we've reached our user-specified maximum limit.
/// </summary>
/// <remarks>Runs on the underlying scheduler; must NOT be called while holding ValueLock.</remarks>
private void ProcessExclusiveTasks()
{
Contract.Requires(m_processingCount == EXCLUSIVE_PROCESSING_SENTINEL, "Processing exclusive tasks requires being in exclusive mode.");
Contract.Requires(!m_exclusiveTaskScheduler.m_tasks.IsEmpty, "Processing exclusive tasks requires tasks to be processed.");
ContractAssertMonitorStatus(ValueLock, held: false);
try
{
// Note that we're processing exclusive tasks on the current thread
// (this enables TryExecuteTaskInline to inline further exclusive tasks here)
Contract.Assert(m_threadProcessingMode.Value == ProcessingMode.NotCurrentlyProcessing,
"This thread should not yet be involved in this pair's processing.");
m_threadProcessingMode.Value = ProcessingMode.ProcessingExclusiveTask;
// Process up to the maximum number of items per task allowed
for (int i = 0; i < m_maxItemsPerTask; i++)
{
// Get the next available exclusive task. If we can't find one, bail.
Task exclusiveTask;
if (!m_exclusiveTaskScheduler.m_tasks.TryDequeue(out exclusiveTask)) break;
// Execute the task. If the scheduler was previously faulted,
// this task could have been faulted when it was queued; ignore such tasks.
if (!exclusiveTask.IsFaulted) m_exclusiveTaskScheduler.ExecuteTask(exclusiveTask);
}
}
finally
{
// We're no longer processing exclusive tasks on the current thread
Contract.Assert(m_threadProcessingMode.Value == ProcessingMode.ProcessingExclusiveTask,
"Somehow we ended up escaping exclusive mode.");
m_threadProcessingMode.Value = ProcessingMode.NotCurrentlyProcessing;
lock (ValueLock)
{
// When this task was launched, we tracked it by setting m_processingCount to EXCLUSIVE_PROCESSING_SENTINEL;
// now reset it to 0. Then check to see whether there's more processing to be done.
// There might be more concurrent tasks available, for example, if concurrent tasks arrived
// after we exited the loop, or if we exited the loop while concurrent tasks were still
// available but we hit our maxItemsPerTask limit.
Contract.Assert(m_processingCount == EXCLUSIVE_PROCESSING_SENTINEL, "The processing mode should not have deviated from exclusive.");
m_processingCount = 0;
ProcessAsyncIfNecessary(true);
}
}
}
/// <summary>
/// Processes concurrent tasks serially until either there are no more to process,
/// we've reached our user-specified maximum limit, or exclusive tasks have arrived.
/// </summary>
/// <remarks>Runs on the underlying scheduler; must NOT be called while holding ValueLock.</remarks>
private void ProcessConcurrentTasks()
{
Contract.Requires(m_processingCount > 0, "Processing concurrent tasks requires us to be in concurrent mode.");
ContractAssertMonitorStatus(ValueLock, held: false);
try
{
// Note that we're processing concurrent tasks on the current thread
// (this enables TryExecuteTaskInline to inline further concurrent tasks here)
Contract.Assert(m_threadProcessingMode.Value == ProcessingMode.NotCurrentlyProcessing,
"This thread should not yet be involved in this pair's processing.");
m_threadProcessingMode.Value = ProcessingMode.ProcessingConcurrentTasks;
// Process up to the maximum number of items per task allowed
for (int i = 0; i < m_maxItemsPerTask; i++)
{
// Get the next available concurrent task. If we can't find one, bail.
Task concurrentTask;
if (!m_concurrentTaskScheduler.m_tasks.TryDequeue(out concurrentTask)) break;
// Execute the task. If the scheduler was previously faulted,
// this task could have been faulted when it was queued; ignore such tasks.
if (!concurrentTask.IsFaulted) m_concurrentTaskScheduler.ExecuteTask(concurrentTask);
// Now check to see if exclusive tasks have arrived; if any have, they take priority
// so we'll bail out here. Note that we could have checked this condition
// in the for loop's condition, but that could lead to extra overhead
// in the case where a concurrent task arrives, this task is launched, and then
// before entering the loop an exclusive task arrives. If we didn't execute at
// least one task, we would have spent all of the overhead to launch a
// task but with none of the benefit. There's of course also an inherent
// race here with regards to exclusive tasks arriving, and we're ok with
// executing one more concurrent task than we should before giving priority to exclusive tasks.
if (!m_exclusiveTaskScheduler.m_tasks.IsEmpty) break;
}
}
finally
{
// We're no longer processing concurrent tasks on the current thread
Contract.Assert(m_threadProcessingMode.Value == ProcessingMode.ProcessingConcurrentTasks,
"Somehow we ended up escaping concurrent mode.");
m_threadProcessingMode.Value = ProcessingMode.NotCurrentlyProcessing;
lock (ValueLock)
{
// When this task was launched, we tracked it with a positive processing count;
// decrement that count. Then check to see whether there's more processing to be done.
// There might be more concurrent tasks available, for example, if concurrent tasks arrived
// after we exited the loop, or if we exited the loop while concurrent tasks were still
// available but we hit our maxItemsPerTask limit.
Contract.Assert(m_processingCount > 0, "The processing mode should not have deviated from concurrent.");
if (m_processingCount > 0) --m_processingCount;
ProcessAsyncIfNecessary(true);
}
}
}
/// <summary>
/// Holder for lazily-initialized state about the completion of a scheduler pair.
/// Completion is only triggered either by rare exceptional conditions or by
/// the user calling Complete, and as such we only lazily initialize this
/// state in one of those conditions or if the user explicitly asks for
/// the Completion.
/// </summary>
[SuppressMessage("Microsoft.Performance", "CA1812:AvoidUninstantiatedInternalClasses")]
private sealed class CompletionState : TaskCompletionSource<VoidTaskResult>
{
/// <summary>Whether the scheduler has had completion requested.</summary>
/// <remarks>This variable is not volatile, so to guarantee safe reads outside the lock, Volatile.Read is used (see CompletionRequested).</remarks>
internal bool m_completionRequested;
/// <summary>Whether completion processing has been queued (to the thread pool by CompleteTaskAsync).</summary>
internal bool m_completionQueued;
/// <summary>Unrecoverable exceptions incurred while processing.</summary>
internal List<Exception> m_exceptions;
}
/// <summary>
/// A scheduler shim used to queue tasks to the pair and execute those tasks on request of the pair.
/// </summary>
[DebuggerDisplay("Count={CountForDebugger}, MaxConcurrencyLevel={m_maxConcurrencyLevel}, Id={Id}")]
[DebuggerTypeProxy(typeof(ConcurrentExclusiveTaskScheduler.DebugView))]
private sealed class ConcurrentExclusiveTaskScheduler : TaskScheduler
{
/// <summary>Cached delegate for invoking TryExecuteTaskShim (avoids a per-call delegate allocation).</summary>
private static readonly Func<object, bool> s_tryExecuteTaskShim = new Func<object, bool>(TryExecuteTaskShim);
/// <summary>The parent pair.</summary>
private readonly ConcurrentExclusiveSchedulerPair m_pair;
/// <summary>The maximum concurrency level for the scheduler.</summary>
private readonly int m_maxConcurrencyLevel;
/// <summary>The processing mode of this scheduler, exclusive or concurrent.</summary>
private readonly ProcessingMode m_processingMode;
/// <summary>Gets the queue of tasks for this scheduler.</summary>
internal readonly IProducerConsumerQueue<Task> m_tasks;
/// <summary>Initializes the scheduler.</summary>
/// <param name="pair">The parent pair.</param>
/// <param name="maxConcurrencyLevel">The maximum degree of concurrency this scheduler may use.</param>
/// <param name="processingMode">The processing mode of this scheduler.</param>
internal ConcurrentExclusiveTaskScheduler(ConcurrentExclusiveSchedulerPair pair, int maxConcurrencyLevel, ProcessingMode processingMode)
{
Contract.Requires(pair != null, "Scheduler must be associated with a valid pair.");
Contract.Requires(processingMode == ProcessingMode.ProcessingConcurrentTasks || processingMode == ProcessingMode.ProcessingExclusiveTask,
"Scheduler must be for concurrent or exclusive processing.");
Contract.Requires(
(processingMode == ProcessingMode.ProcessingConcurrentTasks && (maxConcurrencyLevel >= 1 || maxConcurrencyLevel == UNLIMITED_PROCESSING)) ||
(processingMode == ProcessingMode.ProcessingExclusiveTask && maxConcurrencyLevel == 1),
"If we're in concurrent mode, our concurrency level should be positive or unlimited. If exclusive, it should be 1.");
m_pair = pair;
m_maxConcurrencyLevel = maxConcurrencyLevel;
m_processingMode = processingMode;
// The exclusive queue is only ever dequeued by one worker at a time, so the
// cheaper single-producer/single-consumer queue suffices for it.
m_tasks = (processingMode == ProcessingMode.ProcessingExclusiveTask) ?
(IProducerConsumerQueue<Task>)new SingleProducerSingleConsumerQueue<Task>() :
(IProducerConsumerQueue<Task>)new MultiProducerMultiConsumerQueue<Task>();
}
/// <summary>Gets the maximum concurrency level this scheduler is able to support.</summary>
public override int MaximumConcurrencyLevel { get { return m_maxConcurrencyLevel; } }
/// <summary>Queues a task to the scheduler.</summary>
/// <param name="task">The task to be queued.</param>
/// <exception cref="InvalidOperationException">Thrown if the pair has already had completion requested.</exception>
protected internal override void QueueTask(Task task)
{
Contract.Assert(task != null, "Infrastructure should have provided a non-null task.");
lock (m_pair.ValueLock)
{
// If the scheduler has already had completion requested, no new work is allowed to be scheduled
if (m_pair.CompletionRequested) throw new InvalidOperationException(GetType().ToString());
// Queue the task, and then let the pair know that more work is now available to be scheduled
m_tasks.Enqueue(task);
m_pair.ProcessAsyncIfNecessary();
}
}
/// <summary>Executes a task on this scheduler.</summary>
/// <param name="task">The task to be executed.</param>
internal void ExecuteTask(Task task)
{
Contract.Assert(task != null, "Infrastructure should have provided a non-null task.");
base.TryExecuteTask(task);
}
/// <summary>Tries to execute the task synchronously on this scheduler.</summary>
/// <param name="task">The task to execute.</param>
/// <param name="taskWasPreviouslyQueued">Whether the task was previously queued to the scheduler.</param>
/// <returns>true if the task could be executed; otherwise, false.</returns>
protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued)
{
Contract.Assert(task != null, "Infrastructure should have provided a non-null task.");
// If the scheduler has had completion requested, no new work is allowed to be scheduled.
// A non-locked read on m_completionRequested (in CompletionRequested) is acceptable here because:
// a) we don't need to be exact... a Complete call could come in later in the function anyway
// b) this is only a fast path escape hatch. To actually inline the task,
// we need to be inside of an already executing task, and in such a case,
// while completion may have been requested, we can't have shutdown yet.
if (!taskWasPreviouslyQueued && m_pair.CompletionRequested) return false;
// We know the implementation of the default scheduler and how it will behave.
// As it's the most common underlying scheduler, we optimize for it.
bool isDefaultScheduler = m_pair.m_underlyingTaskScheduler == TaskScheduler.Default;
// If we're targeting the default scheduler and taskWasPreviouslyQueued is true,
// we know that the default scheduler will only allow it to be inlined
// if we're on a thread pool thread (but it won't always allow it in that case,
// since it'll only allow inlining if it can find the task in the local queue).
// As such, if we're not on a thread pool thread, we know for sure the
// task won't be inlined, so let's not even try.
if (isDefaultScheduler && taskWasPreviouslyQueued && !ThreadPool.IsThreadPoolThread)
{
return false;
}
else
{
// If a task is already running on this thread, allow inline execution to proceed.
// If there's already a task from this scheduler running on the current thread, we know it's safe
// to run this task, in effect temporarily taking that task's count allocation.
if (m_pair.m_threadProcessingMode.Value == m_processingMode)
{
// If we're targeting the default scheduler and taskWasPreviouslyQueued is false,
// we know the default scheduler will allow it, so we can just execute it here.
// Otherwise, delegate to the target scheduler's inlining.
return (isDefaultScheduler && !taskWasPreviouslyQueued) ?
TryExecuteTask(task) :
TryExecuteTaskInlineOnTargetScheduler(task);
}
}
// We're not in the context of a task already executing on this scheduler. Bail.
return false;
}
/// <summary>
/// Implements a reasonable approximation for TryExecuteTaskInline on the underlying scheduler,
/// which we can't call directly on the underlying scheduler.
/// </summary>
/// <param name="task">The task to execute inline if possible.</param>
/// <returns>true if the task was inlined successfully; otherwise, false.</returns>
[SuppressMessage("Microsoft.Performance", "CA1804:RemoveUnusedLocals", MessageId = "ignored")]
private bool TryExecuteTaskInlineOnTargetScheduler(Task task)
{
// We'd like to simply call TryExecuteTaskInline here, but we can't.
// As there's no built-in API for this, a workaround is to create a new task that,
// when executed, will simply call TryExecuteTask to run the real task, and then
// we run our new shim task synchronously on the target scheduler. If all goes well,
// our synchronous invocation will succeed in running the shim task on the current thread,
// which will in turn run the real task on the current thread. If the scheduler
// doesn't allow that execution, RunSynchronously will block until the underlying scheduler
// is able to invoke the task, which might account for an additional but unavoidable delay.
// Once it's done, we can return whether the task executed by returning the
// shim task's Result, which is in turn the result of TryExecuteTask.
var t = new Task<bool>(s_tryExecuteTaskShim, Tuple.Create(this, task));
try
{
t.RunSynchronously(m_pair.m_underlyingTaskScheduler);
return t.Result;
}
catch
{
// Observe the shim task's exception so it isn't unobserved, then propagate.
Contract.Assert(t.IsFaulted, "Task should be faulted due to the scheduler faulting it and throwing the exception.");
var ignored = t.Exception;
throw;
}
}
/// <summary>Shim used to invoke this.TryExecuteTask(task).</summary>
/// <param name="state">A tuple of the ConcurrentExclusiveTaskScheduler and the task to execute.</param>
/// <returns>true if the task was successfully inlined; otherwise, false.</returns>
/// <remarks>
/// This method is separated out not because of performance reasons but so that
/// the SecuritySafeCritical attribute may be employed.
/// </remarks>
private static bool TryExecuteTaskShim(object state)
{
var tuple = (Tuple<ConcurrentExclusiveTaskScheduler, Task>)state;
return tuple.Item1.TryExecuteTask(tuple.Item2);
}
/// <summary>Gets for debugging purposes the tasks scheduled to this scheduler.</summary>
/// <returns>An enumerable of the tasks queued.</returns>
protected override IEnumerable<Task> GetScheduledTasks() { return m_tasks; }
/// <summary>Gets the number of tasks queued to this scheduler.</summary>
/// <remarks>Debugger-only; does not take the pair's lock.</remarks>
[SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
private int CountForDebugger { get { return m_tasks.Count; } }
/// <summary>Provides a debug view for ConcurrentExclusiveTaskScheduler.</summary>
private sealed class DebugView
{
/// <summary>The scheduler being debugged.</summary>
private readonly ConcurrentExclusiveTaskScheduler m_taskScheduler;
/// <summary>Initializes the debug view.</summary>
/// <param name="scheduler">The scheduler being debugged.</param>
public DebugView(ConcurrentExclusiveTaskScheduler scheduler)
{
Contract.Requires(scheduler != null, "Need a scheduler with which to construct the debug view.");
m_taskScheduler = scheduler;
}
/// <summary>Gets this pair's maximum allowed concurrency level.</summary>
public int MaximumConcurrencyLevel { get { return m_taskScheduler.m_maxConcurrencyLevel; } }
/// <summary>Gets the tasks scheduled to this scheduler.</summary>
public IEnumerable<Task> ScheduledTasks { get { return m_taskScheduler.m_tasks; } }
/// <summary>Gets the scheduler pair with which this scheduler is associated.</summary>
public ConcurrentExclusiveSchedulerPair SchedulerPair { get { return m_taskScheduler.m_pair; } }
}
}
/// <summary>Provides a debug view for ConcurrentExclusiveSchedulerPair.</summary>
private sealed class DebugView
{
    /// <summary>The pair being debugged.</summary>
    private readonly ConcurrentExclusiveSchedulerPair m_pair;

    /// <summary>Initializes the debug view.</summary>
    /// <param name="pair">The pair being debugged.</param>
    public DebugView(ConcurrentExclusiveSchedulerPair pair)
    {
        Contract.Requires(pair != null, "Need a pair with which to construct the debug view.");
        m_pair = pair;
    }

    /// <summary>Gets a representation of the execution state of the pair.</summary>
    public ProcessingMode Mode
    {
        get { return m_pair.ModeForDebugger; }
    }

    /// <summary>Gets the tasks waiting to run exclusively.</summary>
    public IEnumerable<Task> ScheduledExclusive
    {
        get { return m_pair.m_exclusiveTaskScheduler.m_tasks; }
    }

    /// <summary>Gets the tasks waiting to run concurrently.</summary>
    public IEnumerable<Task> ScheduledConcurrent
    {
        get { return m_pair.m_concurrentTaskScheduler.m_tasks; }
    }

    /// <summary>Gets the number of tasks currently being executed.</summary>
    public int CurrentlyExecutingTaskCount
    {
        get
        {
            // The sentinel marks exclusive execution, during which exactly one task runs.
            return (m_pair.m_processingCount == EXCLUSIVE_PROCESSING_SENTINEL) ? 1 : m_pair.m_processingCount;
        }
    }

    /// <summary>Gets the underlying task scheduler that actually executes the tasks.</summary>
    public TaskScheduler TargetScheduler
    {
        get { return m_pair.m_underlyingTaskScheduler; }
    }
}
/// <summary>Gets an enumeration for debugging that represents the current state of the scheduler pair.</summary>
/// <remarks>This is only for debugging. It does not take the necessary locks to be useful for runtime usage.</remarks>
private ProcessingMode ModeForDebugger
{
    get
    {
        // A completed completion task means the entire pair is done.
        if (m_completionState != null && m_completionState.Task.IsCompleted)
        {
            return ProcessingMode.Completed;
        }

        // Otherwise, build up a flags summary of the current state.
        var summary = ProcessingMode.NotCurrentlyProcessing;
        if (m_processingCount == EXCLUSIVE_PROCESSING_SENTINEL)
        {
            summary |= ProcessingMode.ProcessingExclusiveTask;
        }
        if (m_processingCount >= 1)
        {
            summary |= ProcessingMode.ProcessingConcurrentTasks;
        }
        if (CompletionRequested)
        {
            summary |= ProcessingMode.Completing;
        }
        return summary;
    }
}
/// <summary>Asserts that a given synchronization object is either held or not held.</summary>
/// <param name="syncObj">The monitor to check.</param>
/// <param name="held">Whether we want to assert that it's currently held or not held.</param>
[Conditional("DEBUG")]
internal static void ContractAssertMonitorStatus(object syncObj, bool held)
{
    Contract.Requires(syncObj != null, "The monitor object to check must be provided.");
    bool actuallyHeld = Monitor.IsEntered(syncObj);
    Contract.Assert(actuallyHeld == held, "The locking scheme was not correctly followed.");
}
/// <summary>Gets the options to use for tasks.</summary>
/// <param name="isReplacementReplica">If this task is being created to replace another.</param>
/// <remarks>
/// These options should be used for all tasks that have the potential to run user code or
/// that are repeatedly spawned and thus need a modicum of fair treatment.
/// </remarks>
/// <returns>The options to use.</returns>
internal static TaskCreationOptions GetCreationOptionsForTask(bool isReplacementReplica = false)
{
    const TaskCreationOptions baseOptions = TaskCreationOptions.DenyChildAttach;
    // Replicas replacing a prior task get fairness so they aren't starved by newer work.
    return isReplacementReplica
        ? baseOptions | TaskCreationOptions.PreferFairness
        : baseOptions;
}
/// <summary>Provides an enumeration that represents the current state of the scheduler pair.</summary>
/// <remarks>These are flags: multiple values may be combined, e.g. Completing | ProcessingConcurrentTasks.</remarks>
[Flags]
internal enum ProcessingMode : byte
{
    /// <summary>The scheduler pair is currently dormant, with no work scheduled.</summary>
    NotCurrentlyProcessing = 0x0,
    /// <summary>The scheduler pair has queued processing for exclusive tasks.</summary>
    ProcessingExclusiveTask = 0x1,
    /// <summary>The scheduler pair has queued processing for concurrent tasks.</summary>
    ProcessingConcurrentTasks = 0x2,
    /// <summary>Completion has been requested.</summary>
    Completing = 0x4,
    /// <summary>The scheduler pair is finished processing.</summary>
    Completed = 0x8
}
}
}
| |
using System;
using System.Diagnostics;
using System.IO;
namespace YAF.Lucene.Net.Store
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A straightforward implementation of <see cref="FSDirectory"/>
/// using <see cref="FileStream"/>. However, this class has
/// poor concurrent performance (multiple threads will
/// bottleneck) as it synchronizes when multiple threads
/// read from the same file. It's usually better to use
/// <see cref="NIOFSDirectory"/> or <see cref="MMapDirectory"/> instead.
/// </summary>
public class SimpleFSDirectory : FSDirectory
{
    /// <summary>
    /// Create a new <see cref="SimpleFSDirectory"/> for the named location.
    /// </summary>
    /// <param name="path"> the path of the directory </param>
    /// <param name="lockFactory"> the lock factory to use, or null for the default
    /// (<see cref="NativeFSLockFactory"/>); </param>
    /// <exception cref="IOException"> if there is a low-level I/O error </exception>
    public SimpleFSDirectory(DirectoryInfo path, LockFactory lockFactory)
        : base(path, lockFactory)
    {
    }

    /// <summary>
    /// Create a new <see cref="SimpleFSDirectory"/> for the named location and <see cref="NativeFSLockFactory"/>.
    /// </summary>
    /// <param name="path"> the path of the directory </param>
    /// <exception cref="IOException"> if there is a low-level I/O error </exception>
    public SimpleFSDirectory(DirectoryInfo path)
        : base(path, null)
    {
    }

    /// <summary>
    /// Create a new <see cref="SimpleFSDirectory"/> for the named location.
    /// <para/>
    /// LUCENENET specific overload for convenience using string instead of <see cref="DirectoryInfo"/>.
    /// </summary>
    /// <param name="path"> the path of the directory </param>
    /// <param name="lockFactory"> the lock factory to use, or null for the default
    /// (<see cref="NativeFSLockFactory"/>); </param>
    /// <exception cref="IOException"> if there is a low-level I/O error </exception>
    public SimpleFSDirectory(string path, LockFactory lockFactory)
        : this(new DirectoryInfo(path), lockFactory)
    {
    }

    /// <summary>
    /// Create a new <see cref="SimpleFSDirectory"/> for the named location and <see cref="NativeFSLockFactory"/>.
    /// <para/>
    /// LUCENENET specific overload for convenience using string instead of <see cref="DirectoryInfo"/>.
    /// </summary>
    /// <param name="path"> the path of the directory </param>
    /// <exception cref="IOException"> if there is a low-level I/O error </exception>
    public SimpleFSDirectory(string path)
        : this(path, null)
    {
    }

    /// <summary>
    /// Creates an <see cref="IndexInput"/> for the file with the given name. </summary>
    public override IndexInput OpenInput(string name, IOContext context)
    {
        EnsureOpen();
        var path = new FileInfo(Path.Combine(Directory.FullName, name));
        // FileShare.ReadWrite lets other readers (and the index writer) keep the file open.
        var raf = new FileStream(path.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
        return new SimpleFSIndexInput("SimpleFSIndexInput(path=\"" + path.FullName + "\")", raf, context);
    }

    /// <summary>
    /// Creates an <see cref="IndexInputSlicer"/> whose slices all share one open
    /// <see cref="FileStream"/> for the named file; the stream is disposed with the slicer.
    /// </summary>
    public override IndexInputSlicer CreateSlicer(string name, IOContext context)
    {
        EnsureOpen();
        var file = new FileInfo(Path.Combine(Directory.FullName, name));
        var descriptor = new FileStream(file.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
        return new IndexInputSlicerAnonymousInnerClassHelper(context, file, descriptor);
    }

    private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer
    {
        private readonly IOContext context;
        private readonly FileInfo file;
        // Shared by every slice this slicer opens; owned (and disposed) by the slicer itself.
        private readonly FileStream descriptor;

        public IndexInputSlicerAnonymousInnerClassHelper(IOContext context, FileInfo file, FileStream descriptor)
        {
            this.context = context;
            this.file = file;
            this.descriptor = descriptor;
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                descriptor.Dispose();
            }
        }

        public override IndexInput OpenSlice(string sliceDescription, long offset, long length)
        {
            return new SimpleFSIndexInput("SimpleFSIndexInput(" + sliceDescription + " in path=\"" + file.FullName + "\" slice=" + offset + ":" + (offset + length) + ")", descriptor, offset, length, BufferedIndexInput.GetBufferSize(context));
        }

        [Obsolete("Only for reading CFS files from 3.x indexes.")]
        public override IndexInput OpenFullSlice()
        {
            try
            {
                return OpenSlice("full-slice", 0, descriptor.Length);
            }
            catch (IOException ex)
            {
                throw new Exception(ex.ToString(), ex);
            }
        }
    }

    /// <summary>
    /// Reads bytes with <see cref="FileStream.Seek(long, SeekOrigin)"/> followed by
    /// <see cref="FileStream.Read(byte[], int, int)"/>.
    /// </summary>
    protected internal class SimpleFSIndexInput : BufferedIndexInput
    {
        /// <summary>
        /// the file channel we will read from </summary>
        protected internal readonly FileStream m_file;

        /// <summary>
        /// is this instance a clone and hence does not own the file to close it </summary>
        public bool IsClone { get; set; }

        /// <summary>
        /// start offset: non-zero in the slice case </summary>
        protected internal readonly long m_off;

        /// <summary>
        /// end offset (start+length) </summary>
        protected internal readonly long m_end;

        public SimpleFSIndexInput(string resourceDesc, FileStream file, IOContext context)
            : base(resourceDesc, context)
        {
            this.m_file = file;
            this.m_off = 0L;
            this.m_end = file.Length;
            this.IsClone = false;
        }

        public SimpleFSIndexInput(string resourceDesc, FileStream file, long off, long length, int bufferSize)
            : base(resourceDesc, bufferSize)
        {
            this.m_file = file;
            this.m_off = off;
            this.m_end = off + length;
            // Slices never own the underlying stream; the slicer disposes it.
            this.IsClone = true;
        }

        protected override void Dispose(bool disposing)
        {
            // Clones share the original's FileStream, so only the owner closes it.
            if (disposing && !IsClone)
            {
                m_file.Dispose();
            }
        }

        public override object Clone()
        {
            SimpleFSIndexInput clone = (SimpleFSIndexInput)base.Clone();
            clone.IsClone = true;
            return clone;
        }

        public override sealed long Length
        {
            get { return m_end - m_off; }
        }

        /// <summary>
        /// Seeks to the current file pointer (plus the slice offset) and fills
        /// <paramref name="b"/> with exactly <paramref name="len"/> bytes.
        /// </summary>
        /// <exception cref="EndOfStreamException">if the requested range extends past the end of the (sliced) file</exception>
        protected override void ReadInternal(byte[] b, int offset, int len)
        {
            // The FileStream may be shared between clones/slices, so the seek+read
            // pair must be atomic with respect to the other readers.
            lock (m_file)
            {
                long position = m_off + GetFilePointer();
                m_file.Seek(position, SeekOrigin.Begin);
                if (position + len > m_end)
                {
                    throw new EndOfStreamException("read past EOF: " + this);
                }
                try
                {
                    // FileStream.Read() may return fewer bytes than requested, so loop
                    // until the caller's requested range has been completely filled.
                    int total = 0;
                    while (total < len)
                    {
                        int read = m_file.Read(b, offset + total, len - total);
                        if (read <= 0)
                        {
                            // Be defensive: even though we checked against m_end above,
                            // the file could have been truncated concurrently.
                            throw new EndOfStreamException("read past EOF: " + this + " off: " + offset + " len: " + len + " total: " + total + " end: " + m_end);
                        }
                        total += read;
                    }
                }
                catch (IOException ioe)
                {
                    throw new IOException(ioe.Message + ": " + this, ioe);
                }
            }
        }

        protected override void SeekInternal(long position)
        {
            // Intentionally empty: the actual seek happens lazily in ReadInternal()
            // while holding the lock on the shared stream.
        }

        public virtual bool IsFDValid
        {
            get
            {
                return m_file != null;
            }
        }
    }
}
}
| |
/*
* Exchange Web Services Managed API
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
* to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
namespace Microsoft.Exchange.WebServices.Data
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
/// <summary>
/// Represents an item's attachment collection.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class AttachmentCollection : ComplexPropertyCollection<Attachment>, IOwnedProperty
{
    #region Fields
    /// <summary>
    /// The item owner that owns this attachment collection
    /// </summary>
    private Item owner;
    #endregion

    #region Constructors
    /// <summary>
    /// Initializes a new instance of AttachmentCollection.
    /// </summary>
    internal AttachmentCollection()
        : base()
    {
    }
    #endregion

    #region Properties
    #region IOwnedProperty Members
    /// <summary>
    /// The owner of this attachment collection. Must be an <see cref="Item"/>;
    /// any other ServiceObject trips the assert below.
    /// </summary>
    ServiceObject IOwnedProperty.Owner
    {
        get
        {
            return this.owner;
        }
        set
        {
            Item item = value as Item;
            EwsUtilities.Assert(
                item != null,
                "AttachmentCollection.IOwnedProperty.set_Owner",
                "value is not a descendant of ItemBase");
            this.owner = item;
        }
    }
    #endregion
    #endregion

    #region Methods
    /// <summary>
    /// Adds a file attachment to the collection, using the file's own name as the display name.
    /// </summary>
    /// <param name="fileName">The name of the file representing the content of the attachment.</param>
    /// <returns>A FileAttachment instance.</returns>
    public FileAttachment AddFileAttachment(string fileName)
    {
        return this.AddFileAttachment(Path.GetFileName(fileName), fileName);
    }

    /// <summary>
    /// Adds a file attachment to the collection.
    /// </summary>
    /// <param name="name">The display name of the new attachment.</param>
    /// <param name="fileName">The name of the file representing the content of the attachment.</param>
    /// <returns>A FileAttachment instance.</returns>
    public FileAttachment AddFileAttachment(string name, string fileName)
    {
        FileAttachment fileAttachment = new FileAttachment(this.owner);
        fileAttachment.Name = name;
        fileAttachment.FileName = fileName;
        this.InternalAdd(fileAttachment);
        return fileAttachment;
    }

    /// <summary>
    /// Adds a file attachment to the collection.
    /// </summary>
    /// <param name="name">The display name of the new attachment.</param>
    /// <param name="contentStream">The stream from which to read the content of the attachment.</param>
    /// <returns>A FileAttachment instance.</returns>
    public FileAttachment AddFileAttachment(string name, Stream contentStream)
    {
        FileAttachment fileAttachment = new FileAttachment(this.owner);
        fileAttachment.Name = name;
        fileAttachment.ContentStream = contentStream;
        this.InternalAdd(fileAttachment);
        return fileAttachment;
    }

    /// <summary>
    /// Adds a file attachment to the collection.
    /// </summary>
    /// <param name="name">The display name of the new attachment.</param>
    /// <param name="content">A byte array representing the content of the attachment.</param>
    /// <returns>A FileAttachment instance.</returns>
    public FileAttachment AddFileAttachment(string name, byte[] content)
    {
        FileAttachment fileAttachment = new FileAttachment(this.owner);
        fileAttachment.Name = name;
        fileAttachment.Content = content;
        this.InternalAdd(fileAttachment);
        return fileAttachment;
    }

    /// <summary>
    /// Adds an item attachment to the collection
    /// </summary>
    /// <typeparam name="TItem">The type of the item to attach. Must be marked [Attachable].</typeparam>
    /// <returns>An ItemAttachment instance.</returns>
    public ItemAttachment<TItem> AddItemAttachment<TItem>()
        where TItem : Item
    {
        // Only item types explicitly marked with AttachableAttribute may be attached.
        if (typeof(TItem).GetCustomAttributes(typeof(AttachableAttribute), false).Length == 0)
        {
            throw new InvalidOperationException(
                string.Format(
                    "Items of type {0} are not supported as attachments.",
                    typeof(TItem).Name));
        }
        ItemAttachment<TItem> itemAttachment = new ItemAttachment<TItem>(this.owner);
        itemAttachment.Item = (TItem)EwsUtilities.CreateItemFromItemClass(itemAttachment, typeof(TItem), true);
        this.InternalAdd(itemAttachment);
        return itemAttachment;
    }

    /// <summary>
    /// Removes all attachments from this collection.
    /// </summary>
    public void Clear()
    {
        this.InternalClear();
    }

    /// <summary>
    /// Removes the attachment at the specified index.
    /// </summary>
    /// <param name="index">Index of the attachment to remove.</param>
    /// <exception cref="ArgumentOutOfRangeException">If index is negative or beyond the collection.</exception>
    public void RemoveAt(int index)
    {
        if (index < 0 || index >= this.Count)
        {
            throw new ArgumentOutOfRangeException("index", Strings.IndexIsOutOfRange);
        }
        this.InternalRemoveAt(index);
    }

    /// <summary>
    /// Removes the specified attachment.
    /// </summary>
    /// <param name="attachment">The attachment to remove.</param>
    /// <returns>True if the attachment was successfully removed from the collection, false otherwise.</returns>
    public bool Remove(Attachment attachment)
    {
        EwsUtilities.ValidateParam(attachment, "attachment");
        return this.InternalRemove(attachment);
    }

    /// <summary>
    /// Instantiate the appropriate attachment type depending on the current XML element name.
    /// </summary>
    /// <param name="xmlElementName">The XML element name from which to determine the type of attachment to create.</param>
    /// <returns>An Attachment instance, or null for unrecognized element names.</returns>
    internal override Attachment CreateComplexProperty(string xmlElementName)
    {
        switch (xmlElementName)
        {
            case XmlElementNames.FileAttachment:
                return new FileAttachment(this.owner);
            case XmlElementNames.ItemAttachment:
                return new ItemAttachment(this.owner);
            default:
                return null;
        }
    }

    /// <summary>
    /// Determines the name of the XML element associated with the complexProperty parameter.
    /// </summary>
    /// <param name="complexProperty">The attachment object for which to determine the XML element name with.</param>
    /// <returns>The XML element name associated with the complexProperty parameter.</returns>
    internal override string GetCollectionItemXmlElementName(Attachment complexProperty)
    {
        if (complexProperty is FileAttachment)
        {
            return XmlElementNames.FileAttachment;
        }
        else
        {
            // Anything that is not a FileAttachment is serialized as an ItemAttachment.
            return XmlElementNames.ItemAttachment;
        }
    }

    /// <summary>
    /// Saves this collection by creating new attachments and deleting removed ones.
    /// Order matters: deletions are processed first, then creations, then item
    /// attachments are recursed into for their own sub-attachments.
    /// </summary>
    internal void Save()
    {
        List<Attachment> attachments = new List<Attachment>();
        // Retrieve a list of attachments that have to be deleted.
        // Attachments that are still IsNew were never sent to the server, so there
        // is nothing to delete for them.
        foreach (Attachment attachment in this.RemovedItems)
        {
            if (!attachment.IsNew)
            {
                attachments.Add(attachment);
            }
        }
        // If any, delete them by calling the DeleteAttachment web method.
        if (attachments.Count > 0)
        {
            this.InternalDeleteAttachments(attachments);
        }
        attachments.Clear();
        // Retrieve a list of attachments that have to be created.
        foreach (Attachment attachment in this)
        {
            if (attachment.IsNew)
            {
                attachments.Add(attachment);
            }
        }
        // If there are any, create them by calling the CreateAttachment web method.
        // New attachments are parented either to the owning item or, when the owner
        // is itself an attachment, to that parent attachment.
        if (attachments.Count > 0)
        {
            if (this.owner.IsAttachment)
            {
                this.InternalCreateAttachments(this.owner.ParentAttachment.Id, attachments);
            }
            else
            {
                this.InternalCreateAttachments(this.owner.Id.UniqueId, attachments);
            }
        }
        // Process all of the item attachments in this collection.
        foreach (Attachment attachment in this)
        {
            ItemAttachment itemAttachment = attachment as ItemAttachment;
            if (itemAttachment != null)
            {
                // Make sure item was created/loaded before trying to create/delete sub-attachments
                if (itemAttachment.Item != null)
                {
                    // Create/delete any sub-attachments
                    itemAttachment.Item.Attachments.Save();
                    // Clear the item's change log
                    itemAttachment.Item.ClearChangeLog();
                }
            }
        }
        base.ClearChangeLog();
    }

    /// <summary>
    /// Determines whether there are any unsaved attachment collection changes.
    /// </summary>
    /// <returns>True if attachment adds or deletes haven't been processed yet.</returns>
    internal bool HasUnprocessedChanges()
    {
        // Any new attachments?
        foreach (Attachment attachment in this)
        {
            if (attachment.IsNew)
            {
                return true;
            }
        }
        // Any pending deletions?
        foreach (Attachment attachment in this.RemovedItems)
        {
            if (!attachment.IsNew)
            {
                return true;
            }
        }
        // Recurse: process item attachments to check for new or deleted sub-attachments.
        foreach (ItemAttachment itemAttachment in this.OfType<ItemAttachment>())
        {
            if (itemAttachment.Item != null)
            {
                if (itemAttachment.Item.Attachments.HasUnprocessedChanges())
                {
                    return true;
                }
            }
        }
        return false;
    }

    /// <summary>
    /// Disables the change log clearing mechanism. Attachment collections are saved separately
    /// from the items they belong to (Save() clears the log itself via base.ClearChangeLog()).
    /// </summary>
    internal override void ClearChangeLog()
    {
        // Do nothing
    }

    /// <summary>
    /// Validates this instance, rejecting requests that would carry more than one
    /// contact-photo attachment (see inline explanation).
    /// </summary>
    internal void Validate()
    {
        // Validate all added attachments
        bool contactPhotoFound = false;
        for (int attachmentIndex = 0; attachmentIndex < this.AddedItems.Count; attachmentIndex++)
        {
            Attachment attachment = this.AddedItems[attachmentIndex];
            if (attachment.IsNew)
            {
                // At the server side, only the last attachment with IsContactPhoto is kept, all other IsContactPhoto
                // attachments are removed. CreateAttachment will generate AttachmentId for each of such attachments (although
                // only the last one is valid).
                //
                // With E14 SP2 CreateItemWithAttachment, such request will only return 1 AttachmentId; but the client
                // expects to see all, so let us prevent such "invalid" request in the first place.
                //
                // The IsNew check is to still let CreateAttachmentRequest allow multiple IsContactPhoto attachments.
                //
                if (this.owner.IsNew && this.owner.Service.RequestedServerVersion >= ExchangeVersion.Exchange2010_SP2)
                {
                    FileAttachment fileAttachment = attachment as FileAttachment;
                    if (fileAttachment != null && fileAttachment.IsContactPhoto)
                    {
                        if (contactPhotoFound)
                        {
                            throw new ServiceValidationException(Strings.MultipleContactPhotosInAttachment);
                        }
                        contactPhotoFound = true;
                    }
                }
                attachment.Validate(attachmentIndex);
            }
        }
    }

    /// <summary>
    /// Calls the DeleteAttachment web method to delete a list of attachments.
    /// </summary>
    /// <param name="attachments">The attachments to delete.</param>
    /// <exception cref="DeleteAttachmentException">If at least one attachment could not be deleted.</exception>
    private void InternalDeleteAttachments(IEnumerable<Attachment> attachments)
    {
        ServiceResponseCollection<DeleteAttachmentResponse> responses = this.owner.Service.DeleteAttachments(attachments);
        foreach (DeleteAttachmentResponse response in responses)
        {
            // We remove all attachments that were successfully deleted from the change log. We should never
            // receive a warning from EWS, so we ignore them.
            if (response.Result != ServiceResult.Error)
            {
                this.RemoveFromChangeLog(response.Attachment);
            }
        }
        // TODO : Should we throw for warnings as well?
        if (responses.OverallResult == ServiceResult.Error)
        {
            throw new DeleteAttachmentException(responses, Strings.AtLeastOneAttachmentCouldNotBeDeleted);
        }
    }

    /// <summary>
    /// Calls the CreateAttachment web method to create a list of attachments.
    /// </summary>
    /// <param name="parentItemId">The Id of the parent item of the new attachments.</param>
    /// <param name="attachments">The attachments to create.</param>
    /// <exception cref="CreateAttachmentException">If at least one attachment could not be created.</exception>
    private void InternalCreateAttachments(string parentItemId, IEnumerable<Attachment> attachments)
    {
        ServiceResponseCollection<CreateAttachmentResponse> responses = this.owner.Service.CreateAttachments(parentItemId, attachments);
        foreach (CreateAttachmentResponse response in responses)
        {
            // We remove all attachments that were successfully created from the change log. We should never
            // receive a warning from EWS, so we ignore them.
            if (response.Result != ServiceResult.Error)
            {
                this.RemoveFromChangeLog(response.Attachment);
            }
        }
        // TODO : Should we throw for warnings as well?
        if (responses.OverallResult == ServiceResult.Error)
        {
            throw new CreateAttachmentException(responses, Strings.AttachmentCreationFailed);
        }
    }
    #endregion
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: List.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System.Collections.Generic;
using Dot42.Collections;
using Java.Lang;
namespace System.Collections.ObjectModel
{
/// <summary>
/// Wraps a <see cref="List{T}"/> and routes all mutations through the protected
/// virtual ClearItems/InsertItem/RemoveItem/SetItem members so that derived
/// classes can intercept changes (mirrors the BCL Collection&lt;T&gt; pattern).
/// </summary>
public class Collection<T> : ICollection<T>, IEnumerable<T>, IList<T>, IList, ICollection, IEnumerable
{
    private readonly List<T> list;

    /// <summary>
    /// Gets the underlying list that backs this collection.
    /// </summary>
    protected IList<T> Items { get { return list; } }

    /// <summary>
    /// Default ctor
    /// </summary>
    public Collection()
    {
        list = new List<T>();
    }

    /// <summary>
    /// Copy ctor
    /// </summary>
    public Collection(IEnumerable<T> source)
    {
        list = new List<T>(source);
    }

    /// <summary>
    /// Copy ctor
    /// </summary>
    public Collection(IIterable<T> source)
    {
        list = new List<T>(source);
    }

    /// <summary>
    /// Initialize a new list with initial capacity
    /// </summary>
    public Collection(int capacity)
    {
        list = new List<T>(capacity);
    }

    /// <summary>
    /// Removes all elements. Override to intercept clearing.
    /// </summary>
    protected virtual void ClearItems()
    {
        list.Clear();
    }

    /// <summary>
    /// Inserts item at the given index. Override to intercept insertions.
    /// </summary>
    protected virtual void InsertItem(int index, T item)
    {
        list.Insert(index, item);
    }

    /// <summary>
    /// Removes the element at the given index. Override to intercept removals.
    /// </summary>
    protected virtual void RemoveItem(int index)
    {
        list.RemoveAt(index);
    }

    /// <summary>
    /// Replaces the element at the given index. Override to intercept replacement.
    /// </summary>
    protected virtual void SetItem(int index, T item)
    {
        list[index] = item;
    }

    /// <summary>
    /// Gets an enumerator to enumerate over all elements in this sequence.
    /// </summary>
    /// <returns></returns>
    public IEnumerator<T> GetEnumerator()
    {
        return list.GetEnumerator();
    }

    /// <summary>
    /// Gets an enumerator to enumerate over all elements in this sequence.
    /// </summary>
    /// <returns></returns>
    IEnumerator<T> IEnumerable<T>.GetEnumerator()
    {
        return GetEnumerator();
    }

    /// <summary>
    /// Gets an enumerator to enumerate over all elements in this sequence.
    /// </summary>
    /// <returns></returns>
    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }

    /// <summary>
    /// Gets number of elements in this collection.
    /// </summary>
    public int Count
    {
        get { return list.Count; }
    }

    /// <summary>
    /// Is this collection thread safe.
    /// </summary>
    public bool IsSynchronized
    {
        get { return list.IsSynchronized; }
    }

    /// <summary>
    /// Gets the object used to synchronize access to this collection.
    /// </summary>
    public object SyncRoot
    {
        get { return list.SyncRoot; }
    }

    /// <summary>
    /// Copy all my elements to the given array starting at the given index.
    /// </summary>
    /// <param name="array">Array to copy my elements into.</param>
    /// <param name="index">Position in <see cref="array"/> where the first element will be copied to.</param>
    public void CopyTo(Array array, int index)
    {
        list.CopyTo(array, index);
    }

    /// <summary>
    /// Does this list have a fixed size?
    /// </summary>
    public bool IsFixedSize
    {
        get { return false; }
    }

    /// <summary>
    /// Is this list read-only.
    /// </summary>
    public bool IsReadOnly
    {
        get { return false; }
    }

    /// <summary>
    /// Gets/sets an item in this list at the given index.
    /// </summary>
    public T this[int index]
    {
        get { return list[index]; }
        set { SetItem(index, value); }
    }

    /// <summary>
    /// Add the given item to this collection.
    /// </summary>
    public void Add(T item)
    {
        InsertItem(list.Count, item);
    }

    /// <summary>
    /// Adds the elements of the specified collection to the end of this list.
    /// </summary>
    public void AddRange(IEnumerable<T> collection)
    {
        // Route each element through InsertItem so overrides see every addition.
        foreach (var element in collection)
        {
            InsertItem(list.Count, element);
        }
    }

    /// <summary>
    /// Is the given element contained in this list?
    /// </summary>
    public bool Contains(T element)
    {
        return list.Contains(element);
    }

    /// <summary>
    /// Removes the first occurrence of the given element from this list.
    /// </summary>
    public bool Remove(T element)
    {
        int idx = IndexOf(element);
        if (idx == -1) return false;
        RemoveItem(idx);
        return true;
    }

    /// <summary>
    /// Gets the index of the first occurrence of the given element, or -1 if not present.
    /// </summary>
    public int IndexOf(T element)
    {
        return list.IndexOf(element);
    }

    /// <summary>
    /// Removes the element at the given index.
    /// </summary>
    public void RemoveAt(int index)
    {
        RemoveItem(index);
    }

    /// <summary>
    /// Inserts the given element at the given index.
    /// </summary>
    public void Insert(int index, T element)
    {
        InsertItem(index, element);
    }

    /// <summary>
    /// Removes all elements from this collection.
    /// </summary>
    public void Clear()
    {
        ClearItems();
    }

    /// <summary>
    /// Copy all my elements to the given array starting at the given index.
    /// </summary>
    public void CopyTo(T[] array, int index)
    {
        list.CopyTo(array, index);
    }

    /// <summary>
    /// Gets/sets an item in this list at the given index.
    /// </summary>
    object IList.this[int index]
    {
        get { return list[index]; }
        set { SetItem(index, (T)value); }
    }

    /// <summary>
    /// Add the given element to the end of this list.
    /// </summary>
    /// <returns>The index at which the element was added or -1 if the element was not added.</returns>
    int IList.Add(object element)
    {
        int idx = list.Count;
        InsertItem(idx, (T)element);
        return idx;
    }

    bool IList.Contains(object element)
    {
        if (element is T)
        {
            return list.Contains((T)element);
        }
        // null can be a valid element when T is a reference (or nullable) type.
        if ((element == null) && (default(T) == null))
        {
            return list.Contains(default(T));
        }
        // Per the non-generic IList contract, elements of an incompatible type
        // are simply not contained rather than an error.
        return false;
    }

    int IList.IndexOf(object element)
    {
        if (element is T)
        {
            return list.IndexOf((T)element);
        }
        if ((element == null) && (default(T) == null))
        {
            return list.IndexOf(default(T));
        }
        // Incompatible element types are never present.
        return -1;
    }

    void IList.Insert(int index, object element)
    {
        InsertItem(index, (T)element);
    }

    void IList.Remove(object element)
    {
        if (element is T)
        {
            Remove((T)element);
        }
        else if ((element == null) && (default(T) == null))
        {
            Remove(default(T));
        }
        // Per the non-generic IList contract, removing an element of an
        // incompatible type is a no-op rather than an error.
    }

    // NOTE(review): the type parameter T here shadows the enclosing class's T
    // (compiler warning CS0693). Kept as-is because renaming it would change
    // this nested public type's declared surface.
    public class Enumerator<T> : IteratorWrapper<T>
    {
        public Enumerator(IIterable<T> iterable)
            : base(iterable)
        {
        }
    }
}
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace WebsitePanel.Portal.ProviderControls {
    /// <summary>
    /// Auto-generated designer declarations for the Exchange2010_Settings user control.
    /// Each field corresponds to a server control in the matching .ascx markup.
    /// Do not edit by hand — this file is regenerated by the designer and manual
    /// changes will be lost (move declarations to the code-behind file instead).
    /// </summary>
    public partial class Exchange2010_Settings {
        /// <summary>
        /// powershellUrl1 control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.HtmlControls.HtmlTableRow powershellUrl1;
        /// <summary>
        /// lblFileServiceInfo control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Label lblFileServiceInfo;
        /// <summary>
        /// powershellUrl2 control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.HtmlControls.HtmlTableRow powershellUrl2;
        /// <summary>
        /// loclocPowerShellUrl control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize loclocPowerShellUrl;
        /// <summary>
        /// txtPowerShellUrl control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtPowerShellUrl;
        /// <summary>
        /// storageGroup control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.HtmlControls.HtmlTableRow storageGroup;
        /// <summary>
        /// locStorageGroup control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locStorageGroup;
        /// <summary>
        /// txtStorageGroup control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtStorageGroup;
        /// <summary>
        /// locMailboxDatabase control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locMailboxDatabase;
        /// <summary>
        /// locMailboxDAG control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locMailboxDAG;
        /// <summary>
        /// txtMailboxDatabase control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtMailboxDatabase;
        /// <summary>
        /// archivingGroup control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.HtmlControls.HtmlTableCell archivingGroup;
        /// <summary>
        /// locArchivingDatabase control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locArchivingDatabase;
        /// <summary>
        /// txtArchivingDatabase control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtArchivingDatabase;
        /// <summary>
        /// locKeepDeletedItems control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locKeepDeletedItems;
        /// <summary>
        /// txtKeepDeletedItems control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtKeepDeletedItems;
        /// <summary>
        /// locKeepDeletedMailboxes control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locKeepDeletedMailboxes;
        /// <summary>
        /// txtKeepDeletedMailboxes control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtKeepDeletedMailboxes;
        /// <summary>
        /// clusteredMailboxServer control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.HtmlControls.HtmlTableRow clusteredMailboxServer;
        /// <summary>
        /// locMailboxClusterName control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locMailboxClusterName;
        /// <summary>
        /// txtMailboxClusterName control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtMailboxClusterName;
        /// <summary>
        /// Localize1 control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize Localize1;
        /// <summary>
        /// txtPublicFolderServer control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtPublicFolderServer;
        /// <summary>
        /// Localize2 control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize Localize2;
        /// <summary>
        /// txtOABServer control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtOABServer;
        /// <summary>
        /// locHubTransport control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locHubTransport;
        /// <summary>
        /// ddlHubTransport control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.DropDownList ddlHubTransport;
        /// <summary>
        /// btnAdd control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Button btnAdd;
        /// <summary>
        /// gvHubTransport control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.GridView gvHubTransport;
        /// <summary>
        /// locClientAccess control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locClientAccess;
        /// <summary>
        /// ddlClientAccess control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.DropDownList ddlClientAccess;
        /// <summary>
        /// Button1 control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Button Button1;
        /// <summary>
        /// gvClients control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.GridView gvClients;
        /// <summary>
        /// lblSetupVariables control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Label lblSetupVariables;
        /// <summary>
        /// locSmtpServers control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locSmtpServers;
        /// <summary>
        /// txtSmtpServers control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtSmtpServers;
        /// <summary>
        /// locSmtpComments control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locSmtpComments;
        /// <summary>
        /// locAutodiscoverIP control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locAutodiscoverIP;
        /// <summary>
        /// txtAutodiscoverIP control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtAutodiscoverIP;
        /// <summary>
        /// locAutodiscoverDomain control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locAutodiscoverDomain;
        /// <summary>
        /// txtAutodiscoverDomain control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtAutodiscoverDomain;
        /// <summary>
        /// locOwaUrl control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locOwaUrl;
        /// <summary>
        /// txtOwaUrl control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtOwaUrl;
        /// <summary>
        /// locActiveSyncServer control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Localize locActiveSyncServer;
        /// <summary>
        /// txtActiveSyncServer control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtActiveSyncServer;
    }
}
| |
using System;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using GitVersion;
using GitVersion.Helpers;
using NUnit.Framework;
using Shouldly;
using YamlDotNet.Serialization;
[TestFixture]
public class ConfigProviderTests
{
    // Fake repository path used by every test; backed by an in-memory file system.
    string repoPath;
    IFileSystem fileSystem;

    [SetUp]
    public void Setup()
    {
        fileSystem = new TestFileSystem();
        repoPath = "c:\\MyGitRepo";
    }

    [Test]
    public void CanReadDocumentAndMigrate()
    {
        const string text = @"
assembly-versioning-scheme: MajorMinor
next-version: 2.0.0
tag-prefix: '[vV|version-]'
mode: ContinuousDelivery
branches:
    develop:
        mode: ContinuousDeployment
        tag: dev
    release[/-]:
        mode: continuousDeployment
        tag: rc
";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.AssemblyVersioningScheme.ShouldBe(AssemblyVersioningScheme.MajorMinor);
        config.AssemblyInformationalFormat.ShouldBe(null);
        config.NextVersion.ShouldBe("2.0.0");
        config.TagPrefix.ShouldBe("[vV|version-]");
        config.VersioningMode.ShouldBe(VersioningMode.ContinuousDelivery);
        // Legacy branch names ("develop", "release[/-]") are migrated to the current regex keys.
        config.Branches["dev(elop)?(ment)?$"].Tag.ShouldBe("dev");
        config.Branches["releases?[/-]"].Tag.ShouldBe("rc");
        config.Branches["releases?[/-]"].VersioningMode.ShouldBe(VersioningMode.ContinuousDeployment);
        config.Branches["dev(elop)?(ment)?$"].VersioningMode.ShouldBe(VersioningMode.ContinuousDeployment);
    }

    [Test]
    public void CanReadOldDocument()
    {
        const string text = @"
assemblyVersioningScheme: MajorMinor
develop-branch-tag: alpha
release-branch-tag: rc
";
        SetupConfigFileContent(text);

        var error = Should.Throw<OldConfigurationException>(() => ConfigurationProvider.Provide(repoPath, fileSystem));

        error.Message.ShouldContainWithoutWhitespace(@"GitVersion configuration file contains old configuration, please fix the following errors:
assemblyVersioningScheme has been replaced by assembly-versioning-scheme
develop-branch-tag has been replaced by branch specific configuration.See http://gitversion.readthedocs.org/en/latest/configuration/#branch-configuration
release-branch-tag has been replaced by branch specific configuration.See http://gitversion.readthedocs.org/en/latest/configuration/#branch-configuration");
    }

    [Test]
    public void OverwritesDefaultsWithProvidedConfig()
    {
        var defaultConfig = ConfigurationProvider.Provide(repoPath, fileSystem);
        const string text = @"
next-version: 2.0.0
branches:
    dev(elop)?(ment)?$:
        mode: ContinuousDeployment
        tag: dev";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.NextVersion.ShouldBe("2.0.0");
        // Keys not present in the file keep their default values.
        config.Branches["dev(elop)?(ment)?$"].Increment.ShouldBe(defaultConfig.Branches["dev(elop)?(ment)?$"].Increment);
        config.Branches["dev(elop)?(ment)?$"].VersioningMode.ShouldBe(defaultConfig.Branches["dev(elop)?(ment)?$"].VersioningMode);
        config.Branches["dev(elop)?(ment)?$"].Tag.ShouldBe("dev");
    }

    [Test]
    public void CanRemoveTag()
    {
        const string text = @"
next-version: 2.0.0
branches:
    releases?[/-]:
        tag: """"";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.NextVersion.ShouldBe("2.0.0");
        config.Branches["releases?[/-]"].Tag.ShouldBe(string.Empty);
    }

    [Test]
    public void CanProvideConfigForNewBranch()
    {
        const string text = @"
next-version: 2.0.0
branches:
    bug[/-]:
        tag: bugfix";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.Branches["bug[/-]"].Tag.ShouldBe("bugfix");
    }

    [Test]
    public void NextVersionCanBeInteger()
    {
        const string text = "next-version: 2";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        // A bare integer is normalized to "<major>.0".
        config.NextVersion.ShouldBe("2.0");
    }

    [Test]
    public void NextVersionCanHaveEnormousMinorVersion()
    {
        const string text = "next-version: 2.118998723";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.NextVersion.ShouldBe("2.118998723");
    }

    [Test]
    public void NextVersionCanHavePatch()
    {
        const string text = "next-version: 2.12.654651698";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.NextVersion.ShouldBe("2.12.654651698");
    }

    [Test]
    [MethodImpl(MethodImplOptions.NoInlining)]
    public void CanWriteOutEffectiveConfiguration()
    {
        var config = ConfigurationProvider.GetEffectiveConfigAsString(repoPath, fileSystem);

        config.ShouldMatchApproved();
    }

    [Test]
    public void CanUpdateAssemblyInformationalVersioningScheme()
    {
        const string text = @"
assembly-versioning-scheme: MajorMinor
assembly-informational-format: '{NugetVersion}'";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.AssemblyVersioningScheme.ShouldBe(AssemblyVersioningScheme.MajorMinor);
        config.AssemblyInformationalFormat.ShouldBe("{NugetVersion}");
    }

    [Test]
    public void CanUpdateAssemblyInformationalVersioningSchemeWithMultipleVariables()
    {
        const string text = @"
assembly-versioning-scheme: MajorMinor
assembly-informational-format: '{Major}.{Minor}.{Patch}'";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.AssemblyVersioningScheme.ShouldBe(AssemblyVersioningScheme.MajorMinor);
        config.AssemblyInformationalFormat.ShouldBe("{Major}.{Minor}.{Patch}");
    }

    [Test]
    public void CanUpdateAssemblyInformationalVersioningSchemeWithFullSemVer()
    {
        const string text = @"assembly-versioning-scheme: MajorMinorPatch
assembly-informational-format: '{FullSemVer}'
mode: ContinuousDelivery
next-version: 5.3.0
branches: {}";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.AssemblyVersioningScheme.ShouldBe(AssemblyVersioningScheme.MajorMinorPatch);
        config.AssemblyInformationalFormat.ShouldBe("{FullSemVer}");
    }

    [Test]
    public void CanReadDefaultDocument()
    {
        const string text = "";
        SetupConfigFileContent(text);

        var config = ConfigurationProvider.Provide(repoPath, fileSystem);

        config.AssemblyVersioningScheme.ShouldBe(AssemblyVersioningScheme.MajorMinorPatch);
        config.AssemblyInformationalFormat.ShouldBe(null);
        config.Branches["dev(elop)?(ment)?$"].Tag.ShouldBe("unstable");
        config.Branches["releases?[/-]"].Tag.ShouldBe("beta");
        config.TagPrefix.ShouldBe(ConfigurationProvider.DefaultTagPrefix);
        config.NextVersion.ShouldBe(null);
    }

    [Test]
    public void VerifyAliases()
    {
        // Every non-obsolete Config property must carry a YamlMember alias.
        var config = typeof(Config);
        var propertiesMissingAlias = config.GetProperties()
            .Where(p => p.GetCustomAttribute<ObsoleteAttribute>() == null)
            .Where(p => p.GetCustomAttribute(typeof(YamlMemberAttribute)) == null)
            .Select(p => p.Name);

        propertiesMissingAlias.ShouldBeEmpty();
    }

    [Test]
    public void WarnOnExistingGitVersionConfigYamlFile()
    {
        SetupConfigFileContent(string.Empty, "GitVersionConfig.yaml");
        // Accumulate ALL log output. The previous capture kept only the last
        // message, so the assertion broke whenever anything logged afterwards.
        var log = string.Empty;
        Action<string> action = info => { log += info; };
        Logger.SetLoggers(action, action, action);

        ConfigurationProvider.Provide(repoPath, fileSystem);

        // ShouldContain gives a useful failure message, unlike Contains(...).ShouldBe(true).
        log.ShouldContain("'GitVersionConfig.yaml' is deprecated, use 'GitVersion.yml' instead.");
    }

    [Test]
    public void NoWarnOnGitVersionYmlFile()
    {
        SetupConfigFileContent(string.Empty);
        var log = string.Empty;
        Action<string> action = info => { log += info; };
        Logger.SetLoggers(action, action, action);

        ConfigurationProvider.Provide(repoPath, fileSystem);

        log.ShouldBe(string.Empty);
    }

    // Writes the given YAML text as the config file inside the fake repo.
    void SetupConfigFileContent(string text, string fileName = "GitVersion.yml")
    {
        fileSystem.WriteAllText(Path.Combine(repoPath, fileName), text);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tenancy_config/hk_phone_codes.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace HOLMS.Types.TenancyConfig {
  /// <summary>Holder for reflection information generated from tenancy_config/hk_phone_codes.proto</summary>
  public static partial class HkPhoneCodesReflection {

    #region Descriptor
    /// <summary>File descriptor for tenancy_config/hk_phone_codes.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    // Static constructor: decodes the protoc-emitted serialized descriptor
    // (base64-encoded FileDescriptorProto) and registers the generated CLR
    // type info for the HKPhoneCodes message and its five string fields.
    static HkPhoneCodesReflection() {
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "CiN0ZW5hbmN5X2NvbmZpZy9oa19waG9uZV9jb2Rlcy5wcm90bxIaaG9sbXMu",
            "dHlwZXMudGVuYW5jeV9jb25maWciiAEKDEhLUGhvbmVDb2RlcxIUCgxtYWlk",
            "X2luX3Jvb20YASABKAkSDwoHaW5zcGVjdBgCIAEoCRIgChhjbGVhbl9zaGVl",
            "dHNfbm90X2NoYW5nZWQYAyABKAkSIAoYY2xlYW5fYW5kX3NoZWV0c19jaGFu",
            "Z2VkGAQgASgJEg0KBWRpcnR5GAUgASgJQitaDXRlbmFuY3ljb25maWeqAhlI",
            "T0xNUy5UeXBlcy5UZW5hbmN5Q29uZmlnYgZwcm90bzM="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.TenancyConfig.HKPhoneCodes), global::HOLMS.Types.TenancyConfig.HKPhoneCodes.Parser, new[]{ "MaidInRoom", "Inspect", "CleanSheetsNotChanged", "CleanAndSheetsChanged", "Dirty" }, null, null, null)
          }));
    }
    #endregion

  }
#region Messages
  // Generated protobuf message holding the housekeeping phone codes for a
  // tenancy: one dial string per room-cleaning status. All fields are
  // non-null strings ("" when unset), per proto3 semantics.
  public sealed partial class HKPhoneCodes : pb::IMessage<HKPhoneCodes> {
    // Shared parser instance for deserializing HKPhoneCodes messages.
    private static readonly pb::MessageParser<HKPhoneCodes> _parser = new pb::MessageParser<HKPhoneCodes>(() => new HKPhoneCodes());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<HKPhoneCodes> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::HOLMS.Types.TenancyConfig.HkPhoneCodesReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HKPhoneCodes() {
      OnConstruction();
    }

    // Partial hook for user code to run after construction.
    partial void OnConstruction();

    // Copy constructor: field-by-field copy (strings are immutable, so a
    // shallow copy is a full copy here).
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HKPhoneCodes(HKPhoneCodes other) : this() {
      maidInRoom_ = other.maidInRoom_;
      inspect_ = other.inspect_;
      cleanSheetsNotChanged_ = other.cleanSheetsNotChanged_;
      cleanAndSheetsChanged_ = other.cleanAndSheetsChanged_;
      dirty_ = other.dirty_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HKPhoneCodes Clone() {
      return new HKPhoneCodes(this);
    }

    /// <summary>Field number for the "maid_in_room" field.</summary>
    public const int MaidInRoomFieldNumber = 1;
    private string maidInRoom_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string MaidInRoom {
      get { return maidInRoom_; }
      set {
        maidInRoom_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "inspect" field.</summary>
    public const int InspectFieldNumber = 2;
    private string inspect_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Inspect {
      get { return inspect_; }
      set {
        inspect_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "clean_sheets_not_changed" field.</summary>
    public const int CleanSheetsNotChangedFieldNumber = 3;
    private string cleanSheetsNotChanged_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string CleanSheetsNotChanged {
      get { return cleanSheetsNotChanged_; }
      set {
        cleanSheetsNotChanged_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "clean_and_sheets_changed" field.</summary>
    public const int CleanAndSheetsChangedFieldNumber = 4;
    private string cleanAndSheetsChanged_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string CleanAndSheetsChanged {
      get { return cleanAndSheetsChanged_; }
      set {
        cleanAndSheetsChanged_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "dirty" field.</summary>
    public const int DirtyFieldNumber = 5;
    private string dirty_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Dirty {
      get { return dirty_; }
      set {
        dirty_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as HKPhoneCodes);
    }

    // Value equality over all five fields.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(HKPhoneCodes other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (MaidInRoom != other.MaidInRoom) return false;
      if (Inspect != other.Inspect) return false;
      if (CleanSheetsNotChanged != other.CleanSheetsNotChanged) return false;
      if (CleanAndSheetsChanged != other.CleanAndSheetsChanged) return false;
      if (Dirty != other.Dirty) return false;
      return true;
    }

    // Hash code XORs the hashes of non-default (non-empty) fields only,
    // keeping it consistent with Equals above.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (MaidInRoom.Length != 0) hash ^= MaidInRoom.GetHashCode();
      if (Inspect.Length != 0) hash ^= Inspect.GetHashCode();
      if (CleanSheetsNotChanged.Length != 0) hash ^= CleanSheetsNotChanged.GetHashCode();
      if (CleanAndSheetsChanged.Length != 0) hash ^= CleanAndSheetsChanged.GetHashCode();
      if (Dirty.Length != 0) hash ^= Dirty.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    // Serializes set (non-empty) fields. Raw tags are precomputed as
    // (field_number << 3) | wire_type, wire type 2 = length-delimited.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (MaidInRoom.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(MaidInRoom);
      }
      if (Inspect.Length != 0) {
        output.WriteRawTag(18);
        output.WriteString(Inspect);
      }
      if (CleanSheetsNotChanged.Length != 0) {
        output.WriteRawTag(26);
        output.WriteString(CleanSheetsNotChanged);
      }
      if (CleanAndSheetsChanged.Length != 0) {
        output.WriteRawTag(34);
        output.WriteString(CleanAndSheetsChanged);
      }
      if (Dirty.Length != 0) {
        output.WriteRawTag(42);
        output.WriteString(Dirty);
      }
    }

    // Size in bytes of the serialized message (1 byte tag + string size
    // per set field), mirroring WriteTo.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (MaidInRoom.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(MaidInRoom);
      }
      if (Inspect.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Inspect);
      }
      if (CleanSheetsNotChanged.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(CleanSheetsNotChanged);
      }
      if (CleanAndSheetsChanged.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(CleanAndSheetsChanged);
      }
      if (Dirty.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Dirty);
      }
      return size;
    }

    // Merge semantics: a set (non-empty) field in `other` overwrites ours.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(HKPhoneCodes other) {
      if (other == null) {
        return;
      }
      if (other.MaidInRoom.Length != 0) {
        MaidInRoom = other.MaidInRoom;
      }
      if (other.Inspect.Length != 0) {
        Inspect = other.Inspect;
      }
      if (other.CleanSheetsNotChanged.Length != 0) {
        CleanSheetsNotChanged = other.CleanSheetsNotChanged;
      }
      if (other.CleanAndSheetsChanged.Length != 0) {
        CleanAndSheetsChanged = other.CleanAndSheetsChanged;
      }
      if (other.Dirty.Length != 0) {
        Dirty = other.Dirty;
      }
    }

    // Wire-format deserialization: dispatch on the raw tag values written
    // by WriteTo; unknown fields are skipped (proto3 behavior).
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            MaidInRoom = input.ReadString();
            break;
          }
          case 18: {
            Inspect = input.ReadString();
            break;
          }
          case 26: {
            CleanSheetsNotChanged = input.ReadString();
            break;
          }
          case 34: {
            CleanAndSheetsChanged = input.ReadString();
            break;
          }
          case 42: {
            Dirty = input.ReadString();
            break;
          }
        }
      }
    }

  }
#endregion
}
#endregion Designer generated code
| |
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Windows.Forms;
namespace Alchemi.AppStart
{
//**************************************************************
// AppStart Class
// This is the main class for appstart.exe
//**************************************************************
public class Starter
{
public Starter() {}
static string AppExePath;
static Process AppProcess;
static string[] CommandLineArgs;
static string[] RestartCommandLineArgs;
static string CommandLineString;
static string RestartCommandLineString;
//**************************************************************
// Main()
//**************************************************************
[STAThread]
static void Main(string[] args)
{
//Retrive cmdline to pass to new process
CommandLineString = "";
for (int i = 0; i < args.Length; i++)
{
CommandLineString = string.Format ("{0} {1}", CommandLineString, args[i]);
}
CommandLineString += " /appstartversion " + Assembly.GetExecutingAssembly().GetName().Version.ToString();
RestartCommandLineString = CommandLineString + " /restart ";
CommandLineArgs = new String[args.Length+2];
CommandLineArgs[CommandLineArgs.Length-2] = "/appstartversion";
CommandLineArgs[CommandLineArgs.Length-1] = Assembly.GetExecutingAssembly().GetName().Version.ToString();
RestartCommandLineArgs = new String[CommandLineArgs.Length+1];
RestartCommandLineArgs[RestartCommandLineArgs.Length-1] = "/restart";
AppStartConfig Config = LoadConfig();
if (Config.AppLaunchMode == AppStartConfig.LaunchModes.Process)
StartAndWait_Process();
else
StartAndWait_Domain();
}
/*********************************************************************
* StartAndWait_Domain()
**********************************************************************/
private static void StartAndWait_Domain()
{
bool restartApp = true;
int returnCode = 0;
while (restartApp)
{
try
{
returnCode = StartApp_Domain(false);
Debug.WriteLine(returnCode.ToString());
}
catch (Exception e)
{
Debug.WriteLine("APPLICATION STARTER: Process.WaitForExit() failed, it's possible the process is not running");
HandleTerminalError(e);
}
if (returnCode == 2)
{
restartApp = true;
}
else
restartApp = false;
}
}
/*********************************************************************
* StartAndWait_Process()
**********************************************************************/
private static void StartAndWait_Process()
{
bool restartApp = true;
StartApp_Process(false);
while (restartApp)
{
try
{
AppProcess.WaitForExit();
}
catch (Exception e)
{
Debug.WriteLine("APPLICATION STARTER: Process.WaitForExit() failed, it's possible the process is not running");
Debug.WriteLine("APPLICATION STARTER: " + e.ToString());
return;
}
if (AppProcess.ExitCode == 2)
{
restartApp = true;
AppProcess = null;
StartApp_Process(true);
}
else
restartApp = false;
}
}
/*********************************************************************
* StartApp_Domain()
**********************************************************************/
public static int StartApp_Domain(bool restartApp)
{
Debug.WriteLine("APPLICATION STARTER: Starting the app in a seperate domain");
//Load the config file
AppStartConfig Config;
Config = LoadConfig();
AppExePath = Config.AppExePath;
//Load the app
int retValue=0;
try
{
//Create the new app domain
AppDomain NewDomain = AppDomain.CreateDomain (
"New App Domain",
AppDomain.CurrentDomain.Evidence,
Path.GetDirectoryName(AppExePath)+@"\",
"",
false);
//Execute the app in the new appdomain
string[] cmdLineArgs;
if(restartApp)
cmdLineArgs = RestartCommandLineArgs;
else
cmdLineArgs = CommandLineArgs;
retValue = NewDomain.ExecuteAssembly(AppExePath,AppDomain.CurrentDomain.Evidence,cmdLineArgs);
//Unload the app domain
AppDomain.Unload(NewDomain);
}
catch (Exception e)
{
Debug.WriteLine("APPLICATION STARTER: Failed to start app at: " + AppExePath);
HandleTerminalError(e);
}
return (retValue);
}
/*********************************************************************
* StartApp_Process()
**********************************************************************/
/// <summary>
/// Starts the configured application in its own process, unless a process
/// launched earlier by this starter is still running.
/// </summary>
/// <param name="restartApp">True when this launch is a restart, so the restart
/// command line string is passed to the child process.</param>
public static void StartApp_Process(bool restartApp)
{
Debug.WriteLine("APPLICATION STARTER: Starting the app in a seperate process");
//Load the config file
AppStartConfig Config = LoadConfig();
AppExePath = Config.AppExePath;
// If our previously launched instance is still alive there is nothing to do.
if (AppProcess != null && !AppProcess.HasExited)
return;
//Start the app
try
{
ProcessStartInfo startInfo = new ProcessStartInfo (AppExePath);
startInfo.WorkingDirectory = Path.GetDirectoryName(AppExePath);
// Notify the app if we are restarting in case there's something they want to do differently
startInfo.Arguments = restartApp ? RestartCommandLineString : CommandLineString;
AppProcess = Process.Start (startInfo);
Debug.WriteLine("APPLICATION STARTER: Started app: " + AppExePath);
}
catch (Exception e)
{
Debug.WriteLine("APPLICATION STARTER: Failed to start process at: " + AppExePath);
HandleTerminalError(e);
}
}
/*********************************************************************
* LoadConfig()
**********************************************************************/
/// <summary>
/// Loads the starter configuration that says where the real application lives.
/// Tries a config named after this executable first, then falls back to
/// "AppStart.Config" in the base directory. If both fail the starter shuts
/// down via HandleTerminalError and null is returned.
/// </summary>
private static AppStartConfig LoadConfig()
{
try
{
// First choice: a config file named after this executable.
return AppStartConfig.Load(CalcConfigFileLocation());
}
catch (Exception primaryError)
{
try
{
// Second choice: the default config file name next to the starter.
Debug.WriteLine("APPLICATION STARTER: Falling back to try to read appstart.config.");
return AppStartConfig.Load(AppDomain.CurrentDomain.BaseDirectory + @"AppStart.Config");
}
catch
{
// Both attempts failed; report the original failure, which is the
// one the user can most plausibly act on.
HandleTerminalError(primaryError);
}
}
return null;
}
/*********************************************************************
* CalcConfigFileLocation()
**********************************************************************/
/// <summary>
/// Computes the expected config file path: the executing assembly's own path
/// with its extension replaced by ".config" (e.g. appstart.exe -> appstart.config).
/// Returns null after reporting a terminal error if the path cannot be determined.
/// </summary>
private static string CalcConfigFileLocation()
{
//The config file name should be appstart.config if the exe name is appstart.exe
try
{
// Resolve the assembly location once instead of querying it twice,
// and let Path.ChangeExtension do the directory + name + ".config"
// recombination the original did by hand.
string exePath = Assembly.GetExecutingAssembly().Location;
return Path.ChangeExtension(exePath, @".config");
}
catch (Exception e)
{
Debug.WriteLine("APPLICATION STARTER: Failed to properly calculate config file name");
HandleTerminalError(e);
return null;
}
}
/*********************************************************************
* HandleTerminalError()
* Prints out the terminal exception & shuts down the app
**********************************************************************/
/// <summary>
/// Reports an unrecoverable error to the debug log, shows the user a final
/// message box, and terminates the process with exit code 0.
/// </summary>
/// <param name="e">The exception that made the starter give up.</param>
private static void HandleTerminalError(Exception e)
{
Debug.WriteLine("APPLICATION STARTER: Terminal error encountered.");
// Fixed typo in the emitted message ("encoutered" -> "encountered").
Debug.WriteLine("APPLICATION STARTER: The following exception was encountered:");
Debug.WriteLine(e.ToString());
Debug.WriteLine("APPLICATION STARTER: Shutting down");
MessageBox.Show("The auto-update feature of this application has encountered a configuration error.\r\n"
+"Please uninstall and reinstall the application.");
// Exit moved to its own line - it was hidden at the end of the MessageBox
// statement, making the process termination easy to overlook.
Environment.Exit(0);
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the PnCapacitado class.
/// </summary>
[Serializable]
public partial class PnCapacitadoCollection : ActiveList<PnCapacitado, PnCapacitadoCollection>
{
public PnCapacitadoCollection() {}
/// <summary>
/// Filters an existing collection based on the set criteria. This is an in-memory filter
/// Thanks to developingchris for this!
/// </summary>
/// <returns>PnCapacitadoCollection</returns>
public PnCapacitadoCollection Filter()
{
// Walk backwards so Remove() does not shift the items we have yet to visit.
for (int index = this.Count - 1; index >= 0; index--)
{
PnCapacitado record = this[index];
foreach (SubSonic.Where criteria in this.wheres)
{
bool shouldRemove = false;
System.Reflection.PropertyInfo property = record.GetType().GetProperty(criteria.ColumnName);
if (property.CanRead)
{
object currentValue = property.GetValue(record, null);
switch (criteria.Comparison)
{
case SubSonic.Comparison.Equals:
// Only equality comparisons are evaluated in memory.
if (!currentValue.Equals(criteria.ParameterValue))
{
shouldRemove = true;
}
break;
}
}
if (shouldRemove)
{
this.Remove(record);
break;
}
}
}
return this;
}
}
/// <summary>
/// This is an ActiveRecord class which wraps the PN_capacitados table.
/// </summary>
[Serializable]
public partial class PnCapacitado : ActiveRecord<PnCapacitado>, IActiveRecord
{
#region .ctors and Default Settings
// Default constructor: initializes the schema, applies code-side defaults
// and marks the record as new (unsaved).
public PnCapacitado()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
private void InitSetDefaults() { SetDefaults(); }
// When useDatabaseDefaults is true, column defaults are forced from the
// database instead of the code-side defaults.
public PnCapacitado(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
// Loads an existing record by its primary key value.
public PnCapacitado(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
// Loads an existing record by an arbitrary column/value pair.
public PnCapacitado(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
public static Query CreateQuery() { return new Query(Schema); }
public static TableSchema.Table Schema
{
get
{
// The schema is built lazily on first access.
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
// Builds the static table schema describing PN_capacitados (once) and
// registers it with the "sicProvider" data provider.
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("PN_capacitados", TableType.Table, DataService.GetInstance("sicProvider"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
// id_legajo: auto-increment primary key.
TableSchema.TableColumn colvarIdLegajo = new TableSchema.TableColumn(schema);
colvarIdLegajo.ColumnName = "id_legajo";
colvarIdLegajo.DataType = DbType.Int32;
colvarIdLegajo.MaxLength = 0;
colvarIdLegajo.AutoIncrement = true;
colvarIdLegajo.IsNullable = false;
colvarIdLegajo.IsPrimaryKey = true;
colvarIdLegajo.IsForeignKey = false;
colvarIdLegajo.IsReadOnly = false;
colvarIdLegajo.DefaultSetting = @"";
colvarIdLegajo.ForeignKeyTableName = "";
schema.Columns.Add(colvarIdLegajo);
// id_capacitacion: required int (no FK declared in schema metadata).
TableSchema.TableColumn colvarIdCapacitacion = new TableSchema.TableColumn(schema);
colvarIdCapacitacion.ColumnName = "id_capacitacion";
colvarIdCapacitacion.DataType = DbType.Int32;
colvarIdCapacitacion.MaxLength = 0;
colvarIdCapacitacion.AutoIncrement = false;
colvarIdCapacitacion.IsNullable = false;
colvarIdCapacitacion.IsPrimaryKey = false;
colvarIdCapacitacion.IsForeignKey = false;
colvarIdCapacitacion.IsReadOnly = false;
colvarIdCapacitacion.DefaultSetting = @"";
colvarIdCapacitacion.ForeignKeyTableName = "";
schema.Columns.Add(colvarIdCapacitacion);
// calificacion: nullable smallint.
TableSchema.TableColumn colvarCalificacion = new TableSchema.TableColumn(schema);
colvarCalificacion.ColumnName = "calificacion";
colvarCalificacion.DataType = DbType.Int16;
colvarCalificacion.MaxLength = 0;
colvarCalificacion.AutoIncrement = false;
colvarCalificacion.IsNullable = true;
colvarCalificacion.IsPrimaryKey = false;
colvarCalificacion.IsForeignKey = false;
colvarCalificacion.IsReadOnly = false;
colvarCalificacion.DefaultSetting = @"";
colvarCalificacion.ForeignKeyTableName = "";
schema.Columns.Add(colvarCalificacion);
// calificador: nullable string; MaxLength -1 marks an unbounded text column.
TableSchema.TableColumn colvarCalificador = new TableSchema.TableColumn(schema);
colvarCalificador.ColumnName = "calificador";
colvarCalificador.DataType = DbType.AnsiString;
colvarCalificador.MaxLength = -1;
colvarCalificador.AutoIncrement = false;
colvarCalificador.IsNullable = true;
colvarCalificador.IsPrimaryKey = false;
colvarCalificador.IsForeignKey = false;
colvarCalificador.IsReadOnly = false;
colvarCalificador.DefaultSetting = @"";
colvarCalificador.ForeignKeyTableName = "";
schema.Columns.Add(colvarCalificador);
// fecha_calificacion: nullable datetime.
TableSchema.TableColumn colvarFechaCalificacion = new TableSchema.TableColumn(schema);
colvarFechaCalificacion.ColumnName = "fecha_calificacion";
colvarFechaCalificacion.DataType = DbType.DateTime;
colvarFechaCalificacion.MaxLength = 0;
colvarFechaCalificacion.AutoIncrement = false;
colvarFechaCalificacion.IsNullable = true;
colvarFechaCalificacion.IsPrimaryKey = false;
colvarFechaCalificacion.IsForeignKey = false;
colvarFechaCalificacion.IsReadOnly = false;
colvarFechaCalificacion.DefaultSetting = @"";
colvarFechaCalificacion.ForeignKeyTableName = "";
schema.Columns.Add(colvarFechaCalificacion);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["sicProvider"].AddSchema("PN_capacitados",schema);
}
}
#endregion
#region Props
[XmlAttribute("IdLegajo")]
[Bindable(true)]
public int IdLegajo
{
get { return GetColumnValue<int>(Columns.IdLegajo); }
set { SetColumnValue(Columns.IdLegajo, value); }
}
[XmlAttribute("IdCapacitacion")]
[Bindable(true)]
public int IdCapacitacion
{
get { return GetColumnValue<int>(Columns.IdCapacitacion); }
set { SetColumnValue(Columns.IdCapacitacion, value); }
}
[XmlAttribute("Calificacion")]
[Bindable(true)]
public short? Calificacion
{
get { return GetColumnValue<short?>(Columns.Calificacion); }
set { SetColumnValue(Columns.Calificacion, value); }
}
[XmlAttribute("Calificador")]
[Bindable(true)]
public string Calificador
{
get { return GetColumnValue<string>(Columns.Calificador); }
set { SetColumnValue(Columns.Calificador, value); }
}
[XmlAttribute("FechaCalificacion")]
[Bindable(true)]
public DateTime? FechaCalificacion
{
get { return GetColumnValue<DateTime?>(Columns.FechaCalificacion); }
set { SetColumnValue(Columns.FechaCalificacion, value); }
}
#endregion
//no foreign key tables defined (0)
//no ManyToMany tables defined (0)
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source
/// </summary>
public static void Insert(int varIdCapacitacion,short? varCalificacion,string varCalificador,DateTime? varFechaCalificacion)
{
PnCapacitado item = new PnCapacitado();
item.IdCapacitacion = varIdCapacitacion;
item.Calificacion = varCalificacion;
item.Calificador = varCalificador;
item.FechaCalificacion = varFechaCalificacion;
// Save with the current user's identity for audit columns: web identity
// when running under ASP.NET, thread principal otherwise.
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source
/// </summary>
public static void Update(int varIdLegajo,int varIdCapacitacion,short? varCalificacion,string varCalificador,DateTime? varFechaCalificacion)
{
PnCapacitado item = new PnCapacitado();
item.IdLegajo = varIdLegajo;
item.IdCapacitacion = varIdCapacitacion;
item.Calificacion = varCalificacion;
item.Calificador = varCalificador;
item.FechaCalificacion = varFechaCalificacion;
// Marking the record as not-new makes Save() issue an UPDATE instead of an INSERT.
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
// Typed accessors into the schema's column collection; indexes match the
// order the columns were added in GetTableSchema().
public static TableSchema.TableColumn IdLegajoColumn
{
get { return Schema.Columns[0]; }
}
public static TableSchema.TableColumn IdCapacitacionColumn
{
get { return Schema.Columns[1]; }
}
public static TableSchema.TableColumn CalificacionColumn
{
get { return Schema.Columns[2]; }
}
public static TableSchema.TableColumn CalificadorColumn
{
get { return Schema.Columns[3]; }
}
public static TableSchema.TableColumn FechaCalificacionColumn
{
get { return Schema.Columns[4]; }
}
#endregion
#region Columns Struct
// Raw database column names, for use in queries and GetColumnValue calls.
public struct Columns
{
public static string IdLegajo = @"id_legajo";
public static string IdCapacitacion = @"id_capacitacion";
public static string Calificacion = @"calificacion";
public static string Calificador = @"calificador";
public static string FechaCalificacion = @"fecha_calificacion";
}
#endregion
#region Update PK Collections
#endregion
#region Deep Save
#endregion
}
}
| |
//
// App.cs
//
// Author:
// Ruben Vermeersch <ruben@savanne.be>
// Stephane Delcroix <stephane@delcroix.org>
//
// Copyright (C) 2009-2010 Novell, Inc.
// Copyright (C) 2010 Ruben Vermeersch
// Copyright (C) 2009-2010 Stephane Delcroix
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.IO;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Unique;
using Mono.Unix;
using Hyena;
using FSpot.Core;
using FSpot.Database;
namespace FSpot
{
// Singleton application object for F-Spot. Uses libunique (Unique.App) so a
// second invocation forwards its command to the already-running instance via
// SendMessage instead of starting a new process. Tracks all top-level windows
// and quits GTK once the last one is destroyed.
public class App : Unique.App
{
// Guards lazy creation of the App singleton, the organizer window and the database.
static object sync_handle = new object ();
#region public API
static App app;
// Lazily-created process-wide singleton.
public static App Instance {
get {
lock (sync_handle) {
if (app == null)
app = new App ();
}
return app;
}
}
// Thread currently constructing the organizer; used to detect re-entrant
// access to Organizer from within MainWindow's own constructor.
Thread constructing_organizer = null;
public MainWindow Organizer {
get {
lock (sync_handle) {
if (organizer == null) {
if (constructing_organizer == Thread.CurrentThread) {
throw new Exception ("Recursively tried to acquire App.Organizer!");
}
constructing_organizer = Thread.CurrentThread;
organizer = new MainWindow (Database);
Register (organizer.Window);
}
}
return organizer;
}
}
// Lazily opens (and if necessary repairs) the photo database.
public Db Database {
get {
lock (sync_handle) {
if (db == null) {
// NOTE(review): File.Exists on a directory path always returns false,
// so CreateDirectory runs every time; Directory.Exists looks intended.
// Harmless since CreateDirectory is a no-op when the directory exists.
if (!File.Exists (Global.BaseDirectory))
Directory.CreateDirectory (Global.BaseDirectory);
db = new Db ();
try {
db.Init (Path.Combine (Global.BaseDirectory, "photos.db"), true);
} catch (Exception e) {
// Offer a repair dialog, then retry initialization once.
new FSpot.UI.Dialog.RepairDbDialog (e, db.Repair (), null);
db.Init (Path.Combine (Global.BaseDirectory, "photos.db"), true);
}
}
}
return db;
}
}
// Each public command either forwards to the running instance (IsRunning)
// or is handled locally by the corresponding HandleXxx method.
public void Import (string path)
{
if (IsRunning) {
var md = new MessageData ();
md.Text = path;
SendMessage (Command.Import, md);
return;
}
HandleImport (path);
}
public void Organize ()
{
if (IsRunning) {
SendMessage (Command.Organize, null);
return;
}
HandleOrganize ();
}
public void Shutdown ()
{
if (IsRunning) {
SendMessage (Command.Shutdown, null);
return;
}
HandleShutdown ();
}
public void Slideshow (string tagname)
{
if (IsRunning) {
var md = new MessageData ();
// An empty string stands in for "no tag" on the wire.
md.Text = tagname ?? String.Empty;
SendMessage (Command.Slideshow, md);
return;
}
HandleSlideshow (tagname);
}
public void View (SafeUri uri)
{
View (new[] {uri});
}
public void View (IEnumerable<SafeUri> uris)
{
var uri_s = from uri in uris select uri.ToString ();
View (uri_s);
}
public void View (string uri)
{
View (new[] {uri});
}
public void View (IEnumerable<string> uris)
{
if (IsRunning) {
var md = new MessageData ();
md.Uris = uris.ToArray ();
SendMessage (Command.View, md);
return;
}
HandleView (uris.ToArray());
}
#endregion
#region private ctor and stuffs
// Wire protocol command ids shared between instances; must stay in sync
// with the name/id pairs registered in the constructor below.
enum Command {
Invalid = 0,
Import,
View,
Organize,
Shutdown,
Version,
Slideshow,
}
List<Gtk.Window> toplevels;
MainWindow organizer;
Db db;
// Registers the supported commands with libunique under the well-known
// D-Bus-style name. Only the first instance subscribes to MessageReceived.
App (): base ("org.gnome.FSpot.Core", null,
"Import", Command.Import,
"View", Command.View,
"Organize", Command.Organize,
"Shutdown", Command.Shutdown,
"Slideshow", Command.Slideshow)
{
toplevels = new List<Gtk.Window> ();
if (IsRunning) {
Log.Information ("Found active FSpot process");
} else {
MessageReceived += HandleMessageReceived;
}
}
void SendMessage (Command command, MessageData md)
{
SendMessage ((Unique.Command)command, md);
}
#endregion
#region Command Handlers
// Dispatches commands arriving from other F-Spot invocations.
void HandleMessageReceived (object sender, MessageReceivedArgs e)
{
switch ((Command)e.Command) {
case Command.Import:
HandleImport (e.MessageData.Text);
e.RetVal = Response.Ok;
break;
case Command.Organize:
HandleOrganize ();
e.RetVal = Response.Ok;
break;
case Command.Shutdown:
HandleShutdown ();
e.RetVal = Response.Ok;
break;
case Command.Slideshow:
HandleSlideshow (e.MessageData.Text);
e.RetVal = Response.Ok;
break;
case Command.View:
HandleView (e.MessageData.Uris);
e.RetVal = Response.Ok;
break;
case Command.Invalid:
default:
Log.Debug ("Wrong command received");
break;
}
}
// Presents the organizer and starts an import from the given path
// (null path means let the user pick the source).
void HandleImport (string path)
{
// Some users get wonky URIs here, trying to work around below.
// https://bugzilla.gnome.org/show_bug.cgi?id=629248
if (path != null && path.StartsWith ("gphoto2:usb:")) {
path = String.Format ("gphoto2://[{0}]", path.Substring (8));
}
Hyena.Log.DebugFormat ("Importing from {0}", path);
Organizer.Window.Present ();
Organizer.ImportFile (path == null ? null : new SafeUri(path));
}
// Shows the main window, or starts an import when the database is empty.
void HandleOrganize ()
{
if (Database.Empty)
HandleImport (null);
else
Organizer.Window.Present ();
}
void HandleShutdown ()
{
try {
App.Instance.Organizer.Close ();
} catch {
// If the organizer can't be closed cleanly, just terminate.
System.Environment.Exit (0);
}
}
//FIXME move all this in a standalone class
// Runs the screensaver-style slideshow for the given tag (or the
// configured screensaver tag when tagname is null/empty). When no photos
// match, shows an explanatory message instead.
void HandleSlideshow (string tagname)
{
Tag tag;
FSpot.Widgets.SlideShow slideshow = null;
if (!String.IsNullOrEmpty (tagname))
tag = Database.Tags.GetTagByName (tagname);
else
tag = Database.Tags.GetTagById (Preferences.Get<int> (Preferences.SCREENSAVER_TAG));
IPhoto[] photos;
if (tag != null)
photos = Database.Photos.Query (new Tag[] {tag});
else if (Preferences.Get<int> (Preferences.SCREENSAVER_TAG) == 0)
photos = Database.Photos.Query (new Tag [] {});
else
photos = new IPhoto [0];
// Minimum delay 1 second; default is 4s
var delay = Math.Max (1.0, Preferences.Get<double> (Preferences.SCREENSAVER_DELAY));
var window = new XScreenSaverSlide ();
window.ModifyFg (Gtk.StateType.Normal, new Gdk.Color (127, 127, 127));
window.ModifyBg (Gtk.StateType.Normal, new Gdk.Color (0, 0, 0));
if (photos.Length > 0) {
Array.Sort (photos, new IPhotoComparer.RandomSort ());
slideshow = new FSpot.Widgets.SlideShow (new BrowsablePointer (new PhotoList (photos), 0), (uint)(delay * 1000), true);
window.Add (slideshow);
} else {
// No matching photos: build a centered warning message instead.
Gtk.HBox outer = new Gtk.HBox ();
Gtk.HBox hbox = new Gtk.HBox ();
Gtk.VBox vbox = new Gtk.VBox ();
outer.PackStart (new Gtk.Label (String.Empty));
outer.PackStart (vbox, false, false, 0);
vbox.PackStart (new Gtk.Label (String.Empty));
vbox.PackStart (hbox, false, false, 0);
hbox.PackStart (new Gtk.Image (Gtk.Stock.DialogWarning, Gtk.IconSize.Dialog),
false, false, 0);
outer.PackStart (new Gtk.Label (String.Empty));
string msg;
string long_msg;
if (tag != null) {
msg = String.Format (Catalog.GetString ("No photos matching {0} found"), tag.Name);
long_msg = String.Format (Catalog.GetString ("The tag \"{0}\" is not applied to any photos. Try adding\n" +
"the tag to some photos or selecting a different tag in the\n" +
"F-Spot preference dialog."), tag.Name);
} else {
msg = Catalog.GetString ("Search returned no results");
long_msg = Catalog.GetString ("The tag F-Spot is looking for does not exist. Try\n" +
"selecting a different tag in the F-Spot preference\n" +
"dialog.");
}
Gtk.Label label = new Gtk.Label (msg);
hbox.PackStart (label, false, false, 0);
Gtk.Label long_label = new Gtk.Label (long_msg);
long_label.Markup = String.Format ("<small>{0}</small>", long_msg);
vbox.PackStart (long_label, false, false, 0);
vbox.PackStart (new Gtk.Label (String.Empty));
window.Add (outer);
label.ModifyFg (Gtk.StateType.Normal, new Gdk.Color (127, 127, 127));
label.ModifyBg (Gtk.StateType.Normal, new Gdk.Color (0, 0, 0));
long_label.ModifyFg (Gtk.StateType.Normal, new Gdk.Color (127, 127, 127));
long_label.ModifyBg (Gtk.StateType.Normal, new Gdk.Color (0, 0, 0));
}
window.ShowAll ();
Register (window);
// Start the slideshow from the GTK idle loop, after the window is shown.
GLib.Idle.Add (delegate {
if (slideshow != null)
slideshow.Start ();
return false;
});
}
// Opens a single-photo viewer for the given URIs.
void HandleView (string[] uris)
{
List<SafeUri> ul = new List<SafeUri> ();
foreach (var u in uris)
ul.Add (new SafeUri (u, true));
try {
Register (new FSpot.SingleView (ul.ToArray ()).Window);
} catch (System.Exception e) {
Log.Exception (e);
Log.Debug ("no real valid path to view from");
}
}
#endregion
#region Track toplevel windows
// Every top-level window is registered here so the app can exit when the
// last one is destroyed.
void Register (Gtk.Window window)
{
toplevels.Add (window);
window.Destroyed += HandleDestroyed;
}
void HandleDestroyed (object sender, EventArgs e)
{
toplevels.Remove (sender as Gtk.Window);
if (toplevels.Count == 0) {
Log.Information ("Exiting...");
Banshee.Kernel.Scheduler.Dispose ();
Database.Dispose ();
ImageLoaderThread.CleanAll ();
Gtk.Application.Quit ();
}
// Drop the cached organizer so a later access recreates it.
if (organizer != null && organizer.Window == sender)
organizer = null;
}
#endregion
}
}
| |
// Date.cs
// Script#/Libraries/CoreLib
// This source code is subject to terms and conditions of the Apache License, Version 2.0.
//
using System.Globalization;
using System.Runtime.CompilerServices;
namespace System {
/// <summary>
/// Equivalent to the Date type in Javascript, but emulates value-type semantics by removing all mutators.
/// </summary>
[IgnoreNamespace]
[Imported(ObeysTypeSystem = true)]
[ScriptName("Date")]
public struct DateTime : IComparable<DateTime>, IEquatable<DateTime>, IFormattable {
/// <summary>
/// Creates a new instance of Date initialized from the specified number of milliseconds.
/// </summary>
/// <param name="milliseconds">Milliseconds since January 1st, 1970.</param>
[AlternateSignature]
public DateTime(long milliseconds) {
}
/// <summary>
/// Creates a new instance of Date initialized from parsing the specified date.
/// </summary>
/// <param name="date"></param>
[AlternateSignature]
public DateTime(string date) {
}
/// <summary>
/// Creates a new instance of Date.
/// </summary>
/// <param name="year">The full year.</param>
/// <param name="month">The month (1 through 12)</param>
/// <param name="day">The day of the month (1 through # of days in the specified month)</param>
[InlineCode("new {$System.DateTime}({year}, {month} - 1, {day})")]
public DateTime(int year, int month, int day) {
}
/// <summary>
/// Creates a new instance of Date.
/// </summary>
/// <param name="year">The full year.</param>
/// <param name="month">The month (1 through 12)</param>
/// <param name="day">The day of the month (1 through # of days in the specified month)</param>
/// <param name="hours">The hours (0 through 23)</param>
[InlineCode("new {$System.DateTime}({year}, {month} - 1, {day}, {hours})")]
public DateTime(int year, int month, int day, int hours)
{
}
/// <summary>
/// Creates a new instance of Date.
/// </summary>
/// <param name="year">The full year.</param>
/// <param name="month">The month (1 through 12)</param>
/// <param name="day">The day of the month (1 through # of days in the specified month)</param>
/// <param name="hours">The hours (0 through 23)</param>
/// <param name="minutes">The minutes (0 through 59)</param>
[InlineCode("new {$System.DateTime}({year}, {month} - 1, {day}, {hours}, {minutes})")]
public DateTime(int year, int month, int day, int hours, int minutes)
{
}
/// <summary>
/// Creates a new instance of Date.
/// </summary>
/// <param name="year">The full year.</param>
/// <param name="month">The month (1 through 12)</param>
/// <param name="day">The day of the month (1 through # of days in the specified month)</param>
/// <param name="hours">The hours (0 through 23)</param>
/// <param name="minutes">The minutes (0 through 59)</param>
/// <param name="seconds">The seconds (0 through 59)</param>
[InlineCode("new {$System.DateTime}({year}, {month} - 1, {day}, {hours}, {minutes}, {seconds})")]
public DateTime(int year, int month, int day, int hours, int minutes, int seconds)
{
}
/// <summary>
/// Creates a new instance of Date.
/// </summary>
/// <param name="year">The full year.</param>
/// <param name="month">The month (1 through 12)</param>
/// <param name="day">The day of the month (1 through # of days in the specified month)</param>
/// <param name="hours">The hours (0 through 23)</param>
/// <param name="minutes">The minutes (0 through 59)</param>
/// <param name="seconds">The seconds (0 through 59)</param>
/// <param name="milliseconds">The milliseconds (0 through 999)</param>
[InlineCode("new {$System.DateTime}({year}, {month} - 1, {day}, {hours}, {minutes}, {seconds}, {milliseconds})")]
public DateTime(int year, int month, int day, int hours, int minutes, int seconds, int milliseconds)
{
}
/// <summary>
/// Returns the current date and time.
/// </summary>
public static DateTime Now { [InlineCode("new Date()")] get { return default(DateTime); } }
/// <summary>
/// Returns the current date and time according to UTC
/// </summary>
public static DateTime UtcNow { [InlineCode("{$System.Script}.utcNow()")] get { return default(DateTime); } }
/// <summary>
/// Gets the current date.
/// </summary>
/// <returns>
/// An object that is set to today's date, with the time component set to 00:00:00.
/// </returns>
public static DateTime Today { [InlineCode("{$System.Script}.today()")] get { return default(DateTime); } }
[InlineCode("{$System.Script}.formatDate({this}, {format})")]
public string Format(string format) {
return null;
}
[InlineCode("{$System.Script}.formatDate({this}, {format})")]
public string ToString(string format) {
return null;
}
public int GetDate() {
return 0;
}
public int GetDay() {
return 0;
}
public int GetFullYear() {
return 0;
}
public int GetHours() {
return 0;
}
public int GetMilliseconds() {
return 0;
}
public int GetMinutes() {
return 0;
}
[InlineCode("{this}.getMonth() + 1")]
public int GetMonth() {
return 0;
}
public int GetSeconds() {
return 0;
}
public long GetTime() {
return 0;
}
public int GetTimezoneOffset() {
return 0;
}
[ScriptName("getUTCDate")]
public int GetUtcDate() {
return 0;
}
[ScriptName("getUTCDay")]
public int GetUtcDay() {
return 0;
}
[ScriptName("getUTCFullYear")]
public int GetUtcFullYear() {
return 0;
}
[ScriptName("getUTCHours")]
public int GetUtcHours() {
return 0;
}
[ScriptName("getUTCMilliseconds")]
public int GetUtcMilliseconds() {
return 0;
}
[ScriptName("getUTCMinutes")]
public int GetUtcMinutes() {
return 0;
}
[InlineCode("{this}.getUTCMonth() + 1")]
public int GetUtcMonth() {
return 0;
}
[ScriptName("getUTCSeconds")]
public int GetUtcSeconds() {
return 0;
}
[InlineCode("{$System.Script}.localeFormatDate({this}, {format})")]
public string LocaleFormat(string format) {
return null;
}
[InlineCode("new Date(Date.parse({value}))")]
public static DateTime Parse(string value) {
return default(DateTime);
}
[InlineCode("{$System.Script}.parseExactDate({value}, {format})")]
public static DateTime? ParseExact(string value, string format) {
return null;
}
[InlineCode("{$System.Script}.parseExactDate({value}, {format}, {culture})")]
public static DateTime? ParseExact(string value, string format, CultureInfo culture) {
return null;
}
[InlineCode("{$System.Script}.parseExactDateUTC({value}, {format})")]
public static DateTime? ParseExactUtc(string value, string format) {
return null;
}
[InlineCode("{$System.Script}.parseExactDateUTC({value}, {format}, {culture})")]
public static DateTime? ParseExactUtc(string value, string format, CultureInfo culture) {
return null;
}
public string ToDateString() {
return null;
}
public string ToLocaleDateString() {
return null;
}
public string ToLocaleTimeString() {
return null;
}
public string ToTimeString() {
return null;
}
[ScriptName("toUTCString")]
public string ToUtcString() {
return null;
}
public long ValueOf() {
return 0;
}
[InlineCode("new Date(Date.UTC({year}, {month} - 1, {day}))")]
public static DateTime FromUtc(int year, int month, int day) {
return default(DateTime);
}
[InlineCode("new Date(Date.UTC({year}, {month} - 1, {day}, {hours}))")]
public static DateTime FromUtc(int year, int month, int day, int hours) {
return default(DateTime);
}
[InlineCode("new Date(Date.UTC({year}, {month} - 1, {day}, {hours}, {minutes}))")]
public static DateTime FromUtc(int year, int month, int day, int hours, int minutes) {
return default(DateTime);
}
[InlineCode("new Date(Date.UTC({year}, {month} - 1, {day}, {hours}, {minutes}, {seconds}))")]
public static DateTime FromUtc(int year, int month, int day, int hours, int minutes, int seconds) {
return default(DateTime);
}
[InlineCode("new Date(Date.UTC({year}, {month} - 1, {day}, {hours}, {minutes}, {seconds}, {milliseconds}))")]
public static DateTime FromUtc(int year, int month, int day, int hours, int minutes, int seconds, int milliseconds) {
return default(DateTime);
}
[InlineCode("Date.UTC({year}, {month} - 1, {day})")]
public static int Utc(int year, int month, int day) {
return 0;
}
[InlineCode("Date.UTC({year}, {month} - 1, {day}, {hours})")]
public static int Utc(int year, int month, int day, int hours) {
return 0;
}
[InlineCode("Date.UTC({year}, {month} - 1, {day}, {hours}, {minutes})")]
public static int Utc(int year, int month, int day, int hours, int minutes) {
return 0;
}
[InlineCode("Date.UTC({year}, {month} - 1, {day}, {hours}, {minutes}, {seconds})")]
public static int Utc(int year, int month, int day, int hours, int minutes, int seconds) {
return 0;
}
[InlineCode("Date.UTC({year}, {month} - 1, {day}, {hours}, {minutes}, {seconds}, {milliseconds})")]
public static int Utc(int year, int month, int day, int hours, int minutes, int seconds, int milliseconds) {
return 0;
}
// NOTE: There is no + operator since in JavaScript that returns the
// concatenation of the date strings, which is pretty much useless.
/// <summary>
/// Returns the difference in milliseconds between two dates.
/// </summary>
[IntrinsicOperator]
public static int operator -(DateTime a, DateTime b) {
return 0;
}
[InlineCode("new {$System.TimeSpan}(({this} - {value}) * 10000)")]
public TimeSpan Subtract(DateTime value) {
return default(TimeSpan);
}
[InlineCode("{$System.Script}.staticEquals({a}, {b})")]
public static bool AreEqual(DateTime? a, DateTime? b) {
return false;
}
[InlineCode("!{$System.Script}.staticEquals({a}, {b})")]
public static bool AreNotEqual(DateTime? a, DateTime? b) {
return false;
}
[InlineCode("{$System.Script}.staticEquals({a}, {b})")]
public static bool operator==(DateTime a, DateTime b) {
return false;
}
[InlineCode("{$System.Script}.staticEquals({a}, {b})")]
public static bool operator==(DateTime? a, DateTime b) {
return false;
}
[InlineCode("{$System.Script}.staticEquals({a}, {b})")]
public static bool operator==(DateTime a, DateTime? b) {
return false;
}
[InlineCode("{$System.Script}.staticEquals({a}, {b})")]
public static bool operator==(DateTime? a, DateTime? b) {
return false;
}
[InlineCode("!{$System.Script}.staticEquals({a}, {b})")]
public static bool operator!=(DateTime a, DateTime b) {
return false;
}
[InlineCode("!{$System.Script}.staticEquals({a}, {b})")]
public static bool operator!=(DateTime? a, DateTime b) {
return false;
}
[InlineCode("!{$System.Script}.staticEquals({a}, {b})")]
public static bool operator!=(DateTime a, DateTime? b) {
return false;
}
[InlineCode("!{$System.Script}.staticEquals({a}, {b})")]
public static bool operator!=(DateTime? a, DateTime? b) {
return false;
}
/// <summary>
/// Compares two dates
/// </summary>
[IntrinsicOperator]
public static bool operator <(DateTime a, DateTime b) {
return false;
}
/// <summary>
/// Compares two dates
/// </summary>
[IntrinsicOperator]
public static bool operator >(DateTime a, DateTime b) {
return false;
}
/// <summary>
/// Compares two dates
/// </summary>
[IntrinsicOperator]
public static bool operator <=(DateTime a, DateTime b) {
return false;
}
/// <summary>
/// Compares two dates
/// </summary>
[IntrinsicOperator]
public static bool operator >=(DateTime a, DateTime b) {
return false;
}
/// <summary>
/// Converts a DateTime to a JsDate. Returns a copy of the immutable datetime.
/// </summary>
[InlineCode("new Date({dt}.valueOf())")]
public static explicit operator DateTime(JsDate dt) {
return default(DateTime);
}
/// <summary>
/// Converts a JsDate to a DateTime. Returns a copy of the mutable datetime.
/// </summary>
[InlineCode("new Date({dt}.valueOf())")]
public static explicit operator JsDate(DateTime dt) {
return null;
}
/// <summary>
/// Gets the date component of this instance.
/// </summary>
/// <returns>
/// A new object with the same date as this instance, and the time value set to 12:00:00 midnight (00:00:00).
/// </returns>
// Rebuilding from year/month/day only drops the time-of-day component (local midnight).
public DateTime Date { [InlineCode("new Date({this}.getFullYear(), {this}.getMonth(), {this}.getDate())")] get { return default(DateTime); } }
/// <summary>
/// Gets the day of the month represented by this instance.
/// </summary>
/// <returns>
/// The day component, expressed as a value between 1 and 31.
/// </returns>
/// <filterpriority>1</filterpriority>
// Maps directly to JavaScript Date.getDate().
public int Day { [ScriptName("getDate")] get { return 0; } }
/// <summary>
/// Gets the day of the week represented by this instance.
/// </summary>
/// <returns>
/// An enumerated constant that indicates the day of the week of this <see cref="T:System.DateTime"/> value.
/// </returns>
// Maps directly to JavaScript Date.getDay() (0 = Sunday).
public DayOfWeek DayOfWeek { [ScriptName("getDay")] get { return 0; } }
/// <summary>
/// Gets the day of the year represented by this instance.
/// </summary>
/// <returns>
/// The day of the year, expressed as a value between 1 and 366.
/// </returns>
// NOTE(review): Math.ceil of the elapsed-millisecond quotient yields the previous
// day's number when the time is exactly midnight (e.g. Jan 1 00:00 -> 0, not 1).
// Confirm whether whole-day boundaries matter to callers before changing.
public int DayOfYear { [InlineCode("Math.ceil(({this} - new Date({this}.getFullYear(), 0, 1)) / 86400000)")] get { return 0; } }
/// <summary>
/// Gets the hour component of the date represented by this instance.
/// </summary>
/// <returns>
/// The hour component, expressed as a value between 0 and 23.
/// </returns>
// Maps directly to JavaScript Date.getHours().
public int Hour { [ScriptName("getHours")] get { return 0; } }
/// <summary>
/// Gets the milliseconds component of the date represented by this instance.
/// </summary>
/// <returns>
/// The milliseconds component, expressed as a value between 0 and 999.
/// </returns>
// Maps directly to JavaScript Date.getMilliseconds().
public int Millisecond { [ScriptName("getMilliseconds")] get { return 0; } }
/// <summary>
/// Gets the minute component of the date represented by this instance.
/// </summary>
/// <returns>
/// The minute component, expressed as a value between 0 and 59.
/// </returns>
// Maps directly to JavaScript Date.getMinutes().
public int Minute { [ScriptName("getMinutes")] get { return 0; } }
/// <summary>
/// Gets the month component of the date represented by this instance.
/// </summary>
/// <returns>
/// The month component, expressed as a value between 1 and 12.
/// </returns>
// JavaScript getMonth() is 0-based; +1 converts to the 1-12 range.
public int Month { [InlineCode("{this}.getMonth() + 1")] get { return 0; } }
/// <summary>
/// Gets the seconds component of the date represented by this instance.
/// </summary>
/// <returns>
/// The seconds component, expressed as a value between 0 and 59.
/// </returns>
// Maps directly to JavaScript Date.getSeconds().
public int Second { [ScriptName("getSeconds")] get { return 0; } }
/// <summary>
/// Gets the year component of the date represented by this instance.
/// </summary>
/// <returns>
/// The year, between 1 and 9999.
/// </returns>
// Maps directly to JavaScript Date.getFullYear().
public int Year { [ScriptName("getFullYear")] get { return 0; } }
/// <summary>
/// Returns a new <see cref="T:System.DateTime"/> that adds the specified number of days to the value of this instance.
/// </summary>
/// <returns>
/// An object whose value is the sum of the date and time represented by this instance and the number of days represented by <paramref name="value"/>.
/// </returns>
/// <param name="value">A number of whole and fractional days. The <paramref name="value"/> parameter can be negative or positive. </param>
// 86,400,000 ms per day; the fractional offset is rounded to whole milliseconds.
[InlineCode("new Date({this}.valueOf() + Math.round({value} * 86400000))")]
public DateTime AddDays(double value) {
    return default(DateTime);
}
/// <summary>
/// Returns a new <see cref="T:System.DateTime"/> that adds the specified number of hours to the value of this instance.
/// </summary>
/// <returns>
/// An object whose value is the sum of the date and time represented by this instance and the number of hours represented by <paramref name="value"/>.
/// </returns>
/// <param name="value">A number of whole and fractional hours. The <paramref name="value"/> parameter can be negative or positive. </param>
// 3,600,000 ms per hour; the fractional offset is rounded to whole milliseconds.
[InlineCode("new Date({this}.valueOf() + Math.round({value} * 3600000))")]
public DateTime AddHours(double value) {
    return default(DateTime);
}
/// <summary>
/// Returns a new <see cref="T:System.DateTime"/> that adds the specified number of milliseconds to the value of this instance.
/// </summary>
/// <returns>
/// An object whose value is the sum of the date and time represented by this instance and the number of milliseconds represented by <paramref name="value"/>.
/// </returns>
/// <param name="value">A number of whole and fractional milliseconds. The <paramref name="value"/> parameter can be negative or positive. Note that this value is rounded to the nearest integer.</param>
[InlineCode("new Date({this}.valueOf() + Math.round({value}))")]
public DateTime AddMilliseconds(double value) {
    return default(DateTime);
}
/// <summary>
/// Returns a new <see cref="T:System.DateTime"/> that adds the specified number of minutes to the value of this instance.
/// </summary>
/// <returns>
/// An object whose value is the sum of the date and time represented by this instance and the number of minutes represented by <paramref name="value"/>.
/// </returns>
/// <param name="value">A number of whole and fractional minutes. The <paramref name="value"/> parameter can be negative or positive. </param>
// 60,000 ms per minute; the fractional offset is rounded to whole milliseconds.
[InlineCode("new Date({this}.valueOf() + Math.round({value} * 60000))")]
public DateTime AddMinutes(double value) {
    return default(DateTime);
}
/// <summary>
/// Returns a new <see cref="T:System.DateTime"/> that adds the specified number of months to the value of this instance.
/// </summary>
/// <returns>
/// An object whose value is the sum of the date and time represented by this instance and <paramref name="months"/>.
/// </returns>
/// <param name="months">A number of months. The <paramref name="months"/> parameter can be negative or positive. </param>
// Rebuilds the value component-wise; the JS Date constructor normalizes any
// out-of-range month (presumably including day overflow such as Jan 31 + 1 month — verify against callers).
[InlineCode("new Date({this}.getFullYear(), {this}.getMonth() + {months}, {this}.getDate(), {this}.getHours(), {this}.getMinutes(), {this}.getSeconds(), {this}.getMilliseconds())")]
public DateTime AddMonths(int months) {
    return default(DateTime);
}
/// <summary>
/// Returns a new <see cref="T:System.DateTime"/> that adds the specified number of seconds to the value of this instance.
/// </summary>
/// <returns>
/// An object whose value is the sum of the date and time represented by this instance and the number of seconds represented by <paramref name="value"/>.
/// </returns>
/// <param name="value">A number of whole and fractional seconds. The <paramref name="value"/> parameter can be negative or positive. </param>
// 1,000 ms per second; the fractional offset is rounded to whole milliseconds.
[InlineCode("new Date({this}.valueOf() + Math.round({value} * 1000))")]
public DateTime AddSeconds(double value) {
    return default(DateTime);
}
/// <summary>
/// Returns a new <see cref="T:System.DateTime"/> that adds the specified number of years to the value of this instance.
/// </summary>
/// <returns>
/// An object whose value is the sum of the date and time represented by this instance and the number of years represented by <paramref name="value"/>.
/// </returns>
/// <param name="value">A number of years. The <paramref name="value"/> parameter can be negative or positive. </param>
// Rebuilds the value component-wise with only the year shifted.
[InlineCode("new Date({this}.getFullYear() + {value}, {this}.getMonth(), {this}.getDate(), {this}.getHours(), {this}.getMinutes(), {this}.getSeconds(), {this}.getMilliseconds())")]
public DateTime AddYears(int value) {
    return default(DateTime);
}
/// <summary>
/// Returns the number of days in the specified month and year.
/// </summary>
/// <returns>
/// The number of days in <paramref name="month"/> for the specified <paramref name="year"/>. For example, if <paramref name="month"/> equals 2 for February, the return value is 28 or 29 depending upon whether <paramref name="year"/> is a leap year.
/// </returns>
/// <param name="year">The year. </param><param name="month">The month (a number ranging from 1 to 12). </param>
// Day -1 of the 0-based JS month {month} resolves to the next-to-last day of the
// 1-based C# month {month}, so getDate() + 1 yields that month's day count.
// Fix: the original InlineCode had an unbalanced trailing ')' ("...getDate() + 1)"),
// which would have emitted invalid JavaScript at every call site.
[InlineCode("new Date({year}, {month}, -1).getDate() + 1")]
public static int DaysInMonth(int year, int month) {
    return 0;
}
/// <summary>
/// Returns an indication whether the specified year is a leap year.
/// </summary>
/// <returns>
/// true if <paramref name="year"/> is a leap year; otherwise, false.
/// </returns>
/// <param name="year">A 4-digit year. </param>
// Day -1 of JS month 2 (March, 0-based) is the next-to-last day of February;
// that day is the 28th exactly when February has 29 days, i.e. in a leap year.
[InlineCode("new Date({year}, 2, -1).getDate() === 28")]
public static bool IsLeapYear(int year) {
    return false;
}
/// <summary>
/// Compares this instance to another date, returning a negative number, zero, or a
/// positive number when this instance is earlier than, equal to, or later than
/// <paramref name="other"/>. Delegates to the script runtime's compare helper.
/// </summary>
[InlineCode("{$System.Script}.compare({this}, {other})")]
public int CompareTo(DateTime other) {
    return 0;
}
/// <summary>
/// Compares two instances of <see cref="T:System.DateTime"/> and returns an integer that indicates whether the first instance is earlier than, the same as, or later than the second instance.
/// </summary>
/// <returns>
/// A signed number indicating the relative values of <paramref name="t1"/> and <paramref name="t2"/>.Value Type Condition Less than zero <paramref name="t1"/> is earlier than <paramref name="t2"/>. Zero <paramref name="t1"/> is the same as <paramref name="t2"/>. Greater than zero <paramref name="t1"/> is later than <paramref name="t2"/>.
/// </returns>
/// <param name="t1">The first object to compare. </param><param name="t2">The second object to compare. </param>
// Delegates to the script runtime's compare helper.
[InlineCode("{$System.Script}.compare({t1}, {t2})")]
public static int Compare(DateTime t1, DateTime t2) {
    return 0;
}
/// <summary>
/// Returns a value indicating whether this instance and <paramref name="other"/>
/// have the same date and time value. Delegates to the script runtime's typed
/// equality helper.
/// </summary>
[InlineCode("{$System.Script}.equalsT({this}, {other})")]
public bool Equals(DateTime other) {
    return false;
}
/// <summary>
/// Returns a value indicating whether two <see cref="T:System.DateTime"/> instances have the same date and time value.
/// </summary>
/// <returns>
/// true if the two values are equal; otherwise, false.
/// </returns>
/// <param name="t1">The first object to compare. </param><param name="t2">The second object to compare. </param>
// Delegates to the script runtime's typed equality helper.
[InlineCode("{$System.Script}.equalsT({t1}, {t2})")]
public static bool Equals(DateTime t1, DateTime t2) {
    return false;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections;
using System.Reflection;
using FlatRedBall.Instructions.Reflection;
using System.IO;
#if FRB_XNA
using Microsoft.Xna.Framework;
#endif
#if SILVERLIGHT
using System.Windows.Resources;
#endif
namespace FlatRedBall.IO.Csv
{
#region XML Docs
/// <summary>
/// Class providing methods for interacting with .CSV spreadsheet files.
/// </summary>
#endregion
public static class CsvFileManager
{
    /// <summary>
    /// The column delimiter used when deserializing and serializing CSVs. Defaults to ','.
    /// </summary>
    public static char Delimiter = ',';

#if FRB_RAW
    public static string ContentManagerName = "Global";
#else
    public static string ContentManagerName = FlatRedBallServices.GlobalContentManager;
#endif

    /// <summary>
    /// Deserializes the CSV at the given file name into a new list of objects of the given type.
    /// </summary>
    public static List<object> CsvDeserializeList(Type typeOfElement, string fileName)
    {
        List<object> listOfObjects = new List<object>();
        CsvDeserializeList(typeOfElement, fileName, listOfObjects);
        return listOfObjects;
    }

    /// <summary>
    /// Deserializes the CSV at the given file name, appending the created objects to the supplied list.
    /// </summary>
    public static void CsvDeserializeList(Type typeOfElement, string fileName, IList listToPopulate)
    {
        RuntimeCsvRepresentation rcr = CsvDeserializeToRuntime(fileName);
        rcr.CreateObjectList(typeOfElement, listToPopulate, ContentManagerName);
    }

    /// <summary>
    /// Deserializes the CSV at the given file name into the supplied dictionary.
    /// </summary>
    public static void CsvDeserializeDictionary<KeyType, ValueType>(string fileName, Dictionary<KeyType, ValueType> dictionaryToPopulate)
    {
        RuntimeCsvRepresentation rcr = null;
        CsvDeserializeDictionary(fileName, dictionaryToPopulate, out rcr);
    }

    /// <summary>
    /// Deserializes the CSV at the given file name into the supplied dictionary, also returning
    /// the intermediate <see cref="RuntimeCsvRepresentation"/> through <paramref name="rcr"/>.
    /// </summary>
    public static void CsvDeserializeDictionary<KeyType, ValueType>(string fileName, Dictionary<KeyType, ValueType> dictionaryToPopulate, out RuntimeCsvRepresentation rcr)
    {
        rcr = CsvDeserializeToRuntime(fileName);
        rcr.CreateObjectDictionary<KeyType, ValueType>(dictionaryToPopulate, ContentManagerName);
    }

    /// <summary>
    /// Updates the values in an already-populated dictionary from the CSV at the given file name.
    /// </summary>
    public static void UpdateDictionaryValuesFromCsv<KeyType, ValueType>(Dictionary<KeyType, ValueType> dictionaryToUpdate, string fileName)
    {
        var rcr = CsvDeserializeToRuntime(fileName);
        rcr.UpdateValues(dictionaryToUpdate, ContentManagerName);
    }

    /// <summary>
    /// Deserializes CSV data read from the given stream into the supplied dictionary.
    /// </summary>
    public static void CsvDeserializeDictionary<KeyType, ValueType>(Stream stream, Dictionary<KeyType, ValueType> dictionaryToPopulate)
    {
        var rcr = CsvDeserializeToRuntime<RuntimeCsvRepresentation>(stream);
        rcr.CreateObjectDictionary<KeyType, ValueType>(dictionaryToPopulate, ContentManagerName);
    }

    /// <summary>
    /// Deserializes the CSV at the given file name to a <see cref="RuntimeCsvRepresentation"/>.
    /// </summary>
    public static RuntimeCsvRepresentation CsvDeserializeToRuntime(string fileName)
    {
        return CsvDeserializeToRuntime<RuntimeCsvRepresentation>(fileName);
    }

    /// <summary>
    /// Deserializes the file at the given name to a RuntimeCsvRepresentation-derived instance.
    /// "csv"/"txt" files are parsed directly; anything else is routed through the content
    /// pipeline (FlatRedBallServices.Load), with the extension stripped first.
    /// </summary>
    public static T CsvDeserializeToRuntime<T>(string fileName) where T : RuntimeCsvRepresentation, new()
    {
        if (FileManager.IsRelative(fileName))
        {
            fileName = FileManager.MakeAbsolute(fileName);
        }
#if ANDROID || IOS
        fileName = fileName.ToLowerInvariant();
#endif
        FileManager.ThrowExceptionIfFileDoesntExist(fileName);
        T runtimeCsvRepresentation = null;
        string extension = FileManager.GetExtension(fileName).ToLower();
        if (extension == "csv" || extension == "txt")
        {
#if SILVERLIGHT || XBOX360 || WINDOWS_PHONE || MONOGAME
            Stream stream = FileManager.GetStreamForFile(fileName);
#else
            // Creating a filestream then using that enables us to open files that are open by other apps.
            FileStream stream = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
#endif
            runtimeCsvRepresentation = CsvDeserializeToRuntime<T>(stream);
            FileManager.Close(stream);
            stream.Dispose();
#if XBOX360
            if (FileManager.IsFileNameInUserFolder(fileName))
            {
                FileManager.DisposeLastStorageContainer();
            }
#endif
        }
        else
        {
            if (extension != String.Empty)
            {
#if DEBUG
                if (extension != "xnb")
                    throw new ArgumentException(string.Format("CSV files with extension '.{0}' are not supported", extension));
#endif
                fileName = FileManager.RemoveExtension(fileName);
            }
            runtimeCsvRepresentation = FlatRedBallServices.Load<T>(fileName);
        }
        return runtimeCsvRepresentation;
    }

    /// <summary>
    /// Parses CSV data from the given stream. The first row becomes the headers; every
    /// subsequent row whose cells are not all empty becomes a record. Rows that are
    /// entirely empty are skipped. The stream itself is not closed here beyond what the
    /// StreamReader disposes.
    /// </summary>
    public static T CsvDeserializeToRuntime<T>(Stream stream) where T : RuntimeCsvRepresentation, new()
    {
        T runtimeCsvRepresentation;
        using (System.IO.StreamReader streamReader = new StreamReader(stream))
        using (CsvReader csv = new CsvReader(streamReader, true, Delimiter, CsvReader.DefaultQuote, CsvReader.DefaultEscape, CsvReader.DefaultComment, true, CsvReader.DefaultBufferSize))
        {
            runtimeCsvRepresentation = new T();
            string[] fileHeaders = csv.GetFieldHeaders();
            runtimeCsvRepresentation.Headers = new CsvHeader[fileHeaders.Length];
            for (int i = 0; i < fileHeaders.Length; i++)
            {
                runtimeCsvRepresentation.Headers[i] = new CsvHeader(fileHeaders[i]);
            }
            int numberOfHeaders = runtimeCsvRepresentation.Headers.Length;
            runtimeCsvRepresentation.Records = new List<string[]>();
            // recordIndex/columnIndex are kept outside the loop so the catch block can
            // report where parsing failed.
            int recordIndex = 0;
            int columnIndex = 0;
            string[] newRecord = null;
            try
            {
                while (csv.ReadNextRecord())
                {
                    // (Removed a leftover debugging breakpoint block: "if (recordIndex == 123) { int m = 3; }")
                    newRecord = new string[numberOfHeaders];
                    bool anyNonEmpty = false;
                    for (columnIndex = 0; columnIndex < numberOfHeaders; columnIndex++)
                    {
                        string record = csv[columnIndex];
                        newRecord[columnIndex] = record;
                        if (record != "")
                        {
                            anyNonEmpty = true;
                        }
                    }
                    // Rows whose cells are all empty are skipped entirely.
                    if (anyNonEmpty)
                    {
                        runtimeCsvRepresentation.Records.Add(newRecord);
                    }
                    recordIndex++;
                }
            }
            catch (Exception e)
            {
                // Wrap parse failures with enough context (record/column plus the
                // partially-read row) to locate the bad cell in the source file.
                string message =
                    "Error reading record " + recordIndex + " at column " + columnIndex;
                if (columnIndex != 0 && newRecord != null)
                {
                    foreach (string s in newRecord)
                    {
                        message += "\n" + s;
                    }
                }
                throw new Exception(message, e);
            }
        }
        return runtimeCsvRepresentation;
    }

    /// <summary>
    /// Serializes the given RuntimeCsvRepresentation to the given file name using the
    /// current <see cref="Delimiter"/>.
    /// </summary>
    public static void Serialize(RuntimeCsvRepresentation rcr, string fileName)
    {
        if (rcr == null)
            throw new ArgumentNullException("rcr");
        string toSave = rcr.GenerateCsvString(Delimiter);
        FileManager.SaveText(toSave, fileName);
    }

    /// <summary>
    /// Appends one member value to the builder, prefixing a " ," separator for every
    /// value after the first. String values are wrapped in double quotes and have
    /// embedded newlines replaced with spaces; null values append an empty cell.
    /// NOTE(review): embedded double quotes inside string values are not escaped —
    /// confirm callers never produce values containing '"'.
    /// </summary>
    private static void AppendMemberValue(StringBuilder stringBuilder, ref bool first, Type type, Object valueAsObject)
    {
        if (first)
            first = false;
        else
            stringBuilder.Append(" ,");
        String value;
        bool isString = false;
        if (type == typeof(string)) //check if the value is a string if so, it should be surrounded in quotes
        {
            isString = true;
        }
        if (valueAsObject == null)
        {
            value = "";
            stringBuilder.Append(value);
        }
        else //if not null, append the value
        {
            if (isString)
            {
                stringBuilder.Append("\"");
            }
            value = valueAsObject.ToString();
            value = value.Replace('\n', ' '); //replace newlines
            stringBuilder.Append(value);
            if (isString)
            {
                stringBuilder.Append("\"");
            }
        }
    }
}
}
| |
using System;
namespace WeifenLuo.WinFormsUI.Docking.Win32
{
/// <summary>
/// Flags for the Win32 SetWindowPos uFlags parameter.
/// Note the aliases: SWP_DRAWFRAME == SWP_FRAMECHANGED and
/// SWP_NOREPOSITION == SWP_NOOWNERZORDER (same numeric values, per the Win32 headers).
/// </summary>
[Flags]
internal enum FlagsSetWindowPos : uint
{
    SWP_NOSIZE = 0x0001,
    SWP_NOMOVE = 0x0002,
    SWP_NOZORDER = 0x0004,
    SWP_NOREDRAW = 0x0008,
    SWP_NOACTIVATE = 0x0010,
    SWP_FRAMECHANGED = 0x0020,
    SWP_SHOWWINDOW = 0x0040,
    SWP_HIDEWINDOW = 0x0080,
    SWP_NOCOPYBITS = 0x0100,
    SWP_NOOWNERZORDER = 0x0200,
    SWP_NOSENDCHANGING = 0x0400,
    SWP_DRAWFRAME = 0x0020,
    SWP_NOREPOSITION = 0x0200,
    SWP_DEFERERASE = 0x2000,
    SWP_ASYNCWINDOWPOS = 0x4000
}
/// <summary>
/// Commands for the Win32 ShowWindow function (SW_* constants). Several names are
/// aliases for the same value (e.g. SW_NORMAL == SW_SHOWNORMAL, SW_MAXIMIZE == SW_SHOWMAXIMIZED).
/// </summary>
internal enum ShowWindowStyles : short
{
    SW_HIDE = 0,
    SW_SHOWNORMAL = 1,
    SW_NORMAL = 1,
    SW_SHOWMINIMIZED = 2,
    SW_SHOWMAXIMIZED = 3,
    SW_MAXIMIZE = 3,
    SW_SHOWNOACTIVATE = 4,
    SW_SHOW = 5,
    SW_MINIMIZE = 6,
    SW_SHOWMINNOACTIVE = 7,
    SW_SHOWNA = 8,
    SW_RESTORE = 9,
    SW_SHOWDEFAULT = 10,
    SW_FORCEMINIMIZE = 11,
    SW_MAX = 11
}
/// <summary>
/// Win32 window style bits (WS_*), as passed to CreateWindow/SetWindowLong(GWL_STYLE).
/// Some entries are combinations (WS_OVERLAPPEDWINDOW, WS_POPUPWINDOW) or aliases
/// (WS_TILED == WS_OVERLAPPED, WS_ICONIC == WS_MINIMIZE, WS_SIZEBOX == WS_THICKFRAME).
/// </summary>
internal enum WindowStyles : uint
{
    WS_OVERLAPPED = 0x00000000,
    WS_POPUP = 0x80000000,
    WS_CHILD = 0x40000000,
    WS_MINIMIZE = 0x20000000,
    WS_VISIBLE = 0x10000000,
    WS_DISABLED = 0x08000000,
    WS_CLIPSIBLINGS = 0x04000000,
    WS_CLIPCHILDREN = 0x02000000,
    WS_MAXIMIZE = 0x01000000,
    WS_CAPTION = 0x00C00000,
    WS_BORDER = 0x00800000,
    WS_DLGFRAME = 0x00400000,
    WS_VSCROLL = 0x00200000,
    WS_HSCROLL = 0x00100000,
    WS_SYSMENU = 0x00080000,
    WS_THICKFRAME = 0x00040000,
    WS_GROUP = 0x00020000,
    WS_TABSTOP = 0x00010000,
    WS_MINIMIZEBOX = 0x00020000,
    WS_MAXIMIZEBOX = 0x00010000,
    WS_TILED = 0x00000000,
    WS_ICONIC = 0x20000000,
    WS_SIZEBOX = 0x00040000,
    WS_POPUPWINDOW = 0x80880000,
    WS_OVERLAPPEDWINDOW = 0x00CF0000,
    WS_TILEDWINDOW = 0x00CF0000,
    WS_CHILDWINDOW = 0x40000000
}
/// <summary>
/// Win32 extended window style bits (WS_EX_*), as passed to CreateWindowEx /
/// SetWindowLong(GWL_EXSTYLE).
/// </summary>
internal enum WindowExStyles
{
    WS_EX_DLGMODALFRAME = 0x00000001,
    WS_EX_NOPARENTNOTIFY = 0x00000004,
    WS_EX_TOPMOST = 0x00000008,
    WS_EX_ACCEPTFILES = 0x00000010,
    WS_EX_TRANSPARENT = 0x00000020,
    WS_EX_MDICHILD = 0x00000040,
    WS_EX_TOOLWINDOW = 0x00000080,
    WS_EX_WINDOWEDGE = 0x00000100,
    WS_EX_CLIENTEDGE = 0x00000200,
    WS_EX_CONTEXTHELP = 0x00000400,
    WS_EX_RIGHT = 0x00001000,
    WS_EX_LEFT = 0x00000000,
    WS_EX_RTLREADING = 0x00002000,
    WS_EX_LTRREADING = 0x00000000,
    WS_EX_LEFTSCROLLBAR = 0x00004000,
    WS_EX_RIGHTSCROLLBAR = 0x00000000,
    WS_EX_CONTROLPARENT = 0x00010000,
    WS_EX_STATICEDGE = 0x00020000,
    WS_EX_APPWINDOW = 0x00040000,
    WS_EX_OVERLAPPEDWINDOW = 0x00000300,
    WS_EX_PALETTEWINDOW = 0x00000188,
    WS_EX_LAYERED = 0x00080000,
    WS_EX_NOACTIVATE = 0x08000000
}
/// <summary>
/// Win32 window message identifiers (WM_*), grouped below roughly as in winuser.h.
/// </summary>
internal enum Msgs
{
    // General window lifecycle and state messages
    WM_NULL = 0x0000,
    WM_CREATE = 0x0001,
    WM_DESTROY = 0x0002,
    WM_MOVE = 0x0003,
    WM_SIZE = 0x0005,
    WM_ACTIVATE = 0x0006,
    WM_SETFOCUS = 0x0007,
    WM_KILLFOCUS = 0x0008,
    WM_ENABLE = 0x000A,
    WM_SETREDRAW = 0x000B,
    WM_SETTEXT = 0x000C,
    WM_GETTEXT = 0x000D,
    WM_GETTEXTLENGTH = 0x000E,
    WM_PAINT = 0x000F,
    WM_CLOSE = 0x0010,
    WM_QUERYENDSESSION = 0x0011,
    WM_QUIT = 0x0012,
    WM_QUERYOPEN = 0x0013,
    WM_ERASEBKGND = 0x0014,
    WM_SYSCOLORCHANGE = 0x0015,
    WM_ENDSESSION = 0x0016,
    WM_SHOWWINDOW = 0x0018,
    WM_WININICHANGE = 0x001A,
    WM_SETTINGCHANGE = 0x001A,
    WM_DEVMODECHANGE = 0x001B,
    WM_ACTIVATEAPP = 0x001C,
    WM_FONTCHANGE = 0x001D,
    WM_TIMECHANGE = 0x001E,
    WM_CANCELMODE = 0x001F,
    WM_SETCURSOR = 0x0020,
    WM_MOUSEACTIVATE = 0x0021,
    WM_CHILDACTIVATE = 0x0022,
    WM_QUEUESYNC = 0x0023,
    WM_GETMINMAXINFO = 0x0024,
    WM_PAINTICON = 0x0026,
    WM_ICONERASEBKGND = 0x0027,
    WM_NEXTDLGCTL = 0x0028,
    WM_SPOOLERSTATUS = 0x002A,
    WM_DRAWITEM = 0x002B,
    WM_MEASUREITEM = 0x002C,
    WM_DELETEITEM = 0x002D,
    WM_VKEYTOITEM = 0x002E,
    WM_CHARTOITEM = 0x002F,
    WM_SETFONT = 0x0030,
    WM_GETFONT = 0x0031,
    WM_SETHOTKEY = 0x0032,
    WM_GETHOTKEY = 0x0033,
    WM_QUERYDRAGICON = 0x0037,
    WM_COMPAREITEM = 0x0039,
    WM_GETOBJECT = 0x003D,
    WM_COMPACTING = 0x0041,
    WM_COMMNOTIFY = 0x0044,
    WM_WINDOWPOSCHANGING = 0x0046,
    WM_WINDOWPOSCHANGED = 0x0047,
    WM_POWER = 0x0048,
    WM_COPYDATA = 0x004A,
    WM_CANCELJOURNAL = 0x004B,
    WM_NOTIFY = 0x004E,
    WM_INPUTLANGCHANGEREQUEST = 0x0050,
    WM_INPUTLANGCHANGE = 0x0051,
    WM_TCARD = 0x0052,
    WM_HELP = 0x0053,
    WM_USERCHANGED = 0x0054,
    WM_NOTIFYFORMAT = 0x0055,
    WM_CONTEXTMENU = 0x007B,
    WM_STYLECHANGING = 0x007C,
    WM_STYLECHANGED = 0x007D,
    WM_DISPLAYCHANGE = 0x007E,
    WM_GETICON = 0x007F,
    WM_SETICON = 0x0080,
    // Non-client area messages
    WM_NCCREATE = 0x0081,
    WM_NCDESTROY = 0x0082,
    WM_NCCALCSIZE = 0x0083,
    WM_NCHITTEST = 0x0084,
    WM_NCPAINT = 0x0085,
    WM_NCACTIVATE = 0x0086,
    WM_GETDLGCODE = 0x0087,
    WM_SYNCPAINT = 0x0088,
    WM_NCMOUSEMOVE = 0x00A0,
    WM_NCLBUTTONDOWN = 0x00A1,
    WM_NCLBUTTONUP = 0x00A2,
    WM_NCLBUTTONDBLCLK = 0x00A3,
    WM_NCRBUTTONDOWN = 0x00A4,
    WM_NCRBUTTONUP = 0x00A5,
    WM_NCRBUTTONDBLCLK = 0x00A6,
    WM_NCMBUTTONDOWN = 0x00A7,
    WM_NCMBUTTONUP = 0x00A8,
    WM_NCMBUTTONDBLCLK = 0x00A9,
    // Keyboard and IME messages
    WM_KEYDOWN = 0x0100,
    WM_KEYUP = 0x0101,
    WM_CHAR = 0x0102,
    WM_DEADCHAR = 0x0103,
    WM_SYSKEYDOWN = 0x0104,
    WM_SYSKEYUP = 0x0105,
    WM_SYSCHAR = 0x0106,
    WM_SYSDEADCHAR = 0x0107,
    WM_KEYLAST = 0x0108,
    WM_IME_STARTCOMPOSITION = 0x010D,
    WM_IME_ENDCOMPOSITION = 0x010E,
    WM_IME_COMPOSITION = 0x010F,
    WM_IME_KEYLAST = 0x010F,
    // Dialog, command, and menu messages
    WM_INITDIALOG = 0x0110,
    WM_COMMAND = 0x0111,
    WM_SYSCOMMAND = 0x0112,
    WM_TIMER = 0x0113,
    WM_HSCROLL = 0x0114,
    WM_VSCROLL = 0x0115,
    WM_INITMENU = 0x0116,
    WM_INITMENUPOPUP = 0x0117,
    WM_MENUSELECT = 0x011F,
    WM_MENUCHAR = 0x0120,
    WM_ENTERIDLE = 0x0121,
    WM_MENURBUTTONUP = 0x0122,
    WM_MENUDRAG = 0x0123,
    WM_MENUGETOBJECT = 0x0124,
    WM_UNINITMENUPOPUP = 0x0125,
    WM_MENUCOMMAND = 0x0126,
    WM_CTLCOLORMSGBOX = 0x0132,
    WM_CTLCOLOREDIT = 0x0133,
    WM_CTLCOLORLISTBOX = 0x0134,
    WM_CTLCOLORBTN = 0x0135,
    WM_CTLCOLORDLG = 0x0136,
    WM_CTLCOLORSCROLLBAR = 0x0137,
    WM_CTLCOLORSTATIC = 0x0138,
    // Mouse messages (client area)
    WM_MOUSEMOVE = 0x0200,
    WM_LBUTTONDOWN = 0x0201,
    WM_LBUTTONUP = 0x0202,
    WM_LBUTTONDBLCLK = 0x0203,
    WM_RBUTTONDOWN = 0x0204,
    WM_RBUTTONUP = 0x0205,
    WM_RBUTTONDBLCLK = 0x0206,
    WM_MBUTTONDOWN = 0x0207,
    WM_MBUTTONUP = 0x0208,
    WM_MBUTTONDBLCLK = 0x0209,
    WM_MOUSEWHEEL = 0x020A,
    WM_PARENTNOTIFY = 0x0210,
    WM_ENTERMENULOOP = 0x0211,
    WM_EXITMENULOOP = 0x0212,
    WM_NEXTMENU = 0x0213,
    WM_SIZING = 0x0214,
    WM_CAPTURECHANGED = 0x0215,
    WM_MOVING = 0x0216,
    WM_DEVICECHANGE = 0x0219,
    // MDI messages
    WM_MDICREATE = 0x0220,
    WM_MDIDESTROY = 0x0221,
    WM_MDIACTIVATE = 0x0222,
    WM_MDIRESTORE = 0x0223,
    WM_MDINEXT = 0x0224,
    WM_MDIMAXIMIZE = 0x0225,
    WM_MDITILE = 0x0226,
    WM_MDICASCADE = 0x0227,
    WM_MDIICONARRANGE = 0x0228,
    WM_MDIGETACTIVE = 0x0229,
    WM_MDISETMENU = 0x0230,
    WM_ENTERSIZEMOVE = 0x0231,
    WM_EXITSIZEMOVE = 0x0232,
    WM_DROPFILES = 0x0233,
    WM_MDIREFRESHMENU = 0x0234,
    // IME context messages
    WM_IME_SETCONTEXT = 0x0281,
    WM_IME_NOTIFY = 0x0282,
    WM_IME_CONTROL = 0x0283,
    WM_IME_COMPOSITIONFULL = 0x0284,
    WM_IME_SELECT = 0x0285,
    WM_IME_CHAR = 0x0286,
    WM_IME_REQUEST = 0x0288,
    WM_IME_KEYDOWN = 0x0290,
    WM_IME_KEYUP = 0x0291,
    WM_MOUSEHOVER = 0x02A1,
    WM_MOUSELEAVE = 0x02A3,
    // Clipboard messages
    WM_CUT = 0x0300,
    WM_COPY = 0x0301,
    WM_PASTE = 0x0302,
    WM_CLEAR = 0x0303,
    WM_UNDO = 0x0304,
    WM_RENDERFORMAT = 0x0305,
    WM_RENDERALLFORMATS = 0x0306,
    WM_DESTROYCLIPBOARD = 0x0307,
    WM_DRAWCLIPBOARD = 0x0308,
    WM_PAINTCLIPBOARD = 0x0309,
    WM_VSCROLLCLIPBOARD = 0x030A,
    WM_SIZECLIPBOARD = 0x030B,
    WM_ASKCBFORMATNAME = 0x030C,
    WM_CHANGECBCHAIN = 0x030D,
    WM_HSCROLLCLIPBOARD = 0x030E,
    WM_QUERYNEWPALETTE = 0x030F,
    WM_PALETTEISCHANGING = 0x0310,
    WM_PALETTECHANGED = 0x0311,
    WM_HOTKEY = 0x0312,
    WM_PRINT = 0x0317,
    WM_PRINTCLIENT = 0x0318,
    // Reserved ranges and user-defined messages
    WM_HANDHELDFIRST = 0x0358,
    WM_HANDHELDLAST = 0x035F,
    WM_AFXFIRST = 0x0360,
    WM_AFXLAST = 0x037F,
    WM_PENWINFIRST = 0x0380,
    WM_PENWINLAST = 0x038F,
    WM_APP = 0x8000,
    WM_USER = 0x0400
}
/// <summary>
/// Return values of WM_NCHITTEST (HT* constants). Some names alias the same value
/// (HTGROWBOX == HTSIZE, HTREDUCE == HTMINBUTTON, HTZOOM == HTMAXBUTTON).
/// </summary>
internal enum HitTest
{
    HTERROR = -2,
    HTTRANSPARENT = -1,
    HTNOWHERE = 0,
    HTCLIENT = 1,
    HTCAPTION = 2,
    HTSYSMENU = 3,
    HTGROWBOX = 4,
    HTSIZE = 4,
    HTMENU = 5,
    HTHSCROLL = 6,
    HTVSCROLL = 7,
    HTMINBUTTON = 8,
    HTMAXBUTTON = 9,
    HTLEFT = 10,
    HTRIGHT = 11,
    HTTOP = 12,
    HTTOPLEFT = 13,
    HTTOPRIGHT = 14,
    HTBOTTOM = 15,
    HTBOTTOMLEFT = 16,
    HTBOTTOMRIGHT = 17,
    HTBORDER = 18,
    HTREDUCE = 8,
    HTZOOM = 9,
    HTSIZEFIRST = 10,
    HTSIZELAST = 17,
    HTOBJECT = 19,
    HTCLOSE = 20,
    HTHELP = 21
}
/// <summary>
/// Scroll bar selectors (SB_*) for the Win32 scroll bar APIs (e.g. ShowScrollBar).
/// </summary>
internal enum ScrollBars : uint
{
    SB_HORZ = 0,
    SB_VERT = 1,
    SB_CTL = 2,
    SB_BOTH = 3
}
/// <summary>
/// Index values (GWL_*) for GetWindowLong/SetWindowLong.
/// </summary>
internal enum GetWindowLongIndex : int
{
    GWL_STYLE = -16,
    GWL_EXSTYLE = -20
}
/// <summary>
/// Hook types (WH_*) for SetWindowsHookEx.
/// </summary>
internal enum HookType : int
{
    WH_JOURNALRECORD = 0,
    WH_JOURNALPLAYBACK = 1,
    WH_KEYBOARD = 2,
    WH_GETMESSAGE = 3,
    WH_CALLWNDPROC = 4,
    WH_CBT = 5,
    WH_SYSMSGFILTER = 6,
    WH_MOUSE = 7,
    WH_HARDWARE = 8,
    WH_DEBUG = 9,
    WH_SHELL = 10,
    WH_FOREGROUNDIDLE = 11,
    WH_CALLWNDPROCRET = 12,
    WH_KEYBOARD_LL = 13,
    WH_MOUSE_LL = 14
}
}
| |
// Copyright (c) Brock Allen & Dominick Baier. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using IdentityModel;
using IdentityServer4.Events;
using IdentityServer4.Extensions;
using IdentityServer4.Models;
using IdentityServer4.Services;
using IdentityServer4.Stores;
using IdentityServer4.Test;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using System;
using System.Linq;
using System.Threading.Tasks;
namespace IdentityServer
{
/// <summary>
/// This sample controller implements a typical login/logout/provision workflow for local and external accounts.
/// The login service encapsulates the interactions with the user data store. This data store is in-memory only and cannot be used for production!
/// The interaction service provides a way for the UI to communicate with identityserver for validation and context retrieval
/// </summary>
[SecurityHeaders]
[AllowAnonymous]
public class AccountController : Controller
{
// Collaborators assigned once in the constructor.
private readonly TestUserStore _users;                           // user credential store (in-memory test users by default)
private readonly IIdentityServerInteractionService _interaction; // IdentityServer UI interaction/context API
private readonly IClientStore _clientStore;                      // client configuration lookup
private readonly IAuthenticationSchemeProvider _schemeProvider;  // registered authentication schemes
private readonly IEventService _events;                          // IdentityServer event sink (login success/failure, logout)
/// <summary>
/// Wires up the controller's collaborators. <paramref name="users"/> is optional so a
/// custom user store registered in DI takes precedence; otherwise the global in-memory
/// test users are used.
/// </summary>
public AccountController(
    IIdentityServerInteractionService interaction,
    IClientStore clientStore,
    IAuthenticationSchemeProvider schemeProvider,
    IEventService events,
    TestUserStore users = null)
{
    // if the TestUserStore is not in DI, then we'll just use the global users collection
    // this is where you would plug in your own custom identity management library (e.g. ASP.NET Identity)
    _users = users ?? new TestUserStore(TestUsers.Users);
    _interaction = interaction;
    _clientStore = clientStore;
    _schemeProvider = schemeProvider;
    _events = events;
}
/// <summary>
/// Entry point into the login workflow: renders the login page, or — when exactly one
/// external provider is the only option — skips the page and challenges it directly.
/// </summary>
[HttpGet]
public async Task<IActionResult> Login(string returnUrl)
{
    // build a model so we know what to show on the login page
    var viewModel = await BuildLoginViewModelAsync(returnUrl);

    if (!viewModel.IsExternalLoginOnly)
    {
        return View(viewModel);
    }

    // we only have one option for logging in and it's an external provider
    return RedirectToAction("Challenge", "External", new { provider = viewModel.ExternalLoginScheme, returnUrl });
}
/// <summary>
/// Handle postback from username/password login. <paramref name="button"/> distinguishes
/// a real login attempt ("login") from a cancel click; a cancel inside an authorization
/// request is reported to IdentityServer as denied consent.
/// </summary>
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<IActionResult> Login(LoginInputModel model, string button)
{
    // check if we are in the context of an authorization request
    var context = await _interaction.GetAuthorizationContextAsync(model.ReturnUrl);

    // the user clicked the "cancel" button
    if (button != "login")
    {
        if (context != null)
        {
            // if the user cancels, send a result back into IdentityServer as if they
            // denied the consent (even if this client does not require consent).
            // this will send back an access denied OIDC error response to the client.
            await _interaction.GrantConsentAsync(context, ConsentResponse.Denied);

            // we can trust model.ReturnUrl since GetAuthorizationContextAsync returned non-null
            if (await _clientStore.IsPkceClientAsync(context.ClientId))
            {
                // if the client is PKCE then we assume it's native, so this change in how to
                // return the response is for better UX for the end user.
                return View("Redirect", new RedirectViewModel { RedirectUrl = model.ReturnUrl });
            }
            return Redirect(model.ReturnUrl);
        }
        else
        {
            // since we don't have a valid context, then we just go back to the home page
            return Redirect("~/");
        }
    }

    if (ModelState.IsValid)
    {
        // validate username/password against in-memory store
        if (_users.ValidateCredentials(model.Username, model.Password))
        {
            var user = _users.FindByUsername(model.Username);
            await _events.RaiseAsync(new UserLoginSuccessEvent(user.Username, user.SubjectId, user.Username));

            // only set explicit expiration here if user chooses "remember me".
            // otherwise we rely upon expiration configured in cookie middleware.
            AuthenticationProperties props = null;
            if (AccountOptions.AllowRememberLogin && model.RememberLogin)
            {
                props = new AuthenticationProperties
                {
                    IsPersistent = true,
                    ExpiresUtc = DateTimeOffset.UtcNow.Add(AccountOptions.RememberMeLoginDuration)
                };
            }
            // (Removed a stray ';' that followed the closing brace above — it was an empty statement.)

            // issue authentication cookie with subject ID and username
            await HttpContext.SignInAsync(user.SubjectId, user.Username, props);

            if (context != null)
            {
                if (await _clientStore.IsPkceClientAsync(context.ClientId))
                {
                    // if the client is PKCE then we assume it's native, so this change in how to
                    // return the response is for better UX for the end user.
                    return View("Redirect", new RedirectViewModel { RedirectUrl = model.ReturnUrl });
                }

                // we can trust model.ReturnUrl since GetAuthorizationContextAsync returned non-null
                return Redirect(model.ReturnUrl);
            }

            // request for a local page
            if (Url.IsLocalUrl(model.ReturnUrl))
            {
                return Redirect(model.ReturnUrl);
            }
            else if (string.IsNullOrEmpty(model.ReturnUrl))
            {
                return Redirect("~/");
            }
            else
            {
                // user might have clicked on a malicious link - should be logged
                throw new Exception("invalid return URL");
            }
        }

        await _events.RaiseAsync(new UserLoginFailureEvent(model.Username, "invalid credentials"));
        ModelState.AddModelError(string.Empty, AccountOptions.InvalidCredentialsErrorMessage);
    }

    // something went wrong, show form with error
    var vm = await BuildLoginViewModelAsync(model);
    return View(vm);
}
/// <summary>
/// Show logout page, or log the user out immediately when no confirmation prompt is needed.
/// </summary>
[HttpGet]
public async Task<IActionResult> Logout(string logoutId)
{
    // build a model so the logout page knows what to display
    var viewModel = await BuildLogoutViewModelAsync(logoutId);

    // when the logout request was properly authenticated from IdentityServer there is
    // no need to show the prompt — log the user out directly.
    return viewModel.ShowLogoutPrompt ? View(viewModel) : await Logout(viewModel);
}
/// <summary>
/// Handle logout page postback: signs out the local session, raises the logout event,
/// and, when required, triggers sign-out at the upstream external identity provider.
/// </summary>
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<IActionResult> Logout(LogoutInputModel model)
{
    // build a model so the logged out page knows what to display
    var vm = await BuildLoggedOutViewModelAsync(model.LogoutId);
    if (User?.Identity.IsAuthenticated == true)
    {
        // delete local authentication cookie
        await HttpContext.SignOutAsync();
        // raise the logout event
        await _events.RaiseAsync(new UserLogoutSuccessEvent(User.GetSubjectId(), User.GetDisplayName()));
    }
    // check if we need to trigger sign-out at an upstream identity provider
    if (vm.TriggerExternalSignout)
    {
        // build a return URL so the upstream provider will redirect back
        // to us after the user has logged out. this allows us to then
        // complete our single sign-out processing.
        string url = Url.Action("Logout", new { logoutId = vm.LogoutId });
        // this triggers a redirect to the external provider for sign-out
        return SignOut(new AuthenticationProperties { RedirectUri = url }, vm.ExternalAuthenticationScheme);
    }
    return View("LoggedOut", vm);
}
/*****************************************/
/* helper APIs for the AccountController */
/*****************************************/
/// <summary>
/// Builds the model for the login page. When the authorization request pins a single
/// identity provider (context.IdP) the page is short-circuited to only that provider;
/// otherwise all registered schemes with a display name (plus Windows authentication)
/// are offered, optionally filtered by the client's IdentityProviderRestrictions.
/// </summary>
private async Task<LoginViewModel> BuildLoginViewModelAsync(string returnUrl)
{
    var context = await _interaction.GetAuthorizationContextAsync(returnUrl);
    if (context?.IdP != null)
    {
        // "local" means the request pinned IdentityServer's own username/password login
        var local = context.IdP == IdentityServer4.IdentityServerConstants.LocalIdentityProvider;
        // this is meant to short circuit the UI and only trigger the one external IdP
        var vm = new LoginViewModel
        {
            EnableLocalLogin = local,
            ReturnUrl = returnUrl,
            Username = context?.LoginHint,
        };
        if (!local)
        {
            vm.ExternalProviders = new[] { new ExternalProvider { AuthenticationScheme = context.IdP } };
        }
        return vm;
    }
    // no pinned IdP: offer every visible scheme (display name set), plus Windows auth
    // which is matched by name because it has no display name
    var schemes = await _schemeProvider.GetAllSchemesAsync();
    var providers = schemes
        .Where(x => x.DisplayName != null ||
                    (x.Name.Equals(AccountOptions.WindowsAuthenticationSchemeName, StringComparison.OrdinalIgnoreCase))
        )
        .Select(x => new ExternalProvider
        {
            DisplayName = x.DisplayName,
            AuthenticationScheme = x.Name
        }).ToList();
    var allowLocal = true;
    if (context?.ClientId != null)
    {
        var client = await _clientStore.FindEnabledClientByIdAsync(context.ClientId);
        if (client != null)
        {
            allowLocal = client.EnableLocalLogin;
            // an empty restriction list means "no restriction" — only filter when non-empty
            if (client.IdentityProviderRestrictions != null && client.IdentityProviderRestrictions.Any())
            {
                providers = providers.Where(provider => client.IdentityProviderRestrictions.Contains(provider.AuthenticationScheme)).ToList();
            }
        }
    }
    return new LoginViewModel
    {
        AllowRememberLogin = AccountOptions.AllowRememberLogin,
        EnableLocalLogin = allowLocal && AccountOptions.AllowLocalLogin,
        ReturnUrl = returnUrl,
        Username = context?.LoginHint,
        ExternalProviders = providers.ToArray()
    };
}
// Rebuilds the login view model after a failed POST, carrying over the
// username and remember-me choice the user already entered so the form can be
// re-rendered with their input intact.
private async Task<LoginViewModel> BuildLoginViewModelAsync(LoginInputModel model)
{
    var viewModel = await BuildLoginViewModelAsync(model.ReturnUrl);
    viewModel.Username = model.Username;
    viewModel.RememberLogin = model.RememberLogin;
    return viewModel;
}
// Builds the model for the logout prompt page. The prompt is suppressed when
// the user is not authenticated, or when the logout context says it is safe
// to sign out without asking.
private async Task<LogoutViewModel> BuildLogoutViewModelAsync(string logoutId)
{
    var vm = new LogoutViewModel { LogoutId = logoutId, ShowLogoutPrompt = AccountOptions.ShowLogoutPrompt };

    // Fix: also null-guard Identity — ClaimsPrincipal.Identity is nullable,
    // and the original 'User?.Identity.IsAuthenticated' would throw a
    // NullReferenceException for a principal with no identity.
    if (User?.Identity?.IsAuthenticated != true)
    {
        // if the user is not authenticated, then just show logged out page
        vm.ShowLogoutPrompt = false;
        return vm;
    }

    var context = await _interaction.GetLogoutContextAsync(logoutId);
    if (context?.ShowSignoutPrompt == false)
    {
        // it's safe to automatically sign-out
        vm.ShowLogoutPrompt = false;
        return vm;
    }

    // show the logout prompt. this prevents attacks where the user
    // is automatically signed out by another malicious web page.
    return vm;
}
// Builds the model for the logged-out page, including the federated sign-out
// iframe URL and, when the user signed in through an external provider that
// supports sign-out, the scheme to trigger upstream sign-out with.
private async Task<LoggedOutViewModel> BuildLoggedOutViewModelAsync(string logoutId)
{
    // get context information (client name, post logout redirect URI and iframe for federated signout)
    var logout = await _interaction.GetLogoutContextAsync(logoutId);

    var vm = new LoggedOutViewModel
    {
        AutomaticRedirectAfterSignOut = AccountOptions.AutomaticRedirectAfterSignOut,
        PostLogoutRedirectUri = logout?.PostLogoutRedirectUri,
        ClientName = string.IsNullOrEmpty(logout?.ClientName) ? logout?.ClientId : logout?.ClientName,
        SignOutIframeUrl = logout?.SignOutIFrameUrl,
        LogoutId = logoutId
    };

    // Fix: null-guard Identity as well — ClaimsPrincipal.Identity is nullable
    // and the original 'User?.Identity.IsAuthenticated' could throw.
    if (User?.Identity?.IsAuthenticated == true)
    {
        var idp = User.FindFirst(JwtClaimTypes.IdentityProvider)?.Value;
        if (idp != null && idp != IdentityServer4.IdentityServerConstants.LocalIdentityProvider)
        {
            var providerSupportsSignout = await HttpContext.GetSchemeSupportsSignOutAsync(idp);
            if (providerSupportsSignout)
            {
                if (vm.LogoutId == null)
                {
                    // if there's no current logout context, we need to create one
                    // this captures necessary info from the current logged in user
                    // before we signout and redirect away to the external IdP for signout
                    vm.LogoutId = await _interaction.CreateLogoutContextAsync();
                }

                vm.ExternalAuthenticationScheme = idp;
            }
        }
    }

    return vm;
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeGeneration;
using Microsoft.CodeAnalysis.ProjectManagement;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.GenerateType
{
internal abstract partial class AbstractGenerateTypeService<TService, TSimpleNameSyntax, TObjectCreationExpressionSyntax, TExpressionSyntax, TTypeDeclarationSyntax, TArgumentSyntax>
{
protected abstract bool IsConversionImplicit(Compilation compilation, ITypeSymbol sourceType, ITypeSymbol targetType);
private partial class Editor
{
// The language service that created this editor.
private TService _service;
// Whether generation crosses a C#/VB project boundary; set in GetOperationsAsync.
private TargetProjectChangeInLanguage _targetProjectChangeInLanguage = TargetProjectChangeInLanguage.NoChange;
// Generate-type service for the target project's language when it differs
// from the current project's language; resolved in GetOperationsAsync.
private IGenerateTypeService _targetLanguageService;
// Document and state describing the trigger site for the generation.
private readonly SemanticDocument _document;
private readonly State _state;
// Non-dialog path: generate into the containing namespace / into a new file.
private readonly bool _intoNamespace;
private readonly bool _inNewFile;
// Dialog path: the request came from the Generate Type dialog, with its options.
private readonly bool _fromDialog;
private readonly GenerateTypeOptionsResult _generateTypeOptionsResult;
private readonly CancellationToken _cancellationToken;
// Creates an editor for the non-dialog (lightbulb) code path; the target is
// described by intoNamespace/inNewFile. _fromDialog stays false.
public Editor(
    TService service,
    SemanticDocument document,
    State state,
    bool intoNamespace,
    bool inNewFile,
    CancellationToken cancellationToken)
{
    _service = service;
    _document = document;
    _state = state;
    _intoNamespace = intoNamespace;
    _inNewFile = inNewFile;
    _cancellationToken = cancellationToken;
}
// Creates an editor for the dialog code path; the user's choices are carried
// in generateTypeOptionsResult. _intoNamespace/_inNewFile stay false.
public Editor(
    TService service,
    SemanticDocument document,
    State state,
    bool fromDialog,
    GenerateTypeOptionsResult generateTypeOptionsResult,
    CancellationToken cancellationToken)
{
    _service = service;
    _document = document;
    _state = state;
    _fromDialog = fromDialog;
    _generateTypeOptionsResult = generateTypeOptionsResult;
    _cancellationToken = cancellationToken;
}
// Direction of a cross-language generation between the current project and
// the project being generated into (dialog path only).
private enum TargetProjectChangeInLanguage
{
    NoChange,
    CSharpToVisualBasic,
    VisualBasicToCSharp
}
// Entry point: computes the workspace operations that generate the requested
// type, dispatching on how generation was requested (lightbulb vs. dialog)
// and where the type should live (new file, containing namespace, or an
// existing type/document).
internal async Task<IEnumerable<CodeActionOperation>> GetOperationsAsync()
{
    // Check to see if it is from GFU Dialog
    if (!_fromDialog)
    {
        // Generate the actual type declaration.
        var namedType = GenerateNamedType();

        if (_intoNamespace)
        {
            if (_inNewFile)
            {
                // Generating into a new file is somewhat complicated.
                var documentName = GetTypeName(_state) + _service.DefaultFileExtension;
                return await GetGenerateInNewFileOperationsAsync(
                    namedType,
                    documentName,
                    null,
                    true,
                    null,
                    _document.Project,
                    _document.Project,
                    isDialog: false).ConfigureAwait(false);
            }
            else
            {
                return await GetGenerateIntoContainingNamespaceOperationsAsync(namedType).ConfigureAwait(false);
            }
        }
        else
        {
            return await GetGenerateIntoTypeOperationsAsync(namedType).ConfigureAwait(false);
        }
    }
    else
    {
        var namedType = GenerateNamedType(_generateTypeOptionsResult);

        // Honor the options from the dialog
        // Check to see if the type is requested to be generated in cross language Project
        // e.g.: C# -> VB or VB -> C#
        if (_document.Project.Language != _generateTypeOptionsResult.Project.Language)
        {
            _targetProjectChangeInLanguage =
                _generateTypeOptionsResult.Project.Language == LanguageNames.CSharp
                ? TargetProjectChangeInLanguage.VisualBasicToCSharp
                : TargetProjectChangeInLanguage.CSharpToVisualBasic;

            // Get the cross language service
            _targetLanguageService = _generateTypeOptionsResult.Project.LanguageServices.GetService<IGenerateTypeService>();
        }

        if (_generateTypeOptionsResult.IsNewFile)
        {
            return await GetGenerateInNewFileOperationsAsync(
                namedType,
                _generateTypeOptionsResult.NewFileName,
                _generateTypeOptionsResult.Folders,
                _generateTypeOptionsResult.AreFoldersValidIdentifiers,
                _generateTypeOptionsResult.FullFilePath,
                _generateTypeOptionsResult.Project,
                _document.Project,
                isDialog: true).ConfigureAwait(false);
        }
        else
        {
            return await GetGenerateIntoExistingDocumentAsync(
                namedType,
                _document.Project,
                _generateTypeOptionsResult,
                isDialog: true).ConfigureAwait(false);
        }
    }
}
// Computes the namespace to generate into, relative to the project's root
// namespace: when the target namespace equals the root namespace or is nested
// under it, the root-namespace prefix is stripped.
private string GetNamespaceToGenerateInto()
{
    var targetNamespace = _state.NamespaceToGenerateInOpt.Trim();
    var rootNamespace = _service.GetRootNamespace(_document.SemanticModel.Compilation.Options).Trim();

    if (string.IsNullOrWhiteSpace(rootNamespace))
    {
        return targetNamespace;
    }

    var isExactMatch = targetNamespace == rootNamespace;
    var isNestedUnderRoot = targetNamespace.StartsWith(rootNamespace + ".", StringComparison.Ordinal);
    if (isExactMatch || isNestedUnderRoot)
    {
        // Strip exactly rootNamespace.Length characters; for the nested case
        // this deliberately leaves the leading '.' in place, as the original did.
        return targetNamespace.Substring(rootNamespace.Length);
    }

    return targetNamespace;
}
// Computes the namespace to generate into when the usage site referenced the
// type through a qualified namespace, adjusting for VB projects whose
// RootNamespace implicitly prefixes every declared namespace.
private string GetNamespaceToGenerateIntoForUsageWithNamespace(Project targetProject, Project triggeringProject)
{
    var namespaceToGenerateInto = _state.NamespaceToGenerateInOpt.Trim();

    if (targetProject.Language == LanguageNames.CSharp ||
        targetProject == triggeringProject)
    {
        // If the target project is C# project then we don't have to make any modification to the namespace
        // or
        // This is a VB project generation into itself which requires no change as well
        return namespaceToGenerateInto;
    }

    // If the target Project is VB then we have to check if the RootNamespace of the VB project is the parent most namespace of the type being generated
    // True, Remove the RootNamespace
    // False, Add Global to the Namespace
    Contract.Assert(targetProject.Language == LanguageNames.VisualBasic);
    IGenerateTypeService targetLanguageService = null;

    if (_document.Project.Language == LanguageNames.VisualBasic)
    {
        // VB -> VB (different project): the current service speaks the target language.
        targetLanguageService = _service;
    }
    else
    {
        // Cross-language: use the service resolved earlier in GetOperationsAsync.
        Debug.Assert(_targetLanguageService != null);
        targetLanguageService = _targetLanguageService;
    }

    var rootNamespace = targetLanguageService.GetRootNamespace(targetProject.CompilationOptions).Trim();
    if (!string.IsNullOrWhiteSpace(rootNamespace))
    {
        var rootNamespaceLength = CheckIfRootNamespacePresentInNamespace(namespaceToGenerateInto, rootNamespace);
        if (rootNamespaceLength > -1)
        {
            // True, Remove the RootNamespace
            namespaceToGenerateInto = namespaceToGenerateInto.Substring(rootNamespaceLength);
        }
        else
        {
            // False, Add Global to the Namespace
            namespaceToGenerateInto = AddGlobalDotToTheNamespace(namespaceToGenerateInto);
        }
    }
    else
    {
        // False, Add Global to the Namespace
        namespaceToGenerateInto = AddGlobalDotToTheNamespace(namespaceToGenerateInto);
    }

    return namespaceToGenerateInto;
}
// Roots the namespace with the "Global." qualifier so VB does not prepend the
// project's RootNamespace to it.
private string AddGlobalDotToTheNamespace(string namespaceToBeGenerated)
{
    return string.Concat("Global.", namespaceToBeGenerated);
}
// Returns the length of the rootNamespace prefix (including the trailing dot
// for a nested match) inside namespaceToGenerateInto, or -1 when the root
// namespace is not its parent-most namespace.
private int CheckIfRootNamespacePresentInNamespace(string namespaceToGenerateInto, string rootNamespace)
{
    if (namespaceToGenerateInto == rootNamespace)
    {
        return rootNamespace.Length;
    }

    var rootPrefix = rootNamespace + ".";
    return namespaceToGenerateInto.StartsWith(rootPrefix, StringComparison.Ordinal)
        ? rootPrefix.Length
        : -1;
}
// Appends the document folders to the namespace container list, skipping
// blank entries and replacing spaces with underscores so each folder name is
// identifier-friendly.
private void AddFoldersToNamespaceContainers(List<string> container, IList<string> folders)
{
    if (folders == null || folders.Count == 0)
    {
        return;
    }

    var refinedFolders = folders
        .Where(folder => folder != null && !folder.IsEmpty())
        .Select(folder => folder.Replace(' ', '_'))
        .ToArray();
    container.AddRange(refinedFolders);
}
// Generates the type into a brand-new document: forks the solution with an
// empty file, computes the namespace containers and any using/import to add
// at the trigger site, asks code generation to emit the declaration into the
// new file's root namespace, and returns the add-document/update-usings
// operations.
private async Task<IEnumerable<CodeActionOperation>> GetGenerateInNewFileOperationsAsync(
    INamedTypeSymbol namedType,
    string documentName,
    IList<string> folders,
    bool areFoldersValidIdentifiers,
    string fullFilePath,
    Project projectToBeUpdated,
    Project triggeringProject,
    bool isDialog)
{
    // First, we fork the solution with a new, empty, file in it.
    var newDocumentId = DocumentId.CreateNewId(projectToBeUpdated.Id, debugName: documentName);
    var newSolution = projectToBeUpdated.Solution.AddDocument(newDocumentId, documentName, string.Empty, folders, fullFilePath);

    // Now we get the semantic model for that file we just added. We do that to get the
    // root namespace in that new document, along with location for that new namespace.
    // That way, when we use the code gen service we can say "add this symbol to the
    // root namespace" and it will pick the one in the new file.
    var newDocument = newSolution.GetDocument(newDocumentId);
    var newSemanticModel = await newDocument.GetSemanticModelAsync(_cancellationToken).ConfigureAwait(false);
    var enclosingNamespace = newSemanticModel.GetEnclosingNamespace(0, _cancellationToken);

    var namespaceContainersAndUsings = GetNamespaceContainersAndAddUsingsOrImport(isDialog, folders, areFoldersValidIdentifiers, projectToBeUpdated, triggeringProject);

    var containers = namespaceContainersAndUsings.Item1;
    var includeUsingsOrImports = namespaceContainersAndUsings.Item2;

    var rootNamespaceOrType = namedType.GenerateRootNamespaceOrType(containers);

    // Now, actually ask the code gen service to add this namespace or type to the root
    // namespace in the new file. This will properly generate the code, and add any
    // additional niceties like imports/usings.
    var codeGenResult = await CodeGenerator.AddNamespaceOrTypeDeclarationAsync(
        newSolution,
        enclosingNamespace,
        rootNamespaceOrType,
        new CodeGenerationOptions(newSemanticModel.SyntaxTree.GetLocation(new TextSpan())),
        _cancellationToken).ConfigureAwait(false);

    // containers is determined to be
    // 1: folders -> if triggered from Dialog
    // 2: containers -> if triggered not from a Dialog but from QualifiedName
    // 3: triggering document folder structure -> if triggered not from a Dialog and a SimpleName
    var adjustedContainer = isDialog ? folders :
        _state.SimpleName != _state.NameOrMemberAccessExpression ? containers.ToList() : _document.Document.Folders.ToList();

    // Now, take the code that would be generated and actually create an edit that would
    // produce a document with that code in it.
    return CreateAddDocumentAndUpdateUsingsOrImportsOperations(
        projectToBeUpdated,
        triggeringProject,
        documentName,
        await codeGenResult.GetSyntaxRootAsync(_cancellationToken).ConfigureAwait(false),
        _document.Document,
        includeUsingsOrImports,
        adjustedContainer,
        SourceCodeKind.Regular,
        _cancellationToken);
}
// Materializes the generated code as workspace operations: adds the new
// document with the given syntax root, optionally adds a using/import to the
// triggering document, wires up a project reference when needed, and returns
// apply-changes plus open-document operations.
private IEnumerable<CodeActionOperation> CreateAddDocumentAndUpdateUsingsOrImportsOperations(
    Project projectToBeUpdated,
    Project triggeringProject,
    string documentName,
    SyntaxNode root,
    Document generatingDocument,
    string includeUsingsOrImports,
    IList<string> containers,
    SourceCodeKind sourceCodeKind,
    CancellationToken cancellationToken)
{
    // TODO(cyrusn): make sure documentId is unique.
    var documentId = DocumentId.CreateNewId(projectToBeUpdated.Id, documentName);

    var updatedSolution = projectToBeUpdated.Solution.AddDocument(DocumentInfo.Create(
        documentId,
        documentName,
        containers,
        sourceCodeKind));

    updatedSolution = updatedSolution.WithDocumentSyntaxRoot(documentId, root, PreservationMode.PreserveIdentity);

    // Update the Generating Document with a using if required
    if (includeUsingsOrImports != null)
    {
        updatedSolution = _service.TryAddUsingsOrImportToDocument(updatedSolution, null, _document.Document, _state.SimpleName, includeUsingsOrImports, cancellationToken);
    }

    // Add reference of the updated project to the triggering Project if they are 2 different projects
    updatedSolution = AddProjectReference(projectToBeUpdated, triggeringProject, updatedSolution);

    return new CodeActionOperation[] { new ApplyChangesOperation(updatedSolution), new OpenDocumentOperation(documentId) };
}
// Adds a project reference from the triggering project to the project that
// received the generated type, when they differ and no reference exists yet.
private static Solution AddProjectReference(Project projectToBeUpdated, Project triggeringProject, Solution updatedSolution)
{
    if (projectToBeUpdated == triggeringProject)
    {
        return updatedSolution;
    }

    var alreadyReferenced = triggeringProject.ProjectReferences.Any(pr => pr.ProjectId == projectToBeUpdated.Id);
    if (alreadyReferenced)
    {
        return updatedSolution;
    }

    return updatedSolution.AddProjectReference(triggeringProject.Id, new ProjectReference(projectToBeUpdated.Id));
}
// Generates the type directly into the namespace that encloses the trigger
// site, inserting the declaration right after the triggering simple name.
private async Task<IEnumerable<CodeActionOperation>> GetGenerateIntoContainingNamespaceOperationsAsync(INamedTypeSymbol namedType)
{
    var enclosingNamespace = _document.SemanticModel.GetEnclosingNamespace(
        _state.SimpleName.SpanStart, _cancellationToken);

    var solution = _document.Project.Solution;
    var codeGenResult = await CodeGenerator.AddNamedTypeDeclarationAsync(
        solution,
        enclosingNamespace,
        namedType,
        new CodeGenerationOptions(afterThisLocation: _document.SyntaxTree.GetLocation(_state.SimpleName.Span)),
        _cancellationToken)
        .ConfigureAwait(false);

    return new CodeActionOperation[] { new ApplyChangesOperation(codeGenResult.Project.Solution) };
}
// Generates the type into an existing document chosen in the dialog: resolves
// (or generates) the enclosing namespace symbol, emits the declaration after
// it, optionally adds a using/import at the trigger site, and wires up a
// project reference when generating cross-project.
private async Task<IEnumerable<CodeActionOperation>> GetGenerateIntoExistingDocumentAsync(
    INamedTypeSymbol namedType,
    Project triggeringProject,
    GenerateTypeOptionsResult generateTypeOptionsResult,
    bool isDialog)
{
    var root = await generateTypeOptionsResult.ExistingDocument.GetSyntaxRootAsync(_cancellationToken).ConfigureAwait(false);
    var folders = generateTypeOptionsResult.ExistingDocument.Folders;

    var namespaceContainersAndUsings = GetNamespaceContainersAndAddUsingsOrImport(isDialog, new List<string>(folders), generateTypeOptionsResult.AreFoldersValidIdentifiers, generateTypeOptionsResult.Project, triggeringProject);

    var containers = namespaceContainersAndUsings.Item1;
    var includeUsingsOrImports = namespaceContainersAndUsings.Item2;

    // Use the cross-language service when generating into a project of a
    // different language; otherwise the current service handles it. (This
    // also collapses the two duplicated call branches of the original.)
    IGenerateTypeService generateTypeService;
    if (_targetProjectChangeInLanguage == TargetProjectChangeInLanguage.NoChange)
    {
        generateTypeService = _service;
    }
    else
    {
        Debug.Assert(_targetLanguageService != null);
        generateTypeService = _targetLanguageService;
    }

    // Fix: await the task instead of blocking on it with WaitAndGetResult —
    // synchronously blocking inside an async method risks deadlocks and
    // needlessly ties up a thread-pool thread.
    var enclosingNamespaceGeneratedTypeToAddAndLocation = await generateTypeService.GetOrGenerateEnclosingNamespaceSymbol(
        namedType,
        containers,
        generateTypeOptionsResult.ExistingDocument,
        root,
        _cancellationToken).ConfigureAwait(false);

    var solution = _document.Project.Solution;
    var codeGenResult = await CodeGenerator.AddNamespaceOrTypeDeclarationAsync(
        solution,
        enclosingNamespaceGeneratedTypeToAddAndLocation.Item1,
        enclosingNamespaceGeneratedTypeToAddAndLocation.Item2,
        new CodeGenerationOptions(afterThisLocation: enclosingNamespaceGeneratedTypeToAddAndLocation.Item3),
        _cancellationToken)
        .ConfigureAwait(false);

    var newRoot = await codeGenResult.GetSyntaxRootAsync(_cancellationToken).ConfigureAwait(false);
    var updatedSolution = solution.WithDocumentSyntaxRoot(generateTypeOptionsResult.ExistingDocument.Id, newRoot, PreservationMode.PreserveIdentity);

    // Update the Generating Document with a using if required
    if (includeUsingsOrImports != null)
    {
        updatedSolution = _service.TryAddUsingsOrImportToDocument(
            updatedSolution,
            generateTypeOptionsResult.ExistingDocument.Id == _document.Document.Id ? newRoot : null,
            _document.Document,
            _state.SimpleName,
            includeUsingsOrImports,
            _cancellationToken);
    }

    // Add a reference from the triggering project to the updated project if
    // they are two different projects.
    updatedSolution = AddProjectReference(generateTypeOptionsResult.Project, triggeringProject, updatedSolution);

    return new CodeActionOperation[] { new ApplyChangesOperation(updatedSolution) };
}
// Computes, for the generated type, (a) the namespace container parts and
// (b) the using/import string to add at the trigger site (null when none is
// needed). The container source depends on how generation was requested:
// dialog + simple name uses the target project's default namespace/root
// namespace plus the folder structure; otherwise the (qualified) namespace
// at the usage site is used.
private Tuple<string[], string> GetNamespaceContainersAndAddUsingsOrImport(
    bool isDialog,
    IList<string> folders,
    bool areFoldersValidIdentifiers,
    Project targetProject,
    Project triggeringProject)
{
    string includeUsingsOrImports = null;
    if (!areFoldersValidIdentifiers)
    {
        // Folders that are not valid identifiers cannot become namespace parts.
        folders = SpecializedCollections.EmptyList<string>();
    }

    // Now actually create the symbol that we want to add to the root namespace. The
    // symbol may either be a named type (if we're not generating into a namespace) or
    // it may be a namespace symbol.
    string[] containers = null;
    if (!isDialog)
    {
        // Not generated from the Dialog
        containers = GetNamespaceToGenerateInto().Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries);
    }
    else if (!_service.IsSimpleName(_state.NameOrMemberAccessExpression))
    {
        // If the usage was with a namespace
        containers = GetNamespaceToGenerateIntoForUsageWithNamespace(targetProject, triggeringProject).Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries);
    }
    else
    {
        // Generated from the Dialog
        List<string> containerList = new List<string>();

        string rootNamespaceOfTheProjectGeneratedInto;

        if (_targetProjectChangeInLanguage == TargetProjectChangeInLanguage.NoChange)
        {
            rootNamespaceOfTheProjectGeneratedInto = _service.GetRootNamespace(_generateTypeOptionsResult.Project.CompilationOptions).Trim();
        }
        else
        {
            rootNamespaceOfTheProjectGeneratedInto = _targetLanguageService.GetRootNamespace(_generateTypeOptionsResult.Project.CompilationOptions).Trim();
        }

        var projectManagementService = _document.Project.Solution.Workspace.Services.GetService<IProjectManagementService>();
        var defaultNamespace = projectManagementService.GetDefaultNamespace(targetProject, targetProject.Solution.Workspace);

        // Case 1 : If the type is generated into the same C# project or
        // Case 2 : If the type is generated from a C# project to a C# Project
        // Case 3 : If the Type is generated from a VB Project to a C# Project
        // Using and Namespace will be the DefaultNamespace + Folder Structure
        if ((_document.Project == _generateTypeOptionsResult.Project && _document.Project.Language == LanguageNames.CSharp) ||
            (_targetProjectChangeInLanguage == TargetProjectChangeInLanguage.NoChange && _generateTypeOptionsResult.Project.Language == LanguageNames.CSharp) ||
            _targetProjectChangeInLanguage == TargetProjectChangeInLanguage.VisualBasicToCSharp)
        {
            if (!string.IsNullOrWhiteSpace(defaultNamespace))
            {
                containerList.Add(defaultNamespace);
            }

            // Populate the ContainerList
            AddFoldersToNamespaceContainers(containerList, folders);

            containers = containerList.ToArray();
            includeUsingsOrImports = string.Join(".", containerList.ToArray());
        }

        // Case 4 : If the type is generated into the same VB project or
        // Case 5 : If Type is generated from a VB Project to VB Project
        // Case 6 : If Type is generated from a C# Project to VB Project
        // Namespace will be Folder Structure and Import will have the RootNamespace of the project generated into as part of the Imports
        if ((_document.Project == _generateTypeOptionsResult.Project && _document.Project.Language == LanguageNames.VisualBasic) ||
            (_document.Project != _generateTypeOptionsResult.Project && _targetProjectChangeInLanguage == TargetProjectChangeInLanguage.NoChange && _generateTypeOptionsResult.Project.Language == LanguageNames.VisualBasic) ||
            _targetProjectChangeInLanguage == TargetProjectChangeInLanguage.CSharpToVisualBasic)
        {
            // Populate the ContainerList
            AddFoldersToNamespaceContainers(containerList, folders);
            containers = containerList.ToArray();
            includeUsingsOrImports = string.Join(".", containerList.ToArray());
            if (!string.IsNullOrWhiteSpace(rootNamespaceOfTheProjectGeneratedInto))
            {
                // VB Imports must include the root namespace since declared
                // namespaces are implicitly nested under it.
                includeUsingsOrImports = string.IsNullOrEmpty(includeUsingsOrImports) ?
                    rootNamespaceOfTheProjectGeneratedInto :
                    rootNamespaceOfTheProjectGeneratedInto + "." + includeUsingsOrImports;
            }
        }

        Contract.Assert(includeUsingsOrImports != null);
    }

    return Tuple.Create(containers, includeUsingsOrImports);
}
// Generates the new type as a member of the destination type chosen by the
// state (_state.TypeToGenerateInOpt), anchored at the trigger location.
private async Task<IEnumerable<CodeActionOperation>> GetGenerateIntoTypeOperationsAsync(INamedTypeSymbol namedType)
{
    // Fix: removed the unused 'codeGenService' local — GetCodeGenerationService()
    // was called but its result was never used (CodeGenerator resolves the
    // appropriate service itself).
    var solution = _document.Project.Solution;
    var codeGenResult = await CodeGenerator.AddNamedTypeDeclarationAsync(
        solution,
        _state.TypeToGenerateInOpt,
        namedType,
        new CodeGenerationOptions(contextLocation: _state.SimpleName.GetLocation()),
        _cancellationToken)
        .ConfigureAwait(false);

    return new CodeActionOperation[] { new ApplyChangesOperation(codeGenResult.Project.Solution) };
}
// Determines the type of each argument at the trigger site, then scrubs
// unnamed error types so the generated members get usable parameter types.
private IList<ITypeSymbol> GetArgumentTypes(IList<TArgumentSyntax> argumentList)
{
    return argumentList
        .Select(argument => _service.DetermineArgumentType(_document.SemanticModel, argument, _cancellationToken))
        .Select(FixType)
        .ToList();
}
// Replaces unnamed error types in the symbol with something usable so we
// don't generate signatures that reference them.
private ITypeSymbol FixType(ITypeSymbol typeSymbol)
{
    return typeSymbol.RemoveUnnamedErrorTypes(_document.SemanticModel.Compilation);
}
// Resolves the code generation service for the destination language: the
// language of the type being generated into when one is known, otherwise the
// language at the trigger site.
private ICodeGenerationService GetCodeGenerationService()
{
    var language = _state.TypeToGenerateInOpt != null
        ? _state.TypeToGenerateInOpt.Language
        : _state.SimpleName.Language;
    return _document.Project.Solution.Workspace.Services.GetLanguageServices(language).GetService<ICodeGenerationService>();
}
// Looks for an accessible, assignable field or property on the base type (or
// its bases) whose name matches the constructor parameter; on success records
// it in parameterToFieldMap and returns true so no new member is generated.
private bool TryFindMatchingField(
    string parameterName,
    ITypeSymbol parameterType,
    Dictionary<string, ISymbol> parameterToFieldMap,
    bool caseSensitive)
{
    // If the base types have an accessible field or property with the same name and
    // an acceptable type, then we should just defer to that.
    if (_state.BaseTypeOrInterfaceOpt != null)
    {
        var comparison = caseSensitive ? StringComparison.Ordinal : StringComparison.OrdinalIgnoreCase;
        var query =
            _state.BaseTypeOrInterfaceOpt
                .GetBaseTypesAndThis()
                .SelectMany(t => t.GetMembers())
                .Where(s => s.Name.Equals(parameterName, comparison));
        var symbol = query.FirstOrDefault(IsSymbolAccessible);

        if (IsViableFieldOrProperty(parameterType, symbol))
        {
            parameterToFieldMap[parameterName] = symbol;
            return true;
        }
    }

    return false;
}
// A matching member is viable when it can actually be assigned the parameter
// value: a writable field, or a parameterless property with an accessible
// setter, whose type the parameter implicitly converts to.
private bool IsViableFieldOrProperty(
    ITypeSymbol parameterType,
    ISymbol symbol)
{
    if (symbol != null && !symbol.IsStatic && parameterType.Language == symbol.Language)
    {
        if (symbol is IFieldSymbol)
        {
            var field = (IFieldSymbol)symbol;
            // Read-only fields cannot be assigned from a generated constructor body.
            return
                !field.IsReadOnly &&
                _service.IsConversionImplicit(_document.SemanticModel.Compilation, parameterType, field.Type);
        }
        else if (symbol is IPropertySymbol)
        {
            var property = (IPropertySymbol)symbol;
            // Indexers (Parameters.Length != 0) and set-less or inaccessible
            // setters are not assignable targets.
            return
                property.Parameters.Length == 0 &&
                property.SetMethod != null &&
                IsSymbolAccessible(property.SetMethod) &&
                _service.IsConversionImplicit(_document.SemanticModel.Compilation, parameterType, property.Type);
        }
    }

    return false;
}
private bool IsSymbolAccessible(ISymbol symbol)
{
    // Public and protected constructors are accessible. Internal constructors are
    // accessible if we have friend access. We can't call the normal accessibility
    // checkers since they will think that a protected constructor isn't accessible
    // (since we don't have the destination type that would have access to them yet).
    var accessibility = symbol.DeclaredAccessibility;

    if (accessibility == Accessibility.Public ||
        accessibility == Accessibility.Protected ||
        accessibility == Accessibility.ProtectedOrInternal)
    {
        return true;
    }

    if (accessibility == Accessibility.Internal ||
        accessibility == Accessibility.ProtectedAndInternal)
    {
        // TODO: Code coverage
        return _document.SemanticModel.Compilation.Assembly.IsSameAssemblyOrHasFriendAccessTo(
            symbol.ContainingAssembly);
    }

    return false;
}
}
}
}
| |
/*
* Copyright 2008 Matthias Sessler
*
* This file is part of LibMpc.net.
*
* LibMpc.net is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 2.1 of the License, or
* (at your option) any later version.
*
* LibMpc.net is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with LibMpc.net. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Text.RegularExpressions;
namespace Libmpc
{
/// <summary>
/// The delegate for the <see cref="MpcConnection.OnConnected"/> and <see cref="MpcConnection.OnDisconnected"/> events.
/// </summary>
/// <param name="connection">The connection firing the event.</param>
public delegate void MpcConnectionEventDelegate( MpcConnection connection );
/// <summary>
/// Keeps the connection to the MPD server and handels the most basic structure of the
/// MPD protocol. The high level commands are handeled in the <see cref="Libmpc.Mpc"/>
/// class.
/// </summary>
public class MpcConnection
{
/// <summary>
/// Is fired when a connection to a MPD server is established.
/// </summary>
public event MpcConnectionEventDelegate OnConnected;
/// <summary>
/// Is fired when the connection to the MPD server is closed.
/// </summary>
public event MpcConnectionEventDelegate OnDisconnected;
private static readonly string FIRST_LINE_PREFIX = "OK MPD ";
private static readonly string OK = "OK";
private static readonly string ACK = "ACK";
private static readonly Regex ACK_REGEX = new Regex("^ACK \\[(?<code>[0-9]*)@(?<nr>[0-9]*)] \\{(?<command>[a-z]*)} (?<message>.*)$");
private IPEndPoint ipEndPoint = null;
private TcpClient tcpClient = null;
private NetworkStream networkStream = null;
private StreamReader reader;
private StreamWriter writer;
private string version;
/// <summary>
/// If the connection to the MPD is connected.
/// </summary>
public bool Connected { get { return (this.tcpClient != null) && this.tcpClient.Connected; } }
/// <summary>
/// The version of the MPD.
/// </summary>
public string Version { get { return this.version; } }
private bool autoConnect = false;
/// <summary>
/// If a connection should be established when a command is to be
/// executed in disconnected state.
/// </summary>
public bool AutoConnect
{
get{ return this.autoConnect; }
set { this.autoConnect = value; }
}
/// <summary>
/// Creates a new MpdConnection.
/// </summary>
public MpcConnection() {}
/// <summary>
/// Creates a new MpdConnection.
/// </summary>
/// <param name="server">The IPEndPoint of the MPD server.</param>
public MpcConnection(IPEndPoint server) { this.Connect(server); }
/// <summary>
/// The IPEndPoint of the MPD server.
/// </summary>
/// <exception cref="AlreadyConnectedException">When a conenction to a MPD server is already established.</exception>
public IPEndPoint Server
{
    get { return this.ipEndPoint; }
    set
    {
        // Changing the endpoint is only allowed while disconnected.
        if (this.Connected)
            throw new AlreadyConnectedException();

        this.ipEndPoint = value;

        // Drop per-connection state so the next Connect() starts clean.
        this.ClearConnectionFields();
    }
}
/// <summary>
/// Connects to a MPD server.
/// </summary>
/// <param name="server">The IPEndPoint of the server.</param>
public void Connect(IPEndPoint server)
{
    // Convenience overload: set the endpoint (validates disconnected state),
    // then perform the actual connect.
    this.Server = server;
    this.Connect();
}
/// <summary>
/// Connects to the MPD server who's IPEndPoint was set in the Server property.
/// </summary>
/// <exception cref="InvalidOperationException">If no IPEndPoint was set to the Server property.</exception>
public void Connect()
{
    if (this.ipEndPoint == null)
        throw new InvalidOperationException("Server IPEndPoint not set.");

    if (this.Connected)
        throw new AlreadyConnectedException();

    this.tcpClient = new TcpClient(
        this.ipEndPoint.Address.ToString(),
        this.ipEndPoint.Port);
    this.networkStream = this.tcpClient.GetStream();

    this.reader = new StreamReader(this.networkStream, Encoding.UTF8);
    // NOTE(review): StreamWriter with Encoding.UTF8 emits a UTF-8 BOM on the
    // first write; consider 'new UTF8Encoding(false)' if the server rejects
    // it — left unchanged here to preserve existing behavior. TODO confirm.
    this.writer = new StreamWriter(this.networkStream, Encoding.UTF8);
    this.writer.NewLine = "\n";

    // Fix: ReadLine() returns null when the server closes the connection
    // before sending its banner; the original dereferenced it unchecked and
    // threw NullReferenceException instead of a meaningful error.
    string firstLine = this.reader.ReadLine();
    if (firstLine == null || !firstLine.StartsWith(FIRST_LINE_PREFIX))
    {
        this.Disconnect();
        throw new InvalidDataException("Response of mpd does not start with \"" + FIRST_LINE_PREFIX + "\".");
    }
    this.version = firstLine.Substring(FIRST_LINE_PREFIX.Length);

    // Write an empty line and consume the server's reply (presumably a
    // connection probe — the response is discarded).
    this.writer.WriteLine();
    this.writer.Flush();

    this.readResponse();

    if (this.OnConnected != null)
        this.OnConnected.Invoke(this);
}
/// <summary>
/// Disconnects from the current MPD server.
/// </summary>
public void Disconnect()
{
    // Not connected (or never was): nothing to tear down.
    if (this.tcpClient == null)
        return;

    this.networkStream.Close();
    this.ClearConnectionFields();

    // Notify subscribers after the state has been reset.
    MpcConnectionEventDelegate handler = this.OnDisconnected;
    if (handler != null)
        handler.Invoke(this);
}
/// <summary>
/// Executes a simple command without arguments on the MPD server and returns the response.
/// </summary>
/// <param name="command">The command to execute.</param>
/// <returns>The MPD server response parsed into a basic object.</returns>
/// <exception cref="ArgumentException">If the command contains a space of a newline charakter.</exception>
public MpdResponse Exec(string command)
{
    // Validate the command before touching the connection; order of checks is
    // part of the observable contract (null, then space, then newline).
    if (command == null)
        throw new ArgumentNullException("command");
    if (command.Contains(" "))
        throw new ArgumentException("command contains space");
    if (command.Contains("\n"))
        throw new ArgumentException("command contains newline");

    this.CheckConnected();

    try
    {
        this.writer.WriteLine(command);
        this.writer.Flush();

        return this.readResponse();
    }
    catch (Exception)
    {
        // The connection state is unknown after a failure: drop it
        // (best-effort) and surface the original exception to the caller.
        try { this.Disconnect(); }
        catch (Exception) { }
        throw;
    }
}
/// <summary>
/// Executes a MPD command with arguments on the MPD server.
/// </summary>
/// <param name="command">The command to execute.</param>
/// <param name="argument">The arguments of the command.</param>
/// <returns>The MPD server response parsed into a basic object.</returns>
/// <exception cref="ArgumentException">If the command contains a space or a newline character.</exception>
public MpdResponse Exec(string command, string[] argument)
{
    // The command itself must be a single word; arguments may contain
    // spaces (WriteToken quotes them) but never newlines.
    if (command == null)
        throw new ArgumentNullException("command");
    if (command.Contains(" "))
        throw new ArgumentException("command contains space");
    if (command.Contains("\n"))
        throw new ArgumentException("command contains newline");

    if (argument == null)
        throw new ArgumentNullException("argument");
    for (int i = 0; i < argument.Length; i++)
    {
        if (argument[i] == null)
            throw new ArgumentNullException("argument[" + i + "]");
        if (argument[i].Contains("\n"))
            throw new ArgumentException("argument[" + i + "] contains newline");
    }

    // May transparently (re)connect, depending on autoConnect.
    this.CheckConnected();
    try
    {
        this.writer.Write(command);
        foreach (string arg in argument)
        {
            this.writer.Write(' ');
            this.WriteToken(arg);
        }
        this.writer.WriteLine();
        this.writer.Flush();
        return this.readResponse();
    }
    catch (Exception)
    {
        // Protocol state is undefined after an I/O error: drop the
        // connection, then rethrow the original exception.
        try { this.Disconnect(); } catch (Exception) { }
        throw;
    }
}
// Ensures a live connection exists before a command is sent.
// Auto-connects when configured to, otherwise signals the caller.
private void CheckConnected()
{
    if (this.Connected)
        return;

    if (!this.autoConnect)
        throw new NotConnectedException();

    this.Connect();
}
/// <summary>
/// Writes a single command argument, quoting and escaping it as the MPD
/// protocol requires.
/// </summary>
/// <param name="token">The argument to send; must not contain newlines.</param>
private void WriteToken(string token)
{
    // BUGFIX: the old code only quoted tokens containing spaces, so a
    // token containing a quote (but no space) was sent raw and corrupted
    // the command line, and backslashes were never escaped. Per the MPD
    // protocol, quoted arguments must escape both '\' and '"'.
    if (token.Contains(" ") || token.Contains("\""))
    {
        this.writer.Write("\"");
        foreach (char chr in token)
        {
            if (chr == '"' || chr == '\\')
                this.writer.Write('\\');
            this.writer.Write(chr);
        }
        this.writer.Write("\"");
    }
    else
        this.writer.Write(token);
}
/// <summary>
/// Reads one complete response from the server: zero or more payload
/// lines terminated by either an "OK" line or an "ACK ..." error line.
/// </summary>
/// <returns>The parsed response, including error info for ACK replies.</returns>
/// <exception cref="InvalidDataException">
/// If the stream ends before a terminator line or an ACK line is malformed.
/// </exception>
private MpdResponse readResponse()
{
    List<string> ret = new List<string>();

    string line = this.reader.ReadLine();
    while (true)
    {
        // BUGFIX: ReadLine() returns null when the server closes the
        // connection mid-response; the old code then crashed with a
        // NullReferenceException instead of reporting the real problem.
        if (line == null)
            throw new InvalidDataException("Connection closed by the MPD server before the response was complete.");
        if (line.Equals(OK) || line.StartsWith(ACK))
            break;
        ret.Add(line);
        line = this.reader.ReadLine();
    }

    if (line.Equals(OK))
        return new MpdResponse(new ReadOnlyCollection<string>(ret));

    // ACK line: "ACK [code@nr] {command} message"
    Match match = ACK_REGEX.Match(line);
    if (match.Groups.Count != 5)
        throw new InvalidDataException("Error response not as expected");
    return new MpdResponse(
        int.Parse(match.Result("${code}")),
        int.Parse(match.Result("${nr}")),
        match.Result("${command}"),
        match.Result("${message}"),
        new ReadOnlyCollection<string>(ret)
        );
}
// Drops every per-connection object so that Connected reports false and
// a subsequent Connect() starts from a clean slate.
private void ClearConnectionFields()
{
    this.writer = null;
    this.reader = null;
    this.networkStream = null;
    this.tcpClient = null;
    this.version = null;
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Composition;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CodeFixes.AddImport;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Packaging;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.SymbolSearch;
using Roslyn.Utilities;
using static Microsoft.CodeAnalysis.CSharp.CodeFixes.AddImport.AddImportDiagnosticIds;
namespace Microsoft.CodeAnalysis.CSharp.CodeFixes.AddImport
{
/// <summary>
/// The compiler diagnostic IDs for which the add-import fixer can offer fixes.
/// </summary>
internal static class AddImportDiagnosticIds
{
    /// <summary>
    /// name does not exist in context
    /// </summary>
    public const string CS0103 = nameof(CS0103);

    /// <summary>
    /// type or namespace could not be found
    /// </summary>
    public const string CS0246 = nameof(CS0246);

    /// <summary>
    /// wrong number of type args
    /// </summary>
    public const string CS0305 = nameof(CS0305);

    /// <summary>
    /// type does not contain a definition of method or extension method
    /// </summary>
    public const string CS1061 = nameof(CS1061);

    /// <summary>
    /// cannot find implementation of query pattern
    /// </summary>
    public const string CS1935 = nameof(CS1935);

    /// <summary>
    /// The non-generic type 'A' cannot be used with type arguments
    /// </summary>
    public const string CS0308 = nameof(CS0308);

    /// <summary>
    /// 'A' is inaccessible due to its protection level
    /// </summary>
    public const string CS0122 = nameof(CS0122);

    /// <summary>
    /// The using alias 'A' cannot be used with type arguments
    /// </summary>
    public const string CS0307 = nameof(CS0307);

    /// <summary>
    /// 'A' is not an attribute class
    /// </summary>
    public const string CS0616 = nameof(CS0616);

    /// <summary>
    /// No overload for method 'X' takes 'N' arguments
    /// </summary>
    public const string CS1501 = nameof(CS1501);

    /// <summary>
    /// cannot convert from 'int' to 'string'
    /// </summary>
    public const string CS1503 = nameof(CS1503);

    /// <summary>
    /// XML comment on 'construct' has syntactically incorrect cref attribute 'name'
    /// </summary>
    public const string CS1574 = nameof(CS1574);

    /// <summary>
    /// Invalid type for parameter 'parameter number' in XML comment cref attribute
    /// </summary>
    public const string CS1580 = nameof(CS1580);

    /// <summary>
    /// Invalid return type in XML comment cref attribute
    /// </summary>
    public const string CS1581 = nameof(CS1581);

    /// <summary>
    /// XML comment has syntactically incorrect cref attribute
    /// </summary>
    public const string CS1584 = nameof(CS1584);

    /// <summary>
    /// Type 'X' does not contain a valid extension method accepting 'Y'
    /// </summary>
    public const string CS1929 = nameof(CS1929);

    /// <summary>
    /// Cannot convert method group 'X' to non-delegate type 'Y'. Did you intend to invoke the method?
    /// </summary>
    public const string CS0428 = nameof(CS0428);

    /// <summary>
    /// There is no argument given that corresponds to the required formal parameter 'X' of 'Y'
    /// </summary>
    public const string CS7036 = nameof(CS7036);

    // FIX: these were mutable public static fields (CA2211); marking them
    // readonly is backward-compatible for every reader.
    public static readonly ImmutableArray<string> FixableTypeIds =
        ImmutableArray.Create(
            CS0103,
            CS0246,
            CS0305,
            CS0308,
            CS0122,
            CS0307,
            CS0616,
            CS1580,
            CS1581);

    public static readonly ImmutableArray<string> FixableDiagnosticIds =
        FixableTypeIds.Concat(ImmutableArray.Create(
            CS1061,
            CS1935,
            CS1501,
            CS1503,
            CS1574,
            CS1584,
            CS1929,
            CS0428,
            CS7036));
}
[ExportCodeFixProvider(LanguageNames.CSharp, Name = PredefinedCodeFixProviderNames.AddUsingOrImport), Shared]
internal class CSharpAddImportCodeFixProvider : AbstractAddImportCodeFixProvider<SimpleNameSyntax>
{
// All diagnostic IDs this provider can offer an add-import fix for.
public override ImmutableArray<string> FixableDiagnosticIds => AddImportDiagnosticIds.FixableDiagnosticIds;

public CSharpAddImportCodeFixProvider()
{
}

/// <summary>For testing purposes only (so that tests can pass in mock values)</summary>
internal CSharpAddImportCodeFixProvider(
    IPackageInstallerService installerService,
    ISymbolSearchService symbolSearchService)
    : base(installerService, symbolSearchService)
{
}
// A using directive can be added here unless the operation was cancelled
// or the position does not admit using directives at all.
protected override bool CanAddImport(SyntaxNode node, CancellationToken cancellationToken)
{
    return !cancellationToken.IsCancellationRequested
        && node.CanAddUsingDirectives(cancellationToken);
}
/// <summary>
/// Determines whether an add-import fix makes sense for a method-related
/// diagnostic at <paramref name="node"/>, and if so extracts the simple
/// name that should be looked up (typically an extension method name).
/// </summary>
/// <param name="diagnostic">The compiler diagnostic being fixed.</param>
/// <param name="syntaxFacts">Language service used for the final member-access check.</param>
/// <param name="node">The syntax node the diagnostic is reported on.</param>
/// <param name="nameNode">Receives the simple name to search for, or null.</param>
/// <returns>True if an import could plausibly fix the diagnostic.</returns>
protected override bool CanAddImportForMethod(
    Diagnostic diagnostic, ISyntaxFactsService syntaxFacts, SyntaxNode node, out SimpleNameSyntax nameNode)
{
    nameNode = null;
    // Each diagnostic ID reports on a slightly different node shape, so
    // first normalize `node` down to the relevant simple name.
    switch (diagnostic.Id)
    {
        case CS7036:
        case CS0428:
        case CS1061:
            if (node.IsKind(SyntaxKind.ConditionalAccessExpression))
            {
                // a?.B() -> drill into the part after the '?'.
                node = (node as ConditionalAccessExpressionSyntax).WhenNotNull;
            }
            else if (node.IsKind(SyntaxKind.MemberBindingExpression))
            {
                node = (node as MemberBindingExpressionSyntax).Name;
            }
            else if (node.Parent.IsKind(SyntaxKind.CollectionInitializerExpression))
            {
                // Collection initializers need an extension Add method;
                // no specific name node to report.
                return true;
            }
            break;
        case CS0122:
        case CS1501:
            if (node is SimpleNameSyntax)
            {
                break;
            }
            else if (node is MemberBindingExpressionSyntax)
            {
                node = (node as MemberBindingExpressionSyntax).Name;
            }
            break;
        case CS1929:
            // Reported on the receiver; the method name lives in the
            // enclosing member access or conditional access.
            var memberAccessName = (node.Parent as MemberAccessExpressionSyntax)?.Name;
            var conditionalAccessName = (((node.Parent as ConditionalAccessExpressionSyntax)?.WhenNotNull as InvocationExpressionSyntax)?.Expression as MemberBindingExpressionSyntax)?.Name;
            if (memberAccessName == null && conditionalAccessName == null)
            {
                return false;
            }
            node = memberAccessName ?? conditionalAccessName;
            break;
        case CS1503:
            //// look up its corresponding method name
            var parent = node.GetAncestor<InvocationExpressionSyntax>();
            if (parent == null)
            {
                return false;
            }
            var method = parent.Expression as MemberAccessExpressionSyntax;
            if (method != null)
            {
                node = method.Name;
            }
            break;
        default:
            return false;
    }

    nameNode = node as SimpleNameSyntax;
    // The name must be the member part of a dotted/bound access
    // (something.Name or ?.Name) for an extension-method import to help.
    if (!nameNode.IsParentKind(SyntaxKind.SimpleMemberAccessExpression) &&
        !nameNode.IsParentKind(SyntaxKind.MemberBindingExpression))
    {
        return false;
    }

    // ...but not itself be further dereferenced or indexed: the failing
    // member must be the last link in the chain.
    var memberAccess = nameNode.Parent as MemberAccessExpressionSyntax;
    var memberBinding = nameNode.Parent as MemberBindingExpressionSyntax;
    if (memberAccess.IsParentKind(SyntaxKind.SimpleMemberAccessExpression) ||
        memberAccess.IsParentKind(SyntaxKind.ElementAccessExpression) ||
        memberBinding.IsParentKind(SyntaxKind.SimpleMemberAccessExpression) ||
        memberBinding.IsParentKind(SyntaxKind.ElementAccessExpression))
    {
        return false;
    }

    if (!syntaxFacts.IsMemberAccessExpressionName(node))
    {
        return false;
    }

    return true;
}
// C# has no diagnostic for which importing a namespace-as-namespace
// helps, so this language never offers that flavor of fix.
protected override bool CanAddImportForNamespace(Diagnostic diagnostic, SyntaxNode node, out SimpleNameSyntax nameNode)
{
    nameNode = null;
    return false;
}
// Offers an import (e.g. System.Linq) when the query pattern failed to
// bind (CS1935) inside a real query expression.
protected override bool CanAddImportForQuery(Diagnostic diagnostic, SyntaxNode node)
{
    if (diagnostic.Id != CS1935)
    {
        return false;
    }

    // Walk outward looking for a query expression that is not merely the
    // body of a query continuation ("into" clause).
    foreach (var current in node.AncestorsAndSelf())
    {
        if (current is QueryExpressionSyntax && !(current.Parent is QueryContinuationSyntax))
        {
            return true;
        }
    }

    return false;
}
/// <summary>
/// Determines whether an add-import fix makes sense for a type-related
/// diagnostic at <paramref name="node"/>, and extracts the standalone
/// type name to search for.
/// </summary>
/// <param name="diagnostic">The compiler diagnostic being fixed.</param>
/// <param name="node">The syntax node the diagnostic is reported on.</param>
/// <param name="nameNode">Receives the simple name to search for, or null.</param>
/// <returns>True if an import could plausibly fix the diagnostic.</returns>
protected override bool CanAddImportForType(Diagnostic diagnostic, SyntaxNode node, out SimpleNameSyntax nameNode)
{
    nameNode = null;
    switch (diagnostic.Id)
    {
        case CS0103:
        case CS0246:
        case CS0305:
        case CS0308:
        case CS0122:
        case CS0307:
        case CS0616:
        case CS1580:
        case CS1581:
            break;
        case CS1574:
        case CS1584:
            // For doc-comment cref errors the interesting name is the
            // container part of the qualified cref.
            var cref = node as QualifiedCrefSyntax;
            if (cref != null)
            {
                node = cref.Container;
            }
            break;
        default:
            return false;
    }

    return TryFindStandaloneType(node, out nameNode);
}
// For a dotted name the part that may need an import is its left-most
// simple name (e.g. "Foo" in "Foo.Bar.Baz"); extract it and check that
// it is used in standalone-type position.
private static bool TryFindStandaloneType(SyntaxNode node, out SimpleNameSyntax nameNode)
{
    var qualified = node as QualifiedNameSyntax;
    if (qualified != null)
    {
        nameNode = GetLeftMostSimpleName(qualified);
    }
    else
    {
        nameNode = node as SimpleNameSyntax;
    }

    return nameNode.LooksLikeStandaloneTypeName();
}
// Descends through the chain of `Left` nodes of a qualified name until a
// simple name is reached; returns null if the chain ends in some other
// name form (e.g. an alias-qualified name).
private static SimpleNameSyntax GetLeftMostSimpleName(QualifiedNameSyntax qn)
{
    for (var current = qn; current != null; current = current.Left as QualifiedNameSyntax)
    {
        if (current.Left is SimpleNameSyntax)
        {
            return (SimpleNameSyntax)current.Left;
        }
    }

    return null;
}
// Returns the set of namespaces already imported at this position, so
// that existing usings are not suggested again.  The cancellation token
// is unused; the semantic model already has everything it needs.
protected override ISet<INamespaceSymbol> GetNamespacesInScope(
    SemanticModel semanticModel,
    SyntaxNode node,
    CancellationToken cancellationToken)
{
    return semanticModel.GetUsingNamespacesInScope(node);
}
/// <summary>
/// For a failed query expression, returns the type of the 'from' source
/// (the type an extension-method import would need to target), or null
/// when every clause of the query already bound successfully.
/// </summary>
protected override ITypeSymbol GetQueryClauseInfo(
    SemanticModel semanticModel,
    SyntaxNode node,
    CancellationToken cancellationToken)
{
    var query = node.AncestorsAndSelf().OfType<QueryExpressionSyntax>().First();

    // Check every part of the query; if all of them bound, there is
    // nothing for an import to fix.
    if (InfoBoundSuccessfully(semanticModel.GetQueryClauseInfo(query.FromClause, cancellationToken)))
    {
        return null;
    }

    foreach (var clause in query.Body.Clauses)
    {
        if (InfoBoundSuccessfully(semanticModel.GetQueryClauseInfo(clause, cancellationToken)))
        {
            return null;
        }
    }

    if (InfoBoundSuccessfully(semanticModel.GetSymbolInfo(query.Body.SelectOrGroup, cancellationToken)))
    {
        return null;
    }

    // Something failed to bind: report the source type of the query.
    var fromClause = query.FromClause;
    return semanticModel.GetTypeInfo(fromClause.Expression, cancellationToken).Type;
}
// True if the symbol info resolved to an actual symbol.
private bool InfoBoundSuccessfully(SymbolInfo symbolInfo)
{
    return InfoBoundSuccessfully(symbolInfo.Symbol);
}

// True if the query clause's operation (e.g. Select/Where) bound.
private bool InfoBoundSuccessfully(QueryClauseInfo semanticInfo)
{
    return InfoBoundSuccessfully(semanticInfo.OperationInfo);
}

private static bool InfoBoundSuccessfully(ISymbol operation)
{
    // Unreduce first so a query operator bound to a reduced extension
    // method still counts as a success.
    operation = operation.GetOriginalUnreducedDefinition();
    return operation != null;
}
// Renders the fix title for a namespace, e.g. ["System","Linq"] ->
// "using System.Linq;".
protected override string GetDescription(IReadOnlyList<string> nameParts)
{
    return "using " + string.Join(".", nameParts) + ";";
}
/// <summary>
/// Produces the user-visible title of the fix for the given symbol:
/// either an "extern alias ...;", a "using ...;" or a "using static ...;"
/// line, or null when no directive would actually be added.
/// </summary>
protected override string TryGetDescription(
    INamespaceOrTypeSymbol namespaceOrTypeSymbol,
    SemanticModel semanticModel,
    SyntaxNode contextNode, bool checkForExistingUsing)
{
    var root = GetCompilationUnitSyntaxNode(contextNode);

    // See if this is a reference to a type from a reference that has a specific alias
    // associated with it. If that extern alias hasn't already been brought into scope
    // then add that one.
    var externAlias = TryGetExternAliasDirective(
        namespaceOrTypeSymbol, semanticModel, contextNode,
        checkForExistingExternAlias: true);
    if (externAlias != null)
    {
        return $"extern alias {externAlias.Identifier.ValueText};";
    }

    var usingDirective = TryGetUsingDirective(
        namespaceOrTypeSymbol, semanticModel, root, contextNode);

    if (usingDirective != null)
    {
        // "using static" is needed when importing the members of a type
        // rather than a namespace.
        var displayString = namespaceOrTypeSymbol.ToDisplayString();
        return namespaceOrTypeSymbol.IsKind(SymbolKind.Namespace)
            ? $"using {displayString};"
            : $"using static {displayString};";
    }

    return null;
}
// True when an equivalent using directive is already present, checking
// the innermost namespace with usings first and falling back to the
// compilation unit's top-level usings.
private bool HasExistingUsingDirective(
    CompilationUnitSyntax root,
    NamespaceDeclarationSyntax namespaceToAddTo,
    UsingDirectiveSyntax usingDirective)
{
    var usings = namespaceToAddTo?.Usings ?? root.Usings;
    return usings.Any(existingUsing => SyntaxFactory.AreEquivalent(usingDirective, existingUsing));
}
/// <summary>
/// Adds the import (extern alias and/or using directive) needed for
/// <paramref name="namespaceOrTypeSymbol"/> and returns the updated document.
/// </summary>
protected override async Task<Document> AddImportAsync(
    SyntaxNode contextNode,
    INamespaceOrTypeSymbol namespaceOrTypeSymbol,
    Document document,
    bool placeSystemNamespaceFirst,
    CancellationToken cancellationToken)
{
    var root = GetCompilationUnitSyntaxNode(contextNode, cancellationToken);
    var newRoot = await AddImportWorkerAsync(document, root, contextNode, namespaceOrTypeSymbol, placeSystemNamespaceFirst, cancellationToken).ConfigureAwait(false);
    return document.WithSyntaxRoot(newRoot);
}
// Computes the new compilation unit with the required extern alias and/or
// using directive added.  Directives are placed in the innermost
// namespace that already contains usings when one exists, otherwise at
// the top of the file.
private async Task<CompilationUnitSyntax> AddImportWorkerAsync(
    Document document, CompilationUnitSyntax root, SyntaxNode contextNode,
    INamespaceOrTypeSymbol namespaceOrTypeSymbol,
    bool placeSystemNamespaceFirst, CancellationToken cancellationToken)
{
    var semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
    var firstContainingNamespaceWithUsings = GetFirstContainingNamespaceWithUsings(contextNode);
    // namespaceToUpdate is rewritten in place by the Add* helpers below.
    var namespaceToUpdate = firstContainingNamespaceWithUsings;

    var externAliasDirective = TryGetExternAliasDirective(
        namespaceOrTypeSymbol, semanticModel, contextNode,
        checkForExistingExternAlias: true);

    var usingDirective = TryGetUsingDirective(
        namespaceOrTypeSymbol, semanticModel, root, contextNode);

    if (externAliasDirective != null)
    {
        AddExterns(ref root, ref namespaceToUpdate, externAliasDirective);
    }

    if (usingDirective != null)
    {
        AddUsingDirective(ref root, ref namespaceToUpdate,
            placeSystemNamespaceFirst, usingDirective);
    }

    // When directives went into a namespace, splice the updated namespace
    // back into the (possibly untouched) root.
    return firstContainingNamespaceWithUsings != null
        ? root.ReplaceNode(firstContainingNamespaceWithUsings, namespaceToUpdate)
        : root;
}
// Adds the using directive either to the namespace being updated (when
// there is one) or to the compilation unit itself.
private void AddUsingDirective(
    ref CompilationUnitSyntax root,
    ref NamespaceDeclarationSyntax namespaceToUpdate,
    bool placeSystemNamespaceFirst,
    UsingDirectiveSyntax usingDirective)
{
    IList<UsingDirectiveSyntax> directives = new[] { usingDirective };

    if (namespaceToUpdate == null)
    {
        root = root.AddUsingDirectives(
            directives, placeSystemNamespaceFirst);
    }
    else
    {
        namespaceToUpdate = namespaceToUpdate.AddUsingDirectives(
            directives, placeSystemNamespaceFirst);
    }
}
// Adds the extern alias directive either to the namespace being updated
// (when there is one) or to the compilation unit itself.
private void AddExterns(
    ref CompilationUnitSyntax root,
    ref NamespaceDeclarationSyntax namespaceToUpdate,
    ExternAliasDirectiveSyntax externAliasDirective)
{
    if (namespaceToUpdate == null)
    {
        root = root.AddExterns(externAliasDirective);
    }
    else
    {
        namespaceToUpdate = namespaceToUpdate.AddExterns(externAliasDirective);
    }
}
/// <summary>
/// Adds a plain using directive built from raw namespace parts.  Used on
/// the nuget-package path, where the namespace does not yet bind.
/// </summary>
protected override Task<Document> AddImportAsync(
    SyntaxNode contextNode, IReadOnlyList<string> namespaceParts, Document document, bool placeSystemNamespaceFirst, CancellationToken cancellationToken)
{
    var root = GetCompilationUnitSyntaxNode(contextNode, cancellationToken);

    // Suppress diagnostics on the import we create. Because we only get here when we are
    // adding a nuget package, it is certainly the case that in the preview this will not
    // bind properly. It will look silly to show such an error, so we just suppress things.
    var simpleUsingDirective = SyntaxFactory.UsingDirective(
        CreateNameSyntax(namespaceParts, namespaceParts.Count - 1)).WithAdditionalAnnotations(
        SuppressDiagnosticsAnnotation.Create());

    // If we have an existing using with this name then don't bother adding this new using.
    if (root.Usings.Any(u => u.IsEquivalentTo(simpleUsingDirective, topLevel: false)))
    {
        return Task.FromResult(document);
    }

    var newRoot = root.AddUsingDirective(
        simpleUsingDirective, contextNode, placeSystemNamespaceFirst,
        Formatter.Annotation);
    return Task.FromResult(document.WithSyntaxRoot(newRoot));
}
// Recursively builds a left-associated qualified name from
// namespaceParts[0..index], escaping any part that collides with a C#
// keyword (e.g. "if" -> "@if").
private NameSyntax CreateNameSyntax(IReadOnlyList<string> namespaceParts, int index)
{
    var part = namespaceParts[index];
    var escaped = SyntaxFacts.GetKeywordKind(part) == SyntaxKind.None
        ? part
        : "@" + part;

    var namePiece = SyntaxFactory.IdentifierName(escaped);
    if (index == 0)
    {
        return namePiece;
    }

    return SyntaxFactory.QualifiedName(CreateNameSyntax(namespaceParts, index - 1), namePiece);
}
// Builds an "extern alias X;" directive for the symbol's containing
// assembly, or returns null when no (new) alias is required.
private static ExternAliasDirectiveSyntax TryGetExternAliasDirective(
    INamespaceOrTypeSymbol namespaceSymbol,
    SemanticModel semanticModel,
    SyntaxNode contextNode,
    bool checkForExistingExternAlias)
{
    string externAliasString;
    if (!TryGetExternAliasString(namespaceSymbol, semanticModel, contextNode, checkForExistingExternAlias, out externAliasString))
    {
        return null;
    }

    return SyntaxFactory.ExternAliasDirective(SyntaxFactory.Identifier(externAliasString))
        .WithAdditionalAnnotations(Formatter.Annotation);
}
/// <summary>
/// Builds the using (or "using static") directive needed for the symbol,
/// or returns null when an equivalent directive already exists.
/// </summary>
private UsingDirectiveSyntax TryGetUsingDirective(
    INamespaceOrTypeSymbol namespaceOrTypeSymbol,
    SemanticModel semanticModel,
    CompilationUnitSyntax root,
    SyntaxNode contextNode)
{
    var namespaceToAddTo = GetFirstContainingNamespaceWithUsings(contextNode);
    var usingDirectives = namespaceToAddTo?.Usings ?? root.Usings;

    var nameSyntax = namespaceOrTypeSymbol.GenerateNameSyntax();

    // Replace the alias that GenerateTypeSyntax added if we want this to be looked
    // up off of an extern alias.
    var externAliasDirective = TryGetExternAliasDirective(
        namespaceOrTypeSymbol, semanticModel, contextNode,
        checkForExistingExternAlias: false);

    var externAlias = externAliasDirective?.Identifier.ValueText;
    if (externAlias != null)
    {
        nameSyntax = AddOrReplaceAlias(nameSyntax, SyntaxFactory.IdentifierName(externAlias));
    }
    else
    {
        // The name we generated will have the global:: alias on it. We only need
        // that if the name of our symbol is actually ambiguous in this context.
        // If so, keep global:: on it, otherwise remove it.
        //
        // Note: doing this has a couple of benefits. First, it's easy for us to see
        // if we have an existing using for this with the same syntax. Second,
        // it's easy to sort usings properly. If "global::" was attached to the
        // using directive, then it would make both of those operations more difficult
        // to achieve.
        nameSyntax = RemoveGlobalAliasIfUnnecessary(semanticModel, nameSyntax, namespaceToAddTo);
    }

    var usingDirective = SyntaxFactory.UsingDirective(nameSyntax)
        .WithAdditionalAnnotations(Formatter.Annotation);

    // Nothing to add if an equivalent using is already in effect here.
    if (HasExistingUsingDirective(root, namespaceToAddTo, usingDirective))
    {
        return null;
    }

    // Importing a type's members (rather than a namespace) requires the
    // "static" keyword.
    return namespaceOrTypeSymbol.IsKind(SymbolKind.Namespace)
        ? usingDirective
        : usingDirective.WithStaticKeyword(SyntaxFactory.Token(SyntaxKind.StaticKeyword));
}
// Strips a leading "global::" from the generated name unless keeping it
// is necessary to disambiguate against a member of a containing namespace.
private NameSyntax RemoveGlobalAliasIfUnnecessary(
    SemanticModel semanticModel,
    NameSyntax nameSyntax,
    NamespaceDeclarationSyntax namespaceToAddTo)
{
    var aliasQualifiedName = nameSyntax.DescendantNodesAndSelf()
                                       .OfType<AliasQualifiedNameSyntax>()
                                       .FirstOrDefault();
    if (aliasQualifiedName != null)
    {
        var rightOfAliasName = aliasQualifiedName.Name.Identifier.ValueText;
        if (!ConflictsWithExistingMember(semanticModel, namespaceToAddTo, rightOfAliasName))
        {
            // Strip off the alias.
            return nameSyntax.ReplaceNode(aliasQualifiedName, aliasQualifiedName.Name);
        }
    }

    return nameSyntax;
}
// True when any namespace enclosing the insertion point declares a member
// with the given name, in which case the "global::" qualifier must stay.
private bool ConflictsWithExistingMember(
    SemanticModel semanticModel,
    NamespaceDeclarationSyntax namespaceToAddTo,
    string rightOfAliasName)
{
    if (namespaceToAddTo != null)
    {
        var containingNamespaceSymbol = semanticModel.GetDeclaredSymbol(namespaceToAddTo);

        // Walk outward through every containing namespace.
        while (containingNamespaceSymbol != null && !containingNamespaceSymbol.IsGlobalNamespace)
        {
            if (containingNamespaceSymbol.GetMembers(rightOfAliasName).Any())
            {
                // A containing namespace had this name in it. We need to stay globally qualified.
                return true;
            }

            containingNamespaceSymbol = containingNamespaceSymbol.ContainingNamespace;
        }
    }

    // Didn't conflict with anything. We should remove the global:: alias.
    return false;
}
// Attaches the given alias to the left-most part of the name, replacing
// any alias that is already there.
private NameSyntax AddOrReplaceAlias(
    NameSyntax nameSyntax, IdentifierNameSyntax alias)
{
    if (nameSyntax is SimpleNameSyntax)
    {
        // A bare simple name: just qualify it with the alias.
        return SyntaxFactory.AliasQualifiedName(alias, (SimpleNameSyntax)nameSyntax);
    }

    if (nameSyntax is QualifiedNameSyntax)
    {
        // Dotted name: the alias belongs on the left-most component.
        var qualifiedName = (QualifiedNameSyntax)nameSyntax;
        return qualifiedName.WithLeft(AddOrReplaceAlias(qualifiedName.Left, alias));
    }

    // Already alias-qualified: swap in the new alias.
    return ((AliasQualifiedNameSyntax)nameSyntax).WithAlias(alias);
}
// Finds the innermost enclosing namespace declaration that already has
// using directives; starts the search above any using directive the
// context node happens to be inside of.
private NamespaceDeclarationSyntax GetFirstContainingNamespaceWithUsings(SyntaxNode contextNode)
{
    var usingDirective = contextNode.GetAncestor<UsingDirectiveSyntax>();
    var searchStart = usingDirective != null ? usingDirective.Parent : contextNode;

    return searchStart.GetAncestors<NamespaceDeclarationSyntax>()
                      .FirstOrDefault(n => n.Usings.Count > 0);
}
/// <summary>
/// Finds the extern alias (if any) under which the symbol's containing
/// assembly is referenced.  Returns false when the assembly has no
/// metadata reference or only the implicit "global" alias.
/// </summary>
private static bool TryGetExternAliasString(
    INamespaceOrTypeSymbol namespaceSymbol,
    SemanticModel semanticModel,
    SyntaxNode contextNode,
    bool checkForExistingExternAlias,
    out string externAliasString)
{
    externAliasString = null;
    var metadataReference = semanticModel.Compilation.GetMetadataReference(namespaceSymbol.ContainingAssembly);
    if (metadataReference == null)
    {
        return false;
    }

    // Only real aliases matter; "global" means the reference is visible
    // without any extern alias.  (The previous version enumerated and
    // filtered the alias list twice; filter once.)
    var aliases = metadataReference.Properties.Aliases
        .Where(a => a != MetadataReferenceProperties.GlobalAlias)
        .ToImmutableArray();
    if (aliases.IsEmpty)
    {
        return false;
    }

    // Just default to using the first alias we see for this symbol.
    externAliasString = aliases[0];
    return !checkForExistingExternAlias || ShouldAddExternAlias(externAliasString, contextNode);
}
// An extern alias only needs to be added when no enclosing extern alias
// directive already declares the same alias.
private static bool ShouldAddExternAlias(string alias, SyntaxNode contextNode)
{
    return contextNode.GetEnclosingExternAliasDirectives()
        .All(directive => directive.Identifier.ValueText != alias);
}
// Root of the syntax tree containing contextNode, as a compilation unit.
private static CompilationUnitSyntax GetCompilationUnitSyntaxNode(SyntaxNode contextNode, CancellationToken cancellationToken = default(CancellationToken))
{
    return (CompilationUnitSyntax)contextNode.SyntaxTree.GetRoot(cancellationToken);
}
/// <summary>
/// Determines whether the candidate extension method could apply to the
/// receiver of the failing member access (or, for collection
/// initializers, to the object being created).
/// </summary>
protected override bool IsViableExtensionMethod(IMethodSymbol method, SyntaxNode expression, SemanticModel semanticModel, ISyntaxFactsService syntaxFacts, CancellationToken cancellationToken)
{
    var leftExpression = syntaxFacts.GetExpressionOfMemberAccessExpression(expression);
    if (leftExpression == null)
    {
        if (expression.IsKind(SyntaxKind.CollectionInitializerExpression))
        {
            // {1, 2} initializer: the "receiver" of the extension Add
            // method is the object being created.
            leftExpression = expression.GetAncestor<ObjectCreationExpressionSyntax>();
        }
        else
        {
            return false;
        }
    }

    var semanticInfo = semanticModel.GetTypeInfo(leftExpression, cancellationToken);
    var leftExpressionType = semanticInfo.Type;

    // Defer to the language-independent receiver-type compatibility check.
    return IsViableExtensionMethod(method, leftExpressionType);
}
// True when the node sits inside a collection initializer that belongs
// to an object creation -- the spot where an extension Add method (and
// hence an import for it) could help.
internal override bool IsAddMethodContext(SyntaxNode node, SemanticModel semanticModel)
{
    return node.Parent.IsKind(SyntaxKind.CollectionInitializerExpression)
        && node.GetAncestor<ObjectCreationExpressionSyntax>() != null;
}
}
}
| |
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus SDK License Version 3.4.1 (the "License");
you may not use the Oculus SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEditor;
using System.IO;
/// <summary>
/// From the selected transform, takes a cubemap screenshot that can be submitted with the application
/// as a screenshot (or additionally used for reflection shaders).
/// </summary>
class OVRScreenshotWizard : ScriptableWizard
{
// Output image format for the flattened screenshot texture.
public enum TexFormat
{
    JPEG, // 512kb at 1k x 1k resolution vs
    PNG, // 5.3mb
}

// What artifacts the wizard produces: a flattened screenshot image, a
// Unity .cubemap asset, or both.
public enum SaveMode {
    SaveCubemapScreenshot,
    SaveUnityCubemap,
    SaveBoth,
}

// Game object whose position the cubemap is rendered from.
public GameObject renderFrom = null;
// Edge length in pixels of each cubemap face.
public int size = 2048;
public SaveMode saveMode = SaveMode.SaveUnityCubemap;
// Asset folder the .cubemap is written to; must start with "Assets".
public string cubeMapFolder = "Assets/Textures/Cubemaps";
public TexFormat textureFormat = TexFormat.PNG;
/// <summary>
/// Validates the user's input. Called by Unity whenever a wizard field
/// changes; the Create button stays disabled until a source object is set.
/// </summary>
void OnWizardUpdate()
{
    helpString = "Select a game object positioned in the place where\nyou want to render the cubemap screenshot from: ";
    isValid = renderFrom != null;
}
/// <summary>
/// Create the asset path if it is not available.
/// Assuming the newFolderPath is stated with "Assets", which is a requirement.
/// </summary>
/// <param name="newFolderPath">Slash-separated folder path rooted at "Assets".</param>
/// <returns>True on success; false when the path is not rooted at "Assets".</returns>
static bool CreateAssetPath( string newFolderPath )
{
    // Guard against pathological input: at most 32 path segments.
    const int maxFoldersCount = 32;
    string currentPath;
    string[] pathFolders;

    pathFolders = newFolderPath.Split (new char[]{ '/' }, maxFoldersCount);

    if (!string.Equals ("Assets", pathFolders [0], System.StringComparison.OrdinalIgnoreCase))
    {
        Debug.LogError( "Folder path has to be started with \" Assets \" " );
        return false;
    }

    // Create each missing folder one level at a time, since
    // AssetDatabase.CreateFolder only creates direct children.
    currentPath = "Assets";
    for (int i = 1; i < pathFolders.Length; i++)
    {
        if (!string.IsNullOrEmpty(pathFolders[i]))
        {
            string newPath = currentPath + "/" + pathFolders[i];
            if (!AssetDatabase.IsValidFolder(newPath))
                AssetDatabase.CreateFolder(currentPath, pathFolders[i]);
            currentPath = newPath;
        }
    }

    Debug.Log( "Created path: " + currentPath );
    return true;
}
/// <summary>
/// Renders the cubemap. Called by Unity when the wizard's Create button
/// is pressed: renders from the selected object's position into a new
/// vr_screenshot_NN.cubemap asset and/or a flattened screenshot image,
/// depending on saveMode.
/// </summary>
void OnWizardCreate()
{
    if ( !AssetDatabase.IsValidFolder( cubeMapFolder ) )
    {
        if (!CreateAssetPath(cubeMapFolder))
        {
            Debug.LogError( "Created path failed: " + cubeMapFolder );
            return;
        }
    }

    // A camera is required for rendering; borrow an existing one on the
    // target object (restoring its enabled state afterwards) or attach a
    // temporary one that is destroyed when done.
    bool existingCamera = true;
    bool existingCameraStateSave = true;
    Camera camera = renderFrom.GetComponent<Camera>();
    if (camera == null)
    {
        camera = renderFrom.AddComponent<Camera>();
        camera.farClipPlane = 10000f;
        existingCamera = false;
    }
    else
    {
        existingCameraStateSave = camera.enabled;
        camera.enabled = true;
    }

    // find the last screenshot saved
    if (cubeMapFolder[cubeMapFolder.Length-1] != '/')
    {
        cubeMapFolder += "/";
    }
    int idx = 0;
    string[] fileNames = Directory.GetFiles(cubeMapFolder);
    foreach(string fileName in fileNames)
    {
        if (!fileName.ToLower().EndsWith(".cubemap"))
        {
            continue;
        }
        // Extract NN from "<folder>vr_screenshot_NN.cubemap" and track
        // the highest index so the next file gets NN+1.
        string temp = fileName.Replace(cubeMapFolder + "vr_screenshot_", string.Empty);
        temp = temp.Replace(".cubemap", string.Empty);
        int tempIdx = 0;
        if (int.TryParse( temp, out tempIdx ))
        {
            if (tempIdx > idx)
            {
                idx = tempIdx;
            }
        }
    }

    string pathName = string.Format("{0}vr_screenshot_{1}.cubemap", cubeMapFolder, (++idx).ToString("d2"));
    Cubemap cubemap = new Cubemap(size, TextureFormat.RGB24, false);

    // render into cubemap
    if ((camera != null) && (cubemap != null))
    {
        // set up cubemap defaults
        OVRCubemapCapture.RenderIntoCubemap(camera, cubemap);

        // Restore the borrowed camera, or remove the temporary one.
        if (existingCamera)
        {
            camera.enabled = existingCameraStateSave;
        }
        else
        {
            DestroyImmediate(camera);
        }

        // generate a regular texture as well?
        if ( ( saveMode == SaveMode.SaveCubemapScreenshot ) || ( saveMode == SaveMode.SaveBoth ) )
        {
            GenerateTexture(cubemap, pathName);
        }

        if ( ( saveMode == SaveMode.SaveUnityCubemap ) || ( saveMode == SaveMode.SaveBoth ) )
        {
            Debug.Log( "Saving: " + pathName );
            // by default the unity cubemap isn't saved
            AssetDatabase.CreateAsset( cubemap, pathName );
            // reimport as necessary
            AssetDatabase.SaveAssets();
            // select it in the project tree so developers can find it
            EditorGUIUtility.PingObject( cubemap );
            Selection.activeObject = cubemap;
        }
        AssetDatabase.Refresh();
    }
}
/// <summary>
/// Generates a NPOT 6x1 cubemap in the following format PX NX PY NY PZ NZ
/// and lets the user pick where to save it via a file panel.
/// </summary>
/// <param name="cubemap">The rendered cubemap to flatten and encode.</param>
/// <param name="pathName">The .cubemap asset path; reused to suggest a file name.</param>
void GenerateTexture(Cubemap cubemap, string pathName)
{
    // Encode the texture and save it to disk
    pathName = pathName.Replace(".cubemap", (textureFormat == TexFormat.PNG) ? ".png" : ".jpg" ).ToLower();
    // Strip the folder so only the file name is suggested in the panel.
    pathName = pathName.Replace( cubeMapFolder.ToLower(), "" );
    string format = textureFormat.ToString();
    string fullPath = EditorUtility.SaveFilePanel( string.Format( "Save Cubemap Screenshot as {0}", format ), "", pathName, format.ToLower() );
    if ( !string.IsNullOrEmpty( fullPath ) )
    {
        Debug.Log( "Saving: " + fullPath );
        OVRCubemapCapture.SaveCubemapCapture(cubemap, fullPath);
    }
}
/// <summary>
/// Unity Editor menu option to take a screenshot.  Pre-populates the
/// wizard with the current selection, falling back to the main camera.
/// </summary>
[MenuItem("Oculus/Tools/OVR Screenshot Wizard", false, 100000)]
static void TakeOVRScreenshot()
{
    OVRScreenshotWizard wizard = ScriptableWizard.DisplayWizard<OVRScreenshotWizard>("OVR Screenshot Wizard", "Render Cubemap");
    if (wizard != null)
    {
        if (Selection.activeGameObject != null)
        {
            wizard.renderFrom = Selection.activeGameObject;
        }
        else
        {
            // BUGFIX: Camera.main is null when no enabled camera is
            // tagged "MainCamera"; the old code then threw a
            // NullReferenceException.  Leave renderFrom null instead so
            // the wizard simply starts invalid.
            Camera mainCamera = Camera.main;
            wizard.renderFrom = (mainCamera != null) ? mainCamera.gameObject : null;
        }
        wizard.isValid = (wizard.renderFrom != null);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Media;
using Box2D.XNA;
//using C3.XNA;
using Configuration;
using System.Text;
using System.Diagnostics;
using Squircle.Physics;
namespace Squircle
{
/// <summary>
/// Declare some debug infos about a class or a specific property of an object.
/// </summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Property, Inherited = true, AllowMultiple = false)]
public class DebugData : System.Attribute
{
    /// <summary>When true, the debug renderer skips the annotated target.</summary>
    public bool Ignore { get; set; }

    public DebugData()
    {
        this.Ignore = false;
    }
}
// The coarse states the game can be in.  Invalid is only used as the
// initial PreviousValue before any transition has happened.
public enum GameStateType
{
    Invalid,
    Loading,
    Menu,
    Running,
}
/// <summary>
/// Tracks the current game state plus the state it transitioned from.
/// Starts in Menu with an Invalid previous state.
/// </summary>
public class GameState
{
    // State active before the most recent Set* transition.
    public GameStateType PreviousValue { get; private set; }
    // Currently active state.
    public GameStateType Value { get; set; }

    public bool WasLoading { get { return PreviousValue == GameStateType.Loading; } }
    public bool WasInMenu { get { return PreviousValue == GameStateType.Menu; } }
    public bool WasRunning { get { return PreviousValue == GameStateType.Running; } }

    public bool IsLoading { get { return Value == GameStateType.Loading; } }
    public bool IsInMenu { get { return Value == GameStateType.Menu; } }
    public bool IsRunning { get { return Value == GameStateType.Running; } }

    public GameState()
    {
        PreviousValue = GameStateType.Invalid;
        Value = GameStateType.Menu;
    }

    // All transitions funnel through here so PreviousValue stays in sync.
    private void TransitionTo(GameStateType next)
    {
        PreviousValue = Value;
        Value = next;
    }

    public void SetLoading() { TransitionTo(GameStateType.Loading); }
    public void SetInMenu() { TransitionTo(GameStateType.Menu); }
    public void SetRunning() { TransitionTo(GameStateType.Running); }

    public void ToggleRunningAndInMenu()
    {
        Debug.Assert(IsRunning || IsInMenu,
            "Cannot toggle between Running and InMenu if we are in another state.");

        if (IsRunning)
        {
            SetInMenu();
        }
        else
        {
            SetRunning();
        }
    }

    public override string ToString()
    {
        return Value.ToString();
    }
}
/// <summary>
/// This is the main type for your game
/// </summary>
public class Game : Microsoft.Xna.Framework.Game
{
GraphicsDeviceManager graphics;
SpriteBatch spriteBatch;

// The currently loaded level.
public Level level;

// Parsed contents of Content/level/game.cfg.
public ConfigFile gameConfig { get; set; }
public bool drawPhysics { get; set; } ///< Draws the physical world.
public DebugLevel drawDebugData { get; set; } ///< Draws textual game object data above them.
public DebugLevel drawVisualHelpers { get; set; } ///< Draws some visual helpers for game objects.
public SpriteFont debugFont { get; set; }
public EventSystem Events { get; set; }
public InputHandler InputHandler { get; set; }
public GameState GameState { get; set; }
// Accumulates the debug text drawn each frame.
public StringBuilder DebugInfo { get; set; }
public bool LoadingScreenDrawn { get; set; }
public Event ToggleRunningAndInMenuEvent { get; set; }
// Null when the config file has no [Audio] section.
public AudioManager Audio { get; set; }
public Vector2 ViewportDimensions { get; set; }
public Vector2 DebugCameraPosition;
public bool UseDebugCamera = true;
public Game()
{
graphics = new GraphicsDeviceManager(this);
Content.RootDirectory = "Content";
drawPhysics = false;
drawVisualHelpers = new DebugLevel();
drawDebugData = new DebugLevel();
GameState = new GameState();
GameState.SetLoading();
ViewportDimensions = new Vector2(800, 480);
DebugCameraPosition = -0.5f * ViewportDimensions;
level = new Level(this);
level.Name = "level_01";
level.Menu.InitialWindowName = "mainWindow";
gameConfig = ConfigFile.FromFile("Content/level/game.cfg");
gameConfig.IfSectionExists("Audio", section =>
{
Audio = new AudioManager(this);
Audio.Initialize(section);
});
}
/// <summary>
/// Allows the game to perform any initialization it needs to before starting to run.
/// This is where it can query for any required services and load any non-graphic
/// related content. Calling base.Initialize will enumerate through any components
/// and initialize them as well.
/// </summary>
protected override void Initialize()
{
ViewportDimensions = new Vector2(GraphicsDevice.Viewport.Width,
GraphicsDevice.Viewport.Height);
DebugInfo = new StringBuilder();
InputHandler = new InputHandler();
Events = new EventSystem();
Events["endLevel"].addListener(onEndLevel);
Events["exit"].addListener(onExit);
Events["ui.show"].addListener(OnUIShow);
Events["ui.close"].addListener(OnUIClose);
level.Initialize(gameConfig["Levels"][level.Name]);
base.Initialize();
GameState.SetInMenu();
}
/// <summary>
/// LoadContent will be called once per game and is the place to load
/// all of your content.
/// </summary>
protected override void LoadContent()
{
// Create a new SpriteBatch, which can be used to draw textures.
spriteBatch = new SpriteBatch(GraphicsDevice);
debugFont = Content.Load<SpriteFont>(gameConfig.GlobalSection["debugFont"]);
if (Audio != null)
Audio.LoadContent(Content);
level.LoadContent(Content);
}
/// <summary>
/// UnloadContent will be called once per game and is the place to unload
/// all content.
/// </summary>
protected override void UnloadContent()
{
if (Audio != null)
{
Audio.CleanUp();
}
}
/// <summary>
/// Allows the game to run logic such as updating the world,
/// checking for collisions, gathering input, and playing audio.
/// </summary>
/// <param name="gameTime">Provides a snapshot of timing values.</param>
///
protected override void Update(GameTime gameTime)
{
var dt = (float)gameTime.ElapsedGameTime.TotalSeconds;
base.Update(gameTime);
InputHandler.Update(gameTime);
if (Audio != null)
Audio.Update(gameTime);
if (GameState.IsLoading)
{
if (LoadingScreenDrawn)
{
EndLoadLevel();
}
return;
}
else if (GameState.IsInMenu)
{
level.Menu.Update(gameTime);
return;
}
if (InputHandler.WasTriggered(Keys.Escape) || InputHandler.WasTriggered(Buttons.Start))
{
// Show the main menu.
Events["ui.show"].trigger("mainWindow");
}
if (InputHandler.WasTriggered(Keys.R) || InputHandler.WasTriggered(Buttons.Back))
{
Events["endLevel"].trigger(level.Name);
return;
}
if (InputHandler.WasTriggered(Keys.F9))
{
drawPhysics = !drawPhysics;
}
if (InputHandler.WasTriggered(Keys.F10))
{
drawDebugData.CycleForward();
}
if (InputHandler.WasTriggered(Keys.F11))
{
drawVisualHelpers.CycleForward();
}
if (InputHandler.WasTriggered(Keys.F8))
{
UseDebugCamera = !UseDebugCamera;
}
level.Update(gameTime);
if(UseDebugCamera)
{
var speed = 100.0f;
if(InputHandler.IsDown(Keys.W))
{
DebugCameraPosition.Y -= dt * speed;
}
if(InputHandler.IsDown(Keys.A))
{
DebugCameraPosition.X -= dt * speed;
}
if(InputHandler.IsDown(Keys.S))
{
DebugCameraPosition.Y += dt * speed;
}
if(InputHandler.IsDown(Keys.D))
{
DebugCameraPosition.X += dt * speed;
}
}
if (InputHandler.IsDown(Keys.Add))
{
level.camera.Scale += 0.01f;
}
if (InputHandler.IsDown(Keys.Subtract))
{
level.camera.Scale -= 0.01f;
}
}
/// <summary>
/// This is called when the game should draw itself.
/// </summary>
/// <param name="gameTime">Provides a snapshot of timing values.</param>
protected override void Draw(GameTime gameTime)
{
GraphicsDevice.Clear(Color.Black);
if (GameState.IsLoading)
{
spriteBatch.Begin();
var toDraw = "Loading...";
var measurements = debugFont.MeasureString(toDraw);
spriteBatch.DrawString(debugFont,
toDraw,
new Vector2(GraphicsDevice.Viewport.Width / 2, GraphicsDevice.Viewport.Height / 2) - measurements / 2,
Color.White);
spriteBatch.End();
LoadingScreenDrawn = true;
return;
}
Matrix transform;
if (UseDebugCamera)
{
transform = Matrix.CreateTranslation(-DebugCameraPosition.X, -DebugCameraPosition.Y, 0);
}
else
{
transform = level.camera.Transform;
}
//transform.Translation = new Vector3(ViewportDimensions / 2.0f, 0.0f);
spriteBatch.Begin(
SpriteSortMode.Deferred,
BlendState.NonPremultiplied,
null,
null,
null,
null,
transform);
level.Draw(spriteBatch, gameTime);
base.Draw(gameTime);
if (drawPhysics)
{
DrawOnScreen("Drawing physical world");
}
if (!drawVisualHelpers.IsNone)
{
DrawOnScreen("Drawing visual helpers");
if (drawVisualHelpers.IsVerbose)
{
DrawBoundingBoxes(gameTime);
}
}
if (!drawDebugData.IsNone)
{
DrawDebugData(gameTime);
DrawOnScreen("Drawing debug data");
DrawOnScreen(string.Format("Game state: {0}", GameState));
if (drawDebugData.IsVerbose)
{
// TODO draw more verbose data.
}
}
spriteBatch.End();
// Draw user interface related stuff
spriteBatch.Begin();
level.DrawUserInterface(spriteBatch);
spriteBatch.DrawString(debugFont, DebugInfo, new Vector2(20, 20), Color.White);
DebugInfo.Clear();
spriteBatch.End();
}
private void DrawBoundingBoxes(GameTime gameTime)
{
var drawSize = new Vector2(4, 4);
foreach (var go in level.GameObjects)
{
spriteBatch.FillRectangle(go.Pos - drawSize / 2, drawSize, Color.Red);
spriteBatch.DrawRectangle(scBoundingUtils.toXNARectangle(go.Body.calculateBoundingBox()), Color.Red);
}
}
private void DrawDebugData(GameTime gameTime)
{
foreach (var go in level.GameObjects)
{
var debugMessage = new StringBuilder();
debugMessage.AppendFormat("[{0}]", go.Name);
var type = go.GetType();
var goDebugData = (DebugData)Attribute.GetCustomAttribute(type, typeof(DebugData), true);
if (goDebugData == null || goDebugData.Ignore)
{ continue; }
var properties = type.GetProperties();
foreach (var prop in properties)
{
var debugData = (DebugData)Attribute.GetCustomAttribute(prop, typeof(DebugData), true);
if (debugData == null)
{
if (!drawDebugData.IsVerbose)
continue; // No debug data and we are not verbose today.
}
else
{
if (debugData.Ignore)
continue; // Debug data says, we should ignore it.
}
var key = prop.Name;
var value = prop.GetValue(go, null);
debugMessage.AppendFormat("\n{0}: {1}", key, value);
}
var dimensions = debugFont.MeasureString(debugMessage);
var boundingBox = go.Body.calculateBoundingBox();
var position = go.Pos - boundingBox.halfExtents;
position.Y -= dimensions.Y;
spriteBatch.DrawString(debugFont, debugMessage, position, Color.White);
}
}
public void DrawOnScreen(string message, Vector2? position = null)
{
if (position == null)
{
DebugInfo.AppendLine(message);
return;
}
var pos = position.Value;
var upperLeft = Vector2.Zero;
if (level.camera != null)
{
upperLeft = level.camera.Position - new Vector2(GraphicsDevice.Viewport.Width / 2, GraphicsDevice.Viewport.Height / 2);
}
spriteBatch.DrawString(debugFont, message, upperLeft + pos, Color.White);
}
private void StartLoadingLevel(String name)
{
LoadingScreenDrawn = false;
GameState.SetLoading();
level = new Level(this);
level.Name = name;
}
private void EndLoadLevel()
{
// Keep the reference to the current event system because the call to this.Ininitialize() will create a new one.
Initialize();
GameState.SetRunning();
LoadingScreenDrawn = false;
Events["levelInitialized"].trigger(level.Name);
}
private void onEndLevel(String data)
{
StartLoadingLevel(data);
}
private void onExit(String data)
{
Exit();
}
private void OnUIShow(String data)
{
GameState.SetInMenu();
}
private void OnUIClose(String data)
{
GameState.SetRunning();
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Xml.Linq;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.WindowsAzure.Management.WebSites;
using Microsoft.WindowsAzure.Management.WebSites.Models;
namespace Microsoft.WindowsAzure.Management.WebSites
{
/// <summary>
/// Operations for managing web hosting plans beneath your subscription.
/// </summary>
internal partial class WebHostingPlanOperations : IServiceOperations<WebSiteManagementClient>, IWebHostingPlanOperations
{
/// <summary>
/// Initializes a new instance of the WebHostingPlanOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal WebHostingPlanOperations(WebSiteManagementClient client)
{
    // Generated code: the client is stored as-is; lifetime is owned by the caller.
    this._client = client;
}

// Backing field for the Client property.
private WebSiteManagementClient _client;

/// <summary>
/// Gets a reference to the
/// Microsoft.WindowsAzure.Management.WebSites.WebSiteManagementClient.
/// </summary>
public WebSiteManagementClient Client
{
    get { return this._client; }
}
/// <summary>
/// Creates a new Web Hosting Plan. (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='parameters'>
/// Required. Web Hosting Plan Parameters.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The Create Web Hosting Plan operation response.
/// </returns>
public async Task<WebHostingPlanCreateResponse> CreateAsync(string webSpaceName, WebHostingPlanCreateParameters parameters, CancellationToken cancellationToken)
{
    // Validate
    if (webSpaceName == null)
    {
        throw new ArgumentNullException("webSpaceName");
    }
    if (parameters == null)
    {
        throw new ArgumentNullException("parameters");
    }
    if (parameters.Name == null)
    {
        throw new ArgumentNullException("parameters.Name");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("webSpaceName", webSpaceName);
        tracingParameters.Add("parameters", parameters);
        TracingAdapter.Enter(invocationId, this, "CreateAsync", tracingParameters);
    }
    // Construct URL: /{subscriptionId}/services/WebSpaces/{webSpaceName}/ServerFarms
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/WebSpaces/";
    url = url + Uri.EscapeDataString(webSpaceName);
    url = url + "/ServerFarms";
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Post;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        httpRequest.Headers.Add("x-ms-version", "2014-04-01");
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Serialize Request: a <ServerFarm> XML document in the windowsazure namespace.
        // Optional parameters (NumberOfWorkers, WorkerSize, AdminSiteName) are omitted when null.
        string requestContent = null;
        XDocument requestDoc = new XDocument();
        XElement serverFarmElement = new XElement(XName.Get("ServerFarm", "http://schemas.microsoft.com/windowsazure"));
        requestDoc.Add(serverFarmElement);
        XElement nameElement = new XElement(XName.Get("Name", "http://schemas.microsoft.com/windowsazure"));
        nameElement.Value = parameters.Name;
        serverFarmElement.Add(nameElement);
        if (parameters.NumberOfWorkers != null)
        {
            XElement numberOfWorkersElement = new XElement(XName.Get("NumberOfWorkers", "http://schemas.microsoft.com/windowsazure"));
            numberOfWorkersElement.Value = parameters.NumberOfWorkers.ToString();
            serverFarmElement.Add(numberOfWorkersElement);
        }
        XElement sKUElement = new XElement(XName.Get("SKU", "http://schemas.microsoft.com/windowsazure"));
        sKUElement.Value = parameters.SKU.ToString();
        serverFarmElement.Add(sKUElement);
        if (parameters.WorkerSize != null)
        {
            XElement workerSizeElement = new XElement(XName.Get("WorkerSize", "http://schemas.microsoft.com/windowsazure"));
            workerSizeElement.Value = parameters.WorkerSize.ToString();
            serverFarmElement.Add(workerSizeElement);
        }
        if (parameters.AdminSiteName != null)
        {
            XElement adminSiteNameElement = new XElement(XName.Get("AdminSiteName", "http://schemas.microsoft.com/windowsazure"));
            adminSiteNameElement.Value = parameters.AdminSiteName;
            serverFarmElement.Add(adminSiteNameElement);
        }
        requestContent = requestDoc.ToString();
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml");
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            // 200 OK is the only accepted status; anything else becomes a CloudException
            // carrying the original request body for diagnostics.
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            WebHostingPlanCreateResponse result = null;
            // Deserialize Response: mirror of the request <ServerFarm> document.
            // Every element is optional; missing elements leave the default value.
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new WebHostingPlanCreateResponse();
                XDocument responseDoc = XDocument.Parse(responseContent);
                XElement serverFarmElement2 = responseDoc.Element(XName.Get("ServerFarm", "http://schemas.microsoft.com/windowsazure"));
                if (serverFarmElement2 != null)
                {
                    WebHostingPlan webHostingPlanInstance = new WebHostingPlan();
                    result.WebHostingPlan = webHostingPlanInstance;
                    XElement nameElement2 = serverFarmElement2.Element(XName.Get("Name", "http://schemas.microsoft.com/windowsazure"));
                    if (nameElement2 != null)
                    {
                        string nameInstance = nameElement2.Value;
                        webHostingPlanInstance.Name = nameInstance;
                    }
                    XElement numberOfWorkersElement2 = serverFarmElement2.Element(XName.Get("NumberOfWorkers", "http://schemas.microsoft.com/windowsazure"));
                    if (numberOfWorkersElement2 != null && !string.IsNullOrEmpty(numberOfWorkersElement2.Value))
                    {
                        int numberOfWorkersInstance = int.Parse(numberOfWorkersElement2.Value, CultureInfo.InvariantCulture);
                        webHostingPlanInstance.NumberOfWorkers = numberOfWorkersInstance;
                    }
                    XElement sKUElement2 = serverFarmElement2.Element(XName.Get("SKU", "http://schemas.microsoft.com/windowsazure"));
                    if (sKUElement2 != null)
                    {
                        SkuOptions sKUInstance = ((SkuOptions)Enum.Parse(typeof(SkuOptions), sKUElement2.Value, true));
                        webHostingPlanInstance.SKU = sKUInstance;
                    }
                    XElement workerSizeElement2 = serverFarmElement2.Element(XName.Get("WorkerSize", "http://schemas.microsoft.com/windowsazure"));
                    if (workerSizeElement2 != null && !string.IsNullOrEmpty(workerSizeElement2.Value))
                    {
                        WorkerSizeOptions workerSizeInstance = ((WorkerSizeOptions)Enum.Parse(typeof(WorkerSizeOptions), workerSizeElement2.Value, true));
                        webHostingPlanInstance.WorkerSize = workerSizeInstance;
                    }
                    XElement adminSiteNameElement2 = serverFarmElement2.Element(XName.Get("AdminSiteName", "http://schemas.microsoft.com/windowsazure"));
                    if (adminSiteNameElement2 != null)
                    {
                        string adminSiteNameInstance = adminSiteNameElement2.Value;
                        webHostingPlanInstance.AdminSiteName = adminSiteNameInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always release the response, even when an exception was thrown above.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Deletes a Web Hosting Plan (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<AzureOperationResponse> DeleteAsync(string webSpaceName, string webHostingPlanName, CancellationToken cancellationToken)
{
    // Validate
    if (webSpaceName == null)
    {
        throw new ArgumentNullException("webSpaceName");
    }
    if (webHostingPlanName == null)
    {
        throw new ArgumentNullException("webHostingPlanName");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("webSpaceName", webSpaceName);
        tracingParameters.Add("webHostingPlanName", webHostingPlanName);
        TracingAdapter.Enter(invocationId, this, "DeleteAsync", tracingParameters);
    }
    // Construct URL: /{subscriptionId}/services/WebSpaces/{webSpaceName}/ServerFarms/{webHostingPlanName}
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/WebSpaces/";
    url = url + Uri.EscapeDataString(webSpaceName);
    url = url + "/ServerFarms/";
    url = url + Uri.EscapeDataString(webHostingPlanName);
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Delete;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        httpRequest.Headers.Add("x-ms-version", "2014-04-01");
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            // 200 OK is the only accepted status; anything else becomes a CloudException.
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result: the delete response has no body, only status and request id.
            AzureOperationResponse result = null;
            // Deserialize Response
            result = new AzureOperationResponse();
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always release the response, even when an exception was thrown above.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Gets details of an existing Web Hosting Plan (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The Get Web Hosting Plan operation response.
/// </returns>
public async Task<WebHostingPlanGetResponse> GetAsync(string webSpaceName, string webHostingPlanName, CancellationToken cancellationToken)
{
    // Validate
    if (webSpaceName == null)
    {
        throw new ArgumentNullException("webSpaceName");
    }
    if (webHostingPlanName == null)
    {
        throw new ArgumentNullException("webHostingPlanName");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("webSpaceName", webSpaceName);
        tracingParameters.Add("webHostingPlanName", webHostingPlanName);
        TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters);
    }
    // Construct URL: /{subscriptionId}/services/WebSpaces/{webSpaceName}/serverFarms/{webHostingPlanName}
    // NOTE(review): this path uses lowercase "serverFarms" while CreateAsync/DeleteAsync
    // use "ServerFarms" — presumably the service treats the segment case-insensitively.
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/WebSpaces/";
    url = url + Uri.EscapeDataString(webSpaceName);
    url = url + "/serverFarms/";
    url = url + Uri.EscapeDataString(webHostingPlanName);
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        httpRequest.Headers.Add("x-ms-version", "2014-04-01");
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            // 200 OK is the only accepted status; anything else becomes a CloudException.
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            WebHostingPlanGetResponse result = null;
            // Deserialize Response: a <ServerFarm> XML document; every element is
            // optional and missing elements leave the default value.
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new WebHostingPlanGetResponse();
                XDocument responseDoc = XDocument.Parse(responseContent);
                XElement serverFarmElement = responseDoc.Element(XName.Get("ServerFarm", "http://schemas.microsoft.com/windowsazure"));
                if (serverFarmElement != null)
                {
                    WebHostingPlan webHostingPlanInstance = new WebHostingPlan();
                    result.WebHostingPlan = webHostingPlanInstance;
                    XElement nameElement = serverFarmElement.Element(XName.Get("Name", "http://schemas.microsoft.com/windowsazure"));
                    if (nameElement != null)
                    {
                        string nameInstance = nameElement.Value;
                        webHostingPlanInstance.Name = nameInstance;
                    }
                    XElement numberOfWorkersElement = serverFarmElement.Element(XName.Get("NumberOfWorkers", "http://schemas.microsoft.com/windowsazure"));
                    if (numberOfWorkersElement != null && !string.IsNullOrEmpty(numberOfWorkersElement.Value))
                    {
                        int numberOfWorkersInstance = int.Parse(numberOfWorkersElement.Value, CultureInfo.InvariantCulture);
                        webHostingPlanInstance.NumberOfWorkers = numberOfWorkersInstance;
                    }
                    XElement sKUElement = serverFarmElement.Element(XName.Get("SKU", "http://schemas.microsoft.com/windowsazure"));
                    if (sKUElement != null)
                    {
                        SkuOptions sKUInstance = ((SkuOptions)Enum.Parse(typeof(SkuOptions), sKUElement.Value, true));
                        webHostingPlanInstance.SKU = sKUInstance;
                    }
                    XElement workerSizeElement = serverFarmElement.Element(XName.Get("WorkerSize", "http://schemas.microsoft.com/windowsazure"));
                    if (workerSizeElement != null && !string.IsNullOrEmpty(workerSizeElement.Value))
                    {
                        WorkerSizeOptions workerSizeInstance = ((WorkerSizeOptions)Enum.Parse(typeof(WorkerSizeOptions), workerSizeElement.Value, true));
                        webHostingPlanInstance.WorkerSize = workerSizeInstance;
                    }
                    XElement adminSiteNameElement = serverFarmElement.Element(XName.Get("AdminSiteName", "http://schemas.microsoft.com/windowsazure"));
                    if (adminSiteNameElement != null)
                    {
                        string adminSiteNameInstance = adminSiteNameElement.Value;
                        webHostingPlanInstance.AdminSiteName = adminSiteNameInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always release the response, even when an exception was thrown above.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// You can retrieve historical usage metrics for a site by issuing an
/// HTTP GET request. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/dn166964.aspx
/// for more information)
/// </summary>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Get Historical Usage Metrics
/// Web hosting plan operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The Get Historical Usage Metrics Web hosting plan operation
/// response.
/// </returns>
public async Task<WebHostingPlanGetHistoricalUsageMetricsResponse> GetHistoricalUsageMetricsAsync(string webSpaceName, string webHostingPlanName, WebHostingPlanGetHistoricalUsageMetricsParameters parameters, CancellationToken cancellationToken)
{
// Validate
if (webSpaceName == null)
{
throw new ArgumentNullException("webSpaceName");
}
if (webHostingPlanName == null)
{
throw new ArgumentNullException("webHostingPlanName");
}
if (parameters == null)
{
throw new ArgumentNullException("parameters");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("webSpaceName", webSpaceName);
tracingParameters.Add("webHostingPlanName", webHostingPlanName);
tracingParameters.Add("parameters", parameters);
TracingAdapter.Enter(invocationId, this, "GetHistoricalUsageMetricsAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + "/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/services/WebSpaces/";
url = url + Uri.EscapeDataString(webSpaceName);
url = url + "/serverFarms/";
url = url + Uri.EscapeDataString(webHostingPlanName);
url = url + "/metrics";
List<string> queryParameters = new List<string>();
if (parameters.MetricNames != null && parameters.MetricNames.Count > 0)
{
queryParameters.Add("names=" + Uri.EscapeDataString(string.Join(",", parameters.MetricNames)));
}
if (parameters.StartTime != null)
{
queryParameters.Add("StartTime=" + Uri.EscapeDataString(string.Format(CultureInfo.InvariantCulture, "{0:O}", parameters.StartTime.Value.ToUniversalTime())));
}
if (parameters.EndTime != null)
{
queryParameters.Add("EndTime=" + Uri.EscapeDataString(string.Format(CultureInfo.InvariantCulture, "{0:O}", parameters.EndTime.Value.ToUniversalTime())));
}
if (parameters.TimeGrain != null)
{
queryParameters.Add("timeGrain=" + Uri.EscapeDataString(parameters.TimeGrain));
}
queryParameters.Add("details=" + Uri.EscapeDataString(parameters.IncludeInstanceBreakdown.ToString().ToLower()));
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2014-04-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
WebHostingPlanGetHistoricalUsageMetricsResponse result = null;
// Deserialize Response
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new WebHostingPlanGetHistoricalUsageMetricsResponse();
XDocument responseDoc = XDocument.Parse(responseContent);
XElement metricResponsesElement = responseDoc.Element(XName.Get("MetricResponses", "http://schemas.microsoft.com/windowsazure"));
if (metricResponsesElement != null)
{
if (metricResponsesElement != null)
{
foreach (XElement usageMetricsElement in metricResponsesElement.Elements(XName.Get("MetricResponse", "http://schemas.microsoft.com/windowsazure")))
{
HistoricalUsageMetric metricResponseInstance = new HistoricalUsageMetric();
result.UsageMetrics.Add(metricResponseInstance);
XElement codeElement = usageMetricsElement.Element(XName.Get("Code", "http://schemas.microsoft.com/windowsazure"));
if (codeElement != null)
{
string codeInstance = codeElement.Value;
metricResponseInstance.Code = codeInstance;
}
XElement dataElement = usageMetricsElement.Element(XName.Get("Data", "http://schemas.microsoft.com/windowsazure"));
if (dataElement != null)
{
HistoricalUsageMetricData dataInstance = new HistoricalUsageMetricData();
metricResponseInstance.Data = dataInstance;
XElement displayNameElement = dataElement.Element(XName.Get("DisplayName", "http://schemas.microsoft.com/windowsazure"));
if (displayNameElement != null)
{
string displayNameInstance = displayNameElement.Value;
dataInstance.DisplayName = displayNameInstance;
}
XElement endTimeElement = dataElement.Element(XName.Get("EndTime", "http://schemas.microsoft.com/windowsazure"));
if (endTimeElement != null)
{
DateTime endTimeInstance = DateTime.Parse(endTimeElement.Value, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal).ToLocalTime();
dataInstance.EndTime = endTimeInstance;
}
XElement nameElement = dataElement.Element(XName.Get("Name", "http://schemas.microsoft.com/windowsazure"));
if (nameElement != null)
{
string nameInstance = nameElement.Value;
dataInstance.Name = nameInstance;
}
XElement primaryAggregationTypeElement = dataElement.Element(XName.Get("PrimaryAggregationType", "http://schemas.microsoft.com/windowsazure"));
if (primaryAggregationTypeElement != null)
{
string primaryAggregationTypeInstance = primaryAggregationTypeElement.Value;
dataInstance.PrimaryAggregationType = primaryAggregationTypeInstance;
}
XElement startTimeElement = dataElement.Element(XName.Get("StartTime", "http://schemas.microsoft.com/windowsazure"));
if (startTimeElement != null)
{
DateTime startTimeInstance = DateTime.Parse(startTimeElement.Value, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal).ToLocalTime();
dataInstance.StartTime = startTimeInstance;
}
XElement timeGrainElement = dataElement.Element(XName.Get("TimeGrain", "http://schemas.microsoft.com/windowsazure"));
if (timeGrainElement != null)
{
string timeGrainInstance = timeGrainElement.Value;
dataInstance.TimeGrain = timeGrainInstance;
}
XElement unitElement = dataElement.Element(XName.Get("Unit", "http://schemas.microsoft.com/windowsazure"));
if (unitElement != null)
{
string unitInstance = unitElement.Value;
dataInstance.Unit = unitInstance;
}
XElement valuesSequenceElement = dataElement.Element(XName.Get("Values", "http://schemas.microsoft.com/windowsazure"));
if (valuesSequenceElement != null)
{
foreach (XElement valuesElement in valuesSequenceElement.Elements(XName.Get("MetricSample", "http://schemas.microsoft.com/windowsazure")))
{
HistoricalUsageMetricSample metricSampleInstance = new HistoricalUsageMetricSample();
dataInstance.Values.Add(metricSampleInstance);
XElement countElement = valuesElement.Element(XName.Get("Count", "http://schemas.microsoft.com/windowsazure"));
if (countElement != null)
{
int countInstance = int.Parse(countElement.Value, CultureInfo.InvariantCulture);
metricSampleInstance.Count = countInstance;
}
XElement maximumElement = valuesElement.Element(XName.Get("Maximum", "http://schemas.microsoft.com/windowsazure"));
if (maximumElement != null)
{
bool isNil = false;
XAttribute nilAttribute = maximumElement.Attribute(XName.Get("nil", "http://www.w3.org/2001/XMLSchema-instance"));
if (nilAttribute != null)
{
isNil = nilAttribute.Value == "true";
}
if (isNil == false)
{
string maximumInstance = maximumElement.Value;
metricSampleInstance.Maximum = maximumInstance;
}
}
XElement minimumElement = valuesElement.Element(XName.Get("Minimum", "http://schemas.microsoft.com/windowsazure"));
if (minimumElement != null)
{
bool isNil2 = false;
XAttribute nilAttribute2 = minimumElement.Attribute(XName.Get("nil", "http://www.w3.org/2001/XMLSchema-instance"));
if (nilAttribute2 != null)
{
isNil2 = nilAttribute2.Value == "true";
}
if (isNil2 == false)
{
string minimumInstance = minimumElement.Value;
metricSampleInstance.Minimum = minimumInstance;
}
}
XElement timeCreatedElement = valuesElement.Element(XName.Get("TimeCreated", "http://schemas.microsoft.com/windowsazure"));
if (timeCreatedElement != null)
{
DateTime timeCreatedInstance = DateTime.Parse(timeCreatedElement.Value, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal).ToLocalTime();
metricSampleInstance.TimeCreated = timeCreatedInstance;
}
XElement totalElement = valuesElement.Element(XName.Get("Total", "http://schemas.microsoft.com/windowsazure"));
if (totalElement != null)
{
string totalInstance = totalElement.Value;
metricSampleInstance.Total = totalInstance;
}
XElement instanceNameElement = valuesElement.Element(XName.Get("InstanceName", "http://schemas.microsoft.com/windowsazure"));
if (instanceNameElement != null)
{
string instanceNameInstance = instanceNameElement.Value;
metricSampleInstance.InstanceName = instanceNameInstance;
}
}
}
}
XElement messageElement = usageMetricsElement.Element(XName.Get("Message", "http://schemas.microsoft.com/windowsazure"));
if (messageElement != null)
{
string messageInstance = messageElement.Value;
metricResponseInstance.Message = messageInstance;
}
}
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Lists the Web Hosting Plans (server farms) contained in the given web
/// space by issuing a GET request to
/// /{subscriptionId}/services/WebSpaces/{webSpaceName}/serverFarms. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/dn166961.aspx
/// for more information)
/// </summary>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The List Web Hosting Plans operation response.
/// </returns>
public async Task<WebHostingPlanListResponse> ListAsync(string webSpaceName, CancellationToken cancellationToken)
{
    // Validate
    if (webSpaceName == null)
    {
        throw new ArgumentNullException("webSpaceName");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("webSpaceName", webSpaceName);
        TracingAdapter.Enter(invocationId, this, "ListAsync", tracingParameters);
    }
    // Construct URL: /{subscriptionId}/services/WebSpaces/{webSpaceName}/serverFarms
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/WebSpaces/";
    url = url + Uri.EscapeDataString(webSpaceName);
    url = url + "/serverFarms";
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers (service API version)
        httpRequest.Headers.Add("x-ms-version", "2014-04-01");
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Anything other than 200 OK is surfaced as a CloudException built from the response body.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            WebHostingPlanListResponse result = null;
            // Deserialize Response: XML body is a <ServerFarms> list of <ServerFarm> elements.
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new WebHostingPlanListResponse();
                XDocument responseDoc = XDocument.Parse(responseContent);
                XElement serverFarmsSequenceElement = responseDoc.Element(XName.Get("ServerFarms", "http://schemas.microsoft.com/windowsazure"));
                if (serverFarmsSequenceElement != null)
                {
                    foreach (XElement serverFarmsElement in serverFarmsSequenceElement.Elements(XName.Get("ServerFarm", "http://schemas.microsoft.com/windowsazure")))
                    {
                        WebHostingPlan serverFarmInstance = new WebHostingPlan();
                        result.WebHostingPlans.Add(serverFarmInstance);
                        XElement nameElement = serverFarmsElement.Element(XName.Get("Name", "http://schemas.microsoft.com/windowsazure"));
                        if (nameElement != null)
                        {
                            string nameInstance = nameElement.Value;
                            serverFarmInstance.Name = nameInstance;
                        }
                        // NumberOfWorkers may be present but empty, hence the extra IsNullOrEmpty check.
                        XElement numberOfWorkersElement = serverFarmsElement.Element(XName.Get("NumberOfWorkers", "http://schemas.microsoft.com/windowsazure"));
                        if (numberOfWorkersElement != null && !string.IsNullOrEmpty(numberOfWorkersElement.Value))
                        {
                            int numberOfWorkersInstance = int.Parse(numberOfWorkersElement.Value, CultureInfo.InvariantCulture);
                            serverFarmInstance.NumberOfWorkers = numberOfWorkersInstance;
                        }
                        XElement sKUElement = serverFarmsElement.Element(XName.Get("SKU", "http://schemas.microsoft.com/windowsazure"));
                        if (sKUElement != null)
                        {
                            SkuOptions sKUInstance = ((SkuOptions)Enum.Parse(typeof(SkuOptions), sKUElement.Value, true));
                            serverFarmInstance.SKU = sKUInstance;
                        }
                        XElement workerSizeElement = serverFarmsElement.Element(XName.Get("WorkerSize", "http://schemas.microsoft.com/windowsazure"));
                        if (workerSizeElement != null && !string.IsNullOrEmpty(workerSizeElement.Value))
                        {
                            WorkerSizeOptions workerSizeInstance = ((WorkerSizeOptions)Enum.Parse(typeof(WorkerSizeOptions), workerSizeElement.Value, true));
                            serverFarmInstance.WorkerSize = workerSizeInstance;
                        }
                        XElement adminSiteNameElement = serverFarmsElement.Element(XName.Get("AdminSiteName", "http://schemas.microsoft.com/windowsazure"));
                        if (adminSiteNameElement != null)
                        {
                            string adminSiteNameInstance = adminSiteNameElement.Value;
                            serverFarmInstance.AdminSiteName = adminSiteNameInstance;
                        }
                    }
                }
            }
            result.StatusCode = statusCode;
            // Propagate the service request id for diagnostics, when present.
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Updates an existing Web Hosting Plan. (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Update Web Hosting Plan
/// operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The Update Web Hosting Plan operation response.
/// </returns>
public async Task<WebHostingPlanUpdateResponse> UpdateAsync(string webSpaceName, string webHostingPlanName, WebHostingPlanUpdateParameters parameters, CancellationToken cancellationToken)
{
    // Validate
    if (webSpaceName == null)
    {
        throw new ArgumentNullException("webSpaceName");
    }
    if (webHostingPlanName == null)
    {
        throw new ArgumentNullException("webHostingPlanName");
    }
    if (parameters == null)
    {
        throw new ArgumentNullException("parameters");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("webSpaceName", webSpaceName);
        tracingParameters.Add("webHostingPlanName", webHostingPlanName);
        tracingParameters.Add("parameters", parameters);
        TracingAdapter.Enter(invocationId, this, "UpdateAsync", tracingParameters);
    }
    // Construct URL: /{subscriptionId}/services/WebSpaces/{webSpaceName}/ServerFarms/{webHostingPlanName}
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/WebSpaces/";
    url = url + Uri.EscapeDataString(webSpaceName);
    url = url + "/ServerFarms/";
    url = url + Uri.EscapeDataString(webHostingPlanName);
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Put;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers (service API version)
        httpRequest.Headers.Add("x-ms-version", "2014-04-01");
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Serialize Request: a <ServerFarm> XML document; optional members are omitted when null,
        // SKU is always written.
        string requestContent = null;
        XDocument requestDoc = new XDocument();
        XElement serverFarmElement = new XElement(XName.Get("ServerFarm", "http://schemas.microsoft.com/windowsazure"));
        requestDoc.Add(serverFarmElement);
        if (parameters.NumberOfWorkers != null)
        {
            XElement numberOfWorkersElement = new XElement(XName.Get("NumberOfWorkers", "http://schemas.microsoft.com/windowsazure"));
            numberOfWorkersElement.Value = parameters.NumberOfWorkers.ToString();
            serverFarmElement.Add(numberOfWorkersElement);
        }
        XElement sKUElement = new XElement(XName.Get("SKU", "http://schemas.microsoft.com/windowsazure"));
        sKUElement.Value = parameters.SKU.ToString();
        serverFarmElement.Add(sKUElement);
        if (parameters.WorkerSize != null)
        {
            XElement workerSizeElement = new XElement(XName.Get("WorkerSize", "http://schemas.microsoft.com/windowsazure"));
            workerSizeElement.Value = parameters.WorkerSize.ToString();
            serverFarmElement.Add(workerSizeElement);
        }
        if (parameters.AdminSiteName != null)
        {
            XElement adminSiteNameElement = new XElement(XName.Get("AdminSiteName", "http://schemas.microsoft.com/windowsazure"));
            adminSiteNameElement.Value = parameters.AdminSiteName;
            serverFarmElement.Add(adminSiteNameElement);
        }
        requestContent = requestDoc.ToString();
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml");
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Anything other than 200 OK is surfaced as a CloudException that includes the request body.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            WebHostingPlanUpdateResponse result = null;
            // Deserialize Response: the updated <ServerFarm> element echoed back by the service.
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new WebHostingPlanUpdateResponse();
                XDocument responseDoc = XDocument.Parse(responseContent);
                XElement serverFarmElement2 = responseDoc.Element(XName.Get("ServerFarm", "http://schemas.microsoft.com/windowsazure"));
                if (serverFarmElement2 != null)
                {
                    WebHostingPlan webHostingPlanInstance = new WebHostingPlan();
                    result.WebHostingPlan = webHostingPlanInstance;
                    XElement nameElement = serverFarmElement2.Element(XName.Get("Name", "http://schemas.microsoft.com/windowsazure"));
                    if (nameElement != null)
                    {
                        string nameInstance = nameElement.Value;
                        webHostingPlanInstance.Name = nameInstance;
                    }
                    // NumberOfWorkers may be present but empty, hence the extra IsNullOrEmpty check.
                    XElement numberOfWorkersElement2 = serverFarmElement2.Element(XName.Get("NumberOfWorkers", "http://schemas.microsoft.com/windowsazure"));
                    if (numberOfWorkersElement2 != null && !string.IsNullOrEmpty(numberOfWorkersElement2.Value))
                    {
                        int numberOfWorkersInstance = int.Parse(numberOfWorkersElement2.Value, CultureInfo.InvariantCulture);
                        webHostingPlanInstance.NumberOfWorkers = numberOfWorkersInstance;
                    }
                    XElement sKUElement2 = serverFarmElement2.Element(XName.Get("SKU", "http://schemas.microsoft.com/windowsazure"));
                    if (sKUElement2 != null)
                    {
                        SkuOptions sKUInstance = ((SkuOptions)Enum.Parse(typeof(SkuOptions), sKUElement2.Value, true));
                        webHostingPlanInstance.SKU = sKUInstance;
                    }
                    XElement workerSizeElement2 = serverFarmElement2.Element(XName.Get("WorkerSize", "http://schemas.microsoft.com/windowsazure"));
                    if (workerSizeElement2 != null && !string.IsNullOrEmpty(workerSizeElement2.Value))
                    {
                        WorkerSizeOptions workerSizeInstance = ((WorkerSizeOptions)Enum.Parse(typeof(WorkerSizeOptions), workerSizeElement2.Value, true));
                        webHostingPlanInstance.WorkerSize = workerSizeInstance;
                    }
                    XElement adminSiteNameElement2 = serverFarmElement2.Element(XName.Get("AdminSiteName", "http://schemas.microsoft.com/windowsazure"));
                    if (adminSiteNameElement2 != null)
                    {
                        string adminSiteNameInstance = adminSiteNameElement2.Value;
                        webHostingPlanInstance.AdminSiteName = adminSiteNameInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            // Propagate the service request id for diagnostics, when present.
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
}
}
| |
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Collections.Generic;
namespace Igor
{
// Base class for objects that can render themselves inside the Igor editor inspector.
// Subclasses override GetInspectorInstance/GetTypeName/InnerInspectorGUI to plug in.
public class InspectableObject {
    // Foldout (expanded/collapsed) state per named list/dictionary section drawn by this object.
    protected Dictionary<string, bool> InspectorArrayExpanded = new Dictionary<string, bool>();
    // Set to true when any inspector control in this object reports a modified value.
    protected bool bInspectorHasChangedProperty = false;
// Returns the object instance shown in the inspector; the base class exposes none.
public virtual object GetInspectorInstance()
{
    object InstanceToInspect = null;
    return InstanceToInspect;
}
// Called when this object receives inspector focus; registers its GUI handler
// and selects its instance in the picker. Does nothing if no instance is exposed.
public virtual void GainedFocus()
{
    if(GetInspectorInstance() == null)
    {
        return;
    }
    InspectorPicker.AddHandler(GetTypeName(), EntityOnInspectorGUI);
    InspectorPicker.SelectInstance(GetTypeName(), GetInspectorInstance());
}
// Human-readable type name used as the key for inspector handler registration;
// the base class has no name.
public virtual string GetTypeName()
{
    string TypeName = "";
    return TypeName;
}
// Draws a single string field (read-only label or editable text box) and flags
// the dirty bit when the value changes.
public virtual void InspectorGUIString(string NameOfField, ref string FieldValue, bool bReadOnly = false)
{
    // Capture the incoming value (null treated as empty) so edits can be detected below.
    string OriginalValue = (FieldValue == null) ? "" : FieldValue;
    if(bReadOnly)
    {
        EditorGUILayout.LabelField(NameOfField, FieldValue);
    }
    else
    {
        FieldValue = EditorGUILayout.TextField(NameOfField, FieldValue);
    }
    if(OriginalValue != FieldValue)
    {
        bInspectorHasChangedProperty = true;
    }
}
/// <summary>
/// Draws a popup for choosing one of Values, optionally appending an
/// "add new entry" item that invokes OnAddNewEntry when selected.
/// Sets the dirty flag when the selection changes.
/// </summary>
public virtual void InspectorGUIDropDown(string DropDownLabel, ref List<string> Values, ref string CurrentValue, bool bAppendAddNewEntryOption = false, string AddNewEntryOptionText = "", GenericButtonCallback OnAddNewEntry = null)
{
    List<string> AllValues = Values;
    bool bHasAddNewEntryOption = false;
    if(bAppendAddNewEntryOption && AddNewEntryOptionText != "" && OnAddNewEntry != null)
    {
        // Work on a copy so the caller's list is not polluted with the sentinel entry.
        AllValues = new List<string>();
        AllValues.AddRange(Values);
        AllValues.Add(AddNewEntryOptionText);
        bHasAddNewEntryOption = true;
    }
    // Unknown current value falls back to the first entry.
    int CurrentIndex = Values.IndexOf(CurrentValue);
    if(CurrentIndex == -1)
    {
        CurrentIndex = 0;
    }
    int OldIndex = CurrentIndex;
    CurrentIndex = EditorGUILayout.Popup(DropDownLabel, CurrentIndex, AllValues.ToArray());
    if(bHasAddNewEntryOption && CurrentIndex == (AllValues.Count - 1))
    {
        OnAddNewEntry();
    }
    else if(CurrentIndex >= 0 && CurrentIndex < Values.Count)
    {
        // Bounds check fixes an ArgumentOutOfRangeException when Values is empty
        // (IndexOf returned -1, which was clamped to 0 and then used to index).
        CurrentValue = Values[CurrentIndex];
    }
    if(CurrentIndex != OldIndex)
    {
        bInspectorHasChangedProperty = true;
    }
}
// Draws this object's inspector body; returns true when a property changed.
// The base implementation draws nothing and reports no change.
public virtual bool InnerInspectorGUI()
{
    bool bChanged = false;
    return bChanged;
}
// Factory that wraps the serialized object at ItemIndex in an editor-side inspectable proxy.
public delegate InspectableObject GetEditorWrapper(XMLSerializable WrappedObject, int ItemIndex);
// Pending (not yet committed via Enter) values of deferred int fields, keyed by unique field name.
public static Dictionary<string, int> CachedIntWaitForEnterValues = new Dictionary<string, int>();
// Set when a deferred int field commits, signalling the host window to repaint.
public static bool bRepaint = false;
/// <summary>
/// Draws an int field whose value is only committed when the user presses
/// Enter (via the project KeyPressed extension). Uncommitted edits are kept
/// in CachedIntWaitForEnterValues between GUI passes.
/// </summary>
/// <returns>True when the user committed a new value; NewValue then holds it,
/// otherwise NewValue echoes LastSavedValue.</returns>
public virtual bool InspectorGUIIntWaitForEnter(string UniqueFieldName, string Label, int LastSavedValue, out int NewValue)
{
    bool bShouldSaveValue = false;
    NewValue = LastSavedValue;
    // Single dictionary lookup instead of ContainsKey + indexer.
    int CachedValue;
    if(!CachedIntWaitForEnterValues.TryGetValue(UniqueFieldName, out CachedValue))
    {
        // First time this field is drawn: seed the cache with the saved value.
        CachedValue = LastSavedValue;
        CachedIntWaitForEnterValues.Add(UniqueFieldName, CachedValue);
    }
    GUI.SetNextControlName(UniqueFieldName);
    bShouldSaveValue = EditorGUILayout.IntField(Label, CachedValue).KeyPressed<int>(UniqueFieldName, KeyCode.Return, LastSavedValue, out CachedValue);
    CachedIntWaitForEnterValues[UniqueFieldName] = CachedValue;
    if(bShouldSaveValue)
    {
        bRepaint = true;
        NewValue = CachedValue;
    }
    return bShouldSaveValue;
}
public delegate bool ValueInspectorDelegate<ValueType>(ref ValueType ValueToInspect);
/// <summary>
/// Draws a foldout inspector section for a list. The count is editable (unless
/// read-only) via a commit-on-Enter int field; growing appends default-constructed
/// items, shrinking drops the tail. Each surviving item is drawn via ValueInspector.
/// </summary>
public virtual void InspectorGUIListEmbedded<ListType>(string ListName, string ItemPrefix, ref List<ListType> CurrentList, ValueInspectorDelegate<ListType> ValueInspector, bool bReadOnly = false, bool bReadOnlyCount = true, bool bStartOpen = false)
    where ListType : XMLSerializable, new()
{
    if(!InspectorArrayExpanded.ContainsKey(ListName))
    {
        InspectorArrayExpanded.Add(ListName, bStartOpen);
    }
    bool bListExpanded = InspectorArrayExpanded[ListName];
    bListExpanded = EditorGUILayout.Foldout(bListExpanded, ListName);
    InspectorArrayExpanded[ListName] = bListExpanded;
    if(bListExpanded)
    {
        EditorGUI.indentLevel += 1;
        int NewListCount = CurrentList.Count;
        int OldListCount = NewListCount;
        bool bCountActuallyChanged = false;
        if(bReadOnly || bReadOnlyCount)
        {
            EditorGUILayout.LabelField("Count", CurrentList.Count.ToString());
        }
        else
        {
            // Count only changes when the user commits with Enter.
            bCountActuallyChanged = InspectorGUIIntWaitForEnter(ListName + "Count", "Count", OldListCount, out NewListCount);
        }
        if(bCountActuallyChanged && NewListCount != CurrentList.Count)
        {
            bInspectorHasChangedProperty = true;
            if(NewListCount > CurrentList.Count)
            {
                for(int CurrentElement = CurrentList.Count; CurrentElement < NewListCount; ++CurrentElement)
                {
                    CurrentList.Add(new ListType());
                }
            }
            else
            {
                // Drop the tail in one call instead of a RemoveAt loop.
                CurrentList.RemoveRange(NewListCount, CurrentList.Count - NewListCount);
            }
        }
        // Only draw items that existed both before and after a count change this pass.
        int SmallestSize = OldListCount > NewListCount ? NewListCount : OldListCount;
        for(int CurrentIndex = 0; CurrentIndex < SmallestSize; ++CurrentIndex)
        {
            if(ValueInspector != null)
            {
                EditorGUILayout.BeginVertical("box");
                ListType TempRef = CurrentList[CurrentIndex];
                bInspectorHasChangedProperty = ValueInspector(ref TempRef) || bInspectorHasChangedProperty;
                CurrentList[CurrentIndex] = TempRef;
                EditorGUILayout.EndVertical();
            }
        }
        EditorGUI.indentLevel -= 1;
    }
}
/// <summary>
/// Like InspectorGUIListEmbedded but each item also gets "Move up"/"Move down"
/// buttons so the list can be reordered from the inspector.
/// </summary>
public virtual void InspectorGUIOrderedListEmbedded<ListType>(string ListName, string ItemPrefix, ref List<ListType> CurrentList, ValueInspectorDelegate<ListType> ValueInspector, bool bReadOnly = false, bool bReadOnlyCount = true, bool bStartOpen = false)
    where ListType : XMLSerializable, new()
{
    if(!InspectorArrayExpanded.ContainsKey(ListName))
    {
        InspectorArrayExpanded.Add(ListName, bStartOpen);
    }
    bool bListExpanded = InspectorArrayExpanded[ListName];
    bListExpanded = EditorGUILayout.Foldout(bListExpanded, ListName);
    InspectorArrayExpanded[ListName] = bListExpanded;
    if(bListExpanded)
    {
        EditorGUI.indentLevel += 1;
        int NewListCount = CurrentList.Count;
        int OldListCount = NewListCount;
        bool bCountActuallyChanged = false;
        if(bReadOnly || bReadOnlyCount)
        {
            EditorGUILayout.LabelField("Count", CurrentList.Count.ToString());
        }
        else
        {
            // Count only changes when the user commits with Enter.
            bCountActuallyChanged = InspectorGUIIntWaitForEnter(ListName + "Count", "Count", OldListCount, out NewListCount);
        }
        if(bCountActuallyChanged && NewListCount != CurrentList.Count)
        {
            bInspectorHasChangedProperty = true;
            if(NewListCount > CurrentList.Count)
            {
                for(int CurrentElement = CurrentList.Count; CurrentElement < NewListCount; ++CurrentElement)
                {
                    CurrentList.Add(new ListType());
                }
            }
            else
            {
                for(int CurrentElement = NewListCount; CurrentElement < CurrentList.Count;)
                {
                    CurrentList.RemoveAt(CurrentElement);
                }
            }
        }
        // Remember which item (if any) asked to be moved; apply after drawing so
        // the list is not mutated mid-layout.
        int MoveUpIndex = -1;
        int MoveDownIndex = -1;
        int SmallestSize = OldListCount > NewListCount ? NewListCount : OldListCount;
        for(int CurrentIndex = 0; CurrentIndex < SmallestSize; ++CurrentIndex)
        {
            if(ValueInspector != null)
            {
                EditorGUILayout.BeginVertical("box");
                ListType TempRef = CurrentList[CurrentIndex];
                bInspectorHasChangedProperty = ValueInspector(ref TempRef) || bInspectorHasChangedProperty;
                CurrentList[CurrentIndex] = TempRef;
                EditorGUILayout.BeginHorizontal();
                if(GUILayout.Button("Move up"))
                {
                    MoveUpIndex = CurrentIndex;
                }
                if(GUILayout.Button("Move down"))
                {
                    MoveDownIndex = CurrentIndex;
                }
                EditorGUILayout.EndHorizontal();
                EditorGUILayout.EndVertical();
            }
        }
        // BUGFIX: the old rebuild loops iterated SmallestSize + 1 times over a list
        // that had only SmallestSize - 1 elements after RemoveAt, indexing past the
        // end (e.g. moving the last item up threw ArgumentOutOfRangeException).
        // RemoveAt + Insert performs the same reorder without rebuilding the list.
        if(MoveUpIndex > 0 && MoveUpIndex < SmallestSize)
        {
            ListType ItemToMove = CurrentList[MoveUpIndex];
            CurrentList.RemoveAt(MoveUpIndex);
            CurrentList.Insert(MoveUpIndex - 1, ItemToMove);
        }
        if(MoveDownIndex > -1 && MoveDownIndex < SmallestSize - 1)
        {
            ListType ItemToMove = CurrentList[MoveDownIndex];
            CurrentList.RemoveAt(MoveDownIndex);
            CurrentList.Insert(MoveDownIndex + 1, ItemToMove);
        }
        EditorGUI.indentLevel -= 1;
    }
}
public delegate void LabeledInspector<ValueType>(string Label, ref ValueType ValueInst);
/// <summary>
/// Draws a foldout inspector section for a dictionary: keys are edited via
/// KeyInspector, values via the InspectableObject wrapper from WrapperForValue.
/// Growing the count adds default-constructed key/value pairs; shrinking drops
/// entries beyond the new count (in enumeration order).
/// </summary>
public virtual void InspectorGUIDictionaryEmbeddedAndObject<KeyType, ValueType>(string DictionaryName, string KeyPrefix, string ValuePrefix, ref Dictionary<KeyType, ValueType> CurrentDictionary, LabeledInspector<KeyType> KeyInspector, GetEditorWrapper WrapperForValue, bool bReadOnly = false, bool bStartOpen = false)
    where KeyType : XMLSerializable, new()
    where ValueType : XMLSerializable, new()
{
    if(!InspectorArrayExpanded.ContainsKey(DictionaryName))
    {
        InspectorArrayExpanded.Add(DictionaryName, bStartOpen);
    }
    bool bDictionaryExpanded = InspectorArrayExpanded[DictionaryName];
    bDictionaryExpanded = EditorGUILayout.Foldout(bDictionaryExpanded, DictionaryName);
    InspectorArrayExpanded[DictionaryName] = bDictionaryExpanded;
    if(bDictionaryExpanded)
    {
        EditorGUI.indentLevel += 1;
        int NewDictionaryCount = CurrentDictionary.Keys.Count;
        int OldDictionaryCount = NewDictionaryCount;
        bool bCountActuallyChanged = false;
        if(bReadOnly)
        {
            EditorGUILayout.LabelField("Count", CurrentDictionary.Keys.Count.ToString());
        }
        else
        {
            bCountActuallyChanged = InspectorGUIIntWaitForEnter(DictionaryName + "Count", "Count", OldDictionaryCount, out NewDictionaryCount);
        }
        if(bCountActuallyChanged && NewDictionaryCount != CurrentDictionary.Keys.Count)
        {
            bInspectorHasChangedProperty = true;
            if(NewDictionaryCount > CurrentDictionary.Keys.Count)
            {
                for(int CurrentElement = CurrentDictionary.Keys.Count; CurrentElement < NewDictionaryCount; ++CurrentElement)
                {
                    // NOTE(review): two default-constructed keys that compare equal
                    // would make Add throw — presumably KeyType generates unique keys; verify.
                    CurrentDictionary.Add(new KeyType(), new ValueType());
                }
            }
            else
            {
                // BUGFIX: removing entries while foreach-enumerating the same dictionary
                // throws InvalidOperationException. Collect the doomed keys first.
                List<KeyType> KeysToRemove = new List<KeyType>();
                int CurrentIndex = 0;
                foreach(KeyValuePair<KeyType, ValueType> CurrentElement in CurrentDictionary)
                {
                    if(CurrentIndex >= NewDictionaryCount)
                    {
                        KeysToRemove.Add(CurrentElement.Key);
                    }
                    ++CurrentIndex;
                }
                foreach(KeyType KeyToRemove in KeysToRemove)
                {
                    CurrentDictionary.Remove(KeyToRemove);
                }
            }
        }
        int SmallestSize = OldDictionaryCount > NewDictionaryCount ? NewDictionaryCount : OldDictionaryCount;
        for(int CurrentIndex = 0; CurrentIndex < SmallestSize; ++CurrentIndex)
        {
            // Re-enumerate to find the entry at this position; mutation below is
            // immediately followed by break, so the enumerator is never advanced after it.
            int CurrentDictionaryIndex = 0;
            foreach(KeyValuePair<KeyType, ValueType> CurrentElement in CurrentDictionary)
            {
                if(CurrentIndex == CurrentDictionaryIndex)
                {
                    EditorGUILayout.BeginVertical("box");
                    KeyType NewKey = CurrentElement.Key;
                    KeyInspector(KeyPrefix + " " + CurrentIndex, ref NewKey);
                    ValueType NewValue = CurrentElement.Value;
                    if(NewKey != CurrentElement.Key)
                    {
                        // Key object was replaced by the inspector: re-home the value.
                        CurrentDictionary.Remove(CurrentElement.Key);
                        CurrentDictionary.Add(NewKey, NewValue);
                    }
                    InspectableObject ValueInspector = WrapperForValue(NewValue, CurrentIndex);
                    if(ValueInspector != null)
                    {
                        bInspectorHasChangedProperty = ValueInspector.InnerInspectorGUI() || bInspectorHasChangedProperty;
                    }
                    EditorGUILayout.EndVertical();
                    break;
                }
                ++CurrentDictionaryIndex;
            }
        }
        EditorGUI.indentLevel -= 1;
    }
}
/// <summary>
/// Like InspectorGUIDictionaryEmbeddedAndObject, but keys are assigned through a
/// drag-and-drop field (DropHandler) instead of an embedded key inspector.
/// </summary>
public virtual void InspectorGUIDictionaryDropAndObject<KeyType, ValueType>(string DictionaryName, string KeyPrefix, string ValuePrefix, ref Dictionary<KeyType, ValueType> CurrentDictionary, DragAndDropHandler DropHandler, GetEditorWrapper WrapperForValue, bool bReadOnly = false, bool bStartOpen = false)
    where KeyType : XMLSerializable, new()
    where ValueType : XMLSerializable, new()
{
    if(!InspectorArrayExpanded.ContainsKey(DictionaryName))
    {
        InspectorArrayExpanded.Add(DictionaryName, bStartOpen);
    }
    bool bDictionaryExpanded = InspectorArrayExpanded[DictionaryName];
    bDictionaryExpanded = EditorGUILayout.Foldout(bDictionaryExpanded, DictionaryName);
    InspectorArrayExpanded[DictionaryName] = bDictionaryExpanded;
    if(bDictionaryExpanded)
    {
        EditorGUI.indentLevel += 1;
        int NewDictionaryCount = CurrentDictionary.Keys.Count;
        int OldDictionaryCount = NewDictionaryCount;
        bool bCountActuallyChanged = false;
        if(bReadOnly)
        {
            EditorGUILayout.LabelField("Count", CurrentDictionary.Keys.Count.ToString());
        }
        else
        {
            bCountActuallyChanged = InspectorGUIIntWaitForEnter(DictionaryName + "Count", "Count", OldDictionaryCount, out NewDictionaryCount);
        }
        if(bCountActuallyChanged && NewDictionaryCount != CurrentDictionary.Keys.Count)
        {
            bInspectorHasChangedProperty = true;
            if(NewDictionaryCount > CurrentDictionary.Keys.Count)
            {
                for(int CurrentElement = CurrentDictionary.Keys.Count; CurrentElement < NewDictionaryCount; ++CurrentElement)
                {
                    // NOTE(review): two default-constructed keys that compare equal
                    // would make Add throw — presumably KeyType generates unique keys; verify.
                    CurrentDictionary.Add(new KeyType(), new ValueType());
                }
            }
            else
            {
                // BUGFIX: removing entries while foreach-enumerating the same dictionary
                // throws InvalidOperationException. Collect the doomed keys first.
                List<KeyType> KeysToRemove = new List<KeyType>();
                int CurrentIndex = 0;
                foreach(KeyValuePair<KeyType, ValueType> CurrentElement in CurrentDictionary)
                {
                    if(CurrentIndex >= NewDictionaryCount)
                    {
                        KeysToRemove.Add(CurrentElement.Key);
                    }
                    ++CurrentIndex;
                }
                foreach(KeyType KeyToRemove in KeysToRemove)
                {
                    CurrentDictionary.Remove(KeyToRemove);
                }
            }
        }
        int SmallestSize = OldDictionaryCount > NewDictionaryCount ? NewDictionaryCount : OldDictionaryCount;
        for(int CurrentIndex = 0; CurrentIndex < SmallestSize; ++CurrentIndex)
        {
            // Re-enumerate to find the entry at this position; mutation below is
            // immediately followed by break, so the enumerator is never advanced after it.
            int CurrentDictionaryIndex = 0;
            foreach(KeyValuePair<KeyType, ValueType> CurrentElement in CurrentDictionary)
            {
                if(CurrentIndex == CurrentDictionaryIndex)
                {
                    EditorGUILayout.BeginVertical("box");
                    KeyType NewKey = (KeyType)InspectorGUIDragAndDropField(KeyPrefix + " " + CurrentIndex, CurrentElement.Key, DropHandler, CurrentIndex);
                    ValueType NewValue = CurrentElement.Value;
                    if(NewKey != CurrentElement.Key)
                    {
                        // A different key object was dropped in: re-home the value.
                        CurrentDictionary.Remove(CurrentElement.Key);
                        CurrentDictionary.Add(NewKey, NewValue);
                    }
                    InspectableObject ValueInspector = WrapperForValue(NewValue, CurrentIndex);
                    if(ValueInspector != null)
                    {
                        bInspectorHasChangedProperty = ValueInspector.InnerInspectorGUI() || bInspectorHasChangedProperty;
                    }
                    EditorGUILayout.EndVertical();
                    break;
                }
                ++CurrentDictionaryIndex;
            }
        }
        EditorGUI.indentLevel -= 1;
    }
}
// Draws an expandable inspector section for a Dictionary&lt;ListType, float&gt;; keys are
// edited via the supplied KeyInspector delegate and values as plain float fields.
// Fix: shrinking the dictionary no longer removes entries while enumerating it —
// Dictionary enumerators are invalidated by Remove and throw InvalidOperationException.
public virtual void InspectorGUIDictionaryEmbeddedFloat<ListType>(string DictionaryName, string KeyPrefix, string ValuePrefix, ref Dictionary<ListType, float> CurrentDictionary, LabeledInspector<ListType> KeyInspector, bool bReadOnly = false, bool bStartOpen = false) where ListType : XMLSerializable, new()
{
    // Remember the foldout state per dictionary name.
    if(!InspectorArrayExpanded.ContainsKey(DictionaryName))
    {
        InspectorArrayExpanded.Add(DictionaryName, bStartOpen);
    }
    bool bDictionaryExpanded = InspectorArrayExpanded[DictionaryName];
    bDictionaryExpanded = EditorGUILayout.Foldout(bDictionaryExpanded, DictionaryName);
    InspectorArrayExpanded[DictionaryName] = bDictionaryExpanded;
    if(bDictionaryExpanded)
    {
        EditorGUI.indentLevel += 1;
        int NewDictionaryCount = CurrentDictionary.Keys.Count;
        int OldDictionaryCount = NewDictionaryCount;
        bool bCountActuallyChanged = false;
        if(bReadOnly)
        {
            EditorGUILayout.LabelField("Count", CurrentDictionary.Keys.Count.ToString());
        }
        else
        {
            bCountActuallyChanged = InspectorGUIIntWaitForEnter(DictionaryName + "Count", "Count", OldDictionaryCount, out NewDictionaryCount);
        }
        if(bCountActuallyChanged && NewDictionaryCount != CurrentDictionary.Keys.Count)
        {
            bInspectorHasChangedProperty = true;
            if(NewDictionaryCount > CurrentDictionary.Keys.Count)
            {
                // Grow: pad with default-constructed keys mapped to 0.0f.
                for(int CurrentElement = CurrentDictionary.Keys.Count; CurrentElement < NewDictionaryCount; ++CurrentElement)
                {
                    CurrentDictionary.Add(new ListType(), 0.0f);
                }
            }
            else
            {
                // Shrink: gather doomed keys first; removing inside the foreach would
                // invalidate the enumerator.
                List<ListType> KeysToRemove = new List<ListType>();
                int CurrentIndex = 0;
                foreach(KeyValuePair<ListType, float> CurrentElement in CurrentDictionary)
                {
                    if(CurrentIndex >= NewDictionaryCount)
                    {
                        KeysToRemove.Add(CurrentElement.Key);
                    }
                    ++CurrentIndex;
                }
                foreach(ListType KeyToRemove in KeysToRemove)
                {
                    CurrentDictionary.Remove(KeyToRemove);
                }
            }
        }
        int SmallestSize = OldDictionaryCount > NewDictionaryCount ? NewDictionaryCount : OldDictionaryCount;
        for(int CurrentIndex = 0; CurrentIndex < SmallestSize; ++CurrentIndex)
        {
            // No indexer on Dictionary: walk to the CurrentIndex'th entry each pass.
            int CurrentDictionaryIndex = 0;
            foreach(KeyValuePair<ListType, float> CurrentElement in CurrentDictionary)
            {
                if(CurrentIndex == CurrentDictionaryIndex)
                {
                    EditorGUILayout.BeginVertical("box");
                    ListType NewKey = CurrentElement.Key;
                    KeyInspector(KeyPrefix + " " + CurrentIndex, ref NewKey);
                    float FloatValue = CurrentElement.Value;
                    if(NewKey != CurrentElement.Key)
                    {
                        // Re-key: safe only because the break below abandons the enumerator.
                        CurrentDictionary.Remove(CurrentElement.Key);
                        CurrentDictionary.Add(NewKey, FloatValue);
                    }
                    InspectorGUIFloat(ValuePrefix + " " + CurrentIndex, ref FloatValue, bReadOnly);
                    CurrentDictionary[NewKey] = FloatValue;
                    EditorGUILayout.EndVertical();
                    break;
                }
                ++CurrentDictionaryIndex;
            }
        }
        EditorGUI.indentLevel -= 1;
    }
}
// Shows a toggle (or a read-only label) for a bool and marks the inspector dirty
// when the user changes the value.
public virtual void InspectorGUIBool(string NameOfField, ref bool FieldValue, bool bReadOnly = false)
{
    bool OriginalValue = FieldValue;
    if(bReadOnly)
    {
        EditorGUILayout.LabelField(NameOfField, FieldValue.ToString());
    }
    else
    {
        FieldValue = EditorGUILayout.Toggle(NameOfField, FieldValue);
    }
    bInspectorHasChangedProperty |= (OriginalValue != FieldValue);
}
// Shows an int field (or a read-only label) and marks the inspector dirty when the
// user changes the value.
public virtual void InspectorGUIInt(string NameOfField, ref int FieldValue, bool bReadOnly = false)
{
    int OriginalValue = FieldValue;
    if(bReadOnly)
    {
        EditorGUILayout.LabelField(NameOfField, FieldValue.ToString());
    }
    else
    {
        FieldValue = EditorGUILayout.IntField(NameOfField, FieldValue);
    }
    bInspectorHasChangedProperty |= (OriginalValue != FieldValue);
}
// Shows a float field (or a read-only label) and marks the inspector dirty when the
// user changes the value.
public virtual void InspectorGUIFloat(string NameOfField, ref float FieldValue, bool bReadOnly = false)
{
    float OriginalValue = FieldValue;
    if(bReadOnly)
    {
        EditorGUILayout.LabelField(NameOfField, FieldValue.ToString());
    }
    else
    {
        FieldValue = EditorGUILayout.FloatField(NameOfField, FieldValue);
    }
    bInspectorHasChangedProperty |= (OriginalValue != FieldValue);
}
// Shows a Vector3 field (or a read-only "X: .. Y: .. Z: .." label) and marks the
// inspector dirty when the user changes the value.
public virtual void InspectorGUIVector3(string NameOfField, ref Vector3 FieldValue, bool bReadOnly = false)
{
    Vector3 OriginalValue = FieldValue;
    if(bReadOnly)
    {
        EditorGUILayout.LabelField(NameOfField, "X: " + FieldValue.x + " Y: " + FieldValue.y + " Z: " + FieldValue.z);
    }
    else
    {
        FieldValue = EditorGUILayout.Vector3Field(NameOfField, FieldValue);
    }
    bInspectorHasChangedProperty |= (OriginalValue != FieldValue);
}
// Signature for parameterless callbacks fired by InspectorGUIButton.
public delegate void GenericButtonCallback();
// Draws a button and invokes ButtonFunction on the frame it is clicked.
public virtual void InspectorGUIButton(string ButtonText, GenericButtonCallback ButtonFunction)
{
    if(GUILayout.Button(ButtonText))
    {
        ButtonFunction();
    }
}
// Handler signature: given the newly selected object, updates ObjectKey (the string
// the owner serializes) to refer to it. NewObjectKey is an opaque key supplied by
// the caller of InspectorGUIDragAndDropObjectField.
public delegate void DragAndDropObjectHandler<ObjectType>(ref string ObjectKey, ObjectType NewObject, string NewObjectKey);
// Draws a Unity object picker for ObjectType. The chosen object is written back to
// ObjectReference; NewObjectHandler maps it to a serializable string key, and the
// inspector is flagged dirty when that key changes.
public virtual void InspectorGUIDragAndDropObjectField<ObjectType>(string FieldLabel, ref string ObjectStringReference, ref ObjectType ObjectReference, DragAndDropObjectHandler<ObjectType> NewObjectHandler, string NewObjectKey)
    where ObjectType : Object
{
    EditorGUILayout.BeginHorizontal();
    EditorGUILayout.PrefixLabel(FieldLabel);
    // Last argument 'true' allows scene objects as well as assets.
    ObjectType NewObject = (ObjectType)EditorGUILayout.ObjectField(ObjectReference, typeof(ObjectType), true);
    EditorGUILayout.EndHorizontal();
    ObjectReference = NewObject;
    string OldString = ObjectStringReference;
    NewObjectHandler(ref ObjectStringReference, ObjectReference, NewObjectKey);
    if(OldString != ObjectStringReference)
    {
        bInspectorHasChangedProperty = true;
    }
}
// Handler signature mirroring DragAndDropObjectHandler, specialised for AudioClip.
public delegate void DragAndDropAudioClipHandler(ref string ObjectKey, AudioClip NewObject, string NewObjectKey);
// Draws an AudioClip picker: an object field by default, or a drag-and-drop box when
// USE_OBJECT_FIELD is defined. NewObjectHandler converts the chosen clip to a
// serializable string key; the inspector is flagged dirty when that key changes.
// Fixes in the USE_OBJECT_FIELD branch: NewObjectHandler was invoked without the
// required NewObjectKey argument (would not compile with USE_OBJECT_FIELD defined),
// and the AudioClip type test used GetType().IsAssignableFrom backwards, which could
// accept a non-AudioClip and then throw InvalidCastException.
public virtual void InspectorGUIDragAndDropAudioClip(string FieldLabel, ref string ObjectStringReference, ref AudioClip ObjectReference, DragAndDropAudioClipHandler NewObjectHandler, string NewObjectKey)
{
#if !USE_OBJECT_FIELD
    EditorGUILayout.BeginHorizontal();
    EditorGUILayout.PrefixLabel(FieldLabel);
    // 'false' disallows scene objects; clips must come from project assets.
    AudioClip NewClip = (AudioClip)EditorGUILayout.ObjectField(ObjectReference, typeof(AudioClip), false);
    EditorGUILayout.EndHorizontal();
    ObjectReference = NewClip;
    string OldString = ObjectStringReference;
    NewObjectHandler(ref ObjectStringReference, ObjectReference, NewObjectKey);
    if(OldString != ObjectStringReference)
    {
        bInspectorHasChangedProperty = true;
    }
#else
    int IndentLevel = EditorGUI.indentLevel;
    EditorGUILayout.BeginHorizontal();
    EditorGUI.indentLevel = IndentLevel;
    EditorGUILayout.PrefixLabel(FieldLabel);
    GUILayout.Box(ObjectReference != null ? ObjectReference.ToString() : "", GUILayout.ExpandWidth(true));
    Rect DropBoxRect = GUILayoutUtility.GetLastRect();
    // Only react when the cursor is over the box we just drew.
    if(DropBoxRect.Contains(Event.current.mousePosition))
    {
        EventType TypeOfEvent = Event.current.type;
        if(TypeOfEvent == EventType.DragUpdated || TypeOfEvent == EventType.DragPerform)
        {
            DragAndDrop.visualMode = DragAndDropVisualMode.Copy;
            if(TypeOfEvent == EventType.DragPerform)
            {
                DragAndDrop.AcceptDrag();
                AudioClip NewObject = null;
                Object NewObjectRef = DragAndDrop.objectReferences[0];
                // Fixed type check: was NewObjectRef.GetType().IsAssignableFrom(typeof(AudioClip)),
                // which asks the opposite question and permitted an invalid cast.
                if(NewObjectRef is AudioClip)
                {
                    NewObject = (AudioClip)NewObjectRef;
                }
                if(NewObject != null)
                {
                    ObjectReference = NewObject;
                }
                string OldString = ObjectStringReference;
                // Fixed call: pass NewObjectKey, as DragAndDropAudioClipHandler requires.
                NewObjectHandler(ref ObjectStringReference, ObjectReference, NewObjectKey);
                if(OldString != ObjectStringReference)
                {
                    bInspectorHasChangedProperty = true;
                }
            }
            Event.current.Use();
        }
    }
    EditorGUILayout.EndHorizontal();
#endif
}
// Handler signature: resolves a drag-and-drop payload (the first dragged asset path)
// to an XMLSerializable instance, or returns null to reject the drop. ItemIndex
// identifies which list element the drop landed on (-1 for scalar fields).
public delegate XMLSerializable DragAndDropHandler(string DragAndDropData, int ItemIndex);
// Draws a labeled drop-target box showing CurrentValue's display name. Accepts a
// drag-and-drop by forwarding the first dragged path to HandlerFunction; returns the
// resolved object, or the original CurrentValue when nothing valid was dropped.
public virtual XMLSerializable InspectorGUIDragAndDropField(string FieldLabel, XMLSerializable CurrentValue, DragAndDropHandler HandlerFunction, int Index = -1)
{
    XMLSerializable NewObject = CurrentValue;
    EditorGUILayout.BeginHorizontal();
    EditorGUILayout.PrefixLabel(FieldLabel);
    GUILayout.Box(CurrentValue != null ? CurrentValue.EditorGetDisplayName() : "", GUILayout.ExpandWidth(true));
    Rect DropBoxRect = GUILayoutUtility.GetLastRect();
    // Only react when the cursor is over the box we just drew.
    if(DropBoxRect.Contains(Event.current.mousePosition))
    {
        EventType TypeOfEvent = Event.current.type;
        if(TypeOfEvent == EventType.DragUpdated || TypeOfEvent == EventType.DragPerform)
        {
            // Show the copy cursor while hovering; perform the drop on release.
            DragAndDrop.visualMode = DragAndDropVisualMode.Copy;
            if(TypeOfEvent == EventType.DragPerform)
            {
                DragAndDrop.AcceptDrag();
                // Only the first dragged path is considered.
                NewObject = HandlerFunction(DragAndDrop.paths[0], Index);
                if(NewObject == null)
                {
                    // Handler rejected the drop: keep the previous value.
                    NewObject = CurrentValue;
                }
            }
            Event.current.Use();
        }
    }
    EditorGUILayout.EndHorizontal();
    return NewObject;
}
// Draws an expandable, re-orderable list of strings. Each element gets a text field
// plus "Move up"/"Move down" buttons. The count is editable only when both bReadOnly
// and bReadOnlyCount are false.
public virtual void InspectorGUIOrderedStringList(string ListName, string ItemPrefix, ref List<string> CurrentList, bool bReadOnly = false, bool bReadOnlyCount = true, bool bStartOpen = false)
{
    // Remember the foldout state per list name.
    if(!InspectorArrayExpanded.ContainsKey(ListName))
    {
        InspectorArrayExpanded.Add(ListName, bStartOpen);
    }
    bool bListExpanded = InspectorArrayExpanded[ListName];
    bListExpanded = EditorGUILayout.Foldout(bListExpanded, ListName);
    InspectorArrayExpanded[ListName] = bListExpanded;
    if(bListExpanded)
    {
        EditorGUI.indentLevel += 1;
        int NewListCount = CurrentList.Count;
        int OldListCount = NewListCount;
        bool bCountActuallyChanged = false;
        if(bReadOnly || bReadOnlyCount)
        {
            EditorGUILayout.LabelField("Count", CurrentList.Count.ToString());
        }
        else
        {
            bCountActuallyChanged = InspectorGUIIntWaitForEnter(ListName + "Count", "Count", OldListCount, out NewListCount);
        }
        if(bCountActuallyChanged && NewListCount != CurrentList.Count)
        {
            bInspectorHasChangedProperty = true;
            if(NewListCount > CurrentList.Count)
            {
                // Grow: pad with empty strings.
                for(int CurrentElement = CurrentList.Count; CurrentElement < NewListCount; ++CurrentElement)
                {
                    CurrentList.Add("");
                }
            }
            else
            {
                // Shrink: repeatedly remove at NewListCount; RemoveAt shrinks Count, so
                // this loop (with no increment) terminates when Count == NewListCount.
                for(int CurrentElement = NewListCount; CurrentElement < CurrentList.Count;)
                {
                    CurrentList.RemoveAt(CurrentElement);
                }
            }
        }
        // Indices of the element whose move button was pressed this frame (-1 = none).
        int MoveUpIndex = -1;
        int MoveDownIndex = -1;
        int SmallestSize = OldListCount > NewListCount ? NewListCount : OldListCount;
        for(int CurrentIndex = 0; CurrentIndex < SmallestSize; ++CurrentIndex)
        {
            EditorGUILayout.BeginVertical("box");
            // InspectorGUIString needs a ref; list indexers cannot be passed by ref.
            string TempRef = CurrentList[CurrentIndex];
            InspectorGUIString(ItemPrefix + " " + CurrentIndex, ref TempRef, bReadOnly);
            CurrentList[CurrentIndex] = TempRef;
            EditorGUILayout.BeginHorizontal();
            if(GUILayout.Button("Move up"))
            {
                MoveUpIndex = CurrentIndex;
            }
            if(GUILayout.Button("Move down"))
            {
                MoveDownIndex = CurrentIndex;
            }
            EditorGUILayout.EndHorizontal();
            EditorGUILayout.EndVertical();
        }
        // Move the element at MoveUpIndex one slot earlier by rebuilding the list.
        // NOTE(review): the rebuilt list has exactly SmallestSize elements; if the count
        // was changed in the same frame as a move, elements past SmallestSize would be
        // dropped — confirm this combination cannot occur in practice.
        if(MoveUpIndex > 0 && MoveUpIndex < SmallestSize)
        {
            string ItemToMove = CurrentList[MoveUpIndex];
            List<string> NewList = new List<string>();
            CurrentList.RemoveAt(MoveUpIndex);
            for(int NewItem = 0; NewItem < SmallestSize; ++NewItem)
            {
                if(NewItem == MoveUpIndex-1)
                {
                    // Insert the moved item one slot earlier.
                    NewList.Add(ItemToMove);
                }
                else if(NewItem >= MoveUpIndex)
                {
                    // Slots at/after the removal point are shifted down by one.
                    NewList.Add(CurrentList[NewItem - 1]);
                }
                else
                {
                    NewList.Add(CurrentList[NewItem]);
                }
            }
            CurrentList = NewList;
            bInspectorHasChangedProperty = true;
        }
        // Move the element at MoveDownIndex one slot later, same rebuild technique.
        if(MoveDownIndex > -1 && MoveDownIndex < SmallestSize - 1)
        {
            string ItemToMove = CurrentList[MoveDownIndex];
            List<string> NewList = new List<string>();
            CurrentList.RemoveAt(MoveDownIndex);
            for(int NewItem = 0; NewItem < SmallestSize; ++NewItem)
            {
                if(NewItem == MoveDownIndex+1)
                {
                    // Insert the moved item one slot later.
                    NewList.Add(ItemToMove);
                }
                else if(NewItem > MoveDownIndex+1)
                {
                    // Slots after the insertion point come from one index earlier.
                    NewList.Add(CurrentList[NewItem - 1]);
                }
                else
                {
                    NewList.Add(CurrentList[NewItem]);
                }
            }
            CurrentList = NewList;
            bInspectorHasChangedProperty = true;
        }
        EditorGUI.indentLevel -= 1;
    }
}
// Default inspector entry point for an entity: draws the main widgets, then the
// post-draw pass.
public virtual void EntityOnInspectorGUI(object Instance)
{
    EntityDrawInspectorWidgets(Instance);
    EntityPostDrawInspectorWidgets(Instance);
}
// Override to draw the entity's main inspector widgets. Base implementation is
// intentionally empty.
public virtual void EntityDrawInspectorWidgets(object Instance)
{
}
// Override to draw widgets after the main pass. Base implementation is intentionally
// empty.
public virtual void EntityPostDrawInspectorWidgets(object Instance)
{
}
// Draws the save button, enabled only while there are unsaved inspector changes.
// Clicking invokes OnInspectorGUIClickedSaveButton and clears the dirty flag.
public virtual void OnInspectorGUIDrawSaveButton(string SaveText)
{
    GUI.enabled = bInspectorHasChangedProperty;
    if(GUILayout.Button(SaveText))
    {
        OnInspectorGUIClickedSaveButton();
        bInspectorHasChangedProperty = false;
    }
    // Restore the global enabled state for whatever is drawn next.
    GUI.enabled = true;
}
// Hook invoked when the user clicks the button drawn by OnInspectorGUIDrawSaveButton.
// Base implementation does nothing; override to persist the inspected object.
public virtual void OnInspectorGUIClickedSaveButton()
{
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.IO;
using Lucene.Net.Analysis.Tokenattributes;
using Lucene.Net.Util;
using NUnit.Framework;
using Attribute = Lucene.Net.Util.Attribute;
using Payload = Lucene.Net.Index.Payload;
using TestSimpleAttributeImpls = Lucene.Net.Analysis.Tokenattributes.TestSimpleAttributeImpls;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Analysis
{
[TestFixture]
public class TestToken:LuceneTestCase
{
    // Parameterless constructor used by NUnit.
    public TestToken()
    {
    }
    // Named constructor retained for parity with the Java LuceneTestCase API.
    public TestToken(System.String name):base(name)
    {
    }
    // Verifies each Token constructor sets term/type/flags/offsets as documented, and
    // that SetTermBuffer copies the caller's array rather than aliasing it.
    [Test]
    public virtual void TestCtor()
    {
        Token t = new Token();
        char[] content = "hello".ToCharArray();
        t.SetTermBuffer(content, 0, content.Length);
        // The token must own its buffer, not share the caller's array.
        Assert.AreNotEqual(t.TermBuffer(), content);
        Assert.AreEqual("hello", t.Term);
        Assert.AreEqual("word", t.Type);
        Assert.AreEqual(0, t.Flags);
        t = new Token(6, 22);
        t.SetTermBuffer(content, 0, content.Length);
        Assert.AreEqual("hello", t.Term);
        Assert.AreEqual("(hello,6,22)", t.ToString());
        Assert.AreEqual("word", t.Type);
        Assert.AreEqual(0, t.Flags);
        t = new Token(6, 22, 7);
        t.SetTermBuffer(content, 0, content.Length);
        Assert.AreEqual("hello", t.Term);
        Assert.AreEqual("(hello,6,22)", t.ToString());
        Assert.AreEqual(7, t.Flags);
        t = new Token(6, 22, "junk");
        t.SetTermBuffer(content, 0, content.Length);
        Assert.AreEqual("hello", t.Term);
        Assert.AreEqual("(hello,6,22,type=junk)", t.ToString());
        Assert.AreEqual(0, t.Flags);
    }
    // ResizeTermBuffer must grow the buffer on demand without losing the term text.
    [Test]
    public virtual void TestResize()
    {
        Token t = new Token();
        char[] content = "hello".ToCharArray();
        t.SetTermBuffer(content, 0, content.Length);
        for (int i = 0; i < 2000; i++)
        {
            t.ResizeTermBuffer(i);
            Assert.IsTrue(i <= t.TermBuffer().Length);
            Assert.AreEqual("hello", t.Term);
        }
    }
    // Exercises buffer growth policy: doubling content (char[] and string variants) and
    // slow one-char-at-a-time growth. The expected capacities pin the growth algorithm.
    [Test]
    public virtual void TestGrow()
    {
        Token t = new Token();
        System.Text.StringBuilder buf = new System.Text.StringBuilder("ab");
        for (int i = 0; i < 20; i++)
        {
            char[] content = buf.ToString().ToCharArray();
            t.SetTermBuffer(content, 0, content.Length);
            Assert.AreEqual(buf.Length, t.TermLength());
            Assert.AreEqual(buf.ToString(), t.Term);
            buf.Append(buf.ToString());
        }
        Assert.AreEqual(1048576, t.TermLength());
        Assert.AreEqual(1179654, t.TermBuffer().Length);
        // now as a string, first variant
        t = new Token();
        buf = new System.Text.StringBuilder("ab");
        for (int i = 0; i < 20; i++)
        {
            System.String content = buf.ToString();
            t.SetTermBuffer(content, 0, content.Length);
            Assert.AreEqual(content.Length, t.TermLength());
            Assert.AreEqual(content, t.Term);
            buf.Append(content);
        }
        Assert.AreEqual(1048576, t.TermLength());
        Assert.AreEqual(1179654, t.TermBuffer().Length);
        // now as a string, second variant
        t = new Token();
        buf = new System.Text.StringBuilder("ab");
        for (int i = 0; i < 20; i++)
        {
            System.String content = buf.ToString();
            t.SetTermBuffer(content);
            Assert.AreEqual(content.Length, t.TermLength());
            Assert.AreEqual(content, t.Term);
            buf.Append(content);
        }
        Assert.AreEqual(1048576, t.TermLength());
        Assert.AreEqual(1179654, t.TermBuffer().Length);
        // Test for slow growth to a long term
        t = new Token();
        buf = new System.Text.StringBuilder("a");
        for (int i = 0; i < 20000; i++)
        {
            System.String content = buf.ToString();
            t.SetTermBuffer(content);
            Assert.AreEqual(content.Length, t.TermLength());
            Assert.AreEqual(content, t.Term);
            buf.Append("a");
        }
        Assert.AreEqual(20000, t.TermLength());
        Assert.AreEqual(20167, t.TermBuffer().Length);
        // Test for slow growth to a long term (duplicated in the original port; kept
        // verbatim to stay in sync with the upstream test).
        t = new Token();
        buf = new System.Text.StringBuilder("a");
        for (int i = 0; i < 20000; i++)
        {
            System.String content = buf.ToString();
            t.SetTermBuffer(content);
            Assert.AreEqual(content.Length, t.TermLength());
            Assert.AreEqual(content, t.Term);
            buf.Append("a");
        }
        Assert.AreEqual(20000, t.TermLength());
        Assert.AreEqual(20167, t.TermBuffer().Length);
    }
    // ToString must reflect the current term buffer, not the text the token was
    // constructed with.
    [Test]
    public virtual void TestToString()
    {
        char[] b = new char[]{'a', 'l', 'o', 'h', 'a'};
        Token t = new Token("", 0, 5);
        t.SetTermBuffer(b, 0, 5);
        Assert.AreEqual("(aloha,0,5)", t.ToString());
        t.SetTermBuffer("hi there");
        Assert.AreEqual("(hi there,0,5)", t.ToString());
    }
    // Token equality must compare term buffer contents, not buffer identity.
    [Test]
    public virtual void TestTermBufferEquals()
    {
        Token t1a = new Token();
        char[] content1a = "hello".ToCharArray();
        t1a.SetTermBuffer(content1a, 0, 5);
        Token t1b = new Token();
        char[] content1b = "hello".ToCharArray();
        t1b.SetTermBuffer(content1b, 0, 5);
        Token t2 = new Token();
        char[] content2 = "hello2".ToCharArray();
        t2.SetTermBuffer(content2, 0, 6);
        Assert.IsTrue(t1a.Equals(t1b));
        Assert.IsFalse(t1a.Equals(t2));
        Assert.IsFalse(t2.Equals(t1b));
    }
    // Mixing string and char[] SetTermBuffer calls must keep Term consistent; Term
    // reads through to the live buffer, so mutating the buffer changes Term.
    [Test]
    public virtual void TestMixedStringArray()
    {
        Token t = new Token("hello", 0, 5);
        Assert.AreEqual(t.TermLength(), 5);
        Assert.AreEqual(t.Term, "hello");
        t.SetTermBuffer("hello2");
        Assert.AreEqual(t.TermLength(), 6);
        Assert.AreEqual(t.Term, "hello2");
        t.SetTermBuffer("hello3".ToCharArray(), 0, 6);
        Assert.AreEqual(t.Term, "hello3");
        // Token does not copy-on-read: writing into TermBuffer() changes Term.
        char[] buffer = t.TermBuffer();
        buffer[1] = 'o';
        Assert.AreEqual(t.Term, "hollo3");
    }
    // Clone must deep-copy the term buffer and the payload.
    [Test]
    public virtual void TestClone()
    {
        Token t = new Token(0, 5);
        char[] content = "hello".ToCharArray();
        t.SetTermBuffer(content, 0, 5);
        char[] buf = t.TermBuffer();
        Token copy = (Token) TestSimpleAttributeImpls.AssertCloneIsEqual(t);
        Assert.AreEqual(t.Term, copy.Term);
        Assert.AreNotSame(buf, copy.TermBuffer());
        Payload pl = new Payload(new byte[]{1, 2, 3, 4});
        t.Payload = pl;
        copy = (Token) TestSimpleAttributeImpls.AssertCloneIsEqual(t);
        Assert.AreEqual(pl, copy.Payload);
        Assert.AreNotSame(pl, copy.Payload);
    }
    // CopyTo must deep-copy the term buffer and the payload, including the empty case.
    [Test]
    public virtual void TestCopyTo()
    {
        Token t = new Token();
        Token copy = (Token) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
        Assert.AreEqual("", t.Term);
        Assert.AreEqual("", copy.Term);
        t = new Token(0, 5);
        char[] content = "hello".ToCharArray();
        t.SetTermBuffer(content, 0, 5);
        char[] buf = t.TermBuffer();
        copy = (Token) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
        Assert.AreEqual(t.Term, copy.Term);
        Assert.AreNotSame(buf, copy.TermBuffer());
        Payload pl = new Payload(new byte[]{1, 2, 3, 4});
        t.Payload = pl;
        copy = (Token) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
        Assert.AreEqual(pl, copy.Payload);
        Assert.AreNotSame(pl, copy.Payload);
    }
    // A do-nothing attribute used to prove the attribute factory falls back to the
    // normal implementation lookup for interfaces Token does not implement.
    public interface ISenselessAttribute : IAttribute {}
    public class SenselessAttribute : Attribute, ISenselessAttribute
    {
        public override void CopyTo(Attribute target)
        { }
        public override void Clear()
        { }
        public override bool Equals(object other)
        {
            return other is SenselessAttribute;
        }
        public override int GetHashCode()
        {
            return 0;
        }
    }
    // Token.TOKEN_ATTRIBUTE_FACTORY must back all the standard token attributes with a
    // single Token instance, while unrelated attributes get their own implementation.
    [Test]
    public void TestTokenAttributeFactory()
    {
        TokenStream ts = new WhitespaceTokenizer(Token.TOKEN_ATTRIBUTE_FACTORY, new StringReader("foo, bar"));
        // Fixed copy-paste error: this message previously referred to TypeAttribute.
        Assert.IsTrue(ts.AddAttribute<ISenselessAttribute>() is SenselessAttribute,
            "SenselessAttribute is not implemented by SenselessAttribute");
        Assert.IsTrue(ts.AddAttribute<ITermAttribute>() is Token, "TermAttribute is not implemented by Token");
        Assert.IsTrue(ts.AddAttribute<IOffsetAttribute>() is Token, "OffsetAttribute is not implemented by Token");
        Assert.IsTrue(ts.AddAttribute<IFlagsAttribute>() is Token, "FlagsAttribute is not implemented by Token");
        Assert.IsTrue(ts.AddAttribute<IPayloadAttribute>() is Token, "PayloadAttribute is not implemented by Token");
        Assert.IsTrue(ts.AddAttribute<IPositionIncrementAttribute>() is Token, "PositionIncrementAttribute is not implemented by Token");
        Assert.IsTrue(ts.AddAttribute<ITypeAttribute>() is Token, "TypeAttribute is not implemented by Token");
    }
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) Under one or more
contributor license agreements. See the NOTICE file distributed with
this work for Additional information regarding copyright ownership.
The ASF licenses this file to You Under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed Under the License is distributed on an "AS Is" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations Under the License.
==================================================================== */
namespace NPOI.SS.Formula.Eval
{
using System;
using System.Text.RegularExpressions;
using System.Globalization;
/**
* Provides functionality for evaluating arguments to functions and operators.
*
* @author Josh Micich
*/
internal class OperandResolver
{
    // Based on the regular expression defined in the JavaDoc of java.lang.Double#valueOf,
    // modified to remove support for NaN, Infinity, hexadecimal and floating type
    // suffixes. Currently unused by ParseDouble; retained to document the accepted format.
    private const String Digits = "\\d+";
    private const String Exp = "[eE][+-]?" + Digits;
    private const String fpRegex =
        ("[\\x00-\\x20]*" +
        "[+-]?(" +
        "(((" + Digits + "(\\.)?(" + Digits + "?)(" + Exp + ")?)|" +
        "(\\.(" + Digits + ")(" + Exp + ")?))))" +
        "[\\x00-\\x20]*");
    private OperandResolver()
    {
        // no instances of this class
    }
    /**
     * Retrieves a single value from a variety of different argument types according to standard
     * Excel rules. Does not perform any type conversion.
     * @param arg the Evaluated argument as passed to the function or operator.
     * @param srcCellRow used when arg is a single column AreaRef
     * @param srcCellCol used when arg is a single row AreaRef
     * @return a <c>NumberEval</c>, <c>StringEval</c>, <c>BoolEval</c> or <c>BlankEval</c>.
     * Never <c>null</c> or <c>ErrorEval</c>.
     * @throws EvaluationException(#VALUE!) if srcCellRow or srcCellCol do not properly index into
     * an AreaEval. If the actual value retrieved is an ErrorEval, a corresponding
     * EvaluationException is thrown.
     */
    public static ValueEval GetSingleValue(ValueEval arg, int srcCellRow, int srcCellCol)
    {
        ValueEval result;
        if (arg is RefEval)
        {
            // Dereference a single-cell reference.
            result = ((RefEval)arg).InnerValueEval;
        }
        else if (arg is AreaEval)
        {
            // Pick one cell from the area using the calling cell's coordinates.
            result = ChooseSingleElementFromArea((AreaEval)arg, srcCellRow, srcCellCol);
        }
        else
        {
            result = arg;
        }
        if (result is ErrorEval)
        {
            throw new EvaluationException((ErrorEval)result);
        }
        return result;
    }
    /**
     * Implements (some perhaps not well known) Excel functionality to select a single cell from an
     * area depending on the coordinates of the calling cell. Here is an example demonstrating
     * both selection from a single row area and a single column area in the same formula.
     *
     * <table border="1" cellpAdding="1" cellspacing="1" summary="sample spReadsheet">
     * <tr><th>&nbsp;</th><th>&nbsp;A&nbsp;</th><th>&nbsp;B&nbsp;</th><th>&nbsp;C&nbsp;</th><th>&nbsp;D&nbsp;</th></tr>
     * <tr><th>1</th><td>15</td><td>20</td><td>25</td><td>&nbsp;</td></tr>
     * <tr><th>2</th><td>&nbsp;</td><td>&nbsp;</td><td>&nbsp;</td><td>200</td></tr>
     * <tr><th>3</th><td>&nbsp;</td><td>&nbsp;</td><td>&nbsp;</td><td>300</td></tr>
     * <tr><th>3</th><td>&nbsp;</td><td>&nbsp;</td><td>&nbsp;</td><td>400</td></tr>
     * </table>
     *
     * If the formula "=1000+A1:B1+D2:D3" is put into the 9 cells from A2 to C4, the spReadsheet
     * will look like this:
     *
     * <table border="1" cellpAdding="1" cellspacing="1" summary="sample spReadsheet">
     * <tr><th>&nbsp;</th><th>&nbsp;A&nbsp;</th><th>&nbsp;B&nbsp;</th><th>&nbsp;C&nbsp;</th><th>&nbsp;D&nbsp;</th></tr>
     * <tr><th>1</th><td>15</td><td>20</td><td>25</td><td>&nbsp;</td></tr>
     * <tr><th>2</th><td>1215</td><td>1220</td><td>#VALUE!</td><td>200</td></tr>
     * <tr><th>3</th><td>1315</td><td>1320</td><td>#VALUE!</td><td>300</td></tr>
     * <tr><th>4</th><td>#VALUE!</td><td>#VALUE!</td><td>#VALUE!</td><td>400</td></tr>
     * </table>
     *
     * Note that the row area (A1:B1) does not include column C and the column area (D2:D3) does
     * not include row 4, so the values in C1(=25) and D4(=400) are not accessible to the formula
     * as written, but in the 4 cells A2:B3, the row and column selection works ok.<p/>
     *
     * The same concept is extended to references across sheets, such that even multi-row,
     * multi-column areas can be useful.<p/>
     *
     * Of course with carefully (or carelessly) chosen parameters, cyclic references can occur and
     * hence this method <b>can</b> throw a 'circular reference' EvaluationException. Note that
     * this method does not attempt to detect cycles. Every cell in the specified Area <c>ae</c>
     * has already been Evaluated prior to this method call. Any cell (or cell<b>s</b>) part of
     * <c>ae</c> that would incur a cyclic reference error if selected by this method, will
     * already have the value <c>ErrorEval.CIRCULAR_REF_ERROR</c> upon entry to this method. It
     * is assumed logic exists elsewhere to produce this behaviour.
     *
     * @return whatever the selected cell's Evaluated value Is. Never <c>null</c>. Never
     * <c>ErrorEval</c>.
     * @throws EvaluationException if there is a problem with indexing into the area, or if the
     * Evaluated cell has an error.
     */
    public static ValueEval ChooseSingleElementFromArea(AreaEval ae,
        int srcCellRow, int srcCellCol)
    {
        ValueEval result = ChooseSingleElementFromAreaInternal(ae, srcCellRow, srcCellCol);
        if (result is ErrorEval)
        {
            throw new EvaluationException((ErrorEval)result);
        }
        return result;
    }
    /**
     * @return possibly <c>ErrorEval</c>, and <c>null</c>
     */
    private static ValueEval ChooseSingleElementFromAreaInternal(AreaEval ae,
        int srcCellRow, int srcCellCol)
    {
        // Circular references are not detected here (see ChooseSingleElementFromArea doc):
        // any cell of 'ae' that would incur a cyclic reference error when selected already
        // holds ErrorEval.CIRCULAR_REF_ERROR on entry. Cycle detection of all kinds lives
        // in EvaluationCycleDetector / HSSFFormulaEvaluator.
        if (ae.IsColumn)
        {
            if (ae.IsRow)
            {
                // Single-cell area: trivially select that cell.
                return ae.GetRelativeValue(0, 0);
            }
            if (!ae.ContainsRow(srcCellRow))
            {
                throw EvaluationException.InvalidValue();
            }
            // Single-column area: select the row matching the calling cell.
            return ae.GetAbsoluteValue(srcCellRow, ae.FirstColumn);
        }
        if (!ae.IsRow)
        {
            // Multi-column, multi-row area: select the element at the calling cell's own
            // coordinates (matches Excel/POI behaviour; previously this incorrectly
            // returned the area's first cell, ignoring srcCellRow/srcCellCol).
            if (ae.ContainsRow(srcCellRow) && ae.ContainsColumn(srcCellCol))
            {
                return ae.GetAbsoluteValue(srcCellRow, srcCellCol);
            }
            throw EvaluationException.InvalidValue();
        }
        if (!ae.ContainsColumn(srcCellCol))
        {
            throw EvaluationException.InvalidValue();
        }
        // Single-row area: select the column matching the calling cell.
        return ae.GetAbsoluteValue(ae.FirstRow, srcCellCol);
    }
    /**
     * Applies some conversion rules if the supplied value is not already an integer.<br/>
     * Value is first Coerced to a <c>double</c> ( See <c>CoerceValueToDouble()</c> ).<p/>
     *
     * Excel typically Converts doubles to integers by truncating toward negative infinity.<br/>
     * The equivalent java code Is:<br/>
     * <c>return (int)Math.floor(d);</c><br/>
     * <b>not</b>:<br/>
     * <c>return (int)d; // wrong - rounds toward zero</c>
     *
     */
    public static int CoerceValueToInt(ValueEval ev)
    {
        if (ev == BlankEval.instance)
        {
            return 0;
        }
        double d = CoerceValueToDouble(ev);
        // Note - the standard java type conversion from double to int truncates toward zero.
        // but Math.floor() truncates toward negative infinity
        return (int)Math.Floor(d);
    }
    /**
     * Applies some conversion rules if the supplied value is not already a number.
     * Note - <c>BlankEval</c> is converted to 0.0 here; other callers may need to
     * distinguish blanks themselves.
     * @param ev must be a <c>NumberEval</c>, <c>StringEval</c>, <c>BoolEval</c> or <c>BlankEval</c>
     * @return actual, Parsed or interpreted double value (respectively).
     * @throws EvaluationException(#VALUE!) only if a StringEval is supplied and cannot be Parsed
     * as a double (See <c>ParseDouble()</c> for allowable formats).
     * @throws Exception if the supplied parameter is not <c>NumberEval</c>,
     * <c>StringEval</c>, <c>BoolEval</c> or <c>BlankEval</c>
     */
    public static double CoerceValueToDouble(ValueEval ev)
    {
        if (ev == BlankEval.instance)
        {
            return 0.0;
        }
        if (ev is NumericValueEval)
        {
            // this also handles bools
            return ((NumericValueEval)ev).NumberValue;
        }
        if (ev is StringEval)
        {
            double dd = ParseDouble(((StringEval)ev).StringValue);
            if (double.IsNaN(dd))
            {
                throw EvaluationException.InvalidValue();
            }
            return dd;
        }
        throw new Exception("Unexpected arg eval type (" + ev.GetType().Name + ")");
    }
    /**
     * Converts a string to a double using standard rules that Excel would use.<br/>
     * Tolerates leading and trailing spaces.<p/>
     *
     * Some examples:<br/>
     * " 123 " -&gt; 123.0<br/>
     * ".123" -&gt; 0.123<br/>
     * These not supported yet:<br/>
     * " $ 1,000.00 " -&gt; 1000.0<br/>
     * "$1.25E4" -&gt; 12500.0<br/>
     * "5**2" -&gt; 500<br/>
     * "250%" -&gt; 2.5<br/>
     *
     * @param pText the text to parse
     * @return <c>double.NaN</c> if the specified text cannot be Parsed as a number
     */
    public static double ParseDouble(String pText)
    {
        // NOTE(review): parsing uses CurrentCulture, so decimal/group separators are
        // locale-dependent; behaviour intentionally left unchanged here.
        try
        {
            double ret = double.Parse(pText, CultureInfo.CurrentCulture);
            // Excel has no infinity literal; treat over-range input as "not a number".
            if (double.IsInfinity(ret))
                return double.NaN;
            return ret;
        }
        catch (Exception)
        {
            // NaN is this method's "cannot parse" sentinel (checked via double.IsNaN).
            return double.NaN;
        }
    }
    /**
     * @param ve must be a <c>NumberEval</c>, <c>StringEval</c>, <c>BoolEval</c>, or <c>BlankEval</c>
     * @return the Converted string value. never <c>null</c>
     */
    public static String CoerceValueToString(ValueEval ve)
    {
        if (ve is StringValueEval)
        {
            // NumberEval/BoolEval/StringEval all expose StringValue via this interface.
            StringValueEval sve = (StringValueEval)ve;
            return sve.StringValue;
        }
        if (ve is BlankEval)
        {
            return "";
        }
        throw new ArgumentException("Unexpected eval class (" + ve.GetType().Name + ")");
    }
    /**
     * @return <c>null</c> to represent blank values
     * @throws EvaluationException if ve is an ErrorEval, or if a string value cannot be converted
     */
    public static Boolean? CoerceValueToBoolean(ValueEval ve, bool stringsAreBlanks)
    {
        if (ve == null || ve == BlankEval.instance)
        {
            // TODO - remove 've == null' condition once AreaEval is fixed
            return null;
        }
        if (ve is BoolEval)
        {
            return ((BoolEval)ve).BooleanValue;
        }
        if (ve is StringEval)
        {
            if (stringsAreBlanks)
            {
                return null;
            }
            // Only the literals TRUE/FALSE (case-insensitive) convert; anything else is #VALUE!.
            String str = ((StringEval)ve).StringValue;
            if (str.Equals("true", StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }
            if (str.Equals("false", StringComparison.OrdinalIgnoreCase))
            {
                return false;
            }
            // else - string cannot be converted to boolean
            throw new EvaluationException(ErrorEval.VALUE_INVALID);
        }
        if (ve is NumericValueEval)
        {
            NumericValueEval ne = (NumericValueEval)ve;
            double d = ne.NumberValue;
            if (Double.IsNaN(d))
            {
                throw new EvaluationException(ErrorEval.VALUE_INVALID);
            }
            // Excel semantics: any non-zero number is TRUE.
            return d != 0;
        }
        if (ve is ErrorEval)
        {
            throw new EvaluationException((ErrorEval)ve);
        }
        throw new InvalidOperationException("Unexpected eval (" + ve.GetType().Name + ")");
    }
}
}
| |
// dnlib: See LICENSE.txt for more info
using System;
using System.Collections.Generic;
using System.Text;
using dnlib.Threading;
namespace dnlib.DotNet {
/// <summary>
/// Finds <see cref="TypeDef"/>s by name or by <see cref="TypeRef"/>. Lookups can run
/// in two modes: a slow O(n) scan over all types, or a cached mode where three
/// dictionaries (by TypeRef, by normal full name, by reflection full name) are
/// built lazily as lookups miss.
/// </summary>
sealed class TypeDefFinder : ITypeDefFinder, IDisposable {
	// Scope is deliberately not compared so a TypeRef can match a TypeDef in
	// any module; a TypeRef may also reference the global <Module> type.
	const SigComparerOptions TypeComparerOptions = SigComparerOptions.DontCompareTypeScope | SigComparerOptions.TypeRefCanReferenceGlobalType;
	bool isCacheEnabled;
	readonly bool includeNestedTypes;
	// Lookup caches. They are filled incrementally by GetNextTypeDefCache()
	// and are only meaningful while isCacheEnabled is true.
	Dictionary<ITypeDefOrRef, TypeDef> typeRefCache = new Dictionary<ITypeDefOrRef, TypeDef>(new TypeEqualityComparer(TypeComparerOptions));
	Dictionary<string, TypeDef> normalNameCache = new Dictionary<string, TypeDef>(StringComparer.Ordinal);
	Dictionary<string, TypeDef> reflectionNameCache = new Dictionary<string, TypeDef>(StringComparer.Ordinal);
	// Scratch buffer reused for every full-name computation (access is
	// serialized by theLock in THREAD_SAFE builds).
	readonly StringBuilder sb = new StringBuilder();
	// Current position in the lazy enumeration of all types; non-null only
	// while the cache is enabled and not yet exhausted.
	IEnumerator<TypeDef> typeEnumerator;
	readonly IEnumerable<TypeDef> rootTypes;
#if THREAD_SAFE
	readonly Lock theLock = Lock.Create();
#endif
	/// <summary>
	/// <c>true</c> if the <see cref="TypeDef"/> cache is enabled. <c>false</c> if the cache
	/// is disabled and a slower <c>O(n)</c> lookup is performed.
	/// </summary>
	public bool IsCacheEnabled {
		get {
#if THREAD_SAFE
			theLock.EnterReadLock(); try {
#endif
			return IsCacheEnabled_NoLock;
#if THREAD_SAFE
			} finally { theLock.ExitReadLock(); }
#endif
		}
		set {
#if THREAD_SAFE
			theLock.EnterWriteLock(); try {
#endif
			IsCacheEnabled_NoLock = value;
#if THREAD_SAFE
			} finally { theLock.ExitWriteLock(); }
#endif
		}
	}
	// Toggling the flag always clears the caches and, when enabling, restarts
	// the type enumeration from the beginning. Caller must hold the write lock.
	bool IsCacheEnabled_NoLock {
		get { return isCacheEnabled; }
		set {
			if (isCacheEnabled == value)
				return;
			if (typeEnumerator != null) {
				typeEnumerator.Dispose();
				typeEnumerator = null;
			}
			typeRefCache.Clear();
			normalNameCache.Clear();
			reflectionNameCache.Clear();
			if (value)
				InitializeTypeEnumerator();
			isCacheEnabled = value;
		}
	}
	/// <summary>
	/// Constructor
	/// </summary>
	/// <param name="rootTypes">All root types. All their nested types are also included.</param>
	/// <exception cref="ArgumentNullException">If <paramref name="rootTypes"/> is <c>null</c></exception>
	public TypeDefFinder(IEnumerable<TypeDef> rootTypes)
		: this(rootTypes, true) {
	}
	/// <summary>
	/// Constructor
	/// </summary>
	/// <param name="rootTypes">All root types</param>
	/// <param name="includeNestedTypes"><c>true</c> if all nested types that are reachable
	/// from <paramref name="rootTypes"/> should also be included.</param>
	/// <exception cref="ArgumentNullException">If <paramref name="rootTypes"/> is <c>null</c></exception>
	public TypeDefFinder(IEnumerable<TypeDef> rootTypes, bool includeNestedTypes) {
		if (rootTypes == null)
			throw new ArgumentNullException("rootTypes");
		this.rootTypes = rootTypes;
		this.includeNestedTypes = includeNestedTypes;
	}
	// (Re)starts the enumeration over all searchable types, disposing any
	// previous enumerator first.
	void InitializeTypeEnumerator() {
		if (typeEnumerator != null) {
			typeEnumerator.Dispose();
			typeEnumerator = null;
		}
		typeEnumerator = (includeNestedTypes ? AllTypesHelper.Types(rootTypes) : rootTypes).GetEnumerator();
	}
	/// <summary>
	/// Resets the cache (clears all cached elements). Use this method if the cache is
	/// enabled but some of the types have been modified (eg. removed, added, renamed).
	/// </summary>
	public void ResetCache() {
#if THREAD_SAFE
		theLock.EnterWriteLock(); try {
#endif
		// Toggling the flag off and back on clears the caches and restarts
		// the enumerator (see IsCacheEnabled_NoLock setter).
		bool old = IsCacheEnabled_NoLock;
		IsCacheEnabled_NoLock = false;
		IsCacheEnabled_NoLock = old;
#if THREAD_SAFE
		} finally { theLock.ExitWriteLock(); }
#endif
	}
	/// <inheritdoc/>
	public TypeDef Find(string fullName, bool isReflectionName) {
		if (fullName == null)
			return null;
		// NOTE: a write lock is taken even for lookups because a cache miss
		// mutates the caches while building them lazily.
#if THREAD_SAFE
		theLock.EnterWriteLock(); try {
#endif
		if (isCacheEnabled)
			return isReflectionName ? FindCacheReflection(fullName) : FindCacheNormal(fullName);
		return isReflectionName ? FindSlowReflection(fullName) : FindSlowNormal(fullName);
#if THREAD_SAFE
		} finally { theLock.ExitWriteLock(); }
#endif
	}
	/// <inheritdoc/>
	public TypeDef Find(TypeRef typeRef) {
		if (typeRef == null)
			return null;
#if THREAD_SAFE
		theLock.EnterWriteLock(); try {
#endif
		return isCacheEnabled ? FindCache(typeRef) : FindSlow(typeRef);
#if THREAD_SAFE
		} finally { theLock.ExitWriteLock(); }
#endif
	}
	// Cached TypeRef lookup: check the dictionary first, then keep advancing
	// the shared enumerator (caching every type seen) until a match or the end.
	TypeDef FindCache(TypeRef typeRef) {
		TypeDef cachedType;
		if (typeRefCache.TryGetValue(typeRef, out cachedType))
			return cachedType;
		// Build the cache lazily
		var comparer = new SigComparer(TypeComparerOptions);
		while (true) {
			cachedType = GetNextTypeDefCache();
			if (cachedType == null || comparer.Equals(cachedType, typeRef))
				return cachedType;
		}
	}
	// Cached lookup by reflection full name (eg. "Ns.Outer+Nested").
	TypeDef FindCacheReflection(string fullName) {
		TypeDef cachedType;
		if (reflectionNameCache.TryGetValue(fullName, out cachedType))
			return cachedType;
		// Build the cache lazily
		while (true) {
			cachedType = GetNextTypeDefCache();
			if (cachedType == null)
				return cachedType;
			sb.Length = 0;
			if (FullNameCreator.FullName(cachedType, true, null, sb) == fullName)
				return cachedType;
		}
	}
	// Cached lookup by normal full name (eg. "Ns.Outer/Nested").
	TypeDef FindCacheNormal(string fullName) {
		TypeDef cachedType;
		if (normalNameCache.TryGetValue(fullName, out cachedType))
			return cachedType;
		// Build the cache lazily
		while (true) {
			cachedType = GetNextTypeDefCache();
			if (cachedType == null)
				return cachedType;
			sb.Length = 0;
			if (FullNameCreator.FullName(cachedType, false, null, sb) == fullName)
				return cachedType;
		}
	}
	// Uncached O(n) scan comparing against a TypeRef. Restarts the
	// enumeration every call.
	TypeDef FindSlow(TypeRef typeRef) {
		InitializeTypeEnumerator();
		var comparer = new SigComparer(TypeComparerOptions);
		while (true) {
			var type = GetNextTypeDef();
			if (type == null || comparer.Equals(type, typeRef))
				return type;
		}
	}
	// Uncached O(n) scan comparing reflection full names.
	TypeDef FindSlowReflection(string fullName) {
		InitializeTypeEnumerator();
		while (true) {
			var type = GetNextTypeDef();
			if (type == null)
				return type;
			sb.Length = 0;
			if (FullNameCreator.FullName(type, true, null, sb) == fullName)
				return type;
		}
	}
	// Uncached O(n) scan comparing normal full names.
	TypeDef FindSlowNormal(string fullName) {
		InitializeTypeEnumerator();
		while (true) {
			var type = GetNextTypeDef();
			if (type == null)
				return type;
			sb.Length = 0;
			if (FullNameCreator.FullName(type, false, null, sb) == fullName)
				return type;
		}
	}
	/// <summary>
	/// Gets the next <see cref="TypeDef"/> or <c>null</c> if there are no more left
	/// </summary>
	/// <returns>The next <see cref="TypeDef"/> or <c>null</c> if none</returns>
	TypeDef GetNextTypeDef() {
		// Skip null entries the source enumeration may contain.
		while (typeEnumerator.MoveNext()) {
			var type = typeEnumerator.Current;
			if (type != null)
				return type;
		}
		return null;
	}
	/// <summary>
	/// Gets the next <see cref="TypeDef"/> or <c>null</c> if there are no more left.
	/// The cache is updated with the returned <see cref="TypeDef"/> before the method
	/// returns.
	/// </summary>
	/// <returns>The next <see cref="TypeDef"/> or <c>null</c> if none</returns>
	TypeDef GetNextTypeDefCache() {
		var type = GetNextTypeDef();
		if (type == null)
			return null;
		// Only insert it if another type with the exact same sig/name isn't already
		// in the cache. This should only happen with some obfuscated assemblies.
		if (!typeRefCache.ContainsKey(type))
			typeRefCache[type] = type;
		string fn;
		sb.Length = 0;
		if (!normalNameCache.ContainsKey(fn = FullNameCreator.FullName(type, false, null, sb)))
			normalNameCache[fn] = type;
		sb.Length = 0;
		if (!reflectionNameCache.ContainsKey(fn = FullNameCreator.FullName(type, true, null, sb)))
			reflectionNameCache[fn] = type;
		return type;
	}
	/// <inheritdoc/>
	// NOTE(review): the caches are set to null here, so using the finder
	// after Dispose() throws NullReferenceException — callers must not reuse it.
	public void Dispose() {
#if THREAD_SAFE
		theLock.EnterWriteLock(); try {
#endif
		if (typeEnumerator != null)
			typeEnumerator.Dispose();
		typeEnumerator = null;
		typeRefCache = null;
		normalNameCache = null;
		reflectionNameCache = null;
#if THREAD_SAFE
		} finally { theLock.ExitWriteLock(); }
#endif
	}
}
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
*
* ***************************************************************************/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.CompilerServices;
#if SILVERLIGHT
using System.Core;
#endif
#if CLR2
namespace Microsoft.Scripting.Ast.Compiler {
#else
namespace System.Linq.Expressions.Compiler {
#endif
#if CLR2 || SILVERLIGHT
using ILGenerator = OffsetTrackingILGenerator;
#endif
internal static class ILGen {
/// <summary>
/// Emits <paramref name="opcode"/> with a method operand, dispatching to the
/// ConstructorInfo or MethodInfo overload as appropriate.
/// </summary>
internal static void Emit(this ILGenerator il, OpCode opcode, MethodBase methodBase) {
    Debug.Assert(methodBase is MethodInfo || methodBase is ConstructorInfo);
    ConstructorInfo ctor = methodBase as ConstructorInfo;
    if (ctor != null) {
        il.Emit(opcode, ctor);
    } else {
        il.Emit(opcode, (MethodInfo)methodBase);
    }
}
#region Instruction helpers
/// <summary>
/// Emits the shortest Ldarg form for the given argument index
/// (Ldarg_0..3, then Ldarg_S, then Ldarg).
/// </summary>
internal static void EmitLoadArg(this ILGenerator il, int index) {
    Debug.Assert(index >= 0);
    if (index == 0) {
        il.Emit(OpCodes.Ldarg_0);
    } else if (index == 1) {
        il.Emit(OpCodes.Ldarg_1);
    } else if (index == 2) {
        il.Emit(OpCodes.Ldarg_2);
    } else if (index == 3) {
        il.Emit(OpCodes.Ldarg_3);
    } else if (index <= Byte.MaxValue) {
        il.Emit(OpCodes.Ldarg_S, (byte)index);
    } else {
        il.Emit(OpCodes.Ldarg, index);
    }
}
/// <summary>
/// Emits Ldarga_S when the index fits in a byte, otherwise Ldarga.
/// </summary>
internal static void EmitLoadArgAddress(this ILGenerator il, int index) {
    Debug.Assert(index >= 0);
    if (index > Byte.MaxValue) {
        il.Emit(OpCodes.Ldarga, index);
    } else {
        il.Emit(OpCodes.Ldarga_S, (byte)index);
    }
}
/// <summary>
/// Emits Starg_S when the index fits in a byte, otherwise Starg.
/// </summary>
internal static void EmitStoreArg(this ILGenerator il, int index) {
    Debug.Assert(index >= 0);
    if (index > Byte.MaxValue) {
        il.Emit(OpCodes.Starg, index);
    } else {
        il.Emit(OpCodes.Starg_S, (byte)index);
    }
}
/// <summary>
/// Emits a Ldind* instruction for the appropriate type
/// </summary>
/// <remarks>
/// Deliberately compares against exact types (not Type.GetTypeCode) so that
/// enums fall through to Ldobj rather than their underlying integer load.
/// </remarks>
internal static void EmitLoadValueIndirect(this ILGenerator il, Type type) {
    ContractUtils.RequiresNotNull(type, "type");
    if (type.IsValueType) {
        if (type == typeof(int)) {
            il.Emit(OpCodes.Ldind_I4);
        } else if (type == typeof(uint)) {
            il.Emit(OpCodes.Ldind_U4);
        } else if (type == typeof(short)) {
            il.Emit(OpCodes.Ldind_I2);
        } else if (type == typeof(ushort)) {
            il.Emit(OpCodes.Ldind_U2);
        } else if (type == typeof(long) || type == typeof(ulong)) {
            il.Emit(OpCodes.Ldind_I8);
        } else if (type == typeof(char)) {
            il.Emit(OpCodes.Ldind_I2);
        } else if (type == typeof(bool)) {
            il.Emit(OpCodes.Ldind_I1);
        } else if (type == typeof(float)) {
            il.Emit(OpCodes.Ldind_R4);
        } else if (type == typeof(double)) {
            il.Emit(OpCodes.Ldind_R8);
        } else {
            // Any other value type (structs, enums, decimal): load the whole object.
            il.Emit(OpCodes.Ldobj, type);
        }
    } else {
        il.Emit(OpCodes.Ldind_Ref);
    }
}
/// <summary>
/// Emits a Stind* instruction for the appropriate type.
/// </summary>
/// <remarks>
/// Like EmitLoadValueIndirect, exact type comparisons are used so enums and
/// other structs take the Stobj path.
/// </remarks>
internal static void EmitStoreValueIndirect(this ILGenerator il, Type type) {
    ContractUtils.RequiresNotNull(type, "type");
    if (type.IsValueType) {
        if (type == typeof(int)) {
            il.Emit(OpCodes.Stind_I4);
        } else if (type == typeof(short)) {
            il.Emit(OpCodes.Stind_I2);
        } else if (type == typeof(long) || type == typeof(ulong)) {
            il.Emit(OpCodes.Stind_I8);
        } else if (type == typeof(char)) {
            il.Emit(OpCodes.Stind_I2);
        } else if (type == typeof(bool)) {
            il.Emit(OpCodes.Stind_I1);
        } else if (type == typeof(float)) {
            il.Emit(OpCodes.Stind_R4);
        } else if (type == typeof(double)) {
            il.Emit(OpCodes.Stind_R8);
        } else {
            il.Emit(OpCodes.Stobj, type);
        }
    } else {
        il.Emit(OpCodes.Stind_Ref);
    }
}
// Emits the Ldelem* instruction for the appropriate type.
// Enums are checked before Type.GetTypeCode because GetTypeCode returns the
// underlying integer code for enums, and enum arrays must use the generic
// Ldelem with the enum type itself.
internal static void EmitLoadElement(this ILGenerator il, Type type) {
    ContractUtils.RequiresNotNull(type, "type");
    if (!type.IsValueType) {
        il.Emit(OpCodes.Ldelem_Ref);
    } else if (type.IsEnum) {
        il.Emit(OpCodes.Ldelem, type);
    } else {
        switch (Type.GetTypeCode(type)) {
            case TypeCode.Boolean:
            case TypeCode.SByte:
                il.Emit(OpCodes.Ldelem_I1);
                break;
            case TypeCode.Byte:
                il.Emit(OpCodes.Ldelem_U1);
                break;
            case TypeCode.Int16:
                il.Emit(OpCodes.Ldelem_I2);
                break;
            case TypeCode.Char:
            case TypeCode.UInt16:
                il.Emit(OpCodes.Ldelem_U2);
                break;
            case TypeCode.Int32:
                il.Emit(OpCodes.Ldelem_I4);
                break;
            case TypeCode.UInt32:
                il.Emit(OpCodes.Ldelem_U4);
                break;
            case TypeCode.Int64:
            case TypeCode.UInt64:
                il.Emit(OpCodes.Ldelem_I8);
                break;
            case TypeCode.Single:
                il.Emit(OpCodes.Ldelem_R4);
                break;
            case TypeCode.Double:
                il.Emit(OpCodes.Ldelem_R8);
                break;
            default:
                // decimal and other structs
                il.Emit(OpCodes.Ldelem, type);
                break;
        }
    }
}
/// <summary>
/// Emits a Stelem* instruction for the appropriate type.
/// </summary>
/// <remarks>
/// Enums are special-cased first, for the same GetTypeCode reason as
/// EmitLoadElement.
/// </remarks>
internal static void EmitStoreElement(this ILGenerator il, Type type) {
    ContractUtils.RequiresNotNull(type, "type");
    if (type.IsEnum) {
        il.Emit(OpCodes.Stelem, type);
        return;
    }
    switch (Type.GetTypeCode(type)) {
        case TypeCode.Boolean:
        case TypeCode.SByte:
        case TypeCode.Byte:
            il.Emit(OpCodes.Stelem_I1);
            break;
        case TypeCode.Char:
        case TypeCode.Int16:
        case TypeCode.UInt16:
            il.Emit(OpCodes.Stelem_I2);
            break;
        case TypeCode.Int32:
        case TypeCode.UInt32:
            il.Emit(OpCodes.Stelem_I4);
            break;
        case TypeCode.Int64:
        case TypeCode.UInt64:
            il.Emit(OpCodes.Stelem_I8);
            break;
        case TypeCode.Single:
            il.Emit(OpCodes.Stelem_R4);
            break;
        case TypeCode.Double:
            il.Emit(OpCodes.Stelem_R8);
            break;
        default:
            if (type.IsValueType) {
                il.Emit(OpCodes.Stelem, type);
            } else {
                il.Emit(OpCodes.Stelem_Ref);
            }
            break;
    }
}
/// <summary>
/// Emits IL that pushes the runtime <see cref="Type"/> object for
/// <paramref name="type"/>: ldtoken followed by Type.GetTypeFromHandle.
/// </summary>
internal static void EmitType(this ILGenerator il, Type type) {
    ContractUtils.RequiresNotNull(type, "type");
    il.Emit(OpCodes.Ldtoken, type);
    il.Emit(OpCodes.Call, typeof(Type).GetMethod("GetTypeFromHandle"));
}
#endregion
#region Fields, properties and methods
/// <summary>
/// Emits the address of the given field (Ldsflda for static, Ldflda for instance).
/// </summary>
internal static void EmitFieldAddress(this ILGenerator il, FieldInfo fi) {
    ContractUtils.RequiresNotNull(fi, "fi");
    il.Emit(fi.IsStatic ? OpCodes.Ldsflda : OpCodes.Ldflda, fi);
}
/// <summary>
/// Emits a field load (Ldsfld for static, Ldfld for instance).
/// </summary>
internal static void EmitFieldGet(this ILGenerator il, FieldInfo fi) {
    ContractUtils.RequiresNotNull(fi, "fi");
    il.Emit(fi.IsStatic ? OpCodes.Ldsfld : OpCodes.Ldfld, fi);
}
/// <summary>
/// Emits a field store (Stsfld for static, Stfld for instance).
/// </summary>
internal static void EmitFieldSet(this ILGenerator il, FieldInfo fi) {
    ContractUtils.RequiresNotNull(fi, "fi");
    il.Emit(fi.IsStatic ? OpCodes.Stsfld : OpCodes.Stfld, fi);
}
/// <summary>
/// Emits Newobj for the given constructor. Open generic declaring types
/// cannot be instantiated, so they are rejected up front.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1711:IdentifiersShouldNotHaveIncorrectSuffix")]
internal static void EmitNew(this ILGenerator il, ConstructorInfo ci) {
    ContractUtils.RequiresNotNull(ci, "ci");
    if (!ci.DeclaringType.ContainsGenericParameters) {
        il.Emit(OpCodes.Newobj, ci);
        return;
    }
    throw Error.IllegalNewGenericParams(ci.DeclaringType);
}
/// <summary>
/// Looks up the constructor of <paramref name="type"/> matching
/// <paramref name="paramTypes"/> and emits Newobj for it.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1711:IdentifiersShouldNotHaveIncorrectSuffix")]
internal static void EmitNew(this ILGenerator il, Type type, Type[] paramTypes) {
    ContractUtils.RequiresNotNull(type, "type");
    ContractUtils.RequiresNotNull(paramTypes, "paramTypes");
    ConstructorInfo ctor = type.GetConstructor(paramTypes);
    if (ctor == null) {
        throw Error.TypeDoesNotHaveConstructorForTheSignature();
    }
    il.EmitNew(ctor);
}
#endregion
#region Constants
/// <summary>Emits a null reference constant (Ldnull).</summary>
internal static void EmitNull(this ILGenerator il) {
    il.Emit(OpCodes.Ldnull);
}
/// <summary>Emits a string constant (Ldstr). <paramref name="value"/> must not be null.</summary>
internal static void EmitString(this ILGenerator il, string value) {
    ContractUtils.RequiresNotNull(value, "value");
    il.Emit(OpCodes.Ldstr, value);
}
/// <summary>Emits a boolean constant as Ldc_I4_1 (true) or Ldc_I4_0 (false).</summary>
internal static void EmitBoolean(this ILGenerator il, bool value) {
    il.Emit(value ? OpCodes.Ldc_I4_1 : OpCodes.Ldc_I4_0);
}
/// <summary>Emits a char constant: load as int32, then Conv_U2 to tag the type.</summary>
internal static void EmitChar(this ILGenerator il, char value) {
    il.EmitInt(value);
    il.Emit(OpCodes.Conv_U2);
}
/// <summary>Emits a byte constant: load as int32, then Conv_U1.</summary>
internal static void EmitByte(this ILGenerator il, byte value) {
    il.EmitInt(value);
    il.Emit(OpCodes.Conv_U1);
}
/// <summary>Emits an sbyte constant: load as int32, then Conv_I1.</summary>
internal static void EmitSByte(this ILGenerator il, sbyte value) {
    il.EmitInt(value);
    il.Emit(OpCodes.Conv_I1);
}
/// <summary>Emits a short constant: load as int32, then Conv_I2.</summary>
internal static void EmitShort(this ILGenerator il, short value) {
    il.EmitInt(value);
    il.Emit(OpCodes.Conv_I2);
}
/// <summary>Emits a ushort constant: load as int32, then Conv_U2.</summary>
internal static void EmitUShort(this ILGenerator il, ushort value) {
    il.EmitInt(value);
    il.Emit(OpCodes.Conv_U2);
}
/// <summary>
/// Emits the shortest int32 constant encoding: the dedicated Ldc_I4_M1..Ldc_I4_8
/// opcodes for -1..8, Ldc_I4_S for sbyte range, and Ldc_I4 otherwise.
/// </summary>
internal static void EmitInt(this ILGenerator il, int value) {
    switch (value) {
        case -1: il.Emit(OpCodes.Ldc_I4_M1); return;
        case 0: il.Emit(OpCodes.Ldc_I4_0); return;
        case 1: il.Emit(OpCodes.Ldc_I4_1); return;
        case 2: il.Emit(OpCodes.Ldc_I4_2); return;
        case 3: il.Emit(OpCodes.Ldc_I4_3); return;
        case 4: il.Emit(OpCodes.Ldc_I4_4); return;
        case 5: il.Emit(OpCodes.Ldc_I4_5); return;
        case 6: il.Emit(OpCodes.Ldc_I4_6); return;
        case 7: il.Emit(OpCodes.Ldc_I4_7); return;
        case 8: il.Emit(OpCodes.Ldc_I4_8); return;
    }
    if (sbyte.MinValue <= value && value <= sbyte.MaxValue) {
        il.Emit(OpCodes.Ldc_I4_S, (sbyte)value);
    } else {
        il.Emit(OpCodes.Ldc_I4, value);
    }
}
/// <summary>Emits a uint constant: load the reinterpreted int32, then Conv_U4.</summary>
internal static void EmitUInt(this ILGenerator il, uint value) {
    il.EmitInt((int)value);
    il.Emit(OpCodes.Conv_U4);
}
/// <summary>Emits an int64 constant (Ldc_I8 + Conv_I8).</summary>
internal static void EmitLong(this ILGenerator il, long value) {
    il.Emit(OpCodes.Ldc_I8, value);
    //
    // Now, emit convert to give the constant type information.
    //
    // Otherwise, it is treated as unsigned and overflow is not
    // detected if it's used in checked ops.
    //
    il.Emit(OpCodes.Conv_I8);
}
/// <summary>Emits a ulong constant: Ldc_I8 with the reinterpreted bits, then Conv_U8 to mark it unsigned.</summary>
internal static void EmitULong(this ILGenerator il, ulong value) {
    il.Emit(OpCodes.Ldc_I8, (long)value);
    il.Emit(OpCodes.Conv_U8);
}
/// <summary>Emits a double constant (Ldc_R8).</summary>
internal static void EmitDouble(this ILGenerator il, double value) {
    il.Emit(OpCodes.Ldc_R8, value);
}
/// <summary>Emits a float constant (Ldc_R4).</summary>
internal static void EmitSingle(this ILGenerator il, float value) {
    il.Emit(OpCodes.Ldc_R4, value);
}
// Matches TryEmitConstant: true when the value can be burned into IL either
// as a primitive/string/decimal constant or via a metadata token.
internal static bool CanEmitConstant(object value, Type type) {
    if (value == null) {
        return true;
    }
    if (CanEmitILConstant(type)) {
        return true;
    }
    Type asType = value as Type;
    if (asType != null) {
        return ShouldLdtoken(asType);
    }
    MethodBase asMethod = value as MethodBase;
    return asMethod != null && ShouldLdtoken(asMethod);
}
// Matches TryEmitILConstant: true for the primitive numeric types, bool,
// char, decimal and string. These happen to be the contiguous TypeCode run
// Boolean(3)..Decimal(15), plus String(18); DateTime(16) is excluded.
private static bool CanEmitILConstant(Type type) {
    TypeCode tc = Type.GetTypeCode(type);
    return (TypeCode.Boolean <= tc && tc <= TypeCode.Decimal) || tc == TypeCode.String;
}
/// <summary>Emits <paramref name="value"/> as a constant, using its runtime type. Must not be null.</summary>
internal static void EmitConstant(this ILGenerator il, object value) {
    Debug.Assert(value != null);
    EmitConstant(il, value, value.GetType());
}
/// <summary>
/// Emits <paramref name="value"/> as an IL constant of static type
/// <paramref name="type"/>. Supports more constant kinds than LINQ does:
/// primitives/strings/decimals, plus Type and MethodBase values via tokens.
/// Throws when the value cannot be represented in IL.
/// </summary>
internal static void EmitConstant(this ILGenerator il, object value, Type type) {
    if (value == null) {
        // Smarter than the LINQ implementation, which uses the initobj
        // pattern for all value types (works, but needs a local and more IL).
        il.EmitDefault(type);
        return;
    }
    // Primitive / string / decimal constants go straight into the stream.
    if (il.TryEmitILConstant(value, type)) {
        return;
    }
    // Type objects can be rebuilt at runtime from their metadata token.
    Type typeValue = value as Type;
    if (typeValue != null && ShouldLdtoken(typeValue)) {
        il.EmitType(typeValue);
        if (type != typeof(Type)) {
            il.Emit(OpCodes.Castclass, type);
        }
        return;
    }
    // Likewise for methods/constructors.
    MethodBase methodValue = value as MethodBase;
    if (methodValue != null && ShouldLdtoken(methodValue)) {
        il.Emit(OpCodes.Ldtoken, methodValue);
        Type declaring = methodValue.DeclaringType;
        if (declaring != null && declaring.IsGenericType) {
            // Generic declaring types need the two-handle overload so the
            // right instantiation is resolved.
            il.Emit(OpCodes.Ldtoken, declaring);
            il.Emit(OpCodes.Call, typeof(MethodBase).GetMethod("GetMethodFromHandle", new Type[] { typeof(RuntimeMethodHandle), typeof(RuntimeTypeHandle) }));
        } else {
            il.Emit(OpCodes.Call, typeof(MethodBase).GetMethod("GetMethodFromHandle", new Type[] { typeof(RuntimeMethodHandle) }));
        }
        if (type != typeof(MethodBase)) {
            il.Emit(OpCodes.Castclass, type);
        }
        return;
    }
    throw ContractUtils.Unreachable;
}
/// <summary>True when the type's token can be embedded: TypeBuilders, generic parameters, and publicly visible types.</summary>
internal static bool ShouldLdtoken(Type t) {
    return t is TypeBuilder || t.IsGenericParameter || t.IsVisible;
}
/// <summary>
/// True when the method's token can be embedded. DynamicMethods have no
/// metadata token; otherwise the declaring type (if any) decides.
/// </summary>
internal static bool ShouldLdtoken(MethodBase mb) {
    return !(mb is DynamicMethod)
        && (mb.DeclaringType == null || ShouldLdtoken(mb.DeclaringType));
}
/// <summary>
/// Emits <paramref name="value"/> as an inline IL constant when its static
/// type is a primitive, char, bool, decimal, or string; returns false for
/// everything else (keep the set in sync with CanEmitILConstant).
/// </summary>
private static bool TryEmitILConstant(this ILGenerator il, object value, Type type) {
    switch (Type.GetTypeCode(type)) {
        case TypeCode.Boolean:
            il.EmitBoolean((bool)value);
            return true;
        case TypeCode.SByte:
            il.EmitSByte((sbyte)value);
            return true;
        case TypeCode.Int16:
            il.EmitShort((short)value);
            return true;
        case TypeCode.Int32:
            il.EmitInt((int)value);
            return true;
        case TypeCode.Int64:
            il.EmitLong((long)value);
            return true;
        case TypeCode.Single:
            il.EmitSingle((float)value);
            return true;
        case TypeCode.Double:
            il.EmitDouble((double)value);
            return true;
        case TypeCode.Char:
            il.EmitChar((char)value);
            return true;
        case TypeCode.Byte:
            il.EmitByte((byte)value);
            return true;
        case TypeCode.UInt16:
            il.EmitUShort((ushort)value);
            return true;
        case TypeCode.UInt32:
            il.EmitUInt((uint)value);
            return true;
        case TypeCode.UInt64:
            il.EmitULong((ulong)value);
            return true;
        case TypeCode.Decimal:
            il.EmitDecimal((decimal)value);
            return true;
        case TypeCode.String:
            il.EmitString((string)value);
            return true;
        default:
            return false;
    }
}
#endregion
#region Linq Conversions
/// <summary>
/// Emits the conversion from <paramref name="typeFrom"/> to
/// <paramref name="typeTo"/>, choosing between cast, nullable lifting, and
/// numeric conversion. No-op when the types are equivalent; void conversions
/// are never requested by callers.
/// </summary>
internal static void EmitConvertToType(this ILGenerator il, Type typeFrom, Type typeTo, bool isChecked) {
    if (TypeUtils.AreEquivalent(typeFrom, typeTo)) {
        return;
    }
    if (typeFrom == typeof(void) || typeTo == typeof(void)) {
        throw ContractUtils.Unreachable;
    }
    bool isTypeFromNullable = TypeUtils.IsNullableType(typeFrom);
    bool isTypeToNullable = TypeUtils.IsNullableType(typeTo);
    Type nnExprType = TypeUtils.GetNonNullableType(typeFrom);
    Type nnType = TypeUtils.GetNonNullableType(typeTo);
    // Reference-style conversions (cast/box/unbox) come first; then nullable
    // lifting; then up/down casts between non-convertible types; arrays are
    // covariant casts; everything else must be a numeric conversion.
    if (typeFrom.IsInterface || // interface cast
        typeTo.IsInterface ||
        typeFrom == typeof(object) || // boxing cast
        typeTo == typeof(object) ||
        typeFrom == typeof(System.Enum) ||
        typeFrom == typeof(System.ValueType) ||
        TypeUtils.IsLegalExplicitVariantDelegateConversion(typeFrom, typeTo))
    {
        il.EmitCastToType(typeFrom, typeTo);
    } else if (isTypeFromNullable || isTypeToNullable) {
        il.EmitNullableConversion(typeFrom, typeTo, isChecked);
    } else if (!(TypeUtils.IsConvertible(typeFrom) && TypeUtils.IsConvertible(typeTo)) // primitive runtime conversion
        &&
        (nnExprType.IsAssignableFrom(nnType) || // down cast
        nnType.IsAssignableFrom(nnExprType))) // up cast
    {
        il.EmitCastToType(typeFrom, typeTo);
    } else if (typeFrom.IsArray && typeTo.IsArray) {
        // See DevDiv Bugs #94657.
        il.EmitCastToType(typeFrom, typeTo);
    } else {
        il.EmitNumericConversion(typeFrom, typeTo, isChecked);
    }
}
/// <summary>
/// Emits a reference-style cast: unbox when going reference → value,
/// box (plus castclass if needed) when going value → reference, castclass
/// for reference → reference. Value → value casts are invalid here.
/// </summary>
private static void EmitCastToType(this ILGenerator il, Type typeFrom, Type typeTo) {
    bool fromIsValue = typeFrom.IsValueType;
    bool toIsValue = typeTo.IsValueType;
    if (!fromIsValue && toIsValue) {
        il.Emit(OpCodes.Unbox_Any, typeTo);
    } else if (fromIsValue && !toIsValue) {
        il.Emit(OpCodes.Box, typeFrom);
        if (typeTo != typeof(object)) {
            il.Emit(OpCodes.Castclass, typeTo);
        }
    } else if (!fromIsValue) {
        // reference -> reference
        il.Emit(OpCodes.Castclass, typeTo);
    } else {
        // value -> value: no cast-style conversion exists
        throw Error.InvalidCast(typeFrom, typeTo);
    }
}
/// <summary>
/// Emits a primitive numeric conversion. Floating-point targets get
/// Conv_R4/Conv_R8 (preceded by Conv_R_Un for unsigned sources); integer
/// targets use the overflow-checking Conv_Ovf_* family when
/// <paramref name="isChecked"/>, choosing the _Un variants when the source
/// is unsigned, and plain Conv_* otherwise.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
private static void EmitNumericConversion(this ILGenerator il, Type typeFrom, Type typeTo, bool isChecked) {
    bool isFromUnsigned = TypeUtils.IsUnsigned(typeFrom);
    bool isFromFloatingPoint = TypeUtils.IsFloatingPoint(typeFrom);
    if (typeTo == typeof(Single)) {
        if (isFromUnsigned)
            il.Emit(OpCodes.Conv_R_Un);
        il.Emit(OpCodes.Conv_R4);
    } else if (typeTo == typeof(Double)) {
        if (isFromUnsigned)
            il.Emit(OpCodes.Conv_R_Un);
        il.Emit(OpCodes.Conv_R8);
    } else {
        TypeCode tc = Type.GetTypeCode(typeTo);
        if (isChecked) {
            // Overflow checking needs to know if the source value on the IL stack is unsigned or not.
            if (isFromUnsigned) {
                switch (tc) {
                    case TypeCode.SByte:
                        il.Emit(OpCodes.Conv_Ovf_I1_Un);
                        break;
                    case TypeCode.Int16:
                        il.Emit(OpCodes.Conv_Ovf_I2_Un);
                        break;
                    case TypeCode.Int32:
                        il.Emit(OpCodes.Conv_Ovf_I4_Un);
                        break;
                    case TypeCode.Int64:
                        il.Emit(OpCodes.Conv_Ovf_I8_Un);
                        break;
                    case TypeCode.Byte:
                        il.Emit(OpCodes.Conv_Ovf_U1_Un);
                        break;
                    case TypeCode.UInt16:
                    case TypeCode.Char:
                        il.Emit(OpCodes.Conv_Ovf_U2_Un);
                        break;
                    case TypeCode.UInt32:
                        il.Emit(OpCodes.Conv_Ovf_U4_Un);
                        break;
                    case TypeCode.UInt64:
                        il.Emit(OpCodes.Conv_Ovf_U8_Un);
                        break;
                    default:
                        throw Error.UnhandledConvert(typeTo);
                }
            } else {
                switch (tc) {
                    case TypeCode.SByte:
                        il.Emit(OpCodes.Conv_Ovf_I1);
                        break;
                    case TypeCode.Int16:
                        il.Emit(OpCodes.Conv_Ovf_I2);
                        break;
                    case TypeCode.Int32:
                        il.Emit(OpCodes.Conv_Ovf_I4);
                        break;
                    case TypeCode.Int64:
                        il.Emit(OpCodes.Conv_Ovf_I8);
                        break;
                    case TypeCode.Byte:
                        il.Emit(OpCodes.Conv_Ovf_U1);
                        break;
                    case TypeCode.UInt16:
                    case TypeCode.Char:
                        il.Emit(OpCodes.Conv_Ovf_U2);
                        break;
                    case TypeCode.UInt32:
                        il.Emit(OpCodes.Conv_Ovf_U4);
                        break;
                    case TypeCode.UInt64:
                        il.Emit(OpCodes.Conv_Ovf_U8);
                        break;
                    default:
                        throw Error.UnhandledConvert(typeTo);
                }
            }
        } else {
            switch (tc) {
                case TypeCode.SByte:
                    il.Emit(OpCodes.Conv_I1);
                    break;
                case TypeCode.Byte:
                    il.Emit(OpCodes.Conv_U1);
                    break;
                case TypeCode.Int16:
                    il.Emit(OpCodes.Conv_I2);
                    break;
                case TypeCode.UInt16:
                case TypeCode.Char:
                    il.Emit(OpCodes.Conv_U2);
                    break;
                case TypeCode.Int32:
                    il.Emit(OpCodes.Conv_I4);
                    break;
                case TypeCode.UInt32:
                    il.Emit(OpCodes.Conv_U4);
                    break;
                case TypeCode.Int64:
                    // Sign- vs zero-extend depending on the source's signedness.
                    if (isFromUnsigned) {
                        il.Emit(OpCodes.Conv_U8);
                    } else {
                        il.Emit(OpCodes.Conv_I8);
                    }
                    break;
                case TypeCode.UInt64:
                    if (isFromUnsigned || isFromFloatingPoint) {
                        il.Emit(OpCodes.Conv_U8);
                    } else {
                        il.Emit(OpCodes.Conv_I8);
                    }
                    break;
                default:
                    throw Error.UnhandledConvert(typeTo);
            }
        }
    }
}
/// <summary>
/// Emits T? → U?: if the source has a value, convert the underlying value and
/// wrap it in a new U?; otherwise produce default(U?) via initobj. The emitted
/// IL sequence (stloc/ldloca/branching) is order-critical.
/// </summary>
private static void EmitNullableToNullableConversion(this ILGenerator il, Type typeFrom, Type typeTo, bool isChecked) {
    Debug.Assert(TypeUtils.IsNullableType(typeFrom));
    Debug.Assert(TypeUtils.IsNullableType(typeTo));
    Label labIfNull = default(Label);
    Label labEnd = default(Label);
    LocalBuilder locFrom = null;
    LocalBuilder locTo = null;
    locFrom = il.DeclareLocal(typeFrom);
    il.Emit(OpCodes.Stloc, locFrom);
    locTo = il.DeclareLocal(typeTo);
    // test for null
    il.Emit(OpCodes.Ldloca, locFrom);
    il.EmitHasValue(typeFrom);
    labIfNull = il.DefineLabel();
    il.Emit(OpCodes.Brfalse_S, labIfNull);
    // non-null path: unwrap, convert, re-wrap
    il.Emit(OpCodes.Ldloca, locFrom);
    il.EmitGetValueOrDefault(typeFrom);
    Type nnTypeFrom = TypeUtils.GetNonNullableType(typeFrom);
    Type nnTypeTo = TypeUtils.GetNonNullableType(typeTo);
    il.EmitConvertToType(nnTypeFrom, nnTypeTo, isChecked);
    // construct result type
    ConstructorInfo ci = typeTo.GetConstructor(new Type[] { nnTypeTo });
    il.Emit(OpCodes.Newobj, ci);
    il.Emit(OpCodes.Stloc, locTo);
    labEnd = il.DefineLabel();
    il.Emit(OpCodes.Br_S, labEnd);
    // if null then create a default one
    il.MarkLabel(labIfNull);
    il.Emit(OpCodes.Ldloca, locTo);
    il.Emit(OpCodes.Initobj, typeTo);
    il.MarkLabel(labEnd);
    il.Emit(OpCodes.Ldloc, locTo);
}
/// <summary>
/// Emits T → U?: convert the raw value to U, then wrap it via the
/// Nullable&lt;U&gt;(U) constructor.
/// </summary>
private static void EmitNonNullableToNullableConversion(this ILGenerator il, Type typeFrom, Type typeTo, bool isChecked) {
    Debug.Assert(!TypeUtils.IsNullableType(typeFrom));
    Debug.Assert(TypeUtils.IsNullableType(typeTo));
    LocalBuilder resultLocal = il.DeclareLocal(typeTo);
    Type underlyingTo = TypeUtils.GetNonNullableType(typeTo);
    il.EmitConvertToType(typeFrom, underlyingTo, isChecked);
    ConstructorInfo wrapCtor = typeTo.GetConstructor(new Type[] { underlyingTo });
    il.Emit(OpCodes.Newobj, wrapCtor);
    il.Emit(OpCodes.Stloc, resultLocal);
    il.Emit(OpCodes.Ldloc, resultLocal);
}
/// <summary>
/// Emits T? → U: unwrap-and-convert for value-type targets, box for
/// reference-type targets (boxing preserves null semantics).
/// </summary>
private static void EmitNullableToNonNullableConversion(this ILGenerator il, Type typeFrom, Type typeTo, bool isChecked) {
    Debug.Assert(TypeUtils.IsNullableType(typeFrom));
    Debug.Assert(!TypeUtils.IsNullableType(typeTo));
    if (!typeTo.IsValueType) {
        il.EmitNullableToReferenceConversion(typeFrom);
        return;
    }
    il.EmitNullableToNonNullableStructConversion(typeFrom, typeTo, isChecked);
}
/// <summary>
/// Emits T? → U where U is a struct: calls get_Value (which throws
/// InvalidOperationException for null — the cast semantics we want), then
/// converts the underlying value.
/// </summary>
private static void EmitNullableToNonNullableStructConversion(this ILGenerator il, Type typeFrom, Type typeTo, bool isChecked) {
    Debug.Assert(TypeUtils.IsNullableType(typeFrom));
    Debug.Assert(!TypeUtils.IsNullableType(typeTo));
    Debug.Assert(typeTo.IsValueType);
    LocalBuilder sourceLocal = il.DeclareLocal(typeFrom);
    il.Emit(OpCodes.Stloc, sourceLocal);
    il.Emit(OpCodes.Ldloca, sourceLocal);
    il.EmitGetValue(typeFrom);
    il.EmitConvertToType(TypeUtils.GetNonNullableType(typeFrom), typeTo, isChecked);
}
/// <summary>
/// Emits T? → reference type: a single box, which yields null for empty
/// nullables and a boxed T otherwise.
/// </summary>
private static void EmitNullableToReferenceConversion(this ILGenerator il, Type typeFrom) {
    Debug.Assert(TypeUtils.IsNullableType(typeFrom));
    // We've got a conversion from nullable to Object, ValueType, Enum, etc. Just box it so that
    // we get the nullable semantics.
    il.Emit(OpCodes.Box, typeFrom);
}
/// <summary>
/// Dispatches a conversion where at least one side is Nullable&lt;T&gt; to the
/// specific to/from-nullable helper.
/// </summary>
private static void EmitNullableConversion(this ILGenerator il, Type typeFrom, Type typeTo, bool isChecked) {
    bool fromNullable = TypeUtils.IsNullableType(typeFrom);
    bool toNullable = TypeUtils.IsNullableType(typeTo);
    Debug.Assert(fromNullable || toNullable);
    if (!fromNullable) {
        il.EmitNonNullableToNullableConversion(typeFrom, typeTo, isChecked);
    } else if (toNullable) {
        il.EmitNullableToNullableConversion(typeFrom, typeTo, isChecked);
    } else {
        il.EmitNullableToNonNullableConversion(typeFrom, typeTo, isChecked);
    }
}
/// <summary>Calls Nullable&lt;T&gt;.HasValue on the address currently on the stack.</summary>
internal static void EmitHasValue(this ILGenerator il, Type nullableType) {
    MethodInfo mi = nullableType.GetMethod("get_HasValue", BindingFlags.Instance | BindingFlags.Public);
    Debug.Assert(nullableType.IsValueType);
    il.Emit(OpCodes.Call, mi);
}
/// <summary>Calls Nullable&lt;T&gt;.Value (throws at runtime when empty) on the address currently on the stack.</summary>
internal static void EmitGetValue(this ILGenerator il, Type nullableType) {
    MethodInfo mi = nullableType.GetMethod("get_Value", BindingFlags.Instance | BindingFlags.Public);
    Debug.Assert(nullableType.IsValueType);
    il.Emit(OpCodes.Call, mi);
}
/// <summary>Calls Nullable&lt;T&gt;.GetValueOrDefault() on the address currently on the stack.</summary>
internal static void EmitGetValueOrDefault(this ILGenerator il, Type nullableType) {
    MethodInfo mi = nullableType.GetMethod("GetValueOrDefault", System.Type.EmptyTypes);
    Debug.Assert(nullableType.IsValueType);
    il.Emit(OpCodes.Call, mi);
}
#endregion
#region Arrays
/// <summary>
/// Emits a strongly typed T[] populated with the given constant values:
/// newarr followed by dup/index/constant/stelem per element.
/// </summary>
internal static void EmitArray<T>(this ILGenerator il, IList<T> items) {
    ContractUtils.RequiresNotNull(items, "items");
    int count = items.Count;
    il.EmitInt(count);
    il.Emit(OpCodes.Newarr, typeof(T));
    for (int index = 0; index < count; index++) {
        // stelem consumes the array ref, so duplicate it for each element
        il.Emit(OpCodes.Dup);
        il.EmitInt(index);
        il.EmitConstant(items[index], typeof(T));
        il.EmitStoreElement(typeof(T));
    }
}
/// <summary>
/// Emits an array of <paramref name="count"/> elements of
/// <paramref name="elementType"/>; each element's value IL is produced by the
/// <paramref name="emit"/> callback, which receives the element index.
/// </summary>
internal static void EmitArray(this ILGenerator il, Type elementType, int count, Action<int> emit) {
    ContractUtils.RequiresNotNull(elementType, "elementType");
    ContractUtils.RequiresNotNull(emit, "emit");
    if (count < 0) throw Error.CountCannotBeNegative();
    il.EmitInt(count);
    il.Emit(OpCodes.Newarr, elementType);
    int index = 0;
    while (index < count) {
        il.Emit(OpCodes.Dup);
        il.EmitInt(index);
        emit(index);
        il.EmitStoreElement(elementType);
        index++;
    }
}
/// <summary>
/// Emits array construction code; assumes the bounds for every dimension are
/// already on the stack. Rank-1 arrays use newarr; multi-dimensional arrays
/// call the array type's int32^rank constructor.
/// </summary>
internal static void EmitArray(this ILGenerator il, Type arrayType) {
    ContractUtils.RequiresNotNull(arrayType, "arrayType");
    if (!arrayType.IsArray) throw Error.ArrayTypeMustBeArray();
    int rank = arrayType.GetArrayRank();
    if (rank == 1) {
        il.Emit(OpCodes.Newarr, arrayType.GetElementType());
        return;
    }
    Type[] boundTypes = new Type[rank];
    for (int i = 0; i < rank; i++) {
        boundTypes[i] = typeof(int);
    }
    il.EmitNew(arrayType, boundTypes);
}
#endregion
#region Support for emitting constants
/// <summary>
/// Emits a decimal constant. Whole values that fit in int/long use the
/// decimal(int)/decimal(long) constructors; everything else (including any
/// value with a fractional part) uses the five-argument bits constructor.
/// </summary>
internal static void EmitDecimal(this ILGenerator il, decimal value) {
    if (Decimal.Truncate(value) != value) {
        // Fractional part present: only the raw-bits form can represent it.
        il.EmitDecimalBits(value);
        return;
    }
    if (Int32.MinValue <= value && value <= Int32.MaxValue) {
        il.EmitInt(Decimal.ToInt32(value));
        il.EmitNew(typeof(Decimal).GetConstructor(new Type[] { typeof(int) }));
    } else if (Int64.MinValue <= value && value <= Int64.MaxValue) {
        il.EmitLong(Decimal.ToInt64(value));
        il.EmitNew(typeof(Decimal).GetConstructor(new Type[] { typeof(long) }));
    } else {
        il.EmitDecimalBits(value);
    }
}
/// <summary>
/// Emits a decimal constant from its raw bit representation via the
/// Decimal(int, int, int, bool, byte) constructor.
/// </summary>
private static void EmitDecimalBits(this ILGenerator il, decimal value) {
    // Decimal.GetBits layout: bits[0..2] = 96-bit mantissa (lo/mid/hi),
    // bits[3] = flags word (bit 31 = sign, bits 16-23 = scale factor).
    int[] bits = Decimal.GetBits(value);
    il.EmitInt(bits[0]);
    il.EmitInt(bits[1]);
    il.EmitInt(bits[2]);
    // Sign: high bit of the flags word.
    il.EmitBoolean((bits[3] & 0x80000000) != 0);
    // Scale: bits 16-23 of the flags word (0..28).
    il.EmitByte((byte)(bits[3] >> 16));
    il.EmitNew(typeof(decimal).GetConstructor(new Type[] { typeof(int), typeof(int), typeof(int), typeof(bool), typeof(byte) }));
}
/// <summary>
/// Emits default(T).
/// Semantics match C# compiler behavior.
/// </summary>
internal static void EmitDefault(this ILGenerator il, Type type) {
    switch (Type.GetTypeCode(type)) {
        case TypeCode.Empty:
        case TypeCode.String:
        case TypeCode.DBNull:
            // Reference types whose default is always null.
            il.Emit(OpCodes.Ldnull);
            break;

        case TypeCode.Object:
        case TypeCode.DateTime:
            if (!type.IsValueType) {
                il.Emit(OpCodes.Ldnull);
            } else {
                // Type.GetTypeCode on an enum returns the underlying
                // integer TypeCode, so enums never reach this branch.
                Debug.Assert(!type.IsEnum);

                // initobj on a fresh local is the IL for default(T) when T
                // is a generic type parameter, and it is also the standard
                // pattern for structs.
                LocalBuilder temp = il.DeclareLocal(type);
                il.Emit(OpCodes.Ldloca, temp);
                il.Emit(OpCodes.Initobj, type);
                il.Emit(OpCodes.Ldloc, temp);
            }
            break;

        case TypeCode.Boolean:
        case TypeCode.Char:
        case TypeCode.SByte:
        case TypeCode.Byte:
        case TypeCode.Int16:
        case TypeCode.UInt16:
        case TypeCode.Int32:
        case TypeCode.UInt32:
            // Every small integral type defaults to a 32-bit zero.
            il.Emit(OpCodes.Ldc_I4_0);
            break;

        case TypeCode.Int64:
        case TypeCode.UInt64:
            il.Emit(OpCodes.Ldc_I4_0);
            il.Emit(OpCodes.Conv_I8);
            break;

        case TypeCode.Single:
            il.Emit(OpCodes.Ldc_R4, default(Single));
            break;

        case TypeCode.Double:
            il.Emit(OpCodes.Ldc_R8, default(Double));
            break;

        case TypeCode.Decimal:
            // Equivalent to new Decimal(0).
            il.Emit(OpCodes.Ldc_I4_0);
            il.Emit(OpCodes.Newobj, typeof(Decimal).GetConstructor(new Type[] { typeof(int) }));
            break;

        default:
            throw ContractUtils.Unreachable;
    }
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Xml;
using System.Collections;
using System.Diagnostics;
using System.Runtime.Serialization;
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace System.Xml
{
// Tracks the dynamic string table for a binary XML writer session: each new
// string is assigned an increasing integer key so later occurrences can be
// written as the key alone.
public class XmlBinaryWriterSession
{
    // string value -> assigned session key (detects strings already added).
    private PriorityDictionary<string, int> _strings;
    // Per-dictionary cache: IXmlDictionary -> array of session keys indexed
    // by the dictionary entry's Key. Stored values are biased by +1 so that
    // 0 means "no session key assigned yet".
    private PriorityDictionary<IXmlDictionary, IntArray> _maps;
    private int _nextKey;

    public XmlBinaryWriterSession()
    {
        _nextKey = 0;
        _maps = new PriorityDictionary<IXmlDictionary, IntArray>();
        _strings = new PriorityDictionary<string, int>();
    }

    /// <summary>
    /// Assigns a new session key to <paramref name="value"/> and returns it
    /// via <paramref name="key"/>. Throws if the entry already has a key.
    /// </summary>
    public virtual bool TryAdd(XmlDictionaryString value, out int key)
    {
        IntArray keys;
        if (value == null)
            throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("value");
        if (_maps.TryGetValue(value.Dictionary, out keys))
        {
            // Undo the +1 bias; -1 therefore means "unassigned".
            key = (keys[value.Key] - 1);
            if (key != -1)
            {
                // If the key is already set, then something is wrong
                throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.Format(SR.XmlKeyAlreadyExists)));
            }
            key = Add(value.Value);
            keys[value.Key] = (key + 1);
            return true;
        }
        // First time this dictionary is seen: create its key array.
        key = Add(value.Value);
        keys = AddKeys(value.Dictionary, value.Key + 1);
        keys[value.Key] = (key + 1);
        return true;
    }

    // Records the string and hands out the next sequential session key.
    private int Add(string s)
    {
        int key = _nextKey++;
        _strings.Add(s, key);
        return key;
    }

    // Creates (and registers) the key array for a dictionary, sized at least
    // large enough to hold minCount entries.
    private IntArray AddKeys(IXmlDictionary dictionary, int minCount)
    {
        IntArray keys = new IntArray(Math.Max(minCount, 16));
        _maps.Add(dictionary, keys);
        return keys;
    }

    /// <summary>Discards all assigned keys and restarts numbering at 0.</summary>
    public void Reset()
    {
        _nextKey = 0;
        _maps.Clear();
        _strings.Clear();
    }

    /// <summary>
    /// Looks up the session key for <paramref name="s"/>; first via the
    /// per-dictionary key array, then by string value (caching the result
    /// back into the key array on a hit).
    /// </summary>
    internal bool TryLookup(XmlDictionaryString s, out int key)
    {
        IntArray keys;
        if (_maps.TryGetValue(s.Dictionary, out keys))
        {
            key = (keys[s.Key] - 1);
            if (key != -1)
            {
                return true;
            }
        }
        if (_strings.TryGetValue(s.Value, out key))
        {
            // Same string added through a different dictionary entry:
            // remember the mapping for this entry too.
            if (keys == null)
            {
                keys = AddKeys(s.Dictionary, s.Key + 1);
            }
            keys[s.Key] = (key + 1);
            return true;
        }
        key = -1;
        return false;
    }

    // A small lookup cache: the 16 most-recently-used entries live in a flat
    // array scanned linearly; once the array is full, additional entries
    // spill into a real Dictionary. Dictionary hits are promoted back into
    // the array, evicting the least-recently-used slot.
    private class PriorityDictionary<K, V> where K : class
    {
        private Dictionary<K, V> _dictionary;
        private Entry[] _list;
        private int _listCount;
        // Logical clock used to stamp entries for LRU eviction.
        private int _now;

        public PriorityDictionary()
        {
            _list = new Entry[16];
        }

        public void Clear()
        {
            _now = 0;
            _listCount = 0;
            // Clear releases the key/value references held in the slots.
            Array.Clear(_list, 0, _list.Length);
            if (_dictionary != null)
                _dictionary.Clear();
        }

        public bool TryGetValue(K key, out V value)
        {
            // Fast pass: reference equality only.
            for (int i = 0; i < _listCount; i++)
            {
                if (_list[i].Key == key)
                {
                    value = _list[i].Value;
                    _list[i].Time = Now;
                    return true;
                }
            }
            // Second pass: full Equals comparison.
            for (int i = 0; i < _listCount; i++)
            {
                if (_list[i].Key.Equals(key))
                {
                    value = _list[i].Value;
                    _list[i].Time = Now;
                    return true;
                }
            }
            if (_dictionary == null)
            {
                value = default(V);
                return false;
            }
            if (!_dictionary.TryGetValue(key, out value))
            {
                return false;
            }
            // Found in the overflow dictionary: promote into the flat list
            // by overwriting the least-recently-used slot.
            int minIndex = 0;
            int minTime = _list[0].Time;
            for (int i = 1; i < _listCount; i++)
            {
                if (_list[i].Time < minTime)
                {
                    minIndex = i;
                    minTime = _list[i].Time;
                }
            }
            _list[minIndex].Key = key;
            _list[minIndex].Value = value;
            _list[minIndex].Time = Now;
            return true;
        }

        public void Add(K key, V value)
        {
            if (_listCount < _list.Length)
            {
                _list[_listCount].Key = key;
                _list[_listCount].Value = value;
                _listCount++;
            }
            else
            {
                // List is full: lazily create the overflow dictionary and
                // copy the list entries into it so lookups stay complete.
                if (_dictionary == null)
                {
                    _dictionary = new Dictionary<K, V>();
                    for (int i = 0; i < _listCount; i++)
                    {
                        _dictionary.Add(_list[i].Key, _list[i].Value);
                    }
                }
                _dictionary.Add(key, value);
            }
        }

        private int Now
        {
            get
            {
                // Halve all timestamps before the clock can overflow.
                if (++_now == int.MaxValue)
                {
                    DecreaseAll();
                }
                return _now;
            }
        }

        // Compresses timestamps, preserving their relative order.
        private void DecreaseAll()
        {
            for (int i = 0; i < _listCount; i++)
            {
                _list[i].Time /= 2;
            }
            _now /= 2;
        }

        private struct Entry
        {
            public K Key;
            public V Value;
            public int Time;
        }
    }

    // A sparse, growable int array: reads past the end return 0, writes past
    // the end grow the backing store.
    private class IntArray
    {
        private int[] _array;

        public IntArray(int size)
        {
            _array = new int[size];
        }

        public int this[int index]
        {
            get
            {
                // Out-of-range reads return the "unassigned" sentinel 0.
                if (index >= _array.Length)
                    return 0;
                return _array[index];
            }
            set
            {
                if (index >= _array.Length)
                {
                    // Grow to at least double to amortize reallocation.
                    int[] newArray = new int[Math.Max(index + 1, _array.Length * 2)];
                    Array.Copy(_array, 0, newArray, 0, _array.Length);
                    _array = newArray;
                }
                _array[index] = value;
            }
        }
    }
}
}
| |
//
// X509CRL.cs: Handles X.509 certificates revocation lists.
//
// Author:
// Sebastien Pouliot <sebastien@xamarin.com>
//
// Copyright (C) 2004,2006 Novell Inc. (http://www.novell.com)
// Copyright 2013 Xamarin Inc. (http://www.xamarin.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.Globalization;
using System.IO;
using System.Security.Cryptography;
using Mono.Security.Cryptography;
using Mono.Security.X509.Extensions;
namespace Mono.Security.X509 {
/*
* CertificateList ::= SEQUENCE {
* tbsCertList TBSCertList,
* signatureAlgorithm AlgorithmIdentifier,
* signature BIT STRING
* }
*
* TBSCertList ::= SEQUENCE {
* version Version OPTIONAL,
* -- if present, MUST be v2
* signature AlgorithmIdentifier,
* issuer Name,
* thisUpdate Time,
* nextUpdate Time OPTIONAL,
* revokedCertificates SEQUENCE OF SEQUENCE {
* userCertificate CertificateSerialNumber,
* revocationDate Time,
* crlEntryExtensions Extensions OPTIONAL
* -- if present, MUST be v2
* } OPTIONAL,
* crlExtensions [0] Extensions OPTIONAL }
* -- if present, MUST be v2
*/
// Represents a parsed X.509 Certificate Revocation List (CertificateList).
#if !INSIDE_CORLIB
public
#endif
class X509Crl {

	// One revokedCertificates element: serial number, revocation date and
	// optional CRL entry extensions.
	public class X509CrlEntry {

		private byte[] sn;
		private DateTime revocationDate;
		private X509ExtensionCollection extensions;

		internal X509CrlEntry (byte[] serialNumber, DateTime revocationDate, X509ExtensionCollection extensions)
		{
			sn = serialNumber;
			this.revocationDate = revocationDate;
			if (extensions == null)
				this.extensions = new X509ExtensionCollection ();
			else
				this.extensions = extensions;
		}

		internal X509CrlEntry (ASN1 entry)
		{
			sn = entry [0].Value;
			// DER INTEGER is big-endian; the serial is kept reversed
			Array.Reverse (sn);
			revocationDate = ASN1Convert.ToDateTime (entry [1]);
			extensions = new X509ExtensionCollection (entry [2]);
		}

		public byte[] SerialNumber {
			// cloned so callers cannot alter our copy
			get { return (byte[]) sn.Clone (); }
		}

		public DateTime RevocationDate {
			get { return revocationDate; }
		}

		public X509ExtensionCollection Extensions {
			get { return extensions; }
		}

		// Re-encodes this entry as a DER SEQUENCE.
		public byte[] GetBytes ()
		{
			ASN1 sequence = new ASN1 (0x30);
			sequence.Add (new ASN1 (0x02, sn));
			sequence.Add (ASN1Convert.FromDateTime (revocationDate));
			if (extensions.Count > 0)
				sequence.Add (new ASN1 (extensions.GetBytes ()));
			return sequence.GetBytes ();
		}
	}

	private string issuer;
	private byte version;
	private DateTime thisUpdate;
	private DateTime nextUpdate;
	private ArrayList entries;
	private string signatureOID;
	private byte[] signature;
	private X509ExtensionCollection extensions;
	private byte[] encoded;
	private byte[] hash_value;

	public X509Crl (byte[] crl)
	{
		if (crl == null)
			throw new ArgumentNullException ("crl");
		encoded = (byte[]) crl.Clone ();
		Parse (encoded);
	}

	// Decodes the DER CertificateList structure (see the ASN.1 in the
	// header comment) into the instance fields.
	private void Parse (byte[] crl)
	{
		string e = "Input data cannot be coded as a valid CRL.";
		try {
			// CertificateList ::= SEQUENCE {
			ASN1 encodedCRL = new ASN1 (crl);
			if ((encodedCRL.Tag != 0x30) || (encodedCRL.Count != 3))
				throw new CryptographicException (e);
			// CertificateList / TBSCertList,
			ASN1 toBeSigned = encodedCRL [0];
			if ((toBeSigned.Tag != 0x30) || (toBeSigned.Count < 3))
				throw new CryptographicException (e);

			int n = 0;
			// CertificateList / TBSCertList / Version OPTIONAL, -- if present, MUST be v2
			if (toBeSigned [n].Tag == 0x02) {
				version = (byte) (toBeSigned [n++].Value [0] + 1);
			}
			else
				version = 1;	// DEFAULT
			// CertificateList / TBSCertList / AlgorithmIdentifier,
			signatureOID = ASN1Convert.ToOid (toBeSigned [n++][0]);
			// CertificateList / TBSCertList / Name,
			issuer = X501.ToString (toBeSigned [n++]);
			// CertificateList / TBSCertList / Time,
			thisUpdate = ASN1Convert.ToDateTime (toBeSigned [n++]);
			// CertificateList / TBSCertList / Time OPTIONAL,
			ASN1 next = toBeSigned [n++];
			if ((next.Tag == 0x17) || (next.Tag == 0x18)) {
				nextUpdate = ASN1Convert.ToDateTime (next);
				next = toBeSigned [n++];
			}
			// CertificateList / TBSCertList / revokedCertificates SEQUENCE OF SEQUENCE {
			entries = new ArrayList ();
			// this is OPTIONAL so it may not be present if no entries exists
			if ((next != null) && (next.Tag == 0x30)) {
				ASN1 revokedCertificates = next;
				for (int i=0; i < revokedCertificates.Count; i++) {
					entries.Add (new X509CrlEntry (revokedCertificates [i]));
				}
			} else {
				// not the revocation list; reprocess this element below
				n--;
			}
			// CertificateList / TBSCertList / crlExtensions [0] Extensions OPTIONAL }
			ASN1 extns = toBeSigned [n];
			if ((extns != null) && (extns.Tag == 0xA0) && (extns.Count == 1))
				extensions = new X509ExtensionCollection (extns [0]);
			else
				extensions = new X509ExtensionCollection (null);	// result in a read only object
			// CertificateList / AlgorithmIdentifier
			string signatureAlgorithm = ASN1Convert.ToOid (encodedCRL [1][0]);
			if (signatureOID != signatureAlgorithm)
				throw new CryptographicException (e + " [Non-matching signature algorithms in CRL]");

			// CertificateList / BIT STRING
			byte[] bitstring = encodedCRL [2].Value;
			// first byte contains unused bits in first byte
			signature = new byte [bitstring.Length - 1];
			Buffer.BlockCopy (bitstring, 1, signature, 0, signature.Length);
		}
		catch (CryptographicException) {
			// keep the more descriptive messages produced by the checks
			// above (e.g. non-matching signature algorithms)
			throw;
		}
		catch (Exception ex) {
			// wrap anything else, preserving it as the inner exception
			// for diagnosability
			throw new CryptographicException (e, ex);
		}
	}

	public ArrayList Entries {
		get { return ArrayList.ReadOnly (entries); }
	}

	public X509CrlEntry this [int index] {
		get { return (X509CrlEntry) entries [index]; }
	}

	public X509CrlEntry this [byte[] serialNumber] {
		get { return GetCrlEntry (serialNumber); }
	}

	public X509ExtensionCollection Extensions {
		get { return extensions; }
	}

	// Hash of the TBSCertList, computed lazily with the digest algorithm
	// named by the signature OID.
	public byte[] Hash {
		get {
			if (hash_value == null) {
				ASN1 encodedCRL = new ASN1 (encoded);
				byte[] toBeSigned = encodedCRL [0].GetBytes ();
				using (var ha = PKCS1.CreateFromOid (signatureOID))
					hash_value = ha.ComputeHash (toBeSigned);
			}
			return hash_value;
		}
	}

	public string IssuerName {
		get { return issuer; }
	}

	public DateTime NextUpdate {
		get { return nextUpdate; }
	}

	public DateTime ThisUpdate {
		get { return thisUpdate; }
	}

	public string SignatureAlgorithm {
		get { return signatureOID; }
	}

	public byte[] Signature {
		get {
			if (signature == null)
				return null;
			return (byte[]) signature.Clone ();
		}
	}

	public byte[] RawData {
		get { return (byte[]) encoded.Clone (); }
	}

	public byte Version {
		get { return version; }
	}

	public bool IsCurrent {
		get { return WasCurrent (DateTime.Now); }
	}

	// True if the CRL was valid at the supplied instant; a missing
	// nextUpdate only requires instant >= thisUpdate.
	public bool WasCurrent (DateTime instant)
	{
		if (nextUpdate == DateTime.MinValue)
			return (instant >= thisUpdate);
		else
			return ((instant >= thisUpdate) && (instant <= nextUpdate));
	}

	public byte[] GetBytes ()
	{
		return (byte[]) encoded.Clone ();
	}

	// Byte-wise comparison; both-null counts as equal.
	private bool Compare (byte[] array1, byte[] array2)
	{
		if ((array1 == null) && (array2 == null))
			return true;
		if ((array1 == null) || (array2 == null))
			return false;
		if (array1.Length != array2.Length)
			return false;
		for (int i=0; i < array1.Length; i++) {
			if (array1 [i] != array2 [i])
				return false;
		}
		return true;
	}

	public X509CrlEntry GetCrlEntry (X509Certificate x509)
	{
		if (x509 == null)
			throw new ArgumentNullException ("x509");

		return GetCrlEntry (x509.SerialNumber);
	}

	// Linear search of the revocation entries by serial number;
	// returns null when the serial isn't revoked.
	public X509CrlEntry GetCrlEntry (byte[] serialNumber)
	{
		if (serialNumber == null)
			throw new ArgumentNullException ("serialNumber");

		for (int i=0; i < entries.Count; i++) {
			X509CrlEntry entry = (X509CrlEntry) entries [i];
			if (Compare (serialNumber, entry.SerialNumber))
				return entry;
		}
		return null;
	}

	public bool VerifySignature (X509Certificate x509)
	{
		if (x509 == null)
			throw new ArgumentNullException ("x509");

		// 1. x509 certificate must be a CA certificate (unknown for v1 or v2 certs)
		if (x509.Version >= 3) {
			BasicConstraintsExtension basicConstraints = null;
			// 1.2. Check for ca = true in BasicConstraint
			X509Extension ext = x509.Extensions ["2.5.29.19"];
			if (ext != null) {
				basicConstraints = new BasicConstraintsExtension (ext);
				if (!basicConstraints.CertificateAuthority)
					return false;
			}
			// 1.1. Check for "cRLSign" bit in KeyUsage extension
			ext = x509.Extensions ["2.5.29.15"];
			if (ext != null) {
				KeyUsageExtension keyUsage = new KeyUsageExtension (ext);
				if (!keyUsage.Support (KeyUsages.cRLSign)) {
					// 2nd chance if basicConstraints is CertificateAuthority
					// and KeyUsage support digitalSignature
					if ((basicConstraints == null) || !keyUsage.Support (KeyUsages.digitalSignature))
						return false;
				}
			}
		}
		// 2. CRL issuer must match CA subject name
		if (issuer != x509.SubjectName)
			return false;
		// 3. Check the CRL signature with the CA certificate public key
		switch (signatureOID) {
			case "1.2.840.10040.4.3":
				return VerifySignature (x509.DSA);
			default:
				return VerifySignature (x509.RSA);
		}
	}

	internal bool VerifySignature (DSA dsa)
	{
		if (signatureOID != "1.2.840.10040.4.3")
			throw new CryptographicException ("Unsupported hash algorithm: " + signatureOID);
		DSASignatureDeformatter v = new DSASignatureDeformatter (dsa);
		// only SHA-1 is supported
		v.SetHashAlgorithm ("SHA1");
		// the signature is a DER SEQUENCE of the two INTEGERs r and s
		ASN1 sign = new ASN1 (signature);
		if ((sign == null) || (sign.Count != 2))
			return false;
		byte[] part1 = sign [0].Value;
		byte[] part2 = sign [1].Value;
		// the deformatter expects r and s packed into fixed 20-byte halves
		byte[] sig = new byte [40];
		// parts may be less than 20 bytes (i.e. first bytes were 0x00)
		// parts may be more than 20 bytes (i.e. first byte > 0x80, negative)
		int s1 = System.Math.Max (0, part1.Length - 20);
		int e1 = System.Math.Max (0, 20 - part1.Length);
		Buffer.BlockCopy (part1, s1, sig, e1, part1.Length - s1);
		int s2 = System.Math.Max (0, part2.Length - 20);
		// destination offset for s is 20 plus any left padding
		int e2 = System.Math.Max (20, 40 - part2.Length);
		Buffer.BlockCopy (part2, s2, sig, e2, part2.Length - s2);
		return v.VerifySignature (Hash, sig);
	}

	internal bool VerifySignature (RSA rsa)
	{
		RSAPKCS1SignatureDeformatter v = new RSAPKCS1SignatureDeformatter (rsa);
		v.SetHashAlgorithm (PKCS1.HashNameFromOid (signatureOID));
		return v.VerifySignature (Hash, signature);
	}

	// Validates only the signature (useful when the CA certificate itself
	// isn't available for the full checks above).
	public bool VerifySignature (AsymmetricAlgorithm aa)
	{
		if (aa == null)
			throw new ArgumentNullException ("aa");

		if (aa is RSA)
			return VerifySignature (aa as RSA);
		else if (aa is DSA)
			return VerifySignature (aa as DSA);
		else
			throw new NotSupportedException ("Unknown Asymmetric Algorithm " + aa.ToString ());
	}

	static public X509Crl CreateFromFile (string filename)
	{
		byte[] crl = null;
		using (FileStream fs = File.Open (filename, FileMode.Open, FileAccess.Read, FileShare.Read)) {
			crl = new byte [fs.Length];
			// Stream.Read is not guaranteed to fill the buffer in a single
			// call, so loop until every byte has been read.
			int offset = 0;
			while (offset < crl.Length) {
				int read = fs.Read (crl, offset, crl.Length - offset);
				if (read <= 0)
					throw new IOException ("Unexpected end of file: " + filename);
				offset += read;
			}
		}
		return new X509Crl (crl);
	}
}
}
| |
/*
* [The "BSD licence"]
* Copyright (c) 2005-2008 Terence Parr
* All rights reserved.
*
* Conversion to C#:
* Copyright (c) 2008-2009 Sam Harwell, Pixel Mine, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
namespace Antlr.Runtime
{
using System.Collections.Generic;
using ArgumentException = System.ArgumentException;
using Math = System.Math;
using DebuggerDisplay = System.Diagnostics.DebuggerDisplayAttribute;
using Exception = System.Exception;
using StringBuilder = System.Text.StringBuilder;
using Type = System.Type;
#if !PORTABLE
using Console = System.Console;
#endif
/** Useful for dumping out the input stream after doing some
* augmentation or other manipulations.
*
* You can insert stuff, replace, and delete chunks. Note that the
* operations are done lazily--only if you convert the buffer to a
* String. This is very efficient because you are not moving data around
* all the time. As the buffer of tokens is converted to strings, the
* toString() method(s) check to see if there is an operation at the
* current index. If so, the operation is done and then normal String
* rendering continues on the buffer. This is like having multiple Turing
* machine instruction streams (programs) operating on a single input tape. :)
*
* Since the operations are done lazily at toString-time, operations do not
* screw up the token index values. That is, an insert operation at token
* index i does not change the index values for tokens i+1..n-1.
*
* Because operations never actually alter the buffer, you may always get
* the original token stream back without undoing anything. Since
* the instructions are queued up, you can easily simulate transactions and
* roll back any changes if there is an error just by removing instructions.
* For example,
*
* CharStream input = new ANTLRFileStream("input");
* TLexer lex = new TLexer(input);
* TokenRewriteStream tokens = new TokenRewriteStream(lex);
* T parser = new T(tokens);
* parser.startRule();
*
* Then in the rules, you can execute
* Token t,u;
* ...
* input.insertAfter(t, "text to put after t");}
* input.insertAfter(u, "text after u");}
* System.out.println(tokens.toString());
*
* Actually, you have to cast the 'input' to a TokenRewriteStream. :(
*
* You can also have multiple "instruction streams" and get multiple
* rewrites from a single pass over the input. Just name the instruction
* streams and use that name again when printing the buffer. This could be
* useful for generating a C file and also its header file--all from the
* same buffer:
*
* tokens.insertAfter("pass1", t, "text to put after t");}
* tokens.insertAfter("pass2", u, "text after u");}
* System.out.println(tokens.toString("pass1"));
* System.out.println(tokens.toString("pass2"));
*
* If you don't use named rewrite streams, a "default" stream is used as
* the first example shows.
*/
[System.Serializable]
[DebuggerDisplay( "TODO: TokenRewriteStream debugger display" )]
public class TokenRewriteStream : CommonTokenStream
{
/** <summary>Name of the rewrite program used when the caller doesn't name one.</summary> */
public const string DEFAULT_PROGRAM_NAME = "default";
/** <summary>Initial capacity of a program's rewrite-operation list.</summary> */
public const int PROGRAM_INIT_SIZE = 100;
/** <summary>Smallest valid token index.</summary> */
public const int MIN_TOKEN_INDEX = 0;
// Define the rewrite operation hierarchy

/** <summary>
 *  Base class of queued rewrite instructions. An operation knows the token
 *  index it applies to and (optionally) the text it contributes.
 *  </summary>
 */
protected class RewriteOperation
{
    /** <summary>What index into rewrites List are we?</summary> */
    public int instructionIndex;
    /** <summary>Token buffer index.</summary> */
    public int index;
    public object text;

    // outer
    protected TokenRewriteStream stream;

    protected RewriteOperation(TokenRewriteStream stream, int index)
    {
        this.stream = stream;
        this.index = index;
    }

    protected RewriteOperation( TokenRewriteStream stream, int index, object text )
    {
        this.index = index;
        this.text = text;
        this.stream = stream;
    }

    /** <summary>
     *  Execute the rewrite operation by possibly adding to the buffer.
     *  Return the index of the next token to operate on.
     *  </summary>
     */
    public virtual int Execute( StringBuilder buf )
    {
        return index;
    }

    public override string ToString()
    {
        // GetType().Name on a nested C# type is already the simple name;
        // the Java port's '$'-stripping (IndexOf('$') + Substring) was a
        // no-op here and has been removed.
        string opName = this.GetType().Name;
        return string.Format("<{0}@{1}:\"{2}\">", opName, stream._tokens[index], text);
    }
}
/** <summary>Inserts its text immediately before the token at index.</summary> */
private class InsertBeforeOp : RewriteOperation
{
    public InsertBeforeOp( TokenRewriteStream stream, int index, object text ) :
        base( stream, index, text )
    {
    }

    public override int Execute( StringBuilder buf )
    {
        // Emit the inserted text first, then the original token
        // (unless it is the EOF marker, which has no text to emit).
        buf.Append( text );
        IToken token = stream._tokens[index];
        if (token.Type != CharStreamConstants.EndOfFile)
            buf.Append(token.Text);
        return index + 1;
    }
}
/** <summary>
 *  Replaces the inclusive token range index..lastIndex with new text;
 *  a null text makes this a pure delete. (A range replace from x..y is
 *  represented as one op rather than (y-x)+1 separate ones.)
 *  </summary>
 */
private class ReplaceOp : RewriteOperation
{
    public int lastIndex;

    public ReplaceOp( TokenRewriteStream stream, int from, int to, object text )
        : base( stream, from, text )
    {
        lastIndex = to;
    }

    public override int Execute( StringBuilder buf )
    {
        if ( text != null )
            buf.Append( text );
        // Resume after the last replaced token, skipping the whole range.
        return lastIndex + 1;
    }

    public override string ToString()
    {
        return text == null
            ? string.Format("<DeleteOp@{0}..{1}>", stream._tokens[index], stream._tokens[lastIndex])
            : string.Format("<ReplaceOp@{0}..{1}:\"{2}\">", stream._tokens[index], stream._tokens[lastIndex], text);
    }
}
/** <summary>
 *  You may have multiple, named streams of rewrite operations.
 *  I'm calling these things "programs."
 *  Maps String (name) -> rewrite (List)
 *  </summary>
 */
protected IDictionary<string, IList<RewriteOperation>> programs = null;

/** <summary>Map String (program name) -> Integer index</summary> */
protected IDictionary<string, int> lastRewriteTokenIndexes = null;

public TokenRewriteStream()
{
    Init();
}

/** <summary>Creates the program map (with the default program) and the last-rewrite-index map.</summary> */
protected void Init()
{
    programs = new Dictionary<string, IList<RewriteOperation>>();
    // the default/unnamed program always exists
    programs[DEFAULT_PROGRAM_NAME] = new List<RewriteOperation>( PROGRAM_INIT_SIZE );
    lastRewriteTokenIndexes = new Dictionary<string, int>();
}

public TokenRewriteStream( ITokenSource tokenSource )
    : base( tokenSource )
{
    Init();
}

public TokenRewriteStream( ITokenSource tokenSource, int channel )
    : base( tokenSource, channel )
{
    Init();
}
/** <summary>Rolls back the default program; see Rollback(string, int).</summary> */
public virtual void Rollback( int instructionIndex )
{
    Rollback( DEFAULT_PROGRAM_NAME, instructionIndex );
}

/** <summary>
 *  Rollback the instruction stream for a program so that
 *  the indicated instruction (via instructionIndex) is no
 *  longer in the stream. UNTESTED!
 *  </summary>
 */
public virtual void Rollback( string programName, int instructionIndex )
{
    IList<RewriteOperation> @is;
    if ( programs.TryGetValue( programName, out @is ) && @is != null )
    {
        // Keep only the instructions BEFORE instructionIndex, matching the
        // Java runtime's subList(MIN_TOKEN_INDEX, instructionIndex) whose
        // upper bound is exclusive. The previous "i <= instructionIndex"
        // loop kept the indicated instruction (so DeleteProgram left
        // instruction 0 behind) and indexed past the end of an empty
        // program; the i < @is.Count guard prevents the latter.
        List<RewriteOperation> sublist = new List<RewriteOperation>();
        for ( int i = MIN_TOKEN_INDEX; i < instructionIndex && i < @is.Count; i++ )
            sublist.Add( @is[i] );
        programs[programName] = sublist;
    }
}
/** <summary>Resets the default program; see DeleteProgram(string).</summary> */
public virtual void DeleteProgram()
{
    DeleteProgram( DEFAULT_PROGRAM_NAME );
}

/** <summary>Reset the program so that no instructions exist</summary> */
public virtual void DeleteProgram( string programName )
{
    // Rolling back to MIN_TOKEN_INDEX discards every queued instruction.
    Rollback( programName, MIN_TOKEN_INDEX );
}
/** <summary>Queues text to appear after token t in the default program.</summary> */
public virtual void InsertAfter( IToken t, object text )
{
    InsertAfter( DEFAULT_PROGRAM_NAME, t, text );
}

public virtual void InsertAfter( int index, object text )
{
    InsertAfter( DEFAULT_PROGRAM_NAME, index, text );
}

public virtual void InsertAfter( string programName, IToken t, object text )
{
    InsertAfter( programName, t.TokenIndex, text );
}

public virtual void InsertAfter( string programName, int index, object text )
{
    // to insert after, just insert before next index (even if past end)
    InsertBefore( programName, index + 1, text );
}

/** <summary>Queues text to appear before token t in the default program.</summary> */
public virtual void InsertBefore( IToken t, object text )
{
    InsertBefore( DEFAULT_PROGRAM_NAME, t, text );
}

public virtual void InsertBefore( int index, object text )
{
    InsertBefore( DEFAULT_PROGRAM_NAME, index, text );
}

public virtual void InsertBefore( string programName, IToken t, object text )
{
    InsertBefore( programName, t.TokenIndex, text );
}

/** <summary>Queues an InsertBeforeOp; all overloads funnel into this one.</summary> */
public virtual void InsertBefore( string programName, int index, object text )
{
    RewriteOperation op = new InsertBeforeOp( this, index, text );
    IList<RewriteOperation> rewrites = GetProgram( programName );
    // remember where this op sits in the program's instruction list
    op.instructionIndex = rewrites.Count;
    rewrites.Add( op );
}
/** <summary>Replaces the single token at index in the default program.</summary> */
public virtual void Replace( int index, object text )
{
    Replace( DEFAULT_PROGRAM_NAME, index, index, text );
}

public virtual void Replace( int from, int to, object text )
{
    Replace( DEFAULT_PROGRAM_NAME, from, to, text );
}

public virtual void Replace( IToken indexT, object text )
{
    Replace( DEFAULT_PROGRAM_NAME, indexT, indexT, text );
}

public virtual void Replace( IToken from, IToken to, object text )
{
    Replace( DEFAULT_PROGRAM_NAME, from, to, text );
}

/** <summary>
 *  Queues a ReplaceOp for the inclusive token range from..to; throws
 *  ArgumentException when the range is inverted or out of bounds.
 *  All overloads funnel into this one.
 *  </summary>
 */
public virtual void Replace( string programName, int from, int to, object text )
{
    if ( from > to || from < 0 || to < 0 || to >= _tokens.Count )
    {
        throw new ArgumentException( "replace: range invalid: " + from + ".." + to + "(size=" + _tokens.Count + ")" );
    }
    RewriteOperation op = new ReplaceOp( this, from, to, text );
    IList<RewriteOperation> rewrites = GetProgram( programName );
    // remember where this op sits in the program's instruction list
    op.instructionIndex = rewrites.Count;
    rewrites.Add( op );
}

public virtual void Replace( string programName, IToken from, IToken to, object text )
{
    Replace( programName,
            from.TokenIndex,
            to.TokenIndex,
            text );
}
/** <summary>Deletes the token at index; implemented as a Replace with null text.</summary> */
public virtual void Delete( int index )
{
    Delete( DEFAULT_PROGRAM_NAME, index, index );
}

public virtual void Delete( int from, int to )
{
    Delete( DEFAULT_PROGRAM_NAME, from, to );
}

public virtual void Delete( IToken indexT )
{
    Delete( DEFAULT_PROGRAM_NAME, indexT, indexT );
}

public virtual void Delete( IToken from, IToken to )
{
    Delete( DEFAULT_PROGRAM_NAME, from, to );
}

public virtual void Delete( string programName, int from, int to )
{
    // a delete is just a replace with nothing
    Replace( programName, from, to, null );
}

public virtual void Delete( string programName, IToken from, IToken to )
{
    Replace( programName, from, to, null );
}
public virtual int GetLastRewriteTokenIndex()
{
    return GetLastRewriteTokenIndex( DEFAULT_PROGRAM_NAME );
}

/** <summary>Returns the recorded index for the program, or -1 when none was set.</summary> */
protected virtual int GetLastRewriteTokenIndex( string programName )
{
    int value;
    if ( lastRewriteTokenIndexes.TryGetValue( programName, out value ) )
        return value;

    return -1;
}

protected virtual void SetLastRewriteTokenIndex( string programName, int i )
{
    lastRewriteTokenIndexes[programName] = i;
}
/** <summary>Returns the named program's instruction list, creating it on first use.</summary> */
protected virtual IList<RewriteOperation> GetProgram( string name )
{
    IList<RewriteOperation> operations;
    if ( programs.TryGetValue( name, out operations ) && operations != null )
        return operations;

    return InitializeProgram( name );
}

/** <summary>Creates and registers an empty instruction list for the program.</summary> */
private IList<RewriteOperation> InitializeProgram( string name )
{
    IList<RewriteOperation> operations = new List<RewriteOperation>( PROGRAM_INIT_SIZE );
    programs[name] = operations;
    return operations;
}
/** <summary>Renders the entire unmodified token stream.</summary> */
public virtual string ToOriginalString()
{
    Fill();
    return ToOriginalString( MIN_TOKEN_INDEX, Count - 1 );
}

/** <summary>Renders tokens start..end (inclusive) without applying any rewrites.</summary> */
public virtual string ToOriginalString( int start, int end )
{
    StringBuilder text = new StringBuilder();
    for ( int i = start; i >= MIN_TOKEN_INDEX && i <= end && i < _tokens.Count; i++ )
    {
        IToken token = Get(i);
        // the EOF marker carries no text
        if (token.Type != CharStreamConstants.EndOfFile)
            text.Append(token.Text);
    }
    return text.ToString();
}
public override string ToString()
{
    // Fill() buffers the entire token stream before rendering.
    Fill();
    return ToString( MIN_TOKEN_INDEX, Count - 1 );
}

/** <summary>Renders the whole stream with the named program's rewrites applied.</summary> */
public virtual string ToString( string programName )
{
    Fill();
    return ToString(programName, MIN_TOKEN_INDEX, Count - 1);
}

public override string ToString( int start, int end )
{
    return ToString( DEFAULT_PROGRAM_NAME, start, end );
}
/** <summary>
 *  Renders tokens start..end with the named program's rewrite operations
 *  applied. Operations are first reduced to at most one per token index,
 *  then the buffer is walked, letting each operation emit text and decide
 *  the next token index.
 *  </summary>
 */
public virtual string ToString( string programName, int start, int end )
{
    IList<RewriteOperation> rewrites;
    if ( !programs.TryGetValue( programName, out rewrites ) )
        rewrites = null;

    // ensure start/end are in range
    if ( end > _tokens.Count - 1 )
        end = _tokens.Count - 1;
    if ( start < 0 )
        start = 0;

    if ( rewrites == null || rewrites.Count == 0 )
    {
        return ToOriginalString( start, end ); // no instructions to execute
    }
    StringBuilder buf = new StringBuilder();

    // First, optimize instruction stream
    IDictionary<int, RewriteOperation> indexToOp = ReduceToSingleOperationPerIndex( rewrites );

    // Walk buffer, executing instructions and emitting tokens
    int i = start;
    while ( i <= end && i < _tokens.Count )
    {
        RewriteOperation op;
        bool exists = indexToOp.TryGetValue( i, out op );
        if ( exists )
        {
            // remove so any left have index size-1
            indexToOp.Remove( i );
        }
        if ( !exists || op == null )
        {
            IToken t = _tokens[i];
            // no operation at that index, just dump token
            if (t.Type != CharStreamConstants.EndOfFile)
                buf.Append(t.Text);
            i++; // move to next token
        }
        else
        {
            // the operation chooses where to resume (e.g. after a
            // replaced range)
            i = op.Execute( buf ); // execute operation and skip
        }
    }

    // include stuff after end if it's last index in buffer
    // So, if they did an insertAfter(lastValidIndex, "foo"), include
    // foo if end==lastValidIndex.
    if ( end == _tokens.Count - 1 )
    {
        // Scan any remaining operations after last token
        // should be included (they will be inserts).
        foreach ( RewriteOperation op in indexToOp.Values )
        {
            if ( op.index >= _tokens.Count - 1 )
                buf.Append( op.text );
        }
    }
    return buf.ToString();
}
/** We need to combine operations and report invalid operations (like
* overlapping replaces that are not completely nested). Inserts to
* same index need to be combined etc... Here are the cases:
*
* I.i.u I.j.v leave alone, nonoverlapping
* I.i.u I.i.v combine: Iivu
*
* R.i-j.u R.x-y.v | i-j in x-y delete first R
* R.i-j.u R.i-j.v delete first R
* R.i-j.u R.x-y.v | x-y in i-j ERROR
* R.i-j.u R.x-y.v | boundaries overlap ERROR
*
* Delete special case of replace (text==null):
* D.i-j.u D.x-y.v | boundaries overlap combine to max(min)..max(right)
*
* I.i.u R.x-y.v | i in (x+1)-y delete I (since insert before
* we're not deleting i)
* I.i.u R.x-y.v | i not in (x+1)-y leave alone, nonoverlapping
* R.x-y.v I.i.u | i in x-y ERROR
* R.x-y.v I.x.u R.x-y.uv (combine, delete I)
* R.x-y.v I.i.u | i not in x-y leave alone, nonoverlapping
*
* I.i.u = insert u before op @ index i
* R.x-y.u = replace x-y indexed tokens with u
*
* First we need to examine replaces. For any replace op:
*
* 1. wipe out any insertions before op within that range.
* 2. Drop any replace op before that is contained completely within
* that range.
* 3. Throw exception upon boundary overlap with any previous replace.
*
* Then we can deal with inserts:
*
* 1. for any inserts to same index, combine even if not adjacent.
* 2. for any prior replace with same left boundary, combine this
* insert with replace and delete this replace.
* 3. throw exception if index in same range as previous replace
*
* Don't actually delete; make op null in list. Easier to walk list.
* Later we can throw as we add to index -> op map.
*
* Note that I.2 R.2-2 will wipe out I.2 even though, technically, the
* inserted stuff would be before the replace range. But, if you
* add tokens in front of a method body '{' and then delete the method
* body, I think the stuff before the '{' you added should disappear too.
*
* Return a map from token index to operation.
*/
/// <summary>
/// Normalizes the rewrite instruction list so that there is at most one
/// operation per token index, merging and validating operations per the
/// rules documented above. Deleted operations are left in the list as
/// nulls rather than removed, so instructionIndex values stay valid.
/// </summary>
protected virtual IDictionary<int, RewriteOperation> ReduceToSingleOperationPerIndex( IList<RewriteOperation> rewrites )
{
    //System.out.println("rewrites="+rewrites);

    // WALK REPLACES
    for ( int i = 0; i < rewrites.Count; i++ )
    {
        RewriteOperation op = rewrites[i];
        if ( op == null )
            continue;
        if ( !( op is ReplaceOp ) )
            continue;
        ReplaceOp rop = (ReplaceOp)rewrites[i];
        // Wipe prior inserts within range
        var inserts = GetKindOfOps( rewrites, typeof( InsertBeforeOp ), i );
        for ( int j = 0; j < inserts.Count; j++ )
        {
            InsertBeforeOp iop = (InsertBeforeOp)inserts[j];
            if (iop.index == rop.index)
            {
                // E.g., insert before 2, delete 2..2; update replace
                // text to include insert before, kill insert
                rewrites[iop.instructionIndex] = null;
                rop.text = iop.text.ToString() + (rop.text != null ? rop.text.ToString() : string.Empty);
            }
            else if (iop.index > rop.index && iop.index <= rop.lastIndex)
            {
                // delete insert as it's a no-op.
                rewrites[iop.instructionIndex] = null;
            }
        }
        // Drop any prior replaces contained within
        var prevReplaces = GetKindOfOps( rewrites, typeof( ReplaceOp ), i );
        for ( int j = 0; j < prevReplaces.Count; j++ )
        {
            ReplaceOp prevRop = (ReplaceOp)prevReplaces[j];
            if ( prevRop.index >= rop.index && prevRop.lastIndex <= rop.lastIndex )
            {
                // delete replace as it's a no-op.
                rewrites[prevRop.instructionIndex] = null;
                continue;
            }
            // throw exception unless disjoint or identical
            bool disjoint =
                prevRop.lastIndex < rop.index || prevRop.index > rop.lastIndex;
            bool same =
                prevRop.index == rop.index && prevRop.lastIndex == rop.lastIndex;
            // Delete special case of replace (text==null):
            // D.i-j.u D.x-y.v | boundaries overlap   combine to max(min)..max(right)
            if (prevRop.text == null && rop.text == null && !disjoint)
            {
                //System.out.println("overlapping deletes: "+prevRop+", "+rop);
                rewrites[prevRop.instructionIndex] = null; // kill first delete
                rop.index = Math.Min(prevRop.index, rop.index);
                rop.lastIndex = Math.Max(prevRop.lastIndex, rop.lastIndex);
#if !PORTABLE
                Console.WriteLine("new rop " + rop);
#endif
            }
            else if ( !disjoint && !same )
            {
                throw new ArgumentException( "replace op boundaries of " + rop +
                                            " overlap with previous " + prevRop );
            }
        }
    }

    // WALK INSERTS
    for ( int i = 0; i < rewrites.Count; i++ )
    {
        RewriteOperation op = (RewriteOperation)rewrites[i];
        if ( op == null )
            continue;
        if ( !( op is InsertBeforeOp ) )
            continue;
        InsertBeforeOp iop = (InsertBeforeOp)rewrites[i];
        // combine current insert with prior if any at same index
        var prevInserts = GetKindOfOps( rewrites, typeof( InsertBeforeOp ), i );
        for ( int j = 0; j < prevInserts.Count; j++ )
        {
            InsertBeforeOp prevIop = (InsertBeforeOp)prevInserts[j];
            if ( prevIop.index == iop.index )
            { // combine objects
                // convert to strings...we're in process of toString'ing
                // whole token buffer so no lazy eval issue with any templates
                iop.text = CatOpText( iop.text, prevIop.text );
                // delete redundant prior insert
                rewrites[prevIop.instructionIndex] = null;
            }
        }
        // look for replaces where iop.index is in range; error
        var prevReplaces = GetKindOfOps( rewrites, typeof( ReplaceOp ), i );
        for ( int j = 0; j < prevReplaces.Count; j++ )
        {
            ReplaceOp rop = (ReplaceOp)prevReplaces[j];
            if ( iop.index == rop.index )
            {
                rop.text = CatOpText( iop.text, rop.text );
                // NOTE(review): `continue` only skips to the next prior
                // replace; later iterations still compare against the
                // just-deleted iop. This matches the reference ANTLR
                // implementation -- confirm before changing.
                rewrites[i] = null; // delete current insert
                continue;
            }
            if ( iop.index >= rop.index && iop.index <= rop.lastIndex )
            {
                throw new ArgumentException( "insert op " + iop +
                                            " within boundaries of previous " + rop );
            }
        }
    }
    // System.out.println("rewrites after="+rewrites);
    IDictionary<int, RewriteOperation> m = new Dictionary<int, RewriteOperation>();
    for ( int i = 0; i < rewrites.Count; i++ )
    {
        RewriteOperation op = (RewriteOperation)rewrites[i];
        if ( op == null )
            continue; // ignore deleted ops
        RewriteOperation existing;
        if ( m.TryGetValue( op.index, out existing ) && existing != null )
        {
            // The walks above should have merged/deleted any duplicates;
            // reaching here indicates an internal invariant violation.
            throw new Exception( "should only be one op per index" );
        }
        m[op.index] = op;
    }
    //System.out.println("index to op: "+m);
    return m;
}
/// <summary>
/// Stringifies and joins two operation text objects; a null operand
/// contributes the empty string.
/// </summary>
protected virtual string CatOpText( object a, object b )
{
    string left = a != null ? a.ToString() : string.Empty;
    string right = b != null ? b.ToString() : string.Empty;
    return left + right;
}
/// <summary>
/// Gets all operations of the given kind, scanning the entire list.
/// </summary>
protected virtual IList<RewriteOperation> GetKindOfOps( IList<RewriteOperation> rewrites, Type kind )
{
    return GetKindOfOps( rewrites, kind, rewrites.Count );
}
/** <summary>Get all operations before an index of a particular kind</summary> */
protected virtual IList<RewriteOperation> GetKindOfOps( IList<RewriteOperation> rewrites, Type kind, int before )
{
    List<RewriteOperation> matches = new List<RewriteOperation>();
    int limit = Math.Min( before, rewrites.Count );
    for ( int index = 0; index < limit; index++ )
    {
        RewriteOperation candidate = rewrites[index];
        // Deleted operations are left in the list as nulls; skip them.
        if ( candidate == null )
            continue;
        // Exact type match only -- subclasses of `kind` are not returned.
        if ( candidate.GetType() == kind )
            matches.Add( candidate );
    }
    return matches;
}
/// <summary>
/// Debug rendering of the whole buffered stream using token ToString forms.
/// </summary>
public virtual string ToDebugString()
{
    return ToDebugString( MIN_TOKEN_INDEX, Count - 1 );
}
/// <summary>
/// Debug rendering of tokens start..end: appends each token object's
/// ToString form (EOF included) rather than just its text.
/// </summary>
public virtual string ToDebugString( int start, int end )
{
    StringBuilder sb = new StringBuilder();
    for ( int index = start;
          index >= MIN_TOKEN_INDEX && index <= end && index < _tokens.Count;
          index++ )
    {
        sb.Append( Get( index ) );
    }
    return sb.ToString();
}
}
}
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.PythonTools.Editor;
using Microsoft.PythonTools.Editor.Core;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.PythonTools.Parsing;
using Microsoft.VisualStudio.Text;
namespace Microsoft.PythonTools.Intellisense {
using AP = AnalysisProtocol;
/// <summary>
/// Watches one or more text buffers that make up a single analyzed file and
/// pushes their changes to the <see cref="VsProjectAnalyzer"/>. Re-parsing is
/// debounced with a timer so a burst of edits results in a single parse.
/// </summary>
sealed class BufferParser : IPythonTextBufferInfoEventSink, IDisposable {
    private readonly Timer _timer;
    internal readonly PythonEditorServices _services;
    private readonly VsProjectAnalyzer _analyzer;
    private PythonTextBufferInfoWithRefCount[] _buffers;
    // Mutable state; guarded by lock(this).
    // NOTE(review): this class locks on `this` throughout; assumes no external
    // code takes a lock on the parser instance -- confirm before changing.
    private bool _parsing, _requeue, _textChange, _parseImmediately;

    private const int ReparseDelay = 1000;      // delay in MS before we re-parse a buffer w/ non-line changes.

    // Sentinel objects checked (via PythonTextBufferInfo) to control parsing.
    public static readonly object DoNotParse = new object();
    public static readonly object ParseImmediately = new object();

    public BufferParser(PythonEditorServices services, VsProjectAnalyzer analyzer, string filePath) {
        _services = services ?? throw new ArgumentNullException(nameof(services));
        _analyzer = analyzer ?? throw new ArgumentNullException(nameof(analyzer));
        FilePath = filePath;
        _buffers = Array.Empty<PythonTextBufferInfoWithRefCount>();

        // Created disabled; armed later by ReparseSoon()/ReparseNever().
        _timer = new Timer(ReparseTimer, null, Timeout.Infinite, Timeout.Infinite);
    }

    public bool IsDisposed { get; private set; }
    public string FilePath { get; }
    public bool IsTemporaryFile { get; set; }
    public bool SuppressErrorList { get; set; }

    public PythonTextBufferInfo GetBuffer(ITextBuffer buffer) {
        return buffer == null ? null : _services.GetBufferInfo(buffer);
    }

    // UNDONE: This is a temporary workaround while we migrate
    // from multiple buffers in a single entry to chained entries
    public PythonTextBufferInfo DefaultBufferInfo { get; private set; }

    public ITextBuffer[] AllBuffers => _buffers.Select(x => x.Buffer.Buffer).ToArray();
    public ITextBuffer[] Buffers => _buffers.Where(x => !x.Buffer.DoNotParse).Select(x => x.Buffer.Buffer).ToArray();

    internal void AddBuffer(ITextBuffer textBuffer) {
        var bi = _services.GetBufferInfo(textBuffer);

        var entry = bi.AnalysisEntry;

        if (entry == null) {
            throw new InvalidOperationException("buffer must have a project entry before parsing");
        }

        lock (this) {
            if (DefaultBufferInfo == null) {
                DefaultBufferInfo = bi;
            }

            // Re-adding an already tracked buffer just bumps its ref count.
            var existing = _buffers.FirstOrDefault(b => b.Buffer == bi);
            if (existing != null) {
                existing.AddRef();
                return;
            }

            _buffers = _buffers.Concat(Enumerable.Repeat(new PythonTextBufferInfoWithRefCount(bi), 1)).ToArray();
        }

        if (bi.ParseImmediately) {
            // Any buffer requesting immediate parsing enables it for
            // the whole file.
            _parseImmediately = true;
        }

        bi.AddSink(this, this);
        VsProjectAnalyzer.ConnectErrorList(bi);
    }

    internal void ClearBuffers() {
        lock (this) {
            DefaultBufferInfo = null;
            foreach (var bi in _buffers) {
                bi.Buffer.ClearAnalysisEntry();
                bi.Buffer.RemoveSink(this);
                VsProjectAnalyzer.DisconnectErrorList(bi.Buffer);
            }
            _buffers = Array.Empty<PythonTextBufferInfoWithRefCount>();
        }
    }

    /// <summary>
    /// Releases one reference to the buffer; fully detaches it when the
    /// count hits zero. Returns the number of buffers still tracked.
    /// </summary>
    internal int RemoveBuffer(ITextBuffer subjectBuffer) {
        int result;
        var bi = PythonTextBufferInfo.TryGetForBuffer(subjectBuffer);

        lock (this) {
            if (bi != null) {
                var existing = _buffers.FirstOrDefault(b => b.Buffer == bi);
                if (existing != null && existing.Release()) {
                    if (DefaultBufferInfo == bi) {
                        DefaultBufferInfo = null;
                    }
                    _buffers = _buffers.Where(b => b != existing).ToArray();

                    bi.ClearAnalysisEntry();
                    bi.RemoveSink(this);
                    VsProjectAnalyzer.DisconnectErrorList(bi);
                    bi.Buffer.Properties.RemoveProperty(typeof(PythonTextBufferInfo));
                }
            }
            result = _buffers.Length;
        }

        return result;
    }

    internal void ReparseTimer(object unused) {
        RequeueWorker();
    }

    internal void ReparseWorker(object unused) {
        ITextSnapshot[] snapshots;
        lock (this) {
            if (_parsing) {
                return;
            }

            _parsing = true;
            snapshots = _buffers
                .Where(b => !b.Buffer.DoNotParse)
                .Select(b => b.Buffer.CurrentSnapshot).ToArray();
        }

        ParseBuffers(snapshots).WaitAndHandleAllExceptions(_services.Site);

        lock (this) {
            _parsing = false;
            if (_requeue) {
                // A change arrived while we were parsing; run another pass.
                RequeueWorker();
            }
            _requeue = false;
        }
    }

    public Task EnsureCodeSyncedAsync(ITextBuffer buffer) => EnsureCodeSyncedAsync(buffer, false);

    /// <summary>
    /// Sends the buffer's current snapshot to the analyzer if it has not
    /// already been sent (or unconditionally when <paramref name="force"/>).
    /// </summary>
    public async Task EnsureCodeSyncedAsync(ITextBuffer buffer, bool force) {
        var lastSent = force ? null : _services.GetBufferInfo(buffer).LastSentSnapshot;
        var snapshot = buffer.CurrentSnapshot;
        if (force || lastSent != buffer.CurrentSnapshot) {
            await ParseBuffers(Enumerable.Repeat(snapshot, 1)).ConfigureAwait(false);
        }
    }

    private Task ParseBuffers(IEnumerable<ITextSnapshot> snapshots) {
        return ParseBuffersAsync(_services, _analyzer, snapshots, true);
    }

    // Enumerates versions in [from, to), oldest first.
    private static IEnumerable<ITextVersion> GetVersions(ITextVersion from, ITextVersion to) {
        for (var v = from; v != null && v != to; v = v.Next) {
            yield return v;
        }
    }

    /// <summary>
    /// Produces the protocol updates needed to bring the analyzer's view of
    /// the buffer up to <paramref name="snapshot"/>: either one full-content
    /// reset, or one incremental update per intermediate version.
    /// </summary>
    internal static IEnumerable<AP.FileUpdate> GetUpdatesForSnapshot(PythonTextBufferInfo buffer, ITextSnapshot snapshot) {
        if (buffer.DoNotParse || snapshot.IsReplBufferWithCommand()) {
            yield break;
        }

        var lastSent = buffer.AddSentSnapshot(snapshot);

        // Update last sent snapshot and the analysis cookie to our
        // current snapshot.
        var entry = buffer.AnalysisEntry;
        if (entry != null) {
            entry.AnalysisCookie = new SnapshotCookie(snapshot);
        }

        if (lastSent == null || lastSent == snapshot || lastSent.TextBuffer != buffer.Buffer) {
            // First time parsing from a live buffer, send the entire
            // file and set our initial snapshot. We'll roll forward
            // to new snapshots when we receive the errors event. This
            // just makes sure that the content is in sync.
            yield return new AP.FileUpdate {
                content = snapshot.GetText(),
                version = snapshot.Version.VersionNumber,
                kind = AP.FileUpdateKind.reset
            };
            yield break;
        }

        foreach (var v in GetVersions(lastSent.Version, snapshot.Version)) {
            yield return new AP.FileUpdate {
                version = v.VersionNumber + 1,
                changes = GetChanges(buffer, v).Reverse().ToArray(),
                kind = AP.FileUpdateKind.changes
            };
        }
    }

    /// <summary>
    /// Groups snapshots by buffer, sends the resulting updates to the
    /// analyzer, and retries failed buffers once with a full-content resend.
    /// </summary>
    internal static async Task ParseBuffersAsync(
        PythonEditorServices services,
        VsProjectAnalyzer analyzer,
        IEnumerable<ITextSnapshot> snapshots,
        bool retryOnFailure
    ) {
        var tasks = new List<Tuple<ITextSnapshot[], Task<AP.FileUpdateResponse>>>();

        foreach (var snapshotGroup in snapshots.GroupBy(s => PythonTextBufferInfo.TryGetForBuffer(s.TextBuffer))) {
            var entry = snapshotGroup.Key?.AnalysisEntry;
            if (entry == null) {
                continue;
            }

            var updates = snapshotGroup.SelectMany(s => GetUpdatesForSnapshot(snapshotGroup.Key, s)).Where(u => u != null).ToArray();
            if (!updates.Any()) {
                continue;
            }

            analyzer._analysisComplete = false;
            Interlocked.Increment(ref analyzer._parsePending);

            tasks.Add(Tuple.Create(snapshotGroup.ToArray(), analyzer.SendRequestAsync(
                new AP.FileUpdateRequest {
                    documentUri = entry.DocumentUri,
                    updates = updates
                }
            )));
        }

        var needRetry = new List<ITextSnapshot>();
        foreach (var task in tasks) {
            var res = await task.Item2;
            if (res?.failed ?? false) {
                Interlocked.Decrement(ref analyzer._parsePending);
                if (res != null) {
                    needRetry.AddRange(task.Item1);
                }
            } else {
                analyzer.OnAnalysisStarted();
            }
        }

        if (retryOnFailure && needRetry.Any()) {
            // Clearing the sent snapshot forces the retry to send a full
            // reset rather than incremental changes.
            foreach (var bi in needRetry.Select(s => PythonTextBufferInfo.TryGetForBuffer(s.TextBuffer))) {
                bi.ClearSentSnapshot();
            }

            await ParseBuffersAsync(services, analyzer, needRetry, false);
        }
    }

    /// <summary>
    /// Converts the text changes recorded on <paramref name="curVersion"/>
    /// into protocol change records using source (line/column) coordinates.
    /// </summary>
    internal static AP.ChangeInfo[] GetChanges(PythonTextBufferInfo buffer, ITextVersion curVersion) {
        var changes = new List<AP.ChangeInfo>();
        if (curVersion.Changes != null) {
            foreach (var change in curVersion.Changes) {
                var oldPos = buffer.LocationTracker.GetSourceLocation(change.OldPosition, curVersion.VersionNumber);
                var oldEnd = buffer.LocationTracker.GetSourceLocation(change.OldEnd, curVersion.VersionNumber);

                changes.Add(new AP.ChangeInfo {
                    startLine = oldPos.Line,
                    startColumn = oldPos.Column,
                    endLine = oldEnd.Line,
                    endColumn = oldEnd.Column,
                    newText = change.NewText,
                });
            }
        }
#if DEBUG
        Debug.WriteLine("Getting changes for version {0}", curVersion.VersionNumber);
        foreach (var c in changes) {
            Debug.WriteLine($" - ({c.startLine}, {c.startColumn})-({c.endLine}, {c.endColumn}): \"{c.newText}\"");
        }
#endif
        return changes.ToArray();
    }

    internal void Requeue() {
        RequeueWorker();
        ReparseNever();
    }

    private void ReparseNever() {
        ReparseSoon(Timeout.Infinite);
    }

    private void ReparseSoon(int delay = ReparseDelay) {
        try {
            _timer.Change(delay, Timeout.Infinite);
        } catch (ObjectDisposedException) {
            // Raced with Dispose; nothing left to schedule.
        }
    }

    private void RequeueWorker() {
        ThreadPool.QueueUserWorkItem(ReparseWorker);
    }

    /// <summary>
    /// Used to track if we have line + text changes, just text changes, or just line changes.
    ///
    /// If we have text changes followed by a line change we want to immediately reparse.
    /// If we have just text changes we want to reparse in ReparseDelay ms from the last change.
    /// If we have just repeated line changes (e.g. someone's holding down enter) we don't want to
    /// repeatedly reparse, instead we want to wait ReparseDelay ms.
    /// </summary>
    private bool LineAndTextChanges(TextContentChangedEventArgs e) {
        if (_textChange) {
            _textChange = false;
            return e.Changes.IncludesLineChanges;
        }

        bool mixedChanges = false;
        if (e.Changes.IncludesLineChanges) {
            mixedChanges = IncludesTextChanges(e);
        }

        return mixedChanges;
    }

    /// <summary>
    /// Returns true if the change includes text changes (not just line changes).
    /// </summary>
    private static bool IncludesTextChanges(TextContentChangedEventArgs e) {
        bool mixedChanges = false;
        foreach (var change in e.Changes) {
            // A pure line change deletes nothing and inserts only a newline.
            if (!string.IsNullOrEmpty(change.OldText) || change.NewText != Environment.NewLine) {
                mixedChanges = true;
                break;
            }
        }
        return mixedChanges;
    }

    public void Dispose() {
        if (!IsDisposed) {
            IsDisposed = true;
            ClearBuffers();
            _timer.Dispose();
        }
    }

    Task IPythonTextBufferInfoEventSink.PythonTextBufferEventAsync(PythonTextBufferInfo sender, PythonTextBufferInfoEventArgs e) {
        switch (e.Event) {
            case PythonTextBufferInfoEvents.TextContentChangedLowPriority:
                lock (this) {
                    // only immediately re-parse on line changes after we've seen a text change.
                    var ne = (e as PythonTextBufferInfoNestedEventArgs)?.NestedEventArgs as TextContentChangedEventArgs;

                    if (_parsing) {
                        // we are currently parsing, just requeue when we complete
                        _requeue = true;
                        ReparseNever();
                    } else if (_parseImmediately) {
                        // we are a test buffer, we should requeue immediately
                        Requeue();
                    } else if (ne == null) {
                        // failed to get correct type for this event
                        Debug.Fail("Failed to get correct event type");
                    } else if (LineAndTextChanges(ne)) {
                        // user pressed enter, we should requeue immediately
                        Requeue();
                    } else {
                        // parse if the user doesn't do anything for a while.
                        _textChange = IncludesTextChanges(ne);
                        ReparseSoon();
                    }
                }
                break;

            case PythonTextBufferInfoEvents.DocumentEncodingChanged:
                lock (this) {
                    if (_parsing) {
                        // we are currently parsing, just requeue when we complete
                        _requeue = true;
                        ReparseNever();
                    } else {
                        Requeue();
                    }
                }
                break;
        }
        return Task.CompletedTask;
    }

    // Pairs a tracked buffer with a reference count so the same buffer can
    // be added and removed symmetrically by multiple consumers.
    private class PythonTextBufferInfoWithRefCount {
        public readonly PythonTextBufferInfo Buffer;
        private int _refCount;

        public PythonTextBufferInfoWithRefCount(PythonTextBufferInfo buffer) {
            Buffer = buffer;
            _refCount = 1;
        }

        public void AddRef() {
            Interlocked.Increment(ref _refCount);
        }

        // Returns true when this call released the final reference.
        public bool Release() {
            return Interlocked.Decrement(ref _refCount) == 0;
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
namespace Lucene.Net.Search.Payloads
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
using IBits = Lucene.Net.Util.IBits;
using BytesRef = Lucene.Net.Util.BytesRef;
using NearSpansOrdered = Lucene.Net.Search.Spans.NearSpansOrdered;
using NearSpansUnordered = Lucene.Net.Search.Spans.NearSpansUnordered;
using Similarity = Lucene.Net.Search.Similarities.Similarity;
using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
using Spans = Lucene.Net.Search.Spans.Spans;
using SpanScorer = Lucene.Net.Search.Spans.SpanScorer;
using SpanWeight = Lucene.Net.Search.Spans.SpanWeight;
using ToStringUtils = Lucene.Net.Util.ToStringUtils;
/// <summary>
/// This class is very similar to
/// <see cref="Lucene.Net.Search.Spans.SpanNearQuery"/> except that it factors
/// in the value of the payloads located at each of the positions where the
/// <see cref="Lucene.Net.Search.Spans.TermSpans"/> occurs.
/// <para/>
/// NOTE: In order to take advantage of this with the default scoring implementation
/// (<see cref="Similarities.DefaultSimilarity"/>), you must override <see cref="Similarities.DefaultSimilarity.ScorePayload(int, int, int, BytesRef)"/>,
/// which returns 1 by default.
/// <para/>
/// Payload scores are aggregated using a pluggable <see cref="PayloadFunction"/>.
/// </summary>
/// <seealso cref="Lucene.Net.Search.Similarities.Similarity.SimScorer.ComputePayloadFactor(int, int, int, BytesRef)"/>
public class PayloadNearQuery : SpanNearQuery
{
    protected string m_fieldName;           // the single field searched by all clauses
    protected PayloadFunction m_function;   // pluggable aggregator for per-payload scores

    // Convenience overload: averages payload scores.
    public PayloadNearQuery(SpanQuery[] clauses, int slop, bool inOrder)
        : this(clauses, slop, inOrder, new AveragePayloadFunction())
    {
    }

    public PayloadNearQuery(SpanQuery[] clauses, int slop, bool inOrder, PayloadFunction function)
        : base(clauses, slop, inOrder)
    {
        m_fieldName = clauses[0].Field; // all clauses must have same field
        this.m_function = function;
    }

    public override Weight CreateWeight(IndexSearcher searcher)
    {
        return new PayloadNearSpanWeight(this, this, searcher);
    }

    /// <summary>
    /// Deep-clones the clauses; slop, order, payload function and boost
    /// are carried over to the copy.
    /// </summary>
    public override object Clone()
    {
        int sz = m_clauses.Count;
        SpanQuery[] newClauses = new SpanQuery[sz];
        for (int i = 0; i < sz; i++)
        {
            newClauses[i] = (SpanQuery)m_clauses[i].Clone();
        }
        PayloadNearQuery boostingNearQuery = new PayloadNearQuery(newClauses, m_slop, m_inOrder, m_function);
        boostingNearQuery.Boost = Boost;
        return boostingNearQuery;
    }

    public override string ToString(string field)
    {
        StringBuilder buffer = new StringBuilder();
        buffer.Append("payloadNear([");
        bool hasCommaSpace = false;
        foreach (SpanQuery clause in m_clauses)
        {
            buffer.Append(clause.ToString(field));
            buffer.Append(", ");
            hasCommaSpace = true;
        }
        // Trim the trailing ", " left by the loop above.
        if (hasCommaSpace)
            buffer.Remove(buffer.Length - 2, 2);
        buffer.Append("], ");
        buffer.Append(m_slop);
        buffer.Append(", ");
        buffer.Append(m_inOrder);
        buffer.Append(")");
        buffer.Append(ToStringUtils.Boost(Boost));
        return buffer.ToString();
    }

    public override int GetHashCode()
    {
        const int prime = 31;
        int result = base.GetHashCode();
        result = prime * result + ((m_fieldName == null) ? 0 : m_fieldName.GetHashCode());
        result = prime * result + ((m_function == null) ? 0 : m_function.GetHashCode());
        return result;
    }

    public override bool Equals(object obj)
    {
        if (this == obj)
        {
            return true;
        }
        if (!base.Equals(obj))
        {
            return false;
        }
        // base.Equals does not guarantee the exact runtime type matches.
        if (this.GetType() != obj.GetType())
        {
            return false;
        }
        PayloadNearQuery other = (PayloadNearQuery)obj;
        if (m_fieldName == null)
        {
            if (other.m_fieldName != null)
            {
                return false;
            }
        }
        else if (!m_fieldName.Equals(other.m_fieldName, StringComparison.Ordinal))
        {
            return false;
        }
        if (m_function == null)
        {
            if (other.m_function != null)
            {
                return false;
            }
        }
        else if (!m_function.Equals(other.m_function))
        {
            return false;
        }
        return true;
    }

    /// <summary>
    /// Weight that produces <see cref="PayloadNearSpanScorer"/> instances and
    /// explains the combined span + payload score.
    /// </summary>
    public class PayloadNearSpanWeight : SpanWeight
    {
        private readonly PayloadNearQuery outerInstance;

        public PayloadNearSpanWeight(PayloadNearQuery outerInstance, SpanQuery query, IndexSearcher searcher)
            : base(query, searcher)
        {
            this.outerInstance = outerInstance;
        }

        public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
        {
            return new PayloadNearSpanScorer(outerInstance, m_query.GetSpans(context, acceptDocs, m_termContexts), this, m_similarity, m_similarity.GetSimScorer(m_stats, context));
        }

        public override Explanation Explain(AtomicReaderContext context, int doc)
        {
            PayloadNearSpanScorer scorer = (PayloadNearSpanScorer)GetScorer(context, (context.AtomicReader).LiveDocs);
            if (scorer != null)
            {
                int newDoc = scorer.Advance(doc);
                if (newDoc == doc)
                {
                    float freq = scorer.Freq;
                    Similarity.SimScorer docScorer = m_similarity.GetSimScorer(m_stats, context);
                    Explanation expl = new Explanation();
                    expl.Description = "weight(" + Query + " in " + doc + ") [" + m_similarity.GetType().Name + "], result of:";
                    Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq));
                    expl.AddDetail(scoreExplanation);
                    expl.Value = scoreExplanation.Value;
                    string field = ((SpanQuery)Query).Field;
                    // now the payloads part
                    Explanation payloadExpl = outerInstance.m_function.Explain(doc, field, scorer.payloadsSeen, scorer.m_payloadScore);
                    // combined: final score is span score * payload score
                    ComplexExplanation result = new ComplexExplanation();
                    result.AddDetail(expl);
                    result.AddDetail(payloadExpl);
                    result.Value = expl.Value * payloadExpl.Value;
                    result.Description = "PayloadNearQuery, product of:";
                    return result;
                }
            }
            return new ComplexExplanation(false, 0.0f, "no matching term");
        }
    }

    /// <summary>
    /// Span scorer that additionally accumulates payload scores from all
    /// (recursively nested) sub-spans of each match.
    /// </summary>
    public class PayloadNearSpanScorer : SpanScorer
    {
        private readonly PayloadNearQuery outerInstance;

        internal Spans spans;
        protected internal float m_payloadScore;    // running aggregate over payloads in the current doc
        internal int payloadsSeen;                  // number of payloads folded into m_payloadScore

        protected internal PayloadNearSpanScorer(PayloadNearQuery outerInstance, Spans spans, Weight weight, Similarity similarity, Similarity.SimScorer docScorer)
            : base(spans, weight, docScorer)
        {
            this.outerInstance = outerInstance;
            this.spans = spans;
        }

        // Get the payloads associated with all underlying subspans,
        // recursing into nested ordered/unordered near-spans.
        public virtual void GetPayloads(Spans[] subSpans)
        {
            for (var i = 0; i < subSpans.Length; i++)
            {
                var span = subSpans[i] as NearSpansOrdered;
                if (span != null)
                {
                    if (span.IsPayloadAvailable)
                    {
                        ProcessPayloads(span.GetPayload(), subSpans[i].Start, subSpans[i].End);
                    }
                    GetPayloads(span.SubSpans);
                }
                else
                {
                    var unordered = subSpans[i] as NearSpansUnordered;
                    if (unordered != null)
                    {
                        if (unordered.IsPayloadAvailable)
                        {
                            ProcessPayloads(unordered.GetPayload(), subSpans[i].Start, subSpans[i].End);
                        }
                        GetPayloads(unordered.SubSpans);
                    }
                }
            }
        }

        // TODO change the whole spans api to use bytesRef, or nuke spans
        // Reused wrapper so each payload does not allocate a new BytesRef.
        internal BytesRef scratch = new BytesRef();

        /// <summary>
        /// By default, uses the <see cref="PayloadFunction"/> to score the payloads, but
        /// can be overridden to do other things.
        /// </summary>
        /// <param name="payLoads"> The payloads </param>
        /// <param name="start"> The start position of the span being scored </param>
        /// <param name="end"> The end position of the span being scored
        /// </param>
        /// <seealso cref="Spans.Spans"/>
        protected virtual void ProcessPayloads(ICollection<byte[]> payLoads, int start, int end)
        {
            foreach (var thePayload in payLoads)
            {
                scratch.Bytes = thePayload;
                scratch.Offset = 0;
                scratch.Length = thePayload.Length;
                m_payloadScore = outerInstance.m_function.CurrentScore(m_doc, outerInstance.m_fieldName, start, end, payloadsSeen, m_payloadScore, m_docScorer.ComputePayloadFactor(m_doc, spans.Start, spans.End, scratch));
                ++payloadsSeen;
            }
        }

        // Accumulates freq and payload score over every match in the
        // current document, advancing the spans as it goes.
        protected override bool SetFreqCurrentDoc()
        {
            if (!m_more)
            {
                return false;
            }
            m_doc = spans.Doc;
            m_freq = 0.0f;
            m_payloadScore = 0;
            payloadsSeen = 0;
            do
            {
                int matchLength = spans.End - spans.Start;
                m_freq += m_docScorer.ComputeSlopFactor(matchLength);
                Spans[] spansArr = new Spans[1];
                spansArr[0] = spans;
                GetPayloads(spansArr);
                m_more = spans.Next();
            } while (m_more && (m_doc == spans.Doc));
            return true;
        }

        public override float GetScore()
        {
            // Final score = span score * aggregated payload score.
            return base.GetScore() * outerInstance.m_function.DocScore(m_doc, outerInstance.m_fieldName, payloadsSeen, m_payloadScore);
        }
    }
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
using System;
using System.Collections.ObjectModel;
using System.Management.Automation;
using Dbg = System.Management.Automation;
namespace Microsoft.PowerShell.Commands
{
/// <summary>
/// A command that adds the parent and child parts of a path together
/// with the appropriate path separator.
/// </summary>
[Cmdlet("Join", "Path", SupportsTransactions = true, HelpUri = "http://go.microsoft.com/fwlink/?LinkID=113347")]
[OutputType(typeof(string))]
public class JoinPathCommand : CoreCommandWithCredentialsBase
{
    #region Parameters

    /// <summary>
    /// Gets or sets the path parameter to the command
    /// </summary>
    [Parameter(Position = 0, Mandatory = true, ValueFromPipeline = true, ValueFromPipelineByPropertyName = true)]
    [Alias("PSPath")]
    public string[] Path { get; set; }

    /// <summary>
    /// Gets or sets the childPath parameter to the command
    /// </summary>
    [Parameter(Position = 1, Mandatory = true, ValueFromPipelineByPropertyName = true)]
    [AllowNull]
    [AllowEmptyString]
    public string ChildPath { get; set; } = String.Empty;

    /// <summary>
    /// Determines if the path should be resolved after being joined
    /// </summary>
    [Parameter]
    public SwitchParameter Resolve { get; set; }

    #endregion Parameters

    #region Command code

    /// <summary>
    /// Joins each parent path with the child path and writes the result,
    /// optionally resolving the joined path through the provider.
    /// Provider/path failures are reported as non-terminating errors and
    /// processing continues with the next path.
    /// </summary>
    protected override void ProcessRecord()
    {
        Dbg.Diagnostics.Assert(
            Path != null,
            "Since Path is a mandatory parameter, paths should never be null");

        foreach (string path in Path)
        {
            // First join the path elements
            string joinedPath = null;

            try
            {
                joinedPath =
                    SessionState.Path.Combine(path, ChildPath, CmdletProviderContext);
            }
            catch (PSNotSupportedException notSupported)
            {
                WritePathError(notSupported.ErrorRecord, notSupported);
                continue;
            }
            catch (DriveNotFoundException driveNotFound)
            {
                WritePathError(driveNotFound.ErrorRecord, driveNotFound);
                continue;
            }
            catch (ProviderNotFoundException providerNotFound)
            {
                WritePathError(providerNotFound.ErrorRecord, providerNotFound);
                continue;
            }
            catch (ItemNotFoundException pathNotFound)
            {
                WritePathError(pathNotFound.ErrorRecord, pathNotFound);
                continue;
            }

            if (Resolve)
            {
                WriteResolvedPaths(joinedPath);
            }
            else if (joinedPath != null)
            {
                WriteObject(joinedPath);
            }
        }
    }

    /// <summary>
    /// Resolves the joined path through the provider and writes each
    /// resolved path to the pipeline. Failures are written as
    /// non-terminating errors.
    /// </summary>
    private void WriteResolvedPaths(string joinedPath)
    {
        // Resolve the paths. The default API (GetResolvedPSPathFromPSPath)
        // does not allow non-existing paths.
        Collection<PathInfo> resolvedPaths;

        try
        {
            resolvedPaths =
                SessionState.Path.GetResolvedPSPathFromPSPath(joinedPath, CmdletProviderContext);
        }
        catch (PSNotSupportedException notSupported)
        {
            WritePathError(notSupported.ErrorRecord, notSupported);
            return;
        }
        catch (DriveNotFoundException driveNotFound)
        {
            WritePathError(driveNotFound.ErrorRecord, driveNotFound);
            return;
        }
        catch (ProviderNotFoundException providerNotFound)
        {
            WritePathError(providerNotFound.ErrorRecord, providerNotFound);
            return;
        }
        catch (ItemNotFoundException pathNotFound)
        {
            WritePathError(pathNotFound.ErrorRecord, pathNotFound);
            return;
        }

        for (int index = 0; index < resolvedPaths.Count; ++index)
        {
            try
            {
                if (resolvedPaths[index] != null)
                {
                    WriteObject(resolvedPaths[index].Path);
                }
            }
            catch (PSNotSupportedException notSupported)
            {
                WritePathError(notSupported.ErrorRecord, notSupported);
            }
            catch (DriveNotFoundException driveNotFound)
            {
                WritePathError(driveNotFound.ErrorRecord, driveNotFound);
            }
            catch (ProviderNotFoundException providerNotFound)
            {
                WritePathError(providerNotFound.ErrorRecord, providerNotFound);
            }
            catch (ItemNotFoundException pathNotFound)
            {
                WritePathError(pathNotFound.ErrorRecord, pathNotFound);
            }
        }
    }

    /// <summary>
    /// Reports a provider/path failure as a non-terminating error, reusing
    /// the error record carried on the exception.
    /// </summary>
    private void WritePathError(ErrorRecord errorRecord, Exception exception)
    {
        WriteError(new ErrorRecord(errorRecord, exception));
    }

    #endregion Command code
} // JoinPathCommand
} // namespace Microsoft.PowerShell.Commands
| |
/*
* CP1255.cs - Hebrew (Windows) code page.
*
* Copyright (c) 2002 Southern Storm Software, Pty Ltd
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
// Generated from "ibm-5351.ucm".
namespace I18N.MidEast
{
using System;
using I18N.Common;
/*
 * CP1255 - Hebrew (Windows) code page, generated from "ibm-5351.ucm".
 *
 * Decoding uses the 256-entry ToChars table; encoding maps each UTF-16
 * code unit to a single byte via MapChar, substituting '?' (0x3F) for
 * characters that have no CP1255 representation.
 */
public class CP1255 : ByteEncoding
{
    public CP1255()
        : base(1255, ToChars, "Hebrew (Windows)",
               "windows-1255", "windows-1255", "windows-1255",
               true, true, true, true, 1255)
    {}

    // Byte value -> Unicode character. Entries of '\u003F' mark bytes that
    // are unassigned in CP1255 and decode to '?'.
    private static readonly char[] ToChars = {
        '\u0000', '\u0001', '\u0002', '\u0003', '\u0004', '\u0005',
        '\u0006', '\u0007', '\u0008', '\u0009', '\u000A', '\u000B',
        '\u000C', '\u000D', '\u000E', '\u000F', '\u0010', '\u0011',
        '\u0012', '\u0013', '\u0014', '\u0015', '\u0016', '\u0017',
        '\u0018', '\u0019', '\u001A', '\u001B', '\u001C', '\u001D',
        '\u001E', '\u001F', '\u0020', '\u0021', '\u0022', '\u0023',
        '\u0024', '\u0025', '\u0026', '\u0027', '\u0028', '\u0029',
        '\u002A', '\u002B', '\u002C', '\u002D', '\u002E', '\u002F',
        '\u0030', '\u0031', '\u0032', '\u0033', '\u0034', '\u0035',
        '\u0036', '\u0037', '\u0038', '\u0039', '\u003A', '\u003B',
        '\u003C', '\u003D', '\u003E', '\u003F', '\u0040', '\u0041',
        '\u0042', '\u0043', '\u0044', '\u0045', '\u0046', '\u0047',
        '\u0048', '\u0049', '\u004A', '\u004B', '\u004C', '\u004D',
        '\u004E', '\u004F', '\u0050', '\u0051', '\u0052', '\u0053',
        '\u0054', '\u0055', '\u0056', '\u0057', '\u0058', '\u0059',
        '\u005A', '\u005B', '\u005C', '\u005D', '\u005E', '\u005F',
        '\u0060', '\u0061', '\u0062', '\u0063', '\u0064', '\u0065',
        '\u0066', '\u0067', '\u0068', '\u0069', '\u006A', '\u006B',
        '\u006C', '\u006D', '\u006E', '\u006F', '\u0070', '\u0071',
        '\u0072', '\u0073', '\u0074', '\u0075', '\u0076', '\u0077',
        '\u0078', '\u0079', '\u007A', '\u007B', '\u007C', '\u007D',
        '\u007E', '\u007F', '\u20AC', '\u0081', '\u201A', '\u0192',
        '\u201E', '\u2026', '\u2020', '\u2021', '\u02C6', '\u2030',
        '\u008A', '\u2039', '\u008C', '\u008D', '\u008E', '\u008F',
        '\u0090', '\u2018', '\u2019', '\u201C', '\u201D', '\u2022',
        '\u2013', '\u2014', '\u02DC', '\u2122', '\u009A', '\u203A',
        '\u009C', '\u009D', '\u009E', '\u009F', '\u00A0', '\u00A1',
        '\u00A2', '\u00A3', '\u20AA', '\u00A5', '\u00A6', '\u00A7',
        '\u00A8', '\u00A9', '\u00D7', '\u00AB', '\u00AC', '\u00AD',
        '\u00AE', '\u00AF', '\u00B0', '\u00B1', '\u00B2', '\u00B3',
        '\u00B4', '\u00B5', '\u00B6', '\u00B7', '\u00B8', '\u00B9',
        '\u00F7', '\u00BB', '\u00BC', '\u00BD', '\u00BE', '\u00BF',
        '\u05B0', '\u05B1', '\u05B2', '\u05B3', '\u05B4', '\u05B5',
        '\u05B6', '\u05B7', '\u05B8', '\u05B9', '\u003F', '\u05BB',
        '\u05BC', '\u05BD', '\u05BE', '\u05BF', '\u05C0', '\u05C1',
        '\u05C2', '\u05C3', '\u05F0', '\u05F1', '\u05F2', '\u05F3',
        '\u05F4', '\u003F', '\u003F', '\u003F', '\u003F', '\u003F',
        '\u003F', '\u003F', '\u05D0', '\u05D1', '\u05D2', '\u05D3',
        '\u05D4', '\u05D5', '\u05D6', '\u05D7', '\u05D8', '\u05D9',
        '\u05DA', '\u05DB', '\u05DC', '\u05DD', '\u05DE', '\u05DF',
        '\u05E0', '\u05E1', '\u05E2', '\u05E3', '\u05E4', '\u05E5',
        '\u05E6', '\u05E7', '\u05E8', '\u05E9', '\u05EA', '\u003F',
        '\u003F', '\u200E', '\u200F', '\u003F',
    };

    /// <summary>
    /// Maps one UTF-16 code unit to its CP1255 byte value (0-255).
    /// ASCII passes through; fullwidth ASCII forms (U+FF01-U+FF5E) fold to
    /// their ASCII equivalents; unmappable characters become '?' (0x3F).
    /// Shared by both ToBytes overloads, which previously duplicated this
    /// switch verbatim.
    /// </summary>
    private static int MapChar(int ch)
    {
        if (ch < 128)
        {
            return ch;
        }
        switch (ch)
        {
            // C1 controls and Latin-1 punctuation whose byte value equals
            // the Unicode code point in CP1255.
            case 0x0081: case 0x008A: case 0x008C: case 0x008D:
            case 0x008E: case 0x008F: case 0x0090: case 0x009A:
            case 0x009C: case 0x009D: case 0x009E: case 0x009F:
            case 0x00A0: case 0x00A1: case 0x00A2: case 0x00A3:
            case 0x00A5: case 0x00A6: case 0x00A7: case 0x00A8:
            case 0x00A9: case 0x00AB: case 0x00AC: case 0x00AD:
            case 0x00AE: case 0x00AF: case 0x00B0: case 0x00B1:
            case 0x00B2: case 0x00B3: case 0x00B4: case 0x00B5:
            case 0x00B6: case 0x00B7: case 0x00B8: case 0x00B9:
            case 0x00BB: case 0x00BC: case 0x00BD: case 0x00BE:
            case 0x00BF:
                return ch;
            case 0x00D7: return 0xAA; // multiplication sign
            case 0x00F7: return 0xBA; // division sign
            case 0x0192: return 0x83; // latin small f with hook
            case 0x02C6: return 0x88; // modifier circumflex
            case 0x02DC: return 0x98; // small tilde
            // Hebrew points U+05B0-U+05B9 and U+05BB-U+05C3 (U+05BA is
            // unassigned in CP1255), plus letters U+05D0-U+05EA: all three
            // runs are stored at a fixed offset of -0x04F0.
            case 0x05B0: case 0x05B1: case 0x05B2: case 0x05B3:
            case 0x05B4: case 0x05B5: case 0x05B6: case 0x05B7:
            case 0x05B8: case 0x05B9:
            case 0x05BB: case 0x05BC: case 0x05BD: case 0x05BE:
            case 0x05BF: case 0x05C0: case 0x05C1: case 0x05C2:
            case 0x05C3:
            case 0x05D0: case 0x05D1: case 0x05D2: case 0x05D3:
            case 0x05D4: case 0x05D5: case 0x05D6: case 0x05D7:
            case 0x05D8: case 0x05D9: case 0x05DA: case 0x05DB:
            case 0x05DC: case 0x05DD: case 0x05DE: case 0x05DF:
            case 0x05E0: case 0x05E1: case 0x05E2: case 0x05E3:
            case 0x05E4: case 0x05E5: case 0x05E6: case 0x05E7:
            case 0x05E8: case 0x05E9: case 0x05EA:
                return ch - 0x04F0;
            // Hebrew ligatures/punctuation U+05F0-U+05F4 at offset -0x051C.
            case 0x05F0: case 0x05F1: case 0x05F2: case 0x05F3:
            case 0x05F4:
                return ch - 0x051C;
            case 0x200E: return 0xFD; // left-to-right mark
            case 0x200F: return 0xFE; // right-to-left mark
            case 0x2013: return 0x96; // en dash
            case 0x2014: return 0x97; // em dash
            case 0x2018: return 0x91; // left single quote
            case 0x2019: return 0x92; // right single quote
            case 0x201A: return 0x82; // single low-9 quote
            case 0x201C: return 0x93; // left double quote
            case 0x201D: return 0x94; // right double quote
            case 0x201E: return 0x84; // double low-9 quote
            case 0x2020: return 0x86; // dagger
            case 0x2021: return 0x87; // double dagger
            case 0x2022: return 0x95; // bullet
            case 0x2026: return 0x85; // ellipsis
            case 0x2030: return 0x89; // per mille
            case 0x2039: return 0x8B; // single left angle quote
            case 0x203A: return 0x9B; // single right angle quote
            case 0x20AA: return 0xA4; // new sheqel sign
            case 0x20AC: return 0x80; // euro sign
            case 0x2122: return 0x99; // trade mark
            default:
                // Fold fullwidth ASCII variants back to ASCII; everything
                // else has no CP1255 equivalent and becomes '?'.
                if (ch >= 0xFF01 && ch <= 0xFF5E)
                {
                    return ch - 0xFEE0;
                }
                return 0x3F;
        }
    }

    /// <summary>
    /// Encodes charCount characters from chars (starting at charIndex)
    /// into bytes (starting at byteIndex), one byte per character.
    /// </summary>
    protected override void ToBytes(char[] chars, int charIndex, int charCount,
                                    byte[] bytes, int byteIndex)
    {
        while (charCount-- > 0)
        {
            bytes[byteIndex++] = (byte)MapChar(chars[charIndex++]);
        }
    }

    /// <summary>
    /// Encodes charCount characters from s (starting at charIndex)
    /// into bytes (starting at byteIndex), one byte per character.
    /// </summary>
    protected override void ToBytes(String s, int charIndex, int charCount,
                                    byte[] bytes, int byteIndex)
    {
        while (charCount-- > 0)
        {
            bytes[byteIndex++] = (byte)MapChar(s[charIndex++]);
        }
    }
}; // class CP1255
// Alias type so the encoding can be located by its IANA-style name
// ("windows-1255" with '-' replaced by '_'); behaviorally identical to CP1255.
public class ENCwindows_1255 : CP1255
{
// Delegates entirely to the CP1255 constructor.
public ENCwindows_1255() : base() {}
}; // class ENCwindows_1255
}; // namespace I18N.MidEast
| |
using System;
using System.IO;
using System.Drawing;
namespace Jovian.ImagePHP
{
/// <summary>
/// Converts a bitmap into a PHP/GD script that redraws the image: runs of
/// identical pixels are compressed into imageline() calls, leftover single
/// pixels are emitted as imagesetpixel() calls.
/// </summary>
class ThreeChannel
{
    // Destination for the generated PHP; opened per CompileImage2Php call.
    StreamWriter Output;

    /// <summary>
    /// Returns the length of the horizontal run of pixels identical to the
    /// pixel at (x, y), scanning at most <paramref name="max"/> pixels.
    /// The <paramref name="Used"/> parameter is unused but kept to mirror
    /// the other scan/draw helpers.
    /// </summary>
    int ScanX(Bitmap B, int x, int y, bool[][] Used, int max)
    {
        Color C = B.GetPixel(x, y);
        for (int k = 1; k < max; k++)
        {
            Color R = B.GetPixel(x + k, y);
            if (Math.Abs(C.A - R.A) + Math.Abs(C.R - R.R) + Math.Abs(C.G - R.G) + Math.Abs(C.B - R.B) > 0)
            {
                // Pixels x .. x+k-1 match, so the run length is k.
                // (Previously returned k - 1, which dropped the last pixel of
                // every interior run and forced an extra imagesetpixel later;
                // the no-difference path below already returns the full max.)
                return k;
            }
        }
        return max;
    }

    /// <summary>
    /// Vertical counterpart of ScanX: length of the run of pixels identical
    /// to (x, y) going downward, scanning at most <paramref name="max"/>.
    /// </summary>
    int ScanY(Bitmap B, int x, int y, bool[][] Used, int max)
    {
        Color C = B.GetPixel(x, y);
        for (int k = 1; k < max; k++)
        {
            Color R = B.GetPixel(x, y + k);
            if (Math.Abs(C.A - R.A) + Math.Abs(C.R - R.R) + Math.Abs(C.G - R.G) + Math.Abs(C.B - R.B) > 0)
            {
                // Same off-by-one fix as ScanX: run length is k, not k - 1.
                return k;
            }
        }
        return max;
    }

    /// <summary>
    /// Emits a horizontal imageline() covering <paramref name="end"/> pixels
    /// starting at (start, y) and marks them as consumed.
    /// </summary>
    void LineX(Bitmap B, int start, int y, bool[][] Used, int end, Color C)
    {
        for (int x = start; x < start + end; x++)
        {
            Used[y][x] = true;
        }
        Output.WriteLine("imageline($i," + start + "," + y + "," + (start + end - 1) + "," + y + ",$c);");
    }

    /// <summary>
    /// Emits a vertical imageline() covering <paramref name="end"/> pixels
    /// starting at (x, start) and marks them as consumed.
    /// </summary>
    void LineY(Bitmap B, int x, int start, bool[][] Used, int end, Color C)
    {
        for (int y = start; y < start + end; y++)
        {
            Used[y][x] = true;
        }
        Output.WriteLine("imageline($i," + x + "," + start + "," + x + "," + (start + end - 1) + ",$c);");
    }

    /// <summary>
    /// Emits all drawing commands for one color C: first allocates the color
    /// (choosing the cheapest m1..m5/mc helper based on which channels are
    /// zero or equal), then draws vertical runs, horizontal runs, and finally
    /// any remaining single pixels of that color.
    /// </summary>
    void ScanColor(Bitmap B, bool[][] Used, Color C)
    {
        // Pick the most specific color-allocation helper; 127 - A/2 converts
        // the 0-255 alpha into GD's 0-127 (0 = opaque) alpha scale.
        if (C.R == 0 && C.G == 0 && C.B == 0)
        {
            Output.WriteLine("$c = m1($i," + (127 - C.A / 2) + ");");
        }
        else if (C.R == C.G && C.G == C.B)
        {
            Output.WriteLine("$c = m2($i," + C.R + "," + (127 - C.A / 2) + ");");
        }
        else if (C.R == 0 && C.B == 0)
        {
            Output.WriteLine("$c = m3($i," + C.G + "," + (127 - C.A / 2) + ");");
        }
        else if (C.R == 0 && C.G == 0)
        {
            Output.WriteLine("$c = m4($i," + C.B + "," + (127 - C.A / 2) + ");");
        }
        else if (C.G == 0 && C.B == 0)
        {
            Output.WriteLine("$c = m5($i," + C.R + "," + (127 - C.A / 2) + ");");
        }
        else
        {
            Output.WriteLine("$c = mc($i," + C.R + "," + C.G + "," + C.B + "," + (127 - C.A / 2) + ");");
        }
        // Pass 1: vertical runs of at least 2 pixels.
        for (int y = 0; y < B.Height; y++)
        {
            for (int x = 0; x < B.Width; x++)
            {
                Color LC = B.GetPixel(x, y);
                if (LC.Equals(C))
                {
                    if (!Used[y][x])
                    {
                        int Take = ScanY(B, x, y, Used, B.Height - y);
                        if (Take >= 2)
                        {
                            LineY(B, x, y, Used, Take, C);
                        }
                    }
                }
            }
        }
        // Pass 2: horizontal runs of at least 2 pixels among the remainder.
        for (int y = 0; y < B.Height; y++)
        {
            for (int x = 0; x < B.Width; x++)
            {
                Color LC = B.GetPixel(x, y);
                if (LC.Equals(C))
                {
                    if (!Used[y][x])
                    {
                        int Take = ScanX(B, x, y, Used, B.Width - x);
                        if (Take >= 2)
                        {
                            LineX(B, x, y, Used, Take, C);
                        }
                    }
                }
            }
        }
        // Pass 3: whatever is left becomes individual imagesetpixel() calls.
        for (int y = 0; y < B.Height; y++)
        {
            for (int x = 0; x < B.Width; x++)
            {
                if (!Used[y][x])
                {
                    Color LC = B.GetPixel(x, y);
                    if (LC.Equals(C))
                    {
                        Output.WriteLine("imagesetpixel($i," + x + "," + y + ",$c);");
                        Used[y][x] = true;
                    }
                }
            }
        }
    }

    /// <summary>
    /// Reads <paramref name="image"/> and writes a PHP script to
    /// <paramref name="outfilename"/> that reproduces it with GD primitives.
    /// Both the bitmap and the output writer are now disposed via using,
    /// so the file is flushed and closed even if scanning throws.
    /// </summary>
    public void CompileImage2Php(string image, string outfilename)
    {
        using (Bitmap B = new Bitmap(image))
        using (Output = new StreamWriter(outfilename))
        {
            Output.WriteLine("<?php");
            Output.WriteLine("include_once('blend.inc');");
            // NOTE(review): assumes outfilename has a 3-char extension and a
            // 4-char directory prefix to strip -- confirm against callers.
            Output.WriteLine("cache(\"" + outfilename.Substring(0, outfilename.Length - 4).ToLower().Substring(4) + "\");");
            Output.WriteLine("$i=ib(" + B.Width + "," + B.Height + ");");
            // bool array elements default to false; no explicit clear needed.
            bool[][] Used = new bool[B.Height][];
            for (int y = 0; y < B.Height; y++)
            {
                Used[y] = new bool[B.Width];
            }
            for (int y = 0; y < B.Height; y++)
            {
                for (int x = 0; x < B.Width; x++)
                {
                    if (!Used[y][x])
                    {
                        Color C = B.GetPixel(x, y);
                        ScanColor(B, Used, C);
                    }
                }
            }
            Output.WriteLine("fin($i);");
            Output.WriteLine("?>");
            Output.Flush();
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Reflection;
using System.Xml.Serialization;
using ClangCompile;
using System.Xml;
using System.IO;
using Microsoft.Build.Framework;
namespace PropSchemaGen
{
/// <summary>
/// Generates a Visual Studio property-schema XAML file (sbclang.xml) from the
/// attributes declared on the ClangCompile.Clang task class.
/// </summary>
class Program
{
    // XAML namespace used by Visual Studio rule (property schema) files.
    static string xmlns = "clr-namespace:Microsoft.Build.Framework.XamlTypes;assembly=Microsoft.Build.Framework";

    /// <summary>
    /// Assigns each property of a class to an XML attribute for a given Element.
    /// Properties whose value matches their DefaultValueAttribute (case-insensitive)
    /// are skipped; null/empty values are never emitted.
    /// </summary>
    /// <param name="doc">XmlDocument used to create elements</param>
    /// <param name="obj">Class whose properties are to be serialized</param>
    /// <param name="elem">Element to add attributes to</param>
    /// <returns>The same XmlElement, for brevity of code</returns>
    static XmlElement PropsToXmlAttr(XmlDocument doc, object obj, XmlElement elem)
    {
        foreach (PropertyInfo pi in obj.GetType().GetProperties(BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Instance))
        {
            // Read the property once instead of four times per iteration.
            object rawValue = pi.GetValue(obj);
            if (rawValue == null)
            {
                continue;
            }
            string value = rawValue.ToString();
            if (value == "")
            {
                continue;
            }
            DefaultValueAttribute dvAttr = (DefaultValueAttribute)Attribute.GetCustomAttribute(pi, typeof(DefaultValueAttribute));
            if (dvAttr == null)
            {
                elem.SetAttribute(pi.Name, value);
            }
            else if (string.Equals(dvAttr.Value, value, StringComparison.OrdinalIgnoreCase))
            {
                // Case-insensitive compare without the ToLower() allocations.
                Console.WriteLine("Default value of {0} skipped for {1}", dvAttr.Value, pi.Name);
            }
            else
            {
                Console.WriteLine("Default value of {0} overwritten by {1} for {2}", dvAttr.Value, value, pi.Name);
                elem.SetAttribute(pi.Name, value);
            }
        }
        return elem;
    }

    /// <summary>
    /// Helper that creates an element "name" under "parentName.name" and populates
    /// the attributes with obj's properties.
    /// </summary>
    /// <param name="doc">XmlDocument used to create elements</param>
    /// <param name="obj">Class whose properties are to be serialized</param>
    /// <param name="elem">Element to which a subelement will be added</param>
    /// <param name="name">Name of the subelement</param>
    /// <returns>The same XmlElement, for brevity of code</returns>
    static XmlElement AttrToSubElem(XmlDocument doc, object obj, XmlElement elem, string name)
    {
        XmlElement subElem = (XmlElement)elem.AppendChild(doc.CreateElement(elem.Name + "." + name, xmlns)).AppendChild(doc.CreateElement(name, xmlns));
        PropsToXmlAttr(doc, obj, subElem);
        return elem;
    }

    static void Main(string[] args)
    {
        XmlDocument doc = new XmlDocument();
        XmlElement projectElement = doc.CreateElement("ProjectSchemaDefinitions", xmlns);
        doc.AppendChild(projectElement);
        // These attributes are probably common to all property schemas in Visual Studio.
        projectElement.SetAttribute("xmlns", "clr-namespace:Microsoft.Build.Framework.XamlTypes;assembly=Microsoft.Build.Framework");
        projectElement.SetAttribute("xmlns:x", "http://schemas.microsoft.com/winfx/2006/xaml");
        projectElement.SetAttribute("xmlns:sys", "clr-namespace:System;assembly=mscorlib");
        projectElement.SetAttribute("xmlns:transformCallback", "Microsoft.Cpp.Dev10.ConvertPropertyCallback");
        XmlElement ruleElement = (XmlElement)projectElement.AppendChild(doc.CreateElement("Rule", xmlns));
        ruleElement.AppendChild(doc.CreateComment("This file is generated! Modify with caution!"));
        // The Rule and DataSource attributes on the Clang class are mandatory.
        RuleAttribute ruleAttr = (RuleAttribute)Attribute.GetCustomAttribute(typeof(Clang), typeof(RuleAttribute));
        if (ruleAttr == null)
        {
            throw new InvalidOperationException("Class requires Rule attribute!");
        }
        PropsToXmlAttr(doc, ruleAttr, ruleElement);
        DataSourceAttribute dataAttr = (DataSourceAttribute)Attribute.GetCustomAttribute(typeof(Clang), typeof(DataSourceAttribute));
        if (dataAttr == null)
        {
            throw new InvalidOperationException("Class requires DataSource attribute!");
        }
        AttrToSubElem(doc, dataAttr, ruleElement, "DataSource");
        // Emit the categories in declared order, and keep a name lookup so we
        // can warn about properties that reference an unknown category.
        XmlElement catsElement = (XmlElement)ruleElement.AppendChild(doc.CreateElement("Rule.Categories", xmlns));
        PropertyCategoryAttribute[] allAttributes = (PropertyCategoryAttribute[])Attribute.GetCustomAttributes(typeof(Clang), typeof(PropertyCategoryAttribute));
        allAttributes = allAttributes.OrderBy(x => x.Order).ToArray();
        Dictionary<string, PropertyCategoryAttribute> categoryMap = allAttributes.ToDictionary(x => x.Name);
        MemberInfo[] members = typeof(Clang).GetMembers(BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Instance);
        foreach (PropertyCategoryAttribute catAttr in allAttributes)
        {
            XmlElement catElement = (XmlElement)catsElement.AppendChild(doc.CreateElement("Category", xmlns));
            PropsToXmlAttr(doc, catAttr, catElement);
        }
        foreach (MemberInfo member in members)
        {
            PropertyPageAttribute[] attrs = (PropertyPageAttribute[])Attribute.GetCustomAttributes(member, typeof(PropertyPageAttribute));
            foreach (PropertyPageAttribute attr in attrs)
            {
                Console.WriteLine("Member name: {0}", member.Name);
                if (attr.Category != null && attr.Category != "")
                {
                    PropertyCategoryAttribute req;
                    if (!categoryMap.TryGetValue(attr.Category, out req))
                    {
                        Console.WriteLine("Category not found: {0}", attr.Category);
                    }
                }
            }
            XmlElement curElement = null;
            switch (member.MemberType)
            {
                case MemberTypes.Property:
                    PropertyInfo pInfo = (PropertyInfo)member;
                    PropertyPageAttribute propAttr = (PropertyPageAttribute)Attribute.GetCustomAttribute(member, typeof(PropertyPageAttribute));
                    // Untracked parameter.
                    if (propAttr == null)
                    {
                        continue;
                    }
                    if (pInfo.PropertyType.IsSubclassOf(typeof(Enum)))
                    {
                        Console.WriteLine("Warning: Enumerations are invalid types because VisualStudio isn't that smart. You'll have to make it a string and back it with an enum.");
                        continue;
                    }
                    else if (pInfo.PropertyType.IsAssignableFrom(typeof(ITaskItem[])))
                    {
                        curElement = (XmlElement)ruleElement.AppendChild(doc.CreateElement("StringListProperty", xmlns));
                        PropsToXmlAttr(doc, propAttr, curElement).SetAttribute("Name", member.Name);
                    }
                    else if (pInfo.PropertyType.IsAssignableFrom(typeof(String)))
                    {
                        // String properties backed by an enumeration become
                        // EnumProperty elements with one EnumValue per field.
                        EnumeratedValueAttribute enumAttr = (EnumeratedValueAttribute)Attribute.GetCustomAttribute(member, typeof(EnumeratedValueAttribute));
                        if (enumAttr != null)
                        {
                            curElement = (XmlElement)ruleElement.AppendChild(doc.CreateElement("EnumProperty", xmlns));
                            PropsToXmlAttr(doc, propAttr, curElement).SetAttribute("Name", member.Name);
                            int foundAttr = 0;
                            FieldInfo[] fields = enumAttr.Enumeration.GetFields(BindingFlags.Public | BindingFlags.Static);
                            foreach (FieldInfo field in fields)
                            {
                                FieldAttribute attr = (FieldAttribute)field.GetCustomAttribute(typeof(FieldAttribute));
                                if (attr != null)
                                {
                                    foundAttr++;
                                    PropsToXmlAttr(doc, attr, (XmlElement)curElement.AppendChild(doc.CreateElement("EnumValue", xmlns))).SetAttribute("Name", field.Name);
                                }
                            }
                            if (foundAttr > 0 && foundAttr != fields.Length)
                            {
                                Console.WriteLine("Not all fields in {0} have attributes", pInfo.Name);
                            }
                        }
                        else
                        {
                            curElement = (XmlElement)ruleElement.AppendChild(doc.CreateElement("StringProperty", xmlns));
                            PropsToXmlAttr(doc, propAttr, curElement).SetAttribute("Name", member.Name);
                        }
                    }
                    else if (pInfo.PropertyType.IsAssignableFrom(typeof(String[])))
                    {
                        curElement = (XmlElement)ruleElement.AppendChild(doc.CreateElement("StringListProperty", xmlns));
                        PropsToXmlAttr(doc, propAttr, curElement).SetAttribute("Name", member.Name);
                    }
                    else if (pInfo.PropertyType.IsAssignableFrom(typeof(Boolean)))
                    {
                        curElement = (XmlElement)ruleElement.AppendChild(doc.CreateElement("BoolProperty", xmlns));
                        PropsToXmlAttr(doc, propAttr, curElement).SetAttribute("Name", member.Name);
                    }
                    else if (pInfo.PropertyType.IsAssignableFrom(typeof(int)))
                    {
                        curElement = (XmlElement)ruleElement.AppendChild(doc.CreateElement("IntProperty", xmlns));
                        PropsToXmlAttr(doc, propAttr, curElement).SetAttribute("Name", member.Name);
                    }
                    break;
                // Fields are not exposed, only property accessors.
                case MemberTypes.Field:
                    break;
                default:
                    break;
            }
            // A per-member DataSource overrides the rule-level one.
            if (curElement != null)
            {
                DataSourceAttribute dataSrcAttr = (DataSourceAttribute)Attribute.GetCustomAttribute(member, typeof(DataSourceAttribute));
                if (dataSrcAttr != null)
                {
                    AttrToSubElem(doc, dataSrcAttr, curElement, "DataSource");
                }
            }
        }
        // Top-level ItemType/FileExtension/ContentType elements, named after
        // the attribute class minus the "Attribute" suffix.
        IEnumerable<Attribute> additionalAttrs = Attribute.GetCustomAttributes(typeof(Clang), typeof(ItemTypeAttribute));
        additionalAttrs = additionalAttrs.Concat(Attribute.GetCustomAttributes(typeof(Clang), typeof(FileExtensionAttribute)));
        additionalAttrs = additionalAttrs.Concat(Attribute.GetCustomAttributes(typeof(Clang), typeof(ContentTypeAttribute)));
        foreach (Attribute additionalAttr in additionalAttrs)
        {
            string attrName = additionalAttr.GetType().Name;
            attrName = attrName.Substring(0, attrName.Length - "Attribute".Length);
            PropsToXmlAttr(doc, additionalAttr, (XmlElement)projectElement.AppendChild(doc.CreateElement(attrName, xmlns)));
        }
        // Dispose the writer and stream so the output is flushed to disk;
        // previously neither was closed and the file could be truncated.
        using (FileStream fs = new FileStream(@"..\..\sbclang.xml", FileMode.Create))
        {
            XmlWriterSettings xmlSettings = new XmlWriterSettings();
            xmlSettings.NewLineOnAttributes = true;
            xmlSettings.Indent = true;
            xmlSettings.NamespaceHandling = NamespaceHandling.OmitDuplicates;
            using (XmlWriter writer = XmlWriter.Create(fs, xmlSettings))
            {
                doc.Save(writer);
            }
        }
        Console.WriteLine("All done! Press any key to exit.");
        Console.ReadKey();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
namespace System.Net.Http.Headers
{
[SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix",
Justification = "This is not a collection")]
// Strongly typed access to the headers of an HTTP response. Parsing tables
// are shared (static) across all instances; per-instance collection storage
// is allocated lazily.
public sealed class HttpResponseHeaders : HttpHeaders
{
private static readonly Dictionary<string, HttpHeaderParser> s_parserStore = CreateParserStore();
private static readonly HashSet<string> s_invalidHeaders = CreateInvalidHeaders();
// Slot indices into _specialCollectionsSlots; see GetSpecializedCollection.
private const int AcceptRangesSlot = 0;
private const int ProxyAuthenticateSlot = 1;
private const int ServerSlot = 2;
private const int VarySlot = 3;
private const int WwwAuthenticateSlot = 4;
private const int NumCollectionsSlots = 5;
// Lazily allocated backing store for the five collection-valued properties.
private object[] _specialCollectionsSlots;
// Lazily allocated holder for headers shared between requests and responses.
private HttpGeneralHeaders _generalHeaders;
#region Response Headers
// Returns the collection stored at the given slot, creating it on first
// access via creationFunc (which receives `this` to avoid a closure).
private T GetSpecializedCollection<T>(int slot, Func<HttpResponseHeaders, T> creationFunc)
{
// 5 properties each lazily allocate a collection to store the value(s) for that property.
// Rather than having a field for each of these, store them untyped in an array that's lazily
// allocated. Then we only pay for the 45 bytes for those fields when any is actually accessed.
object[] collections = _specialCollectionsSlots ?? (_specialCollectionsSlots = new object[NumCollectionsSlots]);
object result = collections[slot];
if (result == null)
{
collections[slot] = result = creationFunc(this);
}
return (T)result;
}
public HttpHeaderValueCollection<string> AcceptRanges =>
GetSpecializedCollection(AcceptRangesSlot, thisRef => new HttpHeaderValueCollection<string>(HttpKnownHeaderNames.AcceptRanges, thisRef, HeaderUtilities.TokenValidator));
public TimeSpan? Age
{
get { return HeaderUtilities.GetTimeSpanValue(HttpKnownHeaderNames.Age, this); }
set { SetOrRemoveParsedValue(HttpKnownHeaderNames.Age, value); }
}
public EntityTagHeaderValue ETag
{
get { return (EntityTagHeaderValue)GetParsedValues(HttpKnownHeaderNames.ETag); }
set { SetOrRemoveParsedValue(HttpKnownHeaderNames.ETag, value); }
}
public Uri Location
{
get { return (Uri)GetParsedValues(HttpKnownHeaderNames.Location); }
set { SetOrRemoveParsedValue(HttpKnownHeaderNames.Location, value); }
}
public HttpHeaderValueCollection<AuthenticationHeaderValue> ProxyAuthenticate =>
GetSpecializedCollection(ProxyAuthenticateSlot, thisRef => new HttpHeaderValueCollection<AuthenticationHeaderValue>(HttpKnownHeaderNames.ProxyAuthenticate, thisRef));
public RetryConditionHeaderValue RetryAfter
{
get { return (RetryConditionHeaderValue)GetParsedValues(HttpKnownHeaderNames.RetryAfter); }
set { SetOrRemoveParsedValue(HttpKnownHeaderNames.RetryAfter, value); }
}
public HttpHeaderValueCollection<ProductInfoHeaderValue> Server =>
GetSpecializedCollection(ServerSlot, thisRef => new HttpHeaderValueCollection<ProductInfoHeaderValue>(HttpKnownHeaderNames.Server, thisRef));
public HttpHeaderValueCollection<string> Vary =>
GetSpecializedCollection(VarySlot, thisRef => new HttpHeaderValueCollection<string>(HttpKnownHeaderNames.Vary, thisRef, HeaderUtilities.TokenValidator));
public HttpHeaderValueCollection<AuthenticationHeaderValue> WwwAuthenticate =>
GetSpecializedCollection(WwwAuthenticateSlot, thisRef => new HttpHeaderValueCollection<AuthenticationHeaderValue>(HttpKnownHeaderNames.WWWAuthenticate, thisRef));
#endregion
#region General Headers
// These forward to HttpGeneralHeaders; getters force its creation except
// where a static helper is used to avoid the allocation (see below).
public CacheControlHeaderValue CacheControl
{
get { return GeneralHeaders.CacheControl; }
set { GeneralHeaders.CacheControl = value; }
}
public HttpHeaderValueCollection<string> Connection
{
get { return GeneralHeaders.Connection; }
}
public bool? ConnectionClose
{
get { return HttpGeneralHeaders.GetConnectionClose(this, _generalHeaders); } // special-cased to avoid forcing _generalHeaders initialization
set { GeneralHeaders.ConnectionClose = value; }
}
public DateTimeOffset? Date
{
get { return GeneralHeaders.Date; }
set { GeneralHeaders.Date = value; }
}
public HttpHeaderValueCollection<NameValueHeaderValue> Pragma
{
get { return GeneralHeaders.Pragma; }
}
public HttpHeaderValueCollection<string> Trailer
{
get { return GeneralHeaders.Trailer; }
}
public HttpHeaderValueCollection<TransferCodingHeaderValue> TransferEncoding
{
get { return GeneralHeaders.TransferEncoding; }
}
public bool? TransferEncodingChunked
{
get { return HttpGeneralHeaders.GetTransferEncodingChunked(this, _generalHeaders); } // special-cased to avoid forcing _generalHeaders initialization
set { GeneralHeaders.TransferEncodingChunked = value; }
}
public HttpHeaderValueCollection<ProductHeaderValue> Upgrade
{
get { return GeneralHeaders.Upgrade; }
}
public HttpHeaderValueCollection<ViaHeaderValue> Via
{
get { return GeneralHeaders.Via; }
}
public HttpHeaderValueCollection<WarningHeaderValue> Warning
{
get { return GeneralHeaders.Warning; }
}
#endregion
internal HttpResponseHeaders()
{
base.SetConfiguration(s_parserStore, s_invalidHeaders);
}
// Builds the header-name -> parser table used by every instance.
private static Dictionary<string, HttpHeaderParser> CreateParserStore()
{
var parserStore = new Dictionary<string, HttpHeaderParser>(StringComparer.OrdinalIgnoreCase);
parserStore.Add(HttpKnownHeaderNames.AcceptRanges, GenericHeaderParser.TokenListParser);
parserStore.Add(HttpKnownHeaderNames.Age, TimeSpanHeaderParser.Parser);
parserStore.Add(HttpKnownHeaderNames.ETag, GenericHeaderParser.SingleValueEntityTagParser);
parserStore.Add(HttpKnownHeaderNames.Location, UriHeaderParser.RelativeOrAbsoluteUriParser);
parserStore.Add(HttpKnownHeaderNames.ProxyAuthenticate, GenericHeaderParser.MultipleValueAuthenticationParser);
parserStore.Add(HttpKnownHeaderNames.RetryAfter, GenericHeaderParser.RetryConditionParser);
parserStore.Add(HttpKnownHeaderNames.Server, ProductInfoHeaderParser.MultipleValueParser);
parserStore.Add(HttpKnownHeaderNames.Vary, GenericHeaderParser.TokenListParser);
parserStore.Add(HttpKnownHeaderNames.WWWAuthenticate, GenericHeaderParser.MultipleValueAuthenticationParser);
HttpGeneralHeaders.AddParsers(parserStore);
return parserStore;
}
// Content headers may not appear as typed response headers.
private static HashSet<string> CreateInvalidHeaders()
{
var invalidHeaders = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
HttpContentHeaders.AddKnownHeaders(invalidHeaders);
return invalidHeaders;
// Note: Reserved request header names are allowed as custom response header names. Reserved request
// headers have no defined meaning or format when used on a response. This enables a client to accept
// any headers sent from the server as either content headers or response headers.
}
// Registers the response-specific header names into headerSet.
internal static void AddKnownHeaders(HashSet<string> headerSet)
{
Debug.Assert(headerSet != null);
headerSet.Add(HttpKnownHeaderNames.AcceptRanges);
headerSet.Add(HttpKnownHeaderNames.Age);
headerSet.Add(HttpKnownHeaderNames.ETag);
headerSet.Add(HttpKnownHeaderNames.Location);
headerSet.Add(HttpKnownHeaderNames.ProxyAuthenticate);
headerSet.Add(HttpKnownHeaderNames.RetryAfter);
headerSet.Add(HttpKnownHeaderNames.Server);
headerSet.Add(HttpKnownHeaderNames.Vary);
headerSet.Add(HttpKnownHeaderNames.WWWAuthenticate);
}
internal override void AddHeaders(HttpHeaders sourceHeaders)
{
base.AddHeaders(sourceHeaders);
HttpResponseHeaders sourceResponseHeaders = sourceHeaders as HttpResponseHeaders;
Debug.Assert(sourceResponseHeaders != null);
// Copy special values, but do not overwrite
if (sourceResponseHeaders._generalHeaders != null)
{
GeneralHeaders.AddSpecialsFrom(sourceResponseHeaders._generalHeaders);
}
}
// Creates the general-headers holder on first use.
private HttpGeneralHeaders GeneralHeaders => _generalHeaders ?? (_generalHeaders = new HttpGeneralHeaders(this));
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Runtime.InteropServices;
using Xunit;
public class PathInternal_Windows_Tests
{
// Fix: the attribute list was missing the separating commas
// ("[Theory InlineData(...)]" does not compile); one attribute per line.
[Theory]
[InlineData(@"\\?\", false)]
[InlineData(@"\\?\?", true)]
[InlineData(@"//?/", false)]
[InlineData(@"//?/*", true)]
[InlineData(@"\\.\>", true)]
[InlineData(@"C:\", false)]
[InlineData(@"C:\<", true)]
[InlineData("\"MyFile\"", true)]
[PlatformSpecific(PlatformID.Windows)]
public void HasWildcardCharacters(string path, bool expected)
{
    Assert.Equal(expected, PathInternal.HasWildCardCharacters(path));
}
// Fix: the attribute list was missing the separating commas
// ("[Theory InlineData(...)]" does not compile); one attribute per line.
[Theory]
[InlineData(PathInternal.ExtendedPathPrefix, PathInternal.ExtendedPathPrefix)]
[InlineData(@"Foo", @"Foo")]
[InlineData(@"C:\Foo", @"\\?\C:\Foo")]
[InlineData(@"\\.\Foo", @"\\.\Foo")]
[InlineData(@"\\?\Foo", @"\\?\Foo")]
[InlineData(@"\??\Foo", @"\??\Foo")]
[InlineData(@"//?/Foo", @"//?/Foo")]
[InlineData(@"\\Server\Share", PathInternal.UncExtendedPathPrefix + @"Server\Share")]
[PlatformSpecific(PlatformID.Windows)]
public void EnsureExtendedPrefixTest(string path, string expected)
{
    Assert.Equal(expected, PathInternal.EnsureExtendedPrefix(path));
}
// Fix: the attribute list was missing the separating commas
// ("[Theory InlineData(...)]" does not compile); one attribute per line.
[Theory]
[InlineData(@"", false)]
[InlineData(@"\\?\", true)]
[InlineData(@"\??\", true)]
[InlineData(@"\\.\", false)]
[InlineData(@"\\?", false)]
[InlineData(@"\??", false)]
[InlineData(@"//?/", false)]
[InlineData(@"/??/", false)]
[PlatformSpecific(PlatformID.Windows)]
public void IsExtendedTest(string path, bool expected)
{
    // Both the StringBuffer and string overloads must agree.
    StringBuffer sb = new StringBuffer();
    sb.Append(path);
    Assert.Equal(expected, PathInternal.IsExtended(sb));
    Assert.Equal(expected, PathInternal.IsExtended(path));
}
[Theory
InlineData(@"", false)
InlineData(@"\\?\", true)
InlineData(@"\??\", true)
InlineData(@"\\.\", true)
InlineData(@"\\?", false)
InlineData(@"\??", false)
InlineData(@"//?/", true)
InlineData(@"/??/", false)
]
[PlatformSpecific(PlatformID.Windows)]
public void IsDeviceTest(string path, bool expected)
{
StringBuffer sb = new StringBuffer();
sb.Append(path);
Assert.Equal(expected, PathInternal.IsDevice(sb));
Assert.Equal(expected, PathInternal.IsDevice(path));
}
[Theory
InlineData("", true)
InlineData("C:", true)
InlineData("**", true)
InlineData(@"\\.\path", false)
InlineData(@"\\?\path", false)
InlineData(@"\\.", false)
InlineData(@"\\?", false)
InlineData(@"\?", false)
InlineData(@"/?", false)
InlineData(@"\\", false)
InlineData(@"//", false)
InlineData(@"\a", true)
InlineData(@"/a", true)
InlineData(@"\", true)
InlineData(@"/", true)
InlineData(@"C:Path", true)
InlineData(@"C:\Path", false)
InlineData(@"\\?\C:\Path", false)
InlineData(@"Path", true)
InlineData(@"X", true)
]
[PlatformSpecific(PlatformID.Windows)]
public void IsPartiallyQualifiedTest(string path, bool expected)
{
StringBuffer sb = new StringBuffer();
sb.Append(path);
Assert.Equal(expected, PathInternal.IsPartiallyQualified(sb));
Assert.Equal(expected, PathInternal.IsPartiallyQualified(path));
}
[Theory,
InlineData(@"", 0),
InlineData(@" :", 0),
InlineData(@" C:", 2),
InlineData(@" C:\", 3),
InlineData(@"C:\", 0),
InlineData(@" ", 0),
InlineData(@" \", 2),
InlineData(@" 8:", 0),
InlineData(@" \\", 4),
InlineData(@"\\", 0),
]
[PlatformSpecific(PlatformID.Windows)]
public void PathStartSkipTest(string path, int expected)
{
Assert.Equal(expected, PathInternal.PathStartSkip(path));
}
[Theory,
InlineData(@"", @""),
InlineData(null, null),
InlineData(@"\", @"\"),
InlineData(@"/", @"\"),
InlineData(@"\\", @"\\"),
InlineData(@"\\\", @"\\"),
InlineData(@"//", @"\\"),
InlineData(@"///", @"\\"),
InlineData(@"\/", @"\\"),
InlineData(@"\/\", @"\\"),
InlineData(@"a\a", @"a\a"),
InlineData(@"a\\a", @"a\a"),
InlineData(@"a/a", @"a\a"),
InlineData(@"a//a", @"a\a"),
InlineData(@"a\", @"a\"),
InlineData(@"a\\", @"a\"),
InlineData(@"a/", @"a\"),
InlineData(@"a//", @"a\"),
InlineData(@"\a", @"\a"),
InlineData(@"\\a", @"\\a"),
InlineData(@"/a", @"\a"),
InlineData(@"//a", @"\\a"),
// Skip tests
InlineData(@" :", @" :"),
InlineData(@" C:", @"C:"),
InlineData(@" C:\", @"C:\"),
InlineData(@" C:/", @"C:\"),
InlineData(@" ", @" "),
InlineData(@" \", @"\"),
InlineData(@" /", @"\"),
InlineData(@" 8:", @" 8:"),
InlineData(@" \\", @"\\"),
InlineData(@" //", @"\\"),
]
[PlatformSpecific(PlatformID.Windows)]
public void NormalizeDirectorySeparatorTests(string path, string expected)
{
string result = PathInternal.NormalizeDirectorySeparators(path);
Assert.Equal(expected, result);
if (string.Equals(path, expected, StringComparison.Ordinal))
Assert.Same(path, result);
}
[Theory]
[InlineData(@"\", 1)]
[InlineData("", 0)]
[InlineData(":", 1)]
[InlineData(";", 0)]
[InlineData("/", 1)]
[InlineData(@"Foo\/\/\", 8)]
[InlineData("Foo:Bar", 4)]
[InlineData(@"C:\Users\Foobar\", 16)]
[PlatformSpecific(PlatformID.Windows)]
public void FindFileNameIndexTests(string path, int expected)
{
Assert.Equal(expected, PathInternal.FindFileNameIndex(path));
}
[Theory,
InlineData(@"", @"", StringComparison.OrdinalIgnoreCase, true)
InlineData(@"", @"", StringComparison.Ordinal, true)
InlineData(@"A", @"a", StringComparison.OrdinalIgnoreCase, true)
InlineData(@"A", @"a", StringComparison.Ordinal, true)
InlineData(@"C:\", @"c:\", StringComparison.OrdinalIgnoreCase, true)
InlineData(@"C:\", @"c:\", StringComparison.Ordinal, false)
]
[PlatformSpecific(PlatformID.Windows)]
public void AreRootsEqual(string first, string second, StringComparison comparisonType, bool expected)
{
Assert.Equal(expected, PathInternal.AreRootsEqual(first, second, comparisonType));
}
}
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
[System.Serializable]
[AddComponentMenu("NGUI/NData/ItemsSource Binding")]
/// <summary>
/// Binds an EZData.Collection to this GameObject's children: one child per
/// item (instantiated from an optional NguiListItemTemplate), children named
/// by their item index, with UITable/UIGrid layout reflowed lazily at end of
/// frame, and two-way selection sync between UI and collection.
/// </summary>
public class NguiItemsSourceBinding : NguiBinding
{
    // Nearest ancestor items-source binding; used to find the top-most dirty
    // node when repositioning nested lists.
    protected NguiItemsSourceBinding _parent;
    // Template used to spawn one child per item; when null, pre-existing
    // children are reused by index instead.
    protected NguiListItemTemplate _itemTemplate;
    // The bound data collection; null while unbound.
    protected EZData.Collection _collection;
    // Re-entrancy guard: prevents a UI-driven SelectItem from echoing back
    // through the collection's OnSelectionChange handler.
    protected bool _isCollectionSelecting = false;
    // Optional NGUI layout components on this object, reflowed when dirty.
    protected UITable _uiTable = null;
    protected UIGrid _uiGrid = null;
    // Set when children were added/removed/cleared; consumed in LateUpdate.
    protected bool _dirty = false;

    /// <summary>Caches the layout components and the item template.</summary>
    public override void Awake()
    {
        base.Awake();
        _uiTable = GetComponent<UITable>();
        _uiGrid = GetComponent<UIGrid>();
        _itemTemplate = gameObject.GetComponent<NguiListItemTemplate>();
    }

    /// <summary>Resolves the nearest ancestor items-source binding.</summary>
    public override void Start ()
    {
        base.Start ();
        _parent = NguiUtils.GetComponentInParentsExcluding<NguiItemsSourceBinding>(gameObject);
    }

    // Deferred layout: at end of frame, walk up to the highest dirty ancestor
    // and reposition from there so inner lists are laid out before outer ones.
    void LateUpdate()
    {
        if (_dirty)
        {
            var topDirty = this;
            var p = _parent;
            while (p != null)
            {
                if (p._dirty)
                    topDirty = p;
                p = p._parent;
            }
            // NOTE(review): this passes 'this' rather than 'topDirty', so when
            // an ancestor is the top-most dirty node only this subtree gets
            // repositioned and the ancestor's own content does not — confirm
            // whether RepositionHierarchy(topDirty) was intended.
            topDirty.RepositionHierarchy(this);
        }
    }

    // Depth-first: reposition dirty descendant bindings before the node
    // itself, so the parent layout sees final child sizes.
    private void RepositionHierarchy(NguiItemsSourceBinding node)
    {
        var childSources = node.GetComponentsInChildren<NguiItemsSourceBinding>(true);
        foreach (var c in childSources)
        {
            if (c != node && c._dirty)
            {
                RepositionHierarchy(c);
            }
        }
        node.RepositionContent();
    }

    // Clears the dirty flag and reflows the UITable/UIGrid, if present.
    private void RepositionContent()
    {
        _dirty = false;
        if (_uiTable != null)
        {
            _uiTable.Reposition();
        }
        if (_uiGrid != null)
        {
            _uiGrid.Reposition();
        }
    }

    /// <summary>
    /// Detaches all collection event handlers and clears the spawned children.
    /// </summary>
    protected override void Unbind()
    {
        base.Unbind();
        if (_collection != null)
        {
            _collection.OnItemInsert -= OnItemInsert;
            _collection.OnItemRemove -= OnItemRemove;
            _collection.OnItemsClear -= OnItemsClear;
            _collection.OnSelectionChange -= OnCollectionSelectionChange;
            _collection = null;
            // Remove the visuals created for the old collection.
            OnItemsClear();
        }
    }

    // Expose the base Bind/Unbind so subclasses can invoke them without
    // triggering this class's collection wiring.
    protected void BaseBind(){
        base.Bind();
    }
    protected void BaseUnbind() {
        base.Unbind();
    }

    /// <summary>
    /// Resolves the collection at Path, subscribes to its change events, then
    /// replays the existing items and current selection into the view.
    /// </summary>
    protected override void Bind()
    {
        base.Bind();
        var context = GetContext(Path);
        if (context == null)
            return;
        _collection = context.FindCollection(Path, this);
        if (_collection == null)
            return;
        _collection.OnItemInsert += OnItemInsert;
        _collection.OnItemRemove += OnItemRemove;
        _collection.OnItemsClear += OnItemsClear;
        _collection.OnSelectionChange += OnCollectionSelectionChange;
        // Materialize items already present in the collection.
        for (var i = 0; i < _collection.ItemsCount; ++i)
        {
            OnItemInsert(i, _collection.GetBaseItem(i));
        }
        OnCollectionSelectionChange();
    }

    /// <summary>
    /// Creates (template mode) or reuses (no-template mode) the child for the
    /// inserted item, shifting the index-names of later siblings up by one.
    /// </summary>
    protected virtual void OnItemInsert(int position, EZData.Context item)
    {
        GameObject itemObject = null;
        if (_itemTemplate != null)
        {
            itemObject = _itemTemplate.Instantiate(item, position);
            itemObject.name = string.Format("{0}", position);
            // Renumber existing children at or after the insert position.
            // The new object is not yet parented, so it is not affected.
            for (var i = 0; i < transform.childCount; ++i)
            {
                var child = transform.GetChild(i).gameObject;
                int childNumber;
                if (int.TryParse(child.name, out childNumber) && childNumber >= position)
                {
                    child.name = string.Format("{0}", childNumber + 1);
                }
            }
            itemObject.transform.parent = gameObject.transform;
            itemObject.transform.localScale = Vector3.one;
            // Vector3.back = (0,0,-1); presumably nudges the item toward the
            // camera — confirm against the NGUI depth setup in this project.
            itemObject.transform.localPosition = Vector3.back;
        }
        else
        {
            // No template: rebind the data context onto the pre-existing
            // child at this index, if one exists.
            if (position < transform.childCount)
            {
                itemObject = transform.GetChild(position).gameObject;
                var itemData = itemObject.GetComponent<NguiItemDataContext>();
                if (itemData != null)
                {
                    itemData.SetContext(item);
                    itemData.SetIndex(position);
                }
            }
        }
        if (itemObject != null)
        {
            // Default any unset drag targets (on the item and its children)
            // to this container's transform.
            foreach(var dragObject in itemObject.GetComponentsInChildren<UIDragObject>())
            {
                if (dragObject.target == null)
                    dragObject.target = gameObject.transform;
            }
            foreach(var dragObject in itemObject.GetComponents<UIDragObject>())
            {
                if (dragObject.target == null)
                    dragObject.target = gameObject.transform;
            }
            // Inherit visibility from the closest visibility binding above.
            var parentVisibility = NguiUtils.GetComponentInParentsAs<IVisibilityBinding>(gameObject);
            foreach(var visibility in NguiUtils.GetComponentsInChildrenAs<IVisibilityBinding>(itemObject))
            {
                visibility.InvalidateParent();
            }
            var visible = parentVisibility == null ? true : parentVisibility.Visible;
            NguiUtils.SetVisible(itemObject, visible);
            _dirty = true;
        }
    }

    /// <summary>
    /// Destroys the child named after the removed index and renumbers the
    /// children that followed it. No-op when there is no item template.
    /// </summary>
    protected virtual void OnItemRemove(int position)
    {
        if (_itemTemplate == null)
            return;
        // First pass: destroy the child whose name matches the index.
        for (var i = 0; i < transform.childCount; ++i)
        {
            var child = transform.GetChild(i).gameObject;
            int childNumber;
            if (int.TryParse(child.name, out childNumber))
            {
                if (childNumber == position)
                {
                    GameObject.DestroyImmediate(child);
                    break;
                }
            }
        }
        // Second pass: shift the index-names of later children down by one.
        for (var i = 0; i < transform.childCount; ++i)
        {
            var child = transform.GetChild(i).gameObject;
            int childNumber;
            if (int.TryParse(child.name, out childNumber))
            {
                if (childNumber > position)
                {
                    child.name = string.Format("{0}", childNumber - 1);
                }
            }
        }
        _dirty = true;
    }

    /// <summary>
    /// Destroys all children. No-op when there is no item template (the
    /// children were not created by this binding in that mode).
    /// </summary>
    protected virtual void OnItemsClear()
    {
        if (_itemTemplate == null)
            return;
        while(transform.childCount > 0)
        {
            GameObject.DestroyImmediate(transform.GetChild(0).gameObject);
        }
        _dirty = true;
    }

    /// <summary>
    /// UI → collection: when a child is selected in the UI, propagates the
    /// selection (by the child's index-name) into the collection. The
    /// _isCollectionSelecting flag suppresses the resulting echo.
    /// </summary>
    public void OnSelectionChange(GameObject selectedObject)
    {
        if (_collection != null && !_isCollectionSelecting)
        {
            _isCollectionSelecting = true;
            for (var i = 0; i < transform.childCount; ++i)
            {
                var child = transform.GetChild(i).gameObject;
                if (selectedObject != child)
                    continue;
                int childNumber;
                if (int.TryParse(child.name, out childNumber))
                {
                    _collection.SelectItem(childNumber);
                    break;
                }
            }
            _isCollectionSelecting = false;
        }
    }

    /// <summary>
    /// Collection → UI: marks each child's data context as selected or not,
    /// based on the collection's SelectedIndex.
    /// </summary>
    protected virtual void OnCollectionSelectionChange()
    {
        for (var i = 0; i < transform.childCount; ++i)
        {
            var child = transform.GetChild(i).gameObject;
            int childNumber;
            if (int.TryParse(child.name, out childNumber))
            {
                var itemData = child.GetComponent<NguiItemDataContext>();
                if (itemData != null)
                    itemData.SetSelected(childNumber == _collection.SelectedIndex);
            }
        }
    }
}
| |
using System;
using System.Text;
namespace Renci.SshNet.Security.Org.BouncyCastle.Math.EC.Abc
{
/**
* Class representing a simple version of a big decimal. A
* <code>SimpleBigDecimal</code> is basically a
* {@link java.math.BigInteger BigInteger} with a few digits on the right of
* the decimal point. The number of (binary) digits on the right of the decimal
* point is called the <code>scale</code> of the <code>SimpleBigDecimal</code>.
* Unlike in {@link java.math.BigDecimal BigDecimal}, the scale is not adjusted
* automatically, but must be set manually. All <code>SimpleBigDecimal</code>s
* taking part in the same arithmetic operation must have equal scale. The
* result of a multiplication of two <code>SimpleBigDecimal</code>s returns a
* <code>SimpleBigDecimal</code> with double scale.
*/
/// <summary>
/// Minimal fixed-point big decimal: a BigInteger interpreted with an implicit
/// binary point <c>scale</c> bits from the right (value = bigInt / 2^scale).
/// The scale is never adjusted automatically; operands of an arithmetic
/// operation must share the same scale, and multiplying two values produces
/// a result whose scale is the sum of the operand scales.
/// </summary>
internal class SimpleBigDecimal
    // : Number
{
    // private static final long serialVersionUID = 1L;

    private readonly BigInteger bigInt;
    private readonly int scale;

    /// <summary>
    /// Returns a SimpleBigDecimal numerically equal to <paramref name="val"/>
    /// at the requested <paramref name="scale"/>.
    /// </summary>
    public static SimpleBigDecimal GetInstance(BigInteger val, int scale)
    {
        return new SimpleBigDecimal(val.ShiftLeft(scale), scale);
    }

    /// <summary>
    /// Wraps <paramref name="bigInt"/> as the value bigInt / 2^scale.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when scale is negative.</exception>
    public SimpleBigDecimal(BigInteger bigInt, int scale)
    {
        if (scale < 0)
            throw new ArgumentException("scale may not be negative");

        this.bigInt = bigInt;
        this.scale = scale;
    }

    // Copy constructor (kept private; the type is immutable anyway).
    private SimpleBigDecimal(SimpleBigDecimal other)
    {
        bigInt = other.bigInt;
        scale = other.scale;
    }

    // Mixed-operand arithmetic requires identical scales.
    private void CheckScale(SimpleBigDecimal b)
    {
        if (scale != b.scale)
            throw new ArgumentException("Only SimpleBigDecimal of same scale allowed in arithmetic operations");
    }

    /// <summary>Returns this value re-expressed at <paramref name="newScale"/>.</summary>
    public SimpleBigDecimal AdjustScale(int newScale)
    {
        if (newScale < 0)
            throw new ArgumentException("scale may not be negative");

        return newScale == scale
            ? this
            : new SimpleBigDecimal(bigInt.ShiftLeft(newScale - scale), newScale);
    }

    public SimpleBigDecimal Add(SimpleBigDecimal b)
    {
        CheckScale(b);
        return new SimpleBigDecimal(bigInt.Add(b.bigInt), scale);
    }

    public SimpleBigDecimal Add(BigInteger b)
    {
        // Shift the integer up to this scale before adding.
        return new SimpleBigDecimal(bigInt.Add(b.ShiftLeft(scale)), scale);
    }

    public SimpleBigDecimal Negate()
    {
        return new SimpleBigDecimal(bigInt.Negate(), scale);
    }

    public SimpleBigDecimal Subtract(SimpleBigDecimal b)
    {
        return Add(b.Negate());
    }

    public SimpleBigDecimal Subtract(BigInteger b)
    {
        return new SimpleBigDecimal(bigInt.Subtract(b.ShiftLeft(scale)), scale);
    }

    public SimpleBigDecimal Multiply(SimpleBigDecimal b)
    {
        CheckScale(b);
        // The raw product of the underlying integers carries 2*scale
        // fractional bits, hence the doubled result scale.
        return new SimpleBigDecimal(bigInt.Multiply(b.bigInt), scale + scale);
    }

    public SimpleBigDecimal Multiply(BigInteger b)
    {
        return new SimpleBigDecimal(bigInt.Multiply(b), scale);
    }

    public SimpleBigDecimal Divide(SimpleBigDecimal b)
    {
        CheckScale(b);
        // Pre-shift the dividend so the truncating integer division still
        // retains 'scale' fractional bits.
        BigInteger shifted = bigInt.ShiftLeft(scale);
        return new SimpleBigDecimal(shifted.Divide(b.bigInt), scale);
    }

    public SimpleBigDecimal Divide(BigInteger b)
    {
        return new SimpleBigDecimal(bigInt.Divide(b), scale);
    }

    public SimpleBigDecimal ShiftLeft(int n)
    {
        return new SimpleBigDecimal(bigInt.ShiftLeft(n), scale);
    }

    public int CompareTo(SimpleBigDecimal val)
    {
        CheckScale(val);
        return bigInt.CompareTo(val.bigInt);
    }

    public int CompareTo(BigInteger val)
    {
        return bigInt.CompareTo(val.ShiftLeft(scale));
    }

    /// <summary>Largest integer not greater than this value (arithmetic floor).</summary>
    public BigInteger Floor()
    {
        return bigInt.ShiftRight(scale);
    }

    /// <summary>Nearest integer: adds one half at this scale, then floors.</summary>
    public BigInteger Round()
    {
        SimpleBigDecimal half = new SimpleBigDecimal(BigInteger.One, 1);
        return Add(half.AdjustScale(scale)).Floor();
    }

    public int IntValue
    {
        get { return Floor().IntValue; }
    }

    public long LongValue
    {
        get { return Floor().LongValue; }
    }

    // public double doubleValue()
    // {
    // return new Double(ToString()).doubleValue();
    // }
    //
    // public float floatValue()
    // {
    // return new Float(ToString()).floatValue();
    // }

    public int Scale
    {
        get { return scale; }
    }

    /// <summary>
    /// Decimal-style rendering: integer part, '.', then exactly
    /// <see cref="Scale"/> binary digits of the fractional part.
    /// </summary>
    public override string ToString()
    {
        if (scale == 0)
            return bigInt.ToString();

        // Split into integer part and a non-negative fractional remainder.
        BigInteger intPart = Floor();
        BigInteger fraction = bigInt.Subtract(intPart.ShiftLeft(scale));
        if (bigInt.SignValue < 0)
        {
            fraction = BigInteger.One.ShiftLeft(scale).Subtract(fraction);
        }
        if ((intPart.SignValue == -1) && (!(fraction.Equals(BigInteger.Zero))))
        {
            intPart = intPart.Add(BigInteger.One);
        }

        // Left-pad the binary digits of the fraction to exactly 'scale' places.
        string bits = fraction.ToString(2);
        char[] digits = new char[scale];
        int pad = scale - bits.Length;
        for (int i = 0; i < pad; i++)
        {
            digits[i] = '0';
        }
        for (int i = 0; i < bits.Length; i++)
        {
            digits[pad + i] = bits[i];
        }

        StringBuilder text = new StringBuilder(intPart.ToString());
        text.Append(".");
        text.Append(new string(digits));
        return text.ToString();
    }

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(this, obj))
            return true;

        SimpleBigDecimal other = obj as SimpleBigDecimal;
        if (other == null)
            return false;

        return bigInt.Equals(other.bigInt) && scale == other.scale;
    }

    public override int GetHashCode()
    {
        return bigInt.GetHashCode() ^ scale;
    }
}
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace WebsitePanel.Portal.ExchangeServer {
public partial class ExchangeAddMailboxPlan {
/// <summary>
/// asyncTasks control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.EnableAsyncTasksSupport asyncTasks;
/// <summary>
/// Image1 control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Image Image1;
/// <summary>
/// locTitle control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locTitle;
/// <summary>
/// messageBox control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.SimpleMessageBox messageBox;
/// <summary>
/// hfArchivingPlan control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.HiddenField hfArchivingPlan;
/// <summary>
/// secMailboxPlan control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secMailboxPlan;
/// <summary>
/// MailboxPlan control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel MailboxPlan;
/// <summary>
/// txtMailboxPlan control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtMailboxPlan;
/// <summary>
/// valRequireMailboxPlan control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator valRequireMailboxPlan;
/// <summary>
/// secMailboxFeatures control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secMailboxFeatures;
/// <summary>
/// MailboxFeatures control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel MailboxFeatures;
/// <summary>
/// chkPOP3 control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkPOP3;
/// <summary>
/// chkIMAP control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkIMAP;
/// <summary>
/// chkOWA control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkOWA;
/// <summary>
/// chkMAPI control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkMAPI;
/// <summary>
/// chkActiveSync control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkActiveSync;
/// <summary>
/// secMailboxGeneral control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secMailboxGeneral;
/// <summary>
/// MailboxGeneral control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel MailboxGeneral;
/// <summary>
/// chkHideFromAddressBook control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkHideFromAddressBook;
/// <summary>
/// secStorageQuotas control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secStorageQuotas;
/// <summary>
/// StorageQuotas control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel StorageQuotas;
/// <summary>
/// locMailboxSize control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locMailboxSize;
/// <summary>
/// mailboxSize control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.QuotaEditor mailboxSize;
/// <summary>
/// locMaxRecipients control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locMaxRecipients;
/// <summary>
/// maxRecipients control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.QuotaEditor maxRecipients;
/// <summary>
/// locMaxSendMessageSizeKB control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locMaxSendMessageSizeKB;
/// <summary>
/// maxSendMessageSizeKB control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.QuotaEditor maxSendMessageSizeKB;
/// <summary>
/// locMaxReceiveMessageSizeKB control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locMaxReceiveMessageSizeKB;
/// <summary>
/// maxReceiveMessageSizeKB control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.QuotaEditor maxReceiveMessageSizeKB;
/// <summary>
/// locWhenSizeExceeds control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locWhenSizeExceeds;
/// <summary>
/// locIssueWarning control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locIssueWarning;
/// <summary>
/// sizeIssueWarning control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.ExchangeServer.UserControls.SizeBox sizeIssueWarning;
/// <summary>
/// locProhibitSend control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locProhibitSend;
/// <summary>
/// sizeProhibitSend control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.ExchangeServer.UserControls.SizeBox sizeProhibitSend;
/// <summary>
/// locProhibitSendReceive control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locProhibitSendReceive;
/// <summary>
/// sizeProhibitSendReceive control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.ExchangeServer.UserControls.SizeBox sizeProhibitSendReceive;
/// <summary>
/// secDeleteRetention control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secDeleteRetention;
/// <summary>
/// DeleteRetention control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel DeleteRetention;
/// <summary>
/// locKeepDeletedItems control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locKeepDeletedItems;
/// <summary>
/// daysKeepDeletedItems control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.ExchangeServer.UserControls.DaysBox daysKeepDeletedItems;
/// <summary>
/// secLitigationHold control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secLitigationHold;
/// <summary>
/// LitigationHold control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel LitigationHold;
/// <summary>
/// chkEnableLitigationHold control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkEnableLitigationHold;
/// <summary>
/// locRecoverableItemsSpace control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locRecoverableItemsSpace;
/// <summary>
/// recoverableItemsSpace control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.QuotaEditor recoverableItemsSpace;
/// <summary>
/// locRecoverableItemsWarning control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locRecoverableItemsWarning;
/// <summary>
/// recoverableItemsWarning control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.ExchangeServer.UserControls.SizeBox recoverableItemsWarning;
/// <summary>
/// lblLitigationHoldUrl control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Label lblLitigationHoldUrl;
/// <summary>
/// txtLitigationHoldUrl control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtLitigationHoldUrl;
/// <summary>
/// lblLitigationHoldMsg control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Label lblLitigationHoldMsg;
/// <summary>
/// txtLitigationHoldMsg control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtLitigationHoldMsg;
/// <summary>
/// secArchiving control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secArchiving;
/// <summary>
/// Archiving control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel Archiving;
/// <summary>
/// chkEnableArchiving control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkEnableArchiving;
/// <summary>
/// locArchiveQuota control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locArchiveQuota;
/// <summary>
/// archiveQuota control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.QuotaEditor archiveQuota;
/// <summary>
/// locArchiveWarningQuota control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locArchiveWarningQuota;
/// <summary>
/// archiveWarningQuota control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.ExchangeServer.UserControls.SizeBox archiveWarningQuota;
/// <summary>
/// chkEnableForceArchiveDeletion control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkEnableForceArchiveDeletion;
/// <summary>
/// secRetentionPolicyTags control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.CollapsiblePanel secRetentionPolicyTags;
/// <summary>
/// RetentionPolicyTags control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Panel RetentionPolicyTags;
/// <summary>
/// GeneralUpdatePanel control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.UpdatePanel GeneralUpdatePanel;
/// <summary>
/// gvPolicy control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.GridView gvPolicy;
/// <summary>
/// ddTags control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.DropDownList ddTags;
/// <summary>
/// bntAddTag control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Button bntAddTag;
/// <summary>
/// btnAdd control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Button btnAdd;
/// <summary>
/// ValidationSummary1 control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.ValidationSummary ValidationSummary1;
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="GeofenceMonitor.cs" company="In The Hand Ltd">
// Copyright (c) 2015-17 In The Hand Ltd, All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using InTheHand.Foundation;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
#if __UNIFIED__
using Foundation;
using CoreLocation;
#if __IOS__
using UIKit;
#endif
#endif
namespace InTheHand.Devices.Geolocation.Geofencing
{
/// <summary>
/// Contains the information about the monitored Geofence objects.
/// </summary>
public sealed class GeofenceMonitor
{
    private static GeofenceMonitor _current;

    /// <summary>
    /// Gets the GeofenceMonitor object which contains all of an app's <see cref="Geofence"/> information.
    /// </summary>
    public static GeofenceMonitor Current
    {
        get
        {
            if (_current == null)
            {
                _current = new GeofenceMonitor();
            }

            return _current;
        }
    }

#if __UNIFIED__
    internal CLLocationManager _locationManager;
#elif WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
    private Windows.Devices.Geolocation.Geofencing.GeofenceMonitor _monitor;

    public static implicit operator Windows.Devices.Geolocation.Geofencing.GeofenceMonitor(GeofenceMonitor m)
    {
        return m._monitor;
    }
#elif TIZEN
    internal Tizen.Location.Locator _locator;
#endif

    // State-change reports queued by platform callbacks until the app drains
    // them via ReadReports(). All access locks on the queue instance itself.
    private Queue<GeofenceStateChangeReport> _reports = new Queue<GeofenceStateChangeReport>();

    // Largest region radius the platform will monitor (populated in the
    // __UNIFIED__ constructor branch only).
    internal double maxRegion;

    private GeofenceMonitor()
    {
#if __UNIFIED__
        _locationManager = new CLLocationManager();
        _locationManager.DesiredAccuracy = CLLocation.AccuracyBest;
        _locationManager.AuthorizationChanged += _locationManager_AuthorizationChanged;
        _locationManager.LocationsUpdated += _locationManager_LocationsUpdated;
#if __IOS__
        _locationManager.AllowsBackgroundLocationUpdates = true;

        // Authorization must be requested on the main thread.
        UIApplication.SharedApplication.BeginInvokeOnMainThread(() =>
        {
            _locationManager.RequestAlwaysAuthorization();
        });

        if (!CLLocationManager.IsMonitoringAvailable(typeof(CLCircularRegion)))
#else
        if (!CLLocationManager.IsMonitoringAvailable(new ObjCRuntime.Class("CLCircularRegion")))
#endif
        {
            Status = GeofenceMonitorStatus.NotAvailable;
        }
        else
        {
            maxRegion = _locationManager.MaximumRegionMonitoringDistance;
            _locationManager.RegionEntered += _locationManager_RegionEntered;
            _locationManager.RegionLeft += _locationManager_RegionLeft;
        }
#elif WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
        _monitor = Windows.Devices.Geolocation.Geofencing.GeofenceMonitor.Current;
#elif TIZEN
        _locator = new Tizen.Location.Locator(Tizen.Location.LocationType.Hybrid);
        _locator.ZoneChanged += _locator_ZoneChanged;
        _locator.ServiceStateChanged += _locator_ServiceStateChanged;
#endif
    }

#if __UNIFIED__
    private void _locationManager_LocationsUpdated(object sender, CLLocationsUpdatedEventArgs e)
    {
        // The first location fix means the monitor is operational; the
        // subscription is one-shot, so detach immediately.
        Status = GeofenceMonitorStatus.Ready;
        _locationManager.LocationsUpdated -= _locationManager_LocationsUpdated;
    }

    private void _locationManager_AuthorizationChanged(object sender, CLAuthorizationChangedEventArgs e)
    {
        switch (e.Status)
        {
            case CLAuthorizationStatus.AuthorizedAlways:
            case CLAuthorizationStatus.AuthorizedWhenInUse:
                Status = GeofenceMonitorStatus.Initializing;
                break;

            case CLAuthorizationStatus.Denied:
                Status = GeofenceMonitorStatus.Disabled;
                break;
        }
    }
#elif TIZEN
    private void _locator_ZoneChanged(object sender, Tizen.Location.ZoneChangedEventArgs e)
    {
        lock (_reports)
        {
            _reports.Enqueue(new Geofencing.GeofenceStateChangeReport(null, new Geolocation.Geoposition(new Tizen.Location.Location { Latitude = e.Latitude, Longitude = e.Longitude, Altitude = e.Altitude }), e.BoundState == Tizen.Location.BoundaryState.In ? GeofenceState.Entered : GeofenceState.Exited));
        }

        OnGeofenceStateChanged();
    }

    private void _locator_ServiceStateChanged(object sender, Tizen.Location.ServiceStateChangedEventArgs e)
    {
        switch (e.ServiceState)
        {
            case Tizen.Location.ServiceState.Enabled:
                Status = GeofenceMonitorStatus.Ready;
                break;

            default:
                Status = GeofenceMonitorStatus.Disabled;
                break;
        }
    }
#endif

    /// <summary>
    /// Gets a collection of status changes to the <see cref="Geofence"/> objects in the Geofences collection of the GeofenceMonitor.
    /// </summary>
    /// <returns>The reports accumulated since the previous call; the internal queue is drained.</returns>
    public IReadOnlyList<GeofenceStateChangeReport> ReadReports()
    {
        List<GeofenceStateChangeReport> reportSnapshot = new List<Geofencing.GeofenceStateChangeReport>();
#if WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
        foreach (Windows.Devices.Geolocation.Geofencing.GeofenceStateChangeReport r in _monitor.ReadReports())
        {
            reportSnapshot.Add(r);
        }
#else
        lock (_reports)
        {
            reportSnapshot.AddRange(_reports.ToArray());
            _reports.Clear();
        }
#endif
        return reportSnapshot;
    }

#if __UNIFIED__
    private void _locationManager_RegionLeft(object sender, CLRegionEventArgs e)
    {
        lock (_reports)
        {
            _reports.Enqueue(new Geofencing.GeofenceStateChangeReport(e.Region, LastKnownGeoposition, GeofenceState.Exited));
        }

        OnGeofenceStateChanged();
    }

    private void _locationManager_RegionEntered(object sender, CLRegionEventArgs e)
    {
        lock (_reports)
        {
            _reports.Enqueue(new Geofencing.GeofenceStateChangeReport(e.Region, LastKnownGeoposition, GeofenceState.Entered));
        }

        OnGeofenceStateChanged();
    }
#endif

    /// <summary>
    /// Returns a vector of the app's <see cref="Geofence"/> objects currently registered with the system wide GeofenceMonitor.
    /// </summary>
    public IList<Geofence> Geofences
    {
        get
        {
#if __UNIFIED__ || TIZEN
            return new GeofenceList(this);
#elif WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
            List<Geofence> fences = new List<Geofence>();
            foreach (Windows.Devices.Geolocation.Geofencing.Geofence f in _monitor.Geofences)
            {
                fences.Add(f);
            }

            return fences;
#else
            return null;
#endif
        }
    }

#if __UNIFIED__
    internal void AddRegion(CLRegion region)
    {
#if __IOS__
        _locationManager.StartMonitoring(region, CLLocation.AccuracyBest);
#else
        _locationManager.StartMonitoring(region);
#endif
        Status = GeofenceMonitorStatus.Ready;
    }

    internal void RemoveRegion(CLRegion region)
    {
        _locationManager.StopMonitoring(region);
    }
#endif

    /// <summary>
    /// Last reading of the device's location.
    /// </summary>
    public Geoposition LastKnownGeoposition
    {
        get
        {
#if __UNIFIED__
#if __IOS__
            _locationManager.RequestLocation();
#endif
            return new Geoposition(_locationManager.Location);
#elif WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
            return _monitor.LastKnownGeoposition;
#elif TIZEN
            return new Geoposition(_locator.Location);
#else
            return new Geoposition();
#endif
        }
    }

    private GeofenceMonitorStatus _status = GeofenceMonitorStatus.NoData;

    /// <summary>
    /// Indicates the current state of the GeofenceMonitor.
    /// </summary>
    public GeofenceMonitorStatus Status
    {
#if WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
        get
        {
            // Values map 1:1 onto the platform enum, so a numeric cast suffices.
            return (GeofenceMonitorStatus)((int)_monitor.Status);
        }
#else
        get
        {
            return _status;
        }

        private set
        {
            // Only raise StatusChanged on an actual transition.
            if (_status != value)
            {
                _status = value;
                StatusChanged?.Invoke(this, null);
            }
        }
#endif
    }

#if WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
    private event TypedEventHandler<GeofenceMonitor, object> _geofenceStateChanged;
#endif

    /// <summary>
    /// Raised when the state of one or more <see cref="Geofence"/> objects in the Geofences collection of the GeofenceMonitor has changed.
    /// </summary>
    public event TypedEventHandler<GeofenceMonitor, object> GeofenceStateChanged
#if WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
    {
        add
        {
            // Keep the platform subscription alive only while at least one
            // managed handler is attached.
            if (_geofenceStateChanged == null)
            {
                _monitor.GeofenceStateChanged += _monitor_GeofenceStateChanged;
            }

            _geofenceStateChanged += value;
        }

        remove
        {
            _geofenceStateChanged -= value;

            if (_geofenceStateChanged == null)
            {
                _monitor.GeofenceStateChanged -= _monitor_GeofenceStateChanged;
            }
        }
    }

    private void _monitor_GeofenceStateChanged(Windows.Devices.Geolocation.Geofencing.GeofenceMonitor sender, object args)
    {
        _geofenceStateChanged?.Invoke(this, EventArgs.Empty);
    }
#else
    ;

    private void OnGeofenceStateChanged()
    {
        GeofenceStateChanged?.Invoke(this, null);
    }
#endif

#if WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
    private event TypedEventHandler<GeofenceMonitor, object> _statusChanged;
#endif

    /// <summary>
    /// Raised when the status of the GeofenceMonitor has changed.
    /// </summary>
    public event TypedEventHandler<GeofenceMonitor, object> StatusChanged
#if WINDOWS_UWP || WINDOWS_APP || WINDOWS_PHONE_APP || WINDOWS_PHONE
    {
        add
        {
            if (_statusChanged == null)
            {
                _monitor.StatusChanged += _monitor_StatusChanged;
            }

            _statusChanged += value;
        }

        remove
        {
            _statusChanged -= value;

            // BUGFIX: this previously tested _geofenceStateChanged, so the
            // platform StatusChanged subscription could be detached too early
            // (or never) whenever the two events had different subscriber counts.
            if (_statusChanged == null)
            {
                _monitor.StatusChanged -= _monitor_StatusChanged;
            }
        }
    }

    private void _monitor_StatusChanged(Windows.Devices.Geolocation.Geofencing.GeofenceMonitor sender, object args)
    {
        _statusChanged?.Invoke(this, EventArgs.Empty);
    }
#else
    ;
#endif
}
#if __UNIFIED__ || TIZEN
// Adapter that exposes the platform's monitored regions/boundaries as an
// IList<Geofence>: inserting or removing an item also registers or
// unregisters it with the underlying OS geofencing service.
internal sealed class GeofenceList : Collection<Geofence>
{
    // Owning monitor, used to reach the platform-specific location manager/locator.
    private GeofenceMonitor _monitor;

    internal GeofenceList(GeofenceMonitor monitor)
    {
        _monitor = monitor;
#if __UNIFIED__
        foreach(CLRegion r in _monitor._locationManager.MonitoredRegions)
        {
            // add all the currently monitored regions as geofences
            base.InsertItem(base.Count, r);
        }
#endif
    }

    // Registers the geofence with the platform before adding it to the collection.
    protected override void InsertItem(int index, Geofence item)
    {
#if __UNIFIED__
        _monitor.AddRegion(item);
#elif TIZEN
        _monitor._locator.AddBoundary((Geocircle)item.Geoshape);
#endif
        base.InsertItem(index, item);
    }

    // Unregisters the geofence from the platform, then removes it from the collection.
    protected override void RemoveItem(int index)
    {
        try
        {
#if __UNIFIED__
            _monitor.RemoveRegion(this[index]);
#elif TIZEN
            _monitor._locator.RemoveBoundary((Geocircle)this[index].Geoshape);
#endif
        }
        catch { } // best-effort: a platform unregistration failure must not block removal
        base.RemoveItem(index);
    }
}
#endif
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Server.Base;
using OpenSim.Server.Handlers.Base;
using OpenSim.Services.Interfaces;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
namespace OpenSim.Groups
{
public class HGGroupsServicePostHandler : BaseStreamHandler
{
    private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    private HGGroupsService m_GroupsService;

    public HGGroupsServicePostHandler(HGGroupsService service) :
        base("POST", "/hg-groups")
    {
        m_GroupsService = service;
    }

    /// <summary>
    /// Entry point for every POST to /hg-groups: reads the form-encoded body,
    /// dispatches on its METHOD field and returns an XML response.
    /// </summary>
    protected override byte[] ProcessRequest(string path, Stream requestData,
        IOSHttpRequest httpRequest, IOSHttpResponse httpResponse)
    {
        string body;
        using (StreamReader sr = new StreamReader(requestData))
            body = sr.ReadToEnd();

        body = body.Trim();

        //m_log.DebugFormat("[XXX]: query String: {0}", body);
        try
        {
            Dictionary<string, object> request =
                ServerUtils.ParseQueryString(body);

            if (!request.ContainsKey("METHOD"))
                return FailureResult();

            string method = request["METHOD"].ToString();
            request.Remove("METHOD");

            m_log.DebugFormat("[Groups.RobustHGConnector]: {0}", method);
            switch (method)
            {
                case "POSTGROUP":
                    return HandleAddGroupProxy(request);
                case "REMOVEAGENTFROMGROUP":
                    return HandleRemoveAgentFromGroup(request);
                case "GETGROUP":
                    return HandleGetGroup(request);
                case "ADDNOTICE":
                    return HandleAddNotice(request);
                case "VERIFYNOTICE":
                    return HandleVerifyNotice(request);
                case "GETGROUPMEMBERS":
                    return HandleGetGroupMembers(request);
                case "GETGROUPROLES":
                    return HandleGetGroupRoles(request);
                case "GETROLEMEMBERS":
                    return HandleGetRoleMembers(request);
            }

            m_log.DebugFormat("[Groups.RobustHGConnector]: unknown method request: {0}", method);
        }
        catch (Exception e)
        {
            m_log.Error(string.Format("[Groups.RobustHGConnector]: Exception {0} ", e.Message), e);
        }

        return FailureResult();
    }

    /// <summary>
    /// POSTGROUP: creates a proxy entry for a group hosted on another grid.
    /// </summary>
    private byte[] HandleAddGroupProxy(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("RequestingAgentID") || !request.ContainsKey("GroupID")
            || !request.ContainsKey("AgentID")
            || !request.ContainsKey("AccessToken") || !request.ContainsKey("Location"))
            NullResult(result, "Bad network data");
        else
        {
            string RequestingAgentID = request["RequestingAgentID"].ToString();
            string agentID = request["AgentID"].ToString();
            UUID groupID = new UUID(request["GroupID"].ToString());
            string accessToken = request["AccessToken"].ToString();
            string location = request["Location"].ToString();
            string name = string.Empty;
            if (request.ContainsKey("Name"))
                name = request["Name"].ToString();

            string reason = string.Empty;
            bool success = m_GroupsService.CreateGroupProxy(RequestingAgentID, agentID, accessToken, groupID, location, name, out reason);
            result["REASON"] = reason;
            result["RESULT"] = success.ToString();
        }

        return BuildResponse(result);
    }

    /// <summary>
    /// ADDNOTICE: stores a group notice, optionally carrying an attachment.
    /// </summary>
    private byte[] HandleAddNotice(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("RequestingAgentID") || !request.ContainsKey("GroupID") || !request.ContainsKey("NoticeID") ||
            !request.ContainsKey("FromName") || !request.ContainsKey("Subject") || !request.ContainsKey("Message") ||
            !request.ContainsKey("HasAttachment"))
            NullResult(result, "Bad network data");
        else
        {
            bool hasAtt = bool.Parse(request["HasAttachment"].ToString());

            // Attachment fields are all optional; defaults apply when absent.
            byte attType = 0;
            string attName = string.Empty;
            string attOwner = string.Empty;
            UUID attItem = UUID.Zero;
            if (request.ContainsKey("AttachmentType"))
                attType = byte.Parse(request["AttachmentType"].ToString());
            if (request.ContainsKey("AttachmentName"))
                attName = request["AttachmentName"].ToString(); // BUGFIX: previously read the AttachmentType value
            if (request.ContainsKey("AttachmentItemID"))
                attItem = new UUID(request["AttachmentItemID"].ToString());
            if (request.ContainsKey("AttachmentOwnerID"))
                attOwner = request["AttachmentOwnerID"].ToString();

            bool success = m_GroupsService.AddNotice(request["RequestingAgentID"].ToString(), new UUID(request["GroupID"].ToString()),
                new UUID(request["NoticeID"].ToString()), request["FromName"].ToString(), request["Subject"].ToString(),
                request["Message"].ToString(), hasAtt, attType, attName, attItem, attOwner);

            result["RESULT"] = success.ToString();
        }

        return BuildResponse(result);
    }

    /// <summary>
    /// GETGROUP: fetches a group record by id or by name, gated by the access token.
    /// </summary>
    private byte[] HandleGetGroup(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("RequestingAgentID") || !request.ContainsKey("AccessToken"))
            NullResult(result, "Bad network data");
        else
        {
            string RequestingAgentID = request["RequestingAgentID"].ToString();
            string token = request["AccessToken"].ToString();

            UUID groupID = UUID.Zero;
            string groupName = string.Empty;

            if (request.ContainsKey("GroupID"))
                groupID = new UUID(request["GroupID"].ToString());
            if (request.ContainsKey("Name"))
                groupName = request["Name"].ToString();

            ExtendedGroupRecord grec = m_GroupsService.GetGroupRecord(RequestingAgentID, groupID, groupName, token);
            if (grec == null)
                NullResult(result, "Group not found");
            else
                result["RESULT"] = GroupsDataUtils.GroupRecord(grec);
        }

        return BuildResponse(result);
    }

    /// <summary>
    /// GETGROUPMEMBERS: returns the group's member list keyed "m-0", "m-1", ...
    /// </summary>
    private byte[] HandleGetGroupMembers(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("RequestingAgentID") || !request.ContainsKey("GroupID") || !request.ContainsKey("AccessToken"))
            NullResult(result, "Bad network data");
        else
        {
            UUID groupID = new UUID(request["GroupID"].ToString());
            string requestingAgentID = request["RequestingAgentID"].ToString();
            string token = request["AccessToken"].ToString();

            List<ExtendedGroupMembersData> members = m_GroupsService.GetGroupMembers(requestingAgentID, groupID, token);
            if (members == null || members.Count == 0)
            {
                NullResult(result, "No members");
            }
            else
            {
                Dictionary<string, object> dict = new Dictionary<string, object>();
                int i = 0;
                foreach (ExtendedGroupMembersData m in members)
                {
                    dict["m-" + i++] = GroupsDataUtils.GroupMembersData(m);
                }

                result["RESULT"] = dict;
            }
        }

        return BuildResponse(result);
    }

    /// <summary>
    /// GETGROUPROLES: returns the group's roles keyed "r-0", "r-1", ...
    /// </summary>
    private byte[] HandleGetGroupRoles(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("RequestingAgentID") || !request.ContainsKey("GroupID") || !request.ContainsKey("AccessToken"))
            NullResult(result, "Bad network data");
        else
        {
            UUID groupID = new UUID(request["GroupID"].ToString());
            string requestingAgentID = request["RequestingAgentID"].ToString();
            string token = request["AccessToken"].ToString();

            List<GroupRolesData> roles = m_GroupsService.GetGroupRoles(requestingAgentID, groupID, token);
            if (roles == null || roles.Count == 0)
            {
                NullResult(result, "No members");
            }
            else
            {
                Dictionary<string, object> dict = new Dictionary<string, object>();
                int i = 0;
                foreach (GroupRolesData r in roles)
                    dict["r-" + i++] = GroupsDataUtils.GroupRolesData(r);

                result["RESULT"] = dict;
            }
        }

        return BuildResponse(result);
    }

    /// <summary>
    /// GETROLEMEMBERS: returns the role membership list keyed "rm-0", "rm-1", ...
    /// </summary>
    private byte[] HandleGetRoleMembers(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("RequestingAgentID") || !request.ContainsKey("GroupID") || !request.ContainsKey("AccessToken"))
            NullResult(result, "Bad network data");
        else
        {
            UUID groupID = new UUID(request["GroupID"].ToString());
            string requestingAgentID = request["RequestingAgentID"].ToString();
            string token = request["AccessToken"].ToString();

            List<ExtendedGroupRoleMembersData> rmembers = m_GroupsService.GetGroupRoleMembers(requestingAgentID, groupID, token);
            if (rmembers == null || rmembers.Count == 0)
            {
                NullResult(result, "No members");
            }
            else
            {
                Dictionary<string, object> dict = new Dictionary<string, object>();
                int i = 0;
                foreach (ExtendedGroupRoleMembersData rm in rmembers)
                    dict["rm-" + i++] = GroupsDataUtils.GroupRoleMembersData(rm);

                result["RESULT"] = dict;
            }
        }

        return BuildResponse(result);
    }

    /// <summary>
    /// REMOVEAGENTFROMGROUP: removes the agent from the group. Always reports
    /// success to the caller, mirroring the original fire-and-forget behavior.
    /// </summary>
    private byte[] HandleRemoveAgentFromGroup(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("AccessToken") || !request.ContainsKey("AgentID") ||
            !request.ContainsKey("GroupID"))
            NullResult(result, "Bad network data");
        else
        {
            UUID groupID = new UUID(request["GroupID"].ToString());
            string agentID = request["AgentID"].ToString();
            string token = request["AccessToken"].ToString();

            // NOTE(review): agentID is passed as both the requesting agent and
            // the target agent — agents can only remove themselves over HG here.
            m_GroupsService.RemoveAgentFromGroup(agentID, agentID, groupID, token);
        }

        result["RESULT"] = "true";
        return BuildResponse(result);
    }

    /// <summary>
    /// VERIFYNOTICE: checks that a notice id belongs to the given group.
    /// </summary>
    private byte[] HandleVerifyNotice(Dictionary<string, object> request)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();

        if (!request.ContainsKey("NoticeID") || !request.ContainsKey("GroupID"))
            NullResult(result, "Bad network data");
        else
        {
            UUID noticeID = new UUID(request["NoticeID"].ToString());
            UUID groupID = new UUID(request["GroupID"].ToString());

            bool success = m_GroupsService.VerifyNotice(noticeID, groupID);
            //m_log.DebugFormat("[XXX]: VerifyNotice returned {0}", success);
            result["RESULT"] = success.ToString();
        }

        return BuildResponse(result);
    }

    #region Helpers

    // Serializes the result dictionary to XML and encodes it as UTF-8 without BOM.
    private byte[] BuildResponse(Dictionary<string, object> result)
    {
        string xmlString = ServerUtils.BuildXmlResponse(result);
        //m_log.DebugFormat("[XXX]: resp string: {0}", xmlString);
        return Util.UTF8NoBomEncoding.GetBytes(xmlString);
    }

    // Generic failure response for unknown/unparseable requests.
    private byte[] FailureResult()
    {
        Dictionary<string, object> result = new Dictionary<string, object>();
        NullResult(result, "Unknown method");
        return BuildResponse(result);
    }

    // Marks the result as NULL with a human-readable reason.
    private void NullResult(Dictionary<string, object> result, string reason)
    {
        result["RESULT"] = "NULL";
        result["REASON"] = reason;
    }

    #endregion Helpers
}
public class HGGroupsServiceRobustConnector : ServiceConnector
{
    private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    private string m_ConfigName = "Groups";
    private HGGroupsService m_GroupsService;

    // Called by Robust shell
    public HGGroupsServiceRobustConnector(IConfigSource config, IHttpServer server, string configName) :
        this(config, server, configName, null, null)
    {
    }

    // Called by the sim-bound module
    public HGGroupsServiceRobustConnector(IConfigSource config, IHttpServer server, string configName, IOfflineIMService im, IUserAccountService users) :
        base(config, server, configName)
    {
        if (configName != String.Empty)
            m_ConfigName = configName;

        m_log.DebugFormat("[Groups.RobustHGConnector]: Starting with config name {0}", m_ConfigName);

        // HomeURI may live in [Startup], [Hypergrid] or the connector's own section.
        string homeURI = Util.GetConfigVarFromSections<string>(config, "HomeURI",
            new string[] { "Startup", "Hypergrid", m_ConfigName }, string.Empty);
        if (homeURI == string.Empty)
            throw new Exception(String.Format("[Groups.RobustHGConnector]: please provide the HomeURI [Startup] or in section {0}", m_ConfigName));

        IConfig groupsConfig = config.Configs[m_ConfigName];
        if (groupsConfig == null)
            throw new Exception(String.Format("[Groups.RobustHGConnector]: {0} section does not exist", m_ConfigName));

        // Fall back to config-declared plugins when the caller didn't inject them.
        if (im == null)
            im = LoadRequiredPlugin<IOfflineIMService>(config, groupsConfig, "OfflineIMService");

        if (users == null)
            users = LoadRequiredPlugin<IUserAccountService>(config, groupsConfig, "UserAccountService");

        m_GroupsService = new HGGroupsService(config, im, users, homeURI);
        server.AddStreamHandler(new HGGroupsServicePostHandler(m_GroupsService));
    }

    // Loads the plugin whose DLL is named by <setting> in the connector's
    // config section, throwing when the setting is absent.
    private T LoadRequiredPlugin<T>(IConfigSource config, IConfig cnf, string setting) where T : class
    {
        string pluginDll = cnf.GetString(setting, string.Empty);
        if (pluginDll == string.Empty)
            throw new Exception(String.Format("[Groups.RobustHGConnector]: please provide {0} in section {1}", setting, m_ConfigName));

        return ServerUtils.LoadPlugin<T>(pluginDll, new Object[] { config });
    }
}
}
| |
// Copyright (C) 2004 Kevin Downs
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
using System;
using System.Drawing;
using System.Drawing.Design;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Windows.Forms.Design;
using System.Diagnostics;
using System.Reflection;
using NDoc.ExtendedUI;
namespace NDoc.Core
{
/// <summary>
/// Summary description for ReferencePathCollectionEditorForm.
/// </summary>
internal class ReferencePathCollectionEditorForm : System.Windows.Forms.Form
{
private ListView listView1;
private System.Windows.Forms.Button btnAdd;
private System.Windows.Forms.Button btnRemove;
private System.Windows.Forms.Button btnTest;
private NDoc.Core.PropertyGridUI.RuntimePropertyGrid propertyGrid1;
private System.Windows.Forms.Button btnCancel;
private System.Windows.Forms.Button btnOK;
private System.Windows.Forms.ImageList imageList1;
private System.Windows.Forms.ColumnHeader columnHeader1;
private System.ComponentModel.IContainer components;
/// <summary>
/// Creates a new <see cref="ReferencePathCollectionEditorForm"/> instance.
/// </summary>
public ReferencePathCollectionEditorForm()
{
    //
    // Required for Windows Form Designer support
    //
    InitializeComponent();

    // Image resources are loaded in code rather than in the designer,
    // which avoids problems with VS.NET designer versioning.
    Assembly assembly = Assembly.GetExecutingAssembly();
    string[] bitmapResources =
    {
        "NDoc.Core.graphics.folder.bmp",
        "NDoc.Core.graphics.folderpin.bmp",
        "NDoc.Core.graphics.multifolder.bmp",
        "NDoc.Core.graphics.multifolderpin.bmp"
    };
    foreach (string resourceName in bitmapResources)
    {
        this.imageList1.Images.Add(new Bitmap(assembly.GetManifestResourceStream(resourceName)));
    }
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true when invoked from Dispose(); false when invoked from the finalizer.</param>
protected override void Dispose(bool disposing)
{
    // Only release managed components on an explicit dispose.
    if (disposing && components != null)
    {
        components.Dispose();
    }

    base.Dispose(disposing);
}
ReferencePathCollection _refPaths = null;

/// <summary>
/// Gets or sets the reference paths collection to edit.
/// </summary>
/// <value>The setter stores a deep copy, so edits made in this dialog do not
/// mutate the caller's collection; read the property back to obtain the
/// edited copy. Assigning null yields an empty, editable collection.</value>
public ReferencePathCollection ReferencePaths
{
    get { return _refPaths; }
    set
    {
        _refPaths = new ReferencePathCollection();

        // Robustness: previously a null assignment threw NullReferenceException
        // in the foreach; treat null as "edit an empty collection" instead.
        if (value == null)
            return;

        foreach (ReferencePath rp in value)
        {
            _refPaths.Add(new ReferencePath(rp));
        }
    }
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
// NOTE: generated by the Windows Forms designer; change the form in the
// designer surface rather than editing this method by hand.
this.components = new System.ComponentModel.Container();
System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(ReferencePathCollectionEditorForm));
this.propertyGrid1 = new NDoc.Core.PropertyGridUI.RuntimePropertyGrid();
this.btnCancel = new System.Windows.Forms.Button();
this.btnOK = new System.Windows.Forms.Button();
this.btnRemove = new System.Windows.Forms.Button();
this.btnAdd = new System.Windows.Forms.Button();
this.listView1 = new System.Windows.Forms.ListView();
this.columnHeader1 = new System.Windows.Forms.ColumnHeader();
this.imageList1 = new System.Windows.Forms.ImageList(this.components);
this.btnTest = new System.Windows.Forms.Button();
this.SuspendLayout();
//
// propertyGrid1
//
this.propertyGrid1.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.propertyGrid1.CommandsVisibleIfAvailable = true;
this.propertyGrid1.LargeButtons = false;
this.propertyGrid1.LineColor = System.Drawing.SystemColors.ScrollBar;
this.propertyGrid1.Location = new System.Drawing.Point(8, 208);
this.propertyGrid1.Name = "propertyGrid1";
this.propertyGrid1.PropertySort = System.Windows.Forms.PropertySort.NoSort;
this.propertyGrid1.Size = new System.Drawing.Size(456, 136);
this.propertyGrid1.TabIndex = 5;
this.propertyGrid1.Text = "propertyGrid";
this.propertyGrid1.ToolbarVisible = false;
this.propertyGrid1.ViewBackColor = System.Drawing.SystemColors.Window;
this.propertyGrid1.ViewForeColor = System.Drawing.SystemColors.WindowText;
this.propertyGrid1.PropertyValueChanged += new System.Windows.Forms.PropertyValueChangedEventHandler(this.propertyGrid1_PropertyValueChanged);
//
// btnCancel
//
this.btnCancel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.btnCancel.FlatStyle = System.Windows.Forms.FlatStyle.System;
this.btnCancel.Location = new System.Drawing.Point(376, 352);
this.btnCancel.Name = "btnCancel";
this.btnCancel.Size = new System.Drawing.Size(88, 24);
this.btnCancel.TabIndex = 6;
this.btnCancel.Text = "Cancel";
//
// btnOK
//
this.btnOK.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.btnOK.DialogResult = System.Windows.Forms.DialogResult.OK;
this.btnOK.FlatStyle = System.Windows.Forms.FlatStyle.System;
this.btnOK.Location = new System.Drawing.Point(280, 352);
this.btnOK.Name = "btnOK";
this.btnOK.Size = new System.Drawing.Size(88, 24);
this.btnOK.TabIndex = 7;
this.btnOK.Text = "OK";
//
// btnRemove
//
this.btnRemove.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.btnRemove.FlatStyle = System.Windows.Forms.FlatStyle.System;
this.btnRemove.Location = new System.Drawing.Point(104, 168);
this.btnRemove.Name = "btnRemove";
this.btnRemove.Size = new System.Drawing.Size(88, 24);
this.btnRemove.TabIndex = 3;
this.btnRemove.Text = "Remove";
this.btnRemove.Click += new System.EventHandler(this.btnRemove_Click);
//
// btnAdd
//
this.btnAdd.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.btnAdd.FlatStyle = System.Windows.Forms.FlatStyle.System;
this.btnAdd.Location = new System.Drawing.Point(8, 168);
this.btnAdd.Name = "btnAdd";
this.btnAdd.Size = new System.Drawing.Size(88, 24);
this.btnAdd.TabIndex = 2;
this.btnAdd.Text = "Add";
this.btnAdd.Click += new System.EventHandler(this.btnAdd_Click);
//
// listView1
//
this.listView1.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.listView1.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
this.columnHeader1});
this.listView1.FullRowSelect = true;
this.listView1.HideSelection = false;
this.listView1.Location = new System.Drawing.Point(8, 8);
this.listView1.Name = "listView1";
this.listView1.Size = new System.Drawing.Size(456, 152);
this.listView1.SmallImageList = this.imageList1;
this.listView1.TabIndex = 1;
this.listView1.View = System.Windows.Forms.View.Details;
this.listView1.KeyDown += new System.Windows.Forms.KeyEventHandler(this.listView1_KeyDown);
this.listView1.SelectedIndexChanged += new System.EventHandler(this.listView1_SelectedIndexChanged);
//
// columnHeader1
//
this.columnHeader1.Text = "ReferencePath";
this.columnHeader1.Width = 429;
//
// imageList1
//
this.imageList1.ColorDepth = System.Windows.Forms.ColorDepth.Depth32Bit;
this.imageList1.ImageSize = new System.Drawing.Size(16, 16);
this.imageList1.TransparentColor = System.Drawing.Color.Transparent;
//
// btnTest
//
this.btnTest.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.btnTest.FlatStyle = System.Windows.Forms.FlatStyle.System;
this.btnTest.Location = new System.Drawing.Point(368, 168);
this.btnTest.Name = "btnTest";
this.btnTest.Size = new System.Drawing.Size(96, 24);
this.btnTest.TabIndex = 4;
this.btnTest.Text = "Test";
this.btnTest.Visible = false;
this.btnTest.Click += new System.EventHandler(this.btnTest_Click);
//
// ReferencePathCollectionEditorForm
//
this.AcceptButton = this.btnOK;
this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
this.CancelButton = this.btnCancel;
this.ClientSize = new System.Drawing.Size(472, 390);
this.Controls.Add(this.btnTest);
this.Controls.Add(this.listView1);
this.Controls.Add(this.btnAdd);
this.Controls.Add(this.btnRemove);
this.Controls.Add(this.btnOK);
this.Controls.Add(this.btnCancel);
this.Controls.Add(this.propertyGrid1);
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.MinimizeBox = false;
this.MinimumSize = new System.Drawing.Size(480, 400);
this.Name = "ReferencePathCollectionEditorForm";
this.ShowInTaskbar = false;
this.SizeGripStyle = System.Windows.Forms.SizeGripStyle.Show;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "ReferencePath Collection Editor";
this.Load += new System.EventHandler(this.ReferencePathCollectionEditorForm_Load);
this.ResumeLayout(false);
}
#endregion
// Keyboard shortcuts for the path list: Insert adds a path, Delete removes
// the selection, Ctrl+A selects every entry.
private void listView1_KeyDown(object sender, System.Windows.Forms.KeyEventArgs e)
{
    if (e.KeyData == Keys.Insert)
    {
        this.btnAdd_Click(this.btnAdd, EventArgs.Empty);
        e.Handled = true;
    }
    else if (e.KeyData == Keys.Delete)
    {
        this.btnRemove_Click(this.btnRemove, EventArgs.Empty);
        e.Handled = true;
    }
    else if (e.Control && (e.KeyCode == Keys.A))
    {
        foreach (ListViewItem item in listView1.Items)
        {
            item.Selected = true;
        }
        e.Handled = true;
    }
}
// Prompts for a folder and, on OK, appends it to the collection as the
// single selected entry in the list view.
private void btnAdd_Click(object sender, System.EventArgs e)
{
    ShellBrowseForFolderDialog folderDialog = new ShellBrowseForFolderDialog();
    folderDialog.hwndOwner = this.Handle;
    if (folderDialog.ShowDialog() != DialogResult.OK)
        return;

    // Clear the current selection so the new entry ends up selected alone.
    foreach (ListViewItem selectedItem in listView1.SelectedItems)
    {
        selectedItem.Selected = false;
    }

    ReferencePath newPath = new ReferencePath();
    newPath.Path = folderDialog.FullName;

    ListViewItem newItem = new ListViewItem();
    listView1.Items.Add(newItem);
    newItem.Tag = newPath;
    UpdateListItem(newItem);
    _refPaths.Add(newPath);
    newItem.Selected = true;
}
// Removes every selected entry from both the backing collection and the
// list view.
//
// FIX: the original foreach-enumerated listView1.SelectedItems while calling
// listView1.Items.Remove(li), which mutates the selected-items collection
// during enumeration. Snapshot the selection first, then remove.
private void btnRemove_Click(object sender, System.EventArgs e)
{
    ListViewItem[] selected = new ListViewItem[listView1.SelectedItems.Count];
    listView1.SelectedItems.CopyTo(selected, 0);
    foreach (ListViewItem li in selected)
    {
        _refPaths.Remove((ReferencePath)li.Tag);
        listView1.Items.Remove(li);
    }
}
// Click handler for the hidden "Test" button (btnTest.Visible is set to
// false in InitializeComponent); intentionally left empty.
private void btnTest_Click(object sender, System.EventArgs e)
{
}
// Populates the list view from the collection being edited, then selects the
// first row so the property grid shows something immediately.
private void ReferencePathCollectionEditorForm_Load(object sender, System.EventArgs e)
{
    foreach (ReferencePath referencePath in _refPaths)
    {
        ListViewItem item = new ListViewItem();
        item.Tag = referencePath;
        UpdateListItem(item);
        listView1.Items.Add(item);
    }

    if (listView1.Items.Count > 0)
    {
        listView1.Items[0].Selected = true;
    }
}
// Binds the property grid to the current selection: a single row is edited
// through a RefPathPropGridProxy (so replacements propagate back to the row
// and the collection); multiple rows are edited via their tags directly.
private void listView1_SelectedIndexChanged(object sender, System.EventArgs e)
{
    if (listView1.SelectedItems.Count == 1)
    {
        propertyGrid1.SelectedObject = new RefPathPropGridProxy(this);
        // FIX: SelectedGridItem can be null (e.g. no focused row yet); the
        // original dereferenced it unconditionally.
        var gridItem = propertyGrid1.SelectedGridItem;
        if (gridItem != null && gridItem.Expandable)
            gridItem.Expanded = true;
    }
    else
    {
        object[] selectedTags = new object[this.listView1.SelectedItems.Count];
        for (int i = 0; i < selectedTags.Length; i++)
        {
            selectedTags[i] = listView1.SelectedItems[i].Tag;
        }
        propertyGrid1.SelectedObjects = selectedTags;
    }
}
// A property edit may change a row's caption or icon, so refresh every
// selected row from its tag.
private void propertyGrid1_PropertyValueChanged(object s, System.Windows.Forms.PropertyValueChangedEventArgs e)
{
    for (int i = 0; i < listView1.SelectedItems.Count; i++)
    {
        UpdateListItem(listView1.SelectedItems[i]);
    }
}
// Refreshes a list row from its ReferencePath tag: caption, state icon and
// fore color. Icon index into imageList1: 0 = plain, 1 = fixed path,
// +2 when subdirectories are included.
private void UpdateListItem(ListViewItem li)
{
ReferencePath rp = (ReferencePath)li.Tag;
li.Text = rp.ToString();
int imageIndex;
if (rp.FixedPath)
imageIndex = 1;
else
imageIndex = 0;
if (rp.IncludeSubDirectories)
imageIndex += 2;
li.ImageIndex = imageIndex;
// NOTE(review): coloring the row black only when the path is EMPTY looks
// inverted - an empty path seems like the error case and normal rows are
// black already; confirm whether a highlight color was intended here.
if (rp.Path.Length == 0) li.ForeColor = Color.Black;
}
// Wraps the ReferencePath behind the currently selected list row so the
// property grid can edit it; when the grid supplies a replacement instance,
// it is mirrored back into the row tag and the owning form's collection.
private class RefPathPropGridProxy
{
    private readonly ReferencePathCollectionEditorForm _editorForm;
    private readonly ListViewItem _listViewItem;
    private ReferencePath _referencePath;

    public RefPathPropGridProxy(ReferencePathCollectionEditorForm editorForm)
    {
        _editorForm = editorForm;
        _listViewItem = editorForm.listView1.SelectedItems[0];
        _referencePath = (ReferencePath)_listViewItem.Tag;
    }

    [Editor(typeof(ReferencePath.UIEditor), typeof(UITypeEditor))]
    [NDoc.Core.PropertyGridUI.FoldernameEditor.FolderDialogTitle("Select Reference Path")]
    public ReferencePath ReferencePath
    {
        get { return _referencePath; }
        set
        {
            _referencePath = value;
            // A brand-new instance from the editor must replace the one held
            // by the row and by the collection being edited.
            if (!Object.ReferenceEquals(_listViewItem.Tag, _referencePath))
            {
                _listViewItem.Tag = _referencePath;
                _editorForm._refPaths[_listViewItem.Index] = _referencePath;
            }
        }
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// Thread tracks managed thread IDs, recycling them when threads die to keep the set of
// live IDs compact.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System;
using System.Diagnostics;
using System.Runtime;
namespace System.Threading
{
// Assigns compact managed thread ids and recycles them when threads die.
// Ids are handed out from a lock-free immutable binary tree; every mutation
// builds a new root which is published with a CAS.
internal class ManagedThreadId
{
    //
    // Binary tree used to keep track of active thread ids. Each node of the tree keeps track of 32 consecutive ids.
    // Implemented as immutable collection to avoid locks. Each modification creates a new top level node.
    //
    class ImmutableIdDispenser
    {
        private readonly ImmutableIdDispenser _left; // Child nodes
        private readonly ImmutableIdDispenser _right;

        private readonly int _used; // Number of ids tracked by this node and all its childs
        private readonly int _size; // Maximum number of ids that can be tracked by this node and all its childs

        private readonly uint _bitmap; // Bitmap of ids tracked by this node

        private const int BitsPerNode = 32;

        private ImmutableIdDispenser(ImmutableIdDispenser left, ImmutableIdDispenser right, int used, int size, uint bitmap)
        {
            _left = left;
            _right = right;
            _used = used;
            _size = size;
            _bitmap = bitmap;

            CheckInvariants();
        }

        // Debug-only consistency check: recomputes the used count from the
        // bitmap plus both children and validates it against _used.
        [Conditional("DEBUG")]
        private void CheckInvariants()
        {
            int actualUsed = 0;

            uint countBits = _bitmap;
            while (countBits != 0)
            {
                actualUsed += (int)(countBits & 1);
                countBits >>= 1;
            }

            if (_left != null)
            {
                Debug.Assert(_left._size == ChildSize);
                actualUsed += _left._used;
            }
            if (_right != null)
            {
                Debug.Assert(_right._size == ChildSize);
                actualUsed += _right._used;
            }

            Debug.Assert(actualUsed == _used);
            Debug.Assert(_used <= _size);
        }

        // Capacity of each child subtree: half of what remains after this
        // node's own 32 ids.
        private int ChildSize
        {
            get
            {
                Debug.Assert((_size / 2) >= (BitsPerNode / 2));
                return (_size / 2) - (BitsPerNode / 2);
            }
        }

        public static ImmutableIdDispenser Empty
        {
            get
            {
                // The empty dispenser has the id=0 allocated, so it is not really empty.
                // It saves us from dealing with the corner case of true empty dispenser,
                // and it ensures that ManagedThreadIdNone will not be ever given out.
                return new ImmutableIdDispenser(null, null, 1, BitsPerNode, 1);
            }
        }

        // Returns a new dispenser with one more id allocated; the id itself
        // comes back through the out parameter.
        public ImmutableIdDispenser AllocateId(out int id)
        {
            if (_used == _size)
            {
                // Tree is full: grow by making the current tree the left child
                // of a new root. The new root's first own id is exactly _size.
                id = _size;
                return new ImmutableIdDispenser(this, null, _size + 1, checked(2 * _size + BitsPerNode), 1);
            }

            var bitmap = _bitmap;
            var left = _left;
            var right = _right;

            // Any free bits in current node?
            if (bitmap != UInt32.MaxValue)
            {
                int bit = 0;
                while ((bitmap & (uint)(1 << bit)) != 0)
                    bit++;
                bitmap |= (uint)(1 << bit);
                id = ChildSize + bit;
            }
            else
            {
                Debug.Assert(ChildSize > 0);
                if (left == null)
                {
                    left = new ImmutableIdDispenser(null, null, 1, ChildSize, 1);
                    id = left.ChildSize;
                }
                else
                if (right == null)
                {
                    right = new ImmutableIdDispenser(null, null, 1, ChildSize, 1);
                    id = ChildSize + BitsPerNode + right.ChildSize;
                }
                else
                {
                    // Keep the tree balanced by descending into the less-used child.
                    if (left._used < right._used)
                    {
                        Debug.Assert(left._used < left._size);
                        left = left.AllocateId(out id);
                    }
                    else
                    {
                        Debug.Assert(right._used < right._size);
                        right = right.AllocateId(out id);
                        id += (ChildSize + BitsPerNode);
                    }
                }
            }

            return new ImmutableIdDispenser(left, right, _used + 1, _size, bitmap);
        }

        // Returns a new dispenser with the given id freed, or null when this
        // subtree holds only one id and can be pruned entirely.
        public ImmutableIdDispenser RecycleId(int id)
        {
            Debug.Assert(id < _size);

            if (_used == 1)
                return null;

            var bitmap = _bitmap;
            var left = _left;
            var right = _right;

            int childSize = ChildSize;
            if (id < childSize)
            {
                left = left.RecycleId(id);
            }
            else
            {
                id -= childSize;
                if (id < BitsPerNode)
                {
                    Debug.Assert((bitmap & (uint)(1 << id)) != 0);
                    bitmap &= ~(uint)(1 << id);
                }
                else
                {
                    right = right.RecycleId(id - BitsPerNode);
                }
            }
            return new ImmutableIdDispenser(left, right, _used - 1, _size, bitmap);
        }
    }

    // Per-thread cache. t_currentThreadId keeps the ManagedThreadId object
    // alive for the thread's lifetime so its finalizer can recycle the id
    // after the thread dies; t_currentManagedThreadId is the fast-path value.
    [ThreadStatic]
    private static ManagedThreadId t_currentThreadId;
    [ThreadStatic]
    private static int t_currentManagedThreadId;

    // We have to avoid the static constructors on the ManagedThreadId class, otherwise we can run into stack overflow as first time Current property get called,
    // the runtime will ensure running the static constructor and this process will call the Current property again (when taking any lock)
    // System::Environment.get_CurrentManagedThreadId
    // System::Threading::Lock.Acquire
    // System::Runtime::CompilerServices::ClassConstructorRunner::Cctor.GetCctor
    // System::Runtime::CompilerServices::ClassConstructorRunner.EnsureClassConstructorRun
    // System::Threading::ManagedThreadId.get_Current
    // System::Environment.get_CurrentManagedThreadId
    private static ImmutableIdDispenser s_idDispenser;

    private int _managedThreadId;

    internal const int ManagedThreadIdNone = 0;

    // Managed id of the current thread; allocates one on first use.
    public static int Current
    {
        get
        {
            int currentManagedThreadId = t_currentManagedThreadId;
            if (currentManagedThreadId == ManagedThreadIdNone)
                return MakeForCurrentThread();
            else
                return currentManagedThreadId;
        }
    }

    // Slow path: allocate an id from the shared dispenser with a CAS retry
    // loop and cache it in the thread-statics.
    private static int MakeForCurrentThread()
    {
        if (s_idDispenser == null)
            Interlocked.CompareExchange(ref s_idDispenser, ImmutableIdDispenser.Empty, null);

        int id;

        var priorIdDispenser = Volatile.Read(ref s_idDispenser);
        for (;;)
        {
            var updatedIdDispenser = priorIdDispenser.AllocateId(out id);
            var interlockedResult = Interlocked.CompareExchange(ref s_idDispenser, updatedIdDispenser, priorIdDispenser);
            if (Object.ReferenceEquals(priorIdDispenser, interlockedResult))
                break;
            priorIdDispenser = interlockedResult; // we already have a volatile read that we can reuse for the next loop
        }

        Debug.Assert(id != ManagedThreadIdNone);

        t_currentThreadId = new ManagedThreadId(id);
        t_currentManagedThreadId = id;

        return id;
    }

    private ManagedThreadId(int managedThreadId)
    {
        _managedThreadId = managedThreadId;
    }

    // Runs when the owning thread is gone; returns the id to the dispenser.
    ~ManagedThreadId()
    {
        if (_managedThreadId == ManagedThreadIdNone)
        {
            return;
        }

        var priorIdDispenser = Volatile.Read(ref s_idDispenser);
        for (;;)
        {
            // FIX: recycle from the snapshot we CAS against (priorIdDispenser),
            // not from a fresh re-read of s_idDispenser. The original mixed the
            // two, so after a lost race the retry could build the new tree from
            // a different root than the one compared, publishing a dispenser
            // that still contains this id.
            var updatedIdDispenser = priorIdDispenser.RecycleId(_managedThreadId);
            var interlockedResult = Interlocked.CompareExchange(ref s_idDispenser, updatedIdDispenser, priorIdDispenser);
            if (Object.ReferenceEquals(priorIdDispenser, interlockedResult))
                break;
            priorIdDispenser = interlockedResult; // we already have a volatile read that we can reuse for the next loop
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using FluentNHibernate.Mapping.Providers;
using FluentNHibernate.MappingModel;
using FluentNHibernate.MappingModel.ClassBased;
using FluentNHibernate.Utils;
namespace FluentNHibernate.Mapping
{
/// <summary>
/// Defines a mapping for an entity subclass. Derive from this class to create a mapping,
/// and use the constructor to control how your entity is persisted.
/// </summary>
/// <example>
/// public class EmployeeMap : SubclassMap<Employee>
/// {
/// public EmployeeMap()
/// {
/// Map(x => x.Name);
/// Map(x => x.Age);
/// }
/// }
/// </example>
/// <typeparam name="T">Entity type to map</typeparam>
/// <summary>
/// Defines a mapping for an entity subclass. Derive from this class to create a mapping,
/// and use the constructor to control how your entity is persisted.
/// </summary>
/// <example>
/// public class EmployeeMap : SubclassMap&lt;Employee&gt;
/// {
///     public EmployeeMap()
///     {
///         Map(x => x.Name);
///         Map(x => x.Age);
///     }
/// }
/// </example>
/// <typeparam name="T">Entity type to map</typeparam>
public class SubclassMap<T> : ClasslikeMapBase<T>, IIndeterminateSubclassMappingProvider
{
    readonly MappingProviderStore providerStore;
    readonly AttributeStore attributeStore = new AttributeStore();

    // Nested subclass mappings, keyed by subclass type. Their generation is
    // deferred until the parent's subclassing strategy (subclass vs
    // joined-subclass) is known.
    readonly IDictionary<Type, IIndeterminateSubclassMappingProvider> deferredSubclasses = new Dictionary<Type, IIndeterminateSubclassMappingProvider>();

    // Boolean consumed by the next flag-style call; Not flips it to false and
    // every flag call resets it to true afterwards.
    bool pendingValue = true;
    readonly IList<JoinMapping> joinMappings = new List<JoinMapping>();

    public SubclassMap()
        : this(new MappingProviderStore())
    {}

    protected SubclassMap(MappingProviderStore providers)
        : base(providers)
    {
        providerStore = providers;
    }

    /// <summary>
    /// Inverts the next boolean setting
    /// </summary>
    [DebuggerBrowsable(DebuggerBrowsableState.Never)]
    public SubclassMap<T> Not
    {
        get
        {
            pendingValue = !pendingValue;
            return this;
        }
    }

    // Stores a user-supplied boolean attribute, consuming the pending Not state.
    void SetFlag(string attribute)
    {
        attributeStore.Set(attribute, Layer.UserSupplied, pendingValue);
        pendingValue = true;
    }

    /// <summary>
    /// (optional) Specifies that this subclass is abstract
    /// </summary>
    public void Abstract()
    {
        SetFlag("Abstract");
    }

    /// <summary>
    /// Sets the dynamic insert behaviour
    /// </summary>
    public void DynamicInsert()
    {
        SetFlag("DynamicInsert");
    }

    /// <summary>
    /// Sets the dynamic update behaviour
    /// </summary>
    public void DynamicUpdate()
    {
        SetFlag("DynamicUpdate");
    }

    /// <summary>
    /// Specifies that this entity should be lazy loaded
    /// </summary>
    public void LazyLoad()
    {
        SetFlag("Lazy");
    }

    /// <summary>
    /// Specify a proxy type for this entity
    /// </summary>
    /// <typeparam name="TProxy">Proxy type</typeparam>
    public void Proxy<TProxy>()
    {
        Proxy(typeof(TProxy));
    }

    /// <summary>
    /// Specify a proxy type for this entity
    /// </summary>
    /// <param name="proxyType">Proxy type</param>
    public void Proxy(Type proxyType)
    {
        attributeStore.Set("Proxy", Layer.UserSupplied, proxyType.AssemblyQualifiedName);
    }

    /// <summary>
    /// Specify that a select should be performed before an update of this entity
    /// </summary>
    public void SelectBeforeUpdate()
    {
        SetFlag("SelectBeforeUpdate");
    }

    [Obsolete("Use a new SubclassMap")]
    public void Subclass<TSubclass>(Action<SubclassMap<TSubclass>> subclassDefinition)
    {
        var subclassMap = new SubclassMap<TSubclass>();
        subclassDefinition(subclassMap);
        deferredSubclasses[typeof(TSubclass)] = subclassMap;
    }

    /// <summary>
    /// Set the discriminator value, if this entity is in a table-per-class-hierarchy
    /// mapping strategy.
    /// </summary>
    /// <param name="discriminatorValue">Discriminator value</param>
    public void DiscriminatorValue(object discriminatorValue)
    {
        attributeStore.Set("DiscriminatorValue", Layer.UserSupplied, discriminatorValue);
    }

    /// <summary>
    /// Sets the table name
    /// </summary>
    /// <param name="table">Table name</param>
    public void Table(string table)
    {
        attributeStore.Set("TableName", Layer.UserSupplied, table);
    }

    /// <summary>
    /// Sets the schema
    /// </summary>
    /// <param name="schema">Schema</param>
    public void Schema(string schema)
    {
        attributeStore.Set("Schema", Layer.UserSupplied, schema);
    }

    /// <summary>
    /// Specifies a check constraint
    /// </summary>
    /// <param name="constraint">Constraint name</param>
    public void Check(string constraint)
    {
        attributeStore.Set("Check", Layer.UserSupplied, constraint);
    }

    /// <summary>
    /// Adds a column to the key for this subclass, if used
    /// in a table-per-subclass strategy.
    /// </summary>
    /// <param name="column">Column name</param>
    public void KeyColumn(string column)
    {
        // Extend an existing user-supplied key if one was already started.
        var key = attributeStore.IsSpecified("Key")
            ? attributeStore.GetOrDefault<KeyMapping>("Key")
            : new KeyMapping();

        key.AddColumn(Layer.UserSupplied, new ColumnMapping(column));
        attributeStore.Set("Key", Layer.UserSupplied, key);
    }

    /// <summary>
    /// Subselect query
    /// </summary>
    /// <param name="subselect">Subselect query</param>
    public void Subselect(string subselect)
    {
        attributeStore.Set("Subselect", Layer.UserSupplied, subselect);
    }

    /// <summary>
    /// Specifies a persister for this entity
    /// </summary>
    /// <typeparam name="TPersister">Persister type</typeparam>
    public void Persister<TPersister>()
    {
        attributeStore.Set("Persister", Layer.UserSupplied, new TypeReference(typeof(TPersister)));
    }

    /// <summary>
    /// Specifies a persister for this entity
    /// </summary>
    /// <param name="type">Persister type</param>
    public void Persister(Type type)
    {
        attributeStore.Set("Persister", Layer.UserSupplied, new TypeReference(type));
    }

    /// <summary>
    /// Specifies a persister for this entity
    /// </summary>
    /// <param name="type">Persister type</param>
    public void Persister(string type)
    {
        attributeStore.Set("Persister", Layer.UserSupplied, new TypeReference(type));
    }

    /// <summary>
    /// Set the query batch size
    /// </summary>
    /// <param name="batchSize">Batch size</param>
    public void BatchSize(int batchSize)
    {
        attributeStore.Set("BatchSize", Layer.UserSupplied, batchSize);
    }

    /// <summary>
    /// Specifies an entity-name.
    /// </summary>
    /// <remarks>See http://nhforge.org/blogs/nhibernate/archive/2008/10/21/entity-name-in-action-a-strongly-typed-entity.aspx</remarks>
    public void EntityName(string entityname)
    {
        attributeStore.Set("EntityName", Layer.UserSupplied, entityname);
    }

    /// <summary>
    /// Links this entity to another table, to create a composite entity from two or
    /// more tables. This only works if you're in a table-per-inheritance-hierarchy
    /// strategy.
    /// </summary>
    /// <param name="tableName">Joined table name</param>
    /// <param name="action">Joined table mapping</param>
    /// <example>
    /// Join("another_table", join =>
    /// {
    ///     join.Map(x => x.Name);
    ///     join.Map(x => x.Age);
    /// });
    /// </example>
    public void Join(string tableName, Action<JoinPart<T>> action)
    {
        var joinPart = new JoinPart<T>(tableName);
        action(joinPart);
        joinMappings.Add(((IJoinMappingProvider)joinPart).GetJoinMapping());
    }

    /// <summary>
    /// (optional) Specifies the entity from which this subclass descends/extends.
    /// </summary>
    /// <typeparam name="TOther">Type of the entity to extend</typeparam>
    public void Extends<TOther>()
    {
        Extends(typeof(TOther));
    }

    /// <summary>
    /// (optional) Specifies the entity from which this subclass descends/extends.
    /// </summary>
    /// <param name="type">Type of the entity to extend</param>
    public void Extends(Type type)
    {
        attributeStore.Set("Extends", Layer.UserSupplied, type);
    }

    SubclassMapping IIndeterminateSubclassMappingProvider.GetSubclassMapping(SubclassType type)
    {
        var mapping = new SubclassMapping(type);

        GenerateNestedSubclasses(mapping);

        attributeStore.Set("Type", Layer.Defaults, typeof(T));
        attributeStore.Set("Name", Layer.Defaults, typeof(T).AssemblyQualifiedName);
        attributeStore.Set("DiscriminatorValue", Layer.Defaults, typeof(T).Name);

        // TODO: un-hardcode this
        var baseType = typeof(T).BaseType
            ?? attributeStore.Get("Extends") as Type;

        if (baseType != null)
        {
            var defaultKey = new KeyMapping();
            defaultKey.AddColumn(Layer.Defaults, new ColumnMapping(baseType.Name + "_id"));
            attributeStore.Set("Key", Layer.Defaults, defaultKey);
        }

        attributeStore.Set("TableName", Layer.Defaults, GetDefaultTableName());

        // TODO: this is nasty, we should find a better way
        mapping.OverrideAttributes(attributeStore.Clone());

        foreach (var join in joinMappings)
            mapping.AddJoin(join);

        foreach (var property in providerStore.Properties)
            mapping.AddProperty(property.GetPropertyMapping());

        foreach (var component in providerStore.Components)
            mapping.AddComponent(component.GetComponentMapping());

        foreach (var oneToOne in providerStore.OneToOnes)
            mapping.AddOneToOne(oneToOne.GetOneToOneMapping());

        foreach (var collection in providerStore.Collections)
            mapping.AddCollection(collection.GetCollectionMapping());

        foreach (var reference in providerStore.References)
            mapping.AddReference(reference.GetManyToOneMapping());

        foreach (var any in providerStore.Anys)
            mapping.AddAny(any.GetAnyMapping());

        return mapping.DeepClone();
    }

    Type IIndeterminateSubclassMappingProvider.EntityType
    {
        get { return EntityType; }
    }

    Type IIndeterminateSubclassMappingProvider.Extends
    {
        get { return attributeStore.GetOrDefault<Type>("Extends"); }
    }

    // Generates the deferred nested subclass mappings now that the parent's
    // subclass strategy is known.
    void GenerateNestedSubclasses(SubclassMapping mapping)
    {
        foreach (var deferred in deferredSubclasses)
        {
            mapping.AddSubclass(deferred.Value.GetSubclassMapping(mapping.SubclassType));
        }
    }

    // Default table name: entity name in backticks; generic entities become
    // GenericType_GenericParameterType.
    string GetDefaultTableName()
    {
#pragma warning disable 612,618
        var tableName = EntityType.Name;

        if (EntityType.IsGenericType)
        {
            // special case for generics: GenericType_GenericParameterType
            var backtick = EntityType.Name.IndexOf('`');
            tableName = EntityType.Name.Substring(0, backtick);

            foreach (var argument in EntityType.GetGenericArguments())
            {
                tableName += "_" + argument.Name;
            }
        }
#pragma warning restore 612,618

        return "`" + tableName + "`";
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Globalization;
using System.Threading.Tasks;
using System.Collections.Generic;
using Xunit;
namespace System.Data.SqlClient.ManualTesting.Tests
{
public static class ExceptionTest
{
// data value and server consts
private const string badServer = "NotAServer";
private const string sqlsvrBadConn = "A network-related or instance-specific error occurred while establishing a connection to SQL Server. The server was not found or was not accessible. Verify that the instance name is correct and that SQL Server is configured to allow remote connections.";
private const string logonFailedErrorMessage = "Login failed for user '{0}'.";
private const string execReaderFailedMessage = "ExecuteReader requires an open and available Connection. The connection's current state is closed.";
private const string warningNoiseMessage = "The full-text search condition contained noise word(s).";
private const string warningInfoMessage = "Test of info messages";
private const string orderIdQuery = "select orderid from orders where orderid < 10250";
[CheckConnStrSetupFact]
public static void WarningTest()
{
    // PRINT raises an info message. Verify the message reaches the
    // InfoMessage callback while subscribed, and that unsubscribing stops
    // delivery (the second ExecuteNonQuery must not fire the handler).
    SqlInfoMessageEventHandler handler = (sender, imevent) =>
    {
        for (int i = 0; i < imevent.Errors.Count; i++)
        {
            Assert.True(imevent.Errors[i].Message.Contains(warningInfoMessage), "FAILED: WarningTest Callback did not contain correct message.");
        }
    };

    string connectionString = new SqlConnectionStringBuilder(DataTestUtility.TcpConnStr) { Pooling = false }.ConnectionString;
    using (SqlConnection sqlConnection = new SqlConnection(connectionString))
    {
        sqlConnection.InfoMessage += handler;
        sqlConnection.Open();

        SqlCommand cmd = new SqlCommand(string.Format("PRINT N'{0}'", warningInfoMessage), sqlConnection);
        cmd.ExecuteNonQuery();

        sqlConnection.InfoMessage -= handler;
        cmd.ExecuteNonQuery();
    }
}
[CheckConnStrSetupFact]
public static void WarningsBeforeRowsTest()
{
    // Full-text noise-word queries raise warnings before any rows arrive.
    // Verify the InfoMessage callback fires for queries that return rows and
    // for ones that don't, with FireInfoMessageEventOnUserErrors both on and off.
    bool hitWarnings = false;
    int iteration = 0;
    Action<object, SqlInfoMessageEventArgs> warningCallback =
        (object sender, SqlInfoMessageEventArgs imevent) =>
        {
            for (int i = 0; i < imevent.Errors.Count; i++)
            {
                Assert.True(imevent.Errors[i].Message.Contains(warningNoiseMessage), "FAILED: WarningsBeforeRowsTest Callback did not contain correct message. Failed in loop iteration: " + iteration);
            }
            hitWarnings = true;
        };

    SqlInfoMessageEventHandler handler = new SqlInfoMessageEventHandler(warningCallback);
    // FIX: the original only called Close() on the success path, leaking the
    // connection when any assertion failed; dispose deterministically instead.
    using (SqlConnection sqlConnection = new SqlConnection(DataTestUtility.TcpConnStr))
    {
        sqlConnection.InfoMessage += handler;
        sqlConnection.Open();
        foreach (string orderClause in new string[] { "", " order by FirstName" })
        {
            foreach (bool messagesOnErrors in new bool[] { true, false })
            {
                iteration++;

                sqlConnection.FireInfoMessageEventOnUserErrors = messagesOnErrors;

                // These queries should return warnings because AND here is a noise word.
                SqlCommand cmd = new SqlCommand("select FirstName from Northwind.dbo.Employees where contains(FirstName, '\"Anne AND\"')" + orderClause, sqlConnection);
                using (SqlDataReader reader = cmd.ExecuteReader())
                {
                    Assert.True(reader.HasRows, "FAILED: SqlDataReader.HasRows is not correct (should be TRUE)");

                    bool receivedRows = false;
                    while (reader.Read())
                    {
                        receivedRows = true;
                    }
                    Assert.True(receivedRows, "FAILED: Should have received rows from this query.");
                    Assert.True(hitWarnings, "FAILED: Should have received warnings from this query");
                }
                hitWarnings = false;

                cmd.CommandText = "select FirstName from Northwind.dbo.Employees where contains(FirstName, '\"NotARealPerson AND\"')" + orderClause;
                using (SqlDataReader reader = cmd.ExecuteReader())
                {
                    Assert.False(reader.HasRows, "FAILED: SqlDataReader.HasRows is not correct (should be FALSE)");

                    bool receivedRows = false;
                    while (reader.Read())
                    {
                        receivedRows = true;
                    }
                    Assert.False(receivedRows, "FAILED: Should have NOT received rows from this query.");
                    Assert.True(hitWarnings, "FAILED: Should have received warnings from this query");
                }
            }
        }
    }
}
// Verifies e1 and e2 are distinct exception instances carrying identical
// data (message, help link, inner exception, source, Errors, and the Data
// dictionary). Returns true so it can be used as a verification callback.
private static bool CheckThatExceptionsAreDistinctButHaveSameData(SqlException e1, SqlException e2)
{
    Assert.True(e1 != e2, "FAILED: verification of exception cloning in subsequent connection attempts");

    Assert.False((e1 == null) || (e2 == null), "FAILED: One of exceptions is null, another is not");

    bool equal = (e1.Message == e2.Message) && (e1.HelpLink == e2.HelpLink) && (e1.InnerException == e2.InnerException)
        && (e1.Source == e2.Source) && (e1.Data.Count == e2.Data.Count) && (e1.Errors == e2.Errors);
    IDictionaryEnumerator enum1 = e1.Data.GetEnumerator();
    IDictionaryEnumerator enum2 = e2.Data.GetEnumerator();
    while (equal)
    {
        if (!enum1.MoveNext())
            break;
        enum2.MoveNext();
        // FIX: the original compared enum2.Value with itself (always true),
        // so differing Data values could never be detected.
        equal = (enum1.Key == enum2.Key) && (enum1.Value == enum2.Value);
    }

    Assert.True(equal, string.Format("FAILED: exceptions do not contain the same data (besides call stack):\nFirst: {0}\nSecond: {1}\n", e1, e2));

    return true;
}
[CheckConnStrSetupFact]
public static void ExceptionTests()
{
    SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(DataTestUtility.TcpConnStr);

    // Bad server name: failure surfaces from the TDS parser constructor.
    SqlConnectionStringBuilder brokenBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { DataSource = badServer, ConnectTimeout = 1 };
    VerifyConnectionFailure<SqlException>(() => GenerateConnectionException(brokenBuilder.ConnectionString), sqlsvrBadConn, VerifyException);

    // Empty password: logon failure thrown from the adapter.
    brokenBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { Password = string.Empty, IntegratedSecurity = false };
    string expectedMessage = string.Format(CultureInfo.InvariantCulture, logonFailedErrorMessage, brokenBuilder.UserID);
    VerifyConnectionFailure<SqlException>(() => GenerateConnectionException(brokenBuilder.ConnectionString), expectedMessage, (ex) => VerifyException(ex, 1, 18456, 1, 14));

    // Nonexistent database: login fails against the requested catalog.
    brokenBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { InitialCatalog = "NotADatabase" };
    expectedMessage = string.Format(CultureInfo.InvariantCulture, "Cannot open database \"{0}\" requested by the login. The login failed.", brokenBuilder.InitialCatalog);
    SqlException firstAttemptException = VerifyConnectionFailure<SqlException>(() => GenerateConnectionException(brokenBuilder.ConnectionString), expectedMessage, (ex) => VerifyException(ex, 2, 4060, 1, 11));

    // The same failure again must yield a distinct exception instance with the same data.
    VerifyConnectionFailure<SqlException>(() => GenerateConnectionException(brokenBuilder.ConnectionString), expectedMessage, (ex) => CheckThatExceptionsAreDistinctButHaveSameData(firstAttemptException, ex));

    // Unknown user name: logon failure thrown from the adapter.
    brokenBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { UserID = "NotAUser", IntegratedSecurity = false };
    expectedMessage = string.Format(CultureInfo.InvariantCulture, logonFailedErrorMessage, brokenBuilder.UserID);
    VerifyConnectionFailure<SqlException>(() => GenerateConnectionException(brokenBuilder.ConnectionString), expectedMessage, (ex) => VerifyException(ex, 1, 18456, 1, 14));
}
[CheckConnStrSetupFact]
public static void VariousExceptionTests()
{
    SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(DataTestUtility.TcpConnStr);

    // Test 1 - A: executing a reader on a never-opened connection throws.
    SqlConnectionStringBuilder brokenBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { DataSource = badServer, ConnectTimeout = 1 };
    using (var sqlConnection = new SqlConnection(brokenBuilder.ConnectionString))
    using (SqlCommand command = sqlConnection.CreateCommand())
    {
        command.CommandText = orderIdQuery;
        VerifyConnectionFailure<InvalidOperationException>(() => command.ExecuteReader(), execReaderFailedMessage);
    }

    // Test 1 - B: opening with an empty password fails logon.
    brokenBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { Password = string.Empty, IntegratedSecurity = false };
    using (var sqlConnection = new SqlConnection(brokenBuilder.ConnectionString))
    {
        string expectedMessage = string.Format(CultureInfo.InvariantCulture, logonFailedErrorMessage, brokenBuilder.UserID);
        VerifyConnectionFailure<SqlException>(() => sqlConnection.Open(), expectedMessage, (ex) => VerifyException(ex, 1, 18456, 1, 14));
    }
}
[CheckConnStrSetupFact]
public static void IndependentConnectionExceptionTest()
{
    SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(DataTestUtility.TcpConnStr);

    SqlConnectionStringBuilder badBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { DataSource = badServer, ConnectTimeout = 1 };
    using (var sqlConnection = new SqlConnection(badBuilder.ConnectionString))
    {
        // Test 1: opening a connection to an unreachable server surfaces a SqlException.
        VerifyConnectionFailure<SqlException>(() => sqlConnection.Open(), sqlsvrBadConn, VerifyException);

        // Test 2: a command on the (still closed) connection fails with its own
        // InvalidOperationException, independent of the earlier open failure.
        using (SqlCommand command = new SqlCommand(orderIdQuery, sqlConnection))
        {
            VerifyConnectionFailure<InvalidOperationException>(() => command.ExecuteReader(), execReaderFailedMessage);
        }
    }
}
[CheckConnStrSetupFact]
public static async Task UnobservedTaskExceptionTest()
{
    // Every SqlException we observe by awaiting is recorded here; the
    // UnobservedTaskException callback asserts that none of those observed
    // exceptions later resurfaces as "unobserved".
    List<Exception> exceptionsSeen = new List<Exception>();
    Action<object, UnobservedTaskExceptionEventArgs> unobservedTaskCallback =
        (object sender, UnobservedTaskExceptionEventArgs e) =>
        {
            Assert.False(exceptionsSeen.Contains(e.Exception.InnerException), "FAILED: This exception was already observed by awaiting: " + e.Exception.InnerException);
        };
    EventHandler<UnobservedTaskExceptionEventArgs> handler = new EventHandler<UnobservedTaskExceptionEventArgs>(unobservedTaskCallback);
    TaskScheduler.UnobservedTaskException += handler;

    using(var connection = new SqlConnection(DataTestUtility.TcpConnStr))
    {
        await connection.OpenAsync();
        // The second result set targets a nonexistent table, so the reader
        // faults partway through; the exception is then observed in the catch.
        using (var command = new SqlCommand("select null; select * from dbo.NonexistentTable;", connection))
        {
            try
            {
                using (var reader = await command.ExecuteReaderAsync())
                {
                    do
                    {
                        while (await reader.ReadAsync())
                        {
                        }
                    } while (await reader.NextResultAsync());
                }
            }
            catch (SqlException ex)
            {
                exceptionsSeen.Add(ex);
            }
        }
    }

    // Force finalization so any unobserved task exceptions fire (and reach the
    // callback) before the handler is detached below.
    GC.Collect();
    GC.WaitForPendingFinalizers();

    TaskScheduler.UnobservedTaskException -= handler;
}
/// <summary>
/// Opens a connection with the given connection string and runs the standard
/// order-id query; any connection or execution failure propagates to the caller.
/// </summary>
private static void GenerateConnectionException(string connectionString)
{
    using (SqlConnection connection = new SqlConnection(connectionString))
    {
        connection.Open();
        using (SqlCommand cmd = connection.CreateCommand())
        {
            cmd.CommandText = orderIdQuery;
            cmd.ExecuteReader();
        }
    }
}
/// <summary>
/// Runs <paramref name="connectAction"/>, asserts it throws a TException whose
/// message contains <paramref name="expectedExceptionMessage"/>, and applies the
/// caller-supplied verifier to the thrown exception. Returns the exception.
/// </summary>
private static TException VerifyConnectionFailure<TException>(Action connectAction, string expectedExceptionMessage, Func<TException, bool> exVerifier) where TException : Exception
{
    TException thrown = Assert.Throws<TException>(connectAction);

    Assert.True(
        thrown.Message.Contains(expectedExceptionMessage),
        string.Format("FAILED: SqlException did not contain expected error message. Actual message: {0}", thrown.Message));
    Assert.True(exVerifier(thrown), "FAILED: Exception verifier failed on the exception.");

    return thrown;
}
/// <summary>
/// Overload without a verifier: any exception of the expected type is accepted.
/// </summary>
private static TException VerifyConnectionFailure<TException>(Action connectAction, string expectedExceptionMessage) where TException : Exception
{
    Func<TException, bool> acceptAny = (ex) => true;
    return VerifyConnectionFailure<TException>(connectAction, expectedExceptionMessage, acceptAny);
}
/// <summary>
/// Default verification: exactly one server error of error-level severity,
/// with no constraint on number/state/severity values.
/// </summary>
private static bool VerifyException(SqlException exception)
{
    return VerifyException(exception, 1);
}
/// <summary>
/// Verifies the shape of a SqlException: the number of errors in its collection,
/// that each error is a real error (severity >= 10, not a warning), and — when
/// supplied — the server-reported number/state/severity. Returns true so it can
/// be used directly as a verifier delegate; all failures assert.
/// </summary>
private static bool VerifyException(SqlException exception, int count, int? errorNumber = null, int? errorState = null, int? severity = null)
{
    // Verify that there are the correct number of errors in the exception
    Assert.True(exception.Errors.Count == count, string.Format("FAILED: Incorrect number of errors. Expected: {0}. Actual: {1}.", count, exception.Errors.Count));

    // Ensure that all errors have an error-level severity
    for (int i = 0; i < count; i++)
    {
        Assert.True(exception.Errors[i].Class >= 10, "FAILED: verification of Exception! Exception contains a warning!");
    }

    // Check the properties of the exception populated by the server are correct
    if (errorNumber.HasValue)
    {
        Assert.True(errorNumber.Value == exception.Number, string.Format("FAILED: Error number of exception is incorrect. Expected: {0}. Actual: {1}.", errorNumber.Value, exception.Number));
    }

    if (errorState.HasValue)
    {
        Assert.True(errorState.Value == exception.State, string.Format("FAILED: Error state of exception is incorrect. Expected: {0}. Actual: {1}.", errorState.Value, exception.State));
    }

    if (severity.HasValue)
    {
        Assert.True(severity.Value == exception.Class, string.Format("FAILED: Severity of exception is incorrect. Expected: {0}. Actual: {1}.", severity.Value, exception.Class));
    }

    // When all three are supplied, ToString() must embed them in its details line.
    if ((errorNumber.HasValue) && (errorState.HasValue) && (severity.HasValue))
    {
        string detailsText = string.Format("Error Number:{0},State:{1},Class:{2}", errorNumber.Value, errorState.Value, severity.Value);
        Assert.True(exception.ToString().Contains(detailsText), string.Format("FAILED: SqlException.ToString does not contain the error number, state and severity information"));
    }

    // verify that the this[] function on the collection works, as well as the All function
    SqlError[] errors = new SqlError[exception.Errors.Count];
    exception.Errors.CopyTo(errors, 0);
    Assert.True((errors[0].Message).Equals(exception.Errors[0].Message), string.Format("FAILED: verification of Exception! ErrorCollection indexer/CopyTo resulted in incorrect value."));

    return true;
}
}
}
| |
//
// Copyright (C) Microsoft. All rights reserved.
//
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Text.RegularExpressions;
using System.Collections;
using System.Xml;
using System.Runtime.InteropServices;
using System.Reflection;
using System.Resources;
using System.Globalization;
using System.Diagnostics.CodeAnalysis;
using Microsoft.Win32;
using System.Management.Automation;
using System.Management.Automation.Provider;
using System.Threading;
#if CORECLR
using System.Xml.XPath;
#endif
namespace Microsoft.WSMan.Management
{
[SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings", MessageId = "0#")]
internal class WSManHelper
{
//regular expressions
private const string PTRN_URI_LAST = @"([a-z_][-a-z0-9._]*)$";
private const string PTRN_OPT = @"^-([a-z]+):(.*)";
private const string PTRN_HASH_TOK = @"\s*([\w:]+)\s*=\s*(\$null|""([^""]*)"")\s*";
//schemas
private const string URI_IPMI = @"http://schemas.dmtf.org/wbem/wscim/1/cim-schema";
private const string URI_WMI = @"http://schemas.microsoft.com/wbem/wsman/1/wmi";
private const string NS_IPMI = @"http://schemas.dmtf.org/wbem/wscim/1/cim-schema";
private const string NS_CIMBASE = @"http://schemas.dmtf.org/wbem/wsman/1/base";
private const string NS_WSMANL = @"http://schemas.microsoft.com";
private const string NS_XSI = @"xmlns:xsi=""http://www.w3.org/2001/XMLSchema-instance""";
private const string ATTR_NIL = @"xsi:nil=""true""";
private const string ATTR_NIL_NAME = @"xsi:nil";
private const string NS_XSI_URI = @"http://www.w3.org/2001/XMLSchema-instance";
private const string ALIAS_XPATH = @"xpath";
private const string URI_XPATH_DIALECT = @"http://www.w3.org/TR/1999/REC-xpath-19991116";
//credSSP strings
internal string CredSSP_RUri = "winrm/config/client/auth";
internal string CredSSP_XMLNmsp = "http://schemas.microsoft.com/wbem/wsman/1/config/client/auth";
internal string CredSSP_SNode = "/cfg:Auth/cfg:CredSSP";
internal string Client_uri = "winrm/config/client";
internal string urlprefix_node = "/cfg:Client/cfg:URLPrefix";
internal string Client_XMLNmsp = "http://schemas.microsoft.com/wbem/wsman/1/config/client";
internal string Service_Uri = "winrm/config/service";
internal string Service_UrlPrefix_Node = "/cfg:Service/cfg:URLPrefix";
internal string Service_XMLNmsp = "http://schemas.microsoft.com/wbem/wsman/1/config/service";
internal string Service_CredSSP_Uri = "winrm/config/service/auth";
internal string Service_CredSSP_XMLNmsp = "http://schemas.microsoft.com/wbem/wsman/1/config/service/auth";
//gpo registry path and keys
internal string Registry_Path_Credentials_Delegation = @"SOFTWARE\Policies\Microsoft\Windows";
internal string Key_Allow_Fresh_Credentials = "AllowFreshCredentials";
internal string Key_Concatenate_Defaults_AllowFresh = "ConcatenateDefaults_AllowFresh";
internal string Delegate = "delegate";
internal string keyAllowcredssp = "AllowCredSSP";
//'Constants for MS-XML
private const string NODE_ATTRIBUTE = "2";
private const int NODE_TEXT = 3;
//strings for dialects
internal string ALIAS_WQL = @"wql";
internal string ALIAS_ASSOCIATION = @"association";
internal string ALIAS_SELECTOR = @"selector";
internal string URI_WQL_DIALECT = @"http://schemas.microsoft.com/wbem/wsman/1/WQL";
internal string URI_SELECTOR_DIALECT = @"http://schemas.dmtf.org/wbem/wsman/1/wsman/SelectorFilter";
internal string URI_ASSOCIATION_DIALECT = @" http://schemas.dmtf.org/wbem/wsman/1/cimbinding/associationFilter";
//string for operation
internal string WSManOp = null;
private ResourceManager _resourceMgr = null;
private PSCmdlet cmdletname;
private NavigationCmdletProvider _provider;
private FileStream _fs;
private StreamReader _sr;
private static ResourceManager g_resourceMgr = new ResourceManager("Microsoft.WSMan.Management.resources.WsManResources", typeof(WSManHelper).GetTypeInfo().Assembly);
//
//
//Below class is just a static container which would release sessions in case this DLL is unloaded.
internal class Sessions
{
    /// <summary>
    /// dictionary object to store the connection
    /// </summary>
    // Key: lower-cased connection string; value: the IWSManSession COM object.
    internal static Dictionary<string, object> SessionObjCache = new Dictionary<string, object>();

    // Finalizer releases every cached COM session when the AutoSession holder
    // is collected (e.g. when this DLL/AppDomain is unloaded).
    ~Sessions()
    {
        ReleaseSessions();
    }
}
internal static Sessions AutoSession = new Sessions();
//
//
//
/// <summary>
/// Releases every cached WSMan COM session and empties the cache.
/// Safe to call multiple times; entries that are null or not COM objects are skipped.
/// </summary>
internal static void ReleaseSessions()
{
    lock (Sessions.SessionObjCache)
    {
        // Iterate the values directly instead of the original Keys + TryGetValue
        // pattern, which performed a redundant lookup per entry.
        foreach (object sessionobj in Sessions.SessionObjCache.Values)
        {
            try
            {
                Marshal.ReleaseComObject(sessionobj);
            }
            catch (ArgumentException)
            {
                //Somehow the object was a null reference. Ignore the error
            }
        }
        Sessions.SessionObjCache.Clear();
    }
}
/// <summary>Creates a helper with no cmdlet or provider error context.</summary>
internal WSManHelper()
{
}
/// <summary>Creates a helper that reports terminating errors through the given cmdlet.</summary>
internal WSManHelper(PSCmdlet cmdlet)
{
    cmdletname = cmdlet;
}
/// <summary>Creates a helper that reports terminating errors through the given provider.</summary>
internal WSManHelper(NavigationCmdletProvider provider)
{
    _provider = provider;
}
/// <summary>
/// Throws InvalidOperationException (with the localized elevation message) when
/// the current Windows identity is not in the built-in Administrators role.
/// </summary>
internal static void ThrowIfNotAdministrator()
{
    System.Security.Principal.WindowsIdentity identity = System.Security.Principal.WindowsIdentity.GetCurrent();
    System.Security.Principal.WindowsPrincipal principal = new System.Security.Principal.WindowsPrincipal(identity);

    if (principal.IsInRole(System.Security.Principal.WindowsBuiltInRole.Administrator))
    {
        return;
    }

    throw new InvalidOperationException(g_resourceMgr.GetString("ErrorElevationNeeded"));
}
/// <summary>Looks up a localized message in the shared (static) resource manager.</summary>
internal string GetResourceMsgFromResourcetext(string rscname)
{
    return g_resourceMgr.GetString(rscname);
}
/// <summary>Static variant: formats a message using the shared (static) resource manager.</summary>
static internal string FormatResourceMsgFromResourcetextS(string rscname,
    params object[] args)
{
    return FormatResourceMsgFromResourcetextS(g_resourceMgr, rscname, args);
}
/// <summary>Formats a message using the per-instance resource manager.</summary>
// NOTE(review): _resourceMgr is never assigned in the visible part of this file,
// in which case the core formatter throws ArgumentNullException — confirm where
// _resourceMgr is initialized before relying on this overload.
internal string FormatResourceMsgFromResourcetext(string resourceName,
    params object[] args)
{
    return FormatResourceMsgFromResourcetextS(this._resourceMgr, resourceName, args);
}
/// <summary>
/// Core formatter: loads the named resource template from the supplied manager
/// and formats it with the current culture. Returns null when the resource does
/// not exist (rather than throwing), so callers can fall back.
/// </summary>
static private string FormatResourceMsgFromResourcetextS(
    ResourceManager resourceManager,
    string resourceName,
    object[] args)
{
    if (resourceManager == null)
    {
        throw new ArgumentNullException("resourceManager");
    }

    if (String.IsNullOrEmpty(resourceName))
    {
        throw new ArgumentNullException("resourceName");
    }

    string template = resourceManager.GetString(resourceName);
    if (template == null)
    {
        return null;
    }

    return String.Format(CultureInfo.CurrentCulture, template, args);
}
/// <summary>
/// Adds a session to the cache, releasing and replacing any session already
/// stored under the same (case-insensitively keyed) connection string.
/// </summary>
/// <param name="key">connection string</param>
/// <param name="value">session object</param>
internal void AddtoDictionary(string key, Object value)
{
    key = key.ToLowerInvariant();
    lock (Sessions.SessionObjCache)
    {
        // Single TryGetValue replaces the original ContainsKey + TryGetValue
        // double lookup, and the duplicated Add calls are merged into one.
        object objsession = null;
        if (Sessions.SessionObjCache.TryGetValue(key, out objsession))
        {
            // Release the COM session being evicted before overwriting it.
            try
            {
                Marshal.ReleaseComObject(objsession);
            }
            catch (ArgumentException)
            {
                //Somehow the object was a null reference. Ignore the error
            }
            Sessions.SessionObjCache.Remove(key);
        }
        Sessions.SessionObjCache.Add(key, value);
    }
}
/// <summary>
/// Removes and returns the cached session for the given computer (key is
/// case-insensitive); the COM object is released before removal.
/// Returns null when no session is cached for that computer.
/// </summary>
internal object RemoveFromDictionary(string computer)
{
    object objsession = null;
    computer = computer.ToLowerInvariant();

    lock (Sessions.SessionObjCache)
    {
        if (!Sessions.SessionObjCache.TryGetValue(computer, out objsession))
        {
            // Nothing cached under this key; objsession is null here.
            return objsession;
        }

        try
        {
            Marshal.ReleaseComObject(objsession);
        }
        catch (ArgumentException)
        {
            //Somehow the object was a null reference. Ignore the error
        }
        Sessions.SessionObjCache.Remove(computer);
    }

    return objsession;
}
/// <summary>
/// Returns the session cache, lazily creating the default "localhost" session
/// the first time it is requested.
/// </summary>
internal static Dictionary<string, object> GetSessionObjCache()
{
    try
    {
        lock (Sessions.SessionObjCache)
        {
            if (!Sessions.SessionObjCache.ContainsKey("localhost"))
            {
                IWSManEx wsmanObject = (IWSManEx)new WSManClass();
                IWSManSession SessionObj = (IWSManSession)wsmanObject.CreateSession(null, 0, null);
                Sessions.SessionObjCache.Add("localhost", SessionObj);
            }
        }
    }
    // Best-effort: if the local session cannot be created (I/O, missing rights,
    // COM failure), deliberately fall through and return whatever is cached.
    catch (IOException)
    {
    }
    catch (System.Security.SecurityException)
    {
    }
    catch (System.UnauthorizedAccessException)
    {
    }
    catch (COMException)
    {
    }
    return Sessions.SessionObjCache;
}
/// <summary>
/// Derives the XML root element name for a request body: for "invoke" it is
/// "&lt;actionStr&gt;_INPUT"; otherwise it is the last path segment of the resource
/// URI (selectors stripped). Returns null when resourceUri is null.
/// </summary>
internal string GetRootNodeName(string operation, string resourceUri, string actionStr)
{
    if (resourceUri == null)
    {
        return null;
    }

    string candidate = StripParams(resourceUri);

    Regex lastSegment = new Regex(PTRN_URI_LAST, RegexOptions.IgnoreCase);
    MatchCollection matches = lastSegment.Matches(candidate);
    if (matches.Count == 0)
    {
        // No trailing identifier found; fall through with the stripped URI unchanged.
        return candidate;
    }

    if (operation.Equals("invoke", StringComparison.OrdinalIgnoreCase))
    {
        return String.Concat(actionStr, "_INPUT");
    }

    return matches[0].ToString();
}
/// <summary>
/// Removes the selector/query portion from a resource URI, e.g.
/// "http://x/y?Name=spooler" becomes "http://x/y". URIs without '?' pass through.
/// </summary>
internal string StripParams(string uri)
{
    int pos = uri.IndexOf('?');

    // BUG FIX: the original returned uri.Substring(pos, Length - pos) — i.e. the
    // "?name=value" tail itself — which is the opposite of stripping the
    // parameters. Both visible callers (GetRootNodeName, GetXmlNs) want the base URI.
    if (pos > 0)
        return uri.Substring(0, pos);
    else
        return uri;
}
/// <summary>
/// Reads the entire contents of a file as text. Throws ArgumentException when the
/// file does not exist; other I/O and security failures are converted to
/// terminating cmdlet errors (in which case null is returned to no one).
/// </summary>
/// <param name="path">Path of the file to read.</param>
/// <returns>The file contents as a single string.</returns>
internal string ReadFile(string path)
{
    if (!File.Exists(path))
    {
        throw new ArgumentException(GetResourceMsgFromResourcetext("InvalidFileName"));
    }

    string strOut = null;
    try
    {
        // NOTE(review): the stream and reader live in instance fields (_fs/_sr,
        // also disposed by CleanUp) rather than locals — confirm nothing else
        // depends on them before converting this to using-locals.
        _fs = new FileStream(path, FileMode.Open, FileAccess.Read);
        // create stream Reader
        _sr = new StreamReader(_fs);
        strOut = _sr.ReadToEnd();
    }
    catch (ArgumentNullException e)
    {
        ErrorRecord er = new ErrorRecord(e, "ArgumentNullException", ErrorCategory.InvalidArgument, null);
        cmdletname.ThrowTerminatingError(er);
    }
    catch (UnauthorizedAccessException e)
    {
        ErrorRecord er = new ErrorRecord(e, "UnauthorizedAccessException", ErrorCategory.PermissionDenied, null);
        cmdletname.ThrowTerminatingError(er);
    }
    catch (FileNotFoundException e)
    {
        ErrorRecord er = new ErrorRecord(e, "FileNotFoundException", ErrorCategory.ObjectNotFound, null);
        cmdletname.ThrowTerminatingError(er);
    }
    catch (DirectoryNotFoundException e)
    {
        ErrorRecord er = new ErrorRecord(e, "DirectoryNotFoundException", ErrorCategory.ObjectNotFound, null);
        cmdletname.ThrowTerminatingError(er);
    }
    catch (System.Security.SecurityException e)
    {
        ErrorRecord er = new ErrorRecord(e, "SecurityException", ErrorCategory.SecurityError, null);
        cmdletname.ThrowTerminatingError(er);
    }
    finally
    {
        // Dispose (idempotently) whatever was opened; CleanUp may dispose again.
        if (_sr != null)
        {
            // _sr.Close();
            _sr.Dispose();
        }
        if (_fs != null)
        {
            //_fs.Close();
            _fs.Dispose();
        }
    }
    return strOut;
}
/// <summary>
/// Builds the XML request payload for a WSMan operation. A file path (with no
/// value set) is read verbatim; "new"/"invoke" synthesize a &lt;p:root&gt; element
/// from the value set; "set" fetches the current resource document and patches
/// only the entries named in the value set. Other operations return null.
/// </summary>
internal string ProcessInput(IWSManEx wsman, string filepath, string operation, string root, Hashtable valueset, IWSManResourceLocator resourceUri, IWSManSession sessionObj)
{
    string resultString = null;

    //if file path is given
    if (!string.IsNullOrEmpty(filepath) && valueset == null)
    {
        if (!File.Exists(filepath))
        {
            throw new FileNotFoundException(_resourceMgr.GetString("InvalidFileName"));
        }
        resultString = ReadFile(filepath);
        return resultString;
    }

    switch (operation)
    {
        case "new":
        case "invoke":
            string parameters = null, nilns = null;
            string xmlns = GetXmlNs(resourceUri.ResourceUri);

            //if valueset is given, i.e hastable
            if (valueset != null)
            {
                foreach (DictionaryEntry entry in valueset)
                {
                    parameters = parameters + "<p:" + entry.Key.ToString();
                    // NOTE(review): ToString() never returns null, so this branch is
                    // unreachable; 'entry.Value == null' was probably intended (and
                    // a null Value currently throws NRE on the ToString call).
                    // Confirm intent before changing.
                    if (entry.Value.ToString() == null)
                    {
                        parameters = parameters + " " + ATTR_NIL;
                        nilns = " " + NS_XSI;
                    }
                    parameters = parameters + ">" + entry.Value.ToString() + "</p:" + entry.Key.ToString() + ">";
                }
            }
            resultString = "<p:" + root + " " + xmlns + nilns + ">" + parameters + "</p:" + root + ">";
            break;
        case "set":
            // Fetch the current resource document and patch it in place.
            string getResult = sessionObj.Get(resourceUri, 0);
            XmlDocument xmlfile = new XmlDocument();
            xmlfile.LoadXml(getResult);
            string xpathString = null;
            if (valueset != null)
            {
                foreach (DictionaryEntry entry in valueset)
                {
                    // Match the grandchild element with the entry's local name.
                    xpathString = @"/*/*[local-name()=""" + entry.Key + @"""]";
                    if (entry.Key.ToString().Equals("location", StringComparison.OrdinalIgnoreCase))
                    {
                        //'Ignore cim:Location
                        xpathString = @"/*/*[local-name()=""" + entry.Key + @""" and namespace-uri() != """ + NS_CIMBASE + @"""]";
                    }
                    XmlNodeList nodes = xmlfile.SelectNodes(xpathString);
                    if (nodes.Count == 0)
                    {
                        throw new ArgumentException(_resourceMgr.GetString("NoResourceMatch"));
                    }
                    else if (nodes.Count > 1)
                    {
                        throw new ArgumentException(_resourceMgr.GetString("MultipleResourceMatch"));
                    }
                    else
                    {
                        XmlNode node = nodes[0];
                        if (node.HasChildNodes)
                        {
                            if (node.ChildNodes.Count > 1)
                            {
                                throw new ArgumentException(_resourceMgr.GetString("NOAttributeMatch"));
                            }
                            else
                            {
                                // Only simple text content may be replaced; reject structure.
                                XmlNode tmpNode = node.ChildNodes[0];//.Item[0];
                                if (!tmpNode.NodeType.ToString().Equals("text", StringComparison.OrdinalIgnoreCase))
                                {
                                    throw new ArgumentException(_resourceMgr.GetString("NOAttributeMatch"));
                                }
                            }
                        }
                        // NOTE(review): testing the KEY for emptiness to decide whether
                        // to write xsi:nil looks suspicious (the value seems intended);
                        // confirm before changing.
                        if (string.IsNullOrEmpty(entry.Key.ToString()))
                        {
                            //XmlNode newnode = xmlfile.CreateNode(XmlNodeType.Attribute, ATTR_NIL_NAME, NS_XSI_URI);
                            XmlAttribute newnode = xmlfile.CreateAttribute(XmlNodeType.Attribute.ToString(), ATTR_NIL_NAME, NS_XSI_URI);
                            newnode.Value = "true";
                            node.Attributes.Append(newnode);
                            //(newnode.Attributes.Item(0).FirstChild );
                            node.Value = "";
                        }
                        else
                        {
                            node.Attributes.RemoveNamedItem(ATTR_NIL_NAME);
                            node.InnerText = entry.Value.ToString();
                        }
                    }
                }//end for
            }//end if valueset
            resultString = xmlfile.OuterXml;
            break;
    }//end switch
    return resultString;
}
/// <summary>
/// Builds the xmlns:p attribute for a request body from the resource URI with
/// its selector portion stripped.
/// </summary>
internal string GetXmlNs(string resUri)
{
    // Historically a ".xsd" suffix was appended for non-IPMI/non-WMI URIs, but
    // Intel reported that as an interop issue, so every URI — IPMI/WMI or not —
    // is now simply stripped of its selector portion. (The original if/else
    // branches had become identical; they are collapsed here.)
    string ns = StripParams(resUri);
    return (@"xmlns:p=""" + ns + @"""");
}
/// <summary>
/// Loads the XML string and returns the first node matching the XPath pattern.
/// When a namespace is supplied it is registered under the conventional "cfg"
/// prefix used by this class's XPath expressions.
/// </summary>
internal XmlNode GetXmlNode(string xmlString, string xpathpattern, string xmlnamespace)
{
    XmlDocument document = new XmlDocument();
    document.LoadXml(xmlString);

    XmlNamespaceManager namespaceManager = new XmlNamespaceManager(document.NameTable);
    if (!string.IsNullOrEmpty(xmlnamespace))
    {
        namespaceManager.AddNamespace("cfg", xmlnamespace);
    }

    return document.SelectSingleNode(xpathpattern, namespaceManager);
}
/// <summary>
/// Builds a WSMan connection string. An explicit URI wins outright; otherwise
/// the string is assembled as "computer[:port][/applicationname]", defaulting
/// the computer to "localhost" when only port/application were supplied.
/// </summary>
internal string CreateConnectionString(Uri ConnUri, int port, string computername, string applicationname)
{
    if (ConnUri != null)
    {
        return ConnUri.OriginalString;
    }

    if (computername == null && (port != 0 || applicationname != null))
    {
        // the user didn't give us a computer name but he gave a port and/or application name;
        // in this case we need to have a computer name, to form the connection string;
        // assume localhost
        computername = "localhost";
    }

    string connection = computername;
    if (port != 0)
    {
        connection = connection + ":" + port;
    }

    if (applicationname != null)
    {
        connection = connection + "/" + applicationname;
    }

    return connection;
}
/// <summary>
/// Builds an IWSManResourceLocator from a resource URI plus an optional selector
/// set (encoded into the URI as "?k=v+k2=v2"), option set, fragment path and
/// fragment dialect. COM failures become terminating cmdlet errors.
/// </summary>
internal IWSManResourceLocator InitializeResourceLocator(Hashtable optionset, Hashtable selectorset, string fragment, Uri dialect, IWSManEx wsmanObj, Uri resourceuri)
{
    string resource = null;
    if (resourceuri != null)
    {
        resource = resourceuri.ToString();
    }

    if (selectorset != null)
    {
        // Selectors are appended to the URI itself: "uri?key=value+key=value".
        resource = resource + "?";
        int i = 0;
        foreach (DictionaryEntry entry in selectorset)
        {
            i++;
            resource = resource + entry.Key.ToString() + "=" + entry.Value.ToString();
            if (i < selectorset.Count)
                resource += "+";
        }
    }

    IWSManResourceLocator m_resource = null;
    try
    {
        m_resource = (IWSManResourceLocator)wsmanObj.CreateResourceLocator(resource);

        if (optionset != null)
        {
            foreach (DictionaryEntry entry in optionset)
            {
                // NOTE(review): ToString() never yields null, so the first branch is
                // unreachable; 'entry.Value == null' was probably intended. Confirm.
                if (entry.Value.ToString() == null)
                {
                    m_resource.AddOption(entry.Key.ToString(), null, 1);
                }
                else
                {
                    m_resource.AddOption(entry.Key.ToString(), entry.Value, 1);
                }
            }
        }

        if (!string.IsNullOrEmpty(fragment))
        {
            m_resource.FragmentPath = fragment;
        }

        if (dialect != null)
        {
            m_resource.FragmentDialect = dialect.ToString();
        }
    }
    catch (COMException ex)
    {
        // Route COM errors through the cmdlet/provider error channel.
        AssertError(ex.Message, false, null);
    }
    return m_resource;
}
/// <summary>
/// Used to resolve authentication from the parameters chosen by the user.
/// User has the following options:
/// 1. AuthMechanism + Credential
/// 2. CertiticateThumbPrint
///
/// All the above are mutually exclusive.
/// </summary>
/// <exception cref="InvalidOperationException">
/// If there is ambiguity as specified above.
/// </exception>
/// <summary>
/// Rejects ambiguous authentication input: a certificate thumbprint may not be
/// combined with an explicit credential, nor with any authentication mechanism
/// other than Default or ClientCertificate.
/// </summary>
/// <exception cref="InvalidOperationException">On ambiguous combinations.</exception>
static internal void ValidateSpecifiedAuthentication(AuthenticationMechanism authentication, PSCredential credential, string certificateThumbprint)
{
    bool thumbprintGiven = certificateThumbprint != null;

    if (credential != null && thumbprintGiven)
    {
        throw new InvalidOperationException(FormatResourceMsgFromResourcetextS(
            "AmbiguosAuthentication",
            "CertificateThumbPrint", "credential"));
    }

    if (thumbprintGiven &&
        (authentication != AuthenticationMechanism.Default) &&
        (authentication != AuthenticationMechanism.ClientCertificate))
    {
        throw new InvalidOperationException(FormatResourceMsgFromResourcetextS(
            "AmbiguosAuthentication",
            "CertificateThumbPrint", authentication.ToString()));
    }
}
/// <summary>
/// Creates and configures an IWSManSession: translates the chosen authentication
/// mechanism, credential/thumbprint, proxy settings and session options into
/// WSManSessionFlags and connection options. COM failures while creating the
/// session become terminating cmdlet errors.
/// </summary>
internal IWSManSession CreateSessionObject(IWSManEx wsmanObject, AuthenticationMechanism authentication, SessionOption sessionoption, PSCredential credential, string connectionString, string certificateThumbprint, bool usessl)
{
    ValidateSpecifiedAuthentication(authentication, credential, certificateThumbprint);

    ////if authentication is given
    int sessionFlags = 0;

    // Map the mechanism onto the corresponding WSManSessionFlags bits.
    if (authentication.ToString() != null)
    {
        if (authentication.Equals(AuthenticationMechanism.None))
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseNoAuthentication;
        }

        if (authentication.Equals(AuthenticationMechanism.Basic))
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseBasic | (int)WSManSessionFlags.WSManFlagCredUserNamePassword;
        }

        if (authentication.Equals(AuthenticationMechanism.Negotiate))
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseNegotiate;
        }

        if (authentication.Equals(AuthenticationMechanism.Kerberos))
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseKerberos;
        }

        if (authentication.Equals(AuthenticationMechanism.Digest))
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseDigest | (int)WSManSessionFlags.WSManFlagCredUserNamePassword;
        }

        if (authentication.Equals(AuthenticationMechanism.Credssp))
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseCredSsp | (int)WSManSessionFlags.WSManFlagCredUserNamePassword;
        }

        if (authentication.Equals(AuthenticationMechanism.ClientCertificate))
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseClientCertificate;
        }
    }

    IWSManConnectionOptionsEx2 connObject = (IWSManConnectionOptionsEx2)wsmanObject.CreateConnectionOptions();
    if (credential != null)
    {
        //connObject = (IWSManConnectionOptionsEx2)wsmanObject.CreateConnectionOptions();
        System.Net.NetworkCredential nwCredential = new System.Net.NetworkCredential();
        if (credential.UserName != null)
        {
            nwCredential = credential.GetNetworkCredential();
            if (String.IsNullOrEmpty(nwCredential.Domain))
            {
                if ( authentication.Equals(AuthenticationMechanism.Digest) || authentication.Equals(AuthenticationMechanism.Basic) )
                {
                    connObject.UserName = nwCredential.UserName;
                }
                else
                {
                    // just wanted to not use null domain, empty is actually fine
                    connObject.UserName = "\\" + nwCredential.UserName;
                }
            }
            else
            {
                connObject.UserName = nwCredential.Domain + "\\" + nwCredential.UserName;
            }

            connObject.Password = nwCredential.Password;
            // NOTE(review): this condition is always true (a single value cannot
            // equal both Credssp and Digest), so the flag is set whenever a user
            // name is present. '&&' was probably intended — confirm before changing.
            if (!authentication.Equals(AuthenticationMechanism.Credssp) || !authentication.Equals(AuthenticationMechanism.Digest) || authentication.Equals(AuthenticationMechanism.Basic))
            {
                sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagCredUserNamePassword;
            }
        }
    }

    if (certificateThumbprint != null)
    {
        connObject.CertificateThumbprint = certificateThumbprint;
        sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseClientCertificate;
    }

    if (sessionoption != null)
    {
        // Proxy access type, proxy authentication scheme, then proxy credentials.
        if (sessionoption.ProxyAuthentication != 0)
        {
            int ProxyAccessflags = 0;
            int ProxyAuthenticationFlags = 0;
            if (sessionoption.ProxyAccessType.Equals(ProxyAccessType.ProxyIEConfig))
            {
                ProxyAccessflags = connObject.ProxyIEConfig();
            }
            else if (sessionoption.ProxyAccessType.Equals(ProxyAccessType.ProxyAutoDetect))
            {
                ProxyAccessflags = connObject.ProxyAutoDetect();
            }
            else if (sessionoption.ProxyAccessType.Equals(ProxyAccessType.ProxyNoProxyServer))
            {
                ProxyAccessflags = connObject.ProxyNoProxyServer();
            }
            else if (sessionoption.ProxyAccessType.Equals(ProxyAccessType.ProxyWinHttpConfig))
            {
                ProxyAccessflags = connObject.ProxyWinHttpConfig();
            }

            if (sessionoption.ProxyAuthentication.Equals(ProxyAuthentication.Basic))
            {
                ProxyAuthenticationFlags = connObject.ProxyAuthenticationUseBasic();
            }
            else if (sessionoption.ProxyAuthentication.Equals(ProxyAuthentication.Negotiate))
            {
                ProxyAuthenticationFlags = connObject.ProxyAuthenticationUseNegotiate();
            }
            else if (sessionoption.ProxyAuthentication.Equals(ProxyAuthentication.Digest))
            {
                ProxyAuthenticationFlags = connObject.ProxyAuthenticationUseDigest();
            }

            if (sessionoption.ProxyCredential != null)
            {
                try
                {
                    connObject.SetProxy(ProxyAccessflags, ProxyAuthenticationFlags, sessionoption.ProxyCredential.UserName, sessionoption.ProxyCredential.Password);
                }
                catch (Exception ex)
                {
                    AssertError(ex.Message, false, null);
                }
            }
            else
            {
                connObject.SetProxy((int)sessionoption.ProxyAccessType, (int)sessionoption.ProxyAuthentication, null, null);
            }
        }

        if (sessionoption.SkipCACheck)
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagSkipCACheck;
        }

        if (sessionoption.SkipCNCheck)
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagSkipCNCheck;
        }

        if (sessionoption.SPNPort > 0)
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagEnableSpnServerPort;
        }

        if (sessionoption.UseUtf16)
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUtf16;
        }
        else
        {
            //If UseUtf16 is false, then default Encoding is Utf8
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUtf8;
        }

        if (!sessionoption.UseEncryption)
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagNoEncryption;
        }

        if (sessionoption.SkipRevocationCheck)
        {
            sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagSkipRevocationCheck;
        }
    }
    else
    {
        //If SessionOption is null then, default Encoding is Utf8
        sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUtf8;
    }

    if (usessl)
    {
        sessionFlags = sessionFlags | (int)WSManSessionFlags.WSManFlagUseSsl;
    }

    IWSManSession m_SessionObj = null;
    try
    {
        m_SessionObj = (IWSManSession)wsmanObject.CreateSession(connectionString, sessionFlags, connObject);
        if (sessionoption != null)
        {
            if (sessionoption.OperationTimeout > 0)
            {
                m_SessionObj.Timeout = sessionoption.OperationTimeout;
            }
        }
    }
    catch (COMException ex)
    {
        AssertError(ex.Message, false, null);
    }
    return m_SessionObj;
}
/// <summary>
/// Disposes the cached reader and stream from ReadFile (if any) and clears the
/// fields so a subsequent call is a no-op. Dispose is idempotent, so this is
/// safe even when ReadFile already disposed them in its finally block.
/// </summary>
internal void CleanUp()
{
    StreamReader reader = _sr;
    _sr = null;
    if (reader != null)
    {
        reader.Dispose();
    }

    FileStream stream = _fs;
    _fs = null;
    if (stream != null)
    {
        stream.Dispose();
    }
}
/// <summary>
/// Builds a "key1=value1+key2=value2" filter string from a selector set.
/// Entries with a null key or null value are skipped; an empty or fully-skipped
/// set yields the empty string.
/// </summary>
internal string GetFilterString(Hashtable seletorset)
{
    StringBuilder filter = new StringBuilder();
    foreach (DictionaryEntry entry in seletorset)
    {
        if (entry.Key != null && entry.Value != null)
        {
            filter.Append(entry.Key.ToString());
            filter.Append("=");
            filter.Append(entry.Value.ToString());
            filter.Append("+");
        }
    }

    // BUG FIX: the original removed the trailing '+' unconditionally (and via a
    // wasteful filter.ToString().Length), throwing ArgumentOutOfRangeException
    // for an empty selector set or one with only null keys/values.
    if (filter.Length > 0)
    {
        filter.Remove(filter.Length - 1, 1);
    }

    return filter.ToString();
}
/// <summary>
/// Raises a terminating "WsManError". WSMan errors arrive as an XML document and
/// are surfaced as the document's full text; plain messages pass through as-is.
/// The error is routed through the cmdlet when one is attached, otherwise
/// through the provider. (The two previously duplicated branches are merged;
/// behavior is unchanged.)
/// </summary>
internal void AssertError(string ErrorMessage, bool IsWSManError, object targetobject)
{
    string message = ErrorMessage;
    if (IsWSManError)
    {
        XmlDocument ErrorDoc = new XmlDocument();
        ErrorDoc.LoadXml(ErrorMessage);
        message = ErrorDoc.OuterXml;
    }

    InvalidOperationException ex = new InvalidOperationException(message);
    ErrorRecord er = new ErrorRecord(ex, "WsManError", ErrorCategory.InvalidOperation, targetobject);

    // Route through whichever host constructed this helper instance.
    if (cmdletname != null)
    {
        cmdletname.ThrowTerminatingError(er);
    }
    else
    {
        _provider.ThrowTerminatingError(er);
    }
}
/// <summary>
/// Appends the selector-set filter to a resource URI for "remove" operations
/// ("uri?k=v+k2=v2"); all other operations return the URI with a bare trailing "?".
/// </summary>
// NOTE(review): the 'filter' parameter is unused — the filter is always rebuilt
// from 'selectorset'. Confirm no external caller relies on it before removing.
internal string GetURIWithFilter(string uri, string filter, Hashtable selectorset, string operation)
{
    StringBuilder sburi = new StringBuilder();
    sburi.Append(uri);
    sburi.Append("?");

    if (operation.Equals("remove", StringComparison.OrdinalIgnoreCase))
    {
        sburi.Append(GetFilterString(selectorset));
        // Drop the "?" again if the filter turned out to be empty.
        if (sburi.ToString().EndsWith("?", StringComparison.OrdinalIgnoreCase))
        {
            sburi.Remove(sburi.Length - 1, 1);
        }
    }
    return sburi.ToString();
}
/// <summary>
/// This method is used by Connect-WsMan Cmdlet and New-Item of WsMan Provider to create connection to WsMan
/// </summary>
/// <param name="ParameterSetName"></param>
/// <param name="connectionuri"></param>
/// <param name="port"></param>
/// <param name="computername"></param>
/// <param name="applicationname"></param>
/// <param name="usessl"></param>
/// <param name="authentication"></param>
/// <param name="sessionoption"></param>
/// <param name="credential"></param>
/// <param name="certificateThumbprint"></param>
internal void CreateWsManConnection(string ParameterSetName, Uri connectionuri, int port, string computername, string applicationname, bool usessl, AuthenticationMechanism authentication, SessionOption sessionoption, PSCredential credential, string certificateThumbprint)
{
    IWSManEx m_wsmanObject = (IWSManEx)new WSManClass();
    try
    {
        string connectionStr = CreateConnectionString(connectionuri, port, computername, applicationname);
        if (connectionuri != null)
        {
            //in the format http(s)://server[:port/applicationname]
            // Recover the bare computer name from the URI. A malformed URI makes
            // the indexing below throw IndexOutOfRangeException, handled further down.
            string[] constrsplit = connectionStr.Split(new string[] { ":" + port + "/" + applicationname }, StringSplitOptions.None);
            string[] constrsplit1 = constrsplit[0].Split(new string[] { "//" }, StringSplitOptions.None);
            computername = constrsplit1[1].Trim();
        }

        IWSManSession m_session = CreateSessionObject(m_wsmanObject, authentication, sessionoption, credential, connectionStr, certificateThumbprint, usessl);
        // Identify() round-trips to the endpoint, validating the connection up front.
        m_session.Identify(0);
        string key = computername;
        if (key == null)
        {
            key = "localhost";
        }
        AddtoDictionary(key, m_session);
    }
    catch (IndexOutOfRangeException)
    {
        AssertError(_resourceMgr.GetString("NotProperURI"), false, connectionuri);
    }
    // NOTE(review): this broad catch converts ANY failure (including programming
    // errors) into a terminating cmdlet error — confirm intentional before narrowing.
    catch (Exception ex)
    {
        AssertError(ex.Message, false, computername);
    }
    finally
    {
        // The COM object reports its own last-error state; surface it even when
        // no .NET exception was thrown.
        if (!String.IsNullOrEmpty(m_wsmanObject.Error))
        {
            AssertError(m_wsmanObject.Error, true, computername);
        }
    }
}
/// <summary>
/// Verifies all the registry keys are set as expected. In case of failure, retries every second
/// for up to 60 attempts before returning false.
/// </summary>
/// <param name="AllowFreshCredentialsValueShouldBePresent">True if trying to Enable CredSSP.</param>
/// <param name="DelegateComputer">Names of the delegate computers.</param>
/// <param name="applicationname">Name of the application.</param>
/// <returns>True if valid.</returns>
internal bool ValidateCreadSSPRegistryRetry(bool AllowFreshCredentialsValueShouldBePresent, string[] DelegateComputer, string applicationname)
{
    const int maxAttempts = 60;
    for (int attempt = 0; attempt < maxAttempts; attempt++)
    {
        if (ValidateCredSSPRegistry(AllowFreshCredentialsValueShouldBePresent, DelegateComputer, applicationname))
        {
            return true;
        }
        // Only wait when another attempt remains; the previous implementation
        // slept an extra, pointless second after the final failed attempt.
        if (attempt < maxAttempts - 1)
        {
            Thread.Sleep(1000);
        }
    }
    return false;
}
/// <summary>
/// Checks the CredentialsDelegation\AllowFreshCredentials policy values in the registry
/// against the expected CredSSP state.
/// </summary>
/// <param name="AllowFreshCredentialsValueShouldBePresent">True when entries for <paramref name="applicationname"/> are expected to exist (Enable-CredSSP); false when they must be absent (Disable-CredSSP).</param>
/// <param name="DelegateComputer">Delegate computers that must each appear as "applicationname/computer" values.</param>
/// <param name="applicationname">Application prefix the registry values are matched against.</param>
/// <returns>True when the registry matches the expected state.</returns>
internal bool ValidateCredSSPRegistry(bool AllowFreshCredentialsValueShouldBePresent, string[] DelegateComputer, string applicationname)
{
    // Hold the Group Policy critical section while reading so the policy
    // engine does not rewrite these keys mid-check.
    System.IntPtr NakedGPOCriticalSection = GpoNativeApi.EnterCriticalPolicySection(true);
    try
    {
        // NOTE(review): the RegistryKey handles opened below are never disposed —
        // confirm whether that is intentional.
        RegistryKey rGPOLocalMachineKey = Registry.LocalMachine.OpenSubKey(
            Registry_Path_Credentials_Delegation + @"\CredentialsDelegation",
#if !CORECLR
            RegistryKeyPermissionCheck.ReadWriteSubTree,
#endif
            System.Security.AccessControl.RegistryRights.FullControl);
        if (rGPOLocalMachineKey != null)
        {
            rGPOLocalMachineKey = rGPOLocalMachineKey.OpenSubKey(Key_Allow_Fresh_Credentials,
#if !CORECLR
                RegistryKeyPermissionCheck.ReadWriteSubTree,
#endif
                System.Security.AccessControl.RegistryRights.FullControl);
            if (rGPOLocalMachineKey == null)
            {
                // No AllowFreshCredentials key at all: valid only when we expect it absent.
                return !AllowFreshCredentialsValueShouldBePresent;
            }
            string[] valuenames = rGPOLocalMachineKey.GetValueNames();
            if (valuenames.Length <= 0)
            {
                // Key exists but holds no values: treated the same as absent.
                return !AllowFreshCredentialsValueShouldBePresent;
            }
            List<string> RegValues = new List<string>();
            foreach (string value in valuenames)
            {
                object keyvalue = rGPOLocalMachineKey.GetValue(value);
                if (keyvalue != null && keyvalue.ToString().StartsWith(applicationname, StringComparison.OrdinalIgnoreCase))
                {
                    if (!AllowFreshCredentialsValueShouldBePresent)
                    {
                        // If calling Disable-CredSSP .. no value should start with "applicationName" regardless of the computer.
                        return false;
                    }
                    RegValues.Add(keyvalue.ToString());
                }
            }
            if (AllowFreshCredentialsValueShouldBePresent)
            {
                // For all the keys that starts with "applicationName" make sure the delegated computer is listed.
                foreach (string comp in DelegateComputer)
                {
                    if (!RegValues.Contains(applicationname + "/" + comp))
                    {
                        return false;
                    }
                }
            }
        }
    }
    finally
    {
        // Always release the policy critical section. NOTE(review): the boolean
        // result is captured but never checked — confirm failures can be ignored here.
        bool GPOReleaseCriticalSection = GpoNativeApi.LeaveCriticalPolicySection(NakedGPOCriticalSection);
    }
    return true;
}
}
internal static class WSManResourceLoader
{
    /// <summary>
    /// Loads the resource strings from
    /// %windir%\System32\Winrm\[hexadecimal language folder]\winrm.ini into
    /// <see cref="ResourceValueCache"/>. Lines have the form: key = "value".
    /// Does nothing when the file does not exist.
    /// </summary>
    internal static void LoadResourceData()
    {
        string filepath = System.Environment.ExpandEnvironmentVariables("%Windir%") + "\\System32\\Winrm\\" +
#if CORECLR
            "0409" /* TODO: don't assume it is always English on CSS? */
#else
            String.Concat("0", String.Format(CultureInfo.CurrentCulture, "{0:x2}", checked((uint)CultureInfo.CurrentUICulture.LCID)))
#endif
            + "\\" + "winrm.ini";
        if (!File.Exists(filepath))
        {
            return;
        }
        // 'using' guarantees the file handle is released even if parsing throws.
        // (The previous implementation never disposed the stream/reader, and its
        // "catch (IOException e) { throw (e); }" only reset the stack trace —
        // both removed; IOExceptions still propagate to the caller unchanged.)
        using (FileStream fs = new FileStream(filepath, FileMode.Open, FileAccess.Read))
        using (StreamReader sr = new StreamReader(fs))
        {
            while (!sr.EndOfStream)
            {
                string line = sr.ReadLine();
                if (line.Contains("="))
                {
                    // Split on the first '=' only; values may themselves contain '='.
                    string[] arr = line.Split(new char[] { '=' }, 2);
                    string key = arr[0].Trim();
                    // First occurrence of a key wins, as before.
                    if (!ResourceValueCache.ContainsKey(key))
                    {
                        string value = arr[1].TrimStart(new char[] { '"' }).TrimEnd(new char[] { '"' });
                        ResourceValueCache.Add(key, value.Trim());
                    }
                }
            }
        }
    }
    /// <summary>
    /// Get the resource value from WinRm.ini
    /// from %windir%\system32\winrm\[Hexadecimal Language Folder]\winrm.ini
    /// </summary>
    /// <param name="Key">Resource key to look up; surrounding whitespace is ignored.</param>
    /// <returns>The trimmed resource value, or an empty string when the key is not found.</returns>
    internal static string GetResourceString(string Key)
    {
        // Lazily populate the cache on first use.
        if (ResourceValueCache.Count <= 0)
        {
            LoadResourceData();
        }
        // Single dictionary lookup instead of ContainsKey followed by TryGetValue.
        string value;
        if (!ResourceValueCache.TryGetValue(Key.Trim(), out value))
        {
            value = "";
        }
        return value.Trim();
    }
    /// <summary>
    /// Cache of winrm.ini resource strings, keyed by resource name.
    /// </summary>
    private static readonly Dictionary<string, string> ResourceValueCache = new Dictionary<string, string>();
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Text;
using Xunit;
using Xunit.NetCore.Extensions;
namespace System.IO.Tests
{
/// <summary>
/// Tests for Directory.Delete(string). The Delete virtual is the single
/// indirection point, so derived classes (e.g. Directory_Delete_str_bool)
/// can re-run this whole suite against other overloads.
/// </summary>
public class Directory_Delete_str : FileSystemTest
{
    #region Utilities
    // Overridden by derived classes to route the suite through other Delete overloads.
    public virtual void Delete(string path)
    {
        Directory.Delete(path);
    }
    #endregion
    #region UniversalTests
    [Fact]
    public void NullParameters()
    {
        Assert.Throws<ArgumentNullException>(() => Delete(null));
    }
    [Fact]
    public void InvalidParameters()
    {
        Assert.Throws<ArgumentException>(() => Delete(string.Empty));
    }
    [Fact]
    public void ShouldThrowIOExceptionIfContainedFileInUse()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());
        // Keep the contained file open for the duration of the delete attempt.
        using (File.Create(Path.Combine(testDir.FullName, GetTestFileName())))
        {
            Assert.Throws<IOException>(() => Delete(testDir.FullName));
        }
        Assert.True(testDir.Exists);
    }
    [Fact]
    public void ShouldThrowIOExceptionForDirectoryWithFiles()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());
        // File handle is closed; non-recursive delete must still refuse a non-empty directory.
        File.Create(Path.Combine(testDir.FullName, GetTestFileName())).Dispose();
        Assert.Throws<IOException>(() => Delete(testDir.FullName));
        Assert.True(testDir.Exists);
    }
    [Fact]
    public void DirectoryWithSubdirectories()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());
        testDir.CreateSubdirectory(GetTestFileName());
        Assert.Throws<IOException>(() => Delete(testDir.FullName));
        Assert.True(testDir.Exists);
    }
    [Fact]
    [OuterLoop]
    public void DeleteRoot()
    {
        Assert.Throws<IOException>(() => Delete(Path.GetPathRoot(Directory.GetCurrentDirectory())));
    }
    [Fact]
    public void PositiveTest()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());
        Delete(testDir.FullName);
        Assert.False(testDir.Exists);
    }
    [Theory, MemberData(nameof(TrailingCharacters))]
    public void MissingFile_ThrowsDirectoryNotFound(char trailingChar)
    {
        string path = GetTestFilePath() + trailingChar;
        Assert.Throws<DirectoryNotFoundException>(() => Delete(path));
    }
    [Theory, MemberData(nameof(TrailingCharacters))]
    public void MissingDirectory_ThrowsDirectoryNotFound(char trailingChar)
    {
        string path = Path.Combine(GetTestFilePath(), "file" + trailingChar);
        Assert.Throws<DirectoryNotFoundException>(() => Delete(path));
    }
    [Fact]
    public void ShouldThrowIOExceptionDeletingCurrentDirectory()
    {
        Assert.Throws<IOException>(() => Delete(Directory.GetCurrentDirectory()));
    }
    [ConditionalFact(nameof(CanCreateSymbolicLinks))]
    public void DeletingSymLinkDoesntDeleteTarget()
    {
        var path = GetTestFilePath();
        var linkPath = GetTestFilePath();
        Directory.CreateDirectory(path);
        Assert.True(MountHelper.CreateSymbolicLink(linkPath, path, isDirectory: true));
        // Both the symlink and the target exist
        Assert.True(Directory.Exists(path), "path should exist");
        Assert.True(Directory.Exists(linkPath), "linkPath should exist");
        // Delete the symlink
        Directory.Delete(linkPath);
        // Target should still exist
        Assert.True(Directory.Exists(path), "path should still exist");
        Assert.False(Directory.Exists(linkPath), "linkPath should no longer exist");
    }
    [ConditionalFact(nameof(UsingNewNormalization))]
    public void ExtendedDirectoryWithSubdirectories()
    {
        // Same as DirectoryWithSubdirectories, but via the \\?\ extended-path prefix.
        DirectoryInfo testDir = Directory.CreateDirectory(IOInputs.ExtendedPrefix + GetTestFilePath());
        testDir.CreateSubdirectory(GetTestFileName());
        Assert.Throws<IOException>(() => Delete(testDir.FullName));
        Assert.True(testDir.Exists);
    }
    [ConditionalFact(nameof(LongPathsAreNotBlocked), nameof(UsingNewNormalization))]
    public void LongPathExtendedDirectory()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(IOServices.GetPath(IOInputs.ExtendedPrefix + TestDirectory, characterCount: 500));
        Delete(testDir.FullName);
        Assert.False(testDir.Exists);
    }
    #endregion
    #region PlatformSpecific
    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)] // Deleting readonly directory throws IOException
    public void WindowsDeleteReadOnlyDirectory()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());
        testDir.Attributes = FileAttributes.ReadOnly;
        Assert.Throws<IOException>(() => Delete(testDir.FullName));
        Assert.True(testDir.Exists);
        // Clear ReadOnly so test-directory cleanup can remove the directory.
        testDir.Attributes = FileAttributes.Normal;
    }
    [ConditionalFact(nameof(UsingNewNormalization))]
    [PlatformSpecific(TestPlatforms.Windows)] // Deleting extended readonly directory throws IOException
    public void WindowsDeleteExtendedReadOnlyDirectory()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(IOInputs.ExtendedPrefix + GetTestFilePath());
        testDir.Attributes = FileAttributes.ReadOnly;
        Assert.Throws<IOException>(() => Delete(testDir.FullName));
        Assert.True(testDir.Exists);
        // Clear ReadOnly so test-directory cleanup can remove the directory.
        testDir.Attributes = FileAttributes.Normal;
    }
    [Fact]
    [PlatformSpecific(TestPlatforms.AnyUnix)] // Deleting readOnly directory succeeds
    public void UnixDeleteReadOnlyDirectory()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());
        testDir.Attributes = FileAttributes.ReadOnly;
        Delete(testDir.FullName);
        Assert.False(testDir.Exists);
    }
    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)] // Deleting hidden directory succeeds
    public void WindowsShouldBeAbleToDeleteHiddenDirectory()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());
        testDir.Attributes = FileAttributes.Hidden;
        Delete(testDir.FullName);
        Assert.False(testDir.Exists);
    }
    [ConditionalFact(nameof(UsingNewNormalization))]
    [PlatformSpecific(TestPlatforms.Windows)] // Deleting extended hidden directory succeeds
    public void WindowsShouldBeAbleToDeleteExtendedHiddenDirectory()
    {
        DirectoryInfo testDir = Directory.CreateDirectory(IOInputs.ExtendedPrefix + GetTestFilePath());
        testDir.Attributes = FileAttributes.Hidden;
        Delete(testDir.FullName);
        Assert.False(testDir.Exists);
    }
    [Fact]
    [PlatformSpecific(TestPlatforms.AnyUnix)] // Deleting hidden directory succeeds
    public void UnixShouldBeAbleToDeleteHiddenDirectory()
    {
        // On Unix a leading '.' makes the directory hidden.
        string testDir = "." + GetTestFileName();
        Directory.CreateDirectory(Path.Combine(TestDirectory, testDir));
        Assert.True(0 != (new DirectoryInfo(Path.Combine(TestDirectory, testDir)).Attributes & FileAttributes.Hidden));
        Delete(Path.Combine(TestDirectory, testDir));
        Assert.False(Directory.Exists(testDir));
    }
    [Fact]
    [OuterLoop("Needs sudo access")]
    [PlatformSpecific(TestPlatforms.Linux)]
    [Trait(XunitConstants.Category, XunitConstants.RequiresElevation)]
    public void Unix_NotFoundDirectory_ReadOnlyVolume()
    {
        ReadOnly_FileSystemHelper(readOnlyDirectory =>
        {
            Assert.Throws<DirectoryNotFoundException>(() => Delete(Path.Combine(readOnlyDirectory, "DoesNotExist")));
        });
    }
    #endregion
}
/// <summary>
/// Runs the Directory.Delete(string) suite through the (string, bool) overload
/// and adds recursive-delete-specific cases.
/// </summary>
public class Directory_Delete_str_bool : Directory_Delete_str
{
    #region Utilities
    // Route the inherited tests through the two-argument overload, non-recursively.
    public override void Delete(string path)
    {
        Directory.Delete(path, false);
    }
    // Entry point for the recursive cases below.
    public virtual void Delete(string path, bool recursive)
    {
        Directory.Delete(path, recursive);
    }
    #endregion
    [Fact]
    public void RecursiveDelete()
    {
        // A directory containing both a file and a subdirectory must be removable recursively.
        DirectoryInfo dir = Directory.CreateDirectory(GetTestFilePath());
        File.Create(Path.Combine(dir.FullName, GetTestFileName())).Dispose();
        dir.CreateSubdirectory(GetTestFileName());

        Delete(dir.FullName, true);

        Assert.False(dir.Exists);
    }
    [Fact]
    public void RecursiveDeleteWithTrailingSlash()
    {
        DirectoryInfo dir = Directory.CreateDirectory(GetTestFilePath());

        Delete(dir.FullName + Path.DirectorySeparatorChar, true);

        Assert.False(dir.Exists);
    }
    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)]
    [OuterLoop("This test is very slow.")]
    public void RecursiveDelete_DeepNesting()
    {
        // Build a directory tree roughly 2000 levels deep, then delete it
        // recursively from the root. The depth can be lowered if it proves
        // problematic on low-memory machines (or skipped in such environments).
        //
        // Debug builds overflowed the stack below 1000 levels; on a 32GB box
        // failures showed up around 1300, and optimizations to the Delete
        // helper pushed that to roughly 3200. Release builds tolerate far more
        // (10K+ works, but can take ~5 minutes on an SSD).
        string root = GetTestFilePath();
        var deepPath = new StringBuilder(5000);
        deepPath.Append(root);
        for (int depth = 0; depth < 2000; depth++)
        {
            deepPath.Append(@"\a");
        }
        Directory.CreateDirectory(deepPath.ToString());

        Delete(root, recursive: true);
    }
    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)] // Recursive delete throws IOException if directory contains in-use file
    public void RecursiveDelete_ShouldThrowIOExceptionIfContainedFileInUse()
    {
        DirectoryInfo dir = Directory.CreateDirectory(GetTestFilePath());
        // While a contained file is held open, even recursive delete must fail.
        using (File.Create(Path.Combine(dir.FullName, GetTestFileName())))
        {
            Assert.Throws<IOException>(() => Delete(dir.FullName, true));
        }
        Assert.True(dir.Exists);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Xml;
using System.Xml.Schema;
using XsdDocumentation.Markup;
namespace XsdDocumentation.Model
{
/// <summary>
/// Generates the MAML documentation output (index, content file, topic files
/// and media) for an XSD schema set.
/// </summary>
public sealed class ContentGenerator
{
    // Shared services (configuration, schema/topic managers, message reporting)
    // for the whole generation run. Assigned once in the constructor.
    private readonly Context _context;
    /// <summary>
    /// Initializes a new generator for the given reporter and configuration.
    /// </summary>
    public ContentGenerator(IMessageReporter messageReporter, Configuration configuration)
    {
        _context = new Context(messageReporter, configuration);
        MediaItems = new List<MediaItem>();
        TopicFiles = new List<string>();
    }
    // Output locations; populated by Generate().
    public string ContentFile { get; private set; }
    public string IndexFile { get; private set; }
    public string TopicsFolder { get; private set; }
    public string MediaFolder { get; private set; }
    public List<string> TopicFiles { get; private set; }
    public List<MediaItem> MediaItems { get; private set; }
    /// <summary>
    /// Runs the full generation: index, content file, topic files, then media.
    /// </summary>
    public void Generate()
    {
        TopicsFolder = Path.Combine(_context.Configuration.OutputFolderPath, "xsdTopics");
        ContentFile = Path.Combine(TopicsFolder, "xsd.content");
        IndexFile = Path.Combine(TopicsFolder, "xsd.index");
        MediaFolder = Path.Combine(_context.Configuration.OutputFolderPath, "xsdMedia");
        GenerateIndex();
        GenerateContentFile();
        GenerateTopicFiles();
        GenerateMediaFiles();
    }
    /// <summary>Writes the keyword index file for all topics.</summary>
    private void GenerateIndex()
    {
        var topicIndex = new TopicIndex();
        topicIndex.Load(_context.TopicManager);
        topicIndex.Save(IndexFile);
    }
    /// <summary>Creates the topics folder and writes one .aml file per topic.</summary>
    private void GenerateTopicFiles()
    {
        Directory.CreateDirectory(TopicsFolder);
        GenerateTopicFiles(_context.TopicManager.Topics);
    }
    /// <summary>
    /// Recursively writes a topic file for each topic, dispatching on its type.
    /// Also records each generated file name in TopicFiles.
    /// </summary>
    private void GenerateTopicFiles(IEnumerable<Topic> topics)
    {
        foreach (var topic in topics)
        {
            topic.FileName = GetAbsoluteFileName(TopicsFolder, topic);
            TopicFiles.Add(topic.FileName);
            switch (topic.TopicType)
            {
                case TopicType.SchemaSet:
                    GenerateSchemaSetTopic(topic);
                    break;
                case TopicType.Namespace:
                    GenerateNamespaceTopic(topic);
                    break;
                case TopicType.Schema:
                    GenerateSchemaTopic(topic);
                    break;
                case TopicType.Element:
                    GenerateElementTopic(topic);
                    break;
                case TopicType.Attribute:
                    GenerateAttributeTopic(topic);
                    break;
                case TopicType.AttributeGroup:
                    GenerateAttributeGroup(topic);
                    break;
                case TopicType.Group:
                    GenerateGroupTopic(topic);
                    break;
                case TopicType.SimpleType:
                    GenerateSimpleTypeTopic(topic);
                    break;
                case TopicType.ComplexType:
                    GenerateComplexTypeTopic(topic);
                    break;
                case TopicType.RootSchemasSection:
                case TopicType.RootElementsSection:
                case TopicType.SchemasSection:
                case TopicType.ElementsSection:
                case TopicType.AttributesSection:
                case TopicType.AttributeGroupsSection:
                case TopicType.GroupsSection:
                case TopicType.SimpleTypesSection:
                case TopicType.ComplexTypesSection:
                    GenerateOverviewTopic(topic);
                    break;
                default:
                    throw ExceptionBuilder.UnhandledCaseLabel(topic.TopicType);
            }
            GenerateTopicFiles(topic.Children);
        }
    }
    /// <summary>
    /// Writes the schema-set root topic. With a namespace container configured
    /// the topic only lists namespaces; otherwise it inlines the full
    /// namespace-level sections.
    /// </summary>
    private void GenerateSchemaSetTopic(Topic topic)
    {
        if (_context.Configuration.NamespaceContainer)
        {
            using (var stream = File.Create(topic.FileName))
            using (var writer = new MamlWriter(stream))
            {
                writer.StartTopic(topic.Id);
                writer.WriteIntroductionForSchemaSet(_context);
                writer.WriteRemarksSectionForSchemaSet(_context);
                writer.WriteExamplesSectionForSchemaSet(_context);
                writer.WriteNamespacesSection(_context, _context.SchemaSetManager.GetNamespaces());
                writer.EndTopic();
            }
        }
        else
        {
            var contentFinder = new NamespaceContentFinder(_context.SchemaSetManager, topic.Namespace);
            contentFinder.Traverse(_context.SchemaSetManager.SchemaSet);
            using (var stream = File.Create(topic.FileName))
            using (var writer = new MamlWriter(stream))
            {
                writer.StartTopic(topic.Id);
                writer.WriteIntroductionForSchemaSet(_context);
                writer.WriteRemarksSectionForSchemaSet(_context);
                writer.WriteExamplesSectionForSchemaSet(_context);
                writer.WriteRootSchemasSection(_context, _context.SchemaSetManager.GetNamespaceRootSchemas(topic.Namespace));
                writer.WriteRootElementsSection(_context, _context.SchemaSetManager.GetNamespaceRootElements(topic.Namespace));
                writer.WriteSchemasSection(_context, contentFinder.Schemas);
                writer.WriteElementsSection(_context, contentFinder.Elements);
                writer.WriteAttributesSection(_context, contentFinder.Attributes);
                writer.WriteGroupsSection(_context, contentFinder.Groups);
                writer.WriteAttributeGroupsSection(_context, contentFinder.AttributeGroups);
                writer.WriteSimpleTypesSection(_context, contentFinder.SimpleTypes);
                writer.WriteComplexTypesSection(_context, contentFinder.ComplexTypes);
                writer.EndTopic();
            }
        }
    }
    /// <summary>Writes a namespace topic listing every kind of member in the namespace.</summary>
    private void GenerateNamespaceTopic(Topic topic)
    {
        var contentFinder = new NamespaceContentFinder(_context.SchemaSetManager, topic.Namespace);
        contentFinder.Traverse(_context.SchemaSetManager.SchemaSet);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForNamespace(_context, topic.Namespace);
            writer.WriteRemarksSectionForNamespace(_context, topic.Namespace);
            writer.WriteExamplesSectionForNamespace(_context, topic.Namespace);
            writer.WriteRootSchemasSection(_context, _context.SchemaSetManager.GetNamespaceRootSchemas(topic.Namespace));
            writer.WriteRootElementsSection(_context, _context.SchemaSetManager.GetNamespaceRootElements(topic.Namespace));
            writer.WriteSchemasSection(_context, contentFinder.Schemas);
            writer.WriteElementsSection(_context, contentFinder.Elements);
            writer.WriteAttributesSection(_context, contentFinder.Attributes);
            writer.WriteGroupsSection(_context, contentFinder.Groups);
            writer.WriteAttributeGroupsSection(_context, contentFinder.AttributeGroups);
            writer.WriteSimpleTypesSection(_context, contentFinder.SimpleTypes);
            writer.WriteComplexTypesSection(_context, contentFinder.ComplexTypes);
            writer.EndTopic();
        }
    }
    /// <summary>Writes a topic for a single schema file and its members.</summary>
    private void GenerateSchemaTopic(Topic topic)
    {
        var schema = (XmlSchema)topic.SchemaObject;
        var contentFinder = new SchemaContentFinder(schema);
        contentFinder.Traverse(schema);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForSchema(_context, schema);
            writer.WriteRemarksSectionForObject(_context, schema);
            writer.WriteExamplesSectionForObject(_context, schema);
            writer.WriteElementsSection(_context, contentFinder.Elements);
            writer.WriteAttributesSection(_context, contentFinder.Attributes);
            writer.WriteGroupsSection(_context, contentFinder.Groups);
            writer.WriteAttributeGroupsSection(_context, contentFinder.AttributeGroups);
            writer.WriteSimpleTypesSection(_context, contentFinder.SimpleTypes);
            writer.WriteComplexTypesSection(_context, contentFinder.ComplexTypes);
            writer.EndTopic();
        }
    }
    /// <summary>
    /// Writes an overview (section) topic containing exactly one member-list
    /// section, selected by the topic type.
    /// </summary>
    private void GenerateOverviewTopic(Topic topic)
    {
        var contentFinder = new NamespaceContentFinder(_context.SchemaSetManager, topic.Namespace);
        contentFinder.Traverse(_context.SchemaSetManager.SchemaSet);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForOverview(_context, topic.Namespace);
            switch (topic.TopicType)
            {
                case TopicType.RootSchemasSection:
                    writer.WriteRootSchemasSection(_context, _context.SchemaSetManager.GetNamespaceRootSchemas(topic.Namespace));
                    break;
                case TopicType.RootElementsSection:
                    writer.WriteRootElementsSection(_context, _context.SchemaSetManager.GetNamespaceRootElements(topic.Namespace));
                    break;
                case TopicType.SchemasSection:
                    writer.WriteSchemasSection(_context, contentFinder.Schemas);
                    break;
                case TopicType.ElementsSection:
                    writer.WriteElementsSection(_context, contentFinder.Elements);
                    break;
                case TopicType.AttributesSection:
                    writer.WriteAttributesSection(_context, contentFinder.Attributes);
                    break;
                case TopicType.AttributeGroupsSection:
                    writer.WriteAttributeGroupsSection(_context, contentFinder.AttributeGroups);
                    break;
                case TopicType.GroupsSection:
                    writer.WriteGroupsSection(_context, contentFinder.Groups);
                    break;
                case TopicType.SimpleTypesSection:
                    writer.WriteSimpleTypesSection(_context, contentFinder.SimpleTypes);
                    break;
                case TopicType.ComplexTypesSection:
                    writer.WriteComplexTypesSection(_context, contentFinder.ComplexTypes);
                    break;
                default:
                    throw ExceptionBuilder.UnhandledCaseLabel(topic.TopicType);
            }
            writer.EndTopic();
        }
    }
    /// <summary>Writes the topic for a global element declaration.</summary>
    private void GenerateElementTopic(Topic topic)
    {
        var element = (XmlSchemaElement)topic.SchemaObject;
        var parents = _context.SchemaSetManager.GetObjectParents(element);
        var simpleTypeStructureRoot = _context.SchemaSetManager.GetSimpleTypeStructure(element.ElementSchemaType);
        var children = _context.SchemaSetManager.GetChildren(element);
        var attributeEntries = _context.SchemaSetManager.GetAttributeEntries(element);
        var constraints = element.Constraints;
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForObject(_context, element);
            writer.WriteTypeSection(_context, element);
            writer.WriteContentTypeSection(_context, simpleTypeStructureRoot);
            writer.WriteParentsSection(_context, parents);
            writer.WriteChildrenSection(_context, children);
            writer.WriteAttributesSection(_context, attributeEntries);
            writer.WriteConstraintsSection(_context, constraints);
            writer.WriteRemarksSectionForObject(_context, element);
            writer.WriteExamplesSectionForObject(_context, element);
            writer.WriteSyntaxSection(_context, element);
            writer.WriteRelatedTopics(_context, element);
            writer.EndTopic();
        }
    }
    /// <summary>Writes the topic for a global attribute declaration.</summary>
    private void GenerateAttributeTopic(Topic topic)
    {
        var attribute = (XmlSchemaAttribute)topic.SchemaObject;
        var parents = _context.SchemaSetManager.GetObjectParents(attribute);
        var simpleTypeStructureRoot = _context.SchemaSetManager.GetSimpleTypeStructure(attribute.AttributeSchemaType);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForObject(_context, attribute);
            writer.WriteContentTypeSection(_context, simpleTypeStructureRoot);
            writer.WriteParentsSection(_context, parents);
            writer.WriteRemarksSectionForObject(_context, attribute);
            writer.WriteExamplesSectionForObject(_context, attribute);
            writer.WriteSyntaxSection(_context, attribute);
            writer.WriteRelatedTopics(_context, attribute);
            writer.EndTopic();
        }
    }
    /// <summary>Writes the topic for a named model group.</summary>
    private void GenerateGroupTopic(Topic topic)
    {
        var group = (XmlSchemaGroup)topic.SchemaObject;
        var parents = _context.SchemaSetManager.GetObjectParents(group);
        var children = _context.SchemaSetManager.GetChildren(group);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForObject(_context, group);
            writer.WriteUsagesSection(_context, parents);
            writer.WriteChildrenSection(_context, children);
            writer.WriteRemarksSectionForObject(_context, group);
            writer.WriteExamplesSectionForObject(_context, group);
            writer.WriteSyntaxSection(_context, group);
            writer.WriteRelatedTopics(_context, group);
            writer.EndTopic();
        }
    }
    /// <summary>Writes the topic for an attribute group.</summary>
    private void GenerateAttributeGroup(Topic topic)
    {
        var attributeGroup = (XmlSchemaAttributeGroup)topic.SchemaObject;
        var usages = _context.SchemaSetManager.GetObjectParents(attributeGroup);
        var attributeEntries = _context.SchemaSetManager.GetAttributeEntries(attributeGroup);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForObject(_context, attributeGroup);
            writer.WriteUsagesSection(_context, usages);
            writer.WriteAttributesSection(_context, attributeEntries);
            writer.WriteRemarksSectionForObject(_context, attributeGroup);
            writer.WriteExamplesSectionForObject(_context, attributeGroup);
            writer.WriteSyntaxSection(_context, attributeGroup);
            writer.WriteRelatedTopics(_context, attributeGroup);
            writer.EndTopic();
        }
    }
    /// <summary>Writes the topic for a global simple type.</summary>
    private void GenerateSimpleTypeTopic(Topic topic)
    {
        var simpleType = (XmlSchemaSimpleType)topic.SchemaObject;
        var usages = _context.SchemaSetManager.GetTypeUsages(simpleType);
        var simpleTypeStructureRoot = _context.SchemaSetManager.GetSimpleTypeStructure(simpleType.Content);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForObject(_context, simpleType);
            writer.WriteContentTypeSection(_context, simpleTypeStructureRoot);
            writer.WriteUsagesSection(_context, usages);
            writer.WriteRemarksSectionForObject(_context, simpleType);
            writer.WriteExamplesSectionForObject(_context, simpleType);
            writer.WriteSyntaxSection(_context, simpleType);
            writer.WriteRelatedTopics(_context, simpleType);
            writer.EndTopic();
        }
    }
    /// <summary>Writes the topic for a global complex type.</summary>
    private void GenerateComplexTypeTopic(Topic topic)
    {
        var complexType = (XmlSchemaComplexType)topic.SchemaObject;
        var usages = _context.SchemaSetManager.GetTypeUsages(complexType);
        var simpleTypeStructureRoot = _context.SchemaSetManager.GetSimpleTypeStructure(complexType);
        var children = _context.SchemaSetManager.GetChildren(complexType);
        var attributeEntries = _context.SchemaSetManager.GetAttributeEntries(complexType);
        using (var stream = File.Create(topic.FileName))
        using (var writer = new MamlWriter(stream))
        {
            writer.StartTopic(topic.Id);
            writer.WriteIntroductionForObject(_context, complexType);
            writer.WriteBaseTypeSection(_context, complexType);
            writer.WriteContentTypeSection(_context, simpleTypeStructureRoot);
            writer.WriteUsagesSection(_context, usages);
            writer.WriteChildrenSection(_context, children);
            writer.WriteAttributesSection(_context, attributeEntries);
            writer.WriteRemarksSectionForObject(_context, complexType);
            writer.WriteExamplesSectionForObject(_context, complexType);
            writer.WriteSyntaxSection(_context, complexType);
            writer.WriteRelatedTopics(_context, complexType);
            writer.EndTopic();
        }
    }
    /// <summary>
    /// Copies the art files that ship next to this assembly into the media
    /// output folder and records them as MediaItems.
    /// </summary>
    private void GenerateMediaFiles()
    {
        var mediaFolder = Path.Combine(Path.GetDirectoryName(GetType().Assembly.Location), "Media");
        Directory.CreateDirectory(MediaFolder);
        foreach (var artItem in ArtItem.ArtItems)
        {
            var sourceFile = Path.Combine(mediaFolder, artItem.FileName);
            var destinationFile = Path.Combine(MediaFolder, artItem.FileName);
            // overwrite: true — regenerating into an existing output folder
            // previously threw IOException because the destination file existed.
            File.Copy(sourceFile, destinationFile, true);
            var mediaItem = new MediaItem(artItem, destinationFile);
            MediaItems.Add(mediaItem);
        }
    }
    /// <summary>Builds the hierarchical xsd.content topic manifest and saves it.</summary>
    private void GenerateContentFile()
    {
        var doc = new XmlDocument();
        var rootNode = doc.CreateElement("Topics");
        doc.AppendChild(rootNode);
        GenerateContentFileElements(rootNode, _context.TopicManager.Topics);
        // CreateDirectory is a no-op when the directory already exists,
        // so the previous Directory.Exists guard was redundant.
        Directory.CreateDirectory(Path.GetDirectoryName(ContentFile));
        doc.Save(ContentFile);
    }
    /// <summary>
    /// Recursively emits one &lt;Topic&gt; element per topic (with optional
    /// HelpKeywords) under <paramref name="parentNode"/>.
    /// </summary>
    private static void GenerateContentFileElements(XmlNode parentNode, IEnumerable<Topic> topics)
    {
        // OwnerDocument is invariant across the loop; hoist it out.
        var doc = parentNode.OwnerDocument;
        foreach (var topic in topics)
        {
            var topicElement = doc.CreateElement("Topic");
            topicElement.SetAttribute("id", topic.Id);
            topicElement.SetAttribute("visible", XmlConvert.ToString(true));
            topicElement.SetAttribute("title", topic.Title);
            parentNode.AppendChild(topicElement);
            if (topic.KeywordsK.Count > 0 ||
                topic.KeywordsF.Count > 0)
            {
                var helpKeywordsElement = doc.CreateElement("HelpKeywords");
                topicElement.AppendChild(helpKeywordsElement);
                AddKeywords(helpKeywordsElement, topic.KeywordsK, "K");
                AddKeywords(helpKeywordsElement, topic.KeywordsF, "F");
            }
            GenerateContentFileElements(topicElement, topic.Children);
        }
    }
    /// <summary>Appends one HelpKeyword element per term under the given index ("K" or "F").</summary>
    private static void AddKeywords(XmlNode helpKeywordsElement, IEnumerable<string> keywordsF, string index)
    {
        foreach (var keywordF in keywordsF)
        {
            var helpKeywordElement = helpKeywordsElement.OwnerDocument.CreateElement("HelpKeyword");
            helpKeywordElement.SetAttribute("index", index);
            helpKeywordElement.SetAttribute("term", keywordF);
            helpKeywordsElement.AppendChild(helpKeywordElement);
        }
    }
    /// <summary>Maps a topic to its .aml file path inside the topics folder.</summary>
    private static string GetAbsoluteFileName(string topicsFolder, Topic topic)
    {
        return Path.Combine(topicsFolder, Path.ChangeExtension(topic.Id, ".aml"));
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Internal.IL.Stubs;
using Internal.TypeSystem;
using Debug = System.Diagnostics.Debug;
using Interlocked = System.Threading.Interlocked;
namespace Internal.IL
{
/// <summary>
/// Represents a delegate and provides access to compiler-generated methods on the delegate type.
/// </summary>
public class DelegateInfo
{
    // The delegate type definition this info describes (set once in the constructor).
    private TypeDesc _delegateType;
    // Lazily-initialized caches; see the corresponding properties.
    private MethodSignature _signature;
    private MethodDesc _getThunkMethod;
    private DelegateThunkCollection _thunks;
    /// <summary>
    /// Gets the Delegate.GetThunk override implementation for this delegate type.
    /// </summary>
    public MethodDesc GetThunkMethod
    {
        get
        {
            if (_getThunkMethod == null)
            {
                // Thread-safe lazy init: CompareExchange guarantees every caller
                // observes the same instance even under a racing first access.
                Interlocked.CompareExchange(ref _getThunkMethod, new DelegateGetThunkMethodOverride(this), null);
            }
            return _getThunkMethod;
        }
    }
    /// <summary>
    /// Gets the collection of delegate invocation thunks.
    /// </summary>
    public DelegateThunkCollection Thunks
    {
        get
        {
            if (_thunks == null)
            {
                // Same lazy-init-with-publication pattern as GetThunkMethod.
                Interlocked.CompareExchange(ref _thunks, new DelegateThunkCollection(this), null);
            }
            return _thunks;
        }
    }
    /// <summary>
    /// Gets the signature of the delegate type.
    /// </summary>
    public MethodSignature Signature
    {
        get
        {
            if (_signature == null)
            {
                // No Interlocked here, unlike the properties above — presumably a
                // benign race where concurrent initializers resolve the same
                // Invoke signature; NOTE(review): confirm GetKnownMethod is stable.
                _signature = _delegateType.GetKnownMethod("Invoke", null).Signature;
            }
            return _signature;
        }
    }
    /// <summary>
    /// Gets the type of the delegate.
    /// </summary>
    public TypeDesc Type
    {
        get
        {
            return _delegateType;
        }
    }
    public DelegateInfo(TypeDesc delegateType)
    {
        // Only delegate type definitions (not instantiations) are supported here.
        Debug.Assert(delegateType.IsDelegate);
        Debug.Assert(delegateType.IsTypeDefinition);
        _delegateType = delegateType;
    }
}
/// <summary>
/// Represents a collection of delegate invocation thunks.
/// </summary>
public class DelegateThunkCollection
{
    // One field per DelegateThunkKind; fields left null mean the thunk kind
    // does not exist for this delegate (see the indexer below).
    private MethodDesc _openStaticThunk;
    private MethodDesc _multicastThunk;
    private MethodDesc _closedStaticThunk;
    private MethodDesc _invokeThunk;
    private MethodDesc _closedInstanceOverGeneric;
    private MethodDesc _reversePInvokeThunk;
    private MethodDesc _invokeObjectArrayThunk;
    private MethodDesc _openInstanceThunk;
    internal DelegateThunkCollection(DelegateInfo owningDelegate)
    {
        _openStaticThunk = new DelegateInvokeOpenStaticThunk(owningDelegate);
        _multicastThunk = new DelegateInvokeMulticastThunk(owningDelegate);
        _closedStaticThunk = new DelegateInvokeClosedStaticThunk(owningDelegate);
        _invokeThunk = new DelegateDynamicInvokeThunk(owningDelegate);
        _closedInstanceOverGeneric = new DelegateInvokeInstanceClosedOverGenericMethodThunk(owningDelegate);
        _invokeObjectArrayThunk = new DelegateInvokeObjectArrayThunk(owningDelegate);
        // Reverse P/Invoke thunks are only created for non-generic delegates
        // whose return and parameter types pass the compatibility check below.
        if (!owningDelegate.Type.HasInstantiation && IsNativeCallingConventionCompatible(owningDelegate.Signature))
            _reversePInvokeThunk = new DelegateReversePInvokeThunk(owningDelegate);
        MethodSignature delegateSignature = owningDelegate.Signature;
        if (delegateSignature.Length > 0)
        {
            TypeDesc firstParam = delegateSignature[0];
            bool generateOpenInstanceMethod = true;
            // Open-instance invocation needs a first parameter that can act as
            // 'this': def types or signature variables only.
            if (firstParam.IsValueType ||
                (!firstParam.IsDefType && !firstParam.IsSignatureVariable) /* no arrays, pointers, byrefs, etc. */)
            {
                generateOpenInstanceMethod = false;
            }
            if (generateOpenInstanceMethod)
            {
                _openInstanceThunk = new DelegateInvokeOpenInstanceThunk(owningDelegate);
            }
        }
    }
    #region Temporary interop logic
    // TODO: interop should provide a way to query this
    // Returns true when the whole signature (return type and all parameters)
    // is compatible per the per-type check below.
    private static bool IsNativeCallingConventionCompatible(MethodSignature delegateSignature)
    {
        if (!IsNativeCallingConventionCompatible(delegateSignature.ReturnType))
            return false;
        else
        {
            for (int i = 0; i < delegateSignature.Length; i++)
            {
                if (!IsNativeCallingConventionCompatible(delegateSignature[i]))
                {
                    return false;
                }
            }
        }
        return true;
    }
    // Per-type check: pointers/byrefs recurse into the pointee; reference
    // types and Boolean are rejected; other value types are accepted only if
    // all of their instance fields are recursively compatible.
    private static bool IsNativeCallingConventionCompatible(TypeDesc type)
    {
        if (type.IsPointer || type.IsByRef)
            return IsNativeCallingConventionCompatible(((ParameterizedType)type).ParameterType);
        if (!type.IsValueType)
            return false;
        if (type.IsPrimitive)
        {
            // Boolean has no single native representation, so it is excluded.
            if (type.IsWellKnownType(WellKnownType.Boolean))
                return false;
            return true;
        }
        foreach (FieldDesc field in type.GetFields())
        {
            if (!field.IsStatic && !IsNativeCallingConventionCompatible(field.FieldType))
                return false;
        }
        return true;
    }
    #endregion
    /// <summary>
    /// Gets the thunk of the given kind, or null when that kind does not exist
    /// for this delegate.
    /// </summary>
    public MethodDesc this[DelegateThunkKind kind]
    {
        get
        {
            switch (kind)
            {
                case DelegateThunkKind.OpenStaticThunk:
                    return _openStaticThunk;
                case DelegateThunkKind.MulticastThunk:
                    return _multicastThunk;
                case DelegateThunkKind.ClosedStaticThunk:
                    return _closedStaticThunk;
                case DelegateThunkKind.DelegateInvokeThunk:
                    return _invokeThunk;
                case DelegateThunkKind.ClosedInstanceThunkOverGenericMethod:
                    return _closedInstanceOverGeneric;
                case DelegateThunkKind.ReversePinvokeThunk:
                    return _reversePInvokeThunk;
                case DelegateThunkKind.ObjectArrayThunk:
                    return _invokeObjectArrayThunk;
                case DelegateThunkKind.OpenInstanceThunk:
                    return _openInstanceThunk;
                default:
                    return null;
            }
        }
    }
}
// TODO: Unify with the consts used in Delegate.cs within the class library.
/// <summary>
/// Identifies the individual invocation thunks held by a <see cref="DelegateThunkCollection"/>.
/// The numeric values are intended to match the constants used in the class
/// library's Delegate.cs (see TODO above) — do not renumber.
/// </summary>
public enum DelegateThunkKind
{
    MulticastThunk = 0,
    ClosedStaticThunk = 1,
    OpenStaticThunk = 2,
    ClosedInstanceThunkOverGenericMethod = 3, // This may not exist
    DelegateInvokeThunk = 4,
    OpenInstanceThunk = 5, // This may not exist
    ReversePinvokeThunk = 6, // This may not exist
    ObjectArrayThunk = 7, // This may not exist
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="StreamOfStreams.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using Akka.Annotations;
using Akka.Pattern;
using Akka.Streams.Actors;
using Akka.Streams.Dsl;
using Akka.Streams.Implementation.Stages;
using Akka.Streams.Stage;
using Akka.Streams.Supervision;
using Akka.Streams.Util;
using Akka.Util;
using Akka.Util.Internal;
namespace Akka.Streams.Implementation.Fusing
{
/// <summary>
/// Flattens a stream of source graphs into a single output stream, running up to
/// <c>breadth</c> inner sources concurrently and merging their elements as they
/// become available.
/// </summary>
/// <typeparam name="TGraph">Type of the incoming elements; each must be a graph with a <see cref="SourceShape{T}"/>.</typeparam>
/// <typeparam name="T">Element type emitted downstream.</typeparam>
/// <typeparam name="TMat">Materialized value type of the inner sources (not used by the merge).</typeparam>
internal sealed class FlattenMerge<TGraph, T, TMat> : GraphStage<FlowShape<TGraph, T>> where TGraph : IGraph<SourceShape<T>, TMat>
{
    #region internal classes

    private sealed class Logic : InAndOutGraphStageLogic
    {
        private readonly FlattenMerge<TGraph, T, TMat> _stage;
        private readonly Attributes _enclosingAttributes;
        // All inner sources currently being consumed.
        private readonly HashSet<SubSinkInlet<T>> _sources = new HashSet<SubSinkInlet<T>>();
        // Inner inlets that have an element ready while downstream was not available.
        private IBuffer<SubSinkInlet<T>> _q;
        private readonly Action _outHandler;

        public Logic(FlattenMerge<TGraph, T, TMat> stage, Attributes enclosingAttributes) : base(stage.Shape)
        {
            _stage = stage;
            _enclosingAttributes = enclosingAttributes;
            // Out handler installed after the first pull (see OnPull below): drains the
            // queue of buffered inner inlets on each downstream pull.
            _outHandler = () =>
            {
                // could be unavailable due to async input having been executed before this notification
                if (_q.NonEmpty && IsAvailable(_stage._out))
                    PushOut();
            };

            SetHandler(stage._in, stage._out, this);
        }

        // A new inner source arrived; start consuming it and keep pulling for more
        // as long as we are below the configured breadth.
        public override void OnPush()
        {
            var source = Grab(_stage._in);
            AddSource(source);
            if (ActiveSources < _stage._breadth)
                TryPull(_stage._in);
        }

        public override void OnUpstreamFinish()
        {
            // Only complete once all inner sources have drained (see RemoveSource).
            if (ActiveSources == 0)
                CompleteStage();
        }

        // First downstream pull: start pulling inner sources from upstream and switch
        // the out handler to the queue-draining lambda for all subsequent pulls.
        public override void OnPull()
        {
            Pull(_stage._in);
            SetHandler(_stage._out, _outHandler);
        }

        private int ActiveSources => _sources.Count;

        public override void PreStart()
            => _q = Buffer.Create<SubSinkInlet<T>>(_stage._breadth, Interpreter.Materializer);

        // Cancel any still-running inner sources when the stage stops.
        public override void PostStop() => _sources.ForEach(s => s.Cancel());

        // Emit one buffered element downstream and either re-pull its inlet or
        // retire it if the inner source has completed.
        private void PushOut()
        {
            var src = _q.Dequeue();
            Push(_stage._out, src.Grab());
            if (!src.IsClosed)
                src.Pull();
            else
                RemoveSource(src);
        }

        private void RemoveSource(SubSinkInlet<T> src)
        {
            // If we were saturated at full breadth, upstream pulling was suppressed;
            // resume it now that a slot has freed up.
            var pullSuppressed = ActiveSources == _stage._breadth;
            _sources.Remove(src);
            if (pullSuppressed)
                TryPull(_stage._in);
            if (ActiveSources == 0 && IsClosed(_stage._in))
                CompleteStage();
        }

        // Materialize an inner source into a SubSinkInlet and start consuming it.
        private void AddSource(IGraph<SourceShape<T>, TMat> source)
        {
            var sinkIn = CreateSubSinkInlet<T>("FlattenMergeSink");
            sinkIn.SetHandler(new LambdaInHandler(
                onPush: () =>
                {
                    // Push straight through when downstream is waiting, otherwise buffer.
                    if (IsAvailable(_stage._out))
                    {
                        Push(_stage._out, sinkIn.Grab());
                        sinkIn.Pull();
                    }
                    else
                        _q.Enqueue(sinkIn);
                },
                onUpstreamFinish: () =>
                {
                    // If an element is still available it sits in the queue; removal
                    // happens later in PushOut.
                    if (!sinkIn.IsAvailable)
                        RemoveSource(sinkIn);
                }));
            sinkIn.Pull();
            _sources.Add(sinkIn);
            var graph = Source.FromGraph(source).To(sinkIn.Sink);
            var attributes = _stage.InitialAttributes.And(_enclosingAttributes);
            Interpreter.SubFusingMaterializer.Materialize(graph, attributes);
        }

        public override string ToString() => $"FlattenMerge({_stage._breadth})";
    }

    #endregion

    private readonly Inlet<TGraph> _in = new Inlet<TGraph>("flatten.in");
    private readonly Outlet<T> _out = new Outlet<T>("flatten.out");
    private readonly int _breadth;

    /// <summary>
    /// Creates a flatten-merge stage.
    /// </summary>
    /// <param name="breadth">Maximum number of inner sources consumed concurrently.</param>
    public FlattenMerge(int breadth)
    {
        _breadth = breadth;
        InitialAttributes = DefaultAttributes.FlattenMerge;
        Shape = new FlowShape<TGraph, T>(_in, _out);
    }

    /// <summary>
    /// Default attributes for this stage.
    /// </summary>
    protected override Attributes InitialAttributes { get; }

    /// <summary>
    /// The shape of this stage: one inlet of source graphs, one outlet of elements.
    /// </summary>
    public override FlowShape<TGraph, T> Shape { get; }

    /// <summary>
    /// Creates the stage logic.
    /// </summary>
    /// <param name="enclosingAttributes">Attributes of the enclosing graph, combined with this stage's own when materializing inner sources.</param>
    /// <returns>The stage logic instance.</returns>
    protected override GraphStageLogic CreateLogic(Attributes enclosingAttributes) => new Logic(this, enclosingAttributes);

    /// <summary>
    /// Returns a descriptive string including the configured breadth.
    /// </summary>
    /// <returns>A string representation of this stage.</returns>
    public override string ToString() => $"FlattenMerge({_breadth})";
}
/// <summary>
/// INTERNAL API
///
/// Takes up to <c>count</c> leading elements as an immutable prefix list and emits
/// it paired with a source of the remaining elements (the "tail"). The tail source
/// must be subscribed within the materializer's subscription timeout.
/// </summary>
/// <typeparam name="T">Element type of the stream.</typeparam>
internal sealed class PrefixAndTail<T> : GraphStage<FlowShape<T, Tuple<IImmutableList<T>, Source<T, NotUsed>>>>
{
    #region internal classes

    private sealed class Logic : TimerGraphStageLogic, IInHandler, IOutHandler
    {
        private const string SubscriptionTimer = "SubstreamSubscriptionTimer";

        private readonly PrefixAndTail<T> _stage;
        private readonly LambdaOutHandler _subHandler;
        // Number of prefix elements still to collect.
        private int _left;
        // Accumulates the prefix; set to null once the substream is opened
        // (see IsPrefixComplete).
        private ImmutableList<T>.Builder _builder;
        private SubSourceOutlet<T> _tailSource;

        public Logic(PrefixAndTail<T> stage) : base(stage.Shape)
        {
            _stage = stage;
            // Negative counts behave like 0: empty prefix, everything goes to the tail.
            _left = _stage._count < 0 ? 0 : _stage._count;
            _builder = ImmutableList<T>.Empty.ToBuilder();
            // Handler for the tail source's first pull: cancel the subscription timer
            // and from then on simply relay demand to the stage inlet.
            _subHandler = new LambdaOutHandler(onPull: () =>
            {
                SetKeepGoing(false);
                CancelTimer(SubscriptionTimer);
                Pull(_stage._in);
                _tailSource.SetHandler(new LambdaOutHandler(onPull: () => Pull(_stage._in)));
            });

            SetHandler(_stage._in, this);
            SetHandler(_stage._out, this);
        }

        // Fired when the tail source was not subscribed within the timeout;
        // behavior depends on the materializer's subscription-timeout mode.
        protected internal override void OnTimer(object timerKey)
        {
            var materializer = ActorMaterializerHelper.Downcast(Interpreter.Materializer);
            var timeoutSettings = materializer.Settings.SubscriptionTimeoutSettings;
            var timeout = timeoutSettings.Timeout;

            switch (timeoutSettings.Mode)
            {
                case StreamSubscriptionTimeoutTerminationMode.NoopTermination:
                    //do nothing
                    break;
                case StreamSubscriptionTimeoutTerminationMode.WarnTermination:
                    materializer.Logger.Warning(
                        $"Substream subscription timeout triggered after {timeout} in prefixAndTail({_stage._count}).");
                    break;
                case StreamSubscriptionTimeoutTerminationMode.CancelTermination:
                    _tailSource.Timeout(timeout);
                    if (_tailSource.IsClosed)
                        CompleteStage();
                    break;
                default:
                    throw new ArgumentOutOfRangeException();
            }
        }

        // The builder is discarded (nulled) once the prefix has been emitted.
        private bool IsPrefixComplete => ReferenceEquals(_builder, null);

        // Opens the tail substream and arms the subscription timeout timer.
        private Source<T, NotUsed> OpenSubstream()
        {
            var timeout = ActorMaterializerHelper.Downcast(Interpreter.Materializer).Settings.SubscriptionTimeoutSettings.Timeout;
            _tailSource = new SubSourceOutlet<T>(this, "TailSource");
            _tailSource.SetHandler(_subHandler);
            SetKeepGoing(true);
            ScheduleOnce(SubscriptionTimer, timeout);
            _builder = null;
            return Source.FromGraph(_tailSource.Source);
        }

        public void OnPush()
        {
            if (IsPrefixComplete)
                // Prefix already emitted: elements flow into the tail substream.
                _tailSource.Push(Grab(_stage._in));
            else
            {
                _builder.Add(Grab(_stage._in));
                _left--;
                if (_left == 0)
                {
                    // Prefix complete: emit it together with the freshly opened tail.
                    Push(_stage._out, Tuple.Create((IImmutableList<T>) _builder.ToImmutable(), OpenSubstream()));
                    Complete(_stage._out);
                }
                else
                    Pull(_stage._in);
            }
        }

        public void OnPull()
        {
            if (_left == 0)
            {
                // count <= 0: emit an empty prefix immediately with the tail source.
                Push(_stage._out, Tuple.Create((IImmutableList<T>) ImmutableList<T>.Empty, OpenSubstream()));
                Complete(_stage._out);
            }
            else
                Pull(_stage._in);
        }

        public void OnUpstreamFinish()
        {
            if (!IsPrefixComplete)
            {
                // This handles the unpulled out case as well
                Emit(_stage._out, Tuple.Create((IImmutableList<T>) _builder.ToImmutable(), Source.Empty<T>()), CompleteStage);
            }
            else
            {
                if (!_tailSource.IsClosed)
                    _tailSource.Complete();
                CompleteStage();
            }
        }

        public void OnUpstreamFailure(Exception ex)
        {
            if (IsPrefixComplete)
            {
                // Propagate the failure into the already-open tail substream.
                if (!_tailSource.IsClosed)
                    _tailSource.Fail(ex);
                CompleteStage();
            }
            else
                FailStage(ex);
        }

        public void OnDownstreamFinish()
        {
            if (!IsPrefixComplete)
                CompleteStage();
            // Otherwise substream is open, ignore
        }
    }

    #endregion

    private readonly int _count;
    private readonly Inlet<T> _in = new Inlet<T>("PrefixAndTail.in");
    private readonly Outlet<Tuple<IImmutableList<T>, Source<T, NotUsed>>> _out = new Outlet<Tuple<IImmutableList<T>, Source<T, NotUsed>>>("PrefixAndTail.out");

    /// <summary>
    /// Creates a prefix-and-tail stage.
    /// </summary>
    /// <param name="count">Number of leading elements to collect into the prefix; values &lt;= 0 yield an empty prefix.</param>
    public PrefixAndTail(int count)
    {
        _count = count;
        Shape = new FlowShape<T, Tuple<IImmutableList<T>, Source<T, NotUsed>>>(_in, _out);
    }

    /// <summary>
    /// Default attributes for this stage.
    /// </summary>
    protected override Attributes InitialAttributes { get; } = DefaultAttributes.PrefixAndTail;

    /// <summary>
    /// The shape of this stage: elements in, (prefix, tail-source) tuples out.
    /// </summary>
    public override FlowShape<T, Tuple<IImmutableList<T>, Source<T, NotUsed>>> Shape { get; }

    /// <summary>
    /// Creates the stage logic.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph (unused).</param>
    /// <returns>The stage logic instance.</returns>
    protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);

    /// <summary>
    /// Returns a descriptive string including the configured prefix count.
    /// </summary>
    /// <returns>A string representation of this stage.</returns>
    public override string ToString() => $"PrefixAndTail({_count})";
}
/// <summary>
/// INTERNAL API
///
/// Demultiplexes the stream into substreams keyed by <c>keyFor(element)</c>.
/// A new substream source is emitted downstream for each previously unseen key,
/// up to <c>maxSubstreams</c> open substreams. Each substream must be subscribed
/// within the materializer's subscription timeout or it is cancelled.
/// </summary>
/// <typeparam name="T">Element type of the stream.</typeparam>
/// <typeparam name="TKey">Key type used to group elements.</typeparam>
internal sealed class GroupBy<T, TKey> : GraphStage<FlowShape<T, Source<T, NotUsed>>>
{
    #region Loigc

    private sealed class Logic : TimerGraphStageLogic, IInHandler, IOutHandler
    {
        private readonly GroupBy<T, TKey> _stage;
        // Substreams currently open, by key.
        private readonly Dictionary<TKey, SubstreamSource> _activeSubstreams = new Dictionary<TKey, SubstreamSource>();
        // Keys whose substream has already completed/cancelled; their elements are dropped.
        private readonly HashSet<TKey> _closedSubstreams = new HashSet<TKey>();
        // Substreams that have not yet received their first pull.
        private readonly HashSet<SubstreamSource> _substreamsJustStarted = new HashSet<SubstreamSource>();
        private readonly Lazy<Decider> _decider;
        private TimeSpan _timeout;
        private SubstreamSource _substreamWaitingToBePushed;
        // A single buffered element waiting for its substream to become available.
        private Option<TKey> _nextElementKey = Option<TKey>.None;
        private Option<T> _nextElementValue = Option<T>.None;
        private long _nextId;
        // Count of substreams that still hold an undelivered first element.
        private int _firstPushCounter;

        public Logic(GroupBy<T, TKey> stage, Attributes inheritedAttributes) : base(stage.Shape)
        {
            _stage = stage;
            // Supervision strategy is resolved lazily from the inherited attributes.
            _decider = new Lazy<Decider>(() =>
            {
                var attribute = inheritedAttributes.GetAttribute<ActorAttributes.SupervisionStrategy>(null);
                return attribute != null ? attribute.Decider : Deciders.StoppingDecider;
            });

            SetHandler(_stage.In, this);
            SetHandler(_stage.Out, this);
        }

        public void OnPush()
        {
            try
            {
                var element = Grab(_stage.In);
                var key = _stage._keyFor(element);
                if (key == null)
                    throw new ArgumentNullException(nameof(key), "Key cannot be null");

                if (_activeSubstreams.TryGetValue(key, out var substreamSource))
                {
                    if (substreamSource.IsAvailable)
                        substreamSource.Push(element);
                    else
                    {
                        // Substream not pulling yet: buffer the element until it does.
                        _nextElementKey = key;
                        _nextElementValue = element;
                    }
                }
                else
                {
                    if (_activeSubstreams.Count == _stage._maxSubstreams)
                        Fail(new IllegalStateException($"Cannot open substream for key {key}: too many substreams open"));
                    else if (_closedSubstreams.Contains(key) && !HasBeenPulled(_stage.In))
                        // The substream for this key is gone; drop the element and keep pulling.
                        Pull(_stage.In);
                    else
                        RunSubstream(key, element);
                }
            }
            catch (Exception ex)
            {
                // Supervision: stop the whole stage or drop the element and resume.
                var directive = _decider.Value(ex);
                if (directive == Directive.Stop)
                    Fail(ex);
                else if (!HasBeenPulled(_stage.In))
                    Pull(_stage.In);
            }
        }

        public void OnPull()
        {
            if (_substreamWaitingToBePushed != null)
            {
                // A substream was opened while downstream had no demand: deliver it now.
                Push(_stage.Out, Source.FromGraph(_substreamWaitingToBePushed.Source));
                ScheduleOnce(_substreamWaitingToBePushed.Key.Value, _timeout);
                _substreamWaitingToBePushed = null;
            }
            else
            {
                if (HasNextElement)
                {
                    var subSubstreamSource = _activeSubstreams[_nextElementKey.Value];
                    if (subSubstreamSource.IsAvailable)
                    {
                        subSubstreamSource.Push(_nextElementValue.Value);
                        ClearNextElement();
                    }
                }
                else if (!HasBeenPulled(_stage.In))
                    TryPull(_stage.In);
            }
        }

        public void OnUpstreamFinish()
        {
            // Keep running until all substreams have drained their pending elements.
            if (!TryCompleteAll())
                SetKeepGoing(true);
        }

        public void OnUpstreamFailure(Exception ex) => Fail(ex);

        public void OnDownstreamFinish()
        {
            // Downstream no longer wants new substreams, but open ones keep flowing.
            if (_activeSubstreams.Count == 0)
                CompleteStage();
            else
                SetKeepGoing(true);
        }

        private long NextId => ++_nextId;

        private bool HasNextElement => _nextElementKey.HasValue;

        private void ClearNextElement()
        {
            _nextElementKey = Option<TKey>.None;
            _nextElementValue = Option<T>.None;
        }

        // Completes all substreams and the stage if nothing is pending anywhere.
        private bool TryCompleteAll()
        {
            if (_activeSubstreams.Count == 0 || (!HasNextElement && _firstPushCounter == 0))
            {
                foreach (var value in _activeSubstreams.Values)
                    value.Complete();
                CompleteStage();
                return true;
            }
            return false;
        }

        // Fails the stage and every open substream with the same exception.
        private void Fail(Exception ex)
        {
            foreach (var value in _activeSubstreams.Values)
                value.Fail(ex);
            FailStage(ex);
        }

        private bool NeedToPull => !(HasBeenPulled(_stage.In) || IsClosed(_stage.In) || HasNextElement);

        public override void PreStart()
        {
            var settings = ActorMaterializerHelper.Downcast(Interpreter.Materializer).Settings;
            _timeout = settings.SubscriptionTimeoutSettings.Timeout;
        }

        // Subscription timeout for a substream: cancel it and retire its key.
        protected internal override void OnTimer(object timerKey)
        {
            var key = (TKey) timerKey;
            if (_activeSubstreams.TryGetValue(key, out var substreamSource))
            {
                substreamSource.Timeout(_timeout);
                _closedSubstreams.Add(key);
                _activeSubstreams.Remove(key);
                if (IsClosed(_stage.In))
                    TryCompleteAll();
            }
        }

        // Opens a new substream for `key`, seeded with its first element `value`.
        private void RunSubstream(TKey key, T value)
        {
            var substreamSource = new SubstreamSource(this, "GroupBySource " + NextId, key, value);
            _activeSubstreams.Add(key, substreamSource);
            _firstPushCounter++;
            if (IsAvailable(_stage.Out))
            {
                Push(_stage.Out, Source.FromGraph(substreamSource.Source));
                ScheduleOnce(key, _timeout);
                _substreamWaitingToBePushed = null;
            }
            else
            {
                // No downstream demand yet; hold the substream until OnPull.
                SetKeepGoing(true);
                _substreamsJustStarted.Add(substreamSource);
                _substreamWaitingToBePushed = substreamSource;
            }
        }

        private sealed class SubstreamSource : SubSourceOutlet<T>, IOutHandler
        {
            private readonly Logic _logic;
            // The element the substream was opened with, delivered on its first pull.
            private Option<T> _firstElement;

            public SubstreamSource(Logic logic, string name, Option<TKey> key, Option<T> firstElement) : base(logic, name)
            {
                _logic = logic;
                _firstElement = firstElement;
                Key = key;
                SetHandler(this);
            }

            private bool FirstPush => _firstElement.HasValue;

            // True when the logic's buffered element belongs to this substream.
            private bool HasNextForSubSource => _logic.HasNextElement && _logic._nextElementKey.Equals(Key);

            public Option<TKey> Key { get; }

            private void CompleteSubStream()
            {
                Complete();
                _logic._activeSubstreams.Remove(Key.Value);
                _logic._closedSubstreams.Add(Key.Value);
            }

            private void TryCompleteHandler()
            {
                if (_logic.IsClosed(_logic._stage.In) && !HasNextForSubSource)
                {
                    CompleteSubStream();
                    _logic.TryCompleteAll();
                }
            }

            public void OnPull()
            {
                // The substream subscribed in time; disarm its timeout.
                _logic.CancelTimer(Key.Value);
                if (FirstPush)
                {
                    _logic._firstPushCounter--;
                    Push(_firstElement.Value);
                    _firstElement = Option<T>.None;
                    _logic._substreamsJustStarted.Remove(this);
                    if (_logic._substreamsJustStarted.Count == 0)
                        _logic.SetKeepGoing(false);
                }
                else if (HasNextForSubSource)
                {
                    Push(_logic._nextElementValue.Value);
                    _logic.ClearNextElement();
                }
                else if (_logic.NeedToPull)
                    _logic.Pull(_logic._stage.In);

                TryCompleteHandler();
            }

            public void OnDownstreamFinish()
            {
                // Drop any element buffered for this (now cancelled) substream.
                if (_logic.HasNextElement && _logic._nextElementKey.Equals(Key))
                    _logic.ClearNextElement();
                if (FirstPush)
                    _logic._firstPushCounter--;
                CompleteSubStream();
                if (_logic.IsClosed(_logic._stage.In))
                    _logic.TryCompleteAll();
                else if (_logic.NeedToPull)
                    _logic.Pull(_logic._stage.In);
            }
        }
    }

    #endregion

    private readonly int _maxSubstreams;
    private readonly Func<T, TKey> _keyFor;

    /// <summary>
    /// Creates a group-by stage.
    /// </summary>
    /// <param name="maxSubstreams">Maximum number of simultaneously open substreams; exceeding it fails the stage.</param>
    /// <param name="keyFor">Function computing the grouping key for each element; must not return null.</param>
    public GroupBy(int maxSubstreams, Func<T, TKey> keyFor)
    {
        _maxSubstreams = maxSubstreams;
        _keyFor = keyFor;
        Shape = new FlowShape<T, Source<T, NotUsed>>(In, Out);
    }

    private Inlet<T> In { get; } = new Inlet<T>("GroupBy.in");

    private Outlet<Source<T, NotUsed>> Out { get; } = new Outlet<Source<T, NotUsed>>("GroupBy.out");

    /// <summary>
    /// Default attributes for this stage.
    /// </summary>
    protected override Attributes InitialAttributes { get; } = DefaultAttributes.GroupBy;

    /// <summary>
    /// The shape of this stage: elements in, keyed substream sources out.
    /// </summary>
    public override FlowShape<T, Source<T, NotUsed>> Shape { get; }

    /// <summary>
    /// Creates the stage logic.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph; consulted for the supervision strategy.</param>
    /// <returns>The stage logic instance.</returns>
    protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes)
        => new Logic(this, inheritedAttributes);

    /// <summary>
    /// Returns the stage name.
    /// </summary>
    /// <returns>A string representation of this stage.</returns>
    public override string ToString() => "GroupBy";
}
/// <summary>
/// INTERNAL API
///
/// Factory helpers for the <see cref="Split{T}"/> stage.
/// </summary>
internal static class Split
{
    /// <summary>
    /// Controls where the split happens relative to the element that matched
    /// the predicate.
    /// </summary>
    internal enum SplitDecision
    {
        /// <summary>
        /// The matching element becomes the first element of a new substream.
        /// </summary>
        SplitBefore,

        /// <summary>
        /// The matching element becomes the last element of the current substream.
        /// </summary>
        SplitAfter
    }

    /// <summary>
    /// Creates a split stage that opens a new substream before each element for
    /// which <paramref name="p"/> returns true.
    /// </summary>
    /// <typeparam name="T">Element type of the stream.</typeparam>
    /// <param name="p">Predicate selecting the elements that start a new substream.</param>
    /// <param name="substreamCancelStrategy">How substream cancellation affects the whole stream.</param>
    /// <returns>The configured split stage.</returns>
    public static IGraph<FlowShape<T, Source<T, NotUsed>>, NotUsed> When<T>(Func<T, bool> p, SubstreamCancelStrategy substreamCancelStrategy)
    {
        return new Split<T>(SplitDecision.SplitBefore, p, substreamCancelStrategy);
    }

    /// <summary>
    /// Creates a split stage that closes the current substream after each element
    /// for which <paramref name="p"/> returns true.
    /// </summary>
    /// <typeparam name="T">Element type of the stream.</typeparam>
    /// <param name="p">Predicate selecting the elements that end the current substream.</param>
    /// <param name="substreamCancelStrategy">How substream cancellation affects the whole stream.</param>
    /// <returns>The configured split stage.</returns>
    public static IGraph<FlowShape<T, Source<T, NotUsed>>, NotUsed> After<T>(Func<T, bool> p, SubstreamCancelStrategy substreamCancelStrategy)
    {
        return new Split<T>(SplitDecision.SplitAfter, p, substreamCancelStrategy);
    }
}
/// <summary>
/// INTERNAL API
///
/// Splits the stream into consecutive substreams at elements matching a predicate,
/// either before or after the matching element (see <see cref="Split.SplitDecision"/>).
/// Each substream must be subscribed within the materializer's subscription timeout.
/// </summary>
/// <typeparam name="T">Element type of the stream.</typeparam>
internal sealed class Split<T> : GraphStage<FlowShape<T, Source<T, NotUsed>>>
{
    #region internal classes

    private sealed class Logic : TimerGraphStageLogic, IInHandler, IOutHandler
    {
        #region internal classes

        // In handler installed while a substream is open; routes elements into the
        // current substream and detects split points.
        private sealed class SubstreamHandler : InAndOutHandler
        {
            private readonly Logic _logic;
            private readonly Inlet<T> _inlet;
            private readonly Split.SplitDecision _decision;
            // Set when upstream finished while the initial element was still pending.
            private bool _willCompleteAfterInitialElement;

            public SubstreamHandler(Logic logic)
            {
                _logic = logic;
                _inlet = logic._stage._in;
                _decision = _logic._stage._decision;
            }

            public bool HasInitialElement => FirstElement.HasValue;

            // Element the next substream starts with (SplitBefore case).
            public Option<T> FirstElement { private get; set; }

            // Substreams are always assumed to be pushable position when we enter this method
            private void CloseThis(SubstreamHandler handler, T currentElem)
            {
                if (_decision == Split.SplitDecision.SplitAfter)
                {
                    // The matching element is the last of the current substream.
                    if (!_logic._substreamCancelled)
                    {
                        _logic._substreamSource.Push(currentElem);
                        _logic._substreamSource.Complete();
                    }
                }
                else if (_decision == Split.SplitDecision.SplitBefore)
                {
                    // The matching element becomes the first of the next substream.
                    handler.FirstElement = currentElem;
                    if (!_logic._substreamCancelled)
                        _logic._substreamSource.Complete();
                }
            }

            public override void OnPull()
            {
                // The substream subscribed in time; disarm the timeout.
                _logic.CancelTimer(SubscriptionTimer);
                if (HasInitialElement)
                {
                    _logic._substreamSource.Push(FirstElement.Value);
                    FirstElement = Option<T>.None;
                    _logic.SetKeepGoing(false);
                    if (_willCompleteAfterInitialElement)
                    {
                        _logic._substreamSource.Complete();
                        _logic.CompleteStage();
                    }
                }
                else
                    _logic.Pull(_inlet);
            }

            public override void OnDownstreamFinish()
            {
                _logic._substreamCancelled = true;
                if (_logic.IsClosed(_inlet) || _logic._stage._propagateSubstreamCancel)
                    _logic.CompleteStage();
                else
                // Start draining
                if (!_logic.HasBeenPulled(_inlet))
                    _logic.Pull(_inlet);
            }

            public override void OnPush()
            {
                var elem = _logic.Grab(_inlet);
                try
                {
                    if (_logic._stage._predicate(elem))
                    {
                        // Split point reached: close the current substream and
                        // either hand over to a new one (SplitBefore) or revert to
                        // the outer handler until the next element (SplitAfter).
                        var handler = new SubstreamHandler(_logic);
                        CloseThis(handler, elem);
                        if (_decision == Split.SplitDecision.SplitBefore)
                            _logic.HandOver(handler);
                        else
                        {
                            _logic._substreamSource = null;
                            _logic.SetHandler(_inlet, _logic);
                            _logic.Pull(_inlet);
                        }
                    }
                    else
                    {
                        // Drain into the void
                        if (_logic._substreamCancelled)
                            _logic.Pull(_inlet);
                        else
                            _logic._substreamSource.Push(elem);
                    }
                }
                catch (Exception ex)
                {
                    OnUpstreamFailure(ex);
                }
            }

            public override void OnUpstreamFinish()
            {
                if (HasInitialElement)
                    // Deliver the pending initial element first, then complete
                    // (handled in OnPull).
                    _willCompleteAfterInitialElement = true;
                else
                {
                    _logic._substreamSource.Complete();
                    _logic.CompleteStage();
                }
            }

            public override void OnUpstreamFailure(Exception ex)
            {
                _logic._substreamSource.Fail(ex);
                _logic.FailStage(ex);
            }
        }

        #endregion

        private const string SubscriptionTimer = "SubstreamSubscriptionTimer";

        private TimeSpan _timeout;
        private SubSourceOutlet<T> _substreamSource;
        private bool _substreamWaitingToBePushed;
        private bool _substreamCancelled;
        private readonly Split<T> _stage;

        public Logic(Split<T> stage) : base(stage.Shape)
        {
            _stage = stage;
            SetHandler(stage._out, this);

            // initial input handler
            SetHandler(stage._in, this);
        }

        // Outer in handler: only active while no substream is open.
        public void OnPush()
        {
            var handler = new SubstreamHandler(this);
            var elem = Grab(_stage._in);

            if (_stage._decision == Split.SplitDecision.SplitAfter && _stage._predicate(elem))
                // A matching element in SplitAfter mode forms a single-element substream.
                Push(_stage._out, Source.Single(elem));
            // Next pull will come from the next substream that we will open
            else
                handler.FirstElement = elem;

            HandOver(handler);
        }

        public void OnUpstreamFinish() => CompleteStage();

        public void OnUpstreamFailure(Exception e) => FailStage(e);

        public void OnPull()
        {
            if (_substreamSource == null)
            {
                //can be already pulled from substream in case split after
                if (!HasBeenPulled(_stage._in))
                    Pull(_stage._in);
            }
            else if (_substreamWaitingToBePushed)
                PushSubstreamSource();
        }

        public void OnDownstreamFinish()
        {
            // If the substream is already cancelled or it has not been handed out, we can go away
            if (_substreamSource == null || _substreamWaitingToBePushed || _substreamCancelled)
                CompleteStage();
        }

        public override void PreStart()
        {
            var settings = ActorMaterializerHelper.Downcast(Interpreter.Materializer).Settings;
            _timeout = settings.SubscriptionTimeoutSettings.Timeout;
        }

        // Opens a new substream driven by `handler` and emits it downstream when possible.
        private void HandOver(SubstreamHandler handler)
        {
            if (IsClosed(_stage._out))
                CompleteStage();
            else
            {
                _substreamSource = new SubSourceOutlet<T>(this, "SplitSource");
                _substreamSource.SetHandler(handler);
                _substreamCancelled = false;
                SetHandler(_stage._in, handler);
                SetKeepGoing(handler.HasInitialElement);

                if (IsAvailable(_stage._out))
                {
                    if (_stage._decision == Split.SplitDecision.SplitBefore || handler.HasInitialElement)
                        PushSubstreamSource();
                    else
                        Pull(_stage._in);
                }
                else
                    _substreamWaitingToBePushed = true;
            }
        }

        private void PushSubstreamSource()
        {
            Push(_stage._out, Source.FromGraph(_substreamSource.Source));
            // Arm the subscription timeout for the freshly emitted substream.
            ScheduleOnce(SubscriptionTimer, _timeout);
            _substreamWaitingToBePushed = false;
        }

        protected internal override void OnTimer(object timerKey) => _substreamSource.Timeout(_timeout);
    }

    #endregion

    private readonly Inlet<T> _in = new Inlet<T>("Split.in");
    private readonly Outlet<Source<T, NotUsed>> _out = new Outlet<Source<T, NotUsed>>("Split.out");

    private readonly Split.SplitDecision _decision;
    private readonly Func<T, bool> _predicate;
    private readonly bool _propagateSubstreamCancel;

    /// <summary>
    /// Creates a split stage.
    /// </summary>
    /// <param name="decision">Whether to split before or after a matching element.</param>
    /// <param name="predicate">Predicate identifying split points.</param>
    /// <param name="substreamCancelStrategy">When Propagate, cancelling a substream cancels the whole stream.</param>
    public Split(Split.SplitDecision decision, Func<T, bool> predicate, SubstreamCancelStrategy substreamCancelStrategy)
    {
        _decision = decision;
        _predicate = predicate;
        _propagateSubstreamCancel = substreamCancelStrategy == SubstreamCancelStrategy.Propagate;
        Shape = new FlowShape<T, Source<T, NotUsed>>(_in, _out);
    }

    /// <summary>
    /// The shape of this stage: elements in, substream sources out.
    /// </summary>
    public override FlowShape<T, Source<T, NotUsed>> Shape { get; }

    /// <summary>
    /// Creates the stage logic.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph (unused).</param>
    /// <returns>The stage logic instance.</returns>
    protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);

    /// <summary>
    /// Returns the stage name.
    /// </summary>
    /// <returns>A string representation of this stage.</returns>
    public override string ToString() => "Split";
}
/// <summary>
/// INTERNAL API
///
/// State and command marker types shared by <see cref="SubSink{T}"/>. The sink's
/// status progresses from <see cref="Uninitialized"/>, optionally through one
/// scheduled command, to the materialized async callback (stored unwrapped as an
/// <c>Action&lt;ICommand&gt;</c> — see the note below).
/// </summary>
internal static class SubSink
{
    /// <summary>
    /// Marker interface for the pre-materialization states of a <see cref="SubSink{T}"/>.
    /// </summary>
    internal interface IState
    {
    }

    /// <summary>
    /// Not yet materialized and no command has been scheduled
    /// </summary>
    internal class Uninitialized : IState
    {
        public static readonly Uninitialized Instance = new Uninitialized();

        private Uninitialized()
        {
        }
    }

    /// <summary>
    /// A command was scheduled before materialization
    /// </summary>
    internal abstract class CommandScheduledBeforeMaterialization : IState
    {
        protected CommandScheduledBeforeMaterialization(ICommand command)
        {
            Command = command;
        }

        // The command to replay once the sink materializes.
        public ICommand Command { get; }
    }

    /// <summary>
    /// A RequestOne command was scheduled before materialization
    /// </summary>
    internal class RequestOneScheduledBeforeMaterialization : CommandScheduledBeforeMaterialization
    {
        public static readonly RequestOneScheduledBeforeMaterialization Instance = new RequestOneScheduledBeforeMaterialization(RequestOne.Instance);

        private RequestOneScheduledBeforeMaterialization(ICommand command) : base(command)
        {
        }
    }

    /// <summary>
    /// A Cancel command was scheduled before materialization
    /// </summary>
    internal sealed class CancelScheduledBeforeMaterialization : CommandScheduledBeforeMaterialization
    {
        public static readonly CancelScheduledBeforeMaterialization Instance = new CancelScheduledBeforeMaterialization(Cancel.Instance);

        private CancelScheduledBeforeMaterialization(ICommand command) : base(command)
        {
        }
    }

    /*
     Steady state: sink has been materialized, commands can be delivered through the callback
     Represented in unwrapped form as AsyncCallback[Command] directly to prevent a level of indirection
     (in the JVM original: case class Materialized(callback: AsyncCallback[Command]) extends State)
    */

    /// <summary>
    /// Marker interface for commands delivered to a materialized <see cref="SubSink{T}"/>.
    /// </summary>
    internal interface ICommand
    {
    }

    /// <summary>
    /// Requests one element from the sink's upstream (demand signal).
    /// </summary>
    internal class RequestOne : ICommand
    {
        public static readonly RequestOne Instance = new RequestOne();

        private RequestOne()
        {
        }
    }

    /// <summary>
    /// Cancels the sink's upstream.
    /// </summary>
    internal class Cancel : ICommand
    {
        public static readonly Cancel Instance = new Cancel();

        private Cancel()
        {
        }
    }
}
/// <summary>
/// INTERNAL API
///
/// A sink stage that forwards stream events (<see cref="OnNext"/>,
/// <see cref="OnComplete"/>, <see cref="OnError"/>) to an external callback, and
/// accepts demand/cancel commands from the outside — even before it has been
/// materialized (such commands are buffered in <see cref="_status"/> and replayed
/// on materialization).
/// </summary>
internal sealed class SubSink<T> : GraphStage<SinkShape<T>>
{
    #region internal classes

    private sealed class Logic : InGraphStageLogic
    {
        private readonly SubSink<T> _stage;

        public Logic(SubSink<T> stage) : base(stage.Shape)
        {
            _stage = stage;
            SetHandler(stage._in, this);
        }

        // Forward every upstream event to the external consumer.
        public override void OnPush() => _stage._externalCallback(new OnNext(Grab(_stage._in)));

        public override void OnUpstreamFinish() => _stage._externalCallback(OnComplete.Instance);

        public override void OnUpstreamFailure(Exception e) => _stage._externalCallback(new OnError(e));

        /// <summary>
        /// Installs the materialized async callback into the shared status field,
        /// replaying a command that was scheduled before materialization.
        /// Retries on CAS failure (the state may change concurrently).
        /// </summary>
        private void SetCallback(Action<SubSink.ICommand> callback)
        {
            var status = _stage._status;
            switch (status.Value)
            {
                case SubSink.Uninitialized _:
                    if (!status.CompareAndSet(SubSink.Uninitialized.Instance, /* Materialized */ GetAsyncCallback(callback)))
                        SetCallback(callback);
                    break;
                case SubSink.CommandScheduledBeforeMaterialization command:
                    if (status.CompareAndSet(command, /* Materialized */ GetAsyncCallback(callback)))
                    {
                        // between those two lines a new command might have been scheduled, but that will go through the
                        // async interface, so that the ordering is still kept
                        callback(command.Command);
                    }
                    else
                        SetCallback(callback);
                    break;
                case Action<SubSink.ICommand> _: /* Materialized */
                    FailStage(new IllegalStateException("Substream Source cannot be materialized more than once"));
                    break;
            }
        }

        public override void PreStart()
        {
            // From now on external commands are executed on the stage's thread.
            SetCallback(command =>
            {
                if (command is SubSink.RequestOne)
                    TryPull(_stage._in);
                else if (command is SubSink.Cancel)
                    CompleteStage();
            });
        }
    }

    #endregion

    private readonly Inlet<T> _in = new Inlet<T>("SubSink.in");

    // Holds either SubSink.Uninitialized, a SubSink.CommandScheduledBeforeMaterialization,
    // or the materialized Action<SubSink.ICommand> async callback.
    private readonly AtomicReference<object> _status = new AtomicReference<object>(SubSink.Uninitialized.Instance);
    private readonly string _name;
    private readonly Action<IActorSubscriberMessage> _externalCallback;

    /// <summary>
    /// Creates a sub-sink.
    /// </summary>
    /// <param name="name">Name used for the stage's attributes and ToString.</param>
    /// <param name="externalCallback">Receives the sink's stream events.</param>
    public SubSink(string name, Action<IActorSubscriberMessage> externalCallback)
    {
        _name = name;
        _externalCallback = externalCallback;
        InitialAttributes = Attributes.CreateName($"SubSink({name})");
        Shape = new SinkShape<T>(_in);
    }

    /// <summary>
    /// Default attributes for this stage (carries the sink's name).
    /// </summary>
    protected override Attributes InitialAttributes { get; }

    /// <summary>
    /// The shape of this stage: a single inlet.
    /// </summary>
    public override SinkShape<T> Shape { get; }

    /// <summary>
    /// Requests one element from the substream; safe to call before materialization.
    /// </summary>
    public void PullSubstream() => DispatchCommand(SubSink.RequestOneScheduledBeforeMaterialization.Instance);

    /// <summary>
    /// Cancels the substream; safe to call before materialization.
    /// </summary>
    public void CancelSubstream() => DispatchCommand(SubSink.CancelScheduledBeforeMaterialization.Instance);

    /// <summary>
    /// Delivers a command either directly (when materialized) or by parking it in
    /// <see cref="_status"/> until materialization. Retries on CAS failure.
    /// </summary>
    /// <exception cref="IllegalStateException">When a conflicting command is already pending.</exception>
    private void DispatchCommand(SubSink.CommandScheduledBeforeMaterialization newState)
    {
        switch (_status.Value)
        {
            case Action<SubSink.ICommand> callback:
                // Already materialized: deliver directly through the async callback.
                callback(newState.Command);
                break;
            case SubSink.Uninitialized _:
                if (!_status.CompareAndSet(SubSink.Uninitialized.Instance, newState))
                    DispatchCommand(newState); // changed to materialized in the meantime
                break;
            case SubSink.RequestOneScheduledBeforeMaterialization _ when newState == SubSink.CancelScheduledBeforeMaterialization.Instance:
                // cancellation is allowed to replace pull
                if (!_status.CompareAndSet(SubSink.RequestOneScheduledBeforeMaterialization.Instance, newState))
                    // BUGFIX: on CAS failure the state changed underneath us (e.g. it
                    // materialized); retry delivering the Cancel itself. The previous
                    // code retried with RequestOne, silently dropping the cancellation
                    // and issuing a spurious pull instead.
                    DispatchCommand(newState);
                break;
            case SubSink.CommandScheduledBeforeMaterialization command:
                throw new IllegalStateException($"{newState.Command} on subsink is illegal when {command.Command} is still pending");
        }
    }

    /// <summary>
    /// Creates the stage logic.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph (unused).</param>
    /// <returns>The stage logic instance.</returns>
    protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);

    /// <summary>
    /// Returns the sink's name.
    /// </summary>
    /// <returns>A string representation of this stage.</returns>
    public override string ToString() => _name;
}
/// <summary>
/// INTERNAL API
///
/// Helper for force-terminating substream sources that were never run.
/// </summary>
internal static class SubSource
{
    /// <summary>
    /// INTERNAL API
    ///
    /// HERE ACTUALLY ARE DRAGONS, YOU HAVE BEEN WARNED!
    ///
    /// Cancels/drops a source that will never be materialized normally:
    /// a <see cref="SubSource{T}"/> is cancelled through its external callback, a
    /// publisher source gets a cancelling subscriber, and anything else is drained
    /// into an ignoring sink via the current interpreter's sub-fusing materializer.
    ///
    /// FIXME #19240 (jvm)
    /// </summary>
    /// <typeparam name="T">Element type of the source.</typeparam>
    /// <typeparam name="TMat">Materialized value type of the source.</typeparam>
    /// <param name="s">The source to kill.</param>
    /// <exception cref="NotSupportedException">When the source cannot be dropped outside of an interpreter context.</exception>
    [InternalApi]
    public static void Kill<T, TMat>(Source<T, TMat> s)
    {
        switch (s.Module)
        {
            case GraphStageModule graphModule when graphModule.Stage is SubSource<T> subSource:
                subSource.ExternalCallback(SubSink.Cancel.Instance);
                return;
            case PublisherSource<T> publisherSource:
                publisherSource.Create(default(MaterializationContext), out _).Subscribe(CancelingSubscriber<T>.Instance);
                return;
        }

        var interpreter = GraphInterpreter.CurrentInterpreterOrNull;
        if (interpreter == null)
            throw new NotSupportedException($"cannot drop Source of type {s.Module.GetType().Name}");
        s.RunWith(Sink.Ignore<T>(), interpreter.SubFusingMaterializer);
    }
}
/// <summary>
/// INTERNAL API
/// </summary>
/// <typeparam name="T">The type of elements emitted by this substream source.</typeparam>
internal sealed class SubSource<T> : GraphStage<SourceShape<T>>
{
    #region internal classes

    private sealed class Logic : OutGraphStageLogic
    {
        private readonly SubSource<T> _stage;

        public Logic(SubSource<T> stage) : base(stage.Shape)
        {
            _stage = stage;
            SetHandler(stage._out, this);
        }

        // Downstream demand is forwarded to the external (parent) side as a RequestOne command.
        public override void OnPull() => _stage.ExternalCallback(SubSink.RequestOne.Instance);

        // Downstream cancellation is forwarded to the external (parent) side as a Cancel command.
        public override void OnDownstreamFinish() => _stage.ExternalCallback(SubSink.Cancel.Instance);

        // Publishes our async callback into _status via CAS, retrying (recursively) on contention.
        // If the external side already stored a terminal status before we got here, honor it
        // immediately. Finding another callback means this substream source was materialized twice.
        private void SetCallback(Action<IActorSubscriberMessage> callback)
        {
            var status = _stage._status.Value;
            if (status == null)
            {
                if (!_stage._status.CompareAndSet(null, callback))
                    // CAS lost a race with the external side; re-read the status and try again.
                    SetCallback(callback);
            }
            else if (status is OnComplete)
                CompleteStage();
            else if (status is OnError)
                FailStage(((OnError) status).Cause);
            else if (status is Action<IActorSubscriberMessage>)
                throw new IllegalStateException("Substream Source cannot be materialized more than once");
        }

        public override void PreStart()
        {
            // The async callback marshals external Push/Complete/Fail calls onto this stage's
            // execution context before acting on them.
            var ourOwnCallback = GetAsyncCallback<IActorSubscriberMessage>(msg =>
            {
                if (msg is OnComplete)
                    CompleteStage();
                else if (msg is OnError)
                    FailStage(((OnError) msg).Cause);
                else if (msg is OnNext)
                    Push(_stage._out, (T) ((OnNext) msg).Element);
            });
            SetCallback(ourOwnCallback);
        }
    }

    #endregion

    private readonly string _name;
    private readonly Outlet<T> _out = new Outlet<T>("SubSource.out");

    // Lifecycle cell shared between the external side and the materialized Logic:
    //   null                            -> not yet materialized
    //   Action<IActorSubscriberMessage> -> materialized; invoke to deliver elements / termination
    //   OnComplete / OnError            -> terminated before materialization
    private readonly AtomicReference<object> _status = new AtomicReference<object>();

    /// <summary>
    /// Creates a new substream source stage.
    /// </summary>
    /// <param name="name">Name used for the stage attributes and <see cref="ToString"/>.</param>
    /// <param name="externalCallback">Callback used to signal demand and cancellation to the external side.</param>
    public SubSource(string name, Action<SubSink.ICommand> externalCallback)
    {
        _name = name;
        Shape = new SourceShape<T>(_out);
        InitialAttributes = Attributes.CreateName($"SubSource({name})");
        ExternalCallback = externalCallback;
    }

    /// <summary>
    /// The single-outlet shape of this stage.
    /// </summary>
    public override SourceShape<T> Shape { get; }

    /// <summary>
    /// Default attributes carrying the stage name.
    /// </summary>
    protected override Attributes InitialAttributes { get; }

    /// <summary>
    /// Callback invoked with <see cref="SubSink.RequestOne"/> / <see cref="SubSink.Cancel"/> commands
    /// when the downstream pulls or cancels.
    /// </summary>
    internal Action<SubSink.ICommand> ExternalCallback { get; }

    /// <summary>
    /// Pushes an element into the materialized substream.
    /// </summary>
    /// <param name="elem">The element to push.</param>
    /// <exception cref="IllegalStateException">Thrown when the substream has not been materialized yet.</exception>
    public void PushSubstream(T elem)
    {
        var s = _status.Value;
        var f = s as Action<IActorSubscriberMessage>;
        if (f == null)
            throw new IllegalStateException("cannot push to uninitialized substream");
        f(new OnNext(elem));
    }

    /// <summary>
    /// Completes the substream. Safe to call before materialization: the completion is stored in
    /// <c>_status</c> and delivered when the Logic installs its callback.
    /// </summary>
    public void CompleteSubstream()
    {
        var s = _status.Value;
        var f = s as Action<IActorSubscriberMessage>;
        if (f != null)
            f(OnComplete.Instance);
        else if (!_status.CompareAndSet(null, OnComplete.Instance))
            // CAS lost: the Logic installed its callback concurrently, so deliver directly.
            ((Action<IActorSubscriberMessage>) _status.Value)(OnComplete.Instance);
    }

    /// <summary>
    /// Fails the substream with the given exception. Safe to call before materialization:
    /// the failure is stored in <c>_status</c> and delivered when the Logic installs its callback.
    /// </summary>
    /// <param name="ex">The cause of the failure.</param>
    public void FailSubstream(Exception ex)
    {
        var s = _status.Value;
        var f = s as Action<IActorSubscriberMessage>;
        var failure = new OnError(ex);
        if (f != null)
            f(failure);
        else if (!_status.CompareAndSet(null, failure))
            // CAS lost: the Logic installed its callback concurrently, so deliver directly.
            ((Action<IActorSubscriberMessage>) _status.Value)(failure);
    }

    /// <summary>
    /// Attempts to fail the not-yet-materialized substream with a subscription timeout.
    /// </summary>
    /// <param name="d">The timeout duration reported in the exception message.</param>
    /// <returns>True when the timeout was installed, i.e. the source had not been materialized yet.</returns>
    public bool Timeout(TimeSpan d) => _status.CompareAndSet(null, new OnError(new SubscriptionTimeoutException($"Substream Source has not been materialized in {d}")));

    /// <summary>
    /// Creates the <see cref="GraphStageLogic"/> that implements this stage.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing materialization.</param>
    /// <returns>A new <see cref="Logic"/> bound to this stage instance.</returns>
    protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);

    /// <summary>
    /// Returns the name this stage was constructed with.
    /// </summary>
    /// <returns>The stage's name.</returns>
    public override string ToString() => _name;
}
}
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using BitCoinSharp.Threading;
using log4net;
namespace BitCoinSharp
{
/// <summary>
/// A Peer handles the high level communication with a BitCoin node. It requires a NetworkConnection to be set up for
/// it. After that it takes ownership of the connection, creates and manages its own thread used for communication
/// with the network. All these threads synchronize on the block chain.
/// </summary>
public class Peer
{
    private static readonly ILog _log = LogManager.GetLogger(typeof (Peer));

    private readonly NetworkConnection _conn;
    private readonly NetworkParameters _params;

    // Background thread created by Start(); all incoming message processing happens on it.
    private Thread _thread;

    // Whether the peer thread is supposed to be running or not. Set to false during shutdown so the peer thread
    // knows to quit when the socket goes away.
    private bool _running;

    private readonly BlockChain _blockChain;

    // Used to notify clients when the initial block chain download is finished.
    private CountDownLatch _chainCompletionLatch;

    // When we want to download a block or transaction from a peer, the InventoryItem is put here whilst waiting for
    // the response. Synchronized on itself.
    private readonly IList<GetDataFuture<Block>> _pendingGetBlockFutures;

    /// <summary>
    /// Construct a peer that handles the given network connection and reads/writes from the given block chain. Note that
    /// communication won't occur until you call start().
    /// </summary>
    public Peer(NetworkParameters @params, NetworkConnection conn, BlockChain blockChain)
    {
        _conn = conn;
        _params = @params;
        _blockChain = blockChain;
        _pendingGetBlockFutures = new List<GetDataFuture<Block>>();
    }

    /// <summary>
    /// Starts the background thread that processes messages.
    /// </summary>
    public void Start()
    {
        _thread = new Thread(Run);
        // NOTE(review): locking on 'this' is visible to external code that could take the same lock;
        // a private lock object would be safer. Kept as-is to preserve behavior.
        lock (this)
        {
            _running = true;
        }
        _thread.Name = "BitCoin peer thread: " + _conn;
        _thread.Start();
    }

    /// <summary>
    /// Runs in the peers network thread and manages communication with the peer.
    /// Loops reading messages until an exception (typically an IOException during shutdown) breaks the loop.
    /// </summary>
    private void Run()
    {
        // Must only ever execute on the thread created in Start().
        Debug.Assert(Thread.CurrentThread == _thread);
        try
        {
            while (true)
            {
                var m = _conn.ReadMessage();
                if (m is InventoryMessage)
                {
                    ProcessInv((InventoryMessage) m);
                }
                else if (m is Block)
                {
                    ProcessBlock((Block) m);
                }
                else if (m is AddressMessage)
                {
                    // We don't care about addresses of the network right now. But in future,
                    // we should save them in the wallet so we don't put too much load on the seed nodes and can
                    // properly explore the network.
                }
                else
                {
                    // TODO: Handle the other messages we can receive.
                    _log.WarnFormat("Received unhandled message: {0}", m);
                }
            }
        }
        catch (Exception e)
        {
            // NOTE(review): _running is read here without the lock taken by Start/Disconnect —
            // presumably acceptable for a shutdown flag, but worth confirming.
            if (e is IOException && !_running)
            {
                // This exception was expected because we are tearing down the socket as part of quitting.
                _log.Info("Shutting down peer thread");
            }
            else
            {
                // We caught an unexpected exception.
                Console.Error.WriteLine(e);
            }
        }
        lock (this)
        {
            _running = false;
        }
    }

    /// <summary>
    /// Handles a block received from the peer: first satisfies any pending GetBlock future for it,
    /// otherwise feeds it to the block chain, requesting more blocks if it does not connect.
    /// </summary>
    /// <exception cref="System.IO.IOException" />
    private void ProcessBlock(Block m)
    {
        Debug.Assert(Thread.CurrentThread == _thread);
        try
        {
            // Was this block requested by getblock?
            lock (_pendingGetBlockFutures)
            {
                for (var i = 0; i < _pendingGetBlockFutures.Count; i++)
                {
                    var f = _pendingGetBlockFutures[i];
                    if (f.Item.Hash.SequenceEqual(m.Hash))
                    {
                        // Yes, it was. So pass it through the future.
                        f.SetResult(m);
                        // Blocks explicitly requested don't get sent to the block chain.
                        _pendingGetBlockFutures.RemoveAt(i);
                        return;
                    }
                }
            }
            // Otherwise it's a block sent to us because the peer thought we needed it, so add it to the block chain.
            // This call will synchronize on blockChain.
            if (_blockChain.Add(m))
            {
                // The block was successfully linked into the chain. Notify the user of our progress.
                if (_chainCompletionLatch != null)
                {
                    _chainCompletionLatch.CountDown();
                    if (_chainCompletionLatch.Count == 0)
                    {
                        // All blocks fetched, so we don't need this anymore.
                        _chainCompletionLatch = null;
                    }
                }
            }
            else
            {
                // This block is unconnected - we don't know how to get from it back to the genesis block yet. That
                // must mean that there are blocks we are missing, so do another getblocks with a new block locator
                // to ask the peer to send them to us. This can happen during the initial block chain download where
                // the peer will only send us 500 at a time and then sends us the head block expecting us to request
                // the others.
                // TODO: Should actually request root of orphan chain here.
                BlockChainDownload(m.Hash);
            }
        }
        catch (VerificationException e)
        {
            // We don't want verification failures to kill the thread.
            _log.Warn("block verification failed", e);
        }
        catch (ScriptException e)
        {
            // We don't want script failures to kill the thread.
            _log.Warn("script exception", e);
        }
    }

    /// <summary>
    /// Handles an inv message: either continues a chain download (special single-hash "tickle" inv)
    /// or requests the advertised blocks via getdata. Transactions are currently ignored.
    /// </summary>
    /// <exception cref="System.IO.IOException" />
    private void ProcessInv(InventoryMessage inv)
    {
        Debug.Assert(Thread.CurrentThread == _thread);
        // The peer told us about some blocks or transactions they have. For now we only care about blocks.
        // Note that as we don't actually want to store the entire block chain or even the headers of the block
        // chain, we may end up requesting blocks we already requested before. This shouldn't (in theory) happen
        // enough to be a problem.
        var topBlock = _blockChain.UnconnectedBlock;
        var topHash = (topBlock != null ? topBlock.Hash : null);
        var items = inv.Items;
        if (items.Count == 1 && items[0].Type == InventoryItem.ItemType.Block && topHash != null &&
            items[0].Hash.SequenceEqual(topHash))
        {
            // An inv with a single hash containing our most recent unconnected block is a special inv,
            // it's kind of like a tickle from the peer telling us that it's time to download more blocks to catch up to
            // the block chain. We could just ignore this and treat it as a regular inv but then we'd download the head
            // block over and over again after each batch of 500 blocks, which is wasteful.
            BlockChainDownload(topHash);
            return;
        }
        var getdata = new GetDataMessage(_params);
        var dirty = false;
        foreach (var item in items)
        {
            // Skip non-block items (e.g. transactions) entirely.
            if (item.Type != InventoryItem.ItemType.Block) continue;
            getdata.AddItem(item);
            dirty = true;
        }
        // No blocks to download. This probably contained transactions instead, but right now we can't prove they are
        // valid so we don't bother downloading transactions that aren't in blocks yet.
        if (!dirty)
            return;
        // This will cause us to receive a bunch of block messages.
        _conn.WriteMessage(getdata);
    }

    /// <summary>
    /// Asks the connected peer for the block of the given hash, and returns a Future representing the answer.
    /// If you want the block right away and don't mind waiting for it, just call .get() on the result. Your thread
    /// will block until the peer answers. You can also use the Future object to wait with a timeout, or just check
    /// whether it's done later.
    /// </summary>
    /// <param name="blockHash">Hash of the block you were requesting.</param>
    /// <exception cref="System.IO.IOException" />
    public GetDataFuture<Block> GetBlock(byte[] blockHash)
    {
        var getdata = new InventoryMessage(_params);
        var inventoryItem = new InventoryItem(InventoryItem.ItemType.Block, blockHash);
        getdata.AddItem(inventoryItem);
        var future = new GetDataFuture<Block>(this, inventoryItem);
        // Add to the list of things we're waiting for. It's important this come before the network send to avoid
        // race conditions.
        lock (_pendingGetBlockFutures)
        {
            _pendingGetBlockFutures.Add(future);
        }
        _conn.WriteMessage(getdata);
        return future;
    }

    // A GetDataFuture wraps the result of a getblock or (in future) getTransaction so the owner of the object can
    // decide whether to wait forever, wait for a short while or check later after doing other work.
    public class GetDataFuture<T>
    {
        private readonly Peer _enclosing;
        private bool _cancelled;
        private readonly InventoryItem _item;
        // Counted down exactly once, by SetResult.
        private readonly CountDownLatch _latch;
        private T _result;

        internal GetDataFuture(Peer enclosing, InventoryItem item)
        {
            _enclosing = enclosing;
            _item = item;
            _latch = new CountDownLatch(1);
        }

        // NOTE(review): sets the cancelled flag but always returns false — the request cannot
        // actually be withdrawn once it is on the wire.
        public bool Cancel()
        {
            // Cannot cancel a getdata - once sent, it's sent.
            _cancelled = true;
            return false;
        }

        public bool IsCancelled
        {
            get { return _cancelled; }
        }

        // NOTE(review): compares _result against default(T); for a value-type T whose default is a
        // legitimate result this would report "not done". Fine for reference types such as Block.
        public bool IsDone
        {
            get { return !Equals(_result, default(T)) || _cancelled; }
        }

        // Blocks the calling thread until the result arrives.
        public T Get()
        {
            _latch.Await();
            Debug.Assert(!Equals(_result, default(T)));
            return _result;
        }

        // Blocks the calling thread until the result arrives or the timeout elapses.
        /// <exception cref="System.TimeoutException" />
        public T Get(TimeSpan timeout)
        {
            if (!_latch.Await(timeout))
                throw new TimeoutException();
            Debug.Assert(!Equals(_result, default(T)));
            return _result;
        }

        internal InventoryItem Item
        {
            get { return _item; }
        }

        /// <summary>
        /// Called by the Peer when the result has arrived. Completes the task.
        /// </summary>
        internal void SetResult(T result)
        {
            Debug.Assert(Thread.CurrentThread == _enclosing._thread); // Called from peer thread.
            _result = result;
            // Now release the thread that is waiting. We don't need to synchronize here as the latch establishes
            // a memory barrier.
            _latch.CountDown();
        }
    }

    /// <summary>
    /// Send the given Transaction, ie, make a payment with BitCoins. To create a transaction you can broadcast, use
    /// a <see cref="Wallet">Wallet</see>. After the broadcast completes, confirm the send using the wallet confirmSend() method.
    /// </summary>
    /// <exception cref="System.IO.IOException" />
    internal void BroadcastTransaction(Transaction tx)
    {
        _conn.WriteMessage(tx);
    }

    /// <summary>
    /// Sends the peer a getblocks message with a two-entry block locator (chain head + genesis),
    /// asking for blocks up to <paramref name="toHash"/>.
    /// </summary>
    /// <exception cref="System.IO.IOException" />
    private void BlockChainDownload(byte[] toHash)
    {
        // This may run in ANY thread.
        // The block chain download process is a bit complicated. Basically, we start with zero or more blocks in a
        // chain that we have from a previous session. We want to catch up to the head of the chain BUT we don't know
        // where that chain is up to or even if the top block we have is even still in the chain - we
        // might have got ourselves onto a fork that was later resolved by the network.
        //
        // To solve this, we send the peer a block locator which is just a list of block hashes. It contains the
        // blocks we know about, but not all of them, just enough of them so the peer can figure out if we did end up
        // on a fork and if so, what the earliest still valid block we know about is likely to be.
        //
        // Once it has decided which blocks we need, it will send us an inv with up to 500 block messages. We may
        // have some of them already if we already have a block chain and just need to catch up. Once we request the
        // last block, if there are still more to come it sends us an "inv" containing only the hash of the head
        // block.
        //
        // That causes us to download the head block but then we find (in processBlock) that we can't connect
        // it to the chain yet because we don't have the intermediate blocks. So we rerun this function building a
        // new block locator describing where we're up to.
        //
        // The getblocks with the new locator gets us another inv with another bunch of blocks. We download them once
        // again. This time when the peer sends us an inv with the head block, we already have it so we won't download
        // it again - but we recognize this case as special and call back into blockChainDownload to continue the
        // process.
        //
        // So this is a complicated process but it has the advantage that we can download a chain of enormous length
        // in a relatively stateless manner and with constant/bounded memory usage.
        _log.InfoFormat("blockChainDownload({0})", Utils.BytesToHexString(toHash));

        // TODO: Block locators should be abstracted out rather than special cased here.
        var blockLocator = new LinkedList<byte[]>();
        // We don't do the exponential thinning here, so if we get onto a fork of the chain we will end up
        // re-downloading the whole thing again.
        blockLocator.AddLast(_params.GenesisBlock.Hash);
        var topBlock = _blockChain.ChainHead.Header;
        if (!topBlock.Equals(_params.GenesisBlock))
            blockLocator.AddFirst(topBlock.Hash);
        var message = new GetBlocksMessage(_params, blockLocator.ToList(), toHash);
        _conn.WriteMessage(message);
    }

    /// <summary>
    /// Starts an asynchronous download of the block chain. The chain download is deemed to be complete once we've
    /// downloaded the same number of blocks that the peer advertised having in its version handshake message.
    /// </summary>
    /// <returns>
    /// A <see cref="BitCoinSharp.Threading.CountDownLatch">BitCoinSharp.Threading.CountDownLatch</see> that can be used to track progress and wait for completion.
    /// </returns>
    /// <exception cref="System.IO.IOException" />
    public CountDownLatch StartBlockChainDownload()
    {
        // Chain will overflow signed int blocks in ~41,000 years.
        var chainHeight = _conn.VersionMessage.BestHeight;
        if (chainHeight == 0)
        {
            // This should not happen because we shouldn't have given the user a Peer that is to another client-mode
            // node. If that happens it means the user overrode us somewhere.
            throw new Exception("Peer does not have block chain");
        }
        // May be zero or negative if we are already at (or ahead of) the peer's advertised height;
        // in that case no download is started and the latch is created with a non-positive count.
        var blocksToGet = (int) (chainHeight - _blockChain.ChainHead.Height);
        _chainCompletionLatch = new CountDownLatch(blocksToGet);
        if (blocksToGet > 0)
        {
            // When we just want as many blocks as possible, we can set the target hash to zero.
            BlockChainDownload(new byte[32]);
        }
        return _chainCompletionLatch;
    }

    /// <summary>
    /// Terminates the network connection and stops the background thread.
    /// </summary>
    public void Disconnect()
    {
        lock (this)
        {
            _running = false;
        }
        try
        {
            // This will cause the background thread to die, but it's really ugly. We must do a better job of this.
            _conn.Shutdown();
        }
        catch (IOException)
        {
            // Don't care about this.
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Reflection;
using Xunit;
namespace System.Linq.Expressions.Tests
{
public class UnaryIncrementNullableTests : IncrementDecrementTests
{
    // A struct whose user-defined ++ operator is declared directly on the nullable form, so the
    // compiler does not lift it: incrementing null yields 1, and incrementing -1 (whose successor
    // would be 0) yields null.
    public struct IncrementableWhenNullable
    {
        public IncrementableWhenNullable(int value)
        {
            Value = value;
        }

        public int Value { get; }

        public static IncrementableWhenNullable? operator ++(IncrementableWhenNullable? operand)
        {
            if (operand.HasValue)
            {
                // Note: variable is named 'dec' but this is an increment of the wrapped value.
                int dec = unchecked(operand.GetValueOrDefault().Value + 1);
                if (dec == 0)
                {
                    return null;
                }
                return new IncrementableWhenNullable(dec);
            }
            return new IncrementableWhenNullable(1);
        }
    }

    // (operand, expected) pairs for the operator above, including the null-producing case (-1)
    // and the null-input case.
    private static IEnumerable<object[]> IncrementableWhenNullableValues()
    {
        yield return new object[] { new IncrementableWhenNullable(0), new IncrementableWhenNullable(1) };
        yield return new object[] { new IncrementableWhenNullable(-1), null };
        yield return new object[] { new IncrementableWhenNullable(int.MinValue), new IncrementableWhenNullable(int.MinValue + 1) };
        yield return new object[] { new IncrementableWhenNullable(int.MaxValue), new IncrementableWhenNullable(int.MinValue) };
        yield return new object[] { null, new IncrementableWhenNullable(1) };
    }

    #region Test methods

    // Each test below sweeps boundary values (null, zero, +/-1, extremes) for one nullable
    // primitive type and delegates to the matching verifier.

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUnaryIncrementNullableShortTest(bool useInterpreter)
    {
        short?[] values = new short?[] { null, 0, 1, -1, short.MinValue, short.MaxValue };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableShort(values[i], useInterpreter);
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUnaryIncrementNullableUShortTest(bool useInterpreter)
    {
        ushort?[] values = new ushort?[] { null, 0, 1, ushort.MaxValue };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableUShort(values[i], useInterpreter);
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUnaryIncrementNullableIntTest(bool useInterpreter)
    {
        int?[] values = new int?[] { null, 0, 1, -1, int.MinValue, int.MaxValue };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableInt(values[i], useInterpreter);
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUnaryIncrementNullableUIntTest(bool useInterpreter)
    {
        uint?[] values = new uint?[] { null, 0, 1, uint.MaxValue };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableUInt(values[i], useInterpreter);
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUnaryIncrementNullableLongTest(bool useInterpreter)
    {
        long?[] values = new long?[] { null, 0, 1, -1, long.MinValue, long.MaxValue };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableLong(values[i], useInterpreter);
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUnaryIncrementNullableULongTest(bool useInterpreter)
    {
        ulong?[] values = new ulong?[] { null, 0, 1, ulong.MaxValue };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableULong(values[i], useInterpreter);
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckIncrementFloatTest(bool useInterpreter)
    {
        float?[] values = new float?[] { null, 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableFloat(values[i], useInterpreter);
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckIncrementDoubleTest(bool useInterpreter)
    {
        double?[] values = new double?[] { null, 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN };
        for (int i = 0; i < values.Length; i++)
        {
            VerifyIncrementNullableDouble(values[i], useInterpreter);
        }
    }

    #endregion

    // NOTE(review): exercises Expression.Decrement although this class tests Increment —
    // presumably intentional mirroring of the non-nullable suite; confirm.
    [Theory, MemberData(nameof(NonArithmeticObjects), false)]
    public static void DecrementNonArithmetic(object value)
    {
        Expression ex = Expression.Constant(value, typeof(Nullable<>).MakeGenericType(value.GetType()));
        Assert.Throws<InvalidOperationException>(() => Expression.Decrement(ex));
    }

    [Theory, PerCompilationType(nameof(IncrementableValues), true)]
    public static void CustomOpIncrement(Incrementable? operand, Incrementable? expected, bool useInterpreter)
    {
        Func<Incrementable?> func = Expression.Lambda<Func<Incrementable?>>(
            Expression.Increment(Expression.Constant(operand, typeof(Incrementable?)))).Compile(useInterpreter);
        Assert.Equal(expected, func());
    }

    [Theory, PerCompilationType(nameof(IncrementableWhenNullableValues))]
    public static void NonLiftedNullableOpIncrement(
        IncrementableWhenNullable? operand, IncrementableWhenNullable? expected, bool useInterpreter)
    {
        Func<IncrementableWhenNullable?> func = Expression.Lambda<Func<IncrementableWhenNullable?>>(
            Expression.Increment(Expression.Constant(operand, typeof(IncrementableWhenNullable?)))).Compile(useInterpreter);
        Assert.Equal(expected, func());
    }

    [Theory, PerCompilationType(nameof(DoublyIncrementedIncrementableValues), true)]
    public static void UserDefinedOpIncrement(Incrementable? operand, Incrementable? expected, bool useInterpreter)
    {
        MethodInfo method = typeof(IncrementDecrementTests).GetMethod(nameof(DoublyIncrement));
        Func<Incrementable?> func = Expression.Lambda<Func<Incrementable?>>(
            Expression.Increment(Expression.Constant(operand, typeof(Incrementable?)), method)).Compile(useInterpreter);
        Assert.Equal(expected, func());
    }

    [Theory, PerCompilationType(nameof(DoublyIncrementedInt32s), true)]
    public static void UserDefinedOpIncrementArithmeticType(int? operand, int? expected, bool useInterpreter)
    {
        MethodInfo method = typeof(IncrementDecrementTests).GetMethod(nameof(DoublyIncrementInt32));
        Func<int?> func = Expression.Lambda<Func<int?>>(
            Expression.Increment(Expression.Constant(operand, typeof(int?)), method)).Compile(useInterpreter);
        Assert.Equal(expected, func());
    }

    #region Test verifiers

    // Each verifier compiles Expression.Increment over a constant and checks the result against the
    // C# '++' applied to the nullable local: null propagates to null, and for integral types the
    // expected value wraps via the unchecked cast.

    private static void VerifyIncrementNullableShort(short? value, bool useInterpreter)
    {
        Expression<Func<short?>> e =
            Expression.Lambda<Func<short?>>(
                Expression.Increment(Expression.Constant(value, typeof(short?))),
                Enumerable.Empty<ParameterExpression>());
        Func<short?> f = e.Compile(useInterpreter);
        Assert.Equal(unchecked((short?)(++value)), f());
    }

    private static void VerifyIncrementNullableUShort(ushort? value, bool useInterpreter)
    {
        Expression<Func<ushort?>> e =
            Expression.Lambda<Func<ushort?>>(
                Expression.Increment(Expression.Constant(value, typeof(ushort?))),
                Enumerable.Empty<ParameterExpression>());
        Func<ushort?> f = e.Compile(useInterpreter);
        Assert.Equal(unchecked((ushort?)(++value)), f());
    }

    private static void VerifyIncrementNullableInt(int? value, bool useInterpreter)
    {
        Expression<Func<int?>> e =
            Expression.Lambda<Func<int?>>(
                Expression.Increment(Expression.Constant(value, typeof(int?))),
                Enumerable.Empty<ParameterExpression>());
        Func<int?> f = e.Compile(useInterpreter);
        Assert.Equal(unchecked((int?)(++value)), f());
    }

    private static void VerifyIncrementNullableUInt(uint? value, bool useInterpreter)
    {
        Expression<Func<uint?>> e =
            Expression.Lambda<Func<uint?>>(
                Expression.Increment(Expression.Constant(value, typeof(uint?))),
                Enumerable.Empty<ParameterExpression>());
        Func<uint?> f = e.Compile(useInterpreter);
        Assert.Equal(unchecked((uint?)(++value)), f());
    }

    private static void VerifyIncrementNullableLong(long? value, bool useInterpreter)
    {
        Expression<Func<long?>> e =
            Expression.Lambda<Func<long?>>(
                Expression.Increment(Expression.Constant(value, typeof(long?))),
                Enumerable.Empty<ParameterExpression>());
        Func<long?> f = e.Compile(useInterpreter);
        Assert.Equal(unchecked((long?)(++value)), f());
    }

    private static void VerifyIncrementNullableULong(ulong? value, bool useInterpreter)
    {
        Expression<Func<ulong?>> e =
            Expression.Lambda<Func<ulong?>>(
                Expression.Increment(Expression.Constant(value, typeof(ulong?))),
                Enumerable.Empty<ParameterExpression>());
        Func<ulong?> f = e.Compile(useInterpreter);
        Assert.Equal(unchecked((ulong?)(++value)), f());
    }

    private static void VerifyIncrementNullableFloat(float? value, bool useInterpreter)
    {
        Expression<Func<float?>> e =
            Expression.Lambda<Func<float?>>(
                Expression.Increment(Expression.Constant(value, typeof(float?))),
                Enumerable.Empty<ParameterExpression>());
        Func<float?> f = e.Compile(useInterpreter);
        Assert.Equal((float?)(++value), f());
    }

    private static void VerifyIncrementNullableDouble(double? value, bool useInterpreter)
    {
        Expression<Func<double?>> e =
            Expression.Lambda<Func<double?>>(
                Expression.Increment(Expression.Constant(value, typeof(double?))),
                Enumerable.Empty<ParameterExpression>());
        Func<double?> f = e.Compile(useInterpreter);
        Assert.Equal((double?)(++value), f());
    }

    #endregion
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Reactive;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using Avalonia.Data;
using Avalonia.Reactive;
namespace Avalonia
{
/// <summary>
/// Provides extension methods for <see cref="AvaloniaObject"/> and related classes.
/// </summary>
public static class AvaloniaObjectExtensions
{
/// <summary>
/// Converts an <see cref="IObservable{T}"/> to an <see cref="IBinding"/>.
/// </summary>
/// <typeparam name="T">The type produced by the observable.</typeparam>
/// <param name="source">The observable</param>
/// <returns>An <see cref="IBinding"/>.</returns>
public static IBinding ToBinding<T>(this IObservable<T> source)
{
    // Box each value to object so the untyped binding adaptor can carry it.
    var boxed = source.Select(x => (object)x);
    return new BindingAdaptor(boxed);
}
/// <summary>
/// Gets an observable for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <returns>
/// An observable which fires immediately with the current value of the property on the
/// object and subsequently each time the property value changes.
/// </returns>
/// <remarks>
/// The subscription to <paramref name="o"/> is created using a weak reference.
/// </remarks>
public static IObservable<object> GetObservable(this IAvaloniaObject o, AvaloniaProperty property)
{
    // Validate eagerly so a null argument faults here rather than inside the observable.
    Contract.Requires<ArgumentNullException>(o != null);
    Contract.Requires<ArgumentNullException>(property != null);
    return new AvaloniaPropertyObservable<object>(o, property);
}
/// <summary>
/// Gets an observable for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <typeparam name="T">The property type.</typeparam>
/// <param name="property">The property.</param>
/// <returns>
/// An observable which fires immediately with the current value of the property on the
/// object and subsequently each time the property value changes.
/// </returns>
/// <remarks>
/// The subscription to <paramref name="o"/> is created using a weak reference.
/// </remarks>
public static IObservable<T> GetObservable<T>(this IAvaloniaObject o, AvaloniaProperty<T> property)
{
    // Typed counterpart of the non-generic overload; same eager validation.
    Contract.Requires<ArgumentNullException>(o != null);
    Contract.Requires<ArgumentNullException>(property != null);
    return new AvaloniaPropertyObservable<T>(o, property);
}
/// <summary>
/// Gets an observable that listens for property changed events for an
/// <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <returns>
/// An observable which when subscribed pushes the property changed event args
/// each time a <see cref="IAvaloniaObject.PropertyChanged"/> event is raised
/// for the specified property.
/// </returns>
public static IObservable<AvaloniaPropertyChangedEventArgs> GetPropertyChangedObservable(
    this IAvaloniaObject o,
    AvaloniaProperty property)
{
    Contract.Requires<ArgumentNullException>(o != null);
    Contract.Requires<ArgumentNullException>(property != null);
    // Unlike GetObservable, this does not emit the current value on subscription —
    // only subsequent change notifications.
    return new AvaloniaPropertyChangedObservable(o, property);
}
/// <summary>
/// Gets a subject for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <param name="priority">
/// The priority with which binding values are written to the object.
/// </param>
/// <returns>
/// An <see cref="ISubject{Object}"/> which can be used for two-way binding to/from the
/// property.
/// </returns>
public static ISubject<object> GetSubject(
    this IAvaloniaObject o,
    AvaloniaProperty property,
    BindingPriority priority = BindingPriority.LocalValue)
{
    // Writes push values into the property at the requested priority...
    var input = Observer.Create<object>(x => o.SetValue(property, x, priority));
    // ...while reads observe the property's current and subsequent values.
    var output = o.GetObservable(property);
    return Subject.Create<object>(input, output);
}
/// <summary>
/// Gets a subject for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <typeparam name="T">The property type.</typeparam>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <param name="priority">
/// The priority with which binding values are written to the object.
/// </param>
/// <returns>
/// An <see cref="ISubject{T}"/> which can be used for two-way binding to/from the
/// property.
/// </returns>
public static ISubject<T> GetSubject<T>(
    this IAvaloniaObject o,
    AvaloniaProperty<T> property,
    BindingPriority priority = BindingPriority.LocalValue)
{
    // Combines a write-side observer (SetValue at the given priority) with the
    // read-side property observable into a single two-way subject.
    return Subject.Create<T>(
        Observer.Create<T>(x => o.SetValue(property, x, priority)),
        o.GetObservable(property));
}
/// <summary>
/// Binds a property on an <see cref="IAvaloniaObject"/> to an <see cref="IBinding"/>.
/// </summary>
/// <param name="target">The object.</param>
/// <param name="property">The property to bind.</param>
/// <param name="binding">The binding.</param>
/// <param name="anchor">
/// An optional anchor from which to locate required context. When binding to objects that
/// are not in the logical tree, certain types of binding need an anchor into the tree in
/// order to locate named controls or resources. The <paramref name="anchor"/> parameter
/// can be used to provide this context.
/// </param>
/// <returns>An <see cref="IDisposable"/> which can be used to cancel the binding.</returns>
public static IDisposable Bind(
    this IAvaloniaObject target,
    AvaloniaProperty property,
    IBinding binding,
    object anchor = null)
{
    Contract.Requires<ArgumentNullException>(target != null);
    Contract.Requires<ArgumentNullException>(property != null);
    Contract.Requires<ArgumentNullException>(binding != null);

    // Direct properties may opt in to data validation via their metadata; anything else defaults to off.
    var metadata = property.GetMetadata(target.GetType()) as IDirectPropertyMetadata;

    var result = binding.Initiate(
        target,
        property,
        anchor,
        metadata?.EnableDataValidation ?? false);

    // Initiate may return null, meaning the binding produced nothing to apply;
    // in that case there is nothing to dispose either.
    if (result != null)
    {
        return BindingOperations.Apply(target, property, result, anchor);
    }
    else
    {
        return Disposable.Empty;
    }
}
/// <summary>
/// Subscribes to property changed notifications for changes that originate from a
/// <typeparamref name="TTarget"/>.
/// </summary>
/// <typeparam name="TTarget">The type of the property change sender.</typeparam>
/// <param name="observable">The property changed observable.</param>
/// <param name="action">
/// The method to call. The parameters are the sender and the event args.
/// </param>
/// <returns>A disposable that can be used to terminate the subscription.</returns>
public static IDisposable AddClassHandler<TTarget>(
    this IObservable<AvaloniaPropertyChangedEventArgs> observable,
    Action<TTarget, AvaloniaPropertyChangedEventArgs> action)
    where TTarget : AvaloniaObject
{
    return observable.Subscribe(e =>
    {
        // Forward only the events whose sender is of the requested type.
        var sender = e.Sender as TTarget;

        if (sender != null)
        {
            action(sender, e);
        }
    });
}
/// <summary>
/// Subscribes to property changed notifications for changes that originate from a
/// <typeparamref name="TTarget"/>.
/// </summary>
/// <typeparam name="TTarget">The type of the property change sender.</typeparam>
/// <param name="observable">The property changed observable.</param>
/// <param name="handler">Given a TTarget, returns the handler.</param>
/// <returns>A disposable that can be used to terminate the subscription.</returns>
public static IDisposable AddClassHandler<TTarget>(
    this IObservable<AvaloniaPropertyChangedEventArgs> observable,
    Func<TTarget, Action<AvaloniaPropertyChangedEventArgs>> handler)
    where TTarget : class
{
    // Per-event type filtering and dispatch is performed by SubscribeAdapter.
    return observable.Subscribe(e => SubscribeAdapter<TTarget>(e, handler));
}
/// <summary>
/// Gets a description of a property that can be used in observables.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <returns>The description, in the form "TypeName.PropertyName".</returns>
private static string GetDescription(IAvaloniaObject o, AvaloniaProperty property)
{
    return string.Format("{0}.{1}", o.GetType().Name, property.Name);
}
/// <summary>
/// Observer method for <see cref="AddClassHandler{TTarget}(IObservable{AvaloniaPropertyChangedEventArgs},
/// Func{TTarget, Action{AvaloniaPropertyChangedEventArgs}})"/>.
/// </summary>
/// <typeparam name="TTarget">The sender type to accept.</typeparam>
/// <param name="e">The event args.</param>
/// <param name="handler">Given a TTarget, returns the handler.</param>
private static void SubscribeAdapter<TTarget>(
    AvaloniaPropertyChangedEventArgs e,
    Func<TTarget, Action<AvaloniaPropertyChangedEventArgs>> handler)
    where TTarget : class
{
    // Ignore events from senders of other types.
    var target = e.Sender as TTarget;

    if (target == null)
    {
        return;
    }

    handler(target)(e);
}
/// <summary>
/// Adapts a plain <see cref="IObservable{T}"/> of boxed values to the
/// <see cref="IBinding"/> interface as a one-way binding.
/// </summary>
private class BindingAdaptor : IBinding
{
    private readonly IObservable<object> _source;

    public BindingAdaptor(IObservable<object> source)
    {
        _source = source;
    }

    /// <summary>
    /// Starts the binding; target, property, anchor and validation flag are ignored
    /// because the adapted observable is the sole value source.
    /// </summary>
    public InstancedBinding Initiate(
        IAvaloniaObject target,
        AvaloniaProperty targetProperty,
        object anchor = null,
        bool enableDataValidation = false)
    {
        return InstancedBinding.OneWay(_source);
    }
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using StructureMap.Configuration.DSL;
using StructureMap.Construction;
using StructureMap.Diagnostics;
using StructureMap.Exceptions;
using StructureMap.Graph;
using StructureMap.Interceptors;
using StructureMap.Pipeline;
using StructureMap.Query;
using StructureMap.TypeRules;
namespace StructureMap
{
public class Container : IContainer
{
private InterceptorLibrary _interceptorLibrary;
private PipelineGraph _pipelineGraph;
private PluginGraph _pluginGraph;
/// <summary>
/// Creates a Container from inline configuration supplied by the action.
/// </summary>
/// <param name="action">Configures the new Container.</param>
public Container(Action<ConfigurationExpression> action)
{
    var expression = new ConfigurationExpression();
    action(expression);
    // PluginGraph is part of the Semantic Model of StructureMap:
    // the complete set of instance and type definitions.
    PluginGraph graph = expression.BuildGraph();
    // Take the PluginGraph object graph and
    // dynamically emit classes to build the
    // configured objects
    construct(graph);
}
/// <summary>
/// Creates a Container from a single pre-built Registry.
/// </summary>
public Container(Registry registry)
    : this(registry.Build())
{
}
/// <summary>
/// Creates an empty Container with no configuration.
/// </summary>
public Container()
    : this(new PluginGraph())
{
}
/// <summary>
/// Constructor to create a Container
/// </summary>
/// <param name="pluginGraph">PluginGraph containing the instance and type definitions
/// for the Container</param>
public Container(PluginGraph pluginGraph)
{
    construct(pluginGraph);
}
// Hook for subclasses to supply a factory when no family exists for a requested type.
protected MissingFactoryFunction onMissingFactory { set { _pipelineGraph.OnMissingFactory = value; } }
// The semantic model (instance/type definitions) this Container was built from.
public PluginGraph PluginGraph { get { return _pluginGraph; } }
#region IContainer Members
/// <summary>
/// Provides queryable access to the configured PluginType's and Instances of this Container
/// </summary>
public IModel Model { get { return new Model(_pipelineGraph, this); } }
/// <summary>
/// Creates or finds the named instance of T.
/// </summary>
/// <typeparam name="T">The requested plugin type.</typeparam>
/// <param name="instanceKey">The name of the configured instance.</param>
/// <returns>The resolved instance, cast to T.</returns>
public T GetInstance<T>(string instanceKey)
{
    object resolved = GetInstance(typeof (T), instanceKey);
    return (T) resolved;
}
/// <summary>
/// Creates a new instance of the requested type T using the supplied Instance. Mostly used internally.
/// </summary>
/// <param name="instance">The instance definition to build from.</param>
/// <returns>The built object, cast to T.</returns>
public T GetInstance<T>(Instance instance)
{
    object built = GetInstance(typeof (T), instance);
    return (T) built;
}
/// <summary>
/// Gets the default instance of the PLUGINTYPE using the explicitly configured arguments from "args".
/// </summary>
/// <param name="args">Explicit constructor/setter arguments to apply.</param>
/// <returns>The built object, cast to PLUGINTYPE.</returns>
public PLUGINTYPE GetInstance<PLUGINTYPE>(ExplicitArguments args)
{
    object built = GetInstance(typeof (PLUGINTYPE), args);
    return (PLUGINTYPE) built;
}
/// <summary>
/// Creates or finds the named instance of T using the explicitly configured arguments from "args".
/// </summary>
/// <param name="args">Explicit constructor/setter arguments to apply.</param>
/// <param name="name">The name of the configured instance.</param>
public T GetInstance<T>(ExplicitArguments args, string name)
{
    Type pluginType = typeof (T);
    Instance namedInstance = _pipelineGraph.ForType(pluginType).FindInstance(name);
    return (T) buildInstanceWithArgs(pluginType, namedInstance, args, name);
}
/// <summary>
/// Gets the default instance of the pluginType using the explicitly configured arguments from the "args"
/// </summary>
/// <param name="pluginType">The requested plugin type.</param>
/// <param name="args">Explicit constructor/setter arguments to apply.</param>
/// <returns>The built object.</returns>
public object GetInstance(Type pluginType, ExplicitArguments args)
{
    Instance defaultInstance = _pipelineGraph.GetDefault(pluginType);
    return buildInstanceWithArgs(pluginType, defaultInstance, args, Plugin.DEFAULT);
}
/// <summary>
/// Gets all configured instances of the type using explicitly configured arguments from the "args"
/// </summary>
/// <param name="type">The requested plugin type.</param>
/// <param name="args">Explicit arguments registered as session defaults before building.</param>
/// <returns>An ArrayList of every configured instance of the type.</returns>
public IList GetAllInstances(Type type, ExplicitArguments args)
{
    BuildSession session = withNewSession(Plugin.DEFAULT);
    args.RegisterDefaults(session);
    Array instances = session.CreateInstanceArray(type, null);
    return new ArrayList(instances);
}
/// <summary>
/// Gets all configured instances of type T using explicitly configured arguments from the "args"
/// </summary>
public IList<T> GetAllInstances<T>(ExplicitArguments args)
{
    BuildSession session = withNewSession(Plugin.DEFAULT);
    args.RegisterDefaults(session);
    return getListOfTypeWithSession<T>(session);
}
/// <summary>
/// Creates or finds the default instance of type T
/// </summary>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
public T GetInstance<T>()
{
    return (T) GetInstance(typeof (T));
}
// Legacy alias for GetInstance<T>(); kept for backwards compatibility.
[Obsolete("Please use GetInstance<T>() instead.")]
public T FillDependencies<T>()
{
    return (T) FillDependencies(typeof (T));
}
/// <summary>
/// Creates or resolves all registered instances of type T
/// </summary>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
public IList<T> GetAllInstances<T>()
{
    BuildSession session = withNewSession(Plugin.DEFAULT);
    return getListOfTypeWithSession<T>(session);
}
/// <summary>
/// Sets the default instance for all PluginType's to the designated Profile.
/// </summary>
/// <param name="profile"></param>
public void SetDefaultsToProfile(string profile)
{
    _pipelineGraph.CurrentProfile = profile;
}
/// <summary>
/// Creates or finds the named instance of the pluginType
/// </summary>
/// <param name="pluginType"></param>
/// <param name="instanceKey"></param>
/// <returns></returns>
public object GetInstance(Type pluginType, string instanceKey)
{
    return withNewSession(instanceKey).CreateInstance(pluginType, instanceKey);
}
/// <summary>
/// Creates or finds the named instance of the pluginType. Returns null if the named instance is not known to the container.
/// </summary>
/// <param name="pluginType">The requested plugin type.</param>
/// <param name="instanceKey">The name of the configured instance.</param>
/// <returns>The resolved object, or null when no such named instance is configured.</returns>
public object TryGetInstance(Type pluginType, string instanceKey)
{
    if (_pipelineGraph.HasInstance(pluginType, instanceKey))
    {
        return GetInstance(pluginType, instanceKey);
    }

    return null;
}
/// <summary>
/// Creates or finds the default instance of the pluginType. Returns null if the pluginType is not known to the container.
/// </summary>
/// <param name="pluginType">The requested plugin type.</param>
/// <returns>The resolved object, or null when no default is configured.</returns>
public object TryGetInstance(Type pluginType)
{
    if (_pipelineGraph.HasDefaultForPluginType(pluginType))
    {
        return GetInstance(pluginType);
    }

    return null;
}
/// <summary>
/// Creates or finds the default instance of type T. Returns the default value of T if it is not known to the container.
/// </summary>
/// <typeparam name="T">The requested plugin type.</typeparam>
public T TryGetInstance<T>()
{
    object resolved = TryGetInstance(typeof (T));
    return resolved == null ? default(T) : (T) resolved;
}
/// <summary>
/// The "BuildUp" method takes in an already constructed object
/// and uses Setter Injection to push in configured dependencies
/// of that object
/// </summary>
/// <param name="target">The existing object to inject setters into.</param>
public void BuildUp(object target)
{
    Type concreteType = target.GetType();

    // Use the configured default for this concrete type when available;
    // otherwise fall back to an ad-hoc ConfiguredInstance.
    var instance = _pipelineGraph.GetDefault(concreteType) as IConfiguredInstance;
    if (instance == null)
    {
        instance = new ConfiguredInstance(concreteType);
    }

    IInstanceBuilder builder = PluginCache.FindBuilder(concreteType);
    var arguments = new Arguments(instance, withNewSession(Plugin.DEFAULT));
    builder.BuildUp(arguments, target);
}
/// <summary>
/// Creates or finds the named instance of type T. Returns the default value of T if the named instance is not known to the container.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
public T TryGetInstance<T>(string instanceKey)
{
    return (T) (TryGetInstance(typeof (T), instanceKey) ?? default(T));
}
/// <summary>
/// Creates or finds the default instance of the pluginType
/// </summary>
/// <param name="pluginType"></param>
/// <returns></returns>
public object GetInstance(Type pluginType)
{
    return withNewSession(Plugin.DEFAULT).CreateInstance(pluginType);
}
/// <summary>
/// Creates a new instance of the requested type using the supplied Instance. Mostly used internally
/// </summary>
/// <param name="pluginType"></param>
/// <param name="instance"></param>
/// <returns></returns>
public object GetInstance(Type pluginType, Instance instance)
{
    return withNewSession(instance.Name).CreateInstance(pluginType, instance);
}
// Replaces the default instance for the pluginType in the pipeline graph.
public void SetDefault(Type pluginType, Instance instance)
{
    _pipelineGraph.SetDefault(pluginType, instance);
}
/// <summary>
/// Legacy auto-wiring entry point: builds the concrete type with its dependencies filled.
/// Throws StructureMapException 230 when the type is not concrete or cannot be auto-filled.
/// </summary>
/// <param name="type">The concrete type to build.</param>
[Obsolete("Please use GetInstance(Type) instead")]
public object FillDependencies(Type type)
{
    // Both failure modes raise the same configuration error (230); the
    // short-circuit keeps the original check order (concreteness first).
    if (!type.IsConcrete() || !new Plugin(type).CanBeAutoFilled)
    {
        throw new StructureMapException(230, type.FullName);
    }

    return GetInstance(type);
}
/// <summary>
/// Creates or resolves all registered instances of the pluginType.
/// </summary>
/// <param name="pluginType">The requested plugin type.</param>
/// <returns>An ArrayList of every configured instance of the type.</returns>
public IList GetAllInstances(Type pluginType)
{
    BuildSession session = withNewSession(Plugin.DEFAULT);
    return new ArrayList(session.CreateInstanceArray(pluginType, null));
}
/// <summary>
/// Used to add additional configuration to a Container *after* the initialization.
/// </summary>
/// <param name="configure"></param>
public void Configure(Action<ConfigurationExpression> configure)
{
    // NOTE(review): locking on 'this' exposes the lock to external callers and
    // risks deadlock if they also lock the container; a private readonly lock
    // object would be safer - confirm no callers rely on this lock before changing.
    lock (this)
    {
        var registry = new ConfigurationExpression();
        configure(registry);
        PluginGraph graph = registry.BuildGraph();
        // Fail fast if the additional configuration produced errors.
        graph.Log.AssertFailures();
        // Merge the new configuration into the live interceptor and pipeline graphs.
        _interceptorLibrary.ImportFrom(graph.InterceptorLibrary);
        _pipelineGraph.ImportFrom(graph);
    }
}
/// <summary>
/// Returns a report detailing the complete configuration of all PluginTypes and Instances.
/// </summary>
/// <returns>The configuration report as text.</returns>
public string WhatDoIHave()
{
    return new WhatDoIHaveWriter(_pipelineGraph).GetText();
}
/// <summary>
/// Starts a request for an instance or instances with explicitly configured arguments. Specifies that any dependency
/// of type T should be "arg"
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="arg"></param>
/// <returns></returns>
public ExplicitArgsExpression With<T>(T arg)
{
    return new ExplicitArgsExpression(this).With(arg);
}
/// <summary>
/// Starts a request for an instance or instances with explicitly configured arguments. Specifies that any dependency
/// of type T should be "arg"
/// </summary>
/// <param name="pluginType"></param>
/// <param name="arg"></param>
/// <returns></returns>
public ExplicitArgsExpression With(Type pluginType, object arg)
{
    return new ExplicitArgsExpression(this).With(pluginType, arg);
}
/// <summary>
/// Starts a request for an instance or instances with explicitly configured arguments. Specifies that any dependency or primitive argument
/// with the designated name should be the next value.
/// </summary>
/// <param name="argName"></param>
/// <returns></returns>
public IExplicitProperty With(string argName)
{
    return new ExplicitArgsExpression(this).With(argName);
}
/// <summary>
/// Use with caution! Does a full environment test of the configuration of this container. Will try to create every configured
/// instance and afterward calls any methods marked with the [ValidationMethod] attribute.
/// Throws StructureMapConfigurationException when any validation fails.
/// </summary>
public void AssertConfigurationIsValid()
{
    var session = new ValidationBuildSession(_pipelineGraph, _interceptorLibrary);
    session.PerformValidations();

    if (session.Success)
    {
        return;
    }

    throw new StructureMapConfigurationException(session.BuildErrorMessages());
}
/// <summary>
/// Removes all configured instances of type T from the Container. Use with caution!
/// </summary>
/// <typeparam name="T"></typeparam>
public void EjectAllInstancesOf<T>()
{
    _pipelineGraph.EjectAllInstancesOf<T>();
}
/// <summary>
/// Convenience method to request an object using an Open Generic
/// Type and its parameter Types
/// </summary>
/// <param name="templateType"></param>
/// <returns></returns>
/// <example>
/// IFlattener flattener1 = container.ForGenericType(typeof (IFlattener<>))
/// .WithParameters(typeof (Address)).GetInstanceAs<IFlattener>();
/// </example>
public OpenGenericTypeExpression ForGenericType(Type templateType)
{
    return new OpenGenericTypeExpression(templateType, this);
}
/// <summary>
/// Shortcut syntax for using an object to find a service that handles
/// that type of object by using an open generic type
/// </summary>
/// <example>
/// IHandler handler = container.ForObject(shipment)
/// .GetClosedTypeOf(typeof (IHandler<>))
/// .As<IHandler>();
/// </example>
/// <param name="subject"></param>
/// <returns></returns>
public CloseGenericTypeExpression ForObject(object subject)
{
    return new CloseGenericTypeExpression(subject, this);
}
/// <summary>
/// Starts a "Nested" Container for atomic, isolated access
/// </summary>
/// <returns></returns>
public IContainer GetNestedContainer()
{
    // The nested container shares the interceptor library but gets its own
    // nested pipeline graph and a dispose path that does not eject objects.
    var container = new Container
    {
        _interceptorLibrary = _interceptorLibrary,
        _pipelineGraph = _pipelineGraph.ToNestedGraph(),
        _onDispose = nestedDispose
    };
    // Fixes a mild bug. The child container should inject itself
    container.Configure(x => x.For<IContainer>().Use(container));
    return container;
}
/// <summary>
/// Starts a new "Nested" Container for atomic, isolated service location. Opens
/// the designated profile as the nested container's default profile.
/// </summary>
/// <param name="profileName"></param>
/// <returns></returns>
public IContainer GetNestedContainer(string profileName)
{
    IContainer container = GetNestedContainer();
    container.SetDefaultsToProfile(profileName);
    return container;
}
// Dispose strategy: full teardown for top-level containers, replaced with
// nestedDispose for nested containers (see GetNestedContainer).
private Action<Container> _onDispose = fullDispose;
public void Dispose()
{
    _onDispose(this);
}
// Top-level dispose: eject every cached object, then drop the pipeline graph.
private static void fullDispose(Container c)
{
    c.Model.AllInstances.Each(i => i.EjectObject());
    nestedDispose(c);
}
// Nested dispose: only the (nested) pipeline graph is released.
private static void nestedDispose(Container c)
{
    c._pipelineGraph.Dispose();
}
#endregion
/// <summary>
/// Injects the given object into a Container as the default for the designated
/// PLUGINTYPE. Mostly used for temporarily setting up return values of the Container
/// to introduce mocks or stubs during automated testing scenarios
/// </summary>
/// <typeparam name="PLUGINTYPE"></typeparam>
/// <param name="instance"></param>
public void Inject<PLUGINTYPE>(PLUGINTYPE instance)
{
    Configure(x => x.For<PLUGINTYPE>().Use(instance));
}
// Injects the given object as a named instance of PLUGINTYPE.
public void Inject<PLUGINTYPE>(string name, PLUGINTYPE value)
{
    Configure(x => x.For<PLUGINTYPE>().Use(value).Named(name));
}
/// <summary>
/// Injects the given object into a Container as the default for the designated
/// pluginType. Mostly used for temporarily setting up return values of the Container
/// to introduce mocks or stubs during automated testing scenarios
/// </summary>
/// <param name="pluginType"></param>
/// <param name="object"></param>
public void Inject(Type pluginType, object @object)
{
    Configure(x => x.For(pluginType).Use(@object));
}
/// <summary>
/// Builds an instance of the pluginType, overlaying the explicit arguments on top of
/// the supplied instance definition (or an ad-hoc one for concrete types).
/// </summary>
/// <param name="pluginType">The requested plugin type.</param>
/// <param name="defaultInstance">The configured instance, possibly null.</param>
/// <param name="args">Explicit arguments registered as session defaults.</param>
/// <param name="requestedName">The name attached to the new build session.</param>
private object buildInstanceWithArgs(Type pluginType, Instance defaultInstance, ExplicitArguments args,
                                     string requestedName)
{
    Instance instance = defaultInstance;

    // Concrete types can be built without explicit configuration.
    if (instance == null && pluginType.IsConcrete())
    {
        instance = new ConfiguredInstance(pluginType);
    }

    // Constructor-based instances get the explicit args overlaid directly.
    var ctorInstance = instance as ConstructorInstance;
    if (ctorInstance != null)
    {
        instance = ctorInstance.Override(args);
    }

    BuildSession session = withNewSession(requestedName);
    args.RegisterDefaults(session);
    return session.CreateInstance(pluginType, instance);
}
/// <summary>
/// Starts a request for an instance or instances, letting the caller configure
/// all explicit arguments through the supplied action.
/// </summary>
/// <param name="action">Configures the new expression.</param>
public ExplicitArgsExpression With(Action<ExplicitArgsExpression> action)
{
    var argsExpression = new ExplicitArgsExpression(this);
    action(argsExpression);
    return argsExpression;
}
// Shared constructor body: seals the graph, registers the container itself and
// the Func<> lazy-resolution template, then builds the pipeline graph.
// The order is significant: registrations must happen before AssertFailures
// and before PipelineGraph construction.
private void construct(PluginGraph pluginGraph)
{
    _interceptorLibrary = pluginGraph.InterceptorLibrary;
    if (!pluginGraph.IsSealed)
    {
        pluginGraph.Seal();
    }
    _pluginGraph = pluginGraph;
    // The container injects itself so IContainer can be a dependency.
    var thisInstance = new ObjectInstance(this);
    _pluginGraph.FindFamily(typeof (IContainer)).AddInstance(thisInstance);
    _pluginGraph.ProfileManager.SetDefault(typeof (IContainer), thisInstance);
    // Func<T> resolves lazily through LazyInstance<T>.
    var funcInstance = new FactoryTemplate(typeof (LazyInstance<>));
    _pluginGraph.FindFamily(typeof(Func<>)).AddInstance(funcInstance);
    _pluginGraph.ProfileManager.SetDefault(typeof(Func<>), funcInstance);
    // Surface any configuration errors before the container becomes usable.
    pluginGraph.Log.AssertFailures();
    _pipelineGraph = new PipelineGraph(pluginGraph);
}
// Materializes every instance of T the session can build into a typed list.
[Obsolete("delegate to something cleaner in BuildSession")]
private IList<T> getListOfTypeWithSession<T>(BuildSession session)
{
    var results = new List<T>();

    // CreateInstanceArray returns a weakly-typed Array; the foreach cast
    // converts each element back to T.
    foreach (T created in session.CreateInstanceArray(typeof (T), null))
    {
        results.Add(created);
    }

    return results;
}
// Creates a fresh build session over the current pipeline graph,
// tagged with the requested instance name.
private BuildSession withNewSession(string name)
{
    var session = new BuildSession(_pipelineGraph, _interceptorLibrary);
    session.RequestedName = name;
    return session;
}
/// <summary>
/// Sets the default instance for the PluginType
/// </summary>
/// <param name="pluginType"></param>
/// <param name="instance"></param>
public void Inject(Type pluginType, Instance instance)
{
    // Equivalent to SetDefault(Type, Instance); writes straight into the pipeline graph.
    _pipelineGraph.SetDefault(pluginType, instance);
}
#region Nested type: GetInstanceAsExpression
// Terminal step of the ForGenericType fluent interface.
public interface GetInstanceAsExpression
{
    // Resolves the closed generic type and casts the result to T.
    T GetInstanceAs<T>();
}
#endregion
#region Nested type: OpenGenericTypeExpression
// Fluent helper for resolving an open generic type closed over
// caller-supplied type parameters (see Container.ForGenericType).
public class OpenGenericTypeExpression : GetInstanceAsExpression
{
    private readonly Container _container;
    private readonly Type _templateType;
    // Closed generic type; set by WithParameters. NOTE(review): GetInstanceAs
    // called before WithParameters passes null to GetInstance - confirm intended.
    private Type _pluginType;
    public OpenGenericTypeExpression(Type templateType, Container container)
    {
        // Error 285: the template must be an open generic type.
        if (!templateType.IsOpenGeneric())
        {
            throw new StructureMapException(285);
        }
        _templateType = templateType;
        _container = container;
    }
    #region GetInstanceAsExpression Members
    public T GetInstanceAs<T>()
    {
        return (T) _container.GetInstance(_pluginType);
    }
    #endregion
    // Closes the template type over the supplied parameter types.
    public GetInstanceAsExpression WithParameters(params Type[] parameterTypes)
    {
        _pluginType = _templateType.MakeGenericType(parameterTypes);
        return this;
    }
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEditor.Experimental.GraphView;
using UnityEngine;
using UnityEngine.UIElements;
using UnityEngine.Experimental.VFX;
using UnityEditor.VFX.UIElements;
namespace UnityEditor.VFX.UI
{
// Editor UI for a uniform operator: a single popup that switches the operand
// type of the whole operator at once.
class VFXUniformOperatorEdit<T, U> : VisualElement, IControlledElement<T> where U : VFXOperatorDynamicOperand, IVFXOperatorUniform where T : VFXUniformOperatorController<U>
{
    // Popup label showing the current operand type; click opens the type menu.
    Label m_TypePopup;
    public VFXUniformOperatorEdit()
    {
        this.AddStyleSheetPathWithSkinVariant("VFXControls");
        AddToClassList("VFXUniformOperatorEdit");
        m_TypePopup = new Label();
        m_TypePopup.AddToClassList("PopupButton");
        m_TypePopup.AddManipulator(new DownClickable(() => OnTypeMenu()));
        Add(m_TypePopup);
    }
    // Builds and shows the menu of valid operand types, checking the current one.
    void OnTypeMenu()
    {
        var op = controller.model;
        GenericMenu menu = new GenericMenu();
        var selectedType = op.GetOperandType();
        foreach (var type in op.validTypes)
        {
            menu.AddItem(EditorGUIUtility.TrTextContent(type.UserFriendlyName()), selectedType == type, OnChangeType, type);
        }
        menu.DropDown(m_TypePopup.worldBound);
    }
    // Menu callback: applies the chosen type to the model.
    void OnChangeType(object type)
    {
        var op = controller.model;
        op.SetOperandType((Type)type);
    }
    T m_Controller;
    Controller IControlledElement.controller
    {
        get { return m_Controller; }
    }
    // Swapping controllers re-registers this element as a change handler.
    public T controller
    {
        get { return m_Controller; }
        set
        {
            if (m_Controller != value)
            {
                if (m_Controller != null)
                {
                    m_Controller.UnregisterHandler(this);
                }
                m_Controller = value;
                if (m_Controller != null)
                {
                    m_Controller.RegisterHandler(this);
                }
            }
        }
    }
    // Refreshes the popup text when the controlled model changes.
    void IControlledElement.OnControllerChanged(ref ControllerChangedEvent e)
    {
        if (e.controller == controller)
        {
            m_TypePopup.text = controller.model.GetOperandType().UserFriendlyName();
        }
    }
}
// Editor UI base for multi-operand operators: a reorderable list with one row
// per operand, each row exposing a per-operand type popup.
class VFXMultiOperatorEdit<T, U> : VFXReorderableList, IControlledElement<T> where U : VFXOperatorNumeric, IVFXOperatorNumericUnified where T : VFXUnifiedOperatorControllerBase<U>
{
    T m_Controller;
    Controller IControlledElement.controller
    {
        get { return m_Controller; }
    }
    // Swapping controllers re-registers this element as a change handler.
    public T controller
    {
        get { return m_Controller; }
        set
        {
            if (m_Controller != value)
            {
                if (m_Controller != null)
                {
                    m_Controller.UnregisterHandler(this);
                }
                m_Controller = value;
                if (m_Controller != null)
                {
                    m_Controller.RegisterHandler(this);
                }
            }
        }
    }
    public VFXMultiOperatorEdit()
    {
    }
    // Index of the operand whose type menu is currently open; consumed by OnChangeType.
    int m_CurrentIndex = -1;
    // Builds the per-operand type menu. When the operator constrains some slots
    // to share a type (with scalar exceptions), only compatible types are offered.
    void OnTypeMenu(Label button, int index)
    {
        var op = controller.model;
        GenericMenu menu = new GenericMenu();
        var selectedType = op.GetOperandType(index);
        IVFXOperatorNumericUnifiedConstrained constraintInterface = op as IVFXOperatorNumericUnifiedConstrained;
        if (constraintInterface != null && constraintInterface.slotIndicesThatCanBeScalar.Contains(index))
        {
            // First same-type slot that cannot be scalar anchors the allowed types.
            VFXSlot otherSlotWithConstraint = op.inputSlots.Where((t, i) => constraintInterface.slotIndicesThatMustHaveSameType.Contains(i) && !constraintInterface.slotIndicesThatCanBeScalar.Contains(i)).FirstOrDefault();
            foreach (var type in op.validTypes)
            {
                if (otherSlotWithConstraint == null || otherSlotWithConstraint.property.type == type || VFXUnifiedConstraintOperatorController.GetMatchingScalar(otherSlotWithConstraint.property.type) == type)
                    menu.AddItem(EditorGUIUtility.TrTextContent(type.UserFriendlyName()), selectedType == type, OnChangeType, type);
            }
        }
        else
        {
            foreach (var type in op.validTypes)
            {
                menu.AddItem(EditorGUIUtility.TrTextContent(type.UserFriendlyName()), selectedType == type, OnChangeType, type);
            }
        }
        m_CurrentIndex = index;
        menu.DropDown(button.worldBound);
    }
    // Menu callback: applies the chosen type to the operand opened via OnTypeMenu,
    // then propagates it to constrained sibling slots where required.
    void OnChangeType(object type)
    {
        var op = controller.model;
        op.SetOperandType(m_CurrentIndex, (Type)type);
        IVFXOperatorNumericUnifiedConstrained constraintInterface = op as IVFXOperatorNumericUnifiedConstrained;
        if (constraintInterface != null)
        {
            if (!constraintInterface.slotIndicesThatCanBeScalar.Contains(m_CurrentIndex))
            {
                foreach (var index in constraintInterface.slotIndicesThatMustHaveSameType)
                {
                    // Skip slots already holding the matching scalar of the new type.
                    if (index != m_CurrentIndex && (!constraintInterface.slotIndicesThatCanBeScalar.Contains(index) || VFXUnifiedConstraintOperatorController.GetMatchingScalar((Type)type) != op.GetOperandType(index)))
                    {
                        op.SetOperandType(index, (Type)type);
                    }
                }
            }
        }
    }
    void IControlledElement.OnControllerChanged(ref ControllerChangedEvent e)
    {
        if (e.controller == controller)
        {
            SelfChange();
        }
    }
    // Guards against feedback loops while this control rebuilds itself.
    protected bool m_SelfChanging;
    // Synchronizes the row list with the model's operand count and contents.
    void SelfChange()
    {
        m_SelfChanging = true;
        var op = controller.model;
        int count = op.operandCount;
        while (itemCount < count)
        {
            OperandInfoBase item = CreateOperandInfo(itemCount);
            item.Set(op);
            AddItem(item);
        }
        while (itemCount > count)
        {
            RemoveItemAt(itemCount - 1);
        }
        for (int i = 0; i < count; ++i)
        {
            OperandInfoBase operand = ItemAt(i) as OperandInfoBase;
            operand.index = i; // The operand might have been changed by the drag
            operand.Set(op);
        }
        m_SelfChanging = false;
    }
    // Factory hook: subclasses return richer row types (see OperandInfo overrides).
    protected virtual OperandInfoBase CreateOperandInfo(int index)
    {
        return new OperandInfoBase(this, controller.model, index);
    }
    // One row of the list: shows the operand's type as a clickable popup.
    protected class OperandInfoBase : VisualElement
    {
        Label type;
        public VFXMultiOperatorEdit<T, U> m_Owner;
        public int index;
        public OperandInfoBase(VFXMultiOperatorEdit<T, U> owner, U op, int index)
        {
            this.AddStyleSheetPathWithSkinVariant("VFXControls");
            m_Owner = owner;
            type = new Label();
            this.index = index;
            type.AddToClassList("PopupButton");
            type.AddManipulator(new DownClickable(OnTypeMenu));
            Add(type);
        }
        void OnTypeMenu()
        {
            m_Owner.OnTypeMenu(type, index);
        }
        // Refreshes the row from the model.
        public virtual void Set(U op)
        {
            type.text = op.GetOperandType(index).UserFriendlyName();
        }
    }
}
// Non-reorderable operand editor for unified operators; each row additionally
// shows the input slot's name next to the type popup.
class VFXUnifiedOperatorEdit : VFXMultiOperatorEdit<VFXUnifiedOperatorController, VFXOperatorNumericUnified>
{
    public VFXUnifiedOperatorEdit()
    {
        // Operand set is fixed: no add/remove toolbar and no drag reordering.
        toolbar = false;
        reorderable = false;
    }
    protected override OperandInfoBase CreateOperandInfo(int index)
    {
        return new OperandInfo(this, controller.model, index);
    }
    // Row with a read-only slot-name label prepended to the base type popup.
    class OperandInfo : OperandInfoBase
    {
        Label label;
        public OperandInfo(VFXUnifiedOperatorEdit owner, VFXOperatorNumericUnified op, int index) : base(owner, op, index)
        {
            label = new Label();
            Insert(0, label);
        }
        public override void Set(VFXOperatorNumericUnified op)
        {
            base.Set(op);
            label.text = op.GetInputSlot(index).name;
        }
    }
}
// Operand editor for cascaded operators: operands can be added, removed,
// reordered, and renamed through an editable text field per row.
class VFXCascadedOperatorEdit : VFXMultiOperatorEdit<VFXCascadedOperatorController, VFXOperatorNumericCascadedUnified>
{
    // Mirrors a list drag into the model's operand order.
    protected override void ElementMoved(int movedIndex, int targetIndex)
    {
        base.ElementMoved(movedIndex, targetIndex);
        controller.model.OperandMoved(movedIndex, targetIndex);
    }
    public override void OnAdd()
    {
        controller.model.AddOperand();
    }
    public override bool CanRemove()
    {
        return controller.CanRemove();
    }
    public override void OnRemove(int index)
    {
        controller.RemoveOperand(index);
    }
    // Commits an edited operand name back to the model.
    void OnChangeLabel(string value, int index)
    {
        if (!m_SelfChanging)
        {
            var op = controller.model;
            if (value != op.GetOperandName(index)) // test mandatory because TextField might send ChangeEvent anytime
                op.SetOperandName(index, value);
        }
    }
    protected override OperandInfoBase CreateOperandInfo(int index)
    {
        return new OperandInfo(this, controller.model, index);
    }
    // Row with an editable name field; commits on blur or Enter.
    class OperandInfo : OperandInfoBase
    {
        TextField field;
        public OperandInfo(VFXCascadedOperatorEdit owner, VFXOperatorNumericCascadedUnified op, int index) : base(owner, op, index)
        {
            field = new TextField();
            field.Q("unity-text-input").RegisterCallback<BlurEvent>(OnChangeValue);
            field.Q("unity-text-input").RegisterCallback<KeyDownEvent>(OnKeyDown);
            Insert(0, field);
        }
        void OnKeyDown(KeyDownEvent e)
        {
            if (e.keyCode == KeyCode.KeypadEnter || e.keyCode == KeyCode.Return)
            {
                OnChangeValue(e);
            }
        }
        void OnChangeValue(EventBase evt)
        {
            (m_Owner as VFXCascadedOperatorEdit).OnChangeLabel(field.value, index);
        }
        public override void Set(VFXOperatorNumericCascadedUnified op)
        {
            base.Set(op);
            field.value = op.GetOperandName(index);
        }
    }
}
}
| |
/**
* Copyright 2015 Marc Rufer, d-fens GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web;
using System.Web.Http.OData;
using System.Web.Http.Results;
using biz.dfch.CS.Entity.LifeCycleManager.Context;
using biz.dfch.CS.Entity.LifeCycleManager.Controller;
using biz.dfch.CS.Entity.LifeCycleManager.Model;
using biz.dfch.CS.Entity.LifeCycleManager.UserData;
using biz.dfch.CS.Entity.LifeCycleManager.Util;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Telerik.JustMock;
namespace biz.dfch.CS.Entity.LifeCycleManager.Tests.Controller
{
[TestClass]
public class CalloutDefinitionsControllerTest : BaseControllerTest<CalloutDefinition>
{
private CalloutDefinitionsController _calloutDefinitionsController;
private LifeCycleContext _lifeCycleContext;
private const String CALLOUT_DEFINITION_READ_PERMISSION = "LightSwitchApplication:CalloutDefinitionCanRead";
private const String CALLOUT_DEFINITION_UPDATE_PERMISSION = "LightSwitchApplication:CalloutDefinitionCanUpdate";
private const String CALLOUT_DEFINITION_CREATE_PERMISSION = "LightSwitchApplication:CalloutDefinitionCanCreate";
private const String CALLOUT_DEFINITION_DELETE_PERMISSION = "LightSwitchApplication:CalloutDefinitionCanDelete";
private const String SAMPLE_ENTITY_TYPE = "User";
private const String ENTITY_ID_1 = "1";
// Per-test setup: arranges static mocks for the helper classes the controller
// uses and stubs the tenant-id request header before creating the SUT.
[TestInitialize]
public void TestInitialize()
{
    Mock.SetupStatic(typeof(ODataControllerHelper));
    Mock.SetupStatic(typeof(CurrentUserDataProvider));
    Mock.SetupStatic(typeof(HttpContext));
    // The controller reads the tenant id from the request header exactly once.
    Mock.Arrange(() => HttpContext.Current.Request.Headers.Get(TENANT_ID_HEADER_KEY))
        .Returns(TENANT_ID)
        .OccursOnce();
    _calloutDefinitionsController = new CalloutDefinitionsController();
    _lifeCycleContext = Mock.Create<LifeCycleContext>();
}
// Read permission present: the controller returns the callout definitions the
// current user is authorized for (two sample entities).
[TestMethod]
public void GetCalloutDefinitionsForUserWithReadPermissionReturnsCalloutDefinitionsTheUserIsAuthorizedFor()
{
    // Arrange: identity with read permission, plus entity/context stubs.
    Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
        .Returns(new Identity
        {
            Permissions = new List<String> {CALLOUT_DEFINITION_READ_PERMISSION},
            Username = CURRENT_USER_ID,
            Tid = TENANT_ID
        })
        .MustBeCalled();
    Mock.Arrange(() => CurrentUserDataProvider.GetEntitiesForUser(Arg.IsAny<DbSet<CalloutDefinition>>(), CURRENT_USER_ID, TENANT_ID))
        .ReturnsCollection(CreateSampleCalloutDefinitionDbSet().ToList())
        .MustBeCalled();
    Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions)
        .IgnoreInstance()
        .ReturnsCollection(CreateSampleCalloutDefinitionDbSet())
        .MustBeCalled();
    // Act
    var actionResult = _calloutDefinitionsController.GetCalloutDefinitions(
        CreateODataQueryOptions("http://localhost/api/Core.svc/CalloutDefinitions"))
        .Result;
    // Assert: 200 OK with both sample definitions.
    Assert.IsTrue(actionResult.GetType() == typeof(OkNegotiatedContentResult<IEnumerable<CalloutDefinition>>));
    var response = actionResult as OkNegotiatedContentResult<IEnumerable<CalloutDefinition>>;
    Assert.IsNotNull(response);
    Assert.AreEqual(2, response.Content.Count());
    Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
    Mock.Assert(() => CurrentUserDataProvider.GetEntitiesForUser(Arg.IsAny<DbSet<CalloutDefinition>>(), CURRENT_USER_ID, TENANT_ID));
    Mock.Assert(_lifeCycleContext);
}
// No read permission: the controller rejects the request with 403 Forbidden.
[TestMethod]
public void GetCalloutDefinitionsForUserWithoutReadPermissionReturnsForbidden()
{
    Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
        .Returns(new Identity
        {
            Permissions = new List<String>(),
            Username = CURRENT_USER_ID
        })
        .MustBeCalled();
    var actionResult = _calloutDefinitionsController.GetCalloutDefinitions(
        CreateODataQueryOptions("http://localhost/api/Core.svc/CalloutDefinitions"))
        .Result;
    AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
    Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
}
// Read permission but no entities for the user: 200 OK with an empty list.
[TestMethod]
public void GetCalloutDefinitionsWithNonExistingCalloutDefinitionsForCurrentUserReturnsEmptyList()
{
    Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
        .Returns(new Identity
        {
            Permissions = new List<String> { CALLOUT_DEFINITION_READ_PERMISSION },
            Username = CURRENT_USER_ID,
            Tid = TENANT_ID
        })
        .MustBeCalled();
    Mock.Arrange(() => CurrentUserDataProvider.GetEntitiesForUser(Arg.IsAny<DbSet<CalloutDefinition>>(), CURRENT_USER_ID, TENANT_ID))
        .ReturnsCollection(new List<CalloutDefinition>())
        .MustBeCalled();
    Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions)
        .IgnoreInstance()
        .ReturnsCollection(CreateSampleCalloutDefinitionDbSet())
        .MustBeCalled();
    var actionResult = _calloutDefinitionsController.GetCalloutDefinitions(
        CreateODataQueryOptions("http://localhost/api/Core.svc/CalloutDefinitions"))
        .Result;
    Assert.IsTrue(actionResult.GetType() == typeof(OkNegotiatedContentResult<IEnumerable<CalloutDefinition>>));
    var response = actionResult as OkNegotiatedContentResult<IEnumerable<CalloutDefinition>>;
    Assert.IsNotNull(response);
    Assert.AreEqual(0, response.Content.Count());
    Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
    Mock.Assert(() => CurrentUserDataProvider.GetEntitiesForUser(Arg.IsAny<DbSet<CalloutDefinition>>(), CURRENT_USER_ID, TENANT_ID));
    Mock.Assert(_lifeCycleContext);
}
[TestMethod]
public void GetCalloutDefinitionByIdForAuthorizedUserWithReadPermissionReturnsRequestedCalloutDefinition()
{
Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
.Returns(new Identity
{
Permissions = new List<String> { CALLOUT_DEFINITION_READ_PERMISSION },
Username = CURRENT_USER_ID,
Tid = TENANT_ID
})
.MustBeCalled();
Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
.Returns(true)
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
.IgnoreInstance()
.Returns(CreateSampleCalloutDefinitionDbSet()[0])
.MustBeCalled();
var actionResult = _calloutDefinitionsController.GetCalloutDefinition(1,
CreateODataQueryOptions("http://localhost/api/Core.svc/CalloutDefinitions(1)"))
.Result;
Assert.IsTrue(actionResult.GetType() == typeof(OkNegotiatedContentResult<CalloutDefinition>));
var response = actionResult as OkNegotiatedContentResult<CalloutDefinition>;
var calloutDefinition = response.Content;
Assert.AreEqual(1, calloutDefinition.Id);
Assert.AreEqual(CURRENT_USER_ID, calloutDefinition.CreatedBy);
Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
Mock.Assert(_lifeCycleContext);
}
        // A user with the read permission but no ownership of the entity gets 403 Forbidden.
        [TestMethod]
        public void GetCalloutDefinitionByIdForUnAuthorizedUserReturnsForbidden()
        {
            // Arrange: permission present, but the ownership check fails.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_READ_PERMISSION },
                    Username = CURRENT_USER_ID,
                    Tid = TENANT_ID
                })
                .MustBeCalled();
            Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
                .Returns(false)
                .MustBeCalled();
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns(CreateSampleCalloutDefinitionDbSet()[0])
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.GetCalloutDefinition(1,
                CreateODataQueryOptions("http://localhost/api/Core.svc/CalloutDefinitions(1)"))
                .Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
            Mock.Assert(_lifeCycleContext);
        }
        // Looking up an id that Find() cannot resolve yields 404 Not Found.
        [TestMethod]
        public void GetCalloutDefinitionByIdForNonExistingCalloutDefinitionIdReturnsNotFound()
        {
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_READ_PERMISSION },
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Arrange: Find(1) returns null, simulating a missing row.
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns((CalloutDefinition)null)
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.GetCalloutDefinition(1,
                CreateODataQueryOptions("http://localhost/api/Core.svc/CalloutDefinitions(1)"))
                .Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.NotFound);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(_lifeCycleContext);
        }
        // Without the read permission the single-entity endpoint must answer 403 Forbidden.
        [TestMethod]
        public void GetCalloutDefinitionByIdForUserWithoutReadPermissionReturnsForbidden()
        {
            // Arrange: identity with no permissions at all.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String>(),
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.GetCalloutDefinition(1,
                CreateODataQueryOptions("http://localhost/api/Core.svc/CalloutDefinitions(1)"))
                .Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
        }
[TestMethod]
public void PutCalloutDefinitionForAuthorizedUserWithUpdatePermissionUpdatesCalloutDefinition()
{
Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
.Returns(new Identity
{
Permissions = new List<String> { CALLOUT_DEFINITION_UPDATE_PERMISSION },
Username = CURRENT_USER_ID,
Tid = TENANT_ID
})
.MustBeCalled();
Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
.Returns(true)
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
.IgnoreInstance()
.Returns(CreateSampleCalloutDefinitionDbSet()[0])
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Attach(Arg.IsAny<CalloutDefinition>()))
.IgnoreInstance()
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.Entry(Arg.IsAny<CalloutDefinition>()))
.IgnoreInstance()
.Returns(Mock.Create<DbEntityEntry<CalloutDefinition>>())
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.SaveChanges())
.IgnoreInstance()
.MustBeCalled();
var actionResult = _calloutDefinitionsController.Put(1,
new CalloutDefinition
{
Id = 1,
CreatedBy = ANOTHER_USER_ID,
Created = DateTimeOffset.Parse("05/01/2008"),
ModifiedBy = ANOTHER_USER_ID,
Modified = DateTimeOffset.Parse("05/01/2008"),
Tid = ANOTHER_TENANT_ID,
EntityType = SAMPLE_ENTITY_TYPE,
EntityId = ENTITY_ID_1,
TenantId = TENANT_ID,
Parameters = "testparameters",
}).Result;
Assert.IsTrue(actionResult.GetType() == typeof(OkNegotiatedContentResult<CalloutDefinition>));
var response = actionResult as OkNegotiatedContentResult<CalloutDefinition>;
var calloutDefinition = response.Content;
Assert.AreEqual(1, calloutDefinition.Id);
Assert.AreEqual(CURRENT_USER_ID, calloutDefinition.CreatedBy);
Assert.AreNotEqual(DateTimeOffset.Parse("05/01/2008"), calloutDefinition.Created);
Assert.AreEqual(CURRENT_USER_ID, calloutDefinition.ModifiedBy);
Assert.AreNotEqual(DateTimeOffset.Parse("05/01/2008"), calloutDefinition.Modified);
Assert.AreEqual(TENANT_ID, calloutDefinition.Tid);
Assert.AreEqual(SAMPLE_ENTITY_TYPE, calloutDefinition.EntityType);
Assert.AreEqual(ENTITY_ID_1, calloutDefinition.EntityId);
Assert.AreEqual(TENANT_ID, calloutDefinition.TenantId);
Assert.AreEqual("testparameters", calloutDefinition.Parameters);
Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
Mock.Assert(_lifeCycleContext);
}
        // PUT without the update permission must answer 403 Forbidden.
        [TestMethod]
        public void PutCalloutDefinitionForUserWithoutUpdatePermissionReturnsForbidden()
        {
            // Arrange: identity with no permissions.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String>(),
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Put(1,
                new CalloutDefinition { Id = 1 })
                .Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
        }
        // PUT by a user who has the update permission but does not own the entity
        // must answer 403 Forbidden and leave the entity untouched.
        [TestMethod]
        public void PutCalloutDefinitionForUnauthorizedUserReturnsForbidden()
        {
            // Arrange: permission present, ownership check fails.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_UPDATE_PERMISSION },
                    Username = CURRENT_USER_ID,
                    Tid = TENANT_ID
                })
                .MustBeCalled();
            Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
                .Returns(false)
                .MustBeCalled();
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns(CreateSampleCalloutDefinitionDbSet()[0])
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Put(1,
                new CalloutDefinition
                {
                    Id = 1,
                    CreatedBy = ANOTHER_USER_ID,
                    Created = DateTimeOffset.Parse("05/01/2008"),
                    ModifiedBy = ANOTHER_USER_ID,
                    Modified = DateTimeOffset.Parse("05/01/2008"),
                    EntityType = SAMPLE_ENTITY_TYPE,
                    EntityId = ENTITY_ID_1,
                    TenantId = TENANT_ID,
                    Parameters = "testparameters",
                }).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
            Mock.Assert(_lifeCycleContext);
        }
[TestMethod]
public void PutCalloutDefinitionForAuthorizedUserSetsUpdatedDate()
{
Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
.Returns(new Identity
{
Permissions = new List<String> { CALLOUT_DEFINITION_UPDATE_PERMISSION },
Username = CURRENT_USER_ID,
Tid = TENANT_ID
})
.MustBeCalled();
Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
.Returns(true)
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
.IgnoreInstance()
.Returns(CreateSampleCalloutDefinitionDbSet()[0])
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Attach(Arg.IsAny<CalloutDefinition>()))
.IgnoreInstance()
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.Entry(Arg.IsAny<CalloutDefinition>()))
.IgnoreInstance()
.Returns(Mock.Create<DbEntityEntry<CalloutDefinition>>())
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.SaveChanges())
.IgnoreInstance()
.MustBeCalled();
var actionResult = _calloutDefinitionsController.Put(1,
new CalloutDefinition
{
Id = 1,
EntityType = SAMPLE_ENTITY_TYPE,
EntityId = ENTITY_ID_1,
TenantId = TENANT_ID,
Parameters = "testparameters",
}).Result;
Assert.IsTrue(actionResult.GetType() == typeof(OkNegotiatedContentResult<CalloutDefinition>));
var response = actionResult as OkNegotiatedContentResult<CalloutDefinition>;
var calloutDefinition = response.Content;
Assert.AreEqual(1, calloutDefinition.Id);
Assert.AreEqual(CURRENT_USER_ID, calloutDefinition.CreatedBy);
Assert.AreEqual(DateTimeOffset.Now.Date, calloutDefinition.Modified.Date);
Assert.AreEqual(CURRENT_USER_ID, calloutDefinition.ModifiedBy);
Assert.AreEqual(SAMPLE_ENTITY_TYPE, calloutDefinition.EntityType);
Assert.AreEqual(ENTITY_ID_1, calloutDefinition.EntityId);
Assert.AreEqual(TENANT_ID, calloutDefinition.TenantId);
Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
Mock.Assert(_lifeCycleContext);
}
        // PUT against an id that Find() cannot resolve yields 404 Not Found.
        [TestMethod]
        public void PutCalloutDefinitionForNonExistingCalloutDefinitionIdReturnsNotFound()
        {
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_UPDATE_PERMISSION },
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Arrange: Find(1) returns null, simulating a missing row.
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns((CalloutDefinition)null)
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Put(1,
                new CalloutDefinition
                {
                    Id = 1,
                    EntityType = SAMPLE_ENTITY_TYPE,
                    EntityId = ENTITY_ID_1,
                    TenantId = TENANT_ID,
                    Parameters = "testparameters",
                }).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.NotFound);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(_lifeCycleContext);
        }
        // URL id (2) and body id (1) disagree; the controller must reject with 400.
        // No mocks are arranged, so the rejection is expected to happen before any
        // identity lookup or database access.
        [TestMethod]
        public void PutCalloutDefinitionWithDifferentCalloutDefinitionIdsInUrlAndBodyReturnsBadRequest()
        {
            var actionResult = _calloutDefinitionsController.Put(2,
                new CalloutDefinition
                {
                    Id = 1,
                    EntityType = SAMPLE_ENTITY_TYPE,
                    EntityId = ENTITY_ID_1,
                    TenantId = TENANT_ID,
                    Parameters = "testparameters",
                }).Result;
            Assert.IsTrue(actionResult.GetType() == typeof(BadRequestResult));
        }
        // POST without the create permission must answer 403 Forbidden.
        [TestMethod]
        public void PostCalloutDefinitionForUserWithoutCreatePermissionReturnsForbidden()
        {
            // Arrange: identity with no permissions.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String>(),
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Post(
                new CalloutDefinition
                {
                    Id = 1,
                    EntityType = SAMPLE_ENTITY_TYPE,
                    EntityId = ENTITY_ID_1,
                    TenantId = TENANT_ID,
                    Parameters = "testparameters"
                }).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
        }
        // POST with the create permission adds the entity (captured via DoInstead) and
        // returns 201 Created; the controller overrides client-supplied audit fields.
        [TestMethod]
        public void PostCalloutDefinitionForUserWithCreatePermissionCreatesCalloutDefinitionAndReturnsCreated()
        {
            // Captures the entity actually passed to DbSet.Add for later inspection.
            CalloutDefinition createdCalloutDefinition = null;
            // Arrange: short-circuit the Created-response helper.
            Mock.Arrange(() => ODataControllerHelper.ResponseCreated(
                Arg.IsAny<ODataController>(), Arg.IsAny<CalloutDefinition>(),
                Arg.IsAny<String>())).Returns(new HttpResponseMessage(HttpStatusCode.Created));
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_CREATE_PERMISSION },
                    Username = CURRENT_USER_ID,
                    Tid = TENANT_ID
                })
                .MustBeCalled();
            // Arrange: intercept Add to capture the stored entity.
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Add(Arg.IsAny<CalloutDefinition>()))
                .IgnoreInstance()
                .DoInstead((CalloutDefinition j) => createdCalloutDefinition = j)
                .Returns((CalloutDefinition j) => j)
                .MustBeCalled();
            Mock.Arrange(() => _lifeCycleContext.SaveChanges())
                .IgnoreInstance()
                .MustBeCalled();
            // Act: body carries foreign audit values that the controller must replace.
            var actionResult = _calloutDefinitionsController.Post(
                new CalloutDefinition
                {
                    Id = 1,
                    CreatedBy = ANOTHER_USER_ID,
                    Modified = DateTimeOffset.Now,
                    Tid = ANOTHER_TENANT_ID,
                    ModifiedBy = CURRENT_USER_ID,
                    EntityType = SAMPLE_ENTITY_TYPE,
                    EntityId = ENTITY_ID_1,
                    TenantId = TENANT_ID,
                    Parameters = "testparameters"
                }).Result;
            // Assert: stored entity stamped with current user/tenant/date, ModifiedBy cleared.
            Assert.AreEqual(CURRENT_USER_ID, createdCalloutDefinition.CreatedBy);
            Assert.AreEqual(DateTimeOffset.Now.Date, createdCalloutDefinition.Created.Date);
            Assert.IsNull(createdCalloutDefinition.ModifiedBy);
            Assert.AreEqual(TENANT_ID, createdCalloutDefinition.Tid);
            Assert.AreEqual(SAMPLE_ENTITY_TYPE, createdCalloutDefinition.EntityType);
            Assert.AreEqual(ENTITY_ID_1, createdCalloutDefinition.EntityId);
            Assert.AreEqual(TENANT_ID, createdCalloutDefinition.TenantId);
            Assert.AreEqual("testparameters", createdCalloutDefinition.Parameters);
            // Assert: HTTP response is 201 Created.
            Assert.IsTrue(actionResult.GetType() == typeof(ResponseMessageResult));
            var response = actionResult as ResponseMessageResult;
            Assert.AreEqual(HttpStatusCode.Created, response.Response.StatusCode);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(_lifeCycleContext);
        }
[TestMethod]
public void PatchCalloutDefinitionForAuthorizedUserWithUpdatePermissionUpdatesDeliveredFields()
{
Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
.Returns(new Identity
{
Permissions = new List<String> { CALLOUT_DEFINITION_UPDATE_PERMISSION },
Username = CURRENT_USER_ID,
Tid = TENANT_ID
})
.MustBeCalled();
Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
.Returns(true)
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
.IgnoreInstance()
.Returns(CreateSampleCalloutDefinitionDbSet()[0])
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Attach(Arg.IsAny<CalloutDefinition>()))
.IgnoreInstance()
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.Entry(Arg.IsAny<CalloutDefinition>()))
.IgnoreInstance()
.Returns(Mock.Create<DbEntityEntry<CalloutDefinition>>())
.MustBeCalled();
Mock.Arrange(() => _lifeCycleContext.SaveChanges())
.IgnoreInstance()
.MustBeCalled();
var delta = new Delta<CalloutDefinition>(typeof(CalloutDefinition));
delta.TrySetPropertyValue("Id", "3");
delta.TrySetPropertyValue("CreatedBy", ANOTHER_USER_ID);
delta.TrySetPropertyValue("ModifiedBy", ANOTHER_USER_ID);
delta.TrySetPropertyValue("Tid", ANOTHER_TENANT_ID);
delta.TrySetPropertyValue("Parameters", "testparameters");
delta.TrySetPropertyValue("EntityType", SAMPLE_ENTITY_TYPE);
var actionResult = _calloutDefinitionsController.Patch(1, delta).Result;
Assert.IsTrue(actionResult.GetType() == typeof(OkNegotiatedContentResult<CalloutDefinition>));
var response = actionResult as OkNegotiatedContentResult<CalloutDefinition>;
var calloutDefinition = response.Content;
Assert.AreEqual(1, calloutDefinition.Id);
Assert.AreEqual(DateTimeOffset.Now.Date, calloutDefinition.Modified.Date);
Assert.AreEqual(CURRENT_USER_ID, calloutDefinition.CreatedBy);
Assert.AreEqual(CURRENT_USER_ID, calloutDefinition.ModifiedBy);
Assert.AreEqual(TENANT_ID, calloutDefinition.Tid);
Assert.AreEqual("testparameters", calloutDefinition.Parameters);
Assert.AreEqual(SAMPLE_ENTITY_TYPE, calloutDefinition.EntityType);
Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
Mock.Assert(_lifeCycleContext);
}
        // PATCH without the update permission must answer 403 Forbidden.
        [TestMethod]
        public void PatchCalloutDefinitionForUserWithoutUpdatePermissionReturnsForbidden()
        {
            // Arrange: identity with no permissions.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String>(),
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Act: an empty delta is enough — the permission check comes first.
            var actionResult = _calloutDefinitionsController.Patch(1, new Delta<CalloutDefinition>()).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
        }
        // PATCH by a user with the update permission but no ownership of the entity
        // must answer 403 Forbidden.
        [TestMethod]
        public void PatchCalloutDefinitionForUnauthorizedUserReturnsForbidden()
        {
            // Arrange: permission present, ownership check fails.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_UPDATE_PERMISSION },
                    Username = CURRENT_USER_ID,
                    Tid = TENANT_ID
                })
                .MustBeCalled();
            Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
                .Returns(false)
                .MustBeCalled();
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns(CreateSampleCalloutDefinitionDbSet()[0])
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Patch(1, new Delta<CalloutDefinition>()).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
            Mock.Assert(_lifeCycleContext);
        }
        // PATCH against an id that Find() cannot resolve yields 404 Not Found.
        [TestMethod]
        public void PatchForNonExistingCalloutDefinitionIdReturnsNotFound()
        {
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_UPDATE_PERMISSION },
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Arrange: Find(1) returns null, simulating a missing row.
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns((CalloutDefinition)null)
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Patch(1, new Delta<CalloutDefinition>()).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.NotFound);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(_lifeCycleContext);
        }
        // DELETE by an authorized owner with the delete permission removes the entity
        // and answers 204 No Content.
        [TestMethod]
        public void DeleteCalloutDefinitionForAuthorizedUserWithDeletePermissionDeletesCalloutDefinition()
        {
            // Arrange: user with delete permission who owns the entity.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_DELETE_PERMISSION },
                    Username = CURRENT_USER_ID,
                    Tid = TENANT_ID
                })
                .MustBeCalled();
            Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
                .Returns(true)
                .MustBeCalled();
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns(CreateSampleCalloutDefinitionDbSet()[0])
                .MustBeCalled();
            // Arrange: removal on the DbSet must be invoked.
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Remove(Arg.IsAny<CalloutDefinition>()))
                .IgnoreInstance()
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Delete(1).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.NoContent);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
            Mock.Assert(_lifeCycleContext);
        }
        // DELETE without the delete permission must answer 403 Forbidden.
        [TestMethod]
        public void DeleteCalloutDefinitionForUserWithoutDeletePermissionReturnsForbidden()
        {
            // Arrange: identity with no permissions.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String>(),
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Delete(1).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
        }
        // DELETE by a user with the delete permission but no ownership of the entity
        // must answer 403 Forbidden.
        [TestMethod]
        public void DeleteCalloutDefinitionForUnauthorizedUserReturnsForbidden()
        {
            // Arrange: permission present, ownership check fails.
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_DELETE_PERMISSION },
                    Username = CURRENT_USER_ID,
                    Tid = TENANT_ID
                })
                .MustBeCalled();
            Mock.Arrange(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()))
                .Returns(false)
                .MustBeCalled();
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns(CreateSampleCalloutDefinitionDbSet()[0])
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Delete(1).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.Forbidden);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(() => CurrentUserDataProvider.IsEntityOfUser(CURRENT_USER_ID, TENANT_ID, Arg.IsAny<CalloutDefinition>()));
            Mock.Assert(_lifeCycleContext);
        }
        // DELETE against an id that Find() cannot resolve yields 404 Not Found.
        [TestMethod]
        public void DeleteForNonExistingCalloutDefinitionIdReturnsNotFound()
        {
            Mock.Arrange(() => CurrentUserDataProvider.GetIdentity(TENANT_ID))
                .Returns(new Identity
                {
                    Permissions = new List<String> { CALLOUT_DEFINITION_DELETE_PERMISSION },
                    Username = CURRENT_USER_ID
                })
                .MustBeCalled();
            // Arrange: Find(1) returns null, simulating a missing row.
            Mock.Arrange(() => _lifeCycleContext.CalloutDefinitions.Find(1))
                .IgnoreInstance()
                .Returns((CalloutDefinition)null)
                .MustBeCalled();
            // Act
            var actionResult = _calloutDefinitionsController.Delete(1).Result;
            // Assert
            AssertStatusCodeResult(actionResult, HttpStatusCode.NotFound);
            Mock.Assert(() => CurrentUserDataProvider.GetIdentity(TENANT_ID));
            Mock.Assert(_lifeCycleContext);
        }
private IList<CalloutDefinition> CreateSampleCalloutDefinitionDbSet()
{
var dbSet = new List<CalloutDefinition>();
dbSet.Add(new CalloutDefinition { Id = 1, Tid = TENANT_ID, CreatedBy = CURRENT_USER_ID, EntityType = SAMPLE_ENTITY_TYPE });
dbSet.Add(new CalloutDefinition { Id = 2, Tid = TENANT_ID, CreatedBy = CURRENT_USER_ID, EntityId = ENTITY_ID_1});
return dbSet;
}
}
}
| |
using Signum.Entities.Workflow;
using Signum.Entities.Authorization;
using Signum.Engine.Authorization;
using Signum.Entities.Dynamic;
using System.Text.RegularExpressions;
using Signum.Entities.Reflection;
using Signum.Engine.UserAssets;
using Signum.Entities.Basics;
namespace Signum.Engine.Workflow;
public static class WorkflowLogic
{
    // Optional hook invoked on workflow transitions; NOTE(review): appears to be set by
    // consumers to react to case transitions — confirm against callers.
    public static Action<ICaseMainEntity, WorkflowTransitionContext>? OnTransition;
    // Cache of all workflows keyed by Lite reference; populated in Start via sb.GlobalLazy
    // (null-forgiven until Start runs).
    public static ResetLazy<Dictionary<Lite<WorkflowEntity>, WorkflowEntity>> Workflows = null!;
    // True when the workflow has an expiration date that lies in the past.
    [AutoExpressionField]
    public static bool HasExpired(this WorkflowEntity w) =>
        As.Expression(() => w.ExpirationDate.HasValue && w.ExpirationDate.Value < Clock.Now);
    // Queryable of the pools belonging to this workflow.
    [AutoExpressionField]
    public static IQueryable<WorkflowPoolEntity> WorkflowPools(this WorkflowEntity e) =>
        As.Expression(() => Database.Query<WorkflowPoolEntity>().Where(a => a.Workflow.Is(e)));
    // Queryable of the activities belonging to this workflow (via lane and pool).
    [AutoExpressionField]
    public static IQueryable<WorkflowActivityEntity> WorkflowActivities(this WorkflowEntity e) =>
        As.Expression(() => Database.Query<WorkflowActivityEntity>().Where(a => a.Lane.Pool.Workflow.Is(e)));
public static IEnumerable<WorkflowActivityEntity> WorkflowActivitiesFromCache(this WorkflowEntity e)
{
return GetWorkflowNodeGraph(e.ToLite()).NextGraph.OfType<WorkflowActivityEntity>();
}
    // Queryable of the events belonging to this workflow (via lane and pool).
    [AutoExpressionField]
    public static IQueryable<WorkflowEventEntity> WorkflowEvents(this WorkflowEntity e) =>
        As.Expression(() => Database.Query<WorkflowEventEntity>().Where(a => a.Lane.Pool.Workflow.Is(e)));
    // The single Start event of the workflow, or null when none exists
    // (SingleOrDefault throws if more than one Start event is present).
    [AutoExpressionField]
    public static WorkflowEventEntity? WorkflowStartEvent(this WorkflowEntity e) =>
        As.Expression(() => e.WorkflowEvents().Where(we => we.Type == WorkflowEventType.Start).SingleOrDefault());
public static IEnumerable<WorkflowEventEntity> WorkflowEventsFromCache(this WorkflowEntity e)
{
return GetWorkflowNodeGraph(e.ToLite()).NextGraph.OfType<WorkflowEventEntity>();
}
    // Queryable of the gateways belonging to this workflow (via lane and pool).
    [AutoExpressionField]
    public static IQueryable<WorkflowGatewayEntity> WorkflowGateways(this WorkflowEntity e) =>
        As.Expression(() => Database.Query<WorkflowGatewayEntity>().Where(a => a.Lane.Pool.Workflow.Is(e)));
public static IEnumerable<WorkflowGatewayEntity> WorkflowGatewaysFromCache(this WorkflowEntity e)
{
return GetWorkflowNodeGraph(e.ToLite()).NextGraph.OfType<WorkflowGatewayEntity>();
}
    // Queryable of connections whose both endpoints live inside this workflow.
    [AutoExpressionField]
    public static IQueryable<WorkflowConnectionEntity> WorkflowConnections(this WorkflowEntity e) =>
        As.Expression(() => Database.Query<WorkflowConnectionEntity>().Where(a => a.From.Lane.Pool.Workflow.Is(e) && a.To.Lane.Pool.Workflow.Is(e)));
public static IEnumerable<WorkflowConnectionEntity> WorkflowConnectionsFromCache(this WorkflowEntity e)
{
return GetWorkflowNodeGraph(e.ToLite()).NextGraph.EdgesWithValue.SelectMany(edge => edge.Value);
}
    // Connections that cross pool boundaries (message flows in BPMN terms —
    // NOTE(review): inferred from the cross-pool filter; confirm terminology).
    [AutoExpressionField]
    public static IQueryable<WorkflowConnectionEntity> WorkflowMessageConnections(this WorkflowEntity e) =>
        As.Expression(() => e.WorkflowConnections().Where(a => !a.From.Lane.Pool.Is(a.To.Lane.Pool)));
    // Queryable of the lanes belonging to this pool.
    [AutoExpressionField]
    public static IQueryable<WorkflowLaneEntity> WorkflowLanes(this WorkflowPoolEntity e) =>
        As.Expression(() => Database.Query<WorkflowLaneEntity>().Where(a => a.Pool.Is(e)));
    // Queryable of connections whose both endpoints live inside this pool.
    [AutoExpressionField]
    public static IQueryable<WorkflowConnectionEntity> WorkflowConnections(this WorkflowPoolEntity e) =>
        As.Expression(() => Database.Query<WorkflowConnectionEntity>().Where(a => a.From.Lane.Pool.Is(e) && a.To.Lane.Pool.Is(e)));
    // Queryable of the gateways belonging to this lane.
    [AutoExpressionField]
    public static IQueryable<WorkflowGatewayEntity> WorkflowGateways(this WorkflowLaneEntity e) =>
        As.Expression(() => Database.Query<WorkflowGatewayEntity>().Where(a => a.Lane.Is(e)));
    // Queryable of the events belonging to this lane.
    [AutoExpressionField]
    public static IQueryable<WorkflowEventEntity> WorkflowEvents(this WorkflowLaneEntity e) =>
        As.Expression(() => Database.Query<WorkflowEventEntity>().Where(a => a.Lane.Is(e)));
    // Queryable of the activities belonging to this lane.
    [AutoExpressionField]
    public static IQueryable<WorkflowActivityEntity> WorkflowActivities(this WorkflowLaneEntity e) =>
        As.Expression(() => Database.Query<WorkflowActivityEntity>().Where(a => a.Lane.Is(e)));
    // Queryable of connections leaving this node.
    [AutoExpressionField]
    public static IQueryable<WorkflowConnectionEntity> NextConnections(this IWorkflowNodeEntity e) =>
        As.Expression(() => Database.Query<WorkflowConnectionEntity>().Where(a => a.From == e));
    // The workflow a case activity belongs to, reached through its case.
    [AutoExpressionField]
    public static WorkflowEntity Workflow(this CaseActivityEntity ca) =>
        As.Expression(() => ca.Case.Workflow);
public static IEnumerable<WorkflowConnectionEntity> NextConnectionsFromCache(this IWorkflowNodeEntity e, ConnectionType? type)
{
var result = GetWorkflowNodeGraph(e.Lane.Pool.Workflow.ToLite()).NextConnections(e);
if (type == null)
return result;
return result.Where(a => a.Type == type);
}
    // Queryable of connections arriving at this node.
    [AutoExpressionField]
    public static IQueryable<WorkflowConnectionEntity> PreviousConnections(this IWorkflowNodeEntity e) =>
        As.Expression(() => Database.Query<WorkflowConnectionEntity>().Where(a => a.To == e));
public static IEnumerable<WorkflowConnectionEntity> PreviousConnectionsFromCache(this IWorkflowNodeEntity e)
{
return GetWorkflowNodeGraph(e.Lane.Pool.Workflow.ToLite()).PreviousConnections(e);
}
public static ResetLazy<Dictionary<Lite<WorkflowEntity>, WorkflowNodeGraph>> WorkflowGraphLazy = null!;
public static List<Lite<IWorkflowNodeEntity>> AutocompleteNodes(Lite<WorkflowEntity> workflow, string subString, int count, List<Lite<IWorkflowNodeEntity>> excludes)
{
return WorkflowGraphLazy.Value.GetOrThrow(workflow).Autocomplete(subString, count, excludes);
}
    // Returns the cached node graph for a workflow, validating it on first access.
    // Uses double-checked locking on the graph instance: a non-null TrackId marks a
    // graph that has already been validated.
    public static WorkflowNodeGraph GetWorkflowNodeGraph(Lite<WorkflowEntity> workflow)
    {
        var graph = WorkflowGraphLazy.Value.GetOrThrow(workflow);
        // Fast path: already validated.
        if (graph.TrackId != null)
            return graph;
        lock (graph)
        {
            // Re-check inside the lock in case another thread validated meanwhile.
            if (graph.TrackId != null)
                return graph;
            var issues = new List<WorkflowIssue>();
            // The callback fires when a gateway's direction is inconsistent; treated as fatal here.
            graph.Validate(issues, (g, newDirection) =>
            {
                throw new InvalidOperationException($"Unexpected direction of gateway '{g}' (Should be '{newDirection.NiceToString()}'). Consider saving Workflow '{workflow}'.");
            });
            // Any error-level issue aborts with a combined message; warnings are tolerated.
            var errors = issues.Where(a => a.Type == WorkflowIssueType.Error);
            if (errors.HasItems())
                throw new ApplicationException("Errors in Workflow '" + workflow + "':\r\n" + errors.ToString("\r\n").Indent(4));
            return graph;
        }
    }
static Func<WorkflowConfigurationEmbedded> getConfiguration = null!;
public static WorkflowConfigurationEmbedded Configuration
{
get { return getConfiguration(); }
}
static Regex CurrentIsRegex = new Regex($@"{nameof(WorkflowActivityInfo)}\s*\.\s*{nameof(WorkflowActivityInfo.Current)}\s*\.\s*{nameof(WorkflowActivityInfo.Is)}\s*\(\s*""(?<workflowName>[^""]*)""\s*,\s*""(?<activityName>[^""]*)""\s*\)");
internal static List<CustomCompilerError> GetCustomErrors(string code)
{
var matches = CurrentIsRegex.Matches(code).Cast<Match>().ToList();
return matches.Select(m =>
{
var workflowName = m.Groups["workflowName"].Value;
var wa = WorkflowLogic.WorkflowGraphLazy.Value.Values.SingleOrDefault(w => w.Workflow.Name == workflowName);
if (wa == null)
return CreateCompilerError(code, m, $"No workflow with Name '{workflowName}' found.");
var activityName = m.Groups["activityName"].Value;
if (!wa.Activities.Values.Any(a => a.Name == activityName))
return CreateCompilerError(code, m, $"No activity with Name '{activityName}' found in workflow '{workflowName}'.");
return null;
}).NotNull().ToList();
}
private static CustomCompilerError CreateCompilerError(string code, Match m, string errorText)
{
int index = 0;
int line = 1;
while (true)
{
var newIndex = code.IndexOf('\n', index + 1);
if (newIndex >= m.Index || newIndex == -1)
return new CustomCompilerError { ErrorText = errorText, Line = line };
index = newIndex;
line++;
}
}
/// <summary>
/// Registers all workflow-related entities, operations, queries, caches and validators
/// in the schema. Guarded by <c>sb.NotDefined</c> so it runs at most once.
/// </summary>
/// <param name="sb">The schema builder to register into.</param>
/// <param name="getConfiguration">Factory for the workflow configuration embedded entity.</param>
public static void Start(SchemaBuilder sb, Func<WorkflowConfigurationEmbedded> getConfiguration)
{
    if (sb.NotDefined(MethodInfo.GetCurrentMethod()))
    {
        // Permissions exposed by the workflow module.
        PermissionAuthLogic.RegisterPermissions(WorkflowPermission.ViewWorkflowPanel);
        PermissionAuthLogic.RegisterPermissions(WorkflowPermission.ViewCaseFlow);
        PermissionAuthLogic.RegisterPermissions(WorkflowPermission.WorkflowToolbarMenu);
        WorkflowLogic.getConfiguration = getConfiguration;

        // User-asset import support for every workflow artifact type.
        UserAssetsImporter.Register<WorkflowEntity>("Workflow", WorkflowOperation.Save);
        UserAssetsImporter.Register<WorkflowScriptEntity>("WorkflowScript", WorkflowScriptOperation.Save);
        UserAssetsImporter.Register<WorkflowTimerConditionEntity>("WorkflowTimerCondition", WorkflowTimerConditionOperation.Save);
        UserAssetsImporter.Register<WorkflowConditionEntity>("WorkflowCondition", WorkflowConditionOperation.Save);
        UserAssetsImporter.Register<WorkflowActionEntity>("WorkflowAction", WorkflowActionOperation.Save);
        UserAssetsImporter.Register<WorkflowScriptRetryStrategyEntity>("WorkflowScriptRetryStrategy", WorkflowScriptRetryStrategyOperation.Save);

        // Main workflow table, default query and expression registrations.
        sb.Include<WorkflowEntity>()
            .WithConstruct(WorkflowOperation.Create)
            .WithQuery(() => DynamicQueryCore.Auto(
                from e in Database.Query<WorkflowEntity>()
                select new
                {
                    Entity = e,
                    e.Id,
                    e.Name,
                    e.MainEntityType,
                    HasExpired = e.HasExpired(),
                    e.ExpirationDate,
                })
            .ColumnDisplayName(a => a.HasExpired, () => WorkflowMessage.HasExpired.NiceToString()))
            .WithExpressionFrom((CaseActivityEntity ca) => ca.Workflow());

        WorkflowGraph.Register();

        QueryLogic.Expressions.Register((WorkflowEntity wf) => wf.WorkflowStartEvent());
        QueryLogic.Expressions.Register((WorkflowEntity wf) => wf.HasExpired(), WorkflowMessage.HasExpired);
        sb.AddIndex((WorkflowEntity wf) => wf.ExpirationDate);

        DynamicCode.GetCustomErrors += GetCustomErrors;

        // Cache of all workflows, invalidated whenever a WorkflowEntity changes.
        Workflows = sb.GlobalLazy(() => Database.Query<WorkflowEntity>().ToDictionary(a => a.ToLite()),
            new InvalidateWith(typeof(WorkflowEntity)));

        sb.Include<WorkflowPoolEntity>()
            .WithUniqueIndex(wp => new { wp.Workflow, wp.Name })
            .WithSave(WorkflowPoolOperation.Save)
            .WithDelete(WorkflowPoolOperation.Delete)
            .WithExpressionFrom((WorkflowEntity p) => p.WorkflowPools())
            .WithQuery(() => e => new
            {
                Entity = e,
                e.Id,
                e.Name,
                e.BpmnElementId,
                e.Workflow,
            });

        sb.Include<WorkflowLaneEntity>()
            .WithUniqueIndex(wp => new { wp.Pool, wp.Name })
            .WithSave(WorkflowLaneOperation.Save)
            .WithDelete(WorkflowLaneOperation.Delete)
            .WithExpressionFrom((WorkflowPoolEntity p) => p.WorkflowLanes())
            .WithQuery(() => e => new
            {
                Entity = e,
                e.Id,
                e.Name,
                e.BpmnElementId,
                e.Pool,
                e.Pool.Workflow,
            });

        // Activities own their boundary timer events as a virtual MList.
        sb.Include<WorkflowActivityEntity>()
            .WithUniqueIndex(w => new { w.Lane, w.Name })
            .WithSave(WorkflowActivityOperation.Save)
            .WithDelete(WorkflowActivityOperation.Delete)
            .WithExpressionFrom((WorkflowEntity p) => p.WorkflowActivities())
            .WithExpressionFrom((WorkflowLaneEntity p) => p.WorkflowActivities())
            .WithVirtualMList(wa => wa.BoundaryTimers, e => e.BoundaryOf, WorkflowEventOperation.Save, WorkflowEventOperation.Delete)
            .WithQuery(() => e => new
            {
                Entity = e,
                e.Id,
                e.Name,
                e.BpmnElementId,
                e.Comments,
                e.Lane,
                e.Lane.Pool.Workflow,
            });

        sb.Include<WorkflowEventEntity>()
            .WithExpressionFrom((WorkflowEntity p) => p.WorkflowEvents())
            .WithExpressionFrom((WorkflowLaneEntity p) => p.WorkflowEvents())
            .WithQuery(() => e => new
            {
                Entity = e,
                e.Id,
                e.Type,
                e.Name,
                e.BpmnElementId,
                e.Lane,
                e.Lane.Pool.Workflow,
            });

        // Save validates the Timer/BoundaryOf invariants implied by the event type.
        new Graph<WorkflowEventEntity>.Execute(WorkflowEventOperation.Save)
        {
            CanBeNew = true,
            CanBeModified = true,
            Execute = (e, _) =>
            {
                if (e.Timer == null && e.Type.IsTimer())
                    throw new InvalidOperationException(ValidationMessage._0IsMandatoryWhen1IsSetTo2.NiceToString(e.NicePropertyName(a => a.Timer), e.NicePropertyName(a => a.Type), e.Type.NiceToString()));

                if (e.Timer != null && !e.Type.IsTimer())
                    throw new InvalidOperationException(ValidationMessage._0ShouldBeNullWhen1IsSetTo2.NiceToString(e.NicePropertyName(a => a.Timer), e.NicePropertyName(a => a.Type), e.Type.NiceToString()));

                if (e.BoundaryOf == null && e.Type.IsBoundaryTimer())
                    throw new InvalidOperationException(ValidationMessage._0IsMandatoryWhen1IsSetTo2.NiceToString(e.NicePropertyName(a => a.BoundaryOf), e.NicePropertyName(a => a.Type), e.Type.NiceToString()));

                if (e.BoundaryOf != null && !e.Type.IsBoundaryTimer())
                    throw new InvalidOperationException(ValidationMessage._0ShouldBeNullWhen1IsSetTo2.NiceToString(e.NicePropertyName(a => a.BoundaryOf), e.NicePropertyName(a => a.Type), e.Type.NiceToString()));

                e.Save();
            },
        }.Register();

        // Deleting a scheduled-start event also removes its scheduled task, if any.
        new Graph<WorkflowEventEntity>.Delete(WorkflowEventOperation.Delete)
        {
            Delete = (e, _) =>
            {
                if (e.Type.IsScheduledStart())
                {
                    var scheduled = e.ScheduledTask();
                    if (scheduled != null)
                        WorkflowEventTaskLogic.DeleteWorkflowEventScheduledTask(scheduled);
                }

                e.Delete();
            },
        }.Register();

        sb.Include<WorkflowGatewayEntity>()
            .WithSave(WorkflowGatewayOperation.Save)
            .WithDelete(WorkflowGatewayOperation.Delete)
            .WithExpressionFrom((WorkflowEntity p) => p.WorkflowGateways())
            .WithExpressionFrom((WorkflowLaneEntity p) => p.WorkflowGateways())
            .WithQuery(() => e => new
            {
                Entity = e,
                e.Id,
                e.Type,
                e.Name,
                e.BpmnElementId,
                e.Lane,
                e.Lane.Pool.Workflow,
            });

        sb.Include<WorkflowConnectionEntity>()
            .WithSave(WorkflowConnectionOperation.Save)
            .WithDelete(WorkflowConnectionOperation.Delete)
            .WithExpressionFrom((WorkflowEntity p) => p.WorkflowConnections())
            .WithExpressionFrom((WorkflowEntity p) => p.WorkflowMessageConnections(), null!)
            .WithExpressionFrom((WorkflowPoolEntity p) => p.WorkflowConnections())
            .WithExpressionFrom((IWorkflowNodeEntity p) => p.NextConnections(), null!)
            .WithExpressionFrom((IWorkflowNodeEntity p) => p.PreviousConnections(), null!)
            .WithQuery(() => e => new
            {
                Entity = e,
                e.Id,
                e.Name,
                e.BpmnElementId,
                e.From,
                e.To,
            });

        WorkflowEventTaskEntity.GetWorkflowEntity = lite => WorkflowGraphLazy.Value.GetOrThrow(lite).Workflow;

        // Cached in-memory graph of every workflow (events, gateways, activities,
        // connections), built in one pass over the database inside an EntityCache.
        WorkflowGraphLazy = sb.GlobalLazy(() =>
        {
            using (new EntityCache())
            {
                var events = Database.RetrieveAll<WorkflowEventEntity>().GroupToDictionary(a => a.Lane.Pool.Workflow.ToLite());
                var gateways = Database.RetrieveAll<WorkflowGatewayEntity>().GroupToDictionary(a => a.Lane.Pool.Workflow.ToLite());
                var activities = Database.RetrieveAll<WorkflowActivityEntity>().GroupToDictionary(a => a.Lane.Pool.Workflow.ToLite());
                var connections = Database.RetrieveAll<WorkflowConnectionEntity>().GroupToDictionary(a => a.From.Lane.Pool.Workflow.ToLite());

                var result = Database.RetrieveAll<WorkflowEntity>().ToDictionary(workflow => workflow.ToLite(), workflow =>
                {
                    var w = workflow.ToLite();

                    var nodeGraph = new WorkflowNodeGraph
                    {
                        Workflow = workflow,
                        Events = events.TryGetC(w).EmptyIfNull().ToDictionary(e => e.ToLite()),
                        Gateways = gateways.TryGetC(w).EmptyIfNull().ToDictionary(g => g.ToLite()),
                        Activities = activities.TryGetC(w).EmptyIfNull().ToDictionary(a => a.ToLite()),
                        Connections = connections.TryGetC(w).EmptyIfNull().ToDictionary(c => c.ToLite()),
                    };

                    nodeGraph.FillGraphs();
                    return nodeGraph;
                });

                return result;
            }
        }, new InvalidateWith(typeof(WorkflowConnectionEntity)));

        WorkflowGraphLazy.OnReset += (e, args) => DynamicCode.OnInvalidated?.Invoke();

        // A connection's condition must target the same main entity type as its workflow.
        Validator.PropertyValidator((WorkflowConnectionEntity c) => c.Condition).StaticPropertyValidation = (e, pi) =>
        {
            if (e.Condition != null && e.From != null)
            {
                var conditionType = (e.Condition.EntityOrNull ?? Conditions.Value.GetOrThrow(e.Condition)).MainEntityType;
                var workflowType = e.From.Lane.Pool.Workflow.MainEntityType;

                if (!conditionType.Is(workflowType))
                    return WorkflowMessage.Condition0IsDefinedFor1Not2.NiceToString(conditionType, workflowType);
            }

            return null;
        };

        // Registration of the auxiliary eval entities (conditions, timers, actions, scripts).
        StartWorkflowConditions(sb);

        StartWorkflowTimerConditions(sb);

        StartWorkflowActions(sb);

        StartWorkflowScript(sb);
    }
}
// Cache of all timer conditions, populated in StartWorkflowTimerConditions.
public static ResetLazy<Dictionary<Lite<WorkflowTimerConditionEntity>, WorkflowTimerConditionEntity>> TimerConditions = null!;

/// <summary>Resolves a timer-condition lite from the in-memory cache (throws if missing).</summary>
public static WorkflowTimerConditionEntity RetrieveFromCache(this Lite<WorkflowTimerConditionEntity> wc) => TimerConditions.Value.GetOrThrow(wc);

/// <summary>Registers the WorkflowTimerCondition table, its operations and its cache.</summary>
private static void StartWorkflowTimerConditions(SchemaBuilder sb)
{
    sb.Include<WorkflowTimerConditionEntity>()
        .WithQuery(() => e => new
        {
            Entity = e,
            e.Id,
            e.Name,
            e.MainEntityType,
            e.Eval.Script
        });

    new Graph<WorkflowTimerConditionEntity>.Execute(WorkflowTimerConditionOperation.Save)
    {
        CanBeNew = true,
        CanBeModified = true,
        Execute = (e, _) =>
        {
            // Changing the main entity type is only allowed when no event timer uses it.
            if (!e.IsNew)
            {
                var oldMainEntityType = e.InDB(a => a.MainEntityType);

                if (!oldMainEntityType.Is(e.MainEntityType))
                    ThrowConnectionError(Database.Query<WorkflowEventEntity>().Where(a => a.Timer!.Condition.Is(e.ToLite())), e, WorkflowTimerConditionOperation.Save);
            }

            e.Save();
        },
    }.Register();

    new Graph<WorkflowTimerConditionEntity>.Delete(WorkflowTimerConditionOperation.Delete)
    {
        Delete = (e, _) =>
        {
            // Refuse to delete while some workflow event still references the condition.
            ThrowConnectionError(Database.Query<WorkflowEventEntity>().Where(a => a.Timer!.Condition.Is(e.ToLite())), e, WorkflowTimerConditionOperation.Delete);
            e.Delete();
        },
    }.Register();

    // Clone copies the type and script but not the identity.
    new Graph<WorkflowTimerConditionEntity>.ConstructFrom<WorkflowTimerConditionEntity>(WorkflowTimerConditionOperation.Clone)
    {
        Construct = (e, args) =>
        {
            return new WorkflowTimerConditionEntity
            {
                MainEntityType = e.MainEntityType,
                Eval = new WorkflowTimerConditionEval { Script = e.Eval.Script }
            };
        },
    }.Register();

    TimerConditions = sb.GlobalLazy(() => Database.Query<WorkflowTimerConditionEntity>().ToDictionary(a => a.ToLite()),
        new InvalidateWith(typeof(WorkflowTimerConditionEntity)));
}
// Cache of all workflow actions, populated in StartWorkflowActions.
public static ResetLazy<Dictionary<Lite<WorkflowActionEntity>, WorkflowActionEntity>> Actions = null!;

/// <summary>Resolves an action lite from the in-memory cache (throws if missing).</summary>
public static WorkflowActionEntity RetrieveFromCache(this Lite<WorkflowActionEntity> wa) => Actions.Value.GetOrThrow(wa);

/// <summary>Registers the WorkflowAction table, its operations and its cache.</summary>
private static void StartWorkflowActions(SchemaBuilder sb)
{
    sb.Include<WorkflowActionEntity>()
        .WithQuery(() => e => new
        {
            Entity = e,
            e.Id,
            e.Name,
            e.MainEntityType,
            e.Eval.Script
        });

    new Graph<WorkflowActionEntity>.Execute(WorkflowActionOperation.Save)
    {
        CanBeNew = true,
        CanBeModified = true,
        Execute = (e, _) =>
        {
            // Changing the main entity type is only allowed when no connection uses the action.
            if (!e.IsNew)
            {
                var oldMainEntityType = e.InDB(a => a.MainEntityType);

                if (!oldMainEntityType.Is(e.MainEntityType))
                    ThrowConnectionError(Database.Query<WorkflowConnectionEntity>().Where(a => a.Action.Is(e.ToLite())), e, WorkflowActionOperation.Save);
            }

            e.Save();
        },
    }.Register();

    new Graph<WorkflowActionEntity>.Delete(WorkflowActionOperation.Delete)
    {
        Delete = (e, _) =>
        {
            // Refuse to delete while some connection still references the action.
            ThrowConnectionError(Database.Query<WorkflowConnectionEntity>().Where(a => a.Action.Is(e.ToLite())), e, WorkflowActionOperation.Delete);
            e.Delete();
        },
    }.Register();

    // Clone copies the type and script but not the identity.
    new Graph<WorkflowActionEntity>.ConstructFrom<WorkflowActionEntity>(WorkflowActionOperation.Clone)
    {
        Construct = (e, args) =>
        {
            return new WorkflowActionEntity
            {
                MainEntityType = e.MainEntityType,
                Eval = new WorkflowActionEval { Script = e.Eval.Script }
            };
        },
    }.Register();

    Actions = sb.GlobalLazy(() => Database.Query<WorkflowActionEntity>().ToDictionary(a => a.ToLite()),
        new InvalidateWith(typeof(WorkflowActionEntity)));
}
// Cache of all workflow conditions, populated in StartWorkflowConditions.
public static ResetLazy<Dictionary<Lite<WorkflowConditionEntity>, WorkflowConditionEntity>> Conditions = null!;

/// <summary>Resolves a condition lite from the in-memory cache (throws if missing).</summary>
public static WorkflowConditionEntity RetrieveFromCache(this Lite<WorkflowConditionEntity> wc) => Conditions.Value.GetOrThrow(wc);

/// <summary>Registers the WorkflowCondition table, its operations and its cache.</summary>
private static void StartWorkflowConditions(SchemaBuilder sb)
{
    sb.Include<WorkflowConditionEntity>()
        .WithQuery(() => e => new
        {
            Entity = e,
            e.Id,
            e.Name,
            e.MainEntityType,
            e.Eval.Script
        });

    new Graph<WorkflowConditionEntity>.Execute(WorkflowConditionOperation.Save)
    {
        CanBeNew = true,
        CanBeModified = true,
        Execute = (e, _) =>
        {
            // Changing the main entity type is only allowed when no connection uses the condition.
            if (!e.IsNew)
            {
                var oldMainEntityType = e.InDB(a => a.MainEntityType);

                if (!oldMainEntityType.Is(e.MainEntityType))
                    ThrowConnectionError(Database.Query<WorkflowConnectionEntity>().Where(a => a.Condition.Is(e.ToLite())), e, WorkflowConditionOperation.Save);
            }

            e.Save();
        },
    }.Register();

    new Graph<WorkflowConditionEntity>.Delete(WorkflowConditionOperation.Delete)
    {
        Delete = (e, _) =>
        {
            // Refuse to delete while some connection still references the condition.
            ThrowConnectionError(Database.Query<WorkflowConnectionEntity>().Where(a => a.Condition.Is(e.ToLite())), e, WorkflowConditionOperation.Delete);
            e.Delete();
        },
    }.Register();

    // Clone copies the type and script but not the identity.
    new Graph<WorkflowConditionEntity>.ConstructFrom<WorkflowConditionEntity>(WorkflowConditionOperation.Clone)
    {
        Construct = (e, args) =>
        {
            return new WorkflowConditionEntity
            {
                MainEntityType = e.MainEntityType,
                Eval = new WorkflowConditionEval { Script = e.Eval.Script }
            };
        },
    }.Register();

    Conditions = sb.GlobalLazy(() => Database.Query<WorkflowConditionEntity>().ToDictionary(a => a.ToLite()),
        new InvalidateWith(typeof(WorkflowConditionEntity)));
}
// Cache of all workflow scripts, populated in StartWorkflowScript.
public static ResetLazy<Dictionary<Lite<WorkflowScriptEntity>, WorkflowScriptEntity>> Scripts = null!;

/// <summary>Resolves a script lite from the in-memory cache (throws if missing).</summary>
public static WorkflowScriptEntity RetrieveFromCache(this Lite<WorkflowScriptEntity> ws) => Scripts.Value.GetOrThrow(ws);

/// <summary>Registers the WorkflowScript and WorkflowScriptRetryStrategy tables, operations and cache.</summary>
private static void StartWorkflowScript(SchemaBuilder sb)
{
    sb.Include<WorkflowScriptEntity>()
        .WithQuery(() => s => new
        {
            Entity = s,
            s.Id,
            s.Name,
            s.MainEntityType,
        });

    new Graph<WorkflowScriptEntity>.Execute(WorkflowScriptOperation.Save)
    {
        CanBeNew = true,
        CanBeModified = true,
        Execute = (e, _) =>
        {
            // Changing the main entity type is only allowed when no activity uses the script.
            if (!e.IsNew)
            {
                var oldMainEntityType = e.InDB(a => a.MainEntityType);

                if (!oldMainEntityType.Is(e.MainEntityType))
                    ThrowConnectionError(Database.Query<WorkflowActivityEntity>().Where(a => a.Script!.Script.Is(e.ToLite())), e, WorkflowScriptOperation.Save);
            }

            e.Save();
        },
    }.Register();

    // Clone copies the type and script but not the identity.
    new Graph<WorkflowScriptEntity>.ConstructFrom<WorkflowScriptEntity>(WorkflowScriptOperation.Clone)
    {
        Construct = (s, _) => new WorkflowScriptEntity()
        {
            MainEntityType = s.MainEntityType,
            Eval = new WorkflowScriptEval() { Script = s.Eval.Script }
        }
    }.Register();

    new Graph<WorkflowScriptEntity>.Delete(WorkflowScriptOperation.Delete)
    {
        Delete = (s, _) =>
        {
            // Refuse to delete while some activity still references the script.
            ThrowConnectionError(Database.Query<WorkflowActivityEntity>().Where(a => a.Script!.Script.Is(s.ToLite())), s, WorkflowScriptOperation.Delete);
            s.Delete();
        },
    }.Register();

    Scripts = sb.GlobalLazy(() => Database.Query<WorkflowScriptEntity>().ToDictionary(a => a.ToLite()),
        new InvalidateWith(typeof(WorkflowScriptEntity)));

    sb.Include<WorkflowScriptRetryStrategyEntity>()
        .WithSave(WorkflowScriptRetryStrategyOperation.Save)
        .WithDelete(WorkflowScriptRetryStrategyOperation.Delete)
        .WithQuery(() => e => new
        {
            Entity = e,
            e.Id,
            e.Rule
        });
}
/// <summary>
/// Throws an <see cref="ApplicationException"/> listing, grouped by workflow, the
/// connections that still reference <paramref name="entity"/> and therefore block the
/// given <paramref name="operation"/>. Returns silently when nothing references it.
/// </summary>
private static void ThrowConnectionError(IQueryable<WorkflowConnectionEntity> queryable, Entity entity, IOperationSymbolContainer operation)
{
    // Any() translates to an EXISTS query; the original Count() == 0 forced a full COUNT.
    if (!queryable.Any())
        return;

    var errors = queryable.Select(a => new { Connection = a.ToLite(), From = a.From.ToLite(), To = a.To.ToLite(), Workflow = a.From.Lane.Pool.Workflow.ToLite() }).ToList();

    var formattedErrors = errors.GroupBy(a => a.Workflow).ToString(gr => $"Workflow '{gr.Key}':" +
        gr.ToString(a => $"Connection {a.Connection!.Id} ({a.Connection}): {a.From} -> {a.To}", "\r\n").Indent(4),
        "\r\n\r\n").Indent(4);

    throw new ApplicationException($"Impossible to {operation.Symbol.Key.After('.')} '{entity}' because is used in some connections: \r\n" + formattedErrors);
}
/// <summary>
/// Throws an <see cref="ApplicationException"/> listing, grouped by workflow, the
/// workflow nodes of type <typeparamref name="T"/> that still reference
/// <paramref name="entity"/> and therefore block the given <paramref name="operation"/>.
/// Returns silently when nothing references it.
/// </summary>
private static void ThrowConnectionError<T>(IQueryable<T> queryable, Entity entity, IOperationSymbolContainer operation)
    where T : Entity, IWorkflowNodeEntity
{
    // Any() translates to an EXISTS query; the original Count() == 0 forced a full COUNT.
    if (!queryable.Any())
        return;

    var errors = queryable.Select(a => new { Entity = a.ToLite(), Workflow = a.Lane.Pool.Workflow.ToLite() }).ToList();

    var formattedErrors = errors.GroupBy(a => a.Workflow).ToString(gr => $"Workflow '{gr.Key}':" +
        gr.ToString(a => $"{typeof(T).NiceName()} {a.Entity}", "\r\n").Indent(4),
        "\r\n\r\n").Indent(4);

    throw new ApplicationException($"Impossible to {operation.Symbol.Key.After('.')} '{entity}' because is used in some {typeof(T).NicePluralName()}: \r\n" + formattedErrors);
}
/// <summary>
/// Operation graph for <see cref="WorkflowEntity"/>: Save, Clone, Delete,
/// Activate and Deactivate.
/// </summary>
public class WorkflowGraph : Graph<WorkflowEntity>
{
    public static void Register()
    {
        new Execute(WorkflowOperation.Save)
        {
            CanBeNew = true,
            CanBeModified = true,
            Execute = (e, args) =>
            {
                // CreateNew strategy requires a registered constructor for the main entity type.
                if (e.MainEntityStrategies.Contains(WorkflowMainEntityStrategy.CreateNew))
                {
                    var type = e.MainEntityType.ToType();

                    if (CaseActivityLogic.Options.TryGetC(type)?.Constructor == null)
                        throw new ApplicationException(WorkflowMessage._0NotAllowedFor1NoConstructorHasBeenDefinedInWithWorkflow.NiceToString(WorkflowMainEntityStrategy.CreateNew.NiceToString(), type.NiceName()));
                }

                // Optional arguments: the BPMN model, replacement mapping, and an issue collector.
                WorkflowLogic.ApplyDocument(e, args.TryGetArgC<WorkflowModel>(), args.TryGetArgC<WorkflowReplacementModel>(), args.TryGetArgC<List<WorkflowIssue>>() ?? new List<WorkflowIssue>());
                DynamicCode.OnInvalidated?.Invoke();
            }
        }.Register();

        new ConstructFrom<WorkflowEntity>(WorkflowOperation.Clone)
        {
            Construct = (w, args) =>
            {
                WorkflowBuilder wb = new WorkflowBuilder(w);
                var result = wb.Clone();

                return result;
            }
        }.Register();

        new Delete(WorkflowOperation.Delete)
        {
            // Block deletion when other workflows use this one via decomposition / call-workflow.
            CanDelete = w =>
            {
                var usedWorkflows = Database.Query<CaseEntity>()
                                        .Where(c => c.Workflow.Is(w) && c.ParentCase != null)
                                        .Select(c => c.ParentCase!.Entity.Workflow.ToLite())
                                        .Distinct()
                                        .ToList();

                if (usedWorkflows.Any())
                    return WorkflowMessage.WorkflowUsedIn0ForDecompositionOrCallWorkflow.NiceToString(usedWorkflows.ToString(", "));

                return null;
            },

            Delete = (w, _) =>
            {
                var wb = new WorkflowBuilder(w);
                wb.Delete();
                DynamicCode.OnInvalidated?.Invoke();
            }
        }.Register();

        // Activate clears the expiration date and resumes scheduled tasks.
        new Execute(WorkflowOperation.Activate)
        {
            CanExecute = w => w.HasExpired() ? null : WorkflowMessage.Workflow0AlreadyActivated.NiceToString(w),
            Execute = (w, _) =>
            {
                w.ExpirationDate = null;
                w.Save();

                w.SuspendWorkflowScheduledTasks(suspended: false);
            }
        }.Register();

        // Deactivate requires no in-progress activities; sets the expiration date and suspends tasks.
        new Execute(WorkflowOperation.Deactivate)
        {
            CanExecute = w => w.HasExpired() ? WorkflowMessage.Workflow0HasExpiredOn1.NiceToString(w, w.ExpirationDate!.Value.ToString()) :
                w.Cases().SelectMany(c => c.CaseActivities()).Any(ca => ca.DoneDate == null) ? CaseActivityMessage.ThereAreInprogressActivities.NiceToString() : null,
            Execute = (w, args) =>
            {
                w.ExpirationDate = args.GetArg<DateTime>();
                w.Save();

                w.SuspendWorkflowScheduledTasks(suspended: true);
            }
        }.Register();
    }
}
/// <summary>
/// Suspends or resumes the scheduled tasks of all ScheduledStart events of the
/// given workflow with a single set-based update (no entities are loaded).
/// </summary>
/// <param name="workflow">The workflow whose scheduled-start tasks are updated.</param>
/// <param name="suspended">True to suspend the tasks, false to resume them.</param>
public static void SuspendWorkflowScheduledTasks(this WorkflowEntity workflow, bool suspended)
{
    workflow.WorkflowEvents()
        .Where(a => a.Type == WorkflowEventType.ScheduledStart)
        .Select(a => a.ScheduledTask()!)
        .UnsafeUpdate()
        .Set(a => a.Suspended, a => suspended)
        .Execute();
}
// In-memory check: an actor matches a user when the actor is the user itself, or a role
// the user's role is indirectly related to. Replaceable delegate so applications can
// plug in their own actor semantics.
public static Func<UserEntity, Lite<Entity>, bool> IsUserActor = (user, actor) =>
    actor.Is(user) ||
    (actor is Lite<RoleEntity> && AuthLogic.IndirectlyRelated(user.Role).Contains((Lite<RoleEntity>)actor));

// Expression variant usable inside database queries (notifications). Note it uses the
// inverse relation (InverseIndirectlyRelated) because here the actor is the constant
// and the user varies per row.
public static Expression<Func<UserEntity, Lite<Entity>, bool>> IsUserActorForNotifications = (user, actorConstant) =>
    actorConstant.Is(user) ||
    (actorConstant is Lite<RoleEntity> && AuthLogic.InverseIndirectlyRelated((Lite<RoleEntity>)actorConstant).Contains(user.Role));
/// <summary>
/// Returns the workflows that the current user is allowed to start.
/// </summary>
public static List<WorkflowEntity> GetAllowedStarts()
{
    var allowed = new List<WorkflowEntity>();
    foreach (var graph in WorkflowGraphLazy.Value.Values)
    {
        if (graph.IsStartCurrentUser())
            allowed.Add(graph.Workflow);
    }
    return allowed;
}
/// <summary>
/// Exports the given workflow as a <c>WorkflowModel</c> via a <c>WorkflowBuilder</c>.
/// </summary>
public static WorkflowModel GetWorkflowModel(WorkflowEntity workflow) =>
    new WorkflowBuilder(workflow).GetWorkflowModel();
/// <summary>
/// Computes the replacement model that applying <paramref name="model"/> to
/// <paramref name="workflow"/> would produce, without persisting anything.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="model"/> is null.</exception>
public static WorkflowReplacementModel PreviewChanges(WorkflowEntity workflow, WorkflowModel model)
{
    if (model == null)
        throw new ArgumentNullException(nameof(model));

    var parsedDocument = WorkflowBuilder.ParseDocument(model.DiagramXml);
    var builder = new WorkflowBuilder(workflow);
    return builder.PreviewChanges(parsedDocument, model);
}
/// <summary>
/// Applies a BPMN model to a workflow: saves the workflow if new, applies the model
/// changes (with optional replacements), validates the resulting graph, and persists
/// the full diagram XML. Validation issues are collected into
/// <paramref name="issuesContainer"/>; any Error-level issue aborts with an
/// <see cref="IntegrityCheckException"/>.
/// </summary>
/// <param name="issuesContainer">Must be empty on entry; receives the validation issues.</param>
public static void ApplyDocument(WorkflowEntity workflow, WorkflowModel? model, WorkflowReplacementModel? replacements, List<WorkflowIssue> issuesContainer)
{
    if (issuesContainer.Any())
        throw new InvalidOperationException("issuesContainer should be empty");

    var wb = new WorkflowBuilder(workflow);
    // The workflow must exist in the database before the builder can attach nodes to it.
    if (workflow.IsNew)
        workflow.Save();

    if (model != null)
    {
        wb.ApplyChanges(model, replacements);
    }
    wb.ValidateGraph(issuesContainer);

    if (issuesContainer.Any(a => a.Type == WorkflowIssueType.Error))
        throw new IntegrityCheckException(new Dictionary<Guid, IntegrityCheck>());

    // Persist the rendered diagram so it can be re-opened without rebuilding.
    workflow.FullDiagramXml = new WorkflowXmlEmbedded { DiagramXml = wb.GetXDocument().ToString() };
    workflow.Save();
}
}
| |
//
// Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#if !SILVERLIGHT2 && !SILVERLIGHT3 && !WINDOWS_PHONE
namespace NLog.Targets
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.IO;
using System.Text;
using System.Threading;
using Common;
using Config;
using Internal;
using Internal.FileAppenders;
using Layouts;
/// <summary>
/// Writes log messages to one or more files.
/// </summary>
/// <seealso href="http://nlog-project.org/wiki/File_target">Documentation on NLog Wiki</seealso>
[Target("File")]
public class FileTarget : TargetWithLayoutHeaderAndFooter, ICreateFileParameters
{
// File name -> timestamp of when the file was initialized by this target.
private readonly Dictionary<string, DateTime> initializedFiles = new Dictionary<string, DateTime>();

// Backing field for the LineEnding property.
private LineEndingMode lineEndingMode = LineEndingMode.Default;
// Factory producing file appenders — presumably selected from the concurrency/caching
// options during initialization; not visible in this chunk (TODO confirm).
private IFileAppenderFactory appenderFactory;
// Cache of recently used appenders (sized by OpenFileCacheSize — confirm in initialization code).
private BaseFileAppender[] recentAppenders;
// Timer used to close inactive files when OpenFileCacheTimeout is set.
private Timer autoClosingTimer;
private int initializedFilesCounter;

// Backing field for MaxArchiveFiles; the property keeps it in sync with
// dynamicArchiveFileHandler.MaxArchiveFileToKeep.
private int _MaxArchiveFilesField;
// Tracks archived files and evicts the oldest ones beyond MaxArchiveFiles.
private readonly DynamicArchiveFileHandlerClass dynamicArchiveFileHandler;
/// <summary>
/// Maintains a bounded FIFO of archived file names: when the archive count reaches
/// <see cref="MaxArchiveFileToKeep"/>, the oldest archive files are deleted to make room.
/// </summary>
private class DynamicArchiveFileHandlerClass
{
    // Oldest archived file name sits at the head of the queue.
    private readonly Queue<string> archiveFileEntryQueue;

    public DynamicArchiveFileHandlerClass(int MaxArchivedFiles) : this()
    {
        this.MaxArchiveFileToKeep = MaxArchivedFiles;
    }

    public DynamicArchiveFileHandlerClass()
    {
        this.MaxArchiveFileToKeep = -1;
        archiveFileEntryQueue = new Queue<string>();
    }

    /// <summary>Maximum number of archive files kept; values below 1 disable archiving.</summary>
    public int MaxArchiveFileToKeep { get; set; }

    /// <summary>
    /// Moves <paramref name="fileName"/> to <paramref name="archiveFileName"/>, evicting the
    /// oldest archives first, and optionally creating the target directory on demand.
    /// If the requested archive name was already produced earlier, a numbered alternative
    /// (name.{N}.ext in the same directory) is used instead.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
    public void AddToArchive(string archiveFileName, string fileName, bool createDirectoryIfNotExists)
    {
        if (MaxArchiveFileToKeep < 1)
        {
            InternalLogger.Warn("AddToArchive is called. Even though the MaxArchiveFiles is set to less than 1");
            return;
        }

        if (!File.Exists(fileName))
        {
            InternalLogger.Error("Error while trying to archive, Source File : {0} Not found.", fileName);
            return;
        }

        // Evict the oldest archives until there is room for the new one.
        while (archiveFileEntryQueue.Count >= MaxArchiveFileToKeep)
        {
            string oldestArchivedFileName = archiveFileEntryQueue.Dequeue();

            try
            {
                File.Delete(oldestArchivedFileName);
            }
            catch (Exception exceptionThrown)
            {
                InternalLogger.Warn("Can't Delete Old Archive File : {0} , Exception : {1}", oldestArchivedFileName, exceptionThrown);
            }
        }

        string targetArchiveFileName = archiveFileName;

        if (archiveFileEntryQueue.Contains(archiveFileName))
        {
            InternalLogger.Trace("Archive File {0} seems to be already exist. Trying with Different File Name..", archiveFileName);

            // BUG FIX: the original built the candidate pattern without its directory,
            // moved the source file onto the raw "{#}" pattern string instead of the
            // expanded candidate, and enqueued the original name so later eviction
            // deleted the wrong file. Expand the pattern with ReplaceNumber, keep the
            // directory, and remember the name actually used.
            string directory = Path.GetDirectoryName(archiveFileName) ?? string.Empty;
            string namePattern = Path.GetFileNameWithoutExtension(archiveFileName) + ".{#}" + Path.GetExtension(archiveFileName);
            int numberToStartWith = 1;

            while (File.Exists(Path.Combine(directory, ReplaceNumber(namePattern, numberToStartWith))))
            {
                InternalLogger.Trace("Archive File {0} seems to be already exist, too. Trying with Different File Name..", archiveFileName);
                numberToStartWith++;
            }

            targetArchiveFileName = Path.Combine(directory, ReplaceNumber(namePattern, numberToStartWith));
        }

        try
        {
            File.Move(fileName, targetArchiveFileName);
        }
        catch (DirectoryNotFoundException)
        {
            if (createDirectoryIfNotExists)
            {
                InternalLogger.Trace("Directory For Archive File is not created. Creating it..");

                try
                {
                    Directory.CreateDirectory(Path.GetDirectoryName(archiveFileName));
                    File.Move(fileName, targetArchiveFileName);
                }
                catch (Exception ExceptionThrown)
                {
                    InternalLogger.Error("Can't create Archive File Directory , Exception : {0}", ExceptionThrown);
                    throw;
                }
            }
            else
            {
                throw;
            }
        }
        catch (Exception ExceptionThrown)
        {
            InternalLogger.Error("Can't Archive File : {0} , Exception : {1}", fileName, ExceptionThrown);
            throw;
        }

        // Record the name the file was actually archived under so eviction deletes the right file.
        archiveFileEntryQueue.Enqueue(targetArchiveFileName);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="FileTarget" /> class.
/// </summary>
/// <remarks>
/// The default value of the layout is: <code>${longdate}|${level:uppercase=true}|${logger}|${message}</code>
/// </remarks>
public FileTarget()
{
    // Archival defaults: sequential numbering, keep 9 archives, size/time archiving off.
    this.ArchiveNumbering = ArchiveNumberingMode.Sequence;
    this._MaxArchiveFilesField = 9;
    this.ConcurrentWriteAttemptDelay = 1;
    this.ArchiveEvery = FileArchivePeriod.None;
    this.ArchiveAboveSize = -1;
    // Concurrent-write defaults: retry up to 10 times, multi-process writes allowed.
    this.ConcurrentWriteAttempts = 10;
    this.ConcurrentWrites = true;
#if SILVERLIGHT
    this.Encoding = Encoding.UTF8;
#else
    this.Encoding = Encoding.Default;
#endif
    this.BufferSize = 32768;
    this.AutoFlush = true;
#if !SILVERLIGHT && !NET_CF
    this.FileAttributes = Win32FileAttributes.Normal;
#endif
    this.NewLineChars = EnvironmentHelper.NewLine;
    this.EnableFileDelete = true;
    // Negative timeout means inactive files are never auto-closed.
    this.OpenFileCacheTimeout = -1;
    this.OpenFileCacheSize = 5;
    this.CreateDirs = true;
    // Seed the archive handler with the current MaxArchiveFiles value.
    this.dynamicArchiveFileHandler = new DynamicArchiveFileHandlerClass(MaxArchiveFiles);
}
/// <summary>
/// Gets or sets the name of the file to write to.
/// </summary>
/// <remarks>
/// This FileName string is a layout which may include instances of layout renderers.
/// This lets you use a single target to write to multiple files.
/// </remarks>
/// <example>
/// The following value makes NLog write logging events to files based on the log level in the directory where
/// the application runs.
/// <code>${basedir}/${level}.log</code>
/// All <c>Debug</c> messages will go to <c>Debug.log</c>, all <c>Info</c> messages will go to <c>Info.log</c> and so on.
/// You can combine as many of the layout renderers as you want to produce an arbitrary log file name.
/// </example>
/// <docgen category='Output Options' order='1' />
[RequiredParameter]
public Layout FileName { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to create directories if they don't exist.
/// </summary>
/// <remarks>
/// Setting this to false may improve performance a bit, but you'll receive an error
/// when attempting to write to a directory that's not present.
/// </remarks>
/// <docgen category='Output Options' order='10' />
[DefaultValue(true)]
[Advanced]
public bool CreateDirs { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to delete old log file on startup.
/// </summary>
/// <remarks>
/// This option works only when the "FileName" parameter denotes a single file.
/// </remarks>
/// <docgen category='Output Options' order='10' />
[DefaultValue(false)]
public bool DeleteOldFileOnStartup { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to replace file contents on each write instead of appending log message at the end.
/// </summary>
/// <docgen category='Output Options' order='10' />
[DefaultValue(false)]
[Advanced]
public bool ReplaceFileContentsOnEachWrite { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to keep log file open instead of opening and closing it on each logging event.
/// </summary>
/// <remarks>
/// Setting this property to <c>True</c> helps improve performance.
/// </remarks>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(false)]
public bool KeepFileOpen { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to enable log file(s) to be deleted.
/// </summary>
/// <docgen category='Output Options' order='10' />
[DefaultValue(true)]
public bool EnableFileDelete { get; set; }
#if !NET_CF && !SILVERLIGHT
/// <summary>
/// Gets or sets the file attributes (Windows only).
/// </summary>
/// <docgen category='Output Options' order='10' />
[Advanced]
// Only compiled on desktop frameworks; Win32FileAttributes wraps native file flags.
public Win32FileAttributes FileAttributes { get; set; }
#endif
/// <summary>
/// Gets or sets the line ending mode. Setting it also updates
/// <c>NewLineChars</c> to the matching character sequence.
/// </summary>
/// <docgen category='Layout Options' order='10' />
[Advanced]
public LineEndingMode LineEnding
{
    get { return this.lineEndingMode; }

    set
    {
        this.lineEndingMode = value;

        // Mirror the chosen mode into the concrete newline string.
        // An unknown mode leaves NewLineChars untouched, as before.
        if (value == LineEndingMode.CR)
        {
            this.NewLineChars = "\r";
        }
        else if (value == LineEndingMode.LF)
        {
            this.NewLineChars = "\n";
        }
        else if (value == LineEndingMode.CRLF)
        {
            this.NewLineChars = "\r\n";
        }
        else if (value == LineEndingMode.Default)
        {
            this.NewLineChars = EnvironmentHelper.NewLine;
        }
        else if (value == LineEndingMode.None)
        {
            this.NewLineChars = string.Empty;
        }
    }
}
/// <summary>
/// Gets or sets a value indicating whether to automatically flush the file buffers after each log message.
/// </summary>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(true)]
public bool AutoFlush { get; set; }

/// <summary>
/// Gets or sets the number of files to be kept open. Setting this to a higher value may improve performance
/// in a situation where a single File target is writing to many files
/// (such as splitting by level or by logger).
/// </summary>
/// <remarks>
/// The files are managed on a LRU (least recently used) basis, which flushes
/// the files that have not been used for the longest period of time should the
/// cache become full. As a rule of thumb, you shouldn't set this parameter to
/// a very high value. A number like 10-15 shouldn't be exceeded, because you'd
/// be keeping a large number of files open which consumes system resources.
/// </remarks>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(5)]
[Advanced]
public int OpenFileCacheSize { get; set; }

/// <summary>
/// Gets or sets the maximum number of seconds that files are kept open. If this number is negative the files are
/// not automatically closed after a period of inactivity.
/// </summary>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(-1)]
[Advanced]
public int OpenFileCacheTimeout { get; set; }

/// <summary>
/// Gets or sets the log file buffer size in bytes.
/// </summary>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(32768)]
public int BufferSize { get; set; }

/// <summary>
/// Gets or sets the file encoding.
/// </summary>
/// <docgen category='Layout Options' order='10' />
public Encoding Encoding { get; set; }

/// <summary>
/// Gets or sets a value indicating whether concurrent writes to the log file by multiple processes on the same host.
/// </summary>
/// <remarks>
/// This makes multi-process logging possible. NLog uses a special technique
/// that lets it keep the files open for writing.
/// </remarks>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(true)]
public bool ConcurrentWrites { get; set; }

/// <summary>
/// Gets or sets a value indicating whether concurrent writes to the log file by multiple processes on different network hosts.
/// </summary>
/// <remarks>
/// This effectively prevents files from being kept open.
/// </remarks>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(false)]
public bool NetworkWrites { get; set; }

/// <summary>
/// Gets or sets the number of times the write is appended on the file before NLog
/// discards the log message.
/// </summary>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(10)]
[Advanced]
public int ConcurrentWriteAttempts { get; set; }

/// <summary>
/// Gets or sets the delay in milliseconds to wait before attempting to write to the file again.
/// </summary>
/// <remarks>
/// The actual delay is a random value between 0 and the value specified
/// in this parameter. On each failed attempt the delay base is doubled
/// up to <see cref="ConcurrentWriteAttempts" /> times.
/// </remarks>
/// <example>
/// Assuming that ConcurrentWriteAttemptDelay is 10 the time to wait will be:<p/>
/// a random value between 0 and 10 milliseconds - 1st attempt<br/>
/// a random value between 0 and 20 milliseconds - 2nd attempt<br/>
/// a random value between 0 and 40 milliseconds - 3rd attempt<br/>
/// a random value between 0 and 80 milliseconds - 4th attempt<br/>
/// ...<p/>
/// and so on.
/// </example>
/// <docgen category='Performance Tuning Options' order='10' />
[DefaultValue(1)]
[Advanced]
public int ConcurrentWriteAttemptDelay { get; set; }
/// <summary>
/// Gets or sets the size in bytes above which log files will be automatically archived.
/// A negative value (the constructor default) disables size-based archiving.
/// </summary>
/// <remarks>
/// Caution: Enabling this option can considerably slow down your file
/// logging in multi-process scenarios. If only one process is going to
/// be writing to the file, consider setting <c>ConcurrentWrites</c>
/// to <c>false</c> for maximum performance.
/// </remarks>
/// <docgen category='Archival Options' order='10' />
public long ArchiveAboveSize { get; set; }

/// <summary>
/// Gets or sets a value indicating whether to automatically archive log files every time the specified time passes.
/// </summary>
/// <remarks>
/// Files are moved to the archive as part of the write operation if the current period of time changes. For example
/// if the current <c>hour</c> changes from 10 to 11, the first write that will occur
/// on or after 11:00 will trigger the archiving.
/// <p>
/// Caution: Enabling this option can considerably slow down your file
/// logging in multi-process scenarios. If only one process is going to
/// be writing to the file, consider setting <c>ConcurrentWrites</c>
/// to <c>false</c> for maximum performance.
/// </p>
/// </remarks>
/// <docgen category='Archival Options' order='10' />
public FileArchivePeriod ArchiveEvery { get; set; }

/// <summary>
/// Gets or sets the name of the file to be used for an archive.
/// </summary>
/// <remarks>
/// It may contain a special placeholder {#####}
/// that will be replaced with a sequence of numbers depending on
/// the archiving strategy. The number of hash characters used determines
/// the number of numerical digits to be used for numbering files.
/// </remarks>
/// <docgen category='Archival Options' order='10' />
public Layout ArchiveFileName { get; set; }
/// <summary>
/// Gets or sets the maximum number of archive files that should be kept.
/// </summary>
/// <docgen category='Archival Options' order='10' />
[DefaultValue(9)]
public int MaxArchiveFiles
{
get
{
return _MaxArchiveFilesField;
}
set
{
_MaxArchiveFilesField = value;
dynamicArchiveFileHandler.MaxArchiveFileToKeep = value;
}
}
/// <summary>
/// Gets or sets the way file archives are numbered.
/// </summary>
/// <docgen category='Archival Options' order='10' />
public ArchiveNumberingMode ArchiveNumbering { get; set; }
/// <summary>
/// Gets the characters that are appended after each line.
/// </summary>
protected internal string NewLineChars { get; private set; }
/// <summary>
/// Removes records of initialized files that have not been
/// accessed in the last two days.
/// </summary>
/// <remarks>
/// Files are marked 'initialized' for the purpose of writing footers when the logging finishes.
/// </remarks>
public void CleanupInitializedFiles()
{
    // Anything untouched for two days counts as stale.
    var twoDaysAgo = DateTime.Now.AddDays(-2);
    this.CleanupInitializedFiles(twoDaysAgo);
}
/// <summary>
/// Removes records of initialized files that have not been
/// accessed after the specified date.
/// </summary>
/// <param name="cleanupThreshold">The cleanup threshold; files last written before this instant are uninitialized.</param>
/// <remarks>
/// Files are marked 'initialized' for the purpose of writing footers when the logging finishes.
/// </remarks>
public void CleanupInitializedFiles(DateTime cleanupThreshold)
{
    // Collect the stale names first so we never mutate
    // initializedFiles while iterating over it.
    var staleFiles = new List<string>();
    foreach (var entry in this.initializedFiles)
    {
        if (entry.Value < cleanupThreshold)
        {
            staleFiles.Add(entry.Key);
        }
    }

    // Write footers and drop the tracking records.
    foreach (string staleFile in staleFiles)
    {
        this.WriteFooterAndUninitialize(staleFile);
    }
}
/// <summary>
/// Flushes all pending file operations.
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation, invoked with null on success or the exception on failure.</param>
/// <remarks>
/// The timeout parameter is ignored, because file APIs don't provide
/// the needed functionality.
/// </remarks>
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
    try
    {
        foreach (BaseFileAppender t in this.recentAppenders)
        {
            if (t == null)
            {
                // Appenders are packed at the front of the array;
                // the first null entry marks the end of the list.
                break;
            }
            t.Flush();
        }
        // Signal successful completion.
        asyncContinuation(null);
    }
    catch (Exception exception)
    {
        if (exception.MustBeRethrown())
        {
            throw;
        }
        // Report the failure through the continuation instead of throwing.
        asyncContinuation(exception);
    }
}
/// <summary>
/// Initializes file logging by creating data structures that
/// enable efficient multi-file logging.
/// </summary>
protected override void InitializeTarget()
{
    base.InitializeTarget();
    // Pick the appender factory matching the configured combination of
    // KeepFileOpen / NetworkWrites / ConcurrentWrites / archiving.
    if (!this.KeepFileOpen)
    {
        this.appenderFactory = RetryingMultiProcessFileAppender.TheFactory;
    }
    else
    {
        // NOTE(review): the two branches below differ only in the final
        // fallback appender (CountingSingleProcessFileAppender vs
        // SingleProcessFileAppender) — presumably the counting variant tracks
        // file size for size/time-based archiving; confirm before refactoring.
        if (this.ArchiveAboveSize != -1 || this.ArchiveEvery != FileArchivePeriod.None)
        {
            if (this.NetworkWrites)
            {
                this.appenderFactory = RetryingMultiProcessFileAppender.TheFactory;
            }
            else if (this.ConcurrentWrites)
            {
#if NET_CF || SILVERLIGHT
                this.appenderFactory = RetryingMultiProcessFileAppender.TheFactory;
#elif MONO
                //
                // mono on Windows uses mutexes, on Unix - special appender
                //
                if (PlatformDetector.IsUnix)
                {
                    this.appenderFactory = UnixMultiProcessFileAppender.TheFactory;
                }
                else
                {
                    this.appenderFactory = MutexMultiProcessFileAppender.TheFactory;
                }
#else
                this.appenderFactory = MutexMultiProcessFileAppender.TheFactory;
#endif
            }
            else
            {
                this.appenderFactory = CountingSingleProcessFileAppender.TheFactory;
            }
        }
        else
        {
            if (this.NetworkWrites)
            {
                this.appenderFactory = RetryingMultiProcessFileAppender.TheFactory;
            }
            else if (this.ConcurrentWrites)
            {
#if NET_CF || SILVERLIGHT
                this.appenderFactory = RetryingMultiProcessFileAppender.TheFactory;
#elif MONO
                //
                // mono on Windows uses mutexes, on Unix - special appender
                //
                if (PlatformDetector.IsUnix)
                {
                    this.appenderFactory = UnixMultiProcessFileAppender.TheFactory;
                }
                else
                {
                    this.appenderFactory = MutexMultiProcessFileAppender.TheFactory;
                }
#else
                this.appenderFactory = MutexMultiProcessFileAppender.TheFactory;
#endif
            }
            else
            {
                this.appenderFactory = SingleProcessFileAppender.TheFactory;
            }
        }
    }
    // LRU cache of open appenders, packed at the front of the array.
    this.recentAppenders = new BaseFileAppender[this.OpenFileCacheSize];
    // Periodically close appenders that have been open longer than OpenFileCacheTimeout.
    if ((this.OpenFileCacheSize > 0 || this.EnableFileDelete) && this.OpenFileCacheTimeout > 0)
    {
        this.autoClosingTimer = new Timer(
            this.AutoClosingTimerCallback,
            null,
            this.OpenFileCacheTimeout * 1000,
            this.OpenFileCacheTimeout * 1000);
    }
    // Console.Error.WriteLine("Name: {0} Factory: {1}", this.Name, this.appenderFactory.GetType().FullName);
}
/// <summary>
/// Closes the file(s) opened for writing: writes footers for every
/// initialized file, stops the auto-close timer, and closes all cached appenders.
/// </summary>
protected override void CloseTarget()
{
    base.CloseTarget();

    // Snapshot the keys so uninitializing does not mutate the collection we iterate.
    foreach (string fileName in new List<string>(this.initializedFiles.Keys))
    {
        this.WriteFooterAndUninitialize(fileName);
    }

    var timer = this.autoClosingTimer;
    if (timer != null)
    {
        this.autoClosingTimer = null;
        timer.Change(Timeout.Infinite, Timeout.Infinite);
        timer.Dispose();
    }

    if (this.recentAppenders != null)
    {
        // Appenders are packed at the front; stop at the first null.
        int i = 0;
        while (i < this.recentAppenders.Length && this.recentAppenders[i] != null)
        {
            this.recentAppenders[i].Close();
            this.recentAppenders[i] = null;
            ++i;
        }
    }
}
/// <summary>
/// Writes the specified logging event to a file specified in the FileName
/// parameter.
/// </summary>
/// <param name="logEvent">The logging event.</param>
protected override void Write(LogEventInfo logEvent)
{
    // Resolve the target file name and the encoded payload for this event.
    string fileName = this.FileName.Render(logEvent);
    byte[] bytes = this.GetBytesToWrite(logEvent);
    // Roll the file first if this write would cross an archive boundary.
    if (this.ShouldAutoArchive(fileName, logEvent, bytes.Length))
    {
        // Close the cached appender so the file can be moved.
        this.InvalidateCacheItem(fileName);
        this.DoAutoArchive(fileName, logEvent);
    }
    this.WriteToFile(fileName, bytes, false);
}
/// <summary>
/// Writes the specified array of logging events to a file specified in the FileName
/// parameter.
/// </summary>
/// <param name="logEvents">An array of <see cref="LogEventInfo "/> objects.</param>
/// <remarks>
/// This function makes use of the fact that the events are batched by sorting
/// the requests by filename. This optimizes the number of open/close calls
/// and can help improve performance.
/// </remarks>
protected override void Write(AsyncLogEventInfo[] logEvents)
{
    // FIX: removed leftover debug code `Thread.Sleep(5000)` that blocked
    // every batch write for five seconds.
    //
    // Group events by rendered file name so each file is opened/flushed once per batch.
    var buckets = logEvents.BucketSort(c => this.FileName.Render(c.LogEvent));
    using (var ms = new MemoryStream())
    {
        var pendingContinuations = new List<AsyncContinuation>();
        foreach (var bucket in buckets)
        {
            string fileName = bucket.Key;
            // Reuse one MemoryStream across buckets; reset it for each file.
            ms.SetLength(0);
            ms.Position = 0;
            // The first event of the bucket drives the archive-boundary check.
            LogEventInfo firstLogEvent = null;
            foreach (AsyncLogEventInfo ev in bucket.Value)
            {
                if (firstLogEvent == null)
                {
                    firstLogEvent = ev.LogEvent;
                }
                byte[] bytes = this.GetBytesToWrite(ev.LogEvent);
                ms.Write(bytes, 0, bytes.Length);
                pendingContinuations.Add(ev.Continuation);
            }
            // Writes the buffered bytes and completes (and clears) the continuations.
            this.FlushCurrentFileWrites(fileName, firstLogEvent, ms, pendingContinuations);
        }
    }
}
/// <summary>
/// Formats the log event for write.
/// </summary>
/// <param name="logEvent">The log event to be formatted.</param>
/// <returns>A string representation of the log event, rendered through the target's Layout.</returns>
protected virtual string GetFormattedMessage(LogEventInfo logEvent)
{
    return this.Layout.Render(logEvent);
}
/// <summary>
/// Gets the bytes to be written to the file: the rendered event text plus
/// the configured line terminator, encoded and passed through TransformBytes.
/// </summary>
/// <param name="logEvent">Log event.</param>
/// <returns>Array of bytes that are ready to be written.</returns>
protected virtual byte[] GetBytesToWrite(LogEventInfo logEvent)
{
    string text = this.GetFormattedMessage(logEvent);
    byte[] encoded = this.Encoding.GetBytes(text + this.NewLineChars);
    return this.TransformBytes(encoded);
}
/// <summary>
/// Modifies the specified byte array before it gets sent to a file.
/// </summary>
/// <param name="value">The byte array.</param>
/// <returns>The modified byte array. The function can do the modification in-place.</returns>
protected virtual byte[] TransformBytes(byte[] value)
{
    // Default implementation is a no-op; subclasses may override to transform output.
    return value;
}
/// <summary>
/// Determines whether the pattern contains a usable numbering placeholder,
/// i.e. a "{#" that appears before a "#}".
/// </summary>
/// <param name="pattern">The archive file name pattern to inspect.</param>
/// <returns>True when a "{#...#}" placeholder is present; otherwise false.</returns>
private static Boolean IsContainValidNumberPatternForReplacement(string pattern)
{
    int openIndex = pattern.IndexOf("{#", StringComparison.Ordinal);
    if (openIndex == -1)
    {
        return false;
    }
    int closeIndex = pattern.IndexOf("#}", StringComparison.Ordinal);
    return closeIndex != -1 && openIndex < closeIndex;
}
/// <summary>
/// Replaces the "{#...#}" placeholder in the pattern with the given value,
/// zero-padded to the placeholder's width.
/// </summary>
/// <param name="pattern">Pattern containing a "{#...#}" placeholder.</param>
/// <param name="value">Sequence number to substitute.</param>
/// <returns>The pattern with the placeholder replaced by the padded number.</returns>
private static string ReplaceNumber(string pattern, int value)
{
    int open = pattern.IndexOf("{#", StringComparison.Ordinal);
    int close = pattern.IndexOf("#}", StringComparison.Ordinal) + 2;
    // Digit count is the placeholder width minus the surrounding braces.
    int digits = close - open - 2;
    string number = Convert.ToString(value, 10).PadLeft(digits, '0');
    return pattern.Substring(0, open) + number + pattern.Substring(close);
}
/// <summary>
/// Writes the buffered batch for a single file (archiving first when the batch
/// would cross an archive boundary) and then completes every pending continuation,
/// passing null on success or the captured exception on failure.
/// </summary>
private void FlushCurrentFileWrites(string currentFileName, LogEventInfo firstLogEvent, MemoryStream ms, List<AsyncContinuation> pendingContinuations)
{
    Exception lastException = null;
    try
    {
        if (currentFileName != null)
        {
            if (this.ShouldAutoArchive(currentFileName, firstLogEvent, (int)ms.Length))
            {
                // Finalize and release the current file before moving it to the archive.
                this.WriteFooterAndUninitialize(currentFileName);
                this.InvalidateCacheItem(currentFileName);
                this.DoAutoArchive(currentFileName, firstLogEvent);
            }
            // Write the whole batch in a single call.
            this.WriteToFile(currentFileName, ms.ToArray(), false);
        }
    }
    catch (Exception exception)
    {
        if (exception.MustBeRethrown())
        {
            throw;
        }
        // Remember the failure so every continuation in the batch is notified.
        lastException = exception;
    }
    foreach (AsyncContinuation cont in pendingContinuations)
    {
        cont(lastException);
    }
    // The caller reuses this list for the next bucket.
    pendingContinuations.Clear();
}
/// <summary>
/// Renames fileName to the archive slot for archiveNumber, first recursively
/// shifting any file already occupying that slot to the next number. Files
/// shifted past MaxArchiveFiles are deleted.
/// </summary>
/// <param name="fileName">File to move into the archive slot.</param>
/// <param name="pattern">Archive name pattern containing a "{#...#}" placeholder.</param>
/// <param name="archiveNumber">Slot number to move the file into.</param>
private void RecursiveRollingRename(string fileName, string pattern, int archiveNumber)
{
    // Past the cap: the oldest archive simply falls off the end.
    if (archiveNumber >= this.MaxArchiveFiles)
    {
        File.Delete(fileName);
        return;
    }
    if (!File.Exists(fileName))
    {
        return;
    }
    string newFileName = ReplaceNumber(pattern, archiveNumber);
    // FIX: test the *target* name. The original tested fileName, which was
    // already verified to exist just above, making the recursion unconditional.
    // Checking newFileName skips a pointless recursive call when the
    // destination slot is free (the callee would have returned immediately).
    if (File.Exists(newFileName))
    {
        this.RecursiveRollingRename(newFileName, pattern, archiveNumber + 1);
    }
    InternalLogger.Trace("Renaming {0} to {1}", fileName, newFileName);
    try
    {
        File.Move(fileName, newFileName);
    }
    catch (IOException)
    {
        // Most likely the destination directory does not exist yet; create it and retry.
        string dir = Path.GetDirectoryName(newFileName);
        if (!Directory.Exists(dir))
        {
            Directory.CreateDirectory(dir);
        }
        File.Move(fileName, newFileName);
    }
}
/// <summary>
/// Archives fileName using sequence-based numbering: scans the archive
/// directory for existing numbered files, deletes those that would exceed
/// MaxArchiveFiles, and moves the file to the next free sequence number.
/// </summary>
/// <param name="fileName">File to archive.</param>
/// <param name="pattern">Archive name pattern containing a "{#...#}" placeholder.</param>
private void SequentialArchive(string fileName, string pattern)
{
    // Turn "prefix{#...#}suffix" into a wildcard mask "prefix*suffix"
    // used to enumerate existing archives.
    string baseNamePattern = Path.GetFileName(pattern);
    int firstPart = baseNamePattern.IndexOf("{#", StringComparison.Ordinal);
    int lastPart = baseNamePattern.IndexOf("#}", StringComparison.Ordinal) + 2;
    int trailerLength = baseNamePattern.Length - lastPart;
    string fileNameMask = baseNamePattern.Substring(0, firstPart) + "*" + baseNamePattern.Substring(lastPart);
    string dirName = Path.GetDirectoryName(Path.GetFullPath(pattern));
    int nextNumber = -1;
    int minNumber = -1;
    var number2name = new Dictionary<int, string>();
    try
    {
#if SILVERLIGHT
        foreach (string s in Directory.EnumerateFiles(dirName, fileNameMask))
#else
        foreach (string s in Directory.GetFiles(dirName, fileNameMask))
#endif
        {
            // Extract the numeric part between the prefix and the suffix.
            string baseName = Path.GetFileName(s);
            string number = baseName.Substring(firstPart, baseName.Length - trailerLength - firstPart);
            int num;
            try
            {
                num = Convert.ToInt32(number, CultureInfo.InvariantCulture);
            }
            catch (FormatException)
            {
                // Not one of our numbered archives; ignore it.
                continue;
            }
            nextNumber = Math.Max(nextNumber, num);
            minNumber = minNumber != -1 ? Math.Min(minNumber, num) : num;
            number2name[num] = s;
        }
        nextNumber++;
    }
    catch (DirectoryNotFoundException)
    {
        // No archive directory yet: create it and start numbering from 0.
        Directory.CreateDirectory(dirName);
        nextNumber = 0;
    }
    if (minNumber != -1)
    {
        // Delete archives with numbers below (nextNumber - MaxArchiveFiles + 1)
        // so that at most MaxArchiveFiles remain after this archive operation.
        int minNumberToKeep = nextNumber - this.MaxArchiveFiles + 1;
        for (int i = minNumber; i < minNumberToKeep; ++i)
        {
            string s;
            if (number2name.TryGetValue(i, out s))
            {
                File.Delete(s);
            }
        }
    }
    string newFileName = ReplaceNumber(pattern, nextNumber);
    File.Move(fileName, newFileName);
}
/// <summary>
/// Moves fileName to the archive according to ArchiveFileName and
/// ArchiveNumbering. Does nothing when the file does not exist.
/// </summary>
/// <param name="fileName">File to archive.</param>
/// <param name="ev">Event used to render the ArchiveFileName layout.</param>
private void DoAutoArchive(string fileName, LogEventInfo ev)
{
    var fi = new FileInfo(fileName);
    if (!fi.Exists)
    {
        return;
    }
    // Console.WriteLine("DoAutoArchive({0})", fileName);
    string fileNamePattern;
    if (this.ArchiveFileName == null)
    {
        // No archive name configured: derive "name.{#}.ext" from the log file name.
        string ext = Path.GetExtension(fileName);
        fileNamePattern = Path.ChangeExtension(fi.FullName, ".{#}" + ext);
    }
    else
    {
        //The archive file name is given. There are two possibiliy
        //(1) User supplied the Filename with pattern
        //(2) User supplied the normal filename
        fileNamePattern = this.ArchiveFileName.Render(ev);
    }
    if (!IsContainValidNumberPatternForReplacement(fileNamePattern))
    {
        // No "{#...#}" placeholder: delegate numbering to the dynamic archive handler.
        dynamicArchiveFileHandler.AddToArchive(fileNamePattern, fi.FullName,CreateDirs);
    }
    else
    {
        switch (this.ArchiveNumbering)
        {
            case ArchiveNumberingMode.Rolling:
                // Newest archive is always number 0; older ones shift up.
                this.RecursiveRollingRename(fi.FullName, fileNamePattern, 0);
                break;
            case ArchiveNumberingMode.Sequence:
                // Newest archive gets the next number in the sequence.
                this.SequentialArchive(fi.FullName, fileNamePattern);
                break;
        }
    }
}
/// <summary>
/// Decides whether the file must be archived before the upcoming write,
/// either because the write would push it over ArchiveAboveSize or because
/// the ArchiveEvery period has rolled over since the file's last write.
/// </summary>
/// <param name="fileName">Target file name.</param>
/// <param name="ev">Event whose timestamp is compared against the file's last write time.</param>
/// <param name="upcomingWriteSize">Size in bytes of the pending write.</param>
/// <returns>True when the file should be archived first; otherwise false.</returns>
private bool ShouldAutoArchive(string fileName, LogEventInfo ev, int upcomingWriteSize)
{
    // Archiving disabled entirely?
    if (this.ArchiveAboveSize == -1 && this.ArchiveEvery == FileArchivePeriod.None)
    {
        return false;
    }

    DateTime lastWriteTime;
    long fileLength;
    // No file info available means there is nothing to archive yet.
    if (!this.GetFileInfo(fileName, out lastWriteTime, out fileLength))
    {
        return false;
    }

    // Size-based: would this write push the file over the limit?
    if (this.ArchiveAboveSize != -1 && fileLength + upcomingWriteSize > this.ArchiveAboveSize)
    {
        return true;
    }

    // Time-based: has the archive period rolled over since the last write?
    if (this.ArchiveEvery != FileArchivePeriod.None)
    {
        string formatString;
        switch (this.ArchiveEvery)
        {
            case FileArchivePeriod.Year:
                formatString = "yyyy";
                break;
            case FileArchivePeriod.Month:
                formatString = "yyyyMM";
                break;
            case FileArchivePeriod.Hour:
                formatString = "yyyyMMddHH";
                break;
            case FileArchivePeriod.Minute:
                formatString = "yyyyMMddHHmm";
                break;
            default:
                // Day (and any other value) uses daily granularity.
                formatString = "yyyyMMdd";
                break;
        }
        // If the two timestamps format differently, a period boundary was crossed.
        string lastPeriod = lastWriteTime.ToString(formatString, CultureInfo.InvariantCulture);
        string currentPeriod = ev.TimeStamp.ToString(formatString, CultureInfo.InvariantCulture);
        if (lastPeriod != currentPeriod)
        {
            return true;
        }
    }

    return false;
}
/// <summary>
/// Timer callback that closes cached appenders which have been open longer
/// than OpenFileCacheTimeout seconds.
/// </summary>
/// <param name="state">Unused timer state.</param>
private void AutoClosingTimerCallback(object state)
{
    lock (this.SyncRoot)
    {
        if (!this.IsInitialized)
        {
            return;
        }
        try
        {
            DateTime timeToKill = DateTime.Now.AddSeconds(-this.OpenFileCacheTimeout);
            for (int i = 0; i < this.recentAppenders.Length; ++i)
            {
                if (this.recentAppenders[i] == null)
                {
                    // Appenders are packed at the front; the first null ends the list.
                    break;
                }
                if (this.recentAppenders[i].OpenTime < timeToKill)
                {
                    // Close this expired appender and every appender after it.
                    for (int j = i; j < this.recentAppenders.Length; ++j)
                    {
                        if (this.recentAppenders[j] == null)
                        {
                            break;
                        }
                        this.recentAppenders[j].Close();
                        this.recentAppenders[j] = null;
                    }
                    break;
                }
            }
        }
        catch (Exception exception)
        {
            if (exception.MustBeRethrown())
            {
                throw;
            }
            // Best-effort maintenance: log and carry on.
            InternalLogger.Warn("Exception in AutoClosingTimerCallback: {0}", exception);
        }
    }
}
/// <summary>
/// Writes bytes to fileName through the LRU appender cache, handling
/// first-write initialization (optional delete of the old file and header
/// writing) unless justData is true.
/// </summary>
/// <param name="fileName">Target file name.</param>
/// <param name="bytes">Bytes to append.</param>
/// <param name="justData">When true, skips initialization bookkeeping and header writing (used for footers).</param>
private void WriteToFile(string fileName, byte[] bytes, bool justData)
{
    if (this.ReplaceFileContentsOnEachWrite)
    {
        // Truncate-and-rewrite mode: header + payload + footer on every write.
        using (FileStream fs = File.Create(fileName))
        {
            byte[] headerBytes = this.GetHeaderBytes();
            byte[] footerBytes = this.GetFooterBytes();
            if (headerBytes != null)
            {
                fs.Write(headerBytes, 0, headerBytes.Length);
            }
            fs.Write(bytes, 0, bytes.Length);
            if (footerBytes != null)
            {
                fs.Write(footerBytes, 0, footerBytes.Length);
            }
        }
        return;
    }
    bool writeHeader = false;
    if (!justData)
    {
        if (!this.initializedFiles.ContainsKey(fileName))
        {
            // First write to this file in this target's lifetime.
            if (this.DeleteOldFileOnStartup)
            {
                try
                {
                    File.Delete(fileName);
                }
                catch (Exception exception)
                {
                    if (exception.MustBeRethrown())
                    {
                        throw;
                    }
                    // Best-effort delete; keep logging into the existing file.
                    InternalLogger.Warn("Unable to delete old log file '{0}': {1}", fileName, exception);
                }
            }
            this.initializedFiles[fileName] = DateTime.Now;
            this.initializedFilesCounter++;
            writeHeader = true;
            // Periodically prune stale initialization records.
            if (this.initializedFilesCounter >= 100)
            {
                this.initializedFilesCounter = 0;
                this.CleanupInitializedFiles();
            }
        }
        // Refresh the last-access timestamp used by CleanupInitializedFiles.
        this.initializedFiles[fileName] = DateTime.Now;
    }
    //
    // BaseFileAppender.Write is the most expensive operation here
    // so the in-memory data structure doesn't have to be
    // very sophisticated. It's a table-based LRU, where we move
    // the used element to become the first one.
    // The number of items is usually very limited so the
    // performance should be equivalent to the one of the hashtable.
    //
    BaseFileAppender appenderToWrite = null;
    int freeSpot = this.recentAppenders.Length - 1;
    for (int i = 0; i < this.recentAppenders.Length; ++i)
    {
        if (this.recentAppenders[i] == null)
        {
            // First empty slot; remember it for a possible new appender.
            freeSpot = i;
            break;
        }
        if (this.recentAppenders[i].FileName == fileName)
        {
            // found it, move it to the first place on the list
            // (MRU)
            // file open has a chance of failure
            // if it fails in the constructor, we won't modify any data structures
            BaseFileAppender app = this.recentAppenders[i];
            for (int j = i; j > 0; --j)
            {
                this.recentAppenders[j] = this.recentAppenders[j - 1];
            }
            this.recentAppenders[0] = app;
            appenderToWrite = app;
            break;
        }
    }
    if (appenderToWrite == null)
    {
        // Cache miss: open a new appender, evicting the one in freeSpot if occupied.
        BaseFileAppender newAppender = this.appenderFactory.Open(fileName, this);
        if (this.recentAppenders[freeSpot] != null)
        {
            this.recentAppenders[freeSpot].Close();
            this.recentAppenders[freeSpot] = null;
        }
        // Shift everything right and install the new appender at the MRU position.
        for (int j = freeSpot; j > 0; --j)
        {
            this.recentAppenders[j] = this.recentAppenders[j - 1];
        }
        this.recentAppenders[0] = newAppender;
        appenderToWrite = newAppender;
    }
    if (writeHeader)
    {
        long fileLength;
        DateTime lastWriteTime;
        // Only write header on empty files or if file info cannot be obtained
        if (!appenderToWrite.GetFileInfo(out lastWriteTime, out fileLength) || fileLength == 0)
        {
            byte[] headerBytes = this.GetHeaderBytes();
            if (headerBytes != null)
            {
                appenderToWrite.Write(headerBytes);
            }
        }
    }
    appenderToWrite.Write(bytes);
}
/// <summary>
/// Renders the configured Header layout as encoded bytes, or returns null
/// when no header is configured.
/// </summary>
private byte[] GetHeaderBytes()
{
    if (this.Header != null)
    {
        // Header is rendered against a null event and terminated with the line ending.
        string renderedText = this.Header.Render(LogEventInfo.CreateNullEvent()) + this.NewLineChars;
        return this.TransformBytes(this.Encoding.GetBytes(renderedText));
    }
    return null;
}
/// <summary>
/// Renders the configured Footer layout as encoded bytes, or returns null
/// when no footer is configured.
/// </summary>
private byte[] GetFooterBytes()
{
    if (this.Footer != null)
    {
        // Footer is rendered against a null event and terminated with the line ending.
        string renderedText = this.Footer.Render(LogEventInfo.CreateNullEvent()) + this.NewLineChars;
        return this.TransformBytes(this.Encoding.GetBytes(renderedText));
    }
    return null;
}
/// <summary>
/// Appends the footer to the file (when one is configured and the file still
/// exists) and removes the file from the initialized-files tracking table.
/// </summary>
/// <param name="fileName">File to finalize.</param>
private void WriteFooterAndUninitialize(string fileName)
{
    byte[] footerBytes = this.GetFooterBytes();
    // Only append when there is a footer to write and a file to write it to.
    if (footerBytes != null && File.Exists(fileName))
    {
        this.WriteToFile(fileName, footerBytes, true);
    }
    this.initializedFiles.Remove(fileName);
}
/// <summary>
/// Retrieves last write time and length for fileName, preferring an open
/// cached appender over the file system.
/// </summary>
/// <param name="fileName">File to query.</param>
/// <param name="lastWriteTime">Receives the last write time, or DateTime.MinValue when unknown.</param>
/// <param name="fileLength">Receives the file length, or -1 when unknown.</param>
/// <returns>True when the information was obtained; otherwise false.</returns>
private bool GetFileInfo(string fileName, out DateTime lastWriteTime, out long fileLength)
{
    // An open appender knows the live state of its file — ask it first.
    foreach (BaseFileAppender appender in this.recentAppenders)
    {
        if (appender == null)
        {
            break;
        }
        if (appender.FileName == fileName)
        {
            appender.GetFileInfo(out lastWriteTime, out fileLength);
            return true;
        }
    }

    // Fall back to the file system.
    var info = new FileInfo(fileName);
    if (info.Exists)
    {
        fileLength = info.Length;
        lastWriteTime = info.LastWriteTime;
        return true;
    }

    fileLength = -1;
    lastWriteTime = DateTime.MinValue;
    return false;
}
/// <summary>
/// Closes and removes the cached appender for fileName (if present),
/// compacting the LRU array so live entries stay packed at the front.
/// </summary>
/// <param name="fileName">File whose appender should be evicted.</param>
private void InvalidateCacheItem(string fileName)
{
    for (int i = 0; i < this.recentAppenders.Length; ++i)
    {
        BaseFileAppender appender = this.recentAppenders[i];
        if (appender == null)
        {
            // Entries are packed at the front; the first null ends the list.
            break;
        }
        if (appender.FileName != fileName)
        {
            continue;
        }
        appender.Close();
        // Shift the remaining entries left to keep the list packed.
        for (int j = i; j < this.recentAppenders.Length - 1; ++j)
        {
            this.recentAppenders[j] = this.recentAppenders[j + 1];
        }
        this.recentAppenders[this.recentAppenders.Length - 1] = null;
        break;
    }
}
}
}
#endif
| |
// SF API version v50.0
// Custom fields included: False
// Relationship objects included: True
using System;
using NetCoreForce.Client.Models;
using NetCoreForce.Client.Attributes;
using Newtonsoft.Json;
namespace NetCoreForce.Models
{
///<summary>
/// Connected App
///<para>SObject Name: ConnectedApplication</para>
///<para>Custom Object: False</para>
///</summary>
public class SfConnectedApplication : SObject
{
    /// <summary>Salesforce SObject type name for this model.</summary>
    [JsonIgnore]
    public static string SObjectTypeName => "ConnectedApplication";

    /// <summary>Connected App ID (Name: Id, SF Type: id, Nillable: False)</summary>
    [JsonProperty(PropertyName = "id")]
    [Updateable(false), Createable(false)]
    public string Id { get; set; }

    /// <summary>Connected App Name (Name: Name, SF Type: string, Nillable: False)</summary>
    [JsonProperty(PropertyName = "name")]
    [Updateable(false), Createable(false)]
    public string Name { get; set; }

    /// <summary>Created Date (Name: CreatedDate, SF Type: datetime, Nillable: False)</summary>
    [JsonProperty(PropertyName = "createdDate")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? CreatedDate { get; set; }

    /// <summary>Created By ID (Name: CreatedById, SF Type: reference, Nillable: False)</summary>
    [JsonProperty(PropertyName = "createdById")]
    [Updateable(false), Createable(false)]
    public string CreatedById { get; set; }

    /// <summary>Related record (ReferenceTo: User, RelationshipName: CreatedBy)</summary>
    [JsonProperty(PropertyName = "createdBy")]
    [Updateable(false), Createable(false)]
    public SfUser CreatedBy { get; set; }

    /// <summary>Last Modified Date (Name: LastModifiedDate, SF Type: datetime, Nillable: False)</summary>
    [JsonProperty(PropertyName = "lastModifiedDate")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? LastModifiedDate { get; set; }

    /// <summary>Last Modified By ID (Name: LastModifiedById, SF Type: reference, Nillable: False)</summary>
    [JsonProperty(PropertyName = "lastModifiedById")]
    [Updateable(false), Createable(false)]
    public string LastModifiedById { get; set; }

    /// <summary>Related record (ReferenceTo: User, RelationshipName: LastModifiedBy)</summary>
    [JsonProperty(PropertyName = "lastModifiedBy")]
    [Updateable(false), Createable(false)]
    public SfUser LastModifiedBy { get; set; }

    /// <summary>System Modstamp (Name: SystemModstamp, SF Type: datetime, Nillable: False)</summary>
    [JsonProperty(PropertyName = "systemModstamp")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? SystemModstamp { get; set; }

    /// <summary>AllowAdminApprovedUsersOnly (Name: OptionsAllowAdminApprovedUsersOnly, SF Type: boolean, Nillable: False)</summary>
    [JsonProperty(PropertyName = "optionsAllowAdminApprovedUsersOnly")]
    [Updateable(false), Createable(false)]
    public bool? OptionsAllowAdminApprovedUsersOnly { get; set; }

    /// <summary>RefreshTokenValidityMetric (Name: OptionsRefreshTokenValidityMetric, SF Type: boolean, Nillable: False)</summary>
    [JsonProperty(PropertyName = "optionsRefreshTokenValidityMetric")]
    [Updateable(false), Createable(false)]
    public bool? OptionsRefreshTokenValidityMetric { get; set; }

    /// <summary>HasSessionLevelPolicy (Name: OptionsHasSessionLevelPolicy, SF Type: boolean, Nillable: False)</summary>
    [JsonProperty(PropertyName = "optionsHasSessionLevelPolicy")]
    [Updateable(false), Createable(false)]
    public bool? OptionsHasSessionLevelPolicy { get; set; }

    /// <summary>isInternal (Name: OptionsIsInternal, SF Type: boolean, Nillable: False)</summary>
    [JsonProperty(PropertyName = "optionsIsInternal")]
    [Updateable(false), Createable(false)]
    public bool? OptionsIsInternal { get; set; }

    /// <summary>FullContentPushNotifications (Name: OptionsFullContentPushNotifications, SF Type: boolean, Nillable: False)</summary>
    [JsonProperty(PropertyName = "optionsFullContentPushNotifications")]
    [Updateable(false), Createable(false)]
    public bool? OptionsFullContentPushNotifications { get; set; }

    /// <summary>Lock App After (Name: MobileSessionTimeout, SF Type: picklist, Nillable: True)</summary>
    [JsonProperty(PropertyName = "mobileSessionTimeout")]
    [Updateable(false), Createable(false)]
    public string MobileSessionTimeout { get; set; }

    /// <summary>PIN Length (Name: PinLength, SF Type: picklist, Nillable: True)</summary>
    [JsonProperty(PropertyName = "pinLength")]
    [Updateable(false), Createable(false)]
    public string PinLength { get; set; }

    /// <summary>Start URL (Name: StartUrl, SF Type: url, Nillable: True)</summary>
    [JsonProperty(PropertyName = "startUrl")]
    [Updateable(false), Createable(false)]
    public string StartUrl { get; set; }

    /// <summary>Mobile Start URL (Name: MobileStartUrl, SF Type: url, Nillable: True)</summary>
    [JsonProperty(PropertyName = "mobileStartUrl")]
    [Updateable(false), Createable(false)]
    public string MobileStartUrl { get; set; }

    /// <summary>Refresh Token Policy (Name: RefreshTokenValidityPeriod, SF Type: int, Nillable: True)</summary>
    [JsonProperty(PropertyName = "refreshTokenValidityPeriod")]
    [Updateable(false), Createable(false)]
    public int? RefreshTokenValidityPeriod { get; set; }
}
}
| |
/*
* Copyright (c) Contributors, http://aurora-sim.org/, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Aurora-Sim Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Reflection;
using OpenMetaverse;
using Aurora.Framework;
using Aurora.Framework.Serialization;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace Aurora.Modules.Archivers
{
/// <summary>
/// Prepare to write out an archive.
/// </summary>
public class ArchiveWriteRequestPreparation
{
// Which permission letters to check when filtering objects ("C" = Copy, "T" = Transfer);
// null disables permission filtering entirely (see CanUserArchiveObject).
protected string m_checkPermissions;
// Id correlating this archive request with its eventual completion notification.
protected Guid m_requestId;
// Destination stream that receives the tar archive output.
protected Stream m_saveStream;
// The scene whose contents are being archived.
protected IScene m_scene;
/// <summary>
/// Constructor
/// </summary>
/// <param name = "scene"></param>
/// <param name = "savePath">The path to which to save data.</param>
/// <param name = "requestId">The id associated with this request</param>
/// <exception cref = "System.IO.IOException">
/// If there was a problem opening a stream for the file specified by the savePath
/// </exception>
public ArchiveWriteRequestPreparation(IScene scene, string savePath, Guid requestId, string checkPermissions)
{
    m_scene = scene;
    try
    {
        // Write directly to disk through a gzip compressor (OAR files are gzipped tarballs).
        m_saveStream = new GZipStream(new FileStream(savePath, FileMode.Create), CompressionMode.Compress);
    }
    catch (EntryPointNotFoundException e)
    {
        // NOTE(review): after this catch m_saveStream remains null, so a later
        // ArchiveRegion() call will presumably fail — confirm this best-effort
        // behavior is intended.
        MainConsole.Instance.ErrorFormat(
            "[ARCHIVER]: Mismatch between Mono and zlib1g library version when trying to create compression stream."
            + "If you've manually installed Mono, have you appropriately updated zlib1g as well?");
        MainConsole.Instance.Error(e);
    }
    m_requestId = requestId;
    m_checkPermissions = checkPermissions;
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name = "scene"></param>
/// <param name = "saveStream">The stream to which to save data.</param>
/// <param name = "requestId">The id associated with this request</param>
/// <remarks>
/// This overload performs no permission filtering (m_checkPermissions stays null,
/// which CanUserArchiveObject treats as "allow everything").
/// </remarks>
public ArchiveWriteRequestPreparation(IScene scene, Stream saveStream, Guid requestId)
{
    m_scene = scene;
    m_saveStream = saveStream;
    m_requestId = requestId;
}
/// <summary>
/// Archive the region requested: collects the scene objects the estate owner
/// is permitted to export, gathers the asset UUIDs they reference (plus any
/// non-default terrain textures), and kicks off the asynchronous asset
/// request that drives the actual archive write.
/// </summary>
/// <exception cref = "System.IO.IOException">if there was an io problem with creating the file</exception>
public void ArchiveRegion()
{
    Dictionary<UUID, AssetType> assetUuids = new Dictionary<UUID, AssetType>();
    ISceneEntity[] entities = m_scene.Entities.GetEntities();
    List<ISceneEntity> sceneObjects = new List<ISceneEntity>();
    int numObjectsSkippedPermissions = 0;
    // Filter entities so that we only have scene objects.
    // FIXME: Would be nicer to have this as a proper list in SceneGraph, since lots of methods
    // end up having to do this
#if (!ISWIN)
    foreach (ISceneEntity entity in entities)
    {
        if (!entity.IsDeleted && !entity.IsAttachment)
        {
            if (!CanUserArchiveObject(m_scene.RegionInfo.EstateSettings.EstateOwner, entity, m_checkPermissions))
                // The user isn't allowed to copy/transfer this object, so it will not be included in the OAR.
                ++numObjectsSkippedPermissions;
            else
                sceneObjects.Add(entity);
        }
    }
#else
    foreach (ISceneEntity entity in entities.Where(entity => !entity.IsDeleted && !entity.IsAttachment))
    {
        if (!CanUserArchiveObject(m_scene.RegionInfo.EstateSettings.EstateOwner, entity, m_checkPermissions))
            // The user isn't allowed to copy/transfer this object, so it will not be included in the OAR.
            ++numObjectsSkippedPermissions;
        else
            sceneObjects.Add(entity);
    }
#endif
    // Collect every asset UUID referenced by the exportable objects.
    UuidGatherer assetGatherer = new UuidGatherer(m_scene.AssetService);
    foreach (ISceneEntity sceneObject in sceneObjects)
    {
        assetGatherer.GatherAssetUuids(sceneObject, assetUuids, m_scene);
    }
    MainConsole.Instance.InfoFormat(
        "[ARCHIVER]: {0} scene objects to serialize requiring save of {1} assets",
        sceneObjects.Count, assetUuids.Count);
    if (numObjectsSkippedPermissions > 0)
    {
        MainConsole.Instance.DebugFormat(
            "[ARCHIVER]: {0} scene objects skipped due to lack of permissions",
            numObjectsSkippedPermissions);
    }
    // Make sure that we also request terrain texture assets
    RegionSettings regionSettings = m_scene.RegionInfo.RegionSettings;
    if (regionSettings.TerrainTexture1 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_1)
        assetUuids[regionSettings.TerrainTexture1] = AssetType.Texture;
    if (regionSettings.TerrainTexture2 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_2)
        assetUuids[regionSettings.TerrainTexture2] = AssetType.Texture;
    if (regionSettings.TerrainTexture3 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_3)
        assetUuids[regionSettings.TerrainTexture3] = AssetType.Texture;
    if (regionSettings.TerrainTexture4 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_4)
        assetUuids[regionSettings.TerrainTexture4] = AssetType.Texture;
    TarArchiveWriter archiveWriter = new TarArchiveWriter(m_saveStream);
    // Asynchronously request all the assets required to perform this archive operation
    ArchiveWriteRequestExecution awre
        = new ArchiveWriteRequestExecution(
            sceneObjects,
            m_scene.RequestModuleInterface<ITerrainModule>(),
            m_scene.RequestModuleInterface<IRegionSerialiserModule>(),
            m_scene,
            archiveWriter,
            m_requestId);
    // When all assets have arrived, awre.ReceivedAllAssets completes the archive.
    new AssetsRequest(
        new AssetsArchiver(archiveWriter), assetUuids,
        m_scene.AssetService, awre.ReceivedAllAssets).Execute();
}
/// <summary>
/// Checks whether the user has permission to export an object group to an OAR.
/// </summary>
/// <param name = "user">The user whose permissions are checked</param>
/// <param name = "objGroup">The object group that is a candidate for export</param>
/// <param name = "checkPermissions">Which permissions to check: "C" = Copy, "T" = Transfer. Null disables checking.</param>
/// <returns>Whether the user is allowed to export the object to an OAR</returns>
private bool CanUserArchiveObject(UUID user, ISceneEntity objGroup, string checkPermissions)
{
    if (checkPermissions == null)
        return true;

    IPermissionsModule permissionsModule = m_scene.RequestModuleInterface<IPermissionsModule>();
    if (permissionsModule == null)
        return true; // this shouldn't happen

    // Every part of the group must be exportable; a single failing part vetoes the
    // whole group, so we can return as soon as any part is not permitted.
    foreach (ISceneChildEntity part in objGroup.ChildrenEntities())
    {
        PermissionClass permClass = permissionsModule.GetPermissionClass(user, part);

        uint effectiveMask;
        switch (permClass)
        {
            case PermissionClass.Owner:
                effectiveMask = part.BaseMask;
                break;
            case PermissionClass.Group:
                effectiveMask = part.GroupMask | part.EveryoneMask;
                break;
            case PermissionClass.Everyone:
            default:
                effectiveMask = part.EveryoneMask;
                break;
        }

        bool copyAllowed = (effectiveMask & (uint) PermissionMask.Copy) != 0;
        bool transferAllowed = (effectiveMask & (uint) PermissionMask.Transfer) != 0;

        // Special case: Everyone-Copy on a part implies it can also be transferred.
        // This is deliberately skipped for the Owner, because in that case the
        // EveryoneMask appears to (incorrectly) always carry the Copy bit even though
        // the viewer neither shows Everyone-Copy nor allows the object to be copied.
        if (permClass != PermissionClass.Owner)
            transferAllowed |= (part.EveryoneMask & (uint) PermissionMask.Copy) != 0;

        if (checkPermissions.Contains("C") && !copyAllowed)
            return false;

        if (checkPermissions.Contains("T") && !transferAllowed)
            return false;
    }

    return true;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
using System;
using System.Collections.Generic;
using ParquetSharp.Column;
using ParquetSharp.Column.Impl;
using ParquetSharp.Column.Page;
using ParquetSharp.External;
using ParquetSharp.Hadoop;
using ParquetSharp.Hadoop.Metadata;
using ParquetSharp.IO.Api;
using ParquetSharp.Schema;
using ParquetSharp.Tools.Util;
using static ParquetSharp.Column.Page.DataPage;
using static ParquetSharp.Schema.PrimitiveType;
namespace ParquetSharp.Tools.Commands
{
/// <summary>
/// parquet-tools style "dump" command: prints row group / page metadata and/or
/// column data of a Parquet file to stdout (a C# port of the Java parquet-tools
/// DumpCommand).
/// </summary>
public class DumpCommand : ArgsOnlyCommand
{
    private static readonly Charset UTF8 = Charset.forName("UTF-8");
    // NOTE(review): a CharsetDecoder is stateful and not thread-safe; sharing a
    // single static instance is fine only while this tool stays single-threaded.
    private static readonly CharsetDecoder UTF8_DECODER = UTF8.newDecoder();
    public const string TABS = " ";
    public const int BLOCK_BUFFER_SIZE = 64 * 1024;
    public readonly string[] USAGE = new string[] { "<input>", "where <input> is the parquet file to print to stdout" };
    public static readonly Options OPTIONS;
    static DumpCommand()
    {
        // Command line switches: -m (suppress metadata), -d (suppress data),
        // -c <column> (repeatable: restrict output to the named column(s)).
        OPTIONS = new Options();
        Option md = OptionBuilder.withLongOpt("disable-meta")
            .withDescription("Do not dump row group and page metadata")
            .create('m');
        Option dt = OptionBuilder.withLongOpt("disable-data")
            .withDescription("Do not dump column data")
            .create('d');
        Option cl = OptionBuilder.withLongOpt("column")
            .withDescription("Dump only the given column, can be specified more than once")
            .hasArgs()
            .create('c');
        OPTIONS.addOption(md);
        OPTIONS.addOption(dt);
        OPTIONS.addOption(cl);
    }
    public DumpCommand()
        : base(1, 1)
    {
    }
    public override Options getOptions()
    {
        return OPTIONS;
    }
    public override string[] getUsageDescription()
    {
        return USAGE;
    }
    /// <summary>
    /// Entry point: parses the command line, reads the file footer, builds the
    /// pretty printer, and delegates to <see cref="dump(PrettyPrintWriter, ParquetMetadata, MessageType, Path, bool, bool, HashSet{string})"/>.
    /// </summary>
    public override void execute(CommandLine options)
    {
        base.execute(options);
        string[] args = options.getArgs();
        string input = args[0];
        Configuration conf = new Configuration();
        Path inpath = new Path(input);
        ParquetMetadata metaData = ParquetFileReader.readFooter(conf, inpath, NO_FILTER);
        MessageType schema = metaData.getFileMetaData().getSchema();
        PrettyPrintWriter @out = PrettyPrintWriter.stdoutPrettyPrinter()
            .withAutoColumn()
            .withAutoCrop()
            .withWhitespaceHandler(WhiteSpaceHandler.ELIMINATE_NEWLINES)
            .withColumnPadding(1)
            .withMaxBufferedLines(1000000)
            .withFlushOnTab()
            .build();
        bool showmd = !options.hasOption('m');
        bool showdt = !options.hasOption('d');
        HashSet<string> showColumns = null;
        if (options.hasOption('c'))
        {
            string[] cols = options.getOptionValues('c');
            showColumns = new HashSet<string>(Arrays.asList(cols));
        }
        dump(@out, metaData, schema, inpath, showmd, showdt, showColumns);
    }
    /// <summary>
    /// Dumps row group metadata (when showmd) and column data (when showdt) for the
    /// whole file. When showColumns is non-null, only columns whose dotted path is
    /// in the set are printed.
    /// </summary>
    public static void dump(PrettyPrintWriter @out, ParquetMetadata meta, MessageType schema, Path inpath, bool showmd, bool showdt, HashSet<string> showColumns)
    {
        Configuration conf = new Configuration();
        List<BlockMetaData> blocks = meta.getBlocks();
        List<ColumnDescriptor> columns = schema.getColumns();
        if (showColumns != null)
        {
            // Restrict the descriptor list to the user-selected column paths.
            columns = new List<ColumnDescriptor>();
            foreach (ColumnDescriptor column in schema.getColumns())
            {
                string path = Joiner.on('.').skipNulls().join(column.getPath());
                if (showColumns.Contains(path))
                {
                    columns.Add(column);
                }
            }
        }
        ParquetFileReader freader = null;
        if (showmd)
        {
            try
            {
                long group = 0;
                foreach (BlockMetaData block in blocks)
                {
                    if (group != 0) @out.println();
                    @out.format("row group %d%n", group++);
                    @out.rule('-');
                    ICollection<ColumnChunkMetaData> ccmds = block.getColumns();
                    if (showColumns != null)
                    {
                        ccmds = new List<ColumnChunkMetaData>();
                        foreach (ColumnChunkMetaData ccmd in block.getColumns())
                        {
                            string path = Joiner.on('.').skipNulls().join(ccmd.getPath().toArray());
                            if (showColumns.Contains(path))
                            {
                                ccmds.Add(ccmd);
                            }
                        }
                    }
                    MetadataUtils.showDetails(@out, ccmds);
                    // Open a reader scoped to just this block so page-level details
                    // can be printed per row group.
                    List<BlockMetaData> rblocks = Collections.singletonList(block);
                    freader = new ParquetFileReader(
                        conf, meta.getFileMetaData(), inpath, rblocks, columns);
                    PageReadStore store = freader.readNextRowGroup();
                    while (store != null)
                    {
                        @out.incrementTabLevel();
                        foreach (ColumnDescriptor column in columns)
                        {
                            @out.println();
                            dump(@out, store, column);
                        }
                        @out.decrementTabLevel();
                        store = freader.readNextRowGroup();
                    }
                    // BUGFIX: close the per-block reader here. Previously each loop
                    // iteration overwrote 'freader' and only the final reader was
                    // closed in the outer finally, leaking one file handle per block.
                    freader.close();
                    freader = null;
                    @out.flushColumns();
                }
            }
            finally
            {
                // Covers the exception path; on normal completion freader is null.
                if (freader != null)
                {
                    freader.close();
                }
            }
        }
        if (showdt)
        {
            bool first = true;
            foreach (ColumnDescriptor column in columns)
            {
                if (!first || showmd) @out.println();
                first = false;
                @out.format("%s %s%n", column.getType(), Joiner.on('.').skipNulls().join(column.getPath()));
                @out.rule('-');
                try
                {
                    // 'offset' is the 1-based index of the first value of the current
                    // row group within the column.
                    long page = 1;
                    long total = blocks.Count;
                    long offset = 1;
                    freader = new ParquetFileReader(
                        conf, meta.getFileMetaData(), inpath, blocks, Collections.singletonList(column));
                    PageReadStore store = freader.readNextRowGroup();
                    while (store != null)
                    {
                        ColumnReadStoreImpl crstore = new ColumnReadStoreImpl(
                            store, new DumpGroupConverter(), schema,
                            meta.getFileMetaData().getCreatedBy());
                        dump(@out, crstore, column, page++, total, offset);
                        offset += store.getRowCount();
                        store = freader.readNextRowGroup();
                    }
                    @out.flushColumns();
                }
                finally
                {
                    @out.flushColumns();
                    if (freader != null)
                    {
                        freader.close();
                    }
                }
            }
        }
    }
    /// <summary>
    /// Prints page-level metadata (value count, levels, dictionary, encodings, sizes)
    /// for a single column of a single row group.
    /// </summary>
    public static void dump(PrettyPrintWriter @out, PageReadStore store, ColumnDescriptor column)
    {
        PageReader reader = store.getPageReader(column);
        long vc = reader.getTotalValueCount();
        int rmax = column.getMaxRepetitionLevel();
        int dmax = column.getMaxDefinitionLevel();
        @out.format("%s TV=%d RL=%d DL=%d", Joiner.on('.').skipNulls().join(column.getPath()), vc, rmax, dmax);
        DictionaryPage dict = reader.readDictionaryPage();
        if (dict != null)
        {
            @out.format(" DS:%d", dict.getDictionarySize());
            @out.format(" DE:%s", dict.getEncoding());
        }
        @out.println();
        @out.rule('-');
        DataPage page = reader.readPage();
        for (long count = 0; page != null; count++)
        {
            @out.format("page %d:", count);
            // Visitor prints the encoding fields that differ between V1/V2 pages.
            page.accept(new DumpVisitor1(@out));
            @out.format(" SZ:%d", page.getUncompressedSize());
            @out.format(" VC:%d", page.getValueCount());
            @out.println();
            page = reader.readPage();
        }
    }
    /// <summary>
    /// Data-page visitor that prints the definition/repetition/value encodings;
    /// V2 pages always use RLE for the level encodings.
    /// </summary>
    class DumpVisitor1 : Visitor<object>
    {
        readonly PrettyPrintWriter @out;
        public DumpVisitor1(PrettyPrintWriter @out)
        {
            this.@out = @out;
        }
        public object visit(DataPageV1 pageV1)
        {
            this.@out.format(" DLE:%s", pageV1.getDlEncoding());
            this.@out.format(" RLE:%s", pageV1.getRlEncoding());
            this.@out.format(" VLE:%s", pageV1.getValueEncoding());
            return null;
        }
        public object visit(DataPageV2 pageV2)
        {
            this.@out.format(" DLE:RLE");
            this.@out.format(" RLE:RLE");
            this.@out.format(" VLE:%s", pageV2.getDataEncoding());
            return null;
        }
    }
    /// <summary>
    /// Prints every value of one column for one row group, including repetition and
    /// definition levels; values whose definition level is below the maximum are null.
    /// </summary>
    public static void dump(PrettyPrintWriter @out, ColumnReadStoreImpl crstore, ColumnDescriptor column, long page, long total, long offset)
    {
        int dmax = column.getMaxDefinitionLevel();
        ColumnReader creader = crstore.getColumnReader(column);
        @out.format("*** row group %d of %d, values %d to %d ***%n", page, total, offset, offset + creader.getTotalValueCount() - 1);
        for (long i = 0, e = creader.getTotalValueCount(); i < e; ++i)
        {
            int rlvl = creader.getCurrentRepetitionLevel();
            int dlvl = creader.getCurrentDefinitionLevel();
            @out.format("value %d: R:%d D:%d V:", offset + i, rlvl, dlvl);
            if (dlvl == dmax)
            {
                switch (column.getType().Name)
                {
                    case Name.BINARY: @out.format("%s", binaryToString(creader.getBinary())); break;
                    case Name.BOOLEAN: @out.format("%s", creader.getBoolean()); break;
                    case Name.DOUBLE: @out.format("%s", creader.getDouble()); break;
                    case Name.FLOAT: @out.format("%s", creader.getFloat()); break;
                    case Name.INT32: @out.format("%s", creader.getInteger()); break;
                    case Name.INT64: @out.format("%s", creader.getLong()); break;
                    case Name.INT96: @out.format("%s", binaryToBigInteger(creader.getBinary())); break;
                    case Name.FIXED_LEN_BYTE_ARRAY: @out.format("%s", binaryToString(creader.getBinary())); break;
                }
            }
            else
            {
                // Definition level below the max means the value is absent.
                @out.format("<null>");
            }
            @out.println();
            creader.consume();
        }
    }
    /// <summary>
    /// Decodes a Binary as UTF-8 text; returns a placeholder when the bytes are not
    /// valid UTF-8, and null when the value has no backing bytes.
    /// </summary>
    public static string binaryToString(Binary value)
    {
        byte[] data = value.getBytesUnsafe();
        if (data == null) return null;
        try
        {
            CharBuffer buffer = UTF8_DECODER.decode(value.toByteBuffer());
            return buffer.toString();
        }
        catch (Exception)
        {
            // Not decodable as UTF-8 — fall through to the opaque-bytes placeholder.
        }
        return "<bytes...>";
    }
    /// <summary>
    /// Interprets a Binary (e.g. an INT96 value) as a big integer; null when the
    /// value has no backing bytes.
    /// </summary>
    public static BigInteger binaryToBigInteger(Binary value)
    {
        byte[] data = value.getBytesUnsafe();
        if (data == null) return null;
        return new BigInteger(data);
    }
    // No-op converters: reading levels/values for dumping does not need materialized
    // records, so these just satisfy the ColumnReadStoreImpl API.
    sealed class DumpGroupConverter : GroupConverter
    {
        public override void start() { }
        public override void end() { }
        public override Converter getConverter(int fieldIndex) { return new DumpConverter(); }
    }
    sealed class DumpConverter : PrimitiveConverter
    {
        public override GroupConverter asGroupConverter() { return new DumpGroupConverter(); }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
namespace System.Security.Principal
{
/// <summary>
/// Represents a Windows NT account name of the form "DOMAIN\account" (or a bare
/// account name) and supports translation to <see cref="SecurityIdentifier"/> via
/// the LSA LsaLookupNames2 API.
/// </summary>
public sealed class NTAccount : IdentityReference
{
    #region Private members
    // The fully qualified name this instance represents; immutable after construction.
    private readonly string _name;
    //
    // Limit for nt account names for users is 20 while that for groups is 256
    //
    internal const int MaximumAccountNameLength = 256;
    //
    // Limit for dns domain names is 255
    //
    internal const int MaximumDomainNameLength = 255;
    #endregion
    #region Constructors
    /// <summary>
    /// Creates an NTAccount from separate domain and account names. The stored name
    /// is "domainName\accountName", or just the account name when the domain is
    /// null or empty.
    /// </summary>
    public NTAccount(string domainName, string accountName)
    {
        if (accountName == null)
        {
            throw new ArgumentNullException(nameof(accountName));
        }
        if (accountName.Length == 0)
        {
            throw new ArgumentException(SR.Argument_StringZeroLength, nameof(accountName));
        }
        if (accountName.Length > MaximumAccountNameLength)
        {
            throw new ArgumentException(SR.IdentityReference_AccountNameTooLong, nameof(accountName));
        }
        if (domainName != null && domainName.Length > MaximumDomainNameLength)
        {
            throw new ArgumentException(SR.IdentityReference_DomainNameTooLong, nameof(domainName));
        }
        if (domainName == null || domainName.Length == 0)
        {
            _name = accountName;
        }
        else
        {
            _name = domainName + "\\" + accountName;
        }
    }
    /// <summary>
    /// Creates an NTAccount from a single (possibly domain-qualified) name.
    /// </summary>
    public NTAccount(string name)
    {
        if (name == null)
        {
            throw new ArgumentNullException(nameof(name));
        }
        if (name.Length == 0)
        {
            throw new ArgumentException(SR.Argument_StringZeroLength, nameof(name));
        }
        if (name.Length > (MaximumDomainNameLength + 1 /* '\' */ + MaximumAccountNameLength))
        {
            throw new ArgumentException(SR.IdentityReference_AccountNameTooLong, nameof(name));
        }
        _name = name;
    }
    #endregion
    #region Inherited properties and methods
    // The string form of the account, identical to ToString().
    public override string Value
    {
        get
        {
            return ToString();
        }
    }
    /// <summary>
    /// An NTAccount can be translated to a SecurityIdentifier or (trivially) to
    /// another NTAccount; all other target types are unsupported.
    /// </summary>
    public override bool IsValidTargetType(Type targetType)
    {
        if (targetType == typeof(SecurityIdentifier))
        {
            return true;
        }
        else if (targetType == typeof(NTAccount))
        {
            return true;
        }
        else
        {
            return false;
        }
    }
    /// <summary>
    /// Translates this account to the given identity type; translating to
    /// SecurityIdentifier performs an LSA name lookup and throws
    /// IdentityNotMappedException when the name cannot be mapped.
    /// </summary>
    public override IdentityReference Translate(Type targetType)
    {
        if (targetType == null)
        {
            throw new ArgumentNullException(nameof(targetType));
        }
        if (targetType == typeof(NTAccount))
        {
            return this; // assumes that NTAccount objects are immutable
        }
        else if (targetType == typeof(SecurityIdentifier))
        {
            // Wrap the single account in a collection and use the batch API,
            // forcing success so unmapped names raise an exception.
            IdentityReferenceCollection irSource = new IdentityReferenceCollection(1);
            irSource.Add(this);
            IdentityReferenceCollection irTarget;
            irTarget = NTAccount.Translate(irSource, targetType, true);
            return irTarget[0];
        }
        else
        {
            throw new ArgumentException(SR.IdentityReference_MustBeIdentityReference, nameof(targetType));
        }
    }
    public override bool Equals(object o)
    {
        return (this == o as NTAccount); // invokes operator==
    }
    // Case-insensitive hash to match the case-insensitive equality semantics.
    public override int GetHashCode()
    {
        return StringComparer.OrdinalIgnoreCase.GetHashCode(_name);
    }
    public override string ToString()
    {
        return _name;
    }
    /// <summary>
    /// Batch translation helper; when forceSuccess is true and any name failed to
    /// map, throws IdentityNotMappedException carrying the unmapped identities.
    /// </summary>
    internal static IdentityReferenceCollection Translate(IdentityReferenceCollection sourceAccounts, Type targetType, bool forceSuccess)
    {
        bool SomeFailed = false;
        IdentityReferenceCollection Result;
        Result = Translate(sourceAccounts, targetType, out SomeFailed);
        if (forceSuccess && SomeFailed)
        {
            // Collect the identities that were returned untranslated so the caller
            // can report exactly which names failed.
            IdentityReferenceCollection UnmappedIdentities = new IdentityReferenceCollection();
            foreach (IdentityReference id in Result)
            {
                if (id.GetType() != targetType)
                {
                    UnmappedIdentities.Add(id);
                }
            }
            throw new IdentityNotMappedException(SR.IdentityReference_IdentityNotMapped, UnmappedIdentities);
        }
        return Result;
    }
    internal static IdentityReferenceCollection Translate(IdentityReferenceCollection sourceAccounts, Type targetType, out bool someFailed)
    {
        if (sourceAccounts == null)
        {
            throw new ArgumentNullException(nameof(sourceAccounts));
        }
        // SecurityIdentifier is currently the only supported translation target.
        if (targetType == typeof(SecurityIdentifier))
        {
            return TranslateToSids(sourceAccounts, out someFailed);
        }
        throw new ArgumentException(SR.IdentityReference_MustBeIdentityReference, nameof(targetType));
    }
    #endregion
    #region Operators
    // Case-insensitive equality on the string form; null-safe on both sides.
    public static bool operator ==(NTAccount left, NTAccount right)
    {
        object l = left;
        object r = right;
        if (l == r)
        {
            return true;
        }
        else if (l == null || r == null)
        {
            return false;
        }
        else
        {
            return (left.ToString().Equals(right.ToString(), StringComparison.OrdinalIgnoreCase));
        }
    }
    public static bool operator !=(NTAccount left, NTAccount right)
    {
        return !(left == right); // invoke operator==
    }
    #endregion
    #region Private methods
    /// <summary>
    /// Translates a batch of NTAccount names to SIDs via LsaLookupNames2. Names
    /// that cannot be mapped are returned unchanged and someFailed is set to true.
    /// </summary>
    private static IdentityReferenceCollection TranslateToSids(IdentityReferenceCollection sourceAccounts, out bool someFailed)
    {
        if (sourceAccounts == null)
        {
            throw new ArgumentNullException(nameof(sourceAccounts));
        }
        if (sourceAccounts.Count == 0)
        {
            throw new ArgumentException(SR.Arg_EmptyCollection, nameof(sourceAccounts));
        }
        SafeLsaPolicyHandle LsaHandle = SafeLsaPolicyHandle.InvalidHandle;
        SafeLsaMemoryHandle ReferencedDomainsPtr = SafeLsaMemoryHandle.InvalidHandle;
        SafeLsaMemoryHandle SidsPtr = SafeLsaMemoryHandle.InvalidHandle;
        try
        {
            //
            // Construct an array of unicode strings
            //
            Interop.UNICODE_STRING[] Names = new Interop.UNICODE_STRING[sourceAccounts.Count];
            int currentName = 0;
            foreach (IdentityReference id in sourceAccounts)
            {
                NTAccount nta = id as NTAccount;
                if (nta == null)
                {
                    throw new ArgumentException(SR.Argument_ImproperType, nameof(sourceAccounts));
                }
                Names[currentName].Buffer = nta.ToString();
                // UNICODE_STRING lengths are byte counts (2 bytes per char) stored
                // in a ushort, so the name must fit below ushort.MaxValue.
                if (Names[currentName].Buffer.Length * 2 + 2 > ushort.MaxValue)
                {
                    // this should never happen since we are already validating account name length in constructor and
                    // it is less than this limit
                    Debug.Assert(false, "NTAccount::TranslateToSids - source account name is too long.");
                    throw new InvalidOperationException();
                }
                Names[currentName].Length = (ushort)(Names[currentName].Buffer.Length * 2);
                Names[currentName].MaximumLength = (ushort)(Names[currentName].Length + 2);
                currentName++;
            }
            //
            // Open LSA policy (for lookup requires it)
            //
            LsaHandle = Win32.LsaOpenPolicy(null, PolicyRights.POLICY_LOOKUP_NAMES);
            //
            // Now perform the actual lookup
            //
            someFailed = false;
            uint ReturnCode;
            ReturnCode = Interop.Advapi32.LsaLookupNames2(LsaHandle, 0, sourceAccounts.Count, Names, ref ReferencedDomainsPtr, ref SidsPtr);
            //
            // Make a decision regarding whether it makes sense to proceed
            // based on the return code and the value of the forceSuccess argument
            //
            if (ReturnCode == Interop.StatusOptions.STATUS_NO_MEMORY ||
                ReturnCode == Interop.StatusOptions.STATUS_INSUFFICIENT_RESOURCES)
            {
                throw new OutOfMemoryException();
            }
            else if (ReturnCode == Interop.StatusOptions.STATUS_ACCESS_DENIED)
            {
                throw new UnauthorizedAccessException();
            }
            else if (ReturnCode == Interop.StatusOptions.STATUS_NONE_MAPPED ||
                ReturnCode == Interop.StatusOptions.STATUS_SOME_NOT_MAPPED)
            {
                someFailed = true;
            }
            else if (ReturnCode != 0)
            {
                // Unexpected NTSTATUS: surface it as the corresponding Win32 error.
                uint win32ErrorCode = Interop.Advapi32.LsaNtStatusToWinError(ReturnCode);
                if (unchecked((int)win32ErrorCode) != Interop.Errors.ERROR_TRUSTED_RELATIONSHIP_FAILURE)
                {
                    Debug.Assert(false, string.Format(CultureInfo.InvariantCulture, "Interop.LsaLookupNames(2) returned unrecognized error {0}", win32ErrorCode));
                }
                throw new Win32Exception(unchecked((int)win32ErrorCode));
            }
            //
            // Interpret the results and generate SID objects
            //
            IdentityReferenceCollection Result = new IdentityReferenceCollection(sourceAccounts.Count);
            if (ReturnCode == 0 || ReturnCode == Interop.StatusOptions.STATUS_SOME_NOT_MAPPED)
            {
                SidsPtr.Initialize((uint)sourceAccounts.Count, (uint)Marshal.SizeOf<Interop.LSA_TRANSLATED_SID2>());
                Win32.InitializeReferencedDomainsPointer(ReferencedDomainsPtr);
                Interop.LSA_TRANSLATED_SID2[] translatedSids = new Interop.LSA_TRANSLATED_SID2[sourceAccounts.Count];
                SidsPtr.ReadArray(0, translatedSids, 0, translatedSids.Length);
                for (int i = 0; i < sourceAccounts.Count; i++)
                {
                    Interop.LSA_TRANSLATED_SID2 Lts = translatedSids[i];
                    //
                    // Only some names are recognized as NTAccount objects
                    //
                    switch ((SidNameUse)Lts.Use)
                    {
                        case SidNameUse.User:
                        case SidNameUse.Group:
                        case SidNameUse.Alias:
                        case SidNameUse.Computer:
                        case SidNameUse.WellKnownGroup:
                            Result.Add(new SecurityIdentifier(Lts.Sid, true));
                            break;
                        default:
                            // Unmappable use type: hand the original identity back.
                            someFailed = true;
                            Result.Add(sourceAccounts[i]);
                            break;
                    }
                }
            }
            else
            {
                // Nothing mapped: return the source identities unchanged.
                for (int i = 0; i < sourceAccounts.Count; i++)
                {
                    Result.Add(sourceAccounts[i]);
                }
            }
            return Result;
        }
        finally
        {
            LsaHandle.Dispose();
            ReferencedDomainsPtr.Dispose();
            SidsPtr.Dispose();
        }
    }
    #endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml;
using System.Xml.Linq;
using CoreXml.Test.XLinq;
using Microsoft.Test.ModuleCore;
using XmlCoreTest.Common;
namespace XLinqTests
{
public class XNodeSequenceRemove : XLinqTestCase
{
// Type is CoreXml.Test.XLinq.FunctionalTests+TreeManipulationTests+XNodeSequenceRemove
// Test Case
#region Fields
private EventsHelper _eHelper;
private bool _runWithEvents;
#endregion
#region Public Methods and Operators
public override void AddChildren()
{
    // Registers every variation of this test case: the method under test, its
    // human-readable description, priority, and (where applicable) a boolean Param.
    AddChild(new TestVariation(ElementsFromMixedContent) { Attribute = new VariationAttribute("All elements from mixed content") { Priority = 0 } });
    AddChild(new TestVariation(AllFromDocument) { Attribute = new VariationAttribute("All content from the XDocument (doc level)") { Priority = 0 } });
    AddChild(new TestVariation(AllNodes) { Attribute = new VariationAttribute("All nodes from the XDocument") { Priority = 0 } });
    AddChild(new TestVariation(TwoDocuments) { Attribute = new VariationAttribute("Nodes from two documents") { Priority = 0 } });
    AddChild(new TestVariation(DuplicateNodes) { Attribute = new VariationAttribute("Duplicate nodes in sequence") { Priority = 0 } });
    AddChild(new TestVariation(IdAttrsNulls) { Attribute = new VariationAttribute("Nodes from multiple elements + nulls") { Priority = 1 } });
    AddChild(new TestVariation(EmptySequence) { Attribute = new VariationAttribute("Empty sequence") { Priority = 1 } });
    AddChild(new TestVariation(XNodeAncestors) { Attribute = new VariationAttribute("XNode.Ancestors") { Priority = 1 } });
    AddChild(new TestVariation(XNodeAncestorsXName) { Attribute = new VariationAttribute("XNode.Ancestors(XName)") { Priority = 1 } });
    AddChild(new TestVariation(XNodesBeforeSelf) { Attribute = new VariationAttribute("XNode.NodesBeforeSelf") { Priority = 1 } });
    AddChild(new TestVariation(XNodesAfterSelf) { Attribute = new VariationAttribute("XNode.NodesAfterSelf") { Priority = 1 } });
    AddChild(new TestVariation(XElementsBeforeSelf) { Attribute = new VariationAttribute("XNode.ElementsBeforeSelf") { Priority = 1 } });
    AddChild(new TestVariation(XElementsAfterSelf) { Attribute = new VariationAttribute("XNode.ElementsAfterSelf") { Priority = 1 } });
    AddChild(new TestVariation(XElementsBeforeSelfXName) { Attribute = new VariationAttribute("XNode.ElementsBeforeSelf(XName)") { Priority = 1 } });
    AddChild(new TestVariation(XElementsAfterSelfXName) { Attribute = new VariationAttribute("XNode.ElementsAfterSelf(XName)") { Priority = 1 } });
    AddChild(new TestVariation(Document_Nodes) { Attribute = new VariationAttribute("XDocument.Nodes") { Priority = 2 } });
    AddChild(new TestVariation(Document_DescendantNodes) { Attribute = new VariationAttribute("XDocument.DescendantNodes") { Priority = 2 } });
    AddChild(new TestVariation(Document_Descendants) { Attribute = new VariationAttribute("XDocument.Descendants") { Priority = 2 } });
    AddChild(new TestVariation(Document_Elements) { Attribute = new VariationAttribute("XDocument.Elements") { Priority = 2 } });
    AddChild(new TestVariation(Document_DescendantsXName) { Attribute = new VariationAttribute("XDocument.Descendants(XName)") { Priority = 2 } });
    AddChild(new TestVariation(Document_ElementsXName) { Attribute = new VariationAttribute("XDocument.Elements(XName)") { Priority = 2 } });
    AddChild(new TestVariation(Element_Nodes) { Attribute = new VariationAttribute("XElement.Nodes") { Priority = 2 } });
    AddChild(new TestVariation(Element_DescendantNodes) { Attribute = new VariationAttribute("XElement.DescendantNodes") { Priority = 2 } });
    AddChild(new TestVariation(Element_Descendants) { Attribute = new VariationAttribute("XElement.Descendants") { Priority = 2 } });
    AddChild(new TestVariation(Element_Elements) { Attribute = new VariationAttribute("XElement.Elements") { Priority = 2 } });
    AddChild(new TestVariation(Element_DescendantsXName) { Attribute = new VariationAttribute("XElement.Descendants(XName)") { Priority = 2 } });
    AddChild(new TestVariation(Element_ElementsXName) { Attribute = new VariationAttribute("XElement.Elements(XName)") { Priority = 2 } });
    AddChild(new TestVariation(Element_AncestorsAndSelf) { Attribute = new VariationAttribute("XElement.AncestorsAndSelf") { Priority = 2 } });
    AddChild(new TestVariation(Element_DescendantNodesAndSelf) { Attribute = new VariationAttribute("XElement.DescendantNodesAndSelf") { Priority = 2 } });
    AddChild(new TestVariation(Element_DescendantsAndSelf) { Attribute = new VariationAttribute("XElement.DescendantsAndSelf") { Priority = 2 } });
    AddChild(new TestVariation(Element_DescendantsAndSelfXName) { Attribute = new VariationAttribute("XElement.DescendantsAndSelf(XName) II.") { Param = false, Priority = 2 } });
    AddChild(new TestVariation(Element_DescendantsAndSelfXName) { Attribute = new VariationAttribute("XElement.DescendantsAndSelf(XName) I.") { Param = true, Priority = 2 } });
    AddChild(new TestVariation(Element_AncestorsAndSelfXName) { Attribute = new VariationAttribute("XElement.AncestorsAndSelf(XName) I.") { Param = true, Priority = 2 } });
    AddChild(new TestVariation(Element_AncestorsAndSelfXName) { Attribute = new VariationAttribute("XElement.AncestorsAndSelf(XName) II.") { Param = false, Priority = 2 } });
}
//[Variation(Priority = 0, Desc = "All elements from mixed content")]
//[Variation(Priority = 0, Desc = "All content from the XDocument (doc level)")]
public void AllFromDocument()
{
    // Remove every top-level node of the document and verify the deletions,
    // optionally tracking Remove events.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Parse("\t<?PI?><A xmlns='a'/>\r\n <!--comment-->", LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Nodes();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
//[Variation(Priority = 0, Desc = "All nodes from the XDocument")]
public void AllNodes()
{
    // Remove every descendant node of the document and verify the deletions.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Parse("\t<?PI?><A xmlns='a'/>\r\n <!--comment-->", LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.DescendantNodes();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
//[Variation(Priority = 0, Desc = "Nodes from two documents")]
//[Variation(Priority = 2, Desc = "XDocument.DescendantNodes")]
public void Document_DescendantNodes()
{
    // Remove all descendant nodes; the expected Remove-event count is taken from
    // the document's top-level node count before deletion.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.DescendantNodes();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = document.Nodes().Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
//[Variation(Priority = 2, Desc = "XDocument.Descendants")]
public void Document_Descendants()
{
    // Remove all descendant elements; the expected Remove-event count is the number
    // of top-level elements before deletion.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Descendants().OfType<XNode>();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = document.Elements().Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
//[Variation(Priority = 2, Desc = "XDocument.Elements")]
//[Variation(Priority = 2, Desc = "XDocument.Descendants(XName)")]
public void Document_DescendantsXName()
{
    // Remove all descendants matching the qualified "book" name and verify.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Descendants(@"{http://www.books.com/}book").OfType<XNode>();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
public void Document_Elements()
{
    // Remove the document's top-level elements and verify the deletions.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Elements().OfType<XNode>();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
//[Variation(Priority = 2, Desc = "XDocument.Elements(XName)")]
public void Document_ElementsXName()
{
    // Remove the top-level "bookstore" element(s) and verify the deletions.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Elements("bookstore").OfType<XNode>();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
public void Document_Nodes()
{
    // Remove all top-level nodes of the loaded document and verify the deletions.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Nodes();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
public void DuplicateNodes()
{
    // Build a sequence that contains the same node more than once (first two root
    // children concatenated with the root's elements) and verify removal.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Parse("<A xmlns='a'><!--comment-->text1<X/></A>", LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Root.Nodes().Take(2).Concat2(document.Root.Elements());

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
//[Variation(Priority = 2, Desc = "XElement.Nodes")]
// XElement:
// IEnumerable<XElement> AncestorsAndSelf()
// IEnumerable<XNode> SelfAndDescendantNodes()
// IEnumerable<XElement> DescendantsAndSelf()
// IEnumerable<XElement> DescendantsAndSelf(XName name)
// IEnumerable<XElement> AncestorsAndSelf(XName name)
//[Variation(Priority = 2, Desc = "XElement.AncestorsAndSelf")]
public void Element_AncestorsAndSelf()
{
    // Starting from a specific book element, remove it together with its ancestors.
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").First(x => x.Element("title").Value == "XQL The Golden Years");
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.AncestorsAndSelf().OfType<XNode>();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
public void Element_AncestorsAndSelfXName()
{
    // Variation.Param selects whether the XName filter matches self ("book")
    // or an ancestor ("bookstore").
    _runWithEvents = (bool)Params[0];
    var useSelf = (bool)Variation.Param;
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").First(x => x.Element("title").Value == "XQL The Golden Years");
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.AncestorsAndSelf(useSelf ? @"{http://www.books.com/}book" : @"bookstore").OfType<XNode>();

    int expectedRemovals = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedRemovals = targets.IsEmpty() ? 0 : targets.Count();
    }

    VerifyDeleteNodes(targets);

    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedRemovals);
    }
}
// Deletes all descendant nodes; removing a direct child removes its whole
// subtree in one operation, so the expected event count is Nodes().Count().
public void Element_DescendantNodes()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").Where(x => x.Element("title").Value == "XQL The Golden Years").First();
    TestLog.Compare(start != null, "TEST_FAILING: wrong starting position");
    IEnumerable<XNode> targets = start.DescendantNodes();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = start.Nodes().Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
//[Variation(Priority = 2, Desc = "XElement.DescendantNodesAndSelf")]
// Deletes the element together with its subtree; detaching the element itself
// raises exactly one Remove event, hence the expected count of 1.
public void Element_DescendantNodesAndSelf()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").Where(x => x.Element("title").Value == "XQL The Golden Years").First();
    TestLog.Compare(start != null, "TEST_FAILING: wrong starting position");
    IEnumerable<XNode> targets = start.DescendantNodesAndSelf();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = 1;
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes all descendant elements; removing a direct child element removes its
// subtree, so events equal the number of direct child elements.
public void Element_Descendants()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").Where(x => x.Element("title").Value == "XQL The Golden Years").First();
    TestLog.Compare(start != null, "TEST_FAILING: wrong starting position");
    IEnumerable<XNode> targets = start.Descendants().OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = start.Elements().Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
//[Variation(Priority = 2, Desc = "XElement.DescendantsAndSelf")]
// Deletes the element plus descendant elements; detaching the element itself
// covers the whole subtree, hence exactly one Remove event.
public void Element_DescendantsAndSelf()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").Where(x => x.Element("title").Value == "XQL The Golden Years").First();
    TestLog.Compare(start != null, "TEST_FAILING: wrong starting position");
    IEnumerable<XNode> targets = start.DescendantsAndSelf().OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = 1;
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
//[Variation(Priority = 2, Desc = "XElement.DescendantsAndSelf(XName) I.", Param = true)]
//[Variation(Priority = 2, Desc = "XElement.DescendantsAndSelf(XName) II.", Param = false)]
// Deletes DescendantsAndSelf(name); the variation parameter picks whether the
// start element itself matches the name or only its descendants do.
public void Element_DescendantsAndSelfXName()
{
    _runWithEvents = (bool)Params[0];
    var useSelf = (bool)Variation.Param;
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = useSelf ? document.Descendants(@"{http://www.books.com/}book").Where(x => x.Element("title").Value == "XQL The Golden Years").First() : document.Root;
    TestLog.Compare(start != null, "TEST_FAILING: wrong starting position");
    IEnumerable<XNode> targets = start.DescendantsAndSelf(@"{http://www.books.com/}book").OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes every descendant element matching the given name under the root.
public void Element_DescendantsXName()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Root.Descendants(@"{http://www.books.com/}book").OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes all direct child elements of a located book element.
public void Element_Elements()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").Where(x => x.Element("title").Value == "XQL The Golden Years").First();
    TestLog.Compare(start != null, "TEST_FAILING: wrong starting position");
    IEnumerable<XNode> targets = start.Elements().OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes the root's direct child elements matching the given qualified name.
public void Element_ElementsXName()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = document.Root.Elements(@"{http://www.books.com/}book").OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes all direct child nodes (elements, text, comments, ...) of an element.
public void Element_Nodes()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants(@"{http://www.books.com/}book").Where(x => x.Element("title").Value == "XQL The Golden Years").First();
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.Nodes();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes only the element children out of mixed content (text, comment, elements);
// non-element siblings must stay behind.
public void ElementsFromMixedContent()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Parse(@"<A xmlns='a'><B/>text1<p:B xmlns:p='nsp'/>text2<!--commnent--><C><innerElement/></C></A>", LoadOptions.PreserveWhitespace);
    IEnumerable<XElement> targets = document.Root.Elements();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Removing an empty sequence must be a no-op: the document's serialized form
// before and after Remove() must be byte-identical and no Remove events raised.
// Fix: the StreamWriters were never flushed/disposed, so the MemoryStreams could
// be compared while data was still sitting in the writers' buffers.
public void EmptySequence()
{
    int count = 0;
    _runWithEvents = (bool)Params[0];
    XDocument doc = XDocument.Parse(@"<A id='a' xmlns:p1='nsp1'><B id='b' xmlns='nbs' xmlns:p='nsp' p:x='xx'>text</B><C/><p1:D id='x' datrt='dat'/></A>");
    IEnumerable<XNode> noNodes = doc.Descendants().Where(x => x.Name == "NonExisting").OfType<XNode>();
    var ms1 = new MemoryStream();
    // Dispose the writer so all buffered output lands in ms1 before the snapshot.
    using (var writer1 = new StreamWriter(ms1))
    {
        doc.Save(writer1);
    }
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(doc);
        count = noNodes.IsEmpty() ? 0 : noNodes.Count();
    }
    noNodes.Remove();
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, count);
    }
    var ms2 = new MemoryStream();
    using (var writer2 = new StreamWriter(ms2))
    {
        doc.Save(writer2);
    }
    // ToArray is valid even after the MemoryStream has been closed by the writer.
    TestLog.Compare(ms1.ToArray().SequenceEqual(ms2.ToArray()), "Documents different");
}
// Deletes a sequence with a null interleaved after every node (InsertNulls(1));
// nulls are skipped, so only half of the sequence entries produce Remove events.
public void IdAttrsNulls()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Parse(@"<A id='a' xmlns:p1='nsp1'><B id='b' xmlns='nbs' xmlns:p='nsp' p:x='xx'>text</B><C/><p1:D id='x' datrt='dat'/></A>");
    IEnumerable<XNode> targets = document.Root.Descendants().OfType<XNode>().InsertNulls(1);
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count() / 2;
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Mixed sequence drawn from two different documents; events are tracked (and
// therefore counted) only for the first document's nodes.
public void TwoDocuments()
{
    _runWithEvents = (bool)Params[0];
    XDocument firstDoc = XDocument.Parse("<A xmlns='a'><!--comment-->text1<X/></A>", LoadOptions.PreserveWhitespace);
    XDocument secondDoc = XDocument.Parse("<A xmlns='b'>text1<X/>text2</A>", LoadOptions.PreserveWhitespace);
    IEnumerable<XNode> targets = firstDoc.Root.DescendantNodes().Where(x => x.NodeType == XmlNodeType.Comment).Concat2(secondDoc.Root.Elements());
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(firstDoc);
        expectedEvents = firstDoc.Root.DescendantNodes().Where(x => x.NodeType == XmlNodeType.Comment).Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes every element sibling following the located magazine element.
public void XElementsAfterSelf()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants("magazine").Where(x => x.Element("title").Value == "PC Week").First();
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.ElementsAfterSelf().OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes the following element siblings that match the given qualified name.
public void XElementsAfterSelfXName()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants("magazine").Where(x => x.Element("title").Value == "PC Week").First();
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.ElementsAfterSelf(@"{http://www.books.com/}book").OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes every element sibling preceding the located magazine element.
public void XElementsBeforeSelf()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants("magazine").Where(x => x.Element("title").Value == "PC Week").First();
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.ElementsBeforeSelf().OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes the preceding element siblings that match the given qualified name.
public void XElementsBeforeSelfXName()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants("magazine").Where(x => x.Element("title").Value == "PC Week").First();
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.ElementsBeforeSelf(@"{http://www.books.com/}book").OfType<XNode>();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes all ancestors of a deeply nested element and verifies the events.
// Fix: VerifyDeleteNodes previously re-enumerated e.Ancestors() instead of the
// toRemove sequence already computed — inconsistent with every sibling test.
public void XNodeAncestors()
{
    int count = 0;
    _runWithEvents = (bool)Params[0];
    XDocument doc = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement e = doc.Descendants("last.name").Where(x => x.Value == "Marsh").First();
    TestLog.Compare(e != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> toRemove = e.Ancestors().OfType<XNode>();
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(doc);
        count = toRemove.IsEmpty() ? 0 : toRemove.Count();
    }
    VerifyDeleteNodes(toRemove);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, count);
    }
}
//[Variation(Priority = 1, Desc = "XNode.Ancestors(XName)")]
// Deletes ancestors filtered by name ("author") and verifies the events.
// Fix: VerifyDeleteNodes previously re-enumerated e.Ancestors("author") instead
// of the toRemove sequence already computed — inconsistent with sibling tests.
public void XNodeAncestorsXName()
{
    int count = 0;
    _runWithEvents = (bool)Params[0];
    XDocument doc = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement e = doc.Descendants("last.name").Where(x => x.Value == "Marsh").First();
    TestLog.Compare(e != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> toRemove = e.Ancestors("author").OfType<XNode>();
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(doc);
        count = toRemove.IsEmpty() ? 0 : toRemove.Count();
    }
    VerifyDeleteNodes(toRemove);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, count);
    }
}
// Deletes every node (not just elements) following the located magazine element.
public void XNodesAfterSelf()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants("magazine").Where(x => x.Element("title").Value == "PC Week").First();
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.NodesAfterSelf();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
// Deletes every node (not just elements) preceding the located magazine element.
public void XNodesBeforeSelf()
{
    _runWithEvents = (bool)Params[0];
    XDocument document = XDocument.Load(FilePathUtil.getStream(@"testdata\xlinq\books.xml"), LoadOptions.PreserveWhitespace);
    XElement start = document.Descendants("magazine").Where(x => x.Element("title").Value == "PC Week").First();
    TestLog.Compare(start != null, "TEST_FAILED: wrong starting position");
    IEnumerable<XNode> targets = start.NodesBeforeSelf();
    int expectedEvents = 0;
    if (_runWithEvents)
    {
        _eHelper = new EventsHelper(document);
        expectedEvents = targets.IsEmpty() ? 0 : targets.Count();
    }
    VerifyDeleteNodes(targets);
    if (_runWithEvents)
    {
        _eHelper.Verify(XObjectChange.Remove, expectedEvents);
    }
}
#endregion
//[Variation(Priority = 2, Desc = "XElement.AncestorsAndSelf(XName) I.", Param = true)]
//[Variation(Priority = 2, Desc = "XElement.AncestorsAndSelf(XName) II.", Param = false)]
#region Methods
/// <summary>
/// Removes <paramref name="toRemove"/> from their trees via Remove() and verifies
/// the deletion: every removed node must be fully detached (no Parent, Document,
/// NextNode or PreviousNode), must no longer be reachable through its former
/// parent's axes, and the parent's remaining children must match a snapshot of
/// the expected survivors taken before removal. Null entries are tolerated.
/// </summary>
private void VerifyDeleteNodes<T>(IEnumerable<T> toRemove) where T : XNode
{
    // copy of the data to delete
    IEnumerable<XNode> toRemoveCopy = toRemove.OfType<XNode>().ToList();
    // Create array of parents; for root-level nodes the document itself acts as the parent
    IEnumerable<XContainer> parents = toRemove.Select(x => (x == null) ? (XContainer)null : (x.Parent != null ? (XContainer)x.Parent : (XContainer)x.Document)).ToList();
    // calculate the expected results for the parents of the processed elements
    // (expected = current children minus the nodes about to be removed, with
    // adjacent text runs normalized by ProcessNodes)
    var expectedNodesForParent = new Dictionary<XContainer, List<ExpectedValue>>();
    foreach (XContainer p in parents)
    {
        if (p != null && !expectedNodesForParent.ContainsKey(p))
        {
            expectedNodesForParent.Add(p, p.Nodes().Except(toRemoveCopy.Where(x => x != null)).Select(a => new ExpectedValue(!(a is XText), a)).ProcessNodes().ToList());
        }
    }
    toRemove.Remove();
    // Walk the removed-nodes copy and the parallel parents sequence in lockstep
    IEnumerator<XNode> copyToRemove = toRemoveCopy.GetEnumerator();
    IEnumerator<XContainer> parentsEnum = parents.GetEnumerator();
    // verify on parents: deleted elements should not be found
    while (copyToRemove.MoveNext() && parentsEnum.MoveNext())
    {
        XNode node = copyToRemove.Current;
        if (node != null)
        {
            XContainer parent = parentsEnum.Current;
            // A removed node must be completely detached from its former tree
            TestLog.Compare(node.Parent, null, "Parent of deleted");
            TestLog.Compare(node.Document, null, "Document of deleted");
            TestLog.Compare(node.NextNode, null, "NextNode of deleted");
            TestLog.Compare(node.PreviousNode, null, "PreviousNode of deleted");
            if (parent != null)
            {
                TestLog.Compare(parent.Nodes().Where(x => x == node).IsEmpty(), "Nodes axis");
                if (node is XElement)
                {
                    var e = node as XElement;
                    e.Verify();
                    // The element-specific axes must not return the removed instance
                    TestLog.Compare(parent.Element(e.Name) != node, "Element axis");
                    TestLog.Compare(parent.Elements(e.Name).Where(x => x == e).IsEmpty(), "Elements axis");
                }
                // Compare the rest of the elements
                TestLog.Compare(expectedNodesForParent[parent].EqualAll(parent.Nodes(), XNode.EqualityComparer), "The rest of ther nodes");
            }
        }
    }
}
#endregion
}
}
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using Pathfinding.Util;
using Pathfinding.Serialization.JsonFx;
namespace Pathfinding {
/// <summary>
/// Base class for all graphs
/// </summary>
public abstract class NavGraph {
/** Used to store the guid value
* \see NavGraph.guid
*/
public byte[] _sguid;
/** Reference to the AstarPath object in the scene.
* Might not be entirely safe to use, it's better to use AstarPath.active
*/
public AstarPath active;
/** Used as an ID of the graph, considered to be unique.
* \note This is Pathfinding.Util.Guid not System.Guid. A replacement for System.Guid was coded for better compatibility with iOS
*/
[JsonMember]
public Guid guid {
    get {
        // Lazily create the backing byte array; also heals a null or
        // wrong-length (corrupt) array by generating a fresh guid.
        if (_sguid == null || _sguid.Length != 16) {
            _sguid = Guid.NewGuid ().ToByteArray ();
        }
        return new Guid (_sguid);
    }
    set {
        // Store only the raw bytes; the Guid is reconstructed on demand.
        _sguid = value.ToByteArray ();
    }
}
[JsonMember]
public uint initialPenalty = 0;
/// <summary>
/// Is the graph open in the editor
/// </summary>
[JsonMember]
public bool open;
[JsonMember]
public string name;
[JsonMember]
public bool drawGizmos = true;
//#if UNITY_EDITOR
/** Used in the editor to check if the info screen is open.
* Should be inside UNITY_EDITOR only \#ifs but just in case anyone tries to serialize a NavGraph instance using Unity, I have left it like this as it would otherwise cause a crash when building.
* Version 3.0.8.1 was released because of this bug only
*/
[JsonMember]
public bool infoScreenOpen;
//#endif
/** All nodes this graph contains. This can be iterated to search for a specific node.
* This should be set if the graph does contain any nodes.
* \note Entries are permitted to be NULL, make sure you account for that when iterating a graph's nodes
*/
public Node[] nodes;
/** A matrix for translating/rotating/scaling the graph.
* Not all graph generators sets this variable though.
*/
public Matrix4x4 matrix;
public Matrix4x4 inverseMatrix {
get { return matrix.inverse; }
}
/** Creates a number of nodes with the correct type for the graph.
This should not set the #nodes array, only return the nodes.
Called by graph generators and when deserializing a graph with nodes.
Override this function if you do not use the default Pathfinding.Node class.
*/
// Allocates 'number' fresh nodes, each stamped with this graph's initialPenalty.
// Does not assign the #nodes field; callers decide where the array goes.
public virtual Node[] CreateNodes (int number) {
    Node[] created = new Node[number];
    for (int i = 0; i < number; i++) {
        Node n = new Node ();
        n.penalty = initialPenalty;
        created[i] = n;
    }
    return created;
}
/** Relocates the nodes in this graph.
* Assumes the nodes are translated using the "oldMatrix", then translates them according to the "newMatrix".
* The "oldMatrix" is not required by all implementations of this function though (e.g the NavMesh generator).
* \bug Does not always work for Grid Graphs, see http://www.arongranberg.com/forums/topic/relocate-nodes-fix/
*/
// Moves all node positions from the space defined by oldMatrix into the space
// defined by newMatrix, then records newMatrix as the graph's current transform.
public virtual void RelocateNodes (Matrix4x4 oldMatrix, Matrix4x4 newMatrix) {
    if (nodes == null || nodes.Length == 0) {
        return;
    }
    // Undo the old transform, then apply the new one, in a single matrix.
    Matrix4x4 remap = oldMatrix.inverse * newMatrix;
    for (int i = 0; i < nodes.Length; i++) {
        nodes[i].position = (Int3)remap.MultiplyPoint ((Vector3)nodes[i].position);
    }
    this.matrix = newMatrix;
}
/** Returns the nearest node to a position using the default NNConstraint.
* \param position The position to try to find a close node to
* \see Pathfinding.NNConstraint.None
*/
public NNInfo GetNearest (Vector3 position) {
    // Convenience overload: search with the unconstrained NNConstraint.None.
    return GetNearest (position, NNConstraint.None);
}
/** Returns the nearest node to a position using the specified NNConstraint.
* \param position The position to try to find a close node to
* \param constraint Can for example tell the function to try to return a walkable node. If you do not get a good node back, consider calling GetNearestForce. */
public NNInfo GetNearest (Vector3 position, NNConstraint constraint) {
    // Forward to the virtual overload without a hint node.
    return GetNearest (position, constraint, null);
}
/** Returns the nearest node to a position using the specified NNConstraint.
* \param position The position to try to find a close node to
* \param hint Can be passed to enable some graph generators to find the nearest node faster.
* \param constraint Can for example tell the function to try to return a walkable node. If you do not get a good node back, consider calling GetNearestForce. */
// Linear scan over all nodes, tracking both the nearest node overall and the
// nearest node which satisfies the constraint (within the max search distance).
// The 'hint' parameter is unused by this default implementation; graph
// generators typically override this with a faster spatial lookup.
public virtual NNInfo GetNearest (Vector3 position, NNConstraint constraint, Node hint) {
    if (nodes == null) {
        return new NNInfo ();
    }
    float maxDistSqr = constraint.constrainDistance ? AstarPath.active.maxNearestNodeDistanceSqr : float.PositiveInfinity;
    Node bestNode = null;
    float bestDist = float.PositiveInfinity;
    Node bestSuitableNode = null;
    float bestSuitableDist = float.PositiveInfinity;
    for (int i = 0; i < nodes.Length; i++) {
        Node candidate = nodes[i];
        float sqrDist = (position - (Vector3)candidate.position).sqrMagnitude;
        if (sqrDist < bestDist) {
            bestDist = sqrDist;
            bestNode = candidate;
        }
        if (sqrDist < bestSuitableDist && sqrDist < maxDistSqr && constraint.Suitable (candidate)) {
            bestSuitableDist = sqrDist;
            bestSuitableNode = candidate;
        }
    }
    NNInfo result = new NNInfo (bestNode);
    result.constrainedNode = bestSuitableNode;
    if (bestSuitableNode != null) {
        result.constClampedPosition = (Vector3)bestSuitableNode.position;
    } else if (bestNode != null) {
        // No node satisfied the constraint; fall back to the overall nearest.
        result.constrainedNode = bestNode;
        result.constClampedPosition = (Vector3)bestNode.position;
    }
    return result;
}
/// <summary>
/// Returns the nearest node to a position using the specified <see cref="NNConstraint">constraint</see>.
/// </summary>
/// <param name="position">
/// A <see cref="Vector3"/>
/// </param>
/// <param name="constraint">
/// A <see cref="NNConstraint"/>
/// </param>
/// <returns>
/// A <see cref="NNInfo"/>. This function will only return an empty NNInfo if there is no nodes which comply with the specified constraint.
/// </returns>
public virtual NNInfo GetNearestForce (Vector3 position, NNConstraint constraint) {
    // Default implementation simply defers to GetNearest; graph types which
    // override GetNearest are expected to override this one as well.
    return GetNearest (position, constraint);
    //Debug.LogError ("This should not be called if not GetNearest has been overriden, and if GetNearest has been overriden, you should override this function too, always return a node which returns true when passed to constraint.Suitable (node)");
    //return new NNInfo ();
}
/// <summary>
/// This will be called on the same time as Awake on the gameObject which the AstarPath script is attached to. (remember, not in the editor)
/// Use this for any initialization code which can't be placed in Scan
/// </summary>
public virtual void Awake () {
    // Intentionally empty; override in graph types that need early initialization.
}
/// <summary>
/// SafeOnDestroy should be used when there is a risk that the pathfinding is searching through this graph when called
/// </summary>
public void SafeOnDestroy () {
    // Defer OnDestroy until no pathfinding thread can be using this graph.
    AstarPath.RegisterSafeUpdate (OnDestroy,false);
}
/// <summary>
/// Function for cleaning up references.
/// </summary>
/// <remarks>This will be called on the same time as OnDisable on the gameObject which the AstarPath script is attached to (remember, not in the editor)
/// Use for any cleanup code such as cleaning up static variables which otherwise might prevent resources from being collected
/// Use by creating a function overriding this one in a graph class, but always call base.OnDestroy () in that function.</remarks>
public virtual void OnDestroy () {
    //Clean up the reference to the node array so it can get collected even if a reference to this graph still exists somewhere
    nodes = null;
}
/// <summary>
/// Consider using AstarPath.Scan () instead since this function might screw things up if there is more than one graph.
/// This function does not perform all necessary postprocessing for the graph to work with pathfinding (e.g flood fill).
/// See the source of the AstarPath.Scan function to see how it can be used.
///
/// In almost all cases you should use AstarPath.Scan instead.
/// </summary>
public void ScanGraph () {
    // Fire pre-scan callbacks: global first, then per-graph.
    if (AstarPath.OnPreScan != null) {
        AstarPath.OnPreScan (AstarPath.active);
    }
    if (AstarPath.OnGraphPreScan != null) {
        AstarPath.OnGraphPreScan (this);
    }
    Scan ();
    // Fire post-scan callbacks: per-graph first, then global.
    if (AstarPath.OnGraphPostScan != null) {
        AstarPath.OnGraphPostScan (this);
    }
    if (AstarPath.OnPostScan != null) {
        AstarPath.OnPostScan (AstarPath.active);
    }
}
/// <summary>
/// Scans the graph, called from <see cref="AstarPath.Scan"/>
/// Override this function to implement custom scanning logic
/// </summary>
public abstract void Scan ();
/* Color to use for gizmos.
* Returns a color to be used for the specified node with the current debug settings (editor only)
*/
public virtual Color NodeColor (Node node, NodeRunData data) {
#if !PhotonImplementation
    Color c = AstarColor.NodeConnection;
    bool colSet = false;
    if (node == null) return AstarColor.NodeConnection;
    // First handle debug modes which do not need per-path (NodeRun) data.
    switch (AstarPath.active.debugMode) {
        case GraphDebugMode.Areas:
            c = AstarColor.GetAreaColor (node.area);
            colSet = true;
            break;
        case GraphDebugMode.Penalty:
            c = Color.Lerp (AstarColor.ConnectionLowLerp,AstarColor.ConnectionHighLerp, (float)node.penalty / (float)AstarPath.active.debugRoof);
            colSet = true;
            break;
        case GraphDebugMode.Tags:
            c = Mathfx.IntToColor (node.tags,0.5F);
            colSet = true;
            break;
        /* Wasn't really usefull
        case GraphDebugMode.Position:
            float r = Mathf.PingPong (node.position.x/10000F,1F) + Mathf.PingPong (node.position.x/300000F,1F);
            float g = Mathf.PingPong (node.position.y/10000F,1F) + Mathf.PingPong (node.position.y/200000F,1F);
            float b = Mathf.PingPong (node.position.z/10000F,1F) + Mathf.PingPong (node.position.z/100000F,1F);
            c = new Color (r,g,b);
            break;
        */
    }
    if (!colSet) {
        // G/H/F modes need per-path data; fall back to the default color if absent.
        if (data == null) return AstarColor.NodeConnection;
        NodeRun nodeR = node.GetNodeRun (data);
        if (nodeR == null) return AstarColor.NodeConnection;
        switch (AstarPath.active.debugMode) {
            case GraphDebugMode.G:
                //c = Mathfx.IntToColor (node.g,0.5F);
                c = Color.Lerp (AstarColor.ConnectionLowLerp,AstarColor.ConnectionHighLerp, (float)nodeR.g / (float)AstarPath.active.debugRoof);
                break;
            case GraphDebugMode.H:
                c = Color.Lerp (AstarColor.ConnectionLowLerp,AstarColor.ConnectionHighLerp, (float)nodeR.h / (float)AstarPath.active.debugRoof);
                break;
            case GraphDebugMode.F:
                c = Color.Lerp (AstarColor.ConnectionLowLerp,AstarColor.ConnectionHighLerp, (float)nodeR.f / (float)AstarPath.active.debugRoof);
                break;
        }
    }
    // Halve the alpha so gizmo lines do not completely obscure the scene.
    c.a *= 0.5F;
    return c;
#else
    return new Color (1,1,1);
#endif
}
/** Serializes graph type specific node data.
* This function can be overriden to serialize extra node information (or graph information for that matter)
* which cannot be serialized using the standard serialization.
* Serialize the data in any way you want and return a byte array.
* When loading, the exact same byte array will be passed to the DeserializeExtraInfo function.\n
* These functions will only be called if node serialization is enabled.\n
* If null is returned from this function, the DeserializeExtraInfo function will not be called on load.
*/
public virtual byte[] SerializeExtraInfo () {
    // Default: no extra per-graph data; returning null also skips
    // DeserializeExtraInfo on load.
    return null;
}
/** Deserializes graph type specific node data.
* \see SerializeExtraInfo
*/
public virtual void DeserializeExtraInfo (byte[] bytes) {
    // Default: nothing to restore; see SerializeExtraInfo.
}
/** Called after all deserialization has been done for all graphs.
* Can be used to set up more graph data which is not serialized
*/
public virtual void PostDeserialization () {
    // Default: no post-deserialization fix-up needed.
}
/** Returns if the node is in the search tree of the path.
* Only guaranteed to be correct if \a path is the latest path calculated.
* Use for gizmo drawing only.
*/
// Reports whether 'node' was touched by 'path''s most recent search.
// Without per-path run data everything is treated as inside the tree.
public bool InSearchTree (Node node, Path path) {
    if (path != null && path.runData != null) {
        return node.GetNodeRun (path.runData).pathID == path.pathID;
    }
    return true;
}
public virtual void OnDrawGizmos (bool drawNodes) {
    if (nodes == null || !drawNodes) {
        if (!Application.isPlaying) {
            //Scan (0);
        }
        return;
    }
    for (int i=0;i<nodes.Length;i++) {
        Node node = nodes[i];
        // Entries may be null-connected; only draw nodes with a connection array.
        if (node.connections != null) {
            Gizmos.color = NodeColor (node, AstarPath.active.debugPathData);
            // NOTE(review): this 'return' aborts drawing of ALL remaining nodes
            // as soon as one node is outside the search tree — 'continue' looks
            // like the intent here; confirm before changing.
            if (AstarPath.active.showSearchTree && !InSearchTree(node,AstarPath.active.debugPath)) return;
            if (AstarPath.active.showSearchTree && AstarPath.active.debugPathData != null && node.GetNodeRun(AstarPath.active.debugPathData).parent != null) {
                // Search-tree mode: draw the edge to the parent in the search tree.
                Gizmos.DrawLine ((Vector3)node.position,(Vector3)node.GetNodeRun(AstarPath.active.debugPathData).parent.node.position);
            } else {
                // Normal mode: draw every outgoing connection.
                for (int q=0;q<node.connections.Length;q++) {
                    Gizmos.DrawLine ((Vector3)node.position,(Vector3)node.connections[q].position);
                }
            }
        }
    }
}
}
[System.Serializable]
/** Handles collision checking for graphs.
* Mostly used by grid based graphs */
public class GraphCollision : ISerializableObject {
/** Collision shape to use.
* Pathfinding.ColliderType */
public ColliderType type = ColliderType.Capsule;
/** Diameter of capsule or sphere when checking for collision.
* 1 equals \link Pathfinding.GridGraph.nodeSize nodeSize \endlink.
* If #type is set to Ray, this does not affect anything */
public float diameter = 1F;
/** Height of capsule or length of ray when checking for collision.
* If #type is set to Sphere, this does not affect anything
*/
public float height = 2F;
public float collisionOffset = 0;
/** Direction of the ray when checking for collision.
* If #type is not Ray, this does not affect anything
* \note This variable is not used currently, it does not affect anything
*/
public RayDirection rayDirection = RayDirection.Both;
/** Layer mask to use for collision check.
* This should only contain layers of objects defined as obstacles */
public LayerMask mask;
/** Layer mask to use for height check. */
public LayerMask heightMask = -1;
/** The height to check from when checking height */
public float fromHeight = 100;
/** Toggles thick raycast */
public bool thickRaycast = false;
/** Diameter of the thick raycast in nodes.
* 1 equals \link Pathfinding.GridGraph.nodeSize nodeSize \endlink */
public float thickRaycastDiameter = 1;
/** Direction to use as \a UP.
* \see Initialize */
public Vector3 up;
/** #up * #height.
* \see Initialize */
private Vector3 upheight;
/** #diameter * scale * 0.5.
* Where \a scale usually is \link Pathfinding.GridGraph.nodeSize nodeSize \endlink
* \see Initialize */
private float finalRadius;
/** #thickRaycastDiameter * scale * 0.5. Where \a scale usually is \link Pathfinding.GridGraph.nodeSize nodeSize \endlink \see Initialize */
private float finalRaycastRadius;
/** Offset to apply after each raycast to make sure we don't hit the same point again in CheckHeightAll */
public const float RaycastErrorMargin = 0.005F;
#if !PhotonImplementation
public bool collisionCheck = true; /**< Toggle collision check */
public bool heightCheck = true; /**< Toggle height check. If false, the grid will be flat */
#else
//No height or collision checks can be done outside of Unity
public bool collisionCheck {
get {
return false;
}
set {}
}
public bool heightCheck {
get {
return false;
}
set {}
}
#endif
/** Make nodes unwalkable when no ground was found with the height raycast. If height raycast is turned off, this doesn't affect anything. */
public bool unwalkableWhenNoGround = true;
//#if !PhotonImplementation
/** Sets up several variables using the specified matrix and scale.
* \see GraphCollision.up
* \see GraphCollision.upheight
* \see GraphCollision.finalRadius
* \see GraphCollision.finalRaycastRadius
*/
// Precomputes the derived collision vectors/radii from the graph transform and
// node scale; see #up, #upheight, #finalRadius and #finalRaycastRadius.
public void Initialize (Matrix4x4 matrix, float scale) {
    up = matrix.MultiplyVector (Vector3.up);
    upheight = up * height;
    float halfScale = scale * 0.5F;
    finalRadius = diameter * halfScale;
    finalRaycastRadius = thickRaycastDiameter * halfScale;
}
/** Returns true if the position is not obstructed. If #collisionCheck is false, this will always return true. */
public bool Check (Vector3 position) {
    if (!collisionCheck) {
        return true;
    }
    // Apply the configured offset along the graph up direction before testing.
    Vector3 pos = position + up * collisionOffset;
    if (type == ColliderType.Capsule) {
        return !Physics.CheckCapsule (pos, pos + upheight, finalRadius, mask);
    }
    if (type == ColliderType.Sphere) {
        return !Physics.CheckSphere (pos, finalRadius, mask);
    }
    // ColliderType.Ray
    if (rayDirection == RayDirection.Both) {
        // Short-circuits: the downwards ray is only cast if the upwards one was clear.
        return !Physics.Raycast (pos, up, height, mask) && !Physics.Raycast (pos + upheight, -up, height, mask);
    }
    if (rayDirection == RayDirection.Up) {
        return !Physics.Raycast (pos, up, height, mask);
    }
    // RayDirection.Down
    return !Physics.Raycast (pos + upheight, -up, height, mask);
}
/** Returns the position with the correct height. If #heightCheck is false, this will return \a position unchanged. */
public Vector3 CheckHeight (Vector3 position) {
    // Delegate to the full overload and discard the extra outputs.
    RaycastHit discardedHit;
    bool discardedWalkable;
    return CheckHeight (position, out discardedHit, out discardedWalkable);
}
/** Returns the position with the correct height. If #heightCheck is false, this will return \a position.\n
* \a walkable will be set to false if nothing was hit. The ray will check a tiny bit further than to the grids base to avoid floating point errors when the ground is exactly at the base of the grid */
public Vector3 CheckHeight (Vector3 position, out RaycastHit hit, out bool walkable) {
walkable = true;
if (!heightCheck) {
// Height checking disabled - no cast is performed.
hit = new RaycastHit ();
return position;
}
if (thickRaycast) {
// Sphere cast downwards from #fromHeight above the position.
// The extra 0.005F matches the margin described in the doc comment above.
Ray ray = new Ray (position+up*fromHeight,-up);
if (Physics.SphereCast (ray, finalRaycastRadius,out hit, fromHeight+0.005F, heightMask)) {
// Snap the hit point back onto the cast ray (see Mathfx.NearestPoint).
return Mathfx.NearestPoint (ray.origin,ray.origin+ray.direction,hit.point);
//position+up*(fromHeight-hit.distance);
} else {
if (unwalkableWhenNoGround) {
walkable = false;
}
}
} else {
if (Physics.Raycast (position+up*fromHeight, -up,out hit, fromHeight+0.005F, heightMask)) {
return hit.point;
} else {
if (unwalkableWhenNoGround) {
walkable = false;
}
}
}
// Nothing was hit - fall back to the original position.
return position;
}
/** Same as #CheckHeight, except that the raycast will always start exactly at \a origin.
 * \a walkable will be set to false if nothing was hit. The ray checks a tiny bit further
 * than to the grids base to avoid floating point errors when the ground is exactly at the
 * base of the grid. */
public Vector3 Raycast (Vector3 origin, out RaycastHit hit, out bool walkable) {
    walkable = true;
    if (!heightCheck) {
        hit = new RaycastHit ();
        return origin -up*fromHeight;
    }
    float maxDistance = fromHeight + 0.005F;
    if (thickRaycast) {
        Ray ray = new Ray (origin, -up);
        if (Physics.SphereCast (ray, finalRaycastRadius, out hit, maxDistance, heightMask)) {
            // Snap the hit point back onto the cast ray (see Mathfx.NearestPoint).
            return Mathfx.NearestPoint (ray.origin, ray.origin + ray.direction, hit.point);
        }
    } else if (Physics.Raycast (origin, -up, out hit, maxDistance, heightMask)) {
        return hit.point;
    }
    // Nothing was hit.
    if (unwalkableWhenNoGround) {
        walkable = false;
    }
    return origin -up*fromHeight;
}
/** Returns all hits when checking height for \a position.
 * Repeatedly raycasts downwards, stepping just past each hit point by #RaycastErrorMargin
 * until nothing more is hit.
 * \note Does not work well with thick raycast, will only return an object a single time,
 * so thick raycast is disabled (with a warning) when this is called. */
public RaycastHit[] CheckHeightAll (Vector3 position) {
    if (!heightCheck) {
        // Height checking disabled - fabricate a single hit at the query position.
        RaycastHit hit = new RaycastHit ();
        hit.point = position;
        hit.distance = 0;
        return new RaycastHit[1] {hit};
    }
    if (thickRaycast) {
        Debug.LogWarning ("Thick raycast cannot be used with CheckHeightAll. Disabling thick raycast...");
        thickRaycast = false;
    }
    List<RaycastHit> hits = new List<RaycastHit>();
    bool walkable = true;
    Vector3 cpos = position + up*fromHeight;
    Vector3 prevHit = Vector3.zero;
    int numberSame = 0;
    while (true) {
        RaycastHit hit;
        Raycast (cpos, out hit, out walkable);
        if (hit.transform == null) {
            // Raycast did not hit anything - we are done.
            break;
        }
        // Make sure we didn't hit the same position as the previous iteration.
        if (hit.point != prevHit || hits.Count == 0) {
            // New hit - record it and continue from just below the hit point.
            cpos = hit.point - up*RaycastErrorMargin;
            prevHit = hit.point;
            numberSame = 0;
            hits.Add (hit);
        } else {
            // Same point again - nudge a bit further down and bail out if we are stuck.
            cpos -= up*0.001F;
            numberSame++;
            if (numberSame > 10) {
                Debug.LogError ("Infinite Loop when raycasting. Please report this error (arongranberg.com)\n"+cpos+" : "+prevHit);
                break;
            }
        }
    }
    return hits.ToArray ();
}
/** \copydoc Pathfinding.ISerializableObject.SerializeSettings \copybrief Pathfinding.ISerializableObject.SerializeSettings */
public void SerializeSettings (AstarSerializer serializer) {
// Key names must stay in sync with DeSerializeSettings below.
serializer.AddValue ("Mask",(int)mask);
serializer.AddValue ("Diameter",diameter);
serializer.AddValue ("Height",height);
serializer.AddValue ("Type",(int)type);
serializer.AddValue ("RayDirection",(int)rayDirection);
serializer.AddValue ("heightMask",(int)heightMask);
serializer.AddValue ("fromHeight",fromHeight);
serializer.AddValue ("thickRaycastDiameter",thickRaycastDiameter);
serializer.AddValue ("thickRaycast",thickRaycast);
serializer.AddValue ("collisionCheck",collisionCheck);
serializer.AddValue ("heightCheck",heightCheck);
serializer.AddValue ("unwalkableWhenNoGround",unwalkableWhenNoGround);
serializer.AddValue ("collisionOffset",collisionOffset);
}
/** \copydoc Pathfinding.ISerializableObject.DeSerializeSettings */
public void DeSerializeSettings (AstarSerializer serializer) {
mask.value = (int)serializer.GetValue ("Mask",typeof (int));
diameter = (float)serializer.GetValue ("Diameter",typeof (float));
height = (float)serializer.GetValue ("Height",typeof (float));
type = (ColliderType)serializer.GetValue ("Type",typeof(int));
rayDirection = (RayDirection)serializer.GetValue ("RayDirection",typeof(int));
// NOTE(review): the third argument to GetValue presumably supplies a default for
// data saved by older versions - confirm against AstarSerializer.
heightMask.value = (int)serializer.GetValue ("heightMask",typeof (int),-1);
fromHeight = (float)serializer.GetValue ("fromHeight",typeof (float), 100.0F);
thickRaycastDiameter = (float)serializer.GetValue ("thickRaycastDiameter",typeof (float));
thickRaycast = (bool)serializer.GetValue ("thickRaycast",typeof (bool));
collisionCheck = (bool)serializer.GetValue ("collisionCheck",typeof(bool),true);
heightCheck = (bool)serializer.GetValue ("heightCheck",typeof(bool),true);
unwalkableWhenNoGround = (bool)serializer.GetValue ("unwalkableWhenNoGround",typeof(bool),true);
collisionOffset = (float)serializer.GetValue ("collisionOffset",typeof(float),0.0F);
// A stored value of 0 would start the height check at ground level; restore the default.
if (fromHeight == 0) fromHeight = 100;
}
}
/** Determines collision check shape */
public enum ColliderType {
Sphere, /**< Uses a Sphere, Physics.CheckSphere */
Capsule, /**< Uses a Capsule, Physics.CheckCapsule */
Ray /**< Uses one or two rays, Physics.Raycast; direction controlled by #RayDirection */
}
/** Determines collision check ray direction (only used when ColliderType.Ray is selected) */
public enum RayDirection {
Up, /**< Casts the ray from the bottom upwards */
Down, /**< Casts the ray from the top downwards */
Both /**< Casts two rays in either direction */
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
using System.Collections.Specialized;
using System.Text;
namespace Microsoft.PowerShell.Commands.Internal.Format
{
internal class TableWriter
{
/// <summary>
/// Information about each column's boundaries
/// </summary>
private class ColumnInfo
{
internal int startCol = 0; // first screen column occupied by this field
internal int width = 0; // width in display cells; <= 0 means the column is hidden
internal int alignment = TextAlignment.Left;
}
/// <summary>
/// Class containing information about the tabular layout
/// </summary>
private class ScreenInfo
{
internal int screenColumns = 0; // total screen width, in display cells
internal const int separatorCharacterCount = 1; // blanks between adjacent columns
internal const int minimumScreenColumns = 5; // below this usable width the writer disables itself
internal const int minimumColumnWidth = 1;
internal ColumnInfo[] columnInfo = null;
}
// Layout computed by Initialize(); null until then.
private ScreenInfo _si;
/// <summary>
/// Compute how many items of display width <paramref name="stringLen"/> fit side by side
/// (separated by one blank each) on a screen <paramref name="screenColumns"/> cells wide.
/// Always returns at least 1.
/// </summary>
internal static int ComputeWideViewBestItemsPerRowFit(int stringLen, int screenColumns)
{
    // Degenerate input: fall back to a single column.
    if (stringLen <= 0 || screenColumns < 1)
        return 1;
    if (stringLen >= screenColumns)
    {
        // The string alone fills or overflows the screen; a single column,
        // possibly trimmed, is the best we can do.
        return 1;
    }
    // Grow the column count until adding one more column would no longer fit.
    int columnCount = 1;
    for (;;)
    {
        int candidate = columnCount + 1;
        // width needed if we used the extra column
        int requiredWidth = stringLen * candidate + (candidate - 1) * ScreenInfo.separatorCharacterCount;
        if (requiredWidth >= screenColumns)
        {
            return columnCount;
        }
        columnCount = candidate;
    }
}
/// <summary>
/// Initialize the table specifying the width of each column
/// </summary>
/// <param name="leftMarginIndent">left margin indentation</param>
/// <param name="screenColumns">number of character columns on the screen</param>
/// <param name="columnWidths">array of specified column widths</param>
/// <param name="alignment">array of alignment flags</param>
/// <param name="suppressHeader">if true, suppress header printing</param>
internal void Initialize(int leftMarginIndent, int screenColumns, int[] columnWidths, int[] alignment, bool suppressHeader)
{
//Console.WriteLine(" 1 2 3 4 5 6 7");
//Console.WriteLine("01234567890123456789012345678901234567890123456789012345678901234567890123456789");
// a negative indent makes no sense; clamp to zero
if (leftMarginIndent < 0)
{
leftMarginIndent = 0;
}
// if the usable width is too small nothing sensible can be rendered; disable the writer
if (screenColumns - leftMarginIndent < ScreenInfo.minimumScreenColumns)
{
_disabled = true;
return;
}
_startColumn = leftMarginIndent;
_hideHeader = suppressHeader;
// make sure the column widths are correct; if not, take appropriate action
ColumnWidthManager manager = new ColumnWidthManager(screenColumns - leftMarginIndent,
ScreenInfo.minimumColumnWidth,
ScreenInfo.separatorCharacterCount);
manager.CalculateColumnWidths(columnWidths);
// if all the columns are hidden, just disable
bool oneValid = false;
for (int k = 0; k < columnWidths.Length; k++)
{
if (columnWidths[k] >= ScreenInfo.minimumColumnWidth)
{
oneValid = true;
break;
}
}
if (!oneValid)
{
_disabled = true;
return;
}
// now set the run time data structures
_si = new ScreenInfo();
_si.screenColumns = screenColumns;
_si.columnInfo = new ColumnInfo[columnWidths.Length];
// compute each column's starting position, one separator apart from the previous column
int startCol = _startColumn;
for (int k = 0; k < columnWidths.Length; k++)
{
_si.columnInfo[k] = new ColumnInfo();
_si.columnInfo[k].startCol = startCol;
_si.columnInfo[k].width = columnWidths[k];
_si.columnInfo[k].alignment = alignment[k];
startCol += columnWidths[k] + ScreenInfo.separatorCharacterCount;
//Console.WriteLine("start = {0} width = {1}", si.columnInfo[k].startCol, si.columnInfo[k].width);
}
}
// Writes the header label row followed by a row of "---" markers under each label.
// NOTE(review): assumes values.Length == _si.columnInfo.Length - confirm with callers.
internal void GenerateHeader(string[] values, LineOutput lo)
{
if (_disabled)
return;
if (_hideHeader)
return;
// generate the row with the header labels
GenerateRow(values, lo, true, null, lo.DisplayCells);
// generate an array of "--" as header markers below
// the column header labels
string[] breakLine = new string[values.Length];
for (int k = 0; k < _si.columnInfo.Length; k++)
{
// the column can be hidden
if (_si.columnInfo[k].width <= 0)
{
breakLine[k] = "";
continue;
}
// the title can be larger than the width
int count = _si.columnInfo[k].width;
if (!string.IsNullOrEmpty(values[k]))
{
int labelDisplayCells = lo.DisplayCells.Length(values[k]);
if (labelDisplayCells < count)
count = labelDisplayCells;
}
// NOTE: we can do this because "-" is a single cell character
// on all devices. If changed to some other character, this assumption
// would be invalidated
breakLine[k] = new string('-', count);
}
GenerateRow(breakLine, lo, false, null, lo.DisplayCells);
}
// Writes one row to the output; in multiLine mode a cell may span several physical lines.
// A null alignment array (or an Undefined entry) falls back to the column's configured alignment.
internal void GenerateRow(string[] values, LineOutput lo, bool multiLine, int[] alignment, DisplayCells dc)
{
    if (_disabled)
        return;
    // resolve the effective alignment for each column on this row
    int cols = _si.columnInfo.Length;
    int[] currentAlignment = new int[cols];
    for (int i = 0; i < cols; i++)
    {
        if (alignment == null || alignment[i] == TextAlignment.Undefined)
            currentAlignment[i] = _si.columnInfo[i].alignment;
        else
            currentAlignment[i] = alignment[i];
    }
    if (multiLine)
    {
        foreach (string line in GenerateTableRow(values, currentAlignment, lo.DisplayCells))
        {
            lo.WriteLine(line);
        }
    }
    else
    {
        lo.WriteLine(GenerateRow(values, currentAlignment, dc));
    }
}
// Renders one logical row as one or more physical lines (for multi-line cells).
// Returns null when every column is hidden.
private string[] GenerateTableRow(string[] values, int[] alignment, DisplayCells ds)
{
// select the active columns (skip hidden ones)
int[] validColumnArray = new int[_si.columnInfo.Length];
int validColumnCount = 0;
for (int k = 0; k < _si.columnInfo.Length; k++)
{
if (_si.columnInfo[k].width > 0)
{
validColumnArray[validColumnCount++] = k;
}
}
if (validColumnCount == 0)
return null;
StringCollection[] scArray = new StringCollection[validColumnCount];
for (int k = 0; k < scArray.Length; k++)
{
// obtain a set of tokens for each field
scArray[k] = GenerateMultiLineRowField(values[validColumnArray[k]], validColumnArray[k],
alignment[validColumnArray[k]], ds);
// NOTE: the following padding operations assume that we
// pad with a blank (or any character that ALWAYS maps to a single screen cell)
if (k > 0)
{
// skipping the first ones, add a separator for catenation
for (int j = 0; j < scArray[k].Count; j++)
{
scArray[k][j] = new string(' ', ScreenInfo.separatorCharacterCount) + scArray[k][j];
}
}
else
{
// add indentation padding if needed
if (_startColumn > 0)
{
for (int j = 0; j < scArray[k].Count; j++)
{
scArray[k][j] = new string(' ', _startColumn) + scArray[k][j];
}
}
}
}
// now we processed all the row's columns and we need to find the cell that occupies the most
// rows
int screenRows = 0;
for (int k = 0; k < scArray.Length; k++)
{
if (scArray[k].Count > screenRows)
screenRows = scArray[k].Count;
}
// add padding for the columns that are shorter
for (int col = 0; col < scArray.Length; col++)
{
int paddingBlanks = _si.columnInfo[validColumnArray[col]].width;
if (col > 0)
paddingBlanks += ScreenInfo.separatorCharacterCount;
else
{
paddingBlanks += _startColumn;
}
int paddingEntries = screenRows - scArray[col].Count;
if (paddingEntries > 0)
{
for (int j = 0; j < paddingEntries; j++)
{
scArray[col].Add(new string(' ', paddingBlanks));
}
}
}
// finally, build an array of strings by concatenating every column's line for each row
string[] rows = new string[screenRows];
for (int row = 0; row < rows.Length; row++)
{
StringBuilder sb = new StringBuilder();
// for a given row, walk the columns
for (int col = 0; col < scArray.Length; col++)
{
sb.Append(scArray[col][row]);
}
rows[row] = sb.ToString();
}
return rows;
}
// Breaks a single field value into one or more lines that fit column k,
// then pads any line that is shorter than the column width.
private StringCollection GenerateMultiLineRowField(string val, int k, int aligment, DisplayCells dc)
{
StringCollection sc = StringManipulationHelper.GenerateLines(dc, val,
_si.columnInfo[k].width, _si.columnInfo[k].width);
// if length is shorter, do some padding
for (int col = 0; col < sc.Count; col++)
{
if (dc.Length(sc[col]) < _si.columnInfo[k].width)
sc[col] = GenerateRowField(sc[col], _si.columnInfo[k].width, aligment, dc);
}
return sc;
}
// Builds a single physical line for the row: left margin, then each visible
// column's formatted field, separated by blanks.
private string GenerateRow(string[] values, int[] alignment, DisplayCells dc)
{
    StringBuilder sb = new StringBuilder();
    for (int k = 0; k < _si.columnInfo.Length; k++)
    {
        ColumnInfo ci = _si.columnInfo[k];
        if (ci.width <= 0)
        {
            // skip columns that are not at least a single character wide
            continue;
        }
        // NOTE: the padding below assumes the blank always maps to a single screen cell.
        // Column 0 gets the left margin; later columns get a separator.
        string prefix;
        if (k > 0)
            prefix = new string(' ', ScreenInfo.separatorCharacterCount);
        else
            prefix = (_startColumn > 0) ? new string(' ', _startColumn) : "";
        sb.Append(prefix);
        sb.Append(GenerateRowField(values[k], ci.width, alignment[k], dc));
    }
    return sb.ToString();
}
// Formats a single field value to exactly 'width' display cells:
// pads short values per the alignment, truncates long values (adding an
// ellipsis when there is room for it), then fixes up a possible one-cell
// shortfall caused by truncating a double-cell character.
private static string GenerateRowField(string val, int width, int alignment, DisplayCells dc)
{
// make sure the string does not have any embedded <CR> in it
string s = StringManipulationHelper.TruncateAtNewLine(val) ?? "";
string currentValue = s;
int currentValueDisplayLength = dc.Length(currentValue);
if (currentValueDisplayLength < width)
{
// the string is shorter than the width of the column
// need to pad with blanks to reach the desired width
int padCount = width - currentValueDisplayLength;
switch (alignment)
{
case TextAlignment.Right:
{
s = new string(' ', padCount) + s;
}
break;
case TextAlignment.Center:
{
// add a bit at both ends of the string
int padLeft = padCount / 2;
int padRight = padCount - padLeft;
s = new string(' ', padLeft) + s + new string(' ', padRight);
}
break;
default:
{
// left align is the default
s += new string(' ', padCount);
}
break;
}
}
else if (currentValueDisplayLength > width)
{
// the string is longer than the width of the column
// truncate and add ellipsis if it's too long
int truncationDisplayLength = width - ellipsis.Length;
if (truncationDisplayLength > 0)
{
// we have space for the ellipsis, add it
switch (alignment)
{
case TextAlignment.Right:
{
// get from "abcdef" to "...f"
int tailCount = dc.GetTailSplitLength(s, truncationDisplayLength);
s = s.Substring(s.Length - tailCount);
s = ellipsis + s;
}
break;
case TextAlignment.Center:
{
// get from "abcdef" to "a..."
s = s.Substring(0, dc.GetHeadSplitLength(s, truncationDisplayLength));
s += ellipsis;
}
break;
default:
{
// left align is the default
// get from "abcdef" to "a..."
s = s.Substring(0, dc.GetHeadSplitLength(s, truncationDisplayLength));
s += ellipsis;
}
break;
}
}
else
{
// not enough space for the ellipsis, just truncate at the width
int len = width;
switch (alignment)
{
case TextAlignment.Right:
{
// get from "abcdef" to "f"
int tailCount = dc.GetTailSplitLength(s, len);
s = s.Substring(s.Length - tailCount, tailCount);
}
break;
case TextAlignment.Center:
{
// get from "abcdef" to "a"
s = s.Substring(0, dc.GetHeadSplitLength(s, len));
}
break;
default:
{
// left align is the default
// get from "abcdef" to "a"
s = s.Substring(0, dc.GetHeadSplitLength(s, len));
}
break;
}
}
}
// we need to take into consideration that truncation left the string one
// display cell short if a double cell character got truncated
// in this case, we need to pad with a blank
int finalValueDisplayLength = dc.Length(s);
if (finalValueDisplayLength == width)
{
return s;
}
// we have to pad
System.Diagnostics.Debug.Assert(finalValueDisplayLength == width - 1, "padding is not correct");
switch (alignment)
{
case TextAlignment.Right:
{
s = " " + s;
}
break;
case TextAlignment.Center:
{
s += " ";
}
break;
default:
{
// left align is the default
s += " ";
}
break;
}
return s;
}
// marker appended/prepended when a value is truncated to fit its column
private const string ellipsis = "...";
private bool _disabled = false; // set by Initialize() when there is not enough room to render
private bool _hideHeader = false; // set by Initialize() from suppressHeader
private int _startColumn = 0; // left margin indentation, in display cells
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.CompilerServices;
namespace System.Collections.Specialized
{
/// <summary>
/// This enum describes the action that caused a CollectionChanged event.
/// </summary>
public enum NotifyCollectionChangedAction
{
/// <summary> One or more items were added to the collection. </summary>
Add,
/// <summary> One or more items were removed from the collection. </summary>
Remove,
/// <summary> One or more items were replaced in the collection. </summary>
Replace,
/// <summary> One or more items were moved within the collection. </summary>
Move,
/// <summary> The contents of the collection changed dramatically.
/// Reset events carry no item or index information (see the constructor validation). </summary>
Reset,
}
/// <summary>
/// Arguments for the CollectionChanged event.
/// A collection that supports INotifyCollectionChanged raises this event
/// whenever an item is added or removed, or when the contents of the collection
/// changes dramatically.
/// </summary>
public class NotifyCollectionChangedEventArgs : EventArgs
{
//------------------------------------------------------
//
// Constructors
//
//------------------------------------------------------
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a reset change.
/// </summary>
/// <param name="action">The action that caused the event (must be Reset).</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Reset.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action)
{
if (action != NotifyCollectionChangedAction.Reset)
throw new ArgumentException(SR.Format(SR.WrongActionForCtor, NotifyCollectionChangedAction.Reset), "action");
InitializeAdd(action, null, -1);
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a one-item change.
/// </summary>
/// <param name="action">The action that caused the event; can only be Reset, Add or Remove action.</param>
/// <param name="changedItem">The item affected by the change.</param>
/// <exception cref="ArgumentException">When the action is not Reset/Add/Remove, or Reset is combined with a non-null item.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, object changedItem)
{
if ((action != NotifyCollectionChangedAction.Add) && (action != NotifyCollectionChangedAction.Remove)
&& (action != NotifyCollectionChangedAction.Reset))
throw new ArgumentException(SR.MustBeResetAddOrRemoveActionForCtor, "action");
if (action == NotifyCollectionChangedAction.Reset)
{
// Reset carries no item data
if (changedItem != null)
throw new ArgumentException(SR.ResetActionRequiresNullItem, "action");
InitializeAdd(action, null, -1);
}
else
{
InitializeAddOrRemove(action, new object[] { changedItem }, -1);
}
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a one-item change.
/// </summary>
/// <param name="action">The action that caused the event.</param>
/// <param name="changedItem">The item affected by the change.</param>
/// <param name="index">The index where the change occurred.</param>
/// <exception cref="ArgumentException">When the action is not Reset/Add/Remove, or Reset is combined with a non-null item or an index other than -1.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, object changedItem, int index)
{
if ((action != NotifyCollectionChangedAction.Add) && (action != NotifyCollectionChangedAction.Remove)
&& (action != NotifyCollectionChangedAction.Reset))
throw new ArgumentException(SR.MustBeResetAddOrRemoveActionForCtor, "action");
if (action == NotifyCollectionChangedAction.Reset)
{
// Reset carries no item or index data
if (changedItem != null)
throw new ArgumentException(SR.ResetActionRequiresNullItem, "action");
if (index != -1)
throw new ArgumentException(SR.ResetActionRequiresIndexMinus1, "action");
InitializeAdd(action, null, -1);
}
else
{
InitializeAddOrRemove(action, new object[] { changedItem }, index);
}
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a multi-item change.
/// </summary>
/// <param name="action">The action that caused the event.</param>
/// <param name="changedItems">The items affected by the change.</param>
/// <exception cref="ArgumentException">When the action is not Reset/Add/Remove, or Reset is combined with a non-null list.</exception>
/// <exception cref="ArgumentNullException">When Add/Remove is combined with a null list.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, IList changedItems)
{
if ((action != NotifyCollectionChangedAction.Add) && (action != NotifyCollectionChangedAction.Remove)
&& (action != NotifyCollectionChangedAction.Reset))
throw new ArgumentException(SR.MustBeResetAddOrRemoveActionForCtor, "action");
if (action == NotifyCollectionChangedAction.Reset)
{
if (changedItems != null)
throw new ArgumentException(SR.ResetActionRequiresNullItem, "action");
InitializeAdd(action, null, -1);
}
else
{
if (changedItems == null)
throw new ArgumentNullException("changedItems");
InitializeAddOrRemove(action, changedItems, -1);
}
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a multi-item change (or a reset).
/// </summary>
/// <param name="action">The action that caused the event.</param>
/// <param name="changedItems">The items affected by the change.</param>
/// <param name="startingIndex">The index where the change occurred.</param>
/// <exception cref="ArgumentException">When the action/item/index combination is invalid.</exception>
/// <exception cref="ArgumentNullException">When Add/Remove is combined with a null list.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, IList changedItems, int startingIndex)
{
if ((action != NotifyCollectionChangedAction.Add) && (action != NotifyCollectionChangedAction.Remove)
&& (action != NotifyCollectionChangedAction.Reset))
throw new ArgumentException(SR.MustBeResetAddOrRemoveActionForCtor, "action");
if (action == NotifyCollectionChangedAction.Reset)
{
// Reset carries no item or index data
if (changedItems != null)
throw new ArgumentException(SR.ResetActionRequiresNullItem, "action");
if (startingIndex != -1)
throw new ArgumentException(SR.ResetActionRequiresIndexMinus1, "action");
InitializeAdd(action, null, -1);
}
else
{
if (changedItems == null)
throw new ArgumentNullException("changedItems");
if (startingIndex < -1)
throw new ArgumentException(SR.IndexCannotBeNegative, "startingIndex");
InitializeAddOrRemove(action, changedItems, startingIndex);
}
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a one-item Replace event.
/// </summary>
/// <param name="action">Can only be a Replace action.</param>
/// <param name="newItem">The new item replacing the original item.</param>
/// <param name="oldItem">The original item that is replaced.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Replace.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, object newItem, object oldItem)
{
if (action != NotifyCollectionChangedAction.Replace)
throw new ArgumentException(SR.Format(SR.WrongActionForCtor, NotifyCollectionChangedAction.Replace), "action");
InitializeMoveOrReplace(action, new object[] { newItem }, new object[] { oldItem }, -1, -1);
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a one-item Replace event.
/// </summary>
/// <param name="action">Can only be a Replace action.</param>
/// <param name="newItem">The new item replacing the original item.</param>
/// <param name="oldItem">The original item that is replaced.</param>
/// <param name="index">The index of the item being replaced.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Replace.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, object newItem, object oldItem, int index)
{
if (action != NotifyCollectionChangedAction.Replace)
throw new ArgumentException(SR.Format(SR.WrongActionForCtor, NotifyCollectionChangedAction.Replace), "action");
InitializeMoveOrReplace(action, new object[] { newItem }, new object[] { oldItem }, index, index);
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a multi-item Replace event.
/// </summary>
/// <param name="action">Can only be a Replace action.</param>
/// <param name="newItems">The new items replacing the original items.</param>
/// <param name="oldItems">The original items that are replaced.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Replace.</exception>
/// <exception cref="ArgumentNullException">When either item list is null.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, IList newItems, IList oldItems)
{
if (action != NotifyCollectionChangedAction.Replace)
throw new ArgumentException(SR.Format(SR.WrongActionForCtor, NotifyCollectionChangedAction.Replace), "action");
if (newItems == null)
throw new ArgumentNullException("newItems");
if (oldItems == null)
throw new ArgumentNullException("oldItems");
InitializeMoveOrReplace(action, newItems, oldItems, -1, -1);
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a multi-item Replace event.
/// </summary>
/// <param name="action">Can only be a Replace action.</param>
/// <param name="newItems">The new items replacing the original items.</param>
/// <param name="oldItems">The original items that are replaced.</param>
/// <param name="startingIndex">The starting index of the items being replaced.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Replace.</exception>
/// <exception cref="ArgumentNullException">When either item list is null.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, IList newItems, IList oldItems, int startingIndex)
{
if (action != NotifyCollectionChangedAction.Replace)
throw new ArgumentException(SR.Format(SR.WrongActionForCtor, NotifyCollectionChangedAction.Replace), "action");
if (newItems == null)
throw new ArgumentNullException("newItems");
if (oldItems == null)
throw new ArgumentNullException("oldItems");
InitializeMoveOrReplace(action, newItems, oldItems, startingIndex, startingIndex);
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a one-item Move event.
/// </summary>
/// <param name="action">Can only be a Move action.</param>
/// <param name="changedItem">The item affected by the change.</param>
/// <param name="index">The new index for the changed item.</param>
/// <param name="oldIndex">The old index for the changed item.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Move or <paramref name="index"/> is negative.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, object changedItem, int index, int oldIndex)
{
if (action != NotifyCollectionChangedAction.Move)
throw new ArgumentException(SR.Format(SR.WrongActionForCtor, NotifyCollectionChangedAction.Move), "action");
if (index < 0)
throw new ArgumentException(SR.IndexCannotBeNegative, "index");
// the same single-item list serves as both the new and the old items
object[] changedItems = new object[] { changedItem };
InitializeMoveOrReplace(action, changedItems, changedItems, index, oldIndex);
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs that describes a multi-item Move event.
/// </summary>
/// <param name="action">The action that caused the event.</param>
/// <param name="changedItems">The items affected by the change.</param>
/// <param name="index">The new index for the changed items.</param>
/// <param name="oldIndex">The old index for the changed items.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Move or <paramref name="index"/> is negative.</exception>
public NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, IList changedItems, int index, int oldIndex)
{
if (action != NotifyCollectionChangedAction.Move)
throw new ArgumentException(SR.Format(SR.WrongActionForCtor, NotifyCollectionChangedAction.Move), "action");
if (index < 0)
throw new ArgumentException(SR.IndexCannotBeNegative, "index");
InitializeMoveOrReplace(action, changedItems, changedItems, index, oldIndex);
}
/// <summary>
/// Construct a NotifyCollectionChangedEventArgs with given fields (no validation). Used by WinRT marshaling.
/// </summary>
internal NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction action, IList newItems, IList oldItems, int newIndex, int oldIndex)
{
_action = action;
// wrap the lists in read-only views so handlers cannot mutate the caller's data
_newItems = (newItems == null) ? null : new ReadOnlyList(newItems);
_oldItems = (oldItems == null) ? null : new ReadOnlyList(oldItems);
_newStartingIndex = newIndex;
_oldStartingIndex = oldIndex;
}
// Dispatches to the Add or Remove initializer; any other action is a programming error.
private void InitializeAddOrRemove(NotifyCollectionChangedAction action, IList changedItems, int startingIndex)
{
    switch (action)
    {
        case NotifyCollectionChangedAction.Add:
            InitializeAdd(action, changedItems, startingIndex);
            break;
        case NotifyCollectionChangedAction.Remove:
            InitializeRemove(action, changedItems, startingIndex);
            break;
        default:
            Debug.Assert(false, String.Format("Unsupported action: {0}", action.ToString()));
            break;
    }
}
// Records the "new items" side of the event data.
private void InitializeAdd(NotifyCollectionChangedAction action, IList newItems, int newStartingIndex)
{
_action = action;
// wrap in a read-only view so event handlers cannot mutate the caller's list
_newItems = (newItems == null) ? null : new ReadOnlyList(newItems);
_newStartingIndex = newStartingIndex;
}
// Records the "old items" side of the event data.
private void InitializeRemove(NotifyCollectionChangedAction action, IList oldItems, int oldStartingIndex)
{
_action = action;
_oldItems = (oldItems == null) ? null : new ReadOnlyList(oldItems);
_oldStartingIndex = oldStartingIndex;
}
// Move and Replace events populate both the new and the old side.
private void InitializeMoveOrReplace(NotifyCollectionChangedAction action, IList newItems, IList oldItems, int startingIndex, int oldStartingIndex)
{
InitializeAdd(action, newItems, startingIndex);
InitializeRemove(action, oldItems, oldStartingIndex);
}
//------------------------------------------------------
//
// Public Properties
//
//------------------------------------------------------
/// <summary>
/// The action that caused the event.
/// </summary>
public NotifyCollectionChangedAction Action
{
get { return _action; }
}
/// <summary>
/// The items affected by the change.
/// </summary>
public IList NewItems
{
get { return _newItems; }
}
/// <summary>
/// The old items affected by the change, e.g. for Replace events (null when not applicable).
/// </summary>
public IList OldItems => _oldItems;
/// <summary>
/// The index where the change occurred (-1 when not set).
/// </summary>
public int NewStartingIndex => _newStartingIndex;
/// <summary>
/// The old index where the change occurred, for Move events (-1 when not set).
/// </summary>
public int OldStartingIndex => _oldStartingIndex;
//------------------------------------------------------
//
// Private Fields
//
//------------------------------------------------------
// The action that caused the event.
private NotifyCollectionChangedAction _action;
// Read-only wrappers around the new/old item lists; null when not applicable.
private IList _newItems, _oldItems;
// Starting indices of the change; -1 means "not set / not applicable".
private int _newStartingIndex = -1;
private int _oldStartingIndex = -1;
}
/// <summary>
/// The delegate to use for handlers that receive the CollectionChanged event.
/// </summary>
/// <param name="sender">The collection that raised the event.</param>
/// <param name="e">Details of the change that occurred.</param>
public delegate void NotifyCollectionChangedEventHandler(object sender, NotifyCollectionChangedEventArgs e);
/// <summary>
/// A read-only IList facade over an existing IList: all reads delegate to the
/// wrapped list and every mutating member throws NotSupportedException.
/// </summary>
internal sealed class ReadOnlyList : IList
{
    private readonly IList _list;

    internal ReadOnlyList(IList list)
    {
        Debug.Assert(list != null);
        _list = list;
    }

    public int Count => _list.Count;

    public bool IsReadOnly => true;

    public bool IsFixedSize => true;

    public bool IsSynchronized => _list.IsSynchronized;

    public object this[int index]
    {
        get => _list[index];
        set => throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
    }

    public object SyncRoot => _list.SyncRoot;

    public int Add(object value) => throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);

    public void Clear() => throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);

    public bool Contains(object value) => _list.Contains(value);

    public void CopyTo(Array array, int index) => _list.CopyTo(array, index);

    public IEnumerator GetEnumerator() => _list.GetEnumerator();

    public int IndexOf(object value) => _list.IndexOf(value);

    public void Insert(int index, object value) => throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);

    public void Remove(object value) => throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);

    public void RemoveAt(int index) => throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
**
**
** Purpose: A collection of methods for manipulating Files.
**
** April 09, 2000 (some design refactoring)
**
===========================================================*/
using System;
#if FEATURE_MACL
using System.Security.AccessControl;
#endif
using System.Security.Permissions;
using PermissionSet = System.Security.PermissionSet;
using Win32Native = Microsoft.Win32.Win32Native;
using System.Runtime.InteropServices;
using System.Text;
using System.Runtime.Serialization;
using System.Globalization;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;
namespace System.IO {
// Class for creating FileStream objects, and some basic file management
// routines such as Delete, etc.
[Serializable]
[ComVisible(true)]
public sealed class FileInfo: FileSystemInfo
{
    // Cached file-name component of the path; kept in sync by Init and MoveTo.
    private String _name;

#if FEATURE_CORECLR
    // Migrating InheritanceDemands requires this default ctor, so we can annotate it.
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#else
    [System.Security.SecuritySafeCritical]
#endif //FEATURE_CORESYSTEM
    private FileInfo(){}

    /// <summary>
    /// Creates a FileInfo while skipping the host security (FileSecurityState)
    /// check that the public constructor performs. The caller asserts that
    /// access to the path has already been validated.
    /// </summary>
    [System.Security.SecurityCritical]
    public static FileInfo UnsafeCreateFileInfo(String fileName)
    {
        if (fileName == null)
            throw new ArgumentNullException("fileName");
        Contract.EndContractBlock();

        FileInfo fi = new FileInfo();
        fi.Init(fileName, false);
        return fi;
    }
#endif

    /// <summary>
    /// Creates a FileInfo for the given path, demanding read access to it.
    /// </summary>
    [System.Security.SecuritySafeCritical]
    public FileInfo(String fileName)
    {
        if (fileName == null)
            throw new ArgumentNullException("fileName");
        Contract.EndContractBlock();

        Init(fileName, true);
    }

    // Shared constructor body: fully qualifies the path, runs the read-access
    // security check (host state on CoreCLR, CAS demand otherwise), then caches
    // the name / full path / display path.
    [System.Security.SecurityCritical]
    private void Init(String fileName, bool checkHost)
    {
        OriginalPath = fileName;
        // Must fully qualify the path for the security check
        String fullPath = Path.GetFullPathInternal(fileName);
#if FEATURE_CORECLR
        if (checkHost)
        {
            FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.Read, fileName, fullPath);
            state.EnsureState();
        }
#else
        FileIOPermission.QuickDemand(FileIOPermissionAccess.Read, fullPath, false, false);
#endif
        _name = Path.GetFileName(fileName);
        FullPath = fullPath;
        DisplayPath = GetDisplayPath(fileName);
    }

    // On CoreCLR only the file name is exposed for display; otherwise the
    // path is displayed exactly as the caller supplied it.
    private String GetDisplayPath(String originalPath)
    {
#if FEATURE_CORECLR
        return Path.GetFileName(originalPath);
#else
        return originalPath;
#endif
    }

    // Deserialization constructor; re-demands read permission for the restored path.
    [System.Security.SecurityCritical] // auto-generated
    private FileInfo(SerializationInfo info, StreamingContext context) : base(info, context)
    {
#if !FEATURE_CORECLR
        new FileIOPermission(FileIOPermissionAccess.Read, new String[] { FullPath }, false, false).Demand();
#endif
        _name = Path.GetFileName(OriginalPath);
        DisplayPath = GetDisplayPath(OriginalPath);
    }

#if FEATURE_CORESYSTEM
    [System.Security.SecuritySafeCritical]
#endif //FEATURE_CORESYSTEM
    // Internal fast-path constructor for an already fully-qualified path;
    // the boolean parameter exists only to disambiguate the overload.
    internal FileInfo(String fullPath, bool ignoreThis)
    {
        Contract.Assert(Path.GetRootLength(fullPath) > 0, "fullPath must be fully qualified!");
        _name = Path.GetFileName(fullPath);
        OriginalPath = _name;
        FullPath = fullPath;
        DisplayPath = _name;
    }

    /// <summary>The file name (final component of the path).</summary>
    public override String Name {
        get { return _name; }
    }

    /// <summary>
    /// The size of the file in bytes. Throws if the cached file data could not
    /// be refreshed, or if the path refers to a directory.
    /// </summary>
    public long Length {
        [System.Security.SecuritySafeCritical]  // auto-generated
        get {
            if (_dataInitialised == -1)
                Refresh();

            if (_dataInitialised != 0) // Refresh was unable to initialise the data
                __Error.WinIOError(_dataInitialised, DisplayPath);

            if ((_data.fileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY) != 0)
                __Error.WinIOError(Win32Native.ERROR_FILE_NOT_FOUND, DisplayPath);

            // Combine the Win32 high/low 32-bit halves into the 64-bit length.
            return ((long)_data.fileSizeHigh) << 32 | ((long)_data.fileSizeLow & 0xFFFFFFFFL);
        }
    }

    /* Returns the name of the directory that the file is in */
    public String DirectoryName
    {
        [System.Security.SecuritySafeCritical]
        get
        {
            String directoryName = Path.GetDirectoryName(FullPath);
            if (directoryName != null)
            {
#if FEATURE_CORECLR
                FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.Read, DisplayPath, FullPath);
                state.EnsureState();
#else
                new FileIOPermission(FileIOPermissionAccess.PathDiscovery, new String[] { directoryName }, false, false).Demand();
#endif
            }
            return directoryName;
        }
    }

    /* Creates an instance of the parent directory */
    public DirectoryInfo Directory
    {
        get
        {
            String dirName = DirectoryName;
            if (dirName == null)
                return null;
            return new DirectoryInfo(dirName);
        }
    }

    /// <summary>Gets or sets the ReadOnly attribute on the file.</summary>
    public bool IsReadOnly {
        get {
            return (Attributes & FileAttributes.ReadOnly) != 0;
        }
        set {
            if (value)
                Attributes |= FileAttributes.ReadOnly;
            else
                Attributes &= ~FileAttributes.ReadOnly;
        }
    }

#if FEATURE_MACL
    /// <summary>Gets the access/owner/group sections of the file's ACL.</summary>
    public FileSecurity GetAccessControl()
    {
        return File.GetAccessControl(FullPath, AccessControlSections.Access | AccessControlSections.Owner | AccessControlSections.Group);
    }

    /// <summary>Gets the requested sections of the file's ACL.</summary>
    public FileSecurity GetAccessControl(AccessControlSections includeSections)
    {
        return File.GetAccessControl(FullPath, includeSections);
    }

    /// <summary>Applies the given ACL to the file.</summary>
    public void SetAccessControl(FileSecurity fileSecurity)
    {
        File.SetAccessControl(FullPath, fileSecurity);
    }
#endif

    /// <summary>Opens the file for reading text as UTF-8 (with BOM detection).</summary>
    [System.Security.SecuritySafeCritical]  // auto-generated
    public StreamReader OpenText()
    {
        return new StreamReader(FullPath, Encoding.UTF8, true, StreamReader.DefaultBufferSize, false);
    }

    /// <summary>Creates (or truncates) the file and returns a text writer over it.</summary>
    public StreamWriter CreateText()
    {
        return new StreamWriter(FullPath,false);
    }

    /// <summary>Opens the file for appending text.</summary>
    public StreamWriter AppendText()
    {
        return new StreamWriter(FullPath,true);
    }

    // Copies an existing file to a new file. An exception is raised if the
    // destination file already exists. Use the
    // Copy(String, String, boolean) method to allow
    // overwriting an existing file.
    //
    // The caller must have certain FileIOPermissions. The caller must have
    // Read permission to sourceFileName
    // and Write permissions to destFileName.
    //
    public FileInfo CopyTo(String destFileName) {
        if (destFileName == null)
            throw new ArgumentNullException("destFileName", Environment.GetResourceString("ArgumentNull_FileName"));
        if (destFileName.Length == 0)
            throw new ArgumentException(Environment.GetResourceString("Argument_EmptyFileName"), "destFileName");
        Contract.EndContractBlock();

        destFileName = File.InternalCopy(FullPath, destFileName, false, true);
        return new FileInfo(destFileName, false);
    }

    // Copies an existing file to a new file. If overwrite is
    // false, then an IOException is thrown if the destination file
    // already exists. If overwrite is true, the file is
    // overwritten.
    //
    // The caller must have certain FileIOPermissions. The caller must have
    // Read permission to sourceFileName and Create
    // and Write permissions to destFileName.
    //
    public FileInfo CopyTo(String destFileName, bool overwrite) {
        if (destFileName == null)
            throw new ArgumentNullException("destFileName", Environment.GetResourceString("ArgumentNull_FileName"));
        if (destFileName.Length == 0)
            throw new ArgumentException(Environment.GetResourceString("Argument_EmptyFileName"), "destFileName");
        Contract.EndContractBlock();

        destFileName = File.InternalCopy(FullPath, destFileName, overwrite, true);
        return new FileInfo(destFileName, false);
    }

    /// <summary>Creates (or truncates) the file and returns a stream over it.</summary>
    public FileStream Create() {
        return File.Create(FullPath);
    }

    // Deletes a file. The file specified by the designated path is deleted.
    // If the file does not exist, Delete succeeds without throwing
    // an exception.
    //
    // On NT, Delete will fail for a file that is open for normal I/O
    // or a file that is memory mapped. On Win95, the file will be
    // deleted regardless of whether the file is being used.
    //
    // Your application must have Delete permission to the target file.
    //
    [System.Security.SecuritySafeCritical]
    public override void Delete()
    {
#if FEATURE_CORECLR
        FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.Write, DisplayPath, FullPath);
        state.EnsureState();
#else
        // For security check, path should be resolved to an absolute path.
        new FileIOPermission(FileIOPermissionAccess.Write, new String[] { FullPath }, false, false).Demand();
#endif
        bool r = Win32Native.DeleteFile(FullPath);
        if (!r) {
            int hr = Marshal.GetLastWin32Error();
            if (hr==Win32Native.ERROR_FILE_NOT_FOUND)
                return; // deleting a missing file is deliberately a no-op
            else
                __Error.WinIOError(hr, DisplayPath);
        }
    }

    /// <summary>Decrypts a file encrypted by the current account via Encrypt.</summary>
    [ComVisible(false)]
    public void Decrypt()
    {
        File.Decrypt(FullPath);
    }

    /// <summary>Encrypts the file so only the current account can decrypt it.</summary>
    [ComVisible(false)]
    public void Encrypt()
    {
        File.Encrypt(FullPath);
    }

    // Tests if the given file exists. The result is true if the file
    // given by the specified path exists; otherwise, the result is
    // false.
    //
    // Your application must have Read permission for the target directory.
    public override bool Exists {
        [System.Security.SecuritySafeCritical]  // auto-generated
        get {
            try {
                if (_dataInitialised == -1)
                    Refresh();
                if (_dataInitialised != 0) {
                    // Refresh was unable to initialise the data.
                    // We should normally be throwing an exception here,
                    // but Exists is supposed to return true or false.
                    return false;
                }
                // A directory at this path does not count as an existing file.
                return (_data.fileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY) == 0;
            }
            catch
            {
                return false;
            }
        }
    }

    // User must explicitly specify opening a new file or appending to one.
    public FileStream Open(FileMode mode) {
        return Open(mode, FileAccess.ReadWrite, FileShare.None);
    }

    /// <summary>Opens the file in the given mode with the given access, unshared.</summary>
    public FileStream Open(FileMode mode, FileAccess access) {
        return Open(mode, access, FileShare.None);
    }

    /// <summary>Opens the file in the given mode, access, and sharing.</summary>
    public FileStream Open(FileMode mode, FileAccess access, FileShare share) {
        return new FileStream(FullPath, mode, access, share);
    }

#if FEATURE_CORECLR
    [System.Security.SecuritySafeCritical] // auto-generated
#endif
    /// <summary>Opens the existing file read-only with read sharing.</summary>
    public FileStream OpenRead()
    {
        return new FileStream(FullPath, FileMode.Open, FileAccess.Read,
                              FileShare.Read, 4096, false);
    }

    /// <summary>Opens (or creates) the file write-only, unshared.</summary>
    public FileStream OpenWrite() {
        return new FileStream(FullPath, FileMode.OpenOrCreate,
                              FileAccess.Write, FileShare.None);
    }

    // Moves a given file to a new location and potentially a new file name.
    // This method does work across volumes.
    //
    // The caller must have certain FileIOPermissions. The caller must
    // have Read and Write permission to
    // sourceFileName and Write
    // permissions to destFileName.
    //
    [System.Security.SecuritySafeCritical]
    public void MoveTo(String destFileName) {
        if (destFileName==null)
            throw new ArgumentNullException("destFileName");
        if (destFileName.Length==0)
            throw new ArgumentException(Environment.GetResourceString("Argument_EmptyFileName"), "destFileName");
        Contract.EndContractBlock();

        String fullDestFileName = Path.GetFullPathInternal(destFileName);
#if FEATURE_CORECLR
        FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.Write | FileSecurityStateAccess.Read, DisplayPath, FullPath);
        FileSecurityState destState = new FileSecurityState(FileSecurityStateAccess.Write, destFileName, fullDestFileName);
        sourceState.EnsureState();
        destState.EnsureState();
#else
        new FileIOPermission(FileIOPermissionAccess.Write | FileIOPermissionAccess.Read, new String[] { FullPath }, false, false).Demand();
        FileIOPermission.QuickDemand(FileIOPermissionAccess.Write, fullDestFileName, false, false);
#endif

        if (!Win32Native.MoveFile(FullPath, fullDestFileName))
            __Error.WinIOError();

        // The move succeeded: update all cached path state to the destination.
        FullPath = fullDestFileName;
        OriginalPath = destFileName;
        _name = Path.GetFileName(fullDestFileName);
        DisplayPath = GetDisplayPath(destFileName);

        // Flush any cached information about the file.
        _dataInitialised = -1;
    }

    /// <summary>Replaces the destination file's contents with this file's,
    /// backing up the destination to destinationBackupFileName.</summary>
    [ComVisible(false)]
    public FileInfo Replace(String destinationFileName, String destinationBackupFileName)
    {
        return Replace(destinationFileName, destinationBackupFileName, false);
    }

    /// <summary>Replace overload that can ignore merge errors in metadata (e.g. ACLs).</summary>
    [ComVisible(false)]
    public FileInfo Replace(String destinationFileName, String destinationBackupFileName, bool ignoreMetadataErrors)
    {
        File.Replace(FullPath, destinationFileName, destinationBackupFileName, ignoreMetadataErrors);
        return new FileInfo(destinationFileName);
    }

    // Returns the display path
    public override String ToString()
    {
        return DisplayPath;
    }
}
}
| |
/********************************************************
* ADO.NET 2.0 Data Provider for SQLite Version 3.X
* Written by Robert Simpson (robert@blackcastlesoft.com)
*
* Released to the public domain, use at your own risk!
********************************************************/
namespace Mono.Data.Sqlite
{
using System;
using System.Data;
using System.Data.Common;
using System.Collections.Generic;
/// <summary>
/// This class provides key info for a given SQLite statement.
/// <remarks>
/// Providing key information for a given statement is non-trivial :(
/// </remarks>
/// </summary>
internal sealed class SqliteKeyReader : IDisposable
{
    // One entry per additional key column we must surface; built once in the ctor.
    private KeyInfo[] _keyInfo;
    // The statement whose table cursors supply ROWID values for the key lookups.
    private SqliteStatement _stmt;
    // True while the subquery readers are positioned on the current row.
    private bool _isValid;

    /// <summary>
    /// Used to support CommandBehavior.KeyInfo
    /// </summary>
    private struct KeyInfo
    {
        internal string databaseName;
        internal string tableName;
        internal string columnName;
        internal int database;
        internal int rootPage;
        internal int cursor;
        // Subquery that fetches this key's value by ROWID; null for ROWID aliases.
        internal KeyQuery query;
        // Ordinal of this column within the subquery's result set.
        internal int column;
    }

    /// <summary>
    /// A single sub-query for a given table/database.
    /// </summary>
    private sealed class KeyQuery : IDisposable
    {
        private SqliteCommand _command;
        internal SqliteDataReader _reader;

        internal KeyQuery(SqliteConnection cnn, string database, string table, params string[] columns)
        {
            // Quote each column name so reserved words / unusual identifiers are safe.
            using (SqliteCommandBuilder builder = new SqliteCommandBuilder())
            {
                _command = cnn.CreateCommand();
                for (int n = 0; n < columns.Length; n++)
                {
                    columns[n] = builder.QuoteIdentifier(columns[n]);
                }
            }
            _command.CommandText = String.Format("SELECT {0} FROM [{1}].[{2}] WHERE ROWID = ?", String.Join(",", columns), database, table);
            _command.Parameters.AddWithValue(null, (long)0);
        }

        // "Valid" means an open reader exists. Only false may be assigned
        // (tears the reader down); Sync() is the way to (re)open it.
        internal bool IsValid
        {
            get { return (_reader != null); }
            set
            {
                if (value != false) throw new ArgumentException();
                if (_reader != null)
                {
                    _reader.Dispose();
                    _reader = null;
                }
            }
        }

        // Re-executes the subquery positioned on the given ROWID.
        internal void Sync(long rowid)
        {
            IsValid = false;
            _command.Parameters[0].Value = rowid;
            _reader = _command.ExecuteReader();
            _reader.Read();
        }

        public void Dispose()
        {
            IsValid = false;
            if (_command != null) _command.Dispose();
            _command = null;
        }
    }

    /// <summary>
    /// This function does all the nasty work at determining what keys need to be returned for
    /// a given statement.
    /// </summary>
    /// <param name="cnn">Connection used for schema lookups and key subqueries.</param>
    /// <param name="reader">Reader whose schema identifies the tables in the query.</param>
    /// <param name="stmt">Statement whose table cursors supply ROWIDs.</param>
    internal SqliteKeyReader(SqliteConnection cnn, SqliteDataReader reader, SqliteStatement stmt)
    {
        Dictionary<string, int> catalogs = new Dictionary<string, int>();
        Dictionary<string, List<string>> tables = new Dictionary<string, List<string>>();
        List<string> list;
        List<KeyInfo> keys = new List<KeyInfo>();

        // Record the statement so we can use it later for sync'ing
        _stmt = stmt;

        // Fetch all the attached databases on this connection
        using (DataTable tbl = cnn.GetSchema("Catalogs"))
        {
            foreach (DataRow row in tbl.Rows)
            {
                catalogs.Add((string)row["CATALOG_NAME"], Convert.ToInt32(row["ID"]));
            }
        }

        // Fetch all the unique tables and catalogs used by the current statement
        using (DataTable schema = reader.GetSchemaTable(false, false))
        {
            foreach (DataRow row in schema.Rows)
            {
                // Check if column is backed to a table
                if (row[SchemaTableOptionalColumn.BaseCatalogName] == DBNull.Value)
                    continue;

                // Record the unique table so we can look up its keys
                string catalog = (string)row[SchemaTableOptionalColumn.BaseCatalogName];
                string table = (string)row[SchemaTableColumn.BaseTableName];

                if (tables.ContainsKey(catalog) == false)
                {
                    list = new List<string>();
                    tables.Add(catalog, list);
                }
                else
                    list = tables[catalog];

                if (list.Contains(table) == false)
                    list.Add(table);
            }

            // For each catalog and each table, query the indexes for the table.
            // Find a primary key index if there is one. If not, find a unique index instead
            foreach (KeyValuePair<string, List<string>> pair in tables)
            {
                for (int i = 0; i < pair.Value.Count; i++)
                {
                    string table = pair.Value[i];
                    DataRow preferredRow = null;
                    using (DataTable tbl = cnn.GetSchema("Indexes", new string[] { pair.Key, null, table }))
                    {
                        // Loop twice. The first time looking for a primary key index,
                        // the second time looking for a unique index
                        for (int n = 0; n < 2 && preferredRow == null; n++)
                        {
                            foreach (DataRow row in tbl.Rows)
                            {
                                if (n == 0 && (bool)row["PRIMARY_KEY"] == true)
                                {
                                    preferredRow = row;
                                    break;
                                }
                                else if (n == 1 && (bool)row["UNIQUE"] == true)
                                {
                                    preferredRow = row;
                                    break;
                                }
                            }
                        }

                        if (preferredRow == null) // Unable to find any suitable index for this table so remove it
                        {
                            // NOTE: mutates the list being indexed; i is rewound to compensate.
                            pair.Value.RemoveAt(i);
                            i--;
                        }
                        else // We found a usable index, so fetch the necessary table details
                        {
                            using (DataTable tblTables = cnn.GetSchema("Tables", new string[] { pair.Key, null, table }))
                            {
                                // Find the root page of the table in the current statement and get the cursor that's iterating it
                                int database = catalogs[pair.Key];
                                int rootPage = Convert.ToInt32(tblTables.Rows[0]["TABLE_ROOTPAGE"]);
                                int cursor = stmt._sql.GetCursorForTable(stmt, database, rootPage);

                                // Now enumerate the members of the index we're going to use
                                using (DataTable indexColumns = cnn.GetSchema("IndexColumns", new string[] { pair.Key, null, table, (string)preferredRow["INDEX_NAME"] }))
                                {
                                    KeyQuery query = null;

                                    List<string> cols = new List<string>();
                                    for (int x = 0; x < indexColumns.Rows.Count; x++)
                                    {
                                        bool addKey = true;
                                        // If the column in the index already appears in the query, skip it
                                        foreach (DataRow row in schema.Rows)
                                        {
                                            if (row.IsNull(SchemaTableColumn.BaseColumnName))
                                                continue;

                                            if ((string)row[SchemaTableColumn.BaseColumnName] == (string)indexColumns.Rows[x]["COLUMN_NAME"] &&
                                                (string)row[SchemaTableColumn.BaseTableName] == table &&
                                                (string)row[SchemaTableOptionalColumn.BaseCatalogName] == pair.Key)
                                            {
                                                // Column is already projected by the query; drop it
                                                // from the index-column set and rewind x.
                                                indexColumns.Rows.RemoveAt(x);
                                                x--;
                                                addKey = false;
                                                break;
                                            }
                                        }
                                        if (addKey == true)
                                            cols.Add((string)indexColumns.Rows[x]["COLUMN_NAME"]);
                                    }

                                    // If the index is not a rowid alias, record all the columns
                                    // needed to make up the unique index and construct a SQL query for it
                                    if ((string)preferredRow["INDEX_NAME"] != "sqlite_master_PK_" + table)
                                    {
                                        // Whatever remains of the columns we need that make up the index that are not
                                        // already in the query need to be queried separately, so construct a subquery
                                        if (cols.Count > 0)
                                        {
                                            string[] querycols = new string[cols.Count];
                                            cols.CopyTo(querycols);
                                            query = new KeyQuery(cnn, pair.Key, table, querycols);
                                        }
                                    }

                                    // Create a KeyInfo struct for each column of the index
                                    for (int x = 0; x < indexColumns.Rows.Count; x++)
                                    {
                                        string columnName = (string)indexColumns.Rows[x]["COLUMN_NAME"];
                                        KeyInfo key = new KeyInfo();

                                        key.rootPage = rootPage;
                                        key.cursor = cursor;
                                        key.database = database;
                                        key.databaseName = pair.Key;
                                        key.tableName = table;
                                        key.columnName = columnName;
                                        key.query = query;
                                        key.column = x;

                                        keys.Add(key);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        // Now we have all the additional columns we have to return in order to support
        // CommandBehavior.KeyInfo
        _keyInfo = new KeyInfo[keys.Count];
        keys.CopyTo(_keyInfo);
    }

    /// <summary>
    /// How many additional columns of keyinfo we're holding
    /// </summary>
    internal int Count
    {
        get { return (_keyInfo == null) ? 0 : _keyInfo.Length; }
    }

    // Syncs all subqueries, then verifies column i is backed by a live cursor;
    // a cursor of -1 means the value is unavailable, reported as a cast failure.
    internal void Sync(int i)
    {
        Sync();
        if (_keyInfo[i].cursor == -1)
            throw new InvalidCastException();
    }

    /// <summary>
    /// Make sure all the subqueries are open and ready and sync'd with the current rowid
    /// of the table they're supporting
    /// </summary>
    internal void Sync()
    {
        if (_isValid == true) return;

        KeyQuery last = null;

        for (int n = 0; n < _keyInfo.Length; n++)
        {
            // Consecutive entries share the same KeyQuery; only sync each one once.
            if (_keyInfo[n].query == null || _keyInfo[n].query != last)
            {
                last = _keyInfo[n].query;
                if (last != null)
                {
                    last.Sync(_stmt._sql.GetRowIdForCursor(_stmt, _keyInfo[n].cursor));
                }
            }
        }
        _isValid = true;
    }

    /// <summary>
    /// Release any readers on any subqueries
    /// </summary>
    internal void Reset()
    {
        _isValid = false;
        if (_keyInfo == null) return;

        for (int n = 0; n < _keyInfo.Length; n++)
        {
            if (_keyInfo[n].query != null)
                _keyInfo[n].query.IsValid = false;
        }
    }

    public void Dispose()
    {
        _stmt = null;

        if (_keyInfo == null) return;

        for (int n = 0; n < _keyInfo.Length; n++)
        {
            if (_keyInfo[n].query != null)
                _keyInfo[n].query.Dispose();
        }
        _keyInfo = null;
    }

    // Accessors below: columns backed by a subquery delegate to that subquery's
    // reader; ROWID aliases are 64-bit integers, so the non-integer accessors
    // throw InvalidCastException for them.

    internal string GetDataTypeName(int i)
    {
        Sync();
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetDataTypeName(_keyInfo[i].column);
        else return "integer";
    }

    internal Type GetFieldType(int i)
    {
        Sync();
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetFieldType(_keyInfo[i].column);
        else return typeof(Int64);
    }

    internal string GetName(int i)
    {
        return _keyInfo[i].columnName;
    }

    // Case-insensitive lookup of a key column by name; returns -1 when not found.
    internal int GetOrdinal(string name)
    {
        for (int n = 0; n < _keyInfo.Length; n++)
        {
            if (String.Compare(name, _keyInfo[n].columnName, StringComparison.OrdinalIgnoreCase) == 0) return n;
        }
        return -1;
    }

    internal bool GetBoolean(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetBoolean(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal byte GetByte(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetByte(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetBytes(_keyInfo[i].column, fieldOffset, buffer, bufferoffset, length);
        else throw new InvalidCastException();
    }

    internal char GetChar(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetChar(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal long GetChars(int i, long fieldOffset, char[] buffer, int bufferoffset, int length)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetChars(_keyInfo[i].column, fieldOffset, buffer, bufferoffset, length);
        else throw new InvalidCastException();
    }

    internal DateTime GetDateTime(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetDateTime(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal decimal GetDecimal(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetDecimal(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal double GetDouble(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetDouble(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal float GetFloat(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetFloat(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal Guid GetGuid(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetGuid(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal Int16 GetInt16(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetInt16(_keyInfo[i].column);
        else
        {
            // ROWID alias: fetch the rowid directly from the table's cursor.
            long rowid = _stmt._sql.GetRowIdForCursor(_stmt, _keyInfo[i].cursor);
            if (rowid == 0) throw new InvalidCastException();
            return Convert.ToInt16(rowid);
        }
    }

    internal Int32 GetInt32(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetInt32(_keyInfo[i].column);
        else
        {
            // ROWID alias: fetch the rowid directly from the table's cursor.
            long rowid = _stmt._sql.GetRowIdForCursor(_stmt, _keyInfo[i].cursor);
            if (rowid == 0) throw new InvalidCastException();
            return Convert.ToInt32(rowid);
        }
    }

    internal Int64 GetInt64(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetInt64(_keyInfo[i].column);
        else
        {
            // ROWID alias: fetch the rowid directly from the table's cursor.
            long rowid = _stmt._sql.GetRowIdForCursor(_stmt, _keyInfo[i].cursor);
            if (rowid == 0) throw new InvalidCastException();
            return Convert.ToInt64(rowid);
        }
    }

    internal string GetString(int i)
    {
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetString(_keyInfo[i].column);
        else throw new InvalidCastException();
    }

    internal object GetValue(int i)
    {
        if (_keyInfo[i].cursor == -1) return DBNull.Value;
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.GetValue(_keyInfo[i].column);
        if (IsDBNull(i) == true)
            return DBNull.Value;
        else return GetInt64(i);
    }

    internal bool IsDBNull(int i)
    {
        if (_keyInfo[i].cursor == -1) return true;
        Sync(i);
        if (_keyInfo[i].query != null) return _keyInfo[i].query._reader.IsDBNull(_keyInfo[i].column);
        else return _stmt._sql.GetRowIdForCursor(_stmt, _keyInfo[i].cursor) == 0;
    }

    /// <summary>
    /// Append all the columns we've added to the original query to the schema
    /// </summary>
    /// <param name="tbl">Schema table to which hidden key-column rows are appended.</param>
    internal void AppendSchemaTable(DataTable tbl)
    {
        KeyQuery last = null;

        for (int n = 0; n < _keyInfo.Length; n++)
        {
            if (_keyInfo[n].query == null || _keyInfo[n].query != last)
            {
                last = _keyInfo[n].query;

                if (last == null) // ROWID aliases are treated special
                {
                    DataRow row = tbl.NewRow();
                    row[SchemaTableColumn.ColumnName] = _keyInfo[n].columnName;
                    row[SchemaTableColumn.ColumnOrdinal] = tbl.Rows.Count;
                    row[SchemaTableColumn.ColumnSize] = 8;
                    row[SchemaTableColumn.NumericPrecision] = 255;
                    row[SchemaTableColumn.NumericScale] = 255;
                    row[SchemaTableColumn.ProviderType] = DbType.Int64;
                    row[SchemaTableColumn.IsLong] = false;
                    row[SchemaTableColumn.AllowDBNull] = false;
                    row[SchemaTableOptionalColumn.IsReadOnly] = false;
                    row[SchemaTableOptionalColumn.IsRowVersion] = false;
                    row[SchemaTableColumn.IsUnique] = false;
                    row[SchemaTableColumn.IsKey] = true;
                    row[SchemaTableColumn.DataType] = typeof(Int64);
                    row[SchemaTableOptionalColumn.IsHidden] = true;
                    row[SchemaTableColumn.BaseColumnName] = _keyInfo[n].columnName;
                    row[SchemaTableColumn.IsExpression] = false;
                    row[SchemaTableColumn.IsAliased] = false;
                    row[SchemaTableColumn.BaseTableName] = _keyInfo[n].tableName;
                    row[SchemaTableOptionalColumn.BaseCatalogName] = _keyInfo[n].databaseName;
                    row[SchemaTableOptionalColumn.IsAutoIncrement] = true;
                    row["DataTypeName"] = "integer";
                    tbl.Rows.Add(row);
                }
                else
                {
                    // Subquery-backed columns: copy the subquery's schema rows,
                    // marking them hidden and renumbering their ordinals.
                    last.Sync(0);
                    using (DataTable tblSub = last._reader.GetSchemaTable())
                    {
                        foreach (DataRow row in tblSub.Rows)
                        {
                            object[] o = row.ItemArray;
                            DataRow newrow = tbl.Rows.Add(o);
                            newrow[SchemaTableOptionalColumn.IsHidden] = true;
                            newrow[SchemaTableColumn.ColumnOrdinal] = tbl.Rows.Count - 1;
                        }
                    }
                }
            }
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.IO.Pipes;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.DotNet.RemoteExecutor;
using Xunit;
namespace System.Diagnostics.Tests
{
public partial class ProcessStreamReadTests : ProcessTestBase
{
[Fact]
public void TestSyncErrorStream()
{
    // Launch the error-producing child and read its stderr synchronously.
    Process p = CreateProcessPortable(RemotelyInvokable.ErrorProcessBody);
    p.StartInfo.RedirectStandardError = true;
    p.Start();

    string expected =
        RemotelyInvokable.TestConsoleApp + " started error stream" + Environment.NewLine +
        RemotelyInvokable.TestConsoleApp + " closed error stream" + Environment.NewLine;
    string actual = p.StandardError.ReadToEnd();
    Assert.Equal(expected, actual);
    Assert.True(p.WaitForExit(WaitInMS));
}
[Fact]
public void TestAsyncErrorStream()
{
    // Iteration 0 collects the whole stream; iteration 1 cancels after the first event.
    for (int iteration = 0; iteration < 2; iteration++)
    {
        bool cancelAfterFirstEvent = iteration == 1;
        var output = new StringBuilder();
        Process p = CreateProcessPortable(RemotelyInvokable.ErrorProcessBody);
        p.StartInfo.RedirectStandardError = true;
        p.ErrorDataReceived += (s, e) =>
        {
            output.Append(e.Data);
            if (cancelAfterFirstEvent)
            {
                ((Process)s).CancelErrorRead();
            }
        };
        p.Start();
        p.BeginErrorReadLine();

        Assert.True(p.WaitForExit(WaitInMS));
        p.WaitForExit(); // This ensures async event handlers are finished processing.

        string expected = RemotelyInvokable.TestConsoleApp + " started error stream" +
            (cancelAfterFirstEvent ? "" : RemotelyInvokable.TestConsoleApp + " closed error stream");
        Assert.Equal(expected, output.ToString());
    }
}
[Fact]
public void TestSyncOutputStream()
{
    // Launch the stream-producing child and read its stdout synchronously.
    Process p = CreateProcessPortable(RemotelyInvokable.StreamBody);
    p.StartInfo.RedirectStandardOutput = true;
    p.Start();

    string actual = p.StandardOutput.ReadToEnd();
    Assert.True(p.WaitForExit(WaitInMS));

    string expected =
        RemotelyInvokable.TestConsoleApp + " started" + Environment.NewLine +
        RemotelyInvokable.TestConsoleApp + " closed" + Environment.NewLine;
    Assert.Equal(expected, actual);
}
[Fact]
public void TestAsyncOutputStream()
{
    // Iteration 0 collects the whole stream; iteration 1 cancels after the first event.
    for (int iteration = 0; iteration < 2; iteration++)
    {
        bool cancelAfterFirstEvent = iteration == 1;
        var output = new StringBuilder();
        Process p = CreateProcess(RemotelyInvokable.StreamBody);
        p.StartInfo.RedirectStandardOutput = true;
        p.OutputDataReceived += (s, e) =>
        {
            output.Append(e.Data);
            if (cancelAfterFirstEvent)
            {
                ((Process)s).CancelOutputRead();
            }
        };
        p.Start();
        p.BeginOutputReadLine();

        Assert.True(p.WaitForExit(WaitInMS));
        p.WaitForExit(); // This ensures async event handlers are finished processing.

        string expected = RemotelyInvokable.TestConsoleApp + " started" +
            (cancelAfterFirstEvent ? "" : RemotelyInvokable.TestConsoleApp + " closed");
        Assert.Equal(expected, output.ToString());
    }
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Pipe doesn't work well on UAP")]
async public Task TestAsyncOutputStream_CancelOutputRead()
{
    // This test might have some false negatives due to a possible race condition in
    // System.Diagnostics.AsyncStreamReader.ReadBufferAsync: there is no way to know
    // whether the parent process has processed async output from the child process.
    using (AnonymousPipeServerStream pipeWrite = new AnonymousPipeServerStream(PipeDirection.Out, HandleInheritability.Inheritable))
    using (AnonymousPipeServerStream pipeRead = new AnonymousPipeServerStream(PipeDirection.In, HandleInheritability.Inheritable))
    {
        using (Process p = CreateProcess(TestAsyncOutputStream_CancelOutputRead_RemotelyInvokable, $"{pipeWrite.GetClientHandleAsString()} {pipeRead.GetClientHandleAsString()}"))
        {
            var dataReceived = new List<int>();
            var dataArrivedEvent = new AutoResetEvent(false);
            p.StartInfo.RedirectStandardOutput = true;
            p.OutputDataReceived += (s, e) =>
            {
                if (e.Data != null)
                {
                    dataReceived.Add(int.Parse(e.Data));
                }
                dataArrivedEvent.Set();
            };

            // Start child process
            p.Start();

            pipeWrite.DisposeLocalCopyOfClientHandle();
            pipeRead.DisposeLocalCopyOfClientHandle();

            // Wait for the child process to start
            Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Child process not started");

            // Start listening and ask the child to produce output 1
            p.BeginOutputReadLine();
            await pipeWrite.WriteAsync(new byte[1], 0, 1);

            // Wait for the child to signal that it produced number 1
            Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Missing child signal for value 1");
            Assert.True(dataArrivedEvent.WaitOne(WaitInMS), "Value 1 not received");

            // Stop listening, then signal the child to produce value 2 (which must NOT be received)
            p.CancelOutputRead();
            await pipeWrite.WriteAsync(new byte[1], 0, 1);

            // Wait for the child to signal that it produced number 2
            Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Missing child signal for value 2");

            // Wait for the child process to close, to be sure its output buffer has been flushed
            Assert.True(p.WaitForExit(WaitInMS), "Child process didn't close");

            // Only the first value should have been observed, because reading was canceled.
            Assert.Equal(1, dataReceived.Count);
            Assert.Equal(1, dataReceived[0]);
        }
    }
}
/// <summary>
/// Child-process body for <see cref="TestAsyncOutputStream_CancelOutputRead"/>.
/// Writes the numbers 1 and 2 to stdout, each gated on a one-byte signal from the
/// parent, and signals the parent back after each write.
/// </summary>
/// <param name="pipesHandle">Space-separated client handles: read pipe then write pipe.</param>
async private Task<int> TestAsyncOutputStream_CancelOutputRead_RemotelyInvokable(string pipesHandle)
{
    string[] handles = pipesHandle.Split(' ');

    using (var signalIn = new AnonymousPipeClientStream(PipeDirection.In, handles[0]))
    using (var signalOut = new AnonymousPipeClientStream(PipeDirection.Out, handles[1]))
    {
        // Tell the parent we have started.
        await signalOut.WriteAsync(new byte[1], 0, 1);

        // Wait for the parent, emit 1, then signal back.
        Assert.True(await WaitPipeSignal(signalIn, WaitInMS), "Missing parent signal to produce number 1");
        Console.WriteLine(1);
        await signalOut.WriteAsync(new byte[1], 0, 1);

        // Wait for the parent, emit 2, then signal back.
        Assert.True(await WaitPipeSignal(signalIn, WaitInMS), "Missing parent signal to produce number 2");
        Console.WriteLine(2);
        await signalOut.WriteAsync(new byte[1], 0, 1);

        return RemoteExecutor.SuccessExitCode;
    }
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Pipe doesn't work well on UAP")]
public async Task TestAsyncOutputStream_BeginCancelBeginOutputRead()
{
    // Verifies that BeginOutputReadLine can be re-started after CancelOutputRead:
    // values produced while listening (1,2,3) and after re-starting (7,8,9) must be
    // observed; values produced while cancelled (4,5,6) are not checked for.
    using (AnonymousPipeServerStream pipeWrite = new AnonymousPipeServerStream(PipeDirection.Out, HandleInheritability.Inheritable))
    using (AnonymousPipeServerStream pipeRead = new AnonymousPipeServerStream(PipeDirection.In, HandleInheritability.Inheritable))
    {
        using (Process p = CreateProcess(TestAsyncOutputStream_BeginCancelBeinOutputRead_RemotelyInvokable, $"{pipeWrite.GetClientHandleAsString()} {pipeRead.GetClientHandleAsString()}"))
        {
            var dataReceived = new BlockingCollection<int>();

            p.StartInfo.RedirectStandardOutput = true;
            p.OutputDataReceived += (s, e) =>
            {
                // e.Data is null for the final end-of-stream callback; only record real lines.
                if (e.Data != null)
                {
                    dataReceived.Add(int.Parse(e.Data));
                }
            };

            // Start child process
            p.Start();
            // Release the parent's copies of the client handles so pipe EOF semantics work.
            pipeWrite.DisposeLocalCopyOfClientHandle();
            pipeRead.DisposeLocalCopyOfClientHandle();

            // Wait for the child process to signal it has started
            Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Child process not started");

            // Start listening and signal the child to produce 1,2,3
            p.BeginOutputReadLine();
            await pipeWrite.WriteAsync(new byte[1], 0, 1);

            // Wait for the child to signal 1,2,3 have been produced, then verify arrival
            Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Missing child signal for value 1,2,3");
            AssertValuesReceived(dataReceived, 1, 2, 3);

            // Cancel and signal the child (it produces 4,5,6 while nobody is listening)
            p.CancelOutputRead();
            await pipeWrite.WriteAsync(new byte[1], 0, 1);

            // Re-start listening and signal the child to produce 7,8,9
            p.BeginOutputReadLine();
            await pipeWrite.WriteAsync(new byte[1], 0, 1);

            // Wait for the child process to close, ensuring its output buffer has been flushed
            Assert.True(p.WaitForExit(WaitInMS), "Child process didn't close");

            // Verify values 7,8,9 arrived after reading was re-started
            AssertValuesReceived(dataReceived, 7, 8, 9);
        }
    }
}

/// <summary>
/// Consumes <paramref name="dataReceived"/> until every value in <paramref name="expectedValues"/>
/// has been observed (in any order), failing the test if they do not all arrive within <see cref="WaitInMS"/>.
/// </summary>
private void AssertValuesReceived(BlockingCollection<int> dataReceived, params int[] expectedValues)
{
    using (CancellationTokenSource cts = new CancellationTokenSource(WaitInMS))
    {
        try
        {
            var remaining = new List<int>(expectedValues);

            foreach (int value in dataReceived.GetConsumingEnumerable(cts.Token))
            {
                remaining.Remove(value);
                if (remaining.Count == 0)
                {
                    break;
                }
            }
        }
        catch (OperationCanceledException)
        {
            Assert.False(cts.IsCancellationRequested, $"Values {string.Join(",", expectedValues)} not arrived");
        }
    }
}
// Child-process body for the begin/cancel/begin-output-read test above.
// Produces three batches of values (1-3, 4-6, 7-9) on stdout, each batch gated
// on a one-byte signal from the parent over the anonymous pipe pair.
// NOTE(review): "Bein" in the method name looks like a typo for "Begin"; renaming
// would also require updating the delegate reference in the parent test method.
async private Task<int> TestAsyncOutputStream_BeginCancelBeinOutputRead_RemotelyInvokable(string pipesHandle)
{
    // pipesHandle is "<read handle> <write handle>" as passed by the parent.
    string[] pipeHandlers = pipesHandle.Split(' ');
    using (AnonymousPipeClientStream pipeRead = new AnonymousPipeClientStream(PipeDirection.In, pipeHandlers[0]))
    using (AnonymousPipeClientStream pipeWrite = new AnonymousPipeClientStream(PipeDirection.Out, pipeHandlers[1]))
    {
        // Signal the parent that this child has started
        await pipeWrite.WriteAsync(new byte[1], 0, 1);

        // Wait for the parent's signal, produce 1,2,3, then signal back
        Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Missing parent signal to produce number 1,2,3");
        Console.WriteLine(1);
        Console.WriteLine(2);
        Console.WriteLine(3);
        await pipeWrite.WriteAsync(new byte[1], 0, 1);

        // Wait for the parent's cancellation signal, then produce 4,5,6 (parent is not listening)
        Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Missing parent signal to produce number 4,5,6");
        Console.WriteLine(4);
        Console.WriteLine(5);
        Console.WriteLine(6);

        // Wait for the parent's re-start listening signal, then produce 7,8,9
        Assert.True(await WaitPipeSignal(pipeRead, WaitInMS), "Missing parent re-start listening signal");
        Console.WriteLine(7);
        Console.WriteLine(8);
        Console.WriteLine(9);
        return RemoteExecutor.SuccessExitCode;
    }
}
/// <summary>
/// Waits up to <paramref name="millisecond"/> ms for a single signal byte on <paramref name="pipe"/>.
/// </summary>
/// <returns><c>true</c> if a byte was read before the timeout; otherwise <c>false</c>.</returns>
async private Task<bool> WaitPipeSignal(PipeStream pipe, int millisecond)
{
    using (var timeout = new CancellationTokenSource(millisecond))
    {
        try
        {
            await pipe.ReadAsync(new byte[1], 0, 1, timeout.Token);
        }
        catch (OperationCanceledException)
        {
            // The timeout fired before any byte arrived.
            return false;
        }

        return true;
    }
}
[Fact]
public void TestSyncStreams()
{
    // Writes a line to the child's redirected stdin and expects the child
    // (which reads a line and echoes it) to produce the same text on stdout.
    const string expectedLine = "This string should come as output";

    Process process = CreateProcessPortable(RemotelyInvokable.ReadLine);
    process.StartInfo.RedirectStandardInput = true;
    process.StartInfo.RedirectStandardOutput = true;
    process.OutputDataReceived += (sender, args) => Assert.Equal(expectedLine, args.Data);
    process.Start();

    // Disposing the writer closes stdin, letting the child observe end-of-input.
    using (StreamWriter stdin = process.StandardInput)
    {
        stdin.WriteLine(expectedLine);
    }

    Assert.True(process.WaitForExit(WaitInMS));
}
[Fact]
public void TestEOFReceivedWhenStdInClosed()
{
    // Regression test for dotnet/corefx issue #13447.
    //
    // On Unix, when two children are started with redirected stdin, the write end of the
    // first child's stdin pipe used to be inherited by the second child, raising the pipe's
    // reference count to 2. Closing the parent's copy then left the pipe open, so the first
    // child's Console.ReadLine() never observed EOF. The fix sets O_CLOEXEC on the
    // redirection pipes so the descriptors are not inherited by other children.
    const string ExpectedLine = "NULL";

    Process firstChild = CreateProcessPortable(RemotelyInvokable.ReadLineWriteIfNull);
    Process secondChild = CreateProcessPortable(RemotelyInvokable.ReadLine);

    // First child: blocks reading stdin and writes "NULL" once it observes EOF.
    firstChild.StartInfo.RedirectStandardInput = true;
    firstChild.StartInfo.RedirectStandardOutput = true;
    firstChild.OutputDataReceived += (sender, args) => Assert.Equal(ExpectedLine, args.Data);
    firstChild.Start();

    // Second child: exists only so it could (without the fix) inherit the first child's pipe handle.
    secondChild.StartInfo.RedirectStandardInput = true;
    secondChild.Start();

    try
    {
        // Closing stdin must unblock the first child, which then writes 'NULL' and exits.
        firstChild.StandardInput.Close();
        Assert.True(firstChild.WaitForExit(WaitInMS));
    }
    finally
    {
        // Cleanup: kill the second child process
        secondChild.Kill();
    }

    // Cleanup
    Assert.True(secondChild.WaitForExit(WaitInMS));
    secondChild.Dispose();
    firstChild.Dispose();
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "No simple way to perform this on uap using cmd.exe")]
public void TestAsyncHalfCharacterAtATime()
{
    // The child writes its output one byte at a time (with StandardOutputEncoding set to
    // UTF-16); the async reader must reassemble split characters and still deliver "a".
    bool sawOutput = false;
    var handlerExceptions = new List<Exception>();

    Process process = CreateProcessPortable(RemotelyInvokable.WriteSlowlyByByte);
    process.StartInfo.RedirectStandardOutput = true;
    process.StartInfo.StandardOutputEncoding = Encoding.Unicode;
    process.OutputDataReceived += (sender, args) =>
    {
        try
        {
            if (sawOutput)
                return;

            sawOutput = true;
            Assert.Equal("a", args.Data);
        }
        catch (Exception ex)
        {
            // Exceptions thrown inside the event handler would otherwise be lost;
            // collect them so the test itself can report the failure.
            handlerExceptions.Add(ex);
        }
    };

    process.Start();
    process.BeginOutputReadLine();

    Assert.True(process.WaitForExit(WaitInMS));
    process.WaitForExit(); // This ensures async event handlers are finished processing.

    Assert.True(sawOutput);

    if (handlerExceptions.Count > 0)
    {
        // Re-throw collected exceptions
        throw new AggregateException(handlerExceptions);
    }
}
[Fact]
public void TestManyOutputLines()
{
    // The child emits 144 non-empty lines; the handler is presumably invoked one extra
    // time with null/empty Data at end of stream, accounting for the +1 below.
    const int ExpectedLineCount = 144;

    int nonEmptyLines = 0;
    int totalCallbacks = 0;

    Process process = CreateProcessPortable(RemotelyInvokable.Write144Lines);
    process.StartInfo.RedirectStandardOutput = true;
    process.OutputDataReceived += (sender, args) =>
    {
        if (!string.IsNullOrWhiteSpace(args.Data))
        {
            nonEmptyLines++;
        }

        totalCallbacks++;
    };

    process.Start();
    process.BeginOutputReadLine();

    Assert.True(process.WaitForExit(WaitInMS));
    process.WaitForExit(); // This ensures async event handlers are finished processing.

    Assert.Equal(ExpectedLineCount, nonEmptyLines);
    Assert.Equal(ExpectedLineCount + 1, totalCallbacks);
}
[Fact]
public void TestClosingStreamsAsyncDoesNotThrow()
{
    // Closing the Process while async readers are attached must not throw,
    // even if the child writes after the close.
    Process process = CreateProcessPortable(RemotelyInvokable.WriteLinesAfterClose);
    process.StartInfo.RedirectStandardOutput = true;
    process.StartInfo.RedirectStandardError = true;

    // On netfx, the handler is called once with Data == null even if the process writes
    // nothing to the pipe. That behavior is documented here:
    // https://docs.microsoft.com/en-us/dotnet/api/system.diagnostics.datareceivedeventhandler
    process.Start();
    process.BeginOutputReadLine();
    process.BeginErrorReadLine();
    process.Close();

    RemotelyInvokable.FireClosedEvent();
}
[Fact]
public void TestClosingStreamsUndefinedDoesNotThrow()
{
    // Redirection is requested but neither synchronous nor asynchronous reading is
    // ever started; closing in this "undefined" state must not throw.
    Process process = CreateProcessPortable(RemotelyInvokable.WriteLinesAfterClose);
    process.StartInfo.RedirectStandardOutput = true;
    process.StartInfo.RedirectStandardError = true;

    process.Start();
    process.Close();

    RemotelyInvokable.FireClosedEvent();
}
[Fact]
public void TestClosingSyncModeDoesNotCloseStreams()
{
    // Streams obtained for synchronous reading must remain usable after the
    // Process object itself has been closed.
    Process process = CreateProcessPortable(RemotelyInvokable.WriteLinesAfterClose);
    process.StartInfo.RedirectStandardOutput = true;
    process.StartInfo.RedirectStandardError = true;

    process.Start();

    StreamReader stdout = process.StandardOutput;
    StreamReader stderr = process.StandardError;

    process.Close();
    RemotelyInvokable.FireClosedEvent();

    // Neither read should throw even though the Process has been closed.
    stdout.ReadToEnd();
    stderr.ReadToEnd();
}
[Fact]
public void TestStreamNegativeTests()
{
    // Case 1: process never started — all stream accessors and read operations must throw.
    {
        Process process = new Process();
        Assert.Throws<InvalidOperationException>(() => process.StandardOutput);
        Assert.Throws<InvalidOperationException>(() => process.StandardError);
        Assert.Throws<InvalidOperationException>(() => process.BeginOutputReadLine());
        Assert.Throws<InvalidOperationException>(() => process.BeginErrorReadLine());
        Assert.Throws<InvalidOperationException>(() => process.CancelOutputRead());
        Assert.Throws<InvalidOperationException>(() => process.CancelErrorRead());
    }

    // Case 2: async reading started — the synchronous stream accessors must throw.
    {
        Process process = CreateProcessPortable(RemotelyInvokable.StreamBody);
        process.StartInfo.RedirectStandardOutput = true;
        process.StartInfo.RedirectStandardError = true;
        process.OutputDataReceived += (sender, args) => { };
        process.ErrorDataReceived += (sender, args) => { };
        process.Start();
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();

        Assert.Throws<InvalidOperationException>(() => process.StandardOutput);
        Assert.Throws<InvalidOperationException>(() => process.StandardError);
        Assert.True(process.WaitForExit(WaitInMS));
    }

    // Case 3: synchronous streams obtained — starting async reads must throw.
    {
        Process process = CreateProcessPortable(RemotelyInvokable.StreamBody);
        process.StartInfo.RedirectStandardOutput = true;
        process.StartInfo.RedirectStandardError = true;
        process.OutputDataReceived += (sender, args) => { };
        process.ErrorDataReceived += (sender, args) => { };
        process.Start();

        StreamReader output = process.StandardOutput;
        StreamReader error = process.StandardError;

        Assert.Throws<InvalidOperationException>(() => process.BeginOutputReadLine());
        Assert.Throws<InvalidOperationException>(() => process.BeginErrorReadLine());
        Assert.True(process.WaitForExit(WaitInMS));
    }
}
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Input;
using osu.Framework.Input.Events;
using osu.Framework.Logging;
using osu.Game.Beatmaps;
using osu.Game.Beatmaps.ControlPoints;
using osu.Game.Rulesets.Configuration;
using osu.Game.Rulesets.Edit.Tools;
using osu.Game.Rulesets.Mods;
using osu.Game.Rulesets.Objects;
using osu.Game.Rulesets.Objects.Drawables;
using osu.Game.Rulesets.UI;
using osu.Game.Rulesets.UI.Scrolling;
using osu.Game.Screens.Edit;
using osu.Game.Screens.Edit.Components.RadioButtons;
using osu.Game.Screens.Edit.Components.TernaryButtons;
using osu.Game.Screens.Edit.Compose;
using osu.Game.Screens.Edit.Compose.Components;
using osuTK;
using osuTK.Input;
namespace osu.Game.Rulesets.Edit
{
/// <summary>
/// Top level container for editor compose mode.
/// Responsible for providing snapping and generally gluing components together.
/// </summary>
/// <typeparam name="TObject">The base type of supported objects.</typeparam>
[Cached(Type = typeof(IPlacementHandler))]
public abstract class HitObjectComposer<TObject> : HitObjectComposer, IPlacementHandler
    where TObject : HitObject
{
    /// <summary>
    /// The ruleset-specific configuration, resolved from <see cref="RulesetConfigCache"/> during load.
    /// </summary>
    protected IRulesetConfigManager Config { get; private set; }

    /// <summary>
    /// The ruleset this composer was constructed for.
    /// </summary>
    protected readonly Ruleset Ruleset;

    // Provides `Playfield` to child dependencies once the drawable ruleset has been created in load().
    private DependencyContainer dependencies;

    [Resolved]
    protected EditorClock EditorClock { get; private set; }

    [Resolved]
    protected EditorBeatmap EditorBeatmap { get; private set; }

    [Resolved]
    protected IBeatSnapProvider BeatSnapProvider { get; private set; }

    /// <summary>
    /// The blueprint container handling hitobject selection/placement display and input.
    /// Assigned during load from <see cref="CreateBlueprintContainer"/>.
    /// </summary>
    protected ComposeBlueprintContainer BlueprintContainer { get; private set; }

    // Wraps the ruleset's drawable counterpart so it runs under the editor clock.
    private DrawableEditorRulesetWrapper<TObject> drawableRulesetWrapper;

    /// <summary>
    /// A container sized to the playfield, displayed underneath it. Rulesets may add
    /// editor-only layers (e.g. grids) here.
    /// </summary>
    protected readonly Container LayerBelowRuleset = new Container { RelativeSizeAxes = Axes.Both };

    // Resolved in LoadComplete; used to query the current mouse position for placement checks.
    private InputManager inputManager;

    // Left sidebar: composition tool radio buttons (bound to keys 1-9).
    private RadioButtonCollection toolboxCollection;

    // Left sidebar: ternary toggle buttons (bound to keys Q~P).
    private FillFlowContainer togglesCollection;

    protected HitObjectComposer(Ruleset ruleset)
    {
        Ruleset = ruleset;
    }

    protected override IReadOnlyDependencyContainer CreateChildDependencies(IReadOnlyDependencyContainer parent) =>
        dependencies = new DependencyContainer(base.CreateChildDependencies(parent));

    [BackgroundDependencyLoader]
    private void load()
    {
        Config = Dependencies.Get<RulesetConfigCache>().GetConfigFor(Ruleset);

        try
        {
            // Run the drawable ruleset with the autoplay mod so the beatmap plays itself
            // while being edited, driven by the (externally processed) editor clock.
            drawableRulesetWrapper = new DrawableEditorRulesetWrapper<TObject>(CreateDrawableRuleset(Ruleset, EditorBeatmap.PlayableBeatmap, new[] { Ruleset.GetAutoplayMod() }))
            {
                Clock = EditorClock,
                ProcessCustomClock = false
            };
        }
        catch (Exception e)
        {
            // A beatmap which cannot be converted for this ruleset leaves the composer empty.
            Logger.Error(e, "Could not load beatmap successfully!");
            return;
        }

        dependencies.CacheAs(Playfield);

        const float toolbar_width = 200;

        InternalChildren = new Drawable[]
        {
            new Container
            {
                Name = "Content",
                Padding = new MarginPadding { Left = toolbar_width },
                RelativeSizeAxes = Axes.Both,
                Children = new Drawable[]
                {
                    // layers below playfield
                    drawableRulesetWrapper.CreatePlayfieldAdjustmentContainer().WithChild(LayerBelowRuleset),
                    drawableRulesetWrapper,
                    // layers above playfield
                    drawableRulesetWrapper.CreatePlayfieldAdjustmentContainer()
                                          .WithChild(BlueprintContainer = CreateBlueprintContainer())
                }
            },
            new FillFlowContainer
            {
                Name = "Sidebar",
                RelativeSizeAxes = Axes.Y,
                Width = toolbar_width,
                Padding = new MarginPadding { Right = 10 },
                Spacing = new Vector2(10),
                Children = new Drawable[]
                {
                    new ToolboxGroup("toolbox (1-9)")
                    {
                        Child = toolboxCollection = new RadioButtonCollection { RelativeSizeAxes = Axes.X }
                    },
                    new ToolboxGroup("toggles (Q~P)")
                    {
                        Child = togglesCollection = new FillFlowContainer
                        {
                            RelativeSizeAxes = Axes.X,
                            AutoSizeAxes = Axes.Y,
                            Direction = FillDirection.Vertical,
                            Spacing = new Vector2(0, 5),
                        },
                    }
                }
            },
        };

        // The select tool is always available, prepended before the ruleset's own tools.
        toolboxCollection.Items = CompositionTools
                                  .Prepend(new SelectTool())
                                  .Select(t => new RadioButton(t.Name, () => toolSelected(t), t.CreateIcon))
                                  .ToList();

        TernaryStates = CreateTernaryButtons().ToArray();
        togglesCollection.AddRange(TernaryStates.Select(b => new DrawableTernaryButton(b)));

        // Default to the select tool on entering the composer.
        setSelectTool();

        EditorBeatmap.SelectedHitObjects.CollectionChanged += selectionChanged;
    }

    protected override void LoadComplete()
    {
        base.LoadComplete();
        inputManager = GetContainingInputManager();
    }

    public override Playfield Playfield => drawableRulesetWrapper.Playfield;

    public override IEnumerable<DrawableHitObject> HitObjects => drawableRulesetWrapper.Playfield.AllHitObjects;

    public override bool CursorInPlacementArea => drawableRulesetWrapper.Playfield.ReceivePositionalInputAt(inputManager.CurrentState.Mouse.Position);

    /// <summary>
    /// Defines all available composition tools, listed on the left side of the editor screen as button controls.
    /// This should usually define one tool for each <see cref="HitObject"/> type used in the target ruleset.
    /// </summary>
    /// <remarks>
    /// A "select" tool is automatically added as the first tool.
    /// </remarks>
    protected abstract IReadOnlyList<HitObjectCompositionTool> CompositionTools { get; }

    /// <summary>
    /// A collection of states which will be displayed to the user in the toolbox.
    /// </summary>
    public TernaryButton[] TernaryStates { get; private set; }

    /// <summary>
    /// Create all ternary states required to be displayed to the user.
    /// </summary>
    protected virtual IEnumerable<TernaryButton> CreateTernaryButtons() => BlueprintContainer.TernaryStates;

    /// <summary>
    /// Construct a relevant blueprint container. This will manage hitobject selection/placement input handling and display logic.
    /// </summary>
    protected virtual ComposeBlueprintContainer CreateBlueprintContainer() => new ComposeBlueprintContainer(this);

    /// <summary>
    /// Construct a drawable ruleset for the provided ruleset.
    /// </summary>
    /// <remarks>
    /// Can be overridden to add editor-specific logical changes to a <see cref="Ruleset"/>'s standard <see cref="DrawableRuleset{TObject}"/>.
    /// For example, hit animations or judgement logic may be changed to give a better editor user experience.
    /// </remarks>
    /// <param name="ruleset">The ruleset used to construct its drawable counterpart.</param>
    /// <param name="beatmap">The loaded beatmap.</param>
    /// <param name="mods">The mods to be applied.</param>
    /// <returns>An editor-relevant <see cref="DrawableRuleset{TObject}"/>.</returns>
    protected virtual DrawableRuleset<TObject> CreateDrawableRuleset(Ruleset ruleset, IBeatmap beatmap, IReadOnlyList<Mod> mods = null)
        => (DrawableRuleset<TObject>)ruleset.CreateDrawableRulesetWith(beatmap, mods);

    #region Tool selection logic

    protected override bool OnKeyDown(KeyDownEvent e)
    {
        // Plain keypresses only; modified keys are left for other editor shortcuts.
        if (e.ControlPressed || e.AltPressed || e.SuperPressed)
            return false;

        if (checkLeftToggleFromKey(e.Key, out var leftIndex))
        {
            var item = toolboxCollection.Items.ElementAtOrDefault(leftIndex);

            if (item != null)
            {
                item.Select();
                return true;
            }
        }

        if (checkRightToggleFromKey(e.Key, out var rightIndex))
        {
            var item = togglesCollection.ElementAtOrDefault(rightIndex);

            if (item is DrawableTernaryButton button)
            {
                button.Button.Toggle();
                return true;
            }
        }

        return base.OnKeyDown(e);
    }

    // Maps number keys 1-9 onto toolbox (composition tool) item indices.
    private bool checkLeftToggleFromKey(Key key, out int index)
    {
        if (key < Key.Number1 || key > Key.Number9)
        {
            index = -1;
            return false;
        }

        index = key - Key.Number1;
        return true;
    }

    // Maps the QWERTY top row (Q..P) onto toggle button indices.
    private bool checkRightToggleFromKey(Key key, out int index)
    {
        switch (key)
        {
            case Key.Q:
                index = 0;
                break;

            case Key.W:
                index = 1;
                break;

            case Key.E:
                index = 2;
                break;

            case Key.R:
                index = 3;
                break;

            case Key.T:
                index = 4;
                break;

            case Key.Y:
                index = 5;
                break;

            case Key.U:
                index = 6;
                break;

            case Key.I:
                index = 7;
                break;

            case Key.O:
                index = 8;
                break;

            case Key.P:
                index = 9;
                break;

            default:
                index = -1;
                break;
        }

        return index >= 0;
    }

    private void selectionChanged(object sender, NotifyCollectionChangedEventArgs changedArgs)
    {
        if (EditorBeatmap.SelectedHitObjects.Any())
        {
            // ensure in selection mode if a selection is made.
            setSelectTool();
        }
    }

    // The select tool is always the first toolbox item (prepended in load()).
    private void setSelectTool() => toolboxCollection.Items.First().Select();

    private void toolSelected(HitObjectCompositionTool tool)
    {
        BlueprintContainer.CurrentTool = tool;

        // Switching to a placement tool drops any existing selection.
        if (!(tool is SelectTool))
            EditorBeatmap.SelectedHitObjects.Clear();
    }

    #endregion

    #region IPlacementHandler

    public void BeginPlacement(HitObject hitObject)
    {
        EditorBeatmap.PlacementObject.Value = hitObject;
    }

    public void EndPlacement(HitObject hitObject, bool commit)
    {
        EditorBeatmap.PlacementObject.Value = null;

        if (commit)
        {
            EditorBeatmap.Add(hitObject);

            // Seek forward so the newly placed object becomes visible/audible.
            if (EditorClock.CurrentTime < hitObject.StartTime)
                EditorClock.SeekSmoothlyTo(hitObject.StartTime);
        }
    }

    public void Delete(HitObject hitObject) => EditorBeatmap.Remove(hitObject);

    #endregion

    #region IPositionSnapProvider

    /// <summary>
    /// Retrieve the relevant <see cref="Playfield"/> at a specified screen-space position.
    /// In cases where a ruleset doesn't require custom logic (due to nested playfields, for example)
    /// this will return the ruleset's main playfield.
    /// </summary>
    /// <param name="screenSpacePosition">The screen-space position to query.</param>
    /// <returns>The most relevant <see cref="Playfield"/>.</returns>
    protected virtual Playfield PlayfieldAtScreenSpacePosition(Vector2 screenSpacePosition) => drawableRulesetWrapper.Playfield;

    public override SnapResult SnapScreenSpacePositionToValidTime(Vector2 screenSpacePosition)
    {
        var playfield = PlayfieldAtScreenSpacePosition(screenSpacePosition);
        double? targetTime = null;

        // Only scrolling playfields carry a position->time mapping; others snap position only.
        if (playfield is ScrollingPlayfield scrollingPlayfield)
        {
            targetTime = scrollingPlayfield.TimeAtScreenSpacePosition(screenSpacePosition);

            // apply beat snapping
            targetTime = BeatSnapProvider.SnapTime(targetTime.Value);

            // convert back to screen space
            screenSpacePosition = scrollingPlayfield.ScreenSpacePositionAtTime(targetTime.Value);
        }

        return new SnapResult(screenSpacePosition, targetTime, playfield);
    }

    public override float GetBeatSnapDistanceAt(double referenceTime)
    {
        DifficultyControlPoint difficultyPoint = EditorBeatmap.ControlPointInfo.DifficultyPointAt(referenceTime);
        // 100 appears to be the base distance per beat at 1.0x slider multiplier — TODO(review): confirm against osu! distance spacing rules.
        return (float)(100 * EditorBeatmap.BeatmapInfo.BaseDifficulty.SliderMultiplier * difficultyPoint.SpeedMultiplier / BeatSnapProvider.BeatDivisor);
    }

    public override float DurationToDistance(double referenceTime, double duration)
    {
        double beatLength = BeatSnapProvider.GetBeatLengthAtTime(referenceTime);
        return (float)(duration / beatLength * GetBeatSnapDistanceAt(referenceTime));
    }

    public override double DistanceToDuration(double referenceTime, float distance)
    {
        double beatLength = BeatSnapProvider.GetBeatLengthAtTime(referenceTime);
        return distance / GetBeatSnapDistanceAt(referenceTime) * beatLength;
    }

    public override double GetSnappedDurationFromDistance(double referenceTime, float distance)
        => BeatSnapProvider.SnapTime(referenceTime + DistanceToDuration(referenceTime, distance), referenceTime) - referenceTime;

    public override float GetSnappedDistanceFromDistance(double referenceTime, float distance)
    {
        double actualDuration = referenceTime + DistanceToDuration(referenceTime, distance);

        double snappedEndTime = BeatSnapProvider.SnapTime(actualDuration, referenceTime);

        double beatLength = BeatSnapProvider.GetBeatLengthAtTime(referenceTime);

        // we don't want to exceed the actual duration and snap to a point in the future.
        // as we are snapping to beat length via SnapTime (which will round-to-nearest), check for snapping in the forward direction and reverse it.
        if (snappedEndTime > actualDuration + 1)
            snappedEndTime -= beatLength;

        return DurationToDistance(referenceTime, snappedEndTime - referenceTime);
    }

    #endregion
}
/// <summary>
/// A non-generic definition of a HitObject composer class.
/// Generally used to access certain methods without requiring a generic type for <see cref="HitObjectComposer{T}" />.
/// </summary>
[Cached(typeof(HitObjectComposer))]
[Cached(typeof(IPositionSnapProvider))]
public abstract class HitObjectComposer : CompositeDrawable, IPositionSnapProvider
{
    protected HitObjectComposer()
    {
        // Fill the parent editor screen by default.
        RelativeSizeAxes = Axes.Both;
    }

    /// <summary>
    /// The target ruleset's playfield.
    /// </summary>
    public abstract Playfield Playfield { get; }

    /// <summary>
    /// All <see cref="DrawableHitObject"/>s in currently loaded beatmap.
    /// </summary>
    public abstract IEnumerable<DrawableHitObject> HitObjects { get; }

    /// <summary>
    /// Whether the user's cursor is currently in an area of the <see cref="HitObjectComposer"/> that is valid for placement.
    /// </summary>
    public abstract bool CursorInPlacementArea { get; }

    /// <summary>
    /// Serialises the current selection to a string representation, if supported.
    /// </summary>
    /// <returns>The serialised selection, or an empty string when not supported.</returns>
    public virtual string ConvertSelectionToString() => string.Empty;

    #region IPositionSnapProvider

    /// <summary>
    /// Snaps a screen-space position to a valid time (and potentially adjusted position).
    /// </summary>
    public abstract SnapResult SnapScreenSpacePositionToValidTime(Vector2 screenSpacePosition);

    /// <summary>
    /// Snaps a screen-space position to a valid position only, with no time component.
    /// The base implementation returns the position unchanged.
    /// </summary>
    public virtual SnapResult SnapScreenSpacePositionToValidPosition(Vector2 screenSpacePosition) =>
        new SnapResult(screenSpacePosition, null);

    /// <summary>
    /// Retrieves the distance between two points within a timing point's beat at the given time.
    /// </summary>
    public abstract float GetBeatSnapDistanceAt(double referenceTime);

    /// <summary>
    /// Converts a duration to a distance, relative to the beat length at the given reference time.
    /// </summary>
    public abstract float DurationToDistance(double referenceTime, double duration);

    /// <summary>
    /// Converts a distance to a duration, relative to the beat length at the given reference time.
    /// </summary>
    public abstract double DistanceToDuration(double referenceTime, float distance);

    /// <summary>
    /// Converts a distance to a snapped duration, relative to the beat length at the given reference time.
    /// </summary>
    public abstract double GetSnappedDurationFromDistance(double referenceTime, float distance);

    /// <summary>
    /// Converts an unsnapped distance to a snapped distance, relative to the beat length at the given reference time.
    /// </summary>
    public abstract float GetSnappedDistanceFromDistance(double referenceTime, float distance);

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
namespace l2pvp
{
class BlowfishEngine
{
// Methods
//public BlowfishEngine();
//private void Bits32ToBytes(uint in_b, byte[] b, uint offset);
//private uint BytesTo32bits(byte[] b, uint i);
//private void decryptBlock(byte[] src, uint srcIndex, byte[] dst, uint dstIndex);
//private void encryptBlock(byte[] src, uint srcIndex, byte[] dst, uint dstIndex);
//private uint F(uint x);
//public string getAlgorithmName();
//public int getBlockSize();
//public void init(bool pEncrypting, byte[] key);
//public void processBigBlock(byte[] in_b, uint inOff, byte[] out_b, uint outOff, int len);
//public int processBlock(byte[] in_b, uint inOff, byte[] out_b, uint outOff);
//private void processTable(uint xl, uint xr, uint[] table);
//public void reset();
//private void setKey(byte[] key);
// Fields
private bool encrypting;
private uint[] P;
private uint[] S0;
private uint[] S1;
private uint[] S2;
private uint[] S3;
private byte[] workingKey;
private static uint[] KP = new uint[] {
0x243f6a88, 0x85a308d3, 0x13198a2e, 0x3707344, 0xa4093822, 0x299f31d0, 0x82efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
0x9216d5d9, 0x8979fb1b
};
private static uint[] KS0 = new uint[] {
0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e,
0xd95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e,
0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a,
0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032,
0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0xf6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0,
0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe,
0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x75372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7,
0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x4c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09,
0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8,
0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573,
0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8,
0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c,
0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9,
0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af,
0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x2e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915,
0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x8ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x8ba4799, 0x6e85076a
};
private static uint[] KS1 = new uint[] {
0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e,
0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x21ecc5e, 0x9686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1,
0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd,
0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0xfd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331,
0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87,
0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd,
0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x43556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3,
0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960,
0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x18cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84,
0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e,
0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281,
0xe358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73,
0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250,
0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x95bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0xc55f5ea, 0x1dadf43e, 0x233f7061,
0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc,
0x9e447a2e, 0xc3453484, 0xfdd56705, 0xe1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7
};
// KS2: third block of the standard Blowfish initialisation constants, copied
// verbatim into the S2 S-box by setKey() before the key is mixed in.
// NOTE(review): entries shorter than 8 hex digits are the published constants
// with leading zeros stripped by the decompiler (e.g. 0x3bd9785) — TODO confirm
// against a reference table before editing any value.
private static uint[] KS2 = new uint[] {
    0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840,
    0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x3bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0xa2c86da, 0xe9b66dfb,
    0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b,
    0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b,
    0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x4272f70, 0x80bb155c, 0x5282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc,
    0x7f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0xe12b4c2, 0x2e1329e, 0xaf664fd1, 0xcad18115,
    0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e,
    0xa476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b,
    0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c,
    0x6a124237, 0xb79251e7, 0x6a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0xa121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe,
    0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61,
    0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c,
    0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x9f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0xba5a4df, 0xa186f20f, 0x2868f169,
    0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0xde6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x6058aa, 0x30dc7d62,
    0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc,
    0xed545578, 0x8fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0
};
// KS3: fourth block of the standard Blowfish initialisation constants, copied
// verbatim into the S3 S-box by setKey() before the key is mixed in.
// NOTE(review): as with KS2, short entries look like leading-zero-stripped
// decompiler output — confirm against a reference table before editing.
private static uint[] KS3 = new uint[] {
    0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4,
    0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22,
    0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59,
    0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x22b8b51, 0x96d5ac3a, 0x17da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c,
    0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x3a16125, 0x564f0bd,
    0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x3563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f,
    0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x9072166,
    0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47,
    0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x115af84, 0xe1b00428, 0x95983a1d, 0x6b89fb4, 0xce6ea048,
    0x6f3f3b82, 0x3520ab82, 0x11a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7,
    0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0xf91fc71, 0x9b941525,
    0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e,
    0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0xfe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299,
    0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a,
    0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060,
    0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x2fb8a8c, 0x1c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6
};
// Blowfish algorithm parameters. These values never change at runtime, so
// they are declared const instead of the mutable static fields the
// decompiler produced (which any code could accidentally overwrite).
private const int ROUNDS = 0x10;      // 16 Feistel rounds, per the Blowfish design
private const int BLOCK_SIZE = 8;     // 64-bit cipher block, in bytes
private const int SBOX_SK = 0x100;    // 256 entries per S-box
private const int P_SZ = ROUNDS + 2;  // P-array length: one subkey per round + 2 whitening words
/// <summary>
/// Creates an engine with freshly allocated, not-yet-keyed subkey tables.
/// <c>init</c> must be called with a key before any block is processed.
/// </summary>
public BlowfishEngine()
{
    encrypting = false;
    workingKey = null;
    S0 = new uint[BlowfishEngine.SBOX_SK];
    S1 = new uint[BlowfishEngine.SBOX_SK];
    S2 = new uint[BlowfishEngine.SBOX_SK];
    S3 = new uint[BlowfishEngine.SBOX_SK];
    P = new uint[BlowfishEngine.P_SZ];
}
/// <summary>
/// Writes the 32-bit value <paramref name="in_b"/> into <paramref name="b"/>
/// starting at <paramref name="offset"/>, low byte first (little-endian).
/// </summary>
private void Bits32ToBytes(uint in_b, byte[] b, uint offset)
{
    // The decompiled original routed every array index through an
    // "(int)((IntPtr)…)" cast chain; plain uint indexing is equivalent.
    b[offset] = (byte)in_b;
    b[offset + 1] = (byte)(in_b >> 8);
    b[offset + 2] = (byte)(in_b >> 0x10);
    b[offset + 3] = (byte)(in_b >> 0x18);
}
/// <summary>
/// Reads four bytes from <paramref name="b"/> starting at <paramref name="i"/>
/// and assembles a 32-bit value, little-endian (b[i] is the low byte).
/// Inverse of <c>Bits32ToBytes</c>.
/// </summary>
private uint BytesTo32bits(byte[] b, uint i)
{
    // byte is already unsigned, so the original "& 0xff" masks were no-ops,
    // and the IntPtr index casts were decompiler noise.
    return ((uint)b[i + 3] << 0x18)
         | ((uint)b[i + 2] << 0x10)
         | ((uint)b[i + 1] << 8)
         | b[i];
}
/// <summary>
/// Decrypts one 8-byte block: applies the P-array subkeys in reverse order,
/// undoing the rounds performed by <c>encryptBlock</c>.
/// </summary>
private void decryptBlock(byte[] src, uint srcIndex, byte[] dst, uint dstIndex)
{
    uint xl = BytesTo32bits(src, srcIndex);
    uint xr = BytesTo32bits(src, srcIndex + 4);

    xl ^= P[ROUNDS + 1];
    for (int round = ROUNDS; round > 0; round -= 2)
    {
        xr ^= F(xl) ^ P[round];
        xl ^= F(xr) ^ P[round - 1];
    }
    xr ^= P[0];

    // Halves are written out swapped, mirroring the encrypt direction.
    Bits32ToBytes(xr, dst, dstIndex);
    Bits32ToBytes(xl, dst, dstIndex + 4);
}
/// <summary>
/// Encrypts one 8-byte block: initial P[0] whitening, 16 Feistel rounds
/// (two per loop iteration), then final whitening with P[ROUNDS + 1].
/// </summary>
private void encryptBlock(byte[] src, uint srcIndex, byte[] dst, uint dstIndex)
{
    uint xl = BytesTo32bits(src, srcIndex);
    uint xr = BytesTo32bits(src, srcIndex + 4);

    xl ^= P[0];
    for (int round = 1; round < ROUNDS; round += 2)
    {
        xr ^= F(xl) ^ P[round];
        xl ^= F(xr) ^ P[round + 1];
    }
    xr ^= P[ROUNDS + 1];

    // Halves are written out swapped, matching decryptBlock's input order.
    Bits32ToBytes(xr, dst, dstIndex);
    Bits32ToBytes(xl, dst, dstIndex + 4);
}
/// <summary>
/// The Blowfish round function: the four bytes of <paramref name="x"/> index
/// the four S-boxes, combined as ((S0[a] + S1[b]) ^ S2[c]) + S3[d], with all
/// addition wrapping mod 2^32.
/// </summary>
private uint F(uint x)
{
    // uint indexes arrays directly; the decompiler's IntPtr casts added nothing.
    return ((S0[x >> 0x18] + S1[(x >> 0x10) & 0xff]) ^ S2[(x >> 8) & 0xff]) + S3[x & 0xff];
}
/// <summary>Returns the cipher name, "Blowfish".</summary>
public string getAlgorithmName() => "Blowfish";
/// <summary>Returns the cipher block size in bytes (8).</summary>
public int getBlockSize() => BlowfishEngine.BLOCK_SIZE;
/// <summary>
/// Keys the engine and selects direction: pass <c>true</c> for
/// <paramref name="pEncrypting"/> to encrypt, <c>false</c> to decrypt.
/// Runs the full Blowfish key schedule over <paramref name="key"/>.
/// </summary>
public void init(bool pEncrypting, byte[] key)
{
    encrypting = pEncrypting;
    workingKey = key;
    setKey(key);
}
/// <summary>
/// Processes <paramref name="len"/> bytes as a run of consecutive 8-byte
/// blocks. <paramref name="len"/> is expected to be a multiple of the block
/// size; a non-positive length results in no work.
/// </summary>
public void processBigBlock(byte[] in_b, uint inOff, byte[] out_b, uint outOff, int len)
{
    for (uint offset = 0; offset < len; offset += 8)
    {
        processBlock(in_b, inOff + offset, out_b, outOff + offset);
    }
}
/// <summary>
/// Transforms a single 8-byte block from <paramref name="in_b"/> at
/// <paramref name="inOff"/> into <paramref name="out_b"/> at
/// <paramref name="outOff"/>, encrypting or decrypting according to the mode
/// selected in <c>init</c>.
/// </summary>
/// <returns>The number of bytes processed (always the 8-byte block size).</returns>
/// <exception cref="InvalidOperationException">The engine has not been keyed via <c>init</c>.</exception>
/// <exception cref="ArgumentException">Fewer than 8 bytes are available at the given offset.</exception>
public int processBlock(byte[] in_b, uint inOff, byte[] out_b, uint outOff)
{
    // The decompiled original merely wrote a diagnostic to the console and
    // carried on, which either produced garbage output (unkeyed engine) or
    // crashed later with IndexOutOfRangeException (short buffer). Fail fast
    // with a descriptive exception instead.
    if (workingKey == null)
    {
        throw new InvalidOperationException("Blowfish engine not initialised: key = null");
    }
    if (inOff + BlowfishEngine.BLOCK_SIZE > in_b.Length)
    {
        throw new ArgumentException("block size > in buffer len", nameof(in_b));
    }
    if (outOff + BlowfishEngine.BLOCK_SIZE > out_b.Length)
    {
        throw new ArgumentException("block size > out buffer len", nameof(out_b));
    }

    if (encrypting)
    {
        encryptBlock(in_b, inOff, out_b, outOff);
    }
    else
    {
        decryptBlock(in_b, inOff, out_b, outOff);
    }
    return BlowfishEngine.BLOCK_SIZE;
}
// Key-schedule mixing step: starting from (xl, xr), repeatedly encrypts the
// running value with the current P/S state (same rounds as encryptBlock) and
// writes each ciphertext pair into consecutive slots of 'table'. Each output
// pair is chained back in as the next input, so every entry depends on all
// earlier ones. Called over P and then each S-box in turn by setKey().
private void processTable(uint xl, uint xr, uint[] table)
{
    int num1 = table.Length;
    for (int num2 = 0; num2 < num1; num2 += 2)
    {
        // One full Blowfish encryption of (xl, xr).
        xl ^= this.P[0];
        for (int num3 = 1; num3 < BlowfishEngine.ROUNDS; num3 += 2)
        {
            xr ^= this.F(xl) ^ this.P[num3];
            xl ^= this.F(xr) ^ this.P[num3 + 1];
        }
        xr ^= this.P[BlowfishEngine.ROUNDS + 1];
        table[num2] = xr;
        table[num2 + 1] = xl;
        // Chain the just-produced ciphertext into the next iteration with the
        // halves swapped; the order of these two lines is load-bearing.
        xr = xl;
        xl = table[num2];
    }
}
// Intentionally a no-op: this engine keeps no per-block chaining state
// (blocks are processed independently), so there is nothing to clear
// between messages. The keyed P/S tables are preserved.
public void reset()
{
}
/// <summary>
/// Runs the Blowfish key schedule: loads the standard P/S constant tables,
/// XORs the (cyclically repeated) key into the P-array as big-endian 32-bit
/// words, then repeatedly encrypts a chained zero block to mix the key into
/// P and all four S-boxes.
/// </summary>
private void setKey(byte[] key)
{
    // Start from the published Blowfish initialisation constants.
    for (int i = 0; i < BlowfishEngine.SBOX_SK; i++)
    {
        S0[i] = BlowfishEngine.KS0[i];
        S1[i] = BlowfishEngine.KS1[i];
        S2[i] = BlowfishEngine.KS2[i];
        S3[i] = BlowfishEngine.KS3[i];
    }
    for (int i = 0; i < BlowfishEngine.P_SZ; i++)
    {
        P[i] = BlowfishEngine.KP[i];
    }

    // Fold the key into the P-array four bytes at a time, cycling through
    // the key bytes as often as needed.
    int keyLength = key.Length;
    int keyIndex = 0;
    for (int i = 0; i < BlowfishEngine.P_SZ; i++)
    {
        uint word = 0;
        for (int j = 0; j < 4; j++)
        {
            word = (word << 8) | key[keyIndex++];
            if (keyIndex >= keyLength)
            {
                keyIndex = 0;
            }
        }
        // The decompiled "(numArray1 = this.P)[(int)(ptr1 = …)] = …" dance
        // was just an in-place XOR.
        P[i] ^= word;
    }

    // Mix the keyed state through every subkey table, chaining the last
    // output pair of each table into the next.
    processTable(0, 0, P);
    processTable(P[BlowfishEngine.P_SZ - 2], P[BlowfishEngine.P_SZ - 1], S0);
    processTable(S0[BlowfishEngine.SBOX_SK - 2], S0[BlowfishEngine.SBOX_SK - 1], S1);
    processTable(S1[BlowfishEngine.SBOX_SK - 2], S1[BlowfishEngine.SBOX_SK - 1], S2);
    processTable(S2[BlowfishEngine.SBOX_SK - 2], S2[BlowfishEngine.SBOX_SK - 1], S3);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using Microsoft.Reflection;
using Microsoft.Win32;
namespace System.Diagnostics.Tracing
{
// This partial of EventSource contains the activity-ID support (getting and
// setting the per-thread ETW activity GUID) plus small reflection/resource
// helpers. Behavior varies by build configuration via the #if blocks below;
// note that brace pairing in SetCurrentThreadActivityId differs between the
// FEATURE_PERFTRACING and PLATFORM_WINDOWS configurations.
public partial class EventSource
{
#if FEATURE_MANAGED_ETW && FEATURE_PERFTRACING
    // For non-Windows, we use a thread-local variable to hold the activity ID.
    // On Windows, ETW has it's own thread-local variable and we participate in its use.
    [ThreadStatic]
    private static Guid s_currentThreadActivityId;
#endif // FEATURE_MANAGED_ETW && FEATURE_PERFTRACING
    // ActivityID support (see also WriteEventWithRelatedActivityIdCore)
    /// <summary>
    /// When a thread starts work that is on behalf of 'something else' (typically another
    /// thread or network request) it should mark the thread as working on that other work.
    /// This API marks the current thread as working on activity 'activityID'. This API
    /// should be used when the caller knows the thread's current activity (the one being
    /// overwritten) has completed. Otherwise, callers should prefer the overload that
    /// return the oldActivityThatWillContinue (below).
    ///
    /// All events created with the EventSource on this thread are also tagged with the
    /// activity ID of the thread.
    ///
    /// It is common, and good practice after setting the thread to an activity to log an event
    /// with a 'start' opcode to indicate that precise time/thread where the new activity
    /// started.
    /// </summary>
    /// <param name="activityId">A Guid that represents the new activity with which to mark
    /// the current thread</param>
    public static void SetCurrentThreadActivityId(Guid activityId)
    {
        if (TplEtwProvider.Log != null)
            TplEtwProvider.Log.SetActivityId(activityId);
#if FEATURE_MANAGED_ETW
#if FEATURE_ACTIVITYSAMPLING
        Guid newId = activityId;
#endif // FEATURE_ACTIVITYSAMPLING
        // We ignore errors to keep with the convention that EventSources do not throw errors.
        // Note we can't access m_throwOnWrites because this is a static method.
#if FEATURE_PERFTRACING
        s_currentThreadActivityId = activityId;
#elif PLATFORM_WINDOWS
        // On the Windows/ETW path EventActivityIdControl swaps the ID in place:
        // after a successful (== 0) call, 'activityId' holds the PREVIOUS activity ID.
        if (UnsafeNativeMethods.ManifestEtw.EventActivityIdControl(
            UnsafeNativeMethods.ManifestEtw.ActivityControl.EVENT_ACTIVITY_CTRL_GET_SET_ID,
            ref activityId) == 0)
#endif // FEATURE_PERFTRACING
        {
#if FEATURE_ACTIVITYSAMPLING
            var activityDying = s_activityDying;
            if (activityDying != null && newId != activityId)
            {
                if (activityId == Guid.Empty)
                {
                    activityId = FallbackActivityId;
                }
                // OutputDebugString(string.Format("Activity dying: {0} -> {1}", activityId, newId));
                activityDying(activityId); // This is actually the OLD activity ID.
            }
#endif // FEATURE_ACTIVITYSAMPLING
        }
#endif // FEATURE_MANAGED_ETW
    }
    /// <summary>
    /// When a thread starts work that is on behalf of 'something else' (typically another
    /// thread or network request) it should mark the thread as working on that other work.
    /// This API marks the current thread as working on activity 'activityID'. It returns
    /// whatever activity the thread was previously marked with. There is a convention that
    /// callers can assume that callees restore this activity mark before the callee returns.
    /// To encourage this, this API returns the old activity, so that it can be restored later.
    ///
    /// All events created with the EventSource on this thread are also tagged with the
    /// activity ID of the thread.
    ///
    /// It is common, and good practice after setting the thread to an activity to log an event
    /// with a 'start' opcode to indicate that precise time/thread where the new activity
    /// started.
    /// </summary>
    /// <param name="activityId">A Guid that represents the new activity with which to mark
    /// the current thread</param>
    /// <param name="oldActivityThatWillContinue">The Guid that represents the current activity
    /// which will continue at some point in the future, on the current thread</param>
    public static void SetCurrentThreadActivityId(Guid activityId, out Guid oldActivityThatWillContinue)
    {
        // Default: if no provider path below overwrites it, the new ID is
        // reported back as the "old" one.
        oldActivityThatWillContinue = activityId;
#if FEATURE_MANAGED_ETW
        // We ignore errors to keep with the convention that EventSources do not throw errors.
        // Note we can't access m_throwOnWrites because this is a static method.
#if FEATURE_PERFTRACING
        oldActivityThatWillContinue = s_currentThreadActivityId;
        s_currentThreadActivityId = activityId;
#elif PLATFORM_WINDOWS
        UnsafeNativeMethods.ManifestEtw.EventActivityIdControl(
            UnsafeNativeMethods.ManifestEtw.ActivityControl.EVENT_ACTIVITY_CTRL_GET_SET_ID,
            ref oldActivityThatWillContinue);
#endif // FEATURE_PERFTRACING
#endif // FEATURE_MANAGED_ETW
        // We don't call the activityDying callback here because the caller has declared that
        // it is not dying.
        if (TplEtwProvider.Log != null)
            TplEtwProvider.Log.SetActivityId(activityId);
    }
    /// <summary>
    /// Retrieves the ETW activity ID associated with the current thread.
    /// </summary>
    public static Guid CurrentThreadActivityId
    {
        get
        {
            // We ignore errors to keep with the convention that EventSources do not throw
            // errors. Note we can't access m_throwOnWrites because this is a static method.
            Guid retVal = new Guid();
#if FEATURE_MANAGED_ETW
#if FEATURE_PERFTRACING
            retVal = s_currentThreadActivityId;
#elif PLATFORM_WINDOWS
            UnsafeNativeMethods.ManifestEtw.EventActivityIdControl(
                UnsafeNativeMethods.ManifestEtw.ActivityControl.EVENT_ACTIVITY_CTRL_GET_ID,
                ref retVal);
#endif // FEATURE_PERFTRACING
#endif // FEATURE_MANAGED_ETW
            return retVal;
        }
    }
    // Number of payload parameters declared by the event's method signature.
    private int GetParameterCount(EventMetadata eventData)
    {
        return eventData.Parameters.Length;
    }
    // Declared CLR type of the given payload parameter.
    private Type GetDataType(EventMetadata eventData, int parameterId)
    {
        return eventData.Parameters[parameterId].ParameterType;
    }
    // Mirrors Resources.GetResourceString below; both format a localized
    // resource string by key.
    private static string GetResourceString(string key, params object[] args)
    {
        return SR.Format(SR.GetResourceString(key), args);
    }
    // NOTE(review): not referenced anywhere in this partial — presumably read
    // by another partial of EventSource; confirm before removing.
    private static readonly bool m_EventSourcePreventRecursion = false;
}
internal partial class ManifestBuilder
{
    /// <summary>
    /// Maps a CLR type to the ETW manifest ("win:*") type name used when
    /// emitting event payload metadata. Unsupported types report a manifest
    /// error and yield an empty string.
    /// </summary>
    private string GetTypeNameHelper(Type type)
    {
        switch (type.GetTypeCode())
        {
            case TypeCode.Boolean:
                return "win:Boolean";
            case TypeCode.SByte:
                return "win:Int8";
            case TypeCode.Byte:
                return "win:UInt8";
            case TypeCode.Int16:
                return "win:Int16";
            case TypeCode.Char:
            case TypeCode.UInt16:
                // char is serialized as its 16-bit code unit.
                return "win:UInt16";
            case TypeCode.Int32:
                return "win:Int32";
            case TypeCode.UInt32:
                return "win:UInt32";
            case TypeCode.Int64:
                return "win:Int64";
            case TypeCode.UInt64:
                return "win:UInt64";
            case TypeCode.Single:
                return "win:Float";
            case TypeCode.Double:
                return "win:Double";
            case TypeCode.String:
                return "win:UnicodeString";
            case TypeCode.DateTime:
                return "win:FILETIME";
            default:
                // Non-primitive types handled specially by the manifest format.
                if (type == typeof(Guid))
                    return "win:GUID";
                if (type == typeof(IntPtr))
                    return "win:Pointer";
                if ((type.IsArray || type.IsPointer) && type.GetElementType() == typeof(byte))
                    return "win:Binary";
                ManifestError(Resources.GetResourceString("EventSource_UnsupportedEventTypeInManifest", type.Name), true);
                return string.Empty;
        }
    }
}
internal partial class EventProvider
{
    /// <summary>
    /// Calls EventSetInformation for this provider registration. Once the
    /// native export has been found missing (down-level OS), all later calls
    /// short-circuit to ERROR_NOT_SUPPORTED without retrying.
    /// </summary>
    internal unsafe int SetInformation(
        UnsafeNativeMethods.ManifestEtw.EVENT_INFO_CLASS eventInfoClass,
        IntPtr data,
        uint dataSize)
    {
        if (m_setInformationMissing)
        {
            return UnsafeNativeMethods.ManifestEtw.ERROR_NOT_SUPPORTED;
        }

        try
        {
            return UnsafeNativeMethods.ManifestEtw.EventSetInformation(
                m_regHandle,
                eventInfoClass,
                (void*)data,
                (int)dataSize);
        }
        catch (TypeLoadException)
        {
            // EventSetInformation is not present on this OS; remember that so
            // we do not pay the exception cost on every subsequent call.
            m_setInformationMissing = true;
            return UnsafeNativeMethods.ManifestEtw.ERROR_NOT_SUPPORTED;
        }
    }
}
internal static class Resources
{
    /// <summary>
    /// Looks up the resource string for <paramref name="key"/> and formats it
    /// with <paramref name="args"/>.
    /// </summary>
    internal static string GetResourceString(string key, params object[] args)
        => SR.Format(SR.GetResourceString(key), args);
}
}
| |
// Generated from https://github.com/nuke-build/nuke/blob/master/source/Nuke.Common/Tools/MSpec/MSpec.json
using JetBrains.Annotations;
using Newtonsoft.Json;
using Nuke.Common;
using Nuke.Common.Execution;
using Nuke.Common.Tooling;
using Nuke.Common.Tools;
using Nuke.Common.Utilities.Collections;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using System.Text;
namespace Nuke.Common.Tools.MSpec
{
/// <summary>
/// <p>MSpec is called a 'context/specification' test framework because of the 'grammar' that is used in describing and coding the tests or 'specs'.</p>
/// <p>For more details, visit the <a href="https://github.com/machine/machine.specifications">official website</a>.</p>
/// </summary>
[PublicAPI]
[ExcludeFromCodeCoverage]
public static partial class MSpecTasks
{
    /// <summary>
    /// Path to the MSpec executable: the <c>MSPEC_EXE</c> environment override
    /// if present, otherwise the resolved tool path.
    /// </summary>
    public static string MSpecPath =>
        ToolPathResolver.TryGetEnvironmentExecutable("MSPEC_EXE") ??
        GetToolPath();
    public static Action<OutputType, string> MSpecLogger { get; set; } = ProcessTasks.DefaultLogger;
    /// <summary>
    /// <p>MSpec is called a 'context/specification' test framework because of the 'grammar' that is used in describing and coding the tests or 'specs'.</p>
    /// <p>For more details, visit the <a href="https://github.com/machine/machine.specifications">official website</a>.</p>
    /// </summary>
    public static IReadOnlyCollection<Output> MSpec(string arguments, string workingDirectory = null, IReadOnlyDictionary<string, string> environmentVariables = null, int? timeout = null, bool? logOutput = null, bool? logInvocation = null, Func<string, string> outputFilter = null)
    {
        using var process = ProcessTasks.StartProcess(MSpecPath, arguments, workingDirectory, environmentVariables, timeout, logOutput, logInvocation, MSpecLogger, outputFilter);
        process.AssertZeroExitCode();
        return process.Output;
    }
    /// <summary>
    /// <p>MSpec is called a 'context/specification' test framework because of the 'grammar' that is used in describing and coding the tests or 'specs'.</p>
    /// <p>For more details, visit the <a href="https://github.com/machine/machine.specifications">official website</a>.</p>
    /// </summary>
    /// <remarks>
    /// <p>This is a <a href="http://www.nuke.build/docs/authoring-builds/cli-tools.html#fluent-apis">CLI wrapper with fluent API</a> that allows to modify the following arguments:</p>
    /// <ul>
    /// <li><c>&lt;assemblies&gt;</c> via <see cref="MSpecSettings.Assemblies"/></li>
    /// <li><c>--appveyor</c> via <see cref="MSpecSettings.AppVeyor"/></li>
    /// <li><c>--html</c> via <see cref="MSpecSettings.HtmlOutput"/></li>
    /// <li><c>--no-appveyor-autodetect</c> via <see cref="MSpecSettings.NoAppVeyor"/></li>
    /// <li><c>--no-color</c> via <see cref="MSpecSettings.NoColor"/></li>
    /// <li><c>--no-teamcity-autodetect</c> via <see cref="MSpecSettings.NoTeamCity"/></li>
    /// <li><c>--progress</c> via <see cref="MSpecSettings.DottedProgress"/></li>
    /// <li><c>--silent</c> via <see cref="MSpecSettings.Silent"/></li>
    /// <li><c>--teamcity</c> via <see cref="MSpecSettings.TeamCity"/></li>
    /// <li><c>--timeinfo</c> via <see cref="MSpecSettings.TimeInfo"/></li>
    /// <li><c>--xml</c> via <see cref="MSpecSettings.XmlOutput"/></li>
    /// <li><c>-f</c> via <see cref="MSpecSettings.Filters"/></li>
    /// <li><c>-i</c> via <see cref="MSpecSettings.Includes"/></li>
    /// <li><c>-x</c> via <see cref="MSpecSettings.Excludes"/></li>
    /// </ul>
    /// </remarks>
    public static IReadOnlyCollection<Output> MSpec(MSpecSettings toolSettings = null)
    {
        toolSettings ??= new MSpecSettings();
        using var process = ProcessTasks.StartProcess(toolSettings);
        process.AssertZeroExitCode();
        return process.Output;
    }
    /// <summary>
    /// <p>MSpec is called a 'context/specification' test framework because of the 'grammar' that is used in describing and coding the tests or 'specs'.</p>
    /// <p>For more details, visit the <a href="https://github.com/machine/machine.specifications">official website</a>.</p>
    /// </summary>
    /// <remarks>
    /// <p>This is a <a href="http://www.nuke.build/docs/authoring-builds/cli-tools.html#fluent-apis">CLI wrapper with fluent API</a> that allows to modify the following arguments:</p>
    /// <ul>
    /// <li><c>&lt;assemblies&gt;</c> via <see cref="MSpecSettings.Assemblies"/></li>
    /// <li><c>--appveyor</c> via <see cref="MSpecSettings.AppVeyor"/></li>
    /// <li><c>--html</c> via <see cref="MSpecSettings.HtmlOutput"/></li>
    /// <li><c>--no-appveyor-autodetect</c> via <see cref="MSpecSettings.NoAppVeyor"/></li>
    /// <li><c>--no-color</c> via <see cref="MSpecSettings.NoColor"/></li>
    /// <li><c>--no-teamcity-autodetect</c> via <see cref="MSpecSettings.NoTeamCity"/></li>
    /// <li><c>--progress</c> via <see cref="MSpecSettings.DottedProgress"/></li>
    /// <li><c>--silent</c> via <see cref="MSpecSettings.Silent"/></li>
    /// <li><c>--teamcity</c> via <see cref="MSpecSettings.TeamCity"/></li>
    /// <li><c>--timeinfo</c> via <see cref="MSpecSettings.TimeInfo"/></li>
    /// <li><c>--xml</c> via <see cref="MSpecSettings.XmlOutput"/></li>
    /// <li><c>-f</c> via <see cref="MSpecSettings.Filters"/></li>
    /// <li><c>-i</c> via <see cref="MSpecSettings.Includes"/></li>
    /// <li><c>-x</c> via <see cref="MSpecSettings.Excludes"/></li>
    /// </ul>
    /// </remarks>
    public static IReadOnlyCollection<Output> MSpec(Configure<MSpecSettings> configurator)
    {
        var settings = configurator(new MSpecSettings());
        return MSpec(settings);
    }
    /// <summary>
    /// <p>MSpec is called a 'context/specification' test framework because of the 'grammar' that is used in describing and coding the tests or 'specs'.</p>
    /// <p>For more details, visit the <a href="https://github.com/machine/machine.specifications">official website</a>.</p>
    /// </summary>
    /// <remarks>
    /// <p>This is a <a href="http://www.nuke.build/docs/authoring-builds/cli-tools.html#fluent-apis">CLI wrapper with fluent API</a> that allows to modify the following arguments:</p>
    /// <ul>
    /// <li><c>&lt;assemblies&gt;</c> via <see cref="MSpecSettings.Assemblies"/></li>
    /// <li><c>--appveyor</c> via <see cref="MSpecSettings.AppVeyor"/></li>
    /// <li><c>--html</c> via <see cref="MSpecSettings.HtmlOutput"/></li>
    /// <li><c>--no-appveyor-autodetect</c> via <see cref="MSpecSettings.NoAppVeyor"/></li>
    /// <li><c>--no-color</c> via <see cref="MSpecSettings.NoColor"/></li>
    /// <li><c>--no-teamcity-autodetect</c> via <see cref="MSpecSettings.NoTeamCity"/></li>
    /// <li><c>--progress</c> via <see cref="MSpecSettings.DottedProgress"/></li>
    /// <li><c>--silent</c> via <see cref="MSpecSettings.Silent"/></li>
    /// <li><c>--teamcity</c> via <see cref="MSpecSettings.TeamCity"/></li>
    /// <li><c>--timeinfo</c> via <see cref="MSpecSettings.TimeInfo"/></li>
    /// <li><c>--xml</c> via <see cref="MSpecSettings.XmlOutput"/></li>
    /// <li><c>-f</c> via <see cref="MSpecSettings.Filters"/></li>
    /// <li><c>-i</c> via <see cref="MSpecSettings.Includes"/></li>
    /// <li><c>-x</c> via <see cref="MSpecSettings.Excludes"/></li>
    /// </ul>
    /// </remarks>
    public static IEnumerable<(MSpecSettings Settings, IReadOnlyCollection<Output> Output)> MSpec(CombinatorialConfigure<MSpecSettings> configurator, int degreeOfParallelism = 1, bool completeOnFailure = false)
    {
        return configurator.Invoke(MSpec, MSpecLogger, degreeOfParallelism, completeOnFailure);
    }
}
#region MSpecSettings
/// <summary>
/// Used within <see cref="MSpecTasks"/>.
/// </summary>
[PublicAPI]
[ExcludeFromCodeCoverage]
[Serializable]
// NOTE(review): generated settings class. Nuke's fluent extension methods
// mutate the *Internal lists and rely on these exact member names; the order
// of .Add(...) calls in ConfigureProcessArguments defines the CLI argument
// order — do not reorder casually.
public partial class MSpecSettings : ToolSettings
{
    /// <summary>
    /// Path to the MSpec executable.
    /// </summary>
    public override string ProcessToolPath => base.ProcessToolPath ?? GetProcessToolPath();
    public override Action<OutputType, string> ProcessCustomLogger => MSpecTasks.MSpecLogger;
    /// <summary>
    /// Assemblies with tests to be executed.
    /// </summary>
    public virtual IReadOnlyList<string> Assemblies => AssembliesInternal.AsReadOnly();
    internal List<string> AssembliesInternal { get; set; } = new List<string>();
    /// <summary>
    /// Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.
    /// </summary>
    public virtual IReadOnlyList<string> Filters => FiltersInternal.AsReadOnly();
    internal List<string> FiltersInternal { get; set; } = new List<string>();
    /// <summary>
    /// Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.
    /// </summary>
    public virtual IReadOnlyList<string> Includes => IncludesInternal.AsReadOnly();
    internal List<string> IncludesInternal { get; set; } = new List<string>();
    /// <summary>
    /// Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.
    /// </summary>
    public virtual IReadOnlyList<string> Excludes => ExcludesInternal.AsReadOnly();
    internal List<string> ExcludesInternal { get; set; } = new List<string>();
    /// <summary>
    /// Outputs the HTML report to path, one-per-assembly w/ index.html (if directory, otherwise all are in one file). Ex. <c>--html=output/reports/</c>
    /// </summary>
    public virtual string HtmlOutput { get; internal set; }
    /// <summary>
    /// Outputs the XML report to the file referenced by the path. Ex. <c>--xml=output/reports/MSpecResults.xml</c>
    /// </summary>
    public virtual string XmlOutput { get; internal set; }
    /// <summary>
    /// Reporting for TeamCity CI integration (also auto-detected).
    /// </summary>
    public virtual bool? TeamCity { get; internal set; }
    /// <summary>
    /// Disables TeamCity autodetection.
    /// </summary>
    public virtual bool? NoTeamCity { get; internal set; }
    /// <summary>
    /// Reporting for AppVeyor CI integration (also auto-detected).
    /// </summary>
    public virtual bool? AppVeyor { get; internal set; }
    /// <summary>
    /// Disables AppVeyor autodetection.
    /// </summary>
    public virtual bool? NoAppVeyor { get; internal set; }
    /// <summary>
    /// Shows time-related information in HTML output.
    /// </summary>
    public virtual bool? TimeInfo { get; internal set; }
    /// <summary>
    /// Suppress progress output (print fatal errors, failures and summary).
    /// </summary>
    public virtual bool? Silent { get; internal set; }
    /// <summary>
    /// Print dotted progress output.
    /// </summary>
    public virtual bool? DottedProgress { get; internal set; }
    /// <summary>
    /// Suppress colored console output.
    /// </summary>
    public virtual bool? NoColor { get; internal set; }
    // Serializes the configured properties into the mspec.exe command line;
    // unset (null/empty) values are omitted by Arguments.Add.
    protected override Arguments ConfigureProcessArguments(Arguments arguments)
    {
        arguments
            .Add("{value}", Assemblies, separator: ' ')
            .Add("-f={value}", Filters, separator: ',')
            .Add("-i={value}", Includes, separator: ',')
            .Add("-x={value}", Excludes, separator: ',')
            .Add("--html={value}", HtmlOutput)
            .Add("--xml={value}", XmlOutput)
            .Add("--teamcity", TeamCity)
            .Add("--no-teamcity-autodetect", NoTeamCity)
            .Add("--appveyor", AppVeyor)
            .Add("--no-appveyor-autodetect", NoAppVeyor)
            .Add("--timeinfo", TimeInfo)
            .Add("--silent", Silent)
            .Add("--progress", DottedProgress)
            .Add("--no-color", NoColor);
        return base.ConfigureProcessArguments(arguments);
    }
}
#endregion
#region MSpecSettingsExtensions
/// <summary>
/// Used within <see cref="MSpecTasks"/>.
/// </summary>
[PublicAPI]
[ExcludeFromCodeCoverage]
public static partial class MSpecSettingsExtensions
{
#region Assemblies
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Assemblies"/> to a new list</em></p>
/// <p>Assemblies with tests to be executed.</p>
/// </summary>
[Pure]
public static T SetAssemblies<T>(this T toolSettings, params string[] assemblies) where T : MSpecSettings
{
    var settings = toolSettings.NewInstance();
    settings.AssembliesInternal = assemblies.ToList();
    return settings;
}
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Assemblies"/> to a new list</em></p>
/// <p>Assemblies with tests to be executed.</p>
/// </summary>
[Pure]
public static T SetAssemblies<T>(this T toolSettings, IEnumerable<string> assemblies) where T : MSpecSettings
{
    var settings = toolSettings.NewInstance();
    settings.AssembliesInternal = assemblies.ToList();
    return settings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Assemblies"/></em></p>
/// <p>Assemblies with tests to be executed.</p>
/// </summary>
[Pure]
public static T AddAssemblies<T>(this T toolSettings, params string[] assemblies) where T : MSpecSettings
{
    var settings = toolSettings.NewInstance();
    settings.AssembliesInternal.AddRange(assemblies);
    return settings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Assemblies"/></em></p>
/// <p>Assemblies with tests to be executed.</p>
/// </summary>
[Pure]
public static T AddAssemblies<T>(this T toolSettings, IEnumerable<string> assemblies) where T : MSpecSettings
{
    var settings = toolSettings.NewInstance();
    settings.AssembliesInternal.AddRange(assemblies);
    return settings;
}
/// <summary>
/// <p><em>Clears <see cref="MSpecSettings.Assemblies"/></em></p>
/// <p>Assemblies with tests to be executed.</p>
/// </summary>
[Pure]
public static T ClearAssemblies<T>(this T toolSettings) where T : MSpecSettings
{
    var settings = toolSettings.NewInstance();
    settings.AssembliesInternal.Clear();
    return settings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Assemblies"/></em></p>
/// <p>Assemblies with tests to be executed.</p>
/// </summary>
[Pure]
public static T RemoveAssemblies<T>(this T toolSettings, params string[] assemblies) where T : MSpecSettings
{
    var settings = toolSettings.NewInstance();
    var removals = new HashSet<string>(assemblies);
    settings.AssembliesInternal.RemoveAll(x => removals.Contains(x));
    return settings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Assemblies"/></em></p>
/// <p>Assemblies with tests to be executed.</p>
/// </summary>
[Pure]
public static T RemoveAssemblies<T>(this T toolSettings, IEnumerable<string> assemblies) where T : MSpecSettings
{
    var settings = toolSettings.NewInstance();
    var removals = new HashSet<string>(assemblies);
    settings.AssembliesInternal.RemoveAll(x => removals.Contains(x));
    return settings;
}
#endregion
#region Filters
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Filters"/> to a new list</em></p>
/// <p>Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.</p>
/// </summary>
[Pure]
public static T SetFilters<T>(this T toolSettings, params string[] filters) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.FiltersInternal = filters.ToList();
return toolSettings;
}
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Filters"/> to a new list</em></p>
/// <p>Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.</p>
/// </summary>
[Pure]
public static T SetFilters<T>(this T toolSettings, IEnumerable<string> filters) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.FiltersInternal = filters.ToList();
return toolSettings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Filters"/></em></p>
/// <p>Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.</p>
/// </summary>
[Pure]
public static T AddFilters<T>(this T toolSettings, params string[] filters) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.FiltersInternal.AddRange(filters);
return toolSettings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Filters"/></em></p>
/// <p>Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.</p>
/// </summary>
[Pure]
public static T AddFilters<T>(this T toolSettings, IEnumerable<string> filters) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.FiltersInternal.AddRange(filters);
return toolSettings;
}
/// <summary>
/// <p><em>Clears <see cref="MSpecSettings.Filters"/></em></p>
/// <p>Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.</p>
/// </summary>
[Pure]
public static T ClearFilters<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.FiltersInternal.Clear();
return toolSettings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Filters"/></em></p>
/// <p>Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.</p>
/// </summary>
[Pure]
public static T RemoveFilters<T>(this T toolSettings, params string[] filters) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
var hashSet = new HashSet<string>(filters);
toolSettings.FiltersInternal.RemoveAll(x => hashSet.Contains(x));
return toolSettings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Filters"/></em></p>
/// <p>Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags.</p>
/// </summary>
[Pure]
public static T RemoveFilters<T>(this T toolSettings, IEnumerable<string> filters) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
var hashSet = new HashSet<string>(filters);
toolSettings.FiltersInternal.RemoveAll(x => hashSet.Contains(x));
return toolSettings;
}
#endregion
#region Includes
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Includes"/> to a new list</em></p>
/// <p>Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T SetIncludes<T>(this T toolSettings, params string[] includes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.IncludesInternal = includes.ToList();
return toolSettings;
}
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Includes"/> to a new list</em></p>
/// <p>Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T SetIncludes<T>(this T toolSettings, IEnumerable<string> includes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.IncludesInternal = includes.ToList();
return toolSettings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Includes"/></em></p>
/// <p>Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T AddIncludes<T>(this T toolSettings, params string[] includes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.IncludesInternal.AddRange(includes);
return toolSettings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Includes"/></em></p>
/// <p>Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T AddIncludes<T>(this T toolSettings, IEnumerable<string> includes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.IncludesInternal.AddRange(includes);
return toolSettings;
}
/// <summary>
/// <p><em>Clears <see cref="MSpecSettings.Includes"/></em></p>
/// <p>Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T ClearIncludes<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.IncludesInternal.Clear();
return toolSettings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Includes"/></em></p>
/// <p>Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T RemoveIncludes<T>(this T toolSettings, params string[] includes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
var hashSet = new HashSet<string>(includes);
toolSettings.IncludesInternal.RemoveAll(x => hashSet.Contains(x));
return toolSettings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Includes"/></em></p>
/// <p>Executes all specifications in contexts with these comma delimited tags. Ex. <c>-i 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T RemoveIncludes<T>(this T toolSettings, IEnumerable<string> includes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
var hashSet = new HashSet<string>(includes);
toolSettings.IncludesInternal.RemoveAll(x => hashSet.Contains(x));
return toolSettings;
}
#endregion
#region Excludes
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Excludes"/> to a new list</em></p>
/// <p>Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T SetExcludes<T>(this T toolSettings, params string[] excludes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.ExcludesInternal = excludes.ToList();
return toolSettings;
}
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Excludes"/> to a new list</em></p>
/// <p>Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T SetExcludes<T>(this T toolSettings, IEnumerable<string> excludes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.ExcludesInternal = excludes.ToList();
return toolSettings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Excludes"/></em></p>
/// <p>Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T AddExcludes<T>(this T toolSettings, params string[] excludes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.ExcludesInternal.AddRange(excludes);
return toolSettings;
}
/// <summary>
/// <p><em>Adds values to <see cref="MSpecSettings.Excludes"/></em></p>
/// <p>Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T AddExcludes<T>(this T toolSettings, IEnumerable<string> excludes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.ExcludesInternal.AddRange(excludes);
return toolSettings;
}
/// <summary>
/// <p><em>Clears <see cref="MSpecSettings.Excludes"/></em></p>
/// <p>Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T ClearExcludes<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.ExcludesInternal.Clear();
return toolSettings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Excludes"/></em></p>
/// <p>Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T RemoveExcludes<T>(this T toolSettings, params string[] excludes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
var hashSet = new HashSet<string>(excludes);
toolSettings.ExcludesInternal.RemoveAll(x => hashSet.Contains(x));
return toolSettings;
}
/// <summary>
/// <p><em>Removes values from <see cref="MSpecSettings.Excludes"/></em></p>
/// <p>Exclude specifications in contexts with these comma delimited tags. Ex. <c>-x 'foo, bar, foo_bar'</c>.</p>
/// </summary>
[Pure]
public static T RemoveExcludes<T>(this T toolSettings, IEnumerable<string> excludes) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
var hashSet = new HashSet<string>(excludes);
toolSettings.ExcludesInternal.RemoveAll(x => hashSet.Contains(x));
return toolSettings;
}
#endregion
#region HtmlOutput
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.HtmlOutput"/></em></p>
/// <p>Outputs the HTML report to path, one-per-assembly w/ index.html (if directory, otherwise all are in one file). Ex. <c>--html=output/reports/</c></p>
/// </summary>
[Pure]
public static T SetHtmlOutput<T>(this T toolSettings, string htmlOutput) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.HtmlOutput = htmlOutput;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.HtmlOutput"/></em></p>
/// <p>Outputs the HTML report to path, one-per-assembly w/ index.html (if directory, otherwise all are in one file). Ex. <c>--html=output/reports/</c></p>
/// </summary>
[Pure]
public static T ResetHtmlOutput<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.HtmlOutput = null;
return toolSettings;
}
#endregion
#region XmlOutput
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.XmlOutput"/></em></p>
/// <p>Outputs the XML report to the file referenced by the path. Ex. <c>--xml=output/reports/MSpecResults.xml</c></p>
/// </summary>
[Pure]
public static T SetXmlOutput<T>(this T toolSettings, string xmlOutput) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.XmlOutput = xmlOutput;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.XmlOutput"/></em></p>
/// <p>Outputs the XML report to the file referenced by the path. Ex. <c>--xml=output/reports/MSpecResults.xml</c></p>
/// </summary>
[Pure]
public static T ResetXmlOutput<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.XmlOutput = null;
return toolSettings;
}
#endregion
#region TeamCity
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.TeamCity"/></em></p>
/// <p>Reporting for TeamCity CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T SetTeamCity<T>(this T toolSettings, bool? teamCity) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TeamCity = teamCity;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.TeamCity"/></em></p>
/// <p>Reporting for TeamCity CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T ResetTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TeamCity = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.TeamCity"/></em></p>
/// <p>Reporting for TeamCity CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T EnableTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TeamCity = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.TeamCity"/></em></p>
/// <p>Reporting for TeamCity CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T DisableTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TeamCity = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.TeamCity"/></em></p>
/// <p>Reporting for TeamCity CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T ToggleTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TeamCity = !toolSettings.TeamCity;
return toolSettings;
}
#endregion
#region NoTeamCity
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.NoTeamCity"/></em></p>
/// <p>Disables TeamCity autodetection.</p>
/// </summary>
[Pure]
public static T SetNoTeamCity<T>(this T toolSettings, bool? noTeamCity) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoTeamCity = noTeamCity;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.NoTeamCity"/></em></p>
/// <p>Disables TeamCity autodetection.</p>
/// </summary>
[Pure]
public static T ResetNoTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoTeamCity = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.NoTeamCity"/></em></p>
/// <p>Disables TeamCity autodetection.</p>
/// </summary>
[Pure]
public static T EnableNoTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoTeamCity = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.NoTeamCity"/></em></p>
/// <p>Disables TeamCity autodetection.</p>
/// </summary>
[Pure]
public static T DisableNoTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoTeamCity = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.NoTeamCity"/></em></p>
/// <p>Disables TeamCity autodetection.</p>
/// </summary>
[Pure]
public static T ToggleNoTeamCity<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoTeamCity = !toolSettings.NoTeamCity;
return toolSettings;
}
#endregion
#region AppVeyor
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.AppVeyor"/></em></p>
/// <p>Reporting for AppVeyor CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T SetAppVeyor<T>(this T toolSettings, bool? appVeyor) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.AppVeyor = appVeyor;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.AppVeyor"/></em></p>
/// <p>Reporting for AppVeyor CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T ResetAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.AppVeyor = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.AppVeyor"/></em></p>
/// <p>Reporting for AppVeyor CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T EnableAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.AppVeyor = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.AppVeyor"/></em></p>
/// <p>Reporting for AppVeyor CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T DisableAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.AppVeyor = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.AppVeyor"/></em></p>
/// <p>Reporting for AppVeyor CI integration (also auto-detected).</p>
/// </summary>
[Pure]
public static T ToggleAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.AppVeyor = !toolSettings.AppVeyor;
return toolSettings;
}
#endregion
#region NoAppVeyor
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.NoAppVeyor"/></em></p>
/// <p>Disables AppVeyor autodetection.</p>
/// </summary>
[Pure]
public static T SetNoAppVeyor<T>(this T toolSettings, bool? noAppVeyor) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoAppVeyor = noAppVeyor;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.NoAppVeyor"/></em></p>
/// <p>Disables AppVeyor autodetection.</p>
/// </summary>
[Pure]
public static T ResetNoAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoAppVeyor = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.NoAppVeyor"/></em></p>
/// <p>Disables AppVeyor autodetection.</p>
/// </summary>
[Pure]
public static T EnableNoAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoAppVeyor = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.NoAppVeyor"/></em></p>
/// <p>Disables AppVeyor autodetection.</p>
/// </summary>
[Pure]
public static T DisableNoAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoAppVeyor = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.NoAppVeyor"/></em></p>
/// <p>Disables AppVeyor autodetection.</p>
/// </summary>
[Pure]
public static T ToggleNoAppVeyor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoAppVeyor = !toolSettings.NoAppVeyor;
return toolSettings;
}
#endregion
#region TimeInfo
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.TimeInfo"/></em></p>
/// <p>Shows time-related information in HTML output.</p>
/// </summary>
[Pure]
public static T SetTimeInfo<T>(this T toolSettings, bool? timeInfo) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TimeInfo = timeInfo;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.TimeInfo"/></em></p>
/// <p>Shows time-related information in HTML output.</p>
/// </summary>
[Pure]
public static T ResetTimeInfo<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TimeInfo = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.TimeInfo"/></em></p>
/// <p>Shows time-related information in HTML output.</p>
/// </summary>
[Pure]
public static T EnableTimeInfo<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TimeInfo = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.TimeInfo"/></em></p>
/// <p>Shows time-related information in HTML output.</p>
/// </summary>
[Pure]
public static T DisableTimeInfo<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TimeInfo = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.TimeInfo"/></em></p>
/// <p>Shows time-related information in HTML output.</p>
/// </summary>
[Pure]
public static T ToggleTimeInfo<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.TimeInfo = !toolSettings.TimeInfo;
return toolSettings;
}
#endregion
#region Silent
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.Silent"/></em></p>
/// <p>Suppress progress output (print fatal errors, failures and summary).</p>
/// </summary>
[Pure]
public static T SetSilent<T>(this T toolSettings, bool? silent) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.Silent = silent;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.Silent"/></em></p>
/// <p>Suppress progress output (print fatal errors, failures and summary).</p>
/// </summary>
[Pure]
public static T ResetSilent<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.Silent = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.Silent"/></em></p>
/// <p>Suppress progress output (print fatal errors, failures and summary).</p>
/// </summary>
[Pure]
public static T EnableSilent<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.Silent = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.Silent"/></em></p>
/// <p>Suppress progress output (print fatal errors, failures and summary).</p>
/// </summary>
[Pure]
public static T DisableSilent<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.Silent = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.Silent"/></em></p>
/// <p>Suppress progress output (print fatal errors, failures and summary).</p>
/// </summary>
[Pure]
public static T ToggleSilent<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.Silent = !toolSettings.Silent;
return toolSettings;
}
#endregion
#region DottedProgress
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.DottedProgress"/></em></p>
/// <p>Print dotted progress output.</p>
/// </summary>
[Pure]
public static T SetDottedProgress<T>(this T toolSettings, bool? dottedProgress) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.DottedProgress = dottedProgress;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.DottedProgress"/></em></p>
/// <p>Print dotted progress output.</p>
/// </summary>
[Pure]
public static T ResetDottedProgress<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.DottedProgress = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.DottedProgress"/></em></p>
/// <p>Print dotted progress output.</p>
/// </summary>
[Pure]
public static T EnableDottedProgress<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.DottedProgress = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.DottedProgress"/></em></p>
/// <p>Print dotted progress output.</p>
/// </summary>
[Pure]
public static T DisableDottedProgress<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.DottedProgress = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.DottedProgress"/></em></p>
/// <p>Print dotted progress output.</p>
/// </summary>
[Pure]
public static T ToggleDottedProgress<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.DottedProgress = !toolSettings.DottedProgress;
return toolSettings;
}
#endregion
#region NoColor
/// <summary>
/// <p><em>Sets <see cref="MSpecSettings.NoColor"/></em></p>
/// <p>Suppress colored console output.</p>
/// </summary>
[Pure]
public static T SetNoColor<T>(this T toolSettings, bool? noColor) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoColor = noColor;
return toolSettings;
}
/// <summary>
/// <p><em>Resets <see cref="MSpecSettings.NoColor"/></em></p>
/// <p>Suppress colored console output.</p>
/// </summary>
[Pure]
public static T ResetNoColor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoColor = null;
return toolSettings;
}
/// <summary>
/// <p><em>Enables <see cref="MSpecSettings.NoColor"/></em></p>
/// <p>Suppress colored console output.</p>
/// </summary>
[Pure]
public static T EnableNoColor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoColor = true;
return toolSettings;
}
/// <summary>
/// <p><em>Disables <see cref="MSpecSettings.NoColor"/></em></p>
/// <p>Suppress colored console output.</p>
/// </summary>
[Pure]
public static T DisableNoColor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoColor = false;
return toolSettings;
}
/// <summary>
/// <p><em>Toggles <see cref="MSpecSettings.NoColor"/></em></p>
/// <p>Suppress colored console output.</p>
/// </summary>
[Pure]
public static T ToggleNoColor<T>(this T toolSettings) where T : MSpecSettings
{
toolSettings = toolSettings.NewInstance();
toolSettings.NoColor = !toolSettings.NoColor;
return toolSettings;
}
#endregion
}
#endregion
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.IO;
using System.Linq;
using System.Net.Test.Common;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.Net.Http.Functional.Tests
{
public partial class HttpClientTest : HttpClientTestBase
{
[Fact]
public void Dispose_MultipleTimes_Success()
{
HttpClient client = CreateHttpClient();
client.Dispose();
client.Dispose();
}
[Fact]
public void DefaultRequestHeaders_Idempotent()
{
using (HttpClient client = CreateHttpClient())
{
Assert.NotNull(client.DefaultRequestHeaders);
Assert.Same(client.DefaultRequestHeaders, client.DefaultRequestHeaders);
}
}
[Fact]
public void BaseAddress_Roundtrip_Equal()
{
using (HttpClient client = CreateHttpClient())
{
Assert.Null(client.BaseAddress);
Uri uri = new Uri(CreateFakeUri());
client.BaseAddress = uri;
Assert.Equal(uri, client.BaseAddress);
client.BaseAddress = null;
Assert.Null(client.BaseAddress);
}
}
[Fact]
public void BaseAddress_InvalidUri_Throws()
{
using (HttpClient client = CreateHttpClient())
{
AssertExtensions.Throws<ArgumentException>("value", () => client.BaseAddress = new Uri("ftp://onlyhttpsupported"));
AssertExtensions.Throws<ArgumentException>("value", () => client.BaseAddress = new Uri("/onlyabsolutesupported", UriKind.Relative));
}
}
[Fact]
public void Timeout_Roundtrip_Equal()
{
using (HttpClient client = CreateHttpClient())
{
client.Timeout = Timeout.InfiniteTimeSpan;
Assert.Equal(Timeout.InfiniteTimeSpan, client.Timeout);
client.Timeout = TimeSpan.FromSeconds(1);
Assert.Equal(TimeSpan.FromSeconds(1), client.Timeout);
}
}
[Fact]
public void Timeout_OutOfRange_Throws()
{
using (HttpClient client = CreateHttpClient())
{
AssertExtensions.Throws<ArgumentOutOfRangeException>("value", () => client.Timeout = TimeSpan.FromSeconds(-2));
AssertExtensions.Throws<ArgumentOutOfRangeException>("value", () => client.Timeout = TimeSpan.FromSeconds(0));
AssertExtensions.Throws<ArgumentOutOfRangeException>("value", () => client.Timeout = TimeSpan.FromSeconds(int.MaxValue));
}
}
[Fact]
public void MaxResponseContentBufferSize_Roundtrip_Equal()
{
using (HttpClient client = CreateHttpClient())
{
client.MaxResponseContentBufferSize = 1;
Assert.Equal(1, client.MaxResponseContentBufferSize);
client.MaxResponseContentBufferSize = int.MaxValue;
Assert.Equal(int.MaxValue, client.MaxResponseContentBufferSize);
}
}
[Fact]
public void MaxResponseContentBufferSize_OutOfRange_Throws()
{
using (HttpClient client = CreateHttpClient())
{
AssertExtensions.Throws<ArgumentOutOfRangeException>("value", () => client.MaxResponseContentBufferSize = -1);
AssertExtensions.Throws<ArgumentOutOfRangeException>("value", () => client.MaxResponseContentBufferSize = 0);
AssertExtensions.Throws<ArgumentOutOfRangeException>("value", () => client.MaxResponseContentBufferSize = 1 + (long)int.MaxValue);
}
}
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "no exception throw on netfx")]
        [Theory]
        [InlineData(1, 2, true)]
        [InlineData(1, 127, true)]
        [InlineData(254, 255, true)]
        [InlineData(10, 256, true)]
        [InlineData(1, 440, true)]
        [InlineData(2, 1, false)]
        [InlineData(2, 2, false)]
        [InlineData(1000, 1000, false)]
        public async Task MaxResponseContentBufferSize_ThrowsIfTooSmallForContent(int maxSize, int contentLength, bool exceptionExpected)
        {
            // Verifies that buffering a response larger than MaxResponseContentBufferSize fails
            // with HttpRequestException, while content at or under the limit succeeds.
            using (HttpClient client = CreateHttpClient())
            {
                client.MaxResponseContentBufferSize = maxSize;
                await LoopbackServer.CreateServerAsync(async (server, url) =>
                {
                    // Start the client request first, then have the loopback server answer it
                    // with a body of exactly contentLength bytes.
                    Task<string> getTask = client.GetStringAsync(url);
                    Task serverTask = server.AcceptConnectionSendResponseAndCloseAsync(content: new string('s', contentLength));
                    // Await both sides together so a failure on either surfaces promptly.
                    Task bothTasks = TestHelper.WhenAllCompletedOrAnyFailed(getTask, serverTask);
                    if (exceptionExpected)
                    {
                        await Assert.ThrowsAsync<HttpRequestException>(() => bothTasks);
                    }
                    else
                    {
                        await bothTasks;
                    }
                });
            }
        }
[Fact]
public async Task Properties_CantChangeAfterOperation_Throws()
{
using (var client = new HttpClient(new CustomResponseHandler((r,c) => Task.FromResult(new HttpResponseMessage()))))
{
(await client.GetAsync(CreateFakeUri())).Dispose();
Assert.Throws<InvalidOperationException>(() => client.BaseAddress = null);
Assert.Throws<InvalidOperationException>(() => client.Timeout = TimeSpan.FromSeconds(1));
Assert.Throws<InvalidOperationException>(() => client.MaxResponseContentBufferSize = 1);
}
}
[Theory]
[InlineData(null)]
[InlineData("/something.html")]
public void GetAsync_NoBaseAddress_InvalidUri_ThrowsException(string uri)
{
using (HttpClient client = CreateHttpClient())
{
Assert.Throws<InvalidOperationException>(() => { client.GetAsync(uri == null ? null : new Uri(uri, UriKind.RelativeOrAbsolute)); });
}
}
[Theory]
[InlineData(null)]
[InlineData("/")]
public async Task GetAsync_BaseAddress_ValidUri_Success(string uri)
{
using (var client = new HttpClient(new CustomResponseHandler((r,c) => Task.FromResult(new HttpResponseMessage()))))
{
client.BaseAddress = new Uri(CreateFakeUri());
using (HttpResponseMessage response = await client.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead))
{
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
}
}
}
[Theory]
[InlineData(false)]
[InlineData(true)]
public async Task GetContentAsync_ErrorStatusCode_ExpectedExceptionThrown(bool withResponseContent)
{
using (var client = new HttpClient(new CustomResponseHandler(
(r,c) => Task.FromResult(new HttpResponseMessage(HttpStatusCode.BadRequest)
{
Content = withResponseContent ? new ByteArrayContent(new byte[1]) : null
}))))
{
await Assert.ThrowsAsync<HttpRequestException>(() => client.GetStringAsync(CreateFakeUri()));
await Assert.ThrowsAsync<HttpRequestException>(() => client.GetByteArrayAsync(CreateFakeUri()));
await Assert.ThrowsAsync<HttpRequestException>(() => client.GetStreamAsync(CreateFakeUri()));
}
}
[Fact]
public async Task GetContentAsync_NullResponse_Throws()
{
    // A handler that yields a null HttpResponseMessage surfaces as InvalidOperationException.
    var handler = new CustomResponseHandler((request, token) => Task.FromResult<HttpResponseMessage>(null));
    using (var client = new HttpClient(handler))
    {
        await Assert.ThrowsAnyAsync<InvalidOperationException>(() => client.GetStringAsync(CreateFakeUri()));
    }
}
[Fact]
public async Task GetContentAsync_NullResponseContent_ReturnsDefaultValue()
{
    // A success response with no content maps to the canonical empty singletons.
    var handler = new CustomResponseHandler((request, token) => Task.FromResult(new HttpResponseMessage() { Content = null }));
    using (var client = new HttpClient(handler))
    {
        Assert.Same(string.Empty, await client.GetStringAsync(CreateFakeUri()));
        Assert.Same(Array.Empty<byte>(), await client.GetByteArrayAsync(CreateFakeUri()));
        Assert.Same(Stream.Null, await client.GetStreamAsync(CreateFakeUri()));
    }
}
[Fact]
public async Task GetContentAsync_SerializingContentThrows_Synchronous_Throws()
{
    // An exception thrown synchronously while serializing response content
    // propagates unwrapped (same instance) from the content helper methods.
    var expected = new FormatException();
    Func<Stream, Task> failSerialization = stream => { throw expected; };
    using (var client = new HttpClient(new CustomResponseHandler(
        (request, token) => Task.FromResult(new HttpResponseMessage() { Content = new CustomContent(failSerialization) }))))
    {
        Assert.Same(expected, await Assert.ThrowsAsync<FormatException>(() => client.GetStringAsync(CreateFakeUri())));
        Assert.Same(expected, await Assert.ThrowsAsync<FormatException>(() => client.GetByteArrayAsync(CreateFakeUri())));
        Assert.Same(expected, await Assert.ThrowsAsync<FormatException>(() => client.GetStreamAsync(CreateFakeUri())));
    }
}
[Fact]
public async Task GetContentAsync_SerializingContentThrows_Asynchronous_Throws()
{
    // A faulted serialization task propagates its original exception instance.
    var expected = new FormatException();
    using (var client = new HttpClient(new CustomResponseHandler(
        (request, token) => Task.FromResult(new HttpResponseMessage() { Content = new CustomContent(stream => Task.FromException(expected)) }))))
    {
        Assert.Same(expected, await Assert.ThrowsAsync<FormatException>(() => client.GetStringAsync(CreateFakeUri())));
        Assert.Same(expected, await Assert.ThrowsAsync<FormatException>(() => client.GetByteArrayAsync(CreateFakeUri())));
        Assert.Same(expected, await Assert.ThrowsAsync<FormatException>(() => client.GetStreamAsync(CreateFakeUri())));
    }
}
[Fact]
public async Task GetAsync_InvalidUrl_ExpectedExceptionThrown()
{
    // The fake host name never resolves, so a real handler fails with HttpRequestException.
    using (HttpClient client = CreateHttpClient())
    {
        await Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync(CreateFakeUri()));
        await Assert.ThrowsAsync<HttpRequestException>(() => client.GetStringAsync(CreateFakeUri()));
    }
}
[Fact]
public async Task GetPutPostDeleteAsync_Canceled_Throws()
{
    // Every verb overload that accepts a CancellationToken must observe cancellation.
    // The handler parks each request until its token fires (via WhenCanceled).
    using (var client = new HttpClient(new CustomResponseHandler((r, c) => WhenCanceled<HttpResponseMessage>(c))))
    {
        var content = new ByteArrayContent(new byte[1]);
        var cts = new CancellationTokenSource();
        // Start all operations first so each is in flight when Cancel() is called.
        Task t1 = client.GetAsync(CreateFakeUri(), cts.Token);
        Task t2 = client.GetAsync(CreateFakeUri(), HttpCompletionOption.ResponseContentRead, cts.Token);
        Task t3 = client.PostAsync(CreateFakeUri(), content, cts.Token);
        Task t4 = client.PutAsync(CreateFakeUri(), content, cts.Token);
        Task t5 = client.DeleteAsync(CreateFakeUri(), cts.Token);
        cts.Cancel();
        await Assert.ThrowsAnyAsync<OperationCanceledException>(() => t1);
        await Assert.ThrowsAnyAsync<OperationCanceledException>(() => t2);
        await Assert.ThrowsAnyAsync<OperationCanceledException>(() => t3);
        await Assert.ThrowsAnyAsync<OperationCanceledException>(() => t4);
        await Assert.ThrowsAnyAsync<OperationCanceledException>(() => t5);
    }
}
[Fact]
public async Task GetPutPostDeleteAsync_Success()
{
    // Every convenience overload should complete with a 200 OK response.
    using (var client = new HttpClient(new CustomResponseHandler((request, token) => Task.FromResult(new HttpResponseMessage()))))
    {
        async Task VerifyAsync(Task<HttpResponseMessage> operation)
        {
            using (HttpResponseMessage message = await operation)
            {
                Assert.Equal(HttpStatusCode.OK, message.StatusCode);
            }
        }

        await VerifyAsync(client.GetAsync(CreateFakeUri()));
        await VerifyAsync(client.GetAsync(CreateFakeUri(), CancellationToken.None));
        await VerifyAsync(client.GetAsync(CreateFakeUri(), HttpCompletionOption.ResponseContentRead));
        await VerifyAsync(client.GetAsync(CreateFakeUri(), HttpCompletionOption.ResponseContentRead, CancellationToken.None));
        await VerifyAsync(client.PostAsync(CreateFakeUri(), new ByteArrayContent(new byte[1])));
        await VerifyAsync(client.PostAsync(CreateFakeUri(), new ByteArrayContent(new byte[1]), CancellationToken.None));
        await VerifyAsync(client.PutAsync(CreateFakeUri(), new ByteArrayContent(new byte[1])));
        await VerifyAsync(client.PutAsync(CreateFakeUri(), new ByteArrayContent(new byte[1]), CancellationToken.None));
        await VerifyAsync(client.DeleteAsync(CreateFakeUri()));
        await VerifyAsync(client.DeleteAsync(CreateFakeUri(), CancellationToken.None));
    }
}
[Fact]
public void GetAsync_CustomException_Synchronous_ThrowsException()
{
    // An exception thrown synchronously from the handler escapes GetAsync itself
    // (same instance), rather than faulting the returned task.
    var expected = new FormatException();
    using (var client = new HttpClient(new CustomResponseHandler((request, token) => { throw expected; })))
    {
        FormatException actual = Assert.Throws<FormatException>(() => { client.GetAsync(CreateFakeUri()); });
        Assert.Same(expected, actual);
    }
}
[Fact]
public async Task GetAsync_CustomException_Asynchronous_ThrowsException()
{
    // A handler returning a faulted task causes GetAsync's task to fault with
    // the same exception instance.
    var expected = new FormatException();
    using (var client = new HttpClient(new CustomResponseHandler((request, token) => Task.FromException<HttpResponseMessage>(expected))))
    {
        FormatException actual = await Assert.ThrowsAsync<FormatException>(() => client.GetAsync(CreateFakeUri()));
        Assert.Same(expected, actual);
    }
}
[Fact]
public void SendAsync_NullRequest_ThrowsException()
{
    // SendAsync validates its request argument before any other work.
    var handler = new CustomResponseHandler((request, token) => Task.FromResult<HttpResponseMessage>(null));
    using (var client = new HttpClient(handler))
    {
        AssertExtensions.Throws<ArgumentNullException>("request", () => { client.SendAsync(null); });
    }
}
[Fact]
public async Task SendAsync_DuplicateRequest_ThrowsException()
{
    // An HttpRequestMessage instance may be sent at most once.
    using (var client = new HttpClient(new CustomResponseHandler((request, token) => Task.FromResult(new HttpResponseMessage()))))
    using (var request = new HttpRequestMessage(HttpMethod.Get, CreateFakeUri()))
    {
        HttpResponseMessage first = await client.SendAsync(request);
        first.Dispose();
        Assert.Throws<InvalidOperationException>(() => { client.SendAsync(request); });
    }
}
[Fact]
public async Task SendAsync_RequestContentNotDisposed()
{
    // The client must not dispose the request content when the request completes;
    // the caller still owns it and may read it afterwards.
    var content = new ByteArrayContent(new byte[1]);
    using (var request = new HttpRequestMessage(HttpMethod.Get, CreateFakeUri()) { Content = content })
    using (var client = new HttpClient(new CustomResponseHandler((r, c) => Task.FromResult(new HttpResponseMessage()))))
    {
        await client.SendAsync(request);
        await content.ReadAsStringAsync(); // no exception
    }
}
[Fact]
public void Dispose_UseAfterDispose_Throws()
{
    // Every public operation and mutable property must reject use after Dispose.
    HttpClient client = CreateHttpClient();
    client.Dispose();
    Assert.Throws<ObjectDisposedException>(() => client.BaseAddress = null);
    Assert.Throws<ObjectDisposedException>(() => client.CancelPendingRequests());
    Assert.Throws<ObjectDisposedException>(() => { client.DeleteAsync(CreateFakeUri()); });
    Assert.Throws<ObjectDisposedException>(() => { client.GetAsync(CreateFakeUri()); });
    Assert.Throws<ObjectDisposedException>(() => { client.GetByteArrayAsync(CreateFakeUri()); });
    Assert.Throws<ObjectDisposedException>(() => { client.GetStreamAsync(CreateFakeUri()); });
    Assert.Throws<ObjectDisposedException>(() => { client.GetStringAsync(CreateFakeUri()); });
    Assert.Throws<ObjectDisposedException>(() => { client.PostAsync(CreateFakeUri(), new ByteArrayContent(new byte[1])); });
    Assert.Throws<ObjectDisposedException>(() => { client.PutAsync(CreateFakeUri(), new ByteArrayContent(new byte[1])); });
    Assert.Throws<ObjectDisposedException>(() => { client.SendAsync(new HttpRequestMessage(HttpMethod.Get, CreateFakeUri())); });
    Assert.Throws<ObjectDisposedException>(() => { client.Timeout = TimeSpan.FromSeconds(1); });
}
[Theory]
[InlineData(false)]
[InlineData(true)]
public void CancelAllPending_AllPendingOperationsCanceled(bool withInfiniteTimeout)
{
    // CancelPendingRequests must cancel every in-flight operation, whether or not
    // the client-level timeout is infinite.
    using (var client = new HttpClient(new CustomResponseHandler((r, c) => WhenCanceled<HttpResponseMessage>(c))))
    {
        if (withInfiniteTimeout)
        {
            client.Timeout = Timeout.InfiniteTimeSpan;
        }
        // All three requests park inside the handler until their token fires.
        Task<HttpResponseMessage>[] tasks = Enumerable.Range(0, 3).Select(_ => client.GetAsync(CreateFakeUri())).ToArray();
        client.CancelPendingRequests();
        Assert.All(tasks, task => Assert.ThrowsAny<OperationCanceledException>(() => task.GetAwaiter().GetResult()));
    }
}
[Fact]
public void Timeout_TooShort_AllPendingOperationsCanceled()
{
    // With a 1ms timeout and a handler that never completes on its own, every
    // operation must be canceled by the client's timeout token.
    using (var client = new HttpClient(new CustomResponseHandler((r, c) => WhenCanceled<HttpResponseMessage>(c))))
    {
        client.Timeout = TimeSpan.FromMilliseconds(1);
        Task<HttpResponseMessage>[] tasks = Enumerable.Range(0, 3).Select(_ => client.GetAsync(CreateFakeUri())).ToArray();
        Assert.All(tasks, task => Assert.ThrowsAny<OperationCanceledException>(() => task.GetAwaiter().GetResult()));
    }
}
[Fact]
[OuterLoop("One second delay in getting server's response")]
public async Task Timeout_SetTo30AndGetResponseFromLoopbackQuickly_Success()
{
    // A generous 30s timeout should comfortably survive a loopback response that
    // is delayed by only half a second.
    using (HttpClient client = CreateHttpClient())
    {
        client.Timeout = TimeSpan.FromSeconds(30);
        await LoopbackServer.CreateServerAsync(async (server, url) =>
        {
            Task getTask = client.GetStringAsync(url);
            await Task.Delay(TimeSpan.FromSeconds(.5));
            await TestHelper.WhenAllCompletedOrAnyFailed(
                getTask,
                server.AcceptConnectionSendResponseAndCloseAsync());
        });
    }
}
// Produces a syntactically valid, globally unique http URI whose host name can
// never resolve, so tests using it never touch a real network endpoint.
private static string CreateFakeUri() => $"http://{Guid.NewGuid():N}";
// Returns a task that never completes successfully: it waits indefinitely and
// faults with an OperationCanceledException once cancellationToken is canceled.
private static async Task<T> WhenCanceled<T>(CancellationToken cancellationToken)
{
    await Task.Delay(Timeout.Infinite, cancellationToken).ConfigureAwait(false);
    return default(T);
}
// Test-only handler that delegates each request to a caller-supplied function,
// letting tests script arbitrary responses, faults, or blocking behavior.
private sealed class CustomResponseHandler : HttpMessageHandler
{
    private readonly Func<HttpRequestMessage, CancellationToken, Task<HttpResponseMessage>> _func;
    public CustomResponseHandler(Func<HttpRequestMessage, CancellationToken, Task<HttpResponseMessage>> func) { _func = func; }
    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        // Invoked synchronously on the calling thread; an exception thrown here
        // (rather than a faulted task) is what the *_Synchronous tests observe.
        return _func(request, cancellationToken);
    }
}
// Test-only content whose serialization is scripted by the caller, used to
// simulate synchronous and asynchronous failures while writing a body.
private sealed class CustomContent : HttpContent
{
    private readonly Func<Stream, Task> _func;
    public CustomContent(Func<Stream, Task> func) { _func = func; }
    protected override Task SerializeToStreamAsync(Stream stream, TransportContext context)
    {
        return _func(stream);
    }
    protected override bool TryComputeLength(out long length)
    {
        // Length is unknown up front; callers must stream the content.
        length = 0;
        return false;
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Globalization
{
/// <summary>
/// JapaneseCalendar is based on Gregorian calendar. The month and day values are the same as
/// Gregorian calendar. However, the year value is an offset to the Gregorian
/// year based on the era.
///
/// This system was adopted by Emperor Meiji in 1868. The year value is counted based on the reign of an emperor,
/// and the era begins on the day an emperor ascends the throne and continues until his death.
/// The era changes at 12:00AM.
///
/// For example, the current era is Reiwa. It started on 2019/5/1 A.D. Therefore, Gregorian year 2019 is also Reiwa 1st.
/// 2019/5/1 A.D. is also Reiwa 1st 5/1.
///
/// Any date in the year during which era is changed can be reckoned in either era. For example,
/// 2019/1/1 can be 1/1 Reiwa 1st year or 1/1 Heisei 31st year.
///
/// Note:
/// The DateTime values that can be represented by the JapaneseCalendar are limited by two factors:
/// 1. The min value and max value of DateTime class.
/// 2. The available era information.
/// </summary>
/// <remarks>
/// Calendar support range:
/// Calendar Minimum Maximum
/// ========== ========== ==========
/// Gregorian 1868/09/08 9999/12/31
/// Japanese Meiji 01/01 Reiwa 7981/12/31
/// </remarks>
public partial class JapaneseCalendar : Calendar
{
    // Earliest representable date: the start of the Meiji era (Gregorian 1868/09/08).
    private static readonly DateTime s_calendarMinValue = new DateTime(1868, 9, 8);
    public override DateTime MinSupportedDateTime => s_calendarMinValue;
    public override DateTime MaxSupportedDateTime => DateTime.MaxValue;
    public override CalendarAlgorithmType AlgorithmType => CalendarAlgorithmType.SolarCalendar;
    // Using a field initializer rather than a static constructor so that the whole class can be lazy
    // init.
    private static volatile EraInfo[]? s_japaneseEraInfo;
    // m_EraInfo must be listed in reverse chronological order. The most recent era
    // should be the first element.
    // That is, m_EraInfo[0] contains the most recent era.
    //
    // We know about 5 built-in eras, however users may add additional era(s) from the
    // registry, by adding values to HKLM\SYSTEM\CurrentControlSet\Control\Nls\Calendars\Japanese\Eras
    // we don't read the registry and instead we call WinRT to get the needed information
    //
    // Registry values look like:
    // yyyy.mm.dd=era_abbrev_english_englishabbrev
    //
    // Where yyyy.mm.dd is the registry value name, and also the date of the era start.
    // yyyy, mm, and dd are the year, month & day the era begins (4, 2 & 2 digits long)
    // era is the Japanese Era name
    // abbrev is the Abbreviated Japanese Era Name
    // english is the English name for the Era (unused)
    // englishabbrev is the Abbreviated English name for the era.
    // . is a delimiter, but the value of . doesn't matter.
    // '_' marks the space between the japanese era name, japanese abbreviated era name
    // english name, and abbreviated english names.
    internal static EraInfo[] GetEraInfo()
    {
        // See if we need to build it. The publication is benign if two threads race:
        // both compute equivalent arrays and the volatile write wins harmlessly.
        return s_japaneseEraInfo ??
            (s_japaneseEraInfo = GetJapaneseEras()) ??
            // See if we have to use the built-in eras
            (s_japaneseEraInfo = new EraInfo[]
            {
                new EraInfo(5, 2019, 5, 1, 2018, 1, GregorianCalendar.MaxYear - 2018, "\x4ee4\x548c", "\x4ee4", "R"),
                new EraInfo(4, 1989, 1, 8, 1988, 1, 2019 - 1988, "\x5e73\x6210", "\x5e73", "H"),
                new EraInfo(3, 1926, 12, 25, 1925, 1, 1989 - 1925, "\x662d\x548c", "\x662d", "S"),
                new EraInfo(2, 1912, 7, 30, 1911, 1, 1926 - 1911, "\x5927\x6b63", "\x5927", "T"),
                new EraInfo(1, 1868, 1, 1, 1867, 1, 1912 - 1867, "\x660e\x6cbb", "\x660e", "M")
            });
    }
    internal static volatile Calendar? s_defaultInstance;
    internal GregorianCalendarHelper _helper;
    internal static Calendar GetDefaultInstance() => s_defaultInstance ??= new JapaneseCalendar();
    public JapaneseCalendar()
    {
        try
        {
            // Probe for the Japanese culture; if its data is unavailable this
            // calendar cannot be constructed.
            new CultureInfo("ja-JP");
        }
        catch (ArgumentException e)
        {
            throw new TypeInitializationException(this.GetType().ToString(), e);
        }
        _helper = new GregorianCalendarHelper(this, GetEraInfo());
    }
    internal override CalendarId ID => CalendarId.JAPAN;
    // All date arithmetic and field extraction below delegates to
    // GregorianCalendarHelper, which applies the era-based year offsets.
    public override DateTime AddMonths(DateTime time, int months)
    {
        return _helper.AddMonths(time, months);
    }
    public override DateTime AddYears(DateTime time, int years)
    {
        return _helper.AddYears(time, years);
    }
    public override int GetDaysInMonth(int year, int month, int era)
    {
        return _helper.GetDaysInMonth(year, month, era);
    }
    public override int GetDaysInYear(int year, int era)
    {
        return _helper.GetDaysInYear(year, era);
    }
    public override int GetDayOfMonth(DateTime time)
    {
        return _helper.GetDayOfMonth(time);
    }
    public override DayOfWeek GetDayOfWeek(DateTime time)
    {
        return _helper.GetDayOfWeek(time);
    }
    public override int GetDayOfYear(DateTime time)
    {
        return _helper.GetDayOfYear(time);
    }
    public override int GetMonthsInYear(int year, int era)
    {
        return _helper.GetMonthsInYear(year, era);
    }
    public override int GetWeekOfYear(DateTime time, CalendarWeekRule rule, DayOfWeek firstDayOfWeek)
    {
        return _helper.GetWeekOfYear(time, rule, firstDayOfWeek);
    }
    public override int GetEra(DateTime time)
    {
        return _helper.GetEra(time);
    }
    public override int GetMonth(DateTime time)
    {
        return _helper.GetMonth(time);
    }
    public override int GetYear(DateTime time)
    {
        return _helper.GetYear(time);
    }
    public override bool IsLeapDay(int year, int month, int day, int era)
    {
        return _helper.IsLeapDay(year, month, day, era);
    }
    public override bool IsLeapYear(int year, int era)
    {
        return _helper.IsLeapYear(year, era);
    }
    public override int GetLeapMonth(int year, int era)
    {
        return _helper.GetLeapMonth(year, era);
    }
    public override bool IsLeapMonth(int year, int month, int era)
    {
        return _helper.IsLeapMonth(year, month, era);
    }
    public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
    {
        return _helper.ToDateTime(year, month, day, hour, minute, second, millisecond, era);
    }
    /// <summary>
    /// For Japanese calendar, four digit year is not used. Few emperors will live for more than one hundred years.
    /// Therefore, for any two digit number, we just return the original number.
    /// </summary>
    public override int ToFourDigitYear(int year)
    {
        if (year <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(year), year, SR.ArgumentOutOfRange_NeedPosNum);
        }
        if (year > _helper.MaxYear)
        {
            throw new ArgumentOutOfRangeException(
                nameof(year),
                year,
                SR.Format(SR.ArgumentOutOfRange_Range, 1, _helper.MaxYear));
        }
        return year;
    }
    public override int[] Eras => _helper.Eras;
    /// <summary>
    /// Return the various era strings
    /// Note: The arrays are backwards of the eras
    /// </summary>
    internal static string[] EraNames()
    {
        EraInfo[] eras = GetEraInfo();
        string[] eraNames = new string[eras.Length];
        for (int i = 0; i < eras.Length; i++)
        {
            // Strings are in chronological order, eras are backwards order.
            eraNames[i] = eras[eras.Length - i - 1].eraName!;
        }
        return eraNames;
    }
    // Abbreviated (single-character Japanese) era names, chronological order.
    internal static string[] AbbrevEraNames()
    {
        EraInfo[] eras = GetEraInfo();
        string[] erasAbbrev = new string[eras.Length];
        for (int i = 0; i < eras.Length; i++)
        {
            // Strings are in chronological order, eras are backwards order.
            erasAbbrev[i] = eras[eras.Length - i - 1].abbrevEraName!;
        }
        return erasAbbrev;
    }
    // Abbreviated English era names (e.g. "M", "T", "S", "H", "R"), chronological order.
    internal static string[] EnglishEraNames()
    {
        EraInfo[] eras = GetEraInfo();
        string[] erasEnglish = new string[eras.Length];
        for (int i = 0; i < eras.Length; i++)
        {
            // Strings are in chronological order, eras are backwards order.
            erasEnglish[i] = eras[eras.Length - i - 1].englishEraName!;
        }
        return erasEnglish;
    }
    private const int DefaultTwoDigitYearMax = 99;
    internal override bool IsValidYear(int year, int era)
    {
        return _helper.IsValidYear(year, era);
    }
    public override int TwoDigitYearMax
    {
        get
        {
            if (_twoDigitYearMax == -1)
            {
                _twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DefaultTwoDigitYearMax);
            }
            return _twoDigitYearMax;
        }
        set
        {
            VerifyWritable();
            if (value < 99 || value > _helper.MaxYear)
            {
                throw new ArgumentOutOfRangeException(
                    nameof(value),
                    value,
                    SR.Format(SR.ArgumentOutOfRange_Range, 99, _helper.MaxYear));
            }
            _twoDigitYearMax = value;
        }
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Threading.Tasks;
using Xunit;
namespace System.Threading.Channels.Tests
{
// Bounded-channel-specific tests. Common channel behavior is exercised by
// ChannelTestBase, which the two overrides below configure with a capacity-1
// bounded channel.
public class BoundedChannelTests : ChannelTestBase
{
    protected override Channel<int> CreateChannel() => Channel.CreateBounded<int>(1);
    protected override Channel<int> CreateFullChannel()
    {
        // Capacity 1, pre-filled, so the next write must wait (or drop).
        var c = Channel.CreateBounded<int>(1);
        c.Writer.WriteAsync(42).Wait();
        return c;
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void TryWrite_TryRead_Many_Wait(int bufferedCapacity)
    {
        // Default (Wait) mode: TryWrite fails once the buffer is full.
        var c = Channel.CreateBounded<int>(bufferedCapacity);
        for (int i = 0; i < bufferedCapacity; i++)
        {
            Assert.True(c.Writer.TryWrite(i));
        }
        Assert.False(c.Writer.TryWrite(bufferedCapacity));
        int result;
        for (int i = 0; i < bufferedCapacity; i++)
        {
            Assert.True(c.Reader.TryRead(out result));
            Assert.Equal(i, result);
        }
        Assert.False(c.Reader.TryRead(out result));
        Assert.Equal(0, result);
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void TryWrite_TryRead_Many_DropOldest(int bufferedCapacity)
    {
        // DropOldest: writing 2*capacity items leaves only the newest capacity items.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(bufferedCapacity) { FullMode = BoundedChannelFullMode.DropOldest });
        for (int i = 0; i < bufferedCapacity * 2; i++)
        {
            Assert.True(c.Writer.TryWrite(i));
        }
        int result;
        for (int i = bufferedCapacity; i < bufferedCapacity * 2; i++)
        {
            Assert.True(c.Reader.TryRead(out result));
            Assert.Equal(i, result);
        }
        Assert.False(c.Reader.TryRead(out result));
        Assert.Equal(0, result);
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void WriteAsync_TryRead_Many_DropOldest(int bufferedCapacity)
    {
        // Same as above but through WriteAsync, which must complete synchronously
        // since DropOldest never blocks a writer.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(bufferedCapacity) { FullMode = BoundedChannelFullMode.DropOldest });
        for (int i = 0; i < bufferedCapacity * 2; i++)
        {
            AssertSynchronousSuccess(c.Writer.WriteAsync(i));
        }
        int result;
        for (int i = bufferedCapacity; i < bufferedCapacity * 2; i++)
        {
            Assert.True(c.Reader.TryRead(out result));
            Assert.Equal(i, result);
        }
        Assert.False(c.Reader.TryRead(out result));
        Assert.Equal(0, result);
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void TryWrite_TryRead_Many_DropNewest(int bufferedCapacity)
    {
        // DropNewest: each overflowing write replaces the most recently stored item,
        // so the buffer ends up with items 0..capacity-2 plus the last write.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(bufferedCapacity) { FullMode = BoundedChannelFullMode.DropNewest });
        for (int i = 0; i < bufferedCapacity * 2; i++)
        {
            Assert.True(c.Writer.TryWrite(i));
        }
        int result;
        for (int i = 0; i < bufferedCapacity - 1; i++)
        {
            Assert.True(c.Reader.TryRead(out result));
            Assert.Equal(i, result);
        }
        Assert.True(c.Reader.TryRead(out result));
        Assert.Equal(bufferedCapacity * 2 - 1, result);
        Assert.False(c.Reader.TryRead(out result));
        Assert.Equal(0, result);
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void WriteAsync_TryRead_Many_DropNewest(int bufferedCapacity)
    {
        // DropNewest via WriteAsync; writes never block, so all complete synchronously.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(bufferedCapacity) { FullMode = BoundedChannelFullMode.DropNewest });
        for (int i = 0; i < bufferedCapacity * 2; i++)
        {
            AssertSynchronousSuccess(c.Writer.WriteAsync(i));
        }
        int result;
        for (int i = 0; i < bufferedCapacity - 1; i++)
        {
            Assert.True(c.Reader.TryRead(out result));
            Assert.Equal(i, result);
        }
        Assert.True(c.Reader.TryRead(out result));
        Assert.Equal(bufferedCapacity * 2 - 1, result);
        Assert.False(c.Reader.TryRead(out result));
        Assert.Equal(0, result);
    }
    [Fact]
    public async Task TryWrite_DropNewest_WrappedAroundInternalQueue()
    {
        // Exercises DropNewest when the internal circular buffer has wrapped,
        // i.e. the tail index is before the head index.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(3) { FullMode = BoundedChannelFullMode.DropNewest });
        // Move head of dequeue beyond the beginning
        Assert.True(c.Writer.TryWrite(1));
        Assert.True(c.Reader.TryRead(out int item));
        Assert.Equal(1, item);
        // Add items to fill the capacity and put the tail at 0
        Assert.True(c.Writer.TryWrite(2));
        Assert.True(c.Writer.TryWrite(3));
        Assert.True(c.Writer.TryWrite(4));
        // Add an item to overwrite the newest
        Assert.True(c.Writer.TryWrite(5));
        // Verify current contents
        Assert.Equal(2, await c.Reader.ReadAsync());
        Assert.Equal(3, await c.Reader.ReadAsync());
        Assert.Equal(5, await c.Reader.ReadAsync());
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void TryWrite_TryRead_Many_Ignore(int bufferedCapacity)
    {
        // DropWrite: overflowing writes are silently discarded; the first
        // `capacity` items remain.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(bufferedCapacity) { FullMode = BoundedChannelFullMode.DropWrite });
        for (int i = 0; i < bufferedCapacity * 2; i++)
        {
            Assert.True(c.Writer.TryWrite(i));
        }
        int result;
        for (int i = 0; i < bufferedCapacity; i++)
        {
            Assert.True(c.Reader.TryRead(out result));
            Assert.Equal(i, result);
        }
        Assert.False(c.Reader.TryRead(out result));
        Assert.Equal(0, result);
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void WriteAsync_TryRead_Many_Ignore(int bufferedCapacity)
    {
        // DropWrite via WriteAsync; writes never block, so all complete synchronously.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(bufferedCapacity) { FullMode = BoundedChannelFullMode.DropWrite });
        for (int i = 0; i < bufferedCapacity * 2; i++)
        {
            AssertSynchronousSuccess(c.Writer.WriteAsync(i));
        }
        int result;
        for (int i = 0; i < bufferedCapacity; i++)
        {
            Assert.True(c.Reader.TryRead(out result));
            Assert.Equal(i, result);
        }
        Assert.False(c.Reader.TryRead(out result));
        Assert.Equal(0, result);
    }
    [Fact]
    public async Task CancelPendingWrite_Reading_DataTransferredFromCorrectWriter()
    {
        // A canceled pending write (43) must not leak its data to readers; the
        // next uncanceled writer (44) supplies the second value read.
        var c = Channel.CreateBounded<int>(1);
        Assert.Equal(TaskStatus.RanToCompletion, c.Writer.WriteAsync(42).Status);
        var cts = new CancellationTokenSource();
        Task write1 = c.Writer.WriteAsync(43, cts.Token);
        Assert.Equal(TaskStatus.WaitingForActivation, write1.Status);
        cts.Cancel();
        Task write2 = c.Writer.WriteAsync(44);
        Assert.Equal(42, await c.Reader.ReadAsync());
        Assert.Equal(44, await c.Reader.ReadAsync());
        await AssertCanceled(write1, cts.Token);
        await write2;
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void TryWrite_TryRead_OneAtATime(int bufferedCapacity)
    {
        var c = Channel.CreateBounded<int>(bufferedCapacity);
        const int NumItems = 100000;
        for (int i = 0; i < NumItems; i++)
        {
            Assert.True(c.Writer.TryWrite(i));
            Assert.True(c.Reader.TryRead(out int result));
            Assert.Equal(i, result);
        }
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void SingleProducerConsumer_ConcurrentReadWrite_WithBufferedCapacity_Success(int bufferedCapacity)
    {
        // One writer and one reader racing; FIFO order must be preserved.
        var c = Channel.CreateBounded<int>(bufferedCapacity);
        const int NumItems = 10000;
        Task.WaitAll(
            Task.Run(async () =>
            {
                for (int i = 0; i < NumItems; i++)
                {
                    await c.Writer.WriteAsync(i);
                }
            }),
            Task.Run(async () =>
            {
                for (int i = 0; i < NumItems; i++)
                {
                    Assert.Equal(i, await c.Reader.ReadAsync());
                }
            }));
    }
    [Theory]
    [InlineData(1)]
    [InlineData(10)]
    [InlineData(10000)]
    public void ManyProducerConsumer_ConcurrentReadWrite_WithBufferedCapacity_Success(int bufferedCapacity)
    {
        // Many writers and readers; correctness is checked by summing everything
        // read and comparing to the closed-form sum 1 + 2 + ... + NumItems.
        var c = Channel.CreateBounded<int>(bufferedCapacity);
        const int NumWriters = 10;
        const int NumReaders = 10;
        const int NumItems = 10000;
        long readTotal = 0;
        int remainingWriters = NumWriters;
        int remainingItems = NumItems;
        Task[] tasks = new Task[NumWriters + NumReaders];
        for (int i = 0; i < NumReaders; i++)
        {
            tasks[i] = Task.Run(async () =>
            {
                try
                {
                    while (true)
                    {
                        Interlocked.Add(ref readTotal, await c.Reader.ReadAsync());
                    }
                }
                catch (ChannelClosedException) { }
            });
        }
        for (int i = 0; i < NumWriters; i++)
        {
            tasks[NumReaders + i] = Task.Run(async () =>
            {
                while (true)
                {
                    int value = Interlocked.Decrement(ref remainingItems);
                    if (value < 0)
                    {
                        break;
                    }
                    await c.Writer.WriteAsync(value + 1);
                }
                // The last writer to finish completes the channel so readers exit.
                if (Interlocked.Decrement(ref remainingWriters) == 0)
                {
                    c.Writer.Complete();
                }
            });
        }
        Task.WaitAll(tasks);
        Assert.Equal((NumItems * (NumItems + 1L)) / 2, readTotal);
    }
    [Fact]
    public async Task WaitToWriteAsync_AfterFullThenRead_ReturnsTrue()
    {
        // Both waiters queued against a full channel complete once space frees up.
        var c = Channel.CreateBounded<int>(1);
        Assert.True(c.Writer.TryWrite(1));
        Task<bool> write1 = c.Writer.WaitToWriteAsync();
        Assert.False(write1.IsCompleted);
        Task<bool> write2 = c.Writer.WaitToWriteAsync();
        Assert.False(write2.IsCompleted);
        Assert.Equal(1, await c.Reader.ReadAsync());
        Assert.True(await write1);
        Assert.True(await write2);
    }
    [Theory]
    [InlineData(false)]
    [InlineData(true)]
    public void AllowSynchronousContinuations_WaitToReadAsync_ContinuationsInvokedAccordingToSetting(bool allowSynchronousContinuations)
    {
        // When synchronous continuations are allowed, the continuation runs on the
        // writing thread; otherwise it must run elsewhere.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(1) { AllowSynchronousContinuations = allowSynchronousContinuations });
        int expectedId = Environment.CurrentManagedThreadId;
        Task r = c.Reader.WaitToReadAsync().ContinueWith(_ =>
        {
            Assert.Equal(allowSynchronousContinuations, expectedId == Environment.CurrentManagedThreadId);
        }, CancellationToken.None, TaskContinuationOptions.ExecuteSynchronously, TaskScheduler.Default);
        Assert.Equal(TaskStatus.RanToCompletion, c.Writer.WriteAsync(42).Status);
        ((IAsyncResult)r).AsyncWaitHandle.WaitOne(); // avoid inlining the continuation
        r.GetAwaiter().GetResult();
    }
    [Theory]
    [InlineData(false)]
    [InlineData(true)]
    public void AllowSynchronousContinuations_CompletionTask_ContinuationsInvokedAccordingToSetting(bool allowSynchronousContinuations)
    {
        // Same check as above, but for continuations on the reader's Completion task.
        var c = Channel.CreateBounded<int>(new BoundedChannelOptions(1) { AllowSynchronousContinuations = allowSynchronousContinuations });
        int expectedId = Environment.CurrentManagedThreadId;
        Task r = c.Reader.Completion.ContinueWith(_ =>
        {
            Assert.Equal(allowSynchronousContinuations, expectedId == Environment.CurrentManagedThreadId);
        }, CancellationToken.None, TaskContinuationOptions.ExecuteSynchronously, TaskScheduler.Default);
        Assert.True(c.Writer.TryComplete());
        ((IAsyncResult)r).AsyncWaitHandle.WaitOne(); // avoid inlining the continuation
        r.GetAwaiter().GetResult();
    }
    [Fact]
    public void TryWrite_NoBlockedReaders_WaitingReader_WaiterNotifified()
    {
        // A WaitToReadAsync waiter completes synchronously once data is written.
        Channel<int> c = CreateChannel();
        Task<bool> r = c.Reader.WaitToReadAsync();
        Assert.True(c.Writer.TryWrite(42));
        AssertSynchronousTrue(r);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Test.Cryptography;
using Xunit;
namespace System.Security.Cryptography.Dsa.Tests
{
public partial class DSASignVerify
{
[Fact]
public static void Fips186_2_1()
{
    // Known-answer test using the NIST CAVP FIPS 186-2 DSA vectors (SHA-1):
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-2dsatestvectors.zip
    // SigGen.txt, first case
    // (p, q, g) are the domain parameters; x is the private key, y the public key.
    const string p =
        "f5422387a66acb198173f466e987ca692fd2337af0ed1ec7aa5f2e2088d0742c" +
        "2d41ded76317001ca4044115f00aff09ad59d49b07c35ec2b25088be17ac391a" +
        "f17575d52c232153df94f0023a0a17ca29d8548dfa08c5f034bad0bd4511ffae" +
        "6b3c504c6f728d31d1e92aad9e88382a8a42b050441a747bb71dd84cb01d9ee7";
    const string q = "f4a9d1750b46e27c3af7587c5d019ffc99f11f25";
    const string g =
        "7400ad91528a6c9e891f3f5fce7496ef4d01bf91a979736547049406ab4a2d2f" +
        "e49fa3730cfb86a5af3ff21f5022f07e4ee0c15a88b8bd7b5f0bf8dea3863afb" +
        "4f1cac16aba490d93f44be79c1cd01ce2e12dfdb75c593d64e5bf97e839526db" +
        "cc0288cd3beb2fd7941f67d138faa88f9de90901efdc752569a4d1afbd193846";
    const string x = "485e8ad4a4e49a85e0397af0bb115df175ead894";
    const string y =
        "ec86482ea1c463198d074bad01790283fb8866e53ab5e821219f0f4a25e7d047" +
        "3f9cbd2ab7348625d322ea7f09ec9a15bbcc5a9ff1f3692392768970e9e86554" +
        "5d3aa2934148f6d0a6ec410a16d5059c58ce428912f532cbc8f9bbbcf3657367" +
        "d159212c11afd856587b1b092ab1bdae3c443661e6ba27078d03eb31e63e5922";
    const string msg =
        "96452f7f94b9cc004931df8f8118be7e56f16a1502e00934f16c96391b83d724" +
        "90be8ffa54e7f6676eb966a63ce657a6095f8d65e1cf90a0a4685daf5ae35bab" +
        "c6c290d13ed9152bba0cc76d2a5a401d0d1b06f63f85018f12753338a16da324" +
        "61d89acef996129554b46ca9f47b612b89ad3b90c20b4547631a809b982797da";
    const string r = "ed4715b8d218d31b7adf0bea5165777a7414315e";
    const string s = "29c70a036aa83eb0742f1fa3f56ccead0fc0f61d";
    // (r, s) is the expected signature over msg under the key above.
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA1);
}
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L1024_N160_SHA256_1()
{
    // Known-answer test using the NIST CAVP FIPS 186-3 DSA vectors:
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=1024, N=160, SHA-256], first case
    const string p =
        "cba13e533637c37c0e80d9fcd052c1e41a88ac325c4ebe13b7170088d54eef48" +
        "81f3d35eae47c210385a8485d2423a64da3ffda63a26f92cf5a304f39260384a" +
        "9b7759d8ac1adc81d3f8bfc5e6cb10efb4e0f75867f4e848d1a338586dd0648f" +
        "eeb163647ffe7176174370540ee8a8f588da8cc143d939f70b114a7f981b8483";
    const string q = "95031b8aa71f29d525b773ef8b7c6701ad8a5d99";
    const string g =
        "45bcaa443d4cd1602d27aaf84126edc73bd773de6ece15e97e7fef46f13072b7" +
        "adcaf7b0053cf4706944df8c4568f26c997ee7753000fbe477a37766a4e970ff" +
        "40008eb900b9de4b5f9ae06e06db6106e78711f3a67feca74dd5bddcdf675ae4" +
        "014ee9489a42917fbee3bb9f2a24df67512c1c35c97bfbf2308eaacd28368c5c";
    const string x = "2eac4f4196fedb3e651b3b00040184cfd6da2ab4";
    const string y =
        "4cd6178637d0f0de1488515c3b12e203a3c0ca652f2fe30d088dc7278a87affa" +
        "634a727a721932d671994a958a0f89223c286c3a9b10a96560542e2626b72e0c" +
        "d28e5133fb57dc238b7fab2de2a49863ecf998751861ae668bf7cad136e6933f" +
        "57dfdba544e3147ce0e7370fa6e8ff1de690c51b4aeedf0485183889205591e8";
    const string msg =
        "812172f09cbae62517804885754125fc6066e9a902f9db2041eeddd7e8da67e4" +
        "a2e65d0029c45ecacea6002f9540eb1004c883a8f900fd84a98b5c449ac49c56" +
        "f3a91d8bed3f08f427935fbe437ce46f75cd666a0707265c61a096698dc2f36b" +
        "28c65ec7b6e475c8b67ddfb444b2ee6a984e9d6d15233e25e44bd8d7924d129d";
    const string r = "76683a085d6742eadf95a61af75f881276cfd26a";
    const string s = "3b9da7f9926eaaad0bebd4845c67fcdb64d12453";
    // (r, s) is the expected signature over msg under the key above.
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA256);
}
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L1024_N160_SHA384_1()
{
    // Known-answer test using the NIST CAVP FIPS 186-3 DSA vectors:
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=1024, N=160, SHA-384], first case
    const string p =
        "f24a4afc72c7e373a3c30962332fe5405c45930963909418c30792aaf135ddea" +
        "561e94f24726716b75a18828982e4ce44c1fddcb746487b6b77a9a5a17f868ab" +
        "50cd621b5bc9da470880b287d7398190a42a5ee22ed8d1ff147e2019810c8298" +
        "ed68e1ca69d41d555f249e649fb1725ddb075c17b37beff467fdd1609243373f";
    const string q = "da065a078ddb56ee5d2ad06cafab20820d2c4755";
    const string g =
        "47b5591b79043e4e03ca78a0e277c9a21e2a6b543bf4f044104cd9ac93eff8e1" +
        "01bb6031efc8c596d5d2f92e3a3d0f1f74702dd54f77d3cd46c04dee7a5de9f0" +
        "0ad317691fddcefe4a220a2651acae7fcedda92bfcca855db6705e8d864f8192" +
        "bf6bf860c00f08ad6493ecc1872e0028d5c86d44505db57422515c3825a6f78a";
    const string x = "649820168eb594f59cd9b28b9aefe8cc106a6c4f";
    const string y =
        "43a27b740f422cb2dc3eaa232315883a2f6a22927f997d024f5a638b507b17d3" +
        "b1cbd3ec691cc674470960a0146efdecb95bb5fe249749e3c806cd5cc3e7f7ba" +
        "b845dadbe1f50b3366fb827a942ce6246dda7bd2c13e1b4a926c0c82c8846395" +
        "52d9d46036f9a4bc2a9e51c2d76e3074d1f53a63224c4279e0fa460474d4ffde";
    const string msg =
        "b0dbbf4a421ba5c5b0e52f09629801c113258c252f29898c3354706e39ec5824" +
        "be523d0e2f8cfe022cd61165301274d5d621a59755f50404d8b802371ce616de" +
        "fa962e3636ae934ec34e4bcf77a16c7eff8cf4cc08a0f4849d6ad4307e9f8df8" +
        "3f24ad16ab46d1a61d2d7d4e21681eb2ae281a1a5f9bca8573a3f5281d308a5a";
    const string r = "77c4d99f62b3ad7dd1fe6498db45a5da73ce7bde";
    const string s = "23871a002ae503fdabaa6a84dcc8f38769737f01";
    // (r, s) is the expected signature over msg under the key above.
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA384);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=1024, N=160 domain with SHA-384.
/// Fourth CAVP case, chosen because the s value has a leading zero byte —
/// exercises correct fixed-width handling of the signature halves.
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L1024_N160_SHA384_4()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=1024, N=160, SHA-384], fourth case (s=00....)
    const string p =
        "f24a4afc72c7e373a3c30962332fe5405c45930963909418c30792aaf135ddea" +
        "561e94f24726716b75a18828982e4ce44c1fddcb746487b6b77a9a5a17f868ab" +
        "50cd621b5bc9da470880b287d7398190a42a5ee22ed8d1ff147e2019810c8298" +
        "ed68e1ca69d41d555f249e649fb1725ddb075c17b37beff467fdd1609243373f";
    const string q = "da065a078ddb56ee5d2ad06cafab20820d2c4755";
    const string g =
        "47b5591b79043e4e03ca78a0e277c9a21e2a6b543bf4f044104cd9ac93eff8e1" +
        "01bb6031efc8c596d5d2f92e3a3d0f1f74702dd54f77d3cd46c04dee7a5de9f0" +
        "0ad317691fddcefe4a220a2651acae7fcedda92bfcca855db6705e8d864f8192" +
        "bf6bf860c00f08ad6493ecc1872e0028d5c86d44505db57422515c3825a6f78a";
    const string x = "bb318987a043158b97fdbbc2707471a38316ce58";
    const string y =
        "c9003995b014afad66de25fc0a2210b1f1b22d275da51a27faacda042fd76456" +
        "86ec8b1b62d58d8af2e1063ab8e146d11e3a07710bc4521228f35f5173443bbf" +
        "d089f642cd16641c57199c9ab6e0d9b0c01931c2d162f5e20dbe7365c93adc62" +
        "fd5a461bea5956d7c11ac67647bedcead5bb311224a496aa155992aee74e45ad";
    const string msg =
        "36a25659a7f1de66b4721b48855cdebe98fe6113241b7beddc2691493ed0add0" +
        "b6a9fbbf9fb870a1bc68a901b932f47ded532f93493b1c081408165807b38efc" +
        "e7acc7dbc216bef74ed59e20973326553cc83779f742e3f469a7278eeb1537dd" +
        "71cd8f15114d84693c2e6bbf62814a08e82ba71539f4cb4bf08c869d7db9dea9";
    const string r = "17cc53b5b9558cc41df946055b8d7e1971be86d7";
    // Leading "00" in s is the interesting part of this vector.
    const string s = "003c21503971c03b5ef4edc804d2f7d33f9ea9cc";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA384);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=1024, N=160 domain with SHA-512.
/// p/q/g are the DSA domain parameters, x/y the private/public key pair, and
/// r/s the expected signature over msg (all hex-encoded CAVP values).
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L1024_N160_SHA512_1()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=1024, N=160, SHA-512], first case
    const string p =
        "88d968e9602ecbda6d86f7c970a3ffbeb1da962f28c0afb9270ef05bc330ca98" +
        "c3adf83c072feb05fb2e293b5065bbb0cbcc930c24d8d07869deaecd92a2604c" +
        "0f5dd35c5b431fda6a222c52c3562bf7571c710209be8b3b858818788725fe81" +
        "12b7d6bc82e0ff1cbbf5d6fe94690af2b510e41ad8207dc2c02fb9fa5cefaab5";
    const string q = "a665689b9e5b9ce82fd1676006cf4cf67ecc56b7";
    const string g =
        "267e282857417752113fba3fca7155b5ce89e7c8a33c1a29122e2b720965fc04" +
        "245267ff87fc67a5730fe5b308013aa3266990fbb398185a87e055b443a868ce" +
        "0ce13ae6aee330b9d25d3bbb362665c5881daf0c5aa75e9d4a82e8f04c91a9ad" +
        "294822e33978ab0c13fadc45831f9d37da4efa0fc2c5eb01371fa85b7ddb1f82";
    const string x = "07ce8862e64b7f6c7482046dbfc93907123e5214";
    const string y =
        "60f5341e48ca7a3bc5decee61211dd2727cd8e2fc7635f3aabea262366e458f5" +
        "c51c311afda916cb0dcdc5d5a5729f573a532b594743199bcfa7454903e74b33" +
        "ddfe65896306cec20ebd8427682fa501ee06bc4c5d1425cbe31828ba008b19c9" +
        "da68136cf71840b205919e783a628a5a57cf91cf569b2854ffef7a096eda96c9";
    const string msg =
        "3a84a5314e90fd33bb7cd6ca68720c69058da1da1b359046ae8922cac8afc5e0" +
        "25771635fb4735491521a728441b5cb087d60776ee0ecc2174a41985a82cf46d" +
        "8f8d8b274a0cc439b00971077c745f8cf701cf56bf9914cc57209b555dc87ca8" +
        "c13da063270c60fc2c988e692b75a7f2a669903b93d2e14e8efb6fb9f8694a78";
    const string r = "a53f1f8f20b8d3d4720f14a8bab5226b079d9953";
    const string s = "11f53f6a4e56b51f60e20d4957ae89e162aea616";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA512);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=1024, N=160 domain with SHA-512.
/// Fourth CAVP case, chosen because the r value has leading zero bytes —
/// exercises correct fixed-width handling of the signature halves.
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L1024_N160_SHA512_4()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=1024, N=160, SHA-512], fourth case (r=000....)
    const string p =
        "88d968e9602ecbda6d86f7c970a3ffbeb1da962f28c0afb9270ef05bc330ca98" +
        "c3adf83c072feb05fb2e293b5065bbb0cbcc930c24d8d07869deaecd92a2604c" +
        "0f5dd35c5b431fda6a222c52c3562bf7571c710209be8b3b858818788725fe81" +
        "12b7d6bc82e0ff1cbbf5d6fe94690af2b510e41ad8207dc2c02fb9fa5cefaab5";
    const string q = "a665689b9e5b9ce82fd1676006cf4cf67ecc56b7";
    const string g =
        "267e282857417752113fba3fca7155b5ce89e7c8a33c1a29122e2b720965fc04" +
        "245267ff87fc67a5730fe5b308013aa3266990fbb398185a87e055b443a868ce" +
        "0ce13ae6aee330b9d25d3bbb362665c5881daf0c5aa75e9d4a82e8f04c91a9ad" +
        "294822e33978ab0c13fadc45831f9d37da4efa0fc2c5eb01371fa85b7ddb1f82";
    const string msg =
        "16250c74ccb40443625a37c4b7e2b3615255768241f254a506fa819efbb8698a" +
        "de38fc75946b3af09055578f28a181827dda311bd4038fd47f6d86cceb1bbbef" +
        "2df20bf595a0ad77afd39c84877434ade3812f05ec541e0403abadc778d116fd" +
        "077c95c6ec0f47241f4db813f31986b7504c1cd9ddb496ac6ed22b45e7df72cc";
    const string x = "3fee04cc08624f3a7f34c538d87692209dd74797";
    const string y =
        "6e8c85150c5c9ca6dcb04806671db1b672fc1087c995311d7087ad12ab18f2c1" +
        "4b612cea13bf79518d2b570b8b696b3e4efcd0fda522a253bbcb7dbb711d984c" +
        "598fa201c21a8a9e2774bc15020920cd8c27c2875c779b08ef95093caac2c9ce" +
        "a37ec498c23dd24b684abcb467ec952a202cbd2df7960c1ef929cc2b611ca6c8";
    // Leading "000" digits in r are the interesting part of this vector.
    const string r = "00018f0fdc16d914971c8f310f1af7796c6f662a";
    const string s = "62b7aecc75cbc6db00dd0c24339f7bdb5ae966a5";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA512);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=2048, N=256 domain with SHA-256.
/// p/q/g are the DSA domain parameters, x/y the private/public key pair, and
/// r/s the expected signature over msg (all hex-encoded CAVP values).
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L2048_N256_SHA256_1()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=2048, N=256, SHA-256], first case
    const string p =
        "a8adb6c0b4cf9588012e5deff1a871d383e0e2a85b5e8e03d814fe13a059705e" +
        "663230a377bf7323a8fa117100200bfd5adf857393b0bbd67906c081e585410e" +
        "38480ead51684dac3a38f7b64c9eb109f19739a4517cd7d5d6291e8af20a3fbf" +
        "17336c7bf80ee718ee087e322ee41047dabefbcc34d10b66b644ddb3160a28c0" +
        "639563d71993a26543eadb7718f317bf5d9577a6156561b082a10029cd44012b" +
        "18de6844509fe058ba87980792285f2750969fe89c2cd6498db3545638d5379d" +
        "125dccf64e06c1af33a6190841d223da1513333a7c9d78462abaab31b9f96d5f" +
        "34445ceb6309f2f6d2c8dde06441e87980d303ef9a1ff007e8be2f0be06cc15f";
    const string q =
        "e71f8567447f42e75f5ef85ca20fe557ab0343d37ed09edc3f6e68604d6b9dfb";
    const string g =
        "5ba24de9607b8998e66ce6c4f812a314c6935842f7ab54cd82b19fa104abfb5d" +
        "84579a623b2574b37d22ccae9b3e415e48f5c0f9bcbdff8071d63b9bb956e547" +
        "af3a8df99e5d3061979652ff96b765cb3ee493643544c75dbe5bb39834531952" +
        "a0fb4b0378b3fcbb4c8b5800a5330392a2a04e700bb6ed7e0b85795ea38b1b96" +
        "2741b3f33b9dde2f4ec1354f09e2eb78e95f037a5804b6171659f88715ce1a9b" +
        "0cc90c27f35ef2f10ff0c7c7a2bb0154d9b8ebe76a3d764aa879af372f4240de" +
        "8347937e5a90cec9f41ff2f26b8da9a94a225d1a913717d73f10397d2183f1ba" +
        "3b7b45a68f1ff1893caf69a827802f7b6a48d51da6fbefb64fd9a6c5b75c4561";
    const string x =
        "446969025446247f84fdea74d02d7dd13672b2deb7c085be11111441955a377b";
    const string y =
        "5a55dceddd1134ee5f11ed85deb4d634a3643f5f36dc3a70689256469a0b651a" +
        "d22880f14ab85719434f9c0e407e60ea420e2a0cd29422c4899c416359dbb1e5" +
        "92456f2b3cce233259c117542fd05f31ea25b015d9121c890b90e0bad033be13" +
        "68d229985aac7226d1c8c2eab325ef3b2cd59d3b9f7de7dbc94af1a9339eb430" +
        "ca36c26c46ecfa6c5481711496f624e188ad7540ef5df26f8efacb820bd17a1f" +
        "618acb50c9bc197d4cb7ccac45d824a3bf795c234b556b06aeb9291734532520" +
        "84003f69fe98045fe74002ba658f93475622f76791d9b2623d1b5fff2cc16844" +
        "746efd2d30a6a8134bfc4c8cc80a46107901fb973c28fc553130f3286c1489da";
    const string msg =
        "4e3a28bcf90d1d2e75f075d9fbe55b36c5529b17bc3a9ccaba6935c9e2054825" +
        "5b3dfae0f91db030c12f2c344b3a29c4151c5b209f5e319fdf1c23b190f64f1f" +
        "e5b330cb7c8fa952f9d90f13aff1cb11d63181da9efc6f7e15bfed4862d1a62c" +
        "7dcf3ba8bf1ff304b102b1ec3f1497dddf09712cf323f5610a9d10c3d9132659";
    const string r =
        "633055e055f237c38999d81c397848c38cce80a55b649d9e7905c298e2a51447";
    const string s =
        "2bbf68317660ec1e4b154915027b0bc00ee19cfc0bf75d01930504f2ce10a8b0";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA256);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=2048, N=256 domain with SHA-384.
/// p/q/g are the DSA domain parameters, x/y the private/public key pair, and
/// r/s the expected signature over msg (all hex-encoded CAVP values).
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L2048_N256_SHA384_1()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=2048, N=256, SHA-384], first case
    const string p =
        "a6167c16fff74e29342b8586aed3cd896f7b1635a2286ff16fdff41a06317ca6" +
        "b05ca2ba7c060ad6db1561621ccb0c40b86a03619bfff32e204cbd90b79dcb5f" +
        "86ebb493e3bd1988d8097fa23fa4d78fb3cddcb00c466423d8fa719873c37645" +
        "fe4eecc57171bbedfe56fa9474c96385b8ba378c79972d7aaae69a2ba64cde8e" +
        "5654f0f7b74550cd3447e7a472a33b4037db468dde31c348aa25e82b7fc41b83" +
        "7f7fc226a6103966ecd8f9d14c2d3149556d43829f137451b8d20f8520b0ce8e" +
        "3d705f74d0a57ea872c2bdee9714e0b63906cddfdc28b6777d19325000f8ed52" +
        "78ec5d912d102109319cba3b6469d4672909b4f0dbeec0bbb634b551ba0cf213";
    const string q =
        "8427529044d214c07574f7b359c2e01c23fd97701b328ac8c1385b81c5373895";
    const string g =
        "6fc232415c31200cf523af3483f8e26ace808d2f1c6a8b863ab042cc7f6b7144" +
        "b2d39472c3cb4c7681d0732843503d8f858cbe476e6740324aaa295950105978" +
        "c335069b919ff9a6ff4b410581b80712fe5d3e04ddb4dfd26d5e7fbca2b0c52d" +
        "8d404343d57b2f9b2a26daa7ece30ceab9e1789f9751aaa9387049965af32650" +
        "c6ca5b374a5ae70b3f98e053f51857d6bbb17a670e6eaaf89844d641e1e13d5a" +
        "1b24d053dc6b8fd101c624786951927e426310aba9498a0042b3dc7bbc59d705" +
        "f80d9b807de415f7e94c5cf9d789992d3bb8336d1d808cb86b56dde09d934bb5" +
        "27033922de14bf307376ab7d22fbcd616f9eda479ab214a17850bdd0802a871c";
    const string x =
        "459eb1588e9f7dd4f286677a7415cb25a1b46e7a7cfadc8a45100383e20da69d";
    const string y =
        "5ca7151bca0e457bbc46f59f71d81ab16688dc0eb7e4d17b166c3326c5b12c5b" +
        "debb3613224d1a754023c50b83cb5ecc139096cef28933b3b12ca31038e40893" +
        "83597c59cc27b902be5da62cae7da5f4af90e9410ed1604082e2e38e25eb0b78" +
        "dfac0aeb2ad3b19dc23539d2bcd755db1cc6c9805a7dd109e1c98667a5b9d52b" +
        "21c2772121b8d0d2b246e5fd3da80728e85bbf0d7067d1c6baa64394a29e7fcb" +
        "f80842bd4ab02b35d83f59805a104e0bd69d0079a065f59e3e6f21573a00da99" +
        "0b72ea537fa98caaa0a58800a7e7a0623e263d4fca65ebb8eded46efdfe7db92" +
        "c9ebd38062d8f12534f015b186186ee2361d62c24e4f22b3e95da0f9062ce04d";
    const string msg =
        "8c78cffdcf25d8230b835b30512684c9b252115870b603d1b4ba2eb5d35b33f2" +
        "6d96b684126ec34fff67dfe5c8c856acfe3a9ff45ae11d415f30449bcdc3bf9a" +
        "9fb5a7e48afeaba6d0b0fc9bce0197eb2bf7a840249d4e550c5a25dc1c71370e" +
        "67933edad2362fae6fad1efba5c08dc1931ca2841b44b78c0c63a1665ffac860";
    const string r =
        "4fd8f25c059030027381d4167c3174b6be0088c15f0a573d7ebd05960f5a1eb2";
    const string s =
        "5f56869cee7bf64fec5d5d6ea15bb1fa1169003a87eccc1621b90a1b892226f2";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA384);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=2048, N=256 domain with SHA-384.
/// Third CAVP case, chosen because the r value has a leading zero byte —
/// exercises correct fixed-width handling of the signature halves.
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L2048_N256_SHA384_3()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=2048, N=256, SHA-384], third case (r=00...)
    const string p =
        "a6167c16fff74e29342b8586aed3cd896f7b1635a2286ff16fdff41a06317ca6" +
        "b05ca2ba7c060ad6db1561621ccb0c40b86a03619bfff32e204cbd90b79dcb5f" +
        "86ebb493e3bd1988d8097fa23fa4d78fb3cddcb00c466423d8fa719873c37645" +
        "fe4eecc57171bbedfe56fa9474c96385b8ba378c79972d7aaae69a2ba64cde8e" +
        "5654f0f7b74550cd3447e7a472a33b4037db468dde31c348aa25e82b7fc41b83" +
        "7f7fc226a6103966ecd8f9d14c2d3149556d43829f137451b8d20f8520b0ce8e" +
        "3d705f74d0a57ea872c2bdee9714e0b63906cddfdc28b6777d19325000f8ed52" +
        "78ec5d912d102109319cba3b6469d4672909b4f0dbeec0bbb634b551ba0cf213";
    const string q =
        "8427529044d214c07574f7b359c2e01c23fd97701b328ac8c1385b81c5373895";
    const string g =
        "6fc232415c31200cf523af3483f8e26ace808d2f1c6a8b863ab042cc7f6b7144" +
        "b2d39472c3cb4c7681d0732843503d8f858cbe476e6740324aaa295950105978" +
        "c335069b919ff9a6ff4b410581b80712fe5d3e04ddb4dfd26d5e7fbca2b0c52d" +
        "8d404343d57b2f9b2a26daa7ece30ceab9e1789f9751aaa9387049965af32650" +
        "c6ca5b374a5ae70b3f98e053f51857d6bbb17a670e6eaaf89844d641e1e13d5a" +
        "1b24d053dc6b8fd101c624786951927e426310aba9498a0042b3dc7bbc59d705" +
        "f80d9b807de415f7e94c5cf9d789992d3bb8336d1d808cb86b56dde09d934bb5" +
        "27033922de14bf307376ab7d22fbcd616f9eda479ab214a17850bdd0802a871c";
    const string x =
        "6ba8f6638316dd804a24b7390f31023cd8b26e9325be90941b90d5fd3155115a";
    const string y =
        "10e6f50fd6dbb1ca16f2df5132a4a4eabc51da4a58fe619b2225d7adab0cea3a" +
        "fc2db90b158b6231c8b0774e0f0d9074517f336ca053ae115671aee3c1de0f85" +
        "728cff99deebc07ffc9a63631989a9277e64c54d9c25a7e739ae92f706ee237b" +
        "98b8700a9df0de12d2124e2cfd81d9ec7b0469ee3a718ab15305de099d9a2f8c" +
        "ecb79527d016447c8f6fe4905c3718ce5234d13bf4edd7169b9d0db9a6b0fc77" +
        "b7d53bdd32b07dc15bc829620db085114581608ac9e0937752095951d289855d" +
        "0bcc9d421b945cc4f37f80b0cb25f1ffee9c61e567f49d21f889ecbc3f4ed337" +
        "bca666ba3ba684874c883fe228ac44952a8513e12d9f0c4ed43c9b60f35225b2";
    const string msg =
        "4f1c0053984ab55a491f3618db1be2379174a4385974825fcbe584e2b6d0702a" +
        "bb8298dd9184eef1740b90a5eae850e9452b4e4ab219e187860f0fb4ad2be390" +
        "ef2ba7d76cdedcaf10aeaf4f25e497b4da951375b687a8d67012d3f99c7b5ca8" +
        "2e9bd0630dffcd635ecd8209cddb872da5bf4736309783345a35376b4fce4b91";
    // Leading "00" in r is the interesting part of this vector.
    const string r =
        "006b759fb718c34f1a6e518f834053b9f1825dd3eb8d719465c7bcc830322f4b";
    const string s =
        "47fa59852c9ae5e181381e3457a33b25420011d6f911efa90f3eaced1dee1329";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA384);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=3072, N=256 domain with SHA-256.
/// p/q/g are the DSA domain parameters, x/y the private/public key pair, and
/// r/s the expected signature over msg (all hex-encoded CAVP values).
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L3072_N256_SHA256_1()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=3072, N=256, SHA-256], first case
    const string p =
        "c7b86d7044218e367453d210e76433e4e27a983db1c560bb9755a8fb7d819912" +
        "c56cfe002ab1ff3f72165b943c0b28ed46039a07de507d7a29f738603decd127" +
        "0380a41f971f2592661a64ba2f351d9a69e51a888a05156b7fe1563c4b77ee93" +
        "a44949138438a2ab8bdcfc49b4e78d1cde766e54984760057d76cd740c94a4dd" +
        "25a46aa77b18e9d707d6738497d4eac364f4792d9766a16a0e234807e96b8c64" +
        "d404bbdb876e39b5799ef53fe6cb9bab62ef19fdcc2bdd905beda13b9ef7ac35" +
        "f1f557cb0dc458c019e2bc19a9f5dfc1e4eca9e6d466564124304a31f038605a" +
        "3e342da01be1c2b545610edd2c1397a3c8396588c6329efeb4e165af5b368a39" +
        "a88e4888e39f40bb3de4eb1416672f999fead37aef1ca9643ff32cdbc0fcebe6" +
        "28d7e46d281a989d43dd21432151af68be3f6d56acfbdb6c97d87fcb5e6291bf" +
        "8b4ee1275ae0eb4383cc753903c8d29f4adb6a547e405decdff288c5f6c7aa30" +
        "dcb12f84d392493a70933317c0f5e6552601fae18f17e6e5bb6bf396d32d8ab9";
    const string q =
        "876fa09e1dc62b236ce1c3155ba48b0ccfda29f3ac5a97f7ffa1bd87b68d2a4b";
    const string g =
        "110afebb12c7f862b6de03d47fdbc3326e0d4d31b12a8ca95b2dee2123bcc667" +
        "d4f72c1e7209767d2721f95fbd9a4d03236d54174fbfaff2c4ff7deae4738b20" +
        "d9f37bf0a1134c288b420af0b5792e47a92513c0413f346a4edbab2c45bdca13" +
        "f5341c2b55b8ba54932b9217b5a859e553f14bb8c120fbb9d99909dff5ea68e1" +
        "4b379964fd3f3861e5ba5cc970c4a180eef54428703961021e7bd68cb637927b" +
        "8cbee6805fa27285bfee4d1ef70e02c1a18a7cd78bef1dd9cdad45dde9cd6907" +
        "55050fc4662937ee1d6f4db12807ccc95bc435f11b71e7086048b1dab5913c60" +
        "55012de82e43a4e50cf93feff5dcab814abc224c5e0025bd868c3fc592041bba" +
        "04747c10af513fc36e4d91c63ee5253422cf4063398d77c52fcb011427cbfcfa" +
        "67b1b2c2d1aa4a3da72645cb1c767036054e2f31f88665a54461c885fb3219d5" +
        "ad8748a01158f6c7c0df5a8c908ba8c3e536822428886c7b500bbc15b49df746" +
        "b9de5a78fe3b4f6991d0110c3cbff458039dc36261cf46af4bc2515368f4abb7";
    const string x =
        "3470832055dade94e14cd8777171d18e5d06f66aeff4c61471e4eba74ee56164";
    const string y =
        "456a105c713566234838bc070b8a751a0b57767cb75e99114a1a46641e11da1f" +
        "a9f22914d808ad7148612c1ea55d25301781e9ae0c9ae36a69d87ba039ec7cd8" +
        "64c3ad094873e6e56709fd10d966853d611b1cff15d37fdee424506c184d62c7" +
        "033358be78c2250943b6f6d043d63b317de56e5ad8d1fd97dd355abe96452f8e" +
        "435485fb3b907b51900aa3f24418df50b4fcdafbf6137548c39373b8bc4ba3da" +
        "bb4746ebd17b87fcd6a2f197c107b18ec5b465e6e4cb430d9c0ce78da5988441" +
        "054a370792b730da9aba41a3169af26176f74e6f7c0c9c9b55b62bbe7ce38d46" +
        "95d48157e660c2acb63f482f55418150e5fee43ace84c540c3ba7662ae80835c" +
        "1a2d51890ea96ba206427c41ef8c38aa07d2a365e7e58380d8f4782e22ac2101" +
        "af732ee22758337b253637838e16f50f56d313d07981880d685557f7d79a6db8" +
        "23c61f1bb3dbc5d50421a4843a6f29690e78aa0f0cff304231818b81fc4a243f" +
        "c00f09a54c466d6a8c73d32a55e1abd5ec8b4e1afa32a79b01df85a81f3f5cfe";
    const string msg =
        "cb06e02234263c22b80e832d6dc5a1bee5ea8af3bc2da752441c04027f176158" +
        "bfe68372bd67f84d489c0d49b07d4025962976be60437be1a2d01d3be0992afa" +
        "5abe0980e26a9da4ae72f827b423665195cc4eed6fe85c335b32d9c03c945a86" +
        "e7fa99373f0a30c6eca938b3afb6dff67adb8bece6f8cfec4b6a12ea281e2323";
    const string r =
        "53bae6c6f336e2eb311c1e92d95fc449a929444ef81ec4279660b200d59433de";
    const string s =
        "49f3a74e953e77a7941af3aefeef4ed499be209976a0edb3fa5e7cb961b0c112";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA256);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=3072, N=256 domain with SHA-384.
/// p/q/g are the DSA domain parameters, x/y the private/public key pair, and
/// r/s the expected signature over msg (all hex-encoded CAVP values).
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L3072_N256_SHA384_1()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=3072, N=256, SHA-384], first case
    const string p =
        "a410d23ed9ad9964d3e401cb9317a25213f75712acbc5c12191abf3f1c0e723e" +
        "2333b49eb1f95b0f9748d952f04a5ae358859d384403ce364aa3f58dd9769909" +
        "b45048548c55872a6afbb3b15c54882f96c20df1b2df164f0bac849ca17ad2df" +
        "63abd75c881922e79a5009f00b7d631622e90e7fa4e980618575e1d6bd1a72d5" +
        "b6a50f4f6a68b793937c4af95fc11541759a1736577d9448b87792dff0723241" +
        "5512e933755e12250d466e9cc8df150727d747e51fea7964158326b1365d580c" +
        "b190f4518291598221fdf36c6305c8b8a8ed05663dd7b006e945f592abbecae4" +
        "60f77c71b6ec649d3fd5394202ed7bbbd040f7b8fd57cb06a99be254fa25d71a" +
        "3760734046c2a0db383e02397913ae67ce65870d9f6c6f67a9d00497be1d763b" +
        "21937cf9cbf9a24ef97bbcaa07916f8894e5b7fb03258821ac46140965b23c54" +
        "09ca49026efb2bf95bce025c4183a5f659bf6aaeef56d7933bb29697d7d54134" +
        "8c871fa01f869678b2e34506f6dc0a4c132b689a0ed27dc3c8d53702aa584877";
    const string q =
        "abc67417725cf28fc7640d5de43825f416ebfa80e191c42ee886303338f56045";
    const string g =
        "867d5fb72f5936d1a14ed3b60499662f3124686ef108c5b3da6663a0e86197ec" +
        "2cc4c9460193a74ff16028ac9441b0c7d27c2272d483ac7cd794d598416c4ff9" +
        "099a61679d417d478ce5dd974bf349a14575afe74a88b12dd5f6d1cbd3f91ddd" +
        "597ed68e79eba402613130c224b94ac28714a1f1c552475a5d29cfcdd8e08a6b" +
        "1d65661e28ef313514d1408f5abd3e06ebe3a7d814d1ede316bf495273ca1d57" +
        "4f42b482eea30db53466f454b51a175a0b89b3c05dda006e719a2e6371669080" +
        "d768cc038cdfb8098e9aad9b8d83d4b759f43ac9d22b353ed88a33723550150d" +
        "e0361b7a376f37b45d437f71cb711f2847de671ad1059516a1d45755224a15d3" +
        "7b4aeada3f58c69a136daef0636fe38e3752064afe598433e80089fda24b144a" +
        "462734bef8f77638845b00e59ce7fa4f1daf487a2cada11eaba72bb23e1df6b6" +
        "6a183edd226c440272dd9b06bec0e57f1a0822d2e00212064b6dba64562085f5" +
        "a75929afa5fe509e0b78e630aaf12f91e4980c9b0d6f7e059a2ea3e23479d930";
    const string x =
        "6d4c934391b7f6fb6e19e3141f8c0018ef5726118a11064358c7d35b37737377";
    const string y =
        "1f0a5c75e7985d6e70e4fbfda51a10b925f6accb600d7c6510db90ec367b93bb" +
        "069bd286e8f979b22ef0702f717a8755c18309c87dae3fe82cc3dc8f4b7aa3d5" +
        "f3876f4d4b3eb68bfe910c43076d6cd0d39fc88dde78f09480db55234e6c8ca5" +
        "9fe2700efec04feee6b4e8ee2413721858be7190dbe905f456edcab55b2dc291" +
        "6dc1e8731988d9ef8b619abcf8955aa960ef02b3f02a8dc649369222af50f133" +
        "8ed28d667f3f10cae2a3c28a3c1d08df639c81ada13c8fd198c6dae3d62a3fe9" +
        "f04c985c65f610c06cb8faea68edb80de6cf07a8e89c00218185a952b23572e3" +
        "4df07ce5b4261e5de427eb503ee1baf5992db6d438b47434c40c22657bc163e7" +
        "953fa33eff39dc2734607039aadd6ac27e4367131041f845ffa1a13f556bfba2" +
        "307a5c78f2ccf11298c762e08871968e48dc3d1569d09965cd09da43cf0309a1" +
        "6af1e20fee7da3dc21b364c4615cd5123fa5f9b23cfc4ffd9cfdcea670623840" +
        "b062d4648d2eba786ad3f7ae337a4284324ace236f9f7174fbf442b99043002f";
    const string msg =
        "ed9a64d3109ef8a9292956b946873ca4bd887ce624b81be81b82c69c67aaddf5" +
        "655f70fe4768114db2834c71787f858e5165da1a7fa961d855ad7e5bc4b7be31" +
        "b97dbe770798ef7966152b14b86ae35625a28aee5663b9ef3067cbdfbabd8719" +
        "7e5c842d3092eb88dca57c6c8ad4c00a19ddf2e1967b59bd06ccaef933bc28e7";
    const string r =
        "7695698a14755db4206e850b4f5f19c540b07d07e08aac591e20081646e6eedc";
    const string s =
        "3dae01154ecff7b19007a953f185f0663ef7f2537f0b15e04fb343c961f36de2";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA384);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=3072, N=256 domain with SHA-512.
/// p/q/g are the DSA domain parameters, x/y the private/public key pair, and
/// r/s the expected signature over msg (all hex-encoded CAVP values).
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L3072_N256_SHA512_1()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=3072, N=256, SHA-512], first case
    const string p =
        "c1d0a6d0b5ed615dee76ac5a60dd35ecb000a202063018b1ba0a06fe7a00f765" +
        "db1c59a680cecfe3ad41475badb5ad50b6147e2596b88d34656052aca79486ea" +
        "6f6ec90b23e363f3ab8cdc8b93b62a070e02688ea877843a4685c2ba6db111e9" +
        "addbd7ca4bce65bb10c9ceb69bf806e2ebd7e54edeb7f996a65c907b50efdf8e" +
        "575bae462a219c302fef2ae81d73cee75274625b5fc29c6d60c057ed9e7b0d46" +
        "ad2f57fe01f823230f31422722319ce0abf1f141f326c00fbc2be4cdb8944b6f" +
        "d050bd300bdb1c5f4da72537e553e01d51239c4d461860f1fb4fd8fa79f5d526" +
        "3ff62fed7008e2e0a2d36bf7b9062d0d75db226c3464b67ba24101b085f2c670" +
        "c0f87ae530d98ee60c5472f4aa15fb25041e19106354da06bc2b1d322d40ed97" +
        "b21fd1cdad3025c69da6ce9c7ddf3dcf1ea4d56577bfdec23071c1f05ee4077b" +
        "5391e9a404eaffe12d1ea62d06acd6bf19e91a158d2066b4cd20e4c4e52ffb1d" +
        "5204cd022bc7108f2c799fb468866ef1cb09bce09dfd49e4740ff8140497be61";
    const string q =
        "bf65441c987b7737385eadec158dd01614da6f15386248e59f3cddbefc8e9dd1";
    const string g =
        "c02ac85375fab80ba2a784b94e4d145b3be0f92090eba17bd12358cf3e03f437" +
        "9584f8742252f76b1ede3fc37281420e74a963e4c088796ff2bab8db6e9a4530" +
        "fc67d51f88b905ab43995aab46364cb40c1256f0466f3dbce36203ef228b35e9" +
        "0247e95e5115e831b126b628ee984f349911d30ffb9d613b50a84dfa1f042ba5" +
        "36b82d5101e711c629f9f2096dc834deec63b70f2a2315a6d27323b995aa20d3" +
        "d0737075186f5049af6f512a0c38a9da06817f4b619b94520edfac85c4a6e2e1" +
        "86225c95a04ec3c3422b8deb284e98d24b31465802008a097c25969e826c2baa" +
        "59d2cba33d6c1d9f3962330c1fcda7cfb18508fea7d0555e3a169daed353f3ee" +
        "6f4bb30244319161dff6438a37ca793b24bbb1b1bc2194fc6e6ef60278157899" +
        "cb03c5dd6fc91a836eb20a25c09945643d95f7bd50d206684d6ffc14d16d82d5" +
        "f781225bff908392a5793b803f9b70b4dfcb394f9ed81c18e391a09eb3f93a03" +
        "2d81ba670cabfd6f64aa5e3374cb7c2029f45200e4f0bfd820c8bd58dc5eeb34";
    const string x =
        "150b5c51ea6402276bc912322f0404f6d57ff7d32afcaa83b6dfde11abb48181";
    const string y =
        "6da54f2b0ddb4dcce2da1edfa16ba84953d8429ce60cd111a5c65edcf7ba5b8d" +
        "9387ab6881c24880b2afbdb437e9ed7ffb8e96beca7ea80d1d90f24d54611262" +
        "9df5c9e9661742cc872fdb3d409bc77b75b17c7e6cfff86261071c4b5c9f9898" +
        "be1e9e27349b933c34fb345685f8fc6c12470d124cecf51b5d5adbf5e7a2490f" +
        "8d67aac53a82ed6a2110686cf631c348bcbc4cf156f3a6980163e2feca72a45f" +
        "6b3d68c10e5a2283b470b7292674490383f75fa26ccf93c0e1c8d0628ca35f2f" +
        "3d9b6876505d118988957237a2fc8051cb47b410e8b7a619e73b1350a9f6a260" +
        "c5f16841e7c4db53d8eaa0b4708d62f95b2a72e2f04ca14647bca6b5e3ee707f" +
        "cdf758b925eb8d4e6ace4fc7443c9bc5819ff9e555be098aa055066828e21b81" +
        "8fedc3aac517a0ee8f9060bd86e0d4cce212ab6a3a243c5ec0274563353ca710" +
        "3af085e8f41be524fbb75cda88903907df94bfd69373e288949bd0626d85c139" +
        "8b3073a139d5c747d24afdae7a3e745437335d0ee993eef36a3041c912f7eb58";
    const string msg =
        "494180eed0951371bbaf0a850ef13679df49c1f13fe3770b6c13285bf3ad93dc" +
        "4ab018aab9139d74200808e9c55bf88300324cc697efeaa641d37f3acf72d8c9" +
        "7bff0182a35b940150c98a03ef41a3e1487440c923a988e53ca3ce883a2fb532" +
        "bb7441c122f1dc2f9d0b0bc07f26ba29a35cdf0da846a9d8eab405cbf8c8e77f";
    const string r =
        "a40a6c905654c55fc58e99c7d1a3feea2c5be64823d4086ce811f334cfdc448d";
    const string s =
        "6478050977ec585980454e0a2f26a03037b921ca588a78a4daff7e84d49a8a6c";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA512);
}
/// <summary>
/// FIPS 186-3 DSA known-answer verification: L=3072, N=256 domain with SHA-512.
/// Twelfth CAVP case, chosen because the public key y has a leading zero byte —
/// exercises correct fixed-width handling of the key material.
/// </summary>
[ConditionalFact(nameof(SupportsFips186_3))]
public static void Fips186_3_L3072_N256_SHA512_12()
{
    // http://csrc.nist.gov/groups/STM/cavp/documents/dss/186-3dsatestvectors.zip
    // SigGen.txt
    // [mod = L=3072, N=256, SHA-512], twelfth case (y=00...)
    const string p =
        "c1d0a6d0b5ed615dee76ac5a60dd35ecb000a202063018b1ba0a06fe7a00f765" +
        "db1c59a680cecfe3ad41475badb5ad50b6147e2596b88d34656052aca79486ea" +
        "6f6ec90b23e363f3ab8cdc8b93b62a070e02688ea877843a4685c2ba6db111e9" +
        "addbd7ca4bce65bb10c9ceb69bf806e2ebd7e54edeb7f996a65c907b50efdf8e" +
        "575bae462a219c302fef2ae81d73cee75274625b5fc29c6d60c057ed9e7b0d46" +
        "ad2f57fe01f823230f31422722319ce0abf1f141f326c00fbc2be4cdb8944b6f" +
        "d050bd300bdb1c5f4da72537e553e01d51239c4d461860f1fb4fd8fa79f5d526" +
        "3ff62fed7008e2e0a2d36bf7b9062d0d75db226c3464b67ba24101b085f2c670" +
        "c0f87ae530d98ee60c5472f4aa15fb25041e19106354da06bc2b1d322d40ed97" +
        "b21fd1cdad3025c69da6ce9c7ddf3dcf1ea4d56577bfdec23071c1f05ee4077b" +
        "5391e9a404eaffe12d1ea62d06acd6bf19e91a158d2066b4cd20e4c4e52ffb1d" +
        "5204cd022bc7108f2c799fb468866ef1cb09bce09dfd49e4740ff8140497be61";
    const string q =
        "bf65441c987b7737385eadec158dd01614da6f15386248e59f3cddbefc8e9dd1";
    const string g =
        "c02ac85375fab80ba2a784b94e4d145b3be0f92090eba17bd12358cf3e03f437" +
        "9584f8742252f76b1ede3fc37281420e74a963e4c088796ff2bab8db6e9a4530" +
        "fc67d51f88b905ab43995aab46364cb40c1256f0466f3dbce36203ef228b35e9" +
        "0247e95e5115e831b126b628ee984f349911d30ffb9d613b50a84dfa1f042ba5" +
        "36b82d5101e711c629f9f2096dc834deec63b70f2a2315a6d27323b995aa20d3" +
        "d0737075186f5049af6f512a0c38a9da06817f4b619b94520edfac85c4a6e2e1" +
        "86225c95a04ec3c3422b8deb284e98d24b31465802008a097c25969e826c2baa" +
        "59d2cba33d6c1d9f3962330c1fcda7cfb18508fea7d0555e3a169daed353f3ee" +
        "6f4bb30244319161dff6438a37ca793b24bbb1b1bc2194fc6e6ef60278157899" +
        "cb03c5dd6fc91a836eb20a25c09945643d95f7bd50d206684d6ffc14d16d82d5" +
        "f781225bff908392a5793b803f9b70b4dfcb394f9ed81c18e391a09eb3f93a03" +
        "2d81ba670cabfd6f64aa5e3374cb7c2029f45200e4f0bfd820c8bd58dc5eeb34";
    const string x =
        "bd3006cf5d3ac04a8a5128140df6025d9942d78544e9b27efe28b2ca1f79e313";
    // Leading "00" in y is the interesting part of this vector.
    const string y =
        "00728e23e74bb82de0e1315d58164a5cecc8951d89e88da702f5b878020fd8d2" +
        "a1791b3e8ab770e084ac2397d297971ca8708a30a4097d86740153ee2db6ab63" +
        "43c5b6cc2c8a7fa59082a8d659931cc48a0433a033dbb2fff3aa545686f922c7" +
        "063da1d52d9688142ec64a1002948e5da89165d9df8eed9aa469b61ee0210b40" +
        "33562333097ba8659944e5f7924e04a21bc3edc6d551e202e4c543e97518f91e" +
        "0cab49111029b29c3aa1bed5f35e5c90feb9d3c745953dbf859defce4537b4a0" +
        "9801fdc8fe6999fbde39908079811b4b992c2e8333b9f800ea0d9f0a5f53607e" +
        "308942e68efef01e03d7cca6f196872bf01f436d4a8e05fc59d8fbc6b88a166f" +
        "57a4e99d67ddaece844653be77819747dd2e07d581c518cb9779e9f7960c17ff" +
        "0bae710ecf575b09591b013b4805c88b235df262e61a4c94f46bf9a08284611d" +
        "f44eadd94f44cef6225a808e211e4d3af5e96bce64a90f8013874f10749a8382" +
        "a6026a855d90853440bfce31f258b3a258f7b5e659b43e702dee7c24c02d2284";
    const string msg =
        "baeb12a1ebd8057a99a0137ee60f60eed10d26f1eab22ae2d9adbc3e5ffc3252" +
        "abf62b614707ad2546141bed779f0cfad9544a74e562da549e2f7b286efb6154" +
        "49b0946dc7c498d8f12150b2eacbd27157966f592ad5f3e43a24c60b7e06630b" +
        "82a4fdb699119dbd878b13a98bf22a7b3dc7efdd992ce6b8a950e61299c5663b";
    const string r =
        "8d357b0b956fb90e8e0b9ff284cedc88a04d171a90c5997d8ee1e9bc4d0b35ff";
    const string s =
        "ab37329c50145d146505015704fdc4fb0fd7207e0b11d8becbad934e6255c30c";
    Validate(p, q, g, x, y, msg, r, s, HashAlgorithmName.SHA512);
}
/// <summary>
/// Verifies a CAVP DSA signature vector twice: once with the full key pair imported,
/// and once with only the public portion (X omitted). In each configuration the
/// known-good signature must verify, and a signature with its first byte
/// bit-flipped must fail to verify.
/// </summary>
/// <param name="p">Hex-encoded DSA prime modulus.</param>
/// <param name="q">Hex-encoded DSA subgroup order.</param>
/// <param name="g">Hex-encoded DSA generator.</param>
/// <param name="x">Hex-encoded private key.</param>
/// <param name="y">Hex-encoded public key.</param>
/// <param name="msg">Hex-encoded message that was signed.</param>
/// <param name="r">Hex-encoded first half of the signature.</param>
/// <param name="s">Hex-encoded second half of the signature.</param>
/// <param name="hashAlgorithm">Hash algorithm the vector was generated with.</param>
private static void Validate(
    string p,
    string q,
    string g,
    string x,
    string y,
    string msg,
    string r,
    string s,
    HashAlgorithmName hashAlgorithm)
{
    // The IEEE P1363 signature is simply r followed by s.
    byte[] goodSignature = (r + s).HexToByteArray();

    foreach (bool includePrivateKey in new[] { true, false })
    {
        string description = includePrivateKey ? "Public+Private" : "Public-Only";

        using (DSA dsa = DSAFactory.Create())
        {
            dsa.ImportParameters(
                new DSAParameters
                {
                    P = p.HexToByteArray(),
                    Q = q.HexToByteArray(),
                    G = g.HexToByteArray(),
                    // Verification only requires the public key, so the second
                    // pass proves X is genuinely optional.
                    X = includePrivateKey ? x.HexToByteArray() : null,
                    Y = y.HexToByteArray(),
                });

            byte[] message = msg.HexToByteArray();

            // Clone so the tamper below never leaks into the next iteration.
            byte[] signature = (byte[])goodSignature.Clone();
            Assert.True(dsa.VerifyData(message, signature, hashAlgorithm), description + " Valid Signature");

            // Flip every bit of the first byte; the altered signature must be rejected.
            signature[0] ^= 0xFF;
            Assert.False(dsa.VerifyData(message, signature, hashAlgorithm), description + " Tampered Signature");
        }
    }
}
}
}
| |
// Copyright 2009 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.Serialization;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;
using JetBrains.Annotations;
using NodaTime.Annotations;
using NodaTime.Calendars;
using NodaTime.Text;
using NodaTime.TimeZones;
using NodaTime.Utility;
namespace NodaTime
{
// Note: documentation that refers to the LocalDateTime type within this class must use the fully-qualified
// reference to avoid being resolved to the LocalDateTime property instead.
/// <summary>
/// A <see cref="T:NodaTime.LocalDateTime" /> in a specific time zone and with a particular offset to distinguish
/// between otherwise-ambiguous instants. A <see cref="ZonedDateTime"/> is global, in that it maps to a single
/// <see cref="Instant"/>.
/// </summary>
/// <remarks>
/// <para>Although <see cref="ZonedDateTime" /> includes both local and global concepts, it only supports
/// duration-based - and not calendar-based - arithmetic. This avoids ambiguities
/// and skipped date/time values becoming a problem within a series of calculations; instead,
/// these can be considered just once, at the point of conversion to a <see cref="ZonedDateTime"/>.
/// </para>
/// <para>
/// <c>ZonedDateTime</c> does not implement ordered comparison operators, as there is no obvious natural ordering that works in all cases.
/// Equality is supported however, requiring equality of zone, calendar and date/time. If you want to sort <c>ZonedDateTime</c>
/// values, you should explicitly choose one of the orderings provided via the static properties in the
/// <see cref="ZonedDateTime.Comparer"/> nested class (or implement your own comparison).
/// </para>
/// </remarks>
/// <threadsafety>This type is an immutable value type. See the thread safety section of the user guide for more information.</threadsafety>
#if !PCL
[Serializable]
#endif
public struct ZonedDateTime : IEquatable<ZonedDateTime>, IFormattable, IXmlSerializable
#if !PCL
, ISerializable
#endif
{
// The local date/time plus its offset from UTC; deliberately not readonly,
// as signalled by the [ReadWriteForEfficiency] attribute.
[ReadWriteForEfficiency] private OffsetDateTime offsetDateTime;
// The time zone this value is associated with. (The internal constructor does
// not validate it, so a default-constructed struct leaves this null.)
private readonly DateTimeZone zone;
/// <summary>
/// Constructs an instance directly from values the caller has already validated;
/// no precondition checks are performed here.
/// </summary>
internal ZonedDateTime(OffsetDateTime offsetDateTime, DateTimeZone zone)
{
    this.zone = zone;
    this.offsetDateTime = offsetDateTime;
}
/// <summary>
/// Initializes a new instance of the <see cref="ZonedDateTime"/> struct.
/// </summary>
/// <param name="instant">The instant.</param>
/// <param name="zone">The time zone.</param>
/// <param name="calendar">The calendar system.</param>
public ZonedDateTime(Instant instant, [NotNull] DateTimeZone zone, [NotNull] CalendarSystem calendar)
{
this.zone = Preconditions.CheckNotNull(zone, nameof(zone));
offsetDateTime = new OffsetDateTime(instant, zone.GetUtcOffset(instant), Preconditions.CheckNotNull(calendar, nameof(calendar)));
}
/// <summary>
/// Initializes a new instance of the <see cref="ZonedDateTime" /> struct in the specified time zone
/// and the ISO calendar.
/// </summary>
/// <param name="instant">The instant.</param>
/// <param name="zone">The time zone.</param>
public ZonedDateTime(Instant instant, [NotNull] DateTimeZone zone)
{
this.zone = Preconditions.CheckNotNull(zone, nameof(zone));
offsetDateTime = new OffsetDateTime(instant, zone.GetUtcOffset(instant));
}
/// <summary>
/// Initializes a new instance of the <see cref="ZonedDateTime"/> struct in the specified time zone
/// from a given local time and offset. The offset is validated to be correct as part of initialization.
/// In most cases a local time can only map to a single instant anyway, but the offset is included here for cases
/// where the local time is ambiguous, usually due to daylight saving transitions.
/// </summary>
/// <param name="localDateTime">The local date and time.</param>
/// <param name="zone">The time zone.</param>
/// <param name="offset">The offset between UTC and local time at the desired instant.</param>
/// <exception cref="ArgumentNullException"><paramref name="zone"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="offset"/> is not a valid offset at the given
/// local date and time.</exception>
public ZonedDateTime(LocalDateTime localDateTime, [NotNull] DateTimeZone zone, Offset offset)
{
this.zone = Preconditions.CheckNotNull(zone, nameof(zone));
// Reconstruct the candidate instant from the claimed offset, then check the zone really
// uses that offset at that instant.
Instant candidateInstant = localDateTime.ToLocalInstant().Minus(offset);
Offset correctOffset = zone.GetUtcOffset(candidateInstant);
// Not using Preconditions, to avoid building the string unnecessarily.
if (correctOffset != offset)
{
// Use nameof rather than the magic string "offset", consistent with the rest of the file
// and safe against renames.
throw new ArgumentException("Offset " + offset + " is invalid for local date and time " + localDateTime
+ " in time zone " + zone.Id, nameof(offset));
}
offsetDateTime = new OffsetDateTime(localDateTime, offset);
}
// Every accessor below is a pure pass-through to the wrapped OffsetDateTime; only Zone adds logic.
/// <summary>Gets the offset of the local representation of this value from UTC.</summary>
/// <value>The offset of the local representation of this value from UTC.</value>
public Offset Offset => offsetDateTime.Offset;
/// <summary>Gets the time zone associated with this value.</summary>
/// <value>The time zone associated with this value.</value>
// default(ZonedDateTime) has a null zone field; expose it as UTC rather than null.
public DateTimeZone Zone => zone ?? DateTimeZone.Utc;
/// <summary>
/// Gets the local date and time represented by this zoned date and time.
/// </summary>
/// <remarks>
/// The returned
/// <see cref="T:NodaTime.LocalDateTime"/> will have the same calendar system and return the same values for
/// each of the calendar properties (Year, MonthOfYear and so on), but will not be associated with any
/// particular time zone.
/// </remarks>
/// <value>The local date and time represented by this zoned date and time.</value>
public LocalDateTime LocalDateTime => offsetDateTime.LocalDateTime;
/// <summary>Gets the calendar system associated with this zoned date and time.</summary>
/// <value>The calendar system associated with this zoned date and time.</value>
public CalendarSystem Calendar => offsetDateTime.Calendar;
/// <summary>
/// Gets the local date represented by this zoned date and time.
/// </summary>
/// <remarks>
/// The returned <see cref="LocalDate"/>
/// will have the same calendar system and return the same values for each of the date-based calendar
/// properties (Year, MonthOfYear and so on), but will not be associated with any particular time zone.
/// </remarks>
/// <value>The local date represented by this zoned date and time.</value>
public LocalDate Date => offsetDateTime.Date;
/// <summary>
/// Gets the time portion of this zoned date and time.
/// </summary>
/// <remarks>
/// The returned <see cref="LocalTime"/> will
/// return the same values for each of the time-based properties (Hour, Minute and so on), but
/// will not be associated with any particular time zone.
/// </remarks>
/// <value>The time portion of this zoned date and time.</value>
public LocalTime TimeOfDay => offsetDateTime.TimeOfDay;
/// <summary>Gets the era for this zoned date and time.</summary>
/// <value>The era for this zoned date and time.</value>
public Era Era => offsetDateTime.Era;
/// <summary>Gets the year of this zoned date and time.</summary>
/// <remarks>This returns the "absolute year", so, for the ISO calendar,
/// a value of 0 means 1 BC, for example.</remarks>
/// <value>The year of this zoned date and time.</value>
public int Year => offsetDateTime.Year;
/// <summary>Gets the year of this zoned date and time within its era.</summary>
/// <value>The year of this zoned date and time within its era.</value>
public int YearOfEra => offsetDateTime.YearOfEra;
/// <summary>
/// Gets the "week year" of this date and time.
/// </summary>
/// <remarks>
/// <para>
/// The WeekYear is the year that matches with the <see cref="WeekOfWeekYear"/> field.
/// In the standard ISO-8601 week algorithm, the first week of the year
/// is that in which at least 4 days are in the year. As a result of this
/// definition, day 1 of the first week may be in the previous year.
/// The WeekYear allows you to query the effective year for that day.
/// </para>
/// <para>
/// For example, January 1st 2011 was a Saturday, so only two days of that week
/// (Saturday and Sunday) were in 2011. Therefore January 1st is part of
/// week 52 of WeekYear 2010. Conversely, December 31st 2012 is a Monday,
/// so is part of week 1 of WeekYear 2013.
/// </para>
/// </remarks>
/// <value>The "week year" of this date and time.</value>
public int WeekYear => offsetDateTime.WeekYear;
/// <summary>Gets the month of this zoned date and time within the year.</summary>
/// <value>The month of this zoned date and time within the year.</value>
public int Month => offsetDateTime.Month;
/// <summary>Gets the week within the week-year. See <see cref="WeekYear"/> for more details.</summary>
/// <value>The week within the week-year.</value>
public int WeekOfWeekYear => offsetDateTime.WeekOfWeekYear;
/// <summary>Gets the day of this zoned date and time within the year.</summary>
/// <value>The day of this zoned date and time within the year.</value>
public int DayOfYear => offsetDateTime.DayOfYear;
/// <summary>
/// Gets the day of this zoned date and time within the month.
/// </summary>
/// <value>The day of this zoned date and time within the month.</value>
public int Day => offsetDateTime.Day;
/// <summary>
/// Gets the week day of this zoned date and time expressed as an <see cref="NodaTime.IsoDayOfWeek"/> value,
/// for calendars which use ISO days of the week.
/// </summary>
/// <exception cref="InvalidOperationException">The underlying calendar doesn't use ISO days of the week.</exception>
/// <seealso cref="DayOfWeek"/>
/// <value>The week day of this zoned date and time expressed as an <c>IsoDayOfWeek</c> value.</value>
public IsoDayOfWeek IsoDayOfWeek => offsetDateTime.IsoDayOfWeek;
/// <summary>
/// Gets the week day of this zoned date and time as a number.
/// </summary>
/// <remarks>
/// For calendars using ISO week days, this gives 1 for Monday to 7 for Sunday.
/// </remarks>
/// <value>The week day of this zoned date and time as a number.</value>
/// <seealso cref="IsoDayOfWeek"/>
public int DayOfWeek => offsetDateTime.DayOfWeek;
/// <summary>
/// Gets the hour of day of this zoned date and time, in the range 0 to 23 inclusive.
/// </summary>
/// <value>The hour of day of this zoned date and time, in the range 0 to 23 inclusive.</value>
public int Hour => offsetDateTime.Hour;
/// <summary>
/// Gets the hour of the half-day of this zoned date and time, in the range 1 to 12 inclusive.
/// </summary>
/// <value>The hour of the half-day of this zoned date and time, in the range 1 to 12 inclusive.</value>
public int ClockHourOfHalfDay => offsetDateTime.ClockHourOfHalfDay;
/// <summary>
/// Gets the minute of this zoned date and time, in the range 0 to 59 inclusive.
/// </summary>
/// <value>The minute of this zoned date and time, in the range 0 to 59 inclusive.</value>
public int Minute => offsetDateTime.Minute;
/// <summary>
/// Gets the second of this zoned date and time within the minute, in the range 0 to 59 inclusive.
/// </summary>
/// <value>The second of this zoned date and time within the minute, in the range 0 to 59 inclusive.</value>
public int Second => offsetDateTime.Second;
/// <summary>
/// Gets the millisecond of this zoned date and time within the second, in the range 0 to 999 inclusive.
/// </summary>
/// <value>The millisecond of this zoned date and time within the second, in the range 0 to 999 inclusive.</value>
public int Millisecond => offsetDateTime.Millisecond;
/// <summary>
/// Gets the tick of this zoned date and time within the second, in the range 0 to 9,999,999 inclusive.
/// </summary>
/// <value>The tick of this zoned date and time within the second, in the range 0 to 9,999,999 inclusive.</value>
public int TickOfSecond => offsetDateTime.TickOfSecond;
/// <summary>
/// Gets the tick of this zoned date and time within the day, in the range 0 to 863,999,999,999 inclusive.
/// </summary>
/// <value>The tick of this zoned date and time within the day, in the range 0 to 863,999,999,999 inclusive.</value>
public long TickOfDay => offsetDateTime.TickOfDay;
/// <summary>
/// Gets the nanosecond of this zoned date and time within the second, in the range 0 to 999,999,999 inclusive.
/// </summary>
/// <value>The nanosecond of this zoned date and time within the second, in the range 0 to 999,999,999 inclusive.</value>
public int NanosecondOfSecond => offsetDateTime.NanosecondOfSecond;
/// <summary>
/// Gets the nanosecond of this zoned date and time within the day, in the range 0 to 86,399,999,999,999 inclusive.
/// </summary>
/// <value>The nanosecond of this zoned date and time within the day, in the range 0 to 86,399,999,999,999 inclusive.</value>
public long NanosecondOfDay => offsetDateTime.NanosecondOfDay;
/// <summary>
/// Converts this value to the instant it represents on the time line.
/// </summary>
/// <remarks>
/// This is always an unambiguous conversion. Any difficulties due to daylight saving
/// transitions or other changes in time zone are handled when converting from a
/// <see cref="T:NodaTime.LocalDateTime" /> to a <see cref="ZonedDateTime"/>; the <c>ZonedDateTime</c> remembers
/// the actual offset from UTC to local time, so it always knows the exact instant represented.
/// </remarks>
/// <returns>The instant corresponding to this value.</returns>
[Pure]
public Instant ToInstant() => offsetDateTime.ToInstant();
/// <summary>
/// Returns a value representing the same instant in time and using the same calendar system
/// as this one, but associated with a different time zone.
/// </summary>
/// <param name="targetZone">The target time zone to convert to.</param>
/// <returns>A new value in the target time zone.</returns>
[Pure]
public ZonedDateTime WithZone([NotNull] DateTimeZone targetZone)
{
    // CheckNotNull returns its argument, so validation can feed straight into construction.
    var validatedZone = Preconditions.CheckNotNull(targetZone, nameof(targetZone));
    return new ZonedDateTime(ToInstant(), validatedZone, Calendar);
}
#region Equality
/// <summary>
/// Indicates whether the current object is equal to another object of the same type.
/// </summary>
/// <returns>
/// true if the current object is equal to the <paramref name="other"/> parameter; otherwise, false.
/// </returns>
/// <param name="other">An object to compare with this object.</param>
/// <returns>True if the specified value is the same instant in the same time zone; false otherwise.</returns>
// Per the class remarks, equality requires matching zone, calendar and date/time; the first
// comparison covers the OffsetDateTime part, and Zone (the property, so defaults compare as
// UTC) covers the zone.
public bool Equals(ZonedDateTime other) => offsetDateTime == other.offsetDateTime && Zone.Equals(other.Zone);
/// <summary>
/// Indicates whether this instance and a specified object are equal.
/// </summary>
/// <returns>
/// true if <paramref name="obj"/> and this instance are the same type and represent the same value; otherwise, false.
/// </returns>
/// <param name="obj">Another object to compare to.</param>
/// <filterpriority>2</filterpriority>
/// <returns>True if the specified value is a <see cref="ZonedDateTime"/> representing the same instant in the same time zone; false otherwise.</returns>
public override bool Equals(object obj)
{
    // Anything that isn't a ZonedDateTime (including null) can't be equal to this value.
    if (!(obj is ZonedDateTime))
    {
        return false;
    }
    return Equals((ZonedDateTime)obj);
}
/// <summary>
/// Computes the hash code for this instance.
/// </summary>
/// <returns>
/// A 32-bit signed integer that is the hash code for this instance.
/// </returns>
/// <filterpriority>2</filterpriority>
// Hashes the same components that Equals compares (offsetDateTime and the Zone property).
public override int GetHashCode() => HashCodeHelper.Hash(offsetDateTime, Zone);
#endregion
#region Operators
// Note: only equality and duration-based arithmetic operators are provided; see the class
// remarks for why ordered comparison operators are deliberately omitted.
/// <summary>
/// Implements the operator ==.
/// </summary>
/// <param name="left">The first value to compare</param>
/// <param name="right">The second value to compare</param>
/// <returns>True if the two operands are equal according to <see cref="Equals(ZonedDateTime)"/>; false otherwise</returns>
public static bool operator ==(ZonedDateTime left, ZonedDateTime right) => left.Equals(right);
/// <summary>
/// Implements the operator !=.
/// </summary>
/// <param name="left">The first value to compare</param>
/// <param name="right">The second value to compare</param>
/// <returns>False if the two operands are equal according to <see cref="Equals(ZonedDateTime)"/>; true otherwise</returns>
public static bool operator !=(ZonedDateTime left, ZonedDateTime right) => !(left == right);
/// <summary>
/// Adds a duration to a zoned date and time.
/// </summary>
/// <remarks>
/// This is an alternative way of calling <see cref="op_Addition(ZonedDateTime, Duration)"/>.
/// </remarks>
/// <param name="zonedDateTime">The value to add the duration to.</param>
/// <param name="duration">The duration to add</param>
/// <returns>A new value with the time advanced by the given duration, in the same calendar system and time zone.</returns>
public static ZonedDateTime Add(ZonedDateTime zonedDateTime, Duration duration) => zonedDateTime + duration;
/// <summary>
/// Returns the result of adding a duration to this zoned date and time.
/// </summary>
/// <remarks>
/// This is an alternative way of calling <see cref="op_Addition(ZonedDateTime, Duration)"/>.
/// </remarks>
/// <param name="duration">The duration to add</param>
/// <returns>A new <see cref="ZonedDateTime" /> representing the result of the addition.</returns>
[Pure]
public ZonedDateTime Plus(Duration duration) => this + duration;
/// <summary>
/// Returns a new <see cref="ZonedDateTime"/> with the time advanced by the given duration. Note that
/// due to daylight saving time changes this may not advance the local time by the same amount.
/// </summary>
/// <remarks>
/// The returned value retains the calendar system and time zone of <paramref name="zonedDateTime"/>.
/// </remarks>
/// <param name="zonedDateTime">The <see cref="ZonedDateTime"/> to add the duration to.</param>
/// <param name="duration">The duration to add.</param>
/// <returns>A new value with the time advanced by the given duration, in the same calendar system and time zone.</returns>
// Arithmetic is performed on the instant, then converted back into the zone, so the local
// time may jump by a different amount across a zone transition.
public static ZonedDateTime operator +(ZonedDateTime zonedDateTime, Duration duration) =>
new ZonedDateTime(zonedDateTime.ToInstant() + duration, zonedDateTime.Zone, zonedDateTime.Calendar);
/// <summary>
/// Subtracts a duration from a zoned date and time.
/// </summary>
/// <remarks>
/// This is an alternative way of calling <see cref="op_Subtraction(ZonedDateTime, Duration)"/>.
/// </remarks>
/// <param name="zonedDateTime">The value to subtract the duration from.</param>
/// <param name="duration">The duration to subtract.</param>
/// <returns>A new value with the time "rewound" by the given duration, in the same calendar system and time zone.</returns>
public static ZonedDateTime Subtract(ZonedDateTime zonedDateTime, Duration duration) => zonedDateTime - duration;
/// <summary>
/// Returns the result of subtracting a duration from this zoned date and time, for a fluent alternative to
/// <see cref="op_Subtraction(ZonedDateTime, Duration)"/>
/// </summary>
/// <param name="duration">The duration to subtract</param>
/// <returns>A new <see cref="ZonedDateTime" /> representing the result of the subtraction.</returns>
[Pure]
public ZonedDateTime Minus(Duration duration) => this - duration;
/// <summary>
/// Returns a new <see cref="ZonedDateTime"/> with the duration subtracted. Note that
/// due to daylight saving time changes this may not change the local time by the same amount.
/// </summary>
/// <remarks>
/// The returned value retains the calendar system and time zone of <paramref name="zonedDateTime"/>.
/// </remarks>
/// <param name="zonedDateTime">The value to subtract the duration from.</param>
/// <param name="duration">The duration to subtract.</param>
/// <returns>A new value with the time "rewound" by the given duration, in the same calendar system and time zone.</returns>
public static ZonedDateTime operator -(ZonedDateTime zonedDateTime, Duration duration) =>
new ZonedDateTime(zonedDateTime.ToInstant() - duration, zonedDateTime.Zone, zonedDateTime.Calendar);
/// <summary>
/// Subtracts one zoned date and time from another, returning an elapsed duration.
/// </summary>
/// <remarks>
/// This is an alternative way of calling <see cref="op_Subtraction(ZonedDateTime, ZonedDateTime)"/>.
/// </remarks>
/// <param name="end">The zoned date and time value to subtract from; if this is later than <paramref name="start"/>
/// then the result will be positive.</param>
/// <param name="start">The zoned date and time to subtract from <paramref name="end"/>.</param>
/// <returns>The elapsed duration from <paramref name="start"/> to <paramref name="end"/>.</returns>
public static Duration Subtract(ZonedDateTime end, ZonedDateTime start) => end - start;
/// <summary>
/// Returns the result of subtracting another zoned date and time from this one, resulting in the elapsed duration
/// between the two instants represented in the values.
/// </summary>
/// <remarks>
/// This is an alternative way of calling <see cref="op_Subtraction(ZonedDateTime, ZonedDateTime)"/>.
/// </remarks>
/// <param name="other">The zoned date and time to subtract from this one.</param>
/// <returns>The elapsed duration from <paramref name="other"/> to this value.</returns>
[Pure]
public Duration Minus(ZonedDateTime other) => this - other;
/// <summary>
/// Subtracts one <see cref="ZonedDateTime"/> from another, resulting in the elapsed time between
/// the two values.
/// </summary>
/// <remarks>
/// This is equivalent to <c>end.ToInstant() - start.ToInstant()</c>; in particular:
/// <list type="bullet">
/// <item><description>The two values can use different calendar systems</description></item>
/// <item><description>The two values can be in different time zones</description></item>
/// <item><description>The two values can have different UTC offsets</description></item>
/// </list>
/// </remarks>
/// <param name="end">The zoned date and time value to subtract from; if this is later than <paramref name="start"/>
/// then the result will be positive.</param>
/// <param name="start">The zoned date and time to subtract from <paramref name="end"/>.</param>
/// <returns>The elapsed duration from <paramref name="start"/> to <paramref name="end"/>.</returns>
public static Duration operator -(ZonedDateTime end, ZonedDateTime start) => end.ToInstant() - start.ToInstant();
#endregion
/// <summary>
/// Returns the <see cref="ZoneInterval"/> containing this value, in the time zone this
/// value refers to.
/// </summary>
/// <remarks>
/// This is simply a convenience method - it is logically equivalent to converting this
/// value to an <see cref="Instant"/> and then asking the appropriate <see cref="DateTimeZone"/>
/// for the <c>ZoneInterval</c> containing that instant.
/// </remarks>
/// <returns>The <c>ZoneInterval</c> containing this value.</returns>
[Pure]
public ZoneInterval GetZoneInterval() => Zone.GetZoneInterval(ToInstant());
/// <summary>
/// Indicates whether or not this <see cref="ZonedDateTime"/> is in daylight saving time
/// for its time zone. This is determined by checking the <see cref="ZoneInterval.Savings"/> property
/// of the zone interval containing this value.
/// </summary>
/// <seealso cref="GetZoneInterval()"/>
/// <returns><c>true</c> if the zone interval containing this value has a non-zero savings
/// component; <c>false</c> otherwise.</returns>
[Pure]
public bool IsDaylightSavingTime() => GetZoneInterval().Savings != Offset.Zero;
#region Formatting
/// <summary>
/// Returns a <see cref="System.String" /> that represents this instance.
/// </summary>
/// <returns>
/// The value of the current instance in the default format pattern ("G"), using the current thread's
/// culture to obtain a format provider.
/// </returns>
public override string ToString() =>
ZonedDateTimePattern.Patterns.BclSupport.Format(this, null, CultureInfo.CurrentCulture);
/// <summary>
/// Formats the value of the current instance using the specified pattern.
/// </summary>
/// <returns>
/// A <see cref="T:System.String" /> containing the value of the current instance in the specified format.
/// </returns>
/// <param name="patternText">The <see cref="T:System.String" /> specifying the pattern to use,
/// or null to use the default format pattern ("G").
/// </param>
/// <param name="formatProvider">The <see cref="T:System.IFormatProvider" /> to use when formatting the value,
/// or null to use the current thread's culture to obtain a format provider.
/// </param>
/// <filterpriority>2</filterpriority>
public string ToString(string patternText, IFormatProvider formatProvider) =>
ZonedDateTimePattern.Patterns.BclSupport.Format(this, patternText, formatProvider);
#endregion Formatting
/// <summary>
/// Constructs a <see cref="DateTimeOffset"/> value with the same local time and offset from
/// UTC as this value.
/// </summary>
/// <remarks>
/// An offset does not convey as much information as a time zone; a <see cref="DateTimeOffset"/>
/// represents an instant in time along with an associated local time, but it doesn't allow you
/// to find out what the local time would be for another instant.
/// </remarks>
/// <returns>A <see cref="DateTimeOffset"/> representation of this value.</returns>
[Pure]
public DateTimeOffset ToDateTimeOffset() => offsetDateTime.ToDateTimeOffset();
/// <summary>
/// Returns a new <see cref="ZonedDateTime"/> representing the same instant in time as the given
/// <see cref="DateTimeOffset"/>.
/// The time zone used will be a fixed time zone, which uses the same offset throughout time.
/// </summary>
/// <param name="dateTimeOffset">Date and time value with an offset.</param>
/// <returns>A <see cref="ZonedDateTime"/> value representing the same instant in time as the given <see cref="DateTimeOffset"/>.</returns>
// A DateTimeOffset carries no zone rules, so the best we can do is a fixed-offset zone.
public static ZonedDateTime FromDateTimeOffset(DateTimeOffset dateTimeOffset) =>
new ZonedDateTime(Instant.FromDateTimeOffset(dateTimeOffset),
new FixedDateTimeZone(Offset.FromTimeSpan(dateTimeOffset.Offset)));
/// <summary>
/// Constructs a <see cref="DateTime"/> from this <see cref="ZonedDateTime"/> which has a
/// <see cref="DateTime.Kind"/> of <see cref="DateTimeKind.Utc"/> and represents the same instant of time as
/// this value rather than the same local time.
/// </summary>
/// <returns>A <see cref="DateTime"/> representation of this value with a "universal" kind, with the same
/// instant of time as this value.</returns>
[Pure]
public DateTime ToDateTimeUtc() => ToInstant().ToDateTimeUtc();
/// <summary>
/// Constructs a <see cref="DateTime"/> from this <see cref="ZonedDateTime"/> which has a
/// <see cref="DateTime.Kind"/> of <see cref="DateTimeKind.Unspecified"/> and represents the same local time as
/// this value rather than the same instant in time.
/// </summary>
/// <remarks>
/// <see cref="DateTimeKind.Unspecified"/> is slightly odd - it can be treated as UTC if you use <see cref="DateTime.ToLocalTime"/>
/// or as system local time if you use <see cref="DateTime.ToUniversalTime"/>, but it's the only kind which allows
/// you to construct a <see cref="DateTimeOffset"/> with an arbitrary offset.
/// </remarks>
/// <returns>A <see cref="DateTime"/> representation of this value with an "unspecified" kind, with the same
/// local date and time as this value.</returns>
[Pure]
public DateTime ToDateTimeUnspecified() => LocalDateTime.ToDateTimeUnspecified();
/// <summary>
/// Constructs an <see cref="OffsetDateTime"/> with the same local date and time, and the same offset
/// as this zoned date and time, effectively just "removing" the time zone itself.
/// </summary>
/// <returns>An OffsetDateTime with the same local date/time and offset as this value.</returns>
[Pure]
public OffsetDateTime ToOffsetDateTime() => offsetDateTime;
#region Comparers
/// <summary>
/// Base class for <see cref="ZonedDateTime"/> comparers.
/// </summary>
/// <remarks>
/// Use the static properties of this class to obtain instances. This type is exposed so that the
/// same value can be used for both equality and ordering comparisons.
/// </remarks>
[Immutable]
public abstract class Comparer : IComparer<ZonedDateTime>, IEqualityComparer<ZonedDateTime>
{
// TODO(2.0): A comparer which compares instants, but in a calendar-sensitive manner?
/// <summary>
/// Gets a comparer which compares <see cref="ZonedDateTime"/> values by their local date/time, without reference to
/// the time zone or offset. Comparisons between two values of different calendar systems will fail with <see cref="ArgumentException"/>.
/// </summary>
/// <remarks>
/// <para>For example, this comparer considers 2013-03-04T20:21:00 (Europe/London) to be later than
/// 2013-03-04T19:21:00 (America/Los_Angeles) even though the second value represents a later instant in time.</para>
/// <para>This property will return a reference to the same instance every time it is called.</para>
/// </remarks>
/// <value>A comparer which compares values by their local date/time.</value>
public static Comparer Local => LocalComparer.Instance;
/// <summary>
/// Gets a comparer which compares <see cref="ZonedDateTime"/> values by the instants obtained by applying the offset to
/// the local date/time, ignoring the calendar system.
/// </summary>
/// <remarks>
/// <para>For example, this comparer considers 2013-03-04T20:21:00 (Europe/London) to be earlier than
/// 2013-03-04T19:21:00 (America/Los_Angeles) even though the second value has a local time which is earlier; the time zones
/// mean that the first value occurred earlier in the universal time line.</para>
/// <para>This property will return a reference to the same instance every time it is called.</para>
/// </remarks>
/// <value>A comparer which compares values by the instants obtained by applying the offset to
/// the local date/time, ignoring the calendar system.</value>
public static Comparer Instant => InstantComparer.Instance;
/// <summary>
/// Internal constructor to prevent external classes from deriving from this.
/// (That means we can add more abstract members in the future.)
/// </summary>
internal Comparer()
{
}
/// <summary>
/// Compares two <see cref="ZonedDateTime"/> values and returns a value indicating whether one is less than, equal to, or greater than the other.
/// </summary>
/// <param name="x">The first value to compare.</param>
/// <param name="y">The second value to compare.</param>
/// <returns>A signed integer that indicates the relative values of <paramref name="x"/> and <paramref name="y"/>, as shown in the following table.
/// <list type = "table">
/// <listheader>
/// <term>Value</term>
/// <description>Meaning</description>
/// </listheader>
/// <item>
/// <term>Less than zero</term>
/// <description><paramref name="x"/> is less than <paramref name="y"/>.</description>
/// </item>
/// <item>
/// <term>Zero</term>
/// <description><paramref name="x"/> is equals to <paramref name="y"/>.</description>
/// </item>
/// <item>
/// <term>Greater than zero</term>
/// <description><paramref name="x"/> is greater than <paramref name="y"/>.</description>
/// </item>
/// </list>
/// </returns>
public abstract int Compare(ZonedDateTime x, ZonedDateTime y);
/// <summary>
/// Determines whether the specified <c>ZonedDateTime</c> values are equal.
/// </summary>
/// <param name="x">The first <c>ZonedDateTime</c> to compare.</param>
/// <param name="y">The second <c>ZonedDateTime</c> to compare.</param>
/// <returns><c>true</c> if the specified objects are equal; otherwise, <c>false</c>.</returns>
public abstract bool Equals(ZonedDateTime x, ZonedDateTime y);
/// <summary>
/// Returns a hash code for the specified <c>ZonedDateTime</c>.
/// </summary>
/// <param name="obj">The <c>ZonedDateTime</c> for which a hash code is to be returned.</param>
/// <returns>A hash code for the specified value.</returns>
public abstract int GetHashCode(ZonedDateTime obj);
}
/// <summary>
/// Implementation for <see cref="Comparer.Local"/>: delegates every operation to the
/// corresponding local-date/time comparer for the wrapped <see cref="OffsetDateTime"/> values.
/// </summary>
private sealed class LocalComparer : Comparer
{
    internal static readonly Comparer Instance = new LocalComparer();

    // Singleton: only reachable via the Instance field.
    private LocalComparer()
    {
    }

    /// <inheritdoc />
    public override int Compare(ZonedDateTime x, ZonedDateTime y)
    {
        return OffsetDateTime.Comparer.Local.Compare(x.offsetDateTime, y.offsetDateTime);
    }

    /// <inheritdoc />
    public override bool Equals(ZonedDateTime x, ZonedDateTime y)
    {
        return OffsetDateTime.Comparer.Local.Equals(x.offsetDateTime, y.offsetDateTime);
    }

    /// <inheritdoc />
    public override int GetHashCode(ZonedDateTime obj)
    {
        return OffsetDateTime.Comparer.Local.GetHashCode(obj.offsetDateTime);
    }
}
/// <summary>
/// Implementation for <see cref="Comparer.Instant"/>: delegates every operation to the
/// corresponding instant comparer for the wrapped <see cref="OffsetDateTime"/> values.
/// </summary>
private sealed class InstantComparer : Comparer
{
    internal static readonly Comparer Instance = new InstantComparer();

    // Singleton: only reachable via the Instance field.
    private InstantComparer()
    {
    }

    /// <inheritdoc />
    public override int Compare(ZonedDateTime x, ZonedDateTime y)
    {
        return OffsetDateTime.Comparer.Instant.Compare(x.offsetDateTime, y.offsetDateTime);
    }

    /// <inheritdoc />
    public override bool Equals(ZonedDateTime x, ZonedDateTime y)
    {
        return OffsetDateTime.Comparer.Instant.Equals(x.offsetDateTime, y.offsetDateTime);
    }

    /// <inheritdoc />
    public override int GetHashCode(ZonedDateTime obj)
    {
        return OffsetDateTime.Comparer.Instant.GetHashCode(obj.offsetDateTime);
    }
}
#endregion
#region XML serialization
/// <inheritdoc />
XmlSchema IXmlSerializable.GetSchema() => null;
/// <inheritdoc />
// Expected XML shape (see WriteXml): element text is the extended-ISO OffsetDateTime,
// with a required "zone" attribute and an optional "calendar" attribute.
void IXmlSerializable.ReadXml([NotNull] XmlReader reader)
{
Preconditions.CheckNotNull(reader, nameof(reader));
var pattern = OffsetDateTimePattern.ExtendedIsoPattern;
// The zone attribute is mandatory; without it we can't reconstruct a ZonedDateTime.
if (!reader.MoveToAttribute("zone"))
{
throw new ArgumentException("No zone specified in XML for ZonedDateTime");
}
DateTimeZone newZone = DateTimeZoneProviders.Serialization[reader.Value];
// An absent calendar attribute leaves the pattern's default template value (and hence calendar).
if (reader.MoveToAttribute("calendar"))
{
string newCalendarId = reader.Value;
CalendarSystem newCalendar = CalendarSystem.ForId(newCalendarId);
var newTemplateValue = pattern.TemplateValue.WithCalendar(newCalendar);
pattern = pattern.WithTemplateValue(newTemplateValue);
}
reader.MoveToElement();
string text = reader.ReadElementContentAsString();
OffsetDateTime offsetDateTime = pattern.Parse(text).Value;
// Validate that the serialized offset is actually the zone's offset at that instant.
if (newZone.GetUtcOffset(offsetDateTime.ToInstant()) != offsetDateTime.Offset)
{
// Might as well use the exception we've already got...
ParseResult<ZonedDateTime>.InvalidOffset(text).GetValueOrThrow();
}
// Use the constructor which doesn't validate the offset, as we've already done that.
this = new ZonedDateTime(offsetDateTime, newZone);
}
/// <inheritdoc />
void IXmlSerializable.WriteXml([NotNull] XmlWriter writer)
{
Preconditions.CheckNotNull(writer, nameof(writer));
writer.WriteAttributeString("zone", Zone.Id);
// The calendar attribute is only written for non-ISO calendars, keeping the common case compact.
if (Calendar != CalendarSystem.Iso)
{
writer.WriteAttributeString("calendar", Calendar.Id);
}
writer.WriteString(OffsetDateTimePattern.ExtendedIsoPattern.Format(ToOffsetDateTime()));
}
#endregion
#if !PCL
#region Binary serialization
// Field names used for ISerializable binary serialization; these must stay stable
// across versions for previously-serialized values to round-trip.
private const string DaysSerializationName = "days";
private const string NanosecondOfDaySerializationName = "nanosecondOfDay";
private const string CalendarIdSerializationName = "calendar";
private const string OffsetMillisecondsSerializationName = "offsetMilliseconds";
private const string ZoneIdSerializationName = "zone";
/// <summary>
/// Private constructor only present for serialization.
/// </summary>
/// <param name="info">The <see cref="SerializationInfo"/> to fetch data from.</param>
/// <param name="context">The source for this deserialization.</param>
private ZonedDateTime(SerializationInfo info, StreamingContext context)
// Note: this uses the constructor which explicitly validates that the offset is reasonable.
: this(new LocalDateTime(new LocalDate(info.GetInt32(DaysSerializationName),
CalendarSystem.ForId(info.GetString(CalendarIdSerializationName))),
LocalTime.FromNanosecondsSinceMidnight(info.GetInt64(NanosecondOfDaySerializationName))),
DateTimeZoneProviders.Serialization[info.GetString(ZoneIdSerializationName)],
Offset.FromMilliseconds(info.GetInt32(OffsetMillisecondsSerializationName)))
{
}
/// <summary>
/// Implementation of <see cref="ISerializable.GetObjectData"/>.
/// </summary>
/// <param name="info">The <see cref="SerializationInfo"/> to populate with data.</param>
/// <param name="context">The destination for this serialization.</param>
[System.Security.SecurityCritical]
void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context)
{
// FIXME(2.0): Revisit serialization
// The values written here must match, name for name, what the serialization
// constructor reads back.
info.AddValue(DaysSerializationName, Date.DaysSinceEpoch);
info.AddValue(NanosecondOfDaySerializationName, TimeOfDay.NanosecondOfDay);
info.AddValue(CalendarIdSerializationName, Calendar.Id);
info.AddValue(OffsetMillisecondsSerializationName, Offset.Milliseconds);
info.AddValue(ZoneIdSerializationName, Zone.Id);
}
#endregion
#endif
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using System.Threading.Tasks.Sources;
#if !netstandard
using Internal.Runtime.CompilerServices;
#endif
namespace System.Runtime.CompilerServices
{
/// <summary>Provides an awaitable type that enables configured awaits on a <see cref="ValueTask"/>.</summary>
[StructLayout(LayoutKind.Auto)]
public readonly struct ConfiguredValueTaskAwaitable
{
/// <summary>The wrapped <see cref="ValueTask"/>.</summary>
private readonly ValueTask _value;
/// <summary>Initializes the awaitable.</summary>
/// <param name="value">The wrapped <see cref="ValueTask"/>.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal ConfiguredValueTaskAwaitable(ValueTask value) => _value = value;
/// <summary>Returns an awaiter for this <see cref="ConfiguredValueTaskAwaitable"/> instance.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ConfiguredValueTaskAwaiter GetAwaiter() => new ConfiguredValueTaskAwaiter(_value);
/// <summary>Provides an awaiter for a <see cref="ConfiguredValueTaskAwaitable"/>.</summary>
[StructLayout(LayoutKind.Auto)]
public readonly struct ConfiguredValueTaskAwaiter : ICriticalNotifyCompletion
#if CORECLR
, IValueTaskAwaiter
#endif
{
/// <summary>The value being awaited.</summary>
private readonly ValueTask _value;
/// <summary>Initializes the awaiter.</summary>
/// <param name="value">The value to be awaited.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal ConfiguredValueTaskAwaiter(ValueTask value) => _value = value;
/// <summary>Gets whether the <see cref="ConfiguredValueTaskAwaitable"/> has completed.</summary>
public bool IsCompleted
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get => _value.IsCompleted;
}
/// <summary>Gets the result of the ValueTask.</summary>
// A non-generic ValueTask has no result value; this only surfaces a failure or
// cancellation from the completed operation.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[StackTraceHidden]
public void GetResult() => _value.ThrowIfCompletedUnsuccessfully();
/// <summary>Schedules the continuation action for the <see cref="ConfiguredValueTaskAwaitable"/>.</summary>
public void OnCompleted(Action continuation)
{
// The wrapped object is either a Task, an IValueTaskSource, or nothing at all
// (a default / already-completed ValueTask); each case is dispatched separately.
if (_value.ObjectIsTask)
{
_value.UnsafeGetTask().ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().OnCompleted(continuation);
}
else if (_value._obj != null)
{
// Unlike UnsafeOnCompleted below, OnCompleted must flow ExecutionContext.
_value.UnsafeGetValueTaskSource().OnCompleted(ValueTaskAwaiter.s_invokeActionDelegate, continuation, _value._token,
ValueTaskSourceOnCompletedFlags.FlowExecutionContext |
(_value.ContinueOnCapturedContext ? ValueTaskSourceOnCompletedFlags.UseSchedulingContext : ValueTaskSourceOnCompletedFlags.None));
}
else
{
// No backing object: schedule the continuation via an already-completed task.
ValueTask.CompletedTask.ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().OnCompleted(continuation);
}
}
/// <summary>Schedules the continuation action for the <see cref="ConfiguredValueTaskAwaitable"/>.</summary>
public void UnsafeOnCompleted(Action continuation)
{
// Same dispatch as OnCompleted, but without forcing ExecutionContext flow.
if (_value.ObjectIsTask)
{
_value.UnsafeGetTask().ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().UnsafeOnCompleted(continuation);
}
else if (_value._obj != null)
{
_value.UnsafeGetValueTaskSource().OnCompleted(ValueTaskAwaiter.s_invokeActionDelegate, continuation, _value._token,
_value.ContinueOnCapturedContext ? ValueTaskSourceOnCompletedFlags.UseSchedulingContext : ValueTaskSourceOnCompletedFlags.None);
}
else
{
ValueTask.CompletedTask.ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().UnsafeOnCompleted(continuation);
}
}
#if CORECLR
// Registers the async state machine box itself as the continuation, rather than
// wrapping it in an Action delegate as the methods above do.
void IValueTaskAwaiter.AwaitUnsafeOnCompleted(IAsyncStateMachineBox box)
{
if (_value.ObjectIsTask)
{
TaskAwaiter.UnsafeOnCompletedInternal(_value.UnsafeGetTask(), box, _value.ContinueOnCapturedContext);
}
else if (_value._obj != null)
{
_value.UnsafeGetValueTaskSource().OnCompleted(ValueTaskAwaiter.s_invokeAsyncStateMachineBox, box, _value._token,
_value.ContinueOnCapturedContext ? ValueTaskSourceOnCompletedFlags.UseSchedulingContext : ValueTaskSourceOnCompletedFlags.None);
}
else
{
TaskAwaiter.UnsafeOnCompletedInternal(Task.CompletedTask, box, _value.ContinueOnCapturedContext);
}
}
#endif
}
}
/// <summary>Provides an awaitable type that enables configured awaits on a <see cref="ValueTask{TResult}"/>.</summary>
/// <typeparam name="TResult">The type of the result produced.</typeparam>
[StructLayout(LayoutKind.Auto)]
public readonly struct ConfiguredValueTaskAwaitable<TResult>
{
/// <summary>The wrapped <see cref="ValueTask{TResult}"/>.</summary>
private readonly ValueTask<TResult> _value;
/// <summary>Initializes the awaitable.</summary>
/// <param name="value">The wrapped <see cref="ValueTask{TResult}"/>.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal ConfiguredValueTaskAwaitable(ValueTask<TResult> value) => _value = value;
/// <summary>Returns an awaiter for this <see cref="ConfiguredValueTaskAwaitable{TResult}"/> instance.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ConfiguredValueTaskAwaiter GetAwaiter() => new ConfiguredValueTaskAwaiter(_value);
/// <summary>Provides an awaiter for a <see cref="ConfiguredValueTaskAwaitable{TResult}"/>.</summary>
[StructLayout(LayoutKind.Auto)]
public readonly struct ConfiguredValueTaskAwaiter : ICriticalNotifyCompletion
#if CORECLR
, IValueTaskAwaiter
#endif
{
/// <summary>The value being awaited.</summary>
private readonly ValueTask<TResult> _value;
/// <summary>Initializes the awaiter.</summary>
/// <param name="value">The value to be awaited.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal ConfiguredValueTaskAwaiter(ValueTask<TResult> value) => _value = value;
/// <summary>Gets whether the <see cref="ConfiguredValueTaskAwaitable{TResult}"/> has completed.</summary>
public bool IsCompleted
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get => _value.IsCompleted;
}
/// <summary>Gets the result of the ValueTask.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[StackTraceHidden]
public TResult GetResult() => _value.Result;
/// <summary>Schedules the continuation action for the <see cref="ConfiguredValueTaskAwaitable{TResult}"/>.</summary>
public void OnCompleted(Action continuation)
{
// The wrapped object is either a Task<TResult>, an IValueTaskSource<TResult>, or
// nothing at all (a default / result-only ValueTask); dispatch accordingly.
if (_value.ObjectIsTask)
{
_value.UnsafeGetTask().ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().OnCompleted(continuation);
}
else if (_value._obj != null)
{
// Unlike UnsafeOnCompleted below, OnCompleted must flow ExecutionContext.
_value.UnsafeGetValueTaskSource().OnCompleted(ValueTaskAwaiter.s_invokeActionDelegate, continuation, _value._token,
ValueTaskSourceOnCompletedFlags.FlowExecutionContext |
(_value.ContinueOnCapturedContext ? ValueTaskSourceOnCompletedFlags.UseSchedulingContext : ValueTaskSourceOnCompletedFlags.None));
}
else
{
// No backing object: schedule the continuation via an already-completed task.
ValueTask.CompletedTask.ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().OnCompleted(continuation);
}
}
/// <summary>Schedules the continuation action for the <see cref="ConfiguredValueTaskAwaitable{TResult}"/>.</summary>
public void UnsafeOnCompleted(Action continuation)
{
// Same dispatch as OnCompleted, but without forcing ExecutionContext flow.
if (_value.ObjectIsTask)
{
_value.UnsafeGetTask().ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().UnsafeOnCompleted(continuation);
}
else if (_value._obj != null)
{
_value.UnsafeGetValueTaskSource().OnCompleted(ValueTaskAwaiter.s_invokeActionDelegate, continuation, _value._token,
_value.ContinueOnCapturedContext ? ValueTaskSourceOnCompletedFlags.UseSchedulingContext : ValueTaskSourceOnCompletedFlags.None);
}
else
{
ValueTask.CompletedTask.ConfigureAwait(_value.ContinueOnCapturedContext).GetAwaiter().UnsafeOnCompleted(continuation);
}
}
#if CORECLR
// Registers the async state machine box itself as the continuation, rather than
// wrapping it in an Action delegate as the methods above do.
void IValueTaskAwaiter.AwaitUnsafeOnCompleted(IAsyncStateMachineBox box)
{
if (_value.ObjectIsTask)
{
TaskAwaiter.UnsafeOnCompletedInternal(_value.UnsafeGetTask(), box, _value.ContinueOnCapturedContext);
}
else if (_value._obj != null)
{
_value.UnsafeGetValueTaskSource().OnCompleted(ValueTaskAwaiter.s_invokeAsyncStateMachineBox, box, _value._token,
_value.ContinueOnCapturedContext ? ValueTaskSourceOnCompletedFlags.UseSchedulingContext : ValueTaskSourceOnCompletedFlags.None);
}
else
{
TaskAwaiter.UnsafeOnCompletedInternal(Task.CompletedTask, box, _value.ContinueOnCapturedContext);
}
}
#endif
}
}
}
| |
/* Copyright (c) Citrix Systems Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Windows.Forms;
using XenAdmin.Actions;
using XenAdmin.Core;
using XenAdmin.Model;
using XenAdmin.Properties;
using XenAPI;
namespace XenAdmin.Dialogs.HealthCheck
{
// Dialog that lets the user enable/disable Health Check enrollment for a pool and
// configure the upload schedule, the upload (Citrix) credentials and the XenServer
// credentials used for the upload.
public partial class HealthCheckSettingsDialog : XenDialogBase
{
private static readonly log4net.ILog log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
private readonly Pool pool;
private HealthCheckSettings healthCheckSettings;
// True when no existing upload/diagnostic tokens were found for the pool, so the
// user must authenticate with new credentials before enrolling.
private bool authenticationRequired;
private bool authenticated;
private string authenticationToken;
private string diagnosticToken;
// XenServer credentials previously stored with the health check settings (may be empty).
private string xsUserName;
private string xsPassword;
internal override string HelpName { get { return "HealthCheckSettingsDialog"; } }
public HealthCheckSettingsDialog(Pool pool, bool enrollNow)
{
this.pool = pool;
this.connection = pool.Connection;
healthCheckSettings = pool.HealthCheckSettings;
// When opened via "enroll now", pre-select enrollment regardless of the stored status.
if (enrollNow)
healthCheckSettings.Status = HealthCheckStatus.Enabled;
authenticated = healthCheckSettings.TryGetExistingTokens(pool.Connection, out authenticationToken, out diagnosticToken);
authenticationRequired = !authenticated;
xsUserName = healthCheckSettings.GetSecretyInfo(pool.Connection, HealthCheckSettings.UPLOAD_CREDENTIAL_USER_SECRET);
xsPassword = healthCheckSettings.GetSecretyInfo(pool.Connection, HealthCheckSettings.UPLOAD_CREDENTIAL_PASSWORD_SECRET);
InitializeComponent();
PopulateControls();
InitializeControls();
UpdateButtons();
}
// Binds the day-of-week and time-of-day combo boxes to key/value lists, where the
// key is the underlying integer value and the value is the localized display text.
private void PopulateControls()
{
var list = BuildDays();
var ds = new BindingSource(list, null);
dayOfWeekComboBox.DataSource = ds;
dayOfWeekComboBox.ValueMember = "key";
dayOfWeekComboBox.DisplayMember = "value";
var list1 = BuildHours();
var ds1 = new BindingSource(list1, null);
timeOfDayComboBox.DataSource = ds1;
timeOfDayComboBox.ValueMember = "key";
timeOfDayComboBox.DisplayMember = "value";
}
// Maps each DayOfWeek value to its localized display name.
private Dictionary<int, string> BuildDays()
{
Dictionary<int, string> days = new Dictionary<int, string>();
foreach (var dayOfWeek in Enum.GetValues(typeof(DayOfWeek)))
{
days.Add((int)dayOfWeek, HelpersGUI.DayOfWeekToString((DayOfWeek)dayOfWeek, true));
}
return days;
}
// Maps each hour of the day (0-23) to a localized time-of-day label.
private SortedDictionary<int, string> BuildHours()
{
SortedDictionary<int, string> hours = new SortedDictionary<int, string>();
for (int hour = 0; hour <= 23; hour++)
{
// Only the hour component matters; the date part is an arbitrary fixed value.
DateTime time = new DateTime(1900, 1, 1, hour, 0, 0);
hours.Add(hour, HelpersGUI.DateTimeToString(time, Messages.DATEFORMAT_HM, true));
}
return hours;
}
// Pushes the current health check settings and stored credentials into the controls.
private void InitializeControls()
{
Text = String.Format(Messages.HEALTHCHECK_ENROLLMENT_TITLE, pool.Name);
authenticationRubricTextBox.Text = authenticationRequired
? Messages.HEALTHCHECK_AUTHENTICATION_RUBRIC_NO_TOKEN
: Messages.HEALTHCHECK_AUTHENTICATION_RUBRIC_EXISTING_TOKEN;
enrollmentCheckBox.Checked = healthCheckSettings.Status != HealthCheckStatus.Disabled;
frequencyNumericBox.Value = healthCheckSettings.IntervalInWeeks;
dayOfWeekComboBox.SelectedValue = (int)healthCheckSettings.DayOfWeek;
timeOfDayComboBox.SelectedValue = healthCheckSettings.TimeOfDay;
// Existing authentication can only be selected when tokens were actually found.
existingAuthenticationRadioButton.Enabled = existingAuthenticationRadioButton.Checked = !authenticationRequired;
newAuthenticationRadioButton.Checked = authenticationRequired;
SetMyCitrixCredentials(existingAuthenticationRadioButton.Checked);
// If the stored XS user matches the connection's user (or none is stored), default
// to reusing the current connection credentials.
bool useCurrentXsCredentials = string.IsNullOrEmpty(xsUserName) || xsUserName == pool.Connection.Username;
newXsCredentialsRadioButton.Checked = !useCurrentXsCredentials;
currentXsCredentialsRadioButton.Checked = useCurrentXsCredentials;
SetXSCredentials(currentXsCredentialsRadioButton.Checked);
}
// Returns true if any control differs from the currently stored settings, i.e. a
// save is actually needed.
private bool ChangesMade()
{
if (enrollmentCheckBox.Checked && healthCheckSettings.Status != HealthCheckStatus.Enabled)
return true;
if (!enrollmentCheckBox.Checked && healthCheckSettings.Status != HealthCheckStatus.Disabled)
return true;
if (frequencyNumericBox.Value != healthCheckSettings.IntervalInWeeks)
return true;
// NOTE(review): these two compare SelectedIndex against the stored values; this
// relies on the combo items being listed in key order so index == key — verify.
if (dayOfWeekComboBox.SelectedIndex != (int)healthCheckSettings.DayOfWeek)
return true;
if (timeOfDayComboBox.SelectedIndex != healthCheckSettings.TimeOfDay)
return true;
if (authenticationToken != healthCheckSettings.GetSecretyInfo(pool.Connection, HealthCheckSettings.UPLOAD_TOKEN_SECRET))
return true;
if (textboxXSUserName.Text != xsUserName)
return true;
if (textboxXSPassword.Text != xsPassword)
return true;
return false;
}
// OK is only enabled while the required credential fields are filled in.
private void UpdateButtons()
{
okButton.Enabled = m_ctrlError.PerformCheck(CheckCredentialsEntered);
}
private void okButton_Click(object sender, EventArgs e)
{
// Disable OK while the (potentially slow) authentication check runs.
okButton.Enabled = false;
if (enrollmentCheckBox.Checked && newAuthenticationRadioButton.Checked
&& !m_ctrlError.PerformCheck(CheckUploadAuthentication))
{
okButton.Enabled = true;
return;
}
if (ChangesMade())
{
// Build a fresh settings object from the pool's raw config and overwrite it
// with the values chosen in the dialog, then save and transfer asynchronously.
var newHealthCheckSettings = new HealthCheckSettings(pool.health_check_config);
newHealthCheckSettings.Status = enrollmentCheckBox.Checked ? HealthCheckStatus.Enabled : HealthCheckStatus.Disabled;
newHealthCheckSettings.IntervalInDays = (int)(frequencyNumericBox.Value * 7);
newHealthCheckSettings.DayOfWeek = (DayOfWeek)dayOfWeekComboBox.SelectedValue;
newHealthCheckSettings.TimeOfDay = (int)timeOfDayComboBox.SelectedValue;
newHealthCheckSettings. RetryInterval = HealthCheckSettings.DEFAULT_RETRY_INTERVAL;
new SaveHealthCheckSettingsAction(pool, newHealthCheckSettings, authenticationToken, diagnosticToken, textboxXSUserName.Text.Trim(), textboxXSPassword.Text, false).RunAsync();
new TransferHealthCheckSettingsAction(pool, newHealthCheckSettings, textboxXSUserName.Text.Trim(), textboxXSPassword.Text, true).RunAsync();
}
okButton.Enabled = true;
DialogResult = DialogResult.OK;
Close();
}
private void cancelButton_Click(object sender, EventArgs e)
{
DialogResult = DialogResult.Cancel;
Close();
}
private void enrollmentCheckBox_CheckedChanged(object sender, EventArgs e)
{
UpdateButtons();
}
private void newAuthenticationRadioButton_CheckedChanged(object sender, EventArgs e)
{
SetMyCitrixCredentials(existingAuthenticationRadioButton.Checked);
}
// Handles switching between "current" and "new" XenServer credentials.
private void radioButton2_CheckedChanged(object sender, EventArgs e)
{
SetXSCredentials(currentXsCredentialsRadioButton.Checked);
testCredentialsButton.Enabled = newXsCredentialsRadioButton.Checked &&
!string.IsNullOrEmpty(textboxXSUserName.Text.Trim()) && !string.IsNullOrEmpty(textboxXSPassword.Text);
}
// Fills the XS credential boxes from either the live connection or the stored
// secrets, and toggles their editability accordingly.
private void SetXSCredentials(bool useCurrent)
{
if (useCurrent)
{
textboxXSUserName.Text = pool.Connection.Username;
textboxXSPassword.Text = pool.Connection.Password;
textboxXSUserName.Enabled = false;
textboxXSPassword.Enabled = false;
}
else
{
textboxXSUserName.Text = xsUserName;
textboxXSPassword.Text = xsPassword;
textboxXSUserName.Enabled = true;
textboxXSPassword.Enabled = true;
}
}
// Enables/disables the My Citrix credential boxes; the text itself is left alone.
private void SetMyCitrixCredentials(bool useExisting)
{
if (useExisting)
{
//textBoxMyCitrixUsername.Text = String.Empty;
//textBoxMyCitrixPassword.Text = String.Empty;
textBoxMyCitrixUsername.Enabled = false;
textBoxMyCitrixPassword.Enabled = false;
}
else
{
//textBoxMyCitrixUsername.Text = String.Empty;
//textBoxMyCitrixPassword.Text = String.Empty;
textBoxMyCitrixUsername.Enabled = true;
textBoxMyCitrixPassword.Enabled = true;
}
}
// Returns true when the credential fields needed for the current selections are
// all filled in (or no credentials are needed at all).
private bool CheckCredentialsEntered()
{
if (!enrollmentCheckBox.Checked || !newAuthenticationRadioButton.Checked)
return true;
if (newAuthenticationRadioButton.Checked &&
(string.IsNullOrEmpty(textBoxMyCitrixUsername.Text.Trim()) || string.IsNullOrEmpty(textBoxMyCitrixPassword.Text)))
return false;
if (newXsCredentialsRadioButton.Checked &&
(string.IsNullOrEmpty(textboxXSUserName.Text.Trim()) || string.IsNullOrEmpty(textboxXSPassword.Text)))
return false;
return true;
}
// Overload matching the signature expected by m_ctrlError.PerformCheck.
private bool CheckCredentialsEntered(out string error)
{
error = string.Empty;
return CheckCredentialsEntered();
}
// Runs the upload authentication synchronously; on success stores the new upload
// and diagnostic tokens, on failure reports the error message via 'error'.
private bool CheckUploadAuthentication(out string error)
{
error = string.Empty;
if (!CheckCredentialsEntered())
return false;
var action = new HealthCheckAuthenticationAction(textBoxMyCitrixUsername.Text.Trim(), textBoxMyCitrixPassword.Text.Trim(),
Registry.HealthCheckIdentityTokenDomainName, Registry.HealthCheckUploadGrantTokenDomainName, Registry.HealthCheckUploadTokenDomainName,
Registry.HealthCheckDiagnosticDomainName, Registry.HealthCheckProductKey, 0, false);
try
{
action.RunExternal(null);
}
catch
{
error = action.Exception != null ? action.Exception.Message : Messages.ERROR_UNKNOWN;
authenticationToken = null;
authenticated = false;
return authenticated;
}
authenticationToken = action.UploadToken; // current upload token
diagnosticToken = action.DiagnosticToken; // current diagnostic token
authenticated = !String.IsNullOrEmpty(authenticationToken) && !String.IsNullOrEmpty(diagnosticToken);
return authenticated;
}
private void credentials_TextChanged(object sender, EventArgs e)
{
UpdateButtons();
}
private void xsCredentials_TextChanged(object sender, EventArgs e)
{
UpdateButtons();
testCredentialsButton.Enabled = newXsCredentialsRadioButton.Checked &&
!string.IsNullOrEmpty(textboxXSUserName.Text.Trim()) && !string.IsNullOrEmpty(textboxXSPassword.Text);
HideTestCredentialsStatus();
}
private void PolicyStatementLinkLabel_LinkClicked(object sender, LinkLabelLinkClickedEventArgs e)
{
new HealthCheckPolicyStatementDialog().ShowDialog(this);
}
private void testCredentialsButton_Click(object sender, EventArgs e)
{
CheckXenServerCredentials();
}
// Verifies the entered XenServer credentials asynchronously by opening an elevated
// session and checking it is a superuser or holds a role authorized to set the
// health check config; updates the status icon/label when the check completes.
private void CheckXenServerCredentials()
{
if (string.IsNullOrEmpty(textboxXSUserName.Text.Trim()) || string.IsNullOrEmpty(textboxXSPassword.Text))
return;
bool passedRbacChecks = false;
DelegatedAsyncAction action = new DelegatedAsyncAction(connection,
Messages.CREDENTIALS_CHECKING, "", "",
delegate
{
Session elevatedSession = null;
try
{
elevatedSession = connection.ElevatedSession(textboxXSUserName.Text.Trim(), textboxXSPassword.Text);
if (elevatedSession != null && (elevatedSession.IsLocalSuperuser || SessionAuthorized(elevatedSession, Role.ValidRoleList("pool.set_health_check_config", connection))))
passedRbacChecks = true;
}
catch (Failure f)
{
if (f.ErrorDescription.Count > 0 && f.ErrorDescription[0] == Failure.RBAC_PERMISSION_DENIED)
{
// we use a different error message here from the standard one in friendly names
throw new Exception(Messages.HEALTH_CHECK_USER_HAS_NO_PERMISSION_TO_CONNECT);
}
throw;
}
finally
{
// Always log the elevated session out, whether or not the checks passed.
if (elevatedSession != null)
{
elevatedSession.Connection.Logout(elevatedSession);
elevatedSession = null;
}
}
},
true);
action.Completed += delegate
{
log.DebugFormat("Logging with the new credentials returned: {0} ", passedRbacChecks);
// Marshal UI updates back onto the main window's thread.
Program.Invoke(Program.MainWindow, () =>
{
if (passedRbacChecks)
ShowTestCredentialsStatus(Resources._000_Tick_h32bit_16, null);
else
ShowTestCredentialsStatus(Resources._000_error_h32bit_16, action.Exception != null ? action.Exception.Message : Messages.HEALTH_CHECK_USER_NOT_AUTHORIZED);
textboxXSUserName.Enabled = textboxXSPassword.Enabled = testCredentialsButton.Enabled = newXsCredentialsRadioButton.Checked;
});
};
log.Debug("Testing logging in with the new credentials");
// Show a spinner and lock the inputs while the asynchronous check runs.
ShowTestCredentialsStatus(Resources.ajax_loader, null);
textboxXSUserName.Enabled = textboxXSPassword.Enabled = testCredentialsButton.Enabled = false;
action.RunAsync();
}
// Shows the given status image next to the Test button, with an optional error text.
private void ShowTestCredentialsStatus(Image image, string errorMessage)
{
testCredentialsStatusImage.Visible = true;
testCredentialsStatusImage.Image = image;
errorLabel.Text = errorMessage;
errorLabel.Visible = !string.IsNullOrEmpty(errorMessage);
}
private void HideTestCredentialsStatus()
{
testCredentialsStatusImage.Visible = false;
errorLabel.Visible = false;
}
// Returns true if the session holds at least one of the authorized roles.
private bool SessionAuthorized(Session s, List<Role> authorizedRoles)
{
UserDetails ud = s.CurrentUserDetails;
foreach (Role r in s.Roles)
{
if (authorizedRoles.Contains(r))
{
log.DebugFormat("Subject '{0}' is authorized to complete the action", ud.UserName ?? ud.UserSid);
return true;
}
}
log.DebugFormat("Subject '{0}' is not authorized to complete the action", ud.UserName ?? ud.UserSid);
return false;
}
private void authenticationRubricTextBox_LinkClicked(object sender, LinkClickedEventArgs e)
{
Program.OpenURL(e.LinkText);
}
private void existingAuthenticationRadioButton_CheckedChanged(object sender, EventArgs e)
{
UpdateButtons();
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.IO;
using System.Reflection;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.HostFiltering;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Hosting.StaticWebAssets;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.HttpOverrides;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Microsoft.AspNetCore
{
/// <summary>
/// Provides convenience methods for creating instances of <see cref="IWebHost"/> and <see cref="IWebHostBuilder"/> with pre-configured defaults.
/// </summary>
public static class WebHost
{
/// <summary>
/// Initializes and starts a new <see cref="IWebHost"/> with pre-configured defaults.
/// See <see cref="CreateDefaultBuilder()"/> for details.
/// </summary>
/// <param name="app">A delegate that handles requests to the application.</param>
/// <returns>A started <see cref="IWebHost"/> that hosts the application.</returns>
public static IWebHost Start(RequestDelegate app)
{
    // Delegate to the URL-taking overload without specifying a URL.
    return Start(url: null!, app: app);
}
/// <summary>
/// Initializes and starts a new <see cref="IWebHost"/> with pre-configured defaults.
/// See <see cref="CreateDefaultBuilder()"/> for details.
/// </summary>
/// <param name="url">The URL the hosted application will listen on.</param>
/// <param name="app">A delegate that handles requests to the application.</param>
/// <returns>A started <see cref="IWebHost"/> that hosts the application.</returns>
public static IWebHost Start(string url, RequestDelegate app)
{
    // Name the application after the assembly that declares the request delegate.
    var applicationName = app.GetMethodInfo().DeclaringType!.Assembly.GetName().Name;
    return StartWith(url: url, configureServices: null, app: appBuilder => appBuilder.Run(app), applicationName: applicationName);
}
/// <summary>
/// Initializes and starts a new <see cref="IWebHost"/> with pre-configured defaults.
/// See <see cref="CreateDefaultBuilder()"/> for details.
/// </summary>
/// <param name="routeBuilder">A delegate that configures the router for handling requests to the application.</param>
/// <returns>A started <see cref="IWebHost"/> that hosts the application.</returns>
public static IWebHost Start(Action<IRouteBuilder> routeBuilder)
{
    // Delegate to the URL-taking overload without specifying a URL.
    return Start(url: null!, routeBuilder: routeBuilder);
}
/// <summary>
/// Initializes and starts a new <see cref="IWebHost"/> with pre-configured defaults.
/// See <see cref="CreateDefaultBuilder()"/> for details.
/// </summary>
/// <param name="url">The URL the hosted application will listen on.</param>
/// <param name="routeBuilder">A delegate that configures the router for handling requests to the application.</param>
/// <returns>A started <see cref="IWebHost"/> that hosts the application.</returns>
public static IWebHost Start(string url, Action<IRouteBuilder> routeBuilder)
{
    // Name the application after the assembly that declares the route configuration delegate.
    var applicationName = routeBuilder.GetMethodInfo().DeclaringType!.Assembly.GetName().Name;
    return StartWith(url, services => services.AddRouting(), appBuilder => appBuilder.UseRouter(routeBuilder), applicationName: applicationName);
}
/// <summary>
/// Initializes and starts a new <see cref="IWebHost"/> with pre-configured defaults.
/// See <see cref="CreateDefaultBuilder()"/> for details.
/// </summary>
/// <param name="app">The delegate that configures the <see cref="IApplicationBuilder"/>.</param>
/// <returns>A started <see cref="IWebHost"/> that hosts the application.</returns>
public static IWebHost StartWith(Action<IApplicationBuilder> app)
{
    // Delegate to the URL-taking overload without specifying a URL.
    return StartWith(url: null!, app: app);
}
/// <summary>
/// Initializes and starts a new <see cref="IWebHost"/> with pre-configured defaults.
/// See <see cref="CreateDefaultBuilder()"/> for details.
/// </summary>
/// <param name="url">The URL the hosted application will listen on.</param>
/// <param name="app">The delegate that configures the <see cref="IApplicationBuilder"/>.</param>
/// <returns>A started <see cref="IWebHost"/> that hosts the application.</returns>
public static IWebHost StartWith(string url, Action<IApplicationBuilder> app)
{
    // No extra services and no explicit application name for this overload.
    return StartWith(url: url, configureServices: null, app: app, applicationName: null);
}
/// <summary>
/// Core implementation shared by all Start/StartWith overloads: builds a default
/// host, applies the optional URL, services, and application name, then starts it.
/// </summary>
/// <param name="url">Optional URL the hosted application will listen on.</param>
/// <param name="configureServices">Optional delegate that registers additional services.</param>
/// <param name="app">The delegate that configures the <see cref="IApplicationBuilder"/>.</param>
/// <param name="applicationName">Optional application name to record in host settings.</param>
/// <returns>A started <see cref="IWebHost"/>.</returns>
private static IWebHost StartWith(string? url, Action<IServiceCollection>? configureServices, Action<IApplicationBuilder> app, string? applicationName)
{
    var builder = CreateDefaultBuilder();

    if (!string.IsNullOrEmpty(url))
    {
        builder.UseUrls(url);
    }

    if (configureServices is not null)
    {
        builder.ConfigureServices(configureServices);
    }

    builder.Configure(app);

    if (!string.IsNullOrEmpty(applicationName))
    {
        builder.UseSetting(WebHostDefaults.ApplicationKey, applicationName);
    }

    var host = builder.Build();
    host.Start();
    return host;
}
/// <summary>
/// Initializes a new instance of the <see cref="WebHostBuilder"/> class with pre-configured defaults.
/// </summary>
/// <remarks>
/// The following defaults are applied to the returned <see cref="WebHostBuilder"/>:
/// use Kestrel as the web server and configure it using the application's configuration providers,
/// set the <see cref="IHostEnvironment.ContentRootPath"/> to the result of <see cref="Directory.GetCurrentDirectory()"/>,
/// load <see cref="IConfiguration"/> from 'appsettings.json' and 'appsettings.[<see cref="IHostEnvironment.EnvironmentName"/>].json',
/// load <see cref="IConfiguration"/> from User Secrets when <see cref="IHostEnvironment.EnvironmentName"/> is 'Development' using the entry assembly,
/// load <see cref="IConfiguration"/> from environment variables,
/// configure the <see cref="ILoggerFactory"/> to log to the console and debug output,
/// adds the HostFiltering middleware,
/// adds the ForwardedHeaders middleware if ASPNETCORE_FORWARDEDHEADERS_ENABLED=true,
/// and enable IIS integration.
/// </remarks>
/// <returns>The initialized <see cref="IWebHostBuilder"/>.</returns>
public static IWebHostBuilder CreateDefaultBuilder()
{
    // Delegate to the args-taking overload with no command-line arguments.
    return CreateDefaultBuilder(args: null!);
}
/// <summary>
/// Initializes a new instance of the <see cref="WebHostBuilder"/> class with pre-configured defaults.
/// </summary>
/// <remarks>
/// The following defaults are applied to the returned <see cref="WebHostBuilder"/>:
/// use Kestrel as the web server and configure it using the application's configuration providers,
/// set the <see cref="IHostEnvironment.ContentRootPath"/> to the result of <see cref="Directory.GetCurrentDirectory()"/>,
/// load <see cref="IConfiguration"/> from 'appsettings.json' and 'appsettings.[<see cref="IHostEnvironment.EnvironmentName"/>].json',
/// load <see cref="IConfiguration"/> from User Secrets when <see cref="IHostEnvironment.EnvironmentName"/> is 'Development' using the entry assembly,
/// load <see cref="IConfiguration"/> from environment variables,
/// load <see cref="IConfiguration"/> from supplied command line args,
/// configure the <see cref="ILoggerFactory"/> to log to the console and debug output,
/// configure the <see cref="IWebHostEnvironment.WebRootFileProvider"/> to map static web assets when <see cref="IHostEnvironment.EnvironmentName"/> is 'Development' using the entry assembly,
/// adds the HostFiltering middleware,
/// adds the ForwardedHeaders middleware if ASPNETCORE_FORWARDEDHEADERS_ENABLED=true,
/// and enable IIS integration.
/// </remarks>
/// <param name="args">The command line args.</param>
/// <returns>The initialized <see cref="IWebHostBuilder"/>.</returns>
public static IWebHostBuilder CreateDefaultBuilder(string[] args)
{
var builder = new WebHostBuilder();
// Only set the content root if it hasn't already been configured elsewhere.
if (string.IsNullOrEmpty(builder.GetSetting(WebHostDefaults.ContentRootKey)))
{
builder.UseContentRoot(Directory.GetCurrentDirectory());
}
// Seed host-level settings from the command line (e.g. --urls, --environment).
if (args != null)
{
builder.UseConfiguration(new ConfigurationBuilder().AddCommandLine(args).Build());
}
builder.ConfigureAppConfiguration((hostingContext, config) =>
{
var env = hostingContext.HostingEnvironment;
// Base settings file, then the environment-specific override file.
config.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
.AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true, reloadOnChange: true);
// User secrets are only loaded in the Development environment.
if (env.IsDevelopment())
{
var appAssembly = Assembly.Load(new AssemblyName(env.ApplicationName));
if (appAssembly != null)
{
config.AddUserSecrets(appAssembly, optional: true);
}
}
config.AddEnvironmentVariables();
// Command-line args are added last, so they override the file and
// environment-variable providers above.
if (args != null)
{
config.AddCommandLine(args);
}
})
.ConfigureLogging((hostingContext, loggingBuilder) =>
{
// Stamp span/trace/parent activity IDs onto log scopes.
loggingBuilder.Configure(options =>
{
options.ActivityTrackingOptions = ActivityTrackingOptions.SpanId
| ActivityTrackingOptions.TraceId
| ActivityTrackingOptions.ParentId;
});
loggingBuilder.AddConfiguration(hostingContext.Configuration.GetSection("Logging"));
loggingBuilder.AddConsole();
loggingBuilder.AddDebug();
loggingBuilder.AddEventSourceLogger();
}).
UseDefaultServiceProvider((context, options) =>
{
// Scope validation is expensive, so it is only enabled in Development.
options.ValidateScopes = context.HostingEnvironment.IsDevelopment();
});
ConfigureWebDefaults(builder);
return builder;
}
internal static void ConfigureWebDefaults(IWebHostBuilder builder)
{
builder.ConfigureAppConfiguration((ctx, cb) =>
{
if (ctx.HostingEnvironment.IsDevelopment())
{
StaticWebAssetsLoader.UseStaticWebAssets(ctx.HostingEnvironment, ctx.Configuration);
}
});
builder.UseKestrel((builderContext, options) =>
{
options.Configure(builderContext.Configuration.GetSection("Kestrel"), reloadOnChange: true);
})
.ConfigureServices((hostingContext, services) =>
{
// Fallback
services.PostConfigure<HostFilteringOptions>(options =>
{
if (options.AllowedHosts == null || options.AllowedHosts.Count == 0)
{
// "AllowedHosts": "localhost;127.0.0.1;[::1]"
var hosts = hostingContext.Configuration["AllowedHosts"]?.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
// Fall back to "*" to disable.
options.AllowedHosts = (hosts?.Length > 0 ? hosts : new[] { "*" });
}
});
// Change notification
services.AddSingleton<IOptionsChangeTokenSource<HostFilteringOptions>>(
new ConfigurationChangeTokenSource<HostFilteringOptions>(hostingContext.Configuration));
services.AddTransient<IStartupFilter, HostFilteringStartupFilter>();
services.AddTransient<IStartupFilter, ForwardedHeadersStartupFilter>();
services.AddTransient<IConfigureOptions<ForwardedHeadersOptions>, ForwardedHeadersOptionsSetup>();
services.AddRouting();
})
.UseIIS()
.UseIISIntegration();
}
/// <summary>
/// Initializes a new instance of the <see cref="WebHostBuilder"/> class with pre-configured defaults using typed Startup.
/// </summary>
/// <remarks>
/// The following defaults are applied to the returned <see cref="WebHostBuilder"/>:
/// use Kestrel as the web server and configure it using the application's configuration providers,
/// set the <see cref="IHostEnvironment.ContentRootPath"/> to the result of <see cref="Directory.GetCurrentDirectory()"/>,
/// load <see cref="IConfiguration"/> from 'appsettings.json' and 'appsettings.[<see cref="IHostEnvironment.EnvironmentName"/>].json',
/// load <see cref="IConfiguration"/> from User Secrets when <see cref="IHostEnvironment.EnvironmentName"/> is 'Development' using the entry assembly,
/// load <see cref="IConfiguration"/> from environment variables,
/// load <see cref="IConfiguration"/> from supplied command line args,
/// configure the <see cref="ILoggerFactory"/> to log to the console and debug output,
/// enable IIS integration.
/// </remarks>
/// <typeparam name ="TStartup">The type containing the startup methods for the application.</typeparam>
/// <param name="args">The command line args.</param>
/// <returns>The initialized <see cref="IWebHostBuilder"/>.</returns>
public static IWebHostBuilder CreateDefaultBuilder<TStartup>(string[] args) where TStartup : class =>
CreateDefaultBuilder(args).UseStartup<TStartup>();
}
}
| |
using System;
using System.Collections;
using System.Reflection;
using System.Reflection.Emit;
using NMock2.Internal;
namespace NMock2.Monitoring
{
/// <summary>
/// Summary description for MockObjectFactory.
/// </summary>
internal class MockObjectFactory
{
private static readonly Hashtable createdTypes = new Hashtable();
private readonly ModuleBuilder moduleBuilder;
private class TypeId
{
private readonly Type[] types;
public TypeId(params Type[] types)
{
this.types = types;
}
private bool ContainsSameTypesAs(TypeId other)
{
if (other.types.Length !=
types.Length)
{
return false;
}
for (int num1 = 0; num1 < types.Length; num1++)
{
if (Array.IndexOf(other.types, types[num1]) < 0)
{
return false;
}
}
return true;
}
public override bool Equals(object obj)
{
return ((obj is TypeId) && ContainsSameTypesAs((TypeId) obj));
}
public override int GetHashCode()
{
int num1 = 0;
foreach (Type type1 in types)
{
num1 ^= type1.GetHashCode();
}
return num1;
}
}
public MockObjectFactory(string name)
{
AssemblyName name1 = new AssemblyName();
name1.Name = name;
moduleBuilder =
AppDomain.CurrentDomain.DefineDynamicAssembly(
name1, AssemblyBuilderAccess.Run).DefineDynamicModule(name);
}
private static bool AllTypes(Type type, object criteria)
{
return true;
}
private static void BuildAllInterfaceMethods(
Type mockedType, TypeBuilder typeBuilder)
{
Type[] typeArray1 = mockedType.FindInterfaces(new TypeFilter(AllTypes), null);
foreach (Type type1 in typeArray1)
{
Console.WriteLine(type1);
BuildInterfaceMethods(typeBuilder, type1);
}
BuildInterfaceMethods(typeBuilder, mockedType);
}
private static void BuildConstructor(TypeBuilder typeBuilder)
{
Type[] typeArray1 =
new Type[] {typeof (Mockery), typeof (Type), typeof (string)};
ILGenerator generator1 =
typeBuilder.DefineConstructor(
MethodAttributes.Public, CallingConventions.HasThis, typeArray1).
GetILGenerator();
ConstructorInfo info1 =
typeof (MockObject).GetConstructor(
BindingFlags.NonPublic | BindingFlags.Instance, null, typeArray1, null);
generator1.Emit(OpCodes.Ldarg_0);
generator1.Emit(OpCodes.Ldarg_1);
generator1.Emit(OpCodes.Ldarg_2);
generator1.Emit(OpCodes.Ldarg_3);
generator1.Emit(OpCodes.Call, info1);
generator1.Emit(OpCodes.Ret);
}
private static void BuildInterfaceMethods(
TypeBuilder typeBuilder, Type mockedType)
{
typeBuilder.AddInterfaceImplementation(mockedType);
MethodInfo[] infoArray1 = mockedType.GetMethods();
foreach (MethodInfo info1 in infoArray1)
{
GenerateMethodBody(typeBuilder, info1);
}
}
public MockObject CreateMockObject(
Mockery mockery, Type mockedType, string name)
{
return
(Activator.CreateInstance(
GetMockedType(
Id(new Type[] {mockedType, typeof (IMockObject)}), mockedType),
new object[] {mockery, mockedType, name})
as MockObject);
}
private Type GetMockedType(TypeId id1, Type mockedType)
{
Type type1;
if (createdTypes.ContainsKey(id1))
{
type1 = (Type) createdTypes[id1];
}
else
{
createdTypes[id1] =
type1 = CreateType("MockObjectType" + (createdTypes.Count + 1), mockedType);
}
return type1;
}
private Type CreateType(string typeName, Type mockedType)
{
Type[] mockedInterface = new Type[] { mockedType };
TypeBuilder builder1 =
moduleBuilder.DefineType(
typeName,
TypeAttributes.Public,
typeof (MockObject),
mockedInterface);
BuildConstructor(builder1);
BuildAllInterfaceMethods(mockedType, builder1);
return builder1.CreateType();
}
private static void EmitReferenceMethodBody(ILGenerator gen)
{
gen.Emit(OpCodes.Ldnull);
gen.Emit(OpCodes.Ret);
}
private static void EmitValueMethodBody(MethodInfo method, ILGenerator gen)
{
gen.DeclareLocal(method.ReturnType);
gen.Emit(OpCodes.Ldloc_0);
gen.Emit(OpCodes.Ret);
}
private static void GenerateMethodBody(
TypeBuilder typeBuilder, MethodInfo method)
{
ILGenerator generator1 = PrepareMethodGenerator(typeBuilder, method);
generator1.Emit(OpCodes.Ldarg_0);
if (method.ReturnType == null)
{
generator1.Emit(OpCodes.Ret);
}
if (method.ReturnType.IsValueType)
{
EmitValueMethodBody(method, generator1);
}
else
{
EmitReferenceMethodBody(generator1);
}
}
private static TypeId Id(params Type[] types)
{
return new TypeId(types);
}
private static ILGenerator PrepareMethodGenerator(
TypeBuilder typeBuilder, MethodInfo method)
{
ParameterInfo[] infoArray1 = method.GetParameters();
Type[] typeArray1 = new Type[infoArray1.Length];
for (int num1 = 0; num1 < infoArray1.Length; num1++)
{
typeArray1[num1] = infoArray1[num1].ParameterType;
}
MethodBuilder builder1 =
typeBuilder.DefineMethod(
method.Name,
MethodAttributes.Virtual | MethodAttributes.Public,
method.CallingConvention,
method.ReturnType,
typeArray1);
builder1.InitLocals = true;
typeBuilder.DefineMethodOverride(builder1, method);
return builder1.GetILGenerator();
}
}
}
| |
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.LayoutRenderers
{
using System;
using System.Text;
using NLog.Common;
using NLog.Config;
using NLog.Internal;
/// <summary>
/// Render environmental information related to logging events.
/// </summary>
[NLogConfigurationItem]
public abstract class LayoutRenderer : ISupportsInitialize, IRenderable, IDisposable
{
private const int MaxInitialRenderBufferLength = 16384;
private int maxRenderedLength;
private bool isInitialized;
/// <summary>
/// Gets the logging configuration this target is part of.
/// </summary>
protected LoggingConfiguration LoggingConfiguration { get; private set; }
/// <summary>
/// Returns a <see cref="System.String"/> that represents this instance.
/// </summary>
/// <returns>
/// A <see cref="System.String"/> that represents this instance.
/// </returns>
public override string ToString()
{
var lra = (LayoutRendererAttribute)Attribute.GetCustomAttribute(this.GetType(), typeof(LayoutRendererAttribute));
if (lra != null)
{
return "Layout Renderer: ${" + lra.Name + "}";
}
return this.GetType().Name;
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
this.Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Renders the the value of layout renderer in the context of the specified log event.
/// </summary>
/// <param name="logEvent">The log event.</param>
/// <returns>String representation of a layout renderer.</returns>
public string Render(LogEventInfo logEvent)
{
int initialLength = this.maxRenderedLength;
if (initialLength > MaxInitialRenderBufferLength)
{
initialLength = MaxInitialRenderBufferLength;
}
var builder = new StringBuilder(initialLength);
this.Render(builder, logEvent);
if (builder.Length > this.maxRenderedLength)
{
this.maxRenderedLength = builder.Length;
}
return builder.ToString();
}
/// <summary>
/// Initializes this instance.
/// </summary>
/// <param name="configuration">The configuration.</param>
void ISupportsInitialize.Initialize(LoggingConfiguration configuration)
{
this.Initialize(configuration);
}
/// <summary>
/// Closes this instance.
/// </summary>
void ISupportsInitialize.Close()
{
this.Close();
}
/// <summary>
/// Initializes this instance.
/// </summary>
/// <param name="configuration">The configuration.</param>
internal void Initialize(LoggingConfiguration configuration)
{
if (!this.isInitialized)
{
this.LoggingConfiguration = configuration;
this.isInitialized = true;
this.InitializeLayoutRenderer();
}
}
/// <summary>
/// Closes this instance.
/// </summary>
internal void Close()
{
if (this.isInitialized)
{
this.LoggingConfiguration = null;
this.isInitialized = false;
this.CloseLayoutRenderer();
}
}
internal void Render(StringBuilder builder, LogEventInfo logEvent)
{
if (!this.isInitialized)
{
this.isInitialized = true;
this.InitializeLayoutRenderer();
}
try
{
this.Append(builder, logEvent);
}
catch (Exception exception)
{
InternalLogger.Warn(exception, "Exception in layout renderer.");
if (exception.MustBeRethrown())
{
throw;
}
}
}
/// <summary>
/// Renders the specified environmental information and appends it to the specified <see cref="StringBuilder" />.
/// </summary>
/// <param name="builder">The <see cref="StringBuilder"/> to append the rendered data to.</param>
/// <param name="logEvent">Logging event.</param>
protected abstract void Append(StringBuilder builder, LogEventInfo logEvent);
/// <summary>
/// Initializes the layout renderer.
/// </summary>
protected virtual void InitializeLayoutRenderer()
{
}
/// <summary>
/// Closes the layout renderer.
/// </summary>
protected virtual void CloseLayoutRenderer()
{
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing">True to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
this.Close();
}
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CodeStyle;
using Microsoft.CodeAnalysis.CSharp.CodeStyle;
using Microsoft.CodeAnalysis.CSharp.UseExpressionBody;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Diagnostics;
using Microsoft.CodeAnalysis.Options;
using Roslyn.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.UseExpressionBody
{
    /// <summary>
    /// Tests for the "use expression body" analyzer and code fix as applied to
    /// property and indexer accessors (get/set). The [|...|] spans in the fixture
    /// strings mark where the diagnostic is expected.
    /// </summary>
    public class UseExpressionBodyForAccessorsTests : AbstractCSharpDiagnosticProviderBasedUserDiagnosticTest
    {
        // Pairs the analyzer that reports accessor body style with the fix that rewrites it.
        internal override Tuple<DiagnosticAnalyzer, CodeFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace)
            => new Tuple<DiagnosticAnalyzer, CodeFixProvider>(
                new UseExpressionBodyForAccessorsDiagnosticAnalyzer(),
                new UseExpressionBodyForAccessorsCodeFixProvider());

        // Prefer expression bodies for accessors only, not whole properties/indexers.
        private static readonly Dictionary<OptionKey, object> UseExpressionBody =
            new Dictionary<OptionKey, object>
            {
                { CSharpCodeStyleOptions.PreferExpressionBodiedAccessors, CodeStyleOptions.TrueWithNoneEnforcement },
                { CSharpCodeStyleOptions.PreferExpressionBodiedProperties, CodeStyleOptions.FalseWithNoneEnforcement },
                { CSharpCodeStyleOptions.PreferExpressionBodiedIndexers, CodeStyleOptions.FalseWithNoneEnforcement }
            };

        // Prefer expression bodies for accessors AND properties/indexers; the
        // accessor-level fix should then defer to the property/indexer-level one.
        private static readonly Dictionary<OptionKey, object> UseExpressionBodyIncludingPropertiesAndIndexers =
            new Dictionary<OptionKey, object>
            {
                { CSharpCodeStyleOptions.PreferExpressionBodiedAccessors, CodeStyleOptions.TrueWithNoneEnforcement },
                { CSharpCodeStyleOptions.PreferExpressionBodiedProperties, CodeStyleOptions.TrueWithNoneEnforcement },
                { CSharpCodeStyleOptions.PreferExpressionBodiedIndexers, CodeStyleOptions.TrueWithNoneEnforcement }
            };

        // Prefer block bodies for accessors.
        private static readonly Dictionary<OptionKey, object> UseBlockBody =
            new Dictionary<OptionKey, object>
            {
                { CSharpCodeStyleOptions.PreferExpressionBodiedAccessors, CodeStyleOptions.FalseWithNoneEnforcement }
            };

        // Block-bodied getter is rewritten to an expression-bodied getter.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestUseExpressionBody1()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        get
        {
            [|return|] Bar();
        }
    }
}",
@"class C
{
    int Foo
    {
        get => Bar();
    }
}", options: UseExpressionBody);
        }

        // No accessor-level diagnostic when the property-level preference applies.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestMissingIfPropertyIsOn()
        {
            await TestMissingAsync(
@"class C
{
    int Foo
    {
        get
        {
            [|return|] Bar();
        }
    }
}", options: UseExpressionBodyIncludingPropertiesAndIndexers);
        }

        // Indexer getters are handled the same way as property getters.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestOnIndexer1()
        {
            await TestAsync(
@"class C
{
    int this[int i]
    {
        get
        {
            [|return|] Bar();
        }
    }
}",
@"class C
{
    int this[int i]
    {
        get => Bar();
    }
}", options: UseExpressionBody);
        }

        // No accessor-level diagnostic when the indexer-level preference applies.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestMissingIfIndexerIsOn()
        {
            await TestMissingAsync(
@"class C
{
    int this[int i]
    {
        get
        {
            [|return|] Bar();
        }
    }
}", options: UseExpressionBodyIncludingPropertiesAndIndexers);
        }

        // Setters with a single statement are also converted.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestOnSetter1()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        set
        {
            [|Bar|]();
        }
    }
}",
@"class C
{
    int Foo
    {
        set => [|Bar|]();
    }
}", options: UseExpressionBody);
        }

        // Already expression-bodied setter produces no diagnostic.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestMissingWithOnlySetter()
        {
            await TestMissingAsync(
@"class C
{
    int Foo
    {
        set => [|Bar|]();
    }
}", options: UseExpressionBody);
        }

        // A lone throw statement becomes a throw expression (C# 7).
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestUseExpressionBody3()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        get
        {
            [|throw|] new NotImplementedException();
        }
    }
}",
@"class C
{
    int Foo
    {
        get => throw new NotImplementedException();
    }
}", options: UseExpressionBody);
        }

        // Trailing comments are preserved (compareTokens: false checks trivia too).
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestUseExpressionBody4()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        get
        {
            [|throw|] new NotImplementedException(); // comment
        }
    }
}",
@"class C
{
    int Foo
    {
        get => throw new NotImplementedException(); // comment
    }
}", compareTokens: false, options: UseExpressionBody);
        }

        // Reverse direction: expression-bodied getter back to a block body.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestUseBlockBody1()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        get [|=>|] Bar();
    }
}",
@"class C
{
    int Foo
    {
        get
        {
            return Bar();
        }
    }
}", options: UseBlockBody);
        }

        // Reverse direction for setters: no return statement is generated.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestUseBlockBodyForSetter1()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        set [|=>|] Bar();
    }
}",
@"class C
{
    int Foo
    {
        set
        {
            Bar();
        }
    }
}", options: UseBlockBody);
        }

        // Throw expressions expand back into throw statements.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestUseBlockBody3()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        get [|=>|] throw new NotImplementedException();
    }
}",
@"class C
{
    int Foo
    {
        get
        {
            throw new NotImplementedException();
        }
    }
}", options: UseBlockBody);
        }

        // Trailing comments survive the expansion as well.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsUseExpressionBody)]
        public async Task TestUseBlockBody4()
        {
            await TestAsync(
@"class C
{
    int Foo
    {
        get [|=>|] throw new NotImplementedException(); // comment
    }
}",
@"class C
{
    int Foo
    {
        get
        {
            throw new NotImplementedException(); // comment
        }
    }
}", compareTokens: false, options: UseBlockBody);
        }
    }
}
| |
// Copyright (c) 1995-2009 held by the author(s). All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the names of the Naval Postgraduate School (NPS)
// Modeling Virtual Environments and Simulation (MOVES) Institute
// (http://www.nps.edu and http://www.MovesInstitute.org)
// nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Copyright (c) 2008, MOVES Institute, Naval Postgraduate School. All
// rights reserved. This work is licensed under the BSD open source license,
// available at https://www.movesinstitute.org/licenses/bsd.html
//
// Author: DMcG
// Modified for use with C#:
// - Peter Smith (Naval Air Warfare Center - Training Systems Division)
// - Zvonko Bostjancic (Blubit d.o.o. - zvonko.bostjancic@blubit.si)
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Text;
using System.Xml.Serialization;
using OpenDis.Core;
namespace OpenDis.Dis1998
{
/// <summary>
/// Section 5.2.40. Information about a geometry, a state associated with a geometry, a bounding volume, or an associated entity ID. NOTE: this class requires hand coding.
/// </summary>
[Serializable]
[XmlRoot]
public partial class Environment
{
/// <summary>
/// Record type
/// </summary>
private uint _environmentType;
/// <summary>
/// length, in bits
/// </summary>
private byte _length;
/// <summary>
/// Identify the sequentially numbered record index
/// </summary>
private byte _index;
/// <summary>
/// padding
/// </summary>
private byte _padding1;
/// <summary>
/// Geometry or state record
/// </summary>
private byte _geometry;
/// <summary>
/// padding to bring the total size up to a 64 bit boundry
/// </summary>
private byte _padding2;
/// <summary>
/// Initializes a new instance of the <see cref="Environment"/> class.
/// </summary>
public Environment()
{
}
/// <summary>
/// Implements the operator !=.
/// </summary>
/// <param name="left">The left operand.</param>
/// <param name="right">The right operand.</param>
/// <returns>
/// <c>true</c> if operands are not equal; otherwise, <c>false</c>.
/// </returns>
public static bool operator !=(Environment left, Environment right)
{
return !(left == right);
}
/// <summary>
/// Implements the operator ==.
/// </summary>
/// <param name="left">The left operand.</param>
/// <param name="right">The right operand.</param>
/// <returns>
/// <c>true</c> if both operands are equal; otherwise, <c>false</c>.
/// </returns>
public static bool operator ==(Environment left, Environment right)
{
if (object.ReferenceEquals(left, right))
{
return true;
}
if (((object)left == null) || ((object)right == null))
{
return false;
}
return left.Equals(right);
}
public virtual int GetMarshalledSize()
{
int marshalSize = 0;
marshalSize += 4; // this._environmentType
marshalSize += 1; // this._length
marshalSize += 1; // this._index
marshalSize += 1; // this._padding1
marshalSize += 1; // this._geometry
marshalSize += 1; // this._padding2
return marshalSize;
}
/// <summary>
/// Gets or sets the Record type
/// </summary>
[XmlElement(Type = typeof(uint), ElementName = "environmentType")]
public uint EnvironmentType
{
get
{
return this._environmentType;
}
set
{
this._environmentType = value;
}
}
/// <summary>
/// Gets or sets the length, in bits
/// </summary>
[XmlElement(Type = typeof(byte), ElementName = "length")]
public byte Length
{
get
{
return this._length;
}
set
{
this._length = value;
}
}
/// <summary>
/// Gets or sets the Identify the sequentially numbered record index
/// </summary>
[XmlElement(Type = typeof(byte), ElementName = "index")]
public byte Index
{
get
{
return this._index;
}
set
{
this._index = value;
}
}
/// <summary>
/// Gets or sets the padding
/// </summary>
[XmlElement(Type = typeof(byte), ElementName = "padding1")]
public byte Padding1
{
get
{
return this._padding1;
}
set
{
this._padding1 = value;
}
}
/// <summary>
/// Gets or sets the Geometry or state record
/// </summary>
[XmlElement(Type = typeof(byte), ElementName = "geometry")]
public byte Geometry
{
get
{
return this._geometry;
}
set
{
this._geometry = value;
}
}
/// <summary>
/// Gets or sets the padding to bring the total size up to a 64 bit boundry
/// </summary>
[XmlElement(Type = typeof(byte), ElementName = "padding2")]
public byte Padding2
{
get
{
return this._padding2;
}
set
{
this._padding2 = value;
}
}
/// <summary>
/// Occurs when exception when processing PDU is caught.
/// </summary>
public event EventHandler<PduExceptionEventArgs> ExceptionOccured;
/// <summary>
/// Called when exception occurs (raises the <see cref="Exception"/> event).
/// </summary>
/// <param name="e">The exception.</param>
protected void RaiseExceptionOccured(Exception e)
{
if (Pdu.FireExceptionEvents && this.ExceptionOccured != null)
{
this.ExceptionOccured(this, new PduExceptionEventArgs(e));
}
}
/// <summary>
/// Marshal the data to the DataOutputStream. Note: Length needs to be set before calling this method
/// </summary>
/// <param name="dos">The DataOutputStream instance to which the PDU is marshaled.</param>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Due to ignoring errors.")]
public virtual void Marshal(DataOutputStream dos)
{
if (dos != null)
{
try
{
dos.WriteUnsignedInt((uint)this._environmentType);
dos.WriteUnsignedByte((byte)this._length);
dos.WriteUnsignedByte((byte)this._index);
dos.WriteUnsignedByte((byte)this._padding1);
dos.WriteUnsignedByte((byte)this._geometry);
dos.WriteUnsignedByte((byte)this._padding2);
}
catch (Exception e)
{
if (PduBase.TraceExceptions)
{
Trace.WriteLine(e);
Trace.Flush();
}
this.RaiseExceptionOccured(e);
if (PduBase.ThrowExceptions)
{
throw e;
}
}
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Due to ignoring errors.")]
public virtual void Unmarshal(DataInputStream dis)
{
if (dis != null)
{
try
{
this._environmentType = dis.ReadUnsignedInt();
this._length = dis.ReadUnsignedByte();
this._index = dis.ReadUnsignedByte();
this._padding1 = dis.ReadUnsignedByte();
this._geometry = dis.ReadUnsignedByte();
this._padding2 = dis.ReadUnsignedByte();
}
catch (Exception e)
{
if (PduBase.TraceExceptions)
{
Trace.WriteLine(e);
Trace.Flush();
}
this.RaiseExceptionOccured(e);
if (PduBase.ThrowExceptions)
{
throw e;
}
}
}
}
/// <summary>
/// This allows for a quick display of PDU data. The current format is unacceptable and only used for debugging.
/// This will be modified in the future to provide a better display. Usage:
/// pdu.GetType().InvokeMember("Reflection", System.Reflection.BindingFlags.InvokeMethod, null, pdu, new object[] { sb });
/// where pdu is an object representing a single pdu and sb is a StringBuilder.
/// Note: The supplied Utilities folder contains a method called 'DecodePDU' in the PDUProcessor Class that provides this functionality
/// </summary>
/// <param name="sb">The StringBuilder instance to which the PDU is written to.</param>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Due to ignoring errors.")]
public virtual void Reflection(StringBuilder sb)
{
sb.AppendLine("<Environment>");
try
{
sb.AppendLine("<environmentType type=\"uint\">" + this._environmentType.ToString(CultureInfo.InvariantCulture) + "</environmentType>");
sb.AppendLine("<length type=\"byte\">" + this._length.ToString(CultureInfo.InvariantCulture) + "</length>");
sb.AppendLine("<index type=\"byte\">" + this._index.ToString(CultureInfo.InvariantCulture) + "</index>");
sb.AppendLine("<padding1 type=\"byte\">" + this._padding1.ToString(CultureInfo.InvariantCulture) + "</padding1>");
sb.AppendLine("<geometry type=\"byte\">" + this._geometry.ToString(CultureInfo.InvariantCulture) + "</geometry>");
sb.AppendLine("<padding2 type=\"byte\">" + this._padding2.ToString(CultureInfo.InvariantCulture) + "</padding2>");
sb.AppendLine("</Environment>");
}
catch (Exception e)
{
if (PduBase.TraceExceptions)
{
Trace.WriteLine(e);
Trace.Flush();
}
this.RaiseExceptionOccured(e);
if (PduBase.ThrowExceptions)
{
throw e;
}
}
}
/// <summary>
/// Determines whether the specified <see cref="System.Object"/> is equal to this instance.
/// </summary>
/// <param name="obj">The <see cref="System.Object"/> to compare with this instance.</param>
/// <returns>
/// <c>true</c> if the specified <see cref="System.Object"/> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
public override bool Equals(object obj)
{
return this == obj as Environment;
}
/// <summary>
/// Compares for reference AND value equality.
/// </summary>
/// <param name="obj">The object to compare with this instance.</param>
/// <returns>
/// <c>true</c> if both operands are equal; otherwise, <c>false</c>.
/// </returns>
public bool Equals(Environment obj)
{
bool ivarsEqual = true;
if (obj.GetType() != this.GetType())
{
return false;
}
if (this._environmentType != obj._environmentType)
{
ivarsEqual = false;
}
if (this._length != obj._length)
{
ivarsEqual = false;
}
if (this._index != obj._index)
{
ivarsEqual = false;
}
if (this._padding1 != obj._padding1)
{
ivarsEqual = false;
}
if (this._geometry != obj._geometry)
{
ivarsEqual = false;
}
if (this._padding2 != obj._padding2)
{
ivarsEqual = false;
}
return ivarsEqual;
}
/// <summary>
/// HashCode Helper
/// </summary>
/// <param name="hash">The hash value.</param>
/// <returns>The new hash value.</returns>
private static int GenerateHash(int hash)
{
hash = hash << (5 + hash);
return hash;
}
/// <summary>
/// Gets the hash code.
/// </summary>
/// <returns>The hash code.</returns>
public override int GetHashCode()
{
int result = 0;
result = GenerateHash(result) ^ this._environmentType.GetHashCode();
result = GenerateHash(result) ^ this._length.GetHashCode();
result = GenerateHash(result) ^ this._index.GetHashCode();
result = GenerateHash(result) ^ this._padding1.GetHashCode();
result = GenerateHash(result) ^ this._geometry.GetHashCode();
result = GenerateHash(result) ^ this._padding2.GetHashCode();
return result;
}
}
}
| |
/*
* Copyright (c) 2006, Clutch, Inc.
* Original Author: Jeff Cesnik
* All rights reserved.
*
* - Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Neither the name of the openmetaverse.org nor the names
* of its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using log4net;
namespace OpenMetaverse
{
/// <summary>
/// Base UDP server
/// </summary>
public abstract class OpenSimUDPBase
{
    private static readonly ILog m_log = LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

    /// <summary>
    /// This method is called when an incoming packet is received.
    /// </summary>
    /// <param name="buffer">Incoming packet buffer</param>
    protected abstract void PacketReceived(UDPPacketBuffer buffer);

    /// <summary>UDP port to bind to in server mode</summary>
    protected int m_udpPort;

    /// <summary>Local IP address to bind to in server mode</summary>
    protected IPAddress m_localBindAddress;

    /// <summary>UDP socket, used in either client or server mode</summary>
    private Socket m_udpSocket;

    /// <summary>Flag to process packets asynchronously or synchronously</summary>
    private bool m_asyncPacketHandling;

    /// <summary>The all important shutdown flag. Volatile so the receive/send
    /// callbacks (which run on thread-pool threads) see updates promptly.</summary>
    private volatile bool m_shutdownFlag = true;

    /// <summary>Returns true if the server is currently listening, otherwise false</summary>
    public bool IsRunning { get { return !m_shutdownFlag; } }

    /// <summary>
    /// Default constructor. Does not open the socket; call <see cref="Start"/> for that.
    /// </summary>
    /// <param name="bindAddress">Local IP address to bind the server to</param>
    /// <param name="port">Port to listen for incoming UDP packets on</param>
    public OpenSimUDPBase(IPAddress bindAddress, int port)
    {
        m_localBindAddress = bindAddress;
        m_udpPort = port;
    }

    /// <summary>
    /// Start the UDP server
    /// </summary>
    /// <param name="recvBufferSize">The size of the receive buffer for
    /// the UDP socket. This value is passed up to the operating system
    /// and used in the system networking stack. Use zero to leave this
    /// value as the default</param>
    /// <param name="asyncPacketHandling">Set this to true to start
    /// receiving more packets while current packet handler callbacks are
    /// still running. Setting this to false will complete each packet
    /// callback before the next packet is processed</param>
    /// <remarks>This method will attempt to set the SIO_UDP_CONNRESET flag
    /// on the socket to get newer versions of Windows to behave in a sane
    /// manner (not throwing an exception when the remote side resets the
    /// connection). This call is ignored on Mono where the flag is not
    /// necessary</remarks>
    public void Start(int recvBufferSize, bool asyncPacketHandling)
    {
        m_asyncPacketHandling = asyncPacketHandling;

        if (m_shutdownFlag)
        {
            // Winsock IOCTL code SIO_UDP_CONNRESET (0x9800000C as a signed int).
            const int SIO_UDP_CONNRESET = -1744830452;

            IPEndPoint ipep = new IPEndPoint(m_localBindAddress, m_udpPort);

            m_udpSocket = new Socket(
                AddressFamily.InterNetwork,
                SocketType.Dgram,
                ProtocolType.Udp);

            try
            {
                // This udp socket flag is not supported under mono,
                // so we'll catch the exception and continue
                m_udpSocket.IOControl(SIO_UDP_CONNRESET, new byte[] { 0 }, null);
                m_log.Debug("[UDPBASE]: SIO_UDP_CONNRESET flag set");
            }
            catch (SocketException)
            {
                m_log.Debug("[UDPBASE]: SIO_UDP_CONNRESET flag not supported on this platform, ignoring");
            }

            if (recvBufferSize != 0)
                m_udpSocket.ReceiveBufferSize = recvBufferSize;

            m_udpSocket.Bind(ipep);

            // we're not shutting down, we're starting up
            m_shutdownFlag = false;

            // kick off an async receive. The Start() method will return, the
            // actual receives will occur asynchronously and will be caught in
            // AsyncEndRecieve().
            AsyncBeginReceive();
        }
    }

    /// <summary>
    /// Stops the UDP server. Pending async operations will complete with
    /// ObjectDisposedException, which the callbacks swallow.
    /// </summary>
    public void Stop()
    {
        if (!m_shutdownFlag)
        {
            // Set the flag first so callbacks that race with the close bail out
            // before touching the (soon to be disposed) socket.
            m_shutdownFlag = true;
            m_udpSocket.Close();
        }
    }

    /// <summary>
    /// Allocates a fresh packet buffer and posts an asynchronous receive on the socket.
    /// </summary>
    private void AsyncBeginReceive()
    {
        // allocate a packet buffer
        UDPPacketBuffer buf = new UDPPacketBuffer();

        if (!m_shutdownFlag)
        {
            try
            {
                // kick off an async read
                m_udpSocket.BeginReceiveFrom(
                    buf.Data,
                    0,
                    UDPPacketBuffer.BUFFER_SIZE,
                    SocketFlags.None,
                    ref buf.RemoteEndPoint,
                    AsyncEndReceive,
                    buf);
            }
            catch (SocketException e)
            {
                if (e.SocketErrorCode == SocketError.ConnectionReset)
                {
                    m_log.Warn("[UDPBASE]: SIO_UDP_CONNRESET was ignored, attempting to salvage the UDP listener on port " + m_udpPort);

                    // Retry until the receive can be posted again. NOTE(review):
                    // this loop has no backoff and can spin hot if the error
                    // persists — kept as-is to preserve existing behavior.
                    bool salvaged = false;
                    while (!salvaged)
                    {
                        try
                        {
                            m_udpSocket.BeginReceiveFrom(
                                buf.Data,
                                0,
                                UDPPacketBuffer.BUFFER_SIZE,
                                SocketFlags.None,
                                ref buf.RemoteEndPoint,
                                AsyncEndReceive,
                                buf);

                            salvaged = true;
                        }
                        catch (SocketException) { }
                        catch (ObjectDisposedException) { return; }
                    }

                    m_log.Warn("[UDPBASE]: Salvaged the UDP listener on port " + m_udpPort);
                }
            }
            catch (ObjectDisposedException) { }
        }
    }

    /// <summary>
    /// Completion callback for receives posted by <see cref="AsyncBeginReceive"/>.
    /// Hands the filled buffer to <see cref="PacketReceived"/> and posts the next receive.
    /// </summary>
    private void AsyncEndReceive(IAsyncResult iar)
    {
        // Asynchronous receive operations will complete here through the call
        // to AsyncBeginReceive
        if (!m_shutdownFlag)
        {
            // Asynchronous mode will start another receive before the
            // callback for this packet is even fired. Very parallel :-)
            if (m_asyncPacketHandling)
                AsyncBeginReceive();

            // get the buffer that was created in AsyncBeginReceive
            // this is the received data
            UDPPacketBuffer buffer = (UDPPacketBuffer)iar.AsyncState;

            try
            {
                // get the length of data actually read from the socket, store it with the
                // buffer
                buffer.DataLength = m_udpSocket.EndReceiveFrom(iar, ref buffer.RemoteEndPoint);

                // call the abstract method PacketReceived(), passing the buffer that
                // has just been filled from the socket read.
                PacketReceived(buffer);
            }
            catch (SocketException) { }
            catch (ObjectDisposedException) { }
            finally
            {
                // Synchronous mode waits until the packet callback completes
                // before starting the receive to fetch another packet
                if (!m_asyncPacketHandling)
                    AsyncBeginReceive();
            }
        }
    }

    /// <summary>
    /// Posts an asynchronous send of the given buffer to its RemoteEndPoint.
    /// Silently drops the packet if the socket errors or has been closed.
    /// </summary>
    public void AsyncBeginSend(UDPPacketBuffer buf)
    {
        if (!m_shutdownFlag)
        {
            try
            {
                m_udpSocket.BeginSendTo(
                    buf.Data,
                    0,
                    buf.DataLength,
                    SocketFlags.None,
                    buf.RemoteEndPoint,
                    AsyncEndSend,
                    buf);
            }
            catch (SocketException) { }
            catch (ObjectDisposedException) { }
        }
    }

    /// <summary>
    /// Completion callback for sends posted by <see cref="AsyncBeginSend"/>.
    /// </summary>
    void AsyncEndSend(IAsyncResult result)
    {
        try
        {
            // Complete the send. The original declared locals for the async
            // state and the byte count, but neither was used — the return
            // value is deliberately discarded.
            m_udpSocket.EndSendTo(result);
        }
        catch (SocketException) { }
        catch (ObjectDisposedException) { }
    }
}
}
| |
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/utilities-1.31
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Sample that allows you to play with various VR settings.
/// </summary>
public class OVRSceneSampleController : MonoBehaviour
{
    /// <summary>
    /// The key that quits the application.
    /// </summary>
    public KeyCode quitKey = KeyCode.Escape;

    /// <summary>
    /// An optional texture that appears before the menu fades in.
    /// </summary>
    public Texture fadeInTexture = null;

    /// <summary>
    /// Controls how quickly the player's speed and rotation change based on input.
    /// </summary>
    public float speedRotationIncrement = 0.05f;

    private OVRPlayerController playerController = null;

    // Handle to OVRCameraRig
    private OVRCameraRig cameraController = null;

    /// <summary>
    /// We can set the layer to be anything we want to, this allows
    /// a specific camera to render it.
    /// </summary>
    public string layerName = "Default";

    // Vision mode on/off
    private bool visionMode = true;

    // We want to hold onto GridCube, for potential sharing
    // of the menu RenderTarget
    OVRGridCube gridCube = null;

#if SHOW_DK2_VARIABLES
    private string strVisionMode = "Vision Enabled: ON";
#endif

    #region MonoBehaviour Message Handlers

    /// <summary>
    /// Awake this instance: locate the single expected OVRCameraRig and
    /// OVRPlayerController among the children, warning on zero or multiple.
    /// </summary>
    void Awake()
    {
        // Find camera controller
        OVRCameraRig[] cameraControllers;
        cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if (cameraControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
        }
        else if (cameraControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
        }
        else
        {
            cameraController = cameraControllers[0];
        }

        // Find player controller
        OVRPlayerController[] playerControllers;
        playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();

        if (playerControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
        }
        else if (playerControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
        }
        else
        {
            playerController = playerControllers[0];
        }
    }

    /// <summary>
    /// Start this instance: hide the cursor outside the editor and attach a
    /// GridCube component when a camera rig was found.
    /// </summary>
    void Start()
    {
        // Make sure to hide cursor
        if (Application.isEditor == false)
        {
            Cursor.visible = false;
            Cursor.lockState = CursorLockMode.Locked;
        }

        // CameraController updates
        if (cameraController != null)
        {
            // Add a GridCube component to this object
            gridCube = gameObject.AddComponent<OVRGridCube>();
            gridCube.SetOVRCameraController(ref cameraController);
        }
    }

    /// <summary>
    /// Update this instance: poll the sample hotkeys once per frame.
    /// </summary>
    void Update()
    {
        // Recenter pose
        UpdateRecenterPose();

        // Turn On/Off Vision Mode
        UpdateVisionMode();

        // Update Speed and Rotation Scale
        if (playerController != null)
            UpdateSpeedAndRotationScaleMultiplier();

        // Toggle Fullscreen
        if (Input.GetKeyDown(KeyCode.F11))
            Screen.fullScreen = !Screen.fullScreen;

        if (Input.GetKeyDown(KeyCode.M))
#if UNITY_2017_2_OR_NEWER
            UnityEngine.XR.XRSettings.showDeviceView = !UnityEngine.XR.XRSettings.showDeviceView;
#else
            UnityEngine.VR.VRSettings.showDeviceView = !UnityEngine.VR.VRSettings.showDeviceView;
#endif

#if !UNITY_ANDROID || UNITY_EDITOR
        // Escape Application
        if (Input.GetKeyDown(quitKey))
            Application.Quit();
#endif
    }

    #endregion

    /// <summary>
    /// Toggles position tracking ("vision mode") when F2 is pressed.
    /// </summary>
    void UpdateVisionMode()
    {
        if (Input.GetKeyDown(KeyCode.F2))
        {
            // BUG FIX: the original used "visionMode ^= visionMode", which is
            // always false (x XOR x == 0), so the first F2 press permanently
            // disabled tracking instead of toggling it.
            visionMode = !visionMode;
            OVRManager.tracker.isEnabled = visionMode;
        }
    }

    /// <summary>
    /// Updates the speed and rotation scale multiplier.
    /// Keys 7/8 decrease/increase move scale; 9/0 decrease/increase rotation scale.
    /// </summary>
    void UpdateSpeedAndRotationScaleMultiplier()
    {
        float moveScaleMultiplier = 0.0f;
        playerController.GetMoveScaleMultiplier(ref moveScaleMultiplier);
        if (Input.GetKeyDown(KeyCode.Alpha7))
        {
            moveScaleMultiplier -= speedRotationIncrement;
        }
        else if (Input.GetKeyDown(KeyCode.Alpha8))
        {
            moveScaleMultiplier += speedRotationIncrement;
        }
        playerController.SetMoveScaleMultiplier(moveScaleMultiplier);

        float rotationScaleMultiplier = 0.0f;
        playerController.GetRotationScaleMultiplier(ref rotationScaleMultiplier);
        if (Input.GetKeyDown(KeyCode.Alpha9))
        {
            rotationScaleMultiplier -= speedRotationIncrement;
        }
        else if (Input.GetKeyDown(KeyCode.Alpha0))
        {
            rotationScaleMultiplier += speedRotationIncrement;
        }
        playerController.SetRotationScaleMultiplier(rotationScaleMultiplier);
    }

    /// <summary>
    /// Recenters the pose when R is pressed.
    /// </summary>
    void UpdateRecenterPose()
    {
        if (Input.GetKeyDown(KeyCode.R))
            OVRManager.display.RecenterPose();
    }
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.IO;
using System.Management.Automation;
using System.Security;
using Microsoft.Management.Infrastructure;
using Microsoft.PowerShell;
using Xunit;
namespace PowerShell.Hosting.SDK.Tests
{
public static class HostingTests
{
    /// <summary>Verifies a Microsoft.PowerShell.Utility cmdlet works in a hosted runspace.</summary>
    [Fact]
    public static void TestCommandFromUtility()
    {
        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            var results = ps.AddScript("Get-Verb -Verb get").Invoke();

            foreach (dynamic item in results)
            {
                Assert.Equal("Get", item.Verb);
            }
        }
    }

    /// <summary>Verifies a Microsoft.PowerShell.Management cmdlet works in a hosted runspace.</summary>
    [Fact]
    public static void TestCommandFromManagement()
    {
        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            var path = Environment.CurrentDirectory;
            var results = ps.AddCommand("Test-Path").AddParameter("Path", path).Invoke<bool>();

            foreach (dynamic item in results)
            {
                Assert.True(item);
            }
        }
    }

    /// <summary>Verifies core language script execution works in a hosted runspace.</summary>
    [Fact]
    public static void TestCommandFromCore()
    {
        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            var results = ps.AddScript(@"$i = 0 ; 1..3 | ForEach-Object { $i += $_} ; $i").Invoke<int>();

            foreach (dynamic item in results)
            {
                Assert.Equal(6, item);
            }
        }
    }

    /// <summary>Verifies Microsoft.Management.Infrastructure types are usable (Windows only).</summary>
    [SkippableFact]
    public static void TestCommandFromMMI()
    {
        // Test is disabled since we do not have a CimCmdlets module released in the SDK.
        Skip.IfNot(Platform.IsWindows);

        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            var results = ps.AddScript("[Microsoft.Management.Infrastructure.CimInstance]::new('Win32_Process')").Invoke();
            Assert.True(results.Count > 0);
        }
    }

    /// <summary>Verifies a Microsoft.PowerShell.Diagnostics cmdlet works (Windows only).</summary>
    [SkippableFact]
    public static void TestCommandFromDiagnostics()
    {
        Skip.IfNot(Platform.IsWindows);

        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            var results = ps.AddScript("Get-WinEvent -ListLog Application").Invoke();

            foreach (dynamic item in results)
            {
                Assert.Equal("Application", item.LogName);
            }
        }
    }

    /// <summary>Verifies a Microsoft.PowerShell.Security cmdlet works (Windows only).</summary>
    [SkippableFact]
    public static void TestCommandFromSecurity()
    {
        Skip.IfNot(Platform.IsWindows);

        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            var results = ps.AddScript("ConvertTo-SecureString -String test -AsPlainText -Force").Invoke<SecureString>();
            Assert.IsType<SecureString>(results[0]);
        }
    }

    /// <summary>Verifies a WSMan cmdlet works (Windows only).</summary>
    [SkippableFact]
    public static void TestCommandFromWSMan()
    {
        Skip.IfNot(Platform.IsWindows);

        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            var results = ps.AddScript("Test-WSMan").Invoke();

            foreach (dynamic item in results)
            {
                Assert.Equal("Microsoft Corporation", item.ProductVendor);
            }
        }
    }

    /// <summary>
    /// Verifies native dependencies (libpsl-native) load by creating a symbolic link.
    /// </summary>
    [Fact]
    public static void TestCommandFromNative()
    {
        var fs = File.Create(Path.GetTempFileName());
        fs.Close();
        string target = fs.Name;
        string path = Path.GetTempFileName();

        try
        {
            using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
            {
                // New-Item -ItemType SymbolicLink uses libpsl-native, hence using it for validating native dependencies.
                // BUG FIX: quote the interpolated paths; temp directories can
                // contain spaces (e.g. under "C:\Users\First Last\..."), which
                // would break the unquoted command line.
                string command = $"New-Item -ItemType SymbolicLink -Path \"{path}\" -Target \"{target}\"";
                var results = ps.AddScript(command).Invoke<FileInfo>();

                foreach (var item in results)
                {
                    Assert.Equal(path, item.FullName);
                }
            }
        }
        finally
        {
            // Clean up the temp files even when an assertion above throws.
            if (File.Exists(path))
            {
                File.Delete(path);
            }

            if (File.Exists(target))
            {
                File.Delete(target);
            }
        }
    }

    /// <summary>
    /// Reference assemblies should be handled correctly so that Add-Type works in the hosting scenario.
    /// </summary>
    [Fact]
    public static void TestAddTypeCmdletInHostScenario()
    {
        string code = @"
using System;
public class Foo
{
    public Foo(string name, string path)
    {
        this.Name = name;
        this.Path = path;
    }

    public string Name;
    public string Path;
}
";
        using (System.Management.Automation.PowerShell ps = System.Management.Automation.PowerShell.Create())
        {
            ps.AddCommand("Add-Type").AddParameter("TypeDefinition", code).Invoke();
            ps.Commands.Clear();

            var results = ps.AddScript("[Foo]::new('Joe', 'Unknown')").Invoke();
            Assert.Single(results);

            dynamic foo = results[0];
            Assert.Equal("Joe", foo.Name);
            Assert.Equal("Unknown", foo.Path);
        }
    }

    /// <summary>Verifies ConsoleShell propagates the script's exit code.</summary>
    [Fact]
    public static void TestConsoleShellScenario()
    {
        int ret = ConsoleShell.Start("Hello", string.Empty, new string[] { "-noprofile", "-c", "exit 42" });
        Assert.Equal(42, ret);
    }

    /// <summary>
    /// Verifies built-in modules resolve correctly from $PSHOME in a hosted runspace.
    /// </summary>
    [Fact]
    public static void TestBuiltInModules()
    {
        var iss = System.Management.Automation.Runspaces.InitialSessionState.CreateDefault2();
        if (System.Management.Automation.Platform.IsWindows)
        {
            iss.ExecutionPolicy = Microsoft.PowerShell.ExecutionPolicy.RemoteSigned;
        }

        using var runspace = System.Management.Automation.Runspaces.RunspaceFactory.CreateRunspace(iss);
        runspace.Open();

        using var ps = System.Management.Automation.PowerShell.Create(runspace);
        var results_1 = ps.AddScript("Write-Output Hello > $null; Get-Module").Invoke<System.Management.Automation.PSModuleInfo>();
        Assert.Single(results_1);

        var module = results_1[0];
        Assert.Equal("Microsoft.PowerShell.Utility", module.Name);

        ps.Commands.Clear();
        var results_2 = ps.AddScript("Join-Path $PSHOME 'Modules' 'Microsoft.PowerShell.Utility' 'Microsoft.PowerShell.Utility.psd1'").Invoke<string>();
        var moduleManifestPath = results_2[0];
        Assert.Equal(moduleManifestPath, module.Path, ignoreCase: true);
    }
}
}
| |
using DotVVM.Framework.Binding;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using DotVVM.Framework.Hosting;
using DotVVM.Framework.Runtime;
using System.Collections;
using DotVVM.Framework.Runtime.Compilation.JavascriptCompilation;
namespace DotVVM.Framework.Controls
{
public class GridView : ItemsControl
{
    public GridView() : base("table")
    {
        Columns = new List<GridViewColumn>();
        RowDecorators = new List<Decorator>();
    }

    /// <summary>
    /// Gets or sets the collection of columns rendered for each row.
    /// </summary>
    [MarkupOptions(AllowBinding = false, MappingMode = MappingMode.InnerElement)]
    [ControlPropertyBindingDataContextChange("DataSource")]
    [CollectionElementDataContextChange]
    public List<GridViewColumn> Columns
    {
        get { return (List<GridViewColumn>)GetValue(ColumnsProperty); }
        set { SetValue(ColumnsProperty, value); }
    }
    public static readonly DotvvmProperty ColumnsProperty =
        DotvvmProperty.Register<List<GridViewColumn>, GridView>(c => c.Columns);

    /// <summary>
    /// Gets or sets the decorators wrapped around each data row (applied innermost-first).
    /// </summary>
    [MarkupOptions(AllowBinding = false, MappingMode = MappingMode.InnerElement)]
    [ControlPropertyBindingDataContextChange("DataSource")]
    [CollectionElementDataContextChange]
    public List<Decorator> RowDecorators
    {
        get { return (List<Decorator>)GetValue(RowDecoratorsProperty); }
        set { SetValue(RowDecoratorsProperty, value); }
    }
    public static readonly DotvvmProperty RowDecoratorsProperty =
        DotvvmProperty.Register<List<Decorator>, GridView>(c => c.RowDecorators);

    /// <summary>
    /// Gets or sets the command invoked when the sort expression changes.
    /// Only used when the DataSource is not an IGridViewDataSet.
    /// </summary>
    [ConstantDataContextChange(typeof(string))]
    [MarkupOptions(AllowHardCodedValue = false)]
    public Action<string> SortChanged
    {
        get { return (Action<string>)GetValue(SortChangedProperty); }
        set { SetValue(SortChangedProperty, value); }
    }
    public static readonly DotvvmProperty SortChangedProperty =
        DotvvmProperty.Register<Action<string>, GridView>(c => c.SortChanged, null);

    protected internal override void OnLoad(DotvvmRequestContext context)
    {
        DataBind(context);
        base.OnLoad(context);
    }

    protected internal override void OnPreRender(DotvvmRequestContext context)
    {
        DataBind(context); // TODO: support for observable collection
        base.OnPreRender(context);
    }

    /// <summary>
    /// Rebuilds the child control tree (header + one DataItemContainer per item)
    /// from the current DataSource.
    /// </summary>
    private void DataBind(DotvvmRequestContext context)
    {
        Children.Clear();

        var dataSourceBinding = GetDataSourceBinding();
        var dataSource = DataSource;

        // Resolve the sort command: IGridViewDataSet sorts itself, otherwise
        // fall back to the SortChanged command binding (if any).
        Action<string> sortCommand = null;
        if (dataSource is IGridViewDataSet)
        {
            sortCommand = ((IGridViewDataSet)dataSource).SetSortExpression;
        }
        else
        {
            var sortCommandBinding = GetCommandBinding(SortChangedProperty);
            if (sortCommandBinding != null)
            {
                sortCommand = s => sortCommandBinding.Delegate(new []{ s }.Concat(BindingExpression.GetDataContexts(this, true)).ToArray(), null);
            }
        }

        var index = 0;
        if (dataSource != null)
        {
            // create header row
            CreateHeaderRow(context, sortCommand);

            var items = GetIEnumerableFromDataSource(dataSource);
            foreach (var item in items)
            {
                // create row
                var placeholder = new DataItemContainer { DataItemIndex = index };
                placeholder.SetBinding(DataContextProperty, GetItemBinding((IList)items, dataSourceBinding.GetKnockoutBindingExpression(), index));
                Children.Add(placeholder);
                CreateRow(context, placeholder);
                index++;
            }
        }
    }

    /// <summary>
    /// Creates the thead/tr header row with one th per column.
    /// </summary>
    private void CreateHeaderRow(DotvvmRequestContext context, Action<string> sortCommand)
    {
        var head = new HtmlGenericControl("thead");
        Children.Add(head);

        var headerRow = new HtmlGenericControl("tr");
        head.Children.Add(headerRow);
        foreach (var column in Columns)
        {
            var cell = new HtmlGenericControl("th");
            SetCellAttributes(column, cell, true);
            headerRow.Children.Add(cell);
            column.CreateHeaderControls(context, this, sortCommand, cell);
        }
    }

    /// <summary>
    /// Applies width and CSS class attributes from the column definition to a cell.
    /// </summary>
    private static void SetCellAttributes(GridViewColumn column, HtmlGenericControl cell, bool isHeaderCell)
    {
        if (!string.IsNullOrEmpty(column.Width))
        {
            cell.Attributes["style"] = "width: " + column.Width;
        }

        var cssClassBinding = column.GetValueBinding(isHeaderCell ? GridViewColumn.CssClassProperty : GridViewColumn.HeaderCssClassProperty);
        if (cssClassBinding != null)
        {
            cell.Attributes["class"] = cssClassBinding;
        }
        else if (!string.IsNullOrWhiteSpace(column.CssClass))
        {
            cell.Attributes["class"] = column.CssClass;
        }
    }

    /// <summary>
    /// Creates a tr row (wrapped in any RowDecorators) with one td per column
    /// inside the given item placeholder.
    /// </summary>
    private void CreateRow(DotvvmRequestContext context, DataItemContainer placeholder)
    {
        var row = new HtmlGenericControl("tr");

        DotvvmControl container = row;
        foreach (var decorator in RowDecorators)
        {
            var decoratorInstance = decorator.Clone();
            decoratorInstance.Children.Add(container);
            container = decoratorInstance;
        }
        placeholder.Children.Add(container);

        // create cells
        foreach (var column in Columns)
        {
            var cell = new HtmlGenericControl("td");
            SetCellAttributes(column, cell, false);
            row.Children.Add(cell);
            column.CreateControls(context, cell);
        }
    }

    protected override void RenderContents(IHtmlWriter writer, RenderContext context)
    {
        if (Children.Count == 0) return;

        // render the header
        Children[0].Render(writer, context);

        // render body
        var dataSourceBinding = GetDataSourceBinding();
        if (!RenderOnServer)
        {
            var expression = dataSourceBinding.GetKnockoutBindingExpression();
            writer.AddKnockoutForeachDataBind(expression);
        }
        writer.RenderBeginTag("tbody");

        // render contents
        if (RenderOnServer)
        {
            // BUG FIX: the original enumerated Children.Skip(1) but rendered
            // Children[index] with index starting at 0, which re-rendered the
            // header row and never rendered the last data row. Render each
            // enumerated data child directly instead.
            foreach (var child in Children.Skip(1))
            {
                child.Render(writer, context);
            }
        }
        else
        {
            // render on client
            var placeholder = new DataItemContainer { DataContext = null };
            placeholder.SetValue(Internal.PathFragmentProperty,
                JavascriptCompilationHelper.AddIndexerToViewModel(dataSourceBinding.GetKnockoutBindingExpression(), "$index"));
            Children.Add(placeholder);
            CreateRow(context.RequestContext, placeholder);
            placeholder.Render(writer, context);
        }

        writer.RenderEndTag();
    }
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) Under one or more
contributor license agreements. See the NOTICE file distributed with
this work for Additional information regarding copyright ownership.
The ASF licenses this file to You Under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed Under the License is distributed on an "AS Is" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations Under the License.
==================================================================== */
namespace NPOI.SS.Formula.PTG
{
using System;
using System.Text;
using NPOI.Util;
using NPOI.HSSF.Record;
using NPOI.SS.Util;
using NPOI.SS.Formula.Constant;
/**
* ArrayPtg - handles arrays
*
* The ArrayPtg is a little weird, the size of the Ptg when parsing initially only
* includes the Ptg sid and the reserved bytes. The next Ptg in the expression then follows.
* It is only after the "size" of all the Ptgs is met, that the ArrayPtg data is actually
* held after this. So Ptg.CreateParsedExpression keeps track of the number of
* ArrayPtg elements and need to Parse the data upto the FORMULA record size.
*
* @author Jason Height (jheight at chariot dot net dot au)
*/
internal class ArrayPtg : Ptg
{
    public const byte sid = 0x20;

    private const int RESERVED_FIELD_LEN = 7;

    /**
     * The size of the plain tArray token written within the standard formula tokens
     * (not including the data which comes after all formula tokens)
     */
    public const int PLAIN_TOKEN_SIZE = 1 + RESERVED_FIELD_LEN;

    // 7 bytes of reserved data (stored as an int, short and byte here)
    private int _reserved0Int;
    private int _reserved1Short;
    private int _reserved2Byte;

    // data from these fields comes after the Ptg data of all tokens in current formula
    private int _nColumns;
    private int _nRows;
    private Object[] _arrayValues;

    ArrayPtg(int reserved0, int reserved1, int reserved2, int nColumns, int nRows, Object[] arrayValues)
    {
        _reserved0Int = reserved0;
        _reserved1Short = reserved1;
        _reserved2Byte = reserved2;
        _nColumns = nColumns;
        _nRows = nRows;
        _arrayValues = arrayValues;
    }

    /**
     * @param values2d array values arranged in rows
     */
    public ArrayPtg(Object[][] values2d)
    {
        int nColumns = values2d[0].Length;
        int nRows = values2d.Length;
        // convert 2-d to 1-d array (row by row according to GetValueIndex())
        _nColumns = (short)nColumns;
        _nRows = (short)nRows;

        Object[] vv = new Object[_nColumns * _nRows];
        for (int r = 0; r < nRows; r++)
        {
            Object[] rowData = values2d[r];
            for (int c = 0; c < nColumns; c++)
            {
                vv[GetValueIndex(c, r)] = rowData[c];
            }
        }
        _arrayValues = vv;
        _reserved0Int = 0;
        _reserved1Short = 0;
        _reserved2Byte = 0;
    }

    /**
     * Re-assembles the flat value array back into a row-major 2-D array.
     * @throws InvalidOperationException if the trailing array data has not been read yet
     */
    public Object[][] GetTokenArrayValues()
    {
        if (_arrayValues == null)
        {
            throw new InvalidOperationException("array values not read yet");
        }
        Object[][] result = new Object[_nRows][];
        for (int r = 0; r < _nRows; r++)
        {
            result[r] = new object[_nColumns];
            for (int c = 0; c < _nColumns; c++)
            {
                result[r][c] = _arrayValues[GetValueIndex(c, r)];
            }
        }
        return result;
    }

    public override bool IsBaseToken
    {
        get { return false; }
    }

    public override String ToString()
    {
        StringBuilder buffer = new StringBuilder("[ArrayPtg]\n");
        buffer.Append("columns = ").Append(ColumnCount).Append("\n");
        buffer.Append("rows = ").Append(RowCount).Append("\n");
        for (int x = 0; x < ColumnCount; x++)
        {
            for (int y = 0; y < RowCount; y++)
            {
                // Index directly; _arrayValues is a 1-D object array, so
                // Array.GetValue() was an unnecessary indirection.
                Object o = _arrayValues[GetValueIndex(x, y)];
                buffer.Append("[").Append(x).Append("][").Append(y).Append("] = ").Append(o).Append("\n");
            }
        }
        return buffer.ToString();
    }

    /**
     * Maps a (column, row) coordinate to the index in the internal 1-D value array.
     * Note - elements are stored row by row (rowIx * nColumns + colIx).
     * @return the index into the internal 1D array for the specified column and row
     */
    public int GetValueIndex(int colIx, int rowIx)
    {
        if (colIx < 0 || colIx >= _nColumns)
        {
            throw new ArgumentException("Specified colIx (" + colIx
                + ") is outside the allowed range (0.." + (_nColumns - 1) + ")");
        }
        if (rowIx < 0 || rowIx >= _nRows)
        {
            throw new ArgumentException("Specified rowIx (" + rowIx
                + ") is outside the allowed range (0.." + (_nRows - 1) + ")");
        }
        return rowIx * _nColumns + colIx;
    }

    /**
     * Writes only the plain token header (sid + 7 reserved bytes). The array
     * dimensions and values trail the whole formula; see WriteTokenValueBytes.
     */
    public override void Write(ILittleEndianOutput out1)
    {
        out1.WriteByte(sid + PtgClass);
        out1.WriteInt(_reserved0Int);
        out1.WriteShort(_reserved1Short);
        out1.WriteByte(_reserved2Byte);
    }

    /**
     * Writes the trailing token data (dimensions stored biased by -1, then the
     * encoded constant values).
     * @return the number of bytes written
     */
    public int WriteTokenValueBytes(ILittleEndianOutput out1)
    {
        out1.WriteByte(_nColumns - 1);
        out1.WriteShort(_nRows - 1);
        ConstantValueParser.Encode(out1, _arrayValues);
        return 3 + ConstantValueParser.GetEncodedSize(_arrayValues);
    }

    public int RowCount
    {
        get
        {
            return _nRows;
        }
    }

    public int ColumnCount
    {
        get
        {
            return _nColumns;
        }
    }

    /** This size includes the size of the array Ptg plus the Array Ptg Token value size*/
    public override int Size
    {
        get
        {
            // plain token (1 + 7) + biased column byte (1) + biased row short (2)
            int size = 1 + 7 + 1 + 2;
            size += ConstantValueParser.GetEncodedSize(_arrayValues);
            return size;
        }
    }

    /**
     * Renders the array as an Excel literal, e.g. {1,2;3,4} — ',' separates
     * columns, ';' separates rows.
     */
    public override String ToFormulaString()
    {
        StringBuilder b = new StringBuilder();
        b.Append("{");
        for (int y = 0; y < RowCount; y++)
        {
            if (y > 0)
            {
                b.Append(";");
            }
            for (int x = 0; x < ColumnCount; x++)
            {
                if (x > 0)
                {
                    b.Append(",");
                }
                Object o = _arrayValues[GetValueIndex(x, y)];
                b.Append(GetConstantText(o));
            }
        }
        b.Append("}");
        return b.ToString();
    }

    /**
     * Formats a single constant value the way it appears inside an array literal.
     */
    private static String GetConstantText(Object o)
    {
        if (o == null)
        {
            return ""; // TODO - how is 'empty value' represented in formulas?
        }
        if (o is String)
        {
            return "\"" + (String)o + "\"";
        }
        // 'Double' and 'double' are the same runtime type in C#; the original
        // tested both, which was redundant (likewise 'bool'/'Boolean' below).
        if (o is double)
        {
            return NumberToTextConverter.ToText((double)o);
        }
        if (o is bool)
        {
            return ((bool)o).ToString().ToUpper();
        }
        if (o is ErrorConstant)
        {
            return ((ErrorConstant)o).Text;
        }
        throw new ArgumentException("Unexpected constant class (" + o.GetType().Name + ")");
    }

    public override byte DefaultOperandClass
    {
        get { return Ptg.CLASS_ARRAY; }
    }

    /**
     * Represents the initial plain tArray token (without the constant data that trails the whole
     * formula). Objects of this class are only temporary and cannot be used as {@link Ptg}s.
     * These temporary objects get converted to {@link ArrayPtg} by the
     * {@link #finishReading(LittleEndianInput)} method.
     */
    internal class Initial : Ptg
    {
        private int _reserved0;
        private int _reserved1;
        private int _reserved2;

        public Initial(ILittleEndianInput in1)
        {
            _reserved0 = in1.ReadInt();
            _reserved1 = in1.ReadUShort();
            _reserved2 = in1.ReadUByte();
        }

        private static Exception Invalid()
        {
            throw new InvalidOperationException("This object is a partially initialised tArray, and cannot be used as a Ptg");
        }

        public override byte DefaultOperandClass
        {
            get
            {
                throw Invalid();
            }
        }

        public override int Size
        {
            get
            {
                return PLAIN_TOKEN_SIZE;
            }
        }

        public override bool IsBaseToken
        {
            get
            {
                return false;
            }
        }

        public override String ToFormulaString()
        {
            throw Invalid();
        }

        public override void Write(ILittleEndianOutput out1)
        {
            throw Invalid();
        }

        /**
         * Read in the actual token (array) values. This occurs
         * AFTER the last Ptg in the expression.
         * See page 304-305 of Excel97-2007BinaryFileFormat(xls)Specification.pdf
         */
        public ArrayPtg FinishReading(ILittleEndianInput in1)
        {
            int nColumns = in1.ReadUByte();
            short nRows = in1.ReadShort();
            //The token_1_columns and token_2_rows do not follow the documentation.
            //The number of physical rows and columns is actually +1 of these values.
            //Which is not explicitly documented.
            nColumns++;
            nRows++;

            int totalCount = nRows * nColumns;
            Object[] arrayValues = ConstantValueParser.Parse(in1, totalCount);

            ArrayPtg result = new ArrayPtg(_reserved0, _reserved1, _reserved2, nColumns, nRows, arrayValues);
            result.PtgClass = this.PtgClass;
            return result;
        }
    }
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using pb = Google.Protobuf;
using pbwkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using sys = System;
using sc = System.Collections;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Cloud.ErrorReporting.V1Beta1
{
/// <summary>
/// Settings for a <see cref="ErrorGroupServiceClient"/>.
/// </summary>
public sealed partial class ErrorGroupServiceSettings : gaxgrpc::ServiceSettingsBase
{
    /// <summary>
    /// Get a new instance of the default <see cref="ErrorGroupServiceSettings"/>.
    /// </summary>
    /// <returns>
    /// A new instance of the default <see cref="ErrorGroupServiceSettings"/>.
    /// </returns>
    public static ErrorGroupServiceSettings GetDefault() => new ErrorGroupServiceSettings();
    /// <summary>
    /// Constructs a new <see cref="ErrorGroupServiceSettings"/> object with default settings.
    /// </summary>
    public ErrorGroupServiceSettings() { }
    // Copy constructor used by Clone(): copies each per-RPC CallSettings, then invokes
    // the OnCopy partial hook so hand-written partial-class code can copy extra state.
    private ErrorGroupServiceSettings(ErrorGroupServiceSettings existing) : base(existing)
    {
        gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
        GetGroupSettings = existing.GetGroupSettings;
        UpdateGroupSettings = existing.UpdateGroupSettings;
        OnCopy(existing);
    }
    // Optional hook implemented in a hand-written partial class file; no-op if absent.
    partial void OnCopy(ErrorGroupServiceSettings existing);
    /// <summary>
    /// The filter specifying which RPC <see cref="grpccore::StatusCode"/>s are eligible for retry
    /// for "Idempotent" <see cref="ErrorGroupServiceClient"/> RPC methods.
    /// </summary>
    /// <remarks>
    /// The eligible RPC <see cref="grpccore::StatusCode"/>s for retry for "Idempotent" RPC methods are:
    /// <list type="bullet">
    /// <item><description><see cref="grpccore::StatusCode.DeadlineExceeded"/></description></item>
    /// <item><description><see cref="grpccore::StatusCode.Unavailable"/></description></item>
    /// </list>
    /// </remarks>
    public static sys::Predicate<grpccore::RpcException> IdempotentRetryFilter { get; } =
        gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.DeadlineExceeded, grpccore::StatusCode.Unavailable);
    /// <summary>
    /// The filter specifying which RPC <see cref="grpccore::StatusCode"/>s are eligible for retry
    /// for "NonIdempotent" <see cref="ErrorGroupServiceClient"/> RPC methods.
    /// </summary>
    /// <remarks>
    /// There are no RPC <see cref="grpccore::StatusCode"/>s eligible for retry for "NonIdempotent" RPC methods.
    /// </remarks>
    public static sys::Predicate<grpccore::RpcException> NonIdempotentRetryFilter { get; } =
        gaxgrpc::RetrySettings.FilterForStatusCodes();
    /// <summary>
    /// "Default" retry backoff for <see cref="ErrorGroupServiceClient"/> RPC methods.
    /// </summary>
    /// <returns>
    /// The "Default" retry backoff for <see cref="ErrorGroupServiceClient"/> RPC methods.
    /// </returns>
    /// <remarks>
    /// The "Default" retry backoff for <see cref="ErrorGroupServiceClient"/> RPC methods is defined as:
    /// <list type="bullet">
    /// <item><description>Initial delay: 100 milliseconds</description></item>
    /// <item><description>Maximum delay: 60000 milliseconds</description></item>
    /// <item><description>Delay multiplier: 1.3</description></item>
    /// </list>
    /// </remarks>
    public static gaxgrpc::BackoffSettings GetDefaultRetryBackoff() => new gaxgrpc::BackoffSettings(
        delay: sys::TimeSpan.FromMilliseconds(100),
        maxDelay: sys::TimeSpan.FromMilliseconds(60000),
        delayMultiplier: 1.3
    );
    /// <summary>
    /// "Default" timeout backoff for <see cref="ErrorGroupServiceClient"/> RPC methods.
    /// </summary>
    /// <returns>
    /// The "Default" timeout backoff for <see cref="ErrorGroupServiceClient"/> RPC methods.
    /// </returns>
    /// <remarks>
    /// The "Default" timeout backoff for <see cref="ErrorGroupServiceClient"/> RPC methods is defined as:
    /// <list type="bullet">
    /// <item><description>Initial timeout: 20000 milliseconds</description></item>
    /// <item><description>Timeout multiplier: 1.0</description></item>
    /// <item><description>Maximum timeout: 20000 milliseconds</description></item>
    /// </list>
    /// </remarks>
    public static gaxgrpc::BackoffSettings GetDefaultTimeoutBackoff() => new gaxgrpc::BackoffSettings(
        delay: sys::TimeSpan.FromMilliseconds(20000),
        maxDelay: sys::TimeSpan.FromMilliseconds(20000),
        delayMultiplier: 1.0
    );
    /// <summary>
    /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
    /// <c>ErrorGroupServiceClient.GetGroup</c> and <c>ErrorGroupServiceClient.GetGroupAsync</c>.
    /// </summary>
    /// <remarks>
    /// The default <c>ErrorGroupServiceClient.GetGroup</c> and
    /// <c>ErrorGroupServiceClient.GetGroupAsync</c> <see cref="gaxgrpc::RetrySettings"/> are:
    /// <list type="bullet">
    /// <item><description>Initial retry delay: 100 milliseconds</description></item>
    /// <item><description>Retry delay multiplier: 1.3</description></item>
    /// <item><description>Retry maximum delay: 60000 milliseconds</description></item>
    /// <item><description>Initial timeout: 20000 milliseconds</description></item>
    /// <item><description>Timeout multiplier: 1.0</description></item>
    /// <item><description>Timeout maximum delay: 20000 milliseconds</description></item>
    /// </list>
    /// Retry will be attempted on the following response status codes:
    /// <list>
    /// <item><description><see cref="grpccore::StatusCode.DeadlineExceeded"/></description></item>
    /// <item><description><see cref="grpccore::StatusCode.Unavailable"/></description></item>
    /// </list>
    /// Default RPC expiration is 600000 milliseconds.
    /// </remarks>
    public gaxgrpc::CallSettings GetGroupSettings { get; set; } = gaxgrpc::CallSettings.FromCallTiming(
        gaxgrpc::CallTiming.FromRetry(new gaxgrpc::RetrySettings(
            retryBackoff: GetDefaultRetryBackoff(),
            timeoutBackoff: GetDefaultTimeoutBackoff(),
            totalExpiration: gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(600000)),
            retryFilter: IdempotentRetryFilter
        )));
    /// <summary>
    /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
    /// <c>ErrorGroupServiceClient.UpdateGroup</c> and <c>ErrorGroupServiceClient.UpdateGroupAsync</c>.
    /// </summary>
    /// <remarks>
    /// The default <c>ErrorGroupServiceClient.UpdateGroup</c> and
    /// <c>ErrorGroupServiceClient.UpdateGroupAsync</c> <see cref="gaxgrpc::RetrySettings"/> are:
    /// <list type="bullet">
    /// <item><description>Initial retry delay: 100 milliseconds</description></item>
    /// <item><description>Retry delay multiplier: 1.3</description></item>
    /// <item><description>Retry maximum delay: 60000 milliseconds</description></item>
    /// <item><description>Initial timeout: 20000 milliseconds</description></item>
    /// <item><description>Timeout multiplier: 1.0</description></item>
    /// <item><description>Timeout maximum delay: 20000 milliseconds</description></item>
    /// </list>
    /// Retry will be attempted on the following response status codes:
    /// <list>
    /// <item><description><see cref="grpccore::StatusCode.DeadlineExceeded"/></description></item>
    /// <item><description><see cref="grpccore::StatusCode.Unavailable"/></description></item>
    /// </list>
    /// Default RPC expiration is 600000 milliseconds.
    /// </remarks>
    // NOTE(review): UpdateGroup is configured with the Idempotent retry filter by the
    // generator config (UpdateGroup replaces the whole resource, so retrying is safe).
    public gaxgrpc::CallSettings UpdateGroupSettings { get; set; } = gaxgrpc::CallSettings.FromCallTiming(
        gaxgrpc::CallTiming.FromRetry(new gaxgrpc::RetrySettings(
            retryBackoff: GetDefaultRetryBackoff(),
            timeoutBackoff: GetDefaultTimeoutBackoff(),
            totalExpiration: gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(600000)),
            retryFilter: IdempotentRetryFilter
        )));
    /// <summary>
    /// Creates a deep clone of this object, with all the same property values.
    /// </summary>
    /// <returns>A deep clone of this <see cref="ErrorGroupServiceSettings"/> object.</returns>
    public ErrorGroupServiceSettings Clone() => new ErrorGroupServiceSettings(this);
}
/// <summary>
/// ErrorGroupService client wrapper, for convenient use.
/// </summary>
public abstract partial class ErrorGroupServiceClient
{
    /// <summary>
    /// The default endpoint for the ErrorGroupService service, which is a host of "clouderrorreporting.googleapis.com" and a port of 443.
    /// </summary>
    public static gaxgrpc::ServiceEndpoint DefaultEndpoint { get; } = new gaxgrpc::ServiceEndpoint("clouderrorreporting.googleapis.com", 443);
    /// <summary>
    /// The default ErrorGroupService scopes.
    /// </summary>
    /// <remarks>
    /// The default ErrorGroupService scopes are:
    /// <list type="bullet">
    /// <item><description>"https://www.googleapis.com/auth/cloud-platform"</description></item>
    /// </list>
    /// </remarks>
    public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] {
        "https://www.googleapis.com/auth/cloud-platform",
    });
    // Channel pool shared by the endpoint-based Create/CreateAsync overloads below;
    // channels created through it are shut down via ShutdownDefaultChannelsAsync().
    private static readonly gaxgrpc::ChannelPool s_channelPool = new gaxgrpc::ChannelPool(DefaultScopes);
    /// <summary>
    /// Asynchronously creates a <see cref="ErrorGroupServiceClient"/>, applying defaults for all unspecified settings,
    /// and creating a channel connecting to the given endpoint with application default credentials where
    /// necessary. See the example for how to use custom credentials.
    /// </summary>
    /// <example>
    /// This sample shows how to create a client using default credentials:
    /// <code>
    /// using Google.Cloud.ErrorReporting.V1Beta1;
    /// ...
    /// // When running on Google Cloud Platform this will use the project Compute Credential.
    /// // Or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to the path of a JSON
    /// // credential file to use that credential.
    /// ErrorGroupServiceClient client = await ErrorGroupServiceClient.CreateAsync();
    /// </code>
    /// This sample shows how to create a client using credentials loaded from a JSON file:
    /// <code>
    /// using Google.Cloud.ErrorReporting.V1Beta1;
    /// using Google.Apis.Auth.OAuth2;
    /// using Grpc.Auth;
    /// using Grpc.Core;
    /// ...
    /// GoogleCredential cred = GoogleCredential.FromFile("/path/to/credentials.json");
    /// Channel channel = new Channel(
    ///     ErrorGroupServiceClient.DefaultEndpoint.Host, ErrorGroupServiceClient.DefaultEndpoint.Port, cred.ToChannelCredentials());
    /// ErrorGroupServiceClient client = ErrorGroupServiceClient.Create(channel);
    /// ...
    /// // Shutdown the channel when it is no longer required.
    /// await channel.ShutdownAsync();
    /// </code>
    /// </example>
    /// <param name="endpoint">Optional <see cref="gaxgrpc::ServiceEndpoint"/>.</param>
    /// <param name="settings">Optional <see cref="ErrorGroupServiceSettings"/>.</param>
    /// <returns>The task representing the created <see cref="ErrorGroupServiceClient"/>.</returns>
    public static async stt::Task<ErrorGroupServiceClient> CreateAsync(gaxgrpc::ServiceEndpoint endpoint = null, ErrorGroupServiceSettings settings = null)
    {
        // Obtain (or reuse) a pooled channel for the endpoint, then delegate to the
        // channel-based Create overload.
        grpccore::Channel channel = await s_channelPool.GetChannelAsync(endpoint ?? DefaultEndpoint).ConfigureAwait(false);
        return Create(channel, settings);
    }
    /// <summary>
    /// Synchronously creates a <see cref="ErrorGroupServiceClient"/>, applying defaults for all unspecified settings,
    /// and creating a channel connecting to the given endpoint with application default credentials where
    /// necessary. See the example for how to use custom credentials.
    /// </summary>
    /// <example>
    /// This sample shows how to create a client using default credentials:
    /// <code>
    /// using Google.Cloud.ErrorReporting.V1Beta1;
    /// ...
    /// // When running on Google Cloud Platform this will use the project Compute Credential.
    /// // Or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to the path of a JSON
    /// // credential file to use that credential.
    /// ErrorGroupServiceClient client = ErrorGroupServiceClient.Create();
    /// </code>
    /// This sample shows how to create a client using credentials loaded from a JSON file:
    /// <code>
    /// using Google.Cloud.ErrorReporting.V1Beta1;
    /// using Google.Apis.Auth.OAuth2;
    /// using Grpc.Auth;
    /// using Grpc.Core;
    /// ...
    /// GoogleCredential cred = GoogleCredential.FromFile("/path/to/credentials.json");
    /// Channel channel = new Channel(
    ///     ErrorGroupServiceClient.DefaultEndpoint.Host, ErrorGroupServiceClient.DefaultEndpoint.Port, cred.ToChannelCredentials());
    /// ErrorGroupServiceClient client = ErrorGroupServiceClient.Create(channel);
    /// ...
    /// // Shutdown the channel when it is no longer required.
    /// channel.ShutdownAsync().Wait();
    /// </code>
    /// </example>
    /// <param name="endpoint">Optional <see cref="gaxgrpc::ServiceEndpoint"/>.</param>
    /// <param name="settings">Optional <see cref="ErrorGroupServiceSettings"/>.</param>
    /// <returns>The created <see cref="ErrorGroupServiceClient"/>.</returns>
    public static ErrorGroupServiceClient Create(gaxgrpc::ServiceEndpoint endpoint = null, ErrorGroupServiceSettings settings = null)
    {
        grpccore::Channel channel = s_channelPool.GetChannel(endpoint ?? DefaultEndpoint);
        return Create(channel, settings);
    }
    /// <summary>
    /// Creates a <see cref="ErrorGroupServiceClient"/> which uses the specified channel for remote operations.
    /// </summary>
    /// <param name="channel">The <see cref="grpccore::Channel"/> for remote operations. Must not be null.</param>
    /// <param name="settings">Optional <see cref="ErrorGroupServiceSettings"/>.</param>
    /// <returns>The created <see cref="ErrorGroupServiceClient"/>.</returns>
    public static ErrorGroupServiceClient Create(grpccore::Channel channel, ErrorGroupServiceSettings settings = null)
    {
        gax::GaxPreconditions.CheckNotNull(channel, nameof(channel));
        // Wrap the channel in a call invoker and delegate to the invoker-based overload,
        // which is the single place the concrete client implementation is constructed.
        return Create(new grpccore::DefaultCallInvoker(channel), settings);
    }
    /// <summary>
    /// Creates a <see cref="ErrorGroupServiceClient"/> which uses the specified call invoker for remote operations.
    /// </summary>
    /// <param name="callInvoker">The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.</param>
    /// <param name="settings">Optional <see cref="ErrorGroupServiceSettings"/>.</param>
    /// <returns>The created <see cref="ErrorGroupServiceClient"/>.</returns>
    public static ErrorGroupServiceClient Create(grpccore::CallInvoker callInvoker, ErrorGroupServiceSettings settings = null)
    {
        gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
        // If an interceptor was supplied via settings, wrap the invoker so every RPC
        // flows through it before reaching the wire.
        grpccore::Interceptors.Interceptor interceptor = settings?.Interceptor;
        if (interceptor != null)
        {
            callInvoker = grpccore::Interceptors.CallInvokerExtensions.Intercept(callInvoker, interceptor);
        }
        ErrorGroupService.ErrorGroupServiceClient grpcClient = new ErrorGroupService.ErrorGroupServiceClient(callInvoker);
        return new ErrorGroupServiceClientImpl(grpcClient, settings);
    }
    /// <summary>
    /// Shuts down any channels automatically created by <see cref="Create(gaxgrpc::ServiceEndpoint, ErrorGroupServiceSettings)"/>
    /// and <see cref="CreateAsync(gaxgrpc::ServiceEndpoint, ErrorGroupServiceSettings)"/>. Channels which weren't automatically
    /// created are not affected.
    /// </summary>
    /// <remarks>After calling this method, further calls to <see cref="Create(gaxgrpc::ServiceEndpoint, ErrorGroupServiceSettings)"/>
    /// and <see cref="CreateAsync(gaxgrpc::ServiceEndpoint, ErrorGroupServiceSettings)"/> will create new channels, which could
    /// in turn be shut down by another call to this method.</remarks>
    /// <returns>A task representing the asynchronous shutdown operation.</returns>
    public static stt::Task ShutdownDefaultChannelsAsync() => s_channelPool.ShutdownChannelsAsync();
    /// <summary>
    /// The underlying gRPC ErrorGroupService client.
    /// </summary>
    public virtual ErrorGroupService.ErrorGroupServiceClient GrpcClient
    {
        // Overridden by ErrorGroupServiceClientImpl; the base implementation exists only
        // so user-defined partial subclasses need not override every member.
        get { throw new sys::NotImplementedException(); }
    }
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// [Required] The group resource name. Written as
    /// <code>projects/<var>projectID</var>/groups/<var>group_name</var></code>.
    /// Call
    /// <a href="/error-reporting/reference/rest/v1beta1/projects.groupStats/list">
    /// <code>groupStats.list</code></a> to return a list of groups belonging to
    /// this project.
    ///
    /// Example: <code>projects/my-project-123/groups/my-group</code>
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(
        GroupName groupName,
        gaxgrpc::CallSettings callSettings = null) => GetGroupAsync(
            new GetGroupRequest
            {
                GroupNameAsGroupName = gax::GaxPreconditions.CheckNotNull(groupName, nameof(groupName)),
            },
            callSettings);
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// [Required] The group resource name. Written as
    /// <code>projects/<var>projectID</var>/groups/<var>group_name</var></code>.
    /// Call
    /// <a href="/error-reporting/reference/rest/v1beta1/projects.groupStats/list">
    /// <code>groupStats.list</code></a> to return a list of groups belonging to
    /// this project.
    ///
    /// Example: <code>projects/my-project-123/groups/my-group</code>
    /// </param>
    /// <param name="cancellationToken">
    /// A <see cref="st::CancellationToken"/> to use for this RPC.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(
        GroupName groupName,
        st::CancellationToken cancellationToken) => GetGroupAsync(
            groupName,
            gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// [Required] The group resource name. Written as
    /// <code>projects/<var>projectID</var>/groups/<var>group_name</var></code>.
    /// Call
    /// <a href="/error-reporting/reference/rest/v1beta1/projects.groupStats/list">
    /// <code>groupStats.list</code></a> to return a list of groups belonging to
    /// this project.
    ///
    /// Example: <code>projects/my-project-123/groups/my-group</code>
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// The RPC response.
    /// </returns>
    public virtual ErrorGroup GetGroup(
        GroupName groupName,
        gaxgrpc::CallSettings callSettings = null) => GetGroup(
            new GetGroupRequest
            {
                GroupNameAsGroupName = gax::GaxPreconditions.CheckNotNull(groupName, nameof(groupName)),
            },
            callSettings);
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(
        GetGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        // Request-object overloads are the ones actually overridden by the Impl class;
        // this base throws to catch unimplemented custom subclasses.
        throw new sys::NotImplementedException();
    }
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="cancellationToken">
    /// A <see cref="st::CancellationToken"/> to use for this RPC.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(
        GetGroupRequest request,
        st::CancellationToken cancellationToken) => GetGroupAsync(
            request,
            gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// The RPC response.
    /// </returns>
    public virtual ErrorGroup GetGroup(
        GetGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        throw new sys::NotImplementedException();
    }
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="group">
    /// [Required] The group which replaces the resource on the server.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(
        ErrorGroup group,
        gaxgrpc::CallSettings callSettings = null) => UpdateGroupAsync(
            new UpdateGroupRequest
            {
                Group = gax::GaxPreconditions.CheckNotNull(group, nameof(group)),
            },
            callSettings);
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="group">
    /// [Required] The group which replaces the resource on the server.
    /// </param>
    /// <param name="cancellationToken">
    /// A <see cref="st::CancellationToken"/> to use for this RPC.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(
        ErrorGroup group,
        st::CancellationToken cancellationToken) => UpdateGroupAsync(
            group,
            gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="group">
    /// [Required] The group which replaces the resource on the server.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// The RPC response.
    /// </returns>
    public virtual ErrorGroup UpdateGroup(
        ErrorGroup group,
        gaxgrpc::CallSettings callSettings = null) => UpdateGroup(
            new UpdateGroupRequest
            {
                Group = gax::GaxPreconditions.CheckNotNull(group, nameof(group)),
            },
            callSettings);
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(
        UpdateGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        throw new sys::NotImplementedException();
    }
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="cancellationToken">
    /// A <see cref="st::CancellationToken"/> to use for this RPC.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(
        UpdateGroupRequest request,
        st::CancellationToken cancellationToken) => UpdateGroupAsync(
            request,
            gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// The RPC response.
    /// </returns>
    public virtual ErrorGroup UpdateGroup(
        UpdateGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        throw new sys::NotImplementedException();
    }
}
/// <summary>
/// ErrorGroupService client wrapper implementation, for convenient use.
/// </summary>
public sealed partial class ErrorGroupServiceClientImpl : ErrorGroupServiceClient
{
    // ApiCall wrappers binding each raw gRPC method pair (sync + async) to its
    // effective retry/timeout CallSettings; built once in the constructor.
    private readonly gaxgrpc::ApiCall<GetGroupRequest, ErrorGroup> _callGetGroup;
    private readonly gaxgrpc::ApiCall<UpdateGroupRequest, ErrorGroup> _callUpdateGroup;
    /// <summary>
    /// Constructs a client wrapper for the ErrorGroupService service, with the specified gRPC client and settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">The base <see cref="ErrorGroupServiceSettings"/> used within this client </param>
    public ErrorGroupServiceClientImpl(ErrorGroupService.ErrorGroupServiceClient grpcClient, ErrorGroupServiceSettings settings)
    {
        GrpcClient = grpcClient;
        // Null settings fall back to the generated defaults.
        ErrorGroupServiceSettings effectiveSettings = settings ?? ErrorGroupServiceSettings.GetDefault();
        gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
        _callGetGroup = clientHelper.BuildApiCall<GetGroupRequest, ErrorGroup>(
            GrpcClient.GetGroupAsync, GrpcClient.GetGroup, effectiveSettings.GetGroupSettings);
        _callUpdateGroup = clientHelper.BuildApiCall<UpdateGroupRequest, ErrorGroup>(
            GrpcClient.UpdateGroupAsync, GrpcClient.UpdateGroup, effectiveSettings.UpdateGroupSettings);
        // Give hand-written partial methods a chance to modify each ApiCall
        // (first the generic hook, then the per-RPC hook).
        Modify_ApiCall(ref _callGetGroup);
        Modify_GetGroupApiCall(ref _callGetGroup);
        Modify_ApiCall(ref _callUpdateGroup);
        Modify_UpdateGroupApiCall(ref _callUpdateGroup);
        OnConstruction(grpcClient, effectiveSettings, clientHelper);
    }
    // Partial methods are named to (mostly) ensure there cannot be conflicts with RPC method names.
    // Partial methods called for every ApiCall on construction.
    // Allows modification of all the underlying ApiCall objects.
    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call)
        where TRequest : class, pb::IMessage<TRequest>
        where TResponse : class, pb::IMessage<TResponse>;
    // Partial methods called for each ApiCall on construction.
    // Allows per-RPC-method modification of the underlying ApiCall object.
    partial void Modify_GetGroupApiCall(ref gaxgrpc::ApiCall<GetGroupRequest, ErrorGroup> call);
    partial void Modify_UpdateGroupApiCall(ref gaxgrpc::ApiCall<UpdateGroupRequest, ErrorGroup> call);
    partial void OnConstruction(ErrorGroupService.ErrorGroupServiceClient grpcClient, ErrorGroupServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
    /// <summary>
    /// The underlying gRPC ErrorGroupService client.
    /// </summary>
    public override ErrorGroupService.ErrorGroupServiceClient GrpcClient { get; }
    // Partial methods called on each request.
    // Allows per-RPC-call modification to the request and CallSettings objects,
    // before the underlying RPC is performed.
    partial void Modify_GetGroupRequest(ref GetGroupRequest request, ref gaxgrpc::CallSettings settings);
    partial void Modify_UpdateGroupRequest(ref UpdateGroupRequest request, ref gaxgrpc::CallSettings settings);
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public override stt::Task<ErrorGroup> GetGroupAsync(
        GetGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetGroupRequest(ref request, ref callSettings);
        return _callGetGroup.Async(request, callSettings);
    }
    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// The RPC response.
    /// </returns>
    public override ErrorGroup GetGroup(
        GetGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetGroupRequest(ref request, ref callSettings);
        return _callGetGroup.Sync(request, callSettings);
    }
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// A Task containing the RPC response.
    /// </returns>
    public override stt::Task<ErrorGroup> UpdateGroupAsync(
        UpdateGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        Modify_UpdateGroupRequest(ref request, ref callSettings);
        return _callUpdateGroup.Async(request, callSettings);
    }
    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">
    /// The request object containing all of the parameters for the API call.
    /// </param>
    /// <param name="callSettings">
    /// If not null, applies overrides to this RPC call.
    /// </param>
    /// <returns>
    /// The RPC response.
    /// </returns>
    public override ErrorGroup UpdateGroup(
        UpdateGroupRequest request,
        gaxgrpc::CallSettings callSettings = null)
    {
        Modify_UpdateGroupRequest(ref request, ref callSettings);
        return _callUpdateGroup.Sync(request, callSettings);
    }
}
// Partial classes to enable page-streaming
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using osu.Framework.Allocation;
using osu.Framework.Audio;
using osu.Framework.Audio.Sample;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Input.Events;
using osu.Framework.Logging;
using osu.Framework.Screens;
using osu.Framework.Threading;
using osu.Game.Beatmaps;
using osu.Game.Configuration;
using osu.Game.Graphics.Containers;
using osu.Game.IO.Archives;
using osu.Game.Online.API;
using osu.Game.Online.Spectator;
using osu.Game.Overlays;
using osu.Game.Rulesets;
using osu.Game.Rulesets.Mods;
using osu.Game.Rulesets.Scoring;
using osu.Game.Rulesets.UI;
using osu.Game.Scoring;
using osu.Game.Scoring.Legacy;
using osu.Game.Screens.Ranking;
using osu.Game.Skinning;
using osu.Game.Users;
using osuTK.Graphics;
namespace osu.Game.Screens.Play
{
[Cached]
[Cached(typeof(ISamplePlaybackDisabler))]
public abstract class Player : ScreenWithBeatmapBackground, ISamplePlaybackDisabler
{
/// <summary>
/// The delay upon completion of the beatmap before displaying the results screen.
/// </summary>
public const double RESULTS_DISPLAY_DELAY = 1000.0;
public override bool AllowBackButton => false; // handled by HoldForMenuButton
protected override UserActivity InitialActivity => new UserActivity.SoloGame(Beatmap.Value.BeatmapInfo, Ruleset.Value);
public override float BackgroundParallaxAmount => 0.1f;
public override bool HideOverlaysOnEnter => true;
protected override OverlayActivation InitialOverlayActivationMode => OverlayActivation.UserTriggered;
// We are managing our own adjustments (see OnEntering/OnExiting).
public override bool AllowRateAdjustments => false;
private readonly IBindable<bool> gameActive = new Bindable<bool>(true);
private readonly Bindable<bool> samplePlaybackDisabled = new Bindable<bool>();
/// <summary>
/// Whether gameplay should pause when the game window focus is lost.
/// </summary>
protected virtual bool PauseOnFocusLost => true;
public Action RestartRequested;
public bool HasFailed { get; private set; }
private Bindable<bool> mouseWheelDisabled;
private readonly Bindable<bool> storyboardReplacesBackground = new Bindable<bool>();
protected readonly Bindable<bool> LocalUserPlaying = new Bindable<bool>();
public int RestartCount;
[Resolved]
private ScoreManager scoreManager { get; set; }
[Resolved]
private IAPIProvider api { get; set; }
[Resolved]
private MusicController musicController { get; set; }
[Resolved]
private SpectatorClient spectatorClient { get; set; }
protected Ruleset GameplayRuleset { get; private set; }
protected GameplayBeatmap GameplayBeatmap { get; private set; }
private Sample sampleRestart;
public BreakOverlay BreakOverlay;
/// <summary>
/// Whether the gameplay is currently in a break.
/// </summary>
public readonly IBindable<bool> IsBreakTime = new BindableBool();
private BreakTracker breakTracker;
private SkipOverlay skipIntroOverlay;
private SkipOverlay skipOutroOverlay;
protected ScoreProcessor ScoreProcessor { get; private set; }
protected HealthProcessor HealthProcessor { get; private set; }
protected DrawableRuleset DrawableRuleset { get; private set; }
protected HUDOverlay HUDOverlay { get; private set; }
public bool LoadedBeatmapSuccessfully => DrawableRuleset?.Objects.Any() == true;
protected GameplayClockContainer GameplayClockContainer { get; private set; }
public DimmableStoryboard DimmableStoryboard { get; private set; }
[Cached]
[Cached(Type = typeof(IBindable<IReadOnlyList<Mod>>))]
protected new readonly Bindable<IReadOnlyList<Mod>> Mods = new Bindable<IReadOnlyList<Mod>>(Array.Empty<Mod>());
/// <summary>
/// Whether failing should be allowed.
/// By default, this checks whether all selected mods allow failing.
/// </summary>
protected virtual bool CheckModsAllowFailure()
{
    // Every fail-overriding mod must agree to the fail. PerformFail() may have side
    // effects, so, like Enumerable.All, enumeration stops at the first mod that vetoes.
    foreach (var mod in Mods.Value.OfType<IApplicableFailOverride>())
    {
        if (!mod.PerformFail())
            return false;
    }

    return true;
}
public readonly PlayerConfiguration Configuration;
protected Score Score { get; private set; }
/// <summary>
/// Create a new player instance.
/// </summary>
/// <param name="configuration">Optional configuration; a default <see cref="PlayerConfiguration"/> is used when null.</param>
protected Player(PlayerConfiguration configuration = null)
{
Configuration = configuration ?? new PlayerConfiguration();
}
// Keeps the device screen awake while gameplay is in progress.
private ScreenSuspensionHandler screenSuspension;
// Local dependency container used to cache gameplay components (see CreateChildDependencies / load()).
private DependencyContainer dependencies;
protected override IReadOnlyDependencyContainer CreateChildDependencies(IReadOnlyDependencyContainer parent)
{
    // Wrap the parent's dependencies in a writable container so that gameplay
    // components (ruleset, processors, beatmap) can be cached against this screen in load().
    dependencies = new DependencyContainer(base.CreateChildDependencies(parent));
    return dependencies;
}
// Finalises setup which requires the full component hierarchy to be loaded:
// creates the score, starts replay recording and hooks up focus-loss pausing.
protected override void LoadComplete()
{
base.LoadComplete();
// Nothing to set up if load() aborted (eg. invalid beatmap).
if (!LoadedBeatmapSuccessfully)
return;
Score = CreateScore();
// ensure the score is in a consistent state with the current player.
Score.ScoreInfo.Beatmap = Beatmap.Value.BeatmapInfo;
Score.ScoreInfo.Ruleset = GameplayRuleset.RulesetInfo;
Score.ScoreInfo.Mods = Mods.Value.ToArray();
PrepareReplay();
// Keep the score's statistics up-to-date on every judgement.
ScoreProcessor.NewJudgement += result => ScoreProcessor.PopulateScore(Score.ScoreInfo);
gameActive.BindValueChanged(_ => updatePauseOnFocusLostState(), true);
}
/// <summary>
/// Run any recording / playback setup for replays.
/// </summary>
protected virtual void PrepareReplay()
{
// By default, record the local user's inputs into the score's replay.
DrawableRuleset.SetRecordTarget(Score);
}
[BackgroundDependencyLoader(true)]
// Constructs the full gameplay scene graph. Note: the ordering here is intentional and
// load-bearing (skin sources before ruleset, underlay/gameplay before overlays).
private void load(AudioManager audio, OsuConfigManager config, OsuGameBase game)
{
// Take local copies of the global mods so any mod state mutated during gameplay doesn't leak out.
Mods.Value = base.Mods.Value.Select(m => m.CreateCopy()).ToArray();
if (Beatmap.Value is DummyWorkingBeatmap)
return;
IBeatmap playableBeatmap = loadPlayableBeatmap();
// Abort early on load failure; LoadedBeatmapSuccessfully will report false.
if (playableBeatmap == null)
return;
sampleRestart = audio.Samples.Get(@"Gameplay/restart");
mouseWheelDisabled = config.GetBindable<bool>(OsuSetting.MouseDisableWheel);
// game may be null in tests (BackgroundDependencyLoader(true) allows missing dependencies).
if (game != null)
gameActive.BindTo(game.IsActive);
if (game is OsuGame osuGame)
LocalUserPlaying.BindTo(osuGame.LocalUserPlaying);
DrawableRuleset = GameplayRuleset.CreateDrawableRulesetWith(playableBeatmap, Mods.Value);
dependencies.CacheAs(DrawableRuleset);
ScoreProcessor = GameplayRuleset.CreateScoreProcessor();
ScoreProcessor.ApplyBeatmap(playableBeatmap);
ScoreProcessor.Mods.BindTo(Mods);
dependencies.CacheAs(ScoreProcessor);
HealthProcessor = GameplayRuleset.CreateHealthProcessor(playableBeatmap.HitObjects[0].StartTime);
HealthProcessor.ApplyBeatmap(playableBeatmap);
dependencies.CacheAs(HealthProcessor);
if (!ScoreProcessor.Mode.Disabled)
config.BindWith(OsuSetting.ScoreDisplayMode, ScoreProcessor.Mode);
InternalChild = GameplayClockContainer = CreateGameplayClockContainer(Beatmap.Value, DrawableRuleset.GameplayStartTime);
AddInternal(GameplayBeatmap = new GameplayBeatmap(playableBeatmap));
AddInternal(screenSuspension = new ScreenSuspensionHandler(GameplayClockContainer));
dependencies.CacheAs(GameplayBeatmap);
var rulesetSkinProvider = new RulesetSkinProvidingContainer(GameplayRuleset, playableBeatmap, Beatmap.Value.Skin);
// load the skinning hierarchy first.
// this is intentionally done in two stages to ensure things are in a loaded state before exposing the ruleset to skin sources.
GameplayClockContainer.Add(rulesetSkinProvider);
rulesetSkinProvider.AddRange(new[]
{
// underlay and gameplay should have access to the skinning sources.
createUnderlayComponents(),
createGameplayComponents(Beatmap.Value, playableBeatmap)
});
// add the overlay components as a separate step as they proxy some elements from the above underlay/gameplay components.
// also give the overlays the ruleset skin provider to allow rulesets to potentially override HUD elements (used to disable combo counters etc.)
// we may want to limit this in the future to disallow rulesets from outright replacing elements the user expects to be there.
rulesetSkinProvider.Add(createOverlayComponents(Beatmap.Value));
if (!DrawableRuleset.AllowGameplayOverlays)
{
HUDOverlay.ShowHud.Value = false;
HUDOverlay.ShowHud.Disabled = true;
BreakOverlay.Hide();
}
// Halt the gameplay clock while the frame-stable clock waits for replay/spectator frames.
DrawableRuleset.FrameStableClock.WaitingOnFrames.BindValueChanged(waiting =>
{
if (waiting.NewValue)
GameplayClockContainer.Stop();
else
GameplayClockContainer.Start();
});
DrawableRuleset.IsPaused.BindValueChanged(paused =>
{
updateGameplayState();
updateSampleDisabledState();
});
DrawableRuleset.FrameStableClock.IsCatchingUp.BindValueChanged(_ => updateSampleDisabledState());
DrawableRuleset.HasReplayLoaded.BindValueChanged(_ => updateGameplayState());
// bind clock into components that require it
DrawableRuleset.IsPaused.BindTo(GameplayClockContainer.IsPaused);
// Forward judgement results to all interested processors.
DrawableRuleset.NewResult += r =>
{
HealthProcessor.ApplyResult(r);
ScoreProcessor.ApplyResult(r);
GameplayBeatmap.ApplyResult(r);
};
DrawableRuleset.RevertResult += r =>
{
HealthProcessor.RevertResult(r);
ScoreProcessor.RevertResult(r);
};
// A finished storyboard outro triggers progression to results (see scoreCompletionChanged).
DimmableStoryboard.HasStoryboardEnded.ValueChanged += storyboardEnded =>
{
if (storyboardEnded.NewValue)
progressToResults(true);
};
// Bind the judgement processors to ourselves
ScoreProcessor.HasCompleted.BindValueChanged(scoreCompletionChanged);
HealthProcessor.Failed += onFail;
foreach (var mod in Mods.Value.OfType<IApplicableToScoreProcessor>())
mod.ApplyToScoreProcessor(ScoreProcessor);
foreach (var mod in Mods.Value.OfType<IApplicableToHealthProcessor>())
mod.ApplyToHealthProcessor(HealthProcessor);
IsBreakTime.BindTo(breakTracker.IsBreakTime);
IsBreakTime.BindValueChanged(onBreakTimeChanged, true);
}
/// <summary>
/// Creates the clock container driving gameplay. Overridable so derived players (eg. spectator/multiplayer)
/// can substitute a different clock implementation.
/// </summary>
protected virtual GameplayClockContainer CreateGameplayClockContainer(WorkingBeatmap beatmap, double gameplayStart) => new MasterGameplayClockContainer(beatmap, gameplayStart);
// The storyboard sits underneath all gameplay content.
private Drawable createUnderlayComponents() =>
DimmableStoryboard = new DimmableStoryboard(Beatmap.Value.Storyboard) { RelativeSizeAxes = Axes.Both };
// Builds the core gameplay layer: the drawable ruleset with the judgement processors and
// break tracker mounted inside its frame-stable hierarchy (so they tick on the frame-stable clock).
private Drawable createGameplayComponents(WorkingBeatmap working, IBeatmap playableBeatmap) => new ScalingContainer(ScalingMode.Gameplay)
{
Children = new Drawable[]
{
DrawableRuleset.With(r =>
r.FrameStableComponents.Children = new Drawable[]
{
ScoreProcessor,
HealthProcessor,
new ComboEffects(ScoreProcessor),
breakTracker = new BreakTracker(DrawableRuleset.GameplayStartTime, ScoreProcessor)
{
Breaks = working.Beatmap.Breaks
}
}),
}
};
// Builds the overlay layer: HUD, break/fail/pause overlays, skip buttons and hotkey handlers.
// Ordering within the container determines draw/input order.
private Drawable createOverlayComponents(WorkingBeatmap working)
{
var container = new Container
{
RelativeSizeAxes = Axes.Both,
Children = new[]
{
DimmableStoryboard.OverlayLayerContainer.CreateProxy(),
BreakOverlay = new BreakOverlay(working.Beatmap.BeatmapInfo.LetterboxInBreaks, ScoreProcessor)
{
Clock = DrawableRuleset.FrameStableClock,
ProcessCustomClock = false,
Breaks = working.Beatmap.Breaks
},
// display the cursor above some HUD elements.
DrawableRuleset.Cursor?.CreateProxy() ?? new Container(),
DrawableRuleset.ResumeOverlay?.CreateProxy() ?? new Container(),
HUDOverlay = new HUDOverlay(DrawableRuleset, Mods.Value)
{
HoldToQuit =
{
Action = () => PerformExit(true),
IsPaused = { BindTarget = GameplayClockContainer.IsPaused }
},
KeyCounter =
{
AlwaysVisible = { BindTarget = DrawableRuleset.HasReplayLoaded },
IsCounting = false
},
Anchor = Anchor.Centre,
Origin = Anchor.Centre
},
skipIntroOverlay = new SkipOverlay(DrawableRuleset.GameplayStartTime)
{
RequestSkip = performUserRequestedSkip
},
// The outro skip jumps straight to results (no delay), bypassing any remaining storyboard.
skipOutroOverlay = new SkipOverlay(Beatmap.Value.Storyboard.LatestEventTime ?? 0)
{
RequestSkip = () => progressToResults(false),
Alpha = 0
},
FailOverlay = new FailOverlay
{
OnRetry = Restart,
OnQuit = () => PerformExit(true),
},
PauseOverlay = new PauseOverlay
{
OnResume = Resume,
Retries = RestartCount,
OnRetry = Restart,
OnQuit = () => PerformExit(true),
},
new HotkeyExitOverlay
{
Action = () =>
{
if (!this.IsCurrentScreen()) return;
fadeOut(true);
PerformExit(false);
},
},
failAnimation = new FailAnimation(DrawableRuleset) { OnComplete = onFailComplete, },
}
};
// Skip buttons are removed outright when skipping is not permitted.
if (!Configuration.AllowSkipping || !DrawableRuleset.AllowGameplayOverlays)
{
skipIntroOverlay.Expire();
skipOutroOverlay.Expire();
}
// Playback-rate settings only apply when we own the master clock.
if (GameplayClockContainer is MasterGameplayClockContainer master)
HUDOverlay.PlayerSettingsOverlay.PlaybackSettings.UserPlaybackRate.BindTarget = master.UserPlaybackRate;
if (Configuration.AllowRestart)
{
container.Add(new HotkeyRetryOverlay
{
Action = () =>
{
if (!this.IsCurrentScreen()) return;
fadeOut(true);
Restart();
},
});
}
return container;
}
// Reacts to entering/leaving a break: refreshes gameplay state, re-evaluates
// focus-loss pausing, and suspends key counting while in a break.
private void onBreakTimeChanged(ValueChangedEvent<bool> isBreakTime)
{
updateGameplayState();
updatePauseOnFocusLostState();
HUDOverlay.KeyCounter.IsCounting = !isBreakTime.NewValue;
}
// Recomputes whether the local user is actively playing and propagates
// that state to overlay activation and the LocalUserPlaying bindable.
private void updateGameplayState()
{
    bool replayAttached = DrawableRuleset.HasReplayLoaded.Value;
    bool paused = DrawableRuleset.IsPaused.Value;
    bool inBreak = breakTracker.IsBreakTime.Value;

    // "In gameplay" means none of the above conditions hold.
    bool inGameplay = !(replayAttached || paused || inBreak);

    // Suppress game-wide overlays while actively playing.
    OverlayActivationMode.Value = inGameplay ? OverlayActivation.Disabled : OverlayActivation.UserTriggered;
    LocalUserPlaying.Value = inGameplay;
}
// Samples should not play while the frame-stable clock is catching up
// (eg. fast-forwarding after a seek) or while the gameplay clock is paused.
private void updateSampleDisabledState()
{
    bool catchingUp = DrawableRuleset.FrameStableClock.IsCatchingUp.Value;
    bool clockPaused = GameplayClockContainer.GameplayClock.IsPaused.Value;

    samplePlaybackDisabled.Value = catchingUp || clockPaused;
}
// Attempts to pause gameplay when the game window loses focus, honouring
// PauseOnFocusLost, the current pausability of this player, and breaks.
private void updatePauseOnFocusLostState()
{
    if (!PauseOnFocusLost || !pausingSupportedByCurrentState || breakTracker.IsBreakTime.Value)
        return;

    // Nothing to do while the window still has focus.
    if (gameActive.Value)
        return;

    if (!Pause())
    {
        // if the initial pause could not be satisfied, the pause cooldown may be active.
        // reschedule the pause attempt until it can be achieved.
        Scheduler.AddOnce(updatePauseOnFocusLostState);
    }
}
// Converts the current working beatmap into a playable beatmap for the selected ruleset.
// Returns null (after logging) on any failure, which aborts player load.
private IBeatmap loadPlayableBeatmap()
{
IBeatmap playable;
try
{
if (Beatmap.Value.Beatmap == null)
throw new InvalidOperationException("Beatmap was not loaded");
// Prefer the globally selected ruleset, falling back to the beatmap's own ruleset.
var rulesetInfo = Ruleset.Value ?? Beatmap.Value.BeatmapInfo.Ruleset;
GameplayRuleset = rulesetInfo.CreateInstance();
try
{
playable = Beatmap.Value.GetPlayableBeatmap(GameplayRuleset.RulesetInfo, Mods.Value);
}
catch (BeatmapInvalidForRulesetException)
{
// A playable beatmap may not be creatable with the user's preferred ruleset, so try using the beatmap's default ruleset
rulesetInfo = Beatmap.Value.BeatmapInfo.Ruleset;
GameplayRuleset = rulesetInfo.CreateInstance();
playable = Beatmap.Value.GetPlayableBeatmap(rulesetInfo, Mods.Value);
}
if (playable.HitObjects.Count == 0)
{
Logger.Log("Beatmap contains no hit objects!", level: LogLevel.Error);
return null;
}
}
catch (Exception e)
{
Logger.Error(e, "Could not load beatmap successfully!");
//couldn't load, hard abort!
return null;
}
return playable;
}
/// <summary>
/// Attempts to complete a user request to exit gameplay.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item>This should only be called in response to a user interaction. Exiting is not guaranteed.</item>
/// <item>This will interrupt any pending progression to the results screen, even if the transition has begun.</item>
/// </list>
/// </remarks>
/// <param name="showDialogFirst">
/// Whether the pause or fail dialog should be shown before performing an exit.
/// If <see langword="true"/> and a dialog is not yet displayed, the exit will be blocked and the relevant dialog will display instead.
/// </param>
protected void PerformExit(bool showDialogFirst)
{
// if an exit has been requested, cancel any pending completion (the user has shown intention to exit).
resultsDisplayDelegate?.Cancel();
// there is a chance that an exit request occurs after the transition to results has already started.
// even in such a case, the user has shown intent, so forcefully return to this screen (to proceed with the upwards exit process).
if (!this.IsCurrentScreen())
{
ValidForResume = false;
// in the potential case that this instance has already been exited, this is required to avoid a crash.
if (this.GetChildScreen() != null)
this.MakeCurrent();
return;
}
bool pauseOrFailDialogVisible =
PauseOverlay.State.Value == Visibility.Visible || FailOverlay.State.Value == Visibility.Visible;
if (showDialogFirst && !pauseOrFailDialogVisible)
{
// if the fail animation is currently in progress, accelerate it (it will show the pause dialog on completion).
if (ValidForResume && HasFailed)
{
failAnimation.FinishTransforms(true);
return;
}
// even if this call has requested a dialog, there is a chance the current player mode doesn't support pausing.
if (pausingSupportedByCurrentState)
{
// in the case a dialog needs to be shown, attempt to pause and show it.
// this may fail (see internal checks in Pause()) but the fail cases are temporary, so don't fall through to Exit().
Pause();
return;
}
}
// The actual exit is performed if
// - the pause / fail dialog was not requested
// - the pause / fail dialog was requested but is already displayed (user showing intention to exit).
// - the pause / fail dialog was requested but couldn't be displayed due to the type or state of this Player instance.
this.Exit();
}
// Handles the intro skip button: silences samples, performs the skip on the
// master clock (when present), then restores the sample playback state.
private void performUserRequestedSkip()
{
    // disable sample playback to stop currently playing samples and perform skip
    samplePlaybackDisabled.Value = true;

    if (GameplayClockContainer is MasterGameplayClockContainer masterClock)
        masterClock.Skip();

    // return samplePlaybackDisabled.Value to what is defined by the beatmap's current state
    updateSampleDisabledState();
}
/// <summary>
/// Seek to a specific time in gameplay.
/// </summary>
/// <param name="time">The destination time to seek to.</param>
public void Seek(double time) => GameplayClockContainer.Seek(time);
// Pending delegate which restores frame-stable playback after a non-frame-stable seek.
private ScheduledDelegate frameStablePlaybackResetDelegate;
/// <summary>
/// Seeks to a specific time in gameplay, bypassing frame stability.
/// </summary>
/// <remarks>
/// Intermediate hitobject judgements may not be applied or reverted correctly during this seek.
/// </remarks>
/// <param name="time">The destination time to seek to.</param>
internal void NonFrameStableSeek(double time)
{
// If a previous reset is still pending, run it immediately so its state isn't lost.
if (frameStablePlaybackResetDelegate?.Cancelled == false && !frameStablePlaybackResetDelegate.Completed)
frameStablePlaybackResetDelegate.RunTask();
bool wasFrameStable = DrawableRuleset.FrameStablePlayback;
DrawableRuleset.FrameStablePlayback = false;
Seek(time);
// Delay resetting frame-stable playback for one frame to give the FrameStabilityContainer a chance to seek.
frameStablePlaybackResetDelegate = ScheduleAfterChildren(() => DrawableRuleset.FrameStablePlayback = wasFrameStable);
}
/// <summary>
/// Restart gameplay via a parent <see cref="PlayerLoader"/>.
/// <remarks>This can be called from a child screen in order to trigger the restart process.</remarks>
/// </summary>
public void Restart()
{
if (!Configuration.AllowRestart)
return;
// at the point of restarting the track should either already be paused or the volume should be zero.
// stopping here is to ensure music doesn't become audible after exiting back to PlayerLoader.
musicController.Stop();
sampleRestart?.Play();
// Notify the parent (PlayerLoader) so it can re-push a fresh player, then exit this one.
RestartRequested?.Invoke();
PerformExit(false);
}
/// <summary>
/// This delegate, when set, means the results screen has been queued to appear.
/// The display of the results screen may be delayed by any work being done in <see cref="PrepareScoreForResultsAsync"/>.
/// </summary>
/// <remarks>
/// Once set, this can *only* be cancelled by rewinding, ie. if <see cref="JudgementProcessor.HasCompleted">ScoreProcessor.HasCompleted</see> becomes <see langword="false"/>.
/// Even if the user requests an exit, it will forcefully proceed to the results screen (see special case in <see cref="OnExiting"/>).
/// </remarks>
private ScheduledDelegate resultsDisplayDelegate;
/// <summary>
/// A task which asynchronously prepares a completed score for display at results.
/// This may include performing net requests or importing the score into the database, generally to ensure things are in a sane state for the play session.
/// </summary>
private Task<ScoreInfo> prepareScoreForDisplayTask;
/// <summary>
/// Handles changes in player state which may progress the completion of gameplay / this screen's lifetime.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown if this method is called more than once without changing state.</exception>
private void scoreCompletionChanged(ValueChangedEvent<bool> completed)
{
// If this player instance is in the middle of an exit, don't attempt any kind of state update.
if (!this.IsCurrentScreen())
return;
// Special case to handle rewinding post-completion. This is the only way already queued forward progress can be cancelled.
// TODO: Investigate whether this can be moved to a RewindablePlayer subclass or similar.
// Currently, even if this scenario is hit, prepareScoreForDisplay has already been queued (and potentially run).
// In scenarios where rewinding is possible (replay, spectating) this is a non-issue as no submission/import work is done,
// but it still doesn't feel right that this exists here.
if (!completed.NewValue)
{
resultsDisplayDelegate?.Cancel();
resultsDisplayDelegate = null;
ValidForResume = true;
skipOutroOverlay.Hide();
return;
}
// Only show the completion screen if the player hasn't failed
if (HealthProcessor.HasFailed)
return;
// Setting this early in the process means that even if something were to go wrong in the order of events following, there
// is no chance that a user could return to the (already completed) Player instance from a child screen.
ValidForResume = false;
// Ensure we are not writing to the replay any more, as we are about to consume and store the score.
DrawableRuleset.SetRecordTarget(null);
if (!Configuration.ShowResults)
return;
// Kick off score preparation once only; the delegate below polls for its completion.
prepareScoreForDisplayTask ??= Task.Run(prepareScoreForResults);
bool storyboardHasOutro = DimmableStoryboard.ContentDisplayed && !DimmableStoryboard.HasStoryboardEnded.Value;
if (storyboardHasOutro)
{
// if the current beatmap has a storyboard, the progression to results will be handled by the storyboard ending
// or the user pressing the skip outro button.
skipOutroOverlay.Show();
return;
}
progressToResults(true);
}
/// <summary>
/// Asynchronously run score preparation operations (database import, online submission etc.).
/// </summary>
/// <returns>The final score.</returns>
private async Task<ScoreInfo> prepareScoreForResults()
{
// Each stage is attempted independently: a preparation failure should not block import, and vice versa.
try
{
await PrepareScoreForResultsAsync(Score).ConfigureAwait(false);
}
catch (Exception ex)
{
Logger.Error(ex, @"Score preparation failed!");
}
try
{
await ImportScore(Score).ConfigureAwait(false);
}
catch (Exception ex)
{
Logger.Error(ex, @"Score import failed!");
}
return Score.ScoreInfo;
}
/// <summary>
/// Queue the results screen for display.
/// </summary>
/// <remarks>
/// A final display will only occur once all work is completed in <see cref="PrepareScoreForResultsAsync"/>. This means that even after calling this method, the results screen will never be shown until <see cref="JudgementProcessor.HasCompleted">ScoreProcessor.HasCompleted</see> becomes <see langword="true"/>.
///
/// Calling this method multiple times will have no effect.
/// </remarks>
/// <param name="withDelay">Whether a minimum delay (<see cref="RESULTS_DISPLAY_DELAY"/>) should be added before the screen is displayed.</param>
private void progressToResults(bool withDelay)
{
if (resultsDisplayDelegate != null)
// Note that if progressToResults is called one withDelay=true and then withDelay=false, this no-delay timing will not be
// accounted for. shouldn't be a huge concern (a user pressing the skip button after a results progression has already been queued
// may take x00 more milliseconds than expected in the very rare edge case).
//
// If required we can handle this more correctly by rescheduling here.
return;
double delay = withDelay ? RESULTS_DISPLAY_DELAY : 0;
// The delegate repeats every 50ms until the async preparation completes, then pushes results once.
resultsDisplayDelegate = new ScheduledDelegate(() =>
{
if (prepareScoreForDisplayTask?.IsCompleted != true)
// If the asynchronous preparation has not completed, keep repeating this delegate.
return;
resultsDisplayDelegate?.Cancel();
if (!this.IsCurrentScreen())
// This player instance may already be in the process of exiting.
return;
this.Push(CreateResults(prepareScoreForDisplayTask.Result));
}, Time.Current + delay, 50);
Scheduler.Add(resultsDisplayDelegate);
}
protected override bool OnScroll(ScrollEvent e) => mouseWheelDisabled.Value && !GameplayClockContainer.IsPaused.Value;
#region Fail Logic
protected FailOverlay FailOverlay { get; private set; }
// Plays the fail transition; its completion callback (onFailComplete) shows the fail overlay.
private FailAnimation failAnimation;
// HealthProcessor.Failed handler. Returns whether the fail is accepted (mods may veto it).
private bool onFail()
{
if (!CheckModsAllowFailure())
return false;
HasFailed = true;
// There is a chance that we could be in a paused state as the ruleset's internal clock (see FrameStabilityContainer)
// could process an extra frame after the GameplayClock is stopped.
// In such cases we want the fail state to precede a user triggered pause.
if (PauseOverlay.State.Value == Visibility.Visible)
PauseOverlay.Hide();
failAnimation.Start();
// Mods like "sudden death + auto restart" trigger an immediate retry instead of showing the fail screen.
if (Mods.Value.OfType<IApplicableFailOverride>().Any(m => m.RestartOnFail))
Restart();
return true;
}
// Called back when the transform finishes
private void onFailComplete()
{
GameplayClockContainer.Stop();
FailOverlay.Retries = RestartCount;
FailOverlay.Show();
}
#endregion
#region Pause Logic
public bool IsResuming { get; private set; }
/// <summary>
/// The amount of gameplay time after which a second pause is allowed.
/// </summary>
private const double pause_cooldown = 1000;
protected PauseOverlay PauseOverlay { get; private set; }
// Gameplay time of the last pause action; null until the first pause.
private double? lastPauseActionTime;
// Whether a new pause is currently blocked by the cooldown window following the previous pause.
protected bool PauseCooldownActive =>
lastPauseActionTime.HasValue && GameplayClockContainer.GameplayClock.CurrentTime < lastPauseActionTime + pause_cooldown;
/// <summary>
/// A set of conditionals which defines whether the current game state and configuration allows for
/// pausing to be attempted via <see cref="Pause"/>. If false, the game should generally exit if a user pause
/// is attempted.
/// </summary>
private bool pausingSupportedByCurrentState =>
// must pass basic screen conditions (beatmap loaded, instance allows pause)
LoadedBeatmapSuccessfully && Configuration.AllowPause && ValidForResume
// replays cannot be paused and exit immediately
&& !DrawableRuleset.HasReplayLoaded.Value
// cannot pause if we are already in a fail state
&& !HasFailed;
private bool canResume =>
// cannot resume from a non-paused state
GameplayClockContainer.IsPaused.Value
// cannot resume if we are already in a fail state
&& !HasFailed
// already resuming
&& !IsResuming;
// Attempts to pause gameplay. Returns false when pausing is unsupported or the cooldown is active.
public bool Pause()
{
if (!pausingSupportedByCurrentState) return false;
// The cooldown is bypassed when pausing mid-resume (so the user can back out of a resume).
if (!IsResuming && PauseCooldownActive)
return false;
if (IsResuming)
{
DrawableRuleset.CancelResume();
IsResuming = false;
}
GameplayClockContainer.Stop();
PauseOverlay.Show();
lastPauseActionTime = GameplayClockContainer.GameplayClock.CurrentTime;
return true;
}
// Begins resuming from a paused state. The actual clock restart may be deferred to the
// ruleset's resume overlay (eg. "click to resume" cursor repositioning).
public void Resume()
{
if (!canResume) return;
IsResuming = true;
PauseOverlay.Hide();
// breaks and time-based conditions may allow instant resume.
if (breakTracker.IsBreakTime.Value)
completeResume();
else
DrawableRuleset.RequestResume(completeResume);
// Restarts the clock and clears the resuming flag once the ruleset signals readiness.
void completeResume()
{
GameplayClockContainer.Start();
IsResuming = false;
}
}
#endregion
#region Screen Logic
// Performs entry transitions, binds background/storyboard state, applies player-level mods,
// and starts gameplay.
public override void OnEntering(IScreen last)
{
base.OnEntering(last);
if (!LoadedBeatmapSuccessfully)
return;
Alpha = 0;
this
.ScaleTo(0.7f)
.ScaleTo(1, 750, Easing.OutQuint)
.Delay(250)
.FadeIn(250);
ApplyToBackground(b =>
{
b.IgnoreUserSettings.Value = false;
b.BlurAmount.Value = 0;
b.FadeColour(Color4.White, 250);
// bind component bindables.
b.IsBreakTime.BindTo(breakTracker.IsBreakTime);
b.StoryboardReplacesBackground.BindTo(storyboardReplacesBackground);
});
HUDOverlay.IsBreakTime.BindTo(breakTracker.IsBreakTime);
DimmableStoryboard.IsBreakTime.BindTo(breakTracker.IsBreakTime);
DimmableStoryboard.StoryboardReplacesBackground.BindTo(storyboardReplacesBackground);
storyboardReplacesBackground.Value = Beatmap.Value.Storyboard.ReplacesBackground && Beatmap.Value.Storyboard.HasDrawable;
foreach (var mod in Mods.Value.OfType<IApplicableToPlayer>())
mod.ApplyToPlayer(this);
foreach (var mod in Mods.Value.OfType<IApplicableToHUD>())
mod.ApplyToHUD(HUDOverlay);
// Our mods are local copies of the global mods so they need to be re-applied to the track.
// This is done through the music controller (for now), because resetting speed adjustments on the beatmap track also removes adjustments provided by DrawableTrack.
// Todo: In the future, player will receive in a track and will probably not have to worry about this...
musicController.ResetTrackAdjustments();
foreach (var mod in Mods.Value.OfType<IApplicableToTrack>())
mod.ApplyToTrack(musicController.CurrentTrack);
updateGameplayState();
GameplayClockContainer.FadeInFromZero(750, Easing.OutQuint);
StartGameplay();
}
/// <summary>
/// Called to trigger the starting of the gameplay clock and underlying gameplay.
/// This will be called on entering the player screen once. A derived class may block the first call to this to delay the start of gameplay.
/// </summary>
protected virtual void StartGameplay()
{
if (GameplayClockContainer.GameplayClock.IsRunning)
throw new InvalidOperationException($"{nameof(StartGameplay)} should not be called when the gameplay clock is already running");
GameplayClockContainer.Reset();
}
// Fades out and releases the screen-suspension handle when another screen (eg. results) is pushed.
public override void OnSuspending(IScreen next)
{
screenSuspension?.Expire();
fadeOut();
base.OnSuspending(next);
}
// Performs teardown when leaving gameplay: stops spectator broadcast, detaches the
// beatmap track from the gameplay clock, and resets track adjustments.
public override bool OnExiting(IScreen next)
{
screenSuspension?.Expire();
// EndPlaying() is typically called from ReplayRecorder.Dispose(). Disposal is currently asynchronous.
// To resolve test failures, forcefully end playing synchronously when this screen exits.
// Todo: Replace this with a more permanent solution once osu-framework has a synchronous cleanup method.
spectatorClient.EndPlaying();
// GameplayClockContainer performs seeks / start / stop operations on the beatmap's track.
// as we are no longer the current screen, we cannot guarantee the track is still usable.
(GameplayClockContainer as MasterGameplayClockContainer)?.StopUsingBeatmapClock();
musicController.ResetTrackAdjustments();
fadeOut();
return base.OnExiting(next);
}
/// <summary>
/// Creates the player's <see cref="Scoring.Score"/>.
/// </summary>
/// <returns>The <see cref="Scoring.Score"/>, attributed to the local API user.</returns>
protected virtual Score CreateScore() => new Score
{
ScoreInfo = new ScoreInfo { User = api.LocalUser.Value },
};
/// <summary>
/// Imports the player's <see cref="Scoring.Score"/> to the local database.
/// </summary>
/// <param name="score">The <see cref="Scoring.Score"/> to import.</param>
/// <returns>The imported score.</returns>
protected virtual async Task ImportScore(Score score)
{
// Replays are already populated and present in the game's database, so should not be re-imported.
if (DrawableRuleset.ReplayScore != null)
return;
// Encode the recorded replay to an in-memory .osr so it can be imported alongside the score.
LegacyByteArrayReader replayReader;
using (var stream = new MemoryStream())
{
new LegacyScoreEncoder(score, GameplayBeatmap.PlayableBeatmap).Encode(stream);
replayReader = new LegacyByteArrayReader(stream.ToArray(), "replay.osr");
}
// For the time being, online ID responses are not really useful for anything.
// In addition, the IDs provided via new (lazer) endpoints are based on a different autoincrement from legacy (stable) scores.
//
// Until we better define the server-side logic behind this, let's not store the online ID to avoid potential unique constraint
// conflicts across various systems (ie. solo and multiplayer).
long? onlineScoreId = score.ScoreInfo.OnlineScoreID;
score.ScoreInfo.OnlineScoreID = null;
await scoreManager.Import(score.ScoreInfo, replayReader).ConfigureAwait(false);
// ... And restore the online ID for other processes to handle correctly (e.g. de-duplication for the results screen).
score.ScoreInfo.OnlineScoreID = onlineScoreId;
}
/// <summary>
/// Prepare the <see cref="Scoring.Score"/> for display at results.
/// </summary>
/// <param name="score">The <see cref="Scoring.Score"/> to prepare.</param>
/// <returns>A task that prepares the provided score. On completion, the score is assumed to be ready for display.</returns>
protected virtual Task PrepareScoreForResultsAsync(Score score)
{
// perform one final population to ensure everything is up-to-date.
ScoreProcessor.PopulateScore(score.ScoreInfo);
return Task.CompletedTask;
}
/// <summary>
/// Creates the <see cref="ResultsScreen"/> for a <see cref="ScoreInfo"/>.
/// </summary>
/// <param name="score">The <see cref="ScoreInfo"/> to be displayed in the results screen.</param>
/// <returns>The <see cref="ResultsScreen"/>.</returns>
protected virtual ResultsScreen CreateResults(ScoreInfo score) => new SoloResultsScreen(score, true);
// Fades this screen out (instantly for hotkey exits, otherwise over 250ms)
// and returns the background/storyboard state to non-gameplay defaults.
private void fadeOut(bool instant = false)
{
    this.FadeOut(instant ? 0 : 250);

    ApplyToBackground(b => b.IgnoreUserSettings.Value = true);
    storyboardReplacesBackground.Value = false;
}
#endregion
IBindable<bool> ISamplePlaybackDisabler.SamplePlaybackDisabled => samplePlaybackDisabled;
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using Fake;
using Machine.Specifications;
using Microsoft.FSharp.Core;
namespace Test.FAKECore
{
// Verifies that this test assembly can see FAKE's internal members
// (ie. InternalsVisibleTo is configured correctly) by calling an internal function.
public class when_accessing_internals
{
It should_have_access_to_FAKE_internals =
() => AssemblyInfoFile.getDependencies(new List<AssemblyInfoFile.Attribute>());
}
// Specs for the F# AssemblyInfo generation task using its default configuration
// (System namespace, AssemblyVersionInformation module emitted).
public class when_using_fsharp_task_with_default_config
{
// The generated file should target the "System" namespace and append a version module
// mirroring the attribute values as literals.
It should_use_system_namespace_and_emit_a_version_module = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0")
};
AssemblyInfoFile.CreateFSharpAssemblyInfo(infoFile, attributes);
const string expected = "// Auto-Generated by FAKE; do not edit\r\nnamespace System\r\nopen System.Reflection\r\n\r\n[<assembly: AssemblyProductAttribute(\"TestLib\")>]\r\n[<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>]\r\ndo ()\r\n\r\nmodule internal AssemblyVersionInformation =\r\n let [<Literal>] AssemblyProduct = \"TestLib\"\r\n let [<Literal>] AssemblyVersion = \"1.0.0.0\"\r\n";
// Expected text is authored with CRLF; normalise to the platform's newline before comparing.
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// Duplicate attribute types (eg. two InternalsVisibleTo) must produce uniquely-named
// literals in the version module ("InternalsVisibleTo", "InternalsVisibleTo_1", ...).
It should_emit_valid_syntax_when_there_is_more_then_one_attribute_of_the_same_type = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0"),
AssemblyInfoFile.Attribute.InternalsVisibleTo("a"),
AssemblyInfoFile.Attribute.InternalsVisibleTo("a.b"),
};
AssemblyInfoFile.CreateFSharpAssemblyInfo(infoFile, attributes);
const string expected = "// Auto-Generated by FAKE; do not edit\r\nnamespace System\r\nopen System.Reflection\r\nopen System.Runtime.CompilerServices\r\n\r\n[<assembly: AssemblyProductAttribute(\"TestLib\")>]\r\n[<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>]\r\n[<assembly: InternalsVisibleToAttribute(\"a\")>]\r\n[<assembly: InternalsVisibleToAttribute(\"a.b\")>]\r\ndo ()\r\n\r\nmodule internal AssemblyVersionInformation =\r\n let [<Literal>] AssemblyProduct = \"TestLib\"\r\n let [<Literal>] AssemblyVersion = \"1.0.0.0\"\r\n let [<Literal>] InternalsVisibleTo = \"a\"\r\n let [<Literal>] InternalsVisibleTo_1 = \"a.b\"\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// UpdateAttributes should rewrite existing values in-place, whether or not the
// attribute name carries the "Attribute" suffix.
It update_attributes_should_update_attributes_in_fs_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".fs");
const string original = "namespace System\r\nopen System.Reflection\r\n\r\n" +
"[<assembly: AssemblyProduct(\"TestLib\")>]\r\n" +
"[<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>]\r\n";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLibNew"),
AssemblyInfoFile.Attribute.Version("2.0.0.0")
};
AssemblyInfoFile.UpdateAttributes(infoFile, attributes);
// Assert
const string expected = "namespace System\r\nopen System.Reflection\r\n\r\n" +
"[<assembly: AssemblyProduct(\"TestLibNew\")>]\r\n" +
"[<assembly: AssemblyVersionAttribute(\"2.0.0.0\")>]\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// GetAttribute should read values back out, independent of the "Attribute" suffix.
It get_attribute_should_read_attribute_from_fs_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".fs");
const string original = "namespace System\r\nopen System.Reflection\r\n\r\n" +
"[<assembly: AssemblyProduct(\"TestLib\")>]\r\n" +
"[<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>]\r\n";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var productAttr = AssemblyInfoFile.GetAttribute("AssemblyProduct", infoFile).Value;
var versionAttr = AssemblyInfoFile.GetAttribute("AssemblyVersion", infoFile).Value;
// Assert
productAttr.Value.ShouldEqual("TestLib");
versionAttr.Value.ShouldEqual("1.0.0.0");
};
}
public class when_using_fsharp_task_with_custom_config
{
    It should_use_custom_namespace_and_not_emit_a_version_module = () =>
    {
        // Config that disables the version-information module (second flag)
        // and places the generated attributes in the "Custom" namespace.
        var customConfig = new AssemblyInfoFile.AssemblyInfoFileConfig(false, new FSharpOption<bool>(false), new FSharpOption<string>("Custom"));
        var infoFile = Path.GetTempFileName();

        AssemblyInfoFile.CreateFSharpAssemblyInfoWithConfig(
            infoFile,
            new[]
            {
                AssemblyInfoFile.Attribute.Product("TestLib"),
                AssemblyInfoFile.Attribute.Version("1.0.0.0")
            },
            customConfig);

        // No "module internal AssemblyVersionInformation" block is expected in the output.
        const string expected = "// Auto-Generated by FAKE; do not edit\r\nnamespace Custom\r\nopen System.Reflection\r\n\r\n[<assembly: AssemblyProductAttribute(\"TestLib\")>]\r\n[<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>]\r\ndo ()\r\n\r\n";
        File.ReadAllText(infoFile)
            .ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
    };
}
// Specs for generating, updating and reading C# AssemblyInfo files with the default config.
public class when_using_csharp_task_with_default_config
{
// The default config should place attributes at global scope and emit an internal
// AssemblyVersionInformation class inside the "System" namespace.
It should_use_system_namespace_and_emit_a_version_module = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0")
};
AssemblyInfoFile.CreateCSharpAssemblyInfo(infoFile, attributes);
const string expected = "// <auto-generated/>\r\nusing System.Reflection;\r\n\r\n[assembly: AssemblyProductAttribute(\"TestLib\")]\r\n[assembly: AssemblyVersionAttribute(\"1.0.0.0\")]\r\nnamespace System {\r\n internal static class AssemblyVersionInformation {\r\n internal const System.String AssemblyProduct = \"TestLib\";\r\n internal const System.String AssemblyVersion = \"1.0.0.0\";\r\n }\r\n}\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// Repeated attribute types must produce uniquely named constants (suffix _1, _2, ...)
// so the generated version-information class still compiles.
It should_emit_valid_syntax_when_there_is_more_then_one_attribute_of_the_same_type = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0"),
AssemblyInfoFile.Attribute.InternalsVisibleTo("a"),
AssemblyInfoFile.Attribute.InternalsVisibleTo("b")
};
AssemblyInfoFile.CreateCSharpAssemblyInfo(infoFile, attributes);
const string expected = "// <auto-generated/>\r\nusing System.Reflection;\r\nusing System.Runtime.CompilerServices;\r\n\r\n[assembly: AssemblyProductAttribute(\"TestLib\")]\r\n[assembly: AssemblyVersionAttribute(\"1.0.0.0\")]\r\n[assembly: InternalsVisibleToAttribute(\"a\")]\r\n[assembly: InternalsVisibleToAttribute(\"b\")]\r\nnamespace System {\r\n internal static class AssemblyVersionInformation {\r\n internal const System.String AssemblyProduct = \"TestLib\";\r\n internal const System.String AssemblyVersion = \"1.0.0.0\";\r\n internal const System.String InternalsVisibleTo = \"a\";\r\n internal const System.String InternalsVisibleTo_1 = \"b\";\r\n }\r\n}\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// UpdateAttributes must rewrite an attribute whether it is spelled with or
// without the "Attribute" suffix in the existing file.
It update_attributes_should_update_attributes_in_cs_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".cs");
const string original = "// <auto-generated/>\r\nusing System.Reflection;\r\n\r\n" +
"[assembly: AssemblyProduct(\"TestLib\")]\r\n" +
"[assembly: AssemblyVersionAttribute(\"1.0.0.0\")]\r\n";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLibNew"),
AssemblyInfoFile.Attribute.Version("2.0.0.0")
};
AssemblyInfoFile.UpdateAttributes(infoFile, attributes);
// Assert
const string expected = "// <auto-generated/>\r\nusing System.Reflection;\r\n\r\n" +
"[assembly: AssemblyProduct(\"TestLibNew\")]\r\n" +
"[assembly: AssemblyVersionAttribute(\"2.0.0.0\")]\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// GetAttribute must find an attribute by its bare name regardless of whether
// the file spells it with the "Attribute" suffix.
It get_attribute_should_read_attribute_from_cs_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".cs");
const string original = "// <auto-generated/>\r\nusing System.Reflection;\r\n\r\n" +
"[assembly: AssemblyProduct(\"TestLib\")]\r\n" +
"[assembly: AssemblyVersionAttribute(\"1.0.0.0\")]\r\n";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var productAttr = AssemblyInfoFile.GetAttribute("AssemblyProduct", infoFile).Value;
var versionAttr = AssemblyInfoFile.GetAttribute("AssemblyVersion", infoFile).Value;
// Assert
productAttr.Value.ShouldEqual("TestLib");
versionAttr.Value.ShouldEqual("1.0.0.0");
};
}
// Specs for the C# generator with a non-default configuration.
public class when_using_csharp_task_with_custom_config
{
// When the ReSharper-suppression flag is enabled, the generated version-information
// class should carry [SuppressMessage] attributes for the known ReSharper inspections.
It should_emit_resharper_suppressions_if_enabled = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0")
};
// (generate version module: true, emit suppressions: true, namespace: "System")
var customConfig = new AssemblyInfoFile.AssemblyInfoFileConfig(true, new FSharpOption<bool>(true), new FSharpOption<string>("System"));
AssemblyInfoFile.CreateCSharpAssemblyInfoWithConfig(infoFile, attributes, customConfig);
const string expected = "// <auto-generated/>\r\nusing System.Diagnostics.CodeAnalysis;\r\nusing System.Reflection;\r\n\r\n[assembly: AssemblyProductAttribute(\"TestLib\")]\r\n[assembly: AssemblyVersionAttribute(\"1.0.0.0\")]\r\nnamespace System {\r\n [SuppressMessage(\"ReSharper\", \"RedundantNameQualifier\")]\r\n [SuppressMessage(\"ReSharper\", \"UnusedMember.Global\")]\r\n [SuppressMessage(\"ReSharper\", \"BuiltInTypeReferenceStyle\")]\r\n internal static class AssemblyVersionInformation {\r\n internal const System.String AssemblyProduct = \"TestLib\";\r\n internal const System.String AssemblyVersion = \"1.0.0.0\";\r\n }\r\n}\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
}
// Specs for generating, updating and reading C++/CLI AssemblyInfo files.
// Note the C++/CLI syntax differences: "using namespace", "::" separators and
// trailing semicolons after each attribute.
public class when_using_cppcli_task_with_default_config
{
It should_emit_valid_syntax = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0")
};
AssemblyInfoFile.CreateCppCliAssemblyInfo(infoFile, attributes);
const string expected = "// <auto-generated/>\r\nusing namespace System::Reflection;\r\n\r\n[assembly:AssemblyProductAttribute(\"TestLib\")];\r\n[assembly:AssemblyVersionAttribute(\"1.0.0.0\")];\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// UpdateAttributes must rewrite attributes spelled both with and without the
// "Attribute" suffix in an existing C++/CLI file.
It update_attributes_should_update_attributes_in_cpp_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".cpp");
const string original = "// <auto-generated/>\r\nusing namespace System::Reflection;\r\n\r\n" +
"[assembly:AssemblyProduct(\"TestLib\")];\r\n" +
"[assembly:AssemblyVersionAttribute(\"1.0.0.0\")];\r\n";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLibNew"),
AssemblyInfoFile.Attribute.Version("2.0.0.0")
};
AssemblyInfoFile.UpdateAttributes(infoFile, attributes);
// Assert
const string expected = "// <auto-generated/>\r\nusing namespace System::Reflection;\r\n\r\n" +
"[assembly:AssemblyProduct(\"TestLibNew\")];\r\n" +
"[assembly:AssemblyVersionAttribute(\"2.0.0.0\")];\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// GetAttribute must find an attribute by its bare name in a C++/CLI file.
It get_attribute_should_read_attribute_from_cpp_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".cpp");
const string original = "// <auto-generated/>\r\nusing namespace System::Reflection;\r\n\r\n" +
"[assembly:AssemblyProduct(\"TestLib\")];\r\n" +
"[assembly:AssemblyVersionAttribute(\"1.0.0.0\")];\r\n";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var productAttr = AssemblyInfoFile.GetAttribute("AssemblyProduct", infoFile).Value;
var versionAttr = AssemblyInfoFile.GetAttribute("AssemblyVersion", infoFile).Value;
// Assert
productAttr.Value.ShouldEqual("TestLib");
versionAttr.Value.ShouldEqual("1.0.0.0");
};
}
// Specs for generating, updating and reading Visual Basic AssemblyInfo files.
public class when_using_vb_task_with_default_config
{
It should_emit_valid_syntax = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0")
};
AssemblyInfoFile.CreateVisualBasicAssemblyInfo(infoFile, attributes);
const string expected = "' <auto-generated/>\r\nImports System.Reflection\r\n\r\n<assembly: AssemblyProductAttribute(\"TestLib\")>\r\n<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>\r\nFriend NotInheritable Class AssemblyVersionInformation\r\n Friend Const AssemblyProduct As System.String = \"TestLib\"\r\n Friend Const AssemblyVersion As System.String = \"1.0.0.0\"\r\nEnd Class\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// Repeated attribute types must produce uniquely named constants (suffix _1, _2, ...)
// so the generated version-information class still compiles.
It should_emit_valid_syntax_when_there_is_more_then_one_attribute_of_the_same_type = () =>
{
string infoFile = Path.GetTempFileName();
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLib"),
AssemblyInfoFile.Attribute.Version("1.0.0.0"),
AssemblyInfoFile.Attribute.InternalsVisibleTo("a"),
AssemblyInfoFile.Attribute.InternalsVisibleTo("b")
};
AssemblyInfoFile.CreateVisualBasicAssemblyInfo(infoFile, attributes);
const string expected = "' <auto-generated/>\r\nImports System.Reflection\r\nImports System.Runtime.CompilerServices\r\n\r\n<assembly: AssemblyProductAttribute(\"TestLib\")>\r\n<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>\r\n<assembly: InternalsVisibleToAttribute(\"a\")>\r\n<assembly: InternalsVisibleToAttribute(\"b\")>\r\nFriend NotInheritable Class AssemblyVersionInformation\r\n Friend Const AssemblyProduct As System.String = \"TestLib\"\r\n Friend Const AssemblyVersion As System.String = \"1.0.0.0\"\r\n Friend Const InternalsVisibleTo As System.String = \"a\"\r\n Friend Const InternalsVisibleTo_1 As System.String = \"b\"\r\nEnd Class\r\n";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// UpdateAttributes must be case-insensitive on the "assembly:" prefix and must
// match names both with and without the "Attribute" suffix; untouched attributes
// (ComVisible here) must be left exactly as they were.
It update_attributes_should_update_attributes_in_vb_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".vb");
const string original = "' <auto-generated/>\r\nImports System.Reflection\r\n\r\n" +
"<assembly: AssemblyProduct(\"TestLib\")>\r\n" +
"<Assembly: AssemblyCompany(\"TestCompany\")>\r\n" +
"<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>\r\n" +
"<Assembly: ComVisibleAttribute(false)>";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var attributes = new[]
{
AssemblyInfoFile.Attribute.Product("TestLibNew"),
AssemblyInfoFile.Attribute.Company("TestCompanyNew"),
AssemblyInfoFile.Attribute.Version("2.0.0.0")
};
AssemblyInfoFile.UpdateAttributes(infoFile, attributes);
// Assert
const string expected = "' <auto-generated/>\r\nImports System.Reflection\r\n\r\n" +
"<assembly: AssemblyProduct(\"TestLibNew\")>\r\n" +
"<Assembly: AssemblyCompany(\"TestCompanyNew\")>\r\n" +
"<assembly: AssemblyVersionAttribute(\"2.0.0.0\")>\r\n" +
"<Assembly: ComVisibleAttribute(false)>";
File.ReadAllText(infoFile)
.ShouldEqual(expected.Replace("\r\n", Environment.NewLine));
};
// GetAttribute must read attribute values case-insensitively, including
// non-string values such as the boolean in ComVisible(false).
It get_attribute_should_read_attribute_from_vb_file = () =>
{
// Arrange. Create attribute both with and without "Attribute" at the end, and also
// case-insensitive attributes
string infoFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".vb");
const string original = "' <auto-generated/>\r\nImports System.Reflection\r\n\r\n" +
"<assembly: AssemblyProduct(\"TestLib\")>\r\n" +
"<Assembly: AssemblyCompany(\"TestCompany\")>\r\n" +
"<assembly: AssemblyVersionAttribute(\"1.0.0.0\")>\r\n" +
"<Assembly: ComVisibleAttribute(false)>";
File.WriteAllText(infoFile, original.Replace("\r\n", Environment.NewLine));
// Act
var productAttr = AssemblyInfoFile.GetAttribute("AssemblyProduct", infoFile).Value;
var companyAttr = AssemblyInfoFile.GetAttribute("AssemblyCompany", infoFile).Value;
var versionAttr = AssemblyInfoFile.GetAttribute("AssemblyVersion", infoFile).Value;
var comVisibleAttr = AssemblyInfoFile.GetAttribute("ComVisible", infoFile).Value;
// Assert
productAttr.Value.ShouldEqual("TestLib");
companyAttr.Value.ShouldEqual("TestCompany");
versionAttr.Value.ShouldEqual("1.0.0.0");
comVisibleAttr.Value.ShouldEqual("false");
};
}
}
| |
//
// Copyright (c) 2004-2020 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.Internal
{
using System;
using System.IO;
using System.Linq;
using System.Reflection;
using NLog.Common;
#if SILVERLIGHT && !WINDOWS_PHONE
using System.Windows;
#endif
/// <summary>
/// Helpers for <see cref="Assembly"/>.
/// </summary>
/// <summary>
/// Helpers for <see cref="Assembly"/>.
/// </summary>
internal static class AssemblyHelpers
{
#if !NETSTANDARD1_3
    /// <summary>
    /// Loads an assembly from a file path.
    /// </summary>
    /// <param name="assemblyFileName">File or path, including .dll</param>
    /// <param name="baseDirectory">Base path the file name is resolved against; optional</param>
    /// <returns>The loaded <see cref="Assembly"/>.</returns>
    public static Assembly LoadFromPath(string assemblyFileName, string baseDirectory = null)
    {
        string fullFileName = baseDirectory == null ? assemblyFileName : Path.Combine(baseDirectory, assemblyFileName);
        InternalLogger.Info("Loading assembly file: {0}", fullFileName);
#if NETSTANDARD1_5
        try
        {
            var assemblyName = System.Runtime.Loader.AssemblyLoadContext.GetAssemblyName(fullFileName);
            return Assembly.Load(assemblyName);
        }
        catch (Exception ex)
        {
            // Load-by-name doesn't usually work for arbitrary file paths; fall back to loading
            // straight from the path instead.
            InternalLogger.Warn(ex, "Fallback to AssemblyLoadContext.Default.LoadFromAssemblyPath for file: {0}", fullFileName);
            return System.Runtime.Loader.AssemblyLoadContext.Default.LoadFromAssemblyPath(fullFileName);
        }
#elif SILVERLIGHT && !WINDOWS_PHONE
        // Silverlight: the assembly ships as an application resource.
        var stream = Application.GetResourceStream(new Uri(assemblyFileName, UriKind.Relative));
        var assemblyPart = new AssemblyPart();
        Assembly assembly = assemblyPart.Load(stream.Stream);
        return assembly;
#else
        Assembly asm = Assembly.LoadFrom(fullFileName);
        return asm;
#endif
    }
#endif

    /// <summary>
    /// Loads an assembly by its (simple) name.
    /// </summary>
    /// <param name="assemblyName">name without .dll</param>
    /// <returns>The loaded <see cref="Assembly"/>.</returns>
    public static Assembly LoadFromName(string assemblyName)
    {
        InternalLogger.Info("Loading assembly: {0}", assemblyName);
#if NETSTANDARD1_0 || WINDOWS_PHONE
        var name = new AssemblyName(assemblyName);
        return Assembly.Load(name);
#elif SILVERLIGHT && !WINDOWS_PHONE
        // Silverlight: the assembly ships as an embedded resource.
        var assemblyFile = assemblyName + ".dll";
        var stream = Application.GetResourceStream(new Uri(assemblyFile, UriKind.Relative));
        var assemblyPart = new AssemblyPart();
        Assembly assembly = assemblyPart.Load(stream.Stream);
        return assembly;
#else
        try
        {
            Assembly assembly = Assembly.Load(assemblyName);
            return assembly;
        }
        catch (FileNotFoundException)
        {
            // Not found on disk - the assembly may already be loaded into the AppDomain
            // (e.g. merged or loaded from memory), so scan it before giving up.
            var name = new AssemblyName(assemblyName);
            InternalLogger.Trace("Try find '{0}' in current domain", assemblyName);
            var loadedAssembly = AppDomain.CurrentDomain.GetAssemblies().FirstOrDefault(domainAssembly => IsAssemblyMatch(name, domainAssembly.GetName()));
            if (loadedAssembly != null)
            {
                InternalLogger.Trace("Found '{0}' in current domain", assemblyName);
                return loadedAssembly;
            }

            InternalLogger.Trace("Haven't found '{0}' in current domain", assemblyName);
            throw;
        }
#endif
    }

    /// <summary>
    /// Checks whether <paramref name="actual"/> satisfies the name, version, culture and
    /// public-key-token constraints present on <paramref name="expected"/>.
    /// Constraints missing from <paramref name="expected"/> are not checked.
    /// </summary>
    private static bool IsAssemblyMatch(AssemblyName expected, AssemblyName actual)
    {
        if (expected.Name != actual.Name)
            return false;

        if (expected.Version != null && expected.Version != actual.Version)
            return false;

#if !NETSTANDARD1_3 && !NETSTANDARD1_5
        if (expected.CultureInfo != null && expected.CultureInfo.Name != actual.CultureInfo.Name)
            return false;
#endif
        var expectedKeyToken = expected.GetPublicKeyToken();
        if (expectedKeyToken == null)
            return true;    // No token requested, so any token matches.

        // Guard against candidates without a public key token; SequenceEqual(null) would throw.
        var actualKeyToken = actual.GetPublicKeyToken();
        return actualKeyToken != null && expectedKeyToken.SequenceEqual(actualKeyToken);
    }

#if !SILVERLIGHT && !NETSTANDARD1_3
    /// <summary>
    /// Resolves the directory that contains the given assembly.
    /// Returns <see cref="string.Empty"/> whenever the location cannot be determined
    /// (dynamic assemblies, sandboxed environments, missing directories), logging the reason
    /// instead of throwing - unless the caught exception must be rethrown by policy.
    /// </summary>
    public static string GetAssemblyFileLocation(Assembly assembly)
    {
        string fullName = string.Empty;

        try
        {
            if (assembly == null)
            {
                return string.Empty;
            }

            fullName = assembly.FullName;
#if NETSTANDARD
            if (string.IsNullOrEmpty(assembly.Location))
            {
                // Assembly with no actual location should be skipped (Avoid PlatformNotSupportedException)
                InternalLogger.Warn("Ignoring assembly location because location is empty: {0}", fullName);
                return string.Empty;
            }
#endif
            Uri assemblyCodeBase;
            if (!Uri.TryCreate(assembly.CodeBase, UriKind.RelativeOrAbsolute, out assemblyCodeBase))
            {
                InternalLogger.Warn("Ignoring assembly location because code base is unknown: '{0}' ({1})", assembly.CodeBase, fullName);
                return string.Empty;
            }

            var assemblyLocation = Path.GetDirectoryName(assemblyCodeBase.LocalPath);
            if (string.IsNullOrEmpty(assemblyLocation))
            {
                InternalLogger.Warn("Ignoring assembly location because it is not a valid directory: '{0}' ({1})", assemblyCodeBase.LocalPath, fullName);
                return string.Empty;
            }

            DirectoryInfo directoryInfo = new DirectoryInfo(assemblyLocation);
            if (!directoryInfo.Exists)
            {
                InternalLogger.Warn("Ignoring assembly location because directory doesn't exists: '{0}' ({1})", assemblyLocation, fullName);
                return string.Empty;
            }

            InternalLogger.Debug("Found assembly location directory: '{0}' ({1})", directoryInfo.FullName, fullName);
            return directoryInfo.FullName;
        }
        catch (System.PlatformNotSupportedException ex)
        {
            InternalLogger.Warn(ex, "Ignoring assembly location because assembly lookup is not supported: {0}", fullName);
            if (ex.MustBeRethrown())
            {
                throw;
            }
            return string.Empty;
        }
        catch (System.Security.SecurityException ex)
        {
            InternalLogger.Warn(ex, "Ignoring assembly location because assembly lookup is not allowed: {0}", fullName);
            if (ex.MustBeRethrown())
            {
                throw;
            }
            return string.Empty;
        }
        catch (UnauthorizedAccessException ex)
        {
            InternalLogger.Warn(ex, "Ignoring assembly location because assembly lookup is not allowed: {0}", fullName);
            if (ex.MustBeRethrown())
            {
                throw;
            }
            return string.Empty;
        }
    }
#endif
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Linq;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.Text.Editor.OptionsExtensionMethods;
using Microsoft.VisualStudio.Text.Projection;
using Microsoft.VisualStudio.Utilities;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Shared.Extensions
{
internal static class IProjectionBufferFactoryServiceExtensions
{
    private const string RoslynPreviewContentType = nameof(RoslynPreviewContentType);

    /// <summary>
    /// Hack to get view taggers working on our preview surfaces. We need to define
    /// both projection and text in order for this to work. Talk to JasonMal for he is the only
    /// one who understands this.
    /// </summary>
    [Export]
    [Name(RoslynPreviewContentType)]
    [BaseDefinition("text")]
    [BaseDefinition("projection")]
    public static readonly ContentTypeDefinition RoslynPreviewContentTypeDefinition;

    /// <summary>
    /// Creates an elision buffer exposing <paramref name="exposedSpans"/> with the common
    /// leading indentation stripped from every exposed line.
    /// </summary>
    public static IElisionBuffer CreateElisionBufferWithoutIndentation(
        this IProjectionBufferFactoryService factoryService,
        IEditorOptions editorOptions,
        params SnapshotSpan[] exposedSpans)
    {
        return factoryService.CreateElisionBufferWithoutIndentation(
            editorOptions,
            (IEnumerable<SnapshotSpan>)exposedSpans);
    }

    /// <summary>
    /// Creates an elision buffer exposing <paramref name="exposedSpans"/> with the common
    /// leading indentation stripped from every exposed line.
    /// </summary>
    public static IElisionBuffer CreateElisionBufferWithoutIndentation(
        this IProjectionBufferFactoryService factoryService,
        IEditorOptions editorOptions,
        IEnumerable<SnapshotSpan> exposedSpans)
    {
        var spans = new NormalizedSnapshotSpanCollection(exposedSpans);

        if (spans.Count > 0)
        {
            // BUG(6335): We have to make sure that the spans refer to the current snapshot of
            // the buffer.
            var buffer = spans.First().Snapshot.TextBuffer;
            var currentSnapshot = buffer.CurrentSnapshot;
            spans = new NormalizedSnapshotSpanCollection(
                spans.Select(s => s.TranslateTo(currentSnapshot, SpanTrackingMode.EdgeExclusive)));
        }

        var elisionBuffer = factoryService.CreateElisionBuffer(null, spans, ElisionBufferOptions.None);

        if (spans.Count > 0)
        {
            var snapshot = spans.First().Snapshot;
            var buffer = snapshot.TextBuffer;

            // We need to figure out the shortest indentation level of the exposed lines. We'll
            // then remove that indentation from all lines.
            var indentationColumn = DetermineIndentationColumn(editorOptions, spans);

            var spansToElide = new List<Span>();
            foreach (var span in spans)
            {
                var startLineNumber = snapshot.GetLineNumberFromPosition(span.Start);
                var endLineNumber = snapshot.GetLineNumberFromPosition(span.End);

                for (var lineNumber = startLineNumber; lineNumber <= endLineNumber; lineNumber++)
                {
                    // Elide the leading indentation of each exposed line up to the common column.
                    var line = snapshot.GetLineFromLineNumber(lineNumber);
                    var lineOffsetOfColumn = line.GetLineOffsetFromColumn(indentationColumn, editorOptions);
                    spansToElide.Add(Span.FromBounds(line.Start, line.Start + lineOffsetOfColumn));
                }
            }

            elisionBuffer.ElideSpans(new NormalizedSpanCollection(spansToElide));
        }

        return elisionBuffer;
    }

    /// <summary>
    /// Determines the smallest indentation column across all non-blank lines covered by
    /// <paramref name="spans"/>. Returns 0 when no line contributes.
    /// </summary>
    private static int DetermineIndentationColumn(
        IEditorOptions editorOptions,
        IEnumerable<SnapshotSpan> spans)
    {
        int? indentationColumn = null;
        foreach (var span in spans)
        {
            var snapshot = span.Snapshot;
            var startLineNumber = snapshot.GetLineNumberFromPosition(span.Start);
            var endLineNumber = snapshot.GetLineNumberFromPosition(span.End);

            // If the span starts after the first non-whitespace of the first line, we'll
            // exclude that line to avoid throwing off the calculation. Otherwise, the
            // incorrect indentation will be returned for lambda cases like so:
            //
            // void M()
            //     {
            //         Func<int> f = () =>
            //             {
            //                 return 1;
            //             };
            //     }
            //
            // Without throwing out the first line in the example above, the indentation column
            // used will be 4, rather than 8.
            var startLineFirstNonWhitespace = snapshot.GetLineFromLineNumber(startLineNumber).GetFirstNonWhitespacePosition();
            if (startLineFirstNonWhitespace.HasValue && startLineFirstNonWhitespace.Value < span.Start)
            {
                startLineNumber++;
            }

            for (var lineNumber = startLineNumber; lineNumber <= endLineNumber; lineNumber++)
            {
                var line = snapshot.GetLineFromLineNumber(lineNumber);
                if (string.IsNullOrWhiteSpace(line.GetText()))
                {
                    // Blank lines carry no meaningful indentation.
                    continue;
                }

                indentationColumn = indentationColumn.HasValue
                    ? Math.Min(indentationColumn.Value, line.GetColumnOfFirstNonWhitespaceCharacterOrEndOfLine(editorOptions))
                    : line.GetColumnOfFirstNonWhitespaceCharacterOrEndOfLine(editorOptions);
            }
        }

        return indentationColumn ?? 0;
    }

    /// <summary>
    /// Creates a projection buffer over <paramref name="exposedLineSpans"/> without trimming
    /// indentation and without a suffix.
    /// </summary>
    public static IProjectionBuffer CreateProjectionBuffer(
        this IProjectionBufferFactoryService factoryService,
        IContentTypeRegistryService registryService,
        IEditorOptions editorOptions,
        ITextSnapshot snapshot,
        string separator,
        params LineSpan[] exposedLineSpans)
    {
        return CreateProjectionBuffer(
            factoryService,
            registryService,
            editorOptions,
            snapshot,
            separator,
            suffixOpt: null,
            trim: false,
            exposedLineSpans: exposedLineSpans);
    }

    /// <summary>
    /// Creates a projection buffer over <paramref name="exposedLineSpans"/> with the common
    /// leading indentation trimmed and without a suffix.
    /// </summary>
    public static IProjectionBuffer CreateProjectionBufferWithoutIndentation(
        this IProjectionBufferFactoryService factoryService,
        IContentTypeRegistryService registryService,
        IEditorOptions editorOptions,
        ITextSnapshot snapshot,
        string separator,
        params LineSpan[] exposedLineSpans)
    {
        return factoryService.CreateProjectionBufferWithoutIndentation(
            registryService,
            editorOptions,
            snapshot,
            separator,
            suffixOpt: null,
            exposedLineSpans: exposedLineSpans);
    }

    /// <summary>
    /// Creates a projection buffer over <paramref name="exposedLineSpans"/> with the common
    /// leading indentation trimmed, followed by <paramref name="suffixOpt"/> when supplied.
    /// </summary>
    public static IProjectionBuffer CreateProjectionBufferWithoutIndentation(
        this IProjectionBufferFactoryService factoryService,
        IContentTypeRegistryService registryService,
        IEditorOptions editorOptions,
        ITextSnapshot snapshot,
        string separator,
        object suffixOpt,
        params LineSpan[] exposedLineSpans)
    {
        return CreateProjectionBuffer(
            factoryService,
            registryService,
            editorOptions,
            snapshot,
            separator,
            suffixOpt,
            trim: true,
            exposedLineSpans: exposedLineSpans);
    }

    /// <summary>
    /// Creates a projection buffer for preview surfaces, using the special
    /// <see cref="RoslynPreviewContentType"/> so view taggers work (see above).
    /// </summary>
    public static IProjectionBuffer CreatePreviewProjectionBuffer(
        this IProjectionBufferFactoryService factoryService,
        IList<object> sourceSpans,
        IContentTypeRegistryService registryService)
    {
        return factoryService.CreateProjectionBuffer(
            projectionEditResolver: null,
            sourceSpans: sourceSpans,
            options: ProjectionBufferOptions.None,
            contentType: registryService.GetContentType(RoslynPreviewContentType));
    }

    /// <summary>
    /// Core worker: builds the list of source spans (tracking spans for exposed lines,
    /// newline/separator literals in between, optional trailing suffix) and creates the
    /// projection buffer from it.
    /// </summary>
    private static IProjectionBuffer CreateProjectionBuffer(
        IProjectionBufferFactoryService factoryService,
        IContentTypeRegistryService registryService,
        IEditorOptions editorOptions,
        ITextSnapshot snapshot,
        string separator,
        object suffixOpt,
        bool trim,
        params LineSpan[] exposedLineSpans)
    {
        var spans = new List<object>();
        if (exposedLineSpans.Length > 0)
        {
            // Leading separator when the first exposed span doesn't start at the top of the file.
            if (exposedLineSpans[0].Start > 0 && !string.IsNullOrEmpty(separator))
            {
                spans.Add(separator);
                spans.Add(editorOptions.GetNewLineCharacter());
            }

            var snapshotSpanRanges = CreateSnapshotSpanRanges(snapshot, exposedLineSpans);
            var indentColumn = trim
                ? DetermineIndentationColumn(editorOptions, snapshotSpanRanges.Flatten())
                : 0;

            foreach (var snapshotSpanRange in snapshotSpanRanges)
            {
                foreach (var snapshotSpan in snapshotSpanRange)
                {
                    // Skip the (possibly trimmed) indentation at the front of each line.
                    var line = snapshotSpan.Snapshot.GetLineFromPosition(snapshotSpan.Start);
                    var indentPosition = line.GetLineOffsetFromColumn(indentColumn, editorOptions) + line.Start;
                    var mappedSpan = new SnapshotSpan(snapshotSpan.Snapshot,
                        Span.FromBounds(indentPosition, snapshotSpan.End));

                    var trackingSpan = mappedSpan.CreateTrackingSpan(SpanTrackingMode.EdgeExclusive);

                    spans.Add(trackingSpan);

                    // Add a newline between every line.
                    if (snapshotSpan != snapshotSpanRange.Last())
                    {
                        spans.Add(editorOptions.GetNewLineCharacter());
                    }
                }

                // Add a separator between every set of lines.
                if (snapshotSpanRange != snapshotSpanRanges.Last())
                {
                    spans.Add(editorOptions.GetNewLineCharacter());
                    spans.Add(separator);
                    spans.Add(editorOptions.GetNewLineCharacter());
                }
            }

            // Trailing separator when the last exposed span doesn't reach the end of the file.
            if (snapshot.GetLineNumberFromPosition(snapshotSpanRanges.Last().Last().End) < snapshot.LineCount - 1)
            {
                spans.Add(editorOptions.GetNewLineCharacter());
                spans.Add(separator);
            }
        }

        if (suffixOpt != null)
        {
            // BUGFIX: was "spans.Count >= 0" (always true), which made spans.Last() throw
            // InvalidOperationException when no line spans were exposed.
            if (spans.Count > 0)
            {
                // object.Equals tolerates a null separator, unlike separator.Equals(...).
                if (!object.Equals(spans.Last(), separator))
                {
                    spans.Add(editorOptions.GetNewLineCharacter());
                    spans.Add(separator);
                }

                spans.Add(editorOptions.GetNewLineCharacter());
            }

            spans.Add(suffixOpt);
        }

        return factoryService.CreateProjectionBuffer(
            projectionEditResolver: null,
            sourceSpans: spans,
            options: ProjectionBufferOptions.None,
            contentType: registryService.GetContentType(RoslynPreviewContentType));
    }

    /// <summary>
    /// Converts each requested <see cref="LineSpan"/> into the list of line-extent snapshot
    /// spans it covers, dropping empty ranges.
    /// </summary>
    private static IList<IList<SnapshotSpan>> CreateSnapshotSpanRanges(ITextSnapshot snapshot, LineSpan[] exposedLineSpans)
    {
        var result = new List<IList<SnapshotSpan>>();
        foreach (var lineSpan in exposedLineSpans)
        {
            var snapshotSpans = CreateSnapshotSpans(snapshot, lineSpan);
            if (snapshotSpans.Count > 0)
            {
                result.Add(snapshotSpans);
            }
        }

        return result;
    }

    /// <summary>
    /// Returns one snapshot span (the line extent, excluding the line break) per line in
    /// <paramref name="lineSpan"/>; the end line is exclusive.
    /// </summary>
    private static IList<SnapshotSpan> CreateSnapshotSpans(ITextSnapshot snapshot, LineSpan lineSpan)
    {
        var result = new List<SnapshotSpan>();
        for (int i = lineSpan.Start; i < lineSpan.End; i++)
        {
            var line = snapshot.GetLineFromLineNumber(i);
            result.Add(line.Extent);
        }

        return result;
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.