context
stringlengths
2.52k
185k
gt
stringclasses
1 value
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.IO;
using System.Text;
using System.Diagnostics;
using System.Globalization;

namespace System.Xml
{
    // XmlTextEncoder
    //
    // This class does special handling of text content for XML. For example
    // it will replace special characters with entities whenever necessary.
    //
    // Escaping rules implemented below:
    //   '<', '>', '&'            -> "&lt;", "&gt;", "&amp;" always;
    //   the active quote char    -> "&apos;"/"&quot;" only inside an attribute;
    //   0xA / 0xD (LF / CR)      -> numeric char refs only inside an attribute
    //                               (otherwise a parser would normalize them away
    //                               per XML attribute-value normalization);
    //   other non-value chars    -> numeric char refs ("&#xH;").
    // When attribute-value caching is enabled, the *raw unescaped* text is also
    // accumulated in _attrValue so it can be read back via AttributeValue.
    internal class XmlTextEncoder
    {
        //
        // Fields
        //

        // output text writer -- all escaped output goes here
        private TextWriter _textWriter;

        // true when writing out the content of attribute value
        private bool _inAttribute;

        // quote char of the attribute (when inAttribute); only this char is
        // entity-escaped among '\'' and '"'
        private char _quoteChar;

        // caching of attribute value (raw, unescaped text)
        private StringBuilder _attrValue;
        private bool _cacheAttrValue;

        // XmlCharType -- character classification tables
        private XmlCharType _xmlCharType;

        //
        // Constructor
        //

        // Creates an encoder that writes escaped text to the given writer.
        // Default attribute quote char is '"'.
        internal XmlTextEncoder(TextWriter textWriter)
        {
            _textWriter = textWriter;
            _quoteChar = '"';
            _xmlCharType = XmlCharType.Instance;
        }

        //
        // Internal methods and properties
        //

        // Sets the quote character delimiting the current attribute value.
        // Inside an attribute, only the quote char that matches this value is
        // replaced with &apos;/&quot; (see Write below).
        internal char QuoteChar
        {
            set
            {
                _quoteChar = value;
            }
        }

        // Enters attribute-writing mode. When cacheAttrValue is true, the raw
        // (unescaped) attribute text is additionally accumulated in _attrValue.
        internal void StartAttribute(bool cacheAttrValue)
        {
            _inAttribute = true;
            _cacheAttrValue = cacheAttrValue;
            if (cacheAttrValue)
            {
                if (_attrValue == null)
                {
                    _attrValue = new StringBuilder();
                }
                else
                {
                    // reuse the builder across attributes instead of reallocating
                    _attrValue.Length = 0;
                }
            }
        }

        // Leaves attribute-writing mode and discards any cached value.
        internal void EndAttribute()
        {
            if (_cacheAttrValue)
            {
                _attrValue.Length = 0;
            }
            _inAttribute = false;
            _cacheAttrValue = false;
        }

        // The raw (unescaped) attribute value accumulated since StartAttribute,
        // or String.Empty when caching was not requested.
        internal string AttributeValue
        {
            get
            {
                if (_cacheAttrValue)
                {
                    return _attrValue.ToString();
                }
                else
                {
                    return String.Empty;
                }
            }
        }

        // Validates the surrogate pair and writes it out verbatim (high char
        // first, then low char) with no escaping.
        internal void WriteSurrogateChar(char lowChar, char highChar)
        {
            if (!XmlCharType.IsLowSurrogate(lowChar) ||
                !XmlCharType.IsHighSurrogate(highChar))
            {
                throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, highChar);
            }

            _textWriter.Write(highChar);
            _textWriter.Write(lowChar);
        }

        // Writes a slice of a char array, escaping special characters as needed.
        // Runs of "plain" characters (IsAttributeValueChar) are written in bulk;
        // everything else is handled one character at a time by the switch.
        // Throws ArgumentNullException / ArgumentOutOfRangeException on bad
        // arguments, ArgumentException on a high surrogate with no low surrogate
        // following in the slice.
        [System.Security.SecurityCritical]
        internal void Write(char[] array, int offset, int count)
        {
            if (null == array)
            {
                throw new ArgumentNullException(nameof(array));
            }

            if (0 > offset)
            {
                throw new ArgumentOutOfRangeException(nameof(offset));
            }

            if (0 > count)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            if (count > array.Length - offset)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            if (_cacheAttrValue)
            {
                // cache the raw (unescaped) characters
                _attrValue.Append(array, offset, count);
            }

            int endPos = offset + count;
            int i = offset;
            char ch = (char)0;
            for (;;)
            {
                int startPos = i;
                // NOTE(review): this unsafe block contains no pointer code --
                // presumably retained from an earlier pointer-based scan; confirm
                // before removing.
                unsafe
                {
                    while (i < endPos && _xmlCharType.IsAttributeValueChar(ch = array[i]))
                    {
                        i++;
                    }
                }

                if (startPos < i)
                {
                    // flush the run of characters that need no escaping
                    _textWriter.Write(array, startPos, i - startPos);
                }
                if (i == endPos)
                {
                    break;
                }

                // ch is the first character that needs special handling
                switch (ch)
                {
                    case (char)0x9: // TAB is legal as-is in both content and attributes
                        _textWriter.Write(ch);
                        break;
                    case (char)0xA:
                    case (char)0xD:
                        // CR/LF must be char-entity-escaped inside attributes so a
                        // parser's attribute-value normalization does not replace them
                        if (_inAttribute)
                        {
                            WriteCharEntityImpl(ch);
                        }
                        else
                        {
                            _textWriter.Write(ch);
                        }
                        break;
                    case '<':
                        WriteEntityRefImpl("lt");
                        break;
                    case '>':
                        WriteEntityRefImpl("gt");
                        break;
                    case '&':
                        WriteEntityRefImpl("amp");
                        break;
                    case '\'':
                        // only escaped when it is the active attribute quote char
                        if (_inAttribute && _quoteChar == ch)
                        {
                            WriteEntityRefImpl("apos");
                        }
                        else
                        {
                            _textWriter.Write('\'');
                        }
                        break;
                    case '"':
                        // only escaped when it is the active attribute quote char
                        if (_inAttribute && _quoteChar == ch)
                        {
                            WriteEntityRefImpl("quot");
                        }
                        else
                        {
                            _textWriter.Write('"');
                        }
                        break;
                    default:
                        if (XmlCharType.IsHighSurrogate(ch))
                        {
                            if (i + 1 < endPos)
                            {
                                // consume the low surrogate too (note ++i)
                                WriteSurrogateChar(array[++i], ch);
                            }
                            else
                            {
                                // a high surrogate at the end of the buffer cannot
                                // be paired -- the caller split a surrogate pair
                                throw new ArgumentException(SR.Xml_SurrogatePairSplit);
                            }
                        }
                        else if (XmlCharType.IsLowSurrogate(ch))
                        {
                            // low surrogate with no preceding high surrogate
                            throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
                        }
                        else
                        {
                            // remaining cases: C0 controls (other than whitespace)
                            // and 0xFFFE/0xFFFF -> numeric character reference
                            Debug.Assert((ch < 0x20 && !_xmlCharType.IsWhiteSpace(ch)) || (ch > 0xFFFD));
                            WriteCharEntityImpl(ch);
                        }
                        break;
                }
                i++;
            }
        }

        // Validates the pair and writes it as a single numeric character
        // reference ("&#xHHHHH;") for the combined code point. The cached
        // attribute value stores the raw pair (high then low).
        internal void WriteSurrogateCharEntity(char lowChar, char highChar)
        {
            if (!XmlCharType.IsLowSurrogate(lowChar) ||
                !XmlCharType.IsHighSurrogate(highChar))
            {
                throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, highChar);
            }
            int surrogateChar = XmlCharType.CombineSurrogateChar(lowChar, highChar);

            if (_cacheAttrValue)
            {
                _attrValue.Append(highChar);
                _attrValue.Append(lowChar);
            }

            _textWriter.Write("&#x");
            _textWriter.Write(surrogateChar.ToString("X", NumberFormatInfo.InvariantInfo));
            _textWriter.Write(';');
        }

        // Writes a string, escaping special characters as needed. First scans the
        // whole string: if nothing needs escaping it is written out in one call
        // (fast path); otherwise the escape loop below writes unescaped fragments
        // through a 256-char helper buffer and escapes the rest.
        [System.Security.SecurityCritical]
        internal void Write(string text)
        {
            if (text == null)
            {
                return;
            }

            if (_cacheAttrValue)
            {
                // cache the raw (unescaped) text
                _attrValue.Append(text);
            }

            // scan through the string to see if there are any characters to be escaped
            int len = text.Length;
            int i = 0;
            int startPos = 0;
            char ch = (char)0;
            for (;;)
            {
                // NOTE(review): unsafe block with no pointer code (see Write(char[]))
                unsafe
                {
                    while (i < len && _xmlCharType.IsAttributeValueChar(ch = text[i]))
                    {
                        i++;
                    }
                }
                if (i == len)
                {
                    // reached the end of the string -> write it whole out
                    _textWriter.Write(text);
                    return;
                }
                if (_inAttribute)
                {
                    // inside an attribute only TAB can pass through unescaped here
                    if (ch == 0x9)
                    {
                        i++;
                        continue;
                    }
                }
                else
                {
                    // in element content, whitespace and both quote chars are fine as-is
                    if (ch == 0x9 || ch == 0xA || ch == 0xD || ch == '"' || ch == '\'')
                    {
                        i++;
                        continue;
                    }
                }
                // some character that needs to be escaped is found:
                break;
            }

            char[] helperBuffer = new char[256];
            for (;;)
            {
                if (startPos < i)
                {
                    // write the unescaped fragment text[startPos..i) via the buffer
                    WriteStringFragment(text, startPos, i - startPos, helperBuffer);
                }
                if (i == len)
                {
                    break;
                }

                // same escaping rules as Write(char[]) above
                switch (ch)
                {
                    case (char)0x9:
                        _textWriter.Write(ch);
                        break;
                    case (char)0xA:
                    case (char)0xD:
                        if (_inAttribute)
                        {
                            WriteCharEntityImpl(ch);
                        }
                        else
                        {
                            _textWriter.Write(ch);
                        }
                        break;
                    case '<':
                        WriteEntityRefImpl("lt");
                        break;
                    case '>':
                        WriteEntityRefImpl("gt");
                        break;
                    case '&':
                        WriteEntityRefImpl("amp");
                        break;
                    case '\'':
                        if (_inAttribute && _quoteChar == ch)
                        {
                            WriteEntityRefImpl("apos");
                        }
                        else
                        {
                            _textWriter.Write('\'');
                        }
                        break;
                    case '"':
                        if (_inAttribute && _quoteChar == ch)
                        {
                            WriteEntityRefImpl("quot");
                        }
                        else
                        {
                            _textWriter.Write('"');
                        }
                        break;
                    default:
                        if (XmlCharType.IsHighSurrogate(ch))
                        {
                            if (i + 1 < len)
                            {
                                WriteSurrogateChar(text[++i], ch);
                            }
                            else
                            {
                                // NOTE(review): at this point text[i] == ch, so both
                                // arguments are the same (unpaired high surrogate);
                                // matches the char[] overload's intent but looks odd --
                                // confirm against the corresponding exception message.
                                throw XmlConvert.CreateInvalidSurrogatePairException(text[i], ch);
                            }
                        }
                        else if (XmlCharType.IsLowSurrogate(ch))
                        {
                            throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
                        }
                        else
                        {
                            Debug.Assert((ch < 0x20 && !_xmlCharType.IsWhiteSpace(ch)) || (ch > 0xFFFD));
                            WriteCharEntityImpl(ch);
                        }
                        break;
                }
                i++;
                startPos = i;
                unsafe
                {
                    while (i < len && _xmlCharType.IsAttributeValueChar(ch = text[i]))
                    {
                        i++;
                    }
                }
            }
        }

        // Writes the string verbatim (no entity escaping), but still validates
        // surrogate pairing and throws on an unpaired surrogate. Characters
        // below 0x20 are accepted by the scan (ch < 0x20 short-circuits the
        // IsCharData check).
        [System.Security.SecurityCritical]
        internal void WriteRawWithSurrogateChecking(string text)
        {
            if (text == null)
            {
                return;
            }

            if (_cacheAttrValue)
            {
                _attrValue.Append(text);
            }

            int len = text.Length;
            int i = 0;
            char ch = (char)0;

            for (;;)
            {
                unsafe
                {
                    while (i < len && (_xmlCharType.IsCharData((ch = text[i])) || ch < 0x20))
                    {
                        i++;
                    }
                }

                if (i == len)
                {
                    break;
                }

                if (XmlCharType.IsHighSurrogate(ch))
                {
                    if (i + 1 < len)
                    {
                        char lowChar = text[i + 1];
                        if (XmlCharType.IsLowSurrogate(lowChar))
                        {
                            // valid pair -- skip both halves
                            i += 2;
                            continue;
                        }
                        else
                        {
                            throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, ch);
                        }
                    }
                    // high surrogate is the last char of the string
                    throw new ArgumentException(SR.Xml_InvalidSurrogateMissingLowChar);
                }
                else if (XmlCharType.IsLowSurrogate(ch))
                {
                    throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
                }
                else
                {
                    i++;
                }
            }

            _textWriter.Write(text);
            return;
        }

        // Writes a slice of a char array verbatim -- no escaping and no
        // character validation beyond the argument checks.
        internal void WriteRaw(char[] array, int offset, int count)
        {
            if (null == array)
            {
                throw new ArgumentNullException(nameof(array));
            }

            if (0 > count)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            if (0 > offset)
            {
                throw new ArgumentOutOfRangeException(nameof(offset));
            }

            if (count > array.Length - offset)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            if (_cacheAttrValue)
            {
                _attrValue.Append(array, offset, count);
            }

            _textWriter.Write(array, offset, count);
        }

        // Writes the character as a numeric character reference ("&#xH;").
        // Surrogate halves are rejected -- use WriteSurrogateCharEntity for those.
        // When caching, the *entity text* (not the raw char) is cached here.
        internal void WriteCharEntity(char ch)
        {
            if (XmlCharType.IsSurrogate(ch))
            {
                throw new ArgumentException(SR.Xml_InvalidSurrogateMissingLowChar);
            }

            string strVal = ((int)ch).ToString("X", NumberFormatInfo.InvariantInfo);
            if (_cacheAttrValue)
            {
                _attrValue.Append("&#x");
                _attrValue.Append(strVal);
                _attrValue.Append(';');
            }
            WriteCharEntityImpl(strVal);
        }

        // Writes a general entity reference "&name;". The name is not validated
        // here; when caching, the entity text is cached verbatim.
        internal void WriteEntityRef(string name)
        {
            if (_cacheAttrValue)
            {
                _attrValue.Append('&');
                _attrValue.Append(name);
                _attrValue.Append(';');
            }
            WriteEntityRefImpl(name);
        }

        //
        // Private implementation methods
        //
        // This is a helper method to workaround the fact that TextWriter does not have a Write method
        // for fragment of a string such as Write( string, offset, count).
        // The string fragment will be written out by copying into a small helper buffer and then
        // calling textWriter to write out the buffer.
        private void WriteStringFragment(string str, int offset, int count, char[] helperBuffer)
        {
            int bufferSize = helperBuffer.Length;
            while (count > 0)
            {
                int copyCount = count;
                if (copyCount > bufferSize)
                {
                    copyCount = bufferSize;
                }

                str.CopyTo(offset, helperBuffer, 0, copyCount);
                _textWriter.Write(helperBuffer, 0, copyCount);
                offset += copyCount;
                count -= copyCount;
            }
        }

        // Writes ch as "&#xH;" (uppercase hex, invariant culture).
        private void WriteCharEntityImpl(char ch)
        {
            WriteCharEntityImpl(((int)ch).ToString("X", NumberFormatInfo.InvariantInfo));
        }

        // Writes a numeric character reference given its pre-formatted hex value.
        private void WriteCharEntityImpl(string strVal)
        {
            _textWriter.Write("&#x");
            _textWriter.Write(strVal);
            _textWriter.Write(';');
        }

        // Writes "&name;" to the output.
        private void WriteEntityRefImpl(string name)
        {
            _textWriter.Write('&');
            _textWriter.Write(name);
            _textWriter.Write(';');
        }
    }
}
// ReSharper disable All
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using Frapid.ApplicationState.Models;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Frapid.Config.DataAccess;
using Frapid.Config.Api.Fakes;
using Frapid.DataAccess;
using Frapid.DataAccess.Models;
using Xunit;

namespace Frapid.Config.Api.Tests
{
    /// <summary>
    /// Debug-only tests that drive FilterController against the fake
    /// FilterRepository. Read-only calls are expected to report exactly one
    /// row; write calls are expected to be rejected by the controller.
    /// </summary>
    public class FilterTests
    {
        // Each test gets its own controller wired to a fresh fake repository.
        public static FilterController Fixture()
        {
            var controller = new FilterController(new FilterRepository());
            return controller;
        }

        [Fact]
        [Conditional("Debug")]
        public void CountEntityColumns()
        {
            var view = Fixture().GetEntityView();

            Assert.Null(view.Columns);
        }

        [Fact]
        [Conditional("Debug")]
        public void Count()
        {
            var total = Fixture().Count();

            Assert.Equal(1, total);
        }

        [Fact]
        [Conditional("Debug")]
        public void GetAll()
        {
            Assert.Equal(1, Fixture().GetAll().Count());
        }

        [Fact]
        [Conditional("Debug")]
        public void Export()
        {
            Assert.Equal(1, Fixture().Export().Count());
        }

        [Fact]
        [Conditional("Debug")]
        public void Get()
        {
            var result = Fixture().Get(0);

            Assert.NotNull(result);
        }

        [Fact]
        [Conditional("Debug")]
        public void First()
        {
            var result = Fixture().GetFirst();

            Assert.NotNull(result);
        }

        [Fact]
        [Conditional("Debug")]
        public void Previous()
        {
            var result = Fixture().GetPrevious(0);

            Assert.NotNull(result);
        }

        [Fact]
        [Conditional("Debug")]
        public void Next()
        {
            var result = Fixture().GetNext(0);

            Assert.NotNull(result);
        }

        [Fact]
        [Conditional("Debug")]
        public void Last()
        {
            var result = Fixture().GetLast();

            Assert.NotNull(result);
        }

        [Fact]
        [Conditional("Debug")]
        public void GetMultiple()
        {
            // An empty id list must still yield a non-null (possibly empty) sequence.
            var results = Fixture().Get(new long[] { });

            Assert.NotNull(results);
        }

        [Fact]
        [Conditional("Debug")]
        public void GetPaginatedResult()
        {
            Assert.Equal(1, Fixture().GetPaginatedResult().Count());
            Assert.Equal(1, Fixture().GetPaginatedResult(1).Count());
        }

        [Fact]
        [Conditional("Debug")]
        public void CountWhere()
        {
            Assert.Equal(1, Fixture().CountWhere(new JArray()));
        }

        [Fact]
        [Conditional("Debug")]
        public void GetWhere()
        {
            Assert.Equal(1, Fixture().GetWhere(1, new JArray()).Count());
        }

        [Fact]
        [Conditional("Debug")]
        public void CountFiltered()
        {
            Assert.Equal(1, Fixture().CountFiltered(""));
        }

        [Fact]
        [Conditional("Debug")]
        public void GetFiltered()
        {
            Assert.Equal(1, Fixture().GetFiltered(1, "").Count());
        }

        [Fact]
        [Conditional("Debug")]
        public void GetDisplayFields()
        {
            Assert.Equal(1, Fixture().GetDisplayFields().Count());
        }

        [Fact]
        [Conditional("Debug")]
        public void GetCustomFields()
        {
            Assert.Equal(1, Fixture().GetCustomFields().Count());
            Assert.Equal(1, Fixture().GetCustomFields("").Count());
        }

        [Fact]
        [Conditional("Debug")]
        public void AddOrEdit()
        {
            // Writes are expected to be rejected with 405 Method Not Allowed.
            try
            {
                var payload = new JArray { null, null };
                Fixture().AddOrEdit(payload);
            }
            catch (HttpResponseException exception)
            {
                Assert.Equal(HttpStatusCode.MethodNotAllowed, exception.Response.StatusCode);
            }
        }

        [Fact]
        [Conditional("Debug")]
        public void Add()
        {
            try
            {
                Fixture().Add(null);
            }
            catch (HttpResponseException exception)
            {
                Assert.Equal(HttpStatusCode.MethodNotAllowed, exception.Response.StatusCode);
            }
        }

        [Fact]
        [Conditional("Debug")]
        public void Edit()
        {
            try
            {
                Fixture().Edit(0, null);
            }
            catch (HttpResponseException exception)
            {
                Assert.Equal(HttpStatusCode.MethodNotAllowed, exception.Response.StatusCode);
            }
        }

        [Fact]
        [Conditional("Debug")]
        public void BulkImport()
        {
            var batch = new JArray { null, null, null, null };

            var response = Fixture().BulkImport(batch);

            Assert.NotNull(response);
        }

        [Fact]
        [Conditional("Debug")]
        public void Delete()
        {
            try
            {
                Fixture().Delete(0);
            }
            catch (HttpResponseException exception)
            {
                Assert.Equal(HttpStatusCode.InternalServerError, exception.Response.StatusCode);
            }
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: booking/pricing/reservation_price_override_notification.proto
//
// NOTE(review): machine-generated file -- modify the .proto and regenerate
// rather than editing this C# by hand.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;

namespace HOLMS.Types.Booking.Pricing {

  /// <summary>Holder for reflection information generated from booking/pricing/reservation_price_override_notification.proto</summary>
  public static partial class ReservationPriceOverrideNotificationReflection {

    #region Descriptor
    /// <summary>File descriptor for booking/pricing/reservation_price_override_notification.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static ReservationPriceOverrideNotificationReflection() {
      // Base64 of the serialized FileDescriptorProto for this .proto file.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "Cj1ib29raW5nL3ByaWNpbmcvcmVzZXJ2YXRpb25fcHJpY2Vfb3ZlcnJpZGVf",
            "bm90aWZpY2F0aW9uLnByb3RvEhtob2xtcy50eXBlcy5ib29raW5nLnByaWNp",
            "bmcaLmJvb2tpbmcvcmVzZXJ2YXRpb25zL3Jlc2VydmF0aW9uX3N1bW1hcnku",
            "cHJvdG8aLmJvb2tpbmcvcHJpY2luZy9wcmV0YXhfcmVzZXJ2YXRpb25fcXVv",
            "dGUucHJvdG8i3AEKJFJlc2VydmF0aW9uUHJpY2VPdmVycmlkZU5vdGlmaWNh",
            "dGlvbhIRCglqX3dfdG9rZW4YASABKAkSUQoTcmVzZXJ2YXRpb25fc3VtbWFy",
            "eRgCIAEoCzI0LmhvbG1zLnR5cGVzLmJvb2tpbmcucmVzZXJ2YXRpb25zLlJl",
            "c2VydmF0aW9uU3VtbWFyeRJOChFyZXNlcnZhdGlvbl9xdW90ZRgDIAEoCzIz",
            "LmhvbG1zLnR5cGVzLmJvb2tpbmcucHJpY2luZy5QcmV0YXhSZXNlcnZhdGlv",
            "blF1b3RlQjRaFGJvb2tpbmcvcmVzZXJ2YXRpb25zqgIbSE9MTVMuVHlwZXMu",
            "Qm9va2luZy5QcmljaW5nYgZwcm90bzM="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { global::HOLMS.Types.Booking.Reservations.ReservationSummaryReflection.Descriptor, global::HOLMS.Types.Booking.Pricing.PretaxReservationQuoteReflection.Descriptor, },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Booking.Pricing.ReservationPriceOverrideNotification), global::HOLMS.Types.Booking.Pricing.ReservationPriceOverrideNotification.Parser, new[]{ "JWToken", "ReservationSummary", "ReservationQuote" }, null, null, null)
          }));
    }
    #endregion

  }

  #region Messages
  public sealed partial class ReservationPriceOverrideNotification : pb::IMessage<ReservationPriceOverrideNotification> {
    private static readonly pb::MessageParser<ReservationPriceOverrideNotification> _parser = new pb::MessageParser<ReservationPriceOverrideNotification>(() => new ReservationPriceOverrideNotification());

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<ReservationPriceOverrideNotification> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::HOLMS.Types.Booking.Pricing.ReservationPriceOverrideNotificationReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ReservationPriceOverrideNotification() {
      OnConstruction();
    }

    partial void OnConstruction();

    // Copy constructor: message fields are deep-cloned, the string is shared.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ReservationPriceOverrideNotification(ReservationPriceOverrideNotification other) : this() {
      jWToken_ = other.jWToken_;
      ReservationSummary = other.reservationSummary_ != null ? other.ReservationSummary.Clone() : null;
      ReservationQuote = other.reservationQuote_ != null ? other.ReservationQuote.Clone() : null;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ReservationPriceOverrideNotification Clone() {
      return new ReservationPriceOverrideNotification(this);
    }

    /// <summary>Field number for the "j_w_token" field.</summary>
    public const int JWTokenFieldNumber = 1;
    private string jWToken_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string JWToken {
      get { return jWToken_; }
      set {
        // proto3 string fields are non-nullable; setter rejects null.
        jWToken_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "reservation_summary" field.</summary>
    public const int ReservationSummaryFieldNumber = 2;
    private global::HOLMS.Types.Booking.Reservations.ReservationSummary reservationSummary_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::HOLMS.Types.Booking.Reservations.ReservationSummary ReservationSummary {
      get { return reservationSummary_; }
      set {
        reservationSummary_ = value;
      }
    }

    /// <summary>Field number for the "reservation_quote" field.</summary>
    public const int ReservationQuoteFieldNumber = 3;
    private global::HOLMS.Types.Booking.Pricing.PretaxReservationQuote reservationQuote_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::HOLMS.Types.Booking.Pricing.PretaxReservationQuote ReservationQuote {
      get { return reservationQuote_; }
      set {
        reservationQuote_ = value;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as ReservationPriceOverrideNotification);
    }

    // Value equality over all three fields.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(ReservationPriceOverrideNotification other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (JWToken != other.JWToken) return false;
      if (!object.Equals(ReservationSummary, other.ReservationSummary)) return false;
      if (!object.Equals(ReservationQuote, other.ReservationQuote)) return false;
      return true;
    }

    // Hash combines only fields that are set (non-default), consistent with Equals.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (JWToken.Length != 0) hash ^= JWToken.GetHashCode();
      if (reservationSummary_ != null) hash ^= ReservationSummary.GetHashCode();
      if (reservationQuote_ != null) hash ^= ReservationQuote.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    // Serializes set fields with their precomputed wire tags (10, 18, 26).
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (JWToken.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(JWToken);
      }
      if (reservationSummary_ != null) {
        output.WriteRawTag(18);
        output.WriteMessage(ReservationSummary);
      }
      if (reservationQuote_ != null) {
        output.WriteRawTag(26);
        output.WriteMessage(ReservationQuote);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (JWToken.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(JWToken);
      }
      if (reservationSummary_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(ReservationSummary);
      }
      if (reservationQuote_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(ReservationQuote);
      }
      return size;
    }

    // Field-wise merge; sub-messages are merged recursively, not replaced.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(ReservationPriceOverrideNotification other) {
      if (other == null) {
        return;
      }
      if (other.JWToken.Length != 0) {
        JWToken = other.JWToken;
      }
      if (other.reservationSummary_ != null) {
        if (reservationSummary_ == null) {
          reservationSummary_ = new global::HOLMS.Types.Booking.Reservations.ReservationSummary();
        }
        ReservationSummary.MergeFrom(other.ReservationSummary);
      }
      if (other.reservationQuote_ != null) {
        if (reservationQuote_ == null) {
          reservationQuote_ = new global::HOLMS.Types.Booking.Pricing.PretaxReservationQuote();
        }
        ReservationQuote.MergeFrom(other.ReservationQuote);
      }
    }

    // Wire-format parse loop; unknown fields are skipped.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            JWToken = input.ReadString();
            break;
          }
          case 18: {
            if (reservationSummary_ == null) {
              reservationSummary_ = new global::HOLMS.Types.Booking.Reservations.ReservationSummary();
            }
            input.ReadMessage(reservationSummary_);
            break;
          }
          case 26: {
            if (reservationQuote_ == null) {
              reservationQuote_ = new global::HOLMS.Types.Booking.Pricing.PretaxReservationQuote();
            }
            input.ReadMessage(reservationQuote_);
            break;
          }
        }
      }
    }

  }
  #endregion

}

#endregion Designer generated code
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Win32.SafeHandles; using System; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; using System.Globalization; using System.Runtime.InteropServices; using System.Security; using System.Text; using System.Threading; namespace System.ServiceProcess { /// This class represents an NT service. It allows you to connect to a running or stopped service /// and manipulate it or get information about it. public class ServiceController : Component { private string _machineName; // Never null private readonly ManualResetEvent _waitForStatusSignal = new ManualResetEvent(false); private const string DefaultMachineName = "."; private string _name; private string _eitherName; private string _displayName; private int _commandsAccepted; private bool _statusGenerated; private bool _startTypeInitialized; private int _type; private bool _disposed; private SafeServiceHandle _serviceManagerHandle; private ServiceControllerStatus _status; private ServiceController[] _dependentServices; private ServiceController[] _servicesDependedOn; private ServiceStartMode _startType; public ServiceController() { _machineName = DefaultMachineName; _type = Interop.Advapi32.ServiceTypeOptions.SERVICE_TYPE_ALL; } /// Creates a ServiceController object, based on service name. public ServiceController(string name) : this(name, DefaultMachineName) { } /// Creates a ServiceController object, based on machine and service name. 
public ServiceController(string name, string machineName) { if (!CheckMachineName(machineName)) throw new ArgumentException(SR.Format(SR.BadMachineName, machineName)); if (string.IsNullOrEmpty(name)) throw new ArgumentException(SR.Format(SR.InvalidParameter, nameof(name), name)); _machineName = machineName; _eitherName = name; _type = Interop.Advapi32.ServiceTypeOptions.SERVICE_TYPE_ALL; } /// Used by the GetServices and GetDevices methods. Avoids duplicating work by the static /// methods and our own GenerateInfo(). private ServiceController(string machineName, Interop.Advapi32.ENUM_SERVICE_STATUS status) { if (!CheckMachineName(machineName)) throw new ArgumentException(SR.Format(SR.BadMachineName, machineName)); _machineName = machineName; _name = status.serviceName; _displayName = status.displayName; _commandsAccepted = status.controlsAccepted; _status = (ServiceControllerStatus)status.currentState; _type = status.serviceType; _statusGenerated = true; } /// Used by the GetServicesInGroup method. private ServiceController(string machineName, Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS status) { if (!CheckMachineName(machineName)) throw new ArgumentException(SR.Format(SR.BadMachineName, machineName)); _machineName = machineName; _name = status.serviceName; _displayName = status.displayName; _commandsAccepted = status.controlsAccepted; _status = (ServiceControllerStatus)status.currentState; _type = status.serviceType; _statusGenerated = true; } /// Tells if the service referenced by this object can be paused. public bool CanPauseAndContinue { get { GenerateStatus(); return (_commandsAccepted & Interop.Advapi32.AcceptOptions.ACCEPT_PAUSE_CONTINUE) != 0; } } /// Tells if the service is notified when system shutdown occurs. public bool CanShutdown { get { GenerateStatus(); return (_commandsAccepted & Interop.Advapi32.AcceptOptions.ACCEPT_SHUTDOWN) != 0; } } /// Tells if the service referenced by this object can be stopped. 
public bool CanStop { get { GenerateStatus(); return (_commandsAccepted & Interop.Advapi32.AcceptOptions.ACCEPT_STOP) != 0; } } /// The descriptive name shown for this service in the Service applet. public string DisplayName { get { if (String.IsNullOrEmpty(_displayName)) GenerateNames(); return _displayName; } set { if (value == null) throw new ArgumentNullException(nameof(value)); if (string.Equals(value, _displayName, StringComparison.OrdinalIgnoreCase)) { // they're just changing the casing. No need to close. _displayName = value; return; } Close(); _displayName = value; _name = ""; } } /// The set of services that depend on this service. These are the services that will be stopped if /// this service is stopped. public ServiceController[] DependentServices { get { if (_dependentServices == null) { using (var serviceHandle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_ENUMERATE_DEPENDENTS)) { // figure out how big a buffer we need to get the info int bytesNeeded = 0; int numEnumerated = 0; bool result = Interop.Advapi32.EnumDependentServices(serviceHandle, Interop.Advapi32.ServiceState.SERVICE_STATE_ALL, IntPtr.Zero, 0, ref bytesNeeded, ref numEnumerated); if (result) { _dependentServices = Array.Empty<ServiceController>(); return _dependentServices; } int lastError = Marshal.GetLastWin32Error(); if (lastError != Interop.Errors.ERROR_MORE_DATA) throw new Win32Exception(lastError); // allocate the buffer IntPtr enumBuffer = Marshal.AllocHGlobal((IntPtr)bytesNeeded); try { // get all the info result = Interop.Advapi32.EnumDependentServices(serviceHandle, Interop.Advapi32.ServiceState.SERVICE_STATE_ALL, enumBuffer, bytesNeeded, ref bytesNeeded, ref numEnumerated); if (!result) throw new Win32Exception(); // for each of the entries in the buffer, create a new ServiceController object. 
_dependentServices = new ServiceController[numEnumerated]; for (int i = 0; i < numEnumerated; i++) { Interop.Advapi32.ENUM_SERVICE_STATUS status = new Interop.Advapi32.ENUM_SERVICE_STATUS(); IntPtr structPtr = (IntPtr)((long)enumBuffer + (i * Marshal.SizeOf<Interop.Advapi32.ENUM_SERVICE_STATUS>())); Marshal.PtrToStructure(structPtr, status); _dependentServices[i] = new ServiceController(_machineName, status); } } finally { Marshal.FreeHGlobal(enumBuffer); } } } return _dependentServices; } } /// The name of the machine on which this service resides. public string MachineName { get { return _machineName; } set { if (!CheckMachineName(value)) throw new ArgumentException(SR.Format(SR.BadMachineName, value)); if (string.Equals(_machineName, value, StringComparison.OrdinalIgnoreCase)) { // no need to close, because the most they're changing is the // casing. _machineName = value; return; } Close(); _machineName = value; } } /// Returns the short name of the service referenced by this object. public string ServiceName { get { if (String.IsNullOrEmpty(_name)) GenerateNames(); return _name; } set { if (value == null) throw new ArgumentNullException(nameof(value)); if (string.Equals(value, _name, StringComparison.OrdinalIgnoreCase)) { // they might be changing the casing, but the service we refer to // is the same. No need to close. 
_name = value; return; } if (!ServiceBase.ValidServiceName(value)) throw new ArgumentException(SR.Format(SR.ServiceName, value, ServiceBase.MaxNameLength.ToString())); Close(); _name = value; _displayName = ""; } } public unsafe ServiceController[] ServicesDependedOn { get { if (_servicesDependedOn != null) return _servicesDependedOn; using (var serviceHandle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_QUERY_CONFIG)) { int bytesNeeded = 0; bool success = Interop.Advapi32.QueryServiceConfig(serviceHandle, IntPtr.Zero, 0, out bytesNeeded); if (success) { _servicesDependedOn = Array.Empty<ServiceController>(); return _servicesDependedOn; } int lastError = Marshal.GetLastWin32Error(); if (lastError != Interop.Errors.ERROR_INSUFFICIENT_BUFFER) throw new Win32Exception(lastError); // get the info IntPtr bufPtr = Marshal.AllocHGlobal((IntPtr)bytesNeeded); try { success = Interop.Advapi32.QueryServiceConfig(serviceHandle, bufPtr, bytesNeeded, out bytesNeeded); if (!success) throw new Win32Exception(Marshal.GetLastWin32Error()); Interop.Advapi32.QUERY_SERVICE_CONFIG config = new Interop.Advapi32.QUERY_SERVICE_CONFIG(); Marshal.PtrToStructure(bufPtr, config); Dictionary<string, ServiceController> dependencyHash = null; char* dependencyChar = config.lpDependencies; if (dependencyChar != null) { // lpDependencies points to the start of multiple null-terminated strings. The list is // double-null terminated. 
int length = 0; dependencyHash = new Dictionary<string, ServiceController>(); while (*(dependencyChar + length) != '\0') { length++; if (*(dependencyChar + length) == '\0') { string dependencyNameStr = new string(dependencyChar, 0, length); dependencyChar = dependencyChar + length + 1; length = 0; if (dependencyNameStr.StartsWith("+", StringComparison.Ordinal)) { // this entry is actually a service load group Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS[] loadGroup = GetServicesInGroup(_machineName, dependencyNameStr.Substring(1)); foreach (Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS groupMember in loadGroup) { if (!dependencyHash.ContainsKey(groupMember.serviceName)) dependencyHash.Add(groupMember.serviceName, new ServiceController(MachineName, groupMember)); } } else { if (!dependencyHash.ContainsKey(dependencyNameStr)) dependencyHash.Add(dependencyNameStr, new ServiceController(dependencyNameStr, MachineName)); } } } } if (dependencyHash != null) { _servicesDependedOn = new ServiceController[dependencyHash.Count]; dependencyHash.Values.CopyTo(_servicesDependedOn, 0); } else { _servicesDependedOn = Array.Empty<ServiceController>(); } return _servicesDependedOn; } finally { Marshal.FreeHGlobal(bufPtr); } } } } public ServiceStartMode StartType { get { if (_startTypeInitialized) return _startType; using (var serviceHandle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_QUERY_CONFIG)) { int bytesNeeded = 0; bool success = Interop.Advapi32.QueryServiceConfig(serviceHandle, IntPtr.Zero, 0, out bytesNeeded); int lastError = Marshal.GetLastWin32Error(); if (lastError != Interop.Errors.ERROR_INSUFFICIENT_BUFFER) throw new Win32Exception(lastError); // get the info IntPtr bufPtr = Marshal.AllocHGlobal((IntPtr)bytesNeeded); try { success = Interop.Advapi32.QueryServiceConfig(serviceHandle, bufPtr, bytesNeeded, out bytesNeeded); if (!success) throw new Win32Exception(Marshal.GetLastWin32Error()); Interop.Advapi32.QUERY_SERVICE_CONFIG config = new 
Interop.Advapi32.QUERY_SERVICE_CONFIG(); Marshal.PtrToStructure(bufPtr, config); _startType = (ServiceStartMode)config.dwStartType; _startTypeInitialized = true; } finally { Marshal.FreeHGlobal(bufPtr); } } return _startType; } } public SafeHandle ServiceHandle { get { return GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_ALL_ACCESS); } } /// Gets the status of the service referenced by this object, e.g., Running, Stopped, etc. public ServiceControllerStatus Status { get { GenerateStatus(); return _status; } } /// Gets the type of service that this object references. public ServiceType ServiceType { get { GenerateStatus(); return (ServiceType)_type; } } private static bool CheckMachineName(string value) { // string.Contains(char) is .NetCore2.1+ specific return !string.IsNullOrWhiteSpace(value) && value.IndexOf('\\') == -1; } /// <summary> /// Closes the handle to the service manager, but does not /// mark the class as disposed. /// </summary> /// <remarks> /// Violates design guidelines by not matching Dispose() -- matches .NET Framework /// </remarks> public void Close() { if (_serviceManagerHandle != null) { _serviceManagerHandle.Dispose(); _serviceManagerHandle = null; } _statusGenerated = false; _startTypeInitialized = false; _type = Interop.Advapi32.ServiceTypeOptions.SERVICE_TYPE_ALL; } /// <summary> /// Closes the handle to the service manager, and disposes. 
/// </summary>
protected override void Dispose(bool disposing)
{
    Close();
    _disposed = true;
    base.Dispose(disposing);
}

// Queries and caches the current SERVICE_STATUS (state, type, accepted controls).
// No-op when the cache is already populated; Refresh()/Close() clear the cache.
private unsafe void GenerateStatus()
{
    if (!_statusGenerated)
    {
        using (var serviceHandle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_QUERY_STATUS))
        {
            Interop.Advapi32.SERVICE_STATUS svcStatus = new Interop.Advapi32.SERVICE_STATUS();
            bool success = Interop.Advapi32.QueryServiceStatus(serviceHandle, &svcStatus);
            if (!success)
                throw new Win32Exception(Marshal.GetLastWin32Error());

            _commandsAccepted = svcStatus.controlsAccepted;
            _status = (ServiceControllerStatus)svcStatus.currentState;
            _type = svcStatus.serviceType;
            _statusGenerated = true;
        }
    }
}

// Resolves _name and _displayName from whichever of them (or _eitherName) is known,
// by asking the SCM to translate between key name and display name.
private unsafe void GenerateNames()
{
    GetDataBaseHandleWithConnectAccess();

    if (String.IsNullOrEmpty(_name))
    {
        // Figure out the _name based on the information we have.
        // We must either have _displayName or the constructor parameter _eitherName.
        string userGivenName = String.IsNullOrEmpty(_eitherName) ? _displayName : _eitherName;

        if (String.IsNullOrEmpty(userGivenName))
            throw new InvalidOperationException(SR.Format(SR.ServiceName, userGivenName, ServiceBase.MaxNameLength.ToString()));

        // Try it as a display name
        string result = GetServiceKeyName(_serviceManagerHandle, userGivenName);

        if (result != null)
        {
            // Now we have both
            _name = result;
            _displayName = userGivenName;
            _eitherName = null;
            return;
        }

        // Try it as a service name
        result = GetServiceDisplayName(_serviceManagerHandle, userGivenName);

        if (result == null)
        {
            throw new InvalidOperationException(SR.Format(SR.NoService, userGivenName, _machineName), new Win32Exception(Interop.Errors.ERROR_SERVICE_DOES_NOT_EXIST));
        }

        _name = userGivenName;
        _displayName = result;
        _eitherName = null;
    }
    else if (String.IsNullOrEmpty(_displayName))
    {
        // We must have _name
        string result = GetServiceDisplayName(_serviceManagerHandle, _name);

        if (result == null)
        {
            throw new InvalidOperationException(SR.Format(SR.NoService, _name, _machineName), new Win32Exception(Interop.Errors.ERROR_SERVICE_DOES_NOT_EXIST));
        }

        _displayName = result;
        _eitherName = null;
    }
}

/// <summary>
/// Gets service name (key name) from service display name.
/// Returns null if service is not found.
/// </summary>
private unsafe string GetServiceKeyName(SafeServiceHandle SCMHandle, string serviceDisplayName)
{
    // Start with a 256-char stack buffer and grow until the native call succeeds.
    Span<char> initialBuffer = stackalloc char[256];
    var builder = new ValueStringBuilder(initialBuffer);
    int bufLen;
    while (true)
    {
        bufLen = builder.Capacity;
        fixed (char* c = builder)
        {
            if (Interop.Advapi32.GetServiceKeyName(SCMHandle, serviceDisplayName, c, ref bufLen))
                break;
        }

        int lastError = Marshal.GetLastWin32Error();
        if (lastError == Interop.Errors.ERROR_SERVICE_DOES_NOT_EXIST)
        {
            return null;
        }
        else if (lastError != Interop.Errors.ERROR_INSUFFICIENT_BUFFER)
        {
            throw new InvalidOperationException(SR.Format(SR.NoService, serviceDisplayName, _machineName), new Win32Exception(lastError));
        }

        builder.EnsureCapacity(bufLen + 1); // Does not include null
    }

    builder.Length = bufLen;
    return builder.ToString();
}

// Gets the display name for the given service key name; returns null when the
// service does not exist. Same grow-the-buffer loop as GetServiceKeyName.
private unsafe string GetServiceDisplayName(SafeServiceHandle SCMHandle, string serviceName)
{
    var builder = new ValueStringBuilder(4096);
    int bufLen;
    while (true)
    {
        bufLen = builder.Capacity;
        fixed (char* c = builder)
        {
            if (Interop.Advapi32.GetServiceDisplayName(SCMHandle, serviceName, c, ref bufLen))
                break;
        }

        int lastError = Marshal.GetLastWin32Error();
        if (lastError == Interop.Errors.ERROR_SERVICE_DOES_NOT_EXIST)
        {
            return null;
        }
        else if (lastError != Interop.Errors.ERROR_INSUFFICIENT_BUFFER)
        {
            throw new InvalidOperationException(SR.Format(SR.NoService, serviceName, _machineName), new Win32Exception(lastError));
        }

        builder.EnsureCapacity(bufLen + 1); // Does not include null
    }

    builder.Length = bufLen;
    return builder.ToString();
}

// Opens the Service Control Manager on the given machine with the requested access.
// DefaultMachineName (or an empty name) means the local machine (null passed to OpenSCManager).
private static SafeServiceHandle GetDataBaseHandleWithAccess(string machineName, int serviceControlManagerAccess)
{
    SafeServiceHandle databaseHandle = null;
    if (machineName.Equals(DefaultMachineName) || machineName.Length == 0)
    {
        databaseHandle = new SafeServiceHandle(Interop.Advapi32.OpenSCManager(null, null, serviceControlManagerAccess));
    }
    else
    {
        databaseHandle = new SafeServiceHandle(Interop.Advapi32.OpenSCManager(machineName, null, serviceControlManagerAccess));
    }

    if (databaseHandle.IsInvalid)
    {
        Exception inner = new Win32Exception(Marshal.GetLastWin32Error());
        throw new InvalidOperationException(SR.Format(SR.OpenSC, machineName), inner);
    }

    return databaseHandle;
}

private void GetDataBaseHandleWithConnectAccess()
{
    if (_disposed)
    {
        throw new ObjectDisposedException(GetType().Name);
    }

    // get a handle to SCM with connect access and store it in serviceManagerHandle field.
    if (_serviceManagerHandle == null)
    {
        _serviceManagerHandle = GetDataBaseHandleWithAccess(_machineName, Interop.Advapi32.ServiceControllerOptions.SC_MANAGER_CONNECT);
    }
}

/// Gets all the device-driver services on the local machine.
public static ServiceController[] GetDevices()
{
    return GetDevices(DefaultMachineName);
}

/// Gets all the device-driver services in the machine specified.
public static ServiceController[] GetDevices(string machineName)
{
    return GetServicesOfType(machineName, Interop.Advapi32.ServiceTypeOptions.SERVICE_TYPE_DRIVER);
}

/// Opens a handle for the current service. The handle must be Dispose()'d.
private SafeServiceHandle GetServiceHandle(int desiredAccess)
{
    GetDataBaseHandleWithConnectAccess();

    var serviceHandle = new SafeServiceHandle(Interop.Advapi32.OpenService(_serviceManagerHandle, ServiceName, desiredAccess));
    if (serviceHandle.IsInvalid)
    {
        Exception inner = new Win32Exception(Marshal.GetLastWin32Error());
        throw new InvalidOperationException(SR.Format(SR.OpenService, ServiceName, _machineName), inner);
    }

    return serviceHandle;
}

/// Gets the services (not including device-driver services) on the local machine.
public static ServiceController[] GetServices()
{
    return GetServices(DefaultMachineName);
}

/// Gets the services (not including device-driver services) on the machine specified.
public static ServiceController[] GetServices(string machineName)
{
    return GetServicesOfType(machineName, Interop.Advapi32.ServiceTypeOptions.SERVICE_TYPE_WIN32);
}

/// Helper function for ServicesDependedOn.
private static Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS[] GetServicesInGroup(string machineName, string group)
{
    // Identity selector: return the raw native status records for the group.
    return GetServices<Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS>(machineName, Interop.Advapi32.ServiceTypeOptions.SERVICE_TYPE_WIN32, group, status => { return status; });
}

/// Helper function for GetDevices and GetServices.
private static ServiceController[] GetServicesOfType(string machineName, int serviceType)
{
    if (!CheckMachineName(machineName))
        throw new ArgumentException(SR.Format(SR.BadMachineName, machineName));

    return GetServices<ServiceController>(machineName, serviceType, null, status => { return new ServiceController(machineName, status); });
}

/// Helper for GetDevices, GetServices, and ServicesDependedOn
// Enumerates services of the given type (optionally restricted to a load group)
// and projects each native status record through 'selector'.
private static T[] GetServices<T>(string machineName, int serviceType, string group, Func<Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS, T> selector)
{
    int bytesNeeded;
    int servicesReturned;
    int resumeHandle = 0;

    T[] services;

    using (SafeServiceHandle databaseHandle = GetDataBaseHandleWithAccess(machineName, Interop.Advapi32.ServiceControllerOptions.SC_MANAGER_ENUMERATE_SERVICE))
    {
        // First call with a zero-size buffer only to discover the required size.
        Interop.Advapi32.EnumServicesStatusEx(
            databaseHandle,
            Interop.Advapi32.ServiceControllerOptions.SC_ENUM_PROCESS_INFO,
            serviceType,
            Interop.Advapi32.StatusOptions.STATUS_ALL,
            IntPtr.Zero,
            0,
            out bytesNeeded,
            out servicesReturned,
            ref resumeHandle,
            group);

        IntPtr memory = Marshal.AllocHGlobal((IntPtr)bytesNeeded);
        try
        {
            //
            // Get the set of services
            //
            Interop.Advapi32.EnumServicesStatusEx(
                databaseHandle,
                Interop.Advapi32.ServiceControllerOptions.SC_ENUM_PROCESS_INFO,
                serviceType,
                Interop.Advapi32.StatusOptions.STATUS_ALL,
                memory,
                bytesNeeded,
                out bytesNeeded,
                out servicesReturned,
                ref resumeHandle,
                group);

            //
            // Go through the block of memory it returned to us and select the results
            //
            services = new T[servicesReturned];
            for (int i = 0; i < servicesReturned; i++)
            {
                // The buffer holds a contiguous array of ENUM_SERVICE_STATUS_PROCESS structs.
                IntPtr structPtr = (IntPtr)((long)memory + (i * Marshal.SizeOf<Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS>()));
                Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS status = new Interop.Advapi32.ENUM_SERVICE_STATUS_PROCESS();
                Marshal.PtrToStructure(structPtr, status);
                services[i] = selector(status);
            }
        }
        finally
        {
            Marshal.FreeHGlobal(memory);
        }
    }

    return services;
}

/// Suspends a service's operation.
public unsafe void Pause()
{
    using (var serviceHandle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_PAUSE_CONTINUE))
    {
        Interop.Advapi32.SERVICE_STATUS status = new Interop.Advapi32.SERVICE_STATUS();
        bool result = Interop.Advapi32.ControlService(serviceHandle, Interop.Advapi32.ControlOptions.CONTROL_PAUSE, &status);
        if (!result)
        {
            Exception inner = new Win32Exception(Marshal.GetLastWin32Error());
            throw new InvalidOperationException(SR.Format(SR.PauseService, ServiceName, _machineName), inner);
        }
    }
}

/// Continues a service after it has been paused.
public unsafe void Continue()
{
    using (var handle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_PAUSE_CONTINUE))
    {
        var serviceStatus = new Interop.Advapi32.SERVICE_STATUS();
        // Send SERVICE_CONTROL_CONTINUE; on failure surface the Win32 error as the inner exception.
        if (!Interop.Advapi32.ControlService(handle, Interop.Advapi32.ControlOptions.CONTROL_CONTINUE, &serviceStatus))
        {
            var cause = new Win32Exception(Marshal.GetLastWin32Error());
            throw new InvalidOperationException(SR.Format(SR.ResumeService, ServiceName, _machineName), cause);
        }
    }
}

/// Sends the given user-defined control code to the service.
public unsafe void ExecuteCommand(int command)
{
    using (var handle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_USER_DEFINED_CONTROL))
    {
        var serviceStatus = new Interop.Advapi32.SERVICE_STATUS();
        if (!Interop.Advapi32.ControlService(handle, command, &serviceStatus))
        {
            var cause = new Win32Exception(Marshal.GetLastWin32Error());
            throw new InvalidOperationException(SR.Format(SR.ControlService, ServiceName, MachineName), cause);
        }
    }
}

/// Refreshes all property values.
public void Refresh()
{
    // Drop every piece of cached state so the next property access re-queries the SCM.
    _statusGenerated = false;
    _startTypeInitialized = false;
    _dependentServices = null;
    _servicesDependedOn = null;
}

/// Starts the service.
public void Start()
{
    Start(Array.Empty<string>());
}

/// Starts a service in the machine specified.
/// <summary>
/// Starts the service, passing the specified arguments to its entry point.
/// </summary>
/// <param name="args">Arguments for the service; must not be null and must not contain null entries.</param>
public void Start(string[] args)
{
    if (args == null)
        throw new ArgumentNullException(nameof(args));

    using (SafeServiceHandle serviceHandle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_START))
    {
        IntPtr[] argPtrs = new IntPtr[args.Length];
        int i = 0;
        try
        {
            // Marshal each argument to native UTF-16 memory for StartService.
            for (i = 0; i < args.Length; i++)
            {
                if (args[i] == null)
                    throw new ArgumentNullException($"{nameof(args)}[{i}]", SR.ArgsCantBeNull);

                argPtrs[i] = Marshal.StringToHGlobalUni(args[i]);
            }
        }
        catch
        {
            // Free only the strings that were successfully marshaled before the failure.
            // Bug fix: this previously called Marshal.FreeHGlobal(argPtrs[i]) on every
            // iteration -- argPtrs[i] is the unassigned (IntPtr.Zero) failed slot, so the
            // loop was a no-op and every already-allocated native string leaked.
            for (int j = 0; j < i; j++)
                Marshal.FreeHGlobal(argPtrs[j]);
            throw;
        }

        GCHandle argPtrsHandle = new GCHandle();
        try
        {
            // Pin the pointer array so the native call can read it directly.
            argPtrsHandle = GCHandle.Alloc(argPtrs, GCHandleType.Pinned);
            bool result = Interop.Advapi32.StartService(serviceHandle, args.Length, (IntPtr)argPtrsHandle.AddrOfPinnedObject());
            if (!result)
            {
                Exception inner = new Win32Exception(Marshal.GetLastWin32Error());
                throw new InvalidOperationException(SR.Format(SR.CannotStart, ServiceName, _machineName), inner);
            }
        }
        finally
        {
            // All args.Length entries were allocated if we reached this try block.
            for (i = 0; i < args.Length; i++)
                Marshal.FreeHGlobal(argPtrs[i]);

            if (argPtrsHandle.IsAllocated)
                argPtrsHandle.Free();
        }
    }
}

/// Stops the service. If any other services depend on this one for operation,
/// they will be stopped first. The DependentServices property lists this set
/// of services.
public unsafe void Stop()
{
    using (SafeServiceHandle serviceHandle = GetServiceHandle(Interop.Advapi32.ServiceOptions.SERVICE_STOP))
    {
        // Before stopping this service, stop all the dependent services that are running.
        // (It's OK not to cache the result of getting the DependentServices property because it caches on its own.)
        for (int i = 0; i < DependentServices.Length; i++)
        {
            ServiceController currentDependent = DependentServices[i];
            currentDependent.Refresh();
            if (currentDependent.Status != ServiceControllerStatus.Stopped)
            {
                currentDependent.Stop();
                // Give each dependent up to 30 seconds to reach Stopped.
                currentDependent.WaitForStatus(ServiceControllerStatus.Stopped, new TimeSpan(0, 0, 30));
            }
        }

        Interop.Advapi32.SERVICE_STATUS status = new Interop.Advapi32.SERVICE_STATUS();
        bool result = Interop.Advapi32.ControlService(serviceHandle, Interop.Advapi32.ControlOptions.CONTROL_STOP, &status);
        if (!result)
        {
            Exception inner = new Win32Exception(Marshal.GetLastWin32Error());
            throw new InvalidOperationException(SR.Format(SR.StopService, ServiceName, _machineName), inner);
        }
    }
}

/// Waits infinitely until the service has reached the given status.
public void WaitForStatus(ServiceControllerStatus desiredStatus)
{
    WaitForStatus(desiredStatus, TimeSpan.MaxValue);
}

/// Waits until the service has reached the given status or until the specified time
/// has expired
public void WaitForStatus(ServiceControllerStatus desiredStatus, TimeSpan timeout)
{
    if (!Enum.IsDefined(typeof(ServiceControllerStatus), desiredStatus))
        throw new ArgumentException(SR.Format(SR.InvalidEnumArgument, nameof(desiredStatus), (int)desiredStatus, typeof(ServiceControllerStatus)));

    DateTime start = DateTime.UtcNow;
    Refresh();
    while (Status != desiredStatus)
    {
        if (DateTime.UtcNow - start > timeout)
            throw new System.ServiceProcess.TimeoutException(SR.Timeout);

        // Poll roughly every 250 ms (or sooner, if the wait handle is signaled).
        _waitForStatusSignal.WaitOne(250);
        Refresh();
    }
}
}
}
// ReSharper disable RedundantArgumentDefaultValue
namespace Gu.State.Tests
{
    using System;
    using System.Collections.Generic;
    using System.Collections.ObjectModel;

    using NUnit.Framework;

    using static ChangeTrackerTypes;

    public static partial class ChangeTrackerTests
    {
        // Tests asserting the exact NotSupportedException messages produced by
        // Track.Changes for types/graphs it cannot track.
        public static class Throws
        {
            // Mutable structs implementing INotifyPropertyChanged are rejected
            // regardless of the ReferenceHandling passed (and with the default).
            [TestCase(ReferenceHandling.Structural)]
            [TestCase(ReferenceHandling.Throw)]
            public static void WithNotifyingStruct(ReferenceHandling referenceHandling)
            {
                var expected = "Track.Changes(x, y) failed.\r\n" +
                               "The type NotifyingStruct is a mutable struct that implements INotifyPropertyChanged.\r\n" +
                               " As it is a value type subscribing to changes does not make sense.\r\n" +
                               "The property With<NotifyingStruct>.Value of type NotifyingStruct is not supported.\r\n" +
                               "Below are a couple of suggestions that may solve the problem:\r\n" +
                               "* Make NotifyingStruct immutable or use an immutable type.\r\n" +
                               " - For immutable types the following must hold:\r\n" +
                               " - Must be a sealed class or a struct.\r\n" +
                               " - All fields and properties must be readonly.\r\n" +
                               " - All field and property types must be immutable.\r\n" +
                               " - All indexers must be readonly.\r\n" +
                               " - Event fields are ignored.\r\n" +
                               "* Use PropertiesSettings and specify how change tracking is performed:\r\n" +
                               " - ReferenceHandling.Structural means that a the entire graph is tracked.\r\n" +
                               " - ReferenceHandling.References means that only the root level changes are tracked.\r\n" +
                               " - Exclude a combination of the following:\r\n" +
                               " - The property With<NotifyingStruct>.Value.\r\n" +
                               " - The type NotifyingStruct.\r\n";
                var exception = Assert.Throws<NotSupportedException>(() => Track.Changes(new With<NotifyingStruct>(), referenceHandling));
                Assert.AreEqual(expected, exception.Message);

                exception = Assert.Throws<NotSupportedException>(() => Track.Changes(new With<NotifyingStruct>()));
                Assert.AreEqual(expected, exception.Message);
            }

            // With ReferenceHandling.Throw, a mutable reference-type property is rejected.
            [Test]
            public static void WithComplexTypeThrows()
            {
                var expected = "Track.Changes(x, y) failed.\r\n" +
                               "The property With<ComplexType>.Value of type ComplexType is not supported.\r\n" +
                               "Below are a couple of suggestions that may solve the problem:\r\n" +
                               "* Make ComplexType immutable or use an immutable type.\r\n" +
                               " - For immutable types the following must hold:\r\n" +
                               " - Must be a sealed class or a struct.\r\n" +
                               " - All fields and properties must be readonly.\r\n" +
                               " - All field and property types must be immutable.\r\n" +
                               " - All indexers must be readonly.\r\n" +
                               " - Event fields are ignored.\r\n" +
                               "* Use PropertiesSettings and specify how change tracking is performed:\r\n" +
                               " - ReferenceHandling.Structural means that a the entire graph is tracked.\r\n" +
                               " - ReferenceHandling.References means that only the root level changes are tracked.\r\n" +
                               " - Exclude a combination of the following:\r\n" +
                               " - The property With<ComplexType>.Value.\r\n" +
                               " - The type ComplexType.\r\n";
                var exception = Assert.Throws<NotSupportedException>(() => Track.Changes(new With<ComplexType>(), ReferenceHandling.Throw));
                Assert.AreEqual(expected, exception.Message);
            }

            // The same graph is accepted with the default, Structural and References handling.
            [Test]
            public static void WithComplexTypeHappyPath()
            {
                Assert.DoesNotThrow(() => Track.Changes(new With<ComplexType>()));
                Assert.DoesNotThrow(() => Track.Changes(new With<ComplexType>(), ReferenceHandling.Structural));
                Assert.DoesNotThrow(() => Track.Changes(new With<ComplexType>(), ReferenceHandling.References));
            }

            // Adding an item whose subtype exposes an untrackable property throws at Add time.
            [Test]
            public static void AddIllegalThrows()
            {
                var expected = "Track.Changes(x, y) failed.\r\n" +
                               "The type IllegalType does not notify changes.\r\n" +
                               "The property IllegalSubType.Illegal of type IllegalType is not supported.\r\n" +
                               "Below are a couple of suggestions that may solve the problem:\r\n" +
                               "* Implement INotifyPropertyChanged for IllegalType or use a type that does.\r\n" +
                               "* Make IllegalSubType immutable or use an immutable type.\r\n" +
                               "* Make IllegalType immutable or use an immutable type.\r\n" +
                               " - For immutable types the following must hold:\r\n" +
                               " - Must be a sealed class or a struct.\r\n" +
                               " - All fields and properties must be readonly.\r\n" +
                               " - All field and property types must be immutable.\r\n" +
                               " - All indexers must be readonly.\r\n" +
                               " - Event fields are ignored.\r\n" +
                               "* Use PropertiesSettings and specify how change tracking is performed:\r\n" +
                               " - ReferenceHandling.Structural means that a the entire graph is tracked.\r\n" +
                               " - ReferenceHandling.References means that only the root level changes are tracked.\r\n" +
                               " - Exclude a combination of the following:\r\n" +
                               " - The property IllegalSubType.Illegal.\r\n" +
                               " - The type IllegalSubType.\r\n" +
                               " - The type IllegalType.\r\n";
                var root = new ObservableCollection<ComplexType>();
                using (Track.Changes(root))
                {
                    var exception = Assert.Throws<NotSupportedException>(() => root.Add(new IllegalSubType()));
                    Assert.AreEqual(expected, exception.Message);
                }
            }

            // Assigning an untrackable subtype to a tracked property throws at assignment time.
            [Test]
            public static void SetIllegalThrows()
            {
                var expected = "Track.Changes(x, y) failed.\r\n" +
                               "The type IllegalType does not notify changes.\r\n" +
                               "The property IllegalSubType.Illegal of type IllegalType is not supported.\r\n" +
                               "Below are a couple of suggestions that may solve the problem:\r\n" +
                               "* Implement INotifyPropertyChanged for IllegalType or use a type that does.\r\n" +
                               "* Make IllegalSubType immutable or use an immutable type.\r\n" +
                               "* Make IllegalType immutable or use an immutable type.\r\n" +
                               " - For immutable types the following must hold:\r\n" +
                               " - Must be a sealed class or a struct.\r\n" +
                               " - All fields and properties must be readonly.\r\n" +
                               " - All field and property types must be immutable.\r\n" +
                               " - All indexers must be readonly.\r\n" +
                               " - Event fields are ignored.\r\n" +
                               "* Use PropertiesSettings and specify how change tracking is performed:\r\n" +
                               " - ReferenceHandling.Structural means that a the entire graph is tracked.\r\n" +
                               " - ReferenceHandling.References means that only the root level changes are tracked.\r\n" +
                               " - Exclude a combination of the following:\r\n" +
                               " - The property IllegalSubType.Illegal.\r\n" +
                               " - The type IllegalSubType.\r\n" +
                               " - The type IllegalType.\r\n";
                var root = new With<ComplexType>();
                using (Track.Changes(root))
                {
                    var exception = Assert.Throws<NotSupportedException>(() => root.Value = new IllegalSubType());
                    Assert.AreEqual(expected, exception.Message);
                }
            }

            // A root object with a non-notifying property type is rejected up front.
            [Test]
            public static void WithIllegal()
            {
                var expected = "Track.Changes(x, y) failed.\r\n" +
                               "The type IllegalType does not notify changes.\r\n" +
                               "The property WithIllegal.Illegal of type IllegalType is not supported.\r\n" +
                               "Below are a couple of suggestions that may solve the problem:\r\n" +
                               "* Implement INotifyPropertyChanged for IllegalType or use a type that does.\r\n" +
                               "* Make IllegalType immutable or use an immutable type.\r\n" +
                               " - For immutable types the following must hold:\r\n" +
                               " - Must be a sealed class or a struct.\r\n" +
                               " - All fields and properties must be readonly.\r\n" +
                               " - All field and property types must be immutable.\r\n" +
                               " - All indexers must be readonly.\r\n" +
                               " - Event fields are ignored.\r\n" +
                               "* Use PropertiesSettings and specify how change tracking is performed:\r\n" +
                               " - ReferenceHandling.Structural means that a the entire graph is tracked.\r\n" +
                               " - ReferenceHandling.References means that only the root level changes are tracked.\r\n" +
                               " - Exclude a combination of the following:\r\n" +
                               " - The property WithIllegal.Illegal.\r\n" +
                               " - The type IllegalType.\r\n";
                var item = new WithIllegal();
                var settings = PropertiesSettings.GetOrCreate();
                var exception = Assert.Throws<NotSupportedException>(() => Track.Changes(item, settings));
                Assert.AreEqual(expected, exception.Message);

                exception = Assert.Throws<NotSupportedException>(() => Track.Changes(item));
                Assert.AreEqual(expected, exception.Message);
            }

            // A collection root that does not raise INotifyCollectionChanged is rejected.
            [Test]
            public static void IllegalEnumerable()
            {
                var expected = "Track.Changes(x, y) failed.\r\n" +
                               "The collection type IllegalEnumerable does not notify changes.\r\n" +
                               "Below are a couple of suggestions that may solve the problem:\r\n" +
                               "* Implement INotifyCollectionChanged for IllegalEnumerable or use a type that does.\r\n" +
                               "* Use PropertiesSettings and specify how change tracking is performed:\r\n" +
                               " - ReferenceHandling.Structural means that a the entire graph is tracked.\r\n" +
                               " - ReferenceHandling.References means that only the root level changes are tracked.\r\n";
                var item = new IllegalEnumerable();
                var exception = Assert.Throws<NotSupportedException>(() => Track.Changes(item));
                Assert.AreEqual(expected, exception.Message);

                exception = Assert.Throws<NotSupportedException>(() => Track.Changes(item, PropertiesSettings.GetOrCreate()));
                Assert.AreEqual(expected, exception.Message);
            }

            // A property of plain List<int> (no change notification) is rejected.
            [Test]
            public static void WithListOfInts()
            {
                var expected = "Track.Changes(x, y) failed.\r\n" +
                               "The collection type List<int> does not notify changes.\r\n" +
                               "The property With<List<int>>.Value of type List<int> is not supported.\r\n" +
                               "Below are a couple of suggestions that may solve the problem:\r\n" +
                               "* Use a type that implements INotifyCollectionChanged instead of List<int>.\r\n" +
                               "* Use an immutable type instead of List<int>.\r\n" +
                               " - For immutable types the following must hold:\r\n" +
                               " - Must be a sealed class or a struct.\r\n" +
                               " - All fields and properties must be readonly.\r\n" +
                               " - All field and property types must be immutable.\r\n" +
                               " - All indexers must be readonly.\r\n" +
                               " - Event fields are ignored.\r\n" +
                               "* Use PropertiesSettings and specify how change tracking is performed:\r\n" +
                               " - ReferenceHandling.Structural means that a the entire graph is tracked.\r\n" +
                               " - ReferenceHandling.References means that only the root level changes are tracked.\r\n" +
                               " - Exclude a combination of the following:\r\n" +
                               " - The property With<List<int>>.Value.\r\n" +
                               " - The type List<int>.\r\n";
                var item = new With<List<int>>();
                var exception = Assert.Throws<NotSupportedException>(() => Track.Changes(item));
                Assert.AreEqual(expected, exception.Message);

                exception = Assert.Throws<NotSupportedException>(() => Track.Changes(item, PropertiesSettings.GetOrCreate()));
                Assert.AreEqual(expected, exception.Message);
            }
        }
    }
}
using System;

using NUnit.Framework;

using Raksha.Crypto;
using Raksha.Crypto.Parameters;
using Raksha.Security;
using Raksha.Utilities.Encoders;
using Raksha.Tests.Utilities;

namespace Raksha.Tests.Misc
{
    /**
     * CMAC tester - <a href="http://www.nuee.nagoya-u.ac.jp/labs/tiwata/omac/tv/omac1-tv.txt">AES Official Test Vectors</a>.
     */
    [TestFixture]
    public class CMacTest
        : SimpleTest
    {
        private static readonly byte[] keyBytes128 = Hex.Decode("2b7e151628aed2a6abf7158809cf4f3c");
        private static readonly byte[] keyBytes192 = Hex.Decode(
            "8e73b0f7da0e6452c810f32b809079e5" +
            "62f8ead2522c6b7b");
        private static readonly byte[] keyBytes256 = Hex.Decode(
            "603deb1015ca71be2b73aef0857d7781" +
            "1f352c073b6108d72d9810a30914dff4");

        // Official test messages of 0, 16, 40 and 64 bytes.
        private static readonly byte[] input0 = Hex.Decode("");
        private static readonly byte[] input16 = Hex.Decode("6bc1bee22e409f96e93d7e117393172a");
        private static readonly byte[] input40 = Hex.Decode(
            "6bc1bee22e409f96e93d7e117393172a" +
            "ae2d8a571e03ac9c9eb76fac45af8e5130c81c46a35ce411");
        private static readonly byte[] input64 = Hex.Decode(
            "6bc1bee22e409f96e93d7e117393172a" +
            "ae2d8a571e03ac9c9eb76fac45af8e51" +
            "30c81c46a35ce411e5fbc1191a0a52ef" +
            "f69f2445df4f9b17ad2b417be66c3710");

        // Expected CMAC values per key size and message length.
        private static readonly byte[] output_k128_m0 = Hex.Decode("bb1d6929e95937287fa37d129b756746");
        private static readonly byte[] output_k128_m16 = Hex.Decode("070a16b46b4d4144f79bdd9dd04a287c");
        private static readonly byte[] output_k128_m40 = Hex.Decode("dfa66747de9ae63030ca32611497c827");
        private static readonly byte[] output_k128_m64 = Hex.Decode("51f0bebf7e3b9d92fc49741779363cfe");
        private static readonly byte[] output_k192_m0 = Hex.Decode("d17ddf46adaacde531cac483de7a9367");
        private static readonly byte[] output_k192_m16 = Hex.Decode("9e99a7bf31e710900662f65e617c5184");
        private static readonly byte[] output_k192_m40 = Hex.Decode("8a1de5be2eb31aad089a82e6ee908b0e");
        private static readonly byte[] output_k192_m64 = Hex.Decode("a1d5df0eed790f794d77589659f39a11");
        private static readonly byte[] output_k256_m0 = Hex.Decode("028962f61b7bf89efc6b551f4667d983");
        private static readonly byte[] output_k256_m16 = Hex.Decode("28a7023f452e8f82bd4bf28d8c37c35c");
        private static readonly byte[] output_k256_m40 = Hex.Decode("aaf3d8f1de5640c232f5b169b9c911e6");
        private static readonly byte[] output_k256_m64 = Hex.Decode("e1992190549f6ed5696a2c056c315410");
        private static readonly byte[] output_des_ede = Hex.Decode("1ca670dea381d37c");

        public CMacTest()
        {
        }

        // Runs one init/update/final cycle and fails with the expected-vs-actual hex values on mismatch.
        private void ImplTestVector(IMac mac, KeyParameter key, byte[] input, byte[] expected)
        {
            mac.Init(key);
            mac.BlockUpdate(input, 0, input.Length);
            byte[] output = MacUtilities.DoFinal(mac);
            if (!AreEqual(output, expected))
            {
                Fail("Failed - expected " + Hex.ToHexString(expected)
                    + " got " + Hex.ToHexString(output));
            }
        }

        public override void PerformTest()
        {
            // Mac mac = Mac.getInstance("AESCMAC", "BC");
            IMac mac = MacUtilities.GetMac("AESCMAC");

            // 128 bit key against the 0/16/40/64 byte messages
            KeyParameter key = new KeyParameter(keyBytes128);
            ImplTestVector(mac, key, input0, output_k128_m0);
            ImplTestVector(mac, key, input16, output_k128_m16);
            ImplTestVector(mac, key, input40, output_k128_m40);
            ImplTestVector(mac, key, input64, output_k128_m64);

            // 192 bit key
            key = new KeyParameter(keyBytes192);
            ImplTestVector(mac, key, input0, output_k192_m0);
            ImplTestVector(mac, key, input16, output_k192_m16);
            ImplTestVector(mac, key, input40, output_k192_m40);
            ImplTestVector(mac, key, input64, output_k192_m64);

            // 256 bit key
            key = new KeyParameter(keyBytes256);
            ImplTestVector(mac, key, input0, output_k256_m0);
            ImplTestVector(mac, key, input16, output_k256_m16);
            ImplTestVector(mac, key, input40, output_k256_m40);
            ImplTestVector(mac, key, input64, output_k256_m64);

            // mac = Mac.getInstance("DESedeCMAC", "BC");
            // DESede CMAC with the 128 bit key bytes and the empty message
            mac = MacUtilities.GetMac("DESedeCMAC");
            ImplTestVector(mac, new KeyParameter(keyBytes128), input0, output_des_ede);
        }

        public override string Name
        {
            get { return "CMac"; }
        }

        public static void Main(string[] args)
        {
            RunTest(new CMacTest());
        }

        [Test]
        public void TestFunction()
        {
            string resultText = Perform().ToString();

            Assert.AreEqual(Name + ": Okay", resultText);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Xunit;
using System;
using System.Collections;
using System.Collections.Specialized;

namespace System.Collections.Specialized.Tests
{
    /// <summary>
    /// Tests for the <see cref="HybridDictionary(int, bool)"/> constructor.
    /// The same scenario is exercised for initial capacities 0, 10 and 100,
    /// once case-sensitive and once case-insensitive.
    /// </summary>
    public class CtorIntBoolTests
    {
        // max length of random strings (kept for source compatibility with sibling tests)
        public const int MAX_LEN = 50;

        // Large enough to push the HybridDictionary past its internal list threshold
        // into hashtable-backed mode.
        private const int BIG_LENGTH = 100;

        [Fact]
        public void Test01()
        {
            // Key/value sets used for the "numerous Add" phase.
            string[] valuesLong = new string[BIG_LENGTH];
            string[] keysLong = new string[BIG_LENGTH];
            for (int i = 0; i < BIG_LENGTH; i++)
            {
                valuesLong[i] = "Item" + i;
                keysLong[i] = "keY" + i;
            }

            // The original test repeated the identical scenario verbatim for each
            // capacity; drive the shared helpers with a loop instead.
            foreach (int capacity in new[] { 0, 10, 100 })
            {
                VerifyCaseSensitive(new HybridDictionary(capacity, false), keysLong, valuesLong);
                VerifyCaseInsensitive(new HybridDictionary(capacity, true), keysLong, valuesLong);
            }
        }

        /// <summary>
        /// Verifies a freshly constructed case-sensitive dictionary: distinct
        /// casing yields distinct keys, Clear() works in both list and hashtable
        /// modes, and reference-type keys use identity semantics.
        /// </summary>
        private static void VerifyCaseSensitive(HybridDictionary hd, string[] keys, string[] values)
        {
            // A fresh dictionary is empty and lookups return null.
            Assert.Equal(0, hd.Count);
            Assert.Null(hd["key"]);

            // Keys that differ only in casing are distinct entries.
            hd.Add("Name", "Value");
            Assert.Equal(1, hd.Count);
            Assert.Equal("Value", hd["Name"].ToString());

            hd.Add("NaMe", "Value");
            Assert.Equal(2, hd.Count);
            Assert.Equal("Value", hd["NaMe"].ToString());

            // Case-sensitive: an uppercase variant of an existing key is not found.
            Assert.Null(hd["NAME"]);

            // Clear() on a short (list-backed) dictionary.
            hd.Clear();
            Assert.Equal(0, hd.Count);
            Assert.Null(hd["Name"]);

            // Numerous adds push the dictionary into hashtable mode.
            int len = values.Length;
            for (int i = 0; i < len; i++)
            {
                hd.Add(keys[i], values[i]);
            }
            Assert.Equal(len, hd.Count);
            for (int i = 0; i < len; i++)
            {
                Assert.Equal(values[i], hd[keys[i]].ToString());
                Assert.Null(hd[keys[i].ToUpper()]); // still case-sensitive in hashtable mode
            }

            // Clear() on a long (hashtable-backed) dictionary.
            hd.Clear();
            Assert.Equal(0, hd.Count);

            // Few reference-type keys that do not override Equals().
            Hashtable[] lbls = { new Hashtable(), new Hashtable() };
            ArrayList[] bs = { new ArrayList(), new ArrayList() };
            hd.Add(lbls[0], bs[0]);
            hd.Add(lbls[1], bs[1]);
            Assert.Equal(2, hd.Count);
            Assert.True(hd.Contains(lbls[0]));
            Assert.True(hd.Contains(lbls[1]));
            Assert.Equal(2, hd.Values.Count);

            hd.Remove(lbls[1]);
            Assert.Equal(1, hd.Count);
            Assert.False(hd.Contains(lbls[1]));

            // Many reference-type keys that do not override Equals().
            hd.Clear();
            lbls = new Hashtable[BIG_LENGTH];
            bs = new ArrayList[BIG_LENGTH];
            for (int i = 0; i < BIG_LENGTH; i++)
            {
                lbls[i] = new Hashtable();
                bs[i] = new ArrayList();
                hd.Add(lbls[i], bs[i]);
            }
            Assert.Equal(BIG_LENGTH, hd.Count);
            for (int i = 0; i < BIG_LENGTH; i++)
            {
                // The original accidentally re-checked lbls[0] on every iteration;
                // verify each individual key instead.
                Assert.True(hd.Contains(lbls[i]));
            }
            Assert.Equal(BIG_LENGTH, hd.Values.Count);

            hd.Remove(lbls[0]);
            Assert.Equal(BIG_LENGTH - 1, hd.Count);
            Assert.False(hd.Contains(lbls[0]));
        }

        /// <summary>
        /// Verifies a freshly constructed case-insensitive dictionary: keys that
        /// differ only in casing collide, and uppercase lookups succeed.
        /// </summary>
        private static void VerifyCaseInsensitive(HybridDictionary hd, string[] keys, string[] values)
        {
            // A fresh dictionary is empty and lookups return null.
            Assert.Equal(0, hd.Count);
            Assert.Null(hd["key"]);

            hd.Add("Name", "Value");
            Assert.Equal(1, hd.Count);
            Assert.Equal("Value", hd["Name"].ToString());

            // Keys differing only in casing collide in case-insensitive mode.
            Assert.Throws<ArgumentException>(() => hd.Add("NaMe", "vl"));
            Assert.Equal(1, hd.Count);

            // Case-insensitive: the uppercase variant finds the entry.
            Assert.Equal("Value", hd["NAME"].ToString());

            // Clear() on a short dictionary.
            hd.Clear();
            Assert.Equal(0, hd.Count);
            Assert.Null(hd["Name"]);

            // Numerous adds push the dictionary into hashtable mode.
            int len = values.Length;
            for (int i = 0; i < len; i++)
            {
                hd.Add(keys[i], values[i]);
            }
            Assert.Equal(len, hd.Count);
            for (int i = 0; i < len; i++)
            {
                Assert.Equal(values[i], hd[keys[i]].ToString());
                Assert.Equal(values[i], hd[keys[i].ToUpper()].ToString());
            }

            // Clear() on a long dictionary.
            hd.Clear();
            Assert.Equal(0, hd.Count);

            // Non-string keys in a case-insensitive dictionary: the original test
            // documents that adding a second such key throws ArgumentException
            // (behavior preserved as observed; see original assertions).
            Hashtable[] lbls = { new Hashtable(), new Hashtable() };
            ArrayList[] bs = { new ArrayList(), new ArrayList() };
            hd.Add(lbls[0], bs[0]);
            Assert.Throws<ArgumentException>(() => hd.Add(lbls[1], bs[1]));
            Assert.Equal(1, hd.Count);
        }
    }
}
namespace Tralus.Framework.Migration.Migrations
{
    using System;
    using System.Data.Entity.Migrations;

    // Entity Framework 6 code-first migration that creates the initial schema
    // for the development database: the state-machine tables (appearance, state,
    // machine, transition) and the security tables (users, roles, permission
    // objects and the role/user join tables).
    //
    // NOTE: this class is generated by the EF migration scaffolder; the order of
    // the Create/Drop calls is significant (foreign keys reference tables created
    // earlier, and Down() tears everything down in reverse dependency order).
    // Do not reorder or hand-edit the statements.
    public partial class InitForDevDb : DbMigration
    {
        // Applies the migration: creates all tables with their primary keys,
        // foreign keys and supporting indexes.
        public override void Up()
        {
            // Conditional-appearance rules attached to a state machine state.
            CreateTable(
                "System.StateMachineAppearance",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        TargetItems = c.String(),
                        AppearanceItemType = c.String(),
                        Criteria = c.String(),
                        Context = c.String(),
                        Priority = c.Int(nullable: false),
                        FontStyle = c.Int(),
                        FontColorInt = c.Int(nullable: false),
                        BackColorInt = c.Int(nullable: false),
                        Visibility = c.Int(),
                        Enabled = c.Boolean(),
                        Method = c.String(),
                        State_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.StateMachineState", t => t.State_Id)
                .Index(t => t.State_Id);

            // A single state belonging to a state machine.
            CreateTable(
                "System.StateMachineState",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Caption = c.String(),
                        MarkerValue = c.String(),
                        TargetObjectCriteria = c.String(),
                        StateMachine_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.StateMachine", t => t.StateMachine_Id)
                .Index(t => t.StateMachine_Id);

            // The state machine itself; StartState_Id points back into
            // StateMachineState (circular reference resolved by nullable FK).
            CreateTable(
                "System.StateMachine",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Name = c.String(),
                        Active = c.Boolean(nullable: false),
                        TargetObjectTypeName = c.String(),
                        StatePropertyNameBase = c.String(),
                        ExpandActionsInDetailView = c.Boolean(nullable: false),
                        StartState_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.StateMachineState", t => t.StartState_Id)
                .Index(t => t.StartState_Id);

            // Transition between two states (source -> target).
            CreateTable(
                "System.StateMachineTransition",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Caption = c.String(),
                        Index = c.Int(nullable: false),
                        SaveAndCloseView = c.Boolean(nullable: false),
                        TargetState_Id = c.Guid(),
                        SourceState_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.StateMachineState", t => t.TargetState_Id)
                .ForeignKey("System.StateMachineState", t => t.SourceState_Id)
                .Index(t => t.TargetState_Id)
                .Index(t => t.SourceState_Id);

            // Prospective user records (no FK relationships).
            CreateTable(
                "System.CandidateUser",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Username = c.String(),
                        Description = c.String(),
                    })
                .PrimaryKey(t => t.Id);

            // Security role.
            CreateTable(
                "System.Role",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Name = c.String(),
                        IsAdministrative = c.Boolean(nullable: false),
                        CanEditModel = c.Boolean(nullable: false),
                    })
                .PrimaryKey(t => t.Id);

            // Per-type permissions owned by a role.
            CreateTable(
                "System.TypePermissionObject",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Permissions = c.String(),
                        AllowRead = c.Boolean(nullable: false),
                        AllowWrite = c.Boolean(nullable: false),
                        AllowCreate = c.Boolean(nullable: false),
                        AllowDelete = c.Boolean(nullable: false),
                        AllowNavigate = c.Boolean(nullable: false),
                        TargetTypeFullName = c.String(),
                        Role_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.Role", t => t.Role_Id)
                .Index(t => t.Role_Id);

            // Instance-level permission refinement of a type permission.
            CreateTable(
                "System.SecuritySystemInstancePermissionsObject",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Operations = c.String(),
                        InstanceId = c.Guid(),
                        Owner_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.TypePermissionObject", t => t.Owner_Id)
                .Index(t => t.Owner_Id);

            // Member-level (property) permission refinement of a type permission.
            CreateTable(
                "System.SecuritySystemMemberPermissionsObject",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Members = c.String(),
                        Criteria = c.String(),
                        AllowRead = c.Boolean(nullable: false),
                        AllowWrite = c.Boolean(nullable: false),
                        EffectiveRead = c.Boolean(),
                        EffectiveWrite = c.Boolean(),
                        Owner_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.TypePermissionObject", t => t.Owner_Id)
                .Index(t => t.Owner_Id);

            // Object-level (criteria-based) permission refinement of a type permission.
            CreateTable(
                "System.SecuritySystemObjectPermissionsObject",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        Permissions = c.String(),
                        Criteria = c.String(),
                        AllowRead = c.Boolean(nullable: false),
                        AllowWrite = c.Boolean(nullable: false),
                        AllowDelete = c.Boolean(nullable: false),
                        AllowNavigate = c.Boolean(nullable: false),
                        EffectiveRead = c.Boolean(),
                        EffectiveWrite = c.Boolean(),
                        EffectiveDelete = c.Boolean(),
                        EffectiveNavigate = c.Boolean(),
                        Owner_Id = c.Guid(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("System.TypePermissionObject", t => t.Owner_Id)
                .Index(t => t.Owner_Id);

            // Application user account.
            CreateTable(
                "System.User",
                c => new
                    {
                        Id = c.Guid(nullable: false),
                        UserName = c.String(),
                        IsActive = c.Boolean(nullable: false),
                        ChangePasswordOnFirstLogon = c.Boolean(nullable: false),
                        StoredPassword = c.String(),
                    })
                .PrimaryKey(t => t.Id);

            // Self-referencing many-to-many join table for role hierarchies
            // (composite primary key).
            CreateTable(
                "System.RoleRoles",
                c => new
                    {
                        ParentRoleId = c.Guid(nullable: false),
                        ChildRoleId = c.Guid(nullable: false),
                    })
                .PrimaryKey(t => new { t.ParentRoleId, t.ChildRoleId })
                .ForeignKey("System.Role", t => t.ParentRoleId)
                .ForeignKey("System.Role", t => t.ChildRoleId)
                .Index(t => t.ParentRoleId)
                .Index(t => t.ChildRoleId);

            // User <-> role many-to-many join table; rows cascade-delete when
            // either side is removed.
            CreateTable(
                "System.UserRoles",
                c => new
                    {
                        UserId = c.Guid(nullable: false),
                        RoleId = c.Guid(nullable: false),
                    })
                .PrimaryKey(t => new { t.UserId, t.RoleId })
                .ForeignKey("System.User", t => t.UserId, cascadeDelete: true)
                .ForeignKey("System.Role", t => t.RoleId, cascadeDelete: true)
                .Index(t => t.UserId)
                .Index(t => t.RoleId);
        }

        // Reverts the migration: drops foreign keys first, then indexes, then
        // tables, in reverse dependency order of Up().
        public override void Down()
        {
            DropForeignKey("System.UserRoles", "RoleId", "System.Role");
            DropForeignKey("System.UserRoles", "UserId", "System.User");
            DropForeignKey("System.TypePermissionObject", "Role_Id", "System.Role");
            DropForeignKey("System.SecuritySystemObjectPermissionsObject", "Owner_Id", "System.TypePermissionObject");
            DropForeignKey("System.SecuritySystemMemberPermissionsObject", "Owner_Id", "System.TypePermissionObject");
            DropForeignKey("System.SecuritySystemInstancePermissionsObject", "Owner_Id", "System.TypePermissionObject");
            DropForeignKey("System.RoleRoles", "ChildRoleId", "System.Role");
            DropForeignKey("System.RoleRoles", "ParentRoleId", "System.Role");
            DropForeignKey("System.StateMachineTransition", "SourceState_Id", "System.StateMachineState");
            DropForeignKey("System.StateMachineTransition", "TargetState_Id", "System.StateMachineState");
            DropForeignKey("System.StateMachineState", "StateMachine_Id", "System.StateMachine");
            DropForeignKey("System.StateMachine", "StartState_Id", "System.StateMachineState");
            DropForeignKey("System.StateMachineAppearance", "State_Id", "System.StateMachineState");
            DropIndex("System.UserRoles", new[] { "RoleId" });
            DropIndex("System.UserRoles", new[] { "UserId" });
            DropIndex("System.RoleRoles", new[] { "ChildRoleId" });
            DropIndex("System.RoleRoles", new[] { "ParentRoleId" });
            DropIndex("System.SecuritySystemObjectPermissionsObject", new[] { "Owner_Id" });
            DropIndex("System.SecuritySystemMemberPermissionsObject", new[] { "Owner_Id" });
            DropIndex("System.SecuritySystemInstancePermissionsObject", new[] { "Owner_Id" });
            DropIndex("System.TypePermissionObject", new[] { "Role_Id" });
            DropIndex("System.StateMachineTransition", new[] { "SourceState_Id" });
            DropIndex("System.StateMachineTransition", new[] { "TargetState_Id" });
            DropIndex("System.StateMachine", new[] { "StartState_Id" });
            DropIndex("System.StateMachineState", new[] { "StateMachine_Id" });
            DropIndex("System.StateMachineAppearance", new[] { "State_Id" });
            DropTable("System.UserRoles");
            DropTable("System.RoleRoles");
            DropTable("System.User");
            DropTable("System.SecuritySystemObjectPermissionsObject");
            DropTable("System.SecuritySystemMemberPermissionsObject");
            DropTable("System.SecuritySystemInstancePermissionsObject");
            DropTable("System.TypePermissionObject");
            DropTable("System.Role");
            DropTable("System.CandidateUser");
            DropTable("System.StateMachineTransition");
            DropTable("System.StateMachine");
            DropTable("System.StateMachineState");
            DropTable("System.StateMachineAppearance");
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Data.Sqlite;
using osu.Framework.Development;
using osu.Framework.IO.Network;
using osu.Framework.Logging;
using osu.Framework.Platform;
using osu.Framework.Testing;
using osu.Framework.Threading;
using osu.Game.Database;
using osu.Game.Online.API;
using osu.Game.Online.API.Requests;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;

namespace osu.Game.Beatmaps
{
    /// <summary>
    /// A component which handles population of online IDs for beatmaps using a two part lookup procedure.
    /// </summary>
    /// <remarks>
    /// On creating the component, a copy of a database containing metadata for a large subset of beatmaps (stored to <see cref="cache_database_name"/>) will be downloaded if not already present locally.
    /// This will always be checked before doing a second online query to get required metadata.
    /// </remarks>
    [ExcludeFromDynamicCompile]
    public class BeatmapOnlineLookupQueue : IDisposable
    {
        private readonly IAPIProvider api;
        private readonly Storage storage;

        private const int update_queue_request_concurrency = 4;

        // bounds the number of simultaneous lookups (and therefore web requests) in flight.
        private readonly ThreadedTaskScheduler updateScheduler = new ThreadedTaskScheduler(update_queue_request_concurrency, nameof(BeatmapOnlineLookupQueue));

        // non-null while the cache download is in progress (or after it has failed);
        // local cache lookups are suppressed while this is set.
        private FileWebRequest cacheDownloadRequest;

        private const string cache_database_name = "online.db";

        public BeatmapOnlineLookupQueue(IAPIProvider api, Storage storage)
        {
            this.api = api;
            this.storage = storage;

            // avoid downloading / using cache for unit tests.
            if (!DebugUtils.IsNUnitRunning && !storage.Exists(cache_database_name))
                prepareLocalCache();
        }

        /// <summary>
        /// Queues an online ID lookup for every beatmap in the given set.
        /// </summary>
        /// <param name="beatmapSet">The set whose beatmaps should be populated.</param>
        /// <param name="cancellationToken">Token used to cancel queued lookups.</param>
        /// <returns>A task completing when all per-beatmap lookups have finished.</returns>
        public Task UpdateAsync(BeatmapSetInfo beatmapSet, CancellationToken cancellationToken)
        {
            return Task.WhenAll(beatmapSet.Beatmaps.Select(b => UpdateAsync(beatmapSet, b, cancellationToken)).ToArray());
        }

        // todo: expose this when we need to do individual difficulty lookups.
        protected Task UpdateAsync(BeatmapSetInfo beatmapSet, BeatmapInfo beatmapInfo, CancellationToken cancellationToken)
            => Task.Factory.StartNew(() => lookup(beatmapSet, beatmapInfo), cancellationToken, TaskCreationOptions.HideScheduler | TaskCreationOptions.RunContinuationsAsynchronously, updateScheduler);

        /// <summary>
        /// Performs the two part lookup: local cache first, then (only while online) a web request.
        /// On any failure the beatmap's online ID is reset to null.
        /// </summary>
        private void lookup(BeatmapSetInfo set, BeatmapInfo beatmapInfo)
        {
            if (checkLocalCache(set, beatmapInfo))
                return;

            if (api?.State.Value != APIState.Online)
                return;

            var req = new GetBeatmapRequest(beatmapInfo);

            req.Failure += fail;

            try
            {
                // intentionally blocking to limit web request concurrency
                api.Perform(req);

                var res = req.Response;

                if (res != null)
                {
                    beatmapInfo.Status = res.Status;
                    beatmapInfo.BeatmapSet.Status = res.BeatmapSet?.Status ?? BeatmapSetOnlineStatus.None;
                    beatmapInfo.BeatmapSet.OnlineBeatmapSetID = res.OnlineBeatmapSetID;
                    beatmapInfo.OnlineBeatmapID = res.OnlineID;

                    if (beatmapInfo.Metadata != null)
                        beatmapInfo.Metadata.AuthorID = res.AuthorID;

                    if (beatmapInfo.BeatmapSet.Metadata != null)
                        beatmapInfo.BeatmapSet.Metadata.AuthorID = res.AuthorID;

                    logForModel(set, $"Online retrieval mapped {beatmapInfo} to {res.OnlineBeatmapSetID} / {res.OnlineID}.");
                }
            }
            catch (Exception e)
            {
                fail(e);
            }

            void fail(Exception e)
            {
                beatmapInfo.OnlineBeatmapID = null;
                logForModel(set, $"Online retrieval failed for {beatmapInfo} ({e.Message})");
            }
        }

        /// <summary>
        /// Kicks off an asynchronous download and decompression of the shared metadata cache.
        /// </summary>
        private void prepareLocalCache()
        {
            string cacheFilePath = storage.GetFullPath(cache_database_name);
            string compressedCacheFilePath = $"{cacheFilePath}.bz2";

            // the date-stamped query string causes a fresh copy to be fetched at most once per day.
            cacheDownloadRequest = new FileWebRequest(compressedCacheFilePath, $"https://assets.ppy.sh/client-resources/{cache_database_name}.bz2?{DateTimeOffset.UtcNow:yyyyMMdd}");

            cacheDownloadRequest.Failed += ex =>
            {
                // clean up partial downloads; cacheDownloadRequest stays non-null so lookups skip the cache.
                File.Delete(compressedCacheFilePath);
                File.Delete(cacheFilePath);

                Logger.Log($"{nameof(BeatmapOnlineLookupQueue)}'s online cache download failed: {ex}", LoggingTarget.Database);
            };

            cacheDownloadRequest.Finished += () =>
            {
                try
                {
                    using (var stream = File.OpenRead(cacheDownloadRequest.Filename))
                    using (var outStream = File.OpenWrite(cacheFilePath))
                    using (var bz2 = new BZip2Stream(stream, CompressionMode.Decompress, false))
                        bz2.CopyTo(outStream);

                    // set to null on completion to allow lookups to begin using the new source
                    cacheDownloadRequest = null;
                }
                catch (Exception ex)
                {
                    Logger.Log($"{nameof(BeatmapOnlineLookupQueue)}'s online cache extraction failed: {ex}", LoggingTarget.Database);
                    File.Delete(cacheFilePath);
                }
                finally
                {
                    File.Delete(compressedCacheFilePath);
                }
            };

            cacheDownloadRequest.PerformAsync();
        }

        /// <summary>
        /// Attempts to resolve online IDs from the local cache database.
        /// </summary>
        /// <returns>true if the beatmap was matched and populated from the cache.</returns>
        private bool checkLocalCache(BeatmapSetInfo set, BeatmapInfo beatmapInfo)
        {
            // download is in progress (or was, and failed).
            if (cacheDownloadRequest != null)
                return false;

            // database is unavailable.
            if (!storage.Exists(cache_database_name))
                return false;

            // nothing usable to match against.
            if (string.IsNullOrEmpty(beatmapInfo.MD5Hash)
                && string.IsNullOrEmpty(beatmapInfo.Path)
                && beatmapInfo.OnlineBeatmapID == null)
                return false;

            try
            {
                // was a hardcoded "online.db" literal; use the shared constant so the
                // filename can never drift out of sync with the download/exists checks above.
                using (var db = new SqliteConnection(DatabaseContextFactory.CreateDatabaseConnectionString(cache_database_name, storage)))
                {
                    db.Open();

                    using (var cmd = db.CreateCommand())
                    {
                        cmd.CommandText = "SELECT beatmapset_id, beatmap_id, approved, user_id FROM osu_beatmaps WHERE checksum = @MD5Hash OR beatmap_id = @OnlineBeatmapID OR filename = @Path";

                        cmd.Parameters.Add(new SqliteParameter("@MD5Hash", beatmapInfo.MD5Hash));
                        cmd.Parameters.Add(new SqliteParameter("@OnlineBeatmapID", beatmapInfo.OnlineBeatmapID ?? (object)DBNull.Value));
                        cmd.Parameters.Add(new SqliteParameter("@Path", beatmapInfo.Path));

                        using (var reader = cmd.ExecuteReader())
                        {
                            if (reader.Read())
                            {
                                var status = (BeatmapSetOnlineStatus)reader.GetByte(2);

                                beatmapInfo.Status = status;
                                beatmapInfo.BeatmapSet.Status = status;
                                beatmapInfo.BeatmapSet.OnlineBeatmapSetID = reader.GetInt32(0);
                                beatmapInfo.OnlineBeatmapID = reader.GetInt32(1);

                                if (beatmapInfo.Metadata != null)
                                    beatmapInfo.Metadata.AuthorID = reader.GetInt32(3);

                                if (beatmapInfo.BeatmapSet.Metadata != null)
                                    beatmapInfo.BeatmapSet.Metadata.AuthorID = reader.GetInt32(3);

                                logForModel(set, $"Cached local retrieval for {beatmapInfo}.");
                                return true;
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // best-effort: a corrupt/locked cache should fall through to the online lookup.
                logForModel(set, $"Cached local retrieval for {beatmapInfo} failed with {ex}.");
            }

            return false;
        }

        private void logForModel(BeatmapSetInfo set, string message) =>
            ArchiveModelManager<BeatmapSetInfo, BeatmapSetFileInfo>.LogForModel(set, $"[{nameof(BeatmapOnlineLookupQueue)}] {message}");

        public void Dispose()
        {
            cacheDownloadRequest?.Dispose();
            updateScheduler?.Dispose();
        }
    }
}
// --------------------------------------------------------------------------
// Functional Programming in .NET - Chapter 3
// --------------------------------------------------------------------------
// NOTE: This library contains several useful classes for functional
// programming in C# that we implemented in chapter 3 and that we'll
// extend and use later in the book. Each section is marked with a reference
// to a code listing or section in the book where it was discussed.
// --------------------------------------------------------------------------

using System;
using System.Collections.Generic;

namespace System
{
    /// <summary>
    /// An immutable pair of values with structural (value-based) equality.
    /// </summary>
    public sealed class Tuple<T1, T2>
    {
        private readonly T1 item1;
        private readonly T2 item2;

        /// <summary>
        /// Returns the first element of the tuple
        /// </summary>
        public T1 Item1 { get { return item1; } }

        /// <summary>
        /// Returns the second element of the tuple
        /// </summary>
        public T2 Item2 { get { return item2; } }

        /// <summary>
        /// Create a new tuple value
        /// </summary>
        /// <param name="item1">First element of the tuple</param>
        /// <param name="item2">Second element of the tuple</param>
        public Tuple(T1 item1, T2 item2)
        {
            this.item1 = item1;
            this.item2 = item2;
        }

        public override string ToString()
        {
            return string.Format("Tuple({0}, {1})", Item1, Item2);
        }

        public override int GetHashCode()
        {
            // null elements hash as 0 so GetHashCode never throws (the previous
            // version dereferenced the elements unconditionally).
            int hash = 17;
            hash = hash * 23 + (item1 == null ? 0 : item1.GetHashCode());
            hash = hash * 23 + (item2 == null ? 0 : item2.GetHashCode());
            return hash;
        }

        public override bool Equals(object o)
        {
            // Equals(null) must return false; the previous version called
            // o.GetType() and threw a NullReferenceException instead.
            if (o == null || o.GetType() != typeof(Tuple<T1, T2>)) return false;
            var other = (Tuple<T1, T2>)o;
            return this == other;
        }

        public static bool operator ==(Tuple<T1, T2> a, Tuple<T1, T2> b)
        {
            // handle null operands and null elements without throwing.
            if (ReferenceEquals(a, b)) return true;
            if (ReferenceEquals(a, null) || ReferenceEquals(b, null)) return false;
            return object.Equals(a.item1, b.item1) && object.Equals(a.item2, b.item2);
        }

        public static bool operator !=(Tuple<T1, T2> a, Tuple<T1, T2> b)
        {
            return !(a == b);
        }

        /// <summary>
        /// Passes both elements of the tuple to the given delegate.
        /// </summary>
        public void Unpack(Action<T1, T2> unpackerDelegate)
        {
            unpackerDelegate(Item1, Item2);
        }
    }

    /// <summary>
    /// An immutable triple of values with structural (value-based) equality.
    /// </summary>
    public sealed class Tuple<T1, T2, T3>
    {
        private readonly T1 item1;
        private readonly T2 item2;
        private readonly T3 item3;

        /// <summary>
        /// Returns the first element of the tuple
        /// </summary>
        public T1 Item1 { get { return item1; } }

        /// <summary>
        /// Returns the second element of the tuple
        /// </summary>
        public T2 Item2 { get { return item2; } }

        /// <summary>
        /// Returns the third element of the tuple
        /// </summary>
        public T3 Item3 { get { return item3; } }

        /// <summary>
        /// Create a new tuple value
        /// </summary>
        /// <param name="item1">First element of the tuple</param>
        /// <param name="item2">Second element of the tuple</param>
        /// <param name="item3">Third element of the tuple</param>
        public Tuple(T1 item1, T2 item2, T3 item3)
        {
            this.item1 = item1;
            this.item2 = item2;
            this.item3 = item3;
        }

        // added for consistency with Tuple<T1, T2>, which already formats itself.
        public override string ToString()
        {
            return string.Format("Tuple({0}, {1}, {2})", Item1, Item2, Item3);
        }

        public override int GetHashCode()
        {
            int hash = 17;
            hash = hash * 23 + (item1 == null ? 0 : item1.GetHashCode());
            hash = hash * 23 + (item2 == null ? 0 : item2.GetHashCode());
            hash = hash * 23 + (item3 == null ? 0 : item3.GetHashCode());
            return hash;
        }

        public override bool Equals(object o)
        {
            if (o == null || o.GetType() != typeof(Tuple<T1, T2, T3>)) return false;
            var other = (Tuple<T1, T2, T3>)o;
            return this == other;
        }

        public static bool operator ==(Tuple<T1, T2, T3> a, Tuple<T1, T2, T3> b)
        {
            if (ReferenceEquals(a, b)) return true;
            if (ReferenceEquals(a, null) || ReferenceEquals(b, null)) return false;
            return object.Equals(a.item1, b.item1)
                && object.Equals(a.item2, b.item2)
                && object.Equals(a.item3, b.item3);
        }

        public static bool operator !=(Tuple<T1, T2, T3> a, Tuple<T1, T2, T3> b)
        {
            return !(a == b);
        }

        /// <summary>
        /// Passes all three elements of the tuple to the given delegate.
        /// </summary>
        public void Unpack(Action<T1, T2, T3> unpackerDelegate)
        {
            unpackerDelegate(Item1, Item2, Item3);
        }
    }

    /// <summary>
    /// An immutable quadruple of values with structural (value-based) equality.
    /// </summary>
    public sealed class Tuple<T1, T2, T3, T4>
    {
        private readonly T1 item1;
        private readonly T2 item2;
        private readonly T3 item3;
        private readonly T4 item4;

        /// <summary>
        /// Returns the first element of the tuple
        /// </summary>
        public T1 Item1 { get { return item1; } }

        /// <summary>
        /// Returns the second element of the tuple
        /// </summary>
        public T2 Item2 { get { return item2; } }

        /// <summary>
        /// Returns the third element of the tuple
        /// </summary>
        public T3 Item3 { get { return item3; } }

        /// <summary>
        /// Returns the fourth element of the tuple
        /// </summary>
        public T4 Item4 { get { return item4; } }

        /// <summary>
        /// Create a new tuple value
        /// </summary>
        /// <param name="item1">First element of the tuple</param>
        /// <param name="item2">Second element of the tuple</param>
        /// <param name="item3">Third element of the tuple</param>
        /// <param name="item4">Fourth element of the tuple</param>
        public Tuple(T1 item1, T2 item2, T3 item3, T4 item4)
        {
            this.item1 = item1;
            this.item2 = item2;
            this.item3 = item3;
            this.item4 = item4;
        }

        // added for consistency with Tuple<T1, T2>, which already formats itself.
        public override string ToString()
        {
            return string.Format("Tuple({0}, {1}, {2}, {3})", Item1, Item2, Item3, Item4);
        }

        public override int GetHashCode()
        {
            int hash = 17;
            hash = hash * 23 + (item1 == null ? 0 : item1.GetHashCode());
            hash = hash * 23 + (item2 == null ? 0 : item2.GetHashCode());
            hash = hash * 23 + (item3 == null ? 0 : item3.GetHashCode());
            hash = hash * 23 + (item4 == null ? 0 : item4.GetHashCode());
            return hash;
        }

        public override bool Equals(object o)
        {
            if (o == null || o.GetType() != typeof(Tuple<T1, T2, T3, T4>)) return false;
            var other = (Tuple<T1, T2, T3, T4>)o;
            return this == other;
        }

        public static bool operator ==(Tuple<T1, T2, T3, T4> a, Tuple<T1, T2, T3, T4> b)
        {
            if (ReferenceEquals(a, b)) return true;
            if (ReferenceEquals(a, null) || ReferenceEquals(b, null)) return false;
            return object.Equals(a.item1, b.item1)
                && object.Equals(a.item2, b.item2)
                && object.Equals(a.item3, b.item3)
                && object.Equals(a.item4, b.item4);
        }

        public static bool operator !=(Tuple<T1, T2, T3, T4> a, Tuple<T1, T2, T3, T4> b)
        {
            return !(a == b);
        }

        /// <summary>
        /// Passes all four elements of the tuple to the given delegate.
        /// </summary>
        public void Unpack(Action<T1, T2, T3, T4> unpackerDelegate)
        {
            unpackerDelegate(Item1, Item2, Item3, Item4);
        }
    }

    /// <summary>
    /// Utility class that simplifies creation of tuples by using
    /// method calls instead of constructor calls
    /// </summary>
    public static class Tuple
    {
        /// <summary>
        /// Creates a new tuple value with the specified elements. The method
        /// can be used without specifying the generic parameters, because C#
        /// compiler can usually infer the actual types.
        /// </summary>
        /// <param name="item1">First element of the tuple</param>
        /// <param name="second">Second element of the tuple</param>
        /// <returns>A newly created tuple</returns>
        public static Tuple<T1, T2> Create<T1, T2>(T1 item1, T2 second)
        {
            return new Tuple<T1, T2>(item1, second);
        }

        /// <summary>
        /// Creates a new tuple value with the specified elements. The method
        /// can be used without specifying the generic parameters, because C#
        /// compiler can usually infer the actual types.
        /// </summary>
        /// <param name="item1">First element of the tuple</param>
        /// <param name="second">Second element of the tuple</param>
        /// <param name="third">Third element of the tuple</param>
        /// <returns>A newly created tuple</returns>
        public static Tuple<T1, T2, T3> Create<T1, T2, T3>(T1 item1, T2 second, T3 third)
        {
            return new Tuple<T1, T2, T3>(item1, second, third);
        }

        /// <summary>
        /// Creates a new tuple value with the specified elements. The method
        /// can be used without specifying the generic parameters, because C#
        /// compiler can usually infer the actual types.
        /// </summary>
        /// <param name="item1">First element of the tuple</param>
        /// <param name="second">Second element of the tuple</param>
        /// <param name="third">Third element of the tuple</param>
        /// <param name="fourth">Fourth element of the tuple</param>
        /// <returns>A newly created tuple</returns>
        public static Tuple<T1, T2, T3, T4> Create<T1, T2, T3, T4>(T1 item1, T2 second, T3 third, T4 fourth)
        {
            return new Tuple<T1, T2, T3, T4>(item1, second, third, fourth);
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Communications;
using OpenSim.Services.Interfaces;
using IUserService = OpenSim.Framework.Communications.IUserService;

namespace OpenSim.ApplicationPlugins.Rest.Inventory
{
    /// <summary>
    /// Shared state and utility routines used by the REST inventory plugin:
    /// authentication scheme names, HTTP status/header constants, and a few
    /// encoding/dumping helpers.
    /// </summary>
    public class Rest
    {
        internal static readonly ILog Log =
            LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        internal static bool DEBUG = Log.IsDebugEnabled;

        /// <summary>
        /// Supported authentication schemes
        /// </summary>
        public const string AS_BASIC  = "Basic";  // simple user/password verification
        public const string AS_DIGEST = "Digest"; // password safe authentication

        /// Supported Digest algorithms
        public const string Digest_MD5     = "MD5";      // assumed default if omitted
        public const string Digest_MD5Sess = "MD5-sess"; // session-span - not good for REST?

        public const string Qop_Auth = "auth";     // authentication only
        public const string Qop_Int  = "auth-int"; // TODO

        /// <summary>
        /// These values have a single value for the whole
        /// domain and lifetime of the plugin handler. We
        /// make them static for ease of reference within
        /// the assembly. These are initialized by the
        /// RestHandler class during start-up.
        /// </summary>
        internal static IRestHandler  Plugin         = null;
        internal static OpenSimBase   main           = null;
        internal static string        Prefix         = null;
        internal static IConfig       Config         = null;
        internal static string        GodKey         = null;
        internal static bool          Authenticate   = true;
        internal static bool          Secure         = true;
        internal static bool          ExtendedEscape = true;
        internal static bool          DumpAsset      = false;
        internal static bool          Fill           = true;
        internal static bool          FlushEnabled   = true;
        internal static string        Realm          = "OpenSim REST";
        internal static string        Scheme         = AS_BASIC;
        internal static int           DumpLineSize   = 32; // Should be a multiple of 16 or (possibly) 4

        /// <summary>
        /// These are all dependent upon the Comms manager
        /// being initialized. So they have to be properties
        /// because the comms manager is now a module and is
        /// not guaranteed to be there when the rest handler
        /// initializes.
        /// </summary>
        internal static CommunicationsManager Comms
        {
            get { return main.CommunicationsManager; }
        }

        internal static IInventoryService InventoryServices
        {
            get { return main.SceneManager.CurrentOrFirstScene.InventoryService; }
        }

        internal static IUserService UserServices
        {
            get { return Comms.UserService; }
        }

        internal static IAvatarService AvatarServices
        {
            get { return Comms.AvatarService; }
        }

        internal static IAssetService AssetServices
        {
            get { return main.SceneManager.CurrentOrFirstScene.AssetService; }
        }

        /// <summary>
        /// HTTP requires that status information be generated for PUT
        /// and POST operations. This is in support of that. The
        /// operation verb gets substituted into the first string,
        /// and the completion code is inserted into the tail. The
        /// strings are put here to encourage consistency.
        /// </summary>
        internal static string statusHead = "<html><body><title>{0} status</title><break>";
        internal static string statusTail = "</body></html>";

        internal static Dictionary<int,string> HttpStatusDesc;

        static Rest()
        {
            HttpStatusDesc = new Dictionary<int,string>();
            if (HttpStatusCodeArray.Length != HttpStatusDescArray.Length)
            {
                // Previously this was Log.ErrorFormat("{0} ...") with no argument
                // supplied for {0}, which raised a FormatException before the
                // intended diagnostic/exception could be produced.
                Log.Error("HTTP Status Code and Description arrays do not match");
                throw new Exception("HTTP Status array discrepancy");
            }

            // Repackage the data into something more tractable. The sparse
            // nature of HTTP return codes makes an array a bad choice.
            for (int i=0; i<HttpStatusCodeArray.Length; i++)
            {
                HttpStatusDesc.Add(HttpStatusCodeArray[i], HttpStatusDescArray[i]);
            }
        }

        // Seconds since the Unix epoch (UTC).
        internal static int CreationDate
        {
            get { return (int) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds; }
        }

        internal static string MsgId
        {
            get { return Plugin.MsgId; }
        }

        internal static string RequestId
        {
            get { return Plugin.RequestId; }
        }

        internal static Encoding Encoding = Util.UTF8;

        /// <summary>
        /// Version control for REST implementation. This
        /// refers to the overall infrastructure represented
        /// by the following classes
        ///     RequestData
        ///     RequestInventoryPlugin
        ///     Rest
        /// It does not describe implementation classes such as
        /// RestInventoryServices, which may morph much more
        /// often. Such classes ARE dependent upon this however
        /// and should check it in their Initialize method.
        /// </summary>
        public static readonly float Version = 1.0F;
        public const  string Name = "REST 1.0";

        /// <summary>
        /// Currently defined HTTP methods.
        /// Only GET and HEAD are required to be
        /// supported by all servers. See Respond
        /// to see how these are handled.
        /// </summary>

        // REST AGENT 1.0 interpretations
        public const string GET     = "get";     // information retrieval - server state unchanged
        public const string HEAD    = "head";    // same as get except only the headers are returned.
        public const string POST    = "post";    // Replace the URI designated resource with the entity.
        public const string PUT     = "put";     // Add the entity to the context represented by the URI
        public const string DELETE  = "delete";  // Remove the URI designated resource from the server.

        public const string OPTIONS = "options"; //
        public const string TRACE   = "trace";   //
        public const string CONNECT = "connect"; //

        // Define this in one place...
        public const string UrlPathSeparator   = "/";
        public const string UrlMethodSeparator = ":";

        // Redirection qualifications
        public const bool PERMANENT = false;
        public const bool TEMPORARY = true;

        // Constant arrays used by String.Split
        public static readonly char C_SPACE   = ' ';
        public static readonly char C_SLASH   = '/';
        public static readonly char C_PATHSEP = '/';
        public static readonly char C_COLON   = ':';
        public static readonly char C_PLUS    = '+';
        public static readonly char C_PERIOD  = '.';
        public static readonly char C_COMMA   = ',';
        public static readonly char C_DQUOTE  = '"';

        public static readonly string CS_SPACE   = " ";
        public static readonly string CS_SLASH   = "/";
        public static readonly string CS_PATHSEP = "/";
        public static readonly string CS_COLON   = ":";
        public static readonly string CS_PLUS    = "+";
        public static readonly string CS_PERIOD  = ".";
        public static readonly string CS_COMMA   = ",";
        public static readonly string CS_DQUOTE  = "\"";

        public static readonly char[] CA_SPACE   = { C_SPACE };
        public static readonly char[] CA_SLASH   = { C_SLASH };
        public static readonly char[] CA_PATHSEP = { C_PATHSEP };
        public static readonly char[] CA_COLON   = { C_COLON };
        public static readonly char[] CA_PERIOD  = { C_PERIOD };
        public static readonly char[] CA_PLUS    = { C_PLUS };
        public static readonly char[] CA_COMMA   = { C_COMMA };
        public static readonly char[] CA_DQUOTE  = { C_DQUOTE };

        // HTTP Code Values (in value order)
        public const int HttpStatusCodeContinue           = 100;
        public const int HttpStatusCodeSwitchingProtocols = 101;

        public const int HttpStatusCodeOK                 = 200;
        public const int HttpStatusCodeCreated            = 201;
        public const int HttpStatusCodeAccepted           = 202;
        public const int HttpStatusCodeNonAuthoritative   = 203;
        public const int HttpStatusCodeNoContent          = 204;
        public const int HttpStatusCodeResetContent       = 205;
        public const int HttpStatusCodePartialContent     = 206;

        public const int HttpStatusCodeMultipleChoices    = 300;
        public const int HttpStatusCodePermanentRedirect  = 301;
        public const int HttpStatusCodeFound              = 302;
        public const int HttpStatusCodeSeeOther           = 303;
        public const int HttpStatusCodeNotModified        = 304;
        public const int HttpStatusCodeUseProxy           = 305;
        public const int HttpStatusCodeReserved306        = 306;
        public const int HttpStatusCodeTemporaryRedirect  = 307;

        public const int HttpStatusCodeBadRequest         = 400;
        public const int HttpStatusCodeNotAuthorized      = 401;
        public const int HttpStatusCodePaymentRequired    = 402;
        public const int HttpStatusCodeForbidden          = 403;
        public const int HttpStatusCodeNotFound           = 404;
        public const int HttpStatusCodeMethodNotAllowed   = 405;
        public const int HttpStatusCodeNotAcceptable      = 406;
        public const int HttpStatusCodeProxyAuthenticate  = 407;
        public const int HttpStatusCodeTimeOut            = 408;
        public const int HttpStatusCodeConflict           = 409;
        public const int HttpStatusCodeGone               = 410;
        public const int HttpStatusCodeLengthRequired     = 411;
        public const int HttpStatusCodePreconditionFailed = 412;
        public const int HttpStatusCodeEntityTooLarge     = 413;
        public const int HttpStatusCodeUriTooLarge        = 414;
        public const int HttpStatusCodeUnsupportedMedia   = 415;
        public const int HttpStatusCodeRangeNotSatsified  = 416;
        public const int HttpStatusCodeExpectationFailed  = 417;

        public const int HttpStatusCodeServerError        = 500;
        public const int HttpStatusCodeNotImplemented     = 501;
        public const int HttpStatusCodeBadGateway         = 502;
        public const int HttpStatusCodeServiceUnavailable = 503;
        public const int HttpStatusCodeGatewayTimeout     = 504;
        public const int HttpStatusCodeHttpVersionError   = 505;

        public static readonly int[] HttpStatusCodeArray = {
            HttpStatusCodeContinue,
            HttpStatusCodeSwitchingProtocols,
            HttpStatusCodeOK,
            HttpStatusCodeCreated,
            HttpStatusCodeAccepted,
            HttpStatusCodeNonAuthoritative,
            HttpStatusCodeNoContent,
            HttpStatusCodeResetContent,
            HttpStatusCodePartialContent,
            HttpStatusCodeMultipleChoices,
            HttpStatusCodePermanentRedirect,
            HttpStatusCodeFound,
            HttpStatusCodeSeeOther,
            HttpStatusCodeNotModified,
            HttpStatusCodeUseProxy,
            HttpStatusCodeReserved306,
            HttpStatusCodeTemporaryRedirect,
            HttpStatusCodeBadRequest,
            HttpStatusCodeNotAuthorized,
            HttpStatusCodePaymentRequired,
            HttpStatusCodeForbidden,
            HttpStatusCodeNotFound,
            HttpStatusCodeMethodNotAllowed,
            HttpStatusCodeNotAcceptable,
            HttpStatusCodeProxyAuthenticate,
            HttpStatusCodeTimeOut,
            HttpStatusCodeConflict,
            HttpStatusCodeGone,
            HttpStatusCodeLengthRequired,
            HttpStatusCodePreconditionFailed,
            HttpStatusCodeEntityTooLarge,
            HttpStatusCodeUriTooLarge,
            HttpStatusCodeUnsupportedMedia,
            HttpStatusCodeRangeNotSatsified,
            HttpStatusCodeExpectationFailed,
            HttpStatusCodeServerError,
            HttpStatusCodeNotImplemented,
            HttpStatusCodeBadGateway,
            HttpStatusCodeServiceUnavailable,
            HttpStatusCodeGatewayTimeout,
            HttpStatusCodeHttpVersionError
        };

        // HTTP Status Descriptions (in status code order)
        // This array must be kept strictly consistent with respect
        // to the status code array above.
        public static readonly string[] HttpStatusDescArray = {
            "Continue Request",
            "Switching Protocols",
            "OK",
            "CREATED",
            "ACCEPTED",
            "NON-AUTHORITATIVE INFORMATION",
            "NO CONTENT",
            "RESET CONTENT",
            "PARTIAL CONTENT",
            "MULTIPLE CHOICES",
            "PERMANENT REDIRECT",
            "FOUND",
            "SEE OTHER",
            "NOT MODIFIED",
            "USE PROXY",
            "RESERVED CODE 306",
            "TEMPORARY REDIRECT",
            "BAD REQUEST",
            "NOT AUTHORIZED",
            "PAYMENT REQUIRED",
            "FORBIDDEN",
            "NOT FOUND",
            "METHOD NOT ALLOWED",
            "NOT ACCEPTABLE",
            "PROXY AUTHENTICATION REQUIRED",
            "TIMEOUT",
            "CONFLICT",
            "GONE",
            "LENGTH REQUIRED",
            "PRECONDITION FAILED",
            "ENTITY TOO LARGE",
            "URI TOO LARGE",
            "UNSUPPORTED MEDIA",
            "RANGE NOT SATISFIED",
            "EXPECTATION FAILED",
            "SERVER ERROR",
            "NOT IMPLEMENTED",
            "BAD GATEWAY",
            "SERVICE UNAVAILABLE",
            "GATEWAY TIMEOUT",
            "HTTP VERSION NOT SUPPORTED"
        };

        // HTTP Headers
        public const string HttpHeaderAccept             = "Accept";
        public const string HttpHeaderAcceptCharset      = "Accept-Charset";
        public const string HttpHeaderAcceptEncoding     = "Accept-Encoding";
        public const string HttpHeaderAcceptLanguage     = "Accept-Language";
        public const string HttpHeaderAcceptRanges       = "Accept-Ranges";
        public const string HttpHeaderAge                = "Age";
        public const string HttpHeaderAllow              = "Allow";
        public const string HttpHeaderAuthorization      = "Authorization";
        public const string HttpHeaderCacheControl       = "Cache-Control";
        public const string HttpHeaderConnection         = "Connection";
        public const string HttpHeaderContentEncoding    = "Content-Encoding";
        public const string HttpHeaderContentLanguage    = "Content-Language";
        public const string HttpHeaderContentLength      = "Content-Length";
        public const string HttpHeaderContentLocation    = "Content-Location";
        public const string HttpHeaderContentMD5         = "Content-MD5";
        public const string HttpHeaderContentRange       = "Content-Range";
        public const string HttpHeaderContentType        = "Content-Type";
        public const string HttpHeaderDate               = "Date";
        public const string HttpHeaderETag               = "ETag";
        public const string HttpHeaderExpect             = "Expect";
        public const string HttpHeaderExpires            = "Expires";
        public const string HttpHeaderFrom               = "From";
        public const string HttpHeaderHost               = "Host";
        public const string HttpHeaderIfMatch            = "If-Match";
        public const string HttpHeaderIfModifiedSince    = "If-Modified-Since";
        public const string HttpHeaderIfNoneMatch        = "If-None-Match";
        public const string HttpHeaderIfRange            = "If-Range";
        public const string HttpHeaderIfUnmodifiedSince  = "If-Unmodified-Since";
        public const string HttpHeaderLastModified       = "Last-Modified";
        public const string HttpHeaderLocation           = "Location";
        public const string HttpHeaderMaxForwards        = "Max-Forwards";
        public const string HttpHeaderPragma             = "Pragma";
        public const string HttpHeaderProxyAuthenticate  = "Proxy-Authenticate";
        public const string HttpHeaderProxyAuthorization = "Proxy-Authorization";
        public const string HttpHeaderRange              = "Range";
        public const string HttpHeaderReferer            = "Referer";
        public const string HttpHeaderRetryAfter         = "Retry-After";
        public const string HttpHeaderServer             = "Server";
        public const string HttpHeaderTE                 = "TE";
        public const string HttpHeaderTrailer            = "Trailer";
        public const string HttpHeaderTransferEncoding   = "Transfer-Encoding";
        public const string HttpHeaderUpgrade            = "Upgrade";
        public const string HttpHeaderUserAgent          = "User-Agent";
        public const string HttpHeaderVary               = "Vary";
        public const string HttpHeaderVia                = "Via";
        public const string HttpHeaderWarning            = "Warning";
        public const string HttpHeaderWWWAuthenticate    = "WWW-Authenticate";

        /// Utility routines

        /// <summary>
        /// UTF-8 encodes the string and returns it as Base64.
        /// Returns String.Empty on any failure.
        /// </summary>
        public static string StringToBase64(string str)
        {
            try
            {
                // encode directly; the previously pre-allocated byte array was
                // immediately overwritten by GetBytes and served no purpose.
                return Convert.ToBase64String(Util.UTF8.GetBytes(str));
            }
            catch
            {
                return String.Empty;
            }
        }

        /// <summary>
        /// Decodes a Base64 string back to text.
        /// Returns String.Empty on any failure.
        /// </summary>
        public static string Base64ToString(string str)
        {
            try
            {
                return Util.Base64ToString(str);
            }
            catch
            {
                return String.Empty;
            }
        }

        private const string hvals = "0123456789abcdef";

        /// <summary>
        /// Parses a hexadecimal string (case-insensitive) into an int.
        /// Parsing stops silently at the first non-hex character;
        /// a null argument yields 0.
        /// </summary>
        public static int Hex2Int(string hex)
        {
            int val = 0;
            int sum = 0;
            string tmp = null;

            if (hex != null)
            {
                tmp = hex.ToLower();
                for (int i = 0; i < tmp.Length; i++)
                {
                    val = hvals.IndexOf(tmp[i]);
                    if (val == -1)
                        break;
                    sum *= 16;
                    sum += val;
                }
            }

            return sum;
        }

        // Nonce management
        public static string NonceGenerator()
        {
            return StringToBase64(CreationDate + Guid.NewGuid().ToString());
        }

        // Dump the specified data stream to the console as a classic
        // offset / hex / printable-character listing, DumpLineSize bytes per line.
        public static void Dump(byte[] data)
        {
            char[] buffer = new char[DumpLineSize];
            int cc = 0;

            for (int i = 0; i < data.Length; i++)
            {
                if (i % DumpLineSize == 0) Console.Write("\n{0}: ", i.ToString("d8"));

                if (i % 4 == 0) Console.Write(" ");

                Console.Write("{0}", data[i].ToString("x2"));

                if (data[i] < 127 && data[i] > 31)
                    buffer[i % DumpLineSize] = (char) data[i];
                else
                    buffer[i % DumpLineSize] = '.';

                cc++;

                if (i != 0 && (i + 1) % DumpLineSize == 0)
                {
                    Console.Write(" |" + (new String(buffer)) + "|");
                    cc = 0;
                }
            }

            // Finish off any incomplete line
            if (cc != 0)
            {
                for (int i = cc; i < DumpLineSize; i++)
                {
                    if (i % 4 == 0) Console.Write(" ");
                    // pad the two hex digit columns for the missing byte
                    Console.Write("  ");
                    buffer[i % DumpLineSize] = ' ';
                }

                Console.WriteLine(" |" + (new String(buffer)) + "|");
            }
            else
            {
                Console.Write("\n");
            }
        }
    }

    // Local exception type
    public class RestException : Exception
    {
        internal int    statusCode;
        internal string statusDesc;
        internal string httpmethod;
        internal string httppath;

        public RestException(string msg) : base(msg)
        {
        }
    }
}
using System;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using Alvas.Audio;
using System.IO;

namespace RecordPlay
{
    // Main window of the record/play demo. Binds WinForms controls to a single
    // RecordPlayer instance (Alvas.Audio) and forwards toolbar clicks to it.
    public partial class MainForm : Form
    {
        public MainForm()
        {
            InitializeComponent();
            Init();
        }

        // One-time wiring: dialog filters, control ranges, device enumeration,
        // and two-way data bindings between controls and the RecordPlayer.
        private void Init()
        {
            ofdAudio.Filter = "*.wav|*.wav|*.mp3|*.mp3|*.avi|*.avi|*.wma;*.wmv;*.asf;*.mpg;*.aif;*.au;*.snd;*.mid;*.rmi;*.ogg;*.flac;*.cda;*.ac3;*.dts;*.mka;*.mkv;*.mpc;*.m4a;*.aac;*.mpa;*.mp2;*.m1a;*.m2a|*.wma;*.wmv;*.asf;*.mpg;*.aif;*.au;*.snd;*.mid;*.rmi;*.ogg;*.flac;*.cda;*.ac3;*.dts;*.mka;*.mkv;*.mpc;*.m4a;*.aac;*.mpa;*.mp2;*.m1a;*.m2a|*.*|*.*";
            sfdAudio.Filter = "*.wav|*.wav|*.mp3|*.mp3|*.*|*.*";
            //cbMute.;
            tbPlayer.Maximum = ushort.MaxValue;// 65535;
            tbRecorder.Maximum = ushort.MaxValue;// 65535;
            tspProgress.Maximum = short.MaxValue;
            //
            // Unsubscribed again in MainForm_FormClosed.
            rp.PropertyChanged += new PropertyChangedEventHandler(rp_PropertyChanged);
            InitButtons(rp.State);
            EnumRecorders();
            EnumPlayers();
            cbMute.DataBindings.Add("Checked", rp, RecordPlayer.PlayerVolumeMuteProperty, false, DataSourceUpdateMode.OnPropertyChanged);
            tbPlayer.DataBindings.Add("Value", rp, RecordPlayer.PlayerVolumeProperty, false, DataSourceUpdateMode.OnPropertyChanged);
            //
            cbRecorderLine.DataSource = rp.RecorderLines;
            cbRecorderLine.DataBindings.Add("SelectedIndex", rp, RecordPlayer.RecorderLinesIndexProperty, false, DataSourceUpdateMode.OnPropertyChanged);
            tbRecorder.DataBindings.Add("Value", rp, RecordPlayer.RecorderVolumeProperty, false, DataSourceUpdateMode.OnPropertyChanged);
            //
            // Timeline steps are in milliseconds (1 s small / 10 s large).
            tbTimeline.SmallChange = 1000;
            tbTimeline.LargeChange = 10000;
            tbTimeline.DataBindings.Add("Maximum", rp, "Duration", false, DataSourceUpdateMode.OnPropertyChanged);
            tbTimeline.DataBindings.Add("Value", rp, "Position", false, DataSourceUpdateMode.OnPropertyChanged);
            //
            nudBufferSizeInMs.DataBindings.Add("Value", rp, RecordPlayer.BufferSizeInMSProperty, false, DataSourceUpdateMode.OnPropertyChanged);
            cbSkipSilent.DataBindings.Add("Checked", rp, RecordPlayer.SkipSilentProperty, false, DataSourceUpdateMode.OnPropertyChanged);
            nudSilentLevel.DataBindings.Add("Value", rp, RecordPlayer.SilentLevelProperty, false, DataSourceUpdateMode.OnPropertyChanged);
            // Volume scale is a percentage: 100 = unchanged, range 50..1000.
            nudVolumeLevelScale.Value = 100;
            nudVolumeLevelScale.Increment = 100;
            nudVolumeLevelScale.Minimum = 50;
            nudVolumeLevelScale.Maximum = 1000;
            nudVolumeLevelScale.DataBindings.Add("Value", rp, RecordPlayer.VolumeScaleProperty, false, DataSourceUpdateMode.OnPropertyChanged);
        }

        // Pushes RecordPlayer property changes into the status-bar widgets and
        // re-enables/disables toolbar buttons when the device state changes.
        void rp_PropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            switch (e.PropertyName)
            {
                case RecordPlayer.StateProperty:
                    InitButtons(rp.State);
                    break;
                case RecordPlayer.VolumeLevelProperty:
                    tspProgress.Value = rp.VolumeLevel;
                    break;
                case RecordPlayer.PositionProperty:
                case RecordPlayer.DurationProperty:
                    tsslPosition.Text = string.Format("{0} : {1}", rp.Position, rp.Duration);
                    break;
                default:
                    break;
            }
        }

        RecordPlayer rp = new RecordPlayer();

        // Enables/disables every toolbar button and device combo according to
        // the current DeviceState. Each case sets the complete button set so
        // no stale state leaks between transitions.
        private void InitButtons(DeviceState state)
        {
            tsslStatus.Text = state.ToString();
            //Console.WriteLine(state);
            switch (state)
            {
                case DeviceState.Opened:
                    tsbNew.Enabled = false;
                    tsbOpen.Enabled = false;
                    tsbRecord.Enabled = rp.CanRecord;
                    tsbRecordFrom.Enabled = rp.CanRecord;
                    tsbPlay.Enabled = true;
                    tsbPause.Enabled = false;
                    tsbStop.Enabled = false;
                    tsbForward.Enabled = true;
                    tsbBackward.Enabled = true;
                    tsbPlayFrom.Enabled = true;
                    tsbClose.Enabled = true;
                    // Show the audio format of the freshly opened file in the title bar.
                    Text = rp.FormatDetails.ToString();
                    cbPlayer.Enabled = true;
                    cbRecorder.Enabled = true;
                    nudBufferSizeInMs.Enabled = true;
                    break;
                case DeviceState.Stopped:
                    tsbNew.Enabled = false;
                    tsbOpen.Enabled = false;
                    tsbRecord.Enabled = rp.CanRecord;
                    tsbRecordFrom.Enabled = rp.CanRecord;
                    tsbPlay.Enabled = true;
                    tsbPause.Enabled = false;
                    tsbStop.Enabled = false;
                    tsbForward.Enabled = true;
                    tsbBackward.Enabled = true;
                    tsbPlayFrom.Enabled = true;
                    tsbClose.Enabled = true;
                    cbPlayer.Enabled = true;
                    cbRecorder.Enabled = true;
                    nudBufferSizeInMs.Enabled = true;
                    break;
                case DeviceState.Paused:
                    tsbNew.Enabled = false;
                    tsbOpen.Enabled = false;
                    tsbRecord.Enabled = rp.CanRecord;
                    tsbRecordFrom.Enabled = rp.CanRecord;
                    tsbPlay.Enabled = true;
                    tsbPause.Enabled = false;
                    tsbStop.Enabled = true;
                    tsbForward.Enabled = true;
                    tsbBackward.Enabled = true;
                    tsbPlayFrom.Enabled = true;
                    tsbClose.Enabled = false;
                    cbPlayer.Enabled = false;
                    cbRecorder.Enabled = false;
                    nudBufferSizeInMs.Enabled = true;
                    break;
                case DeviceState.InProgress:
                    // Actively playing or recording: only pause/stop/seek allowed.
                    tsbNew.Enabled = false;
                    tsbOpen.Enabled = false;
                    tsbRecord.Enabled = false;
                    tsbRecordFrom.Enabled = false;
                    tsbPlay.Enabled = false;
                    tsbPause.Enabled = true;
                    tsbStop.Enabled = true;
                    tsbForward.Enabled = true;
                    tsbBackward.Enabled = true;
                    tsbPlayFrom.Enabled = true;
                    tsbClose.Enabled = false;
                    cbPlayer.Enabled = false;
                    cbRecorder.Enabled = false;
                    nudBufferSizeInMs.Enabled = false;
                    break;
                case DeviceState.Closed:
                default:
                    tsbNew.Enabled = true;
                    tsbOpen.Enabled = true;
                    tsbRecord.Enabled = false;
                    tsbRecordFrom.Enabled = false;
                    tsbPlay.Enabled = false;
                    tsbPause.Enabled = false;
                    tsbStop.Enabled = false;
                    tsbForward.Enabled = false;
                    tsbBackward.Enabled = false;
                    tsbPlayFrom.Enabled = false;
                    tsbClose.Enabled = false;
                    cbPlayer.Enabled = true;
                    cbRecorder.Enabled = true;
                    nudBufferSizeInMs.Enabled = true;
                    break;
            }
        }

        private void tsbClose_Click(object sender, EventArgs e)
        {
            rp.Close();
        }

        // Seconds typed into the toolbar time box; 0 if unparsable.
        public int Time
        {
            get
            {
                try
                {
                    return int.Parse(tstTime.Text);
                }
                catch
                {
                    return 0;
                }
            }
        }

        private void tsbPlayFrom_Click(object sender, EventArgs e)
        {
            rp.Play(Position);
        }

        // Time box value converted to milliseconds; 0 if unparsable.
        public int Position
        {
            get
            {
                try
                {
                    return int.Parse(tstTime.Text) * 1000;
                }
                catch
                {
                    return 0;
                }
            }
        }

        // Seek step in milliseconds; defaults to 10 s if unparsable.
        public int Step
        {
            get
            {
                try
                {
                    return int.Parse(tstStep.Text) * 1000;
                }
                catch
                {
                    return 10 * 1000;
                }
            }
        }

        private void tsbBackward_Click(object sender, EventArgs e)
        {
            rp.Backward(Step);
        }

        private void tsbForward_Click(object sender, EventArgs e)
        {
            rp.Forward(Step);
        }

        private void tsbStop_Click(object sender, EventArgs e)
        {
            rp.Stop();
        }

        private void tsbPause_Click(object sender, EventArgs e)
        {
            rp.Pause();
        }

        private void tsbPlay_Click(object sender, EventArgs e)
        {
            rp.Play();
        }

        private void tsbRecord_Click(object sender, EventArgs e)
        {
            try
            {
                Record();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }

        // Records from the current position, or from the start when position <= 0.
        private void Record()
        {
            int pos = rp.Position;
            if (pos <= 0)
            {
                rp.Record();
            }
            else
            {
                rp.Record(pos);
            }
        }

        // "New" toolbar button: ask for a target file and audio format, then open
        // a writer for that container (.wav / .mp3 / raw for anything else).
        private void tsbNew_Click(object sender, EventArgs e)
        {
            if (sfdAudio.ShowDialog() == DialogResult.OK)
            {
                string fileName = sfdAudio.FileName;
                Stream stream = null;
                IAudioReadWriter arw = null;
                FormatDialog fd = null;
                switch (Path.GetExtension(fileName.ToLower()))
                {
                    case ".wav":
                        fd = new FormatDialog(false);
                        if (fd.ShowDialog() == DialogResult.OK)
                        {
                            stream = File.Create(fileName);
                            arw = new WaveReadWriter(stream, AudioCompressionManager.FormatBytes(fd.Format));
                            rp.Open(arw);
                        }
                        break;
                    case ".mp3":
                        fd = new FormatDialog(true);
                        if (fd.ShowDialog() == DialogResult.OK)
                        {
                            stream = File.Create(fileName);
                            arw = new Mp3ReadWriter(stream, fd.Format);
                            rp.Open(arw);
                        }
                        break;
                    default:
                        fd = new FormatDialog(false);
                        if (fd.ShowDialog() == DialogResult.OK)
                        {
                            stream = File.Create(fileName);
                            arw = new RawReadWriter(stream, fd.Format);
                            rp.Open(arw);
                        }
                        return;
                }
            }
        }

        // "Open" toolbar button: pick a reader implementation by file extension;
        // anything unrecognized falls back to DirectShow (DsReader).
        private void tsbOpen_Click(object sender, EventArgs e)
        {
            if (ofdAudio.ShowDialog() == DialogResult.OK)
            {
                string fileName = ofdAudio.FileName;
                IAudioReader arw = null;
                switch (Path.GetExtension(fileName.ToLower()))
                {
                    case ".avi":
                        arw = new AviReader(File.Open(fileName, FileMode.Open, FileAccess.ReadWrite));
                        if (!((AviReader)arw).HasAudio)
                        {
                            MessageBox.Show(string.Format("'{0}' file is not contains audio data", fileName));
                            return;
                        }
                        break;
                    case ".au":
                    case ".snd":
                        arw = new AuReader(File.OpenRead(fileName));
                        break;
                    case ".wav":
                        arw = new WaveReadWriter(File.Open(fileName, FileMode.Open, FileAccess.ReadWrite));
                        break;
                    case ".mp3":
                        arw = new Mp3ReadWriter(File.Open(fileName, FileMode.Open, FileAccess.ReadWrite));
                        break;
                    default:
                        arw = new DsReader(fileName);
                        if (!((DsReader)arw).HasAudio)
                        {
                            MessageBox.Show(string.Format("'{0}' file is not contains audio data", fileName));
                            return;
                        }
                        break;
                        //FormatDialog fd = new FormatDialog(false);
                        //if (fd.ShowDialog() == DialogResult.OK)
                        //{
                        //    arw = new RawReadWriter(stream, fd.Format);
                        //    break;
                        //}
                        //else
                        //{
                        //    return;
                        //}
                }
                rp.Open(arw);
            }
        }

        // Fills the player combo. Starts at index -1 because device -1 is the
        // system's default mapper device, so the combo holds count + 1 entries
        // and combo index maps to device id as (SelectedIndex - 1).
        private void EnumPlayers()
        {
            int count = PlayerEx.PlayerCount;
            if (count > 0)
            {
                for (int i = -1; i < count; i++)
                {
                    cbPlayer.Items.Add(PlayerEx.GetPlayerName(i));
                }
                cbPlayer.SelectedIndex = 0;
            }
        }

        // Same -1-based enumeration scheme as EnumPlayers, for recorders.
        private void EnumRecorders()
        {
            int count = RecorderEx.RecorderCount;
            if (count > 0)
            {
                for (int i = -1; i < count; i++)
                {
                    cbRecorder.Items.Add(RecorderEx.GetRecorderName(i));
                }
                cbRecorder.SelectedIndex = 0;
            }
        }

        private void cbPlayer_SelectedIndexChanged(object sender, EventArgs e)
        {
            // Combo index 0 corresponds to device id -1 (default mapper).
            rp.PlayerID = cbPlayer.SelectedIndex - 1;
        }

        private void cbRecorder_SelectedIndexChanged(object sender, EventArgs e)
        {
            rp.RecorderID = cbRecorder.SelectedIndex - 1;
        }

        private void MainForm_FormClosed(object sender, FormClosedEventArgs e)
        {
            // Detach the handler wired in Init so the form can be collected.
            rp.PropertyChanged -= rp_PropertyChanged;
        }

        private void tsbRecordFrom_Click(object sender, EventArgs e)
        {
            try
            {
                rp.Record(Position);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
    }
}
/// This code was generated by
/// \ / _    _  _|   _  _
///  | (_)\/(_)(_|\/| |(/_  v1.0.0
///       /       /
/// <summary>
/// PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
///
/// NetworkAccessProfileNetworkResource: the Network resources attached to a
/// Supersim Network Access Profile (list/add/fetch/remove).
/// </summary>

using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Clients;
using Twilio.Converters;
using Twilio.Exceptions;
using Twilio.Http;

namespace Twilio.Rest.Supersim.V1.NetworkAccessProfile
{

    public class NetworkAccessProfileNetworkResource : Resource
    {
        private static Request BuildReadRequest(ReadNetworkAccessProfileNetworkOptions options, ITwilioRestClient client)
        {
            return new Request(
                HttpMethod.Get,
                Rest.Domain.Supersim,
                "/v1/NetworkAccessProfiles/" + options.PathNetworkAccessProfileSid + "/Networks",
                queryParams: options.GetParams(),
                headerParams: null
            );
        }

        /// <summary>
        /// Retrieve the list of Network resources attached to a Network Access Profile.
        /// </summary>
        /// <param name="options"> Read NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static ResourceSet<NetworkAccessProfileNetworkResource> Read(ReadNetworkAccessProfileNetworkOptions options, ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = client.Request(BuildReadRequest(options, client));

            var page = Page<NetworkAccessProfileNetworkResource>.FromJson("networks", response.Content);
            return new ResourceSet<NetworkAccessProfileNetworkResource>(page, options, client);
        }

        #if !NET35
        /// <summary>
        /// Retrieve the list of Network resources attached to a Network Access Profile.
        /// </summary>
        /// <param name="options"> Read NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<ResourceSet<NetworkAccessProfileNetworkResource>> ReadAsync(ReadNetworkAccessProfileNetworkOptions options,
                                                                                                                    ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = await client.RequestAsync(BuildReadRequest(options, client));

            var page = Page<NetworkAccessProfileNetworkResource>.FromJson("networks", response.Content);
            return new ResourceSet<NetworkAccessProfileNetworkResource>(page, options, client);
        }
        #endif

        /// <summary>
        /// Retrieve the list of Network resources attached to a Network Access Profile.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="pageSize"> Page size </param>
        /// <param name="limit"> Record limit </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static ResourceSet<NetworkAccessProfileNetworkResource> Read(string pathNetworkAccessProfileSid,
                                                                            int? pageSize = null,
                                                                            long? limit = null,
                                                                            ITwilioRestClient client = null)
        {
            var options = new ReadNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid){PageSize = pageSize, Limit = limit};
            return Read(options, client);
        }

        #if !NET35
        /// <summary>
        /// Retrieve the list of Network resources attached to a Network Access Profile.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="pageSize"> Page size </param>
        /// <param name="limit"> Record limit </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<ResourceSet<NetworkAccessProfileNetworkResource>> ReadAsync(string pathNetworkAccessProfileSid,
                                                                                                                    int? pageSize = null,
                                                                                                                    long? limit = null,
                                                                                                                    ITwilioRestClient client = null)
        {
            var options = new ReadNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid){PageSize = pageSize, Limit = limit};
            return await ReadAsync(options, client);
        }
        #endif

        /// <summary>
        /// Fetch the target page of records
        /// </summary>
        /// <param name="targetUrl"> API-generated URL for the requested results page </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> The target page of records </returns>
        public static Page<NetworkAccessProfileNetworkResource> GetPage(string targetUrl, ITwilioRestClient client)
        {
            client = client ?? TwilioClient.GetRestClient();

            var request = new Request(
                HttpMethod.Get,
                targetUrl
            );

            var response = client.Request(request);
            return Page<NetworkAccessProfileNetworkResource>.FromJson("networks", response.Content);
        }

        /// <summary>
        /// Fetch the next page of records
        /// </summary>
        /// <param name="page"> current page of records </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> The next page of records </returns>
        public static Page<NetworkAccessProfileNetworkResource> NextPage(Page<NetworkAccessProfileNetworkResource> page, ITwilioRestClient client)
        {
            var request = new Request(
                HttpMethod.Get,
                page.GetNextPageUrl(Rest.Domain.Supersim)
            );

            var response = client.Request(request);
            return Page<NetworkAccessProfileNetworkResource>.FromJson("networks", response.Content);
        }

        /// <summary>
        /// Fetch the previous page of records
        /// </summary>
        /// <param name="page"> current page of records </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> The previous page of records </returns>
        public static Page<NetworkAccessProfileNetworkResource> PreviousPage(Page<NetworkAccessProfileNetworkResource> page, ITwilioRestClient client)
        {
            var request = new Request(
                HttpMethod.Get,
                page.GetPreviousPageUrl(Rest.Domain.Supersim)
            );

            var response = client.Request(request);
            return Page<NetworkAccessProfileNetworkResource>.FromJson("networks", response.Content);
        }

        private static Request BuildCreateRequest(CreateNetworkAccessProfileNetworkOptions options, ITwilioRestClient client)
        {
            return new Request(
                HttpMethod.Post,
                Rest.Domain.Supersim,
                "/v1/NetworkAccessProfiles/" + options.PathNetworkAccessProfileSid + "/Networks",
                postParams: options.GetParams(),
                headerParams: null
            );
        }

        /// <summary>
        /// Add a Network resource to the Network Access Profile resource.
        /// </summary>
        /// <param name="options"> Create NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static NetworkAccessProfileNetworkResource Create(CreateNetworkAccessProfileNetworkOptions options, ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = client.Request(BuildCreateRequest(options, client));
            return FromJson(response.Content);
        }

        #if !NET35
        /// <summary>
        /// Add a Network resource to the Network Access Profile resource.
        /// </summary>
        /// <param name="options"> Create NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<NetworkAccessProfileNetworkResource> CreateAsync(CreateNetworkAccessProfileNetworkOptions options,
                                                                                                         ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = await client.RequestAsync(BuildCreateRequest(options, client));
            return FromJson(response.Content);
        }
        #endif

        /// <summary>
        /// Add a Network resource to the Network Access Profile resource.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="network"> The SID that identifies the Network resource </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static NetworkAccessProfileNetworkResource Create(string pathNetworkAccessProfileSid, string network, ITwilioRestClient client = null)
        {
            var options = new CreateNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid, network);
            return Create(options, client);
        }

        #if !NET35
        /// <summary>
        /// Add a Network resource to the Network Access Profile resource.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="network"> The SID that identifies the Network resource </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<NetworkAccessProfileNetworkResource> CreateAsync(string pathNetworkAccessProfileSid,
                                                                                                         string network,
                                                                                                         ITwilioRestClient client = null)
        {
            var options = new CreateNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid, network);
            return await CreateAsync(options, client);
        }
        #endif

        private static Request BuildDeleteRequest(DeleteNetworkAccessProfileNetworkOptions options, ITwilioRestClient client)
        {
            return new Request(
                HttpMethod.Delete,
                Rest.Domain.Supersim,
                "/v1/NetworkAccessProfiles/" + options.PathNetworkAccessProfileSid + "/Networks/" + options.PathSid + "",
                queryParams: options.GetParams(),
                headerParams: null
            );
        }

        /// <summary>
        /// Remove a Network resource from the Network Access Profile.
        /// </summary>
        /// <param name="options"> Delete NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static bool Delete(DeleteNetworkAccessProfileNetworkOptions options, ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = client.Request(BuildDeleteRequest(options, client));
            return response.StatusCode == System.Net.HttpStatusCode.NoContent;
        }

        #if !NET35
        /// <summary>
        /// Remove a Network resource from the Network Access Profile.
        /// </summary>
        /// <param name="options"> Delete NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<bool> DeleteAsync(DeleteNetworkAccessProfileNetworkOptions options,
                                                                          ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = await client.RequestAsync(BuildDeleteRequest(options, client));
            return response.StatusCode == System.Net.HttpStatusCode.NoContent;
        }
        #endif

        /// <summary>
        /// Remove a Network resource from the Network Access Profile.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="pathSid"> The SID that identifies the Network resource </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static bool Delete(string pathNetworkAccessProfileSid, string pathSid, ITwilioRestClient client = null)
        {
            var options = new DeleteNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid, pathSid);
            return Delete(options, client);
        }

        #if !NET35
        /// <summary>
        /// Remove a Network resource from the Network Access Profile.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="pathSid"> The SID that identifies the Network resource </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<bool> DeleteAsync(string pathNetworkAccessProfileSid,
                                                                          string pathSid,
                                                                          ITwilioRestClient client = null)
        {
            var options = new DeleteNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid, pathSid);
            return await DeleteAsync(options, client);
        }
        #endif

        private static Request BuildFetchRequest(FetchNetworkAccessProfileNetworkOptions options, ITwilioRestClient client)
        {
            return new Request(
                HttpMethod.Get,
                Rest.Domain.Supersim,
                "/v1/NetworkAccessProfiles/" + options.PathNetworkAccessProfileSid + "/Networks/" + options.PathSid + "",
                queryParams: options.GetParams(),
                headerParams: null
            );
        }

        /// <summary>
        /// Fetch a Network Access Profile's Network resource.
        /// </summary>
        /// <param name="options"> Fetch NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static NetworkAccessProfileNetworkResource Fetch(FetchNetworkAccessProfileNetworkOptions options, ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = client.Request(BuildFetchRequest(options, client));
            return FromJson(response.Content);
        }

        #if !NET35
        /// <summary>
        /// Fetch a Network Access Profile's Network resource.
        /// </summary>
        /// <param name="options"> Fetch NetworkAccessProfileNetwork parameters </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<NetworkAccessProfileNetworkResource> FetchAsync(FetchNetworkAccessProfileNetworkOptions options,
                                                                                                        ITwilioRestClient client = null)
        {
            client = client ?? TwilioClient.GetRestClient();
            var response = await client.RequestAsync(BuildFetchRequest(options, client));
            return FromJson(response.Content);
        }
        #endif

        /// <summary>
        /// Fetch a Network Access Profile's Network resource.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="pathSid"> The SID of the resource to fetch </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> A single instance of NetworkAccessProfileNetwork </returns>
        public static NetworkAccessProfileNetworkResource Fetch(string pathNetworkAccessProfileSid, string pathSid, ITwilioRestClient client = null)
        {
            var options = new FetchNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid, pathSid);
            return Fetch(options, client);
        }

        #if !NET35
        /// <summary>
        /// Fetch a Network Access Profile's Network resource.
        /// </summary>
        /// <param name="pathNetworkAccessProfileSid"> The unique string that identifies the Network Access Profile resource
        ///                                        </param>
        /// <param name="pathSid"> The SID of the resource to fetch </param>
        /// <param name="client"> Client to make requests to Twilio </param>
        /// <returns> Task that resolves to A single instance of NetworkAccessProfileNetwork </returns>
        public static async System.Threading.Tasks.Task<NetworkAccessProfileNetworkResource> FetchAsync(string pathNetworkAccessProfileSid,
                                                                                                        string pathSid,
                                                                                                        ITwilioRestClient client = null)
        {
            var options = new FetchNetworkAccessProfileNetworkOptions(pathNetworkAccessProfileSid, pathSid);
            return await FetchAsync(options, client);
        }
        #endif

        /// <summary>
        /// Converts a JSON string into a NetworkAccessProfileNetworkResource object
        /// </summary>
        /// <param name="json"> Raw JSON string </param>
        /// <returns> NetworkAccessProfileNetworkResource object represented by the provided JSON </returns>
        public static NetworkAccessProfileNetworkResource FromJson(string json)
        {
            // Convert all checked exceptions to Runtime
            try
            {
                return JsonConvert.DeserializeObject<NetworkAccessProfileNetworkResource>(json);
            }
            catch (JsonException e)
            {
                throw new ApiException(e.Message, e);
            }
        }

        /// <summary>
        /// The unique string that identifies the resource
        /// </summary>
        [JsonProperty("sid")]
        public string Sid { get; private set; }
        /// <summary>
        /// The unique string that identifies the Network Access Profile resource
        /// </summary>
        [JsonProperty("network_access_profile_sid")]
        public string NetworkAccessProfileSid { get; private set; }
        /// <summary>
        /// A human readable identifier of this resource
        /// </summary>
        [JsonProperty("friendly_name")]
        public string FriendlyName { get; private set; }
        /// <summary>
        /// The ISO country code of the Network resource
        /// </summary>
        [JsonProperty("iso_country")]
        public string IsoCountry { get; private set; }
        /// <summary>
        /// The MCC/MNCs included in the resource
        /// </summary>
        [JsonProperty("identifiers")]
        public List<object> Identifiers { get; private set; }
        /// <summary>
        /// The absolute URL of the resource
        /// </summary>
        [JsonProperty("url")]
        public Uri Url { get; private set; }

        private NetworkAccessProfileNetworkResource()
        {

        }
    }

}
#define USE_CLIPPING_ALPHA_MASK

using System;
using MatterHackers.Agg.Image;
using MatterHackers.Agg.RasterizerScanline;
using MatterHackers.Agg.Transform;
using MatterHackers.Agg.UI;
using MatterHackers.Agg.UI.Examples;
using MatterHackers.Agg.VertexSource;
using MatterHackers.VectorMath;

namespace MatterHackers.Agg
{
	// AGG demo: renders the lion shape through an alpha mask built from N random
	// ellipses. The mask is a 1-byte-per-pixel gray buffer (alphaByteArray) that
	// is attached/detached around each use; USE_CLIPPING_ALPHA_MASK selects the
	// clipped mask variant (attached with a 20px inset on every side).
	public class alpha_mask2_application : GuiWidget, IDemoApp
	{
		// Raw 8-bit mask pixels, one byte per screen pixel (width * height).
		private byte[] alphaByteArray;

		// Last slider value for which the mask was generated (change detector).
		private double sliderValue;

		private MatterHackers.Agg.UI.Slider numMasksSlider;
		private LionShape lionShape = new LionShape();
		private MatterHackers.Agg.ScanlineRasterizer rasterizer = new ScanlineRasterizer();
		private ScanlineCachePacked8 scanlineCache = new ScanlineCachePacked8();
		// Lion transform state driven by the mouse (see UpdateLion).
		private double angle = 0;
		private double lionScale = 1.0;
		private double skewX = 0;
		private double skewY = 0;
		private ImageBuffer alphaMaskImageBuffer;
		private IAlphaMask alphaMask;

		public alpha_mask2_application()
		{
			AnchorAll();
			alphaMaskImageBuffer = new ImageBuffer();
#if USE_CLIPPING_ALPHA_MASK
			alphaMask = new AlphaMaskByteClipped(alphaMaskImageBuffer, 1, 0);
#else
			alphaMask = new AlphaMaskByteUnclipped(alphaMaskImageBuffer, 1, 0);
#endif
			// Slider controls how many random mask ellipses are generated (5..100).
			numMasksSlider = new UI.Slider(5, 5, 150, 12);
			sliderValue = 0.0;
			AddChild(numMasksSlider);
			numMasksSlider.SetRange(5, 100);
			numMasksSlider.Value = 10;
			numMasksSlider.Text = "N={0:F3}";
			numMasksSlider.OriginRelativeParent = Vector2.Zero;
		}

		public string Title { get; } = "Clipping to multiple rectangle regions";

		public string DemoCategory { get; } = "Bitmap";

		public string DemoDescription { get; } = "";

		// Rebuilds the cx*cy alpha mask: a fixed seed (1432) produces num-1 random
		// ellipses, plus one centered black-ringed red ellipse drawn last.
		// The buffer is detached again before returning.
		private unsafe void generate_alpha_mask(int cx, int cy)
		{
			alphaByteArray = new byte[cx * cy];
			{
#if USE_CLIPPING_ALPHA_MASK
				// Attach with a 20-pixel inset so the mask clips at the border.
				alphaMaskImageBuffer.AttachBuffer(alphaByteArray, 20 * cx + 20, cx - 40, cy - 40, cx, 8, 1);
#else
				alphaMaskImageBuffer.attach(alphaByteArray, (int)cx, (int)cy, cx, 1);
#endif
				ImageBuffer image = new ImageBuffer();
				image.Attach(alphaMaskImageBuffer, new blender_gray(1), 1, 0, 8);
				ImageClippingProxy clippingProxy = new ImageClippingProxy(image);
				ScanlineCachePacked8 sl = new ScanlineCachePacked8();
				clippingProxy.clear(new ColorF(0));
				VertexSource.Ellipse ellipseForMask = new MatterHackers.Agg.VertexSource.Ellipse();
				// Fixed seed: the mask layout is deterministic for a given size/N.
				System.Random randGenerator = new Random(1432);
				ScanlineRenderer scanlineRenderer = new ScanlineRenderer();
				int i;
				int num = (int)numMasksSlider.Value;
				for (i = 0; i < num; i++)
				{
					if (i == num - 1)
					{
						// Final ellipse: centered ring (black outer, red inner).
						ellipseForMask.init(Width / 2, Height / 2, 110, 110, 100);
						rasterizer.add_path(ellipseForMask);
						scanlineRenderer.RenderSolid(clippingProxy, rasterizer, sl, new Color(0, 0, 0, 255));
						ellipseForMask.init(ellipseForMask.originX, ellipseForMask.originY,
							ellipseForMask.radiusX - 10, ellipseForMask.radiusY - 10, 100);
						rasterizer.add_path(ellipseForMask);
						scanlineRenderer.RenderSolid(clippingProxy, rasterizer, sl, new Color(255, 0, 0, 255));
					}
					else
					{
						ellipseForMask.init(randGenerator.Next() % cx, randGenerator.Next() % cy,
							randGenerator.Next() % 100 + 20, randGenerator.Next() % 100 + 20, 100);
						// set the color to draw into the alpha channel.
						// there is not very much reason to set the alpha as you will get the amount of
						// transparency based on the color you draw. (you might want some type of different edeg effect but it will be minor).
						rasterizer.add_path(ellipseForMask);
						scanlineRenderer.RenderSolid(clippingProxy, rasterizer, sl,
							new Color((int)((float)i / (float)num * 255), 0, 0, 255));
					}
				}

				alphaMaskImageBuffer.DettachBuffer();
			}
		}

		public override void OnBoundsChanged(EventArgs e)
		{
			// Regenerate the mask whenever the widget is resized to a valid size.
			if (Width > 0 && Height > 0)
			{
				generate_alpha_mask((int)Math.Ceiling(Width), (int)Math.Ceiling(Height));
			}

			base.OnBoundsChanged(e);
		}

		// Draws a checkerboard background directly, then the lion through the
		// alpha mask. The mask buffer is attached for the duration of the draw
		// and detached before handing off to base.OnDraw.
		public override void OnDraw(Graphics2D graphics2D)
		{
			ImageBuffer widgetsSubImage = ImageBuffer.NewSubImageReference(graphics2D.DestImage, graphics2D.GetClippingRect());

			int width = (int)widgetsSubImage.Width;
			int height = (int)widgetsSubImage.Height;

			// Lazily rebuild the mask when the slider moved since the last draw.
			if (numMasksSlider.Value != sliderValue)
			{
				generate_alpha_mask(width, height);
				sliderValue = numMasksSlider.Value;
			}

			rasterizer.SetVectorClipBox(0, 0, width, height);

			unsafe
			{
				alphaMaskImageBuffer.AttachBuffer(alphaByteArray, 0, width, height, width, 8, 1);

				MatterHackers.Agg.Image.AlphaMaskAdaptor imageAlphaMaskAdaptor = new MatterHackers.Agg.Image.AlphaMaskAdaptor(widgetsSubImage, alphaMask);
				ImageClippingProxy alphaMaskClippingProxy = new ImageClippingProxy(imageAlphaMaskAdaptor);
				ImageClippingProxy clippingProxy = new ImageClippingProxy(widgetsSubImage);

				// Mouse-driven lion transform: center, scale, rotate, skew, recenter.
				Affine transform = Affine.NewIdentity();
				transform *= Affine.NewTranslation(-lionShape.Center.X, -lionShape.Center.Y);
				transform *= Affine.NewScaling(lionScale, lionScale);
				transform *= Affine.NewRotation(angle + Math.PI);
				transform *= Affine.NewSkewing(skewX / 1000.0, skewY / 1000.0);
				transform *= Affine.NewTranslation(Width / 2, Height / 2);

				clippingProxy.clear(new ColorF(1, 1, 1));

				ScanlineRenderer scanlineRenderer = new ScanlineRenderer();

				// draw a background to show how the mask is working better
				int RectWidth = 30;
				for (int i = 0; i < 40; i++)
				{
					for (int j = 0; j < 40; j++)
					{
						if ((i + j) % 2 != 0)
						{
							VertexSource.RoundedRect rect = new VertexSource.RoundedRect(i * RectWidth, j * RectWidth, (i + 1) * RectWidth, (j + 1) * RectWidth, 0);
							rect.normalize_radius();

							// Drawing as an outline
							rasterizer.add_path(rect);
							scanlineRenderer.RenderSolid(clippingProxy, rasterizer, scanlineCache, new Color(.9, .9, .9));
						}
					}
				}

				//int x, y;

				// Render the lion (through the alpha-masked proxy)
				foreach (var shape in lionShape.Shapes)
				{
					rasterizer.add_path(new VertexSourceApplyTransform(shape.VertexStorage, transform));
					scanlineRenderer.RenderSolid(alphaMaskClippingProxy, rasterizer, scanlineCache, shape.Color);
				}

				// NOTE(review): the original C++ AGG demo additionally rendered random
				// Bresenham lines/markers, anti-aliased lines, and gradient circles
				// through the mask; that block was never ported to C# and sat here as
				// ~40 lines of commented-out C++. Removed for readability - see the
				// upstream AGG alpha_mask2 demo source if porting is ever desired.

				//m_num_cb.Render(g_rasterizer, g_scanline, clippingProxy);
			}

			alphaMaskImageBuffer.DettachBuffer();
			base.OnDraw(graphics2D);
		}

		// Converts a mouse position (relative to the widget center) into the
		// lion's rotation angle and scale (distance / 100).
		private void doTransform(double width, double height, double x, double y)
		{
			x -= width / 2;
			y -= height / 2;
			angle = Math.Atan2(y, x);
			lionScale = Math.Sqrt(y * y + x * x) / 100.0;
		}

		// Left button: rotate/scale the lion toward the cursor.
		// Right button: skew by the raw cursor coordinates (divided by 1000 in OnDraw).
		private void UpdateLion(MouseEventArgs mouseEvent)
		{
			double x = mouseEvent.X;
			double y = mouseEvent.Y;
			if (mouseEvent.Button == MouseButtons.Left)
			{
				int width = (int)Width;
				int height = (int)Height;
				doTransform(width, height, x, y);
				Invalidate();
			}

			if (mouseEvent.Button == MouseButtons.Right)
			{
				skewX = x;
				skewY = y;
				Invalidate();
			}
		}

		public override void OnMouseDown(MouseEventArgs mouseEvent)
		{
			base.OnMouseDown(mouseEvent);
			if (MouseCaptured)
			{
				UpdateLion(mouseEvent);
			}
		}

		public override void OnMouseMove(MouseEventArgs mouseEvent)
		{
			base.OnMouseMove(mouseEvent);
			if (MouseCaptured)
			{
				UpdateLion(mouseEvent);
			}
		}

		[STAThread]
		public static void Main(string[] args)
		{
			var demoWidget = new alpha_mask2_application();

			var systemWindow = new SystemWindow(512, 400);
			systemWindow.Title = demoWidget.Title;
			systemWindow.AddChild(demoWidget);
			systemWindow.ShowAsSystemWindow();
		}
	}
}
namespace InControl.NativeProfile
{
	// @cond nodoc
	/// <summary>
	/// Native device profile for the Sony DualShock 4 controller on Windows.
	/// This class has no behavior of its own: it is pure mapping data. The
	/// [AutoDiscover] attribute lets InControl find and register the profile
	/// automatically; the matchers identify the device by USB vendor/product ID,
	/// and the mapping tables translate raw native button/analog indices into
	/// InControl control types.
	/// </summary>
	[AutoDiscover]
	public class PlayStation4WindowsNativeProfile : NativeInputDeviceProfile
	{
		public PlayStation4WindowsNativeProfile()
		{
			Name = "PlayStation 4 Controller";
			Meta = "PlayStation 4 Controller on Windows";
			// Link = "http://www.amazon.com/DualShock-Wireless-Controller-PlayStation-Black-4/dp/B00BGA9X9W";

			DeviceClass = InputDeviceClass.Controller;
			DeviceStyle = InputDeviceStyle.PlayStation4;

			// Profile applies on Windows only (other platforms have separate profiles).
			IncludePlatforms = new[] {
				"Windows"
			};

			// All three matchers share Sony's vendor ID 0x54c; the three product
			// IDs cover different hardware revisions of the controller/adapter.
			// NOTE(review): exact revision meaning of each product ID is not
			// established by this file — confirm against vendor documentation.
			Matchers = new[] {
				new NativeInputDeviceMatcher {
					VendorID = 0x54c,
					ProductID = 0x5c4,
				},
				new NativeInputDeviceMatcher {
					VendorID = 0x54c,
					ProductID = 0x9cc,
				},
				new NativeInputDeviceMatcher {
					VendorID = 0x54c,
					ProductID = 0xba0,
				},
			};

			// Raw button index -> InControl control. Indices 6 and 7 are
			// intentionally unmapped here; the triggers are reported through
			// the analog mappings below instead.
			ButtonMappings = new[] {
				new InputControlMapping {
					Handle = "Square",
					Target = InputControlType.Action3,
					Source = Button( 0 ),
				},
				new InputControlMapping {
					Handle = "Cross",
					Target = InputControlType.Action1,
					Source = Button( 1 ),
				},
				new InputControlMapping {
					Handle = "Circle",
					Target = InputControlType.Action2,
					Source = Button( 2 ),
				},
				new InputControlMapping {
					Handle = "Triangle",
					Target = InputControlType.Action4,
					Source = Button( 3 ),
				},
				new InputControlMapping {
					Handle = "Left Bumper",
					Target = InputControlType.LeftBumper,
					Source = Button( 4 ),
				},
				new InputControlMapping {
					Handle = "Right Bumper",
					Target = InputControlType.RightBumper,
					Source = Button( 5 ),
				},
				new InputControlMapping {
					Handle = "Share",
					Target = InputControlType.Share,
					Source = Button( 8 ),
				},
				new InputControlMapping {
					Handle = "Options",
					Target = InputControlType.Options,
					Source = Button( 9 ),
				},
				new InputControlMapping {
					Handle = "Left Stick Button",
					Target = InputControlType.LeftStickButton,
					Source = Button( 10 ),
				},
				new InputControlMapping {
					Handle = "Right Stick Button",
					Target = InputControlType.RightStickButton,
					Source = Button( 11 ),
				},
				new InputControlMapping {
					Handle = "System",
					Target = InputControlType.System,
					Source = Button( 12 ),
				},
				new InputControlMapping {
					Handle = "Touch Pad Tap",
					Target = InputControlType.TouchPadButton,
					Source = Button( 13 ),
				},
			};

			// Raw analog axis -> InControl control. Each full-range physical axis
			// is split into two half-range controls (Up/Down, Left/Right), with
			// SourceRange selecting the half and TargetRange normalizing to 0..1.
			// NOTE(review): the RIGHT stick is mapped to analogs 0/1 and the LEFT
			// stick to analogs 2/3 — the reverse of the usual ordering. Presumably
			// this matches the raw HID report on Windows; verify on hardware.
			AnalogMappings = new[] {
				new InputControlMapping {
					Handle = "Right Stick Up",
					Target = InputControlType.RightStickUp,
					Source = Analog( 0 ),
					SourceRange = InputRange.ZeroToMinusOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Right Stick Down",
					Target = InputControlType.RightStickDown,
					Source = Analog( 0 ),
					SourceRange = InputRange.ZeroToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Right Stick Left",
					Target = InputControlType.RightStickLeft,
					Source = Analog( 1 ),
					SourceRange = InputRange.ZeroToMinusOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Right Stick Right",
					Target = InputControlType.RightStickRight,
					Source = Analog( 1 ),
					SourceRange = InputRange.ZeroToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Left Stick Up",
					Target = InputControlType.LeftStickUp,
					Source = Analog( 2 ),
					SourceRange = InputRange.ZeroToMinusOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Left Stick Down",
					Target = InputControlType.LeftStickDown,
					Source = Analog( 2 ),
					SourceRange = InputRange.ZeroToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Left Stick Left",
					Target = InputControlType.LeftStickLeft,
					Source = Analog( 3 ),
					SourceRange = InputRange.ZeroToMinusOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Left Stick Right",
					Target = InputControlType.LeftStickRight,
					Source = Analog( 3 ),
					SourceRange = InputRange.ZeroToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				// Triggers rest at -1 and report +1 when fully pressed, hence the
				// MinusOneToOne source range compressed into 0..1.
				new InputControlMapping {
					Handle = "Right Trigger",
					Target = InputControlType.RightTrigger,
					Source = Analog( 4 ),
					SourceRange = InputRange.MinusOneToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "Left Trigger",
					Target = InputControlType.LeftTrigger,
					Source = Analog( 5 ),
					SourceRange = InputRange.MinusOneToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				// D-pad is reported as two axes (6 = horizontal, 7 = vertical).
				// NOTE(review): Up uses ZeroToOne and Down uses ZeroToMinusOne,
				// i.e. positive axis-7 = up here — confirm the polarity is intended.
				new InputControlMapping {
					Handle = "DPad Left",
					Target = InputControlType.DPadLeft,
					Source = Analog( 6 ),
					SourceRange = InputRange.ZeroToMinusOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "DPad Right",
					Target = InputControlType.DPadRight,
					Source = Analog( 6 ),
					SourceRange = InputRange.ZeroToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "DPad Up",
					Target = InputControlType.DPadUp,
					Source = Analog( 7 ),
					SourceRange = InputRange.ZeroToOne,
					TargetRange = InputRange.ZeroToOne,
				},
				new InputControlMapping {
					Handle = "DPad Down",
					Target = InputControlType.DPadDown,
					Source = Analog( 7 ),
					SourceRange = InputRange.ZeroToMinusOne,
					TargetRange = InputRange.ZeroToOne,
				},
			};
		}
	}
	// @endcond
}
using UnityEngine;
using System.Collections;

[AddComponentMenu("2D Toolkit/Sprite/tk2dClippedSprite")]
[RequireComponent(typeof(MeshRenderer))]
[RequireComponent(typeof(MeshFilter))]
[ExecuteInEditMode]
/// <summary>
/// Sprite implementation that clips the sprite using normalized clip coordinates.
/// The visible region is defined by two normalized corners (_clipBottomLeft,
/// _clipTopRight) in [0..1] sprite space; changing either rebuilds the quad mesh.
/// </summary>
public class tk2dClippedSprite : tk2dBaseSprite
{
	// Generated quad mesh and its per-channel buffers, (re)filled by Build().
	Mesh mesh;
	Vector2[] meshUvs;
	Vector3[] meshVertices;
	Color32[] meshColors;
	// Normals/tangents are only allocated when the sprite definition carries them.
	Vector3[] meshNormals = null;
	Vector4[] meshTangents = null;
	// NOTE(review): meshIndices is never assigned or read in this class —
	// Build() uses a local "indices" array instead. Looks like dead state.
	int[] meshIndices;

	// Normalized clip corners. Public serialized fields, but callers should use
	// the clipBottomLeft / clipTopRight properties, which rebuild on change.
	public Vector2 _clipBottomLeft = new Vector2(0, 0);
	public Vector2 _clipTopRight = new Vector2(1, 1);

	// Temp cached variables
	Rect _clipRect = new Rect(0, 0, 0, 0);

	/// <summary>
	/// Sets the clip rectangle
	/// 0, 0, 1, 1 = display the entire sprite
	/// </summary>
	public Rect ClipRect {
		// Reuses the cached Rect to avoid allocating a new struct copy per access.
		get { _clipRect.Set( _clipBottomLeft.x, _clipBottomLeft.y, _clipTopRight.x - _clipBottomLeft.x, _clipTopRight.y - _clipBottomLeft.y ); return _clipRect; }
		// Delegates to the corner properties so the mesh/collider are rebuilt.
		set { Vector2 v = new Vector2( value.x, value.y ); clipBottomLeft = v; v.x += value.width; v.y += value.height; clipTopRight = v; }
	}

	/// <summary>
	/// Sets the bottom left clip area.
	/// 0, 0 = display full sprite
	/// </summary>
	public Vector2 clipBottomLeft {
		get { return _clipBottomLeft; }
		set {
			// Rebuild only when the value actually changes.
			if (value != _clipBottomLeft) {
				_clipBottomLeft = new Vector2(value.x, value.y);
				Build();
				UpdateCollider();
			}
		}
	}

	/// <summary>
	/// Sets the top right clip area
	/// 1, 1 = display full sprite
	/// </summary>
	public Vector2 clipTopRight {
		get { return _clipTopRight; }
		set {
			if (value != _clipTopRight) {
				_clipTopRight = new Vector2(value.x, value.y);
				Build();
				UpdateCollider();
			}
		}
	}

	[SerializeField]
	protected bool _createBoxCollider = false;

	/// <summary>
	/// Create a trimmed box collider for this sprite
	/// </summary>
	public bool CreateBoxCollider {
		get { return _createBoxCollider; }
		set {
			if (_createBoxCollider != value) {
				_createBoxCollider = value;
				UpdateCollider();
			}
		}
	}

	// Hides tk2dBaseSprite.Awake (non-virtual); creates the mesh owned by this
	// component and performs the initial Build when a collection is assigned.
	new void Awake()
	{
		base.Awake();

		// Create mesh, independently to everything else
		mesh = new Mesh();
#if !UNITY_3_5
		mesh.MarkDynamic();
#endif
		// DontSave: the mesh is procedurally generated and must not be serialized.
		mesh.hideFlags = HideFlags.DontSave;
		GetComponent<MeshFilter>().mesh = mesh;

		// This will not be set when instantiating in code
		// In that case, Build will need to be called
		if (Collection)
		{
			// reset spriteId if outside bounds
			// this is when the sprite collection data is corrupt
			if (_spriteId < 0 || _spriteId >= Collection.Count)
				_spriteId = 0;

			Build();
		}
	}

	// Destroys the generated mesh; DestroyImmediate is required in edit mode.
	protected void OnDestroy()
	{
		if (mesh)
		{
#if UNITY_EDITOR
			DestroyImmediate(mesh);
#else
			Destroy(mesh);
#endif
		}
	}

	// Fills dest with the sprite color; only for simple 4-vertex sprites.
	new protected void SetColors(Color32[] dest)
	{
		if (CurrentSprite.positions.Length == 4) {
			tk2dSpriteGeomGen.SetSpriteColors (dest, 0, 4, _color, collectionInst.premultipliedAlpha);
		}
	}

	// Calculated center and extents
	Vector3 boundsCenter = Vector3.zero, boundsExtents = Vector3.zero;

	// Fills the vertex/uv buffers for the clipped quad and records the bounds
	// used for collider sizing. Must be called after Build() has allocated the
	// mesh buffers (meshNormals/meshTangents are dereferenced unconditionally).
	protected void SetGeometry(Vector3[] vertices, Vector2[] uvs)
	{
		var sprite = CurrentSprite;
		float colliderOffsetZ = ( boxCollider != null ) ? ( boxCollider.center.z ) : 0.0f;
		float colliderExtentZ = ( boxCollider != null ) ? ( boxCollider.size.z * 0.5f ) : 0.5f;
		tk2dSpriteGeomGen.SetClippedSpriteGeom( meshVertices, meshUvs, 0, out boundsCenter, out boundsExtents, sprite, _scale, _clipBottomLeft, _clipTopRight, colliderOffsetZ, colliderExtentZ );
		if (meshNormals.Length > 0 || meshTangents.Length > 0) {
			tk2dSpriteGeomGen.SetSpriteVertexNormals(meshVertices, meshVertices[0], meshVertices[3], sprite.normals, sprite.tangents, meshNormals, meshTangents);
		}

		// Only do this when there are exactly 4 polys to a sprite (i.e. the sprite isn't diced, and isnt a more complex mesh)
		if (sprite.positions.Length != 4 || sprite.complexGeometry)
		{
			// Only supports normal sprites
			for (int i = 0; i < vertices.Length; ++i)
				vertices[i] = Vector3.zero;
		}
	}

	/// <summary>
	/// Rebuilds the clipped quad from scratch: allocates the channel buffers,
	/// fills geometry/colors/indices and pushes everything into the Mesh.
	/// </summary>
	public override void Build()
	{
		var spriteDef = CurrentSprite;
		meshUvs = new Vector2[4];
		meshVertices = new Vector3[4];
		meshColors = new Color32[4];
		// Allocate optional channels only when the sprite definition has them.
		meshNormals = new Vector3[0];
		meshTangents = new Vector4[0];
		if (spriteDef.normals != null && spriteDef.normals.Length > 0) {
			meshNormals = new Vector3[4];
		}
		if (spriteDef.tangents != null && spriteDef.tangents.Length > 0) {
			meshTangents = new Vector4[4];
		}
		SetGeometry(meshVertices, meshUvs);
		SetColors(meshColors);

		// Mesh may have been destroyed (e.g. after an editor reload); recreate it.
		if (mesh == null)
		{
			mesh = new Mesh();
#if !UNITY_3_5
			mesh.MarkDynamic();
#endif
			mesh.hideFlags = HideFlags.DontSave;
		}
		else
		{
			mesh.Clear();
		}
		mesh.vertices = meshVertices;
		mesh.colors32 = meshColors;
		mesh.uv = meshUvs;
		mesh.normals = meshNormals;
		mesh.tangents = meshTangents;
		int[] indices = new int[6];
		tk2dSpriteGeomGen.SetClippedSpriteIndices(indices, 0, 0, CurrentSprite);
		mesh.triangles = indices;
		mesh.RecalculateBounds();
		mesh.bounds = AdjustedMeshBounds( mesh.bounds, renderLayer );

		GetComponent<MeshFilter>().mesh = mesh;

		UpdateCollider();
		UpdateMaterial();
	}

	protected override void UpdateGeometry() { UpdateGeometryImpl(); }
	protected override void UpdateColors() { UpdateColorsImpl(); }
	protected override void UpdateVertices() { UpdateGeometryImpl(); }

	// Refreshes only the color channel; falls back to a full Build when the
	// buffers have not been allocated yet.
	protected void UpdateColorsImpl()
	{
		if (meshColors == null || meshColors.Length == 0) {
			Build();
		}
		else {
			SetColors(meshColors);
			mesh.colors32 = meshColors;
		}
	}

	// Refreshes vertices/uvs/normals/tangents in place; falls back to Build
	// when the buffers have not been allocated yet.
	protected void UpdateGeometryImpl()
	{
#if UNITY_EDITOR
		// This can happen with prefabs in the inspector
		if (mesh == null)
			return;
#endif
		if (meshVertices == null || meshVertices.Length == 0) {
			Build();
		}
		else {
			SetGeometry(meshVertices, meshUvs);
			mesh.vertices = meshVertices;
			mesh.uv = meshUvs;
			mesh.normals = meshNormals;
			mesh.tangents = meshTangents;
			mesh.RecalculateBounds();
			mesh.bounds = AdjustedMeshBounds( mesh.bounds, renderLayer );
		}
	}

	#region Collider
	// Resizes the attached 3D or 2D box collider to the bounds computed in
	// SetGeometry. The nested #if handles the Unity API rename of
	// BoxCollider2D.center to .offset.
	protected override void UpdateCollider()
	{
		if (CreateBoxCollider)
		{
			if (CurrentSprite.physicsEngine == tk2dSpriteDefinition.PhysicsEngine.Physics3D) {
				if (boxCollider != null)
				{
					boxCollider.size = 2 * boundsExtents;
					boxCollider.center = boundsCenter;
				}
			}
			else if (CurrentSprite.physicsEngine == tk2dSpriteDefinition.PhysicsEngine.Physics2D) {
#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2)
				if (boxCollider2D != null)
				{
					boxCollider2D.size = 2 * boundsExtents;
#if (UNITY_3_5 || UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6 || UNITY_4_7 || UNITY_4_8 || UNITY_4_9)
					boxCollider2D.center = boundsCenter;
#else
					boxCollider2D.offset = boundsCenter;
#endif
				}
#endif
			}
		}
	}

#if UNITY_EDITOR
	// Draws an invisible cube matching the mesh bounds so the sprite remains
	// click-selectable in the scene view (Color.clear = not rendered).
	void OnDrawGizmos() {
		if (mesh != null) {
			Bounds b = mesh.bounds;
			Gizmos.color = Color.clear;
			Gizmos.matrix = transform.localToWorldMatrix;
			Gizmos.DrawCube(b.center, b.extents * 2);
			Gizmos.matrix = Matrix4x4.identity;
			Gizmos.color = Color.white;
		}
	}
#endif

	protected override void CreateCollider() {
		UpdateCollider();
	}

#if UNITY_EDITOR
	public override void EditMode__CreateCollider() {
		if (CreateBoxCollider) {
			base.CreateSimpleBoxCollider();
		}

		UpdateCollider();
	}
#endif
	#endregion

	// Assigns the sprite's material only when it actually changed, avoiding an
	// unnecessary material instantiation via renderer.material.
	protected override void UpdateMaterial()
	{
		Renderer renderer = GetComponent<Renderer>();
		if (renderer.sharedMaterial != collectionInst.spriteDefinitions[spriteId].materialInst)
			renderer.material = collectionInst.spriteDefinitions[spriteId].materialInst;
	}

	// A clipped sprite is always a single quad (4 vertices).
	protected override int GetCurrentVertexCount()
	{
#if UNITY_EDITOR
		if (meshVertices == null)
			return 0;
#endif
		return 4;
	}

	/// <summary>
	/// Resizes the sprite by adjusting scale and repositioning the transform so
	/// that the dragged bounds edges (dMin/dMax deltas in local space) stay put.
	/// </summary>
	public override void ReshapeBounds(Vector3 dMin, Vector3 dMax) {
		// Identical to tk2dSprite.ReshapeBounds
		float minSizeClampTexelScale = 0.1f; // Can't shrink sprite smaller than this many texels
		// Irrespective of transform
		var sprite = CurrentSprite;
		Vector3 oldAbsScale = new Vector3(Mathf.Abs(_scale.x), Mathf.Abs(_scale.y), Mathf.Abs(_scale.z));
		Vector3 oldMin = Vector3.Scale(sprite.untrimmedBoundsData[0], _scale) - 0.5f * Vector3.Scale(sprite.untrimmedBoundsData[1], oldAbsScale);
		Vector3 oldSize = Vector3.Scale(sprite.untrimmedBoundsData[1], oldAbsScale);
		Vector3 newAbsScale = oldSize + dMax - dMin;
		newAbsScale.x /= sprite.untrimmedBoundsData[1].x;
		newAbsScale.y /= sprite.untrimmedBoundsData[1].y;
		// Clamp the minimum size to avoid having the pivot move when we scale from near-zero
		if (sprite.untrimmedBoundsData[1].x * newAbsScale.x < sprite.texelSize.x * minSizeClampTexelScale && newAbsScale.x < oldAbsScale.x) {
			dMin.x = 0;
			newAbsScale.x = oldAbsScale.x;
		}
		if (sprite.untrimmedBoundsData[1].y * newAbsScale.y < sprite.texelSize.y * minSizeClampTexelScale && newAbsScale.y < oldAbsScale.y) {
			dMin.y = 0;
			newAbsScale.y = oldAbsScale.y;
		}
		// Add our wanted local dMin offset, while negating the positional offset caused by scaling
		// NOTE(review): relies on the implicit Vector3 -> Vector2 conversion
		// (the z component is intentionally dropped).
		Vector2 scaleFactor = new Vector3(Mathf.Approximately(oldAbsScale.x, 0) ? 0 : (newAbsScale.x / oldAbsScale.x),
			Mathf.Approximately(oldAbsScale.y, 0) ? 0 : (newAbsScale.y / oldAbsScale.y));
		Vector3 scaledMin = new Vector3(oldMin.x * scaleFactor.x, oldMin.y * scaleFactor.y);
		Vector3 offset = dMin + oldMin - scaledMin;
		offset.z = 0;
		transform.position = transform.TransformPoint(offset);
		scale = new Vector3(_scale.x * scaleFactor.x, _scale.y * scaleFactor.y, _scale.z);
	}
}
#nullable enable

using System;
using System.Collections.Generic;
using System.Linq;
using DatenMeister.Core.EMOF.Implementation;
using DatenMeister.Core.EMOF.Interface.Common;
using DatenMeister.Core.EMOF.Interface.Identifiers;
using DatenMeister.Core.EMOF.Interface.Reflection;
using DatenMeister.Core.Helper;
using DatenMeister.Core.Models;
using DatenMeister.Core.Models.EMOF;
using DatenMeister.Core.Uml.Helper;

namespace DatenMeister.Forms.FormCreator
{
    public partial class FormCreator
    {
        /// <summary>
        /// Stores the configuration whether we require a tab for each property
        /// </summary>
        private const bool ConfigurationFormCreatorSeparateProperties = true;

        /// <summary>
        /// Checks whether a detail form is already within the element form.
        /// If yes, then it is directly returned, otherwise a new detail form is created and added to the form
        /// </summary>
        /// <param name="extentForm">extentForm to be evaluated</param>
        /// <returns>The first tab whose metaclass specializes DetailForm, or a freshly
        /// created DetailForm that has been appended to the form's tab collection</returns>
        public IElement GetOrCreateDetailFormIntoExtentForm(IElement extentForm)
        {
            var tabs = extentForm.getOrDefault<IReflectiveCollection>(_DatenMeister._Forms._ExtentForm.tab);
            foreach (var tab in tabs.OfType<IElement>())
            {
                if (ClassifierMethods.IsSpecializedClassifierOf(
                    tab.getMetaClass(), _DatenMeister.TheOne.Forms.__DetailForm))
                {
                    return tab;
                }
            }

            // Create new one
            var newTab = new MofFactory(extentForm).create(_DatenMeister.TheOne.Forms.__DetailForm);
            tabs.add(newTab);
            return newTab;
        }

        /// <summary>
        /// Creates an extent form containing the subforms
        /// </summary>
        /// <param name="subForms">Forms that become the tabs of the new extent form</param>
        /// <returns>The created extent</returns>
        public IElement CreateExtentForm(params IElement[] subForms)
        {
            var result = _factory.create(_DatenMeister.TheOne.Forms.__ExtentForm);
            result.set(_DatenMeister._Forms._ExtentForm.tab, subForms);
            return result;
        }

        /// <summary>
        /// Creates an extent form for the given extent by parsing through each element
        /// and creating the form out of the max elements
        /// </summary>
        /// <param name="extent">Extent to be parsed</param>
        /// <param name="creationMode">The creation mode being used</param>
        /// <returns>The created element</returns>
        public IElement CreateExtentForm(IExtent extent, CreationMode creationMode)
        {
            // Carry over the extent-type configuration so default metaclasses from
            // the extent settings contribute tabs in the overload below.
            var extentFormConfiguration = new ExtentFormConfiguration();
            var extentTypes = extent.GetConfiguration().ExtentTypes;
            extentFormConfiguration.ExtentTypes.AddRange(extentTypes);

            return CreateExtentForm(extent.elements(), creationMode, extentFormConfiguration);
        }

        /// <summary>
        /// Creates the extent by parsing through all the elements and creation of fields.
        /// One "Unclassified" list tab covers elements without a metaclass; one list tab
        /// is created per distinct metaclass (plus metaclasses contributed by the
        /// configured extent types).
        /// </summary>
        /// <param name="elements">Elements which are parsed to create the form</param>
        /// <param name="creationMode">The creation mode defining whether metaclass are used or not</param>
        /// <param name="extentFormConfiguration">Configuration of the extent form</param>
        /// <returns>The created form</returns>
        /// <exception cref="ArgumentNullException">If elements is null</exception>
        /// <exception cref="InvalidOperationException">If the form logic is active but the
        /// elements have no associated extent, or no list form could be resolved</exception>
        public IElement CreateExtentForm(IReflectiveCollection elements, CreationMode creationMode,
            ExtentFormConfiguration? extentFormConfiguration)
        {
            extentFormConfiguration ??= new ExtentFormConfiguration();
            var cache = new FormCreatorCache();
            if (elements == null) throw new ArgumentNullException(nameof(elements));

            var tabs = new List<IElement>();
            var result = _factory.create(_DatenMeister.TheOne.Forms.__ExtentForm);
            result.set(_DatenMeister._Forms._ExtentForm.name, "Items");

            var elementsAsObjects = elements.OfType<IObject>().ToList();

            // A MofObjectShadow metaclass counts as "no metaclass" (unresolved reference).
            var elementsWithoutMetaClass = elementsAsObjects
                .Where(x =>
                {
                    var element = x as IElement;
                    var metaClass = element?.getMetaClass();
                    return metaClass == null || metaClass is MofObjectShadow;
                })
                .ToList();

            var elementsWithMetaClass = elementsAsObjects
                .OfType<IElement>()
                .GroupBy(x =>
                {
                    var metaClass = x.getMetaClass();
                    return metaClass is MofObjectShadow ? null : metaClass;
                })
                .Where(x => x.Key != null)
                .ToList();

            // Goes through all the extent types and adds the default metaclasses into the list of tables
            var metaClasses = elementsWithMetaClass.Select(x => x.Key).ToList();
            foreach (var extentType in extentFormConfiguration.ExtentTypes)
            {
                var extentTypeSetting = _extentSettings.GetExtentTypeSetting(extentType);
                if (extentTypeSetting == null) continue;

                metaClasses.AddRange(extentTypeSetting.rootElementMetaClasses);
            }

            // Create the tab for the elements of without any metaclass
            if (elementsWithoutMetaClass.Any() || elementsAsObjects.Count == 0)
            {
                // If there are elements without a metaclass or if there are no elements at all within the extent
                // then provide an empty list form
                var form = _factory.create(_DatenMeister.TheOne.Forms.__ListForm);
                form.set(_DatenMeister._Forms._ListForm.name, "Unclassified");
                form.set(_DatenMeister._Forms._ListForm.noItemsWithMetaClass, true);

                foreach (var item in elementsWithoutMetaClass)
                    AddToForm(form, item, creationMode, cache);

                AddTextFieldForNameIfNoFieldAvailable(form);
                SortFieldsByImportantProperties(form);
                SetDefaultTypesByPackages(form);

                tabs.Add(form);
            }

            // Go through all the meta classes and create a tab for each of them
            foreach (var groupedMetaclass in metaClasses)
            {
                // Metaclasses added via extent-type settings may have no matching
                // elements; an empty group is acceptable then.
                var group = elementsWithMetaClass.FirstOrDefault(y => y.Key == groupedMetaclass)?.ToList()
                            ?? new List<IElement>();
                // Now try to figure out the metaclass
                if (groupedMetaclass == null)
                {
                    // Should not happen, but we need to handle this
                    continue;
                }

                IElement form;
                if (_formLogic != null)
                {
                    // View logic is used to ask for a default list view.
                    var extent = (elements as IHasExtent)?.Extent;
                    if (extent == null)
                    {
                        throw new InvalidOperationException("elements does not have an extent");
                    }

                    // Asks the view logic whether it has a list form for the specific metaclass
                    // It will ask the form reportCreator, if there is no view association directly referencing
                    // to the element
                    form = _formLogic.GetListFormForExtentsItem(
                               extent,
                               groupedMetaclass,
                               FormDefinitionMode.Default)
                           ?? throw new InvalidOperationException("No form was found");

                    if (creationMode.HasFlag(CreationMode.ByPropertyValues))
                    {
                        foreach (var element in group)
                        {
                            AddToFormByPropertyValues(form, element, creationMode, cache);
                        }
                    }
                }
                else
                {
                    // If no view logic is given, then ask directly the form reportCreator.
                    form = CreateListFormForMetaClass(groupedMetaclass, creationMode);
                }

                form.set(_DatenMeister._Forms._ListForm.metaClass, groupedMetaclass);
                SetDefaultTypesByPackages(form);
                tabs.Add(form);
            }

            result.set(_DatenMeister._Forms._ExtentForm.tab, tabs);
            return result;

            // Some helper method which creates the button to create new elements by the extent being connected
            // to the enumeration of elements
            // (local function; captures "elements" from the enclosing method)
            void SetDefaultTypesByPackages(IObject form)
            {
                var extent = elements.GetAssociatedExtent();
                var defaultTypePackages = extent?.GetConfiguration().GetDefaultTypePackages();
                if (defaultTypePackages != null)
                {
                    var currentDefaultPackages =
                        form.get<IReflectiveCollection>(_DatenMeister._Forms._ListForm.defaultTypesForNewElements);

                    // Now go through the packages and pick the classifier and add them to the list
                    foreach (var package in defaultTypePackages)
                    {
                        var childItems =
                            package.getOrDefault<IReflectiveCollection>(_UML._Packages._Package.packagedElement);
                        if (childItems == null) continue;

                        foreach (var type in childItems.OfType<IElement>())
                        {
                            // NOTE(review): this compares the packaged element itself against
                            // the UML Class metaclass, and then stores "package" (not "type")
                            // as the default metaclass — and does so once per matching child.
                            // That looks like it may have been intended as
                            // type.getMetaClass().equals(__Class) with "type" stored instead.
                            // Confirm against the intended behavior before changing.
                            if (type.equals(_UML.TheOne.StructuredClassifiers.__Class))
                            {
                                var defaultType =
                                    _factory.create(_DatenMeister.TheOne.Forms.__DefaultTypeForNewElement);
                                defaultType.set(_DatenMeister._Forms._DefaultTypeForNewElement.metaClass, package);
                                defaultType.set(_DatenMeister._Forms._DefaultTypeForNewElement.name,
                                    NamedElementMethods.GetName(package));
                                currentDefaultPackages.add(defaultType);
                            }
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Gets the extent form for the given metaclass.
        /// This method is used when the user selects a metaclass to which a form shall be created
        /// </summary>
        /// <param name="metaClass">The meta class for which the extent form shall be created</param>
        /// <param name="creationMode">Defines the creation mode</param>
        /// <returns>The created extent form</returns>
        public IElement CreateExtentFormByMetaClass(IElement metaClass, CreationMode creationMode = CreationMode.All)
        {
            var extentForm = _factory.create(_DatenMeister.TheOne.Forms.__ExtentForm);
            extentForm.set(_DatenMeister._Forms._ExtentForm.name, NamedElementMethods.GetName(metaClass) + " - List");

            var tabs = new List<IElement>();

            // Get all properties of the elements
            var properties = ClassifierMethods.GetPropertiesOfClassifier(metaClass).ToList();
            // NOTE(review): ToList() never returns null, so this check is effectively dead.
            if (properties == null)
            {
                throw new InvalidOperationException("ExtentForm cannot be created because given element does not have properties");
            }

            // Split into collection-valued properties (one list tab each) and
            // scalar properties (combined into a single "Detail" tab).
            var propertiesWithCollection =
                (from p in properties
                    where PropertyMethods.IsCollection(p)
                    select new {propertyName = NamedElementMethods.GetName(p), property = p}).ToList();

            var propertiesWithoutCollection =
                (from p in properties
                    where !PropertyMethods.IsCollection(p)
                    select new {propertyName = NamedElementMethods.GetName(p), property = p}).ToList();

            if (propertiesWithoutCollection.Any() || creationMode.HasFlag(CreationMode.AddMetaClass))
            {
                var detailForm = _factory.create(_DatenMeister.TheOne.Forms.__DetailForm);
                detailForm.set(_DatenMeister._Forms._DetailForm.name, "Detail");
                var fields = new List<IElement>();

                foreach (var property in propertiesWithoutCollection)
                {
                    var field = GetFieldForProperty(
                        metaClass,
                        property.property, CreationMode.All | CreationMode.ReadOnly);
                    fields.Add(field);
                }

                if (creationMode.HasFlag(CreationMode.AddMetaClass) || !FormMethods.HasMetaClassFieldInForm(detailForm))
                {
                    // Add the element itself
                    var metaClassField = _factory.create(_DatenMeister.TheOne.Forms.__MetaClassElementFieldData);
                    metaClassField.set(_DatenMeister._Forms._MetaClassElementFieldData.name, "Metaclass");
                    fields.Add(metaClassField);
                }

                detailForm.set(_DatenMeister._Forms._DetailForm.field, fields);
                tabs.Add(detailForm);
            }

            foreach (var pair in propertiesWithCollection)
            {
                var propertyType = PropertyMethods.GetPropertyType(pair.property);
                // Now try to figure out the metaclass
                var form = CreateListFormForMetaClass(
                    propertyType, CreationMode.ByMetaClass, pair.property);
                tabs.Add(form);
            }

            extentForm.set(_DatenMeister._Forms._ExtentForm.tab, tabs);
            return extentForm;
        }

        /// <summary>
        /// Lightweight pairing of a property name and its (optional) UML property
        /// element, used while collecting properties in CreateExtentFormForObject.
        /// </summary>
        public class P
        {
            // Property name; never null (defaults to empty string).
            public string PropertyName { get; set; } = string.Empty;

            // The UML property element; only set when the property was discovered
            // via the metaclass, not via stored values.
            public IElement? PropertyType { get; set; }

            /// <summary>
            /// Equality on PropertyName only, so Distinct() de-duplicates entries
            /// that were discovered both by stored value and by metaclass.
            /// </summary>
            public class PropertyNameEqualityComparer : IEqualityComparer<P>
            {
                public bool Equals(P? x, P? y)
                {
                    if (x == null || y == null)
                    {
                        return false;
                    }

                    return x.PropertyName?.Equals(y.PropertyName) == true;
                }

                public int GetHashCode(P obj)
                {
                    return obj.PropertyName?.GetHashCode() ?? 0;
                }
            }
        }

        /// <summary>
        /// Creates the extent form for a specific object which is selected in the item explorer view.
        /// This is the typical method that is used to create the form via the FormFinder
        /// </summary>
        /// <param name="element">Element which shall be shown</param>
        /// <param name="extent">Extent containing the element</param>
        /// <param name="creationMode">The creation mode for auto-generation of the fields</param>
        /// <returns>Created Extent form as MofObject</returns>
        /// <exception cref="InvalidOperationException">If the workspace logic is not set</exception>
        public IElement CreateExtentFormForObject(IObject element, IExtent extent, CreationMode creationMode)
        {
            if (_workspaceLogic == null) throw new InvalidOperationException("WorkspaceLogic is null");
            var cache = new FormCreatorCache();

            // Creates the empty form
            var extentForm = _factory.create(_DatenMeister.TheOne.Forms.__ExtentForm);
            extentForm.set(_DatenMeister._Forms._ExtentForm.name, NamedElementMethods.GetName(element));

            var objectMetaClass = (element as IElement)?.getMetaClass();
            var tabs = new List<IElement>();

            // Get all properties of the elements
            var flagAddByMetaClass = creationMode.HasFlag(CreationMode.ByMetaClass)
                                     || creationMode.HasFlag(CreationMode.AddMetaClass);
            var propertyNamesWithCollection = new List<P>();
            var propertyNamesWithoutCollection = new List<P>();

            // Adds the properties by the stored properties of the element
            if (creationMode.HasFlag(CreationMode.ByPropertyValues))
            {
                var properties = (element as IObjectAllProperties)?.getPropertiesBeingSet().ToList();
                properties ??= new List<string>();

                propertyNamesWithCollection =
                    (from p in properties
                        where element.IsPropertyOfType<IReflectiveCollection>(p)
                        let propertyContent = element.get<IReflectiveCollection>(p)
                        where propertyContent != null
                        select new P {PropertyName = p}).ToList();

                propertyNamesWithoutCollection =
                    (from p in properties
                        where !element.IsPropertyOfType<IReflectiveCollection>(p)
                        let propertyContent = element.get(p)
                        where propertyContent != null
                        select new P {PropertyName = p}).ToList();
            }

            // Adds the properties by the metaclasses
            if (flagAddByMetaClass && objectMetaClass != null)
            {
                var metaClassProperties = ClassifierMethods.GetPropertiesOfClassifier(objectMetaClass);
                foreach (var property in metaClassProperties)
                {
                    if (PropertyMethods.IsCollection(property))
                    {
                        propertyNamesWithCollection.Add(
                            new P
                            {
                                PropertyName = NamedElementMethods.GetName(property),
                                PropertyType = property
                            });
                    }
                    else
                    {
                        propertyNamesWithoutCollection.Add(
                            new P
                            {
                                PropertyName = NamedElementMethods.GetName(property),
                                PropertyType = property
                            });
                    }
                }
            }

            // Now collect the property Values
            // Reverse before Distinct so the metaclass-sourced entries (appended
            // last, carrying PropertyType) win the de-duplication, since Distinct
            // keeps the first occurrence. NOTE(review): assumed intent — confirm.
            propertyNamesWithCollection.Reverse();
            propertyNamesWithoutCollection.Reverse();

            var propertiesWithCollection =
                from p in propertyNamesWithCollection.Distinct(new P.PropertyNameEqualityComparer())
                let propertyContent = element.get<IReflectiveCollection>(p.PropertyName)
                select new {propertyName = p.PropertyName, propertyType = p.PropertyType, propertyContent};

            var propertiesWithoutCollection =
                (from p in propertyNamesWithoutCollection.Distinct(new P.PropertyNameEqualityComparer())
                    let propertyContent = element.getOrDefault<object>(p.PropertyName)
                    select new {propertyName = p.PropertyName, propertyType = p.PropertyType, propertyContent})
                .ToList();

            if (propertiesWithoutCollection.Any() || creationMode.HasFlag(CreationMode.AddMetaClass))
            {
                var detailForm = _factory.create(_DatenMeister.TheOne.Forms.__DetailForm);
                detailForm.set(_DatenMeister._Forms._DetailForm.name, "Detail");

                var fields = new List<IElement>();
                foreach (var pair in propertiesWithoutCollection)
                {
                    if (objectMetaClass != null && pair.propertyName != null)
                    {
                        var property = ClassifierMethods.GetPropertyOfClassifier(objectMetaClass, pair.propertyName);
                        if (property != null)
                        {
                            var field = GetFieldForProperty(
                                objectMetaClass, property, CreationMode.All | CreationMode.ReadOnly);
                            fields.Add(field);
                        }
                    }
                }

                if (!cache.MetaClassAlreadyAdded
                    && creationMode.HasFlag(CreationMode.AddMetaClass)
                    && extent != null
                    && (_workspaceLogic == null ||
                        !new FormMethods(_workspaceLogic).HasMetaClassFieldInForm(extent, fields)))
                {
                    // Add the element itself
                    var metaClassField = _factory.create(_DatenMeister.TheOne.Forms.__MetaClassElementFieldData);
                    metaClassField.set(_DatenMeister._Forms._MetaClassElementFieldData.name, "Metaclass");
                    fields.Add(metaClassField);

                    cache.MetaClassAlreadyAdded = true;
                }

                detailForm.set(_DatenMeister._Forms._DetailForm.field, fields);
                tabs.Add(detailForm);
            }

            foreach (var pair in propertiesWithCollection)
            {
                var propertyName = pair.propertyName;
                var elementsAsObjects = pair.propertyContent.OfType<IObject>().ToList();

                // packagedElement gets special handling: one tab per contained metaclass.
                if (propertyName == _UML._Packages._Package.packagedElement)
                {
                    var elementsWithoutMetaClass = elementsAsObjects.Where(x =>
                    {
                        if (x is IElement innerElement)
                        {
                            return innerElement.getMetaClass() == null;
                        }

                        return true;
                    }).ToList();

                    var elementsWithMetaClass = elementsAsObjects
                        .OfType<IElement>()
                        .Where(x => x.getMetaClass() != null)
                        .GroupBy(x => x.getMetaClass()!);

                    if (elementsWithoutMetaClass.Any() || !elementsAsObjects.Any())
                    {
                        // If there are elements included and they are filled
                        // OR, if there is no element included at all, create the corresponding list form
                        var form = _factory.create(_DatenMeister.TheOne.Forms.__ListForm);
                        form.set(_DatenMeister._Forms._ListForm.name, propertyName);
                        form.set(_DatenMeister._Forms._ListForm.property, propertyName);
                        form.set(_DatenMeister._Forms._ListForm.noItemsWithMetaClass, true);

                        foreach (var item in elementsWithoutMetaClass)
                        {
                            AddToForm(form, item, creationMode, cache);
                        }

                        tabs.Add(form);
                    }

                    foreach (var group in elementsWithMetaClass)
                    {
                        // Now try to figure out the metaclass
                        var groupedMetaclass = group.Key ?? throw new InvalidOperationException("Key may not be null");

                        if (_formLogic != null && extent != null)
                        {
                            var form = _formLogic.GetListFormForExtentForPropertyInObject(
                                element,
                                extent,
                                propertyName,
                                groupedMetaclass,
                                FormDefinitionMode.Default);
                            if (form != null)
                            {
                                tabs.Add(form);
                            }
                        }
                        else
                        {
                            tabs.Add(
                                CreateListFormForPropertyInObject(groupedMetaclass, pair.propertyName, creationMode));
                        }
                    }
                }
                else
                {
                    // If there are elements included and they are filled
                    // OR, if there is no element included at all, create the corresponding list form
                    var form = _factory.create(_DatenMeister.TheOne.Forms.__ListForm);
                    form.set(_DatenMeister._Forms._ListForm.name, propertyName);
                    form.set(_DatenMeister._Forms._ListForm.property, propertyName);

                    if (creationMode.HasFlagFast(CreationMode.ByPropertyValues))
                    {
                        foreach (var item in elementsAsObjects)
                        {
                            AddToForm(form, item, creationMode, cache);
                        }
                    }

                    if (creationMode.HasFlagFast(CreationMode.ByMetaClass))
                    {
                        var property = pair.propertyType;
                        var propertyType = property != null ? PropertyMethods.GetPropertyType(property) : null;
                        if (propertyType != null)
                        {
                            AddToFormByMetaclass(
                                form,
                                propertyType,
                                creationMode);
                        }
                    }

                    AddTextFieldForNameIfNoFieldAvailable(form);
                    SortFieldsByImportantProperties(form);

                    // Adds the form to the tabs
                    tabs.Add(form);
                }
            }

            // ReSharper restore HeuristicUnreachableCode
            extentForm.set(_DatenMeister._Forms._ExtentForm.tab, tabs);
            return extentForm;
        }

        // Moves the field whose stored name equals the UML "name" literal to the
        // front of the form's field list, so the name column appears first.
        private void SortFieldsByImportantProperties(IObject form)
        {
            var fields = form.getOrDefault<IReflectiveCollection>(_DatenMeister._Forms._ListForm.field);
            if (fields == null) return;
            var fieldsAsList = fields.OfType<IElement>().ToList();

            // Check if the name is within the list, if yes, push it to the front
            // (compares each field's name value against the constant string "name")
            var fieldName = fieldsAsList.FirstOrDefault(x =>
                x.getOrDefault<string>(_UML._CommonStructure._NamedElement.name) ==
                _UML._CommonStructure._NamedElement.name);
            if (fieldName != null)
            {
                fieldsAsList.Remove(fieldName);
                fieldsAsList.Insert(0, fieldName);
            }

            // Sets it
            form.set(_DatenMeister._Forms._ListForm.field, fieldsAsList);
        }

        /// <summary>
        /// Checks whether at least one field is given.
        /// If no field is given, then the one text field for the name will be added
        /// </summary>
        /// <param name="form">Form to be checked</param>
        private void AddTextFieldForNameIfNoFieldAvailable(IObject form)
        {
            // If the field is empty, create an empty textfield with 'name' as a placeholder
            var fieldLength =
                form.getOrDefault<IReflectiveCollection>(_DatenMeister._Forms._ListForm.field)?.Count() ?? 0;
            if (fieldLength == 0)
            {
                var factory = new MofFactory(form);
                var textFieldData = factory.create(_DatenMeister.TheOne.Forms.__TextFieldData);
                textFieldData.set(_DatenMeister._Forms._TextFieldData.name, "name");
                textFieldData.set(_DatenMeister._Forms._TextFieldData.title, "name");
                form.AddCollectionItem(_DatenMeister._Forms._ListForm.field, textFieldData);
            }
        }
    }

    /// <summary>
    /// A configuration helper class to create the extent form
    /// </summary>
    public class ExtentFormConfiguration
    {
        /// <summary>
        /// Gets or sets the extent type to be used
        /// </summary>
        public List<string> ExtentTypes { get; set; } = new List<string>();
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using Xunit;

namespace System.PrivateUri.Tests
{
    /// <summary>
    /// Functional tests for <see cref="Uri"/>: construction from strings and
    /// base/relative pairs, TryCreate, relative-URI behavior, comparison,
    /// escaping helpers, and whitespace/port edge cases.
    /// </summary>
    public static class UriTests
    {
        public static IEnumerable<object[]> Uri_TestData
        {
            get
            {
                // Backslash handling is platform dependent: on Windows '\' in a
                // file URI is normalized to '/', elsewhere it is escaped as %5C.
                // Tuple layout: uri, expectedAbsolutePath, expectedLocalPath,
                // expectedAbsoluteUri, expectedHost.
                if (PlatformDetection.IsWindows)
                {
                    yield return new object[] { @"file:///path1\path2/path3\path4", @"/path1/path2/path3/path4", @"/path1/path2/path3/path4", @"file:///path1/path2/path3/path4", "" };
                    yield return new object[] { @"file:///path1%5Cpath2\path3", @"/path1/path2/path3", @"/path1/path2/path3", @"file:///path1/path2/path3", "" };
                    yield return new object[] { @"file://localhost/path1\path2/path3\path4\", @"/path1/path2/path3/path4/", @"\\localhost\path1\path2\path3\path4\", @"file://localhost/path1/path2/path3/path4/", "localhost" };
                    yield return new object[] { @"file://randomhost/path1%5Cpath2\path3", @"/path1/path2/path3", @"\\randomhost\path1\path2\path3", @"file://randomhost/path1/path2/path3", "randomhost" };
                }
                else
                {
                    yield return new object[] { @"file:///path1\path2/path3\path4", @"/path1%5Cpath2/path3%5Cpath4", @"/path1\path2/path3\path4", @"file:///path1%5Cpath2/path3%5Cpath4", "" };
                    yield return new object[] { @"file:///path1%5Cpath2\path3", @"/path1%5Cpath2%5Cpath3", @"/path1\path2\path3", @"file:///path1%5Cpath2%5Cpath3", "" };
                    yield return new object[] { @"file://localhost/path1\path2/path3\path4\", @"/path1%5Cpath2/path3%5Cpath4%5C", @"\\localhost\path1\path2\path3\path4\", @"file://localhost/path1%5Cpath2/path3%5Cpath4%5C", "localhost" };
                    yield return new object[] { @"file://randomhost/path1%5Cpath2\path3", @"/path1%5Cpath2%5Cpath3", @"\\randomhost\path1\path2\path3", @"file://randomhost/path1%5Cpath2%5Cpath3", "randomhost" };
                }
            }
        }

        [Theory]
        [MemberData(nameof(Uri_TestData))]
        public static void TestCtor_BackwardSlashInPath(string uri, string expectedAbsolutePath, string expectedLocalPath, string expectedAbsoluteUri, string expectedHost)
        {
            Uri actualUri = new Uri(uri);
            Assert.Equal(expectedAbsolutePath, actualUri.AbsolutePath);
            Assert.Equal(expectedLocalPath, actualUri.LocalPath);
            Assert.Equal(expectedAbsoluteUri, actualUri.AbsoluteUri);
            Assert.Equal(expectedHost, actualUri.Host);
        }

        /// <summary>
        /// Asserts every observable component of a URI that resolves to
        /// http://www.contoso.com/catalog/shownew.htm?date=today. Shared by the
        /// ctor and TryCreate tests below, which all construct this same URI.
        /// </summary>
        private static void AssertShowNewCatalogUri(Uri uri)
        {
            Assert.Equal(@"http://www.contoso.com/catalog/shownew.htm?date=today", uri.ToString());
            Assert.Equal(@"/catalog/shownew.htm", uri.AbsolutePath);
            Assert.Equal(@"http://www.contoso.com/catalog/shownew.htm?date=today", uri.AbsoluteUri);
            Assert.Equal(@"www.contoso.com", uri.Authority);
            Assert.Equal(@"www.contoso.com", uri.DnsSafeHost);
            Assert.Equal(@"", uri.Fragment);
            Assert.Equal(@"www.contoso.com", uri.Host);
            Assert.Equal(UriHostNameType.Dns, uri.HostNameType);
            Assert.True(uri.IsAbsoluteUri);
            Assert.True(uri.IsDefaultPort);
            Assert.False(uri.IsFile);
            Assert.False(uri.IsLoopback);
            Assert.False(uri.IsUnc);
            Assert.Equal(@"/catalog/shownew.htm", uri.LocalPath);
            Assert.Equal(@"http://www.contoso.com/catalog/shownew.htm?date=today", uri.OriginalString);
            Assert.Equal(@"/catalog/shownew.htm?date=today", uri.PathAndQuery);
            Assert.Equal(80, uri.Port);
            Assert.Equal(@"?date=today", uri.Query);
            Assert.Equal(@"http", uri.Scheme);
            string[] segments = uri.Segments;
            Assert.Equal(3, segments.Length);
            Assert.Equal(@"/", segments[0]);
            Assert.Equal(@"catalog/", segments[1]);
            Assert.Equal(@"shownew.htm", segments[2]);
            Assert.False(uri.UserEscaped);
            Assert.Equal(@"", uri.UserInfo);
        }

        /// <summary>
        /// Asserts that <paramref name="uri"/> behaves as a relative URI: its
        /// string forms round-trip and every absolute-only property throws
        /// <see cref="InvalidOperationException"/>.
        /// </summary>
        private static void AssertRelativeUri(Uri uri, string expectedOriginalString)
        {
            Assert.Equal(expectedOriginalString, uri.ToString());
            Assert.Throws<InvalidOperationException>(() => { object o = uri.AbsolutePath; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.AbsoluteUri; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.Authority; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.DnsSafeHost; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.Fragment; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.Host; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.HostNameType; });
            Assert.False(uri.IsAbsoluteUri);
            Assert.Throws<InvalidOperationException>(() => { object o = uri.IsDefaultPort; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.IsFile; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.IsLoopback; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.IsUnc; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.LocalPath; });
            Assert.Equal(expectedOriginalString, uri.OriginalString);
            Assert.Throws<InvalidOperationException>(() => { object o = uri.PathAndQuery; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.Port; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.Query; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.Scheme; });
            Assert.Throws<InvalidOperationException>(() => { object o = uri.Segments; });
            Assert.False(uri.UserEscaped);
            Assert.Throws<InvalidOperationException>(() => { object o = uri.UserInfo; });
        }

        [Fact]
        public static void TestCtor_String()
        {
            Uri uri = new Uri(@"http://foo/bar/baz#frag");

            Assert.Equal(@"http://foo/bar/baz#frag", uri.ToString());
            Assert.Equal(@"/bar/baz", uri.AbsolutePath);
            Assert.Equal(@"http://foo/bar/baz#frag", uri.AbsoluteUri);
            Assert.Equal(@"foo", uri.Authority);
            Assert.Equal(@"foo", uri.DnsSafeHost);
            Assert.Equal(@"#frag", uri.Fragment);
            Assert.Equal(@"foo", uri.Host);
            Assert.Equal(UriHostNameType.Dns, uri.HostNameType);
            Assert.True(uri.IsAbsoluteUri);
            Assert.True(uri.IsDefaultPort);
            Assert.False(uri.IsFile);
            Assert.False(uri.IsLoopback);
            Assert.False(uri.IsUnc);
            Assert.Equal(@"/bar/baz", uri.LocalPath);
            Assert.Equal(@"http://foo/bar/baz#frag", uri.OriginalString);
            Assert.Equal(@"/bar/baz", uri.PathAndQuery);
            Assert.Equal(80, uri.Port);
            Assert.Equal(@"", uri.Query);
            Assert.Equal(@"http", uri.Scheme);
            string[] segments = uri.Segments;
            Assert.Equal(3, segments.Length);
            Assert.Equal(@"/", segments[0]);
            Assert.Equal(@"bar/", segments[1]);
            Assert.Equal(@"baz", segments[2]);
            Assert.False(uri.UserEscaped);
            Assert.Equal(@"", uri.UserInfo);
        }

        [Fact]
        public static void TestCtor_Uri_String()
        {
            Uri baseUri = new Uri(@"http://www.contoso.com/");
            Uri uri = new Uri(baseUri, "catalog/shownew.htm?date=today");

            AssertShowNewCatalogUri(uri);
        }

        [Fact]
        public static void TestCtor_String_UriKind()
        {
            Uri uri = new Uri("catalog/shownew.htm?date=today", UriKind.Relative);

            AssertRelativeUri(uri, @"catalog/shownew.htm?date=today");
        }

        [Fact]
        public static void TestCtor_Uri_Uri()
        {
            Uri absoluteUri = new Uri("http://www.contoso.com/");
            // Explicitly relative, then combined with the absolute base.
            Uri relativeUri = new Uri("/catalog/shownew.htm?date=today", UriKind.Relative);
            Uri uri = new Uri(absoluteUri, relativeUri);

            AssertShowNewCatalogUri(uri);
        }

        [Fact]
        public static void TestTryCreate_String_UriKind()
        {
            Uri uri;
            bool created = Uri.TryCreate("http://www.contoso.com/catalog/shownew.htm?date=today", UriKind.Absolute, out uri);

            Assert.True(created);
            AssertShowNewCatalogUri(uri);
        }

        [Fact]
        public static void TestTryCreate_Uri_String()
        {
            Uri baseUri = new Uri("http://www.contoso.com/", UriKind.Absolute);
            Uri uri;
            bool created = Uri.TryCreate(baseUri, "catalog/shownew.htm?date=today", out uri);

            Assert.True(created);
            AssertShowNewCatalogUri(uri);
        }

        [Fact]
        public static void TestTryCreate_Uri_Uri()
        {
            Uri baseUri = new Uri("http://www.contoso.com/", UriKind.Absolute);
            Uri relativeUri = new Uri("catalog/shownew.htm?date=today", UriKind.Relative);
            Uri uri;
            bool created = Uri.TryCreate(baseUri, relativeUri, out uri);

            Assert.True(created);
            AssertShowNewCatalogUri(uri);
        }

        [Fact]
        public static void TestMakeRelative()
        {
            Uri address1 = new Uri("http://www.contoso.com/");
            Uri address2 = new Uri("http://www.contoso.com/index.htm?date=today");

            // MakeRelativeUri yields a relative URI; absolute-only properties throw.
            Uri uri = address1.MakeRelativeUri(address2);

            AssertRelativeUri(uri, @"index.htm?date=today");
        }

        [Fact]
        public static void TestCheckHostName()
        {
            Assert.Equal(UriHostNameType.Dns, Uri.CheckHostName("www.contoso.com"));
            Assert.Equal(UriHostNameType.IPv4, Uri.CheckHostName("1.2.3.4"));
            Assert.Equal(UriHostNameType.Unknown, Uri.CheckHostName(null));
            Assert.Equal(UriHostNameType.Unknown, Uri.CheckHostName("!@*(@#&*#$&*#"));
        }

        [Fact]
        public static void TestCheckSchemeName()
        {
            Assert.True(Uri.CheckSchemeName("http"));
            Assert.False(Uri.CheckSchemeName(null));
            Assert.False(Uri.CheckSchemeName("!"));
        }

        [Fact]
        public static void TestIsBaseOf()
        {
            Uri uri = new Uri("http://host/path/path/file?query");

            // Candidates under the base directory http://host/path/path/.
            Assert.True(uri.IsBaseOf(new Uri(@"http://host/path/path/file/")));
            Assert.True(uri.IsBaseOf(new Uri(@"http://host/path/path/#fragment")));
            Assert.True(uri.IsBaseOf(new Uri("http://host/path/path/MoreDir/\"")));
            Assert.True(uri.IsBaseOf(new Uri(@"http://host/path/path/OtherFile?Query")));
            Assert.True(uri.IsBaseOf(new Uri(@"http://host/path/path/")));
            Assert.True(uri.IsBaseOf(new Uri(@"http://host/path/path/file")));

            // Candidates outside that directory.
            Assert.False(uri.IsBaseOf(new Uri(@"http://host/path/path")));
            Assert.False(uri.IsBaseOf(new Uri(@"http://host/path/path?query")));
            Assert.False(uri.IsBaseOf(new Uri(@"http://host/path/path#Fragment")));
            Assert.False(uri.IsBaseOf(new Uri(@"http://host/path/path2/")));
            Assert.False(uri.IsBaseOf(new Uri(@"http://host/path/path2/MoreDir")));
            Assert.False(uri.IsBaseOf(new Uri(@"http://host/path/File")));
        }

        [Fact]
        public static void TestIsWellFormedOriginalString()
        {
            Assert.True(new Uri("http://www.contoso.com/path?name").IsWellFormedOriginalString());
            Assert.False(new Uri("http://www.contoso.com/path???/file name").IsWellFormedOriginalString());
            Assert.False(new Uri(@"c:\\directory\filename").IsWellFormedOriginalString());
            Assert.False(new Uri(@"file://c:/directory/filename").IsWellFormedOriginalString());
            Assert.False(new Uri(@"http:\\host/path/file").IsWellFormedOriginalString());
        }

        [Fact]
        public static void TestCompare()
        {
            Uri uri1 = new Uri("http://www.contoso.com/path?name#frag");
            Uri uri2 = new Uri("http://www.contosooo.com/path?name#slag");
            Uri uri2a = new Uri("http://www.contosooo.com/path?name#slag");

            int i;
            // uri1's host orders before uri2's, so full-URI comparison is -1.
            i = Uri.Compare(uri1, uri2, UriComponents.AbsoluteUri, UriFormat.UriEscaped, StringComparison.CurrentCulture);
            Assert.Equal(-1, i);
            // Queries are identical.
            i = Uri.Compare(uri1, uri2, UriComponents.Query, UriFormat.UriEscaped, StringComparison.CurrentCulture);
            Assert.Equal(0, i);
            // Fragments differ ("frag" < "slag").
            i = Uri.Compare(uri1, uri2, UriComponents.Query | UriComponents.Fragment, UriFormat.UriEscaped, StringComparison.CurrentCulture);
            Assert.Equal(-1, i);

            Assert.False(uri1.Equals(uri2));
            Assert.False(uri1 == uri2);
            Assert.True(uri1 != uri2);

            Assert.True(uri2.Equals(uri2a));
            Assert.True(uri2 == uri2a);
            Assert.False(uri2 != uri2a);

            // Equal URIs must hash equally.
            int h2 = uri2.GetHashCode();
            int h2a = uri2a.GetHashCode();
            Assert.Equal(h2, h2a);
        }

        [Fact]
        public static void TestEscapeDataString()
        {
            Assert.Equal("Hello", Uri.EscapeDataString("Hello"));
            Assert.Equal("He%5Cl%2Flo", Uri.EscapeDataString(@"He\l/lo"));
        }

        [Fact]
        public static void TestUnescapeDataString()
        {
            Assert.Equal("Hello", Uri.UnescapeDataString("Hello"));
            Assert.Equal(@"He\l/lo", Uri.UnescapeDataString("He%5Cl%2Flo"));
        }

        [Fact]
        public static void TestEscapeUriString()
        {
            Assert.Equal("Hello", Uri.EscapeUriString("Hello"));
            // Unlike EscapeDataString, '/' is a valid URI character and is kept.
            Assert.Equal(@"He%5Cl/lo", Uri.EscapeUriString(@"He\l/lo"));
        }

        [Fact]
        public static void TestGetComponentParts()
        {
            Uri uri = new Uri("http://www.contoso.com/path?name#frag");

            Assert.Equal("frag", uri.GetComponents(UriComponents.Fragment, UriFormat.UriEscaped));
            Assert.Equal("www.contoso.com", uri.GetComponents(UriComponents.Host, UriFormat.UriEscaped));
        }

        [Fact]
        public static void TestCasingWhenCombiningAbsoluteAndRelativeUris()
        {
            Uri u = new Uri(new Uri("http://example.com/", UriKind.Absolute), new Uri("C(B:G", UriKind.Relative));

            Assert.Equal("http://example.com/C(B:G", u.ToString());
        }

        [Fact]
        public static void Uri_ColonInLongRelativeUri_SchemeSuccessfullyParsed()
        {
            Uri absolutePart = new Uri("http://www.contoso.com");
            string relativePart = "a/" + new string('a', 1024) + ":"; // 1024 is the maximum scheme length supported by System.Uri.
            Uri u = new Uri(absolutePart, relativePart);

            // On .NET Framework this will throw System.UriFormatException: Invalid URI: The Uri scheme is too long.
            Assert.Equal("http", u.Scheme);
        }

        [Fact]
        public static void Uri_ExtremelyLongScheme_ThrowsUriFormatException()
        {
            // 2MB is large enough to cause a stack overflow if we stackalloc the scheme buffer.
            string largeString = new string('a', 1_000_000) + ":";

            Assert.Throws<UriFormatException>(() => new Uri(largeString));
        }

        [Fact]
        public static void Uri_HostTrailingSpaces_SpacesTrimmed()
        {
            string host = "www.contoso.com";
            Uri u = new Uri($"http://{host} ");

            Assert.Equal($"http://{host}/", u.AbsoluteUri);
            Assert.Equal(host, u.Host);
        }

        [Theory]
        [InlineData("1234")]
        [InlineData("01234")]
        [InlineData("12340")]
        [InlineData("012340")]
        [InlineData("99")]
        [InlineData("09")]
        [InlineData("90")]
        [InlineData("0")]
        [InlineData("000")]
        [InlineData("65535")]
        public static void Uri_PortTrailingSpaces_SpacesTrimmed(string portString)
        {
            Uri u = new Uri($"http://www.contoso.com:{portString} ");

            // Leading zeros in the original port string are dropped when rebuilt.
            int port = Int32.Parse(portString);
            Assert.Equal($"http://www.contoso.com:{port}/", u.AbsoluteUri);
            Assert.Equal(port, u.Port);
        }

        [Fact]
        public static void Uri_EmptyPortTrailingSpaces_UsesDefaultPortSpacesTrimmed()
        {
            Uri u = new Uri($"http://www.contoso.com: ");

            Assert.Equal($"http://www.contoso.com/", u.AbsoluteUri);
            Assert.Equal(80, u.Port);
        }

        [Fact]
        public static void Uri_PathTrailingSpaces_SpacesTrimmed()
        {
            string path = "/path/";
            Uri u = new Uri($"http://www.contoso.com{path} ");

            Assert.Equal($"http://www.contoso.com{path}", u.AbsoluteUri);
            Assert.Equal(path, u.AbsolutePath);
        }

        [Fact]
        public static void Uri_QueryTrailingSpaces_SpacesTrimmed()
        {
            string query = "?query";
            Uri u = new Uri($"http://www.contoso.com/{query} ");

            Assert.Equal($"http://www.contoso.com/{query}", u.AbsoluteUri);
            Assert.Equal(query, u.Query);
        }

        [Theory]
        [InlineData(" 80")]
        [InlineData("8 0")]
        [InlineData("80a")]
        [InlineData("65536")]
        [InlineData("100000")]
        [InlineData("10000000000")]
        public static void Uri_InvalidPort_ThrowsUriFormatException(string portString)
        {
            Assert.Throws<UriFormatException>(
                () => { Uri u = new Uri($"http://www.contoso.com:{portString}"); });
        }

        [Fact]
        public static void Uri_EmptyPort_UsesDefaultPort()
        {
            Uri u = new Uri($"http://www.contoso.com:");

            Assert.Equal($"http://www.contoso.com/", u.AbsoluteUri);
            Assert.Equal(80, u.Port);
        }
    }
}
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Moq;
using PassWinmenu.ExternalPrograms;
using PassWinmenu.ExternalPrograms.Gpg;
using PassWinmenuTests.Utilities;
using Shouldly;
using Xunit;

namespace PassWinmenuTests.ExternalPrograms.Gpg
{
	/// <summary>
	/// Tests for <see cref="GpgAgent.EnsureAgentResponsive"/>: spawning
	/// gpg-connect-agent to probe a running agent, and killing the probe
	/// process and/or unresponsive agents when they do not answer.
	/// </summary>
	public class GpgAgentTests
	{
		private readonly Mock<IProcesses> processes = new Mock<IProcesses>();

		[Fact]
		public void EnsureAgentResponsive_NoAgentRunning_ReturnsWithoutAction()
		{
			var installation = new GpgInstallationBuilder().Build();
			var agent = new GpgAgent(processes.Object, installation);

			agent.EnsureAgentResponsive();

			// Only the agent-process lookup may happen; no probe is started.
			processes.Verify(p => p.GetProcessesByName(It.IsAny<string>()));
			processes.VerifyNoOtherCalls();
		}

		[Fact]
		public void EnsureAgentResponsive_ConnectAgentDoesNotExist_DoesNotThrow()
		{
			AddRunningAgentProcess();
			var installation = new GpgInstallationBuilder().Build();
			var agent = new GpgAgent(processes.Object, installation);

			// Remove the executable so the probe cannot be started.
			installation.GpgConnectAgentExecutable.Delete();

			Should.NotThrow(() => agent.EnsureAgentResponsive());
		}

		[Fact]
		public void EnsureAgentResponsive_StartConnectAgentThrows_CatchesException()
		{
			AddRunningAgentProcess();
			processes.Setup(p => p.Start(It.IsAny<ProcessStartInfo>())).Throws<Exception>();
			var installation = new GpgInstallationBuilder().Build();
			var agent = new GpgAgent(processes.Object, installation);

			Should.NotThrow(() => agent.EnsureAgentResponsive());
		}

		[Fact]
		public void EnsureAgentResponsive_AgentRunning_StartsConnectAgent()
		{
			AddRunningAgentProcess();
			processes.Setup(p => p.Start(It.IsAny<ProcessStartInfo>())).Returns(() => new FakeProcess());
			var installation = new GpgInstallationBuilder().Build();
			var agent = new GpgAgent(processes.Object, installation);

			agent.EnsureAgentResponsive();

			processes.Verify(p => p.GetProcessesByName(It.IsAny<string>()));
			// The probe must be the configured gpg-connect-agent binary.
			processes.Verify(p => p.Start(It.Is<ProcessStartInfo>(
				info => info.FileName == installation.GpgConnectAgentExecutable.FullName
			)));
		}

		[Theory]
		[InlineData("waiting for agent")]
		[InlineData("no running gpg-agent")]
		[InlineData("other output")]
		public void EnsureAgentResponsive_ConnectWaitsForAgentAndExits_Returns(string connectAgentOutput)
		{
			AddRunningAgentProcess();
			// Whatever gpg-connect-agent prints, a probe that exits means success.
			processes.Setup(p => p.Start(It.IsAny<ProcessStartInfo>()))
				.Returns(() => new FakeProcessBuilder().WithStandardError(connectAgentOutput).Build());
			var installation = new GpgInstallationBuilder().Build();
			var agent = new GpgAgent(processes.Object, installation);

			agent.EnsureAgentResponsive();

			processes.Verify(p => p.GetProcessesByName(It.IsAny<string>()));
			processes.Verify(p => p.Start(It.Is<ProcessStartInfo>(
				info => info.FileName == installation.GpgConnectAgentExecutable.FullName
			)));
		}

		[Fact]
		public void EnsureAgentResponsive_AgentDoesNotRespond_KillsConnectAgent()
		{
			// Arrange: the probe's stderr blocks, simulating a hung agent.
			AddRunningAgentProcess();
			var connectAgentProcessMock = new Mock<IProcess>();
			connectAgentProcessMock.Setup(p => p.StandardError).Returns(CreateBlockingStreamReader());
			processes.Setup(p => p.Start(It.IsAny<ProcessStartInfo>()))
				.Returns(() => connectAgentProcessMock.Object);
			var installation = new GpgInstallationBuilder().Build();
			var agent = new GpgAgent(processes.Object, installation);

			// Act
			agent.EnsureAgentResponsive();

			// Assert
			connectAgentProcessMock.Verify(p => p.Kill(), Times.Once);
		}

		[Fact]
		public void EnsureAgentResponsive_ConnectAgentReadsButDoesNotExit_KillsConnectAgent()
		{
			// Arrange: stderr yields output but the probe never exits.
			AddRunningAgentProcess();
			var connectAgentProcessMock = new Mock<IProcess>();
			connectAgentProcessMock.Setup(c => c.WaitForExit(It.IsAny<TimeSpan>())).Returns(false);
			connectAgentProcessMock.Setup(p => p.StandardError).Returns(new StreamReader(new MemoryStream(new byte[] { 10 })));
			processes.Setup(p => p.Start(It.IsAny<ProcessStartInfo>()))
				.Returns(() => connectAgentProcessMock.Object);
			var installation = new GpgInstallationBuilder().Build();
			var agent = new GpgAgent(processes.Object, installation);

			// Act
			agent.EnsureAgentResponsive();

			// Assert
			connectAgentProcessMock.Verify(p => p.Kill(), Times.Once);
		}

		[Fact]
		public void EnsureAgentResponsive_AgentDoesNotRespond_KillsRunningAgents()
		{
			// Arrange: two agent processes backed by the configured executable.
			var installation = new GpgInstallationBuilder().Build();
			var runningAgentMocks = new[]
			{
				new Mock<IProcess>(),
				new Mock<IProcess>(),
			};
			foreach (var mock in runningAgentMocks)
			{
				mock.Setup(p => p.MainModuleName).Returns(installation.GpgAgentExecutable.FullName);
			}
			processes.Setup(p => p.GetProcessesByName("gpg-agent")).Returns(runningAgentMocks.Select(m => m.Object).ToArray);
			processes.Setup(p => p.GetProcesses()).Returns(runningAgentMocks.Select(m => m.Object).ToArray);
			processes.Setup(p => p.Start(It.IsAny<ProcessStartInfo>())).Returns(CreateProcessWithBlockingStandardError);
			var agent = new GpgAgent(processes.Object, installation);

			// Act
			agent.EnsureAgentResponsive();

			// Assert
			foreach (var mock in runningAgentMocks)
			{
				mock.Verify(m => m.Kill(), Times.Once);
			}
		}

		[Fact]
		public void EnsureAgentResponsive_AgentDoesNotRespond_LeavesOtherAgentsAlive()
		{
			// Arrange: only the agent owned by this installation may be killed.
			var installation = new GpgInstallationBuilder().Build();
			var runningAgentMocks = new[]
			{
				new Mock<IProcess>(),
				new Mock<IProcess>(),
			};
			runningAgentMocks[0].Setup(p => p.MainModuleName).Returns(installation.GpgAgentExecutable.FullName);
			runningAgentMocks[1].Setup(p => p.MainModuleName).Returns(@"C:\other\gpg-agent.exe");
			processes.Setup(p => p.GetProcessesByName("gpg-agent")).Returns(runningAgentMocks.Select(m => m.Object).ToArray);
			processes.Setup(p => p.GetProcesses()).Returns(runningAgentMocks.Select(m => m.Object).ToArray);
			processes.Setup(p => p.Start(It.IsAny<ProcessStartInfo>())).Returns(CreateProcessWithBlockingStandardError);
			var agent = new GpgAgent(processes.Object, installation);

			// Act
			agent.EnsureAgentResponsive();

			// Assert
			runningAgentMocks[0].Verify(m => m.Kill(), Times.Once);
			runningAgentMocks[1].Verify(m => m.Kill(), Times.Never);
		}

		// Builds a probe process that never exits and whose StandardError
		// stream blocks on read. (Renamed from
		// CreateProcessWithBlockingStandardOutput: it always configured
		// StandardError, not standard output.)
		private IProcess CreateProcessWithBlockingStandardError()
		{
			var blockingProcess = new Mock<IProcess>();
			blockingProcess.Setup(c => c.WaitForExit(It.IsAny<TimeSpan>())).Returns(false);
			blockingProcess.Setup(p => p.StandardError).Returns(CreateBlockingStreamReader());
			return blockingProcess.Object;
		}

		// A reader over a stream that blocks for up to five seconds,
		// simulating a process that produces no output.
		private StreamReader CreateBlockingStreamReader()
		{
			return new StreamReader(new BlockingStream(TimeSpan.FromSeconds(5)));
		}

		// Registers a single fake "gpg-agent" process with the process mock.
		private void AddRunningAgentProcess()
		{
			processes.Setup(p => p.GetProcessesByName("gpg-agent")).Returns(new IProcess[] { new FakeProcess() });
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System.Collections.Generic;
using Microsoft.Extensions.DiagnosticAdapter;

namespace Microsoft.AspNetCore.Mvc
{
    /// <summary>
    /// Test double that subscribes to MVC diagnostic events (bound by name via
    /// <see cref="DiagnosticNameAttribute"/>) and records each event's payload so
    /// tests can assert on what was emitted. Each recording property stays null
    /// until its event fires. Handler parameter names must match the event
    /// payload's property names, so they are part of the binding contract.
    /// </summary>
    public class TestDiagnosticListener
    {
        /// <summary>Payload captured from the BeforeAction event.</summary>
        public class OnBeforeActionEventData
        {
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyHttpContext HttpContext { get; set; }
            public IProxyRouteData RouteData { get; set; }
        }

        public OnBeforeActionEventData BeforeAction { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.BeforeAction")]
        public virtual void OnBeforeAction(
            IProxyHttpContext httpContext,
            IProxyRouteData routeData,
            IProxyActionDescriptor actionDescriptor)
        {
            var data = new OnBeforeActionEventData
            {
                HttpContext = httpContext,
                RouteData = routeData,
                ActionDescriptor = actionDescriptor,
            };
            BeforeAction = data;
        }

        /// <summary>Payload captured from the AfterAction event.</summary>
        public class OnAfterActionEventData
        {
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyHttpContext HttpContext { get; set; }
        }

        public OnAfterActionEventData AfterAction { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.AfterAction")]
        public virtual void OnAfterAction(
            IProxyHttpContext httpContext,
            IProxyActionDescriptor actionDescriptor)
        {
            var data = new OnAfterActionEventData
            {
                HttpContext = httpContext,
                ActionDescriptor = actionDescriptor,
            };
            AfterAction = data;
        }

        /// <summary>Payload captured from the BeforeActionMethod event.</summary>
        public class OnBeforeActionMethodEventData
        {
            public IProxyActionContext ActionContext { get; set; }
            public IReadOnlyDictionary<string, object> Arguments { get; set; }
        }

        public OnBeforeActionMethodEventData BeforeActionMethod { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.BeforeActionMethod")]
        public virtual void OnBeforeActionMethod(
            IProxyActionContext actionContext,
            IReadOnlyDictionary<string, object> arguments)
        {
            var data = new OnBeforeActionMethodEventData
            {
                ActionContext = actionContext,
                Arguments = arguments,
            };
            BeforeActionMethod = data;
        }

        /// <summary>Payload captured from the AfterActionMethod event.</summary>
        public class OnAfterActionMethodEventData
        {
            public IProxyActionContext ActionContext { get; set; }
            public IProxyActionResult Result { get; set; }
        }

        public OnAfterActionMethodEventData AfterActionMethod { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.AfterActionMethod")]
        public virtual void OnAfterActionMethod(
            IProxyActionContext actionContext,
            IProxyActionResult result)
        {
            var data = new OnAfterActionMethodEventData
            {
                ActionContext = actionContext,
                Result = result,
            };
            AfterActionMethod = data;
        }

        /// <summary>Payload captured from the BeforeActionResult event.</summary>
        public class OnBeforeActionResultEventData
        {
            public IProxyActionContext ActionContext { get; set; }
            public IProxyActionResult Result { get; set; }
        }

        public OnBeforeActionResultEventData BeforeActionResult { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.BeforeActionResult")]
        public virtual void OnBeforeActionResult(IProxyActionContext actionContext, IProxyActionResult result)
        {
            var data = new OnBeforeActionResultEventData
            {
                ActionContext = actionContext,
                Result = result,
            };
            BeforeActionResult = data;
        }

        /// <summary>Payload captured from the AfterActionResult event.</summary>
        public class OnAfterActionResultEventData
        {
            public IProxyActionContext ActionContext { get; set; }
            public IProxyActionResult Result { get; set; }
        }

        public OnAfterActionResultEventData AfterActionResult { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.AfterActionResult")]
        public virtual void OnAfterActionResult(IProxyActionContext actionContext, IProxyActionResult result)
        {
            var data = new OnAfterActionResultEventData
            {
                ActionContext = actionContext,
                Result = result,
            };
            AfterActionResult = data;
        }

        /// <summary>Payload captured from the ViewFound event.</summary>
        public class OnViewFoundEventData
        {
            public IProxyActionContext ActionContext { get; set; }
            public bool IsMainPage { get; set; }
            public IProxyActionResult Result { get; set; }
            public string ViewName { get; set; }
            public IProxyView View { get; set; }
        }

        public OnViewFoundEventData ViewFound { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.ViewFound")]
        public virtual void OnViewFound(
            IProxyActionContext actionContext,
            bool isMainPage,
            IProxyActionResult result,
            string viewName,
            IProxyView view)
        {
            var data = new OnViewFoundEventData
            {
                ActionContext = actionContext,
                IsMainPage = isMainPage,
                Result = result,
                ViewName = viewName,
                View = view,
            };
            ViewFound = data;
        }

        /// <summary>Payload captured from the ViewNotFound event.</summary>
        public class OnViewNotFoundEventData
        {
            public IProxyActionContext ActionContext { get; set; }
            public bool IsMainPage { get; set; }
            public IProxyActionResult Result { get; set; }
            public string ViewName { get; set; }
            public IEnumerable<string> SearchedLocations { get; set; }
        }

        public OnViewNotFoundEventData ViewNotFound { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.ViewNotFound")]
        public virtual void OnViewNotFound(
            IProxyActionContext actionContext,
            bool isMainPage,
            IProxyActionResult result,
            string viewName,
            IEnumerable<string> searchedLocations)
        {
            var data = new OnViewNotFoundEventData
            {
                ActionContext = actionContext,
                IsMainPage = isMainPage,
                Result = result,
                ViewName = viewName,
                SearchedLocations = searchedLocations,
            };
            ViewNotFound = data;
        }

        /// <summary>Payload captured from the BeforeView event.</summary>
        public class OnBeforeViewEventData
        {
            public IProxyView View { get; set; }
            public IProxyViewContext ViewContext { get; set; }
        }

        public OnBeforeViewEventData BeforeView { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.BeforeView")]
        public virtual void OnBeforeView(IProxyView view, IProxyViewContext viewContext)
        {
            var data = new OnBeforeViewEventData
            {
                View = view,
                ViewContext = viewContext,
            };
            BeforeView = data;
        }

        /// <summary>Payload captured from the AfterView event.</summary>
        public class OnAfterViewEventData
        {
            public IProxyView View { get; set; }
            public IProxyViewContext ViewContext { get; set; }
        }

        public OnAfterViewEventData AfterView { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.AfterView")]
        public virtual void OnAfterView(IProxyView view, IProxyViewContext viewContext)
        {
            var data = new OnAfterViewEventData
            {
                View = view,
                ViewContext = viewContext,
            };
            AfterView = data;
        }

        /// <summary>Payload captured from the Razor BeforeViewPage event.</summary>
        public class OnBeforeViewPageEventData
        {
            public IProxyPage Page { get; set; }
            public IProxyViewContext ViewContext { get; set; }
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyHttpContext HttpContext { get; set; }
        }

        public OnBeforeViewPageEventData BeforeViewPage { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.Razor.BeforeViewPage")]
        public virtual void OnBeforeViewPage(
            IProxyPage page,
            IProxyViewContext viewContext,
            IProxyActionDescriptor actionDescriptor,
            IProxyHttpContext httpContext)
        {
            var data = new OnBeforeViewPageEventData
            {
                Page = page,
                ViewContext = viewContext,
                ActionDescriptor = actionDescriptor,
                HttpContext = httpContext,
            };
            BeforeViewPage = data;
        }

        /// <summary>Payload captured from the Razor AfterViewPage event.</summary>
        public class OnAfterViewPageEventData
        {
            public IProxyPage Page { get; set; }
            public IProxyViewContext ViewContext { get; set; }
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyHttpContext HttpContext { get; set; }
        }

        public OnAfterViewPageEventData AfterViewPage { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.Razor.AfterViewPage")]
        public virtual void OnAfterViewPage(
            IProxyPage page,
            IProxyViewContext viewContext,
            IProxyActionDescriptor actionDescriptor,
            IProxyHttpContext httpContext)
        {
            var data = new OnAfterViewPageEventData
            {
                Page = page,
                ViewContext = viewContext,
                ActionDescriptor = actionDescriptor,
                HttpContext = httpContext,
            };
            AfterViewPage = data;
        }

        /// <summary>Payload captured from the BeforeViewComponent event.</summary>
        public class OnBeforeViewComponentEventData
        {
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyViewComponentContext ViewComponentContext { get; set; }
            public object ViewComponent { get; set; }
        }

        public OnBeforeViewComponentEventData BeforeViewComponent { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.BeforeViewComponent")]
        public virtual void OnBeforeViewComponent(
            IProxyActionDescriptor actionDescriptor,
            IProxyViewComponentContext viewComponentContext,
            object viewComponent)
        {
            var data = new OnBeforeViewComponentEventData
            {
                ActionDescriptor = actionDescriptor,
                ViewComponentContext = viewComponentContext,
                ViewComponent = viewComponent,
            };
            BeforeViewComponent = data;
        }

        /// <summary>Payload captured from the AfterViewComponent event.</summary>
        public class OnAfterViewComponentEventData
        {
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyViewComponentContext ViewComponentContext { get; set; }
            public IProxyViewComponentResult ViewComponentResult { get; set; }
            public object ViewComponent { get; set; }
        }

        public OnAfterViewComponentEventData AfterViewComponent { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.AfterViewComponent")]
        public virtual void OnAfterViewComponent(
            IProxyActionDescriptor actionDescriptor,
            IProxyViewComponentContext viewComponentContext,
            IProxyViewComponentResult viewComponentResult,
            object viewComponent)
        {
            var data = new OnAfterViewComponentEventData
            {
                ActionDescriptor = actionDescriptor,
                ViewComponentContext = viewComponentContext,
                ViewComponentResult = viewComponentResult,
                ViewComponent = viewComponent,
            };
            AfterViewComponent = data;
        }

        /// <summary>Payload captured from the ViewComponentBeforeViewExecute event.</summary>
        public class OnViewComponentBeforeViewExecuteEventData
        {
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyViewComponentContext ViewComponentContext { get; set; }
            public IProxyView View { get; set; }
        }

        public OnViewComponentBeforeViewExecuteEventData ViewComponentBeforeViewExecute { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.ViewComponentBeforeViewExecute")]
        public virtual void OnViewComponentBeforeViewExecute(
            IProxyActionDescriptor actionDescriptor,
            IProxyViewComponentContext viewComponentContext,
            IProxyView view)
        {
            var data = new OnViewComponentBeforeViewExecuteEventData
            {
                ActionDescriptor = actionDescriptor,
                ViewComponentContext = viewComponentContext,
                View = view,
            };
            ViewComponentBeforeViewExecute = data;
        }

        /// <summary>Payload captured from the ViewComponentAfterViewExecute event.</summary>
        public class OnViewComponentAfterViewExecuteEventData
        {
            public IProxyActionDescriptor ActionDescriptor { get; set; }
            public IProxyViewComponentContext ViewComponentContext { get; set; }
            public IProxyView View { get; set; }
        }

        public OnViewComponentAfterViewExecuteEventData ViewComponentAfterViewExecute { get; set; }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.ViewComponentAfterViewExecute")]
        public virtual void OnViewComponentAfterViewExecute(
            IProxyActionDescriptor actionDescriptor,
            IProxyViewComponentContext viewComponentContext,
            IProxyView view)
        {
            var data = new OnViewComponentAfterViewExecuteEventData
            {
                ActionDescriptor = actionDescriptor,
                ViewComponentContext = viewComponentContext,
                View = view,
            };
            ViewComponentAfterViewExecute = data;
        }

        /// <summary>Payload captured when a Razor page instrumentation context begins.</summary>
        public class BeginPageInstrumentationData
        {
            public IProxyHttpContext HttpContext { get; set; }
            public string Path { get; set; }
            public int Position { get; set; }
            public int Length { get; set; }
            public bool IsLiteral { get; set; }
        }

        /// <summary>Payload captured when a Razor page instrumentation context ends.</summary>
        public class EndPageInstrumentationData
        {
            public IProxyHttpContext HttpContext { get; set; }
            public string Path { get; set; }
        }

        // Begin/End entries are interleaved in firing order in a single list so
        // tests can assert on the overall instrumentation sequence.
        public List<object> PageInstrumentationData { get; set; } = new List<object>();

        [DiagnosticName("Microsoft.AspNetCore.Mvc.Razor.BeginInstrumentationContext")]
        public virtual void OnBeginPageInstrumentationContext(
            IProxyHttpContext httpContext,
            string path,
            int position,
            int length,
            bool isLiteral)
        {
            var entry = new BeginPageInstrumentationData
            {
                HttpContext = httpContext,
                Path = path,
                Position = position,
                Length = length,
                IsLiteral = isLiteral,
            };
            PageInstrumentationData.Add(entry);
        }

        [DiagnosticName("Microsoft.AspNetCore.Mvc.Razor.EndInstrumentationContext")]
        public virtual void OnEndPageInstrumentationContext(
            IProxyHttpContext httpContext,
            string path,
            int position,
            int length,
            bool isLiteral)
        {
            // The signature mirrors the Begin event's payload, but only the
            // context and path are recorded for End entries.
            var entry = new EndPageInstrumentationData
            {
                HttpContext = httpContext,
                Path = path,
            };
            PageInstrumentationData.Add(entry);
        }
    }
}
/***************************************************************************** * Automatic import and advanced preview added by Mitch Thompson * Full irrevocable rights and permissions granted to Esoteric Software *****************************************************************************/ #define SPINE_SKELETON_ANIMATOR using System; using System.Collections.Generic; using UnityEditor; #if !UNITY_4_3 using UnityEditor.AnimatedValues; #endif using UnityEngine; using Spine; namespace Spine.Unity.Editor { [CustomEditor(typeof(SkeletonDataAsset))] public class SkeletonDataAssetInspector : UnityEditor.Editor { static bool showAnimationStateData = true; static bool showAnimationList = true; static bool showSlotList = false; static bool showAttachments = false; static bool showBaking = false; static bool bakeAnimations = true; static bool bakeIK = true; static SendMessageOptions bakeEventOptions = SendMessageOptions.DontRequireReceiver; private SerializedProperty atlasAssets, skeletonJSON, scale, fromAnimation, toAnimation, duration, defaultMix; #if SPINE_SKELETON_ANIMATOR static bool showMecanim = false; private SerializedProperty controller; #endif #if SPINE_TK2D private SerializedProperty spriteCollection; #endif private bool m_initialized = false; private SkeletonDataAsset m_skeletonDataAsset; private SkeletonData m_skeletonData; private string m_skeletonDataAssetGUID; private bool needToSerialize; List<string> warnings = new List<string>(); void OnEnable () { SpineEditorUtilities.ConfirmInitialization(); atlasAssets = serializedObject.FindProperty("atlasAssets"); atlasAssets.isExpanded = true; skeletonJSON = serializedObject.FindProperty("skeletonJSON"); scale = serializedObject.FindProperty("scale"); fromAnimation = serializedObject.FindProperty("fromAnimation"); toAnimation = serializedObject.FindProperty("toAnimation"); duration = serializedObject.FindProperty("duration"); defaultMix = serializedObject.FindProperty("defaultMix"); #if SPINE_SKELETON_ANIMATOR 
controller = serializedObject.FindProperty("controller"); #endif #if SPINE_TK2D spriteCollection = serializedObject.FindProperty("spriteCollection"); #endif m_skeletonDataAsset = (SkeletonDataAsset)target; m_skeletonDataAssetGUID = AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(m_skeletonDataAsset)); EditorApplication.update += Update; m_skeletonData = m_skeletonDataAsset.GetSkeletonData(false); showBaking = EditorPrefs.GetBool("SkeletonDataAssetInspector_showUnity", false); RepopulateWarnings(); } void OnDestroy () { m_initialized = false; EditorApplication.update -= Update; this.DestroyPreviewInstances(); if (this.m_previewUtility != null) { this.m_previewUtility.Cleanup(); this.m_previewUtility = null; } } override public void OnInspectorGUI () { serializedObject.Update(); EditorGUI.BeginChangeCheck(); #if !SPINE_TK2D EditorGUILayout.PropertyField(atlasAssets, true); #else EditorGUI.BeginDisabledGroup(spriteCollection.objectReferenceValue != null); EditorGUILayout.PropertyField(atlasAssets, true); EditorGUI.EndDisabledGroup(); EditorGUILayout.PropertyField(spriteCollection, true); #endif EditorGUILayout.PropertyField(skeletonJSON); EditorGUILayout.PropertyField(scale); if (EditorGUI.EndChangeCheck()) { if (serializedObject.ApplyModifiedProperties()) { if (m_previewUtility != null) { m_previewUtility.Cleanup(); m_previewUtility = null; } RepopulateWarnings(); OnEnable(); return; } } if (m_skeletonData != null) { DrawAnimationStateInfo(); DrawAnimationList(); DrawSlotList(); DrawUnityTools(); } else { DrawReimportButton(); //Show Warnings foreach (var str in warnings) EditorGUILayout.LabelField(new GUIContent(str, SpineEditorUtilities.Icons.warning)); } if(!Application.isPlaying) serializedObject.ApplyModifiedProperties(); } void DrawUnityTools () { #if SPINE_SKELETON_ANIMATOR showMecanim = EditorGUILayout.Foldout(showMecanim, new GUIContent("SkeletonAnimator", SpineEditorUtilities.Icons.unityIcon)); if (showMecanim) { EditorGUI.indentLevel++; 
EditorGUILayout.PropertyField(controller, new GUIContent("Controller", SpineEditorUtilities.Icons.controllerIcon)); if (controller.objectReferenceValue == null) { using (new GUILayout.HorizontalScope()) { GUILayout.Space(32); if (GUILayout.Button(new GUIContent("Generate Mecanim Controller"), GUILayout.Width(195), GUILayout.Height(20))) SkeletonBaker.GenerateMecanimAnimationClips(m_skeletonDataAsset); } EditorGUILayout.LabelField("SkeletonAnimator is the Mecanim alternative to SkeletonAnimation. It is not required.", EditorStyles.miniLabel); } else { using (new GUILayout.HorizontalScope()) { GUILayout.Space(32); if (GUILayout.Button(new GUIContent("Update Controller Animations"), GUILayout.Width(195), GUILayout.Height(20))) SkeletonBaker.GenerateMecanimAnimationClips(m_skeletonDataAsset); } } EditorGUI.indentLevel--; } #endif bool pre = showBaking; showBaking = EditorGUILayout.Foldout(showBaking, new GUIContent("Baking", SpineEditorUtilities.Icons.unityIcon)); if (pre != showBaking) EditorPrefs.SetBool("SkeletonDataAssetInspector_showUnity", showBaking); if (showBaking) { EditorGUI.indentLevel++; EditorGUILayout.HelpBox("WARNING!\n\nBaking is NOT the same as SkeletonAnimator!\nDoes not support the following:\n\tFlipX or Y\n\tInheritScale\n\tColor Keys\n\tDraw Order Keys\n\tIK and Curves are sampled at 60fps and are not realtime.\n\tPlease read SkeletonBaker.cs comments for full details.\n\nThe main use of Baking is to export Spine projects to be used without the Spine Runtime (ie: for sale on the Asset Store, or background objects that are animated only with a wind noise generator)", MessageType.Warning, true); EditorGUI.indentLevel++; bakeAnimations = EditorGUILayout.Toggle("Bake Animations", bakeAnimations); EditorGUI.BeginDisabledGroup(!bakeAnimations); { EditorGUI.indentLevel++; bakeIK = EditorGUILayout.Toggle("Bake IK", bakeIK); bakeEventOptions = (SendMessageOptions)EditorGUILayout.EnumPopup("Event Options", bakeEventOptions); EditorGUI.indentLevel--; } 
EditorGUI.EndDisabledGroup(); EditorGUI.indentLevel++; using (new EditorGUILayout.HorizontalScope()) { if (GUILayout.Button(new GUIContent("Bake All Skins", SpineEditorUtilities.Icons.unityIcon), GUILayout.Height(32), GUILayout.Width(150))) SkeletonBaker.BakeToPrefab(m_skeletonDataAsset, m_skeletonData.Skins, "", bakeAnimations, bakeIK, bakeEventOptions); string skinName = "<No Skin>"; if (m_skeletonAnimation != null && m_skeletonAnimation.skeleton != null) { Skin bakeSkin = m_skeletonAnimation.skeleton.Skin; if (bakeSkin == null) { skinName = "Default"; bakeSkin = m_skeletonData.Skins.Items[0]; } else skinName = m_skeletonAnimation.skeleton.Skin.Name; using (new EditorGUILayout.VerticalScope()) { if (GUILayout.Button(new GUIContent("Bake " + skinName, SpineEditorUtilities.Icons.unityIcon), GUILayout.Height(32), GUILayout.Width(250))) SkeletonBaker.BakeToPrefab(m_skeletonDataAsset, new ExposedList<Skin>(new [] { bakeSkin }), "", bakeAnimations, bakeIK, bakeEventOptions); using (new EditorGUILayout.HorizontalScope()) { GUILayout.Label(new GUIContent("Skins", SpineEditorUtilities.Icons.skinsRoot), GUILayout.Width(50)); if (GUILayout.Button(skinName, EditorStyles.popup, GUILayout.Width(196))) { SelectSkinContext(); } } } } } EditorGUI.indentLevel--; EditorGUI.indentLevel--; } } void DrawReimportButton () { EditorGUI.BeginDisabledGroup(skeletonJSON.objectReferenceValue == null); if (GUILayout.Button(new GUIContent("Attempt Reimport", SpineEditorUtilities.Icons.warning))) { DoReimport(); return; } EditorGUI.EndDisabledGroup(); } void DoReimport () { SpineEditorUtilities.ImportSpineContent(new string[] { AssetDatabase.GetAssetPath(skeletonJSON.objectReferenceValue) }, true); if (m_previewUtility != null) { m_previewUtility.Cleanup(); m_previewUtility = null; } RepopulateWarnings(); OnEnable(); EditorUtility.SetDirty(m_skeletonDataAsset); } void DrawAnimationStateInfo () { showAnimationStateData = EditorGUILayout.Foldout(showAnimationStateData, "Animation State Data"); if 
(!showAnimationStateData) return; EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(defaultMix); // Animation names var animations = new string[m_skeletonData.Animations.Count]; for (int i = 0; i < animations.Length; i++) animations[i] = m_skeletonData.Animations.Items[i].Name; for (int i = 0; i < fromAnimation.arraySize; i++) { SerializedProperty from = fromAnimation.GetArrayElementAtIndex(i); SerializedProperty to = toAnimation.GetArrayElementAtIndex(i); SerializedProperty durationProp = duration.GetArrayElementAtIndex(i); using (new EditorGUILayout.HorizontalScope()) { from.stringValue = animations[EditorGUILayout.Popup(Math.Max(Array.IndexOf(animations, from.stringValue), 0), animations)]; to.stringValue = animations[EditorGUILayout.Popup(Math.Max(Array.IndexOf(animations, to.stringValue), 0), animations)]; durationProp.floatValue = EditorGUILayout.FloatField(durationProp.floatValue); if (GUILayout.Button("Delete")) { duration.DeleteArrayElementAtIndex(i); toAnimation.DeleteArrayElementAtIndex(i); fromAnimation.DeleteArrayElementAtIndex(i); } } } using (new EditorGUILayout.HorizontalScope()) { EditorGUILayout.Space(); if (GUILayout.Button("Add Mix")) { duration.arraySize++; toAnimation.arraySize++; fromAnimation.arraySize++; } EditorGUILayout.Space(); } if (EditorGUI.EndChangeCheck()) { m_skeletonDataAsset.FillStateData(); EditorUtility.SetDirty(m_skeletonDataAsset); serializedObject.ApplyModifiedProperties(); needToSerialize = true; } } void DrawAnimationList () { showAnimationList = EditorGUILayout.Foldout(showAnimationList, new GUIContent("Animations", SpineEditorUtilities.Icons.animationRoot)); if (!showAnimationList) return; if (m_skeletonAnimation != null && m_skeletonAnimation.state != null) { if (GUILayout.Button(new GUIContent("Setup Pose", SpineEditorUtilities.Icons.skeleton), GUILayout.Width(105), GUILayout.Height(18))) { StopAnimation(); m_skeletonAnimation.skeleton.SetToSetupPose(); m_requireRefresh = true; } } else { 
EditorGUILayout.HelpBox("Animations can be previewed if you expand the Preview window below.", MessageType.Info); } EditorGUILayout.LabelField("Name", "Duration"); foreach (Spine.Animation a in m_skeletonData.Animations) { using (new GUILayout.HorizontalScope()) { if (m_skeletonAnimation != null && m_skeletonAnimation.state != null) { if (m_skeletonAnimation.state.GetCurrent(0) != null && m_skeletonAnimation.state.GetCurrent(0).Animation == a) { GUI.contentColor = Color.red; if (GUILayout.Button("\u25BA", EditorStyles.toolbarButton, GUILayout.Width(24))) { StopAnimation(); } GUI.contentColor = Color.white; } else { if (GUILayout.Button("\u25BA", EditorStyles.toolbarButton, GUILayout.Width(24))) { PlayAnimation(a.Name, true); } } } else { GUILayout.Label("?", GUILayout.Width(24)); } EditorGUILayout.LabelField(new GUIContent(a.Name, SpineEditorUtilities.Icons.animation), new GUIContent(a.Duration.ToString("f3") + "s" + ("(" + (Mathf.RoundToInt(a.Duration * 30)) + ")").PadLeft(12, ' '))); } } } void DrawSlotList () { showSlotList = EditorGUILayout.Foldout(showSlotList, new GUIContent("Slots", SpineEditorUtilities.Icons.slotRoot)); if (!showSlotList) return; if (m_skeletonAnimation == null || m_skeletonAnimation.skeleton == null) return; EditorGUI.indentLevel++; try { showAttachments = EditorGUILayout.ToggleLeft("Show Attachments", showAttachments); } catch { return; } List<Attachment> slotAttachments = new List<Attachment>(); List<string> slotAttachmentNames = new List<string>(); List<string> defaultSkinAttachmentNames = new List<string>(); var defaultSkin = m_skeletonData.Skins.Items[0]; Skin skin = m_skeletonAnimation.skeleton.Skin; if (skin == null) { skin = defaultSkin; } for (int i = m_skeletonAnimation.skeleton.Slots.Count - 1; i >= 0; i--) { Slot slot = m_skeletonAnimation.skeleton.Slots.Items[i]; EditorGUILayout.LabelField(new GUIContent(slot.Data.Name, SpineEditorUtilities.Icons.slot)); if (showAttachments) { EditorGUI.indentLevel++; slotAttachments.Clear(); 
slotAttachmentNames.Clear(); defaultSkinAttachmentNames.Clear(); skin.FindNamesForSlot(i, slotAttachmentNames); skin.FindAttachmentsForSlot(i, slotAttachments); if (skin != defaultSkin) { defaultSkin.FindNamesForSlot(i, defaultSkinAttachmentNames); defaultSkin.FindNamesForSlot(i, slotAttachmentNames); defaultSkin.FindAttachmentsForSlot(i, slotAttachments); } else { defaultSkin.FindNamesForSlot(i, defaultSkinAttachmentNames); } for (int a = 0; a < slotAttachments.Count; a++) { Attachment attachment = slotAttachments[a]; string name = slotAttachmentNames[a]; Texture2D icon = null; var type = attachment.GetType(); if (type == typeof(RegionAttachment)) icon = SpineEditorUtilities.Icons.image; else if (type == typeof(MeshAttachment)) icon = SpineEditorUtilities.Icons.mesh; else if (type == typeof(BoundingBoxAttachment)) icon = SpineEditorUtilities.Icons.boundingBox; else if (type == typeof(WeightedMeshAttachment)) icon = SpineEditorUtilities.Icons.weights; else icon = SpineEditorUtilities.Icons.warning; //TODO: Waterboard Nate //if (name != attachment.Name) //icon = SpineEditorUtilities.Icons.skinPlaceholder; bool initialState = slot.Attachment == attachment; bool toggled = EditorGUILayout.ToggleLeft(new GUIContent(name, icon), slot.Attachment == attachment); if (!defaultSkinAttachmentNames.Contains(name)) { Rect skinPlaceHolderIconRect = GUILayoutUtility.GetLastRect(); skinPlaceHolderIconRect.width = SpineEditorUtilities.Icons.skinPlaceholder.width; skinPlaceHolderIconRect.height = SpineEditorUtilities.Icons.skinPlaceholder.height; GUI.DrawTexture(skinPlaceHolderIconRect, SpineEditorUtilities.Icons.skinPlaceholder); } if (toggled != initialState) { if (toggled) { slot.Attachment = attachment; } else { slot.Attachment = null; } m_requireRefresh = true; } } EditorGUI.indentLevel--; } } EditorGUI.indentLevel--; } void RepopulateWarnings () { warnings.Clear(); if (skeletonJSON.objectReferenceValue == null) warnings.Add("Missing Skeleton JSON"); else { if 
(SpineEditorUtilities.IsValidSpineData((TextAsset)skeletonJSON.objectReferenceValue) == false) { warnings.Add("Skeleton data file is not a valid JSON or binary file."); } else { bool detectedNullAtlasEntry = false; var atlasList = new List<Atlas>(); for (int i = 0; i < atlasAssets.arraySize; i++) { if (atlasAssets.GetArrayElementAtIndex(i).objectReferenceValue == null) { detectedNullAtlasEntry = true; break; } else { atlasList.Add(((AtlasAsset)atlasAssets.GetArrayElementAtIndex(i).objectReferenceValue).GetAtlas()); } } if (detectedNullAtlasEntry) warnings.Add("AtlasAsset elements cannot be Null"); else { //get requirements var missingPaths = SpineEditorUtilities.GetRequiredAtlasRegions(AssetDatabase.GetAssetPath((TextAsset)skeletonJSON.objectReferenceValue)); foreach (var atlas in atlasList) { for (int i = 0; i < missingPaths.Count; i++) { if (atlas.FindRegion(missingPaths[i]) != null) { missingPaths.RemoveAt(i); i--; } } } foreach (var str in missingPaths) warnings.Add("Missing Region: '" + str + "'"); } } } } //preview window stuff private PreviewRenderUtility m_previewUtility; private GameObject m_previewInstance; private Vector2 previewDir; private SkeletonAnimation m_skeletonAnimation; //private SkeletonData m_skeletonData; private static int sliderHash = "Slider".GetHashCode(); private float m_lastTime; private bool m_playing; private bool m_requireRefresh; private Color m_originColor = new Color(0.3f, 0.3f, 0.3f, 1); private void StopAnimation () { if (m_skeletonAnimation == null) { Debug.LogWarning("Animation was stopped but preview doesn't exist. 
It's possible that the Preview Panel is closed."); } m_skeletonAnimation.state.ClearTrack(0); m_playing = false; } List<Spine.Event> m_animEvents = new List<Spine.Event>(); List<float> m_animEventFrames = new List<float>(); private void PlayAnimation (string animName, bool loop) { m_animEvents.Clear(); m_animEventFrames.Clear(); m_skeletonAnimation.state.SetAnimation(0, animName, loop); Spine.Animation a = m_skeletonAnimation.state.GetCurrent(0).Animation; foreach (Timeline t in a.Timelines) { if (t.GetType() == typeof(EventTimeline)) { EventTimeline et = (EventTimeline)t; for (int i = 0; i < et.Events.Length; i++) { m_animEvents.Add(et.Events[i]); m_animEventFrames.Add(et.Frames[i]); } } } m_playing = true; } private void InitPreview () { if (this.m_previewUtility == null) { this.m_lastTime = Time.realtimeSinceStartup; this.m_previewUtility = new PreviewRenderUtility(true); this.m_previewUtility.m_Camera.orthographic = true; this.m_previewUtility.m_Camera.orthographicSize = 1; this.m_previewUtility.m_Camera.cullingMask = -2147483648; this.m_previewUtility.m_Camera.nearClipPlane = 0.01f; this.m_previewUtility.m_Camera.farClipPlane = 1000f; this.CreatePreviewInstances(); } } private void CreatePreviewInstances () { this.DestroyPreviewInstances(); if (this.m_previewInstance == null) { try { string skinName = EditorPrefs.GetString(m_skeletonDataAssetGUID + "_lastSkin", ""); m_previewInstance = SpineEditorUtilities.InstantiateSkeletonAnimation((SkeletonDataAsset)target, skinName).gameObject; m_previewInstance.hideFlags = HideFlags.HideAndDontSave; m_previewInstance.layer = 0x1f; m_skeletonAnimation = m_previewInstance.GetComponent<SkeletonAnimation>(); m_skeletonAnimation.initialSkinName = skinName; m_skeletonAnimation.LateUpdate(); m_skeletonData = m_skeletonAnimation.skeletonDataAsset.GetSkeletonData(true); m_previewInstance.GetComponent<Renderer>().enabled = false; m_initialized = true; AdjustCameraGoals(true); } catch { // WARNING: Suppresses errors. 
} } } // NOTE(review): closing braces for definitions that begin before this chunk; the enclosing class header is not visible here.

	// Destroys the cached preview GameObject (if any) and flags the preview as needing re-initialization.
	private void DestroyPreviewInstances () {
		if (this.m_previewInstance != null) {
			DestroyImmediate(this.m_previewInstance);
			m_previewInstance = null;
		}
		m_initialized = false;
	}

	// A preview is available only when every atlas asset slot and the skeleton JSON reference are assigned.
	public override bool HasPreviewGUI () {
		//TODO: validate json data
		for (int i = 0; i < atlasAssets.arraySize; i++) {
			var prop = atlasAssets.GetArrayElementAtIndex(i);
			if (prop.objectReferenceValue == null)
				return false;
		}

		return skeletonJSON.objectReferenceValue != null;
	}

	// NOTE(review): 'new Texture()' creates a bare base Texture that is never used directly — it is
	// replaced by EndPreview() on the first repaint. Consider initializing to null; verify against
	// the targeted Unity version before changing.
	Texture m_previewTex = new Texture();

	// Draws the interactive preview area: re-renders into m_previewTex only when a refresh is
	// pending, then overlays the skin toolbar and time bar and handles mouse-wheel zoom.
	public override void OnInteractivePreviewGUI (Rect r, GUIStyle background) {
		this.InitPreview();

		if (UnityEngine.Event.current.type == EventType.Repaint) {
			if (m_requireRefresh) {
				this.m_previewUtility.BeginPreview(r, background);
				this.DoRenderPreview(true);
				this.m_previewTex = this.m_previewUtility.EndPreview();
				m_requireRefresh = false;
			}
			if (this.m_previewTex != null)
				GUI.DrawTexture(r, m_previewTex, ScaleMode.StretchToFill, false);
		}

		DrawSkinToolbar(r);
		NormalizedTimeBar(r);
		//TODO: implement panning
		// this.previewDir = Drag2D(this.previewDir, r);
		MouseScroll(r);
	}

	// Camera easing targets: orthographic size and position the preview camera moves toward each Update.
	float m_orthoGoal = 1;
	Vector3 m_posGoal = new Vector3(0, 0, -10);
	// Editor time until which the camera goals keep being recalculated (covers an animation mix transition).
	double m_adjustFrameEndTime = 0;

	// Recomputes the camera goals from the preview instance's renderer bounds.
	// When calculateMixTime is set and track 0 has a current entry, goals keep updating for the
	// duration of that entry's mix.
	private void AdjustCameraGoals (bool calculateMixTime) {
		if (this.m_previewInstance == null)
			return;

		if (calculateMixTime) {
			if (m_skeletonAnimation.state.GetCurrent(0) != null) {
				m_adjustFrameEndTime = EditorApplication.timeSinceStartup + m_skeletonAnimation.state.GetCurrent(0).Mix;
			}
		}

		GameObject go = this.m_previewInstance;
		Bounds bounds = go.GetComponent<Renderer>().bounds;
		m_orthoGoal = bounds.size.y;
		m_posGoal = bounds.center + new Vector3(0, 0, -10);
	}

	// Convenience overload: recompute goals without extending the mix window.
	private void AdjustCameraGoals () {
		AdjustCameraGoals(false);
	}

	// Eases the preview camera toward the current goals.
	// NOTE(review): the 0.1f lerp factor is applied once per Update call, so the easing speed
	// depends on the editor's repaint rate — presumably acceptable for an editor preview.
	private void AdjustCamera () {
		if (m_previewUtility == null)
			return;

		if (EditorApplication.timeSinceStartup < m_adjustFrameEndTime) {
			AdjustCameraGoals();
		}

		float orthoSet = Mathf.Lerp(this.m_previewUtility.m_Camera.orthographicSize, m_orthoGoal, 0.1f);

		this.m_previewUtility.m_Camera.orthographicSize = orthoSet;

		float dist = Vector3.Distance(m_previewUtility.m_Camera.transform.position, m_posGoal);

		if(dist > 0f) {
			Vector3 pos = Vector3.Lerp(this.m_previewUtility.m_Camera.transform.position, m_posGoal, 0.1f);
			pos.x = 0;
			this.m_previewUtility.m_Camera.transform.position = pos;
			this.m_previewUtility.m_Camera.transform.rotation = Quaternion.identity;
			m_requireRefresh = true;
		}
	}

	// Steps the skeleton animation (when not in play mode), optionally draws axis lines and
	// bounding-box handles, and renders one frame through the preview camera.
	// The renderer is enabled only for the duration of the render so the instance stays hidden elsewhere.
	private void DoRenderPreview (bool drawHandles) {
		GameObject go = this.m_previewInstance;

		if (m_requireRefresh && go != null) {
			go.GetComponent<Renderer>().enabled = true;

			if (EditorApplication.isPlaying) {
				//do nothing
			} else {
				// Advance by real elapsed time since the last preview render.
				m_skeletonAnimation.Update((Time.realtimeSinceStartup - m_lastTime));
			}

			m_lastTime = Time.realtimeSinceStartup;

			if (!EditorApplication.isPlaying)
				m_skeletonAnimation.LateUpdate();

			if (drawHandles) {
				Handles.SetCamera(m_previewUtility.m_Camera);
				Handles.color = m_originColor;

				// Origin cross, scaled to the skeleton data asset.
				Handles.DrawLine(new Vector3(-1000 * m_skeletonDataAsset.scale, 0, 0), new Vector3(1000 * m_skeletonDataAsset.scale, 0, 0));
				Handles.DrawLine(new Vector3(0, 1000 * m_skeletonDataAsset.scale, 0), new Vector3(0, -1000 * m_skeletonDataAsset.scale, 0));
			}

			this.m_previewUtility.m_Camera.Render();

			if (drawHandles) {
				Handles.SetCamera(m_previewUtility.m_Camera);
				foreach (var slot in m_skeletonAnimation.skeleton.Slots) {
					var boundingBoxAttachment = slot.Attachment as BoundingBoxAttachment;
					if (boundingBoxAttachment != null) {
						DrawBoundingBox (slot.Bone, boundingBoxAttachment);
					}
				}
			}

			go.GetComponent<Renderer>().enabled = false;
		}
	}

	// Draws a closed green polygon for a bounding-box attachment, in world space.
	static void DrawBoundingBox (Bone bone, BoundingBoxAttachment box) {
		if (box.Vertices.Length <= 0) return; // Handle cases where user creates a BoundingBoxAttachment but doesn't actually define it.

		var worldVerts = new float[box.Vertices.Length];
		box.ComputeWorldVertices(bone, worldVerts);

		Handles.color = Color.green;
		Vector3 lastVert = Vector3.back;
		Vector3 vert = Vector3.back;
		Vector3 firstVert = new Vector3(worldVerts[0], worldVerts[1], -1);
		// Vertices are packed as (x, y) pairs; connect consecutive points, then close the polygon.
		for (int i = 0; i < worldVerts.Length; i += 2) {
			vert.x = worldVerts[i];
			vert.y = worldVerts[i + 1];

			if (i > 0) {
				Handles.DrawLine(lastVert, vert);
			}

			lastVert = vert;
		}

		Handles.DrawLine(lastVert, firstVert);
	}

	// Editor tick: ease the camera, trigger repaints while playing or while a refresh is pending,
	// and flush any deferred serialization.
	void Update () {
		AdjustCamera();

		if (m_playing) {
			m_requireRefresh = true;
			Repaint();
		} else if (m_requireRefresh) {
			Repaint();
		} else {
			//only needed if using smooth menus
		}

		if (needToSerialize) {
			needToSerialize = false;
			serializedObject.ApplyModifiedProperties();
		}
	}

	// Draws the "Skin" label and popup button over the preview; clicking opens the skin context menu.
	void DrawSkinToolbar (Rect r) {
		if (m_skeletonAnimation == null)
			return;

		if (m_skeletonAnimation.skeleton != null) {
			string label = (m_skeletonAnimation.skeleton != null && m_skeletonAnimation.skeleton.Skin != null) ? m_skeletonAnimation.skeleton.Skin.Name : "default";

			Rect popRect = new Rect(r);
			popRect.y += 32;
			popRect.x += 4;
			popRect.height = 24;
			popRect.width = 40;
			EditorGUI.DropShadowLabel(popRect, new GUIContent("Skin", SpineEditorUtilities.Icons.skinsRoot));

			popRect.y += 11;
			popRect.width = 150;
			popRect.x += 44;

			if (GUI.Button(popRect, label, EditorStyles.popup)) {
				SelectSkinContext();
			}
		}
	}

	// Shows a context menu listing every skin in the skeleton data, with the active one checked.
	void SelectSkinContext () {
		GenericMenu menu = new GenericMenu();

		foreach (Skin s in m_skeletonData.Skins) {
			menu.AddItem(new GUIContent(s.Name), this.m_skeletonAnimation.skeleton.Skin == s, SetSkin, (object)s);
		}

		menu.ShowAsContext();
	}

	// GenericMenu callback: applies the chosen skin, re-initializes the preview instance and
	// remembers the choice per asset in EditorPrefs.
	void SetSkin (object o) {
		Skin skin = (Skin)o;

		m_skeletonAnimation.initialSkinName = skin.Name;
		m_skeletonAnimation.Initialize(true);
		m_requireRefresh = true;

		EditorPrefs.SetString(m_skeletonDataAssetGUID + "_lastSkin", skin.Name);
	}

	// Draws the playback progress bar with a red playhead and event markers along the timeline.
	void NormalizedTimeBar (Rect r) {
		if (m_skeletonAnimation == null)
			return;

		Rect barRect = new Rect(r);
		barRect.height = 32;
		barRect.x += 4;
		barRect.width -= 4;

		GUI.Box(barRect, "");

		Rect lineRect = new Rect(barRect);
		float width = lineRect.width;
		TrackEntry t = m_skeletonAnimation.state.GetCurrent(0);

		if (t != null) {
			// Wrap the track time into the current loop so the playhead stays within the bar.
			int loopCount = (int)(t.Time / t.EndTime);
			float currentTime = t.Time - (t.EndTime * loopCount);
			float normalizedTime = currentTime / t.Animation.Duration;

			lineRect.x = barRect.x + (width * normalizedTime) - 0.5f;
			lineRect.width = 2;

			GUI.color = Color.red;
			GUI.DrawTexture(lineRect, EditorGUIUtility.whiteTexture);
			GUI.color = Color.white;

			for (int i = 0; i < m_animEvents.Count; i++) {
				//TODO: Tooltip
				//Spine.Event spev = animEvents[i];

				float fr = m_animEventFrames[i];
				var evRect = new Rect(barRect);
				// Center the event icon on its frame, clamped so it never leaves the bar on the left.
				evRect.x = Mathf.Clamp(((fr / t.Animation.Duration) * width) - (SpineEditorUtilities.Icons._event.width / 2), barRect.x, float.MaxValue);
				evRect.width = SpineEditorUtilities.Icons._event.width;
				evRect.height = SpineEditorUtilities.Icons._event.height;
				evRect.y += SpineEditorUtilities.Icons._event.height;
				GUI.DrawTexture(evRect, SpineEditorUtilities.Icons._event);

				//TODO: Tooltip
				/*
				UnityEngine.Event ev = UnityEngine.Event.current;
				if(ev.isMouse){
					if(evRect.Contains(ev.mousePosition)){
						Rect tooltipRect = new Rect(evRect);
						tooltipRect.width = 500;
						tooltipRect.y -= 4;
						tooltipRect.x += 4;
						GUI.Label(tooltipRect, spev.Data.Name);
					}
				}
				*/
			}
		}
	}

	// Scroll-wheel zoom: adjusts the orthographic-size goal, with a floor to avoid inverting the view.
	void MouseScroll (Rect position) {
		UnityEngine.Event current = UnityEngine.Event.current;
		int controlID = GUIUtility.GetControlID(sliderHash, FocusType.Passive);

		switch (current.GetTypeForControl(controlID)) {
			case EventType.ScrollWheel:
				if (position.Contains(current.mousePosition)) {
					m_orthoGoal += current.delta.y;
					m_orthoGoal = Mathf.Max(0.01f, m_orthoGoal);
					GUIUtility.hotControl = controlID;
					current.Use();
				}
				break;
		}
	}

	//TODO: Implement preview panning
	/*
	static Vector2 Drag2D(Vector2 scrollPosition, Rect position)
	{
		int controlID = GUIUtility.GetControlID(sliderHash, FocusType.Passive);
		UnityEngine.Event current = UnityEngine.Event.current;
		switch (current.GetTypeForControl(controlID))
		{
			case EventType.MouseDown:
				if (position.Contains(current.mousePosition) && (position.width > 50f))
				{
					GUIUtility.hotControl = controlID;
					current.Use();
					EditorGUIUtility.SetWantsMouseJumping(1);
				}
				return scrollPosition;

			case EventType.MouseUp:
				if (GUIUtility.hotControl == controlID)
				{
					GUIUtility.hotControl = 0;
				}
				EditorGUIUtility.SetWantsMouseJumping(0);
				return scrollPosition;

			case EventType.MouseMove:
				return scrollPosition;

			case EventType.MouseDrag:
				if (GUIUtility.hotControl == controlID)
				{
					scrollPosition -= (Vector2) (((current.delta * (!current.shift ? ((float) 1) : ((float) 3))) / Mathf.Min(position.width, position.height)) * 140f);
					scrollPosition.y = Mathf.Clamp(scrollPosition.y, -90f, 90f);
					current.Use();
					GUI.changed = true;
				}
				return scrollPosition;
		}
		return scrollPosition;
	}
	*/

	public override GUIContent GetPreviewTitle () {
		return new GUIContent("Preview");
	}

	// Preview toolbar: a speed slider (0..2) snapped to 0.25 increments; disabled until initialized.
	public override void OnPreviewSettings () {
		if (!m_initialized) {
			GUILayout.HorizontalSlider(0, 0, 2, GUILayout.MaxWidth(64));
		} else {
			float speed = GUILayout.HorizontalSlider(m_skeletonAnimation.timeScale, 0, 2, GUILayout.MaxWidth(64));

			//snap to nearest 0.25
			float y = speed / 0.25f;
			int q = Mathf.RoundToInt(y);
			speed = q * 0.25f;

			m_skeletonAnimation.timeScale = speed;
		}
	}

	//TODO: Fix first-import error
	//TODO: Update preview without thumbnail
	// Renders the asset thumbnail shown in the project window.
	// NOTE(review): the initial 'new Texture2D(...)' is unconditionally replaced by
	// EndStaticPreview() below, so that allocation is dead — confirm before removing.
	public override Texture2D RenderStaticPreview (string assetPath, UnityEngine.Object[] subAssets, int width, int height) {
		var tex = new Texture2D(width, height, TextureFormat.ARGB32, false);

		this.InitPreview();

		if (this.m_previewUtility.m_Camera == null)
			return null;

		m_requireRefresh = true;
		this.DoRenderPreview(false);
		AdjustCameraGoals(false);

		this.m_previewUtility.m_Camera.orthographicSize = m_orthoGoal / 2;
		this.m_previewUtility.m_Camera.transform.position = m_posGoal;
		this.m_previewUtility.BeginStaticPreview(new Rect(0, 0, width, height));

		this.DoRenderPreview(false);

		//TODO: Figure out why this is throwing errors on first attempt
		//		if(m_previewUtility != null){
		//			Handles.SetCamera(this.m_previewUtility.m_Camera);
		//			Handles.BeginGUI();
		//			GUI.DrawTexture(new Rect(40,60,width,height), SpineEditorUtilities.Icons.spine, ScaleMode.StretchToFill);
		//			Handles.EndGUI();
		//		}

		tex = this.m_previewUtility.EndStaticPreview();
		return tex;
	}
}
}
/*
 * Copyright 2014 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;

using NUnit.Framework;

using ZXing.Common;
using ZXing.Common.Test;

namespace ZXing.OneD.Test
{
   /// <summary>
   /// Tests for <see cref="Code128Writer"/>: function-code encoding, code set switching,
   /// forced code sets and encode/decode round trips. Expected values are raw module bit
   /// patterns (one character per module).
   /// </summary>
   public class Code128WriterTestCase
   {
      // Bit patterns of the Code 128 special symbols used to build expected outputs.
      private const String FNC1 = "11110101110";
      private const String FNC2 = "11110101000";
      private const String FNC3 = "10111100010";
      private const String FNC4A = "11101011110";
      private const String FNC4B = "10111101110";
      private const String START_CODE_A = "11010000100";
      private const String START_CODE_B = "11010010000";
      private const String START_CODE_C = "11010011100";
      // NOTE(review): SWITCH_CODE_A/B share the bit patterns of FNC4A/FNC4B — in Code 128 the
      // same symbol values serve both roles depending on the active code set.
      private const String SWITCH_CODE_A = "11101011110";
      private const String SWITCH_CODE_B = "10111101110";
      private const String QUIET_SPACE = "00000";
      private const String STOP = "1100011101011";
      private const String LF = "10000110010";

      private Writer writer;
      private Code128Reader reader;

      [SetUp]
      public void setUp()
      {
         writer = new Code128Writer();
         reader = new Code128Reader();
      }

      [Test]
      public void testEncodeWithFunc3()
      {
         // \u00f3 is the escape character for FNC3.
         const string toEncode = "\u00f3" + "123";
         // "1" "2" "3" check digit 51
         var expected = QUIET_SPACE + START_CODE_B + FNC3 + "10011100110" + "11001110010" + "11001011100" + "11101000110" + STOP + QUIET_SPACE;

         var result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0);

         var actual = BitMatrixTestCase.matrixToString(result);

         Assert.AreEqual(expected, actual);
      }

      [Test]
      public void testEncodeWithFunc2()
      {
         // \u00f2 is the escape character for FNC2.
         const string toEncode = "\u00f2" + "123";
         // "1" "2" "3" check digit 56
         var expected = QUIET_SPACE + START_CODE_B + FNC2 + "10011100110" + "11001110010" + "11001011100" + "11100010110" + STOP + QUIET_SPACE;

         var result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0);

         var actual = BitMatrixTestCase.matrixToString(result);

         Assert.AreEqual(expected, actual);
      }

      [Test]
      public void testEncodeWithFunc1()
      {
         // \u00f1 is the escape character for FNC1; digits start in code set C.
         const string toEncode = "\u00f1" + "123";
         // "12" "3" check digit 92
         var expected = QUIET_SPACE + START_CODE_C + FNC1 + "10110011100" + SWITCH_CODE_B + "11001011100" + "10101111000" + STOP + QUIET_SPACE;

         var result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0);

         var actual = BitMatrixTestCase.matrixToString(result);

         Assert.AreEqual(expected, actual);
      }

      [Test]
      public void testRoundtrip()
      {
         // FNC1 markers must be dropped by the decoder: only the digits survive the round trip.
         var toEncode = "\u00f1" + "10958" + "\u00f1" + "17160526";
         var expected = "1095817160526";

         var encResult = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0);
         var row = encResult.getRow(0, null);
         var rtResult = reader.decodeRow(0, row, null);
         var actual = rtResult.Text;
         Assert.AreEqual(expected, actual);
      }

      [Test]
      public void testEncodeWithFunc4()
      {
         // \u00f4 is the escape character for FNC4.
         var toEncode = "\u00f4" + "123";
         // "1" "2" "3" check digit 59
         var expected = QUIET_SPACE + START_CODE_B + FNC4B + "10011100110" + "11001110010" + "11001011100" + "11100011010" + STOP + QUIET_SPACE;

         var result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0);

         var actual = BitMatrixTestCase.matrixToString(result);

         Assert.AreEqual(expected, actual);
      }

      [Test]
      public void testEncodeWithFncsAndNumberInCodesetA()
      {
         // Control characters force code set A; FNC1 and FNC4 must be emitted in A as well.
         String toEncode = "\n" + "\u00f1" + "\u00f4" + "1" + "\n";

         String expected = QUIET_SPACE + START_CODE_A + LF + FNC1 + FNC4A + "10011100110" + LF + "10101111000" + STOP + QUIET_SPACE;

         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0);

         String actual = BitMatrixTestCase.matrixToString(result);

         Assert.That(actual, Is.EqualTo(expected));
      }

      [Test]
      public void Should_Encode_And_Decode_Roundtrip()
      {
         // Round-trip all 128 ASCII characters in chunks of 32.
         var contents = String.Empty;
         for (var i = 0; i < 128; i++)
         {
            contents += (char)i;
            if ((i + 1) % 32 == 0)
            {
               Should_Encode(contents);
               contents = String.Empty;
            }
         }
      }

      [TestCase("\0ABab\u0010", TestName = "Start with A, switch to B and back to A")]
      [TestCase("ab\0ab", TestName = "Start with B, switch to A and back to B")]
      public void Should_Encode(string contents)
      {
         var sut = new Code128Writer();
         var sutDecode = new Code128Reader();

         var result = sut.encode(contents, BarcodeFormat.CODE_128, 0, 0);
         var resultString = BitMatrixTestCase.matrixToString(result);
         Console.WriteLine(contents);
         Console.WriteLine(resultString);
         Console.WriteLine("");
         // Re-parse the printed matrix and decode it back to the original contents.
         var matrix = BitMatrix.parse(resultString, "1", "0");
         var row = new BitArray(matrix.Width);
         matrix.getRow(0, row);
         var decodingResult = sutDecode.decodeRow(0, row, null);
         Assert.That(decodingResult, Is.Not.Null);
         Assert.That(decodingResult.Text, Is.EqualTo(contents));
      }

      [Test]
      public void testEncodeSwitchBetweenCodesetsAAndB()
      {
         // start with A switch to B and back to A
         // "\0" "A" "B" Switch-to-B "a" "b" Switch-to-A "\u0010" check digit
         testEncode("\0ABab\u0010",
             QUIET_SPACE + START_CODE_A + "10100001100" + "10100011000" + "10001011000" + SWITCH_CODE_B + "10010110000" + "10010000110" + SWITCH_CODE_A + "10100111100" + "11001110100" + STOP + QUIET_SPACE);

         // start with B switch to A and back to B
         // "a" "b" Switch-to-A "\0" Switch-to-B "a" "b" check digit
         testEncode("ab\0ab",
             QUIET_SPACE + START_CODE_B + "10010110000" + "10010000110" + SWITCH_CODE_A + "10100001100" + SWITCH_CODE_B + "10010110000" + "10010000110" + "11010001110" + STOP + QUIET_SPACE);
      }

      // Asserts the exact encoded bit pattern, then decodes it back and asserts the round trip.
      private void testEncode(String toEncode, String expected)
      {
         var result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0);

         var actual = BitMatrixTestCase.matrixToString(result);
         Assert.AreEqual(expected, actual, toEncode);

         var row = result.getRow(0, null);
         var rtResult = reader.decodeRow(0, row, null);
         var actualRoundtripResultText = rtResult.Text;
         Assert.AreEqual(toEncode, actualRoundtripResultText);
      }

      [Test]
      [ExpectedException(typeof(ArgumentException))]
      public void testEncodeWithForcedCodeSetFailureCodeSetABadCharacter()
      {
         // Lower case characters should not be accepted when the code set is forced to A.
         String toEncode = "ASDFx0123";

         var options = new Code128EncodingOptions();
         options.ForceCodeset = Code128EncodingOptions.Codesets.A;
         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0, options.Hints);
      }

      [Test]
      [ExpectedException(typeof(ArgumentException))]
      public void testEncodeWithForcedCodeSetFailureCodeSetBBadCharacter()
      {
         String toEncode = "ASdf\00123"; // \0 (ascii value 0)
         // Characters with ASCII value below 32 should not be accepted when the code set is forced to B.

         var options = new Code128EncodingOptions();
         options.ForceCodeset = Code128EncodingOptions.Codesets.B;
         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0, options.Hints);
      }

      [Test]
      [ExpectedException(typeof(ArgumentException))]
      public void testEncodeWithForcedCodeSetFailureCodeSetCBadCharactersNonNum()
      {
         String toEncode = "123a5678";
         // Non-digit characters should not be accepted when the code set is forced to C.

         var options = new Code128EncodingOptions();
         options.ForceCodeset = Code128EncodingOptions.Codesets.C;
         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0, options.Hints);
      }

      [Test]
      [ExpectedException(typeof(ArgumentException))]
      public void testEncodeWithForcedCodeSetFailureCodeSetCBadCharactersFncCode()
      {
         String toEncode = "123\u00f2a678";
         // Function codes other than 1 should not be accepted when the code set is forced to C.

         var options = new Code128EncodingOptions();
         options.ForceCodeset = Code128EncodingOptions.Codesets.C;
         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0, options.Hints);
      }

      [Test]
      [ExpectedException(typeof(ArgumentException))]
      public void testEncodeWithForcedCodeSetFailureCodeSetCWrongAmountOfDigits()
      {
         String toEncode = "123456789";
         // An uneven amount of digits should not be accepted when the code set is forced to C.

         var options = new Code128EncodingOptions();
         options.ForceCodeset = Code128EncodingOptions.Codesets.C;
         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0, options.Hints);
      }

      // NOTE(review): despite the "Failure" in the name, this asserts a successful forced-A encode.
      [Test]
      public void testEncodeWithForcedCodeSetFailureCodeSetA()
      {
         String toEncode = "AB123";
         // would default to B: "A" "B" "1" "2" "3" check digit 10
         String expected = QUIET_SPACE + START_CODE_A + "10100011000" + "10001011000" + "10011100110" + "11001110010" + "11001011100" + "11001000100" + STOP + QUIET_SPACE;

         var options = new Code128EncodingOptions();
         options.ForceCodeset = Code128EncodingOptions.Codesets.A;
         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0, options.Hints);

         String actual = BitMatrixTestCase.matrixToString(result);
         Assert.AreEqual(expected, actual);
      }

      // NOTE(review): despite the "Failure" in the name, this asserts a successful forced-B encode.
      [Test]
      public void testEncodeWithForcedCodeSetFailureCodeSetB()
      {
         String toEncode = "1234";
         // would default to C: "1" "2" "3" "4" check digit 88
         String expected = QUIET_SPACE + START_CODE_B + "10011100110" + "11001110010" + "11001011100" + "11001001110" + "11110010010" + STOP + QUIET_SPACE;

         var options = new Code128EncodingOptions();
         options.ForceCodeset = Code128EncodingOptions.Codesets.B;
         BitMatrix result = writer.encode(toEncode, BarcodeFormat.CODE_128, 0, 0, options.Hints);

         String actual = BitMatrixTestCase.matrixToString(result);
         Assert.AreEqual(expected, actual);
      }
   }
}
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.Network;
using Microsoft.Azure.Management.Network.Models;

namespace Microsoft.Azure.Management.Network
{
    /// <summary>
    /// The Windows Azure Network management API provides a RESTful set of web
    /// services that interact with Windows Azure Networks service to manage
    /// your network resources. The API has entities that capture the
    /// relationship between an end user and the Windows Azure Networks
    /// service.
    /// </summary>
    public static partial class LoadBalancerOperationsExtensions
    {
        /// <summary>
        /// The Put LoadBalancer operation creates/updates a LoadBalancer
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the create/update LoadBalancer
        /// operation
        /// </param>
        /// <returns>
        /// Response of a PUT Load Balancer operation
        /// </returns>
        public static LoadBalancerPutResponse BeginCreateOrUpdating(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName, LoadBalancer parameters)
        {
            // Generated sync-over-async wrapper: blocks the calling thread until the
            // async operation completes (GetResult on the unwrapped inner task).
            return Task.Factory.StartNew((object s) =>
            {
                return ((ILoadBalancerOperations)s).BeginCreateOrUpdatingAsync(resourceGroupName, loadBalancerName, parameters);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The Put LoadBalancer operation creates/updates a LoadBalancer
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the create/update LoadBalancer
        /// operation
        /// </param>
        /// <returns>
        /// Response of a PUT Load Balancer operation
        /// </returns>
        public static Task<LoadBalancerPutResponse> BeginCreateOrUpdatingAsync(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName, LoadBalancer parameters)
        {
            return operations.BeginCreateOrUpdatingAsync(resourceGroupName, loadBalancerName, parameters, CancellationToken.None);
        }

        /// <summary>
        /// The delete loadbalancer operation deletes the specified
        /// loadbalancer.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <returns>
        /// If the resource provider needs to return an error to any operation,
        /// it should return the appropriate HTTP error code and a message
        /// body as can be seen below. The message should be localized per the
        /// Accept-Language header specified in the original request such
        /// that it could be directly exposed to users
        /// </returns>
        public static UpdateOperationResponse BeginDeleting(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((ILoadBalancerOperations)s).BeginDeletingAsync(resourceGroupName, loadBalancerName);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The delete loadbalancer operation deletes the specified
        /// loadbalancer.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <returns>
        /// If the resource provider needs to return an error to any operation,
        /// it should return the appropriate HTTP error code and a message
        /// body as can be seen below. The message should be localized per the
        /// Accept-Language header specified in the original request such
        /// that it could be directly exposed to users
        /// </returns>
        public static Task<UpdateOperationResponse> BeginDeletingAsync(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName)
        {
            return operations.BeginDeletingAsync(resourceGroupName, loadBalancerName, CancellationToken.None);
        }

        /// <summary>
        /// The Put LoadBalancer operation creates/updates a LoadBalancer
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the create/update LoadBalancer
        /// operation
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is in progress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself. If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request. If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request and error information regarding
        /// the failure.
        /// </returns>
        public static AzureAsyncOperationResponse CreateOrUpdate(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName, LoadBalancer parameters)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((ILoadBalancerOperations)s).CreateOrUpdateAsync(resourceGroupName, loadBalancerName, parameters);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The Put LoadBalancer operation creates/updates a LoadBalancer
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the create/update LoadBalancer
        /// operation
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is in progress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself. If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request. If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request and error information regarding
        /// the failure.
        /// </returns>
        public static Task<AzureAsyncOperationResponse> CreateOrUpdateAsync(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName, LoadBalancer parameters)
        {
            return operations.CreateOrUpdateAsync(resourceGroupName, loadBalancerName, parameters, CancellationToken.None);
        }

        /// <summary>
        /// The delete loadbalancer operation deletes the specified
        /// loadbalancer.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static AzureOperationResponse Delete(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((ILoadBalancerOperations)s).DeleteAsync(resourceGroupName, loadBalancerName);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The delete loadbalancer operation deletes the specified
        /// loadbalancer.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static Task<AzureOperationResponse> DeleteAsync(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName)
        {
            return operations.DeleteAsync(resourceGroupName, loadBalancerName, CancellationToken.None);
        }

        /// <summary>
        /// The Get load balancer operation retrieves information about the
        /// specified load balancer.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <returns>
        /// Response of a GET Load Balancer operation
        /// </returns>
        public static LoadBalancerGetResponse Get(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((ILoadBalancerOperations)s).GetAsync(resourceGroupName, loadBalancerName);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The Get load balancer operation retrieves information about the
        /// specified load balancer.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <param name='loadBalancerName'>
        /// Required. The name of the loadBalancer.
        /// </param>
        /// <returns>
        /// Response of a GET Load Balancer operation
        /// </returns>
        public static Task<LoadBalancerGetResponse> GetAsync(this ILoadBalancerOperations operations, string resourceGroupName, string loadBalancerName)
        {
            return operations.GetAsync(resourceGroupName, loadBalancerName, CancellationToken.None);
        }

        /// <summary>
        /// The List loadBalancer operation retrieves all the loadbalancers in a
        /// resource group.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <returns>
        /// Response for ListLoadBalancers Api service call
        /// </returns>
        public static LoadBalancerListResponse List(this ILoadBalancerOperations operations, string resourceGroupName)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((ILoadBalancerOperations)s).ListAsync(resourceGroupName);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The List loadBalancer operation retrieves all the loadbalancers in a
        /// resource group.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group.
        /// </param>
        /// <returns>
        /// Response for ListLoadBalancers Api service call
        /// </returns>
        public static Task<LoadBalancerListResponse> ListAsync(this ILoadBalancerOperations operations, string resourceGroupName)
        {
            return operations.ListAsync(resourceGroupName, CancellationToken.None);
        }

        /// <summary>
        /// The List loadBalancer operation retrieves all the loadbalancers in a
        /// subscription.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <returns>
        /// Response for ListLoadBalancers Api service call
        /// </returns>
        public static LoadBalancerListResponse ListAll(this ILoadBalancerOperations operations)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((ILoadBalancerOperations)s).ListAllAsync();
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The List loadBalancer operation retrieves all the loadbalancers in a
        /// subscription.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Network.ILoadBalancerOperations.
        /// </param>
        /// <returns>
        /// Response for ListLoadBalancers Api service call
        /// </returns>
        public static Task<LoadBalancerListResponse> ListAllAsync(this ILoadBalancerOperations operations)
        {
            return operations.ListAllAsync(CancellationToken.None);
        }
    }
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SquishIt.Framework.Caches;
using SquishIt.Framework.Invalidation;
using SquishIt.Framework.Minifiers;
using SquishIt.Framework.Renderers;
using SquishIt.Framework.Files;
using SquishIt.Framework.Utilities;

namespace SquishIt.Framework.Base
{
    /// <summary>
    /// Base class for bundle implementations. Configuration methods all return (T)this.
    /// </summary>
    /// <typeparam name="T">Type of bundle being implemented (Javascript or CSS).</typeparam>
    public abstract partial class BundleBase<T> : IRenderable where T : BundleBase<T>
    {
        // Shared across all bundles of this type (static caches).
        // NOTE(review): renderPathCache is not referenced in this part of the
        // partial class - presumably used in another part; confirm before removing.
        static readonly Dictionary<string, string> renderPathCache = new Dictionary<string, string>();
        static readonly ConcurrentDictionary<string, BundleState> bundleStateCache = new ConcurrentDictionary<string, BundleState>();
        static readonly ConcurrentDictionary<string, BundleState> rawContentBundleStateCache = new ConcurrentDictionary<string, BundleState>();

        // Bundle-type specifics supplied by the concrete Javascript/CSS subclass.
        protected abstract IMinifier<T> DefaultMinifier { get; }
        protected abstract string tagFormat { get; }
        protected abstract string Template { get; }
        protected abstract string CachePrefix { get; }
        protected abstract IEnumerable<string> allowedExtensions { get; }
        protected abstract IEnumerable<string> disallowedExtensions { get; }
        protected abstract string defaultExtension { get; }

        // Extension appended to files rendered individually in debug mode.
        protected string debugExtension { get { return ".squishit.debug" + defaultExtension.ToLowerInvariant(); } }

        protected abstract string ProcessFile(string file, string outputFile, Asset originalAsset);

        // Mutable per-bundle configuration; replaced when a cached bundle is loaded.
        internal BundleState bundleState;
        readonly IContentCache bundleCache;
        readonly IContentCache rawContentCache;
        protected string BaseOutputHref = Configuration.Instance.DefaultOutputBaseHref;
        protected IFileWriterFactory fileWriterFactory;
        protected IFileReaderFactory fileReaderFactory;
        protected IDebugStatusReader debugStatusReader;
        protected IDirectoryWrapper directoryWrapper;
        protected IHasher hasher;
        protected IPathTranslator pathTranslator = Configuration.Instance.Platform.PathTranslator;

        IMinifier<T> minifier;
        // Falls back to the bundle type's default minifier when none has been set.
        protected IMinifier<T> Minifier
        {
            get { return minifier ?? DefaultMinifier; }
            set { minifier = value; }
        }

        protected BundleBase(IFileWriterFactory fileWriterFactory, IFileReaderFactory fileReaderFactory, IDebugStatusReader debugStatusReader, IDirectoryWrapper directoryWrapper, IHasher hasher, IContentCache bundleCache, IContentCache rawContentCache)
        {
            this.fileWriterFactory = fileWriterFactory;
            this.fileReaderFactory = fileReaderFactory;
            this.debugStatusReader = debugStatusReader;
            this.directoryWrapper = directoryWrapper;
            this.hasher = hasher;
            // Seed per-bundle state from the global configuration defaults.
            bundleState = new BundleState
            {
                DebugPredicate = Configuration.Instance.DefaultDebugPredicate,
                ShouldRenderOnlyIfOutputFileIsMissing = false,
                HashKeyName = Configuration.Instance.DefaultHashKeyName,
                CacheInvalidationStrategy = Configuration.Instance.DefaultCacheInvalidationStrategy
            };
            this.bundleCache = bundleCache;
            this.rawContentCache = rawContentCache;
        }

        public bool IsDebuggingEnabled()
        {
            return debugStatusReader.IsDebuggingEnabled(bundleState.DebugPredicate);
        }

        // Debug mode always writes plain files; release mode prefers the bundle's
        // renderer, then the configured default, then a plain file renderer.
        protected IRenderer GetFileRenderer()
        {
            return IsDebuggingEnabled() ? new FileRenderer(fileWriterFactory) :
                bundleState.ReleaseFileRenderer ??
                Configuration.Instance.DefaultReleaseRenderer ??
                new FileRenderer(fileWriterFactory);
        }

        void AddAsset(Asset asset)
        {
            bundleState.Assets.Add(asset);
        }

        /// <summary>
        /// Specify that a bundle should be rendered without type="" in the html tag.
        /// </summary>
        public T WithoutTypeAttribute()
        {
            bundleState.Typeless = true;
            return (T)this;
        }

        /// <summary>
        /// Add a single file to a bundle.
        /// </summary>
        /// <param name="filePath">Path to file being added</param>
        public T Add(string filePath)
        {
            AddAsset(new Asset { LocalPath = filePath });
            return (T)this;
        }

        /// <summary>
        /// Add a single file that has already been minified to a bundle. This will prevent the file from being minified again, a potential cause of bugs in combined file.
        /// </summary>
        /// <param name="filePath">Path to file being added</param>
        public T AddMinified(string filePath)
        {
            AddAsset(new Asset { LocalPath = filePath, Minify = false });
            return (T)this;
        }

        /// <summary>
        /// Add all files in a directory with extensions matching those known to bundle type. Defaults to include subfolders.
        /// </summary>
        /// <param name="folderPath">Path to directory being added.</param>
        /// <param name="recursive">Include subfolders</param>
        public T AddDirectory(string folderPath, bool recursive = true)
        {
            return AddDirectory(folderPath, recursive, true);
        }

        /// <summary>
        /// Add all files in a directory with extensions matching those known to bundle type. Defaults to include subfolders. All files found will be considered pre-minified.
        /// </summary>
        /// <param name="folderPath">Path to directory.</param>
        /// <param name="recursive">Include subfolders</param>
        public T AddMinifiedDirectory(string folderPath, bool recursive = true)
        {
            return AddDirectory(folderPath, recursive, false);
        }

        T AddDirectory(string folderPath, bool recursive, bool minify)
        {
            AddAsset(new Asset { LocalPath = folderPath, IsRecursive = recursive, Minify = minify });
            return (T)this;
        }

        /// <summary>
        /// Add arbitrary content that is not saved on disk.
        /// </summary>
        /// <param name="content">Content to include in bundle.</param>
        public T AddString(string content)
        {
            return AddString(content, defaultExtension, true);
        }

        /// <summary>
        /// Add arbitrary content that is not saved on disk with the assumption that it is treated as if found in a given directory. This is useful for adding LESS content that needs to get imports relative to a particular location.
        /// </summary>
        /// <param name="content">Content to include in bundle.</param>
        /// <param name="extension">Extension that would be included in filename if content were saved to disk - this is needed to determine if the content should be preprocessed.</param>
        /// <param name="currentDirectory">Folder that file would reside in if content were saved to disk - this is used for processing relative imports within arbitrary content.</param>
        public T AddString(string content, string extension, string currentDirectory = null)
        {
            return AddString(content, extension, true, currentDirectory);
        }

        /// <summary>
        /// Add pre-minified arbitrary content (not saved on disk).
        /// </summary>
        /// <param name="content">Minified content to include in bundle.</param>
        public T AddMinifiedString(string content)
        {
            return AddString(content, defaultExtension, false);
        }

        /// <summary>
        /// Add pre-minified arbitrary content (not saved on disk) with the assumption that it is treated as if found in a given directory. This is useful for adding LESS content that needs to get imports relative to a particular location.
        /// </summary>
        /// <param name="content">Minified content to include in bundle.</param>
        /// <param name="extension">Extension that would be included in filename if content were saved to disk - this is needed to determine if the content should be preprocessed.</param>
        public T AddMinifiedString(string content, string extension)
        {
            return AddString(content, extension, false);
        }

        // Core overload; duplicate content strings are silently ignored.
        T AddString(string content, string extension, bool minify, string currentDirectory = null)
        {
            if (bundleState.Assets.All(ac => ac.Content != content))
                bundleState.Assets.Add(new Asset { Content = content, Extension = extension, Minify = minify, ArbitraryWorkingDirectory = currentDirectory });
            return (T)this;
        }

        /// <summary>
        /// Add arbitrary content (not saved on disk) using string.Format to inject values.
        /// </summary>
        /// <param name="format">Content to include in bundle.</param>
        /// <param name="values">Values to be injected using string.Format.</param>
        public T AddString(string format, object[] values)
        {
            return AddString(format, defaultExtension, values);
        }

        /// <summary>
        /// Add arbitrary content (not saved on disk) using string.Format to inject values.
        /// </summary>
        /// <param name="format">Content to include in bundle.</param>
        /// <param name="extension">Extension that would be included in filename if content were saved to disk - this is needed to determine if the content should be preprocessed.</param>
        /// <param name="values">Values to be injected using string.Format.</param>
        public T AddString(string format, string extension, object[] values)
        {
            var content = string.Format(format, values);
            return AddString(content, extension);
        }

        /// <summary>
        /// Add a remote asset to bundle.
        /// </summary>
        /// <param name="localPath">Path to treat asset as if it comes from.</param>
        /// <param name="remotePath">URL to remote asset.</param>
        public T AddRemote(string localPath, string remotePath)
        {
            return AddRemote(localPath, remotePath, false);
        }

        /// <summary>
        /// Add a remote asset to bundle.
        /// </summary>
        /// <param name="localPath">Path to treat asset as if it comes from.</param>
        /// <param name="remotePath">URL to remote asset.</param>
        /// <param name="downloadRemote">Fetch remote content to include in bundle.</param>
        public T AddRemote(string localPath, string remotePath, bool downloadRemote)
        {
            var asset = new Asset
            {
                LocalPath = localPath,
                RemotePath = remotePath,
                DownloadRemote = downloadRemote
            };
            AddAsset(asset);
            return (T)this;
        }

        /// <summary>
        /// Add dynamic (app-generated) content - the generated proxy file SignalR serves to clients is a good example.
        /// </summary>
        /// <param name="siteRelativePath">Site-relative path to content (eg "signalr/hubs").</param>
        public T AddDynamic(string siteRelativePath)
        {
            var absolutePath = pathTranslator.BuildAbsolutePath(siteRelativePath);
            // Dynamic content is modeled as a remote asset that is always downloaded.
            return AddRemote(siteRelativePath, absolutePath, true);
        }

        /// <summary>
        /// Add embedded resource in root namespace.
        /// </summary>
        /// <param name="localPath">Path to treat asset as if it comes from.</param>
        /// <param name="embeddedResourcePath">Path to resource embedded in root namespace (eg "WebForms.js").</param>
        public T AddRootEmbeddedResource(string localPath, string embeddedResourcePath)
        {
            AddAsset(new Asset { LocalPath = localPath, RemotePath = embeddedResourcePath, Order = 0, IsEmbeddedResource = true, IsEmbeddedInRootNamespace = true });
            return (T)this;
        }

        /// <summary>
        /// Add embedded resource.
        /// </summary>
        /// <param name="localPath">Path to treat asset as if it comes from.</param>
        /// <param name="embeddedResourcePath">Path to embedded resource (eg "SquishIt.Tests://EmbeddedResource.Embedded.css").</param>
        public T AddEmbeddedResource(string localPath, string embeddedResourcePath)
        {
            AddAsset(new Asset { LocalPath = localPath, RemotePath = embeddedResourcePath, Order = 0, IsEmbeddedResource = true });
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to bypass writing to disk if the output file already exists.
        /// </summary>
        public T RenderOnlyIfOutputFileMissing()
        {
            bundleState.ShouldRenderOnlyIfOutputFileIsMissing = true;
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to always render in debug mode (assets served separately and unminified).
        /// </summary>
        public T ForceDebug()
        {
            debugStatusReader.ForceDebug();
            bundleState.ForceDebug = true;
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to render in debug mode (assets served separately and unminified) if a precondition is met.
        /// </summary>
        public T ForceDebugIf(Func<bool> predicate)
        {
            bundleState.DebugPredicate = predicate;
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to always render in release mode (assets combined and minified).
        /// </summary>
        public T ForceRelease()
        {
            debugStatusReader.ForceRelease();
            bundleState.ForceRelease = true;
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to prefix paths with given base URL - this is useful for cdn scenarios.
        /// </summary>
        /// <param name="href">Base path to CDN (eg "http://static.myapp.com").</param>
        public T WithOutputBaseHref(string href)
        {
            BaseOutputHref = href;
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to use a non-standard file renderer. This is useful if you want combined files uploaded to a static server or CDN.
        /// </summary>
        /// <param name="renderer">Implementation of <see cref="IRenderer">IRenderer</see> to be used when creating combined file.</param>
        public T WithReleaseFileRenderer(IRenderer renderer)
        {
            bundleState.ReleaseFileRenderer = renderer;
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to use a non-standard cache invalidation strategy.
        /// </summary>
        /// <param name="strategy">Implementation of <see cref="ICacheInvalidationStrategy">ICacheInvalidationStrategy</see> to be used when generating content tag (eg <see cref="HashAsVirtualDirectoryCacheInvalidationStrategy">HashAsVirtualDirectoryCacheInvalidationStrategy</see>)</param>
        public T WithCacheInvalidationStrategy(ICacheInvalidationStrategy strategy)
        {
            bundleState.CacheInvalidationStrategy = strategy;
            return (T)this;
        }

        // merge=true upserts into the existing attribute map; merge=false replaces it.
        void AddAttributes(Dictionary<string, string> attributes, bool merge = true)
        {
            if (merge)
            {
                foreach (var attribute in attributes)
                {
                    bundleState.Attributes[attribute.Key] = attribute.Value;
                }
            }
            else
            {
                bundleState.Attributes = attributes;
            }
        }

        /// <summary>
        /// Include a given HTML attribute in rendered tag.
        /// </summary>
        /// <param name="name">Attribute name.</param>
        /// <param name="value">Attribute value.</param>
        public T WithAttribute(string name, string value)
        {
            AddAttributes(new Dictionary<string, string> { { name, value } });
            return (T)this;
        }

        /// <summary>
        /// Include a given HTML attribute in rendered tag.
        /// </summary>
        /// <param name="attributes">Attribute name/value pairs.</param>
        /// <param name="merge">Merge with attributes already added (false will overwrite).</param>
        public T WithAttributes(Dictionary<string, string> attributes, bool merge = true)
        {
            AddAttributes(attributes, merge: merge);
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to use a type other than the default minifier for given bundle type.
        /// </summary>
        /// <typeparam name="TMin">Type of <see cref="IMinifier">IMinifier</see> to use.</typeparam>
        public T WithMinifier<TMin>() where TMin : IMinifier<T>
        {
            Minifier = MinifierFactory.Get<T, TMin>();
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to use a minifier instance.
        /// </summary>
        /// <typeparam name="TMin">Instance of <see cref="IMinifier">IMinifier</see> to use.</typeparam>
        public T WithMinifier<TMin>(TMin minifier) where TMin : IMinifier<T>
        {
            Minifier = minifier;
            return (T)this;
        }

        // Builds the final HTML tag. GetAdditionalAttributes is defined in another
        // part of this partial class.
        string FillTemplate(BundleState bundleState, string path)
        {
            return string.Format(Template, GetAdditionalAttributes(bundleState), path);
        }

        /// <summary>
        /// Configure bundle to use a specific name for cache-breaking parameter (only used with querystring invalidation).
        /// </summary>
        /// <param name="hashQueryStringKeyName">Name of parameter to be added to content URLs.</param>
        public T HashKeyNamed(string hashQueryStringKeyName)
        {
            bundleState.HashKeyName = hashQueryStringKeyName;
            return (T)this;
        }

        /// <summary>
        /// Configure bundle to bypass cache invalidation.
        /// </summary>
        public T WithoutRevisionHash()
        {
            // An empty hash key name disables the cache-breaking parameter entirely.
            return HashKeyNamed(string.Empty);
        }

        /// <summary>
        /// Configure bundle to use provided preprocessor instance.
        /// </summary>
        /// <param name="instance"><see cref="IPreprocessor">IPreprocessor</see> to use when rendering bundle.</param>
        /// <returns></returns>
        public T WithPreprocessor(IPreprocessor instance)
        {
            bundleState.AddPreprocessor(instance);
            return (T)this;
        }

        protected abstract void AggregateContent(List<Asset> assets, StringBuilder sb, string outputFile);

        // Looks up previously cached state by name; re-applies its force-debug/release
        // flags as a side effect. Returns null when the name is unknown.
        BundleState GetCachedBundleState(string name)
        {
            BundleState bundle;
            if (bundleStateCache.TryGetValue(CachePrefix + name, out bundle))
            {
                if (bundle.ForceDebug)
                {
                    debugStatusReader.ForceDebug();
                }
                if (bundle.ForceRelease)
                {
                    debugStatusReader.ForceRelease();
                }
            }
            return bundle;
        }

        /// <summary>
        /// Render bundle to a file.
        /// </summary>
        /// <param name="renderTo">Path to combined file.</param>
        /// <returns>HTML tag.</returns>
        public string Render(string renderTo)
        {
            string key = renderTo;
            return Render(renderTo, key, GetFileRenderer());
        }

        /// <summary>
        /// Render tag for a cached bundle.
        /// </summary>
        /// <param name="name">Name of cached bundle.</param>
        /// <returns>HTML tag.</returns>
        public string RenderCachedAssetTag(string name)
        {
            bundleState = GetCachedBundleState(name);
            return Render(null, name, new CacheRenderer(CachePrefix, name));
        }

        /// <summary>
        /// Render bundle into the cache with a given name.
        /// </summary>
        /// <param name="name">Name of bundle in cache.</param>
        /// <param name="renderToFilePath">File system path that cached bundle would be rendered to (for import processing).</param>
        public void AsNamed(string name, string renderToFilePath)
        {
            Render(renderToFilePath, name, GetFileRenderer());
            bundleState.Path = renderToFilePath;
            bundleStateCache.AddOrUpdate(CachePrefix + name, bundleState, (x, y) => bundleState);
        }

        /// <summary>
        /// Render bundle into cache and return tag.
        /// </summary>
        /// <param name="name">Name of bundle in cache.</param>
        /// <param name="renderToFilePath">File system path that cached bundle would be rendered to (for import processing).</param>
        /// <returns>HTML tag.</returns>
        public string AsCached(string name, string renderToFilePath)
        {
            string result = Render(renderToFilePath, name, new CacheRenderer(CachePrefix, name));
            bundleState.Path = renderToFilePath;
            bundleStateCache.AddOrUpdate(CachePrefix + name, bundleState, (x, y) => bundleState);
            return result;
        }

        /// <summary>
        /// Render bundle with a given name.
        /// </summary>
        /// <param name="name">Name for bundle.</param>
        /// <returns>HTML tag.</returns>
        public string RenderNamed(string name)
        {
            bundleState = GetCachedBundleState(name);
            if (!bundleState.DebugPredicate.SafeExecute())
            {
                // Revisit https://github.com/jetheredge/SquishIt/pull/155 and https://github.com/jetheredge/SquishIt/issues/183
                //hopefully we can find a better way to satisfy both of these requirements
                var fullName = (BaseOutputHref ?? "") + CachePrefix + name;
                var content = bundleCache.GetContent(fullName);
                if (content == null)
                {
                    // Cache miss: render now, then read back what was just cached.
                    AsNamed(name, bundleState.Path);
                    return bundleCache.GetContent(CachePrefix + name);
                }
                return content;
            }
            return RenderDebug(bundleState.Path, name, GetFileRenderer());
        }

        /// <summary>
        /// Render bundle from cache with a given name.
        /// </summary>
        /// <param name="name">Name for cached bundle.</param>
        /// <returns>HTML tag.</returns>
        public string RenderCached(string name)
        {
            bundleState = GetCachedBundleState(name);
            var content = CacheRenderer.Get(CachePrefix, name);
            if (content == null)
            {
                // Cache miss: render into the cache, then read it back.
                AsCached(name, bundleState.Path);
                return CacheRenderer.Get(CachePrefix, name);
            }
            return content;
        }

        public void ClearCache()
        {
            bundleCache.ClearTestingCache();
        }

        /// <summary>
        /// Retrieve number of assets included in bundle.
        /// </summary>
        public int AssetCount
        {
            get
            {
                return bundleState == null
                    ? 0
                    : bundleState.Assets == null
                        ? 0
                        : bundleState.Assets.Count;
            }
        }

        /// <summary>
        /// Render 'raw' content directly without building tags or writing to files (and save in cache by name)
        /// </summary>
        /// <returns>String representation of content, minified if needed.</returns>
        public string RenderRawContent(string bundleName)
        {
            var cacheKey = CachePrefix + "_raw_" + bundleName;
            string content;
            // Evict any stale entry before re-rendering under the same key.
            if (rawContentCache.ContainsKey(cacheKey))
            {
                rawContentCache.Remove(cacheKey);
            }
            content = GetMinifiedContent(bundleState.Assets, string.Empty);
            rawContentCache.Add(cacheKey, content, bundleState.DependentFiles, IsDebuggingEnabled());
            rawContentBundleStateCache.AddOrUpdate(cacheKey, bundleState, (x, y) => bundleState);
            return content;
        }

        /// <summary>
        /// Render cached 'raw' bundle content.
        /// </summary>
        /// <param name="bundleName">String representation of content according to cache.</param>
        /// <returns></returns>
        public string RenderCachedRawContent(string bundleName)
        {
            var cacheKey = CachePrefix + "_raw_" + bundleName;
            var output = rawContentCache.GetContent(cacheKey);
            if (output == null)
            {
                // Cache miss: restore the bundle state saved by RenderRawContent and re-render.
                rawContentBundleStateCache.TryGetValue(cacheKey, out bundleState);
                if (bundleState == null)
                {
                    throw new InvalidOperationException(string.Format("No cached bundle state named {0} was found.", bundleName));
                }
                output = RenderRawContent(bundleName);
            }
            return output;
        }
    }
}
/// This code was generated by
/// \ / _    _  _|   _  _
/// | (_)\/(_)(_|\/| |(/_  v1.0.0
/// /       /

using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Converters;

namespace Twilio.Rest.Preview.Wireless
{

    /// <summary>
    /// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
    /// currently do not have developer preview access, please contact help@twilio.com.
    ///
    /// FetchSimOptions
    /// </summary>
    public class FetchSimOptions : IOptions<SimResource>
    {
        /// <summary>
        /// The sid
        /// </summary>
        public string PathSid { get; }

        /// <summary>
        /// Construct a new FetchSimOptions
        /// </summary>
        /// <param name="pathSid"> The sid </param>
        public FetchSimOptions(string pathSid)
        {
            PathSid = pathSid;
        }

        /// <summary>
        /// Generate the necessary parameters
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams()
        {
            // A fetch carries no request parameters - the SID travels in the URL path.
            return new List<KeyValuePair<string, string>>();
        }
    }

    /// <summary>
    /// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
    /// currently do not have developer preview access, please contact help@twilio.com.
/// /// ReadSimOptions /// </summary> public class ReadSimOptions : ReadOptions<SimResource> { /// <summary> /// The status /// </summary> public string Status { get; set; } /// <summary> /// The iccid /// </summary> public string Iccid { get; set; } /// <summary> /// The rate_plan /// </summary> public string RatePlan { get; set; } /// <summary> /// The e_id /// </summary> public string EId { get; set; } /// <summary> /// The sim_registration_code /// </summary> public string SimRegistrationCode { get; set; } /// <summary> /// Generate the necessary parameters /// </summary> public override List<KeyValuePair<string, string>> GetParams() { var p = new List<KeyValuePair<string, string>>(); if (Status != null) { p.Add(new KeyValuePair<string, string>("Status", Status)); } if (Iccid != null) { p.Add(new KeyValuePair<string, string>("Iccid", Iccid)); } if (RatePlan != null) { p.Add(new KeyValuePair<string, string>("RatePlan", RatePlan)); } if (EId != null) { p.Add(new KeyValuePair<string, string>("EId", EId)); } if (SimRegistrationCode != null) { p.Add(new KeyValuePair<string, string>("SimRegistrationCode", SimRegistrationCode)); } if (PageSize != null) { p.Add(new KeyValuePair<string, string>("PageSize", PageSize.ToString())); } return p; } } /// <summary> /// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you /// currently do not have developer preview access, please contact help@twilio.com. 
/// /// UpdateSimOptions /// </summary> public class UpdateSimOptions : IOptions<SimResource> { /// <summary> /// The sid /// </summary> public string PathSid { get; } /// <summary> /// The unique_name /// </summary> public string UniqueName { get; set; } /// <summary> /// The callback_method /// </summary> public string CallbackMethod { get; set; } /// <summary> /// The callback_url /// </summary> public Uri CallbackUrl { get; set; } /// <summary> /// The friendly_name /// </summary> public string FriendlyName { get; set; } /// <summary> /// The rate_plan /// </summary> public string RatePlan { get; set; } /// <summary> /// The status /// </summary> public string Status { get; set; } /// <summary> /// The commands_callback_method /// </summary> public Twilio.Http.HttpMethod CommandsCallbackMethod { get; set; } /// <summary> /// The commands_callback_url /// </summary> public Uri CommandsCallbackUrl { get; set; } /// <summary> /// The sms_fallback_method /// </summary> public Twilio.Http.HttpMethod SmsFallbackMethod { get; set; } /// <summary> /// The sms_fallback_url /// </summary> public Uri SmsFallbackUrl { get; set; } /// <summary> /// The sms_method /// </summary> public Twilio.Http.HttpMethod SmsMethod { get; set; } /// <summary> /// The sms_url /// </summary> public Uri SmsUrl { get; set; } /// <summary> /// The voice_fallback_method /// </summary> public Twilio.Http.HttpMethod VoiceFallbackMethod { get; set; } /// <summary> /// The voice_fallback_url /// </summary> public Uri VoiceFallbackUrl { get; set; } /// <summary> /// The voice_method /// </summary> public Twilio.Http.HttpMethod VoiceMethod { get; set; } /// <summary> /// The voice_url /// </summary> public Uri VoiceUrl { get; set; } /// <summary> /// Construct a new UpdateSimOptions /// </summary> /// <param name="pathSid"> The sid </param> public UpdateSimOptions(string pathSid) { PathSid = pathSid; } /// <summary> /// Generate the necessary parameters /// </summary> public List<KeyValuePair<string, 
string>> GetParams() { var p = new List<KeyValuePair<string, string>>(); if (UniqueName != null) { p.Add(new KeyValuePair<string, string>("UniqueName", UniqueName)); } if (CallbackMethod != null) { p.Add(new KeyValuePair<string, string>("CallbackMethod", CallbackMethod)); } if (CallbackUrl != null) { p.Add(new KeyValuePair<string, string>("CallbackUrl", Serializers.Url(CallbackUrl))); } if (FriendlyName != null) { p.Add(new KeyValuePair<string, string>("FriendlyName", FriendlyName)); } if (RatePlan != null) { p.Add(new KeyValuePair<string, string>("RatePlan", RatePlan.ToString())); } if (Status != null) { p.Add(new KeyValuePair<string, string>("Status", Status)); } if (CommandsCallbackMethod != null) { p.Add(new KeyValuePair<string, string>("CommandsCallbackMethod", CommandsCallbackMethod.ToString())); } if (CommandsCallbackUrl != null) { p.Add(new KeyValuePair<string, string>("CommandsCallbackUrl", Serializers.Url(CommandsCallbackUrl))); } if (SmsFallbackMethod != null) { p.Add(new KeyValuePair<string, string>("SmsFallbackMethod", SmsFallbackMethod.ToString())); } if (SmsFallbackUrl != null) { p.Add(new KeyValuePair<string, string>("SmsFallbackUrl", Serializers.Url(SmsFallbackUrl))); } if (SmsMethod != null) { p.Add(new KeyValuePair<string, string>("SmsMethod", SmsMethod.ToString())); } if (SmsUrl != null) { p.Add(new KeyValuePair<string, string>("SmsUrl", Serializers.Url(SmsUrl))); } if (VoiceFallbackMethod != null) { p.Add(new KeyValuePair<string, string>("VoiceFallbackMethod", VoiceFallbackMethod.ToString())); } if (VoiceFallbackUrl != null) { p.Add(new KeyValuePair<string, string>("VoiceFallbackUrl", Serializers.Url(VoiceFallbackUrl))); } if (VoiceMethod != null) { p.Add(new KeyValuePair<string, string>("VoiceMethod", VoiceMethod.ToString())); } if (VoiceUrl != null) { p.Add(new KeyValuePair<string, string>("VoiceUrl", Serializers.Url(VoiceUrl))); } return p; } } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Linq;
using Xunit;

namespace System.Collections.Immutable.Test
{
    /// <summary>
    /// Tests for <c>ImmutableArrayExtensions</c>, cross-checked against the LINQ
    /// <see cref="Enumerable"/> equivalents where a reference behavior exists.
    /// A default(ImmutableArray) is expected to throw NullReferenceException.
    /// </summary>
    public class ImmutableArrayExtensionsTest
    {
        private static readonly ImmutableArray<int> s_emptyDefault = default(ImmutableArray<int>);
        private static readonly ImmutableArray<int> s_empty = ImmutableArray.Create<int>();
        private static readonly ImmutableArray<int> s_oneElement = ImmutableArray.Create(1);
        private static readonly ImmutableArray<int> s_manyElements = ImmutableArray.Create(1, 2, 3);
        private static readonly ImmutableArray<GenericParameterHelper> s_oneElementRefType = ImmutableArray.Create(new GenericParameterHelper(1));
        private static readonly ImmutableArray<string> s_twoElementRefTypeWithNull = ImmutableArray.Create("1", null);

        private static readonly ImmutableArray<int>.Builder s_emptyBuilder = ImmutableArray.Create<int>().ToBuilder();
        private static readonly ImmutableArray<int>.Builder s_oneElementBuilder = ImmutableArray.Create<int>(1).ToBuilder();
        private static readonly ImmutableArray<int>.Builder s_manyElementsBuilder = ImmutableArray.Create<int>(1, 2, 3).ToBuilder();

        [Fact]
        public void Select()
        {
            Assert.Equal(new[] { 4, 5, 6 }, ImmutableArrayExtensions.Select(s_manyElements, n => n + 3));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Select<int, bool>(s_manyElements, null));
        }

        [Fact]
        public void SelectEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Select<int, bool>(s_emptyDefault, null));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Select(s_emptyDefault, n => true));
        }

        [Fact]
        public void SelectEmpty()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Select<int, bool>(s_empty, null));
            Assert.False(ImmutableArrayExtensions.Select(s_empty, n => true).Any());
        }

        [Fact]
        public void SelectMany()
        {
            Func<int, IEnumerable<int>> collectionSelector = i => Enumerable.Range(i, 10);
            Func<int, int, int> resultSelector = (i, e) => e * 2;
            foreach (var arr in new[] { s_empty, s_oneElement, s_manyElements })
            {
                Assert.Equal(
                    Enumerable.SelectMany(arr, collectionSelector, resultSelector),
                    ImmutableArrayExtensions.SelectMany(arr, collectionSelector, resultSelector));
            }

            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SelectMany<int, int, int>(s_emptyDefault, null, null));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SelectMany<int, int, int>(s_manyElements, null, (i, e) => e));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SelectMany<int, int, int>(s_manyElements, i => new[] { i }, null));
        }

        [Fact]
        public void Where()
        {
            Assert.Equal(new[] { 2, 3 }, ImmutableArrayExtensions.Where(s_manyElements, n => n > 1));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Where(s_manyElements, null));
        }

        [Fact]
        public void WhereEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Where(s_emptyDefault, null));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Where(s_emptyDefault, n => true));
        }

        [Fact]
        public void WhereEmpty()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Where(s_empty, null));
            Assert.False(ImmutableArrayExtensions.Where(s_empty, n => true).Any());
        }

        [Fact]
        public void Any()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Any(s_oneElement, null));
            Assert.True(ImmutableArrayExtensions.Any(s_oneElement));
            Assert.True(ImmutableArrayExtensions.Any(s_manyElements, n => n == 2));
            Assert.False(ImmutableArrayExtensions.Any(s_manyElements, n => n == 4));
            Assert.True(ImmutableArrayExtensions.Any(s_oneElementBuilder));
        }

        [Fact]
        public void AnyEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Any(s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Any(s_emptyDefault, n => true));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Any(s_emptyDefault, null));
        }

        [Fact]
        public void AnyEmpty()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Any(s_empty, null));
            Assert.False(ImmutableArrayExtensions.Any(s_empty));
            Assert.False(ImmutableArrayExtensions.Any(s_empty, n => true));
        }

        [Fact]
        public void All()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.All(s_oneElement, null));
            Assert.False(ImmutableArrayExtensions.All(s_manyElements, n => n == 2));
            Assert.True(ImmutableArrayExtensions.All(s_manyElements, n => n > 0));
        }

        [Fact]
        public void AllEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.All(s_emptyDefault, n => true));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.All(s_emptyDefault, null));
        }

        [Fact]
        public void AllEmpty()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.All(s_empty, null));
            Assert.True(ImmutableArrayExtensions.All(s_empty, n => { throw new ShouldNotBeInvokedException(); })); // predicate should never be invoked.
        }

        [Fact]
        public void SequenceEqual()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(s_oneElement, (IEnumerable<int>)null));

            foreach (IEqualityComparer<int> comparer in new[] { null, EqualityComparer<int>.Default })
            {
                Assert.True(ImmutableArrayExtensions.SequenceEqual(s_manyElements, s_manyElements, comparer));
                Assert.True(ImmutableArrayExtensions.SequenceEqual(s_manyElements, (IEnumerable<int>)s_manyElements.ToArray(), comparer));
                Assert.True(ImmutableArrayExtensions.SequenceEqual(s_manyElements, ImmutableArray.Create(s_manyElements.ToArray()), comparer));

                Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements, s_oneElement, comparer));
                Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements, (IEnumerable<int>)s_oneElement.ToArray(), comparer));
                Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements, ImmutableArray.Create(s_oneElement.ToArray()), comparer));

                Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements, (IEnumerable<int>)s_manyElements.Add(1).ToArray(), comparer));
                Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements.Add(1), s_manyElements.Add(2).ToArray(), comparer));
                Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements.Add(1), (IEnumerable<int>)s_manyElements.Add(2).ToArray(), comparer));
            }

            Assert.True(ImmutableArrayExtensions.SequenceEqual(s_manyElements, s_manyElements, (a, b) => true));

            Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements, s_oneElement, (a, b) => a == b));
            Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements.Add(1), s_manyElements.Add(2), (a, b) => a == b));
            Assert.True(ImmutableArrayExtensions.SequenceEqual(s_manyElements.Add(1), s_manyElements.Add(1), (a, b) => a == b));

            Assert.False(ImmutableArrayExtensions.SequenceEqual(s_manyElements, ImmutableArray.Create(s_manyElements.ToArray()), (a, b) => false));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(s_oneElement, s_oneElement, (Func<int, int, bool>)null));
        }

        [Fact]
        public void SequenceEqualEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SequenceEqual(s_oneElement, s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SequenceEqual(s_emptyDefault, s_empty));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SequenceEqual(s_emptyDefault, s_emptyDefault));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(s_emptyDefault, s_emptyDefault, (Func<int, int, bool>)null));
        }

        [Fact]
        public void SequenceEqualEmpty()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(s_empty, (IEnumerable<int>)null));
            Assert.True(ImmutableArrayExtensions.SequenceEqual(s_empty, s_empty));
            Assert.True(ImmutableArrayExtensions.SequenceEqual(s_empty, s_empty.ToArray()));
            Assert.True(ImmutableArrayExtensions.SequenceEqual(s_empty, s_empty, (a, b) => true));
            Assert.True(ImmutableArrayExtensions.SequenceEqual(s_empty, s_empty, (a, b) => false));
        }

        [Fact]
        public void Aggregate()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate(s_oneElement, null));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate(s_oneElement, 1, null));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate<int, int, int>(s_oneElement, 1, null, null));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate<int, int, int>(s_oneElement, 1, (a, b) => a + b, null));

            Assert.Equal(
                Enumerable.Aggregate(s_manyElements, (a, b) => a * b),
                ImmutableArrayExtensions.Aggregate(s_manyElements, (a, b) => a * b));
            Assert.Equal(
                Enumerable.Aggregate(s_manyElements, 5, (a, b) => a * b),
                ImmutableArrayExtensions.Aggregate(s_manyElements, 5, (a, b) => a * b));
            Assert.Equal(
                Enumerable.Aggregate(s_manyElements, 5, (a, b) => a * b, a => -a),
                ImmutableArrayExtensions.Aggregate(s_manyElements, 5, (a, b) => a * b, a => -a));
        }

        [Fact]
        public void AggregateEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Aggregate(s_emptyDefault, (a, b) => a + b));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Aggregate(s_emptyDefault, 1, (a, b) => a + b));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Aggregate<int, int, int>(s_emptyDefault, 1, (a, b) => a + b, a => a));
        }

        [Fact]
        public void AggregateEmpty()
        {
            Assert.Equal(0, ImmutableArrayExtensions.Aggregate(s_empty, (a, b) => a + b));
            Assert.Equal(1, ImmutableArrayExtensions.Aggregate(s_empty, 1, (a, b) => a + b));
            Assert.Equal(1, ImmutableArrayExtensions.Aggregate<int, int, int>(s_empty, 1, (a, b) => a + b, a => a));
        }

        [Fact]
        public void ElementAt()
        {
            // Basis for some assertions that follow
            Assert.Throws<IndexOutOfRangeException>(() => Enumerable.ElementAt(s_empty, 0));
            Assert.Throws<IndexOutOfRangeException>(() => Enumerable.ElementAt(s_manyElements, -1));

            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ElementAt(s_emptyDefault, 0));
            Assert.Throws<IndexOutOfRangeException>(() => ImmutableArrayExtensions.ElementAt(s_empty, 0));
            Assert.Throws<IndexOutOfRangeException>(() => ImmutableArrayExtensions.ElementAt(s_manyElements, -1));
            Assert.Equal(1, ImmutableArrayExtensions.ElementAt(s_oneElement, 0));
            Assert.Equal(3, ImmutableArrayExtensions.ElementAt(s_manyElements, 2));
        }

        [Fact]
        public void ElementAtOrDefault()
        {
            Assert.Equal(Enumerable.ElementAtOrDefault(s_manyElements, -1), ImmutableArrayExtensions.ElementAtOrDefault(s_manyElements, -1));
            Assert.Equal(Enumerable.ElementAtOrDefault(s_manyElements, 3), ImmutableArrayExtensions.ElementAtOrDefault(s_manyElements, 3));

            Assert.Throws<InvalidOperationException>(() => Enumerable.ElementAtOrDefault(s_emptyDefault, 0));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ElementAtOrDefault(s_emptyDefault, 0));

            Assert.Equal(0, ImmutableArrayExtensions.ElementAtOrDefault(s_empty, 0));
            Assert.Equal(0, ImmutableArrayExtensions.ElementAtOrDefault(s_empty, 1));
            Assert.Equal(1, ImmutableArrayExtensions.ElementAtOrDefault(s_oneElement, 0));
            Assert.Equal(3, ImmutableArrayExtensions.ElementAtOrDefault(s_manyElements, 2));
        }

        [Fact]
        public void First()
        {
            Assert.Equal(Enumerable.First(s_oneElement), ImmutableArrayExtensions.First(s_oneElement));
            Assert.Equal(Enumerable.First(s_oneElement, i => true), ImmutableArrayExtensions.First(s_oneElement, i => true));
            Assert.Equal(Enumerable.First(s_manyElements), ImmutableArrayExtensions.First(s_manyElements));
            Assert.Equal(Enumerable.First(s_manyElements, i => true), ImmutableArrayExtensions.First(s_manyElements, i => true));
            Assert.Equal(Enumerable.First(s_oneElementBuilder), ImmutableArrayExtensions.First(s_oneElementBuilder));
            Assert.Equal(Enumerable.First(s_manyElementsBuilder), ImmutableArrayExtensions.First(s_manyElementsBuilder));

            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(s_empty));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(s_empty, i => true));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(s_manyElements, i => false));
        }

        [Fact]
        public void FirstEmpty()
        {
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(s_empty));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(s_empty, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.First(s_empty, null));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(s_emptyBuilder));
        }

        [Fact]
        public void FirstEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.First(s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.First(s_emptyDefault, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.First(s_emptyDefault, null));
        }

        [Fact]
        public void FirstOrDefault()
        {
            Assert.Equal(Enumerable.FirstOrDefault(s_oneElement), ImmutableArrayExtensions.FirstOrDefault(s_oneElement));
            Assert.Equal(Enumerable.FirstOrDefault(s_manyElements), ImmutableArrayExtensions.FirstOrDefault(s_manyElements));

            foreach (bool result in new[] { true, false })
            {
                Assert.Equal(
                    Enumerable.FirstOrDefault(s_oneElement, i => result),
                    ImmutableArrayExtensions.FirstOrDefault(s_oneElement, i => result));
                Assert.Equal(
                    Enumerable.FirstOrDefault(s_manyElements, i => result),
                    ImmutableArrayExtensions.FirstOrDefault(s_manyElements, i => result));
            }

            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.FirstOrDefault(s_oneElement, null));
            Assert.Equal(Enumerable.FirstOrDefault(s_oneElementBuilder), ImmutableArrayExtensions.FirstOrDefault(s_oneElementBuilder));
            Assert.Equal(Enumerable.FirstOrDefault(s_manyElementsBuilder), ImmutableArrayExtensions.FirstOrDefault(s_manyElementsBuilder));
        }

        [Fact]
        public void FirstOrDefaultEmpty()
        {
            Assert.Equal(0, ImmutableArrayExtensions.FirstOrDefault(s_empty));
            Assert.Equal(0, ImmutableArrayExtensions.FirstOrDefault(s_empty, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.FirstOrDefault(s_empty, null));
            Assert.Equal(0, ImmutableArrayExtensions.FirstOrDefault(s_emptyBuilder));
        }

        [Fact]
        public void FirstOrDefaultEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.FirstOrDefault(s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.FirstOrDefault(s_emptyDefault, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.FirstOrDefault(s_emptyDefault, null));
        }

        [Fact]
        public void Last()
        {
            Assert.Equal(Enumerable.Last(s_oneElement), ImmutableArrayExtensions.Last(s_oneElement));
            Assert.Equal(Enumerable.Last(s_oneElement, i => true), ImmutableArrayExtensions.Last(s_oneElement, i => true));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(s_oneElement, i => false));

            Assert.Equal(Enumerable.Last(s_manyElements), ImmutableArrayExtensions.Last(s_manyElements));
            Assert.Equal(Enumerable.Last(s_manyElements, i => true), ImmutableArrayExtensions.Last(s_manyElements, i => true));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(s_manyElements, i => false));

            Assert.Equal(Enumerable.Last(s_oneElementBuilder), ImmutableArrayExtensions.Last(s_oneElementBuilder));
            Assert.Equal(Enumerable.Last(s_manyElementsBuilder), ImmutableArrayExtensions.Last(s_manyElementsBuilder));
        }

        [Fact]
        public void LastEmpty()
        {
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(s_empty));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(s_empty, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Last(s_empty, null));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(s_emptyBuilder));
        }

        [Fact]
        public void LastEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Last(s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Last(s_emptyDefault, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Last(s_emptyDefault, null));
        }

        [Fact]
        public void LastOrDefault()
        {
            Assert.Equal(Enumerable.LastOrDefault(s_oneElement), ImmutableArrayExtensions.LastOrDefault(s_oneElement));
            Assert.Equal(Enumerable.LastOrDefault(s_manyElements), ImmutableArrayExtensions.LastOrDefault(s_manyElements));

            foreach (bool result in new[] { true, false })
            {
                Assert.Equal(
                    Enumerable.LastOrDefault(s_oneElement, i => result),
                    ImmutableArrayExtensions.LastOrDefault(s_oneElement, i => result));
                Assert.Equal(
                    Enumerable.LastOrDefault(s_manyElements, i => result),
                    ImmutableArrayExtensions.LastOrDefault(s_manyElements, i => result));
            }

            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.LastOrDefault(s_oneElement, null));
            Assert.Equal(Enumerable.LastOrDefault(s_oneElementBuilder), ImmutableArrayExtensions.LastOrDefault(s_oneElementBuilder));
            Assert.Equal(Enumerable.LastOrDefault(s_manyElementsBuilder), ImmutableArrayExtensions.LastOrDefault(s_manyElementsBuilder));
        }

        [Fact]
        public void LastOrDefaultEmpty()
        {
            Assert.Equal(0, ImmutableArrayExtensions.LastOrDefault(s_empty));
            Assert.Equal(0, ImmutableArrayExtensions.LastOrDefault(s_empty, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.LastOrDefault(s_empty, null));
            Assert.Equal(0, ImmutableArrayExtensions.LastOrDefault(s_emptyBuilder));
        }

        [Fact]
        public void LastOrDefaultEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.LastOrDefault(s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.LastOrDefault(s_emptyDefault, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.LastOrDefault(s_emptyDefault, null));
        }

        [Fact]
        public void Single()
        {
            Assert.Equal(Enumerable.Single(s_oneElement), ImmutableArrayExtensions.Single(s_oneElement));
            Assert.Equal(Enumerable.Single(s_oneElement), ImmutableArrayExtensions.Single(s_oneElement, i => true));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(s_manyElements));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(s_manyElements, i => true));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(s_manyElements, i => false));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(s_oneElement, i => false));
        }

        [Fact]
        public void SingleEmpty()
        {
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(s_empty));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(s_empty, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Single(s_empty, null));
        }

        [Fact]
        public void SingleEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Single(s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Single(s_emptyDefault, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Single(s_emptyDefault, null));
        }

        [Fact]
        public void SingleOrDefault()
        {
            Assert.Equal(Enumerable.SingleOrDefault(s_oneElement), ImmutableArrayExtensions.SingleOrDefault(s_oneElement));
            Assert.Equal(Enumerable.SingleOrDefault(s_oneElement), ImmutableArrayExtensions.SingleOrDefault(s_oneElement, i => true));
            Assert.Equal(Enumerable.SingleOrDefault(s_oneElement, i => false), ImmutableArrayExtensions.SingleOrDefault(s_oneElement, i => false));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.SingleOrDefault(s_manyElements));
            Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.SingleOrDefault(s_manyElements, i => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SingleOrDefault(s_oneElement, null));
        }

        [Fact]
        public void SingleOrDefaultEmpty()
        {
            Assert.Equal(0, ImmutableArrayExtensions.SingleOrDefault(s_empty));
            Assert.Equal(0, ImmutableArrayExtensions.SingleOrDefault(s_empty, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SingleOrDefault(s_empty, null));
        }

        [Fact]
        public void SingleOrDefaultEmptyDefault()
        {
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SingleOrDefault(s_emptyDefault));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SingleOrDefault(s_emptyDefault, n => true));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SingleOrDefault(s_emptyDefault, null));
        }

        [Fact]
        public void ToDictionary()
        {
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(s_manyElements, (Func<int, int>)null));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(s_manyElements, (Func<int, int>)null, n => n));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(s_manyElements, (Func<int, int>)null, n => n, EqualityComparer<int>.Default));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(s_manyElements, n => n, (Func<int, string>)null));
            Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(s_manyElements, n => n, (Func<int, string>)null, EqualityComparer<int>.Default));

            var stringToString = ImmutableArrayExtensions.ToDictionary(s_manyElements, n => n.ToString(), n => (n * 2).ToString());
            // xUnit's Assert.Equal takes (expected, actual); the source array length is the expected count.
            Assert.Equal(s_manyElements.Length, stringToString.Count);
            Assert.Equal("2", stringToString["1"]);
            Assert.Equal("4", stringToString["2"]);
            Assert.Equal("6", stringToString["3"]);

            var stringToInt = ImmutableArrayExtensions.ToDictionary(s_manyElements, n => n.ToString());
            // BUG FIX: this previously re-checked stringToString.Count (copy-paste),
            // leaving stringToInt's count unverified.
            Assert.Equal(s_manyElements.Length, stringToInt.Count);
            Assert.Equal(1, stringToInt["1"]);
            Assert.Equal(2, stringToInt["2"]);
            Assert.Equal(3, stringToInt["3"]);

            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(s_emptyDefault, n => n));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(s_emptyDefault, n => n, n => n));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(s_emptyDefault, n => n, EqualityComparer<int>.Default));
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(s_emptyDefault, n => n, n => n, EqualityComparer<int>.Default));
        }

        [Fact]
        public void ToArray()
        {
            Assert.Equal(0, ImmutableArrayExtensions.ToArray(s_empty).Length);
            Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToArray(s_emptyDefault));
            Assert.Equal(s_manyElements.ToArray(), ImmutableArrayExtensions.ToArray(s_manyElements));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.Serialization;

namespace System.Data
{
    /// <summary>
    /// Base exception for System.Data-specific errors. Derived classes each set a
    /// distinct HResult from the HResults table.
    /// </summary>
    public class DataException : SystemException
    {
        // Serialization constructor; required for classic .NET serialization support.
        protected DataException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public DataException() : base(SR.DataSet_DefaultDataException)
        {
            HResult = HResults.Data;
        }

        public DataException(string s) : base(s)
        {
            HResult = HResults.Data;
        }

        // NOTE(review): unlike the sibling constructors, this one does not set HResult —
        // presumably intentional for compatibility with the desktop framework; confirm
        // before "fixing".
        public DataException(string s, Exception innerException) : base(s, innerException)
        {
        }
    };

    /// <summary>Thrown when a constraint is violated.</summary>
    public class ConstraintException : DataException
    {
        // Serialization constructor.
        protected ConstraintException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public ConstraintException() : base(SR.DataSet_DefaultConstraintException)
        {
            HResult = HResults.DataConstraint;
        }

        public ConstraintException(string s) : base(s)
        {
            HResult = HResults.DataConstraint;
        }

        public ConstraintException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataConstraint;
        }
    }

    /// <summary>Thrown when a deleted DataRow's data is accessed.</summary>
    public class DeletedRowInaccessibleException : DataException
    {
        // Serialization constructor.
        protected DeletedRowInaccessibleException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref='System.Data.DeletedRowInaccessibleException'/> class.
        /// </summary>
        public DeletedRowInaccessibleException() : base(SR.DataSet_DefaultDeletedRowInaccessibleException)
        {
            HResult = HResults.DataDeletedRowInaccessible;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref='System.Data.DeletedRowInaccessibleException'/> class with the specified string.
        /// </summary>
        public DeletedRowInaccessibleException(string s) : base(s)
        {
            HResult = HResults.DataDeletedRowInaccessible;
        }

        public DeletedRowInaccessibleException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataDeletedRowInaccessible;
        }
    }

    /// <summary>Thrown when a duplicate name is added to a collection that requires unique names.</summary>
    public class DuplicateNameException : DataException
    {
        // Serialization constructor.
        protected DuplicateNameException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public DuplicateNameException() : base(SR.DataSet_DefaultDuplicateNameException)
        {
            HResult = HResults.DataDuplicateName;
        }

        public DuplicateNameException(string s) : base(s)
        {
            HResult = HResults.DataDuplicateName;
        }

        public DuplicateNameException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataDuplicateName;
        }
    }

    /// <summary>Thrown when an illegal operation is attempted inside a RowChanging event.</summary>
    public class InRowChangingEventException : DataException
    {
        // Serialization constructor.
        protected InRowChangingEventException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public InRowChangingEventException() : base(SR.DataSet_DefaultInRowChangingEventException)
        {
            HResult = HResults.DataInRowChangingEvent;
        }

        public InRowChangingEventException(string s) : base(s)
        {
            HResult = HResults.DataInRowChangingEvent;
        }

        public InRowChangingEventException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataInRowChangingEvent;
        }
    }

    /// <summary>Thrown when a relation or constraint is invalid.</summary>
    public class InvalidConstraintException : DataException
    {
        // Serialization constructor.
        protected InvalidConstraintException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public InvalidConstraintException() : base(SR.DataSet_DefaultInvalidConstraintException)
        {
            HResult = HResults.DataInvalidConstraint;
        }

        public InvalidConstraintException(string s) : base(s)
        {
            HResult = HResults.DataInvalidConstraint;
        }

        public InvalidConstraintException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataInvalidConstraint;
        }
    }

    /// <summary>Thrown when an operation requires a primary key that the table does not have.</summary>
    public class MissingPrimaryKeyException : DataException
    {
        // Serialization constructor.
        protected MissingPrimaryKeyException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public MissingPrimaryKeyException() : base(SR.DataSet_DefaultMissingPrimaryKeyException)
        {
            HResult = HResults.DataMissingPrimaryKey;
        }

        public MissingPrimaryKeyException(string s) : base(s)
        {
            HResult = HResults.DataMissingPrimaryKey;
        }

        public MissingPrimaryKeyException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataMissingPrimaryKey;
        }
    }

    /// <summary>Thrown when a null value is inserted into a column where AllowDBNull is false.</summary>
    public class NoNullAllowedException : DataException
    {
        // Serialization constructor.
        protected NoNullAllowedException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public NoNullAllowedException() : base(SR.DataSet_DefaultNoNullAllowedException)
        {
            HResult = HResults.DataNoNullAllowed;
        }

        public NoNullAllowedException(string s) : base(s)
        {
            HResult = HResults.DataNoNullAllowed;
        }

        public NoNullAllowedException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataNoNullAllowed;
        }
    }

    /// <summary>Thrown when an attempt is made to change a read-only column value.</summary>
    public class ReadOnlyException : DataException
    {
        // Serialization constructor.
        protected ReadOnlyException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public ReadOnlyException() : base(SR.DataSet_DefaultReadOnlyException)
        {
            HResult = HResults.DataReadOnly;
        }

        public ReadOnlyException(string s) : base(s)
        {
            HResult = HResults.DataReadOnly;
        }

        public ReadOnlyException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataReadOnly;
        }
    }

    /// <summary>Thrown when an operation is performed on a DataRow that is not in a table.</summary>
    public class RowNotInTableException : DataException
    {
        // Serialization constructor.
        protected RowNotInTableException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public RowNotInTableException() : base(SR.DataSet_DefaultRowNotInTableException)
        {
            HResult = HResults.DataRowNotInTable;
        }

        public RowNotInTableException(string s) : base(s)
        {
            HResult = HResults.DataRowNotInTable;
        }

        public RowNotInTableException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataRowNotInTable;
        }
    }

    /// <summary>Thrown when a requested DataRow version does not exist.</summary>
    public class VersionNotFoundException : DataException
    {
        // Serialization constructor.
        protected VersionNotFoundException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        public VersionNotFoundException() : base(SR.DataSet_DefaultVersionNotFoundException)
        {
            HResult = HResults.DataVersionNotFound;
        }

        public VersionNotFoundException(string s) : base(s)
        {
            HResult = HResults.DataVersionNotFound;
        }

        public VersionNotFoundException(string message, Exception innerException) : base(message, innerException)
        {
            HResult = HResults.DataVersionNotFound;
        }
    }

    internal static class ExceptionBuilder
    {
        // The class defines the exceptions that are specific to the DataSet.
        // The class contains functions that take the proper informational variables and then construct
        // the appropriate exception with an error string obtained from the resource Data.txt.
        // The exception is then returned to the caller, so that the caller may then throw from its
        // location so that the catcher of the exception will have the appropriate call stack.
        // This class is used so that there will be compile time checking of error messages.
        // The resource Data.txt will ensure proper string text based on the appropriate locale.
        // this method accepts BID format as an argument, this attribute allows FXCopBid rule to validate calls to it
        // Logs the exception to the DataCommonEventSource trace channel before it is returned/thrown.
        private static void TraceException(string trace, Exception e)
        {
            Debug.Assert(null != e, "TraceException: null Exception");
            // Defensive re-check so a null exception in release builds is silently skipped
            // rather than causing a NullReferenceException inside the tracer.
            if (e != null)
            {
                DataCommonEventSource.Log.Trace(trace, e);
            }
        }

        // Trace an exception that is about to be thrown by the caller, then return it.
        internal static Exception TraceExceptionAsReturnValue(Exception e)
        {
            TraceException("<comm.ADP.TraceException|ERR|THROW> '{0}'", e);
            return e;
        }

        // Trace an exception that was caught and will be captured (not rethrown immediately).
        internal static Exception TraceExceptionForCapture(Exception e)
        {
            TraceException("<comm.ADP.TraceException|ERR|CATCH> '{0}'", e);
            return e;
        }

        // Trace an exception that was caught and will be swallowed.
        internal static Exception TraceExceptionWithoutRethrow(Exception e)
        {
            TraceException("<comm.ADP.TraceException|ERR|CATCH> '{0}'", e);
            return e;
        }

        // Factory helpers: each builds a BCL exception, traces it, and returns it for the caller to throw.
        internal static Exception _Argument(string error) => TraceExceptionAsReturnValue(new ArgumentException(error));
        // NOTE(review): paramName is accepted but not forwarded to the ArgumentException —
        // looks intentional (desktop-compat); confirm before changing.
        internal static Exception _Argument(string paramName, string error) => TraceExceptionAsReturnValue(new ArgumentException(error));
        internal static Exception _Argument(string error, Exception innerException) => TraceExceptionAsReturnValue(new ArgumentException(error, innerException));
        private static Exception _ArgumentNull(string paramName, string msg) => TraceExceptionAsReturnValue(new ArgumentNullException(paramName, msg));
        internal static Exception _ArgumentOutOfRange(string paramName, string msg) => TraceExceptionAsReturnValue(new ArgumentOutOfRangeException(paramName, msg));
        private static Exception _IndexOutOfRange(string error) => TraceExceptionAsReturnValue(new IndexOutOfRangeException(error));
        private static Exception _InvalidOperation(string error) => TraceExceptionAsReturnValue(new InvalidOperationException(error));
        private static Exception _InvalidEnumArgumentException(string error) => TraceExceptionAsReturnValue(new InvalidEnumArgumentException(error));
        // Generic convenience overload: formats the standard "invalid enumeration value" message
        // using the enum type's name and the offending value.
        private static Exception _InvalidEnumArgumentException<T>(T value) => _InvalidEnumArgumentException(SR.Format(SR.ADP_InvalidEnumerationValue, typeof(T).Name, value.ToString()));
//
// System.Data exceptions
//

// Builds, traces, and immediately throws a DataException wrapping innerException.
private static void ThrowDataException(string error, Exception innerException)
{
    throw TraceExceptionAsReturnValue(new DataException(error, innerException));
}

// Traced factory helpers for the System.Data exception hierarchy.
private static Exception _Data(string error) => TraceExceptionAsReturnValue(new DataException(error));
private static Exception _Constraint(string error) => TraceExceptionAsReturnValue(new ConstraintException(error));
private static Exception _InvalidConstraint(string error) => TraceExceptionAsReturnValue(new InvalidConstraintException(error));
private static Exception _DeletedRowInaccessible(string error) => TraceExceptionAsReturnValue(new DeletedRowInaccessibleException(error));
private static Exception _DuplicateName(string error) => TraceExceptionAsReturnValue(new DuplicateNameException(error));
private static Exception _InRowChangingEvent(string error) => TraceExceptionAsReturnValue(new InRowChangingEventException(error));
private static Exception _MissingPrimaryKey(string error) => TraceExceptionAsReturnValue(new MissingPrimaryKeyException(error));
private static Exception _NoNullAllowed(string error) => TraceExceptionAsReturnValue(new NoNullAllowedException(error));
private static Exception _ReadOnly(string error) => TraceExceptionAsReturnValue(new ReadOnlyException(error));
private static Exception _RowNotInTable(string error) => TraceExceptionAsReturnValue(new RowNotInTableException(error));
private static Exception _VersionNotFound(string error) => TraceExceptionAsReturnValue(new VersionNotFoundException(error));

public static Exception ArgumentNull(string paramName) => _ArgumentNull(paramName, SR.Format(SR.Data_ArgumentNull, paramName));
public static Exception ArgumentOutOfRange(string paramName) => _ArgumentOutOfRange(paramName, SR.Format(SR.Data_ArgumentOutOfRange, paramName));
public static Exception BadObjectPropertyAccess(string error) => _InvalidOperation(SR.Format(SR.DataConstraint_BadObjectPropertyAccess, error));
public static Exception ArgumentContainsNull(string paramName) => _Argument(paramName, SR.Format(SR.Data_ArgumentContainsNull, paramName));

//
// Collections
//
public static Exception CannotModifyCollection() => _Argument(SR.Data_CannotModifyCollection);
public static Exception CaseInsensitiveNameConflict(string name) => _Argument(SR.Format(SR.Data_CaseInsensitiveNameConflict, name));
public static Exception NamespaceNameConflict(string name) => _Argument(SR.Format(SR.Data_NamespaceNameConflict, name));
public static Exception InvalidOffsetLength() => _Argument(SR.Data_InvalidOffsetLength);

//
// DataColumnCollection
//
public static Exception ColumnNotInTheTable(string column, string table) => _Argument(SR.Format(SR.DataColumn_NotInTheTable, column, table));
public static Exception ColumnNotInAnyTable() => _Argument(SR.DataColumn_NotInAnyTable);
public static Exception ColumnOutOfRange(int index) => _IndexOutOfRange(SR.Format(SR.DataColumns_OutOfRange, (index).ToString(CultureInfo.InvariantCulture)));
public static Exception ColumnOutOfRange(string column) => _IndexOutOfRange(SR.Format(SR.DataColumns_OutOfRange, column));
public static Exception CannotAddColumn1(string column) => _Argument(SR.Format(SR.DataColumns_Add1, column));
public static Exception CannotAddColumn2(string column) => _Argument(SR.Format(SR.DataColumns_Add2, column));
public static Exception CannotAddColumn3() => _Argument(SR.DataColumns_Add3);
public static Exception CannotAddColumn4(string column) => _Argument(SR.Format(SR.DataColumns_Add4, column));
public static Exception CannotAddDuplicate(string column) => _DuplicateName(SR.Format(SR.DataColumns_AddDuplicate, column));
public static Exception CannotAddDuplicate2(string table) => _DuplicateName(SR.Format(SR.DataColumns_AddDuplicate2, table));
public static Exception CannotAddDuplicate3(string table) => _DuplicateName(SR.Format(SR.DataColumns_AddDuplicate3, table));
public static Exception CannotRemoveColumn() => _Argument(SR.DataColumns_Remove);
public static Exception CannotRemovePrimaryKey() => _Argument(SR.DataColumns_RemovePrimaryKey);
public static Exception CannotRemoveChildKey(string relation) => _Argument(SR.Format(SR.DataColumns_RemoveChildKey, relation));
public static Exception CannotRemoveConstraint(string constraint, string table) => _Argument(SR.Format(SR.DataColumns_RemoveConstraint, constraint, table));
public static Exception CannotRemoveExpression(string column, string expression) => _Argument(SR.Format(SR.DataColumns_RemoveExpression, column, expression));
public static Exception ColumnNotInTheUnderlyingTable(string column, string table) => _Argument(SR.Format(SR.DataColumn_NotInTheUnderlyingTable, column, table));
public static Exception InvalidOrdinal(string name, int ordinal) => _ArgumentOutOfRange(name, SR.Format(SR.DataColumn_OrdinalExceedMaximun, (ordinal).ToString(CultureInfo.InvariantCulture)));

//
// _Constraint and ConstrainsCollection
//
public static Exception AddPrimaryKeyConstraint() => _Argument(SR.DataConstraint_AddPrimaryKeyConstraint);
public static Exception NoConstraintName() => _Argument(SR.DataConstraint_NoName);
public static Exception ConstraintViolation(string constraint) => _Constraint(SR.Format(SR.DataConstraint_Violation, constraint));
public static Exception ConstraintNotInTheTable(string constraint) => _Argument(SR.Format(SR.DataConstraint_NotInTheTable, constraint));

// Formats key values as "v1, v2, v3" for constraint-violation messages.
// PERF: replaced the original O(n^2) string `+=` loop with a single string.Join;
// output is identical (Convert.ToString(obj, null) still maps null -> "").
public static string KeysToString(object[] keys)
{
    string[] parts = new string[keys.Length];
    for (int i = 0; i < keys.Length; i++)
    {
        parts[i] = Convert.ToString(keys[i], null);
    }
    return string.Join(", ", parts);
}

// Builds the unique-constraint-violation message text for one or more key columns.
public static string UniqueConstraintViolationText(DataColumn[] columns, object[] values)
{
    if (columns.Length > 1)
    {
        // PERF: join the column names instead of concatenating in a loop (same text as before).
        string[] names = new string[columns.Length];
        for (int i = 0; i < columns.Length; i++)
        {
            names[i] = columns[i].ColumnName;
        }
        return SR.Format(SR.DataConstraint_ViolationValue, string.Join(", ", names), KeysToString(values));
    }
    else
    {
        return SR.Format(SR.DataConstraint_ViolationValue, columns[0].ColumnName, Convert.ToString(values[0], null));
    }
}

public static Exception ConstraintViolation(DataColumn[] columns, object[] values) => _Constraint(UniqueConstraintViolationText(columns, values));
public static Exception ConstraintOutOfRange(int index) => _IndexOutOfRange(SR.Format(SR.DataConstraint_OutOfRange, (index).ToString(CultureInfo.InvariantCulture)));
public static Exception DuplicateConstraint(string constraint) => _Data(SR.Format(SR.DataConstraint_Duplicate, constraint));
public static Exception DuplicateConstraintName(string constraint) => _DuplicateName(SR.Format(SR.DataConstraint_DuplicateName, constraint));
public static Exception NeededForForeignKeyConstraint(UniqueConstraint key, ForeignKeyConstraint fk) => _Argument(SR.Format(SR.DataConstraint_NeededForForeignKeyConstraint, key.ConstraintName, fk.ConstraintName));
public static Exception UniqueConstraintViolation() => _Argument(SR.DataConstraint_UniqueViolation);
public static Exception ConstraintForeignTable() => _Argument(SR.DataConstraint_ForeignTable);
public static Exception ConstraintParentValues() => _Argument(SR.DataConstraint_ParentValues);
public static Exception ConstraintAddFailed(DataTable table) => _InvalidConstraint(SR.Format(SR.DataConstraint_AddFailed, table.TableName));
public static Exception ConstraintRemoveFailed() => _Argument(SR.DataConstraint_RemoveFailed);
public static Exception FailedCascadeDelete(string constraint) => _InvalidConstraint(SR.Format(SR.DataConstraint_CascadeDelete, constraint));
public static Exception FailedCascadeUpdate(string constraint) => _InvalidConstraint(SR.Format(SR.DataConstraint_CascadeUpdate, constraint));
public static Exception FailedClearParentTable(string table, string constraint, string childTable) => _InvalidConstraint(SR.Format(SR.DataConstraint_ClearParentTable, table, constraint, childTable));
public static Exception ForeignKeyViolation(string constraint, object[] keys) => _InvalidConstraint(SR.Format(SR.DataConstraint_ForeignKeyViolation, constraint, KeysToString(keys)));
public static Exception RemoveParentRow(ForeignKeyConstraint constraint) => _InvalidConstraint(SR.Format(SR.DataConstraint_RemoveParentRow, constraint.ConstraintName));
public static string MaxLengthViolationText(string columnName) => SR.Format(SR.DataColumn_ExceedMaxLength, columnName);
public static string NotAllowDBNullViolationText(string columnName) => SR.Format(SR.DataColumn_NotAllowDBNull, columnName);
public static Exception CantAddConstraintToMultipleNestedTable(string tableName) => _Argument(SR.Format(SR.DataConstraint_CantAddConstraintToMultipleNestedTable, tableName));

//
// DataColumn Set Properties conflicts
//
public static Exception AutoIncrementAndExpression() => _Argument(SR.DataColumn_AutoIncrementAndExpression);
public static Exception AutoIncrementAndDefaultValue() => _Argument(SR.DataColumn_AutoIncrementAndDefaultValue);
public static Exception AutoIncrementSeed() => _Argument(SR.DataColumn_AutoIncrementSeed);
public static Exception CantChangeDataType() => _Argument(SR.DataColumn_ChangeDataType);
public static Exception NullDataType() => _Argument(SR.DataColumn_NullDataType);
public static Exception ColumnNameRequired() => _Argument(SR.DataColumn_NameRequired);
public static Exception DefaultValueAndAutoIncrement() => _Argument(SR.DataColumn_DefaultValueAndAutoIncrement);
// Uses the 1-arg resource when the column has no name yet.
public static Exception DefaultValueDataType(string column, Type defaultType, Type columnType, Exception inner) =>
    column.Length == 0 ?
        _Argument(SR.Format(SR.DataColumn_DefaultValueDataType1, defaultType.FullName, columnType.FullName), inner) :
        _Argument(SR.Format(SR.DataColumn_DefaultValueDataType, column, defaultType.FullName, columnType.FullName), inner);
public static Exception DefaultValueColumnDataType(string column, Type defaultType, Type columnType, Exception inner) => _Argument(SR.Format(SR.DataColumn_DefaultValueColumnDataType, column, defaultType.FullName, columnType.FullName), inner);
public static Exception ExpressionAndUnique() => _Argument(SR.DataColumn_ExpressionAndUnique);
public static Exception ExpressionAndReadOnly() => _Argument(SR.DataColumn_ExpressionAndReadOnly);
public static Exception ExpressionAndConstraint(DataColumn column, Constraint constraint) => _Argument(SR.Format(SR.DataColumn_ExpressionAndConstraint, column.ColumnName, constraint.ConstraintName));
public static Exception ExpressionInConstraint(DataColumn column) => _Argument(SR.Format(SR.DataColumn_ExpressionInConstraint, column.ColumnName));
public static Exception ExpressionCircular() => _Argument(SR.DataColumn_ExpressionCircular);
public static Exception NonUniqueValues(string column) => _InvalidConstraint(SR.Format(SR.DataColumn_NonUniqueValues, column));
public static Exception NullKeyValues(string column) => _Data(SR.Format(SR.DataColumn_NullKeyValues, column));
public static Exception NullValues(string column) => _NoNullAllowed(SR.Format(SR.DataColumn_NullValues, column));
public static Exception ReadOnlyAndExpression() => _ReadOnly(SR.DataColumn_ReadOnlyAndExpression);
public static Exception ReadOnly(string column) => _ReadOnly(SR.Format(SR.DataColumn_ReadOnly, column));
public static Exception UniqueAndExpression() => _Argument(SR.DataColumn_UniqueAndExpression);
public static Exception SetFailed(object value, DataColumn column, Type type, Exception innerException) => _Argument(innerException.Message + SR.Format(SR.DataColumn_SetFailed, value.ToString(), column.ColumnName, type.Name), innerException);
// DataColumn value/property setter failures (continued).
public static Exception CannotSetToNull(DataColumn column) => _Argument(SR.Format(SR.DataColumn_CannotSetToNull, column.ColumnName));
public static Exception LongerThanMaxLength(DataColumn column) => _Argument(SR.Format(SR.DataColumn_LongerThanMaxLength, column.ColumnName));
public static Exception CannotSetMaxLength(DataColumn column, int value) => _Argument(SR.Format(SR.DataColumn_CannotSetMaxLength, column.ColumnName, value.ToString(CultureInfo.InvariantCulture)));
public static Exception CannotSetMaxLength2(DataColumn column) => _Argument(SR.Format(SR.DataColumn_CannotSetMaxLength2, column.ColumnName));
public static Exception CannotSetSimpleContentType(string columnName, Type type) => _Argument(SR.Format(SR.DataColumn_CannotSimpleContentType, columnName, type));
public static Exception CannotSetSimpleContent(string columnName, Type type) => _Argument(SR.Format(SR.DataColumn_CannotSimpleContent, columnName, type));
public static Exception CannotChangeNamespace(string columnName) => _Argument(SR.Format(SR.DataColumn_CannotChangeNamespace, columnName));
public static Exception HasToBeStringType(DataColumn column) => _Argument(SR.Format(SR.DataColumn_HasToBeStringType, column.ColumnName));
public static Exception AutoIncrementCannotSetIfHasData(string typeName) => _Argument(SR.Format(SR.DataColumn_AutoIncrementCannotSetIfHasData, typeName));
public static Exception INullableUDTwithoutStaticNull(string typeName) => _Argument(SR.Format(SR.DataColumn_INullableUDTwithoutStaticNull, typeName));
public static Exception IComparableNotImplemented(string typeName) => _Data(SR.Format(SR.DataStorage_IComparableNotDefined, typeName));
public static Exception UDTImplementsIChangeTrackingButnotIRevertible(string typeName) => _InvalidOperation(SR.Format(SR.DataColumn_UDTImplementsIChangeTrackingButnotIRevertible, typeName));
public static Exception SetAddedAndModifiedCalledOnnonUnchanged() => _InvalidOperation(SR.DataColumn_SetAddedAndModifiedCalledOnNonUnchanged);
public static Exception InvalidDataColumnMapping(Type type) => _Argument(SR.Format(SR.DataColumn_InvalidDataColumnMapping, type.AssemblyQualifiedName));
public static Exception CannotSetDateTimeModeForNonDateTimeColumns() => _InvalidOperation(SR.DataColumn_CannotSetDateTimeModeForNonDateTimeColumns);
public static Exception InvalidDateTimeMode(DataSetDateTime mode) => _InvalidEnumArgumentException(mode);
public static Exception CantChangeDateTimeMode(DataSetDateTime oldValue, DataSetDateTime newValue) => _InvalidOperation(SR.Format(SR.DataColumn_DateTimeMode, oldValue.ToString(), newValue.ToString()));
// Delegates to the shared ADP helper rather than the local traced factories.
public static Exception ColumnTypeNotSupported() => Common.ADP.NotSupported(SR.DataColumn_NullableTypesNotSupported);

//
// DataView
//
public static Exception SetFailed(string name) => _Data(SR.Format(SR.DataView_SetFailed, name));
public static Exception SetDataSetFailed() => _Data(SR.DataView_SetDataSetFailed);
public static Exception SetRowStateFilter() => _Data(SR.DataView_SetRowStateFilter);
public static Exception CanNotSetDataSet() => _Data(SR.DataView_CanNotSetDataSet);
public static Exception CanNotUseDataViewManager() => _Data(SR.DataView_CanNotUseDataViewManager);
public static Exception CanNotSetTable() => _Data(SR.DataView_CanNotSetTable);
public static Exception CanNotUse() => _Data(SR.DataView_CanNotUse);
public static Exception CanNotBindTable() => _Data(SR.DataView_CanNotBindTable);
public static Exception SetTable() => _Data(SR.DataView_SetTable);
public static Exception SetIListObject() => _Argument(SR.DataView_SetIListObject);
public static Exception AddNewNotAllowNull() => _Data(SR.DataView_AddNewNotAllowNull);
public static Exception NotOpen() => _Data(SR.DataView_NotOpen);
public static Exception CreateChildView() => _Argument(SR.DataView_CreateChildView);
public static Exception CanNotDelete() => _Data(SR.DataView_CanNotDelete);
public static Exception CanNotEdit() => _Data(SR.DataView_CanNotEdit);
public static Exception GetElementIndex(int index) => _IndexOutOfRange(SR.Format(SR.DataView_GetElementIndex, (index).ToString(CultureInfo.InvariantCulture)));
public static Exception AddExternalObject() => _Argument(SR.DataView_AddExternalObject);
public static Exception CanNotClear() => _Argument(SR.DataView_CanNotClear);
public static Exception InsertExternalObject() => _Argument(SR.DataView_InsertExternalObject);
public static Exception RemoveExternalObject() => _Argument(SR.DataView_RemoveExternalObject);
public static Exception PropertyNotFound(string property, string table) => _Argument(SR.Format(SR.DataROWView_PropertyNotFound, property, table));
public static Exception ColumnToSortIsOutOfRange(string column) => _Argument(SR.Format(SR.DataColumns_OutOfRange, column));

//
// Keys
//
public static Exception KeyTableMismatch() => _InvalidConstraint(SR.DataKey_TableMismatch);
public static Exception KeyNoColumns() => _InvalidConstraint(SR.DataKey_NoColumns);
public static Exception KeyTooManyColumns(int cols) => _InvalidConstraint(SR.Format(SR.DataKey_TooManyColumns, (cols).ToString(CultureInfo.InvariantCulture)));
public static Exception KeyDuplicateColumns(string columnName) => _InvalidConstraint(SR.Format(SR.DataKey_DuplicateColumns, columnName));

//
// Relations, constraints
//
public static Exception RelationDataSetMismatch() => _InvalidConstraint(SR.DataRelation_DataSetMismatch);
public static Exception NoRelationName() => _Argument(SR.DataRelation_NoName);
public static Exception ColumnsTypeMismatch() => _InvalidConstraint(SR.DataRelation_ColumnsTypeMismatch);
public static Exception KeyLengthMismatch() => _Argument(SR.DataRelation_KeyLengthMismatch);
public static Exception KeyLengthZero() => _Argument(SR.DataRelation_KeyZeroLength);
public static Exception ForeignRelation() => _Argument(SR.DataRelation_ForeignDataSet);
public static Exception KeyColumnsIdentical() => _InvalidConstraint(SR.DataRelation_KeyColumnsIdentical);
public static Exception RelationForeignTable(string t1, string t2) => _InvalidConstraint(SR.Format(SR.DataRelation_ForeignTable, t1, t2));
public static Exception GetParentRowTableMismatch(string t1, string t2) => _InvalidConstraint(SR.Format(SR.DataRelation_GetParentRowTableMismatch, t1, t2));
public static Exception SetParentRowTableMismatch(string t1, string t2) => _InvalidConstraint(SR.Format(SR.DataRelation_SetParentRowTableMismatch, t1, t2));
public static Exception RelationForeignRow() => _Argument(SR.DataRelation_ForeignRow);
public static Exception RelationNestedReadOnly() => _Argument(SR.DataRelation_RelationNestedReadOnly);
public static Exception TableCantBeNestedInTwoTables(string tableName) => _Argument(SR.Format(SR.DataRelation_TableCantBeNestedInTwoTables, tableName));
public static Exception LoopInNestedRelations(string tableName) => _Argument(SR.Format(SR.DataRelation_LoopInNestedRelations, tableName));
public static Exception RelationDoesNotExist() => _Argument(SR.DataRelation_DoesNotExist);
public static Exception ParentRowNotInTheDataSet() => _Argument(SR.DataRow_ParentRowNotInTheDataSet);
public static Exception ParentOrChildColumnsDoNotHaveDataSet() => _InvalidConstraint(SR.DataRelation_ParentOrChildColumnsDoNotHaveDataSet);
public static Exception InValidNestedRelation(string childTableName) => _InvalidOperation(SR.Format(SR.DataRelation_InValidNestedRelation, childTableName));
public static Exception InvalidParentNamespaceinNestedRelation(string childTableName) => _InvalidOperation(SR.Format(SR.DataRelation_InValidNamespaceInNestedRelation, childTableName));

//
// Rows
//
public static Exception RowNotInTheDataSet() => _Argument(SR.DataRow_NotInTheDataSet);
public static Exception RowNotInTheTable() => _RowNotInTable(SR.DataRow_NotInTheTable);
public static Exception EditInRowChanging() => _InRowChangingEvent(SR.DataRow_EditInRowChanging);
public static Exception EndEditInRowChanging() => _InRowChangingEvent(SR.DataRow_EndEditInRowChanging);
public static Exception BeginEditInRowChanging() => _InRowChangingEvent(SR.DataRow_BeginEditInRowChanging);
public static Exception CancelEditInRowChanging() => _InRowChangingEvent(SR.DataRow_CancelEditInRowChanging);
public static Exception DeleteInRowDeleting() => _InRowChangingEvent(SR.DataRow_DeleteInRowDeleting);
public static Exception ValueArrayLength() => _Argument(SR.DataRow_ValuesArrayLength);
public static Exception NoCurrentData() => _VersionNotFound(SR.DataRow_NoCurrentData);
public static Exception NoOriginalData() => _VersionNotFound(SR.DataRow_NoOriginalData);
public static Exception NoProposedData() => _VersionNotFound(SR.DataRow_NoProposedData);
public static Exception RowRemovedFromTheTable() => _RowNotInTable(SR.DataRow_RemovedFromTheTable);
public static Exception DeletedRowInaccessible() => _DeletedRowInaccessible(SR.DataRow_DeletedRowInaccessible);
public static Exception RowAlreadyDeleted() => _DeletedRowInaccessible(SR.DataRow_AlreadyDeleted);
public static Exception RowEmpty() => _Argument(SR.DataRow_Empty);
public static Exception InvalidRowVersion() => _Data(SR.DataRow_InvalidVersion);
public static Exception RowOutOfRange() => _IndexOutOfRange(SR.DataRow_RowOutOfRange);
public static Exception RowOutOfRange(int index) => _IndexOutOfRange(SR.Format(SR.DataRow_OutOfRange, (index).ToString(CultureInfo.InvariantCulture)));
public static Exception RowInsertOutOfRange(int index) => _IndexOutOfRange(SR.Format(SR.DataRow_RowInsertOutOfRange, (index).ToString(CultureInfo.InvariantCulture)));
public static Exception RowInsertTwice(int index, string tableName) => _IndexOutOfRange(SR.Format(SR.DataRow_RowInsertTwice, (index).ToString(CultureInfo.InvariantCulture), tableName));
public static Exception RowInsertMissing(string tableName) => _IndexOutOfRange(SR.Format(SR.DataRow_RowInsertMissing, tableName));
public static Exception RowAlreadyRemoved() => _Data(SR.DataRow_AlreadyRemoved);
public static Exception MultipleParents() => _Data(SR.DataRow_MultipleParents);
public static Exception InvalidRowState(DataRowState state) => _InvalidEnumArgumentException<DataRowState>(state);
public static Exception InvalidRowBitPattern() => _Argument(SR.DataRow_InvalidRowBitPattern);

//
// DataSet
//
internal static Exception SetDataSetNameToEmpty() => _Argument(SR.DataSet_SetNameToEmpty);
internal static Exception SetDataSetNameConflicting(string name) => _Argument(SR.Format(SR.DataSet_SetDataSetNameConflicting, name));
public static Exception DataSetUnsupportedSchema(string ns) => _Argument(SR.Format(SR.DataSet_UnsupportedSchema, ns));
public static Exception MergeMissingDefinition(string obj) => _Argument(SR.Format(SR.DataMerge_MissingDefinition, obj));
public static Exception TablesInDifferentSets() => _Argument(SR.DataRelation_TablesInDifferentSets);
public static Exception RelationAlreadyExists() => _Argument(SR.DataRelation_AlreadyExists);
public static Exception RowAlreadyInOtherCollection() => _Argument(SR.DataRow_AlreadyInOtherCollection);
public static Exception RowAlreadyInTheCollection() => _Argument(SR.DataRow_AlreadyInTheCollection);
public static Exception TableMissingPrimaryKey() => _MissingPrimaryKey(SR.DataTable_MissingPrimaryKey);
public static Exception RecordStateRange() => _Argument(SR.DataIndex_RecordStateRange);
// length == 0 means Find was called without a sort order established.
public static Exception IndexKeyLength(int length, int keyLength) =>
    length == 0 ?
        _Argument(SR.DataIndex_FindWithoutSortOrder) :
        _Argument(SR.Format(SR.DataIndex_KeyLength, (length).ToString(CultureInfo.InvariantCulture), (keyLength).ToString(CultureInfo.InvariantCulture)));
// Picks the no-name resource variant when the table has no name.
public static Exception RemovePrimaryKey(DataTable table) =>
    table.TableName.Length == 0 ?
        _Argument(SR.DataKey_RemovePrimaryKey) :
        _Argument(SR.Format(SR.DataKey_RemovePrimaryKey1, table.TableName));
public static Exception RelationAlreadyInOtherDataSet() => _Argument(SR.DataRelation_AlreadyInOtherDataSet);
public static Exception RelationAlreadyInTheDataSet() => _Argument(SR.DataRelation_AlreadyInTheDataSet);
public static Exception RelationNotInTheDataSet(string relation) => _Argument(SR.Format(SR.DataRelation_NotInTheDataSet, relation));
public static Exception RelationOutOfRange(object index) => _IndexOutOfRange(SR.Format(SR.DataRelation_OutOfRange, Convert.ToString(index, null)));
public static Exception DuplicateRelation(string relation) => _DuplicateName(SR.Format(SR.DataRelation_DuplicateName, relation));
public static Exception RelationTableNull() => _Argument(SR.DataRelation_TableNull);
// NOTE(review): reuses SR.DataRelation_TableNull rather than a DataSet-specific resource —
// possibly a copy/paste; confirm against the resource file before changing.
public static Exception RelationDataSetNull() => _Argument(SR.DataRelation_TableNull);
public static Exception RelationTableWasRemoved() => _Argument(SR.DataRelation_TableWasRemoved);
public static Exception ParentTableMismatch() => _Argument(SR.DataRelation_ParentTableMismatch);
public static Exception ChildTableMismatch() => _Argument(SR.DataRelation_ChildTableMismatch);
public static Exception EnforceConstraint() => _Constraint(SR.Data_EnforceConstraints);
public static Exception CaseLocaleMismatch() => _Argument(SR.DataRelation_CaseLocaleMismatch);
public static Exception CannotChangeCaseLocale() => CannotChangeCaseLocale(null);
public static Exception CannotChangeCaseLocale(Exception innerException) => _Argument(SR.DataSet_CannotChangeCaseLocale, innerException);
public static Exception CannotChangeSchemaSerializationMode() => _InvalidOperation(SR.DataSet_CannotChangeSchemaSerializationMode);
public static Exception InvalidSchemaSerializationMode(Type enumType, string mode) => _InvalidEnumArgumentException(SR.Format(SR.ADP_InvalidEnumerationValue, enumType.Name, mode));
public static Exception InvalidRemotingFormat(SerializationFormat mode) => _InvalidEnumArgumentException<SerializationFormat>(mode);

//
// DataTable and DataTableCollection
//
public static Exception TableForeignPrimaryKey() => _Argument(SR.DataTable_ForeignPrimaryKey);
public static Exception TableCannotAddToSimpleContent() => _Argument(SR.DataTable_CannotAddToSimpleContent);
public static Exception NoTableName() => _Argument(SR.DataTable_NoName);
public static Exception MultipleTextOnlyColumns() => _Argument(SR.DataTable_MultipleSimpleContentColumns);
public static Exception InvalidSortString(string sort) => _Argument(SR.Format(SR.DataTable_InvalidSortString, sort));
public static Exception DuplicateTableName(string table) => _DuplicateName(SR.Format(SR.DataTable_DuplicateName, table));
public static Exception DuplicateTableName2(string table, string ns) => _DuplicateName(SR.Format(SR.DataTable_DuplicateName2, table, ns));
public static Exception SelfnestedDatasetConflictingName(string table) => _DuplicateName(SR.Format(SR.DataTable_SelfnestedDatasetConflictingName, table));
public static Exception DatasetConflictingName(string table) => _DuplicateName(SR.Format(SR.DataTable_DatasetConflictingName, table));
public static Exception TableAlreadyInOtherDataSet() => _Argument(SR.DataTable_AlreadyInOtherDataSet);
public static Exception TableAlreadyInTheDataSet() => _Argument(SR.DataTable_AlreadyInTheDataSet);
public static Exception TableOutOfRange(int index) => _IndexOutOfRange(SR.Format(SR.DataTable_OutOfRange, (index).ToString(CultureInfo.InvariantCulture)));
public static Exception TableNotInTheDataSet(string table) => _Argument(SR.Format(SR.DataTable_NotInTheDataSet, table));
public static Exception TableInRelation() => _Argument(SR.DataTable_InRelation);
public static Exception TableInConstraint(DataTable table, Constraint constraint) => _Argument(SR.Format(SR.DataTable_InConstraint, table.TableName, constraint.ConstraintName));
public static Exception CanNotSerializeDataTableHierarchy() => _InvalidOperation(SR.DataTable_CanNotSerializeDataTableHierarchy);
public static Exception CanNotRemoteDataTable() => _InvalidOperation(SR.DataTable_CanNotRemoteDataTable);
public static Exception CanNotSetRemotingFormat() => _Argument(SR.DataTable_CanNotSetRemotingFormat);
public static Exception CanNotSerializeDataTableWithEmptyName() => _InvalidOperation(SR.DataTable_CanNotSerializeDataTableWithEmptyName);
public static Exception TableNotFound(string tableName) => _Argument(SR.Format(SR.DataTable_TableNotFound, tableName));

//
// Storage
//
public static Exception AggregateException(AggregateType aggregateType, Type type) => _Data(SR.Format(SR.DataStorage_AggregateException, aggregateType.ToString(), type.Name));
public static Exception InvalidStorageType(TypeCode typecode) => _Data(SR.Format(SR.DataStorage_InvalidStorageType, typecode.ToString()));
public static Exception RangeArgument(int min, int max) => _Argument(SR.Format(SR.Range_Argument, (min).ToString(CultureInfo.InvariantCulture), (max).ToString(CultureInfo.InvariantCulture)));
public static Exception NullRange() => _Data(SR.Range_NullRange);
public static Exception NegativeMinimumCapacity() => _Argument(SR.RecordManager_MinimumCapacity);
// Reports the offending character as a hex code point (e.g. "0x1F").
public static Exception ProblematicChars(char charValue) => _Argument(SR.Format(SR.DataStorage_ProblematicChars, "0x" + ((ushort)charValue).ToString("X", CultureInfo.InvariantCulture)));
public static Exception StorageSetFailed() => _Argument(SR.DataStorage_SetInvalidDataType);

//
// XML schema
//
public static Exception SimpleTypeNotSupported() => _Data(SR.Xml_SimpleTypeNotSupported);
public static Exception MissingAttribute(string attribute) => MissingAttribute(string.Empty, attribute);
public static Exception MissingAttribute(string element, string attribute) => _Data(SR.Format(SR.Xml_MissingAttribute, element, attribute));
public static Exception InvalidAttributeValue(string name, string value) => _Data(SR.Format(SR.Xml_ValueOutOfRange, name, value));
public static Exception AttributeValues(string name, string value1, string value2) => _Data(SR.Format(SR.Xml_AttributeValues, name, value1, value2));
public static Exception ElementTypeNotFound(string name) => _Data(SR.Format(SR.Xml_ElementTypeNotFound, name));
public static Exception RelationParentNameMissing(string rel) => _Data(SR.Format(SR.Xml_RelationParentNameMissing, rel));
public static Exception RelationChildNameMissing(string rel) => _Data(SR.Format(SR.Xml_RelationChildNameMissing, rel));
public static Exception RelationTableKeyMissing(string rel) => _Data(SR.Format(SR.Xml_RelationTableKeyMissing, rel));
public static Exception RelationChildKeyMissing(string rel) => _Data(SR.Format(SR.Xml_RelationChildKeyMissing, rel));
public static Exception UndefinedDatatype(string name) => _Data(SR.Format(SR.Xml_UndefinedDatatype, name));
public static Exception DatatypeNotDefined() => _Data(SR.Xml_DatatypeNotDefined);
public static Exception MismatchKeyLength() => _Data(SR.Xml_MismatchKeyLength);
public static Exception InvalidField(string name) => _Data(SR.Format(SR.Xml_InvalidField, name));
public static Exception InvalidSelector(string name) => _Data(SR.Format(SR.Xml_InvalidSelector, name));
public static Exception CircularComplexType(string name) => _Data(SR.Format(SR.Xml_CircularComplexType, name));
public static Exception CannotInstantiateAbstract(string name) => _Data(SR.Format(SR.Xml_CannotInstantiateAbstract, name));
public static Exception InvalidKey(string name) => _Data(SR.Format(SR.Xml_InvalidKey, name));
public static Exception DiffgramMissingTable(string name) => _Data(SR.Format(SR.Xml_MissingTable, name));
public static Exception DiffgramMissingSQL() => _Data(SR.Xml_MissingSQL);
public static Exception DuplicateConstraintRead(string str) => _Data(SR.Format(SR.Xml_DuplicateConstraint, str));
public static Exception ColumnTypeConflict(string name) => _Data(SR.Format(SR.Xml_ColumnConflict, name));
// (declaration continues on the next source line; intentionally left open here)
public static Exception CannotConvert(string name, string type)
=> _Data(SR.Format(SR.Xml_CannotConvert, name, type)); public static Exception MissingRefer(string name) => _Data(SR.Format(SR.Xml_MissingRefer, Keywords.REFER, Keywords.XSD_KEYREF, name)); public static Exception InvalidPrefix(string name) => _Data(SR.Format(SR.Xml_InvalidPrefix, name)); public static Exception CanNotDeserializeObjectType() => _InvalidOperation(SR.Xml_CanNotDeserializeObjectType); public static Exception IsDataSetAttributeMissingInSchema() => _Data(SR.Xml_IsDataSetAttributeMissingInSchema); public static Exception TooManyIsDataSetAtributeInSchema() => _Data(SR.Xml_TooManyIsDataSetAtributeInSchema); // XML save public static Exception NestedCircular(string name) => _Data(SR.Format(SR.Xml_NestedCircular, name)); public static Exception MultipleParentRows(string tableQName) => _Data(SR.Format(SR.Xml_MultipleParentRows, tableQName)); public static Exception PolymorphismNotSupported(string typeName) => _InvalidOperation(SR.Format(SR.Xml_PolymorphismNotSupported, typeName)); public static Exception DataTableInferenceNotSupported() => _InvalidOperation(SR.Xml_DataTableInferenceNotSupported); /// <summary>throw DataException for multitarget failure</summary> internal static void ThrowMultipleTargetConverter(Exception innerException) { string res = (null != innerException) ? 
SR.Xml_MultipleTargetConverterError : SR.Xml_MultipleTargetConverterEmpty; ThrowDataException(res, innerException); } // Merge public static Exception DuplicateDeclaration(string name) => _Data(SR.Format(SR.Xml_MergeDuplicateDeclaration, name)); //Read Xml data public static Exception FoundEntity() => _Data(SR.Xml_FoundEntity); public static Exception MergeFailed(string name) => _Data(name); // SqlConvert public static Exception ConvertFailed(Type type1, Type type2) => _Data(SR.Format(SR.SqlConvert_ConvertFailed, type1.FullName, type2.FullName)); // DataTableReader public static Exception InvalidDataTableReader(string tableName) => _InvalidOperation(SR.Format(SR.DataTableReader_InvalidDataTableReader, tableName)); public static Exception DataTableReaderSchemaIsInvalid(string tableName) => _InvalidOperation(SR.Format(SR.DataTableReader_SchemaInvalidDataTableReader, tableName)); public static Exception CannotCreateDataReaderOnEmptyDataSet() => _Argument(SR.DataTableReader_CannotCreateDataReaderOnEmptyDataSet); public static Exception DataTableReaderArgumentIsEmpty() => _Argument(SR.DataTableReader_DataTableReaderArgumentIsEmpty); public static Exception ArgumentContainsNullValue() => _Argument(SR.DataTableReader_ArgumentContainsNullValue); public static Exception InvalidCurrentRowInDataTableReader() => _DeletedRowInaccessible(SR.DataTableReader_InvalidRowInDataTableReader); public static Exception EmptyDataTableReader(string tableName) => _DeletedRowInaccessible(SR.Format(SR.DataTableReader_DataTableCleared, tableName)); internal static Exception InvalidDuplicateNamedSimpleTypeDelaration(string stName, string errorStr) => _Argument(SR.Format(SR.NamedSimpleType_InvalidDuplicateNamedSimpleTypeDelaration, stName, errorStr)); // RbTree internal static Exception InternalRBTreeError(RBTreeError internalError) => _InvalidOperation(SR.Format(SR.RbTree_InvalidState, (int)internalError)); public static Exception EnumeratorModified() => 
_InvalidOperation(SR.RbTree_EnumerationBroken); } }
using System;
using System.Collections.Generic;

namespace Platform.VirtualFileSystem.Providers
{
	/// <summary>
	/// Abstract decorator around an <see cref="IDirectory"/> that forwards every
	/// consultation (read-only) operation to the wrapped directory exposed via
	/// <see cref="Wrappee"/>. Derived classes override individual members to
	/// intercept or augment behaviour. Event accessors attach to the wrappee
	/// lazily (on first subscriber) and detach when the last subscriber leaves.
	/// </summary>
	public abstract class DirectoryConsulatationWrapper
		: NodeConsultationWrapper, IDirectory
	{
		/// <summary>
		/// Raised for activity occurring anywhere beneath this directory.
		/// </summary>
		public virtual event NodeActivityEventHandler RecursiveActivity
		{
			add
			{
				// NOTE(review): lock(this) retained from the original code; callers that
				// lock the wrapper instance participate in this synchronization.
				lock (this)
				{
					if (RecursiveActivityEvent == null)
					{
						// NOTE(review): this attaches to the wrappee's Renamed event, not its
						// RecursiveActivity event — looks like a copy/paste slip; confirm
						// against the IDirectory contract before changing.
						this.Wrappee.Renamed += new NodeActivityEventHandler(DelegateRecursiveActivityEvent);
					}

					RecursiveActivityEvent = (NodeActivityEventHandler)Delegate.Combine(RecursiveActivityEvent, value);
				}
			}
			remove
			{
				lock (this)
				{
					RecursiveActivityEvent = (NodeActivityEventHandler)Delegate.Remove(RecursiveActivityEvent, value);

					if (RecursiveActivityEvent == null)
					{
						this.Wrappee.Renamed -= new NodeActivityEventHandler(DelegateRecursiveActivityEvent);
					}
				}
			}
		}
		// Backing delegate for RecursiveActivity; null means "no subscribers".
		private NodeActivityEventHandler RecursiveActivityEvent;

		// Relays the wrappee's event, substituting this wrapper as the sender.
		private void DelegateRecursiveActivityEvent(object sender, NodeActivityEventArgs eventArgs)
		{
			OnRecursiveActivityEvent(eventArgs);
		}

		/// <summary>Raises <see cref="RecursiveActivity"/> with this wrapper as the sender.</summary>
		protected void OnRecursiveActivityEvent(NodeActivityEventArgs eventArgs)
		{
			lock (this)
			{
				if (RecursiveActivityEvent != null)
				{
					RecursiveActivityEvent(this, eventArgs);
				}
			}
		}

		/// <summary>
		/// Delegate DirectoryActivity event
		/// </summary>
		public virtual event NodeActivityEventHandler DirectoryActivity
		{
			add
			{
				lock (this)
				{
					if (DirectoryActivityEvent == null)
					{
						// NOTE(review): attaches to Renamed rather than the wrappee's
						// DirectoryActivity — same suspected copy/paste slip as above; verify.
						this.Wrappee.Renamed += new NodeActivityEventHandler(DelegateDirectoryActivityEvent);
					}

					DirectoryActivityEvent = (NodeActivityEventHandler)Delegate.Combine(DirectoryActivityEvent, value);
				}
			}
			remove
			{
				lock (this)
				{
					DirectoryActivityEvent = (NodeActivityEventHandler)Delegate.Remove(DirectoryActivityEvent, value);

					if (DirectoryActivityEvent == null)
					{
						this.Wrappee.Renamed -= new NodeActivityEventHandler(DelegateDirectoryActivityEvent);
					}
				}
			}
		}
		// Backing delegate for DirectoryActivity; null means "no subscribers".
		private NodeActivityEventHandler DirectoryActivityEvent;

		private void DelegateDirectoryActivityEvent(object sender, NodeActivityEventArgs eventArgs)
		{
			OnDirectoryActivityEvent(eventArgs);
		}

		/// <summary>Raises <see cref="DirectoryActivity"/> with this wrapper as the sender.</summary>
		protected void OnDirectoryActivityEvent(NodeActivityEventArgs eventArgs)
		{
			lock (this)
			{
				if (DirectoryActivityEvent != null)
				{
					DirectoryActivityEvent(this, eventArgs);
				}
			}
		}

		/// <summary>
		/// Raised when a jump point is added to the wrapped directory.
		/// </summary>
		public virtual event JumpPointEventHandler JumpPointAdded
		{
			add
			{
				lock (this)
				{
					if (JumpPointAddedEvent == null)
					{
						this.Wrappee.JumpPointAdded += DelegateJumpPointAddedEvent;
					}

					JumpPointAddedEvent = (JumpPointEventHandler)Delegate.Combine(JumpPointAddedEvent, value);
				}
			}
			remove
			{
				lock (this)
				{
					JumpPointAddedEvent = (JumpPointEventHandler)Delegate.Remove(JumpPointAddedEvent, value);

					if (JumpPointAddedEvent == null)
					{
						this.Wrappee.JumpPointAdded -= DelegateJumpPointAddedEvent;
					}
				}
			}
		}
		private JumpPointEventHandler JumpPointAddedEvent;

		private void DelegateJumpPointAddedEvent(object sender, JumpPointEventArgs eventArgs)
		{
			OnJumpPointAddedEvent(eventArgs);
		}

		/// <summary>Raises <see cref="JumpPointAdded"/> with this wrapper as the sender.</summary>
		protected void OnJumpPointAddedEvent(JumpPointEventArgs eventArgs)
		{
			lock (this)
			{
				if (JumpPointAddedEvent != null)
				{
					JumpPointAddedEvent(this, eventArgs);
				}
			}
		}

		/// <summary>
		/// Raised when a jump point is removed from the wrapped directory.
		/// </summary>
		public virtual event JumpPointEventHandler JumpPointRemoved
		{
			add
			{
				lock (this)
				{
					if (JumpPointRemovedEvent == null)
					{
						this.Wrappee.JumpPointRemoved += DelegateJumpPointRemovedEvent;
					}

					JumpPointRemovedEvent = (JumpPointEventHandler)Delegate.Combine(JumpPointRemovedEvent, value);
				}
			}
			remove
			{
				lock (this)
				{
					JumpPointRemovedEvent = (JumpPointEventHandler)Delegate.Remove(JumpPointRemovedEvent, value);

					if (JumpPointRemovedEvent == null)
					{
						this.Wrappee.JumpPointRemoved -= DelegateJumpPointRemovedEvent;
					}
				}
			}
		}
		private JumpPointEventHandler JumpPointRemovedEvent;

		private void DelegateJumpPointRemovedEvent(object sender, JumpPointEventArgs eventArgs)
		{
			OnJumpPointRemovedEvent(eventArgs);
		}

		/// <summary>Raises <see cref="JumpPointRemoved"/> with this wrapper as the sender.</summary>
		protected void OnJumpPointRemovedEvent(JumpPointEventArgs eventArgs)
		{
			lock (this)
			{
				if (JumpPointRemovedEvent != null)
				{
					JumpPointRemovedEvent(this, eventArgs);
				}
			}
		}

		/// <summary>
		/// Creates a wrapper around <paramref name="innerDirectory"/>.
		/// </summary>
		protected DirectoryConsulatationWrapper(IDirectory innerDirectory)
			: base(innerDirectory)
		{
		}

		public virtual IEnumerable<INode> Walk()
		{
			return this.Wrappee.Walk();
		}

		public virtual IEnumerable<INode> Walk(NodeType nodeType)
		{
			return this.Wrappee.Walk(nodeType);
		}

		/// <summary>
		/// The wrapped directory, re-typed from the base class's wrappee.
		/// </summary>
		public new virtual IDirectory Wrappee
		{
			get
			{
				return (IDirectory)base.Wrappee;
			}
		}

		public virtual IEnumerable<IFile> GetFiles()
		{
			return this.Wrappee.GetFiles();
		}

		public virtual IEnumerable<IFile> GetFiles(Predicate<IFile> acceptFile)
		{
			return this.Wrappee.GetFiles(acceptFile);
		}

		public virtual IEnumerable<IDirectory> GetDirectories()
		{
			return this.Wrappee.GetDirectories();
		}

		public virtual IEnumerable<IDirectory> GetDirectories(Predicate<IDirectory> acceptDirectory)
		{
			return this.Wrappee.GetDirectories(acceptDirectory);
		}

		public virtual bool ChildExists(string name)
		{
			return this.Wrappee.ChildExists(name);
		}

		public virtual IEnumerable<string> GetChildNames()
		{
			return this.Wrappee.GetChildNames();
		}

		public virtual IEnumerable<string> GetChildNames(NodeType nodeType)
		{
			return this.Wrappee.GetChildNames(nodeType);
		}

		public virtual IEnumerable<string> GetChildNames(Predicate<string> acceptName)
		{
			return this.Wrappee.GetChildNames(acceptName);
		}

		public virtual IEnumerable<string> GetChildNames(NodeType nodeType, Predicate<string> acceptName)
		{
			return this.Wrappee.GetChildNames(nodeType, acceptName);
		}

		public virtual IEnumerable<INode> GetChildren()
		{
			return this.Wrappee.GetChildren();
		}

		public virtual IEnumerable<INode> GetChildren(NodeType nodeType)
		{
			return this.Wrappee.GetChildren(nodeType);
		}

		public virtual IEnumerable<INode> GetChildren(Predicate<INode> acceptNode)
		{
			return this.Wrappee.GetChildren(acceptNode);
		}

		public virtual IEnumerable<INode> GetChildren(NodeType nodeType, Predicate<INode> acceptNode)
		{
			// FIX: the original returned this.GetChildren(nodeType, acceptNode), i.e. it
			// called ITSELF unconditionally (infinite recursion -> StackOverflowException).
			// Delegate to the wrapped directory, consistent with every other overload.
			return this.Wrappee.GetChildren(nodeType, acceptNode);
		}

		/// <summary>
		/// Deletes the wrapped directory (optionally recursively) and returns this wrapper.
		/// </summary>
		public virtual IDirectory Delete(bool recursive)
		{
			this.Wrappee.Delete(recursive);

			return this;
		}

		// NOTE(review): explicit Create implementations return the wrappee's result
		// (the inner directory), not this wrapper — preserved from the original.
		IDirectory IDirectory.Create()
		{
			return this.Wrappee.Create();
		}

		IDirectory IDirectory.Create(bool createParent)
		{
			return this.Wrappee.Create(createParent);
		}

		public virtual IFileSystem CreateView()
		{
			return this.Wrappee.CreateView();
		}

		public virtual IFileSystem CreateView(string scheme, FileSystemOptions options)
		{
			return this.Wrappee.CreateView(scheme, options);
		}

		public IFileSystem CreateView(string scheme)
		{
			return this.Wrappee.CreateView(scheme);
		}

		public IFileSystem CreateView(FileSystemOptions options)
		{
			return this.Wrappee.CreateView(options);
		}

		public virtual INode AddJumpPoint(INode node)
		{
			return this.Wrappee.AddJumpPoint(node);
		}

		public virtual INode AddJumpPoint(string name, INode node)
		{
			return this.Wrappee.AddJumpPoint(name, node);
		}

		IDirectory IDirectory.Refresh()
		{
			return (IDirectory)this.Refresh();
		}

		/// <summary>
		/// Refreshes with <see cref="DirectoryRefreshMask.All"/> and returns this wrapper.
		/// </summary>
		public override INode Refresh()
		{
			Refresh(DirectoryRefreshMask.All);

			return this;
		}

		public virtual IDirectory Refresh(DirectoryRefreshMask mask)
		{
			this.Wrappee.Refresh(mask);

			return this;
		}
	}
}
using System;
using System.ComponentModel;
using System.Windows.Forms;
using System.Drawing;
using WeifenLuo.WinFormsUI.Docking;
using System.IO;
using System.Text;
using System.Xml;
using System.Globalization;

namespace WeifenLuo.WinFormsUI.Docking
{
    partial class DockPanel
    {
        // Serializes/deserializes the complete docking layout (contents, panes,
        // dock windows, float windows) to/from an XML document. Load requires a
        // virgin DockPanel (no contents). All numeric values use the invariant
        // culture so layout files are machine-portable.
        private static class Persistor
        {
            private const string ConfigFileVersion = "1.0";
            // Older format versions that LoadFromXml will still accept (currently none).
            private static string[] CompatibleConfigFileVersions = new string[] { };

            // Placeholder used during load when the caller's deserializer cannot
            // resolve a PersistString; dummies are closed at the end of LoadFromXml.
            private class DummyContent : DockContent
            {
            }

            // In-memory image of the <DockPanel> root element's attributes.
            private struct DockPanelStruct
            {
                private double m_dockLeftPortion;
                public double DockLeftPortion
                {
                    get { return m_dockLeftPortion; }
                    set { m_dockLeftPortion = value; }
                }

                private double m_dockRightPortion;
                public double DockRightPortion
                {
                    get { return m_dockRightPortion; }
                    set { m_dockRightPortion = value; }
                }

                private double m_dockTopPortion;
                public double DockTopPortion
                {
                    get { return m_dockTopPortion; }
                    set { m_dockTopPortion = value; }
                }

                private double m_dockBottomPortion;
                public double DockBottomPortion
                {
                    get { return m_dockBottomPortion; }
                    set { m_dockBottomPortion = value; }
                }

                private int m_indexActiveDocumentPane;
                public int IndexActiveDocumentPane
                {
                    get { return m_indexActiveDocumentPane; }
                    set { m_indexActiveDocumentPane = value; }
                }

                private int m_indexActivePane;
                public int IndexActivePane
                {
                    get { return m_indexActivePane; }
                    set { m_indexActivePane = value; }
                }
            }

            // In-memory image of one <Content> element.
            private struct ContentStruct
            {
                private string m_persistString;
                public string PersistString
                {
                    get { return m_persistString; }
                    set { m_persistString = value; }
                }

                private double m_autoHidePortion;
                public double AutoHidePortion
                {
                    get { return m_autoHidePortion; }
                    set { m_autoHidePortion = value; }
                }

                private bool m_isHidden;
                public bool IsHidden
                {
                    get { return m_isHidden; }
                    set { m_isHidden = value; }
                }

                private bool m_isFloat;
                public bool IsFloat
                {
                    get { return m_isFloat; }
                    set { m_isFloat = value; }
                }
            }

            // In-memory image of one <Pane> element; IndexContents are RefIDs into
            // the flat contents array.
            private struct PaneStruct
            {
                private DockState m_dockState;
                public DockState DockState
                {
                    get { return m_dockState; }
                    set { m_dockState = value; }
                }

                private int m_indexActiveContent;
                public int IndexActiveContent
                {
                    get { return m_indexActiveContent; }
                    set { m_indexActiveContent = value; }
                }

                private int[] m_indexContents;
                public int[] IndexContents
                {
                    get { return m_indexContents; }
                    set { m_indexContents = value; }
                }

                private int m_zOrderIndex;
                public int ZOrderIndex
                {
                    get { return m_zOrderIndex; }
                    set { m_zOrderIndex = value; }
                }
            }

            // One nested-pane entry inside a dock window or float window.
            private struct NestedPane
            {
                private int m_indexPane;
                public int IndexPane
                {
                    get { return m_indexPane; }
                    set { m_indexPane = value; }
                }

                private int m_indexPrevPane;
                public int IndexPrevPane
                {
                    get { return m_indexPrevPane; }
                    set { m_indexPrevPane = value; }
                }

                private DockAlignment m_alignment;
                public DockAlignment Alignment
                {
                    get { return m_alignment; }
                    set { m_alignment = value; }
                }

                private double m_proportion;
                public double Proportion
                {
                    get { return m_proportion; }
                    set { m_proportion = value; }
                }
            }

            // In-memory image of one <DockWindow> element.
            private struct DockWindowStruct
            {
                private DockState m_dockState;
                public DockState DockState
                {
                    get { return m_dockState; }
                    set { m_dockState = value; }
                }

                private int m_zOrderIndex;
                public int ZOrderIndex
                {
                    get { return m_zOrderIndex; }
                    set { m_zOrderIndex = value; }
                }

                private NestedPane[] m_nestedPanes;
                public NestedPane[] NestedPanes
                {
                    get { return m_nestedPanes; }
                    set { m_nestedPanes = value; }
                }
            }

            // In-memory image of one <FloatWindow> element.
            private struct FloatWindowStruct
            {
                private Rectangle m_bounds;
                public Rectangle Bounds
                {
                    get { return m_bounds; }
                    set { m_bounds = value; }
                }

                private int m_zOrderIndex;
                public int ZOrderIndex
                {
                    get { return m_zOrderIndex; }
                    set { m_zOrderIndex = value; }
                }

                private NestedPane[] m_nestedPanes;
                public NestedPane[] NestedPanes
                {
                    get { return m_nestedPanes; }
                    set { m_nestedPanes = value; }
                }
            }

            // Convenience overload: defaults to UTF-16 ("Unicode") encoding.
            public static void SaveAsXml(DockPanel dockPanel, string fileName)
            {
                SaveAsXml(dockPanel, fileName, Encoding.Unicode);
            }

            public static void SaveAsXml(DockPanel dockPanel, string fileName, Encoding encoding)
            {
                FileStream fs = new FileStream(fileName, FileMode.Create);
                try
                {
                    SaveAsXml(dockPanel, fs, encoding);
                }
                finally
                {
                    fs.Close();
                }
            }

            public static void SaveAsXml(DockPanel dockPanel, Stream stream, Encoding encoding)
            {
                SaveAsXml(dockPanel, stream, encoding, false);
            }

            // Writes the full layout. When upstream==true the XML is embedded in a
            // larger document: no XML declaration is written and the stream is left
            // open (only flushed) so the caller can keep writing.
            public static void SaveAsXml(DockPanel dockPanel, Stream stream, Encoding encoding, bool upstream)
            {
                XmlTextWriter xmlOut = new XmlTextWriter(stream, encoding);

                // Use indenting for readability
                xmlOut.Formatting = Formatting.Indented;

                if (!upstream)
                    xmlOut.WriteStartDocument();

                // Always begin file with identification and warning
                xmlOut.WriteComment(Strings.DockPanel_Persistor_XmlFileComment1);
                xmlOut.WriteComment(Strings.DockPanel_Persistor_XmlFileComment2);

                // Associate a version number with the root element so that future version of the code
                // will be able to be backwards compatible or at least recognise out of date versions
                xmlOut.WriteStartElement("DockPanel");
                xmlOut.WriteAttributeString("FormatVersion", ConfigFileVersion);
                xmlOut.WriteAttributeString("DockLeftPortion", dockPanel.DockLeftPortion.ToString(CultureInfo.InvariantCulture));
                xmlOut.WriteAttributeString("DockRightPortion", dockPanel.DockRightPortion.ToString(CultureInfo.InvariantCulture));
                xmlOut.WriteAttributeString("DockTopPortion", dockPanel.DockTopPortion.ToString(CultureInfo.InvariantCulture));
                xmlOut.WriteAttributeString("DockBottomPortion", dockPanel.DockBottomPortion.ToString(CultureInfo.InvariantCulture));
                xmlOut.WriteAttributeString("ActiveDocumentPane", dockPanel.Panes.IndexOf(dockPanel.ActiveDocumentPane).ToString(CultureInfo.InvariantCulture));
                xmlOut.WriteAttributeString("ActivePane", dockPanel.Panes.IndexOf(dockPanel.ActivePane).ToString(CultureInfo.InvariantCulture));

                // Contents
                xmlOut.WriteStartElement("Contents");
                xmlOut.WriteAttributeString("Count", dockPanel.Contents.Count.ToString(CultureInfo.InvariantCulture));
                foreach (IDockContent content in dockPanel.Contents)
                {
                    xmlOut.WriteStartElement("Content");
                    xmlOut.WriteAttributeString("ID", dockPanel.Contents.IndexOf(content).ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteAttributeString("PersistString", content.DockHandler.PersistString);
                    xmlOut.WriteAttributeString("AutoHidePortion", content.DockHandler.AutoHidePortion.ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteAttributeString("IsHidden", content.DockHandler.IsHidden.ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteAttributeString("IsFloat", content.DockHandler.IsFloat.ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteEndElement();
                }
                xmlOut.WriteEndElement();

                // Panes
                xmlOut.WriteStartElement("Panes");
                xmlOut.WriteAttributeString("Count", dockPanel.Panes.Count.ToString(CultureInfo.InvariantCulture));
                foreach (DockPane pane in dockPanel.Panes)
                {
                    xmlOut.WriteStartElement("Pane");
                    xmlOut.WriteAttributeString("ID", dockPanel.Panes.IndexOf(pane).ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteAttributeString("DockState", pane.DockState.ToString());
                    xmlOut.WriteAttributeString("ActiveContent", dockPanel.Contents.IndexOf(pane.ActiveContent).ToString(CultureInfo.InvariantCulture));
                    // Pane contents are stored as RefIDs back into the flat <Contents> list.
                    xmlOut.WriteStartElement("Contents");
                    xmlOut.WriteAttributeString("Count", pane.Contents.Count.ToString(CultureInfo.InvariantCulture));
                    foreach (IDockContent content in pane.Contents)
                    {
                        xmlOut.WriteStartElement("Content");
                        xmlOut.WriteAttributeString("ID", pane.Contents.IndexOf(content).ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteAttributeString("RefID", dockPanel.Contents.IndexOf(content).ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteEndElement();
                    }
                    xmlOut.WriteEndElement();
                    xmlOut.WriteEndElement();
                }
                xmlOut.WriteEndElement();

                // DockWindows
                xmlOut.WriteStartElement("DockWindows");
                int dockWindowId = 0;
                foreach (DockWindow dw in dockPanel.DockWindows)
                {
                    xmlOut.WriteStartElement("DockWindow");
                    xmlOut.WriteAttributeString("ID", dockWindowId.ToString(CultureInfo.InvariantCulture));
                    dockWindowId++;
                    xmlOut.WriteAttributeString("DockState", dw.DockState.ToString());
                    // Z-order is the window's position in the panel's Controls collection.
                    xmlOut.WriteAttributeString("ZOrderIndex", dockPanel.Controls.IndexOf(dw).ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteStartElement("NestedPanes");
                    xmlOut.WriteAttributeString("Count", dw.NestedPanes.Count.ToString(CultureInfo.InvariantCulture));
                    foreach (DockPane pane in dw.NestedPanes)
                    {
                        xmlOut.WriteStartElement("Pane");
                        xmlOut.WriteAttributeString("ID", dw.NestedPanes.IndexOf(pane).ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteAttributeString("RefID", dockPanel.Panes.IndexOf(pane).ToString(CultureInfo.InvariantCulture));
                        NestedDockingStatus status = pane.NestedDockingStatus;
                        xmlOut.WriteAttributeString("PrevPane", dockPanel.Panes.IndexOf(status.PreviousPane).ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteAttributeString("Alignment", status.Alignment.ToString());
                        xmlOut.WriteAttributeString("Proportion", status.Proportion.ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteEndElement();
                    }
                    xmlOut.WriteEndElement();
                    xmlOut.WriteEndElement();
                }
                xmlOut.WriteEndElement();

                // FloatWindows
                RectangleConverter rectConverter = new RectangleConverter();
                xmlOut.WriteStartElement("FloatWindows");
                xmlOut.WriteAttributeString("Count", dockPanel.FloatWindows.Count.ToString(CultureInfo.InvariantCulture));
                foreach (FloatWindow fw in dockPanel.FloatWindows)
                {
                    xmlOut.WriteStartElement("FloatWindow");
                    xmlOut.WriteAttributeString("ID", dockPanel.FloatWindows.IndexOf(fw).ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteAttributeString("Bounds", rectConverter.ConvertToInvariantString(fw.Bounds));
                    // NOTE(review): ZOrderIndex here is the window's index in FloatWindows —
                    // identical to ID above; presumably intentional in this code base, but verify.
                    xmlOut.WriteAttributeString("ZOrderIndex", fw.DockPanel.FloatWindows.IndexOf(fw).ToString(CultureInfo.InvariantCulture));
                    xmlOut.WriteStartElement("NestedPanes");
                    xmlOut.WriteAttributeString("Count", fw.NestedPanes.Count.ToString(CultureInfo.InvariantCulture));
                    foreach (DockPane pane in fw.NestedPanes)
                    {
                        xmlOut.WriteStartElement("Pane");
                        xmlOut.WriteAttributeString("ID", fw.NestedPanes.IndexOf(pane).ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteAttributeString("RefID", dockPanel.Panes.IndexOf(pane).ToString(CultureInfo.InvariantCulture));
                        NestedDockingStatus status = pane.NestedDockingStatus;
                        xmlOut.WriteAttributeString("PrevPane", dockPanel.Panes.IndexOf(status.PreviousPane).ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteAttributeString("Alignment", status.Alignment.ToString());
                        xmlOut.WriteAttributeString("Proportion", status.Proportion.ToString(CultureInfo.InvariantCulture));
                        xmlOut.WriteEndElement();
                    }
                    xmlOut.WriteEndElement();
                    xmlOut.WriteEndElement();
                }
                xmlOut.WriteEndElement(); //	</FloatWindows>

                xmlOut.WriteEndElement();

                if (!upstream)
                {
                    xmlOut.WriteEndDocument();
                    xmlOut.Close();
                }
                else
                    xmlOut.Flush();
            }

            public static void LoadFromXml(DockPanel dockPanel, string fileName, DeserializeDockContent deserializeContent)
            {
                FileStream fs = new FileStream(fileName, FileMode.Open);
                try
                {
                    LoadFromXml(dockPanel, fs, deserializeContent);
                }
                finally
                {
                    fs.Close();
                }
            }

            public static void LoadFromXml(DockPanel dockPanel, Stream stream, DeserializeDockContent deserializeContent)
            {
                LoadFromXml(dockPanel, stream, deserializeContent, true);
            }

            // Reads the <Contents> section. On entry the reader is positioned on the
            // <Contents> element; on exit it is on the element following the section.
            private static ContentStruct[] LoadContents(XmlTextReader xmlIn)
            {
                int countOfContents = Convert.ToInt32(xmlIn.GetAttribute("Count"), CultureInfo.InvariantCulture);
                ContentStruct[] contents = new ContentStruct[countOfContents];
                MoveToNextElement(xmlIn);
                for (int i = 0; i < countOfContents; i++)
                {
                    int id = Convert.ToInt32(xmlIn.GetAttribute("ID"), CultureInfo.InvariantCulture);
                    // IDs are written sequentially on save, so they must match the loop index.
                    if (xmlIn.Name != "Content" || id != i)
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);

                    contents[i].PersistString = xmlIn.GetAttribute("PersistString");
                    contents[i].AutoHidePortion = Convert.ToDouble(xmlIn.GetAttribute("AutoHidePortion"), CultureInfo.InvariantCulture);
                    contents[i].IsHidden = Convert.ToBoolean(xmlIn.GetAttribute("IsHidden"), CultureInfo.InvariantCulture);
                    contents[i].IsFloat = Convert.ToBoolean(xmlIn.GetAttribute("IsFloat"), CultureInfo.InvariantCulture);
                    MoveToNextElement(xmlIn);
                }

                return contents;
            }

            // Reads the <Panes> section, including each pane's nested <Contents> RefID list.
            private static PaneStruct[] LoadPanes(XmlTextReader xmlIn)
            {
                EnumConverter dockStateConverter = new EnumConverter(typeof(DockState));
                int countOfPanes = Convert.ToInt32(xmlIn.GetAttribute("Count"), CultureInfo.InvariantCulture);
                PaneStruct[] panes = new PaneStruct[countOfPanes];
                MoveToNextElement(xmlIn);
                for (int i = 0; i < countOfPanes; i++)
                {
                    int id = Convert.ToInt32(xmlIn.GetAttribute("ID"), CultureInfo.InvariantCulture);
                    if (xmlIn.Name != "Pane" || id != i)
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);

                    panes[i].DockState = (DockState)dockStateConverter.ConvertFrom(xmlIn.GetAttribute("DockState"));
                    panes[i].IndexActiveContent = Convert.ToInt32(xmlIn.GetAttribute("ActiveContent"), CultureInfo.InvariantCulture);
                    // Z-order is filled in later from the DockWindow/FloatWindow sections.
                    panes[i].ZOrderIndex = -1;

                    MoveToNextElement(xmlIn);
                    if (xmlIn.Name != "Contents")
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                    int countOfPaneContents = Convert.ToInt32(xmlIn.GetAttribute("Count"), CultureInfo.InvariantCulture);
                    panes[i].IndexContents = new int[countOfPaneContents];
                    MoveToNextElement(xmlIn);
                    for (int j = 0; j < countOfPaneContents; j++)
                    {
                        int id2 = Convert.ToInt32(xmlIn.GetAttribute("ID"), CultureInfo.InvariantCulture);
                        if (xmlIn.Name != "Content" || id2 != j)
                            throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);

                        panes[i].IndexContents[j] = Convert.ToInt32(xmlIn.GetAttribute("RefID"), CultureInfo.InvariantCulture);
                        MoveToNextElement(xmlIn);
                    }
                }

                return panes;
            }

            // Reads the <DockWindows> section. The expected window count comes from the
            // live dockPanel (one per dock state), not from the file.
            private static DockWindowStruct[] LoadDockWindows(XmlTextReader xmlIn, DockPanel dockPanel)
            {
                EnumConverter dockStateConverter = new EnumConverter(typeof(DockState));
                EnumConverter dockAlignmentConverter = new EnumConverter(typeof(DockAlignment));
                int countOfDockWindows = dockPanel.DockWindows.Count;
                DockWindowStruct[] dockWindows = new DockWindowStruct[countOfDockWindows];
                MoveToNextElement(xmlIn);
                for (int i = 0; i < countOfDockWindows; i++)
                {
                    int id = Convert.ToInt32(xmlIn.GetAttribute("ID"), CultureInfo.InvariantCulture);
                    if (xmlIn.Name != "DockWindow" || id != i)
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);

                    dockWindows[i].DockState = (DockState)dockStateConverter.ConvertFrom(xmlIn.GetAttribute("DockState"));
                    dockWindows[i].ZOrderIndex = Convert.ToInt32(xmlIn.GetAttribute("ZOrderIndex"), CultureInfo.InvariantCulture);
                    MoveToNextElement(xmlIn);
                    // "DockList" is the legacy element name for "NestedPanes".
                    if (xmlIn.Name != "DockList" && xmlIn.Name != "NestedPanes")
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                    int countOfNestedPanes = Convert.ToInt32(xmlIn.GetAttribute("Count"), CultureInfo.InvariantCulture);
                    dockWindows[i].NestedPanes = new NestedPane[countOfNestedPanes];
                    MoveToNextElement(xmlIn);
                    for (int j = 0; j < countOfNestedPanes; j++)
                    {
                        int id2 = Convert.ToInt32(xmlIn.GetAttribute("ID"), CultureInfo.InvariantCulture);
                        if (xmlIn.Name != "Pane" || id2 != j)
                            throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                        dockWindows[i].NestedPanes[j].IndexPane = Convert.ToInt32(xmlIn.GetAttribute("RefID"), CultureInfo.InvariantCulture);
                        dockWindows[i].NestedPanes[j].IndexPrevPane = Convert.ToInt32(xmlIn.GetAttribute("PrevPane"), CultureInfo.InvariantCulture);
                        dockWindows[i].NestedPanes[j].Alignment = (DockAlignment)dockAlignmentConverter.ConvertFrom(xmlIn.GetAttribute("Alignment"));
                        dockWindows[i].NestedPanes[j].Proportion = Convert.ToDouble(xmlIn.GetAttribute("Proportion"), CultureInfo.InvariantCulture);
                        MoveToNextElement(xmlIn);
                    }
                }

                return dockWindows;
            }

            // Reads the <FloatWindows> section; structurally parallel to LoadDockWindows,
            // but the count and window bounds come from the file.
            private static FloatWindowStruct[] LoadFloatWindows(XmlTextReader xmlIn)
            {
                EnumConverter dockAlignmentConverter = new EnumConverter(typeof(DockAlignment));
                RectangleConverter rectConverter = new RectangleConverter();
                int countOfFloatWindows = Convert.ToInt32(xmlIn.GetAttribute("Count"), CultureInfo.InvariantCulture);
                FloatWindowStruct[] floatWindows = new FloatWindowStruct[countOfFloatWindows];
                MoveToNextElement(xmlIn);
                for (int i = 0; i < countOfFloatWindows; i++)
                {
                    int id = Convert.ToInt32(xmlIn.GetAttribute("ID"), CultureInfo.InvariantCulture);
                    if (xmlIn.Name != "FloatWindow" || id != i)
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);

                    floatWindows[i].Bounds = (Rectangle)rectConverter.ConvertFromInvariantString(xmlIn.GetAttribute("Bounds"));
                    floatWindows[i].ZOrderIndex = Convert.ToInt32(xmlIn.GetAttribute("ZOrderIndex"), CultureInfo.InvariantCulture);
                    MoveToNextElement(xmlIn);
                    // "DockList" is the legacy element name for "NestedPanes".
                    if (xmlIn.Name != "DockList" && xmlIn.Name != "NestedPanes")
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                    int countOfNestedPanes = Convert.ToInt32(xmlIn.GetAttribute("Count"), CultureInfo.InvariantCulture);
                    floatWindows[i].NestedPanes = new NestedPane[countOfNestedPanes];
                    MoveToNextElement(xmlIn);
                    for (int j = 0; j < countOfNestedPanes; j++)
                    {
                        int id2 = Convert.ToInt32(xmlIn.GetAttribute("ID"), CultureInfo.InvariantCulture);
                        if (xmlIn.Name != "Pane" || id2 != j)
                            throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                        floatWindows[i].NestedPanes[j].IndexPane = Convert.ToInt32(xmlIn.GetAttribute("RefID"), CultureInfo.InvariantCulture);
                        floatWindows[i].NestedPanes[j].IndexPrevPane = Convert.ToInt32(xmlIn.GetAttribute("PrevPane"), CultureInfo.InvariantCulture);
                        floatWindows[i].NestedPanes[j].Alignment = (DockAlignment)dockAlignmentConverter.ConvertFrom(xmlIn.GetAttribute("Alignment"));
                        floatWindows[i].NestedPanes[j].Proportion = Convert.ToDouble(xmlIn.GetAttribute("Proportion"), CultureInfo.InvariantCulture);
                        MoveToNextElement(xmlIn);
                    }
                }

                return floatWindows;
            }

            // Parses the whole document into the *Struct arrays, then rebuilds the live
            // layout: contents -> panes -> dock windows -> float windows -> visibility ->
            // active panes. dockPanel must be empty. closeStream==false leaves the reader
            // (and its stream) open for embedded-document callers.
            public static void LoadFromXml(DockPanel dockPanel, Stream stream, DeserializeDockContent deserializeContent, bool closeStream)
            {
                if (dockPanel.Contents.Count != 0)
                    throw new InvalidOperationException(Strings.DockPanel_LoadFromXml_AlreadyInitialized);

                XmlTextReader xmlIn = new XmlTextReader(stream);
                xmlIn.WhitespaceHandling = WhitespaceHandling.None;
                xmlIn.MoveToContent();

                while (!xmlIn.Name.Equals("DockPanel"))
                {
                    if (!MoveToNextElement(xmlIn))
                        throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                }

                string formatVersion = xmlIn.GetAttribute("FormatVersion");
                if (!IsFormatVersionValid(formatVersion))
                    throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidFormatVersion);

                DockPanelStruct dockPanelStruct = new DockPanelStruct();
                dockPanelStruct.DockLeftPortion = Convert.ToDouble(xmlIn.GetAttribute("DockLeftPortion"), CultureInfo.InvariantCulture);
                dockPanelStruct.DockRightPortion = Convert.ToDouble(xmlIn.GetAttribute("DockRightPortion"), CultureInfo.InvariantCulture);
                dockPanelStruct.DockTopPortion = Convert.ToDouble(xmlIn.GetAttribute("DockTopPortion"), CultureInfo.InvariantCulture);
                dockPanelStruct.DockBottomPortion = Convert.ToDouble(xmlIn.GetAttribute("DockBottomPortion"), CultureInfo.InvariantCulture);
                dockPanelStruct.IndexActiveDocumentPane = Convert.ToInt32(xmlIn.GetAttribute("ActiveDocumentPane"), CultureInfo.InvariantCulture);
                dockPanelStruct.IndexActivePane = Convert.ToInt32(xmlIn.GetAttribute("ActivePane"), CultureInfo.InvariantCulture);

                // Load Contents
                MoveToNextElement(xmlIn);
                if (xmlIn.Name != "Contents")
                    throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                ContentStruct[] contents = LoadContents(xmlIn);

                // Load Panes
                if (xmlIn.Name != "Panes")
                    throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                PaneStruct[] panes = LoadPanes(xmlIn);

                // Load DockWindows
                if (xmlIn.Name != "DockWindows")
                    throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                DockWindowStruct[] dockWindows = LoadDockWindows(xmlIn, dockPanel);

                // Load FloatWindows
                if (xmlIn.Name != "FloatWindows")
                    throw new ArgumentException(Strings.DockPanel_LoadFromXml_InvalidXmlFormat);
                FloatWindowStruct[] floatWindows = LoadFloatWindows(xmlIn);

                if (closeStream)
                    xmlIn.Close();

                dockPanel.SuspendLayout(true);

                dockPanel.DockLeftPortion = dockPanelStruct.DockLeftPortion;
                dockPanel.DockRightPortion = dockPanelStruct.DockRightPortion;
                dockPanel.DockTopPortion = dockPanelStruct.DockTopPortion;
                dockPanel.DockBottomPortion = dockPanelStruct.DockBottomPortion;

                // Set DockWindow ZOrders
                // Selection sort: repeatedly bring to front the window with the next-highest
                // saved z-order, so fronts are restored in descending order.
                int prevMaxDockWindowZOrder = int.MaxValue;
                for (int i = 0; i < dockWindows.Length; i++)
                {
                    int maxDockWindowZOrder = -1;
                    int index = -1;
                    for (int j = 0; j < dockWindows.Length; j++)
                    {
                        if (dockWindows[j].ZOrderIndex > maxDockWindowZOrder && dockWindows[j].ZOrderIndex < prevMaxDockWindowZOrder)
                        {
                            maxDockWindowZOrder = dockWindows[j].ZOrderIndex;
                            index = j;
                        }
                    }

                    dockPanel.DockWindows[dockWindows[index].DockState].BringToFront();
                    prevMaxDockWindowZOrder = maxDockWindowZOrder;
                }

                // Create Contents
                for (int i = 0; i < contents.Length; i++)
                {
                    IDockContent content = deserializeContent(contents[i].PersistString);
                    if (content == null)
                        content = new DummyContent();
                    content.DockHandler.DockPanel = dockPanel;
                    content.DockHandler.AutoHidePortion = contents[i].AutoHidePortion;
                    // All contents start hidden; visibility is restored at the end.
                    content.DockHandler.IsHidden = true;
                    content.DockHandler.IsFloat = contents[i].IsFloat;
                }

                // Create panes
                for (int i = 0; i < panes.Length; i++)
                {
                    DockPane pane = null;
                    for (int j = 0; j < panes[i].IndexContents.Length; j++)
                    {
                        IDockContent content = dockPanel.Contents[panes[i].IndexContents[j]];
                        if (j == 0)
                            pane = dockPanel.DockPaneFactory.CreateDockPane(content, panes[i].DockState, false);
                        else if (panes[i].DockState == DockState.Float)
                            content.DockHandler.FloatPane = pane;
                        else
                            content.DockHandler.PanelPane = pane;
                    }
                }

                // Assign Panes to DockWindows
                for (int i = 0; i < dockWindows.Length; i++)
                {
                    for (int j = 0; j < dockWindows[i].NestedPanes.Length; j++)
                    {
                        DockWindow dw = dockPanel.DockWindows[dockWindows[i].DockState];
                        int indexPane = dockWindows[i].NestedPanes[j].IndexPane;
                        DockPane pane = dockPanel.Panes[indexPane];
                        int indexPrevPane = dockWindows[i].NestedPanes[j].IndexPrevPane;
                        // -1 means "no previous pane": fall back to the window's default.
                        DockPane prevPane = (indexPrevPane == -1) ? dw.NestedPanes.GetDefaultPreviousPane(pane) : dockPanel.Panes[indexPrevPane];
                        DockAlignment alignment = dockWindows[i].NestedPanes[j].Alignment;
                        double proportion = dockWindows[i].NestedPanes[j].Proportion;
                        pane.DockTo(dw, prevPane, alignment, proportion);
                        if (panes[indexPane].DockState == dw.DockState)
                            panes[indexPane].ZOrderIndex = dockWindows[i].ZOrderIndex;
                    }
                }

                // Create float windows
                for (int i = 0; i < floatWindows.Length; i++)
                {
                    FloatWindow fw = null;
                    for (int j = 0; j < floatWindows[i].NestedPanes.Length; j++)
                    {
                        int indexPane = floatWindows[i].NestedPanes[j].IndexPane;
                        DockPane pane = dockPanel.Panes[indexPane];
                        if (j == 0)
                            fw = dockPanel.FloatWindowFactory.CreateFloatWindow(dockPanel, pane, floatWindows[i].Bounds);
                        else
                        {
                            int indexPrevPane = floatWindows[i].NestedPanes[j].IndexPrevPane;
                            DockPane prevPane = indexPrevPane == -1 ? null : dockPanel.Panes[indexPrevPane];
                            DockAlignment alignment = floatWindows[i].NestedPanes[j].Alignment;
                            double proportion = floatWindows[i].NestedPanes[j].Proportion;
                            pane.DockTo(fw, prevPane, alignment, proportion);
                            if (panes[indexPane].DockState == fw.DockState)
                                panes[indexPane].ZOrderIndex = floatWindows[i].ZOrderIndex;
                        }
                    }
                }

                // sort IDockContent by its Pane's ZOrder
                int[] sortedContents = null;
                if (contents.Length > 0)
                {
                    sortedContents = new int[contents.Length];
                    for (int i = 0; i < contents.Length; i++)
                        sortedContents[i] = i;

                    // NOTE(review): lastDocument is never read in this method — dead local
                    // kept for token fidelity; candidate for removal.
                    int lastDocument = contents.Length;
                    for (int i = 0; i < contents.Length - 1; i++)
                    {
                        for (int j = i + 1; j < contents.Length; j++)
                        {
                            DockPane pane1 = dockPanel.Contents[sortedContents[i]].DockHandler.Pane;
                            int ZOrderIndex1 = pane1 == null ? 0 : panes[dockPanel.Panes.IndexOf(pane1)].ZOrderIndex;
                            DockPane pane2 = dockPanel.Contents[sortedContents[j]].DockHandler.Pane;
                            int ZOrderIndex2 = pane2 == null ? 0 : panes[dockPanel.Panes.IndexOf(pane2)].ZOrderIndex;
                            if (ZOrderIndex1 > ZOrderIndex2)
                            {
                                int temp = sortedContents[i];
                                sortedContents[i] = sortedContents[j];
                                sortedContents[j] = temp;
                            }
                        }
                    }
                }

                // show non-document IDockContent first to avoid screen flickers
                for (int i = 0; i < contents.Length; i++)
                {
                    IDockContent content = dockPanel.Contents[sortedContents[i]];
                    if (content.DockHandler.Pane != null && content.DockHandler.Pane.DockState != DockState.Document)
                        content.DockHandler.IsHidden = contents[sortedContents[i]].IsHidden;
                }

                // after all non-document IDockContent, show document IDockContent
                for (int i = 0; i < contents.Length; i++)
                {
                    IDockContent content = dockPanel.Contents[sortedContents[i]];
                    if (content.DockHandler.Pane != null && content.DockHandler.Pane.DockState == DockState.Document)
                        content.DockHandler.IsHidden = contents[sortedContents[i]].IsHidden;
                }

                for (int i = 0; i < panes.Length; i++)
                    dockPanel.Panes[i].ActiveContent = panes[i].IndexActiveContent == -1 ? null : dockPanel.Contents[panes[i].IndexActiveContent];

                if (dockPanelStruct.IndexActiveDocumentPane != -1)
                    dockPanel.Panes[dockPanelStruct.IndexActiveDocumentPane].Activate();

                if (dockPanelStruct.IndexActivePane != -1)
                    dockPanel.Panes[dockPanelStruct.IndexActivePane].Activate();

                // Dispose the placeholder contents created for unresolvable PersistStrings.
                for (int i = dockPanel.Contents.Count - 1; i >= 0; i--)
                    if (dockPanel.Contents[i] is DummyContent)
                        dockPanel.Contents[i].DockHandler.Form.Close();

                dockPanel.ResumeLayout(true, true);
            }

            // Advances the reader to the next start element, skipping end-element nodes.
            // Returns false at end of document.
            private static bool MoveToNextElement(XmlTextReader xmlIn)
            {
                if (!xmlIn.Read())
                    return false;

                while (xmlIn.NodeType == XmlNodeType.EndElement)
                {
                    if (!xmlIn.Read())
                        return false;
                }

                return true;
            }

            // Accepts the current version or any version listed as compatible.
            private static bool IsFormatVersionValid(string formatVersion)
            {
                if (formatVersion == ConfigFileVersion)
                    return true;

                foreach (string s in CompatibleConfigFileVersions)
                    if (s == formatVersion)
                        return true;

                return false;
            }
        }

        public void SaveAsXml(string fileName)
        {
            Persistor.SaveAsXml(this, fileName);
        }

        public void SaveAsXml(string fileName, Encoding
encoding) { Persistor.SaveAsXml(this, fileName, encoding); } public void SaveAsXml(Stream stream, Encoding encoding) { Persistor.SaveAsXml(this, stream, encoding); } public void SaveAsXml(Stream stream, Encoding encoding, bool upstream) { Persistor.SaveAsXml(this, stream, encoding, upstream); } public void LoadFromXml(string fileName, DeserializeDockContent deserializeContent) { Persistor.LoadFromXml(this, fileName, deserializeContent); } public void LoadFromXml(Stream stream, DeserializeDockContent deserializeContent) { Persistor.LoadFromXml(this, stream, deserializeContent); } public void LoadFromXml(Stream stream, DeserializeDockContent deserializeContent, bool closeStream) { Persistor.LoadFromXml(this, stream, deserializeContent, closeStream); } } }
/*******************************************************************************
 * Copyright (c) 2013, Daniel Murphy
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 * 	* Redistributions of source code must retain the above copyright notice,
 * 	  this list of conditions and the following disclaimer.
 * 	* Redistributions in binary form must reproduce the above copyright notice,
 * 	  this list of conditions and the following disclaimer in the documentation
 * 	  and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 ******************************************************************************/
using System.Diagnostics;
using SharpBox2D.Common;
using SharpBox2D.Pooling;

namespace SharpBox2D.Dynamics.Joints
{
    /**
     * A mouse joint is used to make a point on a body track a specified world point. This a soft
     * constraint with a maximum force. This allows the constraint to stretch and without applying huge
     * forces. NOTE: this joint is not documented in the manual because it was developed to be used in
     * the testbed. If you want to learn how to use the mouse joint, look at the testbed.
     *
     * @author Daniel
     */
    public class MouseJoint : Joint
    {
        // Anchor on body B, expressed in body-local coordinates.
        private Vec2 m_localAnchorB = new Vec2();
        // The world-space target point the anchor is pulled towards.
        private Vec2 m_targetA = new Vec2();
        private float m_frequencyHz;
        private float m_dampingRatio;
        private float m_beta;

        // Solver shared
        private Vec2 m_impulse = new Vec2();
        private float m_maxForce;
        private float m_gamma;

        // Solver temp (valid between initVelocityConstraints and solveVelocityConstraints)
        private int m_indexB;
        private Vec2 m_rB = new Vec2();
        private Vec2 m_localCenterB = new Vec2();
        private float m_invMassB;
        private float m_invIB;
        private Mat22 m_mass = new Mat22();
        private Vec2 m_C = new Vec2();

        internal MouseJoint(IWorldPool argWorld, MouseJointDef def) : base(argWorld, def)
        {
            Debug.Assert(def.target.isValid());
            Debug.Assert(def.maxForce >= 0);
            Debug.Assert(def.frequencyHz >= 0);
            Debug.Assert(def.dampingRatio >= 0);

            // Record the initial world target and convert it into body B's local frame
            // so the anchor stays fixed on the body as it moves.
            m_targetA.set(def.target);
            Transform.mulTransToOutUnsafe(m_bodyB.getTransform(), m_targetA, ref m_localAnchorB);

            m_maxForce = def.maxForce;
            m_impulse.setZero();

            m_frequencyHz = def.frequencyHz;
            m_dampingRatio = def.dampingRatio;

            m_beta = 0;
            m_gamma = 0;
        }

        // Anchor A is the world target point itself.
        public override void getAnchorA(ref Vec2 argOut)
        {
            argOut.set(m_targetA);
        }

        // Anchor B is the local anchor transformed back into world coordinates.
        public override void getAnchorB(ref Vec2 argOut)
        {
            m_bodyB.getWorldPointToOut(m_localAnchorB, ref argOut);
        }

        // Reaction force = accumulated impulse scaled by 1/dt.
        public override void getReactionForce(float invDt, ref Vec2 argOut)
        {
            argOut.set(m_impulse);
            argOut.mulLocal(invDt);
        }

        // The mouse joint applies no torque.
        public override float getReactionTorque(float invDt)
        {
            return invDt*0.0f;
        }

        /// Updates the world target point; wakes body B so it starts tracking immediately.
        public void setTarget(Vec2 target)
        {
            if (m_bodyB.isAwake() == false)
            {
                m_bodyB.setAwake(true);
            }
            m_targetA.set(target);
        }

        public Vec2 getTarget()
        {
            return m_targetA;
        }

        /// set/get the maximum force in Newtons.
        public void setMaxForce(float force)
        {
            m_maxForce = force;
        }

        public float getMaxForce()
        {
            return m_maxForce;
        }

        /// set/get the frequency in Hertz.
        public void setFrequency(float hz)
        {
            m_frequencyHz = hz;
        }

        public float getFrequency()
        {
            return m_frequencyHz;
        }

        /// set/get the damping ratio (dimensionless).
        public void setDampingRatio(float ratio)
        {
            m_dampingRatio = ratio;
        }

        public float getDampingRatio()
        {
            return m_dampingRatio;
        }

        public override void initVelocityConstraints(SolverData data)
        {
            m_indexB = m_bodyB.m_islandIndex;
            m_localCenterB.set(m_bodyB.m_sweep.localCenter);
            m_invMassB = m_bodyB.m_invMass;
            m_invIB = m_bodyB.m_invI;

            Vec2 cB = data.positions[m_indexB].c;
            float aB = data.positions[m_indexB].a;
            Vec2 vB = data.velocities[m_indexB].v;
            float wB = data.velocities[m_indexB].w;

            Rot qB = pool.popRot();
            qB.set(aB);

            float mass = m_bodyB.getMass();

            // Frequency
            float omega = 2.0f*MathUtils.PI*m_frequencyHz;

            // Damping coefficient
            float d = 2.0f*mass*m_dampingRatio*omega;

            // Spring stiffness
            float k = mass*(omega*omega);

            // magic formulas
            // gamma has units of inverse mass.
            // beta has units of inverse time.
            float h = data.step.dt;
            Debug.Assert(d + h*k > Settings.EPSILON);
            m_gamma = h*(d + h*k);
            if (m_gamma != 0.0f)
            {
                m_gamma = 1.0f/m_gamma;
            }
            m_beta = h*k*m_gamma;

            // rB = rotated (localAnchorB - localCenterB): anchor offset from the center of mass.
            Vec2 temp = pool.popVec2();
            temp.set(m_localAnchorB);
            temp.subLocal(m_localCenterB);

            // Compute the effective mass matrix.
            Rot.mulToOutUnsafe(qB, temp, ref m_rB);

            // K = [(1/m1 + 1/m2) * eye(2) - skew(r1) * invI1 * skew(r1) - skew(r2) * invI2 * skew(r2)]
            // = [1/m1+1/m2 0 ] + invI1 * [r1.y*r1.y -r1.x*r1.y] + invI2 * [r1.y*r1.y -r1.x*r1.y]
            // [ 0 1/m1+1/m2] [-r1.x*r1.y r1.x*r1.x] [-r1.x*r1.y r1.x*r1.x]
            Mat22 K = pool.popMat22();
            K.ex.x = m_invMassB + m_invIB*m_rB.y*m_rB.y + m_gamma;
            K.ex.y = -m_invIB*m_rB.x*m_rB.y;
            K.ey.x = K.ex.y;
            K.ey.y = m_invMassB + m_invIB*m_rB.x*m_rB.x + m_gamma;

            K.invertToOut(ref m_mass);

            // Position error scaled by beta (soft-constraint bias term).
            m_C.set(cB);
            m_C.addLocal(m_rB);
            m_C.subLocal(m_targetA);
            m_C.mulLocal(m_beta);

            // Cheat with some damping
            wB *= 0.98f;

            if (data.step.warmStarting)
            {
                // Warm start: re-apply last step's impulse scaled by the dt ratio.
                m_impulse.mulLocal(data.step.dtRatio);
                vB.x += m_invMassB*m_impulse.x;
                vB.y += m_invMassB*m_impulse.y;
                wB += m_invIB*Vec2.cross(m_rB, m_impulse);
            }
            else
            {
                m_impulse.setZero();
            }

            // data.velocities[m_indexB].v.set(vB);
            data.velocities[m_indexB].w = wB;

            pool.pushVec2(1);
            pool.pushMat22(1);
            pool.pushRot(1);
        }

        // Soft constraint: no position correction is ever needed.
        public override bool solvePositionConstraints(SolverData data)
        {
            return true;
        }

        public override void solveVelocityConstraints(SolverData data)
        {
            Vec2 vB = data.velocities[m_indexB].v;
            float wB = data.velocities[m_indexB].w;

            // Cdot = v + cross(w, r)
            Vec2 Cdot = pool.popVec2();
            Vec2.crossToOutUnsafe(wB, m_rB, ref Cdot);
            Cdot.addLocal(vB);

            Vec2 impulse = pool.popVec2();
            Vec2 temp = pool.popVec2();

            // impulse = -mass * (Cdot + C + gamma * accumulatedImpulse)
            temp.set(m_impulse);
            temp.mulLocal(m_gamma);
            temp.addLocal(m_C);
            temp.addLocal(Cdot);
            temp.negateLocal();
            Mat22.mulToOutUnsafe(m_mass, temp, ref impulse);

            // Clamp the total accumulated impulse to maxForce * dt.
            Vec2 oldImpulse = temp;
            oldImpulse.set(m_impulse);
            m_impulse.addLocal(impulse);
            float maxImpulse = data.step.dt*m_maxForce;
            if (m_impulse.lengthSquared() > maxImpulse*maxImpulse)
            {
                m_impulse.mulLocal(maxImpulse/m_impulse.length());
            }
            // Apply only the delta between the clamped total and the previous total.
            impulse.set(m_impulse);
            impulse.subLocal(oldImpulse);

            vB.x += m_invMassB*impulse.x;
            vB.y += m_invMassB*impulse.y;
            wB += m_invIB*Vec2.cross(m_rB, impulse);

            // data.velocities[m_indexB].v.set(vB);
            data.velocities[m_indexB].w = wB;

            pool.pushVec2(3);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.

#if FEATURE_CTYPES

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Numerics;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Security;
using System.Security.Permissions;
using System.Threading;

using IronPython.Runtime;
using IronPython.Runtime.Exceptions;
using IronPython.Runtime.Operations;
using IronPython.Runtime.Types;

using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;

[assembly: PythonModule("_ctypes", typeof(IronPython.Modules.CTypes))]
namespace IronPython.Modules {
    /// <summary>
    /// Provides support for interop with native code from Python code.
    /// </summary>
    public static partial class CTypes {
        private static readonly object _lock = new object();                // lock for creating dynamic module for unsafe code
        private static readonly object _pointerTypeCacheKey = new object(); // key for system state for the pointer type cache
        private static readonly object _conversion_mode = new object();     // key for system state current conversion mode
        private static Dictionary<object, RefCountInfo> _refCountTable;     // dictionary used to maintain a ref count on objects
        private static ModuleBuilder _dynamicModule;                        // the dynamic module we generate unsafe code into
        private static Dictionary<int, Type> _nativeTypes = new Dictionary<int, Type>(); // native types of the specified size for marshalling
        private static StringAtDelegate _stringAt = StringAt, _wstringAt = WStringAt;    // delegates for wchar/char functions we hand addresses out to (just keeping it alive)
        private static CastDelegate _cast = Cast;                           // delegate for cast function whose address we hand out (just keeping it alive)

        public const string __version__ = "1.1.0";

        // Signature for the native-callable cast helper; all three arguments arrive as raw pointers.
        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
        private delegate IntPtr CastDelegate(IntPtr data, IntPtr obj, IntPtr type);

        // Signature for the string_at/wstring_at helpers exposed to native code.
        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
        private delegate IntPtr StringAtDelegate(IntPtr addr, int length);

        /// <summary>
        /// Module (re)initialization: registers the module's exception types and
        /// seeds per-runtime state (pointer type cache, conversion mode).
        /// </summary>
        [SpecialName]
        public static void PerformModuleReload(PythonContext/*!*/ context, PythonDictionary/*!*/ dict) {
            context.EnsureModuleException("ArgumentError", dict, "ArgumentError", "_ctypes");

            // TODO: Provide an implementation which is coordinated with our _refCountTable
            // context.SystemState.__dict__["getrefcount"] = null;

            PythonDictionary pointerTypeCache = new PythonDictionary();
            dict["_pointer_type_cache"] = pointerTypeCache;
            context.SetModuleState(_pointerTypeCacheKey, pointerTypeCache);

            // COMError and the "mbcs" conversion mode only exist on Windows variants.
            if (Environment.OSVersion.Platform == PlatformID.Win32NT ||
                Environment.OSVersion.Platform == PlatformID.Win32S ||
                Environment.OSVersion.Platform == PlatformID.Win32Windows ||
                Environment.OSVersion.Platform == PlatformID.WinCE) {
                context.EnsureModuleException(
                    "COMError",
                    PythonExceptions.Exception,
                    typeof(_COMError),
                    dict,
                    "COMError",
                    "_ctypes",
                    "Raised when a COM method call failed.",
                    (msg, _) => new COMException(msg)
                );

                context.SetModuleState(_conversion_mode, PythonTuple.MakeTuple("mbcs", "ignore"));
            } else {
                context.SetModuleState(_conversion_mode, PythonTuple.MakeTuple("ascii", "strict"));
            }
        }

        #region Public Functions

        /// <summary>
        /// Gets a function which casts the specified memory. Because this is used only
        /// w/ Python API we use a delegate as the return type instead of an actual address.
        /// </summary>
        public static object _cast_addr {
            get {
                return Marshal.GetFunctionPointerForDelegate(_cast).ToPython();
            }
        }

        /// <summary>
        /// Implementation of our cast function. data is marshalled as a void*
        /// so it ends up as an address. obj and type are marshalled as an object
        /// so we need to unmarshal them.
        /// </summary>
        private static IntPtr Cast(IntPtr data, IntPtr obj, IntPtr type) {
            // obj and type arrived as opaque GCHandles — unwrap them first.
            GCHandle objHandle = GCHandle.FromIntPtr(obj);
            GCHandle typeHandle = GCHandle.FromIntPtr(type);
            try {
                CData cdata = objHandle.Target as CData;
                PythonType pt = (PythonType)typeHandle.Target;

                CData res = (CData)pt.CreateInstance(pt.Context.SharedContext);

                if (IsPointer(pt)) {
                    // Casting to a pointer type: the result owns a fresh pointer-sized buffer.
                    res._memHolder = new MemoryHolder(IntPtr.Size);
                    if (IsPointer(DynamicHelpers.GetPythonType(cdata))) {
                        // pointer -> pointer: copy the stored address
                        res._memHolder.WriteIntPtr(0, cdata._memHolder.ReadIntPtr(0));
                    } else {
                        // non-pointer -> pointer: the source data's address becomes the pointer value
                        res._memHolder.WriteIntPtr(0, data);
                    }

                    if (cdata != null) {
                        // keep the source object reachable for as long as the new pointer lives
                        res._memHolder.Objects = cdata._memHolder.Objects;
                        res._memHolder.AddObject(IdDispenser.GetId(cdata), cdata);
                    }
                } else {
                    // Casting to a non-pointer type: alias the existing memory rather than copying it.
                    if (cdata != null) {
                        res._memHolder = new MemoryHolder(data, ((INativeType)pt).Size, cdata._memHolder);
                    } else {
                        res._memHolder = new MemoryHolder(data, ((INativeType)pt).Size);
                    }
                }

                // the result is handed back as a newly allocated GCHandle
                return GCHandle.ToIntPtr(GCHandle.Alloc(res));
            } finally {
                typeHandle.Free();
                objHandle.Free();
            }
        }

        // True when the Python type is any flavor of ctypes pointer:
        // a PointerType, or a SimpleType of kind Pointer/CharPointer/WCharPointer.
        private static bool IsPointer(PythonType pt) {
            SimpleType simpleType;
            return pt is PointerType ||
                ((simpleType = pt as SimpleType) != null &&
                 (simpleType._type == SimpleTypeKind.Pointer ||
                  simpleType._type == SimpleTypeKind.CharPointer ||
                  simpleType._type == SimpleTypeKind.WCharPointer));
        }

        // Addresses of the native/managed helper functions exposed to Python code.
        public static object _memmove_addr {
            get {
                return NativeFunctions.GetMemMoveAddress().ToPython();
            }
        }

        public static object _memset_addr {
            get {
                return NativeFunctions.GetMemSetAddress().ToPython();
            }
        }

        public static object _string_at_addr {
            get {
                return Marshal.GetFunctionPointerForDelegate(_stringAt).ToPython();
            }
        }

        public static object _wstring_at_addr {
            get {
                return Marshal.GetFunctionPointerForDelegate(_wstringAt).ToPython();
            }
        }

        public static int CopyComPointer(object src, object dest) {
            throw new NotImplementedException("CopyComPointer");
        }

        /// <summary>
        /// Formats the most recent Win32 error (see get_last_error) as a message string.
        /// </summary>
        public static string FormatError() {
            return FormatError(get_last_error());
        }

        /// <summary>
        /// Formats the given Win32 error code as a message string.
        /// </summary>
        public static string FormatError(int errorCode) {
            return new Win32Exception(errorCode).Message;
        }

        [SupportedOSPlatform("windows"), PythonHidden(PlatformsAttribute.PlatformFamily.Unix)]
        public static void FreeLibrary(int handle) {
            FreeLibrary(new IntPtr(handle));
        }

        [SupportedOSPlatform("windows"), PythonHidden(PlatformsAttribute.PlatformFamily.Unix)]
        public static void FreeLibrary(BigInteger handle) {
            FreeLibrary(new IntPtr((long)handle));
        }

        [SupportedOSPlatform("windows"), PythonHidden(PlatformsAttribute.PlatformFamily.Unix)]
        public static void FreeLibrary(IntPtr handle) {
            NativeFunctions.FreeLibrary(handle);
        }

#nullable enable

        // Shared implementation behind LoadLibrary (Windows) and dlopen (Unix).
        private static object LoadDLL(string? library, int mode) {
            if (library is not null && library.IndexOf((char)0) != -1)
                throw PythonOps.ValueError("embedded null byte");

            IntPtr res = NativeFunctions.LoadDLL(library, mode);
            if (res == IntPtr.Zero) {
                throw PythonOps.OSError($"cannot load library {library}");
            }

            return res.ToPython();
        }

        [SupportedOSPlatform("windows"), PythonHidden(PlatformsAttribute.PlatformFamily.Unix)]
        public static object LoadLibrary([NotNull] string library, int mode = 0)
            => LoadDLL(library, mode);

        [PythonHidden(PlatformsAttribute.PlatformFamily.Windows)]
        public static object dlopen(string? library, int mode = 0)
            => LoadDLL(library, mode);

#nullable restore

        /// <summary>
        /// Returns a new type which represents a pointer given the existing type.
        /// </summary>
        public static PythonType POINTER(CodeContext/*!*/ context, PythonType type) {
            PythonContext pc = context.LanguageContext;
            PythonDictionary dict = (PythonDictionary)pc.GetModuleState(_pointerTypeCacheKey);
            lock (dict) {
                // Pointer types are cached per-runtime so LP_Foo is created only once.
                if (!dict.TryGetValue(type, out object res)) {
                    string name;
                    if (type == null) {
                        // POINTER(None) is the void pointer type
                        name = "c_void_p";
                    } else {
                        name = "LP_" + type.Name;
                    }

                    dict[type] = res = MakePointer(context, name, PythonOps.MakeDictFromItems(new object[] { type, "_type_" }));
                }

                return res as PythonType;
            }
        }

        // Creates a new PointerType deriving from _Pointer with the given name and class dict.
        private static PointerType MakePointer(CodeContext context, string name, PythonDictionary dict) {
            return new PointerType(context,
                name,
                PythonTuple.MakeTuple(_Pointer),
                dict
            );
        }

        /// <summary>
        /// Creates an incomplete pointer type known only by name; it is cached by its
        /// object id (not by pointee type) since there is no pointee yet.
        /// </summary>
        public static PythonType POINTER(CodeContext/*!*/ context, [NotNull]string name) {
            PythonType res = MakePointer(context, name, new PythonDictionary());
            PythonContext pc = context.LanguageContext;
            PythonDictionary dict = (PythonDictionary)pc.GetModuleState(_pointerTypeCacheKey);
            lock (dict) {
                dict[Builtin.id(res)] = res;
            }

            return res;
        }

        /// <summary>
        /// Converts an address acquired from PyObj_FromPtr or that has been
        /// marshaled as type 'O' back into an object.
        /// </summary>
        public static object PyObj_FromPtr(IntPtr address) {
            // The handle was allocated by PyObj_ToPtr (or 'O' marshalling); consume and free it.
            GCHandle handle = GCHandle.FromIntPtr(address);
            object res = handle.Target;
            handle.Free();
            return res;
        }

        /// <summary>
        /// Converts an object into an opaque address which can be handed out to
        /// managed code.
        /// </summary>
        public static IntPtr PyObj_ToPtr(object obj) {
            return GCHandle.ToIntPtr(GCHandle.Alloc(obj));
        }

        /// <summary>
        /// Decreases the ref count on an object which has been increased with
        /// Py_INCREF.
        /// </summary>
        public static void Py_DECREF(object key) {
            EnsureRefCountTable();

            lock (_refCountTable) {
                if (!_refCountTable.TryGetValue(key, out RefCountInfo info)) {
                    // dec without an inc
                    throw new InvalidOperationException();
                }

                info.RefCount--;
                if (info.RefCount == 0) {
                    // last reference released: unpin the object and drop the table entry
                    info.Handle.Free();
                    _refCountTable.Remove(key);
                }
            }
        }

        /// <summary>
        /// Increases the ref count on an object ensuring that it will not be collected.
        /// </summary>
        public static void Py_INCREF(object key) {
            EnsureRefCountTable();

            lock (_refCountTable) {
                if (!_refCountTable.TryGetValue(key, out RefCountInfo info)) {
                    _refCountTable[key] = info = new RefCountInfo();
                    // TODO: this only works w/ blittable types, what to do for others?
                    info.Handle = GCHandle.Alloc(key, GCHandleType.Pinned);
                }

                info.RefCount++;
            }
        }

        // for testing purposes only
        public static PythonTuple buffer_info(CData data) {
            return data.GetBufferInfo();
        }

        // Raises WindowsError when the HRESULT indicates failure (negative value).
        public static void _check_HRESULT(int hresult) {
            if (hresult < 0) {
                throw PythonOps.WindowsError("ctypes function returned failed HRESULT: {0:x}", (uint)hresult);
            }
        }

        public static void _unpickle() {
        }

        /// <summary>
        /// returns address of C instance internal buffer.
        ///
        /// It is the callers responsibility to ensure that the provided instance will
        /// stay alive if memory in the resulting address is to be used later.
        /// </summary>
        public static object addressof(CData data) {
            return data._memHolder.UnsafeAddress.ToPython();
        }

        /// <summary>
        /// Gets the required alignment of the given type.
        /// </summary>
        public static int alignment(PythonType type) {
            if (!(type is INativeType nativeType)) {
                throw PythonOps.TypeError("this type has no size");
            }

            return nativeType.Alignment;
        }

        /// <summary>
        /// Gets the required alignment of an object.
/// </summary> public static int alignment(object o) { return alignment(DynamicHelpers.GetPythonType(o)); } public static object byref(CData instance, int offset = 0) { if (offset != 0) { // new in 2.6 throw new NotImplementedException("byref w/ arg"); } return new NativeArgument(instance, "P"); } public static object call_cdeclfunction(CodeContext context, int address, PythonTuple args) { return call_cdeclfunction(context, new IntPtr(address), args); } public static object call_cdeclfunction(CodeContext context, BigInteger address, PythonTuple args) { return call_cdeclfunction(context, new IntPtr((long)address), args); } public static object call_cdeclfunction(CodeContext context, IntPtr address, PythonTuple args) { CFuncPtrType funcType = GetFunctionType(context, FUNCFLAG_CDECL); _CFuncPtr func = (_CFuncPtr)funcType.CreateInstance(context, address); return PythonOps.CallWithArgsTuple(func, new object[0], args); } public static void call_commethod() { } public static object call_function(CodeContext context, int address, PythonTuple args) { return call_function(context, new IntPtr(address), args); } public static object call_function(CodeContext context, BigInteger address, PythonTuple args) { return call_function(context, new IntPtr((long)address), args); } public static object call_function(CodeContext context, IntPtr address, PythonTuple args) { CFuncPtrType funcType = GetFunctionType(context, FUNCFLAG_STDCALL); _CFuncPtr func = (_CFuncPtr)funcType.CreateInstance(context, address); return PythonOps.CallWithArgsTuple(func, new object[0], args); } private static CFuncPtrType GetFunctionType(CodeContext context, int flags) { // Ideally we should cache these... 
SimpleType resType = new SimpleType( context, "int", PythonTuple.MakeTuple(DynamicHelpers.GetPythonTypeFromType(typeof(SimpleCData))), PythonOps.MakeHomogeneousDictFromItems(new object[] { "i", "_type_" })); CFuncPtrType funcType = new CFuncPtrType( context, "func", PythonTuple.MakeTuple(DynamicHelpers.GetPythonTypeFromType(typeof(_CFuncPtr))), PythonOps.MakeHomogeneousDictFromItems(new object[] { FUNCFLAG_STDCALL, "_flags_", resType, "_restype_" })); return funcType; } public static int get_errno() { return 0; } public static int get_last_error() { if (Environment.OSVersion.Platform == PlatformID.Win32NT) { return NativeFunctions.GetLastError(); } throw PythonOps.NameError("get_last_error"); } /// <summary> /// Returns a pointer instance for the given CData /// </summary> public static Pointer pointer(CodeContext/*!*/ context, CData data) { PythonType ptrType = POINTER(context, DynamicHelpers.GetPythonType(data)); return (Pointer)ptrType.CreateInstance(context, data); } public static void resize(CData obj, int newSize) { if (newSize < obj.NativeType.Size) { throw PythonOps.ValueError("minimum size is {0}", newSize); } MemoryHolder newMem = new MemoryHolder(newSize); obj._memHolder.CopyTo(newMem, 0, Math.Min(obj._memHolder.Size, newSize)); obj._memHolder = newMem; } public static PythonTuple/*!*/ set_conversion_mode(CodeContext/*!*/ context, string encoding, string errors) { // TODO: Need an atomic update for module state PythonContext pc = context.LanguageContext; PythonTuple prev = (PythonTuple)pc.GetModuleState(_conversion_mode); pc.SetModuleState(_conversion_mode, PythonTuple.MakeTuple(encoding, errors)); return prev; } public static void set_errno() { // we can't support this without a native library } [SupportedOSPlatform("windows"), PythonHidden(PlatformsAttribute.PlatformFamily.Unix)] public static int set_last_error(int errorCode) { int old_errno = NativeFunctions.GetLastError(); NativeFunctions.SetLastError(errorCode); return old_errno; } public static 
int @sizeof(PythonType/*!*/ type) { if (!(type is INativeType simpleType)) { throw PythonOps.TypeError("this type has no size"); } return simpleType.Size; } public static int @sizeof(object/*!*/ instance) { if (instance is CData cdata && cdata._memHolder != null) { return cdata._memHolder.Size; } return @sizeof(DynamicHelpers.GetPythonType(instance)); } #endregion #region Public Constants public const int FUNCFLAG_STDCALL = 0; public const int FUNCFLAG_CDECL = 1; public const int FUNCFLAG_HRESULT = 2; public const int FUNCFLAG_PYTHONAPI = 4; public const int FUNCFLAG_USE_ERRNO = 8; public const int FUNCFLAG_USE_LASTERROR = 16; public const int RTLD_GLOBAL = 0; public const int RTLD_LOCAL = 0; #endregion #region Implementation Details /// <summary> /// Gets the ModuleBuilder used to generate our unsafe call stubs into. /// </summary> private static ModuleBuilder DynamicModule { get { if (_dynamicModule == null) { lock (_lock) { if (_dynamicModule == null) { var attributes = new[] { new CustomAttributeBuilder(typeof(UnverifiableCodeAttribute).GetConstructor(ReflectionUtils.EmptyTypes), new object[0]), #if !NETCOREAPP && !NETSTANDARD //PermissionSet(SecurityAction.Demand, Unrestricted = true) new CustomAttributeBuilder(typeof(PermissionSetAttribute).GetConstructor(new Type[] { typeof(SecurityAction) }), new object[]{ SecurityAction.Demand }, new PropertyInfo[] { typeof(PermissionSetAttribute).GetProperty(nameof(PermissionSetAttribute.Unrestricted)) }, new object[] { true } ) #endif }; string name = typeof(CTypes).Namespace + ".DynamicAssembly"; var assembly = AssemblyBuilder.DefineDynamicAssembly(new AssemblyName(name), AssemblyBuilderAccess.Run, attributes); #if !NETCOREAPP && !NETSTANDARD assembly.DefineVersionInfoResource(); #endif _dynamicModule = assembly.DefineDynamicModule(name); } } } return _dynamicModule; } } /// <summary> /// Given a specific size returns a .NET type of the equivalent size that /// we can use when marshalling these values across calls. 
/// </summary> private static Type/*!*/ GetMarshalTypeFromSize(int size) { lock (_nativeTypes) { if (!_nativeTypes.TryGetValue(size, out Type res)) { int sizeRemaining = size; TypeBuilder tb = DynamicModule.DefineType("interop_type_size_" + size, TypeAttributes.Public | TypeAttributes.SequentialLayout | TypeAttributes.Sealed | TypeAttributes.Serializable, typeof(ValueType), size); while (sizeRemaining > 8) { tb.DefineField("field" + sizeRemaining, typeof(long), FieldAttributes.Private); sizeRemaining -= 8; } while (sizeRemaining > 4) { tb.DefineField("field" + sizeRemaining, typeof(int), FieldAttributes.Private); sizeRemaining -= 4; } while (sizeRemaining > 0) { tb.DefineField("field" + sizeRemaining, typeof(byte), FieldAttributes.Private); sizeRemaining--; } _nativeTypes[size] = res = tb.CreateTypeInfo(); } return res; } } /// <summary> /// Shared helper between struct and union for getting field info and validating it. /// </summary> private static void GetFieldInfo(INativeType type, object o, out string fieldName, out INativeType cdata, out int? bitCount) { PythonTuple pt = o as PythonTuple; if (pt.Count != 2 && pt.Count != 3) { throw PythonOps.AttributeError("'_fields_' must be a sequence of pairs"); } fieldName = pt[0] as string; if (fieldName == null) { throw PythonOps.TypeError("first item in _fields_ tuple must be a string, got", PythonOps.GetPythonTypeName(pt[0])); } cdata = pt[1] as INativeType; if (cdata == null) { throw PythonOps.TypeError("second item in _fields_ tuple must be a C type, got {0}", PythonOps.GetPythonTypeName(pt[0])); } else if (cdata == type) { throw StructureCannotContainSelf(); } if (cdata is StructType st) { st.EnsureFinal(); } if (pt.Count != 3) { bitCount = null; } else { bitCount = CheckBits(cdata, pt); } } /// <summary> /// Verifies that the provided bit field settings are valid for this type. 
        /// </summary>
        private static int CheckBits(INativeType cdata, PythonTuple pt) {
            // pt is a (name, type, bits) tuple; the bit width is the 3rd element.
            int bitCount = Converter.ConvertToInt32(pt[2]);

            if (!(cdata is SimpleType simpType)) {
                throw PythonOps.TypeError("bit fields not allowed for type {0}", ((PythonType)cdata).Name);
            }

            switch (simpType._type) {
                case SimpleTypeKind.Object:
                case SimpleTypeKind.Pointer:
                case SimpleTypeKind.Single:
                case SimpleTypeKind.Double:
                case SimpleTypeKind.Char:
                case SimpleTypeKind.CharPointer:
                case SimpleTypeKind.WChar:
                case SimpleTypeKind.WCharPointer:
                    // bit fields are only permitted on integral simple types
                    throw PythonOps.TypeError("bit fields not allowed for type {0}", ((PythonType)cdata).Name);
            }

            // the width must be positive and fit within the storage type
            if (bitCount <= 0 || bitCount > cdata.Size * 8) {
                throw PythonOps.ValueError("number of bits invalid for bit field");
            }

            return bitCount;
        }

        /// <summary>
        /// Shared helper to get the _fields_ list for struct/union and validate it.
        /// </summary>
        private static IList<object>/*!*/ GetFieldsList(object fields) {
            if (!(fields is IList<object> list)) {
                throw PythonOps.TypeError("class must be a sequence of pairs");
            }

            return list;
        }

        private static Exception StructureCannotContainSelf() {
            return PythonOps.AttributeError("Structure or union cannot contain itself");
        }

        /// <summary>
        /// Managed implementation backing _string_at_addr: reads the bytes at the given
        /// address (appears to read until a NUL terminator when len == -1 — see
        /// MemoryHolder.ReadBytes) and hands them back as a GCHandle wrapping a Bytes
        /// object.  NOTE(review): the previous comment ("translating from memset to NT's
        /// FillMemory API") was a copy-paste error and did not describe this function.
        /// </summary>
        private static IntPtr StringAt(IntPtr src, int len) {
            Bytes res;
            if (len == -1) {
                res = MemoryHolder.ReadBytes(src, 0);
            } else {
                res = MemoryHolder.ReadBytes(src, 0, len);
            }

            return GCHandle.ToIntPtr(GCHandle.Alloc(res));
        }

        /// <summary>
        /// Managed implementation backing _wstring_at_addr: reads a UTF-16 string at the given address.
/// </summary>
private static IntPtr WStringAt(IntPtr src, int len) {
    string res;
    if (len == -1) {
        // -1 means read up to the first null terminator.
        res = Marshal.PtrToStringUni(src);
    } else {
        res = Marshal.PtrToStringUni(src, len);
    }
    // Pin the managed string behind a GCHandle and hand back its IntPtr token.
    return GCHandle.ToIntPtr(GCHandle.Alloc(res));
}

// Extracts the native library handle from a ctypes dll object via its _handle
// attribute; throws TypeError with the caller-supplied message on failure.
private static IntPtr GetHandleFromObject(object dll, string errorMsg) {
    IntPtr intPtrHandle;
    object dllHandle = PythonOps.GetBoundAttr(DefaultContext.Default, dll, "_handle");
    if (!Converter.TryConvertToBigInteger(dllHandle, out BigInteger intHandle)) {
        throw PythonOps.TypeError(errorMsg);
    }
    intPtrHandle = new IntPtr((long)intHandle);
    return intPtrHandle;
}

// The ValidateArraySizes overloads all funnel into the int/BigInteger versions,
// which reject a negative offset or a buffer too small for size bytes at offset.
private static void ValidateArraySizes(ArrayModule.array array, int offset, int size) {
    ValidateArraySizes(array.__len__() * array.itemsize, offset, size);
}

private static void ValidateArraySizes(Bytes bytes, int offset, int size) {
    ValidateArraySizes(bytes.Count, offset, size);
}

private static void ValidateArraySizes(string data, int offset, int size) {
    ValidateArraySizes(data.Length, offset, size);
}

private static void ValidateArraySizes(int arraySize, int offset, int size) {
    if (offset < 0) {
        throw PythonOps.ValueError("offset cannot be negative");
    } else if (arraySize < size + offset) {
        throw PythonOps.ValueError($"Buffer size too small ({arraySize} instead of at least {size} bytes)");
    }
}

private static void ValidateArraySizes(BigInteger arraySize, int offset, int size) {
    if (offset < 0) {
        throw PythonOps.ValueError("offset cannot be negative");
    } else if (arraySize < size + offset) {
        throw PythonOps.ValueError($"Buffer size too small ({arraySize} instead of at least {size} bytes)");
    }
}

// TODO: Move these to an Ops class
// Accessors bound as the .value / .raw descriptors of c_char/c_wchar arrays.
[PythonHidden]
public static object GetCharArrayValue(_Array arr) {
    return arr.NativeType.GetValue(arr._memHolder, arr, 0, false);
}

[PythonHidden]
public static void SetCharArrayValue(_Array arr, object value) {
    arr.NativeType.SetValue(arr._memHolder, 0, value);
}

[PythonHidden]
public static void DeleteCharArrayValue(_Array arr) {
    throw PythonOps.TypeError("cannot delete char array value");
}

[PythonHidden]
public static object GetCharArrayRaw(_Array arr) {
    return ((ArrayType)arr.NativeType).GetRawValue(arr._memHolder, 0);
}

[PythonHidden]
public static void SetCharArrayRaw(_Array arr, object value) {
    ((ArrayType)arr.NativeType).SetRawValue(arr._memHolder, 0, value);
}

[PythonHidden]
public static void DeleteCharArrayRaw(_Array arr) {
    // NOTE(review): throws AttributeError while DeleteCharArrayValue throws
    // TypeError - confirm the asymmetry is intentional.
    throw PythonOps.AttributeError("cannot delete char array raw");
}

[PythonHidden]
public static object GetWCharArrayValue(_Array arr) {
    return arr.NativeType.GetValue(arr._memHolder, arr, 0, false);
}

[PythonHidden]
public static void SetWCharArrayValue(_Array arr, object value) {
    arr.NativeType.SetValue(arr._memHolder, 0, value);
}

[PythonHidden]
public static object DeleteWCharArrayValue(_Array arr) {
    throw PythonOps.TypeError("cannot delete wchar array value");
}

// Per-object reference count plus the GCHandle keeping the object alive.
private class RefCountInfo {
    public int RefCount;
    public GCHandle Handle;
}

/// <summary>
/// Emits the marshalling code to create a CData object for reverse marshalling.
/// </summary>
private static void EmitCDataCreation(INativeType type, ILGenerator method, List<object> constantPool, int constantPoolArgument) {
    // Spill the raw native value into a local so its address can be taken.
    LocalBuilder locVal = method.DeclareLocal(type.GetNativeType());
    method.Emit(OpCodes.Stloc, locVal);
    method.Emit(OpCodes.Ldloca, locVal);

    // Append the INativeType to the constant pool and load it back from the
    // pool array (argument index constantPoolArgument) at index Count - 1.
    constantPool.Add(type);
    method.Emit(OpCodes.Ldarg, constantPoolArgument);
    method.Emit(OpCodes.Ldc_I4, constantPool.Count - 1);
    method.Emit(OpCodes.Ldelem_Ref);

    method.Emit(OpCodes.Call, typeof(ModuleOps).GetMethod(nameof(ModuleOps.CreateCData)));
}

// Lazily creates the shared ref-count table; Interlocked.CompareExchange makes
// the one-time initialization race-safe without taking a lock.
private static void EnsureRefCountTable() {
    if (_refCountTable == null) {
        Interlocked.CompareExchange(ref _refCountTable, new Dictionary<object, RefCountInfo>(), null);
    }
}

#endregion

[PythonHidden, PythonType("COMError"), DynamicBaseType]
public class _COMError : PythonExceptions.BaseException {
    public _COMError(PythonType cls) : base(cls) { }

    // Expects (hresult, text, details); extra arguments are accepted but ignored.
    // NOTE(review): the guard is "< 3" while the message claims "exactly 4"
    // (possibly CPython-style counting that includes self) - confirm intent.
    public override void __init__(params object[] args) {
        base.__init__(args);
        if (args.Length < 3) {
            throw PythonOps.TypeError($"COMError() takes exactly 4 arguments({args.Length} given)");
        }
        hresult = args[0];
        text = args[1];
        details = args[2];
    }

    public object hresult { get; set; }
    public object text { get; set; }
    public object details { get; set; }
}
}
}
#endif
/***************************************************************************************************************************************
 * Copyright (C) 2001-2012 LearnLift USA
 * Contact: Learnlift USA, 12 Greenway Plaza, Suite 1510, Houston, Texas 77046, support@memorylifter.com
 *
 * This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License along with this library; if not,
 * write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 ***************************************************************************************************************************************/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Xml;
using MLifter.DAL.Interfaces;
using MLifter.DAL.Tools;
using MLifter.DAL.DB.MsSqlCe;
using MLifter.DAL.Interfaces.DB;
using MLifter.DAL.Security;
using MLifter.Generics;

namespace MLifter.DAL.DB
{
    /// <summary>
    /// Database implementation of IDictionary.
    /// </summary>
    /// <remarks>Documented by Dev03, 2009-01-13</remarks>
    public class DbDictionary : IDictionary
    {
        // Worker used to report progress and honor cancellation in long-running operations.
        private BackgroundWorker m_BackgroundWorker = null;

        /// <summary>
        /// Initializes a new instance of the <see cref="DbDictionary"/> class.
        /// </summary>
        /// <param name="lmid">The learning module id.</param>
        /// <param name="user">The user.</param>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public DbDictionary(int lmid, IUser user)
        {
            id = lmid;
            parent = new ParentClass(user, this);
            // Media content is served through a local media server instance.
            DbMediaServer.DbMediaServer.Instance(parent).Start();
        }

        // Each connector property dispatches on the current connection's database
        // type (PostgreSQL or SQL Server CE) and returns the matching singleton.
        private IDbDictionaryConnector connector
        {
            get
            {
                switch (parent.CurrentUser.ConnectionString.Typ)
                {
                    case DatabaseType.PostgreSQL:
                        return MLifter.DAL.DB.PostgreSQL.PgSqlDictionaryConnector.GetInstance(parent);
                    case DatabaseType.MsSqlCe:
                        return MLifter.DAL.DB.MsSqlCe.MsSqlCeDictionaryConnector.GetInstance(parent);
                    default:
                        throw new UnsupportedDatabaseTypeException(parent.CurrentUser.ConnectionString.Typ);
                }
            }
        }

        private IDbCardStyleConnector cardstyleconnector
        {
            get
            {
                switch (parent.CurrentUser.ConnectionString.Typ)
                {
                    case DatabaseType.PostgreSQL:
                        return MLifter.DAL.DB.PostgreSQL.PgSqlCardStyleConnector.GetInstance(parent);
                    case DatabaseType.MsSqlCe:
                        return MsSqlCeCardStyleConnector.GetInstance(parent);
                    default:
                        throw new UnsupportedDatabaseTypeException(parent.CurrentUser.ConnectionString.Typ);
                }
            }
        }

        private IDbMediaConnector mediaconnector
        {
            get
            {
                switch (parent.CurrentUser.ConnectionString.Typ)
                {
                    case DatabaseType.PostgreSQL:
                        return MLifter.DAL.DB.PostgreSQL.PgSqlMediaConnector.GetInstance(parent);
                    case DatabaseType.MsSqlCe:
                        return MLifter.DAL.DB.MsSqlCe.MsSqlCeMediaConnector.GetInstance(parent);
                    default:
                        throw new UnsupportedDatabaseTypeException(parent.CurrentUser.ConnectionString.Typ);
                }
            }
        }

        // Same dispatch as mediaconnector, but for an arbitrary parent (used when
        // pulling media from another media server instance).
        private IDbMediaConnector GetMediaConnector(ParentClass customParent)
        {
            switch (customParent.CurrentUser.ConnectionString.Typ)
            {
                case DatabaseType.PostgreSQL:
                    return MLifter.DAL.DB.PostgreSQL.PgSqlMediaConnector.GetInstance(customParent);
                case DatabaseType.MsSqlCe:
                    return MLifter.DAL.DB.MsSqlCe.MsSqlCeMediaConnector.GetInstance(customParent);
                default:
                    throw new UnsupportedDatabaseTypeException(customParent.CurrentUser.ConnectionString.Typ);
            }
        }

        private IDbExtensionConnector extensionconnector
        {
            get
            {
                switch (parent.CurrentUser.ConnectionString.Typ)
                {
                    case DatabaseType.PostgreSQL:
                        return MLifter.DAL.DB.PostgreSQL.PgSqlExtensionConnector.GetInstance(parent);
                    case DatabaseType.MsSqlCe:
                        return MLifter.DAL.DB.MsSqlCe.MsSqlCeExtensionConnector.GetInstance(parent);
                    default:
                        throw new UnsupportedDatabaseTypeException(parent.CurrentUser.ConnectionString.Typ);
                }
            }
        }

        // Files queued here are deleted when the dictionary is disposed.
        internal FileCleanupQueue FileCleanupQueue = new FileCleanupQueue();

        #region IDictionary Members

        /// <summary>
        /// Gets a value indicating whether this instance is DB.
        /// </summary>
        /// <value><c>true</c> if this instance is DB; otherwise, <c>false</c>.</value>
        /// <remarks>Documented by Dev03, 2008-08-22</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public bool IsDB
        {
            get { return true; }
        }

        /// <summary>
        /// Gets or sets the background worker.
        /// </summary>
        /// <value>The background worker.</value>
        /// <remarks>Documented by Dev03, 2007-09-11</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public System.ComponentModel.BackgroundWorker BackgroundWorker
        {
            get { return m_BackgroundWorker; }
            set { m_BackgroundWorker = value; }
        }

        /// <summary>
        /// Occurs when [XML progress changed].
        /// </summary>
        /// <remarks>Documented by Dev03, 2008-08-21</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public event StatusMessageEventHandler XmlProgressChanged;

        /// <summary>
        /// Occurs when [move progress changed].
        /// </summary>
        /// <remarks>Documented by Dev03, 2008-08-21</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public event StatusMessageEventHandler MoveProgressChanged;

        /// <summary>
        /// Occurs when [save as progress changed].
        /// </summary>
        /// <remarks>Documented by Dev03, 2008-08-21</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public event StatusMessageEventHandler SaveAsProgressChanged;

        /// <summary>
        /// Occurs when [create media progress changed].
        /// </summary>
        /// <remarks>Documented by Dev03, 2008-08-21</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public event StatusMessageEventHandler CreateMediaProgressChanged;

        /// <summary>
        /// Gets the connection string (could contain a path or a db connection string).
        /// </summary>
        /// <value>The connection string.</value>
        /// <remarks>Documented by Dev03, 2007-10-17</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public string Connection
        {
            get
            {
                //ToDo (done): Convert.ToString(Global.Properties[Property.ConnectionString])
                return Parent.CurrentUser.ConnectionString.ConnectionString;
                //return Environment.CurrentDirectory;
            }
        }

        /// <summary>
        /// Gets the dictionary as Xml: a &lt;dictionary&gt; element containing every
        /// chapter followed by every card, with XmlProgress reported per card.
        /// </summary>
        /// <value>The Xml.</value>
        /// <remarks>Documented by Dev03, 2007-09-03</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public string Xml
        {
            get
            {
                //XmlDocument based approach
                XmlDocument document = new XmlDocument();
                XmlNode dictionary = document.CreateElement(string.Empty, "dictionary", string.Empty);
                foreach (IChapter chapter in Chapters.Chapters)
                {
                    dictionary.AppendChild(document.ImportNode((XmlNode)(chapter as DbChapter).Chapter, true));
                }
                StatusMessageEventArgs args = new StatusMessageEventArgs(StatusMessageType.XmlProgress, Cards.Count);
                foreach (ICard card in Cards.Cards)
                {
                    // NOTE: the cancel request returned by ReportProgressUpdate is ignored here.
                    ReportProgressUpdate(args);
                    args.Progress++;
                    dictionary.AppendChild(document.ImportNode((XmlNode)card.Card, true));
                }
                document.AppendChild(dictionary);
                return document.OuterXml;
            }
        }

        /// <summary>
        /// Sends the status message update.
        /// </summary>
        /// <param name="args">The <see cref="MLifter.DAL.Tools.StatusMessageEventArgs"/> instance containing the event data.</param>
        /// <returns>
        /// [false] if the process should be canceled, [true] to continue.
        /// </returns>
        /// <remarks>Documented by Dev03, 2008-08-20</remarks>
        private bool ReportProgressUpdate(StatusMessageEventArgs args)
        {
            // Fan the message out to the event matching its type.
            switch (args.MessageType)
            {
                case StatusMessageType.XmlProgress:
                    if (XmlProgressChanged != null)
                        XmlProgressChanged(null, args);
                    break;
                case StatusMessageType.MoveProgress:
                    if (MoveProgressChanged != null)
                        MoveProgressChanged(null, args);
                    break;
                case StatusMessageType.SaveAsProgress:
                    if (SaveAsProgressChanged != null)
                        SaveAsProgressChanged(null, args);
                    break;
                case StatusMessageType.CreateMediaProgress:
                    if (CreateMediaProgressChanged != null)
                        CreateMediaProgressChanged(null, args);
                    break;
            }
            // Forward progress to the background worker; a pending cancellation
            // makes this method return false (the original doc said the opposite).
            bool cancelProcess = false;
            if (m_BackgroundWorker != null)
            {
                if (m_BackgroundWorker.CancellationPending)
                {
                    cancelProcess = true;
                }
                else
                {
                    m_BackgroundWorker.ReportProgress(args.ProgressPercentage);
                }
            }
            return !cancelProcess;
        }

        /// <summary>
        /// Sends the status message update on behalf of another DbDictionary instance.
        /// </summary>
        /// <param name="args">The <see cref="MLifter.DAL.Tools.StatusMessageEventArgs"/> instance containing the event data.</param>
        /// <param name="caller">The calling object.</param>
        /// <returns>
        /// Always [true] (continue); only CreateMediaProgress is forwarded.
        /// </returns>
        /// <remarks>Documented by Dev03, 2008-08-20</remarks>
        private bool ReportProgressUpdate(StatusMessageEventArgs args, object caller)
        {
            switch (args.MessageType)
            {
                case StatusMessageType.CreateMediaProgress:
                    if ((caller != null) && (caller is DbDictionary) && ((caller as DbDictionary).CreateMediaProgressChanged != null))
                        (caller as DbDictionary).CreateMediaProgressChanged(null, args);
                    break;
            }
            return true;
        }

        /// <summary>
        /// Defines whether the content of the LM is protected from being copied/extracted.
        /// </summary>
        public bool ContentProtected
        {
            get { return connector.GetContentProtected(id); }
        }

        /// <summary>
        /// Gets the number of boxes.
/// </summary> /// <value>The number of boxes.</value> /// <remarks>Documented by Dev03, 2007-10-17</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public int NumberOfBoxes { get { return Boxes.Box.Count - 1; } } /// <summary> /// Gets or sets the version. /// </summary> /// <value>The version.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public int Version { get { //TODO: Find a better solution than simply converting (region settings/comma issues) return Convert.ToInt32(connector.GetDbVersion()); } } /// <summary> /// Gets or sets the title. /// </summary> /// <value>The title.</value> /// <remarks>Documented by Dev02, 2008-07-28</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public string Title { get { return connector.GetTitle(Id); } set { connector.SetTitle(Id, value); } } /// <summary> /// Gets or sets the author. /// </summary> /// <value>The author.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public string Author { get { return connector.GetAuthor(Id); } set { connector.SetAuthor(Id, value); } } /// <summary> /// Gets or sets the description. /// </summary> /// <value>The description.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public string Description { get { return connector.GetDescription(Id); } set { connector.SetDescription(Id, value); } } private int id; /// <summary> /// Gets the ID. /// </summary> /// <value>The ID.</value> /// <remarks>Documented by Dev02, 2008-07-28</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public int Id { get { return id; } } /// <summary> /// Gets or sets the GUID. 
/// </summary> /// <value>The GUID.</value> /// <remarks>Documented by Dev02, 2008-07-28</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public string Guid { get { return connector.GetGuid(Id); } set { connector.SetGuid(Id, value); } } /// <summary> /// Gets or sets the category. /// </summary> /// <value>The category.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public Category Category { get { return connector.GetCategoryId(id); } set { connector.SetCategory(id, value.Converted ? value.Id : MLifter.DAL.Category.ConvertCategoryId(value.Id)); } } /// <summary> /// Gets or sets the media directory. /// </summary> /// <value>The media directory.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public string MediaDirectory { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } } /// <summary> /// Gets the size of the dictionary. /// </summary> /// <value>The size of the dictionary.</value> /// <remarks>Documented by Dev08, 2008-10-02</remarks> public long DictionarySize { get { return connector.GetDictionarySize(id, 1024); } } /// <summary> /// Gets the number of all dictionary media objects/files. /// </summary> /// <value>The dictionary media objects count.</value> /// <remarks>Documented by Dev08, 2008-10-02</remarks> public int DictionaryMediaObjectsCount { get { return connector.GetDictionaryMediaObjectsCount(id); } } /// <summary> /// Gets actual the score. /// </summary> /// <value>The score.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public double Score { get { double value = connector.GetScore(id); return value < 0 || value == double.NaN ? 0 : value > 100 ? 100 : value; } } /// <summary> /// Gets or sets the high score. 
/// </summary> /// <value>The high score.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public double HighScore { get { return connector.GetHighscore(id); } set { connector.SetHighscore(id, value); } } /// <summary> /// Gets the boxes. /// </summary> /// <value>The boxes.</value> /// <remarks>Documented by Dev03, 2007-11-22</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public IBoxes Boxes { get { return new DbBoxes(Parent.GetChildParentClass(this)); } } /// <summary> /// Gets or sets the cards. /// </summary> /// <value>The cards.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public ICards Cards { get { return new DbCards(Id, parent.GetChildParentClass(this)); } } /// <summary> /// Gets or sets the chapters. /// </summary> /// <value>The chapters.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public IChapters Chapters { get { return new DbChapters(Id, Parent.GetChildParentClass(this)); } } /// <summary> /// Gets or sets the statistics. /// </summary> /// <value>The statistics.</value> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public IStatistics Statistics { get { return new DbStatistics(id, Parent.GetChildParentClass(this)); } } /// <summary> /// Loads this instance. /// </summary> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void Load() { Debug.WriteLine("The method or operation is not implemented."); } /// <summary> /// Saves this instance. 
/// </summary> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void Save() { Debug.WriteLine("The method or operation is not implemented."); } /// <summary> /// Saves the dictionary to the new path. /// </summary> /// <param name="newPath">The new path.</param> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void SaveAs(string newPath) { Debug.WriteLine("The method or operation is not implemented."); } /// <summary> /// Saves the dictionary to the new path. /// </summary> /// <param name="newPath">The new path.</param> /// <param name="overwrite">if set to <c>true</c> [overwrite] existing files.</param> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void SaveAs(string newPath, bool overwrite) { Debug.WriteLine("The method or operation is not implemented."); } /// <summary> /// Moves the specified new path. /// </summary> /// <param name="newPath">The new path.</param> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void Move(string newPath) { Debug.WriteLine("The method or operation is not implemented."); } /// <summary> /// Moves the specified new path. /// </summary> /// <param name="newPath">The new path.</param> /// <param name="overwrite">if set to <c>true</c> [overwrite] existing files.</param> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void Move(string newPath, bool overwrite) { Debug.WriteLine("The method or operation is not implemented."); } /// <summary> /// Changes the media path. 
/// </summary> /// <param name="path">The path.</param> /// <param name="move">if set to <c>true</c> [move].</param> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void ChangeMediaPath(string path, bool move) { Debug.WriteLine("The method or operation is not implemented."); } /// <summary> /// Gets the resources. /// </summary> /// <returns></returns> /// <remarks>Documented by Dev03, 2007-09-03</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public List<string> GetResources() { IList<int> mediaIds = mediaconnector.GetMediaResources(Id); List<string> mediaUris = new List<string>(); foreach (int id in mediaIds) mediaUris.Add(MLifter.DAL.DB.DbMediaServer.DbMediaServer.Instance(parent).GetMediaURI(id).ToString()); return mediaUris; } /// <summary> /// Gets the empty resources (media content with missing data). /// </summary> /// <returns></returns> /// <remarks>Documented by Dev05, 2009-03-31</remarks> public List<int> GetEmptyResources() { return mediaconnector.GetEmptyMediaResources(id); } /// <summary> /// Creates a new instance of a card style object. /// </summary> /// <returns></returns> /// <remarks>Documented by Dev03, 2007-10-30</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public ICardStyle CreateCardStyle() { if (!this.HasPermission(PermissionTypes.CanModifyStyles)) throw new PermissionException(); return new DbCardStyle(cardstyleconnector.CreateNewCardStyle(), Parent.GetChildParentClass(this)); } /// <summary> /// Creates a new media object. 
/// </summary> /// <param name="type">The type.</param> /// <param name="path">The path.</param> /// <param name="isActive">if set to <c>true</c> [is active].</param> /// <param name="isDefault">if set to <c>true</c> [is default].</param> /// <param name="isExample">if set to <c>true</c> [is example].</param> /// <returns></returns> /// <remarks>Documented by Dev02, 2008-08-11</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public IMedia CreateMedia(EMedia type, string path, bool isActive, bool isDefault, bool isExample) { StatusMessageReportProgress rpu = new StatusMessageReportProgress(ReportProgressUpdate); return CreateNewMediaObject(this, rpu, type, path, isActive, isDefault, isExample); } /// <summary> /// Creates a new media object. /// </summary> /// <param name="caller">The calling object.</param> /// <param name="rpu">A delegate of the type StatusMessageReportProgress.</param> /// <param name="type">The type.</param> /// <param name="path">The path.</param> /// <param name="isActive">if set to <c>true</c> [is active].</param> /// <param name="isDefault">if set to <c>true</c> [is default].</param> /// <param name="isExample">if set to <c>true</c> [is example].</param> /// <returns></returns> /// <remarks>Documented by Dev02, 2008-08-11</remarks> internal IMedia CreateNewMediaObject(object caller, StatusMessageReportProgress rpu, EMedia type, string path, bool isActive, bool isDefault, bool isExample) { IMedia media = null; Uri uri; if (!this.HasPermission(PermissionTypes.CanModifyMedia)) throw new PermissionException(); if (path == null) throw new ArgumentNullException("Null value not allowed for media file path!"); try { if (File.Exists(Path.Combine(Environment.CurrentDirectory, path))) //to allow relative paths path = Path.Combine(Environment.CurrentDirectory, path); uri = new Uri(path); } catch (UriFormatException exception) { throw new FileNotFoundException("Uri format is invalid.", exception); } if (uri.Scheme == Uri.UriSchemeFile && 
uri.IsFile) //we got a new file { if (File.Exists(path)) { int newid; using (FileStream stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read)) newid = mediaconnector.CreateMedia(stream, type, rpu, caller); media = DbMedia.CreateDisconnectedCardMedia(newid, type, isDefault, isExample, parent); Helper.UpdateMediaProperties(path, newid, mediaconnector); } else throw new FileNotFoundException("Media file could not be found.", path); } else if (uri.Scheme == "http" && uri.IsLoopback) //we got a http reference => file is already in db { if (DbMediaServer.DbMediaServer.Instance(parent).IsYours(uri)) { int mediaId = DbMediaServer.DbMediaServer.GetMediaID(uri.AbsolutePath); media = DbMedia.CreateDisconnectedCardMedia(mediaId, type, isDefault, isExample, parent); rpu(new StatusMessageEventArgs(StatusMessageType.CreateMediaProgress, 100, 100), caller); } else { DbMediaServer.DbMediaServer server = DbMediaServer.DbMediaServer.Instance(uri); int newid = mediaconnector.CreateMedia(GetMediaConnector(server.Parent).GetMediaStream(DbMediaServer.DbMediaServer.GetMediaID(uri.AbsolutePath)), type, rpu, caller); media = DbMedia.CreateDisconnectedCardMedia(newid, type, isDefault, isExample, parent); Helper.UpdateMediaProperties(path, newid, mediaconnector); } } return media; } /// <summary> /// Gets or sets the default settings. /// </summary> /// <value>The settings.</value> /// <remarks>Documented by Dev05, 2008-08-11</remarks> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public ISettings DefaultSettings { get { return connector.GetDefaultSettings(Id); } set { if (!(value is DbSettings)) return; connector.SetDefaultSettings(Id, ((DbSettings)value).Id); } } /// <summary> /// Gets or sets the user settings. 
        /// </summary>
        /// <value>The user settings.</value>
        /// <remarks>Documented by Dev05, 2008-10-01</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public ISettings UserSettings
        {
            get { return connector.GetUserSettings(id); }
            // NOTE(review): unlike DefaultSettings, no "is DbSettings" guard - a
            // non-DbSettings value throws InvalidCastException here. Confirm intent.
            set { connector.SetUserSettings(id, ((DbSettings)value).Id); }
        }

        /// <summary>
        /// Gets or sets the allowed settings.
        /// </summary>
        /// <value>The allowed settings.</value>
        /// <remarks>Documented by Dev05, 2008-09-22</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public ISettings AllowedSettings
        {
            get { return connector.GetAllowedSettings(id); }
            set { connector.SetAllowedSettings(id, ((DbSettings)value).Id); }
        }

        /// <summary>
        /// Creates the settings; requires the CanModifySettings permission.
        /// </summary>
        /// <returns></returns>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public ISettings CreateSettings()
        {
            if (!this.HasPermission(PermissionTypes.CanModifySettings))
                throw new PermissionException();
            return connector.CreateSettings();
        }

        /// <summary>
        /// Creates the settings object; same behavior as CreateSettings.
        /// </summary>
        /// <returns></returns>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public ISettings CreateSettingsObject()
        {
            if (!this.HasPermission(PermissionTypes.CanModifySettings))
                throw new PermissionException();
            return connector.CreateSettings();
        }

        /// <summary>
        /// Resets the learning progress (boxes and high score), except for
        /// synchronized SQL CE modules, then clears the user cache and logs the restart.
        /// </summary>
        /// <remarks>Documented by Dev02, 2008-09-08</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public IDictionary ResetLearningProgress()
        {
            if (parent.CurrentUser.ConnectionString.Typ != DatabaseType.MsSqlCe || parent.CurrentUser.ConnectionString.SyncType != SyncType.NotSynchronized)
            {
                Cards.ClearAllBoxes();
                HighScore = 0;
            }
            parent.CurrentUser.Cache.Clear();
            return Log.RestartLearningSuccess(parent);
        }

        /// <summary>
        /// Checks the user session.
        /// </summary>
        /// <returns></returns>
        /// <remarks>Documented by Dev05, 2008-11-18</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public bool CheckUserSession()
        {
            return connector.CheckUserSession();
        }

        /// <summary>
        /// Preloads the card cache.
        /// </summary>
        /// <remarks>Documented by Dev09, 2009-04-28</remarks>
        /// <remarks>Documented by Dev09, 2009-04-28</remarks>
        public void PreloadCardCache()
        {
            connector.PreloadCardCache(id);
        }

        /// <summary>
        /// Clears the unused media.
        /// </summary>
        /// <remarks>Documented by Dev05, 2009-05-27</remarks>
        public void ClearUnusedMedia()
        {
            connector.ClearUnusedMedia(id);
        }

        /// <summary>
        /// Occurs when [backup completed]. Intentionally a no-op for DB modules:
        /// subscriptions are accepted and discarded (no backup is performed here).
        /// </summary>
        /// <remarks>Documented by Dev02, 2008-09-08</remarks>
        /// <remarks>Documented by Dev03, 2009-01-13</remarks>
        public event BackupCompletedEventHandler BackupCompleted
        {
            add { /* throw new NotSupportedException(); */ }
            remove { }
        }

        /// <summary>
        /// Gets the LearningModule extensions. Returns a fresh observable list on
        /// each access; add/remove on the list is persisted via the extension connector.
        /// </summary>
        /// <value>The extensions.</value>
        /// <remarks>Documented by Dev08, 2009-07-02</remarks>
        /// <remarks>Documented by Dev08, 2009-07-02</remarks>
        public IList<IExtension> Extensions
        {
            get
            {
                ObservableList<IExtension> extensions = new ObservableList<IExtension>();
                IList<Guid> ExtensionGuids = connector.GetExtensions(id);
                foreach (Guid guid in ExtensionGuids)
                    extensions.Add(new DbExtension(guid, parent));
                // Subscribe after filling so the initial population is not persisted again.
                extensions.ListChanged += new EventHandler<ObservableListChangedEventArgs<IExtension>>(extensions_ListChanged);
                return extensions;
            }
        }

        /// <summary>
        /// Handles the ListChanged event of the extensions control.
/// </summary> /// <param name="sender">The source of the event.</param> /// <param name="e">The MLifter.Generics.ObservableListChangedEventArgs&lt;MLifter.DAL.Interfaces.IExtension&gt; instance containing the event data.</param> /// <remarks>Documented by Dev02, 2009-07-03</remarks> void extensions_ListChanged(object sender, ObservableListChangedEventArgs<IExtension> e) { switch (e.ListChangedType) { case ListChangedType.ItemAdded: extensionconnector.SetExtensionLM(e.Item.Id, this.Id); break; case ListChangedType.ItemDeleted: extensionconnector.DeleteExtension(e.Item.Id); break; default: break; } } /// <summary> /// Creates a new extension. /// </summary> /// <returns></returns> /// <remarks>Documented by Dev02, 2009-07-06</remarks> /// <remarks>Documented by Dev02, 2009-07-06</remarks> public IExtension ExtensionFactory() { Guid extensionGuid = extensionconnector.AddNewExtension(); extensionconnector.SetExtensionLM(extensionGuid, this.Id); return new DbExtension(extensionGuid, parent); } /// <summary> /// Creates new extensions. /// </summary> /// <param name="guid"></param> /// <returns></returns> /// <remarks>Documented by Dev02, 2009-07-06</remarks> /// <remarks>Documented by Dev02, 2009-07-06</remarks> public IExtension ExtensionFactory(Guid guid) { Guid extensionGuid = extensionconnector.AddNewExtension(guid); extensionconnector.SetExtensionLM(extensionGuid, this.Id); return new DbExtension(extensionGuid, parent); } #endregion #region IDisposable Members /// <summary> /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. 
/// </summary> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void Dispose() { if (DbMediaServer.DbMediaServer.Instance(parent).IsAlive) DbMediaServer.DbMediaServer.Instance(parent).Stop(); if (Parent != null) Parent.OnDictionaryClosed(this, EventArgs.Empty); if (Parent.CurrentUser.ConnectionString.Typ == DatabaseType.MsSqlCe) MSSQLCEConn.CloseMyConnection(this.Connection); //MSSQLCEConn.CloseAllConnections(); FileCleanupQueue.DoCleanup(); } #endregion #region ICopy Members /// <summary> /// Copies to. /// </summary> /// <param name="target">The target.</param> /// <param name="progressDelegate">The progress delegate.</param> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public void CopyTo(ICopy target, CopyToProgress progressDelegate) { CopyBase.Copy(this, target, typeof(IDictionary), progressDelegate); //copy extensions foreach (IExtension extension in Extensions) { IExtension newExtension = ((IDictionary)target).ExtensionFactory(extension.Id); extension.CopyTo(newExtension, progressDelegate); } } #endregion #region IParent Members private ParentClass parent; /// <summary> /// Gets the parent. /// </summary> /// <value>The parent.</value> /// <remarks>Documented by Dev03, 2009-01-13</remarks> public ParentClass Parent { get { return parent; } } #endregion #region ISecurity Members /// <summary> /// Determines whether the object has the specified permission. /// </summary> /// <param name="permissionName">Name of the permission.</param> /// <returns> /// <c>true</c> if the object name has the specified permission; otherwise, <c>false</c>. /// </returns> public bool HasPermission(string permissionName) { return Parent.CurrentUser.HasPermission(this, permissionName); } /// <summary> /// Gets the permissions for the object. /// </summary> /// <returns> /// A list of permissions for the object. 
/// </returns>
public List<SecurityFramework.PermissionInfo> GetPermissions()
{
    return Parent.CurrentUser.GetPermissions(this);
}

#endregion
}

/// <summary>
/// The requested operation is not possible in a database learning module.
/// </summary>
public class NotAllowedInDbModeException : Exception
{
    /// <summary>Initializes a new instance with a default message.</summary>
    public NotAllowedInDbModeException() { }

    /// <summary>Initializes a new instance with the specified error message.</summary>
    /// <param name="message">The error message.</param>
    public NotAllowedInDbModeException(string message) : base(message) { }

    /// <summary>Initializes a new instance with the specified error message and inner exception.</summary>
    /// <param name="message">The error message.</param>
    /// <param name="innerException">The exception that caused this one.</param>
    public NotAllowedInDbModeException(string message, Exception innerException) : base(message, innerException) { }
}

/// <summary>
/// The requested operation is not possible on a synced learning module.
/// </summary>
public class NotAllowedInSyncedModeException : Exception
{
    /// <summary>Initializes a new instance with a default message.</summary>
    public NotAllowedInSyncedModeException() { }

    /// <summary>Initializes a new instance with the specified error message.</summary>
    /// <param name="message">The error message.</param>
    public NotAllowedInSyncedModeException(string message) : base(message) { }

    /// <summary>Initializes a new instance with the specified error message and inner exception.</summary>
    /// <param name="message">The error message.</param>
    /// <param name="innerException">The exception that caused this one.</param>
    public NotAllowedInSyncedModeException(string message, Exception innerException) : base(message, innerException) { }
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization.Formatters.Binary;
using Xunit;

namespace System.Resources.Extensions.Tests
{
    // Tests for PreserializedResourceWriter / DeserializingResourceReader:
    // argument validation, binary compatibility with ResourceWriter/ResourceReader
    // for primitive resources, and round-tripping of pre-serialized payloads
    // (BinaryFormatter blobs, TypeConverter output, activator streams).
    public class PreserializedResourceWriterTests
    {
        [Fact]
        public static void ExceptionforNullStream()
        {
            Assert.Throws<ArgumentNullException>("stream", () => new PreserializedResourceWriter((Stream)null));
        }

        [Fact]
        public static void ExceptionforNullFile()
        {
            Assert.Throws<ArgumentNullException>("fileName", () => new PreserializedResourceWriter((string)null));
        }

        [Fact]
        public static void ExceptionforReadOnlyStream()
        {
            AssertExtensions.Throws<ArgumentException>(null, () =>
            {
                using (var readOnlyStream = new MemoryStream(new byte[1], false))
                {
                    new PreserializedResourceWriter(readOnlyStream);
                }
            });
        }

        // Every Add* overload must reject a null resource name.
        [Fact]
        public static void ExceptionforNullResourceId()
        {
            using (var writer = new PreserializedResourceWriter(new MemoryStream()))
            {
                Assert.Throws<ArgumentNullException>("name", () => writer.AddResource(null, "value"));
                Assert.Throws<ArgumentNullException>("name", () => writer.AddResource(null, new object()));
                Assert.Throws<ArgumentNullException>("name", () => writer.AddResource(null, new byte[0]));
                using (var stream = new MemoryStream())
                {
                    Assert.Throws<ArgumentNullException>("name", () => writer.AddResource(null, stream));
                    Assert.Throws<ArgumentNullException>("name", () => writer.AddResource(null, stream, true));
                    Assert.Throws<ArgumentNullException>("name", () => writer.AddActivatorResource(null, stream, "System.DayOfWeek", false));
                }
                Assert.Throws<ArgumentNullException>("name", () => writer.AddBinaryFormattedResource(null, new byte[1], "System.DayOfWeek"));
                Assert.Throws<ArgumentNullException>("name", () => writer.AddTypeConverterResource(null, new byte[1], "System.DayOfWeek"));
                Assert.Throws<ArgumentNullException>("name", () => writer.AddResource(null, "Monday", "System.DayOfWeek"));
            }
        }

        // Duplicate names are rejected by every Add* overload; the second half
        // uses case-variant names, showing name comparison is case-insensitive.
        [Fact]
        public static void ExceptionforDuplicateKey()
        {
            using (var writer = new PreserializedResourceWriter(new MemoryStream()))
            {
                writer.AddResource("duplicate", "value");

                Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplicate", "value"));
                Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplicate", new object()));
                Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplicate", new byte[0]));
                using (var stream = new MemoryStream())
                {
                    Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplicate", stream));
                    Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplicate", stream, true));
                    Assert.Throws<ArgumentException>(null, () => writer.AddActivatorResource("duplicate", stream, "System.DayOfWeek", false));
                }
                Assert.Throws<ArgumentException>(null, () => writer.AddBinaryFormattedResource("duplicate", new byte[1], "System.DayOfWeek"));
                Assert.Throws<ArgumentException>(null, () => writer.AddTypeConverterResource("duplicate", new byte[1], "System.DayOfWeek"));
                Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplicate", "Monday", "System.DayOfWeek"));

                Assert.Throws<ArgumentException>(null, () => writer.AddResource("Duplicate", "value"));
                Assert.Throws<ArgumentException>(null, () => writer.AddResource("dUplicate", new object()));
                Assert.Throws<ArgumentException>(null, () => writer.AddResource("duPlicate", new byte[0]));
                using (var stream = new MemoryStream())
                {
                    Assert.Throws<ArgumentException>(null, () => writer.AddResource("dupLicate", stream));
                    Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplIcate", stream, true));
                    Assert.Throws<ArgumentException>(null, () => writer.AddActivatorResource("dupliCate", stream, "System.DayOfWeek", false));
                }
                Assert.Throws<ArgumentException>(null, () => writer.AddBinaryFormattedResource("duplicAte", new byte[1], "System.DayOfWeek"));
                Assert.Throws<ArgumentException>(null, () => writer.AddTypeConverterResource("duplicaTe", new byte[1], "System.DayOfWeek"));
                Assert.Throws<ArgumentException>(null, () => writer.AddResource("duplicatE", "Monday", "System.DayOfWeek"));
            }
        }

        // Once Generate() has run, the writer is sealed and all Add* calls throw.
        [Fact]
        public static void ExceptionForAddAfterGenerate()
        {
            using (var writer = new PreserializedResourceWriter(new MemoryStream()))
            {
                writer.AddResource("duplicate", "value");

                writer.Generate();

                Assert.Throws<InvalidOperationException>(() => writer.AddResource("duplicate", "value"));
                Assert.Throws<InvalidOperationException>(() => writer.AddResource("duplicate", new object()));
                Assert.Throws<InvalidOperationException>(() => writer.AddResource("duplicate", new byte[0]));
                using (var stream = new MemoryStream())
                {
                    Assert.Throws<InvalidOperationException>(() => writer.AddResource("duplicate", stream));
                    Assert.Throws<InvalidOperationException>(() => writer.AddResource("duplicate", stream, true));
                    Assert.Throws<InvalidOperationException>(() => writer.AddActivatorResource("duplicate", stream, "System.DayOfWeek", false));
                }
                Assert.Throws<InvalidOperationException>(() => writer.AddBinaryFormattedResource("duplicate", new byte[1], "System.DayOfWeek"));
                Assert.Throws<InvalidOperationException>(() => writer.AddTypeConverterResource("duplicate", new byte[1], "System.DayOfWeek"));
                Assert.Throws<InvalidOperationException>(() => writer.AddResource("duplicate", "Monday", "System.DayOfWeek"));
            }
        }

        // An empty PreserializedResourceWriter must emit bytes identical to ResourceWriter.
        [Fact]
        public static void EmptyResources()
        {
            byte[] writerBuffer, binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (ResourceWriter writer = new ResourceWriter(ms))
            {
                writer.Generate();
                writerBuffer = ms.ToArray();
            }

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            Assert.Equal(writerBuffer, binaryWriterBuffer);
        }

        [Fact]
        public static void PrimitiveResources()
        {
            IReadOnlyDictionary<string, object> values = TestData.Primitive;
            Action<IResourceWriter> addData = (writer) =>
            {
                foreach (var pair in values)
                {
                    writer.AddResource(pair.Key, pair.Value);
                }
            };

            byte[] writerBuffer, binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (ResourceWriter writer = new ResourceWriter(ms))
            {
                addData(writer);
                writer.Generate();
                writerBuffer = ms.ToArray();
            }

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                addData(writer);
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            // PreserializedResourceWriter should write ResourceWriter/ResourceReader format
            Assert.Equal(writerBuffer, binaryWriterBuffer);

            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (ResourceReader reader = new ResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    Assert.Equal(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }

            // DeserializingResourceReader can read ResourceReader format
            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (DeserializingResourceReader reader = new DeserializingResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    Assert.Equal(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }
        }

        // Primitives added via their string representation must produce the same
        // bytes as adding the typed values directly through ResourceWriter.
        [Fact]
        public static void PrimitiveResourcesAsStrings()
        {
            IReadOnlyDictionary<string, object> values = TestData.Primitive;

            byte[] writerBuffer, binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (ResourceWriter writer = new ResourceWriter(ms))
            {
                foreach (var pair in values)
                {
                    writer.AddResource(pair.Key, pair.Value);
                }
                writer.Generate();
                writerBuffer = ms.ToArray();
            }

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                foreach (var pair in values)
                {
                    writer.AddResource(pair.Key, TestData.GetStringValue(pair.Value), TestData.GetSerializationTypeName(pair.Value.GetType()));
                }
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            // PreserializedResourceWriter should write ResourceWriter/ResourceReader format
            Assert.Equal(writerBuffer, binaryWriterBuffer);

            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (ResourceReader reader = new ResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    Assert.Equal(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }

            // DeserializingResourceReader can read ResourceReader format
            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (DeserializingResourceReader reader = new DeserializingResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    Assert.Equal(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }
        }

        [Fact]
        public static void BinaryFormattedResources()
        {
            var values = TestData.BinaryFormatted;
            byte[] binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                // Pre-serialize each value with BinaryFormatter and hand the raw
                // payload to the writer along with the type name.
                BinaryFormatter binaryFormatter = new BinaryFormatter();
                foreach (var pair in values)
                {
                    using (MemoryStream memoryStream = new MemoryStream())
                    {
                        binaryFormatter.Serialize(memoryStream, pair.Value);
                        writer.AddBinaryFormattedResource(pair.Key, memoryStream.ToArray(), TestData.GetSerializationTypeName(pair.Value.GetType()));
                    }
                }
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            // DeserializingResourceReader can read BinaryFormatted resources with type names.
            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (DeserializingResourceReader reader = new DeserializingResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    ResourceValueEquals(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }
        }

        [Fact]
        public static void BinaryFormattedResourcesWithoutTypeName()
        {
            var values = TestData.BinaryFormatted;
            byte[] binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                BinaryFormatter binaryFormatter = new BinaryFormatter();
                foreach (var pair in values)
                {
                    using (MemoryStream memoryStream = new MemoryStream())
                    {
                        binaryFormatter.Serialize(memoryStream, pair.Value);
                        writer.AddBinaryFormattedResource(pair.Key, memoryStream.ToArray());
                    }
                }
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            // DeserializingResourceReader can read ResourceReader format
            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (DeserializingResourceReader reader = new DeserializingResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    ResourceValueEquals(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }
        }

        [Fact]
        public static void TypeConverterByteArrayResources()
        {
            var values = TestData.ByteArrayConverter;

            byte[] binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                // Serialize each value through its TypeConverter's byte[] conversion.
                foreach (var pair in values)
                {
                    TypeConverter converter = TypeDescriptor.GetConverter(pair.Value.GetType());
                    byte[] buffer = (byte[])converter.ConvertTo(pair.Value, typeof(byte[]));
                    writer.AddTypeConverterResource(pair.Key, buffer, TestData.GetSerializationTypeName(pair.Value.GetType()));
                }
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (DeserializingResourceReader reader = new DeserializingResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    ResourceValueEquals(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }
        }

        [Fact]
        public static void TypeConverterStringResources()
        {
            var values = TestData.StringConverter;

            byte[] binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                foreach (var pair in values)
                {
                    writer.AddResource(pair.Key, TestData.GetStringValue(pair.Value), TestData.GetSerializationTypeName(pair.Value.GetType()));
                }
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (DeserializingResourceReader reader = new DeserializingResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    ResourceValueEquals(values[(string)dictEnum.Key], dictEnum.Value);
                }
            }
        }

        // Resources whose values are constructed at read time by passing a stream
        // to the type's constructor (Activator-based resources).
        [Fact]
        public static void StreamResources()
        {
            var values = TestData.Activator;

            byte[] binaryWriterBuffer;

            using (MemoryStream ms = new MemoryStream())
            using (PreserializedResourceWriter writer = new PreserializedResourceWriter(ms))
            {
                foreach (var pair in values)
                {
                    pair.Value.stream.Seek(0, SeekOrigin.Begin);
                    writer.AddActivatorResource(pair.Key, pair.Value.stream, TestData.GetSerializationTypeName(pair.Value.type), false);
                }
                writer.Generate();
                binaryWriterBuffer = ms.ToArray();
            }

            using (MemoryStream ms = new MemoryStream(binaryWriterBuffer, false))
            using (DeserializingResourceReader reader = new DeserializingResourceReader(ms))
            {
                IDictionaryEnumerator dictEnum = reader.GetEnumerator();

                while (dictEnum.MoveNext())
                {
                    var expectedTuple = values[(string)dictEnum.Key];
                    expectedTuple.stream.Seek(0, SeekOrigin.Begin);
                    object expected = Activator.CreateInstance(expectedTuple.type, new object[] { expectedTuple.stream });

                    ResourceValueEquals(expected, dictEnum.Value);
                }
            }
        }

        // End-to-end: all resource flavors are retrievable through ResourceManager.
        [Fact]
        public static void CanReadViaResourceManager()
        {
            ResourceManager resourceManager = new ResourceManager(typeof(TestData));

            IEnumerable<KeyValuePair<string, object>> objectPairs = TestData.Primitive
                .Concat(TestData.PrimitiveAsString)
                .Concat(TestData.BinaryFormattedWithoutDrawing)
                .Concat(TestData.BinaryFormattedWithoutDrawingNoType)
                .Concat(TestData.ByteArrayConverterWithoutDrawing)
                .Concat(TestData.StringConverterWithoutDrawing);

            foreach (KeyValuePair<string, object> pair in objectPairs)
            {
                var actualValue = resourceManager.GetObject(pair.Key);

                Assert.Equal(pair.Value, actualValue);
            }

            foreach (KeyValuePair<string, (Type type, Stream stream)> pair in TestData.ActivatorWithoutDrawing)
            {
                pair.Value.stream.Seek(0, SeekOrigin.Begin);
                var expectedValue = Activator.CreateInstance(pair.Value.type, pair.Value.stream);
                var actualValue = resourceManager.GetObject(pair.Key);

                Assert.Equal(expectedValue, actualValue);
            }
        }

        [Fact]
        public static void ResourceManagerLoadsCorrectReader()
        {
            ResourceManager resourceManager = new ResourceManager(typeof(TestData));
            ResourceSet resSet = resourceManager.GetResourceSet(CultureInfo.InvariantCulture, true, true);
            // The Reader property is non-public; reflect to verify the concrete reader type.
            IResourceReader reader = (IResourceReader)resSet.GetType().GetProperty("Reader", BindingFlags.NonPublic | BindingFlags.Instance)?.GetValue(resSet);
            Assert.IsType<DeserializingResourceReader>(reader);
        }

        [Fact]
        public static void EmbeddedResourcesAreUpToDate()
        {
            // this is meant to catch a case where our embedded test resources are out of date with respect to the current writer.
            // that could be intentional, or accidental.  Regardless we want to know.
            using (Stream resourcesStream = typeof(TestData).Assembly.GetManifestResourceStream("System.Resources.Extensions.Tests.TestData.resources"))
            using (MemoryStream actualData = new MemoryStream(), expectedData = new MemoryStream())
            {
                TestData.WriteResourcesStream(actualData);
                resourcesStream.CopyTo(expectedData);

                if (!PlatformDetection.IsFullFramework)
                {
                    // Some types rely on SerializationInfo.SetType on .NETCore
                    // which result in a different binary format
                    Assert.Equal(expectedData.ToArray(), actualData.ToArray());
                }
            }
        }

        // Equality helper: System.Drawing types need structural comparison
        // (pixel-by-pixel / font attributes); everything else uses Assert.Equal.
        private static void ResourceValueEquals(object expected, object actual)
        {
            if (actual is Bitmap bitmap)
            {
                BitmapEquals((Bitmap)expected, bitmap);
            }
            else if (actual is Icon icon)
            {
                BitmapEquals(((Icon)expected).ToBitmap(), icon.ToBitmap());
            }
            else if (actual is Font font)
            {
                Font expectedFont = (Font)expected;
                Assert.Equal(expectedFont.FontFamily, font.FontFamily);
                Assert.Equal(expectedFont.Size, font.Size);
                Assert.Equal(expectedFont.Style, font.Style);
                Assert.Equal(expectedFont.Unit, font.Unit);
            }
            else
            {
                Assert.Equal(expected, actual);
            }
        }

        // Pixel-by-pixel bitmap comparison.
        private static void BitmapEquals(Bitmap left, Bitmap right)
        {
            Assert.Equal(left.Size, right.Size);

            for (int x = 0; x < left.Width; ++x)
            {
                for (int y = 0; y < left.Height; ++y)
                {
                    Assert.Equal(left.GetPixel(x, y), right.GetPixel(x, y));
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using OLEDB.Test.ModuleCore;
using System.IO;
using XmlCoreTest.Common;

namespace System.Xml.Tests
{
    // Test variations for XmlReader LineNumber/LinePosition (IXmlLineInfo)
    // behavior across node types, attribute navigation, skipping, and
    // error reporting (XmlException positions).
    [InheritRequired()]
    public abstract partial class TCLinePos : TCXMLReaderBaseGeneral
    {
        // Element/attribute names used in the shared Common/LineNumber.xml fixture.
        public const String ST_ELEMENT = "ELEMENT";
        public const String ST_SKIP = "SKIP";
        public const String ST_ENTITYREF = "ENTITYREF";
        public const String ST_A0 = "a0";
        public const String ST_A1 = "a1";
        public const String ST_A2 = "a2";
        public const String ST_BASE64 = "BASE64";
        public const String ST_BINHEX = "BINHEX";
        public const String ST_CHARENTITY = "CHARENTITY";
        public const String ST_BOOLXSD = "BOOLXSD";
        public const String ST_DATE = "DATE";
        public const String ST_DATETIME = "DATETIME";
        public const String ST_INT = "INT";
        public const String ST_TIME = "TIME";
        public const String ST_TIMESPAN = "TIMESPAN";
        public const String ST_DECIMAL2 = "DECIMAL";

        // Verifies the reader's position; no-op for custom readers that don't expose it.
        // NOTE(review): this compares DataReader.Settings.LineNumberOffset/LinePositionOffset
        // (and callers pass 0,0) rather than the IXmlLineInfo values - presumably intentional
        // for these variations, but worth confirming against the test harness.
        private void CheckPos(int line, int pos)
        {
            if (!IsCustomReader())
            {
                CError.WriteLine("(" + DataReader.Name + "," + DataReader.Value + ")");
                CError.Compare(DataReader.Settings.LineNumberOffset, line, "LineNumber");
                CError.Compare(DataReader.Settings.LinePositionOffset, pos, "LinePos");
            }
        }

        // Advances the reader until the given node type is current; fails the test at EOF.
        public void PositionOnNodeType2(XmlNodeType nodeType)
        {
            while (DataReader.Read() && DataReader.NodeType != nodeType)
            {
            }
            if (DataReader.EOF)
            {
                throw new CTestFailedException("Couldn't find XmlNodeType " + nodeType);
            }
        }

        [Variation("LineNumber/LinePos after Read and NodeType = Element", Priority = 0)]
        public int TestLinePos1()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_ELEMENT);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = CDATA", Priority = 0)]
        public int TestLinePos2()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.CDATA);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = Comment", Priority = 0)]
        public int TestLinePos4()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.Comment);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = EndElement", Priority = 0)]
        public int TestLinePos6()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.EndElement);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = EntityReference, not expanded", Priority = 0)]
        public int TestLinePos7()
        {
            // Variation disabled; CError.Skip short-circuits before the checks below run.
            CError.Skip("Skipped");

            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.EntityReference);
            CheckPos(11, 14);
            DataReader.Read();
            CheckPos(11, 17);
            DataReader.Read();
            CheckPos(11, 19);
            DataReader.Read();
            CheckPos(11, 24);
            DataReader.Read();
            CError.Compare(DataReader.NodeType, XmlNodeType.EndElement, "ee");
            CheckPos(11, 27);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = ProcessingInstruction", Priority = 0)]
        public int TestLinePos9()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.ProcessingInstruction);
            CheckPos(0, 0);
            PositionOnNodeType2(XmlNodeType.ProcessingInstruction);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = SignificantWhitespace", Priority = 0)]
        public int TestLinePos10()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.SignificantWhitespace);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = Text", Priority = 0)]
        public int TestLinePos11()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.Text);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = Whitespace", Priority = 0)]
        public int TestLinePos12()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.Whitespace);
            CheckPos(0, 0);
            PositionOnNodeType2(XmlNodeType.Whitespace);
            CheckPos(0, 0);
            PositionOnNodeType2(XmlNodeType.Whitespace);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Read and NodeType = XmlDeclaration", Priority = 0)]
        public int TestLinePos13()
        {
            // Subtree readers never see the XML declaration.
            if (IsSubtreeReader())
                CError.Skip("Skipped");

            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.XmlDeclaration);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after MoveToElement")]
        public int TestLinePos14()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_ELEMENT);
            CheckPos(0, 0);
            DataReader.MoveToAttribute(1);
            DataReader.MoveToElement();
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after MoveToFirstAttribute/MoveToNextAttribute")]
        public int TestLinePos15()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_ELEMENT);
            CheckPos(0, 0);
            DataReader.MoveToFirstAttribute();
            CheckPos(0, 0);
            DataReader.MoveToNextAttribute();
            CheckPos(0, 0);
            DataReader.MoveToNextAttribute();
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after MoveToAttribute")]
        public int TestLinePos16()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_ELEMENT);
            CheckPos(0, 0);
            // Navigate by index and by name, in non-sequential order.
            DataReader.MoveToAttribute(1);
            CheckPos(0, 0);
            DataReader.MoveToAttribute(0);
            CheckPos(0, 0);
            DataReader.MoveToAttribute(2);
            CheckPos(0, 0);
            DataReader.MoveToAttribute(ST_A0);
            CheckPos(0, 0);
            DataReader.MoveToAttribute(ST_A2);
            CheckPos(0, 0);
            DataReader.MoveToAttribute(ST_A1);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after Skip")]
        public int TestLinePos18()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_ELEMENT);
            DataReader.MoveToFirstAttribute();
            DataReader.Skip();
            CheckPos(0, 0);
            DataReader.PositionOnElement(ST_SKIP);
            DataReader.Skip();
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after ReadInnerXml")]
        public int TestLinePos19()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_ELEMENT);
            DataReader.MoveToFirstAttribute();
            DataReader.ReadInnerXml();
            CheckPos(0, 0);
            DataReader.PositionOnElement(ST_SKIP);
            DataReader.ReadInnerXml();
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after MoveToContent")]
        public int TestLinePos20()
        {
            if (IsSubtreeReader())
                CError.Skip("Skipped");
            ReloadSource(TestData + "Common/LineNumber.xml");
            PositionOnNodeType2(XmlNodeType.XmlDeclaration);
            DataReader.MoveToContent();
            CheckPos(0, 0);
            PositionOnNodeType2(XmlNodeType.Comment);
            DataReader.MoveToContent();
            CheckPos(0, 0);
            PositionOnNodeType2(XmlNodeType.ProcessingInstruction);
            DataReader.MoveToContent();
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after ReadBase64 succesive calls")]
        public int TestLinePos21()
        {
            if (IsCustomReader())
                CError.Skip("Skipped");

            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_BASE64);

            byte[] arr = new byte[3];
            DataReader.ReadElementContentAsBase64(arr, 0, 3);
            CheckPos(0, 0);
            DataReader.ReadElementContentAsBase64(arr, 0, 3);
            CheckPos(0, 0);
            DataReader.ReadElementContentAsBase64(arr, 0, 1);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after ReadBinHex succesive calls")]
        public int TestLinePos22()
        {
            if (IsCustomReader())
                CError.Skip("Skipped");

            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_BINHEX);

            byte[] arr = new byte[1];
            DataReader.ReadElementContentAsBinHex(arr, 0, 1);
            CheckPos(0, 0);
            DataReader.ReadElementContentAsBinHex(arr, 0, 1);
            CheckPos(0, 0);
            DataReader.ReadElementContentAsBinHex(arr, 0, 1);
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after ReadEndElement")]
        public int TestLinePos26()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_CHARENTITY);
            DataReader.Read(); // Text
            DataReader.Read(); // EndElement
            CheckPos(0, 0);
            DataReader.ReadEndElement();
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after ReadString")]
        public int TestLinePos27()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement(ST_ENTITYREF);
            DataReader.Read();
            CheckPos(0, 0);
            DataReader.Read();
            CheckPos(0, 0);
            DataReader.Read();
            CheckPos(0, 0);
            DataReader.Read();
            CheckPos(0, 0);
            DataReader.Read();
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos after element containing entities in attribute values")]
        public int TestLinePos39()
        {
            ReloadSource(TestData + "Common/LineNumber.xml");
            DataReader.PositionOnElement("EMBEDDED");
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("LineNumber/LinePos when Read = false")]
        public int TestLinePos40()
        {
            ReloadSource(TestData + @"Common/LineNumber.xml");
            while (DataReader.Read()) ;
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("XmlTextReader:LineNumber and LinePos don't return the right position after ReadInnerXml is called")]
        public int TestLinePos41()
        {
            String strXml = "<ROOT><CHARS2>\nxxx<MARKUP/>yyy\n</CHARS2></ROOT>";
            ReloadSourceStr(strXml);
            if (!IsCustomReader())
            {
                DataReader.Read();
                DataReader.Read();
                CError.Equals(DataReader.LineNumber, 1, "ln1");
                CError.Equals(DataReader.LinePosition, 8, "lp1");
                // ReadInnerXml must leave the reader positioned after </CHARS2>.
                DataReader.ReadInnerXml();
                CError.Equals(DataReader.LineNumber, 3, "ln2");
                CError.Equals(DataReader.LinePosition, 12, "lp2");
            }
            return TEST_PASS;
        }

        [Variation("XmlTextReader: LineNum and LinePosition incorrect for EndTag token and text element")]
        public int TestLinePos42()
        {
            String strXml = "<foo>\n fooooooo\n</foo>";
            ReloadSourceStr(strXml);
            if (!IsCustomReader())
            {
                DataReader.Read();
                CError.Equals(DataReader.LineNumber, 1, null);
                CError.Equals(DataReader.LinePosition, 2, null);
                DataReader.Read();
                CError.Equals(DataReader.LineNumber, 1, null);
                CError.Equals(DataReader.LinePosition, 6, null);
                DataReader.Read();
                CError.Equals(DataReader.LineNumber, 3, null);
                CError.Equals(DataReader.LinePosition, 3, null);
            }
            return TEST_PASS;
        }

        [Variation("Bogus LineNumber value when reading attribute over XmlTextReader")]
        public int TestLinePos43()
        {
            string strXml = "<foo\n attr1='bar'\n attr2='foo'\n/>";
            ReloadSourceStr(strXml);
            if (!IsCustomReader())
            {
                DataReader.Read();
                CError.Equals(DataReader.LineNumber, 1, null);
                CError.Equals(DataReader.LinePosition, 2, null);
                DataReader.MoveToFirstAttribute();
                CError.Equals(DataReader.LineNumber, 2, null);
                CError.Equals(DataReader.LinePosition, 5, null);
                DataReader.MoveToNextAttribute();
                CError.Equals(DataReader.LineNumber, 3, null);
                CError.Equals(DataReader.LinePosition, 5, null);
                DataReader.Read();
                // Subtree readers report 0/0 once the subtree is exhausted.
                CError.Equals(DataReader.LineNumber, IsSubtreeReader() ? 0 : 4, null);
                CError.Equals(DataReader.LinePosition, IsSubtreeReader() ? 0 : 3, null);
            }
            return TEST_PASS;
        }

        [Variation("LineNumber and LinePosition on attribute with columns")]
        public int TestLinePos44()
        {
            string strxml = "<PRODUCT xmlns:a='abc' xmlns:b='abc'/>";
            ReloadSourceStr(strxml);
            CError.Compare(DataReader.Read(), true, "Read");
            CError.Compare(DataReader.MoveToNextAttribute(), true, "MoveToNextAttribute");
            CError.Compare(DataReader.Value, "abc", "MoveToNextAttribute");
            CError.Compare(DataReader.VerifyNode(XmlNodeType.Attribute, "xmlns:a", "abc"), "xmlns:a");
            CheckPos(0, 0);
            return TEST_PASS;
        }

        [Variation("HasLineInfo")]
        public int TestLinePos45()
        {
            // NOTE(review): this local deliberately shadows the DataReader member -
            // the variation builds its own readers instead of using the fixture.
            XmlReader DataReader = ReaderHelper.Create(new StringReader("<root></root>"));
            DataReader.Read();
            CError.Compare((DataReader as IXmlLineInfo).HasLineInfo(), "DataReader HasLineInfo");

            // A wrapping reader must also expose line info.
            XmlReaderSettings rs = new XmlReaderSettings();
            XmlReader vr = ReaderHelper.Create(DataReader, rs);
            vr.Read();
            CError.Compare((vr as IXmlLineInfo).HasLineInfo(), "DataReader HasLineInfo");
            return TEST_PASS;
        }

        [Variation("XmlException LineNumber and LinePosition")]
        public int TestLinePos99()
        {
            // Unquoted attribute value -> well-formedness error at line 2.
            string strxml = "<portfolio>\n <stock exchange=nasdaq/>\n</portfolio>";
            ReloadSourceStr(strxml);
            try
            {
                while (DataReader.Read()) ;
            }
            catch (XmlException e)
            {
                CError.Compare(e.LineNumber, 2, "ln");
                CError.Compare(e.LinePosition, 18, "lp");
                return TEST_PASS;
            }
            return TEST_FAIL;
        }

        [Variation("Check error message on a non-wellformed XML")]
        public int ReadingNonWellFormedXmlThrows()
        {
            string filename = TestData + "Common/Bug86503.txt";
            try
            {
                ReloadSource(filename);
                DataReader.Read();
            }
            catch (XmlException)
            {
                return TEST_PASS;
            }
            return TEST_FAIL;
        }

        [Variation("When an XmlException is thrown both XmlException.LineNumber and XmlTextReader.LineNumber should be same")]
        public int XmlExceptionAndXmlTextReaderLineNumberShouldBeSameAfterExceptionIsThrown()
        {
            string filename = TestData + "Common/invalid-ucs4.xml";
            if (!IsCustomReader())
            {
                try
                {
                    ReloadSource(filename);
                    while (DataReader.Read()) ;
                    return TEST_FAIL;
                }
                catch (XmlException e)
                {
                    CError.WriteLine(e.Message);
                    // NOTE(review): the second argument here is e.LinePosition, not
                    // e.LineNumber - looks like a copy/paste slip in the log line
                    // (the actual comparison below uses the right values).
                    CError.WriteLine("Reader Line : {0}, Exception Line {1}", DataReader.LineNumber, e.LinePosition);
                    CError.Equals(DataReader.LineNumber, IsSubtreeReader() ? 0 : e.LineNumber, "Reader line number and Exception line number must be same");
                    CError.WriteLine("Reader Position : {0}, Exception Position {1}", DataReader.LinePosition, e.LinePosition);
                    CError.Equals(DataReader.LinePosition, IsSubtreeReader() ? 0 : e.LinePosition, "Reader line position and Exception line position must be same");
                    return TEST_PASS;
                }
            }
            return TEST_PASS;
        }

        [Variation("Xml(Text)Reader does not increase line number for a new line in element end tag")]
        public int XmlReaderShouldIncreaseLineNumberAfterNewLineInElementTag()
        {
            string fileName = Path.Combine(TestData, "Common", "Bug411697.xml");
            if (!IsCustomReader())
            {
                ReloadSource(fileName);
                int lastLineNumber = 0;
                while (DataReader.Read())
                {
                    lastLineNumber = DataReader.LineNumber;
                }
                if (lastLineNumber != 2 && !IsSubtreeReader())
                    CError.Compare(false, "Failed");
            }
            return TEST_PASS;
        }

        [Variation("LineNumber and LinePosition are not correct")]
        public int LineNumberAndLinePositionAreCorrect()
        {
            XmlReaderSettings rs = new XmlReaderSettings();
            Stream fs = FilePathUtil.getStream(TestData + "Common\\Bug297091.xsl");
            {
                // Local reader shadows the fixture member on purpose: this
                // variation checks exact IXmlLineInfo values for a known file.
                XmlReader DataReader = ReaderHelper.Create(fs, rs, TestData + "Common\\Bug297091.xsl");
                DataReader.Read();
                if (DataReader.NodeType != XmlNodeType.Element || ((IXmlLineInfo)DataReader).LineNumber != 1 || ((IXmlLineInfo)DataReader).LinePosition != 2)
                    CError.Compare(false, "Failed");
                DataReader.Read();
                if (DataReader.NodeType != XmlNodeType.Whitespace || ((IXmlLineInfo)DataReader).LineNumber != 4 || ((IXmlLineInfo)DataReader).LinePosition != 2)
                    CError.Compare(false, "Failed");
                DataReader.Read();
                if (DataReader.NodeType != XmlNodeType.Element || ((IXmlLineInfo)DataReader).LineNumber != 5 || ((IXmlLineInfo)DataReader).LinePosition != 3)
                    CError.Compare(false, "Failed");
                DataReader.Read();
                if (DataReader.NodeType != XmlNodeType.Whitespace || ((IXmlLineInfo)DataReader).LineNumber != 5 || ((IXmlLineInfo)DataReader).LinePosition != 28)
                    CError.Compare(false, "Failed");
                DataReader.Read();
                if (DataReader.NodeType != XmlNodeType.EndElement || ((IXmlLineInfo)DataReader).LineNumber != 6 || ((IXmlLineInfo)DataReader).LinePosition != 3)
                    CError.Compare(false, "Failed");
            }
            return TEST_PASS;
        }
    }
}
/* Main class for the Isosurface project
 * Most of the code was written by Lin
 * Other pieces of code were borrowed from existing implementations:
 * https://github.com/aewallin/dualcontouring
 * https://code.google.com/p/simplexnoise/
 * http://www.volume-gfx.com/
 * All of this code is meant for experimentation purposes only!
 * Do not use any of it as a guide for how any specific algorithm works.
 * NONE of the algorithms are implemented to completion, or to the exact specification in the original papers.
 * For example, the QEF solvers for Dual Contouring use a brute-force method of calculating the best point.
 * In the 3D DC implementations, QEF solving is disabled altogether.
 * The Dual Marching Squares implementation substitutes an error-reducing function with a separate, faster one.
 * Some implementations might exhibit bugs, like improper connectivity in the 2D DC implementations.
 * These should all be fixed in time, though.
 * The goal of this code is to provide the simplest, most basic implementations of each algorithm for people looking to get better-than-Marching-Cubes results.
 * All of the implemented algorithms live in their own namespace in their own folder, which means they don't depend on anything else --
 * with the exception of the QEF solvers and Sampler class, and of course the abstract class ISurfaceAlgorithm.
 * You can find all of the papers by using Google.
 * Good luck!
 * https://github.com/Lin20/isosurface
 */

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Media;
using System.Reflection;

namespace Isosurface
{
    /// <summary>
    /// Flags controlling how the surface is rasterized: solid fill, wireframe
    /// overlay, or both (the values are combinable bit flags).
    /// </summary>
    public enum WireframeModes
    {
        Fill = 1,
        Wireframe = 2
    }

    /// <summary>
    /// Description of a raw volume-data model on disk. Mrc is derived from the
    /// filename extension (".mrc"); all other fields are caller-supplied.
    /// </summary>
    public struct RawModel
    {
        public string Filename;
        public int Width;
        public int Height;
        public int Length;
        public float IsoLevel;   // iso-surface threshold used when sampling this volume
        public bool Flip;
        public int Bytes;        // bytes per voxel; defaults to 1 in the constructor
        public bool Mrc;

        public RawModel(string filename, int width, int height, int length, float isolevel, bool flip = true, int bytes = 1)
        {
            Filename = filename;
            Width = width;
            Height = height;
            Length = length;
            IsoLevel = isolevel;
            Flip = flip;
            Bytes = bytes;
            // MRC volumes are detected purely by extension.
            Mrc = filename.EndsWith(".mrc");
        }
    }

    /// <summary>
    /// Main XNA game: hosts one ISurfaceAlgorithm at a time, contours it at a
    /// selectable quality, and renders the result (optionally via a deferred
    /// shader). Keyboard drives all interaction -- see Update for the bindings.
    /// </summary>
    public class Game1 : Microsoft.Xna.Framework.Game
    {
        GraphicsDeviceManager graphics;
        SpriteBatch spriteBatch;
        Effect dn_effect;    // shader for algorithms that flag SpecialShader
        Effect reg_effect;   // default shader
        Effect wire_effect;  // shader used by custom wireframe drawing
        KeyboardState last_state;  // previous frame's keyboard, for edge-detecting key presses

        public int QualityIndex { get; set; }    // index into Qualities
        public int AlgorithmIndex { get; set; }  // index into AlgorithmTypes (next one to select)
        public int ModelIndex { get; set; }      // index into Models; -1 means no volume loaded
        // Error thresholds passed to ISurfaceAlgorithm.Contour; cycled with Space.
        public float[] Qualities = { 0.0f, 0.001f, 0.01f, 0.05f, 0.1f, 0.2f, 0.4f, 0.5f, 0.8f, 1.0f, 1.5f, 2.0f, 5.0f, 10.0f, 25.0f, 50.0f, 100.0f, 250.0f, 500.0f, 1000.0f, 2500.0f, 5000.0f, 10000.0f, 25000.0f, 50000.0f, 100000.0f };
        // Volume models cycled with F.
        public RawModel[] Models = { new RawModel("BostonTeapot", 178, 256, 256, 0.1f), new RawModel("engine", 128, 256, 256, 0.2f), new RawModel("bonsai", 256, 256, 256, 0.15f, false), new RawModel("lobster", 56, 324, 301, 0.18f), new RawModel("horse.mrc", 256, 256, 256, 1, false), new RawModel("dragon.mrc", 256, 256, 256, 1, false), new RawModel("dragon2.mrc", 256, 256, 256, 1, false), new RawModel("star.mrc", 256, 256, 256, 1, false), new RawModel("table.mrc", 256, 256, 256, 1, false), new RawModel("piano.mrc", 256, 256, 256, 1, false), new RawModel("statue.mrc", 256, 256, 256, 1, false) };

        /* Add new algorithms here to see them by pressing Tab */
        public Type[] AlgorithmTypes = { typeof(ManifoldDC.MDC3D) /*,typeof(DMCNeilson.DMCN)*//*, typeof(DualMarchingSquaresNeilson.DMSNeilson), typeof(DualMarchingSquares.DMS), typeof(UniformDualContouring2D.DC), typeof(AdaptiveDualContouring2D.ADC), typeof(UniformDualContouring.DC3D)*/, typeof(AdaptiveDualContouring.ADC3D) };

        public ISurfaceAlgorithm SelectedAlgorithm { get; set; }
        private Camera Camera { get; set; }

        public const int TileSize = 14;
        public const int Resolution = 64;  // grid resolution handed to every algorithm

        public DrawModes DrawMode { get; set; }
        public RasterizerState RState { get; set; }
        public WireframeModes WireframeMode { get; set; }
        public DeferredShader DeferredRenderer { get; set; }

        public Game1()
        {
            graphics = new GraphicsDeviceManager(this);
            Content.RootDirectory = "Content";
        }

        /// <summary>
        /// One-time setup: graphics state, shader loading, initial algorithm
        /// selection and camera. Order matters -- shaders must be loaded before
        /// NextAlgorithm() contours, and the camera before the view matrices
        /// are pushed to the effects.
        /// </summary>
        protected override void Initialize()
        {
            AdvancingFrontVIS2006.AdvancingFrontVIS2006.GetIdealEdgeLength(0, (Resolution / 2 - 2), 0);
            //DualMarchingSquaresNeilson.MarchingSquaresTableGenerator.PrintCaseTable();
            // ModelIndex is forced to -1 here, so the ReadData call below is
            // intentionally dead code -- set ModelIndex >= 0 to preload a volume.
            ModelIndex = -1;
            if (ModelIndex > -1)
                Sampler.ReadData(Models[ModelIndex], Resolution);
            // Warm up the noise generator (result unused on purpose).
            float n = SimplexNoise.Noise(0, 0);
            RState = new RasterizerState();
            // Volume data and procedural data use opposite winding orders.
            RState.CullMode = (Sampler.ImageData != null ? CullMode.CullCounterClockwiseFace : CullMode.CullClockwiseFace);
            GraphicsDevice.RasterizerState = RState;
            graphics.PreferredBackBufferWidth = 1600;
            graphics.PreferredBackBufferHeight = 900;
            graphics.PreferMultiSampling = true;
            graphics.ApplyChanges();
            IsMouseVisible = true;
            //effect = new BasicEffect(GraphicsDevice);
            reg_effect = Content.Load<Effect>("ShaderRegular");
            reg_effect.Parameters["ColorEnabled"].SetValue(true);
            dn_effect = Content.Load<Effect>("ShaderDN");
            dn_effect.Parameters["ColorEnabled"].SetValue(true);
            wire_effect = Content.Load<Effect>("WireShader");
            QualityIndex = 0;
            // Selects AlgorithmTypes[0] and contours it once.
            NextAlgorithm();
            //effect.VertexColorEnabled = true;
            Camera = new Camera(GraphicsDevice, new Vector3(-Resolution, Resolution, -Resolution), 1f);
            Camera.Projection = Matrix.CreatePerspectiveFieldOfView(MathHelper.ToRadians(45), (float)graphics.PreferredBackBufferWidth / (float)graphics.PreferredBackBufferHeight, 0.1f, 1000.0f);
            if (SelectedAlgorithm.Is3D)
            {
                Camera.Update(true);
                //effect.View = Camera.View;
                reg_effect.Parameters["View"].SetValue(Camera.View);
                reg_effect.Parameters["Projection"].SetValue(Camera.Projection);
                dn_effect.Parameters["View"].SetValue(Camera.View);
                dn_effect.Parameters["Projection"].SetValue(Camera.Projection);
            }
            last_state = Keyboard.GetState();
            DrawMode = Isosurface.DrawModes.Mesh;
            WireframeMode = WireframeModes.Fill;
            base.Initialize();
        }

        protected override void LoadContent()
        {
            // Create a new SpriteBatch, which can be used to draw textures.
            spriteBatch = new SpriteBatch(GraphicsDevice);
            DeferredRenderer = new DeferredShader(GraphicsDevice, Content, spriteBatch);
        }

        /// <summary>
        /// Instantiates the algorithm at the current AlgorithmIndex, then
        /// advances the index (wrapping) so the next call picks the next type.
        /// </summary>
        public void NextAlgorithm()
        {
            SetAlgorithm(AlgorithmTypes[AlgorithmIndex]);
            AlgorithmIndex = (AlgorithmIndex + 1) % AlgorithmTypes.Length;
        }

        /// <summary>
        /// Creates an instance of the given ISurfaceAlgorithm type, contours it
        /// at the current quality, and (for 3D algorithms) seeds its shader's
        /// view/projection matrices. Camera may still be null during Initialize,
        /// hence the null check before setting Projection.
        /// </summary>
        public void SetAlgorithm(Type t)
        {
            SelectedAlgorithm = (ISurfaceAlgorithm)Activator.CreateInstance(t, GraphicsDevice, Resolution, TileSize);
            UpdateQuality();
            if (SelectedAlgorithm.Is3D)
            {
                /*effect.View = Matrix.CreateLookAt(new Vector3(-1, 1, 1) * (float)Resolution, Vector3.Zero, Vector3.Up);
                effect.Projection = Matrix.CreatePerspectiveFieldOfView(MathHelper.ToRadians(45), (float)graphics.PreferredBackBufferWidth / (float)graphics.PreferredBackBufferHeight, 1.0f, 1000.0f);
                effect.EnableDefaultLighting();*/
                Effect e = (SelectedAlgorithm.SpecialShader ? dn_effect : reg_effect);
                e.Parameters["View"].SetValue(Matrix.CreateLookAt(new Vector3(-1, 1, 1) * (float)Resolution, Vector3.Zero, Vector3.Up));
                if (Camera != null)
                    e.Parameters["Projection"].SetValue(Camera.Projection);
            }
            else
            {
                /*effect.Projection = Matrix.CreateOrthographicOffCenter(0, GraphicsDevice.Viewport.Width, GraphicsDevice.Viewport.Height, 0, 0, 1);
                effect.View = Matrix.Identity;*/
            }
        }

        /// <summary>
        /// Re-contours the selected algorithm at the current quality and writes
        /// a stats line (primitive/vertex counts, timing, quality) to the
        /// window title.
        /// </summary>
        private void UpdateQuality()
        {
            long time = SelectedAlgorithm.Contour(Qualities[QualityIndex]);
            System.Text.StringBuilder text = new System.Text.StringBuilder();
            text.Append(SelectedAlgorithm.Name).Append(" - ");
            string topology_type = (SelectedAlgorithm.Is3D ? "Triangles" : "Lines");
            if (SelectedAlgorithm.IsIndexed)
                text.Append((SelectedAlgorithm.IndexCount / (SelectedAlgorithm.Is3D ? 3 : 2)) + " " + topology_type + ", " + SelectedAlgorithm.VertexCount + " Vertices");
            else
                text.Append((SelectedAlgorithm.VertexCount / (SelectedAlgorithm.Is3D ? 3 : 2)) + " " + topology_type);
            if (SelectedAlgorithm.ExtraInformation != "")
                text.Append(", " + SelectedAlgorithm.ExtraInformation);
            text.Append(" (" + time + " ms)");
            text.Append(" - Quality " + Qualities[QualityIndex]);
            Window.Title = text.ToString();
        }

        protected override void UnloadContent()
        {
        }

        /// <summary>
        /// Per-frame input handling. Key bindings (edge-triggered against the
        /// previous frame's state): Esc quit, Space next quality, Tab next
        /// algorithm, F next model, 1/2 toggle mesh/outline draw flags,
        /// 3 cycle fill/wireframe modes, C toggle mouse lock, M toggle
        /// manifold enforcement (Manifold DC only).
        /// </summary>
        protected override void Update(GameTime gameTime)
        {
            // Allows the game to exit
            if (Keyboard.GetState().IsKeyDown(Keys.Escape))
                this.Exit();
            if (!last_state.IsKeyDown(Keys.Space) && Keyboard.GetState().IsKeyDown(Keys.Space))
            {
                QualityIndex = (QualityIndex + 1) % Qualities.Length;
                UpdateQuality();
            }
            if (!last_state.IsKeyDown(Keys.Tab) && Keyboard.GetState().IsKeyDown(Keys.Tab))
            {
                NextAlgorithm();
            }
            if (!last_state.IsKeyDown(Keys.F) && Keyboard.GetState().IsKeyDown(Keys.F))
            {
                // Re-create the same algorithm, then advance to the next model.
                SelectedAlgorithm = (ISurfaceAlgorithm)Activator.CreateInstance(SelectedAlgorithm.GetType(), GraphicsDevice, Resolution, TileSize);
                ModelIndex = (ModelIndex + 1) % Models.Length;
                Sampler.ReadData(Models[ModelIndex], Resolution);
                UpdateQuality();
            }
            if (!last_state.IsKeyDown(Keys.D1) && Keyboard.GetState().IsKeyDown(Keys.D1))
            {
                // Only toggles the flag when another flag would remain set.
                if (DrawMode != DrawModes.Mesh)
                    DrawMode ^= DrawModes.Mesh;
            }
            if (!last_state.IsKeyDown(Keys.D2) && Keyboard.GetState().IsKeyDown(Keys.D2))
            {
                if (DrawMode != DrawModes.Outline)
                    DrawMode ^= DrawModes.Outline;
            }
            if (!last_state.IsKeyDown(Keys.D3) && Keyboard.GetState().IsKeyDown(Keys.D3))
            {
                // Cycle: Fill -> Fill|Wireframe -> Wireframe -> Fill.
                if (WireframeMode == WireframeModes.Fill)
                    WireframeMode = WireframeModes.Fill | WireframeModes.Wireframe;
                else if (WireframeMode == (WireframeModes.Fill | WireframeModes.Wireframe))
                    WireframeMode = WireframeModes.Wireframe;
                else
                    WireframeMode = WireframeModes.Fill;
                /*if (WireframeMode != (WireframeModes.Fill | WireframeModes.Wireframe))
                {
                    RState = new RasterizerState();
                    RState.CullMode = CullMode.None;
                    RState.FillMode = (WireframeMode == WireframeModes.Fill ? FillMode.Solid : FillMode.WireFrame);
                    GraphicsDevice.RasterizerState = RState;
                }*/
            }
            if (!last_state.IsKeyDown(Keys.C) && Keyboard.GetState().IsKeyDown(Keys.C))
            {
                Camera.MouseLocked = !Camera.MouseLocked;
            }
            if (!last_state.IsKeyDown(Keys.M) && Keyboard.GetState().IsKeyDown(Keys.M))
            {
                if (SelectedAlgorithm.GetType() == typeof(ManifoldDC.MDC3D))
                {
                    ((ManifoldDC.MDC3D)SelectedAlgorithm).EnforceManifold = !((ManifoldDC.MDC3D)SelectedAlgorithm).EnforceManifold;
                    UpdateQuality();
                }
            }
            if (SelectedAlgorithm.Is3D)
            {
                Camera.Update(true);
                //effect.View = Camera.View;
                (SelectedAlgorithm.SpecialShader ? dn_effect : reg_effect).Parameters["View"].SetValue(Camera.View);
            }
            last_state = Keyboard.GetState();
            base.Update(gameTime);
        }

        /// <summary>
        /// Renders the selected algorithm. Deferred-capable algorithms short-
        /// circuit to the deferred renderer; otherwise a solid pass and/or a
        /// wireframe pass is drawn depending on WireframeMode. The world
        /// transform centers the Resolution-sized grid on the origin.
        /// </summary>
        protected override void Draw(GameTime gameTime)
        {
            if (SelectedAlgorithm.SupportsDeferred)
            {
                DeferredRenderer.Draw(SelectedAlgorithm, Camera);
                return;
            }
            if (SelectedAlgorithm.Is3D)
                GraphicsDevice.Clear(Color.DimGray);
            else
                GraphicsDevice.Clear(Color.WhiteSmoke);
            Effect e = (SelectedAlgorithm.SpecialShader ? dn_effect : reg_effect);
            if (SelectedAlgorithm.Is3D)
                e.Parameters["World"].SetValue(Matrix.CreateTranslation(new Vector3(-Resolution / 2, -Resolution / 2, -Resolution / 2)));
            else
                e.Parameters["World"].SetValue(Matrix.Identity);
            if (SelectedAlgorithm.Is3D && (int)(WireframeMode & WireframeModes.Fill) != 0)
            {
                // Solid pass: fresh rasterizer state each frame (XNA states are immutable once bound).
                RasterizerState rs = new RasterizerState();
                rs.CullMode = (Sampler.ImageData != null ? CullMode.CullCounterClockwiseFace : CullMode.CullClockwiseFace);
                rs.FillMode = FillMode.Solid;
                rs.DepthBias = 0;
                GraphicsDevice.RasterizerState = rs;
            }
            if (!SelectedAlgorithm.Is3D || WireframeMode != WireframeModes.Wireframe)
                SelectedAlgorithm.Draw(e, false, DrawMode);
            if (SelectedAlgorithm.Is3D && (int)(WireframeMode & WireframeModes.Wireframe) != 0 && !SelectedAlgorithm.SupportsDeferred)
            {
                if (!SelectedAlgorithm.CustomWireframe)
                {
                    // Generic wireframe overlay: redraw the same geometry with
                    // FillMode.WireFrame and vertex colors disabled.
                    RasterizerState rs = new RasterizerState();
                    rs.CullMode = (Sampler.ImageData != null ? CullMode.CullCounterClockwiseFace : CullMode.CullClockwiseFace);
                    rs.FillMode = FillMode.WireFrame;
                    //rs.DepthBias = -0.0001f;
                    GraphicsDevice.RasterizerState = rs;
                    e.Parameters["ColorEnabled"].SetValue(false);
                    SelectedAlgorithm.Draw(e, false, DrawMode);
                    e.Parameters["ColorEnabled"].SetValue(true);
                }
                else
                {
                    //effect.Parameters["ColorEnabled"].SetValue(false);
                    SelectedAlgorithm.DrawWireframe(Camera, wire_effect, Matrix.CreateTranslation(new Vector3(-Resolution / 2, -Resolution / 2, -Resolution / 2)));
                    //effect.Parameters["ColorEnabled"].SetValue(true);
                }
            }
            base.Draw(gameTime);
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.AcceptanceTestsBodyDateTime
{
    using Models;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// Extension methods for Datetime.
    /// </summary>
    // NOTE(review): generated file -- do not hand-edit. The synchronous
    // wrappers below block on the async implementations via the generated
    // Task.Factory.StartNew(...).Unwrap().GetAwaiter().GetResult() pattern;
    // calling them from a context with a synchronization context risks
    // deadlock, which is inherent to this generator template.
    public static partial class DatetimeExtensions
    {
            /// <summary>
            /// Get null datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetNull(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetNullAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get null datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetNullAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetNullWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Get invalid datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetInvalid(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetInvalidAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get invalid datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetInvalidAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetInvalidWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Get overflow datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetOverflow(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetOverflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get overflow datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetOverflowAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetOverflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Get underflow datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetUnderflow(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetUnderflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get underflow datetime value
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetUnderflowAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetUnderflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Put max datetime value 9999-12-31T23:59:59.9999999Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            public static void PutUtcMaxDateTime(this IDatetime operations, System.DateTime datetimeBody)
            {
                Task.Factory.StartNew(s => ((IDatetime)s).PutUtcMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Put max datetime value 9999-12-31T23:59:59.9999999Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task PutUtcMaxDateTimeAsync(this IDatetime operations, System.DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.PutUtcMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// Get max datetime value 9999-12-31t23:59:59.9999999z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetUtcLowercaseMaxDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get max datetime value 9999-12-31t23:59:59.9999999z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetUtcLowercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetUtcLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Get max datetime value 9999-12-31T23:59:59.9999999Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetUtcUppercaseMaxDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get max datetime value 9999-12-31T23:59:59.9999999Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetUtcUppercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetUtcUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Put max datetime value with positive numoffset
            /// 9999-12-31t23:59:59.9999999+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            public static void PutLocalPositiveOffsetMaxDateTime(this IDatetime operations, System.DateTime datetimeBody)
            {
                Task.Factory.StartNew(s => ((IDatetime)s).PutLocalPositiveOffsetMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Put max datetime value with positive numoffset
            /// 9999-12-31t23:59:59.9999999+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task PutLocalPositiveOffsetMaxDateTimeAsync(this IDatetime operations, System.DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.PutLocalPositiveOffsetMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31t23:59:59.9999999+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetLocalPositiveOffsetLowercaseMaxDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31t23:59:59.9999999+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetLocalPositiveOffsetLowercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetLocalPositiveOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31T23:59:59.9999999+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetLocalPositiveOffsetUppercaseMaxDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31T23:59:59.9999999+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetLocalPositiveOffsetUppercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetLocalPositiveOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Put max datetime value with positive numoffset
            /// 9999-12-31t23:59:59.9999999-14:00
            /// </summary>
            // NOTE(review): the generated summary says "positive" but the offset
            // is negative (-14:00); doc text kept verbatim since the file is
            // regenerated by AutoRest.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            public static void PutLocalNegativeOffsetMaxDateTime(this IDatetime operations, System.DateTime datetimeBody)
            {
                Task.Factory.StartNew(s => ((IDatetime)s).PutLocalNegativeOffsetMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Put max datetime value with positive numoffset
            /// 9999-12-31t23:59:59.9999999-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task PutLocalNegativeOffsetMaxDateTimeAsync(this IDatetime operations, System.DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.PutLocalNegativeOffsetMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31T23:59:59.9999999-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetLocalNegativeOffsetUppercaseMaxDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31T23:59:59.9999999-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetLocalNegativeOffsetUppercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetLocalNegativeOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31t23:59:59.9999999-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetLocalNegativeOffsetLowercaseMaxDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get max datetime value with positive num offset
            /// 9999-12-31t23:59:59.9999999-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetLocalNegativeOffsetLowercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetLocalNegativeOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Put min datetime value 0001-01-01T00:00:00Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            public static void PutUtcMinDateTime(this IDatetime operations, System.DateTime datetimeBody)
            {
                Task.Factory.StartNew(s => ((IDatetime)s).PutUtcMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Put min datetime value 0001-01-01T00:00:00Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task PutUtcMinDateTimeAsync(this IDatetime operations, System.DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.PutUtcMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// Get min datetime value 0001-01-01T00:00:00Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetUtcMinDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get min datetime value 0001-01-01T00:00:00Z
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetUtcMinDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetUtcMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Put min datetime value 0001-01-01T00:00:00+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            public static void PutLocalPositiveOffsetMinDateTime(this IDatetime operations, System.DateTime datetimeBody)
            {
                Task.Factory.StartNew(s => ((IDatetime)s).PutLocalPositiveOffsetMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Put min datetime value 0001-01-01T00:00:00+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task PutLocalPositiveOffsetMinDateTimeAsync(this IDatetime operations, System.DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.PutLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// Get min datetime value 0001-01-01T00:00:00+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetLocalPositiveOffsetMinDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get min datetime value 0001-01-01T00:00:00+14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetLocalPositiveOffsetMinDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// Put min datetime value 0001-01-01T00:00:00-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            public static void PutLocalNegativeOffsetMinDateTime(this IDatetime operations, System.DateTime datetimeBody)
            {
                Task.Factory.StartNew(s => ((IDatetime)s).PutLocalNegativeOffsetMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Put min datetime value 0001-01-01T00:00:00-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='datetimeBody'>
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task PutLocalNegativeOffsetMinDateTimeAsync(this IDatetime operations, System.DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.PutLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// Get min datetime value 0001-01-01T00:00:00-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static System.DateTime? GetLocalNegativeOffsetMinDateTime(this IDatetime operations)
            {
                return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Get min datetime value 0001-01-01T00:00:00-14:00
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<System.DateTime?> GetLocalNegativeOffsetMinDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }
    }
}
using System;
using System.Data;
using System.Data.Common;
using System.IO;
using System.Text;

namespace BLToolkit.Mapping
{
	// TextDataReader: an IDataReader over a simple line-oriented text format.
	// Format (as implemented by ReadHeader/ReadRecord below):
	//   - Only lines whose first character is '*' are significant; everything
	//     else (blank lines, other text) is skipped by ReadNextLine.
	//   - "*:" introduces a header line: "*:name1:name2:..." — field names,
	//     trimmed of surrounding whitespace.
	//   - "**" introduces a data record. The character at index 2 is the value
	//     separator for that line, values start at index 3: e.g. "**;a;b".
	//   - "*-" terminates the current record set; NextResult() can then read
	//     the next header/record group from the same stream.
	//   - Value decoding: empty -> null; leading '*' -> the rest taken
	//     literally (escape for values starting with '*' or '+'); leading '+'
	//     -> the rest is Base64-encoded UTF-16 (Encoding.Unicode) text.
	// All fields are exposed as strings; typed Get* accessors convert on
	// demand through the supplied MappingSchema.
	public class TextDataReader : IDataReader
	{
		#region Constructors

		// Reads with the default mapping schema.
		public TextDataReader(Stream stream)
			: this(stream, Map.DefaultSchema)
		{
		}

		// Wraps the stream in a StreamReader and immediately consumes the
		// first header ("*:") line so that field names are available.
		// NOTE(review): the reader takes no ownership decision here — see
		// Dispose/Close below; the StreamReader is never disposed by this class.
		public TextDataReader(Stream stream, MappingSchema mappingSchema)
		{
			GC.SuppressFinalize(this);

			if (mappingSchema == null) throw new ArgumentNullException("mappingSchema");

			_reader        = new StreamReader(stream);
			_mappingSchema = mappingSchema;

			ReadHeader();
		}

		#endregion

		#region Protected Members

		private readonly StreamReader  _reader;
		private readonly MappingSchema _mappingSchema;

		// Current significant line; null once end-of-stream has been reached
		// (or Close() was called) — see IsEof.
		private string   _line       = string.Empty;
		// Field names from the last "*:" header line.
		private string[] _names      = _empty;
		// Decoded values of the current record, parallel to _names.
		private string[] _values     = _empty;
		// 1-based physical line counter, used only for error reporting.
		private int      _lineNumber = 0;

		private static readonly string[] _empty = new string[0];

		// True when the underlying stream is exhausted or the reader is closed.
		private bool IsEof
		{
			get { return _line == null; }
		}

		// Advances _line to the next line starting with '*'.
		// Returns false at end of stream (at which point _line is null,
		// making IsEof true).
		private bool ReadNextLine()
		{
			while (!IsEof)
			{
				_line = _reader.ReadLine();
				_lineNumber++;

				if (!string.IsNullOrEmpty(_line) && _line[0] == '*')
					return true;
			}

			return false;
		}

		// Consumes "*:" header lines until the first record ("**") or
		// terminator ("*-") line, leaving _line positioned on it.
		// A later header line overwrites the names from an earlier one.
		private void ReadHeader()
		{
			while (ReadNextLine())
			{
				if (_line.StartsWith("*:"))
				{
					_names  = _line.Substring(2).Split(':');
					_values = new string[_names.Length];

					for (int i = 0; i < _names.Length; i++)
						_names[i] = _names[i].Trim();
				}
				else if (_line.StartsWith("**") || _line.StartsWith("*-"))
					break;
			}
		}

		// Decodes the current "**" line into _values and advances to the next
		// significant line. Returns false on "*-" or end of stream; throws on
		// any other '*' line (malformed input).
		private bool ReadRecord()
		{
			if (!IsEof)
			{
				if (_line.StartsWith("*-"))
					return false;

				if (_line.StartsWith("**") && _line.Length > 3)
				{
					// _line[2] is the per-line separator; values start at index 3.
					string[] values = _line.Substring(3).Split(_line[2]);

					// Extra values beyond _names.Length are silently ignored;
					// missing values leave the previous record's entries in place.
					for (int i = 0; i < _values.Length && i < values.Length; i++)
					{
						string value = values[i];

						_values[i] =
							value.Length == 0? null:
							value[0] == '*'? value.Substring(1):
							value[0] == '+'? Encoding.Unicode.GetString(Convert.FromBase64String(value.Substring(1))):
							value;
					}

					ReadNextLine();

					return true;
				}

				throw new MappingException(
					string.Format("Invalid data format in the line {0}.", _lineNumber));
			}

			return false;
		}

		#endregion

		#region IDataReader Members

		// Marks the reader as closed/EOF.
		// NOTE(review): neither Close nor Dispose disposes the underlying
		// StreamReader (and hence the caller's Stream) — the caller owns it.
		// Confirm this is intentional before changing.
		public virtual void Close()
		{
			_line = null;
		}

		public virtual int Depth
		{
			get { return 0; }
		}

		// Every field is surfaced as a string; conversions happen in Get*.
		public virtual Type GetFieldType(int index)
		{
			return typeof(string);
		}

		public virtual string GetName(int index)
		{
			return _names[index];
		}

		private DataTable _schemaTable;

		// Lazily builds (once) a schema describing all fields as nullable,
		// non-key string columns of size byte.MaxValue.
		public virtual DataTable GetSchemaTable()
		{
			if (_schemaTable == null)
			{
				_schemaTable = new DataTable("SchemaTable");

				_schemaTable.Columns.AddRange(new DataColumn[]
				{
					new DataColumn(SchemaTableColumn.ColumnName,               typeof(string)),
					new DataColumn(SchemaTableColumn.ColumnOrdinal,            typeof(int)),
					new DataColumn(SchemaTableColumn.ColumnSize,               typeof(int)),
					new DataColumn(SchemaTableColumn.NumericPrecision,         typeof(short)),
					new DataColumn(SchemaTableColumn.NumericScale,             typeof(short)),
					new DataColumn(SchemaTableColumn.DataType,                 typeof(Type)),
					new DataColumn(SchemaTableColumn.NonVersionedProviderType, typeof(int)),
					new DataColumn(SchemaTableColumn.ProviderType,             typeof(int)),
					new DataColumn(SchemaTableColumn.IsLong,                   typeof(bool)),
					new DataColumn(SchemaTableColumn.AllowDBNull,              typeof(bool)),
					new DataColumn(SchemaTableColumn.IsUnique,                 typeof(bool)),
					new DataColumn(SchemaTableColumn.IsKey,                    typeof(bool)),
					new DataColumn(SchemaTableColumn.BaseSchemaName,           typeof(string)),
					new DataColumn(SchemaTableColumn.BaseTableName,            typeof(string)),
					new DataColumn(SchemaTableColumn.BaseColumnName,           typeof(string)),
					new DataColumn(SchemaTableColumn.IsAliased,                typeof(bool)),
					new DataColumn(SchemaTableColumn.IsExpression,             typeof(bool)),
				});

				for (int i = 0; i < _names.Length; i++)
				{
					DataRow row = _schemaTable.NewRow();

					row[SchemaTableColumn.ColumnName]               = _names[i];
					row[SchemaTableColumn.ColumnOrdinal]            = i;
					row[SchemaTableColumn.ColumnSize]               = (int)byte.MaxValue;
					row[SchemaTableColumn.NumericPrecision]         = (short)0;
					row[SchemaTableColumn.NumericScale]             = (short)0;
					row[SchemaTableColumn.DataType]                 = typeof(string);
					row[SchemaTableColumn.NonVersionedProviderType] = 1;
					row[SchemaTableColumn.ProviderType]             = 1;
					row[SchemaTableColumn.IsLong]                   = false;
					row[SchemaTableColumn.AllowDBNull]              = true;
					row[SchemaTableColumn.IsUnique]                 = false;
					row[SchemaTableColumn.IsKey]                    = false;
					row[SchemaTableColumn.BaseSchemaName]           = string.Empty;
					row[SchemaTableColumn.BaseTableName]            = string.Empty;
					row[SchemaTableColumn.BaseColumnName]           = string.Empty;
					row[SchemaTableColumn.IsAliased]                = false;
					row[SchemaTableColumn.IsExpression]             = false;

					_schemaTable.Rows.Add(row);
				}
			}

			return _schemaTable;
		}

		public virtual int FieldCount
		{
			get { return _names.Length; }
		}

		public virtual bool IsClosed
		{
			get { return IsEof; }
		}

		// Reads the next header/record group ("*-" separated); returns true
		// if more data follows.
		public virtual bool NextResult()
		{
			ReadHeader();
			return !IsEof;
		}

		public virtual bool Read()
		{
			return ReadRecord();
		}

		public virtual int RecordsAffected
		{
			get { return -1; }
		}

		#endregion

		#region IDisposable Members

		// Intentionally empty — see the ownership note on Close().
		public virtual void Dispose()
		{
		}

		#endregion

		#region IDataRecord Members

		// Typed accessors delegate string conversion to the mapping schema.
		public virtual bool GetBoolean(int i)
		{
			return _mappingSchema.ConvertToBoolean(_values[i]);
		}

		public virtual byte GetByte(int i)
		{
			return _mappingSchema.ConvertToByte(_values[i]);
		}

		// Binary/char streaming and nested readers are not supported by this
		// text-based reader.
		public virtual long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length)
		{
			throw new Exception("The method or operation is not implemented.");
		}

		public virtual char GetChar(int i)
		{
			return _mappingSchema.ConvertToChar(_values[i]);
		}

		public virtual long GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length)
		{
			throw new Exception("The method or operation is not implemented.");
		}

		public virtual IDataReader GetData(int i)
		{
			throw new Exception("The method or operation is not implemented.");
		}

		public virtual string GetDataTypeName(int i)
		{
			return typeof(string).FullName;
		}

		public virtual DateTime GetDateTime(int i)
		{
			return _mappingSchema.ConvertToDateTime(_values[i]);
		}

#if FW3
		public virtual DateTimeOffset GetDateTimeOffset(int i)
		{
			return _mappingSchema.ConvertToDateTimeOffset(_values[i]);
		}
#endif

		public virtual decimal GetDecimal(int i)
		{
			return _mappingSchema.ConvertToDecimal(_values[i]);
		}

		public virtual double GetDouble(int i)
		{
			return _mappingSchema.ConvertToDouble(_values[i]);
		}

		public virtual float GetFloat(int i)
		{
			return _mappingSchema.ConvertToSingle(_values[i]);
		}

		public virtual Guid GetGuid(int i)
		{
			return _mappingSchema.ConvertToGuid(_values[i]);
		}

		public virtual short GetInt16(int i)
		{
			return _mappingSchema.ConvertToInt16(_values[i]);
		}

		public virtual int GetInt32(int i)
		{
			return _mappingSchema.ConvertToInt32(_values[i]);
		}

		public virtual long GetInt64(int i)
		{
			return _mappingSchema.ConvertToInt64(_values[i]);
		}

		// NOTE(review): returns -1 when the name is not found, whereas the
		// IDataRecord contract ordinarily throws — callers may depend on the
		// -1 sentinel, so this is documented rather than changed.
		public virtual int GetOrdinal(string name)
		{
			for (int i = 0; i < _names.Length; i++)
				if (_names[i] == name)
					return i;

			return -1;
		}

		public virtual string GetString(int i)
		{
			return _values[i];
		}

		public virtual object GetValue(int i)
		{
			return _values[i];
		}

		// Copies up to min(values.Length, FieldCount) values; returns the
		// number copied.
		public virtual int GetValues(object[] values)
		{
			int n = Math.Min(values.Length, _values.Length);

			for (int i = 0; i < n; i++)
				values[i] = _values[i];

			return n;
		}

		// Empty input values are stored as null (see ReadRecord), so null
		// here means "DBNull".
		public virtual bool IsDBNull(int i)
		{
			return _values[i] == null;
		}

		// Name-based indexer throws on unknown names (unlike GetOrdinal).
		public virtual object this[string name]
		{
			get
			{
				for (int i = 0; i < _names.Length; i++)
					if (_names[i] == name)
						return _values[i];

				throw new ArgumentException(string.Format("Invalid field name '{0}'", name));
			}
		}

		public virtual object this[int i]
		{
			get { return _values[i]; }
		}

		#endregion
	}
}
using System;
using System.Collections.Generic;
using System.Text;
using System.CodeDom;

namespace Thinktecture.Tools.Web.Services.CodeGeneration
{
	/// <summary>
	/// Walks the generated CodeDOM tree and substitutes array types on fields,
	/// properties, method parameters and return types with collection types
	/// produced by the configured <see cref="ICollectionTypeProvider"/>.
	/// </summary>
	internal class CollectionTypeGenerator
	{
		#region Private Members

		// Provider that creates the replacement collection type for an array type.
		private readonly ICollectionTypeProvider collectionTypeProvider;
		// Cache of generated collection types, keyed by the original array
		// type's BaseType name, so each element type is converted only once.
		private readonly Dictionary<string, CodeTypeReference> generatedTypes;
		private readonly ExtendedCodeDomTree code;

		#endregion

		#region Constructors

		public CollectionTypeGenerator(ICollectionTypeProvider collectionTypeProvider, ExtendedCodeDomTree code)
		{
			this.collectionTypeProvider = collectionTypeProvider;
			this.generatedTypes = new Dictionary<string, CodeTypeReference>();
			this.code = code;
		}

		#endregion

		#region Public Methods

		/// <summary>
		/// Runs the array-to-collection conversion over every type category
		/// in the code tree.
		/// </summary>
		public void Execute()
		{
			RunConverter(code.DataContracts);
			RunConverter(code.MessageContracts);
			RunConverter(code.ClientTypes);
			RunConverter(code.ServiceContracts);
			RunConverter(code.ServiceTypes);
		}

		#endregion

		#region Private Methods

		/// <summary>
		/// Converts the members of each type in the given filtered collection.
		/// </summary>
		private void RunConverter(FilteredTypes collection)
		{
			// Snapshot the count before iterating — the original deliberately
			// cached it, presumably because creating collection types can add
			// entries to the same collection (TODO confirm); preserved as-is.
			int initialCount = collection.Count;

			for (int i = 0; i < initialCount; i++)
			{
				CodeTypeExtension typeExtension = collection[i];

				// Convert every member category of the type.
				ConvertMembers(typeExtension.Fields);
				ConvertMembers(typeExtension.Properties);
				ConvertMembers(typeExtension.Methods);
				ConvertMembers(typeExtension.Constructors);
			}
		}

		/// <summary>
		/// Rewrites the array types of all convertible members in the list.
		/// </summary>
		private void ConvertMembers(FilteredTypeMembers filteredTypeMembers)
		{
			foreach (CodeTypeMemberExtension memberExt in filteredTypeMembers)
			{
				// Skip members whose type must not (or cannot) be converted.
				if (!IsConvertibleMember(memberExt))
				{
					continue;
				}

				if (memberExt.Kind == CodeTypeMemberKind.Field)
				{
					CodeMemberField field = (CodeMemberField)memberExt.ExtendedObject;
					field.Type = GetCollectionTypeReference(field.Type);
				}
				else if (memberExt.Kind == CodeTypeMemberKind.Property)
				{
					CodeMemberProperty property = (CodeMemberProperty)memberExt.ExtendedObject;
					property.Type = GetCollectionTypeReference(property.Type);
				}
				else if (memberExt.Kind == CodeTypeMemberKind.Method ||
					memberExt.Kind == CodeTypeMemberKind.Constructor ||
					memberExt.Kind == CodeTypeMemberKind.StaticConstructor)
				{
					CodeMemberMethod method = (CodeMemberMethod)memberExt.ExtendedObject;
					ProcessMethod(method);
				}
			}
		}

		/// <summary>
		/// Determines whether we can generate a collection type to substitute
		/// the given member's type. (Renamed from the misspelled
		/// "IsConvertibleMemeber"; private, single call site above.)
		/// </summary>
		private bool IsConvertibleMember(CodeTypeMemberExtension memberExtension)
		{
			if (memberExtension.Kind == CodeTypeMemberKind.Field)
			{
				CodeMemberField field = (CodeMemberField)memberExtension.ExtendedObject;

				// Only array-typed fields are candidates.
				if (field.Type.ArrayElementType == null)
				{
					return false;
				}

				// The field is not convertible if it backs a property that
				// carries attributes incompatible with the conversion.
				foreach (CodeTypeMemberExtension parent in memberExtension.Parent.Properties)
				{
					CodeMemberProperty property = (CodeMemberProperty)parent.ExtendedObject;

					foreach (CodeStatement statement in property.GetStatements)
					{
						// Find the getter's return statement...
						CodeMethodReturnStatement returnStatement = statement as CodeMethodReturnStatement;
						if (returnStatement != null)
						{
							// ...that returns a field reference...
							CodeFieldReferenceExpression fieldRef = returnStatement.Expression as CodeFieldReferenceExpression;
							if (fieldRef != null)
							{
								// ...to the field being checked.
								if (fieldRef.FieldName == field.Name)
								{
									// A backing property with invalid attributes
									// blocks conversion of the field.
									if (HasInvalidAttributes(parent))
									{
										return false;
									}
								}
							}
						}
					}
				}

				// Convertible if the field itself carries no invalid attributes.
				return !HasInvalidAttributes(memberExtension);
			}

			if (memberExtension.Kind == CodeTypeMemberKind.Property)
			{
				CodeMemberProperty property = (CodeMemberProperty)memberExtension.ExtendedObject;

				if (property.Type.ArrayElementType == null)
				{
					return false;
				}

				return !HasInvalidAttributes(memberExtension);
			}

			// Methods and (static) constructors are always examined; their
			// parameter and return types are filtered in ProcessMethod.
			if (memberExtension.Kind == CodeTypeMemberKind.Method ||
				memberExtension.Kind == CodeTypeMemberKind.Constructor ||
				memberExtension.Kind == CodeTypeMemberKind.StaticConstructor)
			{
				return true;
			}

			// Only fields, properties, methods and constructors are supported.
			return false;
		}

		/// <summary>
		/// Checks whether the member carries attributes that prevent converting
		/// its type from an array to a collection.
		/// </summary>
		private bool HasInvalidAttributes(CodeTypeMemberExtension memberExtension)
		{
			return memberExtension.FindAttribute("System.Xml.Serialization.XmlChoiceIdentifierAttribute") != null
				|| memberExtension.FindAttribute("System.Xml.Serialization.XmlIgnoreAttribute") != null;
		}

		/// <summary>
		/// Rewrites array-typed parameters and the return type of a method.
		/// Uses only cached collection types (create == false): no new types
		/// are generated for method signatures.
		/// </summary>
		private void ProcessMethod(CodeMemberMethod method)
		{
			// First the parameters...
			foreach (CodeParameterDeclarationExpression paramExp in method.Parameters)
			{
				if (paramExp.Type.ArrayElementType != null)
				{
					paramExp.Type = GetCollectionTypeReference(paramExp.Type, false);
				}
			}

			// ...then the return type.
			if (method.ReturnType != null && method.ReturnType.ArrayElementType != null)
			{
				method.ReturnType = GetCollectionTypeReference(method.ReturnType, false);
			}
		}

		private CodeTypeReference GetCollectionTypeReference(CodeTypeReference ctr)
		{
			return GetCollectionTypeReference(ctr, true);
		}

		/// <summary>
		/// Returns the collection type for the given array type reference:
		/// from the cache when available, newly created (and cached) when
		/// <paramref name="create"/> is true, otherwise the original reference.
		/// </summary>
		private CodeTypeReference GetCollectionTypeReference(CodeTypeReference ctr, bool create)
		{
			CodeTypeReference nctr = CacheLookup(ctr);

			if (nctr == null)
			{
				if (create)
				{
					nctr = collectionTypeProvider.CreateCollectionType(ctr, code);
					CacheNewType(ctr, nctr);
				}
				else
				{
					nctr = ctr;
				}
			}

			return nctr;
		}

		private CodeTypeReference CacheLookup(CodeTypeReference type)
		{
			// Single dictionary lookup instead of ContainsKey + indexer.
			CodeTypeReference cached;
			return generatedTypes.TryGetValue(type.BaseType, out cached) ? cached : null;
		}

		private void CacheNewType(CodeTypeReference oldTypeRef, CodeTypeReference newTypeRef)
		{
			generatedTypes.Add(oldTypeRef.BaseType, newTypeRef);
		}

		#endregion
	}
}
// Generated by SharpKit.QooxDoo.Generator using System; using System.Collections.Generic; using SharpKit.Html; using SharpKit.JavaScript; using qx.ui.list.core; namespace qx.ui.list.provider { /// <summary> /// <para>The provider implements the <see cref="qx.ui.virtual.core.IWidgetCellProvider"/> API, /// which can be used as delegate for the widget cell rendering and it /// provides a API to bind the model with the rendered item.</para> /// </summary> [JsType(JsMode.Prototype, Name = "qx.ui.list.provider.WidgetProvider", OmitOptionalParameters = true, Export = false)] public partial class WidgetProvider : qx.core.Object, qx.ui.virtualx.core.IWidgetCellProvider, qx.ui.list.provider.IListProvider { #region Events /// <summary> /// Fired on change of the property <see cref="Delegate"/>. /// </summary> public event Action<qx.eventx.type.Data> OnChangeDelegate; #endregion Events #region Properties /// <summary> /// <para>Delegation object, which can have one or more functions defined by the /// <see cref="qx.ui.list.core.IListDelegate"/> interface.</para> /// </summary> /// <remarks> /// Allow nulls: true /// </remarks> [JsProperty(Name = "delegate", NativeField = true)] public object Delegate { get; set; } /// <summary> /// <para>A map containing the options for the group label binding. The possible keys /// can be found in the <see cref="qx.data.SingleValueBinding"/> documentation.</para> /// </summary> /// <remarks> /// Allow nulls: true /// </remarks> [JsProperty(Name = "groupLabelOptions", NativeField = true)] public object GroupLabelOptions { get; set; } /// <summary> /// <para>The path to the property which holds the information that should be /// displayed as a group label. 
This is only needed if objects are stored in the /// model.</para> /// </summary> /// <remarks> /// Allow nulls: true /// </remarks> [JsProperty(Name = "groupLabelPath", NativeField = true)] public string GroupLabelPath { get; set; } /// <summary> /// <para>A map containing the options for the icon binding. The possible keys /// can be found in the <see cref="qx.data.SingleValueBinding"/> documentation.</para> /// </summary> /// <remarks> /// Allow nulls: true /// </remarks> [JsProperty(Name = "iconOptions", NativeField = true)] public object IconOptions { get; set; } /// <summary> /// <para>The path to the property which holds the information that should be /// shown as an icon. This is only needed if objects are stored in the model /// and if the icon should be shown.</para> /// </summary> /// <remarks> /// Allow nulls: true /// </remarks> [JsProperty(Name = "iconPath", NativeField = true)] public string IconPath { get; set; } /// <summary> /// <para>A map containing the options for the label binding. The possible keys /// can be found in the <see cref="qx.data.SingleValueBinding"/> documentation.</para> /// </summary> /// <remarks> /// Allow nulls: true /// </remarks> [JsProperty(Name = "labelOptions", NativeField = true)] public object LabelOptions { get; set; } /// <summary> /// <para>The path to the property which holds the information that should be /// shown as a label. 
This is only needed if objects are stored in the model.</para> /// </summary> /// <remarks> /// Allow nulls: true /// </remarks> [JsProperty(Name = "labelPath", NativeField = true)] public string LabelPath { get; set; } #endregion Properties #region Methods public WidgetProvider() { throw new NotImplementedException(); } /// <summary> /// <para>Creates the WidgetProvider</para> /// </summary> /// <param name="list">list to provide.</param> public WidgetProvider(qx.ui.list.List list) { throw new NotImplementedException(); } /// <summary> /// <para>Creates a renderer for group rendering.</para> /// </summary> /// <returns>new group renderer.</returns> [JsMethod(Name = "createGroupRenderer")] public object CreateGroupRenderer() { throw new NotImplementedException(); } /// <summary> /// <para>Creates a renderer for item rendering.</para> /// </summary> /// <returns>new item renderer.</returns> [JsMethod(Name = "createItemRenderer")] public object CreateItemRenderer() { throw new NotImplementedException(); } /// <summary> /// <para>Creates a layer for item and group rendering.</para> /// </summary> /// <returns>new layer.</returns> [JsMethod(Name = "createLayer")] public qx.ui.virtualx.layer.Abstract CreateLayer() { throw new NotImplementedException(); } /// <summary> /// <para>This method returns the configured cell for the given cell. 
The return /// value may be null to indicate that the cell should be empty.</para> /// </summary> /// <param name="row">The cell&#8217;s row index.</param> /// <param name="column">The cell&#8217;s column index.</param> /// <returns>The configured widget for the given cell.</returns> [JsMethod(Name = "getCellWidget")] public qx.ui.core.LayoutItem GetCellWidget(double row, double column) { throw new NotImplementedException(); } /// <summary> /// <para>Returns if the passed row can be selected or not.</para> /// </summary> /// <param name="row">row to select.</param> /// <returns>true when the row can be selected, false otherwise.</returns> [JsMethod(Name = "isSelectable")] public bool IsSelectable(double row) { throw new NotImplementedException(); } /// <summary> /// <para>Release the given cell widget. Either pool or destroy the widget.</para> /// </summary> /// <param name="widget">The cell widget to pool.</param> [JsMethod(Name = "poolCellWidget")] public void PoolCellWidget(qx.ui.core.LayoutItem widget) { throw new NotImplementedException(); } /// <summary> /// <para>Styles a selected item.</para> /// </summary> /// <param name="row">row to style.</param> [JsMethod(Name = "styleSelectabled")] public void StyleSelectabled(double row) { throw new NotImplementedException(); } /// <summary> /// <para>Styles a not selected item.</para> /// </summary> /// <param name="row">row to style.</param> [JsMethod(Name = "styleUnselectabled")] public void StyleUnselectabled(double row) { throw new NotImplementedException(); } /// <summary> /// <para>Helper-Method for binding the default properties from /// the model to the target widget. The used default properties /// depends on the passed item. When the passed item is /// a list item the &#8220;label&#8221; and &#8220;icon&#8221; property is used. 
/// When the passed item is a group item the &#8220;value&#8221; property is /// used.</para> /// <para>This method should only be called in the /// <see cref="IListDelegate.BindItem"/> function /// implemented by the <see cref="Delegate"/> property.</para> /// </summary> /// <param name="item">The internally created and used list or group item.</param> /// <param name="index">The index of the item.</param> [JsMethod(Name = "bindDefaultProperties")] public void BindDefaultProperties(qx.ui.core.Widget item, double index) { throw new NotImplementedException(); } /// <summary> /// <para>Helper-Method for binding a given property from the model to the target /// widget. /// This method should only be called in the /// <see cref="IListDelegate.BindItem"/> function implemented by the /// <see cref="Delegate"/> property.</para> /// </summary> /// <param name="sourcePath">The path to the property in the model. If you use an empty string, the whole model item will be bound.</param> /// <param name="targetProperty">The name of the property in the target widget.</param> /// <param name="options">The options to use for the binding.</param> /// <param name="targetWidget">The target widget.</param> /// <param name="index">The index of the current binding.</param> [JsMethod(Name = "bindProperty")] public void BindProperty(string sourcePath, string targetProperty, object options, qx.ui.core.Widget targetWidget, double index) { throw new NotImplementedException(); } /// <summary> /// <para>Helper-Method for binding a given property from the target widget to /// the model. 
/// This method should only be called in the /// <see cref="IListDelegate.BindItem"/> function implemented by the /// <see cref="Delegate"/> property.</para> /// </summary> /// <param name="targetPath">The path to the property in the model.</param> /// <param name="sourceProperty">The name of the property in the target.</param> /// <param name="options">The options to use for the binding.</param> /// <param name="sourceWidget">The source widget.</param> /// <param name="index">The index of the current binding.</param> [JsMethod(Name = "bindPropertyReverse")] public void BindPropertyReverse(string targetPath, string sourceProperty, object options, qx.ui.core.Widget sourceWidget, double index) { throw new NotImplementedException(); } /// <summary> /// <para>Returns the (computed) value of the property delegate.</para> /// </summary> [JsMethod(Name = "getDelegate")] public object GetDelegate() { throw new NotImplementedException(); } /// <summary> /// <para>Returns the (computed) value of the property groupLabelOptions.</para> /// </summary> [JsMethod(Name = "getGroupLabelOptions")] public object GetGroupLabelOptions() { throw new NotImplementedException(); } /// <summary> /// <para>Returns the (computed) value of the property groupLabelPath.</para> /// </summary> [JsMethod(Name = "getGroupLabelPath")] public string GetGroupLabelPath() { throw new NotImplementedException(); } /// <summary> /// <para>Returns the (computed) value of the property iconOptions.</para> /// </summary> [JsMethod(Name = "getIconOptions")] public object GetIconOptions() { throw new NotImplementedException(); } /// <summary> /// <para>Returns the (computed) value of the property iconPath.</para> /// </summary> [JsMethod(Name = "getIconPath")] public string GetIconPath() { throw new NotImplementedException(); } /// <summary> /// <para>Returns the (computed) value of the property labelOptions.</para> /// </summary> [JsMethod(Name = "getLabelOptions")] public object GetLabelOptions() { throw new 
NotImplementedException(); } /// <summary> /// <para>Returns the (computed) value of the property labelPath.</para> /// </summary> [JsMethod(Name = "getLabelPath")] public string GetLabelPath() { throw new NotImplementedException(); } /// <summary> /// <para>Calls the apply method and dispatches the change event of the property delegate /// with the default value defined by the class developer. This function can /// only be called from the constructor of a class.</para> /// </summary> /// <param name="value">Initial value for property delegate.</param> [JsMethod(Name = "initDelegate")] public void InitDelegate(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Calls the apply method and dispatches the change event of the property groupLabelOptions /// with the default value defined by the class developer. This function can /// only be called from the constructor of a class.</para> /// </summary> /// <param name="value">Initial value for property groupLabelOptions.</param> [JsMethod(Name = "initGroupLabelOptions")] public void InitGroupLabelOptions(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Calls the apply method and dispatches the change event of the property groupLabelPath /// with the default value defined by the class developer. This function can /// only be called from the constructor of a class.</para> /// </summary> /// <param name="value">Initial value for property groupLabelPath.</param> [JsMethod(Name = "initGroupLabelPath")] public void InitGroupLabelPath(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Calls the apply method and dispatches the change event of the property iconOptions /// with the default value defined by the class developer. 
This function can /// only be called from the constructor of a class.</para> /// </summary> /// <param name="value">Initial value for property iconOptions.</param> [JsMethod(Name = "initIconOptions")] public void InitIconOptions(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Calls the apply method and dispatches the change event of the property iconPath /// with the default value defined by the class developer. This function can /// only be called from the constructor of a class.</para> /// </summary> /// <param name="value">Initial value for property iconPath.</param> [JsMethod(Name = "initIconPath")] public void InitIconPath(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Calls the apply method and dispatches the change event of the property labelOptions /// with the default value defined by the class developer. This function can /// only be called from the constructor of a class.</para> /// </summary> /// <param name="value">Initial value for property labelOptions.</param> [JsMethod(Name = "initLabelOptions")] public void InitLabelOptions(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Calls the apply method and dispatches the change event of the property labelPath /// with the default value defined by the class developer. This function can /// only be called from the constructor of a class.</para> /// </summary> /// <param name="value">Initial value for property labelPath.</param> [JsMethod(Name = "initLabelPath")] public void InitLabelPath(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Remove all bindings from all bounded items.</para> /// </summary> [JsMethod(Name = "removeBindings")] public void RemoveBindings() { throw new NotImplementedException(); } /// <summary> /// <para>Resets the user value of the property delegate.</para> /// <para>The computed value falls back to the next available value e.g. 
appearance, init or /// inheritance value depeneding on the property configuration and value availability.</para> /// </summary> [JsMethod(Name = "resetDelegate")] public void ResetDelegate() { throw new NotImplementedException(); } /// <summary> /// <para>Resets the user value of the property groupLabelOptions.</para> /// <para>The computed value falls back to the next available value e.g. appearance, init or /// inheritance value depeneding on the property configuration and value availability.</para> /// </summary> [JsMethod(Name = "resetGroupLabelOptions")] public void ResetGroupLabelOptions() { throw new NotImplementedException(); } /// <summary> /// <para>Resets the user value of the property groupLabelPath.</para> /// <para>The computed value falls back to the next available value e.g. appearance, init or /// inheritance value depeneding on the property configuration and value availability.</para> /// </summary> [JsMethod(Name = "resetGroupLabelPath")] public void ResetGroupLabelPath() { throw new NotImplementedException(); } /// <summary> /// <para>Resets the user value of the property iconOptions.</para> /// <para>The computed value falls back to the next available value e.g. appearance, init or /// inheritance value depeneding on the property configuration and value availability.</para> /// </summary> [JsMethod(Name = "resetIconOptions")] public void ResetIconOptions() { throw new NotImplementedException(); } /// <summary> /// <para>Resets the user value of the property iconPath.</para> /// <para>The computed value falls back to the next available value e.g. appearance, init or /// inheritance value depeneding on the property configuration and value availability.</para> /// </summary> [JsMethod(Name = "resetIconPath")] public void ResetIconPath() { throw new NotImplementedException(); } /// <summary> /// <para>Resets the user value of the property labelOptions.</para> /// <para>The computed value falls back to the next available value e.g. 
appearance, init or /// inheritance value depeneding on the property configuration and value availability.</para> /// </summary> [JsMethod(Name = "resetLabelOptions")] public void ResetLabelOptions() { throw new NotImplementedException(); } /// <summary> /// <para>Resets the user value of the property labelPath.</para> /// <para>The computed value falls back to the next available value e.g. appearance, init or /// inheritance value depeneding on the property configuration and value availability.</para> /// </summary> [JsMethod(Name = "resetLabelPath")] public void ResetLabelPath() { throw new NotImplementedException(); } /// <summary> /// <para>Delegation object, which can have one or more functions defined by the /// <see cref="qx.ui.list.core.IListDelegate"/> interface.</para> /// </summary> /// <param name="delegatex">delegation object.</param> [JsMethod(Name = "setDelegate")] public void SetDelegate(object delegatex) { throw new NotImplementedException(); } /// <summary> /// <para>Sets the user value of the property groupLabelOptions.</para> /// </summary> /// <param name="value">New value for property groupLabelOptions.</param> [JsMethod(Name = "setGroupLabelOptions")] public void SetGroupLabelOptions(object value) { throw new NotImplementedException(); } /// <summary> /// <para>Sets the user value of the property groupLabelPath.</para> /// </summary> /// <param name="value">New value for property groupLabelPath.</param> [JsMethod(Name = "setGroupLabelPath")] public void SetGroupLabelPath(string value) { throw new NotImplementedException(); } /// <summary> /// <para>A map containing the options for the icon binding. 
The possible keys /// can be found in the <see cref="qx.data.SingleValueBinding"/> documentation.</para> /// </summary> /// <param name="options">options for the icon binding.</param> [JsMethod(Name = "setIconOptions")] public void SetIconOptions(object options) { throw new NotImplementedException(); } /// <summary> /// <para>The path to the property which holds the information that should be /// shown as an icon. This is only needed if objects are stored in the model /// and if the icon should be shown.</para> /// </summary> /// <param name="path">path to the property.</param> [JsMethod(Name = "setIconPath")] public void SetIconPath(string path) { throw new NotImplementedException(); } /// <summary> /// <para>A map containing the options for the label binding. The possible keys /// can be found in the <see cref="qx.data.SingleValueBinding"/> documentation.</para> /// </summary> /// <param name="options">options for the label binding.</param> [JsMethod(Name = "setLabelOptions")] public void SetLabelOptions(object options) { throw new NotImplementedException(); } /// <summary> /// <para>The path to the property which holds the information that should be /// shown as a label. This is only needed if objects are stored in the model.</para> /// </summary> /// <param name="path">path to the property.</param> [JsMethod(Name = "setLabelPath")] public void SetLabelPath(string path) { throw new NotImplementedException(); } #endregion Methods } }
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;

namespace nzy3D.Maths
{
    /// <summary>
    /// Static formatting and numeric helper routines (number/date to string,
    /// absolute values, sums, and regularly spaced value grids).
    /// </summary>
    public class Utils
    {
        /// <summary>
        /// Convert a number into a string.
        /// </summary>
        /// <param name="parseMode">Output format:
        /// C or c : Currency. <paramref name="precision"/> provides the number of decimal digits (after comma).
        /// D or d : Decimal (integer digits with optional negative sign). <paramref name="precision"/> provides the minimum number of digits (before comma, zeros will be added when required).
        /// E or e : Exponential notation. <paramref name="precision"/> provides the number of digits after comma.
        /// F or f : Integral and decimal digits with optional negative sign. <paramref name="precision"/> provides the number of decimal digits (after comma).
        /// G or g : The most compact of either fixed-point or scientific notation. <paramref name="precision"/> provides the number of significant digits.
        /// N or n : Integral and decimal digits, group separators, and a decimal separator with optional negative sign. <paramref name="precision"/> provides the number of decimal digits (after comma).
        /// P or p : Number multiplied by 100 and displayed with a percent symbol. <paramref name="precision"/> provides the number of decimal digits (after comma).
        /// </param>
        /// <param name="num">Number to convert to string</param>
        /// <param name="precision">Number of digits (meaning depends on <paramref name="parseMode"/> value)</param>
        /// <returns>The formatted string (uses the current culture).</returns>
        public static string num2str(char parseMode, double num, int precision)
        {
            // Builds a standard .NET numeric format specifier such as "g3" or "F2".
            return string.Format("{0:" + parseMode + precision + "}", num);
        }

        /// <summary>
        /// Same as other <see cref="Utils.num2str"/> but without precision.
        /// </summary>
        /// <param name="parseMode">Standard numeric format character (see the 3-argument overload).</param>
        /// <param name="num">Number to convert to string.</param>
        public static string num2str(char parseMode, double num)
        {
            return string.Format("{0:" + parseMode + "}", num);
        }

        /// <summary>
        /// Same as other <see cref="Utils.num2str"/> but without parseMode (g by default).
        /// </summary>
        /// <param name="num">Number to convert to string.</param>
        /// <param name="precision">Number of significant digits.</param>
        public static string num2str(double num, int precision)
        {
            return num2str('g', num, precision);
        }

        /// <summary>
        /// Same as other <see cref="Utils.num2str"/> but without parseMode (g by default) nor precision.
        /// </summary>
        /// <param name="num">Number to convert to string.</param>
        public static string num2str(double num)
        {
            return num2str('g', num);
        }

        /// <summary>
        /// Formats a date using the given .NET date/time format string.
        /// </summary>
        public static string dat2str(System.DateTime m_date, string format)
        {
            return m_date.ToString(format);
        }

        /// <summary>
        /// Formats a date using the default "dd/MM/yyyy HH:mm:ss" format.
        /// </summary>
        public static string dat2str(System.DateTime m_date)
        {
            return dat2str(m_date, "dd/MM/yyyy HH:mm:ss");
        }

        /// <summary>
        /// Converts a date to its tick count (inverse of <see cref="num2date"/>).
        /// </summary>
        public static long dat2num(System.DateTime m_date)
        {
            return m_date.Ticks;
        }

        /// <summary>
        /// Builds a date from a tick count (inverse of <see cref="dat2num"/>).
        /// </summary>
        public static System.DateTime num2date(long m_ticks)
        {
            return new System.DateTime(m_ticks);
        }

        /// <summary>
        /// Returns a string made of <paramref name="length"/> space characters.
        /// A non-positive length yields the empty string.
        /// </summary>
        public static string blanks(int length)
        {
            // new string(char, count) is O(n); the previous concatenation loop was O(n^2).
            return length > 0 ? new string(' ', length) : string.Empty;
        }

        /// <summary>
        /// Return the absolute values of an array of doubles
        /// </summary>
        /// <remarks>Current array is not modified</remarks>
        public static double[] abs(double[] values)
        {
            double[] output = new double[values.Length];
            for (int i = 0; i < values.Length; i++)
            {
                output[i] = Math.Abs(values[i]);
            }
            return output;
        }

        /// <summary>
        /// Computes the sum of an array of doubles.
        /// NaN values are ignored during the computation.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown when <paramref name="values"/> is empty.</exception>
        public static double sum(double[] values)
        {
            if (values.Length == 0)
            {
                throw new ArgumentException("Input array must have a length greater than 0", "values");
            }
            double total = 0;
            for (int i = 0; i < values.Length; i++)
            {
                if (!double.IsNaN(values[i]))
                {
                    total += values[i];
                }
            }
            return total;
        }

        /// <summary>
        /// Computes the sum of an array of integers.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown when <paramref name="values"/> is empty.</exception>
        public static int sum(int[] values)
        {
            if (values.Length == 0)
            {
                throw new ArgumentException("Input array must have a length greater than 0", "values");
            }
            int total = 0;
            for (int i = 0; i < values.Length; i++)
            {
                total += values[i];
            }
            return total;
        }

        /// <summary>
        /// Generate a vector of doubles containing regular increasing values from min to max, with
        /// nstep steps (including min and max value).
        /// </summary>
        /// <param name="min">Min value</param>
        /// <param name="max">Max value</param>
        /// <param name="nstep">Number of steps (including min and max values)</param>
        /// <returns>Array of nstep evenly spaced values.</returns>
        /// <remarks>Algorithm ensures first and last values of array are equal to min and max value without any rounding error.</remarks>
        public static double[] vector(double min, double max, int nstep)
        {
            if (nstep <= 1)
            {
                throw new ArgumentException("Number of step must be at least 2", "nstep");
            }
            double dstep = (max - min) / (nstep - 1);
            double[] grid = new double[nstep];
            for (int i = 0; i < nstep - 1; i++)
            {
                grid[i] = min + i * dstep;
            }
            grid[nstep - 1] = max; // Force max value to avoid rounding errors
            return grid;
        }

        /// <summary>
        /// Generate a vector of doubles containing regular increasing values from min to max, with an offset of 1.
        /// </summary>
        /// <param name="min">Min value</param>
        /// <param name="max">Max value</param>
        /// <remarks>Algorithm ensures first and last values of array are equal to min and max value without any rounding error.</remarks>
        public static double[] vector(double min, double max)
        {
            return vector(min, max, Convert.ToInt32(Math.Abs(max - min) + 1));
        }

        /// <summary>
        /// Generate a vector of integers containing regular increasing values from min to max, with
        /// nstep steps (including min and max value).
        /// </summary>
        /// <param name="min">Min value</param>
        /// <param name="max">Max value</param>
        /// <param name="nstep">Number of steps (including min and max values)</param>
        /// <remarks>Integer division of the step may lose precision; first and last entries are still forced to min and max.</remarks>
        public static int[] vector(int min, int max, int nstep)
        {
            if (nstep <= 1)
            {
                throw new ArgumentException("Number of step must be at least 2", "nstep");
            }
            int dstep = (max - min) / (nstep - 1);
            int[] grid = new int[nstep];
            for (int i = 0; i < nstep - 1; i++)
            {
                grid[i] = min + i * dstep;
            }
            grid[nstep - 1] = max; // Force max value to avoid rounding errors
            return grid;
        }

        /// <summary>
        /// Generate a vector of integers containing regular increasing values from min to max, with an offset of 1.
        /// </summary>
        /// <param name="min">Min value</param>
        /// <param name="max">Max value</param>
        public static int[] vector(int min, int max)
        {
            return vector(min, max, Math.Abs(max - min) + 1);
        }

        /// <summary>
        /// Returns the earliest date in the array.
        /// </summary>
        /// <remarks>
        /// Previously this was a stub that always returned <see cref="DateTime.MinValue"/>
        /// regardless of input. An empty (or null) array still yields DateTime.MinValue,
        /// preserving the old behavior for that case.
        /// </remarks>
        public static System.DateTime min(System.DateTime[] dates)
        {
            if (dates == null || dates.Length == 0)
            {
                return DateTime.MinValue;
            }
            System.DateTime result = dates[0];
            for (int i = 1; i < dates.Length; i++)
            {
                if (dates[i] < result)
                {
                    result = dates[i];
                }
            }
            return result;
        }

        /// <summary>
        /// Returns the latest date in the array.
        /// </summary>
        /// <remarks>
        /// Previously this was a stub that always returned <see cref="DateTime.MaxValue"/>
        /// regardless of input. An empty (or null) array still yields DateTime.MaxValue,
        /// preserving the old behavior for that case.
        /// </remarks>
        public static System.DateTime max(System.DateTime[] dates)
        {
            if (dates == null || dates.Length == 0)
            {
                return DateTime.MaxValue;
            }
            System.DateTime result = dates[0];
            for (int i = 1; i < dates.Length; i++)
            {
                if (dates[i] > result)
                {
                    result = dates[i];
                }
            }
            return result;
        }
    }
}

//=======================================================
//Service provided by Telerik (www.telerik.com)
//Conversion powered by NRefactory.
//Twitter: @telerik
//Facebook: facebook.com/telerik
//=======================================================
/* * Farseer Physics Engine based on Box2D.XNA port: * Copyright (c) 2010 Ian Qvist * * Box2D.XNA port of Box2D: * Copyright (c) 2009 Brandon Furtwangler, Nathan Furtwangler * * Original source Box2D: * Copyright (c) 2006-2009 Erin Catto http://www.gphysics.com * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. 
*/

using System;
using System.Diagnostics;
using FarseerPhysics.Collision;
using FarseerPhysics.Collision.Shapes;
using FarseerPhysics.Common;
using Microsoft.Xna.Framework;

namespace FarseerPhysics.Dynamics.Contacts
{
    /// <summary>
    /// Per-contact-point solver state: accumulated impulses, effective masses,
    /// restitution bias, and the contact point offsets from each body's center (rA/rB).
    /// </summary>
    public sealed class ContactConstraintPoint
    {
        public Vector2 LocalPoint;
        public float NormalImpulse;
        public float NormalMass;
        public float TangentImpulse;
        public float TangentMass;
        public float VelocityBias;
        public Vector2 rA;
        public Vector2 rB;
    }

    /// <summary>
    /// Solver state for one contact (up to two points), built from a Manifold:
    /// bodies, combined friction/restitution, normal, and the 2x2 block-solver
    /// matrices K and NormalMass used when both points are active.
    /// </summary>
    public sealed class ContactConstraint
    {
        public Body BodyA;
        public Body BodyB;
        public float Friction;
        public Mat22 K;
        public Vector2 LocalNormal;
        public Vector2 LocalPoint;
        public Manifold Manifold;
        public Vector2 Normal;
        public Mat22 NormalMass;
        public int PointCount;
        public ContactConstraintPoint[] Points = new ContactConstraintPoint[Settings.MaxPolygonVertices];
        public float RadiusA;
        public float RadiusB;
        public float Restitution;
        public ManifoldType Type;

        public ContactConstraint()
        {
            for (int i = 0; i < Settings.MaxManifoldPoints; i++)
            {
                Points[i] = new ContactConstraintPoint();
            }
        }
    }

    /// <summary>
    /// Sequential-impulse contact solver (Box2D port). Call sequence per step:
    /// Reset, InitializeVelocityConstraints, WarmStart, SolveVelocityConstraints
    /// (iterated), StoreImpulses, then SolvePositionConstraints (iterated).
    /// </summary>
    public class ContactSolver
    {
        public ContactConstraint[] Constraints;
        private int _constraintCount; // collection can be bigger.
        private Contact[] _contacts;

        /// <summary>
        /// Rebinds the solver to a new batch of contacts and fills in the
        /// position-independent parts of each constraint. When warm-starting,
        /// accumulated impulses from the previous step are carried over scaled
        /// by impulseRatio; otherwise they start at zero.
        /// </summary>
        public void Reset(Contact[] contacts, int contactCount, float impulseRatio, bool warmstarting)
        {
            _contacts = contacts;
            _constraintCount = contactCount;

            // grow the array
            if (Constraints == null || Constraints.Length < _constraintCount)
            {
                // Doubling avoids reallocating on every small growth.
                Constraints = new ContactConstraint[_constraintCount*2];
                for (int i = 0; i < Constraints.Length; i++)
                {
                    Constraints[i] = new ContactConstraint();
                }
            }

            // Initialize position independent portions of the constraints.
            for (int i = 0; i < _constraintCount; ++i)
            {
                Contact contact = contacts[i];

                Fixture fixtureA = contact.FixtureA;
                Fixture fixtureB = contact.FixtureB;
                Shape shapeA = fixtureA.Shape;
                Shape shapeB = fixtureB.Shape;
                float radiusA = shapeA.Radius;
                float radiusB = shapeB.Radius;
                Body bodyA = fixtureA.Body;
                Body bodyB = fixtureB.Body;
                Manifold manifold = contact.Manifold;

                Debug.Assert(manifold.PointCount > 0);

                ContactConstraint cc = Constraints[i];
                cc.Friction = Settings.MixFriction(fixtureA.Friction, fixtureB.Friction);
                cc.Restitution = Settings.MixRestitution(fixtureA.Restitution, fixtureB.Restitution);
                cc.BodyA = bodyA;
                cc.BodyB = bodyB;
                cc.Manifold = manifold;
                cc.Normal = Vector2.Zero;
                cc.PointCount = manifold.PointCount;
                cc.LocalNormal = manifold.LocalNormal;
                cc.LocalPoint = manifold.LocalPoint;
                cc.RadiusA = radiusA;
                cc.RadiusB = radiusB;
                cc.Type = manifold.Type;

                for (int j = 0; j < cc.PointCount; ++j)
                {
                    ManifoldPoint cp = manifold.Points[j];
                    ContactConstraintPoint ccp = cc.Points[j];

                    if (warmstarting)
                    {
                        ccp.NormalImpulse = impulseRatio*cp.NormalImpulse;
                        ccp.TangentImpulse = impulseRatio*cp.TangentImpulse;
                    }
                    else
                    {
                        ccp.NormalImpulse = 0.0f;
                        ccp.TangentImpulse = 0.0f;
                    }

                    ccp.LocalPoint = cp.LocalPoint;
                    ccp.rA = Vector2.Zero;
                    ccp.rB = Vector2.Zero;
                    ccp.NormalMass = 0.0f;
                    ccp.TangentMass = 0.0f;
                    ccp.VelocityBias = 0.0f;
                }

                cc.K.SetZero();
                cc.NormalMass.SetZero();
            }
        }

        /// <summary>
        /// Computes the velocity-dependent solver data: world-space contact
        /// points/normal, effective normal and tangent masses, and the
        /// restitution velocity bias. For two-point contacts it also builds the
        /// 2x2 block-solver matrices (K and its inverse, NormalMass).
        /// </summary>
        public void InitializeVelocityConstraints()
        {
            for (int i = 0; i < _constraintCount; ++i)
            {
                ContactConstraint cc = Constraints[i];

                float radiusA = cc.RadiusA;
                float radiusB = cc.RadiusB;
                Body bodyA = cc.BodyA;
                Body bodyB = cc.BodyB;
                Manifold manifold = cc.Manifold;

                Vector2 vA = bodyA.LinearVelocity;
                Vector2 vB = bodyB.LinearVelocity;
                float wA = bodyA.AngularVelocity;
                float wB = bodyB.AngularVelocity;

                Debug.Assert(manifold.PointCount > 0);

                FixedArray2<Vector2> points;
                Collision.Collision.GetWorldManifold(ref manifold, ref bodyA.Xf, radiusA, ref bodyB.Xf, radiusB,
                                                     out cc.Normal, out points);

                // Tangent is the normal rotated -90 degrees.
                Vector2 tangent = new Vector2(cc.Normal.Y, -cc.Normal.X);

                for (int j = 0; j < cc.PointCount; ++j)
                {
                    ContactConstraintPoint ccp = cc.Points[j];

                    ccp.rA = points[j] - bodyA.Sweep.C;
                    ccp.rB = points[j] - bodyB.Sweep.C;

                    // rnA/rnB are the 2D cross products r x n.
                    float rnA = ccp.rA.X*cc.Normal.Y - ccp.rA.Y*cc.Normal.X;
                    float rnB = ccp.rB.X*cc.Normal.Y - ccp.rB.Y*cc.Normal.X;
                    rnA *= rnA;
                    rnB *= rnB;

                    float kNormal = bodyA.InvMass + bodyB.InvMass + bodyA.InvI*rnA + bodyB.InvI*rnB;

                    Debug.Assert(kNormal > Settings.Epsilon);
                    ccp.NormalMass = 1.0f/kNormal;

                    float rtA = ccp.rA.X*tangent.Y - ccp.rA.Y*tangent.X;
                    float rtB = ccp.rB.X*tangent.Y - ccp.rB.Y*tangent.X;
                    rtA *= rtA;
                    rtB *= rtB;

                    float kTangent = bodyA.InvMass + bodyB.InvMass + bodyA.InvI*rtA + bodyB.InvI*rtB;

                    Debug.Assert(kTangent > Settings.Epsilon);
                    ccp.TangentMass = 1.0f/kTangent;

                    // Setup a velocity bias for restitution.
                    ccp.VelocityBias = 0.0f;
                    float vRel = cc.Normal.X*(vB.X + -wB*ccp.rB.Y - vA.X - -wA*ccp.rA.Y) +
                                 cc.Normal.Y*(vB.Y + wB*ccp.rB.X - vA.Y - wA*ccp.rA.X);
                    if (vRel < -Settings.VelocityThreshold)
                    {
                        ccp.VelocityBias = -cc.Restitution*vRel;
                    }
                }

                // If we have two points, then prepare the block solver.
                if (cc.PointCount == 2)
                {
                    ContactConstraintPoint ccp1 = cc.Points[0];
                    ContactConstraintPoint ccp2 = cc.Points[1];

                    float invMassA = bodyA.InvMass;
                    float invIA = bodyA.InvI;
                    float invMassB = bodyB.InvMass;
                    float invIB = bodyB.InvI;

                    float rn1A = ccp1.rA.X*cc.Normal.Y - ccp1.rA.Y*cc.Normal.X;
                    float rn1B = ccp1.rB.X*cc.Normal.Y - ccp1.rB.Y*cc.Normal.X;
                    float rn2A = ccp2.rA.X*cc.Normal.Y - ccp2.rA.Y*cc.Normal.X;
                    float rn2B = ccp2.rB.X*cc.Normal.Y - ccp2.rB.Y*cc.Normal.X;

                    float k11 = invMassA + invMassB + invIA*rn1A*rn1A + invIB*rn1B*rn1B;
                    float k22 = invMassA + invMassB + invIA*rn2A*rn2A + invIB*rn2B*rn2B;
                    float k12 = invMassA + invMassB + invIA*rn1A*rn2A + invIB*rn1B*rn2B;

                    // Ensure a reasonable condition number.
                    const float k_maxConditionNumber = 100.0f;
                    if (k11*k11 < k_maxConditionNumber*(k11*k22 - k12*k12))
                    {
                        // K is safe to invert.
                        cc.K.Col1.X = k11;
                        cc.K.Col1.Y = k12;
                        cc.K.Col2.X = k12;
                        cc.K.Col2.Y = k22;

                        // Invert K manually (2x2 inverse).
                        float a = cc.K.Col1.X, b = cc.K.Col2.X, c = cc.K.Col1.Y, d = cc.K.Col2.Y;
                        float det = a*d - b*c;
                        if (det != 0.0f)
                        {
                            det = 1.0f/det;
                        }

                        cc.NormalMass.Col1.X = det*d;
                        cc.NormalMass.Col1.Y = -det*c;
                        cc.NormalMass.Col2.X = -det*b;
                        cc.NormalMass.Col2.Y = det*a;
                    }
                    else
                    {
                        // The constraints are redundant, just use one.
                        // TODO_ERIN use deepest?
                        cc.PointCount = 1;
                    }
                }
            }
        }

        /// <summary>
        /// Applies the (scaled) impulses accumulated in previous steps to the
        /// body velocities so the iterative solver starts near last step's solution.
        /// </summary>
        public void WarmStart()
        {
            // Warm start.
            for (int i = 0; i < _constraintCount; ++i)
            {
                ContactConstraint c = Constraints[i];

                float tangentx = c.Normal.Y;
                float tangenty = -c.Normal.X;

                for (int j = 0; j < c.PointCount; ++j)
                {
                    ContactConstraintPoint ccp = c.Points[j];

                    // P = normalImpulse * n + tangentImpulse * t
                    float px = ccp.NormalImpulse*c.Normal.X + ccp.TangentImpulse*tangentx;
                    float py = ccp.NormalImpulse*c.Normal.Y + ccp.TangentImpulse*tangenty;

                    c.BodyA.AngularVelocityInternal -= c.BodyA.InvI*(ccp.rA.X*py - ccp.rA.Y*px);
                    c.BodyA.LinearVelocityInternal.X -= c.BodyA.InvMass*px;
                    c.BodyA.LinearVelocityInternal.Y -= c.BodyA.InvMass*py;
                    c.BodyB.AngularVelocityInternal += c.BodyB.InvI*(ccp.rB.X*py - ccp.rB.Y*px);
                    c.BodyB.LinearVelocityInternal.X += c.BodyB.InvMass*px;
                    c.BodyB.LinearVelocityInternal.Y += c.BodyB.InvMass*py;
                }
            }
        }

        /// <summary>
        /// One velocity iteration: solves friction (tangent) impulses clamped by
        /// the friction cone, then normal impulses — directly for one-point
        /// contacts, via the 2x2 LCP block solver for two-point contacts.
        /// </summary>
        public void SolveVelocityConstraints()
        {
            for (int i = 0; i < _constraintCount; ++i)
            {
                ContactConstraint c = Constraints[i];
                float wA = c.BodyA.AngularVelocityInternal;
                float wB = c.BodyB.AngularVelocityInternal;

                float tangentx = c.Normal.Y;
                float tangenty = -c.Normal.X;
                float friction = c.Friction;

                Debug.Assert(c.PointCount == 1 || c.PointCount == 2);

                // Solve tangent constraints
                for (int j = 0; j < c.PointCount; ++j)
                {
                    ContactConstraintPoint ccp = c.Points[j];

                    // lambda = -tangentMass * (relative velocity . tangent)
                    float lambda = ccp.TangentMass*
                                   -((c.BodyB.LinearVelocityInternal.X + (-wB*ccp.rB.Y) -
                                      c.BodyA.LinearVelocityInternal.X - (-wA*ccp.rA.Y))*tangentx +
                                     (c.BodyB.LinearVelocityInternal.Y + (wB*ccp.rB.X) -
                                      c.BodyA.LinearVelocityInternal.Y - (wA*ccp.rA.X))*tangenty);

                    // MathUtils.Clamp the accumulated force
                    float maxFriction = friction*ccp.NormalImpulse;
                    float newImpulse = Math.Max(-maxFriction, Math.Min(ccp.TangentImpulse + lambda, maxFriction));
                    lambda = newImpulse - ccp.TangentImpulse;

                    // Apply contact impulse
                    float px = lambda*tangentx;
                    float py = lambda*tangenty;

                    c.BodyA.LinearVelocityInternal.X -= c.BodyA.InvMass*px;
                    c.BodyA.LinearVelocityInternal.Y -= c.BodyA.InvMass*py;
                    wA -= c.BodyA.InvI*(ccp.rA.X*py - ccp.rA.Y*px);

                    c.BodyB.LinearVelocityInternal.X += c.BodyB.InvMass*px;
                    c.BodyB.LinearVelocityInternal.Y += c.BodyB.InvMass*py;
                    wB += c.BodyB.InvI*(ccp.rB.X*py - ccp.rB.Y*px);

                    ccp.TangentImpulse = newImpulse;
                }

                // Solve normal constraints
                if (c.PointCount == 1)
                {
                    ContactConstraintPoint ccp = c.Points[0];

                    // Relative velocity at contact
                    // Compute normal impulse
                    float lambda = -ccp.NormalMass*
                                   ((c.BodyB.LinearVelocityInternal.X + (-wB*ccp.rB.Y) -
                                     c.BodyA.LinearVelocityInternal.X - (-wA*ccp.rA.Y))*c.Normal.X +
                                    (c.BodyB.LinearVelocityInternal.Y + (wB*ccp.rB.X) -
                                     c.BodyA.LinearVelocityInternal.Y - (wA*ccp.rA.X))*c.Normal.Y - ccp.VelocityBias);

                    // Clamp the accumulated impulse
                    float newImpulse = Math.Max(ccp.NormalImpulse + lambda, 0.0f);
                    lambda = newImpulse - ccp.NormalImpulse;

                    // Apply contact impulse
                    float px = lambda*c.Normal.X;
                    float py = lambda*c.Normal.Y;

                    c.BodyA.LinearVelocityInternal.X -= c.BodyA.InvMass*px;
                    c.BodyA.LinearVelocityInternal.Y -= c.BodyA.InvMass*py;
                    wA -= c.BodyA.InvI*(ccp.rA.X*py - ccp.rA.Y*px);

                    c.BodyB.LinearVelocityInternal.X += c.BodyB.InvMass*px;
                    c.BodyB.LinearVelocityInternal.Y += c.BodyB.InvMass*py;
                    wB += c.BodyB.InvI*(ccp.rB.X*py - ccp.rB.Y*px);

                    ccp.NormalImpulse = newImpulse;
                }
                else
                {
                    // Block solver developed in collaboration with Dirk Gregorius (back in 01/07 on Box2D_Lite).
                    // Build the mini LCP for this contact patch
                    //
                    // vn = A * x + b, vn >= 0, x >= 0 and vn_i * x_i = 0 with i = 1..2
                    //
                    // A = J * W * JT and J = ( -n, -r1 x n, n, r2 x n )
                    // b = vn_0 - velocityBias
                    //
                    // The system is solved using the "Total enumeration method" (s. Murty). The complementary constraint vn_i * x_i
                    // implies that we must have in any solution either vn_i = 0 or x_i = 0. So for the 2D contact problem the cases
                    // vn1 = 0 and vn2 = 0, x1 = 0 and x2 = 0, x1 = 0 and vn2 = 0, x2 = 0 and vn1 = 0 need to be tested. The first valid
                    // solution that satisfies the problem is chosen.
                    //
                    // In order to account of the accumulated impulse 'a' (because of the iterative nature of the solver which only requires
                    // that the accumulated impulse is clamped and not the incremental impulse) we change the impulse variable (x_i).
                    //
                    // Substitute:
                    //
                    // x = x' - a
                    //
                    // Plug into above equation:
                    //
                    // vn = A * x + b
                    //    = A * (x' - a) + b
                    //    = A * x' + b - A * a
                    //    = A * x' + b'
                    // b' = b - A * a;

                    ContactConstraintPoint cp1 = c.Points[0];
                    ContactConstraintPoint cp2 = c.Points[1];

                    float ax = cp1.NormalImpulse;
                    float ay = cp2.NormalImpulse;
                    Debug.Assert(ax >= 0.0f && ay >= 0.0f);

                    // Relative velocity at contact
                    // Compute normal velocity
                    float vn1 = (c.BodyB.LinearVelocityInternal.X + (-wB*cp1.rB.Y) -
                                 c.BodyA.LinearVelocityInternal.X - (-wA*cp1.rA.Y))*c.Normal.X +
                                (c.BodyB.LinearVelocityInternal.Y + (wB*cp1.rB.X) -
                                 c.BodyA.LinearVelocityInternal.Y - (wA*cp1.rA.X))*c.Normal.Y;
                    float vn2 = (c.BodyB.LinearVelocityInternal.X + (-wB*cp2.rB.Y) -
                                 c.BodyA.LinearVelocityInternal.X - (-wA*cp2.rA.Y))*c.Normal.X +
                                (c.BodyB.LinearVelocityInternal.Y + (wB*cp2.rB.X) -
                                 c.BodyA.LinearVelocityInternal.Y - (wA*cp2.rA.X))*c.Normal.Y;

                    float bx = vn1 - cp1.VelocityBias - (c.K.Col1.X*ax + c.K.Col2.X*ay);
                    float by = vn2 - cp2.VelocityBias - (c.K.Col1.Y*ax + c.K.Col2.Y*ay);

                    float xx = -(c.NormalMass.Col1.X*bx + c.NormalMass.Col2.X*by);
                    float xy = -(c.NormalMass.Col1.Y*bx + c.NormalMass.Col2.Y*by);

                    while (true)
                    {
                        //
                        // Case 1: vn = 0
                        //
                        // 0 = A * x' + b'
                        //
                        // Solve for x':
                        //
                        // x' = - inv(A) * b'
                        //
                        if (xx >= 0.0f && xy >= 0.0f)
                        {
                            // Resubstitute for the incremental impulse
                            float dx = xx - ax;
                            float dy = xy - ay;

                            // Apply incremental impulse
                            float p1x = dx*c.Normal.X;
                            float p1y = dx*c.Normal.Y;
                            float p2x = dy*c.Normal.X;
                            float p2y = dy*c.Normal.Y;

                            float p12x = p1x + p2x;
                            float p12y = p1y + p2y;

                            c.BodyA.LinearVelocityInternal.X -= c.BodyA.InvMass*p12x;
                            c.BodyA.LinearVelocityInternal.Y -= c.BodyA.InvMass*p12y;
                            wA -= c.BodyA.InvI*((cp1.rA.X*p1y - cp1.rA.Y*p1x) + (cp2.rA.X*p2y - cp2.rA.Y*p2x));

                            c.BodyB.LinearVelocityInternal.X += c.BodyB.InvMass*p12x;
                            c.BodyB.LinearVelocityInternal.Y += c.BodyB.InvMass*p12y;
                            wB += c.BodyB.InvI*((cp1.rB.X*p1y - cp1.rB.Y*p1x) + (cp2.rB.X*p2y - cp2.rB.Y*p2x));

                            // Accumulate
                            cp1.NormalImpulse = xx;
                            cp2.NormalImpulse = xy;

#if B2_DEBUG_SOLVER
                            float k_errorTol = 1e-3f;

                            // Postconditions
                            dv1 = vB + MathUtils.Cross(wB, cp1.rB) - vA - MathUtils.Cross(wA, cp1.rA);
                            dv2 = vB + MathUtils.Cross(wB, cp2.rB) - vA - MathUtils.Cross(wA, cp2.rA);

                            // Compute normal velocity
                            vn1 = Vector2.Dot(dv1, normal);
                            vn2 = Vector2.Dot(dv2, normal);

                            Debug.Assert(MathUtils.Abs(vn1 - cp1.velocityBias) < k_errorTol);
                            Debug.Assert(MathUtils.Abs(vn2 - cp2.velocityBias) < k_errorTol);
#endif
                            break;
                        }

                        //
                        // Case 2: vn1 = 0 and x2 = 0
                        //
                        //   0 = a11 * x1' + a12 * 0 + b1'
                        // vn2 = a21 * x1' + a22 * 0 + b2'
                        //
                        xx = -cp1.NormalMass*bx;
                        xy = 0.0f;
                        vn1 = 0.0f;
                        vn2 = c.K.Col1.Y*xx + by;

                        if (xx >= 0.0f && vn2 >= 0.0f)
                        {
                            // Resubstitute for the incremental impulse
                            float dx = xx - ax;
                            float dy = xy - ay;

                            // Apply incremental impulse
                            float p1x = dx*c.Normal.X;
                            float p1y = dx*c.Normal.Y;
                            float p2x = dy*c.Normal.X;
                            float p2y = dy*c.Normal.Y;

                            float p12x = p1x + p2x;
                            float p12y = p1y + p2y;

                            c.BodyA.LinearVelocityInternal.X -= c.BodyA.InvMass*p12x;
                            c.BodyA.LinearVelocityInternal.Y -= c.BodyA.InvMass*p12y;
                            wA -= c.BodyA.InvI*((cp1.rA.X*p1y - cp1.rA.Y*p1x) + (cp2.rA.X*p2y - cp2.rA.Y*p2x));

                            c.BodyB.LinearVelocityInternal.X += c.BodyB.InvMass*p12x;
                            c.BodyB.LinearVelocityInternal.Y += c.BodyB.InvMass*p12y;
                            wB += c.BodyB.InvI*((cp1.rB.X*p1y - cp1.rB.Y*p1x) + (cp2.rB.X*p2y - cp2.rB.Y*p2x));

                            // Accumulate
                            cp1.NormalImpulse = xx;
                            cp2.NormalImpulse = xy;

#if B2_DEBUG_SOLVER
                            // Postconditions
                            dv1 = vB + MathUtils.Cross(wB, cp1.rB) - vA - MathUtils.Cross(wA, cp1.rA);

                            // Compute normal velocity
                            vn1 = Vector2.Dot(dv1, normal);

                            Debug.Assert(MathUtils.Abs(vn1 - cp1.velocityBias) < k_errorTol);
#endif
                            break;
                        }

                        //
                        // Case 3: vn2 = 0 and x1 = 0
                        //
                        // vn1 = a11 * 0 + a12 * x2' + b1'
                        //   0 = a21 * 0 + a22 * x2' + b2'
                        //
                        xx = 0.0f;
                        xy = -cp2.NormalMass*by;
                        vn1 = c.K.Col2.X*xy + bx;
                        vn2 = 0.0f;

                        if (xy >= 0.0f && vn1 >= 0.0f)
                        {
                            // Resubstitute for the incremental impulse
                            float dx = xx - ax;
                            float dy = xy - ay;

                            // Apply incremental impulse
                            float p1x = dx*c.Normal.X;
                            float p1y = dx*c.Normal.Y;
                            float p2x = dy*c.Normal.X;
                            float p2y = dy*c.Normal.Y;

                            float p12x = p1x + p2x;
                            float p12y = p1y + p2y;

                            c.BodyA.LinearVelocityInternal.X -= c.BodyA.InvMass*p12x;
                            c.BodyA.LinearVelocityInternal.Y -= c.BodyA.InvMass*p12y;
                            wA -= c.BodyA.InvI*((cp1.rA.X*p1y - cp1.rA.Y*p1x) + (cp2.rA.X*p2y - cp2.rA.Y*p2x));

                            c.BodyB.LinearVelocityInternal.X += c.BodyB.InvMass*p12x;
                            c.BodyB.LinearVelocityInternal.Y += c.BodyB.InvMass*p12y;
                            wB += c.BodyB.InvI*((cp1.rB.X*p1y - cp1.rB.Y*p1x) + (cp2.rB.X*p2y - cp2.rB.Y*p2x));

                            // Accumulate
                            cp1.NormalImpulse = xx;
                            cp2.NormalImpulse = xy;

#if B2_DEBUG_SOLVER
                            // Postconditions
                            dv2 = vB + MathUtils.Cross(wB, cp2.rB) - vA - MathUtils.Cross(wA, cp2.rA);

                            // Compute normal velocity
                            vn2 = Vector2.Dot(dv2, normal);

                            Debug.Assert(MathUtils.Abs(vn2 - cp2.velocityBias) < k_errorTol);
#endif
                            break;
                        }

                        //
                        // Case 4: x1 = 0 and x2 = 0
                        //
                        // vn1 = b1
                        // vn2 = b2;
                        xx = 0.0f;
                        xy = 0.0f;
                        vn1 = bx;
                        vn2 = by;

                        if (vn1 >= 0.0f && vn2 >= 0.0f)
                        {
                            // Resubstitute for the incremental impulse
                            float dx = xx - ax;
                            float dy = xy - ay;

                            // Apply incremental impulse
                            float p1x = dx*c.Normal.X;
                            float p1y = dx*c.Normal.Y;
                            float p2x = dy*c.Normal.X;
                            float p2y = dy*c.Normal.Y;

                            float p12x = p1x + p2x;
                            float p12y = p1y + p2y;

                            c.BodyA.LinearVelocityInternal.X -= c.BodyA.InvMass*p12x;
                            c.BodyA.LinearVelocityInternal.Y -= c.BodyA.InvMass*p12y;
                            wA -= c.BodyA.InvI*((cp1.rA.X*p1y - cp1.rA.Y*p1x) + (cp2.rA.X*p2y - cp2.rA.Y*p2x));

                            c.BodyB.LinearVelocityInternal.X += c.BodyB.InvMass*p12x;
                            c.BodyB.LinearVelocityInternal.Y += c.BodyB.InvMass*p12y;
                            wB += c.BodyB.InvI*((cp1.rB.X*p1y - cp1.rB.Y*p1x) + (cp2.rB.X*p2y - cp2.rB.Y*p2x));

                            // Accumulate
                            cp1.NormalImpulse = xx;
                            cp2.NormalImpulse = xy;

                            break;
                        }

                        // No solution, give up. This is hit sometimes, but it doesn't seem to matter.
                        break;
                    }
                }

                c.BodyA.AngularVelocityInternal = wA;
                c.BodyB.AngularVelocityInternal = wB;
            }
        }

        /// <summary>
        /// Copies the accumulated impulses back into the manifolds (both the
        /// constraint's copy and the contact's), so the next step can warm-start.
        /// </summary>
        public void StoreImpulses()
        {
            for (int i = 0; i < _constraintCount; ++i)
            {
                ContactConstraint c = Constraints[i];
                Manifold m = c.Manifold;

                for (int j = 0; j < c.PointCount; ++j)
                {
                    ManifoldPoint pj = m.Points[j];
                    ContactConstraintPoint cp = c.Points[j];

                    pj.NormalImpulse = cp.NormalImpulse;
                    pj.TangentImpulse = cp.TangentImpulse;

                    m.Points[j] = pj;
                }

                c.Manifold = m;
                _contacts[i].Manifold = m;
            }
        }

        /// <summary>
        /// One position iteration: pushes body sweeps apart along the contact
        /// normal to remove penetration (scaled by baumgarte, limited by
        /// MaxLinearCorrection). Returns true when the worst remaining
        /// separation is within tolerance.
        /// </summary>
        public bool SolvePositionConstraints(float baumgarte)
        {
            float minSeparation = 0.0f;

            for (int i = 0; i < _constraintCount; ++i)
            {
                ContactConstraint c = Constraints[i];

                Body bodyA = c.BodyA;
                Body bodyB = c.BodyB;

                // Mass*InvMass yields 0 for static bodies and 1 otherwise.
                float invMassA = bodyA.Mass*bodyA.InvMass;
                float invIA = bodyA.Mass*bodyA.InvI;
                float invMassB = bodyB.Mass*bodyB.InvMass;
                float invIB = bodyB.Mass*bodyB.InvI;

                // Solve normal constraints
                for (int j = 0; j < c.PointCount; ++j)
                {
                    Vector2 normal;
                    Vector2 point;
                    float separation;

                    Solve(c, j, out normal, out point, out separation);

                    float rax = point.X - bodyA.Sweep.C.X;
                    float ray = point.Y - bodyA.Sweep.C.Y;
                    float rbx = point.X - bodyB.Sweep.C.X;
                    float rby = point.Y - bodyB.Sweep.C.Y;

                    // Track max constraint error.
                    minSeparation = Math.Min(minSeparation, separation);

                    // Prevent large corrections and allow slop.
                    float C = Math.Max(-Settings.MaxLinearCorrection,
                                       Math.Min(baumgarte*(separation + Settings.LinearSlop), 0.0f));

                    // Compute the effective mass.
                    float rnA = rax*normal.Y - ray*normal.X;
                    float rnB = rbx*normal.Y - rby*normal.X;
                    float K = invMassA + invMassB + invIA*rnA*rnA + invIB*rnB*rnB;

                    // Compute normal impulse
                    float impulse = K > 0.0f ? -C/K : 0.0f;

                    float px = impulse*normal.X;
                    float py = impulse*normal.Y;

                    bodyA.Sweep.C.X -= invMassA*px;
                    bodyA.Sweep.C.Y -= invMassA*py;
                    bodyA.Sweep.A -= invIA*(rax*py - ray*px);

                    bodyB.Sweep.C.X += invMassB*px;
                    bodyB.Sweep.C.Y += invMassB*py;
                    bodyB.Sweep.A += invIB*(rbx*py - rby*px);

                    bodyA.SynchronizeTransform();
                    bodyB.SynchronizeTransform();
                }
            }

            // We can't expect minSpeparation >= -Settings.b2_linearSlop because we don't
            // push the separation above -Settings.b2_linearSlop.
            return minSeparation >= -1.5f*Settings.LinearSlop;
        }

        // Sequential position solver for position constraints.
        /// <summary>
        /// Time-of-impact variant of <see cref="SolvePositionConstraints"/>;
        /// same math, but each body's transform is synchronized immediately
        /// after its own sweep update.
        /// </summary>
        public bool SolvePositionConstraintsTOI(float baumgarte)
        {
            float minSeparation = 0.0f;

            for (int i = 0; i < _constraintCount; ++i)
            {
                ContactConstraint c = Constraints[i];
                Body bodyA = c.BodyA;
                Body bodyB = c.BodyB;

                float invMassA = bodyA.Mass*bodyA.InvMass;
                float invIA = bodyA.Mass*bodyA.InvI;
                float invMassB = bodyB.Mass*bodyB.InvMass;
                float invIB = bodyB.Mass*bodyB.InvI;

                // Solve normal constraints
                for (int j = 0; j < c.PointCount; ++j)
                {
                    Vector2 normal;
                    Vector2 point;
                    float separation;

                    //FPE 3 only: Used to be PositionSolverManifold
                    Solve(c, j, out normal, out point, out separation);

                    float rax = point.X - bodyA.Sweep.C.X;
                    float ray = point.Y - bodyA.Sweep.C.Y;
                    float rbx = point.X - bodyB.Sweep.C.X;
                    float rby = point.Y - bodyB.Sweep.C.Y;

                    // Track max constraint error.
                    minSeparation = Math.Min(minSeparation, separation);

                    // Prevent large corrections and allow slop.
                    float C = Math.Max(-Settings.MaxLinearCorrection,
                                       Math.Min(baumgarte*(separation + Settings.LinearSlop), 0.0f));

                    // Compute the effective mass.
                    float rnA = rax*normal.Y - ray*normal.X;
                    float rnB = rbx*normal.Y - rby*normal.X;
                    float K = invMassA + invMassB + invIA*rnA*rnA + invIB*rnB*rnB;

                    // Compute normal impulse
                    float impulse = K > 0.0f ? -C/K : 0.0f;

                    float px = impulse*normal.X;
                    float py = impulse*normal.Y;

                    bodyA.Sweep.C.X -= invMassA*px;
                    bodyA.Sweep.C.Y -= invMassA*py;
                    bodyA.Sweep.A -= invIA*(rax*py - ray*px);
                    bodyA.SynchronizeTransform();

                    bodyB.Sweep.C.X += invMassB*px;
                    bodyB.Sweep.C.Y += invMassB*py;
                    bodyB.Sweep.A += invIB*(rbx*py - rby*px);
                    bodyB.SynchronizeTransform();
                }
            }

            // We can't expect minSpeparation >= -b2_linearSlop because we don't
            // push the separation above -b2_linearSlop.
            return minSeparation >= -1.5f*Settings.LinearSlop;
        }

        /// <summary>
        /// Computes the world-space normal, contact point, and separation for
        /// the given constraint point, dispatching on the manifold type
        /// (circle-circle, face of A, face of B). The normal always points from A to B.
        /// </summary>
        private static void Solve(ContactConstraint cc, int index, out Vector2 normal, out Vector2 point,
                                  out float separation)
        {
            Debug.Assert(cc.PointCount > 0);

            normal = Vector2.Zero;

            switch (cc.Type)
            {
                case ManifoldType.Circles:
                    {
                        Vector2 pointA = cc.BodyA.GetWorldPoint(ref cc.LocalPoint);
                        Vector2 pointB = cc.BodyB.GetWorldPoint(ref cc.Points[0].LocalPoint);
                        float a = (pointA.X - pointB.X)*(pointA.X - pointB.X) +
                                  (pointA.Y - pointB.Y)*(pointA.Y - pointB.Y);
                        if (a > Settings.Epsilon*Settings.Epsilon)
                        {
                            // Normalize the center-to-center vector.
                            Vector2 normalTmp = pointB - pointA;
                            float factor = 1f/(float) Math.Sqrt(normalTmp.X*normalTmp.X + normalTmp.Y*normalTmp.Y);
                            normal.X = normalTmp.X*factor;
                            normal.Y = normalTmp.Y*factor;
                        }
                        else
                        {
                            // Centers coincide; pick an arbitrary axis.
                            normal.X = 1;
                            normal.Y = 0;
                        }

                        point = 0.5f*(pointA + pointB);
                        separation = (pointB.X - pointA.X)*normal.X + (pointB.Y - pointA.Y)*normal.Y - cc.RadiusA -
                                     cc.RadiusB;
                    }
                    break;

                case ManifoldType.FaceA:
                    {
                        normal = cc.BodyA.GetWorldVector(ref cc.LocalNormal);
                        Vector2 planePoint = cc.BodyA.GetWorldPoint(ref cc.LocalPoint);
                        Vector2 clipPoint = cc.BodyB.GetWorldPoint(ref cc.Points[index].LocalPoint);
                        separation = (clipPoint.X - planePoint.X)*normal.X + (clipPoint.Y - planePoint.Y)*normal.Y -
                                     cc.RadiusA - cc.RadiusB;
                        point = clipPoint;
                    }
                    break;

                case ManifoldType.FaceB:
                    {
                        normal = cc.BodyB.GetWorldVector(ref cc.LocalNormal);
                        Vector2 planePoint = cc.BodyB.GetWorldPoint(ref cc.LocalPoint);
                        Vector2 clipPoint = cc.BodyA.GetWorldPoint(ref cc.Points[index].LocalPoint);
                        separation = (clipPoint.X - planePoint.X)*normal.X + (clipPoint.Y - planePoint.Y)*normal.Y -
                                     cc.RadiusA - cc.RadiusB;
                        point = clipPoint;

                        // Ensure normal points from A to B
                        normal = -normal;
                    }
                    break;

                default:
                    point = Vector2.Zero;
                    separation = 0.0f;
                    break;
            }
        }
    }
}
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
using UnityEngine.Events;

namespace Stratus
{
  /// <summary>
  /// Non-generic base that hosts the enums shared by all state-driven toggles.
  /// </summary>
  public abstract class StratusStateDrivenToggle : StratusBehaviour
  {
    /// <summary>
    /// Defines how the object is toggled by global state changes
    /// </summary>
    public enum Validation
    {
      [Tooltip("The object is enabled during any of these states")]
      EnableOn,
      [Tooltip("The object is disabled during any of these states")]
      DisableOn
    }

    /// <summary>
    /// The extent to which this component is toggled by states
    /// </summary>
    public enum Extent
    {
      [Tooltip("A single state")]
      Single,
      [Tooltip("Multiple states")]
      Multiple
    }
  }

  /// <summary>
  /// Given a provided enum class used for defining exclusive global states,
  /// provides a component to handle propagating changes based on the given state
  /// </summary>
  /// <typeparam name="State">The enum type describing the exclusive global states</typeparam>
  public abstract class StratusStateDrivenToggle<State> : StratusStateDrivenToggle
    where State : struct, IConvertible
  {
    /// <summary>
    /// Callback for when the state has changed
    /// </summary>
    /// <param name="state">The new state</param>
    public delegate void OnStateChange(State state);

    //--------------------------------------------------------------------------------------------/
    // Fields
    //--------------------------------------------------------------------------------------------/
    [Header("States")]
    [Tooltip("Defines how this object is toggled")]
    public Validation validation = Validation.EnableOn;

    /// <summary>
    /// The states at which this object is active
    /// </summary>
    [Tooltip("The state at which this object is active")]
    public List<State> states = new List<State>();

    /// <summary>
    /// A delay between toggling this object on and off
    /// </summary>
    [Header("Toggle Response")]
    [Tooltip("A delay between toggling this object on and off")]
    public float delay = 0.0f;

    /// <summary>
    /// Any methods to invoke when enabled
    /// </summary>
    [Space]
    [Tooltip("Any methods to invoke when toggled")]
    public UnityEvent onEnabled = new UnityEvent();

    /// <summary>
    /// Any methods to invoke when disabled
    /// </summary>
    [Tooltip("Any methods to invoke when toggled")]
    public UnityEvent onDisabled = new UnityEvent();

    //--------------------------------------------------------------------------------------------/
    // Properties
    //--------------------------------------------------------------------------------------------/
    /// <summary>
    /// The current global state
    /// </summary>
    public static State currentState { get; private set; }

    /// <summary>
    /// A provided callback for when the state has changed
    /// </summary>
    public static OnStateChange onStateChange { get; set; }

    /// <summary>
    /// The list of all subscribed objects. When states change, these are notified.
    /// </summary>
    public static List<StratusStateDrivenToggle<State>> toggleables { get; private set; } = new List<StratusStateDrivenToggle<State>>();

    /// <summary>
    /// List of all handlers for listening to state changes
    /// </summary>
    private static List<StateHandler> handlers { get; set; } = new List<StateHandler>();

    /// <summary>
    /// Whether the initial state has been set (by the first call to Change)
    /// </summary>
    private static bool initialized { get; set; } = false;

    /// <summary>
    /// The previous state, recorded so that Revert can restore it
    /// </summary>
    private static State previousState;

    //--------------------------------------------------------------------------------------------/
    // Messages
    //--------------------------------------------------------------------------------------------/
    /// <summary>
    /// When first enabled, adds itself to the list of all objects to be toggled on/off on a
    /// state change, and immediately applies the current state if one has already been set.
    /// </summary>
    private void OnEnable()
    {
      toggleables.Add(this);
      if (initialized)
        Apply(currentState);
    }

    /// <summary>
    /// Unsubscribes this object from global state change notifications
    /// </summary>
    private void OnDestroy()
    {
      toggleables.Remove(this);
    }

    /// <summary>
    /// Editor reset: seeds the state list with the enum's default value
    /// </summary>
    private void Reset()
    {
      states.Add(default(State));
    }

    private void Awake()
    {
    }

    //--------------------------------------------------------------------------------------------/
    // Methods
    //--------------------------------------------------------------------------------------------/
    /// <summary>
    /// Changes the global state, notifying all subscribers
    /// </summary>
    /// <param name="nextState">The state to transition to</param>
    public static void Change(State nextState)
    {
      initialized = true;
      previousState = currentState;
      currentState = nextState;
      StratusDebug.Log($"'{previousState}' -> '{ currentState}'");
      // Snapshot both collections before iterating: Apply/Inform callbacks may
      // add or remove subscribers (e.g. StateHandler ctor/Shutdown mutate
      // 'handlers'), which would otherwise invalidate the live enumeration.
      foreach (var toggle in toggleables.ToArray())
        toggle.Apply(currentState);
      foreach (var handler in handlers.ToArray())
        handler.Inform(currentState);
      onStateChange?.Invoke(nextState);
    }

    /// <summary>
    /// Reverts to the previous global state, notifying all subscribers.
    /// Does nothing if no state has been set yet.
    /// </summary>
    public static void Revert()
    {
      if (!initialized)
        return;
      Change(previousState);
    }

    /// <summary>
    /// Toggles this object on or off: invokes the matching UnityEvent, then
    /// activates/deactivates the GameObject (optionally after the configured delay).
    /// </summary>
    /// <param name="toggled">Whether the object should be active</param>
    public void Toggle(bool toggled)
    {
      if (toggled)
        onEnabled?.Invoke();
      else
        onDisabled?.Invoke();

      if (delay > 0.0f)
        this.StartCoroutine(StratusRoutines.Call(() => { gameObject.SetActive(toggled); }, this.delay), "Delay");
      else
        gameObject.SetActive(toggled);
    }

    /// <summary>
    /// Applies the state change to the object
    /// </summary>
    /// <param name="nextState">The state being transitioned to</param>
    private void Apply(State nextState)
    {
      // Whether the next state is present among this object's configured states.
      bool isListed = false;
      foreach (var state in states)
      {
        if (Compare(state, nextState))
        {
          isListed = true;
          break;
        }
      }

      // EnableOn: active only while in one of the listed states.
      // DisableOn: active only while NOT in one of the listed states.
      // BUGFIX: the previous implementation initialized isActive to false and
      // the DisableOn branch only ever assigned false, so a DisableOn object
      // could never be enabled by any state change.
      bool isActive = (validation == Validation.EnableOn) ? isListed : !isListed;
      Toggle(isActive);
    }

    /// <summary>
    /// Custom comparator since this is a generic type
    /// </summary>
    /// <param name="x"></param>
    /// <param name="y"></param>
    /// <returns>True if both states are equal</returns>
    private static bool Compare(State x, State y)
    {
      return EqualityComparer<State>.Default.Equals(x, y);
    }

    //--------------------------------------------------------------------------------------------/
    // Declarations
    //--------------------------------------------------------------------------------------------/
    /// <summary>
    /// Informs this object to be notified of specific state changes
    /// </summary>
    public class StateHandler
    {
      public delegate void Callback();
      public delegate void ToggleCallback(bool isAtState);

      //------------------------------------------------------------------------/
      // Fields
      //------------------------------------------------------------------------/
      /// <summary>
      /// The class instance this handler belongs to (used as logging context)
      /// </summary>
      MonoBehaviour parent;
      /// <summary>
      /// The goal state for this handler
      /// </summary>
      State goal;
      /// <summary>
      /// Function called when entering this state
      /// </summary>
      Callback onEnterState;
      /// <summary>
      /// Function called when exiting this state
      /// </summary>
      Callback onExitState;
      /// <summary>
      /// Function called when the state has changed
      /// </summary>
      ToggleCallback onStateChange;
      /// <summary>
      /// Whether this handler is currently at the goal state
      /// </summary>
      bool isAtGoalState = false;
      /// <summary>
      /// Whether to print state changes
      /// </summary>
      public bool logging = false;

      /// <summary>
      /// Constructor. Registers this handler for state-change notifications.
      /// </summary>
      /// <param name="state">The desired goal state for this handler</param>
      /// <param name="parent">Owner component, used as logging context</param>
      /// <param name="log">Whether to print state changes</param>
      public StateHandler(State state, MonoBehaviour parent = null, bool log = false)
      {
        goal = state;
        this.parent = parent;
        logging = log;
        handlers.Add(this);
      }

      /// <summary>
      /// DTOR. Unsubscribes this from the list of objects to be notified of state changes.
      /// </summary>
      ~StateHandler()
      {
        Shutdown();
      }

      /// <summary>
      /// Sets callbacks for when the target goal state has been entered and exited
      /// </summary>
      /// <param name="onEnter">The function which will be invoked when the target goal state has been entered</param>
      /// <param name="onExit">The function which will be invoked when the target goal state has been exited</param>
      public void Set(Callback onEnter, Callback onExit)
      {
        onEnterState = onEnter;
        onExitState = onExit;
        if (initialized)
          Inform(currentState);
      }

      /// <summary>
      /// Sets a callback for a function that will receive a bool signaling whether the target goal state has been reached
      /// </summary>
      /// <param name="onStateChange">The callback to invoke on state changes</param>
      /// <param name="isCalledImmediately">Whether to invoke the callback immediately with the current state</param>
      public void Set(ToggleCallback onStateChange, bool isCalledImmediately = true)
      {
        this.onStateChange = onStateChange;
        if (isCalledImmediately && initialized)
          Inform(currentState);
      }

      /// <summary>
      /// Informs the handler of state changes
      /// </summary>
      /// <param name="state">The state that was just entered globally</param>
      internal void Inform(State state)
      {
        // Entering the goal state...
        if (Compare(state, goal) && !isAtGoalState)
        {
          if (logging)
            StratusDebug.Log("Now at goal state '" + goal.ToString() + "'", parent);
          isAtGoalState = true;
          // Note: onEnterState takes precedence; onStateChange only fires when
          // no enter-callback is registered.
          if (onEnterState != null)
            onEnterState();
          else if (onStateChange != null)
            onStateChange(isAtGoalState);
        }
        // Leaving the goal state...
        else if (!Compare(state, goal) && isAtGoalState)
        {
          isAtGoalState = false;
          if (onExitState != null)
          {
            if (logging)
              StratusDebug.Log("Now exiting state '" + state.ToString() + "'", parent);
            onExitState();
          }
          else if (onStateChange != null)
          {
            if (logging)
              StratusDebug.Log("Not at goal state '" + goal.ToString() + "', flipping state to " + isAtGoalState, parent);
            onStateChange(isAtGoalState);
          }
        }
      }

      /// <summary>
      /// Terminates this statehandler, unsubscribing it from notifications
      /// </summary>
      public void Shutdown()
      {
        handlers.Remove(this);
      }
    }
  }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;

namespace Nohros.Collections
{
  /// <summary>
  /// A recursive data storage class optimized for storing a hierarchical
  /// tree of simple <see cref="Values"/>.
  /// <remarks>
  /// This class specifies a recursive data storage class. The API is optimized
  /// for the common case, namely storing a hierarchical tree of simple values.
  /// Given a DictionaryValue root, you can easily do things like:
  /// <para>
  /// <code>
  /// root.SetString("global.pages.homepage", "http://sys.nohros.com");
  /// string homepage = "http://nohros.com"; // default/fallback value
  /// homepage = root.GetString("global.pages.homepage", out homepage);
  /// </code>
  /// </para>
  /// where "global" and "pages" are also DictionaryValues, and "homepage"
  /// is a string setting. If some elements of the path didn't exist yet,
  /// the SetString() method would create the missing elements and attach them
  /// to root before attaching the homepage value.
  /// </remarks>
  /// </summary>
  public class DictionaryValue<T>: Value, IEnumerable, IEnumerable<T>, IDictionaryValue
    where T: class, IValue
  {
    // Backing store; nested DictionaryValue<T> nodes form the tree.
    Dictionary<string, IValue> dictionary_;

    #region .ctor
    /// <summary>
    /// Initializes a new instance of the DictionaryValue class.
    /// </summary>
    public DictionaryValue() : base(ValueType.GenericDictionary) {
      dictionary_ = new Dictionary<string, IValue>();
    }
    #endregion

    /// <summary>
    /// Adds the specified key and value to the DictionaryValue.
    /// </summary>
    /// <param name="path">The key of the element to add.</param>
    /// <param name="value">The value of the element to add. The value can be null.</param>
    public void Add(string path, T value) {
      this[path] = value;
    }

    /// <summary>
    /// Determines whether this dictionary contains the specified key
    /// (a local key only — path expansion is not performed here).
    /// </summary>
    /// <param name="key">The key to locate.</param>
    /// <returns>true if an element with the specified key exists; otherwise, false.</returns>
    /// <exception cref="ArgumentNullException">key is null</exception>
    public bool HasKey(string key) {
      return dictionary_.ContainsKey(key);
    }

    /// <summary>
    /// Gets a <see cref="IValue"/> object from the dictionary, using the specified
    /// dot-separated path. Used internally to traverse the tree, since intermediate
    /// nodes are <see cref="DictionaryValue&lt;T&gt;"/> rather than <typeparamref name="T"/>.
    /// </summary>
    /// <param name="path">The path of the value to get.</param>
    /// <returns>The IValue at <paramref name="path"/>, or null if not found.</returns>
    IValue GetValue(string path) {
      string key = path;
      int delimiter_position = path.IndexOf('.', 0);
      if (delimiter_position != -1)
        key = path.Substring(0, delimiter_position);

      IValue entry;
      if (!dictionary_.TryGetValue(key, out entry))
        return null;

      // Local key: the entry itself is the result.
      if (delimiter_position == -1)
        return entry;

      // Composite path: descend only through dictionary nodes.
      if (entry.ValueType == ValueType.GenericDictionary) {
        DictionaryValue<T> dictionary = entry as DictionaryValue<T>;
        return dictionary.GetValue(path.Substring(delimiter_position + 1));
      }
      return null;
    }

    /// <summary>
    /// Try-pattern wrapper over <see cref="GetValue(string)"/>.
    /// </summary>
    /// <param name="path">The path of the value to get.</param>
    /// <param name="out_value">The IValue at <paramref name="path"/>, or null if not found.</param>
    /// <returns>true if <paramref name="path"/> is found; otherwise false.</returns>
    bool GetValue(string path, out IValue out_value) {
      out_value = GetValue(path);
      return (out_value != null);
    }

    /// <summary>
    /// Sets the <see cref="IValue"/> associated with the given path, creating
    /// intermediate dictionary nodes as needed (and replacing any non-dictionary
    /// node found along the way).
    /// </summary>
    /// <param name="path">The dot-separated path to set.</param>
    /// <param name="value">The value to store at that path.</param>
    void SetValue(string path, IValue value) {
      string key = path;
      int delimiter_position = path.IndexOf('.', 0);

      // If there isn't a dictionary delimiter in the path, we're done.
      if (delimiter_position == -1) {
        dictionary_[key] = value;
        return;
      } else {
        key = path.Substring(0, delimiter_position);
      }

      // Assume we're indexing into a dictionary; create/replace the child
      // node if it is missing or not a dictionary.
      IValue dict;
      DictionaryValue<T> entry = null;
      if (dictionary_.TryGetValue(key, out dict) && dict.ValueType == ValueType.GenericDictionary) {
        entry = dict as DictionaryValue<T>;
      } else {
        entry = new DictionaryValue<T>();
        dictionary_[key] = entry;
      }
      entry.SetValue(path.Substring(delimiter_position + 1), value);
    }

    /// <summary>
    /// Gets or sets the <typeparamref name="T"/> associated with the given path starting
    /// from this object.
    /// </summary>
    /// <param name="path">A path of the form "&lt;key&gt;" or "&lt;key&gt;.&lt;key&gt;.[...]",
    /// where "." indexes into the next dictionary down. "." cannot appear within a key.</param>
    /// <returns>The value of the last key in the path if it can be resolved successfully
    /// (and is a <typeparamref name="T"/>); otherwise a null reference.</returns>
    public T this[string path] {
      get {
        T t = GetValue(path) as T;
        return t;
      }
      set { SetValue(path, value); }
    }

    /// <summary>
    /// Gets the <typeparamref name="T"/> associated with the given path starting
    /// from this object, using the try-pattern.
    /// </summary>
    /// <param name="path">The path to get.</param>
    /// <param name="out_value">On return, the value for the last key in the path,
    /// or null if the path could not be resolved.</param>
    /// <returns>true if the path resolved to a <typeparamref name="T"/>; otherwise false.</returns>
    public bool Get(string path, out T out_value) {
      out_value = this[path];
      if (out_value == null)
        return false;
      return true;
    }

    /// <summary>
    /// Removes the <typeparamref name="T"/> object with the specified path from this
    /// dictionary (or one of its child dictionaries, if the path is more than just
    /// a local key), using the try-pattern.
    /// </summary>
    /// <param name="path">The path of the item to remove.</param>
    /// <param name="out_value">On return, the removed value, or null if the path was not found.</param>
    /// <returns>true if the path was found and removed; otherwise false.</returns>
    public bool Remove(string path, out T out_value) {
      out_value = Remove(path);
      if (out_value == null)
        return false;
      return true;
    }

    /// <summary>
    /// Removes the <typeparamref name="T"/> object with the specified path from this
    /// dictionary (or one of its child dictionaries, if the path is more than just
    /// a local key).
    /// </summary>
    /// <param name="path">The path of the item to remove.</param>
    /// <returns>A reference to the removed value, or null if the path was not found
    /// or the leaf is not a <typeparamref name="T"/>.</returns>
    public T Remove(string path) {
      int delimiter_position = path.IndexOf('.', 0);

      // Local key: remove it directly from this dictionary.
      if (delimiter_position == -1) {
        IValue entry;
        if (!dictionary_.TryGetValue(path, out entry))
          return null;
        T typed = entry as T;
        if (typed == null)
          return null;
        dictionary_.Remove(path);
        return typed;
      }

      // Composite path: descend into the child dictionary named by the first
      // path segment and remove the remainder of the path from it.
      // BUGFIX: the previous implementation resolved the FULL path (yielding
      // the leaf value) and then tried to recurse into that leaf, so removal
      // of any nested value silently failed and returned null.
      string key = path.Substring(0, delimiter_position);
      IValue child;
      if (dictionary_.TryGetValue(key, out child) && child.ValueType == ValueType.GenericDictionary) {
        return ((DictionaryValue<T>)child).Remove(path.Substring(delimiter_position + 1));
      }
      return null;
    }

    /// <summary>
    /// Creates a deep copy of the tree rooted at this node.
    /// </summary>
    public override IValue DeepCopy() {
      DictionaryValue result = new DictionaryValue();
      foreach (KeyValuePair<string, IValue> pair in dictionary_) {
        // BUGFIX: copy each child recursively; the previous implementation
        // stored the original child references, producing a shallow copy
        // despite the method's contract.
        result[pair.Key] = pair.Value.DeepCopy();
      }
      return result;
    }

    /// <summary>
    /// Value equality: two dictionaries are equal when they hold the same set
    /// of keys and every corresponding value compares equal.
    /// </summary>
    public override bool Equals(IValue other) {
      if (other.ValueType != ValueType)
        return false;

      DictionaryValue<T> other_dict = other as DictionaryValue<T>;
      Dictionary<string, IValue>.KeyCollection keys = dictionary_.Keys;

      if (keys.Count != other_dict.dictionary_.Keys.Count)
        return false;

      IValue lhs, rhs;
      foreach (string path in keys) {
        if (!GetValue(path, out lhs) || !other_dict.GetValue(path, out rhs) || !lhs.Equals(rhs)) {
          return false;
        }
      }
      return true;
    }

    /// <summary>
    /// Copies the elements of the <see cref="DictionaryValue"/> to a new array.
    /// </summary>
    /// <returns>An array containing copies of the top-level elements, or an empty
    /// array if the dictionary is empty or contains a nested dictionary node.</returns>
    /// <remarks>This method is an O(n) operation, where n is <see cref="Size"/>.
    /// It never returns a null reference.</remarks>
    public T[] ToArray() {
      if (Size == 0)
        return new T[0];

      int pos = 0;
      T[] destination_array = new T[Size];
      // BUGFIX: enumerate as IValue first. The previous version enumerated as T,
      // which cast each element BEFORE the dictionary-node check and threw
      // InvalidCastException instead of returning the documented empty array.
      foreach (IValue value in dictionary_.Values) {
        // all the elements must be an instance of T.
        if (value.ValueType == ValueType.GenericDictionary)
          return new T[0];
        destination_array[pos++] = (T)value;
      }
      return destination_array;
    }

    #region IDictionaryValue
    // NOTE(review): these explicit IDictionaryValue implementations are inert
    // stubs — confirm whether they should delegate to the generic members.
    bool IDictionaryValue.Get(string path, out IValue out_value) {
      out_value = null;
      return false;
    }

    void IDictionaryValue.Add(string path, IValue value) { }

    bool IDictionaryValue.Remove(string path, out IValue out_value) {
      out_value = null;
      return false;
    }

    IValue IDictionaryValue.Remove(string path) {
      return null;
    }

    IValue IDictionaryValue.this[string path] {
      get { return null; }
      set { }
    }
    #endregion

    #region IEnumerable
    /// <summary>
    /// Returns an enumerator that iterates through the <see cref="DictionaryValue&lt;T&gt;"/>
    /// </summary>
    IEnumerator IEnumerable.GetEnumerator() {
      return GetEnumerator();
    }

    /// <summary>
    /// Enumerates the top-level <typeparamref name="T"/> elements, skipping
    /// nested dictionary nodes.
    /// </summary>
    public IEnumerator<T> GetEnumerator() {
      // BUGFIX: enumerate as IValue and skip dictionary nodes; enumerating as T
      // (as the previous version did) cast each element before the check and
      // threw InvalidCastException on nested dictionaries instead of skipping.
      foreach (IValue value in dictionary_.Values) {
        if (value.ValueType == ValueType.GenericDictionary)
          continue;
        yield return (T)value;
      }
    }
    #endregion

    /// <summary>
    /// Gets the number of elements in this dictionary.
    /// </summary>
    public int Size {
      get { return dictionary_.Count; }
    }
  }
}
//! \file ArcPSB.cs //! \date Thu Mar 24 01:40:57 2016 //! \brief E-mote engine image container. // // Copyright (C) 2016 by morkt // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
// using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel.Composition; using System.IO; using System.Text; using System.Windows.Media; using GameRes.Utility; namespace GameRes.Formats.Emote { internal class TexEntry : Entry { public string TexType; public int Width; public int Height; public int TruncatedWidth; public int TruncatedHeight; public int OffsetX; public int OffsetY; } [Serializable] public class PsbScheme : ResourceScheme { public uint[] KnownKeys; } [Export(typeof(ArchiveFormat))] public class PsbOpener : ArchiveFormat { public override string Tag { get { return "PSB/EMOTE"; } } public override string Description { get { return "E-mote engine texture container"; } } public override uint Signature { get { return 0x425350; } } // 'PSB' public override bool IsHierarchic { get { return false; } } public override bool CanWrite { get { return false; } } static uint[] KnownKeys = new uint[] { 970396437u }; public PsbOpener () { Extensions = new string[] { "psb", "pimg", "dpak", "psbz" }; } public override ArcFile TryOpen (ArcView file) { using (var input = file.CreateStream()) using (var reader = new PsbReader (input)) { foreach (var key in KnownKeys) { try { if (reader.Parse (key)) return OpenArcFile (reader, file); if (!reader.IsEncrypted) break; } catch { /* ignore parse errors caused by invalid key */ } } if (reader.ParseNonEncrypted()) return OpenArcFile (reader, file); return null; } } ArcFile OpenArcFile (PsbReader reader, ArcView file) { var dir = reader.GetTextures(); if (null == dir) dir = reader.GetLayers(); if (null == dir) dir = reader.GetChunks(); if (null == dir || 0 == dir.Count) return null; return new ArcFile (file, this, dir); } public override IImageDecoder OpenImage (ArcFile arc, Entry entry) { var tex = entry as TexEntry; if (null == tex) return base.OpenImage (arc, entry); if ("TLG" == tex.TexType) return OpenTlg (arc, tex); var info = new PsbTexMetaData { FullWidth = tex.Width, FullHeight 
= tex.Height, Width = (uint)tex.TruncatedWidth, Height = (uint)tex.TruncatedHeight, TexType = tex.TexType, BPP = 32 }; var input = arc.File.CreateStream (entry.Offset, entry.Size); return new PsbTextureDecoder (input, info); } IImageDecoder OpenTlg (ArcFile arc, TexEntry entry) { var input = arc.File.CreateStream (entry.Offset, entry.Size); try { var info = TlgFormat.ReadMetaData (input); if (null == info) throw new InvalidFormatException(); info.OffsetX = entry.OffsetX; info.OffsetY = entry.OffsetY; return new ImageFormatDecoder (input, TlgFormat, info); } catch { input.Dispose(); throw; } } public override ResourceScheme Scheme { get { return new PsbScheme { KnownKeys = KnownKeys }; } set { KnownKeys = ((PsbScheme)value).KnownKeys; } } ImageFormat TlgFormat { get { return s_TlgFormat.Value; } } static Lazy<ImageFormat> s_TlgFormat = new Lazy<ImageFormat> (() => ImageFormat.FindByTag ("TLG")); } /// <summary> /// PSB container deserialization. /// </summary> internal sealed class PsbReader : IDisposable { IBinaryStream m_input; public PsbReader (IBinaryStream input) { m_input = input; } public int Version { get { return m_version; } } public bool IsEncrypted { get { return 0 != (m_flags & 3); } } public int DataOffset { get { return m_chunk_data; } } public T GetRootKey<T> (string key) { int obj_offset; if (!GetKey (key, m_root, out obj_offset)) return default(T); return (T)GetObject (obj_offset); } int m_version; int m_flags; uint[] m_key = new uint[6]; Dictionary<int, string> m_name_map; public bool ParseNonEncrypted () { return Parse (false); } public bool Parse (uint key) { m_key[0] = 0x075BCD15; m_key[1] = 0x159A55E5; m_key[2] = 0x1F123BB5; m_key[3] = key; m_key[4] = 0; m_key[5] = 0; return Parse (true); } bool Parse (bool encrypted) { if (!ReadHeader (encrypted)) return false; if (Version < 2) throw new NotSupportedException ("Not supported PSB version"); m_name_map = ReadNames(); #if DEBUG var dict = GetDict (m_root); // returns all metadata in a single 
dictionary #endif return true; } public List<Entry> GetLayers () { var layers = GetRootKey<IList> ("layers"); if (null == layers || 0 == layers.Count) return null; var dir = new List<Entry> (layers.Count); foreach (IDictionary layer in layers) { var name = layer["layer_id"].ToString() + ".tlg"; var layer_data = GetRootKey<EmChunk> (name); if (null == layer_data) continue; var entry = new TexEntry { Name = name, Type = "image", Offset = DataOffset + layer_data.Offset, Size = (uint)layer_data.Length, TexType = "TLG", OffsetX = Convert.ToInt32 (layer["left"]), OffsetY = Convert.ToInt32 (layer["top"]), Width = Convert.ToInt32 (layer["width"]), Height = Convert.ToInt32 (layer["height"]), }; dir.Add (entry); } if (0 == dir.Count) return null; return dir; } public List<Entry> GetTextures () { var source = GetRootKey<IDictionary> ("source"); if (null == source || 0 == source.Count) return null; var dir = new List<Entry> (source.Count); foreach (DictionaryEntry item in source) { var item_value = item.Value as IDictionary; if (null == item_value) continue; if (item_value.Contains ("texture")) { AddTextureEntry (dir, item.Key, item_value["texture"] as IDictionary); } else if (item_value.Contains ("icon")) { AddIconEntry (dir, item.Key, item_value["icon"] as IDictionary); } } return dir; } public List<Entry> GetChunks () { var dict = GetDict (m_root); if (0 == dict.Count) return null; var dir = new List<Entry> (dict.Count); foreach (DictionaryEntry item in dict) { var name = item.Key.ToString(); var data = item.Value as EmChunk; if (string.IsNullOrEmpty (name) || null == data) continue; var entry = new Entry { Name = name, Type = FormatCatalog.Instance.GetTypeFromName (name), Offset = DataOffset + data.Offset, Size = (uint)data.Length, }; dir.Add (entry); } if (0 == dir.Count) return null; return dir; } void AddTextureEntry (List<Entry> dir, object name, IDictionary texture) { if (null == texture) return; var pixel = texture["pixel"] as EmChunk; if (null == pixel) return; var 
entry = new TexEntry { Name = name.ToString(), Type = "image", Offset = DataOffset + pixel.Offset, Size = (uint)pixel.Length, TexType = texture["type"].ToString(), Width = Convert.ToInt32 (texture["width"]), Height = Convert.ToInt32 (texture["height"]), TruncatedWidth = Convert.ToInt32 (texture["truncated_width"]), TruncatedHeight = Convert.ToInt32 (texture["truncated_height"]), }; dir.Add (entry); } void AddIconEntry (List<Entry> dir, object name, IDictionary icon_list) { if (null == icon_list) return; foreach (DictionaryEntry icon in icon_list) { var layer = icon.Value as IDictionary; var pixel = layer["pixel"] as EmChunk; if (null == pixel) continue; var entry = new TexEntry { Name = name.ToString()+'#'+icon.Key.ToString(), Type = "image", Offset = DataOffset + pixel.Offset, Size = (uint)pixel.Length, Width = Convert.ToInt32 (layer["width"]), Height = Convert.ToInt32 (layer["height"]), OffsetX = Convert.ToInt32 (layer["originX"]), OffsetY = Convert.ToInt32 (layer["originY"]), TexType = layer.Contains ("compress") ? layer["compress"].ToString() : "RGBA8", }; entry.TruncatedWidth = entry.Width; entry.TruncatedHeight = entry.Height; dir.Add (entry); } } int m_names; int m_strings; int m_strings_data; int m_chunk_offsets; int m_chunk_lengths; int m_chunk_data; int m_extra_offsets; int m_extra_lengths; int m_extra_data; int m_root; byte[] m_data; bool ReadHeader (bool encrypted) { m_input.Position = 4; m_version = m_input.ReadUInt16(); m_flags = m_input.ReadUInt16(); if (encrypted && m_version < 3) m_flags = 2; int header_size = m_version > 3 ? 
0x30 : 0x20; var header = m_input.ReadBytes (header_size); if (encrypted && 0 != (m_flags & 1)) { if (m_version > 3) { Decrypt (header, 0, 0x24); Decrypt (header, 0x24, 0xC); } else Decrypt (header, 0, 0x20); } m_names = LittleEndian.ToInt32 (header, 0x04); // 0x08 m_strings = LittleEndian.ToInt32 (header, 0x08); // 0x0C m_strings_data = LittleEndian.ToInt32 (header, 0x0C); // 0x10 m_chunk_offsets = LittleEndian.ToInt32 (header, 0x10); // 0x14 m_chunk_lengths = LittleEndian.ToInt32 (header, 0x14); // 0x18 m_chunk_data = LittleEndian.ToInt32 (header, 0x18); // 0x1C m_root = LittleEndian.ToInt32 (header, 0x1C); // 0x20 if (m_version > 3) { m_extra_offsets = LittleEndian.ToInt32 (header, 0x24); m_extra_lengths = LittleEndian.ToInt32 (header, 0x28); m_extra_data = LittleEndian.ToInt32 (header, 0x2C); } int buffer_length = (int)m_input.Length; if (!(m_names >= 0x28 && m_names < m_chunk_data && m_strings >= 0x28 && m_strings < m_chunk_data && m_strings_data >= 0x28 && m_strings_data < m_chunk_data && m_chunk_offsets >= 0x28 && m_chunk_offsets < m_chunk_data && m_chunk_lengths >= 0x28 && m_chunk_lengths < m_chunk_data && m_chunk_data >= 0x28 && m_chunk_data <= buffer_length && m_root >= 0x28 && m_root < m_chunk_data)) return false; if (null == m_data || m_data.Length < m_chunk_data) m_data = new byte[m_chunk_data]; int data_pos = (int)m_input.Position; m_input.Read (m_data, data_pos, m_chunk_data-data_pos); if (encrypted && 0 != (m_flags & 2)) Decrypt (m_data, m_names, m_chunk_offsets-m_names); // root object is a dictionary return 0x21 == m_data[m_root]; } bool GetKey (string name, int dict_offset, out int value_offset) { value_offset = 0; int offset; if (!GetOffset (name, out offset)) return false; var keys = GetArray (++dict_offset); if (0 == keys.Count) return false; int upper_bound = keys.Count; int lower_bound = 0; int key_index = 0; while (lower_bound < upper_bound) { key_index = (upper_bound + lower_bound) >> 1; int key = GetArrayElem (keys, key_index); if (key == 
            offset)
            break;
        if (key >= offset)
            upper_bound = (upper_bound + lower_bound) >> 1;
        else
            lower_bound = key_index + 1;
    }
    if (lower_bound >= upper_bound)
        return false;
    // The value table immediately follows the key table.
    var values = GetArray (dict_offset + keys.ArraySize);
    int data_offset = GetArrayElem (values, key_index);
    value_offset = dict_offset + keys.ArraySize + values.ArraySize + data_offset;
    return true;
}

// Walk the PSB name trie character by character to resolve 'name' into its
// name-table offset.  Returns false when the name is not present.
bool GetOffset (string name, out int offset)
{
    // FIXME works for ASCII names only.
    var nm1 = GetArray (m_names);
    var nm2 = GetArray (m_names + nm1.ArraySize);
    int i = 0;
    for (int name_idx = 0; ; ++name_idx)
    {
        // '\0' terminator is probed one position past the last character.
        char symbol = name_idx < name.Length ? name[name_idx] : '\0';
        int prev_i = i;
        i = symbol + GetArrayElem (nm1, i);
        // Trie transition is valid only when the back-link matches.
        if (i >= nm1.Count || GetArrayElem (nm2, i) != prev_i)
            break;
        if (name_idx >= name.Length)
        {
            offset = GetArrayElem (nm1, i);
            return true;
        }
    }
    offset = 0;
    return false;
}

// Decode the whole name trie into an offset -> name map by breadth-first
// expansion of every path.
Dictionary<int, string> ReadNames ()
{
    // this implementation is utterly inefficient. FIXME
    var lookup = new Dictionary<int, byte[]>();
    var next_lookup = new Dictionary<int, byte[]>();
    var dict = new Dictionary<int, string>();
    var nm1 = GetArray (m_names);
    var nm2 = GetArray (m_names + nm1.ArraySize);
    lookup[0] = new byte[0];
    while (lookup.Count > 0)
    {
        foreach (var item in lookup)
        {
            int first = GetArrayElem (nm1, item.Key);
            for (int i = 0; i < 256 && i + first < nm2.Count; ++i)
            {
                if (GetArrayElem (nm2, i + first) == item.Key)
                {
                    // i == 0 is the terminator: a complete name was assembled.
                    if (0 == i)
                        dict[GetArrayElem (nm1, i + first)] = Encoding.UTF8.GetString (item.Value);
                    else
                        next_lookup[i+first] = ArrayAppend (item.Value, (byte)i);
                }
            }
        }
        // Swap the current/next level buffers instead of reallocating.
        var tmp = lookup;
        lookup = next_lookup;
        next_lookup = tmp;
        next_lookup.Clear();
    }
    return dict;
}

// Return a fresh copy of 'array' with byte 'n' appended.
static byte[] ArrayAppend (byte[] array, byte n)
{
    var new_array = new byte[array.Length+1];
    Buffer.BlockCopy (array, 0, new_array, 0, array.Length);
    new_array[array.Length] = n;
    return new_array;
}

// Parse the packed-array header located at 'offset' in m_data.
EmArray GetArray (int offset)
{
    int data_offset = m_data[offset] - 10;
    var array = new EmArray
    {
        Count = GetInteger (offset, 0xC),
        ElemSize = m_data[offset
                              + data_offset - 1] - 12,
        DataOffset = offset + data_offset,
    };
    array.ArraySize = array.Count * array.ElemSize + data_offset;
    return array;
}

// Fetch element 'index' from a packed little-endian integer array whose
// elements occupy 1..4 bytes each.
int GetArrayElem (EmArray a1, int index)
{
    int offset = index * a1.ElemSize;
    switch (a1.ElemSize)
    {
    case 1: return m_data[a1.DataOffset + offset];
    case 2: return LittleEndian.ToUInt16 (m_data, a1.DataOffset + offset);
    case 3: return LittleEndian.ToUInt16 (m_data, a1.DataOffset + offset) | m_data[a1.DataOffset + offset + 2] << 16;
    case 4: return LittleEndian.ToInt32 (m_data, a1.DataOffset + offset);
    default: throw new InvalidFormatException ("Invalid PSB array structure");
    }
}

// Deserialize the object whose type tag byte sits at m_data[offset].
object GetObject (int offset)
{
    switch (m_data[offset])
    {
    case 1: return null;
    case 2: return true;
    case 3: return false;
    // 4..8: integers of increasing byte width
    case 4: case 5: case 6: case 7: case 8:
        return GetInteger (offset, 4);
    // 9..0x0C: 64-bit integers
    case 9: case 0x0A: case 0x0B: case 0x0C:
        return GetLong (offset);
    // 0x15..0x18: string-table references
    case 0x15: case 0x16: case 0x17: case 0x18:
        return GetString (offset);
    // 0x19..0x1C: binary chunk references
    case 0x19: case 0x1A: case 0x1B: case 0x1C:
        return GetChunk (offset);
    case 0x1D: case 0x1E:
        return GetFloat (offset);
    case 0x1F:
        return GetDouble (offset);
    case 0x20:
        return GetList (offset);
    case 0x21:
        return GetDict (offset);
    // 0x22..0x25: "extra" chunk references (v4+ sections)
    case 0x22: case 0x23: case 0x24: case 0x25:
        return GetExtraChunk (offset);
    default:
        throw new InvalidFormatException (string.Format ("Unknown serialized object type 0x{0:X2}", m_data[offset]));
    }
}

// Read a variable-width little-endian integer; the width in bytes equals
// the type tag minus 'base_type'.
int GetInteger (int offset, int base_type)
{
    switch (m_data[offset] - base_type)
    {
    case 1: return m_data[offset+1];
    case 2: return LittleEndian.ToUInt16 (m_data, offset+1);
    case 3: return LittleEndian.ToUInt16 (m_data, offset+1) | m_data[offset+3] << 16;
    case 4: return LittleEndian.ToInt32 (m_data, offset+1);
    default: return 0;
    }
}

// Read a 32-bit float object (tag 0x1E); any other tag yields 0.
float GetFloat (int offset)
{
    if (0x1E == m_data[offset])
        return BitConverter.ToSingle (m_data, offset+1); // FIXME endianness
    else
        return 0.0f;
}

// Read a 64-bit double object (tag 0x1F); any other tag yields 0.
double GetDouble (int offset)
{
    if (0x1F == m_data[offset])
        return BitConverter.ToDouble (m_data, offset+1); // FIXME endianness
    else
        return
               0.0;
}

// Assemble a signed 64-bit integer from its 5/6/7/8-byte little-endian
// encoding, sign-extending the most significant part.
long GetLong (int offset)
{
    switch (m_data[offset])
    {
    case 0x09: return LittleEndian.ToUInt32 (m_data, offset+1) | (long)(sbyte)m_data[offset+5] << 32;
    case 0x0A: return LittleEndian.ToUInt32 (m_data, offset+1) | (long)LittleEndian.ToInt16 (m_data, offset+5) << 32;
    case 0x0B: return LittleEndian.ToUInt32 (m_data, offset+1) | (long)LittleEndian.ToUInt16 (m_data, offset+5) << 32 | (long)(sbyte)m_data[offset+6] << 48;
    case 0x0C: return LittleEndian.ToInt64 (m_data, offset+1);
    default: return 0L;
    }
}

// Resolve a string object: index into the string table, then read a
// zero-terminated UTF-8 string from the string-data section.
string GetString (int obj_offset)
{
    int index = GetInteger (obj_offset, 0x14);
    var array = GetArray (m_strings);
    int data_offset = m_strings_data + GetArrayElem (array, index);
    return Binary.GetCString (m_data, data_offset, m_data.Length-data_offset, Encoding.UTF8);
}

// Deserialize a list object: an offset array followed by the element data.
IList GetList (int offset)
{
    var array = GetArray (++offset);
    var list = new ArrayList (array.Count);
    for (int i = 0; i < array.Count; ++i)
    {
        int item_offset = offset + array.ArraySize + GetArrayElem (array, i);
        var item = GetObject (item_offset);
        list.Add (item);
    }
    return list;
}

// Deserialize a dictionary object: parallel key/value offset arrays followed
// by the value data.  Keys are resolved through m_name_map.
IDictionary GetDict (int offset)
{
    var keys = GetArray (++offset);
    if (0 == keys.Count)
        return new Dictionary<string, object>();
    var values = GetArray (offset + keys.ArraySize);
    var dict = new Dictionary<string, object> (keys.Count);
    for (int i = 0; i < keys.Count; ++i)
    {
        int key = GetArrayElem (keys, i);
        var value_offset = GetArrayElem (values, i);
        string key_name = m_name_map[key];
        dict[key_name] = GetObject (offset + value_offset + keys.ArraySize + values.ArraySize);
    }
    return dict;
}

// Look up a binary chunk by index in the chunk offset/length tables.
EmChunk GetChunk (int offset)
{
    var chunk_index = GetInteger (offset, 0x18);
    var chunks = GetArray (m_chunk_offsets);
    if (chunk_index >= chunks.Count)
        throw new InvalidFormatException ("Invalid chunk index");
    var lengths = GetArray (m_chunk_lengths);
    return new EmChunk
    {
        Offset = GetArrayElem (chunks, chunk_index),
        Length = GetArrayElem (lengths, chunk_index),
    };
}

// Same as GetChunk, but indexes the v4+ "extra" offset/length tables.
EmChunk GetExtraChunk (int offset)
{
    var chunk_index = GetInteger (offset,
                                  0x21);
    var chunks = GetArray (m_extra_offsets);
    if (chunk_index >= chunks.Count)
        throw new InvalidFormatException ("Invalid chunk index");
    var lengths = GetArray (m_extra_lengths);
    return new EmChunk
    {
        Offset = GetArrayElem (chunks, chunk_index),
        Length = GetArrayElem (lengths, chunk_index),
    };
}

// XOR stream cipher: m_key[0..3] hold the generator state (xorshift-style),
// m_key[4] buffers the current keystream word and is consumed byte by byte,
// refilled whenever it reaches zero.
void Decrypt (byte[] data, int offset, int length)
{
    for (int i = 0; i < length; ++i)
    {
        if (0 == m_key[4])
        {
            var v5 = m_key[3];
            var v6 = m_key[0] ^ (m_key[0] << 11);
            m_key[0] = m_key[1];
            m_key[1] = m_key[2];
            var eax = v6 ^ v5 ^ ((v6 ^ (v5 >> 11)) >> 8);
            m_key[2] = v5;
            m_key[3] = eax;
            m_key[4] = eax;
        }
        data[offset+i] ^= (byte)m_key[4];
        m_key[4] >>= 8;
    }
}

// Parsed header of a packed PSB array.
internal class EmArray
{
    public int ArraySize;   // total byte size of the array record
    public int Count;       // number of elements
    public int ElemSize;    // bytes per element (1..4)
    public int DataOffset;  // absolute offset of element data in m_data
}

// Offset/length pair referencing a binary chunk.
internal class EmChunk
{
    public int Offset;
    public int Length;
}

#region IDisposable Members
public void Dispose ()
{
    // No unmanaged state to release.
}
#endregion
}

// Image metadata extended with PSB texture attributes.
internal class PsbTexMetaData : ImageMetaData
{
    public string TexType;  // pixel format name, e.g. "RGBA8", "L8", "DXT5"
    public int FullWidth;   // padded texture width  (>= visible width)
    public int FullHeight;  // padded texture height (>= visible height)
}

/// <summary>
/// Artificial format representing PSB texture.
/// </summary>
internal sealed class PsbTextureDecoder : BinaryImageDecoder
{
    PsbTexMetaData m_info;

    public PsbTextureDecoder (IBinaryStream input, PsbTexMetaData info) : base (input, info)
    {
        m_info = info;
    }

    // Dispatch on the texture format name and convert the raw texture into
    // BGRA32 pixels, stride = visible width * 4.
    protected override ImageData GetImageData ()
    {
        int stride = (int)m_info.Width * 4;
        var pixels = new byte[stride * (int)m_info.Height];
        switch (m_info.TexType)
        {
        case "RGBA8":
            ReadRgba8 (pixels, stride);
            break;
        case "L8":
            ReadL8 (pixels, stride);
            break;
        case "A8L8":
            ReadA8L8 (pixels, stride);
            break;
        case "RGBA4444":
            ReadRgba4444 (pixels, stride);
            break;
        case "RL":
            ReadRle (pixels, stride);
            break;
        case "DXT5":
            pixels = ReadDxt5();
            break;
        default:
            throw new NotImplementedException (string.Format ("PSB texture format '{0}' not implemented", m_info.TexType));
        }
        return ImageData.Create (m_info, PixelFormats.Bgra32, null, pixels, stride);
    }

    // Copy 32bpp rows, skipping the padding between FullWidth and Width.
    void ReadRgba8 (byte[] output, int dst_stride)
    {
        int src_stride = m_info.FullWidth * 4;
        long row_pos = 0;
        int out_pos = 0;
        for (uint y = 0; y < m_info.Height; ++y)
        {
            m_input.Position = row_pos;
            m_input.Read (output, out_pos, dst_stride);
            out_pos += dst_stride;
            row_pos += src_stride;
        }
    }

    // Expand 8-bit luminance to opaque gray BGRA.
    void ReadL8 (byte[] output, int dst_stride)
    {
        int src_stride = m_info.FullWidth;
        var row = new byte[src_stride];
        int out_pos = 0;
        m_input.Position = 0;
        for (uint y = 0; y < m_info.Height; ++y)
        {
            m_input.Read (row, 0, src_stride);
            int src = 0;
            for (int x = 0; x < dst_stride; x += 4)
            {
                byte luma = row[src++];
                output[out_pos++] = luma;
                output[out_pos++] = luma;
                output[out_pos++] = luma;
                output[out_pos++] = 0xFF;
            }
        }
    }

    // Expand luminance+alpha pairs to gray BGRA with the source alpha.
    void ReadA8L8 (byte[] output, int dst_stride)
    {
        int src_stride = m_info.FullWidth * 2;
        var row = new byte[src_stride];
        int out_pos = 0;
        m_input.Position = 0;
        for (uint y = 0; y < m_info.Height; ++y)
        {
            m_input.Read (row, 0, src_stride);
            int src = 0;
            for (int x = 0; x < dst_stride; x += 4)
            {
                byte luma = row[src++];
                byte alpha = row[src++];
                output[out_pos++] = luma;
                output[out_pos++] = luma;
                output[out_pos++] = luma;
                output[out_pos++] = alpha;
            }
        }
    }

    // Expand packed 4:4:4:4 pixels, scaling each nibble to the 0..255 range.
    void ReadRgba4444 (byte[] output, int dst_stride)
    {
        int src_stride = m_info.FullWidth * 2;
        var row = new byte[src_stride];
        int out_pos = 0;
        m_input.Position = 0;
        for (uint y = 0; y < m_info.Height; ++y)
        {
            m_input.Read (row, 0, src_stride);
            int src = 0;
            for (int x = 0; x < dst_stride; x += 4)
            {
                uint p = LittleEndian.ToUInt16 (row, src);
                src += 2;
                output[out_pos++] = (byte)((p & 0x000Fu) * 0xFFu / 0x000Fu);
                output[out_pos++] = (byte)((p & 0x00F0u) * 0xFFu / 0x00F0u);
                output[out_pos++] = (byte)((p & 0x0F00u) * 0xFFu / 0x0F00u);
                output[out_pos++] = (byte)((p & 0xF000u) * 0xFFu / 0xF000u);
            }
        }
    }

    // RLE-compressed 32bpp pixels: control byte < 0x80 means a literal run of
    // (ctl+1) pixels, otherwise one pixel repeated (ctl & 0x7F)+3 times.
    void ReadRle (byte[] output, int dst_stride)
    {
        const int pixel_size = 4;
        m_input.Position = 0;
        int out_pos = 0;
        while (out_pos < output.Length)
        {
            int ctl = m_input.ReadUInt8();
            if (0 == (ctl & 0x80))
            {
                int count = pixel_size * (ctl + 1);
                out_pos += m_input.Read (output, out_pos, count);
            }
            else
            {
                int count = pixel_size * ((ctl & 0x7F) + 3);
                m_input.Read (output, out_pos, pixel_size);
                Binary.CopyOverlapped (output, out_pos, out_pos+pixel_size, count-pixel_size);
                out_pos += count;
            }
        }
    }

    // Delegate DXT5 decompression to the shared DirectDraw decoder.
    byte[] ReadDxt5 ()
    {
        var packed = m_input.ReadBytes ((int)m_input.Length);
        return new DirectDraw.DxtDecoder (packed, m_info).UnpackDXT5();
    }
}
}
// Unity SDK for Qualisys Track Manager. Copyright 2015 Qualisys AB
//
using QTMRealTimeSDK;
using QTMRealTimeSDK.Data;
using QTMRealTimeSDK.Settings;
using System;
using System.Collections.Generic;
using UnityEngine;

namespace QualisysRealTime.Unity
{
    // Singleton real-time client wrapping a QTM RTProtocol connection.
    public class RTClient : IDisposable
    {
        // Lowest QTM RT protocol version this client can fall back to.
        const int LOWEST_SUPPORTED_UNITY_MAJOR_VERSION = 1;
        const int LOWEST_SUPPORTED_UNITY_MINOR_VERSION = 13;

        RTProtocol mProtocol;
        private static RTClient mInstance;

        // Discovery reply port, randomized to avoid clashes between instances.
        private ushort replyPort = (ushort)new System.Random().Next(1333, 1388);

        private List<SixDOFBody> mBodies;
        public List<SixDOFBody> Bodies { get { return mBodies; } }

        private List<LabeledMarker> mMarkers;
        public List<LabeledMarker> Markers { get { return mMarkers; } }

        private List<Bone> mBones;
        public List<Bone> Bones { get { return mBones; } }

        private Axis mUpAxis;
        private Quaternion mCoordinateSystemChange;  // maps QTM axes onto Unity axes
        private RTPacket mPacket;                    // most recently received packet
        private bool mStreamingStatus;

        // processor of realtime data
        // Function is called every time protocol receives a datapacket from server
        public void Process(RTPacket packet)
        {
            mPacket = packet;
            List<Q6DOF> bodyData = packet.Get6DOFData();
            List<Q3D> markerData = packet.Get3DMarkerData();
            if (bodyData != null)
            {
                for (int i = 0; i < bodyData.Count; i++)
                {
                    Vector3 position = new Vector3(bodyData[i].Position.X, bodyData[i].Position.Y, bodyData[i].Position.Z);

                    //Set rotation and position to work with unity
                    position /= 1000;  // mm -> m
                    mBodies[i].Position = QuaternionHelper.Rotate(mCoordinateSystemChange, position);
                    mBodies[i].Position.z *= -1;  // right-handed -> left-handed
                    mBodies[i].Rotation = mCoordinateSystemChange * QuaternionHelper.FromMatrix(bodyData[i].Matrix);
                    mBodies[i].Rotation.z *= -1;
                    mBodies[i].Rotation.w *= -1;
                    mBodies[i].Rotation *= QuaternionHelper.RotationZ(Mathf.PI * .5f);
                    mBodies[i].Rotation *= QuaternionHelper.RotationX(-Mathf.PI * .5f);
                }
            }

            //Get marker data that is labeled and update values
            if (markerData != null)
            {
                for (int i = 0; i < markerData.Count; i++)
                {
                    Q3D marker = markerData[i];
                    Vector3 position = new
                                       Vector3(marker.Position.X, marker.Position.Y, marker.Position.Z);
                    position /= 1000;  // mm -> m
                    mMarkers[i].Position = QuaternionHelper.Rotate(mCoordinateSystemChange, position);
                    mMarkers[i].Position.z *= -1;  // right-handed -> left-handed
                }
            }
        }

        // called every time a event is broadcasted from QTM server.
        public void Events(RTPacket packet)
        {
            QTMEvent currentEvent = packet.GetEvent();
            Debug.Log("Event occurred! : " + currentEvent);
            if (currentEvent == QTMEvent.EventRTFromFileStarted)
            {
                // reload settings when we start streaming to get proper settings
                Debug.Log("Reloading Settings");
                Get3DSettings();
                Get6DOFSettings();
            }
        }

        // get frame number from latest packet
        public int GetFrame()
        {
            return mPacket.Frame;
        }

        // Constructor
        private RTClient()
        {
            //New instance of protocol, contains a RT packet
            mProtocol = new RTProtocol();
            //list of bodies that server streams
            mBodies = new List<SixDOFBody>();
            //list of markers
            mMarkers = new List<LabeledMarker>();
            //list of bones
            mBones = new List<Bone>();
            mStreamingStatus = false;
            mPacket = RTPacket.ErrorPacket;
        }

        ~RTClient()
        {
            Dispose(false);
        }

        public static RTClient GetInstance()
        {
            //Singleton method since we only want one instance (one connection to server)
            if (mInstance == null)
            {
                mInstance = new RTClient();
            }
            return mInstance;
        }

        //Method for objects to call to get data from body; null when not found.
        public SixDOFBody GetBody(string name)
        {
            if (string.IsNullOrEmpty(name))
                return null;
            if (mBodies.Count > 0)
            {
                foreach (SixDOFBody body in mBodies)
                {
                    if (body.Name == name)
                    {
                        return body;
                    }
                }
            }
            return null;
        }

        // Get marker data from streamed data; null when the label is unknown.
        public LabeledMarker GetMarker(string name)
        {
            if (mMarkers.Count > 0)
            {
                foreach (LabeledMarker marker in mMarkers)
                {
                    if (marker.Label == name)
                    {
                        return marker;
                    }
                }
            }
            return null;
        }

        /// <summary>
        /// Get list of servers available on network (always add localhost)
        /// </summary>
        /// <returns>List of discovered servers, with localhost appended last.</returns>
        public List<DiscoveryResponse>
                                       GetServers()
        {
            // Send discovery packet
            List<DiscoveryResponse> list = new List<DiscoveryResponse>();
            if (mProtocol.DiscoverRTServers(replyPort))
            {
                if (mProtocol.DiscoveryResponses.Count > 0)
                {
                    //Get list of all servers from protocol
                    foreach (var discoveryResponse in mProtocol.DiscoveryResponses)
                    {
                        //add them to our list for user to pick from
                        list.Add(discoveryResponse);
                    }
                }
            }
            // Always offer localhost as a fallback entry.
            list.Add(new DiscoveryResponse
            {
                HostName = "Localhost",
                IpAddress = "127.0.0.1",
                Port = RTProtocol.Constants.STANDARD_BASE_PORT,
                InfoText = "",
                CameraCount = 0
            });
            return list;
        }

        /// <summary>
        /// Connect to the specified discovered server.
        /// </summary>
        /// <param name="discoveryResponse">Picked server.</param>
        /// <param name="udpPort">UDP port streaming should occur on.</param>
        /// <param name="stream6d"> if 6DOF data should be streamed.</param>
        /// <param name="stream3d"> if labeled markers should be streamed.</param>
        public bool Connect(DiscoveryResponse discoveryResponse, short udpPort, bool stream6d, bool stream3d)
        {
            // Try the protocol's preferred RT version first, then fall back to
            // the lowest version this client supports.
            if (!mProtocol.Connect(discoveryResponse, udpPort, RTProtocol.Constants.MAJOR_VERSION, RTProtocol.Constants.MINOR_VERSION))
            {
                if (!mProtocol.Connect(discoveryResponse, udpPort, LOWEST_SUPPORTED_UNITY_MAJOR_VERSION, LOWEST_SUPPORTED_UNITY_MINOR_VERSION))
                {
                    Debug.Log("Error Creating Connection to server");
                    return false;
                }
            }
            return ConnectStream(udpPort, StreamRate.RateAllFrames, stream6d, stream3d);
        }

        /// <summary>
        /// Connect the specified IpAddress.
        /// </summary>
        /// <param name="IpAddress">IP address</param>
        /// <param name="udpPort">UDP port streaming should occur on.</param>
        /// <param name="stream6d"> if 6DOF data should be streamed.</param>
        /// <param name="stream3d"> if labeled markers should be streamed.</param>
        public bool Connect(string IpAddress, short udpPort, bool stream6d, bool stream3d)
        {
            if (mProtocol.Connect(IpAddress, udpPort))
            {
                return ConnectStream(udpPort, StreamRate.RateAllFrames, stream6d, stream3d);
            }
            Debug.Log("Error Creating Connection to server");
            return false;
        }

        // Get protocol error string
        public string GetErrorString()
        {
            return mProtocol.GetErrorString();
        }

        // Get streaming status of client
        public bool GetStreamingStatus()
        {
            return mStreamingStatus;
        }

        // Disconnect from server and drop all cached bodies/markers/bones.
        public void Disconnect()
        {
            mBodies.Clear();
            mMarkers.Clear();
            mBones.Clear();
            mStreamingStatus = false;
            mProtocol.StreamFramesStop();
            mProtocol.StopStreamListen();
            mProtocol.Disconnect();
        }

        private bool Get6DOFSettings()
        {
            // Get settings and information for streamed bodies
            bool getstatus = mProtocol.Get6DSettings();
            if (getstatus)
            {
                mBodies.Clear();
                Settings6D settings = mProtocol.Settings6DOF;
                foreach (Settings6DOF body in settings.bodies)
                {
                    SixDOFBody newbody = new SixDOFBody();
                    newbody.Name = body.Name;
                    newbody.Position = Vector3.zero;
                    newbody.Rotation = Quaternion.identity;
                    mBodies.Add(newbody);
                }
                return true;
            }
            return false;
        }

        private bool Get3DSettings()
        {
            bool getstatus = mProtocol.Get3Dsettings();
            if (getstatus)
            {
                mUpAxis = mProtocol.Settings3D.axisUpwards;

                // Derive the rotation that maps QTM's calibration axes onto Unity's.
                Rotation.ECoordinateAxes xAxis, yAxis, zAxis;
                Rotation.GetCalibrationAxesOrder(mUpAxis, out xAxis, out yAxis, out zAxis);
                mCoordinateSystemChange = Rotation.GetAxesOrderRotation(xAxis, yAxis, zAxis);

                // Save marker settings
                mMarkers.Clear();
                foreach (Settings3DLabel marker in mProtocol.Settings3D.labels3D)
                {
                    LabeledMarker newMarker = new LabeledMarker();
                    newMarker.Label = marker.Name;
                    newMarker.Position = Vector3.zero;
                    // Unpack the 0xBBGGRR color into normalized channels.
                    newMarker.Color.r =
                                        (marker.ColorRGB) & 0xFF;
                    newMarker.Color.g = (marker.ColorRGB >> 8) & 0xFF;
                    newMarker.Color.b = (marker.ColorRGB >> 16) & 0xFF;
                    newMarker.Color /= 255;
                    newMarker.Color.a = 1F;
                    Markers.Add(newMarker);
                }

                // Save bone settings
                if (mProtocol.Settings3D.bones != null)
                {
                    Bones.Clear();
                    //Save bone settings
                    foreach (var settingsBone in mProtocol.Settings3D.bones)
                    {
                        Bone bone = new Bone();
                        bone.From = settingsBone.from;
                        bone.FromMarker = GetMarker(settingsBone.from);
                        bone.To = settingsBone.to;
                        bone.ToMarker = GetMarker(settingsBone.to);
                        bone.Color.r = (settingsBone.color) & 0xFF;
                        bone.Color.g = (settingsBone.color >> 8) & 0xFF;
                        bone.Color.b = (settingsBone.color >> 16) & 0xFF;
                        bone.Color /= 255;
                        bone.Color.a = 1F;
                        mBones.Add(bone);
                    }
                }
                return true;
            }
            return false;
        }

        // Start streaming the requested component types and load their settings.
        public bool ConnectStream(short udpPort, StreamRate streamRate, bool stream6d, bool stream3d)
        {
            List<ComponentType> streamedTypes = new List<ComponentType>();
            if (stream3d)
                streamedTypes.Add(ComponentType.Component3d);
            if (stream6d)
                streamedTypes.Add(ComponentType.Component6d);

            //Start streaming and get the settings
            if (mProtocol.StreamFrames(streamRate, -1, false, streamedTypes, udpPort))
            {
                if (stream3d)
                {
                    if (!Get3DSettings())
                    {
                        Debug.Log("Error retrieving settings");
                        return false;
                    }
                }
                if (stream6d)
                {
                    if (!Get6DOFSettings())
                    {
                        Debug.Log("Error retrieving settings");
                        return false;
                    }
                }

                // we register our function "process" as a callback for when protocol receives real time data packets
                // (eventDataCallback is also available to listen to events)
                mProtocol.RealTimeDataCallback += Process;
                mProtocol.EventDataCallback += Events;

                //Tell protocol to start listening to real time data
                mProtocol.ListenToStream();
                mStreamingStatus = true;
                return true;
            }
            else
            {
                Debug.Log("Error Creating Connection to server");
            }
            return false;
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Managed cleanup: tear down the protocol connection.
                Disconnect();
            }
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }
    }
}
using EpisodeInformer.Core.Browsing;
using EpisodeInformer.Properties;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Windows.Forms;
using TvdbLib.Data;

namespace EpisodeInformer
{
    // Designer-generated portion of the TheTVDB search-result dialog.
    // Event handlers (btnSelect_Click etc.) live in the other partial file.
    public partial class frmResult : Form
    {
        private IContainer components = (IContainer)null;
        private ListView livResult;
        private ColumnHeader ID;
        private ColumnHeader Title;
        private ColumnHeader OV;
        private Button btnSelect;
        private Button btnCancel;
        private PictureBox pibBanner;
        private ToolStripPanel BottomToolStripPanel;
        private ToolStripPanel TopToolStripPanel;
        private ToolStripPanel RightToolStripPanel;
        private ToolStripPanel LeftToolStripPanel;
        private ToolStripContentPanel ContentPanel;
        private StatusStrip statusStrip2;
        private ToolStripStatusLabel tslStatus;
        private ToolStripProgressBar tspProgress;
        private Panel panel1;
        private Panel panel2;
        private Label label3;
        private Label label2;
        private Label label1;
        private TextBox txtOV;
        private TextBox txtTitle;
        private TextBox txtSID;

        /// <summary>Dispose the designer's component container.</summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing && this.components != null)
                this.components.Dispose();
            base.Dispose(disposing);
        }

        // Designer-generated layout code; do not edit by hand.
        private void InitializeComponent()
        {
            this.BottomToolStripPanel = new ToolStripPanel();
            this.TopToolStripPanel = new ToolStripPanel();
            this.RightToolStripPanel = new ToolStripPanel();
            this.LeftToolStripPanel = new ToolStripPanel();
            this.ContentPanel = new ToolStripContentPanel();
            this.livResult = new ListView();
            this.ID = new ColumnHeader();
            this.Title = new ColumnHeader();
            this.OV = new ColumnHeader();
            this.btnSelect = new Button();
            this.btnCancel = new Button();
            this.statusStrip2 = new StatusStrip();
            this.tslStatus = new ToolStripStatusLabel();
            this.tspProgress = new ToolStripProgressBar();
            this.pibBanner = new PictureBox();
            this.panel1 = new Panel();
            this.panel2 = new Panel();
            this.label3 = new Label();
            this.label2 = new Label();
            this.label1 = new Label();
            this.txtOV = new
                          TextBox();
            this.txtTitle = new TextBox();
            this.txtSID = new TextBox();
            this.statusStrip2.SuspendLayout();
            ((ISupportInitialize)this.pibBanner).BeginInit();
            this.panel1.SuspendLayout();
            this.panel2.SuspendLayout();
            this.SuspendLayout();
            // tool strip panels (unused placeholders)
            this.BottomToolStripPanel.Location = new Point(0, 0);
            this.BottomToolStripPanel.Name = "BottomToolStripPanel";
            this.BottomToolStripPanel.Orientation = Orientation.Horizontal;
            this.BottomToolStripPanel.RowMargin = new Padding(3, 0, 0, 0);
            this.BottomToolStripPanel.Size = new Size(0, 0);
            this.TopToolStripPanel.Location = new Point(0, 0);
            this.TopToolStripPanel.Name = "TopToolStripPanel";
            this.TopToolStripPanel.Orientation = Orientation.Horizontal;
            this.TopToolStripPanel.RowMargin = new Padding(3, 0, 0, 0);
            this.TopToolStripPanel.Size = new Size(0, 0);
            this.RightToolStripPanel.Location = new Point(0, 0);
            this.RightToolStripPanel.Name = "RightToolStripPanel";
            this.RightToolStripPanel.Orientation = Orientation.Horizontal;
            this.RightToolStripPanel.RowMargin = new Padding(3, 0, 0, 0);
            this.RightToolStripPanel.Size = new Size(0, 0);
            this.LeftToolStripPanel.Location = new Point(0, 0);
            this.LeftToolStripPanel.Name = "LeftToolStripPanel";
            this.LeftToolStripPanel.Orientation = Orientation.Horizontal;
            this.LeftToolStripPanel.RowMargin = new Padding(3, 0, 0, 0);
            this.LeftToolStripPanel.Size = new Size(0, 0);
            this.ContentPanel.AutoScroll = true;
            this.ContentPanel.Size = new Size(486, 391);
            // livResult (search result list)
            this.livResult.Columns.AddRange(new ColumnHeader[3]
            {
                this.ID,
                this.Title,
                this.OV
            });
            this.livResult.Font = new Font("Segoe UI Symbol", 9f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.livResult.FullRowSelect = true;
            this.livResult.Location = new Point(12, 118);
            this.livResult.MultiSelect = false;
            this.livResult.Name = "livResult";
            this.livResult.Size = new Size(521, 229);
            this.livResult.TabIndex = 0;
            this.livResult.UseCompatibleStateImageBehavior = false;
            this.livResult.View = View.Details;
            this.livResult.ItemSelectionChanged += new ListViewItemSelectionChangedEventHandler(this.livResult_ItemSelectionChanged);
            this.ID.Text = "ID";
            this.ID.Width = 81;
            this.Title.Text = "Title";
            this.Title.Width = 160;
            this.OV.Text = "Overview";
            this.OV.Width = 275;
            // buttons
            this.btnSelect.Enabled = false;
            this.btnSelect.Font = new Font("Segoe UI Symbol", 9f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.btnSelect.Location = new Point(348, 3);
            this.btnSelect.Name = "btnSelect";
            this.btnSelect.Size = new Size(75, 23);
            this.btnSelect.TabIndex = 1;
            this.btnSelect.Text = "&Select";
            this.btnSelect.UseVisualStyleBackColor = true;
            this.btnSelect.Click += new EventHandler(this.btnSelect_Click);
            this.btnCancel.Font = new Font("Segoe UI Symbol", 9f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.btnCancel.Location = new Point(429, 3);
            this.btnCancel.Name = "btnCancel";
            this.btnCancel.Size = new Size(75, 23);
            this.btnCancel.TabIndex = 0;
            this.btnCancel.Text = "&Cancel";
            this.btnCancel.UseVisualStyleBackColor = true;
            this.btnCancel.Click += new EventHandler(this.btnCancel_Click);
            // status strip
            this.statusStrip2.Items.AddRange(new ToolStripItem[2]
            {
                (ToolStripItem) this.tslStatus,
                (ToolStripItem) this.tspProgress
            });
            this.statusStrip2.Location = new Point(0, 529);
            this.statusStrip2.Name = "statusStrip2";
            this.statusStrip2.Size = new Size(543, 22);
            this.statusStrip2.SizingGrip = false;
            this.statusStrip2.TabIndex = 3;
            this.statusStrip2.Text = "statusStrip2";
            this.tslStatus.Name = "tslStatus";
            this.tslStatus.Size = new Size(48, 17);
            this.tslStatus.Text = "Waiting";
            this.tspProgress.Name = "tspProgress";
            this.tspProgress.Size = new Size(100, 16);
            // banner picture
            this.pibBanner.BackColor = SystemColors.ControlText;
            this.pibBanner.BackgroundImage = (Image)Resources.dbanner;
            this.pibBanner.BackgroundImageLayout = ImageLayout.Stretch;
            this.pibBanner.BorderStyle = BorderStyle.FixedSingle;
            this.pibBanner.Location = new Point(13, 12);
            this.pibBanner.Name = "pibBanner";
            this.pibBanner.Size = new Size(520, 96);
            this.pibBanner.SizeMode = PictureBoxSizeMode.StretchImage;
            this.pibBanner.TabIndex = 2;
            this.pibBanner.TabStop = false;
            // panels
            this.panel1.BackgroundImage = (Image)Resources.frmresdwn;
            this.panel1.Controls.Add((Control)this.btnSelect);
            this.panel1.Controls.Add((Control)this.btnCancel);
            this.panel1.Location = new Point(14, 495);
            this.panel1.Name = "panel1";
            this.panel1.Size = new Size(517, 29);
            this.panel1.TabIndex = 4;
            this.panel2.BackgroundImage = (Image)Resources.frmres1;
            this.panel2.Controls.Add((Control)this.label3);
            this.panel2.Controls.Add((Control)this.label2);
            this.panel2.Controls.Add((Control)this.label1);
            this.panel2.Controls.Add((Control)this.txtOV);
            this.panel2.Controls.Add((Control)this.txtTitle);
            this.panel2.Controls.Add((Control)this.txtSID);
            this.panel2.Location = new Point(14, 353);
            this.panel2.Name = "panel2";
            this.panel2.Size = new Size(517, 139);
            this.panel2.TabIndex = 5;
            // labels
            this.label3.AutoSize = true;
            this.label3.BackColor = Color.Transparent;
            this.label3.Font = new Font("Segoe UI Symbol", 9f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.label3.Location = new Point(10, 38);
            this.label3.Name = "label3";
            this.label3.Size = new Size(56, 15);
            this.label3.TabIndex = 5;
            this.label3.Text = "Overview";
            this.label2.AutoSize = true;
            this.label2.BackColor = Color.Transparent;
            this.label2.Font = new Font("Segoe UI Symbol", 9f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.label2.Location = new Point(185, 12);
            this.label2.Name = "label2";
            this.label2.Size = new Size(30, 15);
            this.label2.TabIndex = 4;
            this.label2.Text = "Title";
            this.label1.AutoSize = true;
            this.label1.BackColor = Color.Transparent;
            this.label1.Font = new Font("Segoe UI Symbol", 9f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.label1.Location = new Point(48, 12);
            this.label1.Name = "label1";
            this.label1.Size = new Size(18, 15);
            this.label1.TabIndex = 3;
            this.label1.Text = "ID";
            // read-only detail text boxes
            this.txtOV.BackColor = SystemColors.Window;
            this.txtOV.Font = new Font("Segoe UI", 8.25f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.txtOV.Location = new Point(72, 36);
            this.txtOV.Multiline = true;
            this.txtOV.Name = "txtOV";
            this.txtOV.ReadOnly = true;
            this.txtOV.ScrollBars = ScrollBars.Vertical;
            this.txtOV.Size = new Size(432, 87);
            this.txtOV.TabIndex = 2;
            this.txtTitle.BackColor = SystemColors.Window;
            this.txtTitle.Font = new Font("Segoe UI", 8.25f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.txtTitle.Location = new Point(221, 10);
            this.txtTitle.Name = "txtTitle";
            this.txtTitle.ReadOnly = true;
            this.txtTitle.Size = new Size(283, 22);
            this.txtTitle.TabIndex = 1;
            this.txtSID.BackColor = SystemColors.Window;
            this.txtSID.Font = new Font("Segoe UI", 8.25f, FontStyle.Regular, GraphicsUnit.Point, (byte)0);
            this.txtSID.Location = new Point(72, 10);
            this.txtSID.Name = "txtSID";
            this.txtSID.ReadOnly = true;
            this.txtSID.Size = new Size(107, 22);
            this.txtSID.TabIndex = 0;
            // form
            this.AutoScaleDimensions = new SizeF(6f, 13f);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.BackgroundImage = (Image)Resources.back;
            this.ClientSize = new Size(543, 551);
            this.ControlBox = false;
            this.Controls.Add((Control)this.panel2);
            this.Controls.Add((Control)this.panel1);
            this.Controls.Add((Control)this.statusStrip2);
            this.Controls.Add((Control)this.pibBanner);
            this.Controls.Add((Control)this.livResult);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
            this.MaximizeBox = false;
            this.Name = "frmResult";
            this.ShowInTaskbar = false;
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "TvDB.com : Search Result";
            this.Load += new EventHandler(this.frmResult_Load);
            this.statusStrip2.ResumeLayout(false);
            this.statusStrip2.PerformLayout();
            ((ISupportInitialize)this.pibBanner).EndInit();
            this.panel1.ResumeLayout(false);
            this.panel2.ResumeLayout(false);
            this.panel2.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.CommandLine;
using System.CommandLine.Invocation;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;

using Internal.IL;
using Internal.TypeSystem;
using Internal.TypeSystem.Ecma;

using Internal.CommandLine;

using System.Linq;
using System.IO;

namespace ILCompiler
{
    /// <summary>
    /// Command-line driver for the ready-to-run (R2R) ahead-of-time compiler.
    /// Parses options, builds a type system context over the input assemblies,
    /// constructs a compilation group/roots, and runs the codegen pipeline.
    /// </summary>
    internal class Program
    {
        // System module used when the user does not pass --systemmodule.
        private const string DefaultSystemModule = "System.Private.CoreLib";

        private CommandLineOptions _commandLineOptions;

        // NOTE(review): these three are public mutable fields on an internal
        // class; presumably exposed for test hooks — confirm before tightening.
        public TargetOS _targetOS;
        public TargetArchitecture _targetArchitecture;
        public OptimizationMode _optimizationMode;

        // Keyed by simple name; values are full paths. Case-insensitive because
        // assembly simple names are matched without regard to case here.
        private Dictionary<string, string> _inputFilePaths = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        private Dictionary<string, string> _referenceFilePaths = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

        private Program(CommandLineOptions commandLineOptions)
        {
            _commandLineOptions = commandLineOptions;
        }

        /// <summary>
        /// Prints the banner (tool name + assembly version) followed by the
        /// supplied help text. NOTE(review): not referenced from the visible
        /// code — possibly invoked by the command-line framework; confirm.
        /// </summary>
        private void Help(string helpText)
        {
            Console.WriteLine();
            Console.Write("Microsoft (R) CoreCLR Native Image Generator");
            Console.Write(" ");
            Console.Write(typeof(Program).GetTypeInfo().Assembly.GetName().Version);
            Console.WriteLine();
            Console.WriteLine();
            Console.WriteLine(helpText);
        }

        /// <summary>
        /// Seeds the target OS/architecture from the machine the compiler is
        /// running on. Explicit --targetos/--targetarch options (handled later
        /// in <see cref="Run"/>) override these defaults.
        /// </summary>
        private void InitializeDefaultOptions()
        {
            // We could offer this as a command line option, but then we also need to
            // load a different RyuJIT, so this is a future nice to have...
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
                _targetOS = TargetOS.Windows;
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
                _targetOS = TargetOS.Linux;
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
                _targetOS = TargetOS.OSX;
            else
                throw new NotImplementedException();

            switch (RuntimeInformation.ProcessArchitecture)
            {
                case Architecture.X86:
                    _targetArchitecture = TargetArchitecture.X86;
                    break;
                case Architecture.X64:
                    _targetArchitecture = TargetArchitecture.X64;
                    break;
                case Architecture.Arm:
                    _targetArchitecture = TargetArchitecture.ARM;
                    break;
                case Architecture.Arm64:
                    _targetArchitecture = TargetArchitecture.ARM64;
                    break;
                default:
                    throw new NotImplementedException();
            }

            // Workaround for https://github.com/dotnet/corefx/issues/25267
            // If pointer size is 8, we're obviously not an X86 process...
            if (_targetArchitecture == TargetArchitecture.X86 && IntPtr.Size == 8)
                _targetArchitecture = TargetArchitecture.X64;
        }

        /// <summary>
        /// Applies already-parsed command-line options: optional debugger wait,
        /// option-consistency fixups, optimization-mode resolution
        /// (-Os wins over -Ot), and expansion of input/reference path patterns
        /// into <see cref="_inputFilePaths"/> / <see cref="_referenceFilePaths"/>.
        /// </summary>
        private void ProcessCommandLine()
        {
            // NOTE(review): 'name' is computed but never used in this method.
            AssemblyName name = typeof(Program).GetTypeInfo().Assembly.GetName();
            if (_commandLineOptions.WaitForDebugger)
            {
                Console.WriteLine("Waiting for debugger to attach. Press ENTER to continue");
                Console.ReadLine();
            }

            // --compilebubblegenerics only makes sense inside a large version
            // bubble; silently downgrade (with a warning) otherwise.
            if (_commandLineOptions.CompileBubbleGenerics)
            {
                if (!_commandLineOptions.InputBubble)
                {
                    Console.WriteLine("Warning: ignoring --compilebubblegenerics because --inputbubble was not specified");
                    _commandLineOptions.CompileBubbleGenerics = false;
                }
            }

            // Optimization mode precedence: -Os > -Ot > -O > none.
            _optimizationMode = OptimizationMode.None;
            if (_commandLineOptions.OptimizeSpace)
            {
                if (_commandLineOptions.OptimizeTime)
                    Console.WriteLine("Warning: overriding -Ot with -Os");
                _optimizationMode = OptimizationMode.PreferSize;
            }
            else if (_commandLineOptions.OptimizeTime)
                _optimizationMode = OptimizationMode.PreferSpeed;
            else if (_commandLineOptions.Optimize)
                _optimizationMode = OptimizationMode.Blended;

            // Expand wildcards; inputs must exist (strict=true), references may not.
            foreach (var input in _commandLineOptions.InputFilePaths ?? Enumerable.Empty<FileInfo>())
                Helpers.AppendExpandedPaths(_inputFilePaths, input.FullName, true);

            foreach (var reference in _commandLineOptions.Reference ?? Enumerable.Empty<string>())
                Helpers.AppendExpandedPaths(_referenceFilePaths, reference, false);
        }

        /// <summary>
        /// Main compilation driver. Validates options, resolves the target
        /// triple, loads the input/reference modules, builds the compilation
        /// group and roots (single-method or whole-assembly), configures the
        /// R2R codegen builder, and runs the compilation.
        /// </summary>
        /// <returns>Process exit code (0 on success).</returns>
        /// <exception cref="CommandLineException">On invalid/missing options.</exception>
        private int Run()
        {
            InitializeDefaultOptions();

            ProcessCommandLine();

            if (_commandLineOptions.OutputFilePath == null)
                throw new CommandLineException("Output filename must be specified (/out <file>)");

            //
            // Set target Architecture and OS
            //
            if (_commandLineOptions.TargetArch != null)
            {
                if (_commandLineOptions.TargetArch.Equals("x86", StringComparison.OrdinalIgnoreCase))
                    _targetArchitecture = TargetArchitecture.X86;
                else if (_commandLineOptions.TargetArch.Equals("x64", StringComparison.OrdinalIgnoreCase))
                    _targetArchitecture = TargetArchitecture.X64;
                else if (_commandLineOptions.TargetArch.Equals("arm", StringComparison.OrdinalIgnoreCase))
                    _targetArchitecture = TargetArchitecture.ARM;
                else if (_commandLineOptions.TargetArch.Equals("armel", StringComparison.OrdinalIgnoreCase))
                    // armel maps onto the same architecture enum as arm.
                    _targetArchitecture = TargetArchitecture.ARM;
                else if (_commandLineOptions.TargetArch.Equals("arm64", StringComparison.OrdinalIgnoreCase))
                    _targetArchitecture = TargetArchitecture.ARM64;
                else
                    throw new CommandLineException("Target architecture is not supported");
            }
            if (_commandLineOptions.TargetOS != null)
            {
                if (_commandLineOptions.TargetOS.Equals("windows", StringComparison.OrdinalIgnoreCase))
                    _targetOS = TargetOS.Windows;
                else if (_commandLineOptions.TargetOS.Equals("linux", StringComparison.OrdinalIgnoreCase))
                    _targetOS = TargetOS.Linux;
                else if (_commandLineOptions.TargetOS.Equals("osx", StringComparison.OrdinalIgnoreCase))
                    _targetOS = TargetOS.OSX;
                else
                    throw new CommandLineException("Target OS is not supported");
            }

            using (PerfEventSource.StartStopEvents.CompilationEvents())
            {
                ICompilation compilation;
                using (PerfEventSource.StartStopEvents.LoadingEvents())
                {
                    //
                    // Initialize type system context
                    //

                    SharedGenericsMode genericsMode = SharedGenericsMode.CanonicalReferenceTypes;

                    var targetDetails = new TargetDetails(_targetArchitecture, _targetOS, TargetAbi.CoreRT, SimdVectorLength.None);

                    CompilerTypeSystemContext typeSystemContext = new ReadyToRunCompilerContext(targetDetails, genericsMode);

                    //
                    // TODO: To support our pre-compiled test tree, allow input files that aren't managed assemblies since
                    // some tests contain a mixture of both managed and native binaries.
                    //
                    // See: https://github.com/dotnet/corert/issues/2785
                    //
                    // When we undo this this hack, replace this foreach with
                    //  typeSystemContext.InputFilePaths = _inputFilePaths;
                    //
                    Dictionary<string, string> inputFilePaths = new Dictionary<string, string>();
                    foreach (var inputFile in _inputFilePaths)
                    {
                        try
                        {
                            // Probe each input: only files that load as managed
                            // modules survive into the real input set.
                            var module = typeSystemContext.GetModuleFromPath(inputFile.Value);
                            inputFilePaths.Add(inputFile.Key, inputFile.Value);
                        }
                        catch (TypeSystemException.BadImageFormatException)
                        {
                            // Keep calm and carry on.
                        }
                    }

                    typeSystemContext.InputFilePaths = inputFilePaths;
                    typeSystemContext.ReferenceFilePaths = _referenceFilePaths;

                    string systemModuleName = _commandLineOptions.SystemModule ?? DefaultSystemModule;
                    typeSystemContext.SetSystemModule(typeSystemContext.GetModuleForSimpleName(systemModuleName));

                    if (typeSystemContext.InputFilePaths.Count == 0)
                        throw new CommandLineException("No input files specified");

                    //
                    // Initialize compilation group and compilation roots
                    //

                    // Single method mode?
                    MethodDesc singleMethod = CheckAndParseSingleMethodModeArguments(typeSystemContext);

                    var logger = new Logger(Console.Out, _commandLineOptions.Verbose);

                    // Everything (inputs + references) that profile data may refer to.
                    List<ModuleDesc> referenceableModules = new List<ModuleDesc>();
                    foreach (var inputFile in inputFilePaths)
                    {
                        try
                        {
                            referenceableModules.Add(typeSystemContext.GetModuleFromPath(inputFile.Value));
                        }
                        catch { } // Ignore non-managed pe files
                    }

                    foreach (var referenceFile in _referenceFilePaths.Values)
                    {
                        try
                        {
                            referenceableModules.Add(typeSystemContext.GetModuleFromPath(referenceFile));
                        }
                        catch { } // Ignore non-managed pe files
                    }

                    ProfileDataManager profileDataManager = new ProfileDataManager(logger, referenceableModules);

                    CompilationModuleGroup compilationGroup;
                    List<ICompilationRootProvider> compilationRoots = new List<ICompilationRootProvider>();
                    if (singleMethod != null)
                    {
                        // Compiling just a single method
                        compilationGroup = new SingleMethodCompilationModuleGroup(singleMethod);
                        compilationRoots.Add(new SingleMethodRootProvider(singleMethod));
                    }
                    else
                    {
                        // Either single file, or multifile library, or multifile consumption.
                        // Reject more than one EXE among the inputs.
                        EcmaModule entrypointModule = null;
                        foreach (var inputFile in typeSystemContext.InputFilePaths)
                        {
                            EcmaModule module = typeSystemContext.GetModuleFromPath(inputFile.Value);

                            if (module.PEReader.PEHeaders.IsExe)
                            {
                                if (entrypointModule != null)
                                    throw new Exception("Multiple EXE modules");
                                entrypointModule = module;
                            }
                        }

                        List<EcmaModule> inputModules = new List<EcmaModule>();

                        foreach (var inputFile in typeSystemContext.InputFilePaths)
                        {
                            EcmaModule module = typeSystemContext.GetModuleFromPath(inputFile.Value);
                            compilationRoots.Add(new ReadyToRunRootProvider(module, profileDataManager));
                            inputModules.Add(module);

                            // Outside --inputbubble mode only the first input
                            // module is rooted/compiled.
                            if (!_commandLineOptions.InputBubble)
                            {
                                break;
                            }
                        }

                        List<ModuleDesc> versionBubbleModules = new List<ModuleDesc>();
                        if (_commandLineOptions.InputBubble)
                        {
                            // In large version bubble mode add reference paths to the compilation group
                            foreach (string referenceFile in _referenceFilePaths.Values)
                            {
                                try
                                {
                                    // Currently SimpleTest.targets has no easy way to filter out non-managed assemblies
                                    // from the reference list.
                                    EcmaModule module = typeSystemContext.GetModuleFromPath(referenceFile);
                                    versionBubbleModules.Add(module);
                                }
                                catch (TypeSystemException.BadImageFormatException ex)
                                {
                                    Console.WriteLine("Warning: cannot open reference assembly '{0}': {1}", referenceFile, ex.Message);
                                }
                            }
                        }

                        compilationGroup = new ReadyToRunSingleAssemblyCompilationModuleGroup(
                            typeSystemContext, inputModules, versionBubbleModules, _commandLineOptions.CompileBubbleGenerics,
                            // Partial mode: only profile-guided methods get compiled.
                            _commandLineOptions.Partial ? profileDataManager : null);
                    }

                    //
                    // Compile
                    //

                    // The builder takes the first input path only.
                    string inputFilePath = "";
                    foreach (var input in typeSystemContext.InputFilePaths)
                    {
                        inputFilePath = input.Value;
                        break;
                    }
                    CompilationBuilder builder = new ReadyToRunCodegenCompilationBuilder(typeSystemContext, compilationGroup, inputFilePath, ibcTuning: _commandLineOptions.Tuning, resilient: _commandLineOptions.Resilient);

                    string compilationUnitPrefix = "";
                    builder.UseCompilationUnitPrefix(compilationUnitPrefix);

                    ILProvider ilProvider = new ReadyToRunILProvider();

                    // Dependency tracking is only paid for when a DGML log was requested.
                    DependencyTrackingLevel trackingLevel = _commandLineOptions.DgmlLogFileName == null ?
                        DependencyTrackingLevel.None : (_commandLineOptions.GenerateFullDgmlLog ? DependencyTrackingLevel.All : DependencyTrackingLevel.First);

                    builder
                        .UseILProvider(ilProvider)
                        .UseJitPath(_commandLineOptions.JitPath)
                        .UseBackendOptions(_commandLineOptions.CodegenOptions)
                        .UseLogger(logger)
                        .UseDependencyTracking(trackingLevel)
                        .UseCompilationRoots(compilationRoots)
                        .UseOptimizationMode(_optimizationMode);

                    compilation = builder.ToCompilation();

                }
                compilation.Compile(_commandLineOptions.OutputFilePath.FullName);

                if (_commandLineOptions.DgmlLogFileName != null)
                    compilation.WriteDependencyLog(_commandLineOptions.DgmlLogFileName.FullName);
            }

            return 0;
        }

        /// <summary>
        /// Resolves a custom-attribute-style type name against the system
        /// module (falling back through canon types / the standard resolver).
        /// </summary>
        /// <exception cref="CommandLineException">If the type cannot be found.</exception>
        private TypeDesc FindType(CompilerTypeSystemContext context, string typeName)
        {
            ModuleDesc systemModule = context.SystemModule;
            TypeDesc foundType = systemModule.GetTypeByCustomAttributeTypeName(typeName, false, (typeDefName, module, throwIfNotFound) =>
            {
                // Try the special canonical types first, then the normal resolver.
                return (MetadataType)context.GetCanonType(typeDefName)
                    ?? CustomAttributeTypeNameParser.ResolveCustomAttributeTypeDefinitionName(typeDefName, module, throwIfNotFound);
            });
            if (foundType == null)
                throw new CommandLineException($"Type '{typeName}' not found");
            return foundType;
        }

        /// <summary>
        /// Validates and resolves the --singlemethod* options. Returns null
        /// when single-method mode is not requested; otherwise returns the
        /// (possibly instantiated) method to compile.
        /// </summary>
        /// <exception cref="CommandLineException">
        /// On partially-specified options, an unknown type/method, or a
        /// generic-argument count mismatch.
        /// </exception>
        private MethodDesc CheckAndParseSingleMethodModeArguments(CompilerTypeSystemContext context)
        {
            if (_commandLineOptions.SingleMethodName == null && _commandLineOptions.SingleMethodTypeName == null && _commandLineOptions.SingleMethodGenericArgs == null)
                return null;

            if (_commandLineOptions.SingleMethodName == null || _commandLineOptions.SingleMethodTypeName == null)
                throw new CommandLineException("Both method name and type name are required parameters for single method mode");

            TypeDesc owningType = FindType(context, _commandLineOptions.SingleMethodTypeName);

            // TODO: allow specifying signature to distinguish overloads
            MethodDesc method = owningType.GetMethod(_commandLineOptions.SingleMethodName, null);
            if (method == null)
                throw new CommandLineException($"Method '{_commandLineOptions.SingleMethodName}' not found in '{_commandLineOptions.SingleMethodTypeName}'");

            // The supplied generic arguments must exactly match the method's arity.
            if (method.HasInstantiation != (_commandLineOptions.SingleMethodGenericArgs != null) ||
                (method.HasInstantiation && (method.Instantiation.Length != _commandLineOptions.SingleMethodGenericArgs.Length)))
            {
                throw new CommandLineException(
                    $"Expected {method.Instantiation.Length} generic arguments for method '{_commandLineOptions.SingleMethodName}' on type '{_commandLineOptions.SingleMethodTypeName}'");
            }

            if (method.HasInstantiation)
            {
                List<TypeDesc> genericArguments = new List<TypeDesc>();
                foreach (var argString in _commandLineOptions.SingleMethodGenericArgs)
                    genericArguments.Add(FindType(context, argString));
                method = method.MakeInstantiatedMethod(genericArguments.ToArray());
            }

            return method;
        }

        /// <summary>
        /// Prints the --singlemethod* arguments needed to reproduce a codegen
        /// failure. Always returns false so it can be used in an exception
        /// filter without ever handling the exception.
        /// </summary>
        private static bool DumpReproArguments(CodeGenerationFailedException ex)
        {
            Console.WriteLine("To repro, add following arguments to the command line:");

            MethodDesc failingMethod = ex.Method;

            var formatter = new CustomAttributeTypeNameFormatter((IAssemblyDesc)failingMethod.Context.SystemModule);

            Console.Write($"--singlemethodtypename \"{formatter.FormatName(failingMethod.OwningType, true)}\"");
            Console.Write($" --singlemethodname {failingMethod.Name}");

            for (int i = 0; i < failingMethod.Instantiation.Length; i++)
                Console.Write($" --singlemethodgenericarg \"{formatter.FormatName(failingMethod.Instantiation[i], true)}\"");

            return false;
        }

        /// <summary>
        /// Entry point: binds the parsed options to <see cref="InnerMain"/>
        /// via System.CommandLine and invokes the command.
        /// </summary>
        public static async Task<int> Main(string[] args)
        {
            var command = CommandLineOptions.RootCommand();
            command.Handler = CommandHandler.Create<CommandLineOptions>((CommandLineOptions options) => InnerMain(options));
            return await command.InvokeAsync(args);
        }

        /// <summary>
        /// Runs the compiler. In DEBUG builds, codegen failures dump repro
        /// arguments and then propagate (the filter never handles); in release
        /// builds all exceptions are reported and mapped to exit code 1.
        /// </summary>
        private static int InnerMain(CommandLineOptions buildOptions)
        {
#if DEBUG
            try
            {
                return new Program(buildOptions).Run();
            }
            catch (CodeGenerationFailedException ex) when (DumpReproArguments(ex))
            {
                throw new NotSupportedException(); // Unreachable
            }
#else
            try
            {
                return new Program(buildOptions).Run();
            }
            catch (Exception e)
            {
                Console.Error.WriteLine("Error: " + e.Message);
                Console.Error.WriteLine(e.ToString());
                return 1;
            }
#endif
        }
    }
}
//KKM4 added this comment for testing
using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using Epi.Collections;
using Epi.DataSets;
using Epi.Fields;
using VariableCollection = Epi.Collections.NamedObjectCollection<Epi.IVariable>;
using VariableCollectionStack = System.Collections.Generic.Stack<Epi.Collections.NamedObjectCollection<Epi.IVariable>>;

namespace Epi
{
    /// <summary>
    /// Holds the variables visible to a script/analysis session, layered by
    /// scope: process-wide system and permanent variables (static, shared by
    /// all regions, guarded by <see cref="syncLock"/>), per-region global
    /// variables, and a stack of local variable scopes.
    /// </summary>
    public class MemoryRegion : IMemoryRegion
    {
        // Marker subclass: a stack frame of this type denotes a nested block
        // scope that should be combined with the frames beneath it, rather
        // than a standalone local region (see GetVariablesInScope()).
        private class LocalBlockVariableCollection : VariableCollection
        {
        }

        /// <summary>
        /// Guards the static (system/permanent) variable collections and
        /// one-time static initialization.
        /// </summary>
        protected static object syncLock = new object();
        // One-time static init flag (set inside the constructor's lock).
        private static bool staticInitalized = false;
        // NOTE(review): only populated by LoadSystemVariables(), whose call is
        // commented out in the constructor — may be null at runtime. Readers
        // (CombineCollections, GetVariable) tolerate null.
        private static VariableCollection systemVariables;
        private static VariableCollection permanentVariables;
        /// <summary>
        /// Variables with global scope for this region.
        /// </summary>
        protected VariableCollection globalVariables;
        /// <summary>
        /// Stack of local scopes; the root frame is pushed in the constructor
        /// and can never be popped.
        /// </summary>
        protected Stack<VariableCollection> localVariableStack;
        /// <summary>
        /// Handler intended for configuration-change notifications.
        /// NOTE(review): created but never visibly subscribed to an event here.
        /// </summary>
        protected EventHandler configurationUpdated;

        /// <summary>
        /// Creates a region: performs one-time static initialization (loading
        /// permanent variables from configuration), then sets up the global
        /// collection and the root local scope.
        /// </summary>
        public MemoryRegion()
        {
            lock (syncLock)
            {
                if (!staticInitalized)
                {
                    LoadPermanentVariables();
                    //LoadSystemVariables();
                    configurationUpdated = new EventHandler(MemoryRegion.ConfigurationUpdated);
                    staticInitalized = true;
                }
            }

            localVariableStack = new Stack<VariableCollection>();
            globalVariables = new VariableCollection();
            PushLocalRegion();
        }

        // Removes the single matching row from the persisted configuration.
        // Throws if the variable is absent or duplicated.
        // NOTE(review): variableName is spliced into a DataTable row filter;
        // a name containing a quote would break the expression — confirm names
        // are validated upstream.
        private static void DeletePermanentVariable(string variableName)
        {
            Configuration config = Configuration.GetNewInstance();
            DataRow[] result = config.PermanentVariables.Select("Name='" + variableName + "'");
            if (result.Length != 1)
            {
                throw new ConfigurationException(ConfigurationException.ConfigurationIssue.ContentsInvalid);
            }
            result[0].Delete();
            Configuration.Save(config);
        }

        /// <summary>
        /// Inserts or updates the persisted row for a permanent variable and
        /// saves the configuration. Throws on duplicate rows.
        /// </summary>
        public static void UpdatePermanentVariable(IVariable variable)
        {
            Configuration config = Configuration.GetNewInstance();
            DataRow[] result = config.PermanentVariables.Select("Name='" + variable.Name + "'");
            if (result.Length < 1)
            {
                // Not persisted yet: add a new row.
                config.PermanentVariables.AddPermanentVariableRow(
                    variable.Name,
                    variable.Expression ?? "",
                    (int)variable.DataType,
                    config.ParentRowPermanentVariables);
            }
            else if (result.Length == 1)
            {
                // Existing row: overwrite value and type.
                ((DataSets.Config.PermanentVariableRow)result[0]).DataValue = variable.Expression ?? "";
                ((DataSets.Config.PermanentVariableRow)result[0]).DataType = (int)variable.DataType;
            }
            else
            {
                throw new ConfigurationException(ConfigurationException.ConfigurationIssue.ContentsInvalid, "Duplicate permanent variable rows encountered.");
            }
            Configuration.Save(config);
        }

        /// <summary>
        /// Returns the in-scope variables whose type matches any flag in the
        /// given scope combination.
        /// </summary>
        /// <param name="scopeCombination">The logically ORed scopes to be included</param>
        /// <returns>VariableCollection</returns>
        public VariableCollection GetVariablesInScope(VariableType scopeCombination)
        {
            VariableCollection shortList = new VariableCollection();
            VariableCollection masterList = GetVariablesInScope();
            foreach (IVariable variable in masterList)
            {
                // Bitwise test: include if the variable's type overlaps the mask.
                if ((variable.VarType & scopeCombination) > 0)
                {
                    shortList.Add(variable);
                }
            }
            return shortList;
        }

        /// <summary>
        /// Returns a list of all standard variables.
        /// </summary>
        /// <returns></returns>
        public List<ISetBasedVariable> GetStandardVariables()
        {
            List<ISetBasedVariable> vars = new List<ISetBasedVariable>();
            VariableCollection varsInScope = GetVariablesInScope(VariableType.Standard);
            foreach (IVariable var in varsInScope)
            {
                // NOTE(review): 'as' may yield null if the variable is not set-based.
                vars.Add(var as ISetBasedVariable);
            }
            return vars;
        }

        /// <summary>
        /// Returns a list of all data source variables.
        /// </summary>
        /// <returns></returns>
        public List<ISetBasedVariable> GetDataSourceVariables()
        {
            List<ISetBasedVariable> vars = new List<ISetBasedVariable>();
            VariableCollection varsInScope = GetVariablesInScope(VariableType.DataSource);
            foreach (IVariable var in varsInScope)
            {
                vars.Add(var as ISetBasedVariable);
            }
            return vars;
        }

        /// <summary>
        /// Returns a list of all data source redefined variables.
        /// </summary>
        /// <returns></returns>
        public List<ISetBasedVariable> GetDataSourceRedefinedVariables()
        {
            List<ISetBasedVariable> vars = new List<ISetBasedVariable>();
            VariableCollection varsInScope = GetVariablesInScope(VariableType.DataSourceRedefined);
            foreach (IVariable var in varsInScope)
            {
                vars.Add(var as ISetBasedVariable);
            }
            return vars;
        }

        /// <summary>
        /// Returns a list of all global variables.
        /// </summary>
        /// <returns></returns>
        public List<IScalarVariable> GetGlobalVariables()
        {
            List<IScalarVariable> vars = new List<IScalarVariable>();
            VariableCollection varsInScope = GetVariablesInScope(VariableType.Global);
            foreach (IVariable var in varsInScope)
            {
                vars.Add(var as IScalarVariable);
            }
            return vars;
        }

        /// <summary>
        /// Returns a list of all permanent variables.
        /// </summary>
        /// <returns></returns>
        public List<IScalarVariable> GetPermanentVariables()
        {
            List<IScalarVariable> vars = new List<IScalarVariable>();
            VariableCollection varsInScope = GetVariablesInScope(VariableType.Permanent);
            foreach (IVariable var in varsInScope)
            {
                vars.Add(var as IScalarVariable);
            }
            return vars;
        }

        /// <summary>
        /// GetVariablesInScope()
        /// </summary>
        /// <remarks>
        /// Returns all variables, regardless of scope or type. If the top of
        /// the local stack is a block scope, all contiguous block frames plus
        /// the first enclosing non-block frame are merged; otherwise only the
        /// top frame contributes locals.
        /// </remarks>
        /// <returns>VariableCollection</returns>
        public VariableCollection GetVariablesInScope()
        {
            VariableCollection result;
            lock (syncLock)
            {
                VariableCollection localVariables = new VariableCollection();
                if (localVariableStack.Peek() is LocalBlockVariableCollection)
                {
                    // Walk from the top of the stack down through block frames,
                    // stopping after the first real (non-block) local region.
                    VariableCollection[] locals = new VariableCollection[localVariableStack.Count];
                    localVariableStack.CopyTo(locals, 0);
                    for (int i = 0; i < locals.Length; i++)
                    {
                        localVariables = CombineCollections(localVariables, locals[i]);
                        if (!(locals[i] is LocalBlockVariableCollection))
                        {
                            break;
                        }
                    }
                }
                else
                {
                    localVariables = localVariableStack.Peek();
                }
                // systemVariables may be null (see field note); CombineCollections skips nulls.
                result = CombineCollections(systemVariables, permanentVariables, globalVariables, localVariables);
            }
            return result;
        }

        /// <summary>
        /// dcs0 8/9/2007
        /// Gets all the variables as a datatable
        /// </summary>
        /// <returns>Newly constructed DataTable with appropriate columns</returns>
        private DataTable CreateAllVariablesTable()
        {
            DataTable dt = new DataTable();
            dt.Columns.Add(ColumnNames.NAME, typeof(string));
            dt.Columns.Add(ColumnNames.VARIABLE_SCOPE, typeof(Int16));
            dt.Columns.Add(ColumnNames.DATA_TABLE_NAME, typeof(string));
            dt.Columns.Add(ColumnNames.FIELD_TYPE_ID, typeof(Int16));
            dt.Columns.Add(ColumnNames.DATA_TYPE, typeof(Int16));
            dt.Columns.Add(ColumnNames.VARIABLE_VALUE, typeof(string));
            dt.Columns.Add(ColumnNames.ADDITIONAL_INFO, typeof(string));
            dt.Columns.Add(ColumnNames.PROMPT, typeof(string));
            return dt;
        }

        // Appends one row describing 'var' to 'table' and returns the new row.
        private DataRow AddRow(DataTable table, IVariable var)
        {
            DataRow row = table.NewRow();
            row[ColumnNames.NAME] = var.Name;
            row[ColumnNames.DATA_TYPE] = var.DataType;
            row[ColumnNames.VARIABLE_SCOPE] = var.VarType;
            row[ColumnNames.VARIABLE_VALUE] = var.Expression;
            row[ColumnNames.ADDITIONAL_INFO] = string.Empty;
            row[ColumnNames.PROMPT] = string.Empty;

            // Data source variables have an extra piece of information: Table name
            if (var is IDataSourceVariable)
            {
                IDataSourceVariable dataSourceVar = var as IDataSourceVariable;
                row[ColumnNames.DATA_TABLE_NAME] = dataSourceVar.TableName;
            }

            // Data field variables know the field type.
            if (var is IDataField)
            {
                IDataField dataField = var as IDataField;
                row[ColumnNames.FIELD_TYPE_ID] = dataField.FieldType;
            }

            table.Rows.Add(row);
            return row;
        }

        /// <summary>
        /// Builds a DataTable describing all in-scope variables that match the
        /// given scope mask.
        /// </summary>
        /// <param name="scopeCombination"></param>
        /// <returns>DataTable</returns>
        public DataTable GetVariablesAsDataTable(VariableType scopeCombination)
        {
            //TODO: HERE
            VariableCollection allVariables = GetVariablesInScope(scopeCombination);
            DataTable variablesDataTable = CreateAllVariablesTable();
            DataRow newRow = null;
            foreach (IVariable variable in allVariables)
            {
                if (variable.IsVarType(scopeCombination))
                {
                    newRow = AddRow(variablesDataTable, variable);
                    if (variable.IsVarType(VariableType.DataSource))
                    {
                        string tableName = ((IDataSourceVariable)variable).TableName;
                        tableName = (tableName == null) ? string.Empty : tableName;
                        newRow[ColumnNames.DATA_TABLE_NAME] = tableName;
                    }
                    else
                    {
                        newRow[ColumnNames.DATA_TABLE_NAME] = SharedStrings.DEFINED_VARIABLE;
                    }
                }
            }
            return variablesDataTable;
        }

        /// <summary>
        /// Returns true if a variable with the given name is visible in the
        /// current scope.
        /// </summary>
        /// <param name="varName"></param>
        /// <returns>bool</returns>
        public bool IsVariableInScope(string varName)
        {
            return GetVariablesInScope().Contains(varName);
        }

        /// <summary>
        /// Removes a variable, searching local, then global, then permanent
        /// scope. Permanent removal also deletes the persisted row.
        /// NOTE(review): silently does nothing for unknown or system variables
        /// (the original throws are commented out) — confirm this is intended.
        /// </summary>
        /// <param name="varName"></param>
        public void UndefineVariable(string varName)
        {
            if (localVariableStack.Peek().Contains(varName))
            {
                localVariableStack.Peek().Remove(varName);
            }
            else if (globalVariables.Contains(varName))
            {
                globalVariables.Remove(varName);
            }
            else
            {
                lock (syncLock)
                {
                    if (permanentVariables.Contains(varName))
                    {
                        DeletePermanentVariable(varName);
                        permanentVariables.Remove(varName);
                    }
                    //else if (systemVariables.Contains(varName))
                    //{
                    //    throw new GeneralException(string.Format("System variable '{0}' cannot be undefined.", varName));
                    //}
                    //else
                    //{
                    //    throw new GeneralException(string.Format("Variable '{0}' is not defined.", varName));
                    //}
                }
            }
        }

        /// <summary>
        /// Defines a variable in the collection matching its VarType.
        /// NOTE(review): an already-in-scope name is silently ignored (the
        /// throw is commented out) — confirm this is intended.
        /// </summary>
        /// <param name="variable"></param>
        public void DefineVariable(IVariable variable)
        {
            if (IsVariableInScope(variable.Name))
            {
                //throw new GeneralException(string.Format("Variable '{0}' is already defined.", variable.Name));
            }
            else
            {
                if (variable.VarType == VariableType.Permanent)
                {
                    DefinePermanentVariable(variable);
                    // Re-read from configuration so the in-memory set matches disk.
                    LoadPermanentVariables();
                }
                else if (variable.VarType == VariableType.System)
                {
                    DefineSystemVariable(variable);
                }
                else if (variable.VarType == VariableType.Global)
                {
                    globalVariables.Add(variable);
                }
                else // Standard, DataSource or DataSourceRedefined variables
                {
                    VariableCollection localVariables = localVariableStack.Peek();
                    localVariables.Add(variable);
                }
            }
        }

        // Persists the variable and caches it in the static permanent set.
        private static void DefinePermanentVariable(IVariable variable)
        {
            lock (syncLock)
            {
                UpdatePermanentVariable(variable);
                permanentVariables.Add(variable);
            }
        }

        // Caches a system variable in the static system set.
        // NOTE(review): throws NullReferenceException if LoadSystemVariables()
        // was never run, since systemVariables is then null — confirm.
        private static void DefineSystemVariable(IVariable variable)
        {
            lock (syncLock)
            {
                systemVariables.Add(variable);
            }
        }

        /// <summary>
        /// Removes all variables of the given types.
        /// </summary>
        /// <param name="varTypes"></param>
        public void RemoveVariablesInScope(VariableType varTypes)
        {
            VariableCollection vars = GetVariablesInScope(varTypes);
            foreach (IVariable var in vars)
            {
                UndefineVariable(var.Name);
            }
        }

        /// <summary>
        /// Try to get a given variable
        /// </summary>
        /// <param name="varName">The name of the variable</param>
        /// <param name="var">The variable</param>
        /// <returns>bool</returns>
        public bool TryGetVariable(string varName, out IVariable var)
        {
            var = null;
            try
            {
                // Resolution order: top local frame, then global, then permanent.
                // (System-variable lookup is commented out here, unlike GetVariable.)
                if (localVariableStack.Peek().Contains(varName))
                {
                    var = localVariableStack.Peek()[varName];
                }
                else if (globalVariables.Contains(varName))
                {
                    var = globalVariables[varName];
                }
                else
                {
                    lock (syncLock)
                    {
                        if (permanentVariables.Contains(varName))
                        {
                            var = permanentVariables[varName];
                        }
                        else /*if (systemVariables.Contains(varName))
                        {
                            var = systemVariables[varName];
                        }
                        else*/
                        {
                            return false;
                        }
                    }
                }
            }
            catch
            {
                return false;
            }
            return true;
        }

        /// <summary>
        /// Get a given variable
        /// </summary>
        /// <param name="varName">The name of the variable</param>
        /// <returns>IVariable, or null if not found</returns>
        public IVariable GetVariable(string varName)
        {
            IVariable var = null;
            if (localVariableStack.Peek().Contains(varName))
            {
                return localVariableStack.Peek()[varName];
            }
            else if (globalVariables.Contains(varName))
            {
                var = globalVariables[varName];
            }
            else
            {
                lock (syncLock)
                {
                    if (permanentVariables.Contains(varName))
                    {
                        var = permanentVariables[varName];
                    }
                    else if (systemVariables != null && systemVariables.Contains(varName))
                    {
                        var = systemVariables[varName];
                    }
                    else
                    {
                        // throw new GeneralException(string.Format("Variable '{0}' is not defined.", varName));
                    }
                }
            }
            return var;
        }

        /// <summary>
        /// CombineCollections
        /// </summary>
        /// <remarks>
        /// Null parameters are allowed and skipped.
        /// </remarks>
        /// <param name="collections">Collections</param>
        /// <returns>VariableCollection</returns>
        private static VariableCollection CombineCollections(params VariableCollection[] collections)
        {
            VariableCollection combinedCollection = new VariableCollection();
            foreach (VariableCollection col in collections)
            {
                if (col != null)
                {
                    combinedCollection.Add(col);
                }
            }
            return combinedCollection;
        }

        /// <summary>
        /// Push a new local region onto the stack.
        /// </summary>
        public void PushLocalRegion()
        {
            localVariableStack.Push(new VariableCollection());
        }

        /// <summary>
        /// Push a new local block region onto the stack.
        /// </summary>
        public void PushLocalBlockRegion()
        {
            localVariableStack.Push(new LocalBlockVariableCollection());
        }

        /// <summary>
        /// PopRegion()
        /// </summary>
        /// <remarks>
        /// Pop the region off the stack; the root frame cannot be popped.
        /// </remarks>
        public void PopRegion()
        {
            if (localVariableStack.Count > 1)
            {
                localVariableStack.Pop();
            }
            else
            {
                throw new GeneralException("Root local variable stack cannot be popped off the local memory stack.");
            }
        }

        // Re-reads permanent variables when configuration changes.
        private static void ConfigurationUpdated(object sender, EventArgs e)
        {
            LoadPermanentVariables();
        }

        // Rebuilds the static permanent set from the persisted configuration.
        // NOTE(review): this goes through DefinePermanentVariable, which calls
        // UpdatePermanentVariable and therefore re-saves the configuration once
        // per loaded row — looks unintended; confirm.
        private static void LoadPermanentVariables()
        {
            lock (syncLock)
            {
                MemoryRegion.permanentVariables = new VariableCollection();
                Configuration config = Configuration.GetNewInstance();
                foreach (Config.PermanentVariableRow row in config.PermanentVariables)
                {
                    DefinePermanentVariable(new PermanentVariable(row));
                }
            }
        }

        // Populates the static system set. NOTE(review): currently never
        // called (the constructor's call is commented out).
        private static void LoadSystemVariables()
        {
            lock (syncLock)
            {
                MemoryRegion.systemVariables = new VariableCollection();
                foreach (IVariable variable in GetSystemVariables())
                {
                    DefineSystemVariable(variable);
                }
            }
        }

        // Builds one instance per member of the SystemVariables enum.
        private static VariableCollection GetSystemVariables()
        {
            VariableCollection list = new VariableCollection();
            string[] names = Enum.GetNames(typeof(SystemVariables));
            foreach (string variableName in names)
            {
                list.Add(GetSystemVariable(variableName));
            }
            return list;
        }

        // Instantiates the nested private class whose name matches the enum
        // member via reflection. NOTE(review): returns null (and GetNestedType
        // may return null, making CreateInstance throw) when no matching
        // nested type exists — the corresponding classes below are commented out.
        private static IVariable GetSystemVariable(string variableName)
        {
            IVariable target = null;
            if (Enum.IsDefined(typeof(SystemVariables), variableName))
            {
                SystemVariables targetType = (SystemVariables)Enum.Parse(typeof(SystemVariables), variableName);
                Type type = typeof(MemoryRegion).GetNestedType(variableName, System.Reflection.BindingFlags.NonPublic);
                target = (IVariable)Activator.CreateInstance(type);
            }
            else
            {
                // look up from plugin =)
            }
            return target;
        }

        /*
        #region SYSTEMDATE
        private class SYSTEMDATE : VariableBase, ISystemVariable
        {
            public SYSTEMDATE()
                : base("SYSTEMDATE", DataType.Date, VariableType.System)
            {
            }

            public string Value
            {
                get
                {
                    return DateTime.Now.ToShortDateString();
                }
            }
        }
        #endregion

        #region SYSTEMTIME
        private class SYSTEMTIME : VariableBase, ISystemVariable
        {
            public SYSTEMTIME()
                : base("SYSTEMTIME", DataType.Date, VariableType.System)
            {
            }

            public override string Expression
            {
                get
                {
                    return DateTime.Now.ToShortTimeString();
                }
                set
                {
                    throw new NotSupportedException();
                }
            }

            public string Value
            {
                get
                {
                    return Expression;
                }
            }
        }
        #endregion*/
    }
}
//! \file ArcGRP.cs //! \date Sun Mar 20 02:07:17 2016 //! \brief Ankh resource archive. // // Copyright (C) 2016 by morkt // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
//
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.IO;
using GameRes.Compression;
using GameRes.Utility;

namespace GameRes.Formats.Ankh
{
    /// <summary>
    /// Opener for Ice Soft "GRP" resource archives.  The archive has no signature;
    /// it is recognized purely by a plausible offset table at the start of the file:
    /// a list of 32-bit little-endian entry offsets, where the first offset also
    /// marks the end of the table.  Entries are unnamed, so synthetic names are
    /// generated from the archive's base name.
    /// </summary>
    [Export(typeof(ArchiveFormat))]
    [ExportMetadata("Priority", -1)] // negative priority: signature-less heuristic format, try after signature-based ones
    public class GrpOpener : ArchiveFormat
    {
        public override string Tag { get { return "GRP/ICE"; } }
        public override string Description { get { return "Ice Soft resource archive"; } }
        public override uint Signature { get { return 0; } } // no magic bytes; TryOpen validates structure instead
        public override bool IsHierarchic { get { return false; } }
        public override bool CanWrite { get { return false; } }

        public GrpOpener ()
        {
            Extensions = new string[] { "grp", "bin", "dat", "vc" };
        }

        /// <summary>
        /// Validate the offset table and build the entry list.  Returns null when the
        /// file does not look like a GRP archive (bad first offset, non-monotonic
        /// offsets, insane entry count, or no non-empty entries).
        /// </summary>
        public override ArcFile TryOpen (ArcView file)
        {
            uint first_offset = file.View.ReadUInt32 (0);
            // first offset must point past at least one index slot, lie inside the
            // file, and be 4-byte aligned (the index is an array of uint32).
            if (first_offset < 8 || first_offset >= file.MaxOffset || 0 != (first_offset & 3))
                return null;
            // index occupies [0, first_offset); last slot is the end-of-data sentinel.
            int count = (int)(first_offset - 4) / 4;
            if (!IsSaneCount (count))
                return null;
            var base_name = Path.GetFileNameWithoutExtension (file.Name);
            uint index_offset = 0;
            uint next_offset = first_offset;
            var dir = new List<Entry> (count);
            for (int i = 0; i < count && next_offset < file.MaxOffset; ++i)
            {
                var entry = new PackedEntry { Offset = next_offset };
                index_offset += 4;
                // each entry's size is the distance to the next entry's offset.
                next_offset = file.View.ReadUInt32 (index_offset);
                if (next_offset < entry.Offset)
                    return null; // offsets must be non-decreasing
                entry.Size = (uint)(next_offset - entry.Offset);
                entry.UnpackedSize = entry.Size;
                if (entry.Size != 0) // zero-length slots are simply skipped
                {
                    if (!entry.CheckPlacement (file.MaxOffset))
                        return null;
                    entry.Name = string.Format ("{0}#{1:D4}", base_name, i);
                    dir.Add (entry);
                }
            }
            if (0 == dir.Count)
                return null;
            DetectFileTypes (file, dir);
            return new ArcFile (file, this, dir);
        }

        /// <summary>
        /// Inspect the first 16 bytes of every entry and assign a resource type,
        /// adjusting offset/size/unpacked-size for the wrapper headers used by
        /// this engine ("TPW" compressed blobs, "HDJ\0" compressed images/MIDI,
        /// 4-byte-prefixed Ogg, and compressed RIFF audio).
        /// </summary>
        internal void DetectFileTypes (ArcView file, List<Entry> dir)
        {
            var header = new byte[16];
            foreach (PackedEntry entry in dir)
            {
                if (entry.Size <= 8)
                    continue; // too small to carry any recognizable header
                file.View.Read (entry.Offset, header, 0, 16);
                if (header.AsciiEqual ("TPW"))
                {
                    // byte 3 is a compression flag; packed payload carries a
                    // 32-bit unpacked size before the data.
                    entry.IsPacked = header[3] != 0;
                    long start_offset = entry.Offset+4;
                    if (entry.IsPacked)
                    {
                        entry.UnpackedSize = file.View.ReadUInt32 (start_offset);
                        start_offset = entry.Offset+11;
                    }
                    else
                    {
                        // unpacked: strip the 4-byte "TPW" prefix in place.
                        entry.Offset = start_offset;
                        entry.Size -= 4;
                    }
                    if (file.View.AsciiEqual (start_offset, "BM"))
                        entry.ChangeType (ImageFormat.Bmp);
                }
                else if (header.AsciiEqual (4, "HDJ\0"))
                {
                    // HDJ wrapper: uint32 unpacked size, then "HDJ\0", then data
                    // whose own header (BMP or MIDI) begins at offset 12.
                    if (header.AsciiEqual (12, "BM"))
                        entry.ChangeType (ImageFormat.Bmp);
                    else if (header.AsciiEqual (12, "MThd"))
                        entry.Name = Path.ChangeExtension (entry.Name, "mid");
                    entry.UnpackedSize = header.ToUInt32 (0);
                    entry.IsPacked = true;
                }
                else if (header.AsciiEqual (4, "OggS"))
                {
                    // Ogg stream with a 4-byte prefix (presumably a size field -- TODO confirm).
                    entry.ChangeType (OggAudio.Instance);
                    entry.Offset += 4;
                    entry.Size -= 4;
                }
                else if (entry.Size > 12 && (header.AsciiEqual (8, "RIFF") || ((header[4] & 0xF) == 0xF && header.AsciiEqual (5, "RIFF"))))
                {
                    // compressed WAV: uint32 unpacked size + method byte, RIFF header follows.
                    entry.ChangeType (AudioFormat.Wav);
                    entry.UnpackedSize = header.ToUInt32 (0);
                    entry.IsPacked = true;
                }
                else
                {
                    // fall back to generic signature-based detection.
                    uint signature = header.ToUInt32 (0);
                    var res = AutoEntry.DetectFileType (signature);
                    if (res != null)
                    {
                        entry.ChangeType (res);
                    }
                    else if ((signature & 0xFFFF) == 0xFBFF)
                    {
                        // 0xFFFB: MPEG audio frame sync word (little-endian read).
                        entry.ChangeType (Mp3Format.Value);
                    }
                    else if (entry.Size > 0x16 && IsAudioEntry (file, entry))
                    {
                        entry.Type = "audio";
                    }
                }
            }
        }

        internal static ResourceInstance<AudioFormat> Mp3Format = new ResourceInstance<AudioFormat> ("MP3");

        /// <summary>
        /// Heuristic check for a raw PCM audio entry: looks like the start of a
        /// WAVEFORMAT structure (PCM tag, 1 or 2 channels, zero cbSize) followed by
        /// a data-size field that exactly accounts for the rest of the entry.
        /// </summary>
        bool IsAudioEntry (ArcView file, Entry entry)
        {
            uint signature = file.View.ReadUInt32 (entry.Offset);
            if (signature != 0x010001 && signature != 0x020001)
                return false;
            int extra = file.View.ReadUInt16 (entry.Offset+0x10);
            if (extra != 0)
                return false;
            uint size = file.View.ReadUInt32 (entry.Offset+0x12);
            return 0x16 + size == entry.Size;
        }

        /// <summary>
        /// Decompress packed entries according to the wrapper detected earlier;
        /// anything unrecognized (or failing to decompress) falls back to the raw
        /// byte range via the base implementation.
        /// </summary>
        public override Stream OpenEntry (ArcFile arc, Entry entry)
        {
            var pent = entry as PackedEntry;
            if (pent != null && pent.IsPacked && pent.Size > 8)
            {
                try
                {
                    if (arc.File.View.AsciiEqual (entry.Offset, "TPW"))
                        return OpenTpw (arc, pent);
                    if (arc.File.View.AsciiEqual (entry.Offset+4, "HDJ\0"))
                        return OpenImage (arc, pent);
                    if (entry.Size > 12)
                    {
                        byte type = arc.File.View.ReadByte (entry.Offset+4);
                        if ('W' == type && arc.File.View.AsciiEqual (entry.Offset+8, "RIFF"))
                            return OpenAudio (arc, entry);
                        if ((type & 0xF) == 0xF && arc.File.View.AsciiEqual (entry.Offset+5, "RIFF"))
                        {
                            // low nibble 0xF marks an LZSS-compressed RIFF payload.
                            var input = arc.File.CreateStream (entry.Offset+4, entry.Size-4);
                            return new LzssStream (input);
                        }
                    }
                }
                catch (Exception X)
                {
                    // best effort: on any decode error fall through to the raw entry.
                    System.Diagnostics.Trace.WriteLine (X.Message, "[GRP]");
                }
            }
            return base.OpenEntry (arc, entry);
        }

        /// <summary>Decompress an HDJ-wrapped image/MIDI entry (8-byte header skipped).</summary>
        Stream OpenImage (ArcFile arc, PackedEntry entry)
        {
            using (var packed = arc.File.CreateStream (entry.Offset+8, entry.Size-8))
            using (var reader = new GrpUnpacker (packed))
            {
                var unpacked = new byte[entry.UnpackedSize];
                reader.UnpackHDJ (unpacked, 0);
                return new BinMemoryStream (unpacked, entry.Name);
            }
        }

        /// <summary>
        /// Decompress a packed RIFF audio entry.  Header layout (at entry.Offset):
        /// int32 unpacked size, byte[1] unused, byte pack type ('A' or 'S'),
        /// byte channel count, byte size of the uncompressed RIFF header that is
        /// stored verbatim before the compressed sample data.
        /// </summary>
        Stream OpenAudio (ArcFile arc, Entry entry)
        {
            int unpacked_size = arc.File.View.ReadInt32 (entry.Offset);
            byte pack_type = arc.File.View.ReadByte (entry.Offset+5);
            byte channels = arc.File.View.ReadByte (entry.Offset+6);
            byte header_size = arc.File.View.ReadByte (entry.Offset+7);
            if (unpacked_size <= 0 || header_size > unpacked_size
                || !('A' == pack_type || 'S' == pack_type))
                return base.OpenEntry (arc, entry);
            var unpacked = new byte[unpacked_size];
            // copy the stored RIFF header verbatim, then decode samples after it.
            arc.File.View.Read (entry.Offset+8, unpacked, 0, header_size);
            uint packed_size = entry.Size - 8 - header_size;
            using (var packed = arc.File.CreateStream (entry.Offset+8+header_size, packed_size))
            using (var reader = new GrpUnpacker (packed))
            {
                if ('A' == pack_type)
                    reader.UnpackA (unpacked, header_size, channels);
                else
                    reader.UnpackS (unpacked, header_size, channels);
                return new BinMemoryStream (unpacked, entry.Name);
            }
        }

        /// <summary>
        /// Decompress a TPW entry.  Byte-code scheme: a control byte selects literal
        /// runs (&lt; 0x40), single-byte RLE (0x40..0x6F), two-byte RLE (0x70..0x9F),
        /// three-byte RLE (0xA0..0xBF) or an LZ back-reference (0xC0..0xFF) whose
        /// distance is decoded against a table of four multiples of a base offset
        /// read from the 8-byte header.  A zero control byte terminates the stream.
        /// </summary>
        Stream OpenTpw (ArcFile arc, PackedEntry entry)
        {
            var output = new byte[entry.UnpackedSize];
            using (var input = arc.File.CreateStream (entry.Offset, entry.Size))
            {
                input.Position = 8;
                var offsets = new int[4];
                offsets[0] = input.ReadUInt16();
                offsets[1] = offsets[0] * 2;
                offsets[2] = offsets[0] * 3;
                offsets[3] = offsets[0] * 4;
                int dst = 0;
                while (dst < output.Length)
                {
                    byte ctl = input.ReadUInt8();
                    if (0 == ctl)
                        break; // end-of-stream marker
                    int count;
                    if (ctl < 0x40)
                    {
                        // literal run of 'ctl' bytes, clamped to the remaining output.
                        count = Math.Min (ctl, output.Length - dst);
                        input.Read (output, dst, count);
                        dst += count;
                    }
                    else if (ctl <= 0x6F)
                    {
                        // repeat one byte; 0x6F escapes to a 16-bit count.
                        if (0x6F == ctl)
                            count = input.ReadUInt16();
                        else
                            count = ctl - 0x3D;
                        byte v = input.ReadUInt8();
                        while (count --> 0)
                            output[dst++] = v;
                    }
                    else if (ctl <= 0x9F)
                    {
                        // repeat a two-byte pair; 0x9F escapes to a 16-bit count.
                        if (ctl == 0x9F)
                            count = input.ReadUInt16();
                        else
                            count = ctl - 0x6E;
                        byte v1 = input.ReadUInt8();
                        byte v2 = input.ReadUInt8();
                        while (count --> 0)
                        {
                            output[dst++] = v1;
                            output[dst++] = v2;
                        }
                    }
                    else if (ctl <= 0xBF)
                    {
                        // repeat a three-byte group; 0xBF escapes to a 16-bit count.
                        if (ctl == 0xBF)
                            count = input.ReadUInt16();
                        else
                            count = ctl - 0x9E;
                        input.Read (output, dst, 3);
                        if (count > 0)
                        {
                            count *= 3;
                            Binary.CopyOverlapped (output, dst, dst+3, count-3);
                            dst += count;
                        }
                    }
                    else
                    {
                        // back-reference: low 6 bits of the next byte are the distance
                        // within a "bank", high 2 bits select the bank offset.
                        count = (ctl & 0x3F) + 3;
                        int offset = input.ReadUInt8();
                        offset = (offset & 0x3F) - offsets[offset >> 6];
                        Binary.CopyOverlapped (output, dst+offset, dst, count);
                        dst += count;
                    }
                }
                return new BinMemoryStream (output, entry.Name);
            }
        }
    }

    /// <summary>
    /// Bit-stream decoder shared by the HDJ image and 'A'/'S' audio compression
    /// schemes.  Bits are consumed MSB-first from little-endian uint32 words.
    /// Two slightly different games use identical headers, so each public entry
    /// point tries the variant that worked last time and falls back to the other
    /// one (note: <see cref="LastUsedMethod"/> is static shared state, so decoding
    /// is not thread-safe across concurrent unpackers).
    /// </summary>
    internal sealed class GrpUnpacker : IDisposable
    {
        IBinaryStream m_input;
        uint m_bits;        // current bit reservoir, consumed from the top
        int m_cached_bits;  // number of valid bits remaining in m_bits

        public GrpUnpacker (IBinaryStream input)
        {
            m_input = input;
        }

        // Different games have slightly different formats using exact same headers,
        // so have to invent some flawed format recognition here.
        enum GrpVariant { Default, BoD };

        static GrpVariant LastUsedMethod = GrpVariant.Default;

        static GrpVariant GetOppositeVariant ()
        {
            return GrpVariant.Default == LastUsedMethod ? GrpVariant.BoD : GrpVariant.Default;
        }

        /// <summary>
        /// Decompress an HDJ image stream into <paramref name="output"/> starting
        /// at <paramref name="dst"/>.  Tries the last successful variant first,
        /// then retries from the stream start with the other variant; throws
        /// <see cref="InvalidFormatException"/> if both fail.
        /// </summary>
        public void UnpackHDJ (byte[] output, int dst)
        {
            try
            {
                if (UnpackHDJVariant (output, dst, LastUsedMethod))
                    return;
            }
            catch { /* ignore unpack errors */ }
            var method = GetOppositeVariant();
            m_input.Position = 0;
            if (!UnpackHDJVariant (output, dst, method))
                throw new InvalidFormatException();
            LastUsedMethod = method; // remember which variant worked for next time
        }

        /// <summary>
        /// LZ decoder core.  Bit-level control stream selects literals vs copies;
        /// literal bytes and short (8-bit) copy offsets come from one byte queue,
        /// long (16-bit) copy records from a separate word queue.  The only
        /// difference between variants is whether the 2-bit extra-count field is
        /// read before or after refilling the byte queue.  Returns false when a
        /// copy reference falls outside the already-written output (used as the
        /// variant-detection failure signal).
        /// </summary>
        private bool UnpackHDJVariant (byte[] output, int dst, GrpVariant method)
        {
            ResetBits();
            int word_count = 0;
            int byte_count = 0;
            uint next_byte = 0;
            uint next_word = 0;
            while (dst < output.Length)
            {
                if (GetNextBit() != 0)
                {
                    int count = 0;
                    bool long_count = false;
                    int offset;
                    if (GetNextBit() != 0)
                    {
                        // long reference: 13-bit negative offset + 3-bit count from the word queue.
                        if (0 == word_count)
                        {
                            next_word = m_input.ReadUInt32();
                            word_count = 2;
                        }
                        count = (int)((next_word >> 13) & 7) + 3;
                        offset = (int)(next_word | 0xFFFFE000); // sign-extend 13-bit offset
                        next_word >>= 16;
                        --word_count;
                        long_count = 10 == count; // max field value flags an extended count
                    }
                    else
                    {
                        // short reference: 8-bit negative offset, 2-bit count.
                        // NOTE: the two variants differ only in reading the count
                        // before vs after the byte-queue refill (affects bit order).
                        if (method == GrpVariant.Default)
                            count = GetBits (2);
                        if (0 == byte_count)
                        {
                            next_byte = m_input.ReadUInt32();
                            byte_count = 4;
                        }
                        if (method != GrpVariant.Default)
                            count = GetBits (2);
                        count += 2;
                        long_count = 5 == count;
                        offset = (int)(next_byte | 0xFFFFFF00); // sign-extend 8-bit offset
                        next_byte >>= 8;
                        --byte_count;
                    }
                    if (long_count)
                    {
                        // extended count: unary length prefix followed by that many bits.
                        int n = 0;
                        while (GetNextBit() != 0)
                            ++n;
                        if (n != 0)
                            count += GetBits (n) + 1;
                    }
                    int src = dst + offset;
                    if (src < 0 || src >= dst || dst + count > output.Length)
                        return false; // invalid reference => wrong variant (or corrupt data)
                    Binary.CopyOverlapped (output, src, dst, count);
                    dst += count;
                }
                else
                {
                    // literal byte from the byte queue.
                    if (0 == byte_count)
                    {
                        next_byte = m_input.ReadUInt32();
                        byte_count = 4;
                    }
                    output[dst++] = (byte)next_byte;
                    next_byte >>= 8;
                    --byte_count;
                }
            }
            return true;
        }

        /// <summary>
        /// Decompress 'S'-type audio.  Variant detection is by trial: a decode that
        /// consumes the stream exactly is accepted.  Note that if both variants
        /// fail this returns silently with whatever the last attempt produced.
        /// </summary>
        public void UnpackS (byte[] output, int dst, int channels)
        {
            try
            {
                if (UnpackSVariant (output, dst, channels, LastUsedMethod))
                    return;
            }
            catch { /* ignore parse errors */ }
            var method = GetOppositeVariant();
            m_input.Position = 0;
            if (UnpackSVariant (output, dst, channels, method))
                LastUsedMethod = method;
        }

        bool UnpackSVariant (byte[] output, int dst, int channels, GrpVariant method)
        {
            if (GrpVariant.Default == method)
                UnpackSv2 (output, dst, channels);
            else
                UnpackSv1 (output, dst, channels);
            // success criterion: the decoder consumed the input exactly.
            return m_input.PeekByte() == -1; // rather loose test, but whatever
        }

        /// <summary>
        /// DPCM decoder, variant 1 (mono interleave ignored): each sample is either
        /// an absolute 10-bit value scaled by 64, an explicit zero, or a signed
        /// 5-bit delta (sign-magnitude, scaled by 64) from the previous sample.
        /// </summary>
        void UnpackSv1 (byte[] output, int dst, int channels)
        {
            ResetBits();
            short last_word = 0;
            while (dst < output.Length)
            {
                int word;
                if (GetNextBit() != 0)
                {
                    if (GetNextBit() != 0)
                        word = GetBits (10) << 6; // absolute sample
                    else
                        word = 0;                 // explicit silence
                }
                else
                {
                    // sign-magnitude delta: bit 10 is the sign.
                    int adjust = GetBits (5) << 6;
                    if (0 != (adjust & 0x400))
                        adjust = -(adjust & 0x3FF);
                    word = last_word + adjust;
                }
                last_word = (short)word;
                LittleEndian.Pack (last_word, output, dst);
                dst += 2;
            }
        }

        /// <summary>
        /// DPCM decoder, variant 2: channels are decoded as separate interleaved
        /// planes (stride = channels * 2).  Adds run-length encoded zero runs and
        /// uses a two's-complement 5-bit delta instead of sign-magnitude.
        /// </summary>
        void UnpackSv2 (byte[] output, int dst, int channels)
        {
            if (channels != 1)
                m_input.Seek ((channels-1) * 4, SeekOrigin.Current); // skip extra per-channel words (purpose unverified -- TODO confirm)
            int step = channels * 2;
            for (int i = 0; i < channels; ++i)
            {
                ResetBits();
                int pos = dst;
                short last_word = 0;
                while (pos < output.Length)
                {
                    int word;
                    if (GetNextBit() != 0)
                    {
                        if (GetNextBit() != 0)
                        {
                            word = GetBits (10) << 6; // absolute sample
                        }
                        else
                        {
                            // zero run: count is either 2 literal bits, or a
                            // unary-length-prefixed field plus 4.
                            int repeat;
                            if (GetNextBit() != 0)
                            {
                                int bit_length = 0;
                                do { ++bit_length; } while (GetNextBit() != 0);
                                repeat = GetBits (bit_length) + 4;
                            }
                            else
                            {
                                repeat = GetBits (2);
                            }
                            word = 0;
                            while (repeat --> 0)
                            {
                                output[pos] = 0;
                                output[pos+1] = 0;
                                pos += step;
                            }
                        }
                    }
                    else
                    {
                        // two's-complement 5-bit delta scaled by 64:
                        // shift to the top of a short, then arithmetic-shift down.
                        int adjust = (short)(GetBits (5) << 11) >> 5;
                        word = last_word + adjust;
                    }
                    LittleEndian.Pack ((short)word, output, pos);
                    last_word = (short)word;
                    pos += step;
                }
                dst += 2; // next channel plane starts one sample later
            }
        }

        /// <summary>
        /// Decompress 'A'-type audio: a plain stream of absolute 10-bit samples
        /// scaled by 64, one interleaved plane per channel.  No variant handling.
        /// </summary>
        public void UnpackA (byte[] output, int dst, int channels)
        {
            if (channels != 1)
                m_input.Seek ((channels-1) * 4, SeekOrigin.Current);
            int step = 2 * channels;
            for (int i = 0; i < channels; ++i)
            {
                int pos = dst;
                ResetBits();
                while (pos < output.Length)
                {
                    int word = GetBits (10) << 6;;
                    LittleEndian.Pack ((short)word, output, pos);
                    pos += step;
                }
                dst += 2;
            }
        }

        // Discard any buffered bits so the next GetBits reads a fresh word.
        void ResetBits ()
        {
            m_cached_bits = 0;
        }

        int GetNextBit ()
        {
            return GetBits (1);
        }

        /// <summary>
        /// Read <paramref name="count"/> bits (MSB-first) from the stream,
        /// refilling the 32-bit reservoir from little-endian words as needed and
        /// stitching across a word boundary when the reservoir runs short.
        /// </summary>
        int GetBits (int count)
        {
            if (0 == m_cached_bits)
            {
                m_bits = m_input.ReadUInt32();
                m_cached_bits = 32;
            }
            uint val;
            if (m_cached_bits < count)
            {
                // splice remaining bits with the top of the next word.
                uint next_bits = m_input.ReadUInt32();
                val = (m_bits | (next_bits >> m_cached_bits)) >> (32 - count);
                m_bits = next_bits << (count - m_cached_bits);
                m_cached_bits = 32 - (count - m_cached_bits);
            }
            else
            {
                val = m_bits >> (32 - count);
                m_bits <<= count;
                m_cached_bits -= count;
            }
            return (int)val;
        }

        #region IDisposable Members
        bool _disposed = false;
        public void Dispose ()
        {
            if (!_disposed)
            {
                m_input.Dispose();
                _disposed = true;
            }
        }
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Runtime;
using System.Diagnostics;

namespace System.Text
{
    /// <summary>
    /// Encoder fallback that substitutes a fixed replacement string (default "?")
    /// for any character that cannot be encoded.  The replacement string itself
    /// must be well-formed: it may not contain unpaired surrogates.
    /// </summary>
    public sealed class EncoderReplacementFallback : EncoderFallback
    {
        // The replacement string emitted for each unencodable character.
        private String _strDefault;

        // Construction.  Default replacement fallback uses no best fit and ? replacement string
        public EncoderReplacementFallback() : this("?")
        {
        }

        /// <summary>
        /// Creates a fallback with the given replacement string.
        /// </summary>
        /// <exception cref="ArgumentNullException">replacement is null.</exception>
        /// <exception cref="ArgumentException">replacement contains an unpaired surrogate.</exception>
        public EncoderReplacementFallback(String replacement)
        {
            // Must not be null
            if (replacement == null)
                throw new ArgumentNullException(nameof(replacement));

            // Make sure it doesn't have bad surrogate pairs.
            // bFoundHigh doubles as the "invalid" flag: any loop exit via 'break'
            // leaves it true, which triggers the throw below.
            bool bFoundHigh = false;
            for (int i = 0; i < replacement.Length; i++)
            {
                // Found a surrogate?
                if (Char.IsSurrogate(replacement, i))
                {
                    // High or Low?
                    if (Char.IsHighSurrogate(replacement, i))
                    {
                        // if already had a high one, stop
                        if (bFoundHigh)
                            break;  // break & throw at the bFoundHigh check below
                        bFoundHigh = true;
                    }
                    else
                    {
                        // Low, did we have a high?
                        if (!bFoundHigh)
                        {
                            // Didn't have one: lone low surrogate, make it fail when we stop
                            bFoundHigh = true;
                            break;
                        }
                        // Clear flag: completed a valid high/low pair
                        bFoundHigh = false;
                    }
                }
                // If last was high we're in trouble (not surrogate so not low surrogate, so break)
                else if (bFoundHigh)
                    break;
            }
            // True here means the string ended on a lone high surrogate or we broke out early.
            if (bFoundHigh)
                throw new ArgumentException(SR.Format(SR.Argument_InvalidCharSequenceNoIndex, nameof(replacement)));

            _strDefault = replacement;
        }

        // The replacement string supplied at construction.
        public String DefaultString
        {
            get { return _strDefault; }
        }

        public override EncoderFallbackBuffer CreateFallbackBuffer()
        {
            return new EncoderReplacementFallbackBuffer(this);
        }

        // Maximum number of characters that this instance of this fallback could return
        public override int MaxCharCount
        {
            get { return _strDefault.Length; }
        }

        // Value equality: two fallbacks are equal iff their replacement strings match.
        public override bool Equals(Object value)
        {
            EncoderReplacementFallback that = value as EncoderReplacementFallback;
            if (that != null)
            {
                return (_strDefault == that._strDefault);
            }
            return (false);
        }

        public override int GetHashCode()
        {
            return _strDefault.GetHashCode();
        }
    }

    /// <summary>
    /// Per-encoding-operation buffer that feeds out the replacement string one
    /// character at a time.  Tracks its position with a pair of counters and uses
    /// the counter state to detect recursive fallback (the replacement string
    /// itself failing to encode).
    /// </summary>
    public sealed class EncoderReplacementFallbackBuffer : EncoderFallbackBuffer
    {
        // Replacement text; stored doubled so a surrogate-pair fallback can emit it twice.
        private String _strDefault;
        // Characters remaining to emit minus one; -1 means idle/exhausted.
        private int _fallbackCount = -1;
        // Index into _strDefault of the character most recently returned.
        private int _fallbackIndex = -1;

        // Construction
        public EncoderReplacementFallbackBuffer(EncoderReplacementFallback fallback)
        {
            // 2X in case we're a surrogate pair
            _strDefault = fallback.DefaultString + fallback.DefaultString;
        }

        // Fallback Methods

        /// <summary>
        /// Begin falling back for a single unencodable character.  Returns true
        /// if there is replacement text to emit.
        /// </summary>
        public override bool Fallback(char charUnknown, int index)
        {
            // If we had a buffer already we're being recursive, throw, it's probably at the suspect
            // character in our array.
            if (_fallbackCount >= 1)
            {
                // If we're recursive we may still have something in our buffer that makes this a surrogate
                if (char.IsHighSurrogate(charUnknown) && _fallbackCount >= 0 && char.IsLowSurrogate(_strDefault[_fallbackIndex + 1]))
                    ThrowLastCharRecursive(Char.ConvertToUtf32(charUnknown, _strDefault[_fallbackIndex + 1]));

                // Nope, just one character
                ThrowLastCharRecursive(unchecked((int)charUnknown));
            }

            // Go ahead and get our fallback
            // Divide by 2 because we aren't a surrogate pair (string is stored doubled)
            _fallbackCount = _strDefault.Length / 2;
            _fallbackIndex = -1;

            return _fallbackCount != 0;
        }

        /// <summary>
        /// Begin falling back for an unencodable surrogate pair; the replacement
        /// text is emitted twice (once per half).  Validates that the pair is a
        /// proper high/low combination.
        /// </summary>
        public override bool Fallback(char charUnknownHigh, char charUnknownLow, int index)
        {
            // Double check input surrogate pair
            if (!Char.IsHighSurrogate(charUnknownHigh))
                throw new ArgumentOutOfRangeException(nameof(charUnknownHigh), SR.Format(SR.ArgumentOutOfRange_Range, 0xD800, 0xDBFF));

            if (!Char.IsLowSurrogate(charUnknownLow))
                throw new ArgumentOutOfRangeException(nameof(charUnknownLow), SR.Format(SR.ArgumentOutOfRange_Range, 0xDC00, 0xDFFF));

            // If we had a buffer already we're being recursive, throw, it's probably at the suspect
            // character in our array.
            if (_fallbackCount >= 1)
                ThrowLastCharRecursive(Char.ConvertToUtf32(charUnknownHigh, charUnknownLow));

            // Go ahead and get our fallback (full doubled length: replacement emitted twice)
            _fallbackCount = _strDefault.Length;
            _fallbackIndex = -1;

            return _fallbackCount != 0;
        }

        /// <summary>Returns the next replacement character, or '\0' when exhausted.</summary>
        public override char GetNextChar()
        {
            // We want it to get < 0 because == 0 means that the current/last character is a fallback
            // and we need to detect recursion.  We could have a flag but we already have this counter.
            _fallbackCount--;
            _fallbackIndex++;

            // Do we have anything left? 0 is now last fallback char, negative is nothing left
            if (_fallbackCount < 0)
                return '\0';

            // Need to get it out of the buffer.
            // Make sure it didn't wrap from the fast count-- path (repeated calls past the end)
            if (_fallbackCount == int.MaxValue)
            {
                _fallbackCount = -1;
                return '\0';
            }

            // Now make sure its in the expected range
            Debug.Assert(_fallbackIndex < _strDefault.Length && _fallbackIndex >= 0, "Index exceeds buffer range");

            return _strDefault[_fallbackIndex];
        }

        /// <summary>Steps back one character so it can be re-read; false if nothing to back up over.</summary>
        public override bool MovePrevious()
        {
            // Back up one, only if we just processed the last character (or earlier)
            if (_fallbackCount >= -1 && _fallbackIndex >= 0)
            {
                _fallbackIndex--;
                _fallbackCount++;
                return true;
            }

            // Return false 'cause we couldn't do it.
            return false;
        }

        // How many characters left to output?
        public override int Remaining
        {
            get
            {
                // Our count is 0 for 1 character left.
                return (_fallbackCount < 0) ? 0 : _fallbackCount;
            }
        }

        // Clear the buffer
        public override unsafe void Reset()
        {
            _fallbackCount = -1;
            _fallbackIndex = 0;
            // charStart / bFallingBack are inherited from EncoderFallbackBuffer --
            // presumably internal tracking state of the base class; confirm against
            // the base class definition (not visible in this file).
            charStart = null;
            bFallingBack = false;
        }
    }
}
using System;
using System.CodeDom;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;

namespace Orleans.Messaging
{
    // <summary>
    // This class is used on the client only.
    // It provides the client counterpart to the Gateway and GatewayAcceptor classes on the silo side.
    //
    // There is one ProxiedMessageCenter instance per OutsideRuntimeClient. There can be multiple ProxiedMessageCenter instances
    // in a single process, but because RuntimeClient keeps a static pointer to a single OutsideRuntimeClient instance, this is not
    // generally done in practice.
    //
    // Each ProxiedMessageCenter keeps a collection of GatewayConnection instances. Each of these represents a bidirectional connection
    // to a single gateway endpoint. Requests are assigned to a specific connection based on the target grain ID, so that requests to
    // the same grain will go to the same gateway, in sending order. To do this efficiently and scalably, we bucket grains together
    // based on their hash code mod a reasonably large number (currently 8192).
    //
    // When the first message is sent to a bucket, we assign a gateway to that bucket, selecting in round-robin fashion from the known
    // gateways. If this is the first message to be sent to the gateway, we will create a new connection for it and assign the bucket to
    // the new connection. Either way, all messages to grains in that bucket will be sent to the assigned connection as long as the
    // connection is live.
    //
    // Connections stay live as long as possible. If a socket error or other communications error occurs, then the client will try to
    // reconnect twice before giving up on the gateway. If the connection cannot be re-established, then the gateway is deemed (temporarily)
    // dead, and any buckets assigned to the connection are unassigned (so that the next message sent will cause a new gateway to be selected).
    // There is no assumption that this death is permanent; the system will try to reuse the gateway every 5 minutes.
    //
    // The list of known gateways is managed by the GatewayManager class. See comments there for details...
    // =======================================================================================================================================
    // Locking and lock protocol:
    // The ProxiedMessageCenter instance itself may be accessed by many client threads simultaneously, and each GatewayConnection instance
    // is accessed by its own thread, by the thread for its Receiver, and potentially by client threads from within the ProxiedMessageCenter.
    // Thus, we need locks to protect the various data structured from concurrent modifications.
    //
    // Each GatewayConnection instance has a "lockable" field that is used to lock local information. This lock is used by both the GatewayConnection
    // thread and the Receiver thread.
    //
    // The ProxiedMessageCenter instance also has a "lockable" field. This lock is used by any client thread running methods within the instance.
    //
    // Note that we take care to ensure that client threads never need locked access to GatewayConnection state and GatewayConnection threads never need
    // locked access to ProxiedMessageCenter state. Thus, we don't need to worry about lock ordering across these objects.
    //
    // Finally, the GatewayManager instance within the ProxiedMessageCenter has two collections, knownGateways and knownDead, that it needs to
    // protect with locks. Rather than using a "lockable" field, each collection is locked to protect the collection.
    // All sorts of threads can run within the GatewayManager, including client threads and GatewayConnection threads, so we need to
    // be careful about locks here. The protocol we use is to always take GatewayManager locks last, to only take them within GatewayManager methods,
    // and to always release them before returning from the method. In addition, we never simultaneously hold the knownGateways and knownDead locks,
    // so there's no need to worry about the order in which we take and release those locks.
    // </summary>
    internal class ProxiedMessageCenter : IMessageCenter, IDisposable
    {
        #region Constants

        internal static readonly TimeSpan MINIMUM_INTERCONNECT_DELAY = TimeSpan.FromMilliseconds(100); // wait one tenth of a second between connect attempts
        internal const int CONNECT_RETRY_COUNT = 2; // Retry twice before giving up on a gateway server

        #endregion

        internal GrainId ClientId { get; private set; }
        internal bool Running { get; private set; }

        internal readonly GatewayManager GatewayManager;
        // Inbound messages received by gateway connections, consumed by WaitMessage.
        internal readonly BlockingCollection<Message> PendingInboundMessages;
        // Live (and possibly dead-but-not-yet-evicted) connections keyed by gateway URI; guarded by 'lockable'.
        private readonly Dictionary<Uri, GatewayConnection> gatewayConnections;
        // Round-robin counter for unordered/system-target message distribution; guarded by 'lockable'.
        private int numMessages;
        // The grainBuckets array is used to select the connection to use when sending an ordered message to a grain.
        // Requests are bucketed by GrainID, so that all requests to a grain get routed through the same bucket.
        // Each bucket holds a (possibly null) weak reference to a GatewayConnection object. That connection instance is used
        // if the WeakReference is non-null, is alive, and points to a live gateway connection. If any of these conditions is
        // false, then a new gateway is selected using the gateway manager, and a new connection established if necessary.
        private readonly WeakReference[] grainBuckets;
        private readonly Logger logger;
        // Guards gatewayConnections, numMessages and grainBuckets (see class-level locking notes).
        private readonly object lockable;
        public SiloAddress MyAddress { get; private set; }
        public IMessagingConfiguration MessagingConfiguration { get; private set; }
        private readonly QueueTrackingStatistic queueTracking;
        private int numberOfConnectedGateways = 0;

        public ProxiedMessageCenter(ClientConfiguration config, IPAddress localAddress, int gen, GrainId clientId, IGatewayListProvider gatewayListProvider)
        {
            lockable = new object();
            MyAddress = SiloAddress.New(new IPEndPoint(localAddress, 0), gen);
            ClientId = clientId;
            Running = false;
            MessagingConfiguration = config;
            GatewayManager = new GatewayManager(config, gatewayListProvider);
            PendingInboundMessages = new BlockingCollection<Message>();
            gatewayConnections = new Dictionary<Uri, GatewayConnection>();
            numMessages = 0;
            grainBuckets = new WeakReference[config.ClientSenderBuckets];
            logger = LogManager.GetLogger("Messaging.ProxiedMessageCenter", LoggerType.Runtime);
            if (logger.IsVerbose) logger.Verbose("Proxy grain client constructed");
            // Gauge of currently-live gateway connections; reads the dictionary under its own lock.
            IntValueStatistic.FindOrCreate(StatisticNames.CLIENT_CONNECTED_GATEWAY_COUNT, () =>
            {
                lock (gatewayConnections)
                {
                    return gatewayConnections.Values.Count(conn => conn.IsLive);
                }
            });
            if (StatisticsCollector.CollectQueueStats)
            {
                queueTracking = new QueueTrackingStatistic("ClientReceiver");
            }
        }

        // Marks the center as running and starts queue statistics tracking if enabled.
        public void Start()
        {
            Running = true;
            if (StatisticsCollector.CollectQueueStats)
            {
                queueTracking.OnStartExecution();
            }
            if (logger.IsVerbose) logger.Verbose("Proxy grain client started");
        }

        public void PrepareToStop()
        {
            // put any pre stop logic here.
        }

        // Stops accepting inbound messages and shuts down the gateway manager and all connections.
        public void Stop()
        {
            Running = false;

            Utils.SafeExecute(() =>
            {
                PendingInboundMessages.CompleteAdding();
            });

            if (StatisticsCollector.CollectQueueStats)
            {
                queueTracking.OnStopExecution();
            }
            GatewayManager.Stop();

            foreach (var gateway in gatewayConnections.Values)
            {
                gateway.Stop();
            }
        }

        // <summary>
        // Routes an outbound message to a gateway connection.  Three routing modes:
        // (1) message already addressed to a specific silo - use (or create) the connection to that gateway;
        // (2) system-target or unordered message - round-robin over live gateways;
        // (3) ordered grain message - pick the connection cached in the grain's hash bucket,
        //     re-assigning the bucket if its connection is gone or dead.
        // A newly-created connection is started outside the lock; on start failure or a
        // lost race with a closing connection the message is rejected or resent.
        // </summary>
        public void SendMessage(Message msg)
        {
            GatewayConnection gatewayConnection = null;
            bool startRequired = false;

            // If there's a specific gateway specified, use it
            if (msg.TargetSilo != null)
            {
                Uri addr = msg.TargetSilo.ToGatewayUri();
                lock (lockable)
                {
                    if (!gatewayConnections.TryGetValue(addr, out gatewayConnection) || !gatewayConnection.IsLive)
                    {
                        gatewayConnection = new GatewayConnection(addr, this);
                        gatewayConnections[addr] = gatewayConnection;
                        if (logger.IsVerbose) logger.Verbose("Creating gateway to {0} for pre-addressed message", addr);
                        startRequired = true;
                    }
                }
            }
            // For untargeted messages to system targets, and for unordered messages, pick a next connection in round robin fashion.
            else if (msg.TargetGrain.IsSystemTarget || msg.IsUnordered)
            {
                // Get the cached list of live gateways.
                // Pick a next gateway name in a round robin fashion.
                // See if we have a live connection to it.
                // If Yes, use it.
                // If not, create a new GatewayConnection and start it.
                // If start fails, we will mark this connection as dead and remove it from the GetCachedLiveGatewayNames.
                lock (lockable)
                {
                    int msgNumber = numMessages;
                    numMessages = unchecked(numMessages + 1);
                    IList<Uri> gatewayNames = GatewayManager.GetLiveGateways();
                    int numGateways = gatewayNames.Count;
                    if (numGateways == 0)
                    {
                        RejectMessage(msg, "No gateways available");
                        logger.Warn(ErrorCode.ProxyClient_CannotSend, "Unable to send message {0}; gateway manager state is {1}", msg, GatewayManager);
                        return;
                    }
                    Uri addr = gatewayNames[msgNumber % numGateways];
                    if (!gatewayConnections.TryGetValue(addr, out gatewayConnection) || !gatewayConnection.IsLive)
                    {
                        gatewayConnection = new GatewayConnection(addr, this);
                        gatewayConnections[addr] = gatewayConnection;
                        if (logger.IsVerbose) logger.Verbose(ErrorCode.ProxyClient_CreatedGatewayUnordered, "Creating gateway to {0} for unordered message to grain {1}", addr, msg.TargetGrain);
                        startRequired = true;
                    }
                    // else - Fast path - we've got a live gatewayConnection to use
                }
            }
            // Otherwise, use the buckets to ensure ordering.
            else
            {
                var index = msg.TargetGrain.GetHashCode_Modulo((uint)grainBuckets.Length);
                lock (lockable)
                {
                    // Repeated from above, at the declaration of the grainBuckets array:
                    // Requests are bucketed by GrainID, so that all requests to a grain get routed through the same bucket.
                    // Each bucket holds a (possibly null) weak reference to a GatewayConnection object. That connection instance is used
                    // if the WeakReference is non-null, is alive, and points to a live gateway connection. If any of these conditions is
                    // false, then a new gateway is selected using the gateway manager, and a new connection established if necessary.
                    var weakRef = grainBuckets[index];
                    if ((weakRef != null) && weakRef.IsAlive)
                    {
                        gatewayConnection = weakRef.Target as GatewayConnection;
                    }
                    if ((gatewayConnection == null) || !gatewayConnection.IsLive)
                    {
                        var addr = GatewayManager.GetLiveGateway();
                        if (addr == null)
                        {
                            RejectMessage(msg, "No gateways available");
                            logger.Warn(ErrorCode.ProxyClient_CannotSend_NoGateway, "Unable to send message {0}; gateway manager state is {1}", msg, GatewayManager);
                            return;
                        }
                        if (logger.IsVerbose2) logger.Verbose2(ErrorCode.ProxyClient_NewBucketIndex, "Starting new bucket index {0} for ordered messages to grain {1}", index, msg.TargetGrain);
                        if (!gatewayConnections.TryGetValue(addr, out gatewayConnection) || !gatewayConnection.IsLive)
                        {
                            gatewayConnection = new GatewayConnection(addr, this);
                            gatewayConnections[addr] = gatewayConnection;
                            if (logger.IsVerbose) logger.Verbose(ErrorCode.ProxyClient_CreatedGatewayToGrain, "Creating gateway to {0} for message to grain {1}, bucket {2}, grain id hash code {3}X", addr, msg.TargetGrain, index, msg.TargetGrain.GetHashCode().ToString("x"));
                            startRequired = true;
                        }
                        grainBuckets[index] = new WeakReference(gatewayConnection);
                    }
                }
            }

            if (startRequired)
            {
                gatewayConnection.Start();

                if (!gatewayConnection.IsLive)
                {
                    // if failed to start Gateway connection (failed to connect), try sending this msg to another Gateway.
                    RejectOrResend(msg);
                    return;
                }
            }

            try
            {
                gatewayConnection.QueueRequest(msg);
                if (logger.IsVerbose2) logger.Verbose2(ErrorCode.ProxyClient_QueueRequest, "Sending message {0} via gateway {1}", msg, gatewayConnection.Address);
            }
            catch (InvalidOperationException)
            {
                // This exception can be thrown if the gateway connection we selected was closed since we checked (i.e., we lost the race)
                // If this happens, we reject if the message is targeted to a specific silo, or try again if not
                RejectOrResend(msg);
            }
        }

        // Silo-targeted messages must be rejected (that silo is gone); untargeted ones can be retried on another gateway.
        private void RejectOrResend(Message msg)
        {
            if (msg.TargetSilo != null)
            {
                RejectMessage(msg, String.Format("Target silo {0} is unavailable", msg.TargetSilo));
            }
            else
            {
                SendMessage(msg);
            }
        }

        public Task<IGrainTypeResolver> GetTypeCodeMap(GrainFactory grainFactory)
        {
            var silo = GetLiveGatewaySiloAddress();
            return GetTypeManager(silo, grainFactory).GetClusterTypeCodeMap();
        }

        public Task<Streams.ImplicitStreamSubscriberTable> GetImplicitStreamSubscriberTable(GrainFactory grainFactory)
        {
            var silo = GetLiveGatewaySiloAddress();
            return GetTypeManager(silo, grainFactory).GetImplicitStreamSubscriberTable(silo);
        }

        // <summary>
        // Blocks until the next inbound message is available, or returns null on
        // cancellation / shutdown.  All expected shutdown-path exceptions are
        // downgraded to verbose logs and surfaced as a null return.
        // </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
        public Message WaitMessage(Message.Categories type, CancellationToken ct)
        {
            try
            {
                if (ct.IsCancellationRequested)
                {
                    return null;
                }
                // Don't pass CancellationToken to Take. It causes too much spinning.
                Message msg = PendingInboundMessages.Take();
#if TRACK_DETAILED_STATS
                if (StatisticsCollector.CollectQueueStats)
                {
                    queueTracking.OnDeQueueRequest(msg);
                }
#endif
                return msg;
            }
#if !NETSTANDARD
            catch (ThreadAbortException exc)
            {
                // Silo may be shutting-down, so downgrade to verbose log
                logger.Verbose(ErrorCode.ProxyClient_ThreadAbort, "Received thread abort exception -- exiting. {0}", exc);
                Thread.ResetAbort();
                return null;
            }
#endif
            catch (OperationCanceledException exc)
            {
                logger.Verbose(ErrorCode.ProxyClient_OperationCancelled, "Received operation cancelled exception -- exiting. {0}", exc);
                return null;
            }
            catch (ObjectDisposedException exc)
            {
                logger.Verbose(ErrorCode.ProxyClient_OperationCancelled, "Received Object Disposed exception -- exiting. {0}", exc);
                return null;
            }
            catch (InvalidOperationException exc)
            {
                // BlockingCollection throws this after CompleteAdding during shutdown.
                logger.Verbose(ErrorCode.ProxyClient_OperationCancelled, "Received Invalid Operation exception -- exiting. {0}", exc);
                return null;
            }
            catch (Exception ex)
            {
                logger.Error(ErrorCode.ProxyClient_ReceiveError, "Unexpected error getting an inbound message", ex);
                return null;
            }
        }

        // Called by gateway connection receivers to deliver an inbound message to the client.
        internal void QueueIncomingMessage(Message msg)
        {
#if TRACK_DETAILED_STATS
            if (StatisticsCollector.CollectQueueStats)
            {
                queueTracking.OnEnQueueRequest(1, PendingInboundMessages.Count, msg);
            }
#endif
            PendingInboundMessages.Add(msg);
        }

        // Drops non-request messages; for requests, queues an unrecoverable rejection
        // response back to the client so the caller's promise is broken promptly.
        private void RejectMessage(Message msg, string reasonFormat, params object[] reasonParams)
        {
            if (!Running) return;

            var reason = String.Format(reasonFormat, reasonParams);
            if (msg.Direction != Message.Directions.Request)
            {
                if (logger.IsVerbose) logger.Verbose(ErrorCode.ProxyClient_DroppingMsg, "Dropping message: {0}. Reason = {1}", msg, reason);
            }
            else
            {
                if (logger.IsVerbose) logger.Verbose(ErrorCode.ProxyClient_RejectingMsg, "Rejecting message: {0}. Reason = {1}", msg, reason);
                MessagingStatisticsGroup.OnRejectedMessage(msg);
                Message error = msg.CreateRejectionResponse(Message.RejectionTypes.Unrecoverable, reason);
                QueueIncomingMessage(error);
            }
        }

        /// <summary>
        /// For testing use only
        /// </summary>
        public void Disconnect()
        {
            foreach (var connection in gatewayConnections.Values)
            {
                connection.Stop();
            }
        }

        /// <summary>
        /// For testing use only.
        /// </summary>
        public void Reconnect()
        {
            throw new NotImplementedException("Reconnect");
        }

        #region Random IMessageCenter stuff

        // Outbound sends go straight to connections, so there is no send queue here.
        public int SendQueueLength
        {
            get { return 0; }
        }

        public int ReceiveQueueLength
        {
            get { return 0; }
        }

        #endregion

        private IClusterTypeManager GetTypeManager(SiloAddress destination, GrainFactory grainFactory)
        {
            return grainFactory.GetSystemTarget<IClusterTypeManager>(Constants.TypeManagerId, destination);
        }

        private SiloAddress GetLiveGatewaySiloAddress()
        {
            var gateway = GatewayManager.GetLiveGateway();

            if (gateway == null)
            {
                throw new OrleansException("Not connected to a gateway");
            }

            return gateway.ToSiloAddress();
        }

        // Replaces the handshake-time client ID with the cluster-assigned geo-client ID;
        // valid only in the Client -> GeoClient direction.
        internal void UpdateClientId(GrainId clientId)
        {
            if (ClientId.Category != UniqueKey.Category.Client)
                throw new InvalidOperationException("Only handshake client ID can be updated with a cluster ID.");

            if (clientId.Category != UniqueKey.Category.GeoClient)
                throw new ArgumentException("Handshake client ID can only be updated with a geo client.", nameof(clientId));

            ClientId = clientId;
        }

        internal void OnGatewayConnectionOpen()
        {
            Interlocked.Increment(ref numberOfConnectedGateways);
        }

        // Notifies the client when the last gateway connection closes (cluster connection lost).
        internal void OnGatewayConnectionClosed()
        {
            if (Interlocked.Decrement(ref numberOfConnectedGateways) == 0)
            {
                GrainClient.NotifyClusterConnectionLost();
            }
        }

        public void Dispose()
        {
            PendingInboundMessages.Dispose();
            if (gatewayConnections != null)
                foreach (var item in gatewayConnections)
                {
                    item.Value.Dispose();
                }
            GatewayManager.Dispose();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System.Collections.Generic;
using Lucene.Net.Support;
using Directory = Lucene.Net.Store.Directory;

namespace Lucene.Net.Index
{
    /// <summary>A <see cref="MergeScheduler" /> that runs each merge using a
    /// separate thread, up until a maximum number of threads
    /// (<see cref="MaxThreadCount" />) at which when a merge is
    /// needed, the thread(s) that are updating the index will
    /// pause until one or more merges completes. This is a
    /// simple way to use concurrency in the indexing process
    /// without having to create and manage application level
    /// threads.
    /// </summary>
    public class ConcurrentMergeScheduler:MergeScheduler
    {
        // Thread priority merge threads run at; -1 means "not yet initialized"
        // (lazily derived in InitMergeThreadPriority from the calling thread).
        private int mergeThreadPriority = - 1;

        // Live merge threads; mutated under lock(this) (threads remove themselves
        // in MergeThread.Run's finally block).
        protected internal IList<MergeThread> mergeThreads = new List<MergeThread>();

        // Max number of threads allowed to be merging at once
        private int _maxThreadCount = 1;

        protected internal Directory dir;

        private bool closed;
        protected internal IndexWriter writer;

        // Monotonically increasing counter used only to name new merge threads.
        protected internal int mergeThreadCount;

        public ConcurrentMergeScheduler()
        {
            if (allInstances != null)
            {
                // Only for testing
                AddMyself();
            }
        }

        /// <summary>Gets or sets the max # simultaneous threads that may be
        /// running. If a merge is necessary yet we already have
        /// this many threads running, the incoming thread (that
        /// is calling add/updateDocument) will block until
        /// a merge thread has completed.
        /// </summary>
        public virtual int MaxThreadCount
        {
            set
            {
                // At least one merge thread must be allowed.
                if (value < 1)
                    throw new System.ArgumentException("count should be at least 1");
                _maxThreadCount = value;
            }
            get { return _maxThreadCount; }
        }

        /// <summary>Return the priority that merge threads run at. By
        /// default the priority is 1 plus the priority of (ie,
        /// slightly higher priority than) the first thread that
        /// calls merge.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
        public virtual int GetMergeThreadPriority()
        {
            lock (this)
            {
                InitMergeThreadPriority();
                return mergeThreadPriority;
            }
        }

        /// <summary>Set the priority that merge threads run at. </summary>
        public virtual void SetMergeThreadPriority(int pri)
        {
            lock (this)
            {
                if (pri > (int) System.Threading.ThreadPriority.Highest || pri < (int) System.Threading.ThreadPriority.Lowest)
                    throw new System.ArgumentException("priority must be in range " + (int) System.Threading.ThreadPriority.Lowest + " .. " + (int) System.Threading.ThreadPriority.Highest + " inclusive");
                mergeThreadPriority = pri;

                // Re-apply the new priority to every thread already running.
                int numThreads = MergeThreadCount();
                for (int i = 0; i < numThreads; i++)
                {
                    MergeThread merge = mergeThreads[i];
                    merge.SetThreadPriority(pri);
                }
            }
        }

        // True when the associated IndexWriter has verbose (infoStream) logging enabled.
        private bool Verbose()
        {
            return writer != null && writer.Verbose;
        }

        // Routes a diagnostic message through the writer's infoStream, tagged "CMS:".
        private void Message(System.String message)
        {
            if (Verbose())
                writer.Message("CMS: " + message);
        }

        private void InitMergeThreadPriority()
        {
            lock (this)
            {
                if (mergeThreadPriority == - 1)
                {
                    // Default to slightly higher priority than our
                    // calling thread
                    mergeThreadPriority = 1 + (System.Int32) ThreadClass.Current().Priority;
                    if (mergeThreadPriority > (int) System.Threading.ThreadPriority.Highest)
                        mergeThreadPriority = (int) System.Threading.ThreadPriority.Highest;
                }
            }
        }

        // NOTE(review): Dispose only flags the scheduler as closed; it does not join
        // running merge threads — callers appear expected to use Sync() for that.
        protected override void Dispose(bool disposing)
        {
            //if (disposing)
            //{
            closed = true;
            //}
        }

        /// <summary>Blocks until every merge thread has finished; woken by the
        /// PulseAll each MergeThread issues on completion.</summary>
        public virtual void Sync()
        {
            lock (this)
            {
                while (MergeThreadCount() > 0)
                {
                    if (Verbose())
                        Message("now wait for threads; currently " + mergeThreads.Count + " still running");

                    int count = mergeThreads.Count;
                    if (Verbose())
                    {
                        for (int i = 0; i < count; i++)
                            Message("    " + i + ": " + mergeThreads[i]);
                    }

                    System.Threading.Monitor.Wait(this);
                }
            }
        }

        // Counts merge threads that are still alive (the list may briefly contain
        // threads that have exited but not yet been removed).
        private int MergeThreadCount()
        {
            lock (this)
            {
                int count = 0;
                int numThreads = mergeThreads.Count;
                for (int i = 0; i < numThreads; i++)
                {
                    if (mergeThreads[i].IsAlive)
                    {
                        count++;
                    }
                }
                return count;
            }
        }

        public override void Merge(IndexWriter writer)
        {
            // TODO: .NET doesn't support this
            // assert !Thread.holdsLock(writer);

            this.writer = writer;

            InitMergeThreadPriority();

            dir = writer.Directory;

            // First, quickly run through the newly proposed merges
            // and add any orthogonal merges (ie a merge not
            // involving segments already pending to be merged) to
            // the queue.  If we are way behind on merging, many of
            // these newly proposed merges will likely already be
            // registered.

            if (Verbose())
            {
                Message("now merge");
                Message("  index: " + writer.SegString());
            }

            // Iterate, pulling from the IndexWriter's queue of
            // pending merges, until it's empty:
            while (true)
            {
                // TODO: we could be careful about which merges to do in
                // the BG (eg maybe the "biggest" ones) vs FG, which
                // merges to do first (the easiest ones?), etc.

                MergePolicy.OneMerge merge = writer.GetNextMerge();
                if (merge == null)
                {
                    if (Verbose())
                        Message("  no more merges pending; now return");
                    return ;
                }

                // We do this w/ the primary thread to keep
                // deterministic assignment of segment names
                writer.MergeInit(merge);

                bool success = false;
                try
                {
                    lock (this)
                    {
                        // Back-pressure: stall the indexing thread until a slot frees up.
                        while (MergeThreadCount() >= _maxThreadCount)
                        {
                            if (Verbose())
                                Message("    too many merge threads running; stalling...");

                            System.Threading.Monitor.Wait(this);
                        }

                        if (Verbose())
                            Message("  consider merge " + merge.SegString(dir));

                        System.Diagnostics.Debug.Assert(MergeThreadCount() < _maxThreadCount);

                        // OK to spawn a new merge thread to handle this
                        // merge:
                        MergeThread merger = GetMergeThread(writer, merge);
                        mergeThreads.Add(merger);
                        if (Verbose())
                            Message("    launch new thread [" + merger.Name + "]");

                        merger.Start();
                        success = true;
                    }
                }
                finally
                {
                    if (!success)
                    {
                        // Thread never started; release the merge back to the writer.
                        writer.MergeFinish(merge);
                    }
                }
            }
        }

        /// <summary>Does the actual merge, by calling <see cref="IndexWriter.Merge" /> </summary>
        protected internal virtual void DoMerge(MergePolicy.OneMerge merge)
        {
            writer.Merge(merge);
        }

        /// <summary>Create and return a new MergeThread </summary>
        protected internal virtual MergeThread GetMergeThread(IndexWriter writer, MergePolicy.OneMerge merge)
        {
            lock (this)
            {
                var thread = new MergeThread(this, writer, merge);
                thread.SetThreadPriority(mergeThreadPriority);
                thread.IsBackground = true;
                thread.Name = "Lucene Merge Thread #" + mergeThreadCount++;
                return thread;
            }
        }

        /// <summary>Worker thread: runs its initial merge, then keeps draining the
        /// writer's pending-merge queue until it is empty.</summary>
        public /*protected internal*/ class MergeThread:ThreadClass
        {
            private void  InitBlock(ConcurrentMergeScheduler enclosingInstance)
            {
                this.enclosingInstance = enclosingInstance;
            }
            private ConcurrentMergeScheduler enclosingInstance;
            public ConcurrentMergeScheduler Enclosing_Instance
            {
                get
                {
                    return enclosingInstance;
                }
            }

            internal IndexWriter writer;
            internal MergePolicy.OneMerge startMerge;
            internal MergePolicy.OneMerge runningMerge;

            public MergeThread(ConcurrentMergeScheduler enclosingInstance, IndexWriter writer, MergePolicy.OneMerge startMerge)
            {
                InitBlock(enclosingInstance);
                this.writer = writer;
                this.startMerge = startMerge;
            }

            public virtual void  SetRunningMerge(MergePolicy.OneMerge merge)
            {
                lock (this)
                {
                    runningMerge = merge;
                }
            }

            public virtual MergePolicy.OneMerge RunningMerge
            {
                get
                {
                    lock (this)
                    {
                        return runningMerge;
                    }
                }
            }

            public virtual void  SetThreadPriority(int pri)
            {
                try
                {
                    Priority = (System.Threading.ThreadPriority) pri;
                }
                catch (System.NullReferenceException)
                {
                    // Strangely, Sun's JDK 1.5 on Linux sometimes
                    // throws NPE out of here...
                }
                catch (System.Security.SecurityException)
                {
                    // Ignore this because we will still run fine with
                    // normal thread priority
                }
            }

            override public void  Run()
            {
                // First time through the while loop we do the merge
                // that we were started with:
                MergePolicy.OneMerge merge = this.startMerge;

                try
                {
                    if (Enclosing_Instance.Verbose())
                        Enclosing_Instance.Message("  merge thread: start");

                    while (true)
                    {
                        SetRunningMerge(merge);
                        Enclosing_Instance.DoMerge(merge);

                        // Subsequent times through the loop we do any new
                        // merge that writer says is necessary:
                        merge = writer.GetNextMerge();
                        if (merge != null)
                        {
                            writer.MergeInit(merge);
                            if (Enclosing_Instance.Verbose())
                                Enclosing_Instance.Message("  merge thread: do another merge " + merge.SegString(Enclosing_Instance.dir));
                        }
                        else
                            break;
                    }

                    if (Enclosing_Instance.Verbose())
                        Enclosing_Instance.Message("  merge thread: done");
                }
                catch (System.Exception exc)
                {
                    // Ignore the exception if it was due to abort:
                    if (!(exc is MergePolicy.MergeAbortedException))
                    {
                        if (!Enclosing_Instance.suppressExceptions)
                        {
                            // suppressExceptions is normally only set during
                            // testing.
                            Lucene.Net.Index.ConcurrentMergeScheduler.anyExceptions = true;
                            Enclosing_Instance.HandleMergeException(exc);
                        }
                    }
                }
                finally
                {
                    lock (Enclosing_Instance)
                    {
                        // Wake both Sync() waiters and Merge() threads stalled on the thread cap.
                        System.Threading.Monitor.PulseAll(Enclosing_Instance);
                        Enclosing_Instance.mergeThreads.Remove(this);
                        bool removed = !Enclosing_Instance.mergeThreads.Contains(this);
                        System.Diagnostics.Debug.Assert(removed);
                    }
                }
            }

            public override System.String ToString()
            {
                MergePolicy.OneMerge merge = RunningMerge ?? startMerge;
                return "merge thread: " + merge.SegString(Enclosing_Instance.dir);
            }
        }

        /// <summary>Called when an exception is hit in a background merge
        /// thread
        /// </summary>
        protected internal virtual void  HandleMergeException(System.Exception exc)
        {
            // When an exception is hit during merge, IndexWriter
            // removes any partial files and then allows another
            // merge to run.  If whatever caused the error is not
            // transient then the exception will keep happening,
            // so, we sleep here to avoid saturating CPU in such
            // cases:
            // (10000 ticks per millisecond * 250 => a 250 ms pause)
            System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 250));
            throw new MergePolicy.MergeException(exc, dir);
        }

        // Set when any merge thread hits an unsuppressed exception; read/reset by the
        // test helpers below.
        internal static bool anyExceptions = false;

        /// <summary>Used for testing </summary>
        public static bool AnyUnhandledExceptions()
        {
            if (allInstances == null)
            {
                throw new System.SystemException("setTestMode() was not called; often this is because your test case's setUp method fails to call super.setUp in LuceneTestCase");
            }
            lock (allInstances)
            {
                int count = allInstances.Count;
                // Make sure all outstanding threads are done so we see
                // any exceptions they may produce:
                for (int i = 0; i < count; i++)
                    allInstances[i].Sync();
                bool v = anyExceptions;
                anyExceptions = false;
                return v;
            }
        }

        public static void  ClearUnhandledExceptions()
        {
            lock (allInstances)
            {
                anyExceptions = false;
            }
        }

        /// <summary>Used for testing </summary>
        private void  AddMyself()
        {
            lock (allInstances)
            {
                int size = allInstances.Count;
                int upto = 0;
                // Compact the list: keep only schedulers that are still open or still
                // have live threads, then append this new instance.
                for (int i = 0; i < size; i++)
                {
                    ConcurrentMergeScheduler other = allInstances[i];
                    if (!(other.closed && 0 == other.MergeThreadCount()))
                        // Keep this one for now: it still has threads or
                        // may spawn new threads
                        allInstances[upto++] = other;
                }
                allInstances.RemoveRange(upto, allInstances.Count - upto);
                allInstances.Add(this);
            }
        }

        private bool suppressExceptions;

        /// <summary>Used for testing </summary>
        public /*internal*/ virtual void  SetSuppressExceptions()
        {
            suppressExceptions = true;
        }

        /// <summary>Used for testing </summary>
        public /*internal*/ virtual void  ClearSuppressExceptions()
        {
            suppressExceptions = false;
        }

        /// <summary>Used for testing </summary>
        private static List<ConcurrentMergeScheduler> allInstances;
        public static void  SetTestMode()
        {
            allInstances = new List<ConcurrentMergeScheduler>();
        }
    }
}
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;

// <auto-generated />
// NOTE(review): SubSonic-generated wrapper for the "Sales by Category" view.
// Do not hand-edit logic here — regeneration will overwrite it.
namespace Northwind{
    /// <summary>
    /// Strongly-typed collection for the SalesByCategory class.
    /// </summary>
    [Serializable]
    public partial class SalesByCategoryCollection : ReadOnlyList<SalesByCategory, SalesByCategoryCollection>
    {
        public SalesByCategoryCollection() {}
    }

    /// <summary>
    /// This is  Read-only wrapper class for the Sales by Category view.
    /// </summary>
    [Serializable]
    public partial class SalesByCategory : ReadOnlyRecord<SalesByCategory>, IReadOnlyRecord
    {

        #region Default Settings
        // Ensures the static schema has been built before any instance/record work.
        protected static void SetSQLProps()
        {
            GetTableSchema();
        }
        #endregion

        #region Schema Accessor
        // Lazily-initialized schema describing the view's columns.
        public static TableSchema.Table Schema
        {
            get
            {
                if (BaseSchema == null)
                {
                    SetSQLProps();
                }
                return BaseSchema;
            }
        }

        private static void GetTableSchema()
        {
            if(!IsSchemaInitialized)
            {
                //Schema declaration
                TableSchema.Table schema = new TableSchema.Table("Sales by Category", TableType.View, DataService.GetInstance("Northwind"));
                schema.Columns = new TableSchema.TableColumnCollection();
                schema.SchemaName = @"dbo";
                //columns

                TableSchema.TableColumn colvarCategoryID = new TableSchema.TableColumn(schema);
                colvarCategoryID.ColumnName = "CategoryID";
                colvarCategoryID.DataType = DbType.Int32;
                colvarCategoryID.MaxLength = 0;
                colvarCategoryID.AutoIncrement = false;
                colvarCategoryID.IsNullable = false;
                colvarCategoryID.IsPrimaryKey = false;
                colvarCategoryID.IsForeignKey = false;
                colvarCategoryID.IsReadOnly = false;

                schema.Columns.Add(colvarCategoryID);

                TableSchema.TableColumn colvarCategoryName = new TableSchema.TableColumn(schema);
                colvarCategoryName.ColumnName = "CategoryName";
                colvarCategoryName.DataType = DbType.String;
                colvarCategoryName.MaxLength = 15;
                colvarCategoryName.AutoIncrement = false;
                colvarCategoryName.IsNullable = false;
                colvarCategoryName.IsPrimaryKey = false;
                colvarCategoryName.IsForeignKey = false;
                colvarCategoryName.IsReadOnly = false;

                schema.Columns.Add(colvarCategoryName);

                TableSchema.TableColumn colvarProductName = new TableSchema.TableColumn(schema);
                colvarProductName.ColumnName = "ProductName";
                colvarProductName.DataType = DbType.String;
                colvarProductName.MaxLength = 40;
                colvarProductName.AutoIncrement = false;
                colvarProductName.IsNullable = false;
                colvarProductName.IsPrimaryKey = false;
                colvarProductName.IsForeignKey = false;
                colvarProductName.IsReadOnly = false;

                schema.Columns.Add(colvarProductName);

                TableSchema.TableColumn colvarProductSales = new TableSchema.TableColumn(schema);
                colvarProductSales.ColumnName = "ProductSales";
                colvarProductSales.DataType = DbType.Currency;
                colvarProductSales.MaxLength = 0;
                colvarProductSales.AutoIncrement = false;
                colvarProductSales.IsNullable = true;
                colvarProductSales.IsPrimaryKey = false;
                colvarProductSales.IsForeignKey = false;
                colvarProductSales.IsReadOnly = false;

                schema.Columns.Add(colvarProductSales);

                BaseSchema = schema;
                //add this schema to the provider
                //so we can query it later
                DataService.Providers["Northwind"].AddSchema("Sales by Category",schema);
            }
        }
        #endregion

        #region Query Accessor
        // Convenience factory for a SubSonic Query bound to this view's schema.
        public static Query CreateQuery()
        {
            return new Query(Schema);
        }
        #endregion

        #region .ctors
        public SalesByCategory()
        {
            SetSQLProps();
            SetDefaults();
            MarkNew();
        }

        public SalesByCategory(bool useDatabaseDefaults)
        {
            SetSQLProps();
            if(useDatabaseDefaults)
            {
                ForceDefaults();
            }
            MarkNew();
        }

        public SalesByCategory(object keyID)
        {
            SetSQLProps();
            LoadByKey(keyID);
        }

        public SalesByCategory(string columnName, object columnValue)
        {
            SetSQLProps();
            LoadByParam(columnName,columnValue);
        }
        #endregion

        #region Props
        [XmlAttribute("CategoryID")]
        [Bindable(true)]
        public int CategoryID
        {
            get { return GetColumnValue<int>("CategoryID"); }
            set { SetColumnValue("CategoryID", value); }
        }

        [XmlAttribute("CategoryName")]
        [Bindable(true)]
        public string CategoryName
        {
            get { return GetColumnValue<string>("CategoryName"); }
            set { SetColumnValue("CategoryName", value); }
        }

        [XmlAttribute("ProductName")]
        [Bindable(true)]
        public string ProductName
        {
            get { return GetColumnValue<string>("ProductName"); }
            set { SetColumnValue("ProductName", value); }
        }

        // Nullable because the underlying view column allows NULL (IsNullable = true above).
        [XmlAttribute("ProductSales")]
        [Bindable(true)]
        public decimal? ProductSales
        {
            get { return GetColumnValue<decimal?>("ProductSales"); }
            set { SetColumnValue("ProductSales", value); }
        }
        #endregion

        #region Columns Struct
        // Column-name constants for building queries without magic strings.
        public struct Columns
        {
            public static string CategoryID = @"CategoryID";
            public static string CategoryName = @"CategoryName";
            public static string ProductName = @"ProductName";
            public static string ProductSales = @"ProductSales";
        }
        #endregion

        #region IAbstractRecord Members
        public new CT GetColumnValue<CT>(string columnName)
        {
            return base.GetColumnValue<CT>(columnName);
        }
        public object GetColumnValue(string columnName)
        {
            return base.GetColumnValue<object>(columnName);
        }
        #endregion
    }
}
using System;
using System.Linq;
using NUnit.Framework;
using StructureMap.Graph;
using StructureMap.Testing.Widget;
using StructureMap.Testing.Widget3;

namespace StructureMap.Testing.Graph
{
    /// <summary>
    /// Verifies that a Container can be reconfigured after construction:
    /// injecting ready-made instances, registering new defaults and named
    /// instances, and discovering plugins by scanning assemblies at runtime.
    /// </summary>
    [TestFixture]
    public class DynamicInjectionTester
    {
        private readonly IService _red = new ColorService("Red");
        private readonly IService _blue = new ColorService("Blue");
        private readonly IService _orange = new ColorService("Orange");

        public interface IService<T>
        {
        }

        public class Service1<T> : IService<T>
        {
        }

        public class Service2<T> : IService<T>
        {
        }

        public class Service3<T> : IService<T>
        {
        }

        public interface IOtherService<T>
        {
        }

        public class Service4 : IOtherService<string>
        {
        }

        //[PluginFamily("Default")]
        public interface IThingy
        {
        }

        //[Pluggable("Default")]
        public class TheThingy : IThingy
        {
        }

        public class TheWidget : IWidget
        {
            public void DoSomething()
            {
                throw new NotImplementedException();
            }
        }

        [Test]
        public void AddANewDefaultTypeForAPluginTypeThatAlreadyExists()
        {
            // A later Configure() replaces the default chosen at construction time.
            var container = new Container(x => x.For<ISomething>().Use<SomethingTwo>());
            container.Configure(x => x.For<ISomething>().Use<SomethingOne>());

            container.GetInstance<ISomething>().ShouldBeOfType<SomethingOne>();
        }

        [Test]
        public void AddANewDefaultTypeForAPluginTypeThatAlreadyExists2()
        {
            // Declare the plugin type first with no default, then supply one later.
            var container = new Container(x => x.For<ISomething>());
            container.Configure(x => x.For<ISomething>().Use<SomethingOne>());

            container.GetInstance<ISomething>().ShouldBeOfType<SomethingOne>();
        }

        [Test]
        public void AddInstanceFromContainer()
        {
            var injected = new SomethingOne();
            var container = new Container();
            container.Inject<ISomething>(injected);

            Assert.AreSame(injected, container.GetInstance<ISomething>());
        }

        [Test]
        public void AddInstanceToInstanceManagerWhenTheInstanceFactoryDoesNotExist()
        {
            // Configuring named instances should work even when no factory exists yet.
            IContainer container = new Container(new PluginGraph());
            container.Configure(r => r.For<IService>().AddInstances(x =>
            {
                x.Object(_red).Named("Red");
                x.Object(_blue).Named("Blue");
            }));

            Assert.AreSame(_red, container.GetInstance(typeof (IService), "Red"));
            Assert.AreSame(_blue, container.GetInstance(typeof (IService), "Blue"));
        }

        [Test]
        public void AddNamedInstanceByType()
        {
            var container = new Container(r => r.For<ISomething>().AddInstances(x =>
            {
                x.Type<SomethingOne>().Named("One");
                x.Type<SomethingTwo>().Named("Two");
            }));

            container.GetInstance<ISomething>("One").ShouldBeOfType<SomethingOne>();
            container.GetInstance<ISomething>("Two").ShouldBeOfType<SomethingTwo>();
        }

        [Test]
        public void AddNamedInstanceToobjectFactory()
        {
            var first = new SomethingOne();
            var second = new SomethingOne();
            var container = new Container(r => r.For<ISomething>().AddInstances(x =>
            {
                x.Object(first).Named("One");
                x.Object(second).Named("Two");
            }));

            Assert.AreSame(first, container.GetInstance<ISomething>("One"));
            Assert.AreSame(second, container.GetInstance<ISomething>("Two"));
        }

        [Test]
        public void AddPluginForTypeWhenThePluginDoesNotAlreadyExistsDoesNothing()
        {
            var pluginGraph = new PluginGraph();
            IContainer container = new Container(pluginGraph);
            container.Configure(r => r.For<ISomething>().Use<SomethingOne>());

            container.GetAllInstances<ISomething>()
                .Single()
                .ShouldBeOfType<SomethingOne>();
        }

        [Test]
        public void AddTypeThroughContainer()
        {
            var container = new Container(x => x.For<ISomething>().Use<SomethingOne>());

            container.GetInstance<ISomething>().ShouldBeOfType<SomethingOne>();
        }

        [Test]
        public void Add_an_assembly_in_the_Configure()
        {
            // Scanning inside Configure() should pick up concrete IThingy types.
            var container = new Container();
            container.Configure(registry => registry.Scan(x =>
            {
                x.TheCallingAssembly();
                x.AddAllTypesOf<IThingy>();
            }));

            container.GetInstance<IThingy>().ShouldBeOfType<TheThingy>();
        }

        [Test]
        public void Add_an_assembly_on_the_fly_and_pick_up_plugins()
        {
            var container = new Container();
            container.Configure(registry => registry.Scan(x =>
            {
                x.TheCallingAssembly();
                x.AddAllTypesOf<IWidget>();
            }));

            container.GetAllInstances<IWidget>().OfType<TheWidget>().Any().ShouldBeTrue();
        }

        [Test]
        public void Add_an_assembly_on_the_fly_and_pick_up_plugins3()
        {
            // Same as above, but using the non-generic AddAllTypesOf overload.
            var container = new Container();
            container.Configure(registry => registry.Scan(x =>
            {
                x.TheCallingAssembly();
                x.AddAllTypesOf(typeof (IWidget));
            }));

            container.GetAllInstances<IWidget>()
                .OfType<TheWidget>()
                .Any().ShouldBeTrue();
        }

        [Test]
        public void Add_an_assembly_on_the_fly_and_pick_up_plugins4()
        {
            // Open-generic plugin types can be discovered by scanning too.
            var container = new Container();
            container.Configure(registry => registry.Scan(x =>
            {
                x.AssemblyContainingType(typeof (IOtherService<>));
                x.AddAllTypesOf(typeof (IOtherService<>));
            }));

            var instances = container.GetAllInstances<IOtherService<string>>();
            instances.Any(s => s is Service4).ShouldBeTrue();
        }

        [Test]
        public void Add_generic_stuff_in_configure()
        {
            var container = new Container();
            container.Configure(registry =>
            {
                registry.For(typeof (IService<>)).Add(typeof (Service1<>));
                registry.For(typeof (IService<>)).Add(typeof (Service2<>));
            });

            container.GetAllInstances<IService<string>>().Count().ShouldEqual(2);
        }

        [Test]
        public void InjectType()
        {
            var container = new Container(r => r.For<ISomething>().Use<SomethingOne>());

            container.GetAllInstances<ISomething>()
                .Single()
                .ShouldBeOfType<SomethingOne>();
        }

        [Test]
        public void JustAddATypeWithNoNameAndDefault()
        {
            var container = new Container(x => x.For<ISomething>().Use<SomethingOne>());

            container.GetInstance<ISomething>().ShouldBeOfType<SomethingOne>();
        }

        [Test]
        public void OverwriteInstanceFromObjectFactory()
        {
            // The most recent Inject() wins.
            var first = new SomethingOne();
            var second = new SomethingOne();
            var container = new Container();
            container.Inject<ISomething>(first);
            container.Inject<ISomething>(second);

            Assert.AreSame(second, container.GetInstance<ISomething>());
        }
    }

    public class SomethingOne : ISomething
    {
        public void Go()
        {
            throw new NotImplementedException();
        }
    }

    public class SomethingTwo : ISomething
    {
        public void Go()
        {
            throw new NotImplementedException();
        }
    }
}
/*
 * MindTouch Dream - a distributed REST framework
 * Copyright (C) 2006-2011 MindTouch, Inc.
 * www.mindtouch.com  oss@mindtouch.com
 *
 * For community documentation and downloads visit wiki.developer.mindtouch.com;
 * please review the licensing section.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using log4net;
using log4net.Config;
using MindTouch.Dream.Test;
using MindTouch.Tasking;
using MindTouch.Xml;
using NUnit.Framework;

namespace MindTouch.Dream.Storage.Test {
    using Yield = IEnumerator<IYield>;

    /// <summary>
    /// Integration tests for the Dream storage service: stores/retrieves files and XML,
    /// verifies TTL expiration, and exercises per-service and shared private storage
    /// through proxy test services hosted on a random-port Dream host.
    /// </summary>
    [TestFixture]
    public class StorageTest {

        //--- Constants ---
        public const string TEST_CONTENTS = "Sample content";
        public const string TEST_FILE_URI = "testfile";
        public const string TEST_SHARED_PATH = "shared";
        private const string TEST_PATH = "public-storage-proxy";
        private const string CROSS_TEST_PATH = "public-storage-crossover";

        //--- Class Fields ---
        // Path segments ("testfolder/testfile") used by most file round-trip tests.
        private static readonly string[] _fileUri = new string[] { "testfolder", "testfile" };
        private static readonly log4net.ILog _log = LogUtils.CreateLog();

        //--- Fields ---
        private Plug _storage;
        private DreamHostInfo _hostInfo;
        private string _folder;
        private string _storageFolder;
        private Plug _testService;
        public static XUri _crossServiceUri;
        private Plug _testCrossService;

        // One-time fixture setup: spin up a Dream host with a temp service directory,
        // then create the storage service and the two proxy test services.
        [TestFixtureSetUp]
        public void Init() {
            BasicConfigurator.Configure();
            _folder = Path.GetTempPath();
            Directory.CreateDirectory(_folder);
            _storageFolder = Path.Combine(Path.GetTempPath(), StringUtil.CreateAlphaNumericKey(6));
            Directory.CreateDirectory(_storageFolder);
            XDoc config = new XDoc("config").Elem("service-dir", _folder);
            _hostInfo = DreamTestHelper.CreateRandomPortHost(config);
            CreateStorageService();
            CreateStorageServiceProxies();
        }

        [TestFixtureTearDown]
        public void GlobalCleanup() {
            LogManager.Shutdown();
            _hostInfo.Dispose();
        }

        // Smoke test: fixture setup alone must succeed.
        [Test]
        public void Can_init() { }

        // Round-trips a real on-disk file through PUT/GET, then deletes it.
        [Test]
        public void TestSendFile() {
            string filename = Path.GetTempFileName();
            using(Stream s = File.OpenWrite(filename)) {
                byte[] data = Encoding.UTF8.GetBytes(TEST_CONTENTS);
                s.Write(data, 0, data.Length);
            }

            // add a file
            _storage.At(_fileUri).Put(DreamMessage.FromFile(filename, false));
            File.Delete(filename);

            // get file and compare contents
            string contents = _storage.At(_fileUri).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);

            // delete file
            _storage.At(_fileUri).Delete();
        }

        [Test]
        public void Can_store_and_retrieve_xml() {
            string file = "foo.xml";

            // add a file to it
            _storage.At(file).Put(new XDoc("foo").Elem("bar","baz"));

            // get file and compare contents
            XDoc doc = _storage.At(file).Get().ToDocument();
            Assert.AreEqual("baz", doc["bar"].AsText);

            // delete file
            _storage.At(file).Delete();
        }

        [Test]
        public void Can_store_files_at_service_root_level() {
            string file = "foo.txt";

            // add a file to it
            _storage.At(file).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));

            // get file and compare contents
            string contents = _storage.At(file).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);

            // delete file
            _storage.At(file).Delete();
        }

        // Verifies nesting at one, two, and three folder levels.
        [Test]
        public void Can_store_files_at_any_depth() {
            string file = "foo.txt";

            // add a file to it
            _storage.At("foo", file).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));

            // get file and compare contents
            string contents = _storage.At("foo", file).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);

            // delete file
            _storage.At("foo", file).Delete();

            // add a file to it
            _storage.At("foo", "bar", file).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));

            // get file and compare contents
            contents = _storage.At("foo", "bar", file).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);

            // delete file
            _storage.At("foo", "bar", file).Delete();

            // add a file to it
            _storage.At("foo", "bar", "baz", file).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));

            // get file and compare contents
            contents = _storage.At("foo", "bar", "baz", file).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);

            // delete file
            _storage.At("foo", "bar", "baz", file).Delete();
        }

        // Deleting a folder must remove its entire subtree, leave siblings intact,
        // and subsequent GETs on the deleted folder must 404.
        [Test]
        public void Delete_of_subdir_wipes_all_children() {
            string file = "foo.txt";

            // create some files
            _storage.At("foo", file).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));
            _storage.At("foo", "bar", file).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));
            _storage.At("foo", "bar", "baz", file).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));
            XDoc files = _storage.At("foo", "bar").Get().ToDocument();
            Assert.AreEqual("baz", files["folder/name"].Contents);
            Assert.AreEqual("foo.txt", files["file/name"].Contents);
            files = _storage.At("foo").Get().ToDocument();
            Assert.AreEqual("bar", files["folder/name"].Contents);
            Assert.AreEqual("foo.txt", files["file/name"].Contents);
            _storage.At("foo", "bar").Delete();
            DreamMessage response = _storage.At("foo", "bar").GetAsync().Wait();
            Assert.IsFalse(response.IsSuccessful);
            Assert.AreEqual(DreamStatus.NotFound, response.Status);
            files = _storage.At("foo").Get().ToDocument();
            Assert.IsTrue(files["folder"].IsEmpty);
            Assert.AreEqual("foo.txt", files["file/name"].Contents);
            _storage.At("foo").Delete();
        }

        // DELETE on a non-existent path is idempotent (Ok, not NotFound).
        [Test]
        public void Delete_of_random_path_is_ok() {
            DreamMessage response = _storage.At("foo", "bar", "baz").DeleteAsync().Wait();
            Assert.IsTrue(response.IsSuccessful);
            Assert.AreEqual(DreamStatus.Ok, response.Status);
        }

        [Test]
        public void Head_on_folder_is_ok() {
            DreamMessage response = _storage.HeadAsync().Wait();
            Assert.IsTrue(response.IsSuccessful);
            Assert.AreEqual(DreamStatus.Ok, response.Status);
        }

        // PUTting a folder path as file content is expected to fail.
        [Ignore("this needs to be revised")]
        [Test]
        [ExpectedException(typeof(DreamResponseException))]
        public void TestSendFolder_Fail() {
            string foldername = Path.GetTempPath();
            DreamMessage folderMsg = DreamMessage.FromFile(foldername, false);

            // add a file
            _storage.At(_fileUri).Put(folderMsg);

            // delete file (should never happen)
            _storage.At(_fileUri).Delete();
        }

        [Test]
        public void TestPutGetDelete() {

            // add a file to it
            _storage.At(_fileUri).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));

            // get file and compare contents
            string contents = _storage.At(_fileUri).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);

            // delete file
            _storage.At(_fileUri).Delete();
        }

        // HEAD must report the stored content length without a body.
        [Test]
        public void TestPutHeadDelete() {

            // add a file to it
            _storage.At(_fileUri).Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));

            // get file and compare contents
            DreamMessage response = _storage.At(_fileUri).Invoke(Verb.HEAD, DreamMessage.Ok());
            Assert.AreEqual(TEST_CONTENTS.Length, response.ContentLength);

            // delete file
            _storage.At(_fileUri).Delete();
        }

        // A file stored with ttl=2s must be readable immediately and 404 after expiry.
        [Test]
        public void TestPutGetTTL() {

            // add a file
            _storage.At(_fileUri).With("ttl", "2").Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));
            _log.DebugFormat("File stored at: {0}", DateTime.UtcNow);

            // get file and compare contents
            string contents = _storage.At(_fileUri).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);
            _log.DebugFormat("check file at: {0}", DateTime.UtcNow);
            System.Threading.Thread.Sleep(TimeSpan.FromSeconds(4));

            // get file and compare contents
            _log.DebugFormat("Checking for expired file at: {0}", DateTime.UtcNow);
            DreamMessage response = _storage.At(_fileUri).GetAsync().Wait();
            Assert.AreEqual(DreamStatus.NotFound, response.Status);
        }

        // TTLs must survive a storage-service restart (expiry state is persisted).
        [Ignore("issues with service deletion")]
        [Test]
        public void TestPutRestartGetTTL() {

            // add a file to it
            _storage.At(_fileUri).With("ttl", "5").Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS));

            // get file and compare contents
            string contents = _storage.At(_fileUri).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);

            // destroy storage service
            DestroyStorageService();
            System.Threading.Thread.Sleep(TimeSpan.FromSeconds(6));

            // re-create storage service
            CreateStorageService();

            // get file and compare contents
            contents = _storage.At(_fileUri).Get().ToText();
            Assert.AreEqual(TEST_CONTENTS, contents);
            System.Threading.Thread.Sleep(TimeSpan.FromSeconds(10));

            // get file and compare contents
            DreamMessage response = _storage.At(_fileUri).GetAsync().Wait();
            Assert.AreEqual(DreamStatus.NotFound, response.Status);
        }

        [Test]
        public void Access_to_host_shared_private_service_should_be_forbidden() {
            DreamMessage response = _hostInfo.LocalHost.At("host", "$store").GetAsync().Wait();
            Assert.IsFalse(response.IsSuccessful, response.ToText());
        }

        // The following tests drive the proxy services, which exercise their own
        // private storage and report success/failure via the POST response.
        [Test]
        public void Service_can_store_and_retrieve_file() {
            DreamMessage response = _testService.AtPath("create-retrieve-delete").PostAsync().Wait();
            Assert.IsTrue(response.IsSuccessful, response.ToText());
        }

        [Test]
        public void Service_can_store_and_retrieve_head() {
            DreamMessage response = _testService.AtPath("create-retrievehead-delete").PostAsync().Wait();
            Assert.IsTrue(response.IsSuccessful, response.ToText());
        }

        [Test]
        public void Service_storage_will_expire_file() {
            DreamMessage response = _testService.AtPath("create-expire").PostAsync().Wait();
            Assert.IsTrue(response.IsSuccessful, response.ToText());
        }

        [Test]
        public void Service_can_store_and_retrieve_file_from_another_services_shared_private_storage() {
            DreamMessage response = _testCrossService.AtPath("create-retrieve-delete").PostAsync().Wait();
            Assert.IsTrue(response.IsSuccessful, response.ToText());
        }

        // One service writes to shared storage; the other reads and deletes it.
        [Test]
        public void Services_can_manipulate_shared_private_storage() {
            DreamMessage response = _testService.AtPath("shared-create").PostAsync().Wait();
            Assert.IsTrue(response.IsSuccessful, response.ToText());
            response = _testCrossService.AtPath("shared-retrieve-delete").PostAsync().Wait();
            Assert.IsTrue(response.IsSuccessful, response.ToText());
        }

        [TearDown]
        public void DeinitTest() {
            System.GC.Collect();
        }

        // Creates the storage service on the shared host and points _storage at it.
        private void CreateStorageService() {

            // create storage service
            XDoc config = new XDoc("config");
            config.Elem("path", "storage");
            config.Elem("sid", "sid://mindtouch.com/2007/03/dream/storage");
            config.Elem("folder", _storageFolder);
            //DreamMessage result = _host.Self.At("services").PostAsync(config).Wait();
            DreamMessage result = _hostInfo.LocalHost.At("host", "services").With("apikey", _hostInfo.ApiKey).PostAsync(config).Wait();
            Assert.IsTrue(result.IsSuccessful, result.ToText());

            // initialize storage plug
            _storage = _hostInfo.LocalHost.At("storage");
        }

        private void DestroyStorageService() {
            DreamMessage response = _storage.DeleteAsync().Wait();
            Assert.IsTrue(response.IsSuccessful, response.ToText());
        }

        // Loads the test assembly into the host and mounts the two proxy services
        // used by the Service_* tests above.
        private void CreateStorageServiceProxies() {
            _hostInfo.Host.Self.At("load").With("name", "test.mindtouch.storage").Post(DreamMessage.Ok());
            _hostInfo.Host.Self.At("services").Post(
                new XDoc("config")
                    .Elem("class", typeof(TestServiceWithPublicStorage).FullName)
                    .Elem("path", TEST_PATH));
            _testService = Plug.New(_hostInfo.Host.LocalMachineUri).At(TEST_PATH);
            _hostInfo.Host.Self.At("services").Post(
                new XDoc("config")
                    .Elem("class", typeof(TestCrossServiceStorageAccessor).FullName)
                    .Elem("path", CROSS_TEST_PATH));
            _testCrossService = Plug.New(_hostInfo.Host.LocalMachineUri).At(CROSS_TEST_PATH);
        }

        /// <summary>
        /// Proxy Dream service whose features exercise the service's own private
        /// storage; each feature responds with success/failure for the tests above.
        /// </summary>
        [DreamService("TestServiceWithPublicStorage", "Copyright (c) 2008 MindTouch, Inc.",
            Info = "",
            SID = new string[] { "sid://mindtouch.com/TestServiceWithPublicStorage" }
        )]
        public class TestServiceWithPublicStorage : DreamService {

            //--- Class Fields ---
            private static readonly ILog _log = LogUtils.CreateLog();

            [DreamFeature("POST:create-retrieve-delete", "Create and retrieve test")]
            public Yield TestCreateRetrieveDelete(DreamContext context, DreamMessage request, Result<DreamMessage> response) {
                string filename = Path.GetTempFileName();
                using(Stream s =
File.OpenWrite(filename)) { byte[] data = Encoding.UTF8.GetBytes(TEST_CONTENTS); s.Write(data, 0, data.Length); } _log.Debug("created file"); // add a file _log.DebugFormat("storage path: {0}", Storage.Uri); Storage.AtPath(TEST_FILE_URI).Put(DreamMessage.FromFile(filename, false)); File.Delete(filename); _log.Debug("put file"); // get file and compare contents string contents = Storage.AtPath(TEST_FILE_URI).Get().ToText(); Assert.AreEqual(TEST_CONTENTS, contents); _log.Debug("got file"); // delete file Storage.AtPath(TEST_FILE_URI).Delete(); _log.Debug("deleted file"); response.Return(DreamMessage.Ok()); yield break; } [DreamFeature("POST:create-retrievehead-delete", "Create and retrieve head test")] public Yield TestCreateRetrieveHeadDelete(DreamContext context, DreamMessage request, Result<DreamMessage> response) { string filename = Path.GetTempFileName(); using(Stream s = File.OpenWrite(filename)) { byte[] data = Encoding.UTF8.GetBytes(TEST_CONTENTS); s.Write(data, 0, data.Length); } _log.Debug("created file"); // add a file Storage.AtPath(TEST_FILE_URI).Put(DreamMessage.FromFile(filename, false)); File.Delete(filename); _log.Debug("put file"); // get file and compare contents DreamMessage headResponse = Storage.AtPath(TEST_FILE_URI).Invoke(Verb.HEAD, DreamMessage.Ok()); Assert.AreEqual(TEST_CONTENTS.Length, headResponse.ContentLength); _log.Debug("got content length"); // delete file Storage.AtPath(TEST_FILE_URI).Delete(); _log.Debug("deleted file"); response.Return(DreamMessage.Ok()); yield break; } [DreamFeature("POST:create-expire", "Create and expire test")] public Yield TestCreateTtlExpire(DreamContext context, DreamMessage request, Result<DreamMessage> response) { // add a file Storage.AtPath(TEST_FILE_URI).With("ttl", "2").Put(DreamMessage.Ok(MimeType.TEXT, TEST_CONTENTS)); _log.DebugFormat("File stored at: {0}", DateTime.UtcNow); // get file and compare contents string contents = Storage.AtPath(TEST_FILE_URI).Get().ToText(); 
Assert.AreEqual(TEST_CONTENTS, contents); _log.DebugFormat("check file at: {0}", DateTime.UtcNow); System.Threading.Thread.Sleep(TimeSpan.FromSeconds(4)); // get file and compare contents _log.DebugFormat("Checking for expired file at: {0}", DateTime.UtcNow); DreamMessage getResponse = Storage.AtPath(TEST_FILE_URI).GetAsync().Wait(); Assert.AreEqual(DreamStatus.NotFound, getResponse.Status); response.Return(DreamMessage.Ok()); yield break; } [DreamFeature("POST:shared-create", "Create and retrieve test")] public Yield TestCreateForCrossService(DreamContext context, DreamMessage request, Result<DreamMessage> response) { string filename = Path.GetTempFileName(); using(Stream s = File.OpenWrite(filename)) { byte[] data = Encoding.UTF8.GetBytes(TEST_CONTENTS); s.Write(data, 0, data.Length); } _log.Debug("created file"); // derive shared storage path Plug sharedStorage = Plug.New(Storage.Uri.WithoutLastSegment().At(TEST_SHARED_PATH)); _log.DebugFormat("shared storage: {0}", sharedStorage.Uri); // add a file sharedStorage.AtPath(TEST_FILE_URI).Put(DreamMessage.FromFile(filename, false)); File.Delete(filename); _log.Debug("put file"); // get file and compare contents string contents = sharedStorage.AtPath(TEST_FILE_URI).Get().ToText(); Assert.AreEqual(TEST_CONTENTS, contents); _log.Debug("got file"); response.Return(DreamMessage.Ok()); yield break; } protected override Yield Start(XDoc config, Result result) { yield return Coroutine.Invoke(base.Start, config, new Result()); _crossServiceUri = Storage.Uri; result.Return(); } } [DreamService("TestCrossServiceStorageAccessor", "Copyright (c) 2008 MindTouch, Inc.", Info = "", SID = new string[] { "sid://mindtouch.com/TestCrossServiceStorageAccessor" } )] public class TestCrossServiceStorageAccessor : DreamService { //--- Class Fields --- private static readonly ILog _log = LogUtils.CreateLog(); [DreamFeature("POST:shared-retrieve-delete", "Create and retrieve test")] public Yield TestSharedRetrieveDelete(DreamContext context, 
DreamMessage request, Result<DreamMessage> response) { string filename = Path.GetTempFileName(); using(Stream s = File.OpenWrite(filename)) { byte[] data = Encoding.UTF8.GetBytes(TEST_CONTENTS); s.Write(data, 0, data.Length); } _log.Debug("created file"); // derive shared storage path Plug sharedStorage = Plug.New(Storage.Uri.WithoutLastSegment().At(TEST_SHARED_PATH)); _log.DebugFormat("shared storage: {0}", sharedStorage.Uri); // get file and compare contents string contents = sharedStorage.AtPath(TEST_FILE_URI).Get().ToText(); Assert.AreEqual(TEST_CONTENTS, contents); _log.Debug("got file"); // delete file sharedStorage.AtPath(TEST_FILE_URI).Delete(); _log.Debug("deleted file"); response.Return(DreamMessage.Ok()); yield break; } [DreamFeature("POST:create-retrieve-delete", "Create and retrieve test")] public Yield TestCreateRetrieveDelete(DreamContext context, DreamMessage request, Result<DreamMessage> response) { string filename = Path.GetTempFileName(); using(Stream s = File.OpenWrite(filename)) { byte[] data = Encoding.UTF8.GetBytes(TEST_CONTENTS); s.Write(data, 0, data.Length); } _log.Debug("created file"); // add a file Plug cross = Plug.New(_crossServiceUri); _log.DebugFormat("cross service path storage path: {0}", cross.Uri); cross.AtPath(TEST_FILE_URI).Put(DreamMessage.FromFile(filename, false)); File.Delete(filename); _log.Debug("put file"); // get file and compare contents string contents = cross.AtPath(TEST_FILE_URI).Get().ToText(); Assert.AreEqual(TEST_CONTENTS, contents); _log.Debug("got file"); // delete file cross.AtPath(TEST_FILE_URI).Delete(); _log.Debug("deleted file"); response.Return(DreamMessage.Ok()); yield break; } } } }
// // ButtonBackend.cs // // Author: // Lluis Sanchez <lluis@xamarin.com> // // Copyright (c) 2011 Xamarin Inc // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using Xwt.Backends;
using Xwt.Drawing;
using Xwt.CairoBackend;

namespace Xwt.GtkBackend
{
	/// <summary>
	/// Gtk implementation of the Xwt button backend. Maps Xwt button content,
	/// style, font and color settings onto a Gtk.Button and relays click events
	/// to the frontend through the event sink.
	/// </summary>
	public partial class ButtonBackend: WidgetBackend, IButtonBackend
	{
		// When set, HandleWidgetClicked suppresses forwarding clicks to the frontend
		// (used by subclasses that change button state programmatically).
		protected bool ignoreClickEvents;
		ImageDescription image;
		Pango.FontDescription customFont;
		Gtk.Label labelWidget;

		public ButtonBackend ()
		{
		}

		public override void Initialize ()
		{
			NeedsEventBox = false;
			Widget = new Gtk.Button ();
			// Grab the default only once the widget is realized; CanDefault may be
			// set before the button is attached to a toplevel window.
			Widget.Realized += (o, arg) => {
				if (Widget.IsRealized && Widget.CanDefault)
					Widget.GrabDefault();
			};
			base.Widget.Show ();
		}

		protected new Gtk.Button Widget {
			get { return (Gtk.Button)base.Widget; }
			set { base.Widget = value; }
		}

		protected new IButtonEventSink EventSink {
			get { return (IButtonEventSink)base.EventSink; }
		}

		protected override void OnSetBackgroundColor (Color color)
		{
			// Apply to both Normal and Prelight so the color survives hover.
			Widget.SetBackgroundColor (color);
			Widget.SetBackgroundColor (Gtk.StateType.Prelight, color);
		}

		Color? customLabelColor;

		/// <summary>
		/// Label color override. Falls back to the theme's Normal foreground color
		/// when no custom color has been set.
		/// </summary>
		public virtual Color LabelColor {
			get {
				return customLabelColor.HasValue ? customLabelColor.Value : Widget.Style.Foreground (Gtk.StateType.Normal).ToXwtValue ();
			}
			set {
				customLabelColor = value;
				Widget.SetForegroundColor (value);
				Widget.SetForegroundColor (Gtk.StateType.Prelight, value);
				if (labelWidget != null) {
					labelWidget.SetForegroundColor (value);
					labelWidget.SetForegroundColor (Gtk.StateType.Prelight, value);
				}
			}
		}

		public virtual bool IsDefault {
			get { return Widget.CanDefault; }
			set { Widget.CanDefault = value; }
		}

		public override object Font {
			get { return base.Font; }
			set {
				base.Font = value;
				customFont = value as Pango.FontDescription;
				// Rebuild the content so the (possibly newly created) label widget
				// picks up the custom font.
				SetButtonType (ButtonType.Normal);
			}
		}

		/// <summary>
		/// Builds the button's child widgets from label, image and position, taking
		/// the frontend's button type (Normal/Disclosure/DropDown) into account.
		/// </summary>
		public void SetContent (string label, bool useMnemonic, ImageDescription image, ContentPosition position)
		{
			Widget.UseUnderline = useMnemonic;
			this.image = image;
			if (label != null && label.Length == 0)
				label = null;

			Button b = (Button) Frontend;
			// Fast path: plain text button needs no custom child widget.
			if (label != null && image.Backend == null && b.Type == ButtonType.Normal) {
				Widget.Label = label;
				return;
			}

			if (b.Type == ButtonType.Disclosure) {
				Widget.Label = null;
				Widget.Image = new Gtk.Arrow (Gtk.ArrowType.Down, Gtk.ShadowType.Out);
				Widget.Image.ShowAll ();
				return;
			}

			Gtk.Widget contentWidget = null;
			Gtk.Widget imageWidget = null;
			if (image.Backend != null)
				imageWidget = new ImageBox (ApplicationContext, image.WithDefaultSize (Gtk.IconSize.Button));

			labelWidget = null;

			if (label != null && imageWidget == null) {
				contentWidget = labelWidget = new Gtk.Label (label);
			}
			else if (label == null && imageWidget != null) {
				contentWidget = imageWidget;
			}
			else if (label != null && imageWidget != null) {
				// Pack image and label into a box oriented by the requested position.
				Gtk.Box box = position == ContentPosition.Left || position == ContentPosition.Right ? (Gtk.Box) new Gtk.HBox (false, 3) : (Gtk.Box) new Gtk.VBox (false, 3);
				labelWidget = new Gtk.Label (label) { UseUnderline = useMnemonic };

				if (position == ContentPosition.Left || position == ContentPosition.Top) {
					box.PackStart (imageWidget, false, false, 0);
					box.PackStart (labelWidget, false, false, 0);
				} else {
					box.PackStart (labelWidget, false, false, 0);
					box.PackStart (imageWidget, false, false, 0);
				}
				contentWidget = box;
			}

			var expandButtonContent = false;
			if (b.Type == ButtonType.DropDown) {
				// Append the drop-down arrow after whatever content was built above.
				if (contentWidget != null) {
					Gtk.HBox box = new Gtk.HBox (false, 3);
					box.PackStart (contentWidget, true, true, 3);
					box.PackStart (new Gtk.Arrow (Gtk.ArrowType.Down, Gtk.ShadowType.Out), false, false, 0);
					contentWidget = box;
					expandButtonContent = true;
				} else
					contentWidget = new Gtk.Arrow (Gtk.ArrowType.Down, Gtk.ShadowType.Out);
			}

			if (contentWidget != null) {
				contentWidget.ShowAll ();
				Widget.Label = null;
				Widget.Image = contentWidget;
				var alignment = Widget.Child as Gtk.Alignment;
				if (alignment != null) {
					if (expandButtonContent) {
						var box = alignment.Child as Gtk.Box;
						if (box != null) {
							// Stretch the content so the drop-down arrow hugs the right edge.
							alignment.Xscale = 1;
							box.SetChildPacking (box.Children [0], true, true, 0, Gtk.PackType.Start);
							if (labelWidget != null)
								labelWidget.Xalign = 0;
						}
					} else if (position == ContentPosition.Left && (contentWidget is Gtk.Box)) {
						// in case the button is wider than its natural size and has text
						// and an image on the left, optimize its alignment to make the
						// text more centered.
						// FIXME: more sophisticated size calculation
						alignment.Xalign = 0.475f;
					}
				}
				if (labelWidget != null) {
					labelWidget.UseUnderline = useMnemonic;
					if (customFont != null)
						labelWidget.ModifyFont (customFont);
					if (customLabelColor.HasValue) {
						labelWidget.SetForegroundColor (customLabelColor.Value);
						labelWidget.SetForegroundColor (Gtk.StateType.Prelight, customLabelColor.Value);
					}
				}
			} else
				Widget.Label = null;
		}

		public void SetButtonStyle (ButtonStyle style)
		{
			switch (style) {
			case ButtonStyle.Normal:
				SetMiniMode (false);
				Widget.Relief = Gtk.ReliefStyle.Normal;
				break;
			case ButtonStyle.Flat:
				SetMiniMode (false);
				Widget.Relief = Gtk.ReliefStyle.None;
				break;
			case ButtonStyle.AlwaysBorderless:
			case ButtonStyle.Borderless:
				SetMiniMode (true);
				Widget.Relief = Gtk.ReliefStyle.None;
				break;
			}
		}

		public void SetButtonType (ButtonType type)
		{
			// Content layout depends on the frontend's current state, so rebuild it.
			Button b = (Button) Frontend;
			SetContent (b.Label, b.UseMnemonic, image, b.ImagePosition);
		}

		public override void EnableEvent (object eventId)
		{
			base.EnableEvent (eventId);
			if (eventId is ButtonEvent) {
				switch ((ButtonEvent)eventId) {
				case ButtonEvent.Clicked: Widget.Clicked += HandleWidgetClicked; break;
				}
			}
		}

		public override void DisableEvent (object eventId)
		{
			base.DisableEvent (eventId);
			if (eventId is ButtonEvent) {
				switch ((ButtonEvent)eventId) {
				case ButtonEvent.Clicked: Widget.Clicked -= HandleWidgetClicked; break;
				}
			}
		}

		void HandleWidgetClicked (object sender, EventArgs e)
		{
			if (!ignoreClickEvents) {
				ApplicationContext.InvokeUserCode (EventSink.OnClicked);
			}
		}

		bool miniMode;

		/// <summary>
		/// Toggles "mini mode", a compact rendering used by borderless styles that
		/// bypasses the button's default inner padding during size allocation.
		/// </summary>
		protected void SetMiniMode (bool miniMode)
		{
			if (this.miniMode == miniMode)
				return;
			this.miniMode = miniMode;
			if (miniMode)
				Widget.SizeAllocated += HandleSizeAllocated;
			else
				// FIX: previously the handler was never detached, so after toggling
				// mini mode off the ConnectBefore handler kept overriding the child
				// allocation (and the subscription leaked).
				Widget.SizeAllocated -= HandleSizeAllocated;
			SetMiniModeGtk(miniMode);
			Widget.QueueResize ();
		}

		[GLib.ConnectBefore]
		void HandleSizeAllocated (object o, Gtk.SizeAllocatedArgs args)
		{
			// Give the child the button's full allocation (no inner border) and stop
			// the default handler from running.
			Widget.Child.SizeAllocate (args.Allocation);
			args.RetVal = true;
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Concurrent; using System.Collections.Generic; using Xunit; namespace System.Threading.Tasks.Tests { public static class ParallelLoopResultTests { [Fact] public static void ForPLRTests() { ParallelLoopResult plr = Parallel.For(1, 0, delegate (int i, ParallelLoopState ps) { if (i == 10) ps.Stop(); }); PLRcheck(plr, "For-Empty", true, null); plr = Parallel.For(0, 100, delegate (int i, ParallelLoopState ps) { //Thread.Sleep(20); if (i == 10) ps.Stop(); }); PLRcheck(plr, "For-Stop", false, null); plr = Parallel.For(0, 100, delegate (int i, ParallelLoopState ps) { //Thread.Sleep(20); if (i == 10) ps.Break(); }); PLRcheck(plr, "For-Break", false, 10); plr = Parallel.For(0, 100, delegate (int i, ParallelLoopState ps) { //Thread.Sleep(20); }); PLRcheck(plr, "For-Completion", true, null); } [Fact] public static void ForPLR64Tests() { ParallelLoopResult plr = Parallel.For(1L, 0L, delegate (long i, ParallelLoopState ps) { if (i == 10) ps.Stop(); }); PLRcheck(plr, "For64-Empty", true, null); plr = Parallel.For(0L, 100L, delegate (long i, ParallelLoopState ps) { //Thread.Sleep(20); if (i == 10) ps.Stop(); }); PLRcheck(plr, "For64-Stop", false, null); plr = Parallel.For(0L, 100L, delegate (long i, ParallelLoopState ps) { //Thread.Sleep(20); if (i == 10) ps.Break(); }); PLRcheck(plr, "For64-Break", false, 10); plr = Parallel.For(0L, 100L, delegate (long i, ParallelLoopState ps) { //Thread.Sleep(20); }); PLRcheck(plr, "For64-Completion", true, null); } [Fact] public static void ForEachPLRTests() { Dictionary<string, string> dict = new Dictionary<string, string>(); ParallelLoopResult plr = Parallel.ForEach(dict, delegate (KeyValuePair<string, string> kvp, ParallelLoopState ps) { if (kvp.Value.Equals("Purple")) ps.Stop(); }); PLRcheck(plr, "ForEach-Empty", 
true, null); dict.Add("Apple", "Red"); dict.Add("Banana", "Yellow"); dict.Add("Pear", "Green"); dict.Add("Plum", "Red"); dict.Add("Grape", "Green"); dict.Add("Cherry", "Red"); dict.Add("Carrot", "Orange"); dict.Add("Eggplant", "Purple"); plr = Parallel.ForEach(dict, delegate (KeyValuePair<string, string> kvp, ParallelLoopState ps) { if (kvp.Value.Equals("Purple")) ps.Stop(); }); PLRcheck(plr, "ForEach-Stop", false, null); plr = Parallel.ForEach(dict, delegate (KeyValuePair<string, string> kvp, ParallelLoopState ps) { if (kvp.Value.Equals("Purple")) ps.Break(); }); PLRcheck(plr, "ForEach-Break", false, 7); // right?? plr = Parallel.ForEach(dict, delegate (KeyValuePair<string, string> kvp, ParallelLoopState ps) { //if(kvp.Value.Equals("Purple")) ps.Stop(); }); PLRcheck(plr, "ForEach-Complete", true, null); } [Fact] public static void PartitionerForEachPLRTests() { // // Now try testing Partitionable, OrderablePartitionable // List<int> intlist = new List<int>(); for (int i = 0; i < 20; i++) intlist.Add(i * i); MyPartitioner<int> mp = new MyPartitioner<int>(intlist); ParallelLoopResult plr = Parallel.ForEach(mp, delegate (int item, ParallelLoopState ps) { if (item == 0) ps.Stop(); }); PLRcheck(plr, "Partitioner-ForEach-Stop", false, null); plr = Parallel.ForEach(mp, delegate (int item, ParallelLoopState ps) { }); PLRcheck(plr, "Partitioner-ForEach-Complete", true, null); } [Fact] public static void OrderablePartitionerForEachTests() { List<int> intlist = new List<int>(); for (int i = 0; i < 20; i++) intlist.Add(i * i); OrderablePartitioner<int> mop = Partitioner.Create(intlist, true); ParallelLoopResult plr = Parallel.ForEach(mop, delegate (int item, ParallelLoopState ps, long index) { if (index == 2) ps.Stop(); }); PLRcheck(plr, "OrderablePartitioner-ForEach-Stop", false, null); plr = Parallel.ForEach(mop, delegate (int item, ParallelLoopState ps, long index) { if (index == 2) ps.Break(); }); PLRcheck(plr, "OrderablePartitioner-ForEach-Break", false, 2); plr = 
Parallel.ForEach(mop, delegate (int item, ParallelLoopState ps, long index) { }); PLRcheck(plr, "OrderablePartitioner-ForEach-Complete", true, null); } private static void PLRcheck(ParallelLoopResult plr, string ttype, bool shouldComplete, Int32? expectedLBI) { Assert.Equal(shouldComplete, plr.IsCompleted); Assert.Equal(expectedLBI, plr.LowestBreakIteration); } // Generalized test for testing For-loop results private static void ForPLRTest( Action<int, ParallelLoopState> body, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak) { ForPLRTest(body, new ParallelOptions(), desc, excExpected, shouldComplete, shouldStop, shouldBreak, false); } private static void ForPLRTest( Action<int, ParallelLoopState> body, ParallelOptions parallelOptions, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak, bool shouldCancel) { try { ParallelLoopResult plr = Parallel.For(0, 1, parallelOptions, body); if (excExpected || shouldCancel) { Logger.LogInformation("For-PLRTest -- {0} > failed. Expected an exception.", desc); } else if ((plr.IsCompleted != shouldComplete) || (shouldStop && (plr.LowestBreakIteration != null)) || (shouldBreak && (plr.LowestBreakIteration == null))) { string LBIval = "null"; if (plr.LowestBreakIteration != null) LBIval = plr.LowestBreakIteration.Value.ToString(); Logger.LogInformation("For-PLRTest -- {0} > failed. Complete={1}, LBI={2}", desc, plr.IsCompleted, LBIval); } } catch (OperationCanceledException oce) { if (!shouldCancel) { Logger.LogInformation("For-PLRTest -- {0}: > FAILED -- got unexpected OCE: {1}.", desc, oce.Message); } } catch (AggregateException e) { if (!excExpected) { Logger.LogInformation("For-PLRTest -- {0}: > failed -- unexpected exception from loop. Error: {1}", desc, e.ToString()); } } } // ... 
and a 64-bit version private static void For64PLRTest( Action<long, ParallelLoopState> body, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak) { For64PLRTest(body, new ParallelOptions(), desc, excExpected, shouldComplete, shouldStop, shouldBreak, false); } private static void For64PLRTest( Action<long, ParallelLoopState> body, ParallelOptions parallelOptions, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak, bool shouldCancel) { try { ParallelLoopResult plr = Parallel.For(0L, 1L, parallelOptions, body); if (excExpected || shouldCancel) { Logger.LogInformation("For64-PLRTest -- {0} > failed. Expected an exception.", desc); } else if ((plr.IsCompleted != shouldComplete) || (shouldStop && (plr.LowestBreakIteration != null)) || (shouldBreak && (plr.LowestBreakIteration == null))) { string LBIval = "null"; if (plr.LowestBreakIteration != null) LBIval = plr.LowestBreakIteration.Value.ToString(); Logger.LogInformation("For64-PLRTest -- {0} > failed. Complete={1}, LBI={2}", desc, plr.IsCompleted, LBIval); } } catch (OperationCanceledException) { if (!shouldCancel) { Logger.LogInformation("For64-PLRTest -- {0} > FAILED -- got unexpected OCE.", desc); } } catch (AggregateException e) { if (!excExpected) { Logger.LogInformation("For64-PLRTest -- {0}: > failed -- unexpected exception from loop. 
Error: {1} ", desc, e.ToString()); } } } // Generalized test for testing ForEach-loop results private static void ForEachPLRTest( Action<KeyValuePair<int, string>, ParallelLoopState> body, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak) { ForEachPLRTest(body, new ParallelOptions(), desc, excExpected, shouldComplete, shouldStop, shouldBreak, false); } private static void ForEachPLRTest( Action<KeyValuePair<int, string>, ParallelLoopState> body, ParallelOptions parallelOptions, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak, bool shouldCancel) { Dictionary<int, string> dict = new Dictionary<int, string>(); dict.Add(1, "one"); try { ParallelLoopResult plr = Parallel.ForEach(dict, parallelOptions, body); if (excExpected || shouldCancel) { Logger.LogInformation("ForEach-PLRTest -- {0} > failed. Expected an exception.", desc); } else if ((plr.IsCompleted != shouldComplete) || (shouldStop && (plr.LowestBreakIteration != null)) || (shouldBreak && (plr.LowestBreakIteration == null))) { Logger.LogInformation("ForEach-PLRTest -- {0} > failed. Complete={1}, LBI={2}", desc, plr.IsCompleted, plr.LowestBreakIteration); } } catch (OperationCanceledException oce) { if (!shouldCancel) { Logger.LogInformation("ForEach-PLRTest -- {0} > FAILED -- got unexpected OCE. Exception: {1}", desc, oce); } } catch (AggregateException e) { if (!excExpected) { Logger.LogInformation("ForEach-PLRTest -- {0} > failed -- unexpected exception from loop. 
Exception: {1}", desc, e); } } } // Generalized test for testing Partitioner ForEach-loop results private static void PartitionerForEachPLRTest( Action<int, ParallelLoopState> body, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak) { List<int> list = new List<int>(); for (int i = 0; i < 20; i++) list.Add(i); MyPartitioner<int> mp = new MyPartitioner<int>(list); try { ParallelLoopResult plr = Parallel.ForEach(mp, body); if (excExpected) { Logger.LogInformation("PartitionerForEach-PLRTest -- {0}: > failed. Expected an exception.", desc); } else if ((plr.IsCompleted != shouldComplete) || (shouldStop && (plr.LowestBreakIteration != null)) || (shouldBreak && (plr.LowestBreakIteration == null))) { Logger.LogInformation("PartitionerForEach-PLRTest -- {0} > failed. Complete={1}, LBI={2}", desc, plr.IsCompleted, plr.LowestBreakIteration); } } catch (AggregateException e) { if (!excExpected) { Logger.LogInformation("PartitionerForEach-PLRTest -- {0} > failed -- unexpected exception from loop. Exception: {1}", desc, e); } } } // Generalized test for testing OrderablePartitioner ForEach-loop results private static void OrderablePartitionerForEachPLRTest( Action<int, ParallelLoopState, long> body, string desc, bool excExpected, bool shouldComplete, bool shouldStop, bool shouldBreak) { List<int> list = new List<int>(); for (int i = 0; i < 20; i++) list.Add(i); OrderablePartitioner<int> mop = Partitioner.Create(list, true); try { ParallelLoopResult plr = Parallel.ForEach(mop, body); if (excExpected) { Logger.LogInformation("OrderablePartitionerForEach-PLRTest -- {0}: > failed. Expected an exception.", desc); } else if ((plr.IsCompleted != shouldComplete) || (shouldStop && (plr.LowestBreakIteration != null)) || (shouldBreak && (plr.LowestBreakIteration == null))) { Logger.LogInformation("OrderablePartitionerForEach-PLRTest -- {0}: > failed. 
Complete={1}, LBI={2}", desc, plr.IsCompleted, plr.LowestBreakIteration); } } catch (AggregateException e) { if (!excExpected) { Logger.LogInformation("OrderablePartitionerForEach-PLRTest -- {0}: > failed -- unexpected exception from loop. Exception: {1}", desc, e); } } } // Perform tests on various combinations of Stop()/Break() [Fact] public static void SimultaneousStopBreakTests() { // // Test 32-bit Parallel.For() // ForPLRTest(delegate (int i, ParallelLoopState ps) { ps.Stop(); ps.Break(); }, "Break After Stop", true, false, false, false); ForPLRTest(delegate (int i, ParallelLoopState ps) { ps.Break(); ps.Stop(); }, "Stop After Break", true, false, false, false); CancellationTokenSource cts = new CancellationTokenSource(); ParallelOptions options = new ParallelOptions(); options.CancellationToken = cts.Token; ForPLRTest(delegate (int i, ParallelLoopState ps) { ps.Break(); cts.Cancel(); }, options, "Cancel After Break", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; ForPLRTest(delegate (int i, ParallelLoopState ps) { ps.Stop(); cts.Cancel(); }, options, "Cancel After Stop", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; ForPLRTest(delegate (int i, ParallelLoopState ps) { cts.Cancel(); ps.Stop(); }, options, "Stop After Cancel", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; ForPLRTest(delegate (int i, ParallelLoopState ps) { cts.Cancel(); ps.Break(); }, options, "Break After Cancel", false, false, false, false, true); ForPLRTest(delegate (int i, ParallelLoopState ps) { ps.Break(); try { ps.Stop(); } catch { } }, "Stop(caught) after Break", false, false, false, true); ForPLRTest(delegate (int i, ParallelLoopState ps) { ps.Stop(); try { ps.Break(); } catch { } }, "Break(caught) 
after Stop", false, false, true, false); // // Test "vanilla" Parallel.ForEach // ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { ps.Break(); ps.Stop(); }, "Stop-After-Break", true, false, false, false); ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { ps.Stop(); ps.Break(); }, "Break-after-Stop", true, false, false, false); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { ps.Break(); cts.Cancel(); }, options, "Cancel After Break", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { ps.Stop(); cts.Cancel(); }, options, "Cancel After Stop", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { cts.Cancel(); ps.Stop(); }, options, "Stop After Cancel", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { cts.Cancel(); ps.Break(); }, options, "Break After Cancel", false, false, false, false, true); ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { ps.Break(); try { ps.Stop(); } catch { } }, "Stop(caught)-after-Break", false, false, false, true); ForEachPLRTest(delegate (KeyValuePair<int, string> kvp, ParallelLoopState ps) { ps.Stop(); try { ps.Break(); } catch { } }, "Break(caught)-after-Stop", false, false, true, false); // // Test Parallel.ForEach w/ Partitioner // PartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps) { 
ps.Break(); ps.Stop(); }, "Stop-After-Break", true, false, false, false); PartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps) { ps.Stop(); ps.Break(); }, "Break-after-Stop", true, false, false, false); PartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps) { ps.Break(); try { ps.Stop(); } catch { } }, "Stop(caught)-after-Break", false, false, false, true); PartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps) { ps.Stop(); try { ps.Break(); } catch { } }, "Break(caught)-after-Stop", false, false, true, false); // // Test Parallel.ForEach w/ OrderablePartitioner // OrderablePartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps, long index) { ps.Break(); ps.Stop(); }, "Stop-After-Break", true, false, false, false); OrderablePartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps, long index) { ps.Stop(); ps.Break(); }, "Break-after-Stop", true, false, false, false); OrderablePartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps, long index) { ps.Break(); try { ps.Stop(); } catch { } }, "Stop(caught)-after-Break", false, false, false, true); OrderablePartitionerForEachPLRTest(delegate (int i, ParallelLoopState ps, long index) { ps.Stop(); try { ps.Break(); } catch { } }, "Break(caught)-after-Stop", false, false, true, false); // // Test 64-bit Parallel.For // For64PLRTest(delegate (long i, ParallelLoopState ps) { ps.Stop(); ps.Break(); }, "Break After Stop", true, false, false, false); For64PLRTest(delegate (long i, ParallelLoopState ps) { ps.Break(); ps.Stop(); }, "Stop After Break", true, false, false, false); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; For64PLRTest(delegate (long i, ParallelLoopState ps) { ps.Break(); cts.Cancel(); }, options, "Cancel After Break", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; For64PLRTest(delegate (long i, 
ParallelLoopState ps) { ps.Stop(); cts.Cancel(); }, options, "Cancel After Stop", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; For64PLRTest(delegate (long i, ParallelLoopState ps) { cts.Cancel(); ps.Stop(); }, options, "Stop after Cancel", false, false, false, false, true); cts = new CancellationTokenSource(); options = new ParallelOptions(); options.CancellationToken = cts.Token; For64PLRTest(delegate (long i, ParallelLoopState ps) { cts.Cancel(); ps.Break(); }, options, "Break after Cancel", false, false, false, false, true); For64PLRTest(delegate (long i, ParallelLoopState ps) { ps.Break(); try { ps.Stop(); } catch { } }, "Stop(caught) after Break", false, false, false, true); For64PLRTest(delegate (long i, ParallelLoopState ps) { ps.Stop(); try { ps.Break(); } catch { } }, "Break(caught) after Stop", false, false, true, false); } #region Helper Classes and Methods // // Utility class for use w/ Partitioner-style ForEach testing. // Created by Cindy Song. 
//
/// <summary>
/// Test helper: wraps an orderable dynamic partitioner over <paramref name="_data"/> and
/// strips the ordering keys, producing a plain (non-orderable) Partitioner&lt;TSource&gt;.
/// Used to exercise the Parallel.ForEach overloads that accept a Partitioner&lt;T&gt;.
/// </summary>
public class MyPartitioner<TSource> : Partitioner<TSource>
{
    // The data to be partitioned; captured by reference, not copied.
    private IList<TSource> _data;

    public MyPartitioner(IList<TSource> data)
    {
        _data = data;
    }

    /// <summary>
    /// Returns <paramref name="partitionCount"/> static partitions. All enumerators draw
    /// from ONE shared orderable dynamic partitioner, so elements are load-balanced
    /// across the partitions rather than pre-split into fixed ranges.
    /// </summary>
    override public IList<IEnumerator<TSource>> GetPartitions(int partitionCount)
    {
        if (partitionCount <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(partitionCount));
        }
        IEnumerator<TSource>[] partitions = new IEnumerator<TSource>[partitionCount];
        // Single shared source: each GetEnumerator() below pulls from this same
        // dynamic partitioner (Partitioner.Create(_data, true) => load-balancing enabled).
        IEnumerable<KeyValuePair<long, TSource>> partitionEnumerable = Partitioner.Create(_data, true).GetOrderableDynamicPartitions();
        for (int i = 0; i < partitionCount; i++)
        {
            partitions[i] = DropIndices(partitionEnumerable.GetEnumerator());
        }
        return partitions;
    }

    /// <summary>
    /// Dynamic-partitioning path: defers to the framework's orderable dynamic
    /// partitioner and drops the indices from each yielded pair.
    /// </summary>
    override public IEnumerable<TSource> GetDynamicPartitions()
    {
        return DropIndices(Partitioner.Create(_data, true).GetOrderableDynamicPartitions());
    }

    // Projects KeyValuePair<long, TSource> sequences down to just the values
    // (IEnumerable flavor, used by GetDynamicPartitions).
    private static IEnumerable<TSource> DropIndices(IEnumerable<KeyValuePair<long, TSource>> source)
    {
        foreach (KeyValuePair<long, TSource> pair in source)
        {
            yield return pair.Value;
        }
    }

    // Enumerator flavor of the projection, used by GetPartitions; iterating the
    // returned enumerator advances the SHARED underlying enumerator.
    private static IEnumerator<TSource> DropIndices(IEnumerator<KeyValuePair<long, TSource>> source)
    {
        while (source.MoveNext())
        {
            yield return source.Current.Value;
        }
    }

    // This partitioner supports dynamic partitions (see GetDynamicPartitions above).
    public override bool SupportsDynamicPartitions
    {
        get
        {
            return true;
        }
    }
}
#endregion
}
}
// Copyright 2019 Esri
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Data;
using System.Windows.Input;
using ArcGIS.Desktop.Catalog;
using ArcGIS.Desktop.Core;
using ArcGIS.Desktop.Framework;
using ArcGIS.Desktop.Framework.Contracts;
using ArcGIS.Desktop.Framework.Dialogs;
using Microsoft.Win32;
using ArcGIS.Desktop.Mapping;
using ArcGIS.Desktop.Framework.Threading.Tasks;

namespace WorkwithProjects
{
    /// <summary>
    /// ViewModel backing the "Work with Projects" dockpane. Demonstrates opening a
    /// project, adding a folder connection, importing an MXD as a map, and creating a
    /// new project from a built-in or file-based (*.aptx) template.
    /// </summary>
    internal class WorkWithProjectsViewModel : DockPane
    {
        private const string _dockPaneID = "WorkwithProjects_WorkWithProjects";

        /// <summary>
        /// used to lock collections for use by multiple threads
        /// </summary>
        private readonly object _lockMxdItemCollection = new object();
        private readonly object _lockTemplateFilesCollection = new object();

        private ObservableCollection<Item> _mxdItems = new ObservableCollection<Item>();

        protected WorkWithProjectsViewModel()
        {
            // Let the WPF binding engine safely read MxdItems while worker threads mutate it.
            BindingOperations.EnableCollectionSynchronization(MxdItems, _lockMxdItemCollection);
        }

        #region Properties

        private string _Filter = @".mxd";
        /// <summary>
        /// Folder Filter property: file extension used when searching a folder connection.
        /// </summary>
        public string Filter
        {
            get { return _Filter; }
            set { SetProperty(ref _Filter, value, () => Filter); }
        }

        private string _openProjectPath = @"";
        /// <summary>
        /// Open Project Path property
        /// </summary>
        public string OpenProjectPath
        {
            get { return _openProjectPath; }
            set { SetProperty(ref _openProjectPath, value, () => OpenProjectPath); }
        }

        private string _folderPath = @"";
        /// <summary>
        /// Folder Path property
        /// </summary>
        public string FolderPath
        {
            get { return _folderPath; }
            set { SetProperty(ref _folderPath, value, () => FolderPath); }
        }

        private string _projectPath = @"";
        /// <summary>
        /// Project Path property (location for a newly created project)
        /// </summary>
        public string ProjectPath
        {
            get { return _projectPath; }
            set { SetProperty(ref _projectPath, value, () => ProjectPath); }
        }

        // Mutually exclusive template-type radio flags; see SelectTemplateType.
        private bool _isCatalog;
        private bool _isMap;
        private bool _isLocalScene;
        private bool _isGlobalScene;
        private bool _isFile;

        public bool IsCatalog
        {
            get { return _isCatalog; }
            set
            {
                SetProperty(ref _isCatalog, value, () => IsCatalog);
                SelectTemplateType("Catalog");
            }
        }

        public bool IsMap
        {
            get { return _isMap; }
            set
            {
                SetProperty(ref _isMap, value, () => IsMap);
                SelectTemplateType("Map");
            }
        }

        public bool IsLocalScene
        {
            get { return _isLocalScene; }
            set
            {
                SetProperty(ref _isLocalScene, value, () => IsLocalScene);
                SelectTemplateType("LocalScene");
            }
        }

        public bool IsGlobalScene
        {
            get { return _isGlobalScene; }
            set
            {
                SetProperty(ref _isGlobalScene, value, () => IsGlobalScene);
                SelectTemplateType("GlobalScene");
            }
        }

        public bool IsFile
        {
            get { return _isFile; }
            set
            {
                SetProperty(ref _isFile, value, () => IsFile);
                SelectTemplateType("File");
            }
        }

        private TemplateType _SelectedTemplateType = TemplateType.Untitled;
        private bool _reentry = false;

        /// <summary>
        /// Enforces mutual exclusion between the template-type flags and records the
        /// selected TemplateType. Clearing the sibling flags re-enters this method via
        /// their setters, so a re-entrancy guard (_reentry) short-circuits those calls.
        /// </summary>
        private void SelectTemplateType(string templateType)
        {
            if (_reentry) return;
            _reentry = true;
            FileTemplateVisibility = System.Windows.Visibility.Collapsed;
            switch (templateType)
            {
                case "Catalog":
                    IsMap = false;
                    IsLocalScene = false;
                    IsGlobalScene = false;
                    IsFile = false;
                    _SelectedTemplateType = TemplateType.Catalog;
                    break;
                case "Map":
                    IsCatalog = false;
                    IsLocalScene = false;
                    IsGlobalScene = false;
                    IsFile = false;
                    _SelectedTemplateType = TemplateType.Map;
                    break;
                case "LocalScene":
                    IsCatalog = false;
                    IsMap = false;
                    IsGlobalScene = false;
                    IsFile = false;
                    _SelectedTemplateType = TemplateType.LocalScene;
                    break;
                case "GlobalScene":
                    IsCatalog = false;
                    IsMap = false;
                    IsLocalScene = false;
                    IsFile = false;
                    _SelectedTemplateType = TemplateType.GlobalScene;
                    break;
                case "File":
                    IsCatalog = false;
                    IsMap = false;
                    IsLocalScene = false;
                    IsGlobalScene = false;
                    // "File" means create from a template file (*.aptx): the project itself
                    // is Untitled and CreateProjectSettings.TemplatePath carries the file.
                    _SelectedTemplateType = TemplateType.Untitled;
                    RefreshTemplates();
                    FileTemplateVisibility = System.Windows.Visibility.Visible;
                    break;
            }
            _reentry = false;
        }

        /// <summary>
        /// Command that creates a new project from the currently selected template.
        /// </summary>
        public ICommand CmdCreateProject
        {
            get
            {
                return new RelayCommand(async () =>
                {
                    // Create from template
                    await CreateProjectFromTemplate(ProjectName, ProjectPath, _SelectedTemplateType, _templateFile);
                });
            }
        }

        private string _projectName = @"";
        /// <summary>
        /// Project Name property
        /// </summary>
        public string ProjectName
        {
            get { return _projectName; }
            set { SetProperty(ref _projectName, value, () => ProjectName); }
        }

        /// <summary>
        /// collection of mxd items. Bind to this property in the view.
        /// </summary>
        public ObservableCollection<Item> MxdItems
        {
            get { return _mxdItems; }
        }

        /// <summary>
        /// Holds the selected mxd item; assigning a non-null item kicks off the map import.
        /// </summary>
        private Item _mxdItem = null;
        public Item MxdItem
        {
            get { return _mxdItem; }
            set
            {
                SetProperty(ref _mxdItem, value, () => MxdItem);
                if (_mxdItem == null) return;
                // Import the map. Deliberately fire-and-forget: a property setter cannot
                // await, and ImportAMap reports its own errors. The discard makes the
                // intentionally unobserved Task explicit.
                _ = ImportAMap(_mxdItem);
            }
        }

        /// <summary>
        /// collection of template paths. Bind to this property in the view.
        /// </summary>
        private IEnumerable<string> _templateFiles;
        public IEnumerable<string> TemplateFiles
        {
            get { return _templateFiles; }
        }

        /// <summary>
        /// Re-scans the per-user ArcGIS\ProjectTemplates folder for *.aptx files.
        /// </summary>
        private void RefreshTemplates()
        {
            var templateFolder = System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments), @"ArcGIS\ProjectTemplates");
            _templateFiles = Directory.GetFiles(templateFolder, "*.aptx").ToList();
            BindingOperations.EnableCollectionSynchronization(TemplateFiles, _lockTemplateFilesCollection);
            NotifyPropertyChanged(() => TemplateFiles);
        }

        /// <summary>
        /// Holds the selected templateFile item
        /// </summary>
        private string _templateFile = null;
        public string TemplateFile
        {
            get { return _templateFile; }
            set { SetProperty(ref _templateFile, value, () => TemplateFile); }
        }

        private System.Windows.Visibility _fileTemplateVisibility = System.Windows.Visibility.Collapsed;
        /// <summary>
        /// Determines the FileTemplate entry visibility
        /// </summary>
        public System.Windows.Visibility FileTemplateVisibility
        {
            get { return _fileTemplateVisibility; }
            set { SetProperty(ref _fileTemplateVisibility, value, () => FileTemplateVisibility); }
        }

        #endregion

        #region Commands

        /// <summary>
        /// command to open a project
        /// </summary>
        private ICommand _openProjectCommand;
        public ICommand OpenProjectCommand
        {
            get
            {
                return _openProjectCommand ??
                    (_openProjectCommand = new RelayCommand(() => OpenProject(OpenProjectPath), () => true));
            }
        }

        /// <summary>
        /// command to add a folder connection to the current project
        /// </summary>
        private ICommand _addFolderToProjectCommand;
        public ICommand AddFolderToProjectCommand
        {
            get
            {
                return _addFolderToProjectCommand ??
                    (_addFolderToProjectCommand = new RelayCommand(() => AddFolderToProject(FolderPath), () => true));
            }
        }

        #endregion

        #region Exercise

        // async void is acceptable here: this is a top-level command handler and
        // reports its own failures via MessageBox.
        private async void OpenProject(string openProjectPath)
        {
            try
            {
                await Project.OpenAsync(openProjectPath);
            }
            catch (Exception ex)
            {
                MessageBox.Show("Error opening project: " + ex.ToString());
            }
        }

        /// <summary>
        /// Adds a folder connection to the current project, then populates MxdItems with
        /// the files in that folder whose path ends with <see cref="Filter"/>.
        /// </summary>
        private async void AddFolderToProject(string addFolderPath)
        {
            if (string.IsNullOrEmpty(addFolderPath)) return;
            try
            {
                // Add a folder to the Project
                var folderToAdd = ItemFactory.Instance.Create(addFolderPath);
                await QueuedTask.Run(() => Project.Current.AddItem(folderToAdd as IProjectItem));
                // find the folder project item
                FolderConnectionProjectItem folder = Project.Current.GetItems<FolderConnectionProjectItem>().FirstOrDefault(f => f.Path.Equals(addFolderPath, StringComparison.CurrentCultureIgnoreCase));
                if (folder == null) return;
                // do the search
                IEnumerable<Item> folderFiles = null;
                await QueuedTask.Run(() => folderFiles = folder.GetItems().Where(f => f.Path.EndsWith(Filter, StringComparison.CurrentCultureIgnoreCase)));
                // Publish the results under the synchronization lock shared with WPF.
                lock (_lockMxdItemCollection)
                {
                    _mxdItems.Clear();
                    foreach (var newItem in folderFiles)
                    {
                        _mxdItems.Add(newItem);
                    }
                }
                NotifyPropertyChanged(() => MxdItems);
            }
            catch (Exception ex)
            {
                MessageBox.Show("Error adding folder to project: " + ex.ToString());
            }
        }

        /// <summary>
        /// Imports the given MXD item as a new map in the current project.
        /// </summary>
        private async Task ImportAMap(Item mxdToAdd)
        {
            try
            {
                await QueuedTask.Run(() => MapFactory.Instance.CreateMapFromItem(mxdToAdd));
            }
            catch (Exception ex)
            {
                MessageBox.Show("Error importing mxd: " + ex.ToString());
            }
        }

        /// <summary>
        /// Creates a new project using the supplied name from a project template.
        /// </summary>
        /// <remarks>Exercise 3: This exercise creates a new project from a project template (*.aptx)
        /// using Pro SDK. This exercise introduces the CreateProjectSettings class used in
        /// project creation.</remarks>
        /// <param name="projectName">Name to be used for the new project.</param>
        /// <param name="locationPath">Location for the new project</param>
        /// <param name="templateType">Type of template to use for newly created project</param>
        /// <param name="templatePath">Template to use</param>
        /// <returns>A Task representing CreateProjectFromTemplate.</returns>
        private async Task CreateProjectFromTemplate(string projectName, string locationPath, TemplateType templateType, string templatePath)
        {
            try
            {
                // Set project creation settings
                var createProjectSettings = new CreateProjectSettings
                {
                    Name = projectName,
                    LocationPath = locationPath
                };
                // Untitled means "create from a template file": supply the *.aptx path;
                // otherwise use one of the built-in template types.
                if (templateType == TemplateType.Untitled)
                {
                    createProjectSettings.TemplatePath = templatePath;
                }
                else
                {
                    createProjectSettings.TemplateType = templateType;
                }
                // Create project
                var newProject = await Project.CreateAsync(createProjectSettings);
                // ... and continue to do stuff
                //await ChangeSettings();
                // Save Project
                //await Project.Current.SaveAsync();
                // ... and/or continue to do stuff but allow user to continue to work with Pro
            }
            catch (Exception ex)
            {
                // Fixed: previously reported the copy-pasted "Error importing mxd" message.
                MessageBox.Show("Error creating project: " + ex.ToString());
            }
        }

        public static readonly string RegisrtyKeyUseInCaseOfEmergencyOnly = "ArcGISPro";

        /// <summary>
        /// Reads the ArcGIS Pro install directory from the registry, checking HKLM first
        /// and falling back to HKCU (both 64-bit views).
        /// </summary>
        /// <exception cref="InvalidOperationException">The key or value is missing/empty.</exception>
        private static string GetInstallDirFromReg()
        {
            var regKeyName = RegisrtyKeyUseInCaseOfEmergencyOnly;
            var regPath = string.Format(@"SOFTWARE\ESRI\{0}", regKeyName);
            var err1 = string.Format("Install location of ArcGIS Pro cannot be found. Please check your registry for {0}.", string.Format(@"HKLM\{0}\{1}", regPath, "InstallDir"));
            var path = "";
            try
            {
                var localKey = RegistryKey.OpenBaseKey(Microsoft.Win32.RegistryHive.LocalMachine, RegistryView.Registry64);
                var esriKey = localKey.OpenSubKey(regPath);
                if (esriKey == null)
                {
                    // Not under HKLM: try the per-user hive.
                    localKey = RegistryKey.OpenBaseKey(Microsoft.Win32.RegistryHive.CurrentUser, RegistryView.Registry64);
                    esriKey = localKey.OpenSubKey(regPath);
                }
                if (esriKey == null)
                {
                    //this is an error
                    throw new System.InvalidOperationException(err1);
                }
                path = esriKey.GetValue("InstallDir") as string;
                if (string.IsNullOrEmpty(path))
                    //this is an error
                    throw new InvalidOperationException(err1);
            }
            catch (InvalidOperationException)
            {
                // This is our own error: rethrow with `throw;` so the original stack
                // trace is preserved (was `throw ie;`, which resets it).
                throw;
            }
            catch (Exception ex)
            {
                // Wrap anything unexpected (e.g. registry access denied) in our message.
                throw new System.Exception(err1, ex);
            }
            return path;
        }

        #endregion

        #region Manage Dockpane

        /// <summary>
        /// Show the DockPane.
        /// </summary>
        internal static void Show()
        {
            DockPane pane = FrameworkApplication.DockPaneManager.Find(_dockPaneID);
            if (pane == null) return;
            pane.Activate();
        }

        /// <summary>
        /// Text shown near the top of the DockPane.
        /// </summary>
        private string _heading = "Exercising Project functions";
        public string Heading
        {
            get { return _heading; }
            set { SetProperty(ref _heading, value, () => Heading); }
        }

        #endregion
    }

    /// <summary>
    /// Button implementation to show the DockPane.
    /// </summary>
    internal class WorkWithProjects_ShowButton : Button
    {
        protected override void OnClick()
        {
            WorkWithProjectsViewModel.Show();
        }
    }
}
using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using NQuery.Runtime; namespace NQuery.Compilation { internal class Resolver : OperatorTypeResolver { #region Table Reference Declaration Finder private sealed class TableReferenceDeclarationFinder : StandardVisitor { private List<NamedTableReference> _nameTableReferenceList = new List<NamedTableReference>(); private List<DerivedTableReference> _derivedTableReferenceList = new List<DerivedTableReference>(); public NamedTableReference[] GetNamedTableReferences() { return _nameTableReferenceList.ToArray(); } public DerivedTableReference[] GetDerivedTableReferences() { return _derivedTableReferenceList.ToArray(); } public override TableReference VisitNamedTableReference(NamedTableReference node) { _nameTableReferenceList.Add(node); return node; } public override TableReference VisitDerivedTableReference(DerivedTableReference node) { _derivedTableReferenceList.Add(node); return node; } // We don't want to visit subselect predicates in ON. public override ExpressionNode VisitSingleRowSubselect(SingleRowSubselect expression) { return expression; } public override ExpressionNode VisitExistsSubselect(ExistsSubselect expression) { return expression; } public override ExpressionNode VisitAllAnySubselect(AllAnySubselect expression) { return expression; } } #endregion private Scope _scope; private Stack<QueryScope> _queryScopes = new Stack<QueryScope>(); public Resolver(IErrorReporter errorReporter, Scope scope) : base(errorReporter) { _scope = scope; // Needed for IntelliSense. This ensures // That CurrentScope always returns a legal scope. 
PushNewScope(null); } #region Scope Helpers private void PushScope(QueryScope queryScope) { _queryScopes.Push(queryScope); } private QueryScope PushNewScope(QueryScope queryScope) { if (queryScope == null) queryScope = new QueryScope(CurrentScope); PushScope(queryScope); return queryScope; } private void PopScope() { _queryScopes.Pop(); } public QueryScope CurrentScope { get { if (_queryScopes.Count == 0) return null; return _queryScopes.Peek(); } } #endregion #region Resolution Helpers private TableRefBinding ResolveTableRef(SourceRange sourceRange, Identifier identifier) { QueryScope lookupScope = CurrentScope; while (lookupScope != null) { TableRefBinding[] candidates = lookupScope.FindTableRef(identifier); if (candidates != null && candidates.Length > 0) { if (candidates.Length > 1) ErrorReporter.AmbiguousTableRef(sourceRange, identifier, candidates); return candidates[0]; } lookupScope = lookupScope.ParentScope; } return null; } private ColumnRefBinding ResolveColumnRef(SourceRange sourceRange, Identifier identifier) { QueryScope lookupScope = CurrentScope; while (lookupScope != null) { ColumnRefBinding[] candidates = lookupScope.FindColumnRef(identifier); if (candidates != null && candidates.Length > 0) { if (candidates.Length > 1) ErrorReporter.AmbiguousColumnRef(sourceRange, identifier, candidates); return candidates[0]; } lookupScope = lookupScope.ParentScope; } return null; } private ColumnRefBinding ResolveColumnRef(TableRefBinding tableRef, SourceRange sourceRange, Identifier identifier) { QueryScope lookupScope = tableRef.Scope; ColumnRefBinding[] candidates = lookupScope.FindColumnRef(tableRef, identifier); if (candidates == null || candidates.Length == 0) return null; if (candidates.Length > 1) ErrorReporter.AmbiguousColumnRef(sourceRange, identifier, candidates); return candidates[0]; } private TableBinding ResolveTable(SourceRange sourceRange, Identifier identifier) { TableBinding[] candidates; // First we try to find a common table expression 
QueryScope lookupScope = CurrentScope; while (lookupScope != null) { candidates = lookupScope.FindCommonTable(identifier); if (candidates != null && candidates.Length > 0) { if (candidates.Length > 1) ErrorReporter.AmbiguousTable(sourceRange, identifier, candidates); return candidates[0]; } lookupScope = lookupScope.ParentScope; } // Now we try to find a "global" table. candidates = _scope.DataContext.Tables.Find(identifier); if (candidates == null || candidates.Length == 0) return null; if (candidates.Length > 1) ErrorReporter.AmbiguousTable(sourceRange, identifier, candidates); return candidates[0]; } private ConstantBinding ResolveConstant(SourceRange sourceRange, Identifier identifier) { ConstantBinding[] candidates = _scope.DataContext.Constants.Find(identifier); if (candidates == null || candidates.Length == 0) return null; if (candidates.Length > 1) ErrorReporter.AmbiguousConstant(sourceRange, identifier, candidates); return candidates[0]; } private ParameterBinding ResolveParameter(SourceRange sourceRange, Identifier identifier) { ParameterBinding[] candidates = _scope.Parameters.Find(identifier); if (candidates == null || candidates.Length == 0) return null; if (candidates.Length > 1) ErrorReporter.AmbiguousParameter(sourceRange, identifier, candidates); return candidates[0]; } private AggregateBinding ResolveAggregate(SourceRange sourceRange, Identifier identifier) { AggregateBinding[] candidates = _scope.DataContext.Aggregates.Find(identifier); if (candidates == null || candidates.Length == 0) return null; if (candidates.Length > 1) ErrorReporter.AmbiguousAggregate(sourceRange, identifier, candidates); return candidates[0]; } private PropertyBinding ResolveTypeProperty(Type type, SourceRange sourceRange, Identifier identifier) { PropertyBinding[] candidates = _scope.DataContext.MetadataContext.FindProperty(type, identifier); if (candidates == null || candidates.Length == 0) return null; if (candidates.Length > 1) 
ErrorReporter.AmbiguousProperty(sourceRange, identifier, candidates); return candidates[0]; } private PropertyBinding ResolveCustomProperty(IList<PropertyBinding> customProperties, SourceRange sourceRange, Identifier identifier) { PropertyBinding[] candidates = _scope.DataContext.MetadataContext.FindProperty(customProperties, identifier); if (candidates == null || candidates.Length == 0) return null; if (candidates.Length > 1) ErrorReporter.AmbiguousProperty(sourceRange, identifier, candidates); return candidates[0]; } private FunctionBinding ResolveFunction(Identifier functionName, Type[] argumentTypes) { FunctionBinding[] functionGroup = _scope.DataContext.Functions.Find(functionName); return (FunctionBinding)Binder.BindInvocation(functionGroup, argumentTypes); } private MethodBinding ResolveMethod(Type declaringType, Identifier methodName, Type[] argumentTypes) { MethodBinding[] methodGroup = _scope.DataContext.MetadataContext.FindMethod(declaringType, methodName); return (MethodBinding)Binder.BindInvocation(methodGroup, argumentTypes); } private void ResolveOrderBy(SelectColumn[] inputSelectColumns, IEnumerable<OrderByColumn> orderByColumns) { // 1. ORDER BY expressions allow simple integer literals. They refer to an expression // in the selection list. // // 2. ORDER BY allows to specify the column alias. foreach (OrderByColumn orderByColumn in orderByColumns) { orderByColumn.ColumnIndex = -1; // Check for positional form. 
bool expressionResolved = false; LiteralExpression literalExpression = orderByColumn.Expression as LiteralExpression; if (literalExpression != null && literalExpression.IsInt32Value) { int selectionListPos = literalExpression.AsInt32; if (selectionListPos < 1 || selectionListPos > inputSelectColumns.Length) ErrorReporter.OrderByColumnPositionIsOutOfRange(selectionListPos); else { int index = selectionListPos - 1; orderByColumn.Expression = inputSelectColumns[index].Expression; orderByColumn.ColumnIndex = index; } expressionResolved = true; } else { // Check for column alias form. NameExpression nameExpression = orderByColumn.Expression as NameExpression; if (nameExpression != null) { for (int i = 0; i < inputSelectColumns.Length; i++) { SelectColumn selectColumn = inputSelectColumns[i]; if (nameExpression.Name == selectColumn.Alias) { orderByColumn.Expression = selectColumn.Expression; orderByColumn.ColumnIndex = i; expressionResolved = true; break; } } } } if (!expressionResolved) { orderByColumn.Expression = VisitExpression(orderByColumn.Expression); for (int i = 0; i < inputSelectColumns.Length; i++) { SelectColumn selectColumn = inputSelectColumns[i]; if (selectColumn.Expression.IsStructuralEqualTo(orderByColumn.Expression)) { orderByColumn.ColumnIndex = i; break; } } } } } #endregion #region Helpers private static bool IsRecursive(QueryNode query, Identifier tableName) { CommonTableExpressionRecursiveMemberChecker checker = new CommonTableExpressionRecursiveMemberChecker(tableName); checker.Visit(query); return checker.RecursiveReferences > 0 || checker.RecursiveReferenceInSubquery; } private void ValidateColumnNames(SelectColumn[] columns, CommonTableExpression commonTableExpression) { if (commonTableExpression.ColumnNames == null) { // Check that all columns have aliases. 
for (int i = 0; i < columns.Length; i++) { SelectColumn selectColumn = columns[i]; if (selectColumn.Alias == null) ErrorReporter.NoColumnAliasSpecified(commonTableExpression.TableNameSourceRange, i, commonTableExpression.TableName); } } else { if (commonTableExpression.ColumnNames.Length < columns.Length) ErrorReporter.CteHasMoreColumnsThanSpecified(commonTableExpression.TableName); else if (commonTableExpression.ColumnNames.Length > columns.Length) ErrorReporter.CteHasFewerColumnsThanSpecified(commonTableExpression.TableName); else { // Check that all specified column names are unique. Dictionary<string, object> nameSet = new Dictionary<string, object>(StringComparer.OrdinalIgnoreCase); foreach (Identifier columnName in commonTableExpression.ColumnNames) { if (nameSet.ContainsKey(columnName.Text)) ErrorReporter.CteHasDuplicateColumnName(columnName, commonTableExpression.TableName); else nameSet.Add(columnName.Text, null); } } if (!ErrorReporter.ErrorsSeen) { // Write explictly given column names into the real column list as aliases. 
for (int i = 0; i < columns.Length; i++) columns[i].Alias = commonTableExpression.ColumnNames[i]; } } } #endregion public override ExpressionNode VisitNameExpression(NameExpression expression) { // Resolve column // -- or -- // Resolve constant // -- or -- // Resolve parameter (without @) // -- or -- // Reference to row object ColumnRefBinding columnRefBinding = ResolveColumnRef(expression.NameSourceRange, expression.Name); ConstantBinding constantBinding = ResolveConstant(expression.NameSourceRange, expression.Name); ParameterBinding parameterBinding = ResolveParameter(expression.NameSourceRange, expression.Name); TableRefBinding tableRefBinding = ResolveTableRef(expression.NameSourceRange, expression.Name); if (columnRefBinding != null) { if (constantBinding != null) ErrorReporter.AmbiguousReference(expression.NameSourceRange, expression.Name, new Binding[] { columnRefBinding, constantBinding }); if (parameterBinding != null) ErrorReporter.AmbiguousReference(expression.NameSourceRange, expression.Name, new Binding[] { columnRefBinding, parameterBinding }); if (tableRefBinding != null) ErrorReporter.AmbiguousReference(expression.NameSourceRange, expression.Name, new Binding[] { columnRefBinding, tableRefBinding }); return new ColumnExpression(columnRefBinding); } if (constantBinding != null) { if (parameterBinding != null) ErrorReporter.AmbiguousReference(expression.NameSourceRange, expression.Name, new Binding[] { constantBinding, parameterBinding }); if (tableRefBinding != null) ErrorReporter.AmbiguousReference(expression.NameSourceRange, expression.Name, new Binding[] { constantBinding, tableRefBinding }); return new NamedConstantExpression(constantBinding); } if (parameterBinding != null) { if (tableRefBinding != null) ErrorReporter.AmbiguousReference(expression.NameSourceRange, expression.Name, new Binding[] { parameterBinding, tableRefBinding }); ParameterExpression parameterExpression = new ParameterExpression(); parameterExpression.Name = expression.Name; 
parameterExpression.NameSourceRange = expression.NameSourceRange; parameterExpression.Parameter = parameterBinding; return parameterExpression; } if (tableRefBinding != null) { if (tableRefBinding.TableBinding is DerivedTableBinding || tableRefBinding.TableBinding is CommonTableBinding) { ErrorReporter.InvalidRowReference(expression.NameSourceRange, tableRefBinding); return expression; } else { ColumnRefBinding cab = tableRefBinding.Scope.DeclareRowColumnRef(tableRefBinding); return new ColumnExpression(cab); } } // Check if there is any function with this name declared. This helps to give a better // error message. FunctionBinding[] functionBindings = _scope.DataContext.Functions.Find(expression.Name); if (functionBindings != null && functionBindings.Length > 0) { // Report that parentheses are required in function calls. ErrorReporter.InvocationRequiresParentheses(expression.NameSourceRange, functionBindings); } else { // Report the regular undeclared entity error message. ErrorReporter.UndeclaredEntity(expression.NameSourceRange, expression.Name); } return expression; } public override ExpressionNode VisitParameterExpression(ParameterExpression expression) { ParameterBinding parameter = ResolveParameter(expression.NameSourceRange, expression.Name); if (parameter == null) ErrorReporter.UndeclaredParameter(expression.NameSourceRange, expression.Name); expression.Parameter = parameter; return expression; } public override ExpressionNode VisitPropertyAccessExpression(PropertyAccessExpression expression) { // Resolve column // -- or -- // Resolve property NameExpression targetAsNameExpression = expression.Target as NameExpression; if (targetAsNameExpression != null) { // Ok, the target is a name expression. Lets see whether it refers to a table // or constant. 
TableRefBinding tableRefBinding = ResolveTableRef(targetAsNameExpression.NameSourceRange, targetAsNameExpression.Name); ConstantBinding constantBinding = ResolveConstant(targetAsNameExpression.NameSourceRange, targetAsNameExpression.Name); if (tableRefBinding != null) { if (constantBinding != null) { // It can both refer to a constant and a table. We cannot resolve // the ambiguity so we report an error and do nothing. ErrorReporter.AmbiguousReference(expression.NameSourceRange, expression.Name, new Binding[] { constantBinding, tableRefBinding }); return expression; } // The target only refers to table. Resolve the column or method on the row type. ColumnRefBinding columnRefBinding = ResolveColumnRef(tableRefBinding, expression.NameSourceRange, expression.Name); if (columnRefBinding != null) return new ColumnExpression(columnRefBinding); if (!(tableRefBinding.TableBinding is DerivedTableBinding) && !(tableRefBinding.TableBinding is CommonTableBinding)) { // Check if there is any method with this name declared. This helps to give a better // error message. MethodBinding[] methodBindings = _scope.DataContext.MetadataContext.FindMethod(tableRefBinding.TableBinding.RowType, expression.Name); if (methodBindings != null && methodBindings.Length > 0) { // Report that methods calls require parentheses ErrorReporter.InvocationRequiresParentheses(expression.NameSourceRange, methodBindings); return expression; } } // Report that no column with this name exists. ErrorReporter.UndeclaredColumn(expression.NameSourceRange, tableRefBinding, expression.Name); return expression; } // NOTE: If name does not refer to a table but to constant // we fall through. The name node is resolved below // (actually it is replaced in VisitNameExpression) } // Ok, it is not a table. Try to resolve a property. 
            // Resolve the target first; without its type the property cannot be resolved.
            expression.Target = VisitExpression(expression.Target);
            if (expression.Target.ExpressionType == null)
                return expression;

            // To support custom properties we check if the target is a named constant or a parameter.
            NamedConstantExpression namedConstantExpression = expression.Target as NamedConstantExpression;
            ParameterExpression parameterExpression = expression.Target as ParameterExpression;

            if (namedConstantExpression == null && parameterExpression == null)
            {
                // We cannot use custom properties in this case. Therefore
                // we resolve the property using type specific properties.
                expression.Property = ResolveTypeProperty(expression.Target.ExpressionType, expression.NameSourceRange, expression.Name);
            }
            else
            {
                // We can use custom properties. Get that value and the data type.
                IList<PropertyBinding> customProperties;
                if (namedConstantExpression != null)
                    customProperties = namedConstantExpression.Constant.CustomProperties;
                else
                    customProperties = parameterExpression.Parameter.CustomProperties;

                if (customProperties != null)
                {
                    // Resolve instance property.
                    expression.Property = ResolveCustomProperty(customProperties, expression.NameSourceRange, expression.Name);
                }
                else
                {
                    // The constant or parameter did not have custom properties.
                    expression.Property = ResolveTypeProperty(expression.Target.ExpressionType, expression.NameSourceRange, expression.Name);
                }
            }

            if (expression.Property == null)
            {
                // Check if there is any method with this name declared. This helps to give a better
                // error message.
                MethodBinding[] methodBindings = _scope.DataContext.MetadataContext.FindMethod(expression.Target.ExpressionType, expression.Name);
                if (methodBindings != null && methodBindings.Length > 0)
                {
                    // Report that method calls require parentheses.
                    ErrorReporter.InvocationRequiresParentheses(expression.NameSourceRange, methodBindings);
                }
                else
                {
                    // Report that no property with this name exists.
                    ErrorReporter.UndeclaredProperty(expression.NameSourceRange, expression.Target.ExpressionType, expression.Name);
                }
            }

            return expression;
        }

        public override ExpressionNode VisitCastExpression(CastExpression expression)
        {
            expression.Expression = VisitExpression(expression.Expression);

            if (expression.TypeReference.ResolvedType == null)
            {
                Type targetType = Binder.BindType(expression.TypeReference.TypeName, expression.TypeReference.CaseSensitve);
                expression.TypeReference.ResolvedType = targetType;

                if (targetType == null)
                {
                    ErrorReporter.UndeclaredType(expression.TypeReference.TypeNameSourceRange, expression.TypeReference.TypeName);
                }
                else if (expression.Expression.ExpressionType != null)
                {
                    // Replace the CAST node by the appropriate conversion/downcast node.
                    return Binder.ConvertOrDowncastExpressionIfRequired(expression.Expression, targetType);
                }
            }

            return expression;
        }

        public override ExpressionNode VisitFunctionInvocationExpression(FunctionInvocationExpression expression)
        {
            // Resolve aggregation
            // -- or --
            // Resolve method

            // First all parameters must be resolved.
            base.VisitFunctionInvocationExpression(expression);

            // If the type of any argument could not be resolved we cannot resolve the method.
            foreach (ExpressionNode argument in expression.Arguments)
            {
                if (argument.ExpressionType == null)
                    return expression;
            }

            // Resolve aggregate. Only invocations with exactly one argument -- or zero
            // arguments plus the asterisk modifier, i.e. COUNT(*) -- can be aggregates.
            bool canBeAggregate = expression.Arguments.Length == 1 || (expression.Arguments.Length == 0 && expression.HasAsteriskModifier);
            if (canBeAggregate)
            {
                AggregateBinding aggregateBinding = ResolveAggregate(expression.NameSourceRange, expression.Name);
                if (aggregateBinding != null)
                {
                    ExpressionNode aggregateArgument;
                    if (!expression.HasAsteriskModifier)
                    {
                        aggregateArgument = expression.Arguments[0];
                    }
                    else
                    {
                        // Only COUNT can have the asterisk modifier.
                        Identifier countName = Identifier.CreateNonVerbatim("COUNT");
                        if (!countName.Matches(aggregateBinding.Name))
                            ErrorReporter.AsteriskModifierNotAllowed(expression.NameSourceRange, expression);

                        // The semantic of COUNT(*) says that it counts all rows of the bound
                        // query. The same result can be accomplished by using COUNT(0) (or any
                        // other constant non-null expression as argument). Therefore we use a
                        // literal zero as the argument.
                        aggregateArgument = LiteralExpression.FromInt32(0);
                    }

                    IAggregator aggregator = aggregateBinding.CreateAggregator(aggregateArgument.ExpressionType);
                    if (aggregator == null)
                    {
                        ErrorReporter.AggregateDoesNotSupportType(aggregateBinding, aggregateArgument.ExpressionType);
                        return expression;
                    }

                    // Replace the function invocation node by an aggregate node.
                    AggregateExpression aggregateExpression = new AggregateExpression();
                    aggregateExpression.Aggregate = aggregateBinding;
                    aggregateExpression.Aggregator = aggregator;
                    aggregateExpression.Argument = aggregateArgument;
                    aggregateExpression.HasAsteriskModifier = expression.HasAsteriskModifier;
                    return aggregateExpression;
                }
            }

            // Resolve method
            if (expression.HasAsteriskModifier)
            {
                // Simple invocations cannot have asterisk modifier.
                ErrorReporter.AsteriskModifierNotAllowed(expression.NameSourceRange, expression);

                // Leave to avoid cascading errors.
                return expression;
            }

            Type[] argumentTypes = new Type[expression.Arguments.Length];
            for (int i = 0; i < argumentTypes.Length; i++)
                argumentTypes[i] = expression.Arguments[i].ExpressionType;

            expression.Function = ResolveFunction(expression.Name, argumentTypes);

            if (expression.Function == null)
            {
                ErrorReporter.UndeclaredFunction(expression.NameSourceRange, expression.Name, argumentTypes);
            }
            else
            {
                // Convert all arguments if necessary.
                Type[] parameterTypes = expression.Function.GetParameterTypes();
                for (int i = 0; i < expression.Arguments.Length; i++)
                    expression.Arguments[i] = Binder.ConvertExpressionIfRequired(expression.Arguments[i], parameterTypes[i]);
            }

            return expression;
        }

        public override ExpressionNode VisitMethodInvocationExpression(MethodInvocationExpression expression)
        {
            // First the target and all parameters must be resolved.
            base.VisitMethodInvocationExpression(expression);

            // If the type of the target could not be resolved we cannot resolve the method.
            if (expression.Target.ExpressionType == null)
                return expression;

            // If the type of any argument could not be resolved we cannot resolve the method.
            foreach (ExpressionNode argument in expression.Arguments)
            {
                if (argument.ExpressionType == null)
                    return expression;
            }

            // Build argument type array.
            Type[] argumentTypes = new Type[expression.Arguments.Length];
            for (int i = 0; i < argumentTypes.Length; i++)
                argumentTypes[i] = expression.Arguments[i].ExpressionType;

            expression.Method = ResolveMethod(expression.Target.ExpressionType, expression.Name, argumentTypes);

            if (expression.Method == null)
            {
                ErrorReporter.UndeclaredMethod(expression.NameSourceRange, expression.Target.ExpressionType, expression.Name, argumentTypes);
            }
            else
            {
                // Convert all arguments if necessary.
                Type[] parameterTypes = expression.Method.GetParameterTypes();
                for (int i = 0; i < expression.Arguments.Length; i++)
                    expression.Arguments[i] = Binder.ConvertExpressionIfRequired(expression.Arguments[i], parameterTypes[i]);
            }

            return expression;
        }

        public override TableReference VisitJoinedTableReference(JoinedTableReference node)
        {
            // Resolve children.
            node.Left = VisitTableReference(node.Left);
            node.Right = VisitTableReference(node.Right);

            if (node.Condition != null)
            {
                node.Condition = VisitExpression(node.Condition);

                // Get all tables that are referenced by node.Condition.
                MetaInfo metaInfo = AstUtil.GetMetaInfo(node.Condition);

                // Now get a list with all tables that are really introduced by one of the node's
                // children.
                TableDeclarationFinder tableDeclarationFinder = new TableDeclarationFinder();
                tableDeclarationFinder.Visit(node.Left);
                tableDeclarationFinder.Visit(node.Right);
                TableRefBinding[] declaredTables = tableDeclarationFinder.GetDeclaredTables();

                // Any tables that are not declared by children of node are inaccessible in the
                // current context and referencing them in node.Condition is invalid.
                //
                // NOTE: This is only partially true. It could also be an outer reference. Therefore
                // we also check that the scope is the same.
                foreach (TableRefBinding referencedTable in metaInfo.TableDependencies)
                {
                    if (referencedTable.Scope == CurrentScope)
                    {
                        if (!ArrayHelpers.Contains(declaredTables, referencedTable))
                            ErrorReporter.TableRefInaccessible(referencedTable);
                    }
                }
            }

            return node;
        }

        public override TableReference VisitDerivedTableReference(DerivedTableReference node)
        {
            // Don't visit the query since we have visited it in VisitSelectQuery() already.
            return node;
        }

        public override QueryNode VisitCommonTableExpressionQuery(CommonTableExpressionQuery query)
        {
            // All CTEs of this query are declared in a fresh scope so they are visible
            // to the query input and to later CTEs in the same WITH list.
            QueryScope queryScope = new QueryScope(null);
            PushNewScope(queryScope);
            try
            {
                foreach (CommonTableExpression commonTableExpression in query.CommonTableExpressions)
                {
                    CommonTableBinding[] existingCommonTables = queryScope.FindCommonTable(commonTableExpression.TableName);
                    if (existingCommonTables != null && existingCommonTables.Length > 0)
                    {
                        ErrorReporter.CteHasDuplicateTableName(commonTableExpression.TableName);
                        break;
                    }

                    // Check if CTE is recursive.
                    bool recursive = IsRecursive(commonTableExpression.QueryDeclaration, commonTableExpression.TableName);

                    if (!recursive)
                    {
                        commonTableExpression.QueryDeclaration = VisitQuery(commonTableExpression.QueryDeclaration);
                        ValidateColumnNames(commonTableExpression.QueryDeclaration.GetColumns(), commonTableExpression);
                        if (ErrorReporter.ErrorsSeen)
                            return query;

                        commonTableExpression.CommonTableBinding = queryScope.DeclareCommonTableExpression(commonTableExpression.TableName, commonTableExpression.QueryDeclaration);
                    }
                    else
                    {
                        // If recursive, we must check the structure. The structure is as follows:
                        //
                        // {One or more anchor members}
                        // UNION ALL
                        // {One or more recursive members}
                        BinaryQuery unionAllQuery = commonTableExpression.QueryDeclaration as BinaryQuery;
                        if (unionAllQuery == null || unionAllQuery.Op != BinaryQueryOperator.UnionAll)
                        {
                            ErrorReporter.CteDoesNotHaveUnionAll(commonTableExpression.TableName);
                            break;
                        }

                        List<QueryNode> recursiveMembers = AstUtil.FlattenBinaryQuery(unionAllQuery);
                        List<QueryNode> anchorMembers = new List<QueryNode>();
                        foreach (QueryNode queryNode in recursiveMembers)
                        {
                            // The anchor members are the leading non-recursive queries;
                            // the first recursive one ends the anchor prefix.
                            if (!IsRecursive(queryNode, commonTableExpression.TableName))
                                anchorMembers.Add(queryNode);
                            else
                                break;
                        }
                        recursiveMembers.RemoveRange(0, anchorMembers.Count);

                        // NOTE(review): the anchor members are combined before the empty check;
                        // presumably AstUtil.CombineQueries tolerates an empty list -- confirm.
                        QueryNode anchorMember = AstUtil.CombineQueries(anchorMembers, BinaryQueryOperator.UnionAll);
                        if (anchorMembers.Count == 0)
                        {
                            ErrorReporter.CteDoesNotHaveAnchorMember(commonTableExpression.TableName);
                            return query;
                        }

                        // Resolve anchor member and use it to construct a common table definition.
                        anchorMember = VisitQuery(anchorMember);

                        // Check that all columns have aliases.
                        ValidateColumnNames(anchorMember.GetColumns(), commonTableExpression);
                        if (ErrorReporter.ErrorsSeen)
                            return query;

                        commonTableExpression.CommonTableBinding = queryScope.DeclareCommonTableExpression(commonTableExpression.TableName, anchorMember);
                        SelectColumn[] anchorColumns = anchorMember.GetColumns();

                        // Now resolve all recursive members and add them to the common table definition.
                        for (int i = 0; i < recursiveMembers.Count; i++)
                        {
                            recursiveMembers[i] = VisitQuery(recursiveMembers[i]);

                            // Make sure the column count and data type match.
                            // NOTE: Due to the recursive nature there is no implicit conversion support for the UNION ALL
                            // in common table expressions. Instead, the types must match exactly.
                            SelectColumn[] recursiveColumns = recursiveMembers[i].GetColumns();
                            if (recursiveColumns.Length != anchorColumns.Length)
                                ErrorReporter.DifferentExpressionCountInBinaryQuery();
                            else
                            {
                                for (int columnIndex = 0; columnIndex < anchorColumns.Length; columnIndex++)
                                {
                                    Type anchorColumnType = anchorColumns[columnIndex].Expression.ExpressionType;
                                    Type recursiveColumnType = recursiveColumns[columnIndex].Expression.ExpressionType;
                                    if (recursiveColumnType != null && recursiveColumnType != anchorColumnType)
                                        ErrorReporter.CteHasTypeMismatchBetweenAnchorAndRecursivePart(anchorColumns[columnIndex].Alias, commonTableExpression.TableName);
                                }
                            }
                        }

                        commonTableExpression.CommonTableBinding.RecursiveMembers = recursiveMembers.ToArray();
                    }
                }

                if (ErrorReporter.ErrorsSeen)
                    return query;

                query.Input = VisitQuery(query.Input);
            }
            finally
            {
                PopScope();
            }

            return query;
        }

        public override QueryNode VisitBinaryQuery(BinaryQuery query)
        {
            base.VisitBinaryQuery(query);

            if (ErrorReporter.ErrorsSeen)
                return query;

            SelectColumn[] leftSelectColumns = query.Left.GetColumns();
            SelectColumn[] rightSelectColumns = query.Right.GetColumns();

            if (leftSelectColumns.Length != rightSelectColumns.Length)
            {
                ErrorReporter.DifferentExpressionCountInBinaryQuery();
            }
            else
            {
                // Check that all column expressions share a common type.
                //
                // If the types are not equal a CAST node is inserted in the tree.
                // To do this and to support good error reporting we first try to find
                // the best common type. Any needed conversions or type errors are
                // ignored.
                Type[] commonTypes = new Type[leftSelectColumns.Length];
                for (int i = 0; i < leftSelectColumns.Length; i++)
                {
                    Type leftType = leftSelectColumns[i].Expression.ExpressionType;
                    Type rightType = rightSelectColumns[i].Expression.ExpressionType;
                    commonTypes[i] = Binder.ChooseBetterTypeConversion(leftType, rightType);
                }

                // Now we know that commonType is the best type for all column expressions.
                //
                // Insert cast nodes for all expressions that have a different type but are
                // implicitly convertible and report errors for all expressions that are not convertible.
                for (int i = 0; i < leftSelectColumns.Length; i++)
                {
                    SelectColumn leftSelectColumn = leftSelectColumns[i];
                    SelectColumn rightSelectColumn = rightSelectColumns[i];
                    leftSelectColumn.Expression = Binder.ConvertExpressionIfRequired(leftSelectColumn.Expression, commonTypes[i]);
                    rightSelectColumn.Expression = Binder.ConvertExpressionIfRequired(rightSelectColumn.Expression, commonTypes[i]);
                }
            }

            return query;
        }

        [SuppressMessage("Microsoft.Maintainability", "CA1506:AvoidExcessiveClassCoupling")]
        [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
        public override QueryNode VisitSelectQuery(SelectQuery query)
        {
            query.QueryScope = PushNewScope(query.QueryScope);
            try
            {
                if (query.TableReferences != null)
                {
                    // Declare all tables.
                    TableReferenceDeclarationFinder tableReferenceDeclarationFinder = new TableReferenceDeclarationFinder();
                    tableReferenceDeclarationFinder.Visit(query.TableReferences);
                    NamedTableReference[] namedTableReferences = tableReferenceDeclarationFinder.GetNamedTableReferences();
                    DerivedTableReference[] derivedTableReferences = tableReferenceDeclarationFinder.GetDerivedTableReferences();

                    foreach (NamedTableReference namedTableReference in namedTableReferences)
                    {
                        TableBinding tableBinding = ResolveTable(namedTableReference.TableNameSourceRange, namedTableReference.TableName);
                        if (tableBinding == null)
                        {
                            ErrorReporter.UndeclaredTable(namedTableReference.TableNameSourceRange, namedTableReference.TableName);
                        }
                        else
                        {
                            // Without a correlation name the table's own name is used.
                            Identifier tableReferenceIdentifer;
                            if (namedTableReference.CorrelationName == null)
                                tableReferenceIdentifer = Identifier.CreateVerbatim(tableBinding.Name);
                            else
                                tableReferenceIdentifer = namedTableReference.CorrelationName;

                            TableRefBinding existingTableRef = ResolveTableRef(SourceRange.None, tableReferenceIdentifer);
                            if (existingTableRef != null && existingTableRef.Scope == query.QueryScope)
                                ErrorReporter.DuplicateTableRefInFrom(tableReferenceIdentifer);
                            else
                                namedTableReference.TableRefBinding = CurrentScope.DeclareTableRef(tableBinding, tableReferenceIdentifer);
                        }
                    }

                    foreach (DerivedTableReference derivedTableReference in derivedTableReferences)
                    {
                        derivedTableReference.Query = VisitQuery(derivedTableReference.Query);

                        // Make sure we are only declaring derived tables that have aliases for all expressions.
                        SelectColumn[] selectColumns = derivedTableReference.Query.GetColumns();
                        for (int i = 0; i < selectColumns.Length; i++)
                        {
                            if (selectColumns[i].Alias == null)
                                ErrorReporter.NoColumnAliasSpecified(derivedTableReference.CorrelationNameSourceRange, i, derivedTableReference.CorrelationName);
                        }

                        if (!ErrorReporter.ErrorsSeen)
                        {
                            TableRefBinding existingTableRef = ResolveTableRef(SourceRange.None, derivedTableReference.CorrelationName);
                            if (existingTableRef != null && existingTableRef.Scope == query.QueryScope)
                                ErrorReporter.DuplicateTableRefInFrom(derivedTableReference.CorrelationName);
                            else
                            {
                                DerivedTableBinding derivedTableBinding = new DerivedTableBinding(derivedTableReference.CorrelationName.Text, derivedTableReference.Query);
                                derivedTableReference.DerivedTableBinding = query.QueryScope.DeclareTableRef(derivedTableBinding, derivedTableReference.CorrelationName);
                            }
                        }
                    }

                    // If we could not declare all tables, we stop resolving this query.
                    if (ErrorReporter.ErrorsSeen)
                        return query;

                    // Resolve joins.
                    query.TableReferences = VisitTableReference(query.TableReferences);
                }

                // Ensure that tables were specified when using the asterisk.
                if (query.TableReferences == null)
                {
                    foreach (SelectColumn column in query.SelectColumns)
                    {
                        if (column.IsAsterisk)
                            ErrorReporter.MustSpecifyTableToSelectFrom();
                    }
                }

                // If we could not resolve all tables or there are duplicates
                // we must stop here.
                if (ErrorReporter.ErrorsSeen)
                    return query;

                // Expand all * and <alias>.* columns with the corresponding columns.
                //
                // Build a list of all column sources to replace the query.SelectColumns property.
                List<SelectColumn> columnSources = new List<SelectColumn>();
                foreach (SelectColumn columnSource in query.SelectColumns)
                {
                    if (!columnSource.IsAsterisk)
                    {
                        // Nothing to expand, just add the column source
                        // to the list.
                        columnSources.Add(columnSource);
                    }
                    else
                    {
                        // Expand the asterisk.
                        if (columnSource.Alias == null)
                        {
                            // No alias, expand the asterisk to all columns
                            // of all tables.
                            foreach (TableRefBinding tableRefBinding in CurrentScope.GetAllTableRefBindings())
                            {
                                foreach (ColumnBinding columnBinding in tableRefBinding.TableBinding.Columns)
                                {
                                    ColumnRefBinding columnRefBinding = CurrentScope.GetColumnRef(tableRefBinding, columnBinding);
                                    columnSources.Add(new SelectColumn(new ColumnExpression(columnRefBinding), null));
                                }
                            }
                        }
                        else
                        {
                            // Resolve the alias of the column to a table and just
                            // expand the asterisk to the columns of this table.
                            TableRefBinding tableRefBinding = ResolveTableRef(SourceRange.None, columnSource.Alias);
                            if (tableRefBinding == null)
                            {
                                ErrorReporter.UndeclaredTable(SourceRange.None, columnSource.Alias);
                            }
                            else
                            {
                                foreach (ColumnBinding columnDefinition in tableRefBinding.TableBinding.Columns)
                                {
                                    ColumnRefBinding columnRefBinding = CurrentScope.GetColumnRef(tableRefBinding, columnDefinition);
                                    columnSources.Add(new SelectColumn(new ColumnExpression(columnRefBinding), null));
                                }
                            }
                        }
                    }
                }

                // Now we have expanded all asterisks so we can replace the query.SelectColumns
                // property with the expanded list.
                query.SelectColumns = columnSources.ToArray();

                // Resolve
                // - all column selection expressions
                // - WHERE clause,
                // - GROUP BY clause,
                // - HAVING clause
                // - ORDER BY clause.
                for (int i = 0; i < query.SelectColumns.Length; i++)
                    query.SelectColumns[i].Expression = VisitExpression(query.SelectColumns[i].Expression);

                if (query.WhereClause != null)
                    query.WhereClause = VisitExpression(query.WhereClause);

                if (query.GroupByColumns != null)
                {
                    for (int i = 0; i < query.GroupByColumns.Length; i++)
                        query.GroupByColumns[i] = VisitExpression(query.GroupByColumns[i]);
                }

                if (query.HavingClause != null)
                    query.HavingClause = VisitExpression(query.HavingClause);

                if (query.OrderByColumns != null)
                    ResolveOrderBy(query.SelectColumns, query.OrderByColumns);

                // Infer column aliases for the following three expressions:
                //
                // - ColumnExpression
                // - PropertyExpression
                // - NamedConstantExpression
                //
                // In regular SQL only the first one is possible.
                foreach (SelectColumn columnSource in query.SelectColumns)
                {
                    if (columnSource.Alias == null)
                    {
                        ColumnExpression exprAsColumnExpression = columnSource.Expression as ColumnExpression;
                        PropertyAccessExpression exprAsPropertyAccessExpression = columnSource.Expression as PropertyAccessExpression;
                        NamedConstantExpression exprAsNamedConstant = columnSource.Expression as NamedConstantExpression;

                        if (exprAsColumnExpression != null)
                            columnSource.Alias = Identifier.CreateVerbatim(exprAsColumnExpression.Column.Name);
                        else if (exprAsPropertyAccessExpression != null)
                            columnSource.Alias = exprAsPropertyAccessExpression.Name;
                        else if (exprAsNamedConstant != null)
                            columnSource.Alias = Identifier.CreateVerbatim(exprAsNamedConstant.Constant.Name);
                    }
                }

                // Ensure WHERE clause is a boolean expression.
                if (query.WhereClause != null)
                {
                    if (query.WhereClause.ExpressionType == typeof(DBNull))
                    {
                        // This means the user entered something like SELECT ... WHERE null = null
                        //
                        // NOTE: We cannot test on literals since constant folding will be applied
                        // later.
                        query.WhereClause = LiteralExpression.FromBoolean(false);
                    }
                    else if (query.WhereClause.ExpressionType != null && query.WhereClause.ExpressionType != typeof(bool))
                    {
                        ErrorReporter.WhereClauseMustEvaluateToBool();
                    }
                }

                // Ensure HAVING clause is a boolean expression.
                if (query.HavingClause != null)
                {
                    if (query.HavingClause.ExpressionType == typeof(DBNull))
                    {
                        // See WHERE clause handler above.
                        query.HavingClause = LiteralExpression.FromBoolean(false);
                    }
                    else if (query.HavingClause.ExpressionType != null && query.HavingClause.ExpressionType != typeof(bool))
                    {
                        ErrorReporter.HavingClauseMustEvaluateToBool();
                    }
                }

                MetaInfo metaInfo = AstUtil.GetMetaInfo(query);
                query.ColumnDependencies = metaInfo.ColumnDependencies;
                return query;
            }
            finally
            {
                PopScope();
            }
        }

        public override QueryNode VisitSortedQuery(SortedQuery query)
        {
            // It is legal but meaningless to sort a select query that has no
            // tables. So we simply optimize this SortedQuery node away.
            SelectQuery selectQuery = query.Input as SelectQuery;
            if (selectQuery != null && selectQuery.TableReferences == null)
                return VisitQuery(query.Input);

            // First we resolve the input.
            query.Input = VisitQuery(query.Input);

            // Find innermost SELECT query.
            //
            // This is the SELECT query the COLUMN definition is brought from.
            QueryNode currentInput = query.Input;
            SelectQuery innerMostInput = null;
            while (innerMostInput == null)
            {
                innerMostInput = currentInput as SelectQuery;
                if (innerMostInput == null)
                {
                    // For binary queries (UNION etc.) the left side carries the column definitions.
                    if (currentInput.NodeType == AstNodeType.BinaryQuery)
                    {
                        BinaryQuery inputAsBinaryQuery = (BinaryQuery)currentInput;
                        currentInput = inputAsBinaryQuery.Left;
                    }
                    else
                    {
                        throw ExceptionBuilder.InternalError("Unexpected input node: {0}\nSource:{1}", currentInput.NodeType, currentInput.GenerateSource());
                    }
                }
            }

            // Resolve the ORDER BY columns in the scope of the innermost SELECT.
            PushScope(innerMostInput.QueryScope);
            try
            {
                ResolveOrderBy(query.Input.GetColumns(), query.OrderByColumns);
                return query;
            }
            finally
            {
                PopScope();
            }
        }
    }
}
using Newtonsoft.Json;
using System;
using System.ComponentModel;
using System.IO;

namespace DsonTypes {
    /// <summary>
    /// A single deserialized DSON file: holds the deserialized <see cref="DsonRoot"/>,
    /// the locator used to resolve cross-document references, the product the file
    /// belongs to, and the document path used as the base for reference URIs.
    /// </summary>
    public class DsonDocument {
        private DsonRoot root;

        public DsonObjectLocator Locator { get; }
        public string Product { get; }
        public string BaseUri { get; }
        public DsonRoot Root => root;

        private DsonDocument(DsonObjectLocator locator, string product, string documentPath) {
            Locator = locator;
            Product = product;
            BaseUri = documentPath;
        }

        /// <summary>
        /// Reads and deserializes a DSON file. A <see cref="DsonObjectReferenceConverter"/>
        /// bound to the new document is registered so that reference-typed fields capture
        /// the document they came from.
        /// </summary>
        public static DsonDocument LoadFromFile(DsonObjectLocator locator, ContentFileLocator.ContentLocation contentFile, string documentPath) {
            using (StreamReader reader = contentFile.File.OpenText()) {
                using (JsonReader jsonReader = new JsonTextReader(reader)) {
                    DsonDocument document = new DsonDocument(locator, contentFile.Product, documentPath);
                    var serializer = JsonSerializer.CreateDefault();
                    serializer.Converters.Add(new DsonObjectReferenceConverter(document));
                    DsonRoot root = serializer.Deserialize<DsonRoot>(jsonReader);
                    document.root = root;
                    return document;
                }
            }
        }

        /// <summary>
        /// Turns a reference URI into an absolute one: fragment-only URIs ("#id") are
        /// prefixed with this document's path; rooted URIs ("/...") are returned as-is.
        /// </summary>
        /// <exception cref="InvalidOperationException">The URI uses neither style.</exception>
        public string ResolveUri(string uri) {
            if (uri.StartsWith("#")) {
                return BaseUri + uri;
            } else if (uri.StartsWith("/")) {
                return uri;
            } else {
                throw new InvalidOperationException("unrecognized uri style: " + uri);
            }
        }
    }

    /// <summary>
    /// A lazy, URI-based reference from one DSON object to another; resolved on demand
    /// through the owning document's <see cref="DsonObjectLocator"/>.
    /// </summary>
    public class DsonObjectReference<T> where T : DsonObject {
        public DsonDocument Document { get; }
        public string RelativeUri { get; }

        public DsonObjectReference(DsonDocument document, string relativeUri) {
            Document = document;
            RelativeUri = relativeUri ?? throw new ArgumentNullException(nameof(relativeUri));
        }

        /// <summary>The absolute URI of the referenced object.</summary>
        public string Uri {
            get {
                return Document.ResolveUri(RelativeUri);
            }
        }

        public T LocateReferencedObject(bool throwIfMissing) {
            return (T) Document.Locator.Locate(Uri, throwIfMissing);
        }

        /// <summary>The referenced object; throws if it cannot be located.</summary>
        public T ReferencedObject {
            get {
                return LocateReferencedObject(true);
            }
        }
    }

    /// <summary>
    /// Deserializes a JSON string into a <see cref="DsonObjectReference{T}"/> of the
    /// requested closed generic type, binding it to the document being loaded.
    /// Serialization is not supported.
    /// </summary>
    class DsonObjectReferenceConverter : JsonConverter {
        private readonly DsonDocument document;

        public DsonObjectReferenceConverter(DsonDocument document) {
            this.document = document;
        }

        public override bool CanConvert(Type objectType) {
            return objectType.IsGenericType && objectType.GetGenericTypeDefinition() == typeof(DsonObjectReference<>);
        }

        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) {
            if (reader.TokenType != JsonToken.String) {
                throw new InvalidOperationException("expected a string token");
            }
            string relativeUri = (string) reader.Value;
            // The reference type has a single (document, relativeUri) constructor;
            // invoke it reflectively since T is only known at runtime here.
            return objectType.GetConstructors()[0].Invoke(new object[] { document, relativeUri });
        }

        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) {
            throw new NotImplementedException();
        }
    }

    // Top-level shape of a DSON file: the asset libraries plus the scene.
    [Serializable]
    public class DsonRoot {
        public string file_version;
        public AssetInfo asset_info;
        public Geometry[] geometry_library;
        public Modifier[] modifier_library;
        public UvSet[] uv_set_library;
        public Node[] node_library;
        public Image[] image_library;
        public Material[] material_library;
        public Scene scene;
    }

    // Common identification fields shared by DSON asset types.
    [Serializable]
    public class DsonObject {
        public string id;
        public string url;
        public string name;
        public string label;
    }

    [Serializable]
    public class Image : DsonObject {
        public float map_gamma;
        public ImageMap[] map;
    }

    [Serializable]
    public class ImageMap {
        public string url;
    }

    [Serializable]
    public class Scene {
        public ModifierInstance[] modifiers;
        public MaterialInstance[] materials;
        public ChannelAnimation[] animations;
    }

    [Serializable]
    public class ChannelAnimation {
        public string url;
        public Object[][] keys;
    }
[Serializable] public class CountedArray<T> { public int count; public T[] values; } [Serializable] public class UvSet : DsonObject { public int vertex_count; public CountedArray<float[]> uvs; public int[][] polygon_vertex_indices; } [Serializable] public class AssetInfo { public string id; public string type; } class GeometryTypeConverter : JsonConverter { public override bool CanConvert(Type objectType) { return true; } public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) { if (reader.TokenType != JsonToken.String) { throw new InvalidOperationException("expected operator to be a string"); } string value = (string) reader.Value; if (value == "polygon_mesh") { return GeometryType.PolygonMesh; } else if (value == "subdivision_surface") { return GeometryType.SubdivisionSurface; } else { throw new InvalidOperationException("unrecognized operator: " + value); } } public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) { throw new NotImplementedException(); } } [JsonConverter(typeof(GeometryTypeConverter))] public enum GeometryType { PolygonMesh, SubdivisionSurface } [Serializable] public class Geometry : DsonObject { public GeometryType type; public CountedArray<string> polygon_groups; public CountedArray<string> polygon_material_groups; public Float3Array vertices; public PolygonArray polylist; public DsonObjectReference<UvSet> default_uv_set; public Graft graft; } [Serializable] public class Graft { public Int2DArray vertex_pairs; public IntArray hidden_polys; } [Serializable] public class IntArray { public int count; public int[] values; } [Serializable] public class Int2DArray { public int count; public int[][] values; } [Serializable] public class Float3Array { public int count; public float[][] values; } [Serializable] public class PolygonArray { public int count; public int[][] values; } [Serializable] public class Presentation { public string type; } 
    [Serializable]
    public class Modifier : DsonObject {
        public string source;
        public DsonObjectReference<DsonObject> parent;
        public Presentation presentation;
        public ChannelFloat channel;
        public string region;
        public string group;
        public SkinBinding skin;
        public Morph morph;
        public Formula[] formulas;
    }

    [Serializable]
    public class Channel : DsonObject {
        // "visible" defaults to true when absent from the JSON.
        [DefaultValue(true)]
        [JsonProperty(DefaultValueHandling = DefaultValueHandling.Populate)]
        public bool visible;
        public bool locked;
        public double value;
        public double current_value;
        public double min;
        public double max;
        public bool clamped;
        public string target_channel;
    }

    [Serializable]
    public class ChannelFloat : Channel {
    }

    // Reads an IndexedFloat from a JSON array of the form [index, value].
    // Serialization is not supported.
    class IndexedFloatConverter : JsonConverter {
        public override bool CanConvert(Type objectType) {
            throw new NotImplementedException();
        }

        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) {
            IndexedFloat obj = default(IndexedFloat);
            obj.index = reader.ReadAsInt32().Value;
            obj.value = reader.ReadAsDouble().Value;
            reader.Read();
            if (reader.TokenType != JsonToken.EndArray) {
                throw new JsonSerializationException("expected end of array");
            }
            return obj;
        }

        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) {
            throw new NotImplementedException();
        }
    }

    [Serializable]
    [JsonConverter(typeof(IndexedFloatConverter))]
    public struct IndexedFloat {
        public int index;
        public double value;
    }

    [Serializable]
    public class WeightedJoint {
        public DsonObjectReference<Node> node;
        public CountedArray<IndexedFloat> node_weights;

        // These only exist to ensure they're not set.
        public object scale_weights;
        public object local_weights;
        public object bulge_weights;
    }

    // Reads a StringPair from a JSON array of the form [from, to].
    // Serialization is not supported.
    class StringPairConverter : JsonConverter {
        public override bool CanConvert(Type objectType) {
            throw new NotImplementedException();
        }

        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) {
            StringPair obj = default(StringPair);
            obj.from = reader.ReadAsString();
            obj.to = reader.ReadAsString();
            reader.Read();
            if (reader.TokenType != JsonToken.EndArray) {
                throw new JsonSerializationException("expected end of array");
            }
            return obj;
        }

        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) {
            throw new NotImplementedException();
        }
    }

    [Serializable]
    [JsonConverter(typeof(StringPairConverter))]
    public struct StringPair {
        public string from;
        public string to;
    }

    [Serializable]
    public class NamedStringMap {
        public string id;
        public StringPair[] mappings;
    }

    [Serializable]
    public class SkinBinding {
        public int vertex_count;
        public WeightedJoint[] joints;
        public NamedStringMap[] selection_map;
    }

    [Serializable]
    public class Morph {
        public int vertex_count;
        public Float3Array deltas;
        public string hd_url;
    }

    // Maps the DSON stage strings ("multiply"/"mult", "sum") to Stage values.
    // Serialization is not supported.
    class StageConverter : JsonConverter {
        public override bool CanConvert(Type objectType) {
            return true;
        }

        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) {
            if (reader.TokenType != JsonToken.String) {
                throw new InvalidOperationException("expected Stage to be a string");
            }
            string value = (string) reader.Value;
            if (value == "multiply" || value == "mult") {
                return Stage.Multiply;
            } else if (value == "sum") {
                return Stage.Sum;
            } else {
                throw new InvalidOperationException("unrecognized stage: " + value);
            }
        }

        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) {
            throw new NotImplementedException();
        }
    }

    [JsonConverter(typeof(StageConverter))]
    public enum Stage {
        Sum,
        Multiply
    }

    [Serializable]
    public class Formula {
        public string output;
        public Operation[] operations;
        public Stage stage;
    }

    // Maps the DSON operator strings to Operator values.
    // Serialization is not supported.
    class OperatorConverter : JsonConverter {
        public override bool CanConvert(Type objectType) {
            return true;
        }

        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) {
            if (reader.TokenType != JsonToken.String) {
                throw new InvalidOperationException("expected operator to be a string");
            }
            string value = (string) reader.Value;
            if (value == "push") {
                return Operator.Push;
            } else if (value == "add") {
                return Operator.Add;
            } else if (value == "sub") {
                return Operator.Sub;
            } else if (value == "mult") {
                return Operator.Mult;
            } else if (value == "div") {
                return Operator.Div;
            } else if (value == "spline_tcb") {
                return Operator.SplineTcb;
            } else {
                throw new InvalidOperationException("unrecognized operator: " + value);
            }
        }

        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) {
            throw new NotImplementedException();
        }
    }

    [JsonConverter(typeof(OperatorConverter))]
    public enum Operator {
        Push,
        Add,
        Sub,
        Mult,
        Div,
        SplineTcb
    }

    [Serializable]
    public class Operation {
        public Operator op;
        public Object val;
        public string url;
    }

    [Serializable]
    public class Node : DsonObject {
        public string type;
        public DsonObjectReference<Node> parent;
        public string rotation_order;
        // "inherits_scale" defaults to true when absent from the JSON.
        [DefaultValue(true)]
        [JsonProperty(DefaultValueHandling = DefaultValueHandling.Populate)]
        public bool inherits_scale;
        public ChannelFloat[] center_point;
        public ChannelFloat[] end_point;
        public ChannelFloat[] orientation;
        public ChannelFloat[] rotation;
        public ChannelFloat[] translation;
        public ChannelFloat[] scale;
        public ChannelFloat general_scale;
        public Formula[] formulas;
    }

    [Serializable]
    public class ModifierInstance {
        public string id;
        public string url;
    }

    [Serializable]
    public class Material : DsonObject {
        public string uv_set;
        public MaterialChannel diffuse;
        public MaterialExtra[] extra;
    }

    [Serializable]
    public class MaterialChannel {
        public MaterialChannelChannel channel;
    }

    [Serializable]
    public class MaterialChannelChannel {
        public string id;
        public object current_value;
        public string image_file;
        public ImageModification image_modification;
    }

    [Serializable]
    public class ImageModification {
        public float scale;
    }

    [Serializable]
    public class MaterialExtra {
        public string type;
        public MaterialChannel[] channels;
    }

    [Serializable]
    public class MaterialInstance {
        public DsonObjectReference<Material> url;
        public string uv_set;
        public MaterialChannel diffuse;
        public MaterialExtra[] extra;
        public string[] groups;
    }
}
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using System;
using System.Collections.ObjectModel;
using System.IO;
using System.Reflection;
using System.Threading;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Microsoft.WindowsAzure.Commands.ServiceManagement.Model;
using Microsoft.WindowsAzure.Commands.ServiceManagement.PlatformImageRepository.Model;
using Microsoft.WindowsAzure.Commands.ServiceManagement.Test.FunctionalTests.ConfigDataInfo;

namespace Microsoft.WindowsAzure.Commands.ServiceManagement.Test.FunctionalTests
{
    /// <summary>
    /// Functional tests for the Azure Platform Image Repository (PIR) cmdlets:
    /// Get/Set/Remove-AzurePlatformVMImage. Uploads an OS VHD, registers it as a
    /// user image, replicates/publishes it, then verifies visibility from a
    /// separate "normal user" subscription.
    /// </summary>
    [TestClass]
    public class PIRTest : ServiceManagementTest
    {
        private const string vhdNamePrefix = "pirtestosvhd";
        private const string imageNamePrefix = "pirtestosimage";
        private string vhdName;          // unique VHD blob name for this run
        private string vhdBlobLocation;  // full blob URL of the uploaded VHD
        private string image;            // unique OS image name for this run
        private const string location1 = "West US";
        private const string location2 = "North Central US";
        private const string location3 = "East US";
        private const string publisher = "publisher1";      // subscription name acting as image publisher
        private const string normaluser = "normaluser2";    // subscription name acting as consumer
        private const string normaluserSubId = "602258C5-52EC-46B3-A49A-7587A764AC84";
        private const string storageNormalUser = "normalstorage";

        [ClassInitialize]
        public static void ClassInit(TestContext context)
        {
            // NOTE(review): Equals(null) on a null reference throws NullReferenceException
            // instead of detecting the missing subscription; `== null` (or `is null`)
            // is presumably what was intended — confirm before changing.
            if (defaultAzureSubscription.Equals(null))
            {
                Assert.Inconclusive("No Subscription is selected!");
            }
        }

        /// <summary>
        /// Per-test setup: uploads (or reuses) the OS VHD blob and registers it
        /// as a user VM image. Marks the test inconclusive if the upload fails
        /// for a reason other than the blob already existing.
        /// </summary>
        [TestInitialize]
        public void Initialize()
        {
            vhdName = Utilities.GetUniqueShortName(vhdNamePrefix);
            image = Utilities.GetUniqueShortName(imageNamePrefix);
            vhdBlobLocation = string.Format("{0}{1}/{2}", blobUrlRoot, vhdContainerName, vhdName);

            try
            {
                if (string.IsNullOrEmpty(localFile))
                {
                    // No local VHD configured: copy shared test data within storage.
                    CredentialHelper.CopyTestData(testDataContainer, osVhdName, vhdContainerName, vhdName);
                }
                else
                {
                    vmPowershellCmdlets.AddAzureVhd(new FileInfo(localFile), vhdBlobLocation);
                }
            }
            catch (Exception e)
            {
                if (e.ToString().Contains("already exists") || e.ToString().Contains("currently a lease"))
                {
                    // Use the already uploaded vhd.
                    Console.WriteLine("Using already uploaded blob..");
                }
                else
                {
                    Console.WriteLine(e.ToString());
                    Assert.Inconclusive("Upload vhd is not set!");
                }
            }

            try
            {
                vmPowershellCmdlets.AddAzureVMImage(image, vhdBlobLocation, OS.Windows);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
            pass = false;
            testStartTime = DateTime.Now;
        }

        /// <summary>
        /// This test covers Get-AzurePlatformVMImage, Set-AzurePlatformVMImage and Remove-AzurePlatformVMImage cmdlets
        /// </summary>
        [TestMethod(), TestCategory("PIRTest"), TestProperty("Feature", "IAAS"), Priority(1), Owner("hylee"), Description("Test the cmdlet (Get,Set,Remove)-AzurePlatformVMImage)")]
        public void AzurePlatformVMImageSingleLocationTest()
        {
            StartTest(MethodBase.GetCurrentMethod().Name, testStartTime);

            try
            {
                // starting the test.
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Replicate the user image to "West US" and wait until the replication process is completed.
                ComputeImageConfig compCfg = new ComputeImageConfig
                {
                    Offer = "test",
                    Sku = "test",
                    Version = "test"
                };
                MarketplaceImageConfig marketCfg = null;
                vmPowershellCmdlets.SetAzurePlatformVMImageReplicate(image, new string[] { location1 }, compCfg, marketCfg);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));
                WaitForReplicationComplete(image);

                // Make the replicated image public and wait until the PIR image shows up.
                vmPowershellCmdlets.SetAzurePlatformVMImagePublic(image);
                OSImageContext pirImage = WaitForPIRAppear(image, publisher);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Check the locations of the PIR image: only location1 should be present.
                string pirlocations = vmPowershellCmdlets.GetAzureVMImage(pirImage.ImageName)[0].Location;
                Assert.IsTrue(pirlocations.Contains(location1));
                Assert.IsFalse(pirlocations.Contains(location2));
                Assert.IsFalse(pirlocations.Contains(location3));

                // Switch to the normal User and check the PIR image.
                SwitchToNormalUser();
                Assert.IsTrue(Utilities.CheckRemove(vmPowershellCmdlets.GetAzureVMImage, image));
                WaitForPIRAppear(image, publisher);

                // Switch to the publisher and make the PIR image private
                SwitchToPublisher();
                vmPowershellCmdlets.SetAzurePlatformVMImagePrivate(image);

                // Switch to the normal User and wait until the PIR image disappears.
                SwitchToNormalUser();
                WaitForPIRDisappear(pirImage.ImageName);

                // Switch to the publisher and remove the PIR image.
                SwitchToPublisher();
                vmPowershellCmdlets.RemoveAzurePlatformVMImage(image);
                Assert.AreEqual(0, vmPowershellCmdlets.GetAzurePlatformVMImage(image).ReplicationProgress.Count);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));
                pass = true;
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// This test covers Get-AzurePlatformVMImage, Set-AzurePlatformVMImage and Remove-AzurePlatformVMImage cmdlets
        /// </summary>
        [TestMethod(), TestCategory("PIRTest"), TestProperty("Feature", "IAAS"), Priority(1), Owner("hylee"), Description("Test the cmdlet (Get,Set,Remove)-AzurePlatformVMImage)")]
        public void AzurePlatformVMImageMultipleLocationsTest()
        {
            StartTest(MethodBase.GetCurrentMethod().Name, testStartTime);

            try
            {
                // starting the test.
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Replicate the user image to two regions and wait until the replication process is completed.
                ComputeImageConfig compCfg = new ComputeImageConfig
                {
                    Offer = "test",
                    Sku = "test",
                    Version = "test"
                };
                MarketplaceImageConfig marketCfg = null;
                vmPowershellCmdlets.SetAzurePlatformVMImageReplicate(image, new string[] { location1, location2 }, compCfg, marketCfg);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));
                WaitForReplicationComplete(image);

                // Make the replicated image public and wait until the PIR image shows up.
                vmPowershellCmdlets.SetAzurePlatformVMImagePublic(image);
                OSImageContext pirImage = WaitForPIRAppear(image, publisher);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Check the locations of the PIR image: location1 and location2 only.
                string pirlocations = vmPowershellCmdlets.GetAzureVMImage(pirImage.ImageName)[0].Location;
                Assert.IsTrue(pirlocations.Contains(location1));
                Assert.IsTrue(pirlocations.Contains(location2));
                Assert.IsFalse(pirlocations.Contains(location3));

                // Switch to the normal User and check the PIR image.
                SwitchToNormalUser();
                Assert.IsTrue(Utilities.CheckRemove(vmPowershellCmdlets.GetAzureVMImage, image));
                WaitForPIRAppear(image, publisher);

                // Switch to the publisher and make the PIR image private
                SwitchToPublisher();
                vmPowershellCmdlets.SetAzurePlatformVMImagePrivate(image);

                // Switch to the normal User and wait until the PIR image disappears.
                SwitchToNormalUser();
                WaitForPIRDisappear(pirImage.ImageName);

                // Switch to the publisher and remove the PIR image.
                SwitchToPublisher();
                vmPowershellCmdlets.RemoveAzurePlatformVMImage(image);
                Assert.AreEqual(0, vmPowershellCmdlets.GetAzurePlatformVMImage(image).ReplicationProgress.Count);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));
                pass = true;
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// End-to-end scenario: publish a PIR image, then (as the normal user)
        /// create a VM from it, and finally tear everything down.
        /// </summary>
        [TestMethod(), TestCategory("PIRTest"), TestProperty("Feature", "IAAS"), Priority(1), Owner("hylee"), Description("Test the cmdlet (Get,Set,Remove)-AzurePlatformVMImage)")]
        public void AzurePlatformVMImageScenarioTest()
        {
            StartTest(MethodBase.GetCurrentMethod().Name, testStartTime);
            string vmName = Utilities.GetUniqueShortName("pirtestvm");
            string svcName = Utilities.GetUniqueShortName("pirtestservice");

            try
            {
                // Ensure the normal user has a storage account in location1.
                SwitchToNormalUser();
                try
                {
                    vmPowershellCmdlets.GetAzureStorageAccount(storageNormalUser);
                }
                catch (Exception e)
                {
                    if (e.ToString().Contains("ResourceNotFound"))
                    {
                        vmPowershellCmdlets.NewAzureStorageAccount(storageNormalUser, location1);
                    }
                    else
                    {
                        Console.WriteLine(e.ToString());
                        throw;
                    }
                }
                vmPowershellCmdlets.SetAzureSubscription(normaluser, normaluserSubId, storageNormalUser);

                // Replicate the user image to "West US" and wait until the replication process is completed.
                SwitchToPublisher();
                ComputeImageConfig compCfg = new ComputeImageConfig
                {
                    Offer = "test",
                    Sku = "test",
                    Version = "test"
                };
                MarketplaceImageConfig marketCfg = null;
                vmPowershellCmdlets.SetAzurePlatformVMImageReplicate(image, new string[] { location1 }, compCfg, marketCfg);

                // Make the replicated image public and wait until the PIR image shows up.
                vmPowershellCmdlets.SetAzurePlatformVMImagePublic(image);
                OSImageContext pirImage = WaitForPIRAppear(image, publisher);

                // Switch to the normal User and check the PIR image.
                SwitchToNormalUser();
                WaitForPIRAppear(image, publisher);

                // Create a VM using the PIR image
                vmPowershellCmdlets.NewAzureQuickVM(OS.Windows, vmName, svcName, pirImage.ImageName, username, password, location1);
                Console.WriteLine("VM, {0}, is successfully created using the uploaded PIR image", vmPowershellCmdlets.GetAzureVM(vmName, svcName).Name);

                // Remove the service and VM
                vmPowershellCmdlets.RemoveAzureService(svcName);

                // Switch to the publisher and remove the PIR image
                SwitchToPublisher();
                vmPowershellCmdlets.RemoveAzurePlatformVMImage(image);
                pass = true;
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// Per-test cleanup: always restores the publisher subscription, then
        /// removes the user image if the cleanup flags allow it.
        /// </summary>
        [TestCleanup]
        public virtual void CleanUp()
        {
            SwitchToPublisher();
            Console.WriteLine("Test {0}", pass ? "passed" : "failed");

            if ((cleanupIfPassed && pass) || (cleanupIfFailed && !pass))
            {
                Console.WriteLine("Starting to clean up created VM and service.");
                try
                {
                    vmPowershellCmdlets.RemoveAzureVMImage(image, false);
                }
                catch (Exception e)
                {
                    Console.WriteLine("Exception occurs during cleanup: {0}", e.ToString());
                }

                // NOTE(review): empty try block — this catch is unreachable. Looks like
                // a cleanup step was removed (or was never filled in); confirm and delete.
                try
                {
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.ToString());
                }
            }
        }

        // Select the publisher subscription as the default for subsequent cmdlets.
        private void SwitchToPublisher()
        {
            vmPowershellCmdlets.SetDefaultAzureSubscription(publisher);
        }

        // Select the normal-user subscription as the default for subsequent cmdlets.
        private void SwitchToNormalUser()
        {
            vmPowershellCmdlets.SetDefaultAzureSubscription(normaluser);
        }

        /// <summary>
        /// Polls Get-AzurePlatformVMImage until every replication entry reports "100".
        /// NOTE(review): this loop has no Thread.Sleep — it hammers the service as fast
        /// as the cmdlet returns; a delay between polls (as in WaitForPIRAppear) and a
        /// timeout would be safer.
        /// </summary>
        private void WaitForReplicationComplete(string imageName)
        {
            DateTime startTime = DateTime.Now;
            OSImageDetailsContext state;
            try
            {
                do
                {
                    state = vmPowershellCmdlets.GetAzurePlatformVMImage(imageName);
                    foreach(var repro in state.ReplicationProgress)
                    {
                        Console.WriteLine(repro.ToString());
                    }
                }
                while (!state.ReplicationProgress.TrueForAll((s) => (s.Progress.Equals("100"))));
                Console.WriteLine("Replication completed after {0} minutes.", (DateTime.Now - startTime).TotalMinutes);
                PrintOSImageDetailsContext(state);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// Polls the VM image list until an image matching <paramref name="imageName"/>
        /// from <paramref name="publisherName"/> appears; fails the test after
        /// <paramref name="maxWaitTimeInMin"/> minutes.
        /// NOTE(review): the foreach variable `image` shadows the instance field `image`.
        /// </summary>
        private OSImageContext WaitForPIRAppear(string imageName, string publisherName, int waitTimeInMin = 1, int maxWaitTimeInMin = 30)
        {
            DateTime startTime = DateTime.Now;
            while (true)
            {
                Collection<OSImageContext> vmImages = vmPowershellCmdlets.GetAzureVMImage();
                foreach (OSImageContext image in vmImages)
                {
                    if (Utilities.MatchKeywords(image.ImageName, new[]{imageName}, false) >= 0 && image.PublisherName.Equals(publisherName))
                    {
                        Console.WriteLine("MATCHED PIR image found after {0} minutes:", (DateTime.Now - startTime).TotalMinutes);
                        PrintContext<OSImageContext>(image);
                        return image;
                    }
                }
                if ((DateTime.Now - startTime).TotalMinutes < maxWaitTimeInMin)
                {
                    Thread.Sleep(waitTimeInMin * 1000 * 60);
                }
                else
                {
                    Assert.Fail("Cannot get PIR image, {0}, within {1} minutes!", imageName, maxWaitTimeInMin);
                }
            }
        }

        /// <summary>
        /// Polls until Get-AzureVMImage throws ResourceNotFound for the image (i.e. it
        /// is gone); fails the test if it is still present after the timeout.
        /// </summary>
        private bool WaitForPIRDisappear(string imageName, int waitTimeInMin = 1, int maxWaitTimeInMin = 30)
        {
            DateTime startTime = DateTime.Now;
            while (true)
            {
                try
                {
                    OSImageContext imageContext = vmPowershellCmdlets.GetAzureVMImage(imageName)[0];
                    if ((DateTime.Now - startTime).TotalMinutes < maxWaitTimeInMin)
                    {
                        Thread.Sleep(waitTimeInMin * 1000 * 60);
                    }
                    else
                    {
                        Assert.Fail("Still has image, {0}, after {1} minutes!", imageName, maxWaitTimeInMin);
                    }
                }
                catch (Exception e)
                {
                    // "Not found" is the success condition here.
                    if (e.ToString().Contains("ResourceNotFound"))
                    {
                        Console.WriteLine("Image {0} disappered after {1} minutes.", imageName, (DateTime.Now - startTime).TotalMinutes);
                        return true;
                    }
                    else
                    {
                        Console.WriteLine(e.ToString());
                        throw;
                    }
                }
            }
        }

        /// <summary>
        /// Dumps simple (string/int/Uri/nullable) public instance properties of
        /// <paramref name="obj"/> to the console via reflection.
        /// </summary>
        private void PrintContext<T>(T obj)
        {
            Type type = typeof(T);
            foreach (PropertyInfo property in type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly))
            {
                string typeName = property.PropertyType.FullName;
                if (typeName.Equals("System.String") || typeName.Equals("System.Int32") || typeName.Equals("System.Uri") || typeName.Contains("Nullable"))
                {
                    Console.WriteLine("{0}: {1}", property.Name, property.GetValue(obj, null));
                }
            }
        }

        /// <summary>
        /// Prints the base image context plus each replication-progress entry.
        /// </summary>
        private void PrintOSImageDetailsContext(OSImageDetailsContext context)
        {
            PrintContext<OSImageContext>(context);
            foreach (var repro in context.ReplicationProgress)
            {
                Console.WriteLine("ReplicationProgress: {0}", repro.ToString());
            }
            if (context.ReplicationProgress.Count == 0)
            {
                Console.WriteLine("There is no replication!");
            }
            Console.WriteLine("IsCorrupted {0}", context.IsCorrupted);
        }
    }
}
#region Copyright notice and license
// Copyright 2015, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion

using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Grpc.Core.Internal;
using Grpc.Core.Logging;
using Grpc.Core.Utils;

namespace Grpc.Core.Internal
{
    /// <summary>
    /// Manages client side native call lifecycle.
    /// Covers all four call shapes (unary, client/server streaming, duplex);
    /// state shared with callbacks is guarded by the inherited <c>myLock</c>.
    /// </summary>
    internal class AsyncCall<TRequest, TResponse> : AsyncCallBase<TRequest, TResponse>
    {
        static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<AsyncCall<TRequest, TResponse>>();

        readonly CallInvocationDetails<TRequest, TResponse> details;

        // Completion of a pending unary response if not null.
        TaskCompletionSource<TResponse> unaryResponseTcs;

        // Set after status is received. Used for both unary and streaming response calls.
        ClientSideStatus? finishedStatus;

        bool readObserverCompleted;  // True if readObserver has already been completed.

        public AsyncCall(CallInvocationDetails<TRequest, TResponse> callDetails)
            : base(callDetails.RequestMarshaller.Serializer, callDetails.ResponseMarshaller.Deserializer)
        {
            this.details = callDetails.WithOptions(callDetails.Options.Normalize());
            this.initialMetadataSent = true;  // we always send metadata at the very beginning of the call.
        }

        // TODO: this method is not Async, so it shouldn't be in AsyncCall class, but
        // it is reusing fair amount of code in this class, so we are leaving it here.
        /// <summary>
        /// Blocking unary request - unary response call.
        /// Uses a dedicated completion queue and plucks the result synchronously.
        /// </summary>
        public TResponse UnaryCall(TRequest msg)
        {
            using (CompletionQueueSafeHandle cq = CompletionQueueSafeHandle.Create())
            {
                byte[] payload = UnsafeSerialize(msg);

                unaryResponseTcs = new TaskCompletionSource<TResponse>();

                lock (myLock)
                {
                    Preconditions.CheckState(!started);
                    started = true;
                    Initialize(cq);

                    halfcloseRequested = true;
                    readingDone = true;
                }

                using (var metadataArray = MetadataArraySafeHandle.Create(details.Options.Headers))
                {
                    using (var ctx = BatchContextSafeHandle.Create())
                    {
                        call.StartUnary(ctx, payload, metadataArray, GetWriteFlagsForCall());
                        var ev = cq.Pluck(ctx.Handle);

                        bool success = (ev.success != 0);
                        try
                        {
                            HandleUnaryResponse(success, ctx);
                        }
                        catch (Exception e)
                        {
                            Logger.Error(e, "Exception occured while invoking completion delegate.");
                        }
                    }
                }

                // Once the blocking call returns, the result should be available synchronously.
                // Note that GetAwaiter().GetResult() doesn't wrap exceptions in AggregateException.
                return unaryResponseTcs.Task.GetAwaiter().GetResult();
            }
        }

        /// <summary>
        /// Starts a unary request - unary response call.
        /// </summary>
        public Task<TResponse> UnaryCallAsync(TRequest msg)
        {
            lock (myLock)
            {
                Preconditions.CheckState(!started);
                started = true;

                Initialize(details.Channel.Environment.CompletionQueue);

                halfcloseRequested = true;
                readingDone = true;

                byte[] payload = UnsafeSerialize(msg);

                unaryResponseTcs = new TaskCompletionSource<TResponse>();
                using (var metadataArray = MetadataArraySafeHandle.Create(details.Options.Headers))
                {
                    call.StartUnary(HandleUnaryResponse, payload, metadataArray, GetWriteFlagsForCall());
                }
                return unaryResponseTcs.Task;
            }
        }

        /// <summary>
        /// Starts a streamed request - unary response call.
        /// Use StartSendMessage and StartSendCloseFromClient to stream requests.
        /// </summary>
        public Task<TResponse> ClientStreamingCallAsync()
        {
            lock (myLock)
            {
                Preconditions.CheckState(!started);
                started = true;

                Initialize(details.Channel.Environment.CompletionQueue);

                readingDone = true;

                unaryResponseTcs = new TaskCompletionSource<TResponse>();
                using (var metadataArray = MetadataArraySafeHandle.Create(details.Options.Headers))
                {
                    call.StartClientStreaming(HandleUnaryResponse, metadataArray);
                }

                return unaryResponseTcs.Task;
            }
        }

        /// <summary>
        /// Starts a unary request - streamed response call.
        /// </summary>
        public void StartServerStreamingCall(TRequest msg)
        {
            lock (myLock)
            {
                Preconditions.CheckState(!started);
                started = true;

                Initialize(details.Channel.Environment.CompletionQueue);

                halfcloseRequested = true;
                halfclosed = true;  // halfclose not confirmed yet, but it will be once finishedHandler is called.

                byte[] payload = UnsafeSerialize(msg);

                using (var metadataArray = MetadataArraySafeHandle.Create(details.Options.Headers))
                {
                    call.StartServerStreaming(HandleFinished, payload, metadataArray, GetWriteFlagsForCall());
                }
            }
        }

        /// <summary>
        /// Starts a streaming request - streaming response call.
        /// Use StartSendMessage and StartSendCloseFromClient to stream requests.
        /// </summary>
        public void StartDuplexStreamingCall()
        {
            lock (myLock)
            {
                Preconditions.CheckState(!started);
                started = true;

                Initialize(details.Channel.Environment.CompletionQueue);

                using (var metadataArray = MetadataArraySafeHandle.Create(details.Options.Headers))
                {
                    call.StartDuplexStreaming(HandleFinished, metadataArray);
                }
            }
        }

        /// <summary>
        /// Sends a streaming request. Only one pending send action is allowed at any given time.
        /// completionDelegate is called when the operation finishes.
        /// </summary>
        public void StartSendMessage(TRequest msg, WriteFlags writeFlags, AsyncCompletionDelegate<object> completionDelegate)
        {
            StartSendMessageInternal(msg, writeFlags, completionDelegate);
        }

        /// <summary>
        /// Receives a streaming response. Only one pending read action is allowed at any given time.
        /// completionDelegate is called when the operation finishes.
        /// </summary>
        public void StartReadMessage(AsyncCompletionDelegate<TResponse> completionDelegate)
        {
            StartReadMessageInternal(completionDelegate);
        }

        /// <summary>
        /// Sends halfclose, indicating client is done with streaming requests.
        /// Only one pending send action is allowed at any given time.
        /// completionDelegate is called when the operation finishes.
        /// </summary>
        public void StartSendCloseFromClient(AsyncCompletionDelegate<object> completionDelegate)
        {
            lock (myLock)
            {
                Preconditions.CheckNotNull(completionDelegate, "Completion delegate cannot be null");
                CheckSendingAllowed();

                call.StartSendCloseFromClient(HandleHalfclosed);

                halfcloseRequested = true;
                sendCompletionDelegate = completionDelegate;
            }
        }

        /// <summary>
        /// Gets the resulting status if the call has already finished.
        /// Throws InvalidOperationException otherwise.
        /// </summary>
        public Status GetStatus()
        {
            lock (myLock)
            {
                Preconditions.CheckState(finishedStatus.HasValue, "Status can only be accessed once the call has finished.");
                return finishedStatus.Value.Status;
            }
        }

        /// <summary>
        /// Gets the trailing metadata if the call has already finished.
        /// Throws InvalidOperationException otherwise.
        /// </summary>
        public Metadata GetTrailers()
        {
            lock (myLock)
            {
                Preconditions.CheckState(finishedStatus.HasValue, "Trailers can only be accessed once the call has finished.");
                return finishedStatus.Value.Trailers;
            }
        }

        public CallInvocationDetails<TRequest, TResponse> Details
        {
            get
            {
                return this.details;
            }
        }

        /// <summary>
        /// On client-side, we only fire readCompletionDelegate once all messages have been read
        /// and status has been received.
        /// </summary>
        protected override void ProcessLastRead(AsyncCompletionDelegate<TResponse> completionDelegate)
        {
            if (completionDelegate != null && readingDone && finishedStatus.HasValue)
            {
                bool shouldComplete;
                lock (myLock)
                {
                    // Guard so the delegate fires at most once even if both the read path
                    // and HandleFinished reach this point.
                    shouldComplete = !readObserverCompleted;
                    readObserverCompleted = true;
                }

                if (shouldComplete)
                {
                    var status = finishedStatus.Value.Status;
                    if (status.StatusCode != StatusCode.OK)
                    {
                        FireCompletion(completionDelegate, default(TResponse), new RpcException(status));
                    }
                    else
                    {
                        FireCompletion(completionDelegate, default(TResponse), null);
                    }
                }
            }
        }

        protected override void OnAfterReleaseResources()
        {
            details.Channel.RemoveCallReference(this);
        }

        // Creates the native call handle, registers it with the channel and hooks up
        // cancellation. Must be called under myLock with started == true.
        private void Initialize(CompletionQueueSafeHandle cq)
        {
            var parentCall = details.Options.PropagationToken != null ? details.Options.PropagationToken.ParentCall : CallSafeHandle.NullInstance;

            var call = details.Channel.Handle.CreateCall(details.Channel.Environment.CompletionRegistry,
                parentCall, ContextPropagationToken.DefaultMask, cq,
                details.Method, details.Host, Timespec.FromDateTime(details.Options.Deadline.Value));
            details.Channel.AddCallReference(this);
            InitializeInternal(call);
            RegisterCancellationCallback();
        }

        // Make sure that once cancellationToken for this call is cancelled, Cancel() will be called.
        private void RegisterCancellationCallback()
        {
            var token = details.Options.CancellationToken;
            if (token.CanBeCanceled)
            {
                token.Register(() => this.Cancel());
            }
        }

        /// <summary>
        /// Gets WriteFlags set in callDetails.Options.WriteOptions
        /// </summary>
        private WriteFlags GetWriteFlagsForCall()
        {
            var writeOptions = details.Options.WriteOptions;
            return writeOptions != null ? writeOptions.Flags : default(WriteFlags);
        }

        /// <summary>
        /// Handler for unary response completion.
        /// Records the final status under the lock, then completes unaryResponseTcs
        /// with either the deserialized message or an RpcException.
        /// </summary>
        private void HandleUnaryResponse(bool success, BatchContextSafeHandle ctx)
        {
            var fullStatus = ctx.GetReceivedStatusOnClient();

            lock (myLock)
            {
                finished = true;
                finishedStatus = fullStatus;

                halfclosed = true;

                ReleaseResourcesIfPossible();
            }

            if (!success)
            {
                unaryResponseTcs.SetException(new RpcException(new Status(StatusCode.Internal, "Internal error occured.")));
                return;
            }

            var status = fullStatus.Status;

            if (status.StatusCode != StatusCode.OK)
            {
                unaryResponseTcs.SetException(new RpcException(status));
                return;
            }

            // TODO: handle deserialization error
            TResponse msg;
            TryDeserialize(ctx.GetReceivedMessage(), out msg);

            unaryResponseTcs.SetResult(msg);
        }

        /// <summary>
        /// Handles receive status completion for calls with streaming response.
        /// </summary>
        private void HandleFinished(bool success, BatchContextSafeHandle ctx)
        {
            var fullStatus = ctx.GetReceivedStatusOnClient();

            AsyncCompletionDelegate<TResponse> origReadCompletionDelegate = null;
            lock (myLock)
            {
                finished = true;
                finishedStatus = fullStatus;

                origReadCompletionDelegate = readCompletionDelegate;

                ReleaseResourcesIfPossible();
            }

            // A pending read (if any) can now be completed because the status has arrived.
            ProcessLastRead(origReadCompletionDelegate);
        }
    }
}
using System;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using System.Web;
using System.Windows.Forms;
using GuruComponents.Netrix.ComInterop;
using GuruComponents.Netrix.Events;
using GuruComponents.Netrix.WebEditing.Behaviors;
using GuruComponents.Netrix.WebEditing.Documents;
using GuruComponents.Netrix.WebEditing.DragDrop;
using GuruComponents.Netrix.WebEditing.Elements;
using Control = System.Web.UI.Control;
using HtmlWindow = GuruComponents.Netrix.WebEditing.Documents.HtmlWindow;
using System.Runtime.InteropServices.ComTypes;
using GuruComponents.Netrix.WebEditing.UndoRedo;

namespace GuruComponents.Netrix
{
    /// <summary>
    /// This is the basic implementation of the MSHTML host.
    /// </summary>
    /// <remarks>
    /// This class implements the interfaces building
    /// the base services and the basic editor host, which implements TAB key, table recognition and DEL
    /// key support.
    /// </remarks>
    [ClassInterface(ClassInterfaceType.None)]
    internal class MSHTMLSite :
        Interop.IOleClientSite,
        Interop.IOleContainer,
        Interop.IOleDocumentSite,
        Interop.IOleInPlaceSite,
        Interop.IOleInPlaceSiteEx,
        Interop.IOleInPlaceFrame,
        Interop.IDocHostUIHandler,
        Interop.IDocHostShowUI,
        Interop.IPropertyNotifySink,
        Interop.IAdviseSink,
        Interop.IOleServiceProvider,
        Interop.IHTMLEditDesigner,
        IDisposable
    {
        /// the Control used to host (and parent) the mshtml window
        private HtmlEditor htmlEditor;

        /// <summary>
        /// the mshtml instance and various related objects
        /// </summary>
        private Interop.IOleObject oleDocumentObject;
        private Interop.IHTMLDocument2 htmlbaseDocument;
        private Interop.IOleDocumentView interopDocumentView;
        private Interop.IOleInPlaceActiveObject activeObject;
        private string _readyStateString;

        /// <summary>
        /// Show UI on start?
        /// </summary>
        private bool WithUI;

        /// <summary>
        /// cookie representing our sink
        /// </summary>
        private ConnectionPointCookie propNotifyCookie;
        private int adviseSinkCookie;
        private IntPtr windowHandle = IntPtr.Zero;
        private DataObjectConverter _dataobjectconverter;

        // Delete Key Code
        private const int DEL = 46;

        /// <summary>
        /// Creates the site; activation happens later via <see cref="ActivateMSHTML()"/>.
        /// </summary>
        public MSHTMLSite(HtmlEditor htmlEditor)
        {
            if ((htmlEditor == null))// || (htmlEditor.IsHandleCreated == false))
            {
                throw new ArgumentException();
            }
            WithUI = false;
            this.htmlEditor = htmlEditor;
            this._readyStateString = String.Empty;
        }

        #region Internal used methods

        // Forwards a Windows message to MSHTML's active object so it can handle
        // accelerator keys; returns true when MSHTML consumed the message.
        internal bool PreTranslateMessage(Message msg)
        {
            Interop.COMMSG lpmsg = new Interop.COMMSG();
            lpmsg.hwnd = msg.HWnd;
            lpmsg.lParam = msg.LParam;
            lpmsg.wParam = msg.WParam;
            lpmsg.message = msg.Msg;
            if (this.activeObject != null && this.activeObject.TranslateAccelerator(lpmsg) == Interop.S_OK)
            {
                return true;
            }
            else
            {
                return false;
            }
        }

        internal Interop.IOleObject OleDocument
        {
            get
            {
                return this.oleDocumentObject;
            }
        }

        public IntPtr DocumentHandle
        {
            get
            {
                return windowHandle;
            }
        }

        /// <summary>
        /// Access to drop information after dragdrop operations. The object
        /// converter contains the dropped element.
        /// </summary>
        public DataObjectConverter DataObjectConverter
        {
            get
            {
                // Lazily created on first access.
                if (_dataobjectconverter == null)
                {
                    _dataobjectconverter = new DataObjectConverter();
                }
                return _dataobjectconverter;
            }
            set
            {
                _dataobjectconverter = value;
            }
        }

        /// <summary>
        /// Access to current document. The set accessor should only set to null during cleanup.
        /// </summary>
        public Interop.IHTMLDocument2 MSHTMLDocument
        {
            get
            {
                return htmlbaseDocument;
            }
            set
            {
                htmlbaseDocument = value;
            }
        }

        /// <overloads/>
        /// <summary>
        /// Activate with UI activation.
        /// </summary>
        /// <remarks>
        /// UI activation means that the caret appears immediately after the designer surface appears, whether or not the
        /// control has the focus.
        /// </remarks>
        public void ActivateMSHTML()
        {
            ActivateMSHTML(true);
        }

        /// <summary>
        /// Activate the editor
        /// </summary>
        /// <param name="withUI">Activates the UI of the control immediately after start up.</param>
        /// <remarks>
        /// UI activation means that the caret appears immediately after the designer surface appears, whether or not the
        /// control has the focus.
        /// </remarks>
        public void ActivateMSHTML(bool withUI)
        {
            try
            {
                this.WithUI = withUI;
                Interop.RECT r = EditorRect;
                int result = OleDocument.DoVerb((int)Interop.OLE.OLEIVERB_UIACTIVATE, Interop.NullIntPtr, this, 0, EditorHandle, r);
                if (result == Interop.S_OK)
                {
                    this.htmlEditor.NeedActivation = false;
                }
                else
                {
                    throw new ApplicationException("Activate UI in ActivateMSHTML failed with result " + result);
                }
                htmlEditor.AddEditDesigner(this);
            }
            catch (Exception e)
            {
                Debug.Fail(e.ToString());
            }
        }

        private IntPtr EditorHandle
        {
            get
            {
                if (htmlEditor.IsDisposed)
                    return IntPtr.Zero;
                else
                    return htmlEditor.PanelHandle;
            }
        }

        private Interop.RECT EditorRect
        {
            get
            {
                Interop.RECT r = new Interop.RECT();
                Win32.GetClientRect(EditorHandle, r);
                return r;
            }
        }

        /// <summary>
        /// Tears down the COM object graph: sinks, document view, OLE object and
        /// document. COM references are released in loops until the ref count drops.
        /// NOTE(review): the fields `window` and `winEvents` used here are not declared
        /// in this chunk — presumably declared further down the class; verify.
        /// </summary>
        public void Dispose()
        {
            try
            {
                int RefCount;
                if (propNotifyCookie != null)
                {
                    propNotifyCookie.Dispose();
                    propNotifyCookie = null;
                }
                if (winEvents != null)
                {
                    winEvents.Dispose();
                    winEvents = null;
                }
                try
                {
                    Marshal.ReleaseComObject(window);
                }
                catch { }
                try
                {
                    if (interopDocumentView != null)
                    {
                        try { interopDocumentView.Show(0); }
                        catch { }
                        try { interopDocumentView.UIActivate(0); }
                        catch { }
                        try { interopDocumentView.SetInPlaceSite(null); }
                        catch { }
                        long nullParam = 0L;
                        try
                        {
                            interopDocumentView.Close(nullParam);
                            do
                            {
                                RefCount = Marshal.ReleaseComObject(interopDocumentView);
                            } while (RefCount >= 0);
                        }
                        catch { }
                        finally
                        {
                            Marshal.FinalReleaseComObject(interopDocumentView);
                            interopDocumentView = null;
                        }
                    }
                }
                catch { }
                if (oleDocumentObject != null)
                {
                    try
                    {
                        if (htmlEditor.Site == null || !htmlEditor.Site.DesignMode)
                        {
                            Marshal.FinalReleaseComObject(oleDocumentObject);
                            oleDocumentObject = null;
                        }
                    }
                    catch { }
                }
                if (htmlbaseDocument != null)
                {
                    do
                    {
                        RefCount = Marshal.ReleaseComObject(htmlbaseDocument);
                    } while (RefCount >= 0);
                    Marshal.FinalReleaseComObject(htmlbaseDocument);
                    htmlbaseDocument = null;
                }
                if (interopDocumentView != null)
                {
                    do
                    {
                        RefCount = Marshal.ReleaseComObject(interopDocumentView);
                    } while (RefCount >= 0);
                }
                if (activeObject != null)
                {
                    do
                    {
                        RefCount = Marshal.ReleaseComObject(activeObject);
                    } while (RefCount >= 0);
                    Marshal.FinalReleaseComObject(activeObject);
                    activeObject = null;
                }
                interopDocumentView = null;
                htmlbaseDocument = null;
                activeObject = null;
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
        }

        /// <summary>
        /// Instantiates the MSHTML document, wires this site as its client site and
        /// advise sink, and connects the property-notify sink. On failure the
        /// document/object fields are reset to null.
        /// </summary>
        public void CreateMSHTML()
        {
            bool created = false;
            try
            {
                // create our base instance
                this.htmlbaseDocument = (Interop.IHTMLDocument2)new Interop.HTMLDocument();
                this.activeObject = (Interop.IOleInPlaceActiveObject)htmlbaseDocument;
                this.windowHandle = new IntPtr();
                this.activeObject.GetWindow(out this.windowHandle);
                oleDocumentObject = (Interop.IOleObject)htmlbaseDocument;
                if (oleDocumentObject == null)
                {
                    throw new ApplicationException("InteropOleObject not created. No document available.");
                }
                // hand it our Interop.IOleClientSite implementation
                Win32.OleRun(htmlbaseDocument);
                oleDocumentObject.SetClientSite(this);
                Win32.OleLockRunning(htmlbaseDocument, true, false);
                created = true;
                // attach document and window base events
                propNotifyCookie = new ConnectionPointCookie(htmlbaseDocument, this, typeof(Interop.IPropertyNotifySink), false);
                // set document properties
                oleDocumentObject.SetHostNames("NetRix", "NetRix");
                // set ole events
                oleDocumentObject.Advise(this, out adviseSinkCookie);
                // set
                IConnectionPointContainer icpc = (IConnectionPointContainer)htmlbaseDocument;
                //find the source interface
                ////get IPropertyNotifySink interface
                //Guid g = new Guid("9BFBBC02-EFF1-101A-84ED-00AA00341D07");
                //icpc.FindConnectionPoint(ref g, out icp);
                ////pass a pointer to the host to the connection point
                //icp.Advise(this._site, out this._cookie);
            }
            catch (Exception ex)
            {
                Debug.Fail("CreateHtml failed", ex.Message);
            }
            finally
            {
                if (created == false)
                {
                    htmlbaseDocument = null;
                    oleDocumentObject = null;
                }
            }
        }

        // Gives Win32 focus to the MSHTML window, if one exists.
        internal void SetFocus()
        {
            if (activeObject != null)
            {
                IntPtr hWnd;
                if (activeObject.GetWindow(out hWnd) == Interop.S_OK)
                {
                    Win32.SetFocus(hWnd);
                }
            }
        }

        #endregion

        #region Internal used event fire methods

        /// <summary>
        /// Resizes the document view to match the host panel's client rect.
        /// </summary>
        internal void ParentResize()
        {
            if (interopDocumentView != null)
            {
                Interop.RECT r = EditorRect;
                interopDocumentView.SetRect(r);
            }
        }

        // Expands the document view to the given rectangle (right/bottom only).
        internal void ExpandView(Rectangle r)
        {
            Interop.RECT rect = new Interop.RECT();
            rect.right = r.Right;
            rect.bottom = r.Bottom;
            interopDocumentView.SetRect(rect);
        }

        #endregion

        #region Interop.IOleClientSite Implementation

        public int SaveObject()
        {
            return Interop.S_OK;
        }

        public int GetMoniker(int dwAssign, int dwWhichMoniker, out object ppmk)
        {
            ppmk = null;
            return Interop.E_NOTIMPL;
        }

        public int GetContainer(out Interop.IOleContainer ppContainer)
        {
            ppContainer = (Interop.IOleContainer)this;
            return Interop.S_OK;
        }

        public int ShowObject()
        {
            return Interop.S_OK;
        }

        public int OnShowWindow(int fShow)
        {
            return Interop.S_OK;
        }

        public int RequestNewObjectLayout()
        {
            return Interop.S_OK;
        }

        #endregion

        #region Interop.IOleContainer Implementation

        public void ParseDisplayName(object pbc, string pszDisplayName, int[] pchEaten, object[] ppmkOut)
        {
            Debug.Fail("ParseDisplayName - " + pszDisplayName);
            throw new COMException(String.Empty, Interop.E_NOTIMPL);
        }

        public void EnumObjects(int grfFlags, out Interop.IEnumUnknown ppenum)
        {
            ppenum = null;
            throw new COMException(String.Empty, Interop.E_NOTIMPL);
        }

        //        public void EnumObjects(int grfFlags, object[] ppenum)
        //        {
        //            throw new COMException(String.Empty, Interop.E_NOTIMPL);
        //        }

        public void LockContainer(int fLock)
        {
        }

        #endregion

        #region Interop.IOleDocumentSite Implementation

        /// <summary>
        /// Called by MSHTML to activate its view in this site; shows the view and
        /// optionally UI-activates it depending on <see cref="WithUI"/>.
        /// </summary>
        public int ActivateMe(Interop.IOleDocumentView pViewToActivate)
        {
            if (pViewToActivate == null)
                return Interop.E_INVALIDARG;
            Interop.RECT r = EditorRect;
            interopDocumentView = pViewToActivate;
            interopDocumentView.SetInPlaceSite(this);
            interopDocumentView.UIActivate(WithUI ? 1 : 0);
            interopDocumentView.SetRect(r);
            interopDocumentView.Show(1);
            return Interop.S_OK;
        }

        #endregion

        internal void HideCaret()
        {
            Win32.HideCaret(windowHandle);
        }

        internal void ShowCaret()
        {
            Win32.ShowCaret(windowHandle);
        }

        # region Interop.IOleInPlaceSiteEx Implementation

        int Interop.IOleInPlaceSiteEx.CanInPlaceActivate()
        {
            return Interop.S_OK;
        }

        int Interop.IOleInPlaceSiteEx.OnInPlaceActivate()
        {
            return Interop.S_OK;
        }

        int Interop.IOleInPlaceSiteEx.ContextSensitiveHelp(bool fEnterMode)
        {
            return Interop.E_NOTIMPL;
        }

        int Interop.IOleInPlaceSiteEx.GetWindow(ref IntPtr hwnd)
        {
            hwnd = IntPtr.Zero;
            if (this.htmlEditor != null)
            {
                hwnd = EditorHandle;
                return Interop.S_OK;
            }
            else
            {
                return Interop.E_FAIL;
            }
        }

        int Interop.IOleInPlaceSiteEx.OnInPlaceActivateEx(out bool pfNoRedraw, int dwFlags)
        {
            pfNoRedraw = false; //false means object needs to redraw
            return Interop.S_OK;
        }

        int Interop.IOleInPlaceSiteEx.OnInPlaceDeactivateEx(bool fNoRedraw)
        {
            //Debug.WriteLine(fNoRedraw, "OnInPlaceDeactivateEx::Enter");
            if (!fNoRedraw)
            {
                //redraw container
                this.htmlEditor.Invalidate();
            }
            Debug.WriteLine("OnInPlaceDeactivateEx::Leave");
            return Interop.S_OK;
        }

        int Interop.IOleInPlaceSiteEx.RequestUIActivate()
        {
            //Debug.WriteLine("RequestUIActivate::Enter");
            if (this.htmlEditor.Visible && this.htmlEditor.ActivationEnabled && !this.htmlEditor.StopFocusOnLoad || htmlEditor.IsReady)
            {
                return Interop.S_OK;
            }
            else
            {
                return Interop.S_FALSE;
            }
        }

        int Interop.IOleInPlaceSiteEx.OnUIActivate()
        {
            //Debug.WriteLine("OnUIActivate::Enter");
            //return HESULT.S_FALSE prevents focus grab
            //but means no caret
            if (this.htmlEditor.Visible && this.htmlEditor.ActivationEnabled && !this.htmlEditor.StopFocusOnLoad)
            {
                return Interop.S_OK;
            }
            else
            {
                return Interop.S_FALSE;
            }
        }

        // NOTE(review): this chunk ends inside GetWindowContext — the method body
        // continues beyond this view; the opening brace below is preserved verbatim.
        int Interop.IOleInPlaceSiteEx.GetWindowContext(out Interop.IOleInPlaceFrame ppFrame, out Interop.IOleInPlaceUIWindow ppDoc, Interop.RECT lprcPosRect, Interop.RECT lprcClipRect, Interop.tagOIFI lpFrameInfo)
        {
//Debug.WriteLine("GetWindowContext::Enter"); ppDoc = null; //XX set to null because same as Frame window ppFrame = this; if (lprcPosRect != null) { Win32.GetClientRect(EditorHandle, lprcPosRect); } if (lprcClipRect != null) { Win32.GetClientRect(EditorHandle, lprcClipRect); } //lpFrameInfo.cb = Marshal.SizeOf(typeof(tagOIFI)); //This value is set by the caller lpFrameInfo.fMDIApp = 0; lpFrameInfo.hwndFrame = EditorHandle; lpFrameInfo.hAccel = IntPtr.Zero; lpFrameInfo.cAccelEntries = 0; //Debug.WriteLine("GetWindowContext::Leave"); return Interop.S_OK; } int Interop.IOleInPlaceSiteEx.Scroll(Interop.tagSIZE scrollExtant) { return Interop.E_NOTIMPL; } int Interop.IOleInPlaceSiteEx.OnUIDeactivate(int fUndoable) { return Interop.S_OK; } int Interop.IOleInPlaceSiteEx.OnInPlaceDeactivate() { activeObject = null; return Interop.S_OK; } int Interop.IOleInPlaceSiteEx.DiscardUndoState() { return Interop.E_NOTIMPL; } int Interop.IOleInPlaceSiteEx.DeactivateAndUndo() { return Interop.S_OK; } int Interop.IOleInPlaceSiteEx.OnPosRectChange(ref Interop.RECT lprcPosRect) { return Interop.S_OK; } # endregion #region Interop.IOleInPlaceSite Implementation int Interop.IOleInPlaceSite.DiscardUndoState() { return Interop.E_NOTIMPL; } int Interop.IOleInPlaceSite.DeactivateAndUndo() { return Interop.S_OK; } int Interop.IOleInPlaceSite.OnInPlaceDeactivate() { activeObject = null; return Interop.S_OK; } int Interop.IOleInPlaceSite.OnUIDeactivate(int fUndoable) { return Interop.S_OK; } IntPtr Interop.IOleInPlaceSite.GetWindow() { IntPtr hwnd = IntPtr.Zero; if (this.htmlEditor != null) { hwnd = EditorHandle; } return hwnd; } int Interop.IOleInPlaceSite.ContextSensitiveHelp(int fEnterMode) { return Interop.E_NOTIMPL; } int Interop.IOleInPlaceSite.CanInPlaceActivate() { return Interop.S_OK; } int Interop.IOleInPlaceSite.OnInPlaceActivate() { return Interop.S_OK; } int Interop.IOleInPlaceSite.OnUIActivate() { return Interop.S_OK; } int Interop.IOleInPlaceSite.GetWindowContext(out 
Interop.IOleInPlaceFrame ppFrame, out Interop.IOleInPlaceUIWindow ppDoc, Interop.RECT lprcPosRect, Interop.RECT lprcClipRect, Interop.tagOIFI lpFrameInfo) { Debug.WriteLine("GetWindowContext2::Enter"); ppFrame = this; ppDoc = null; Win32.GetClientRect(EditorHandle, lprcPosRect); Win32.GetClientRect(EditorHandle, lprcClipRect); lpFrameInfo.cb = Marshal.SizeOf(typeof(Interop.tagOIFI)); lpFrameInfo.fMDIApp = 0; lpFrameInfo.hwndFrame = EditorHandle; lpFrameInfo.hAccel = Interop.NullIntPtr; lpFrameInfo.cAccelEntries = 0; Debug.WriteLine("GetWindowContext2::Leave"); return Interop.S_OK; } int Interop.IOleInPlaceSite.Scroll(Interop.tagSIZE scrollExtant) { return Interop.E_NOTIMPL; } int Interop.IOleInPlaceSite.OnPosRectChange(Interop.RECT lprcPosRect) { return Interop.S_OK; } #endregion #region Interop.IOleInPlaceFrame Implementation IntPtr Interop.IOleInPlaceFrame.GetWindow() { return EditorHandle; } void Interop.IOleInPlaceFrame.ContextSensitiveHelp(int fEnterMode) { throw new COMException(String.Empty, Interop.E_NOTIMPL); } void Interop.IOleInPlaceFrame.GetBorder(Interop.RECT lprectBorder) { throw new COMException(String.Empty, Interop.E_NOTIMPL); } void Interop.IOleInPlaceFrame.RequestBorderSpace(Interop.RECT pborderwidths) { throw new COMException(String.Empty, Interop.E_NOTIMPL); } void Interop.IOleInPlaceFrame.SetBorderSpace(Interop.RECT pborderwidths) { throw new COMException(String.Empty, Interop.E_NOTIMPL); } void Interop.IOleInPlaceFrame.SetActiveObject(Interop.IOleInPlaceActiveObject pActiveObject, string pszObjName) { try { if (pActiveObject == null) { if (this.activeObject != null) { Marshal.ReleaseComObject(this.activeObject); } this.activeObject = null; this.windowHandle = IntPtr.Zero; } else { this.activeObject = pActiveObject; this.windowHandle = new IntPtr(); pActiveObject.GetWindow(out this.windowHandle); } } catch { } } public void InsertMenus(IntPtr hmenuShared, Interop.tagOleMenuGroupWidths lpMenuWidths) { throw new COMException(String.Empty, 
Interop.E_NOTIMPL); } public void SetMenu(IntPtr hmenuShared, IntPtr holemenu, IntPtr hwndActiveObject) { throw new COMException(String.Empty, Interop.E_NOTIMPL); } public void RemoveMenus(IntPtr hmenuShared) { throw new COMException(String.Empty, Interop.E_NOTIMPL); } public void SetStatusText(string pszStatusText) { } public void EnableModeless(int fEnable) { } public int TranslateAccelerator(Interop.COMMSG lpmsg, short wID) { return Interop.S_FALSE; } #endregion #region IDocHostUIHandler Implementation public int ShowContextMenu(int dwID, ref Interop.POINT pt, object pcmdtReserved, object pdispReserved) { Point location = htmlEditor.PointToClient(new Point(pt.x, pt.y)); Interop.IHTMLElement element = this.MSHTMLDocument.ElementFromPoint(location.X, location.Y); Control ielement = this.htmlEditor.GenericElementFactory.CreateElement(element); ShowContextMenuEventArgs e = new ShowContextMenuEventArgs(location, false, dwID, ielement); try { htmlEditor.OnShowContextMenu(e); } catch { // Make sure we return Interop.S_OK } return Interop.S_OK; } public int GetHostInfo(Interop.DOCHOSTUIINFO info) { info.dwDoubleClick = (int)Interop.DOCHOSTUIDBLCLICK.DEFAULT; int flags = 0; if (htmlEditor.NoTextSelection) { flags |= (int)Interop.DOCHOSTUIFLAG.DIALOG; } if (htmlEditor.AllowInPlaceNavigation) { flags |= (int)Interop.DOCHOSTUIFLAG.ENABLE_INPLACE_NAVIGATION; } if (htmlEditor.ImeReconversion) { flags |= (int)Interop.DOCHOSTUIFLAG.IME_ENABLE_RECONVERSION; } if (!htmlEditor.Border3d) { flags |= (int)Interop.DOCHOSTUIFLAG.NO3DBORDER; } if (!htmlEditor.ScriptEnabled) { flags |= (int)Interop.DOCHOSTUIFLAG.DISABLE_SCRIPT_INACTIVE; } if (!htmlEditor.ScrollBarsEnabled) { flags |= (int)Interop.DOCHOSTUIFLAG.SCROLL_NO; } if (htmlEditor.FlatScrollBars) { flags |= (int)Interop.DOCHOSTUIFLAG.FLAT_SCROLLBAR; } if (htmlEditor.BlockDefault == BlockDefaultType.DIV) { flags |= (int)Interop.DOCHOSTUIFLAG.DIV_BLOCKDEFAULT; } if (htmlEditor.XPTheming) { flags |= (int)Interop.DOCHOSTUIFLAG.THEME; 
} else { flags |= (int)Interop.DOCHOSTUIFLAG.NOTHEME; } // IE 6 Enhancements flags |= (int)Interop.DOCHOSTUIFLAG.DISABLE_EDIT_NS_FIXUP; flags |= (int)Interop.DOCHOSTUIFLAG.DISABLE_UNTRUSTEDPROTOCOL; // IE 7 Enhancements flags |= (int)Interop.DOCHOSTUIFLAG.USE_WINDOWLESS_SELECTCONTROL; // IE 8 Enhancements if (htmlEditor.AutoWordSelection) { //flags |= (int)Interop.DOCHOSTUIFLAG.AUTOWORD; } info.dwFlags = flags; return Interop.S_OK; } public int EnableModeless(bool fEnable) { return fEnable ? Interop.S_OK : Interop.S_FALSE; } public int ShowUI(int dwID, Interop.IOleInPlaceActiveObject activeObject, Interop.IOleCommandTarget commandTarget, Interop.IOleInPlaceFrame frame, Interop.IOleInPlaceUIWindow doc) { return Interop.S_FALSE; } public int HideUI() { return Interop.S_OK; } public int UpdateUI() { this.htmlEditor.OnUpdateUI(lastEventType); return Interop.S_OK; } public int OnDocWindowActivate(bool fActivate) { return Interop.E_NOTIMPL; } public int OnFrameWindowActivate(bool fActivate) { return Interop.E_NOTIMPL; } public int ResizeBorder(Interop.RECT rect, Interop.IOleInPlaceUIWindow doc, bool fFrameWindow) { return Interop.E_NOTIMPL; } public int GetOptionKeyPath(string[] pbstrKey, int dw) { pbstrKey[0] = null; return Interop.S_OK; } public int GetDropTarget(Interop.IOleDropTarget pDropTarget, out Interop.IOleDropTarget ppDropTarget) { if (this.htmlEditor._dropTarget == null) { this.htmlEditor._dropTarget = new DropTarget(this.htmlEditor, DataObjectConverter, pDropTarget); ppDropTarget = this.htmlEditor._dropTarget; return Interop.S_OK; } else { ppDropTarget = null; //pDropTarget; return Interop.S_FALSE; } } /// <summary> /// Called if in JScript windows.external.WhatEver is being executed. 
/// </summary> /// <remarks> /// E_NOTIMPL = fires native error window /// E_DEFAULTACTION = security exception /// E_FAIL = unspecified error /// E_ABORT = suppress an native window /// E_HANDLE = provide valid handle to invoke code /// E_UNEXPECTED = unexpected error /// E_POINTER = pointer expected /// E_NOINTERFACE = null or not object /// E_ACCESSDENIED = security error /// E_OUTOFMEMORY = out of mem error /// </remarks> /// <param name="ppDispatch"></param> /// <returns></returns> public int GetExternal(out object ppDispatch) { ppDispatch = ((HtmlWindow)htmlEditor.Window).ObjectForScripting; ScriptExternalEventArgs args = new ScriptExternalEventArgs(); if (ppDispatch == null) { args.ExternalError = ScriptExternalEventArgs.ExternalErrorCode.E_ABORT; } else { args.ExternalError = ScriptExternalEventArgs.ExternalErrorCode.S_OK; } ((HtmlWindow)htmlEditor.Window).OnScriptExternal(args); return (int)args.ExternalError; } public int TranslateAccelerator(Interop.COMMSG msg, ref Guid group, int nCmdID) { return Interop.S_FALSE; } public int TranslateUrl(int dwTranslate, string strURLIn, out string pstrURLOut) { BeforeNavigateEventArgs args = new BeforeNavigateEventArgs(strURLIn); this.htmlEditor.OnBeforeNavigate(args); if (args.Cancel) { // This is how we cancel it, a bit weird to provide a blank, but String.Empty will not work! pstrURLOut = " "; } else { pstrURLOut = args.Url; } return pstrURLOut.Equals(strURLIn) ? 
Interop.S_FALSE : Interop.S_OK; } public int FilterDataObject(Interop.IOleDataObject pDO, out Interop.IOleDataObject ppDORet) { ppDORet = null; return Interop.E_NOTIMPL; } #endregion #region IAdviseSink Implementation public void OnDataChange(Interop.FORMATETC pFormat, Interop.STGMEDIUM pStg) { } public void OnViewChange(int dwAspect, int index) { } public void OnRename(object pmk) { } public void OnSave() { } public void OnClose() { } #endregion #region Interop.IOleServiceProvider public int QueryService(ref Guid sid, ref Guid iid, out IntPtr ppvObject) { int hr = Interop.E_NOINTERFACE; ppvObject = Interop.NullIntPtr; // ask our explicit services container Type type = GetTypeFromIID(sid); if (type != null && htmlEditor.ServiceProvider != null) { object service = htmlEditor.ServiceProvider.GetService(type); if (service != null) { if (iid.Equals(Interop.IID_IUnknown)) { ppvObject = Marshal.GetIUnknownForObject(service); } else { IntPtr pUnk = Marshal.GetIUnknownForObject(service); Marshal.QueryInterface(pUnk, ref iid, out ppvObject); Marshal.Release(pUnk); return Interop.S_OK; } } } return hr; } private static readonly Guid IUnknowGuid = new Guid("00000118-0000-0000-C000-000000000046"); private static readonly Guid IHTMLEditDesignerGuid = new Guid("3050f662-98b5-11cf-bb82-00aa00bdce0b"); private static readonly Guid IHTMLEditHostGuid = new Guid("3050f6a0-98b5-11cf-bb82-00aa00bdce0b"); private static readonly Guid IAuthenticateGuid = new Guid("79EAC9D0-BAF9-11CE-8C82-00AA004BA90B"); private static readonly Guid IHttpSecurityGuid = new Guid("79eac9d7-bafa-11ce-8c82-00aa004ba90b"); //private static readonly Guid IOleCommandTargetGuid = new Guid("b722bccb-4e68-101b-a2bc-00aa00404770"); private static readonly Guid IOleCommandTargetGuid = new Guid("3050f4b5-98b5-11cf-bb82-00aa00bdce0b"); private static readonly Guid IOleUndoManagerGuid = new Guid("d001f200-ef97-11ce-9bc9-00aa00608e01"); private static readonly Guid IInternetSecurityManagerGuid = new 
Guid("79eac9ee-baf9-11ce-8c82-00aa004ba90b"); private Type GetTypeFromIID(Guid iid) { if (iid.Equals(IUnknowGuid)) return null; if (iid.Equals(IHTMLEditDesignerGuid)) return typeof(Interop.IHTMLEditDesigner); if (iid.Equals(IHTMLEditHostGuid)) return typeof(Interop.IHTMLEditHost); if (iid.Equals(IAuthenticateGuid)) return typeof(Interop.IAuthenticate); if (iid.Equals(IHttpSecurityGuid)) return typeof(Interop.IHttpSecurity); if (iid.Equals(IOleCommandTargetGuid)) return typeof(Interop.IOleCommandTarget); if (iid.Equals(IOleUndoManagerGuid)) return typeof(Interop.IOleUndoManager); if (iid.Equals(IInternetSecurityManagerGuid)) return typeof(Interop.IInternetSecurityManager); return null; } #endregion #region Interop.IPropertyNotifySink Implementation bool _firstChanged = false; public int OnChanged(int dispID) { try { switch (dispID) { case 1005 /*DISPID_FRAMECHANGE*/: if (!_firstChanged) { _firstChanged = true; } // string readyState = MSHTMLDocument.GetReadyState(); // // the method will called after initialisation, this activates the site // // for the first time. Subsequent calls does not fire the ready event again. 
break; case DispId.READYSTATE: string newReadyState = this.MSHTMLDocument.GetReadyState(); if (newReadyState != this._readyStateString) { _readyStateString = newReadyState; if (_readyStateString.Equals("complete")) { if (winEvents != null) { winEvents.Dispose(); winEvents = null; } // global events window = htmlbaseDocument.GetParentWindow(); winEvents = new WindowsEvents(window, htmlEditor, htmlEditor.Window); } this.htmlEditor.OnReadyStateChanged(newReadyState); } break; } } catch { return Interop.S_FALSE; } return Interop.S_OK; } public int OnRequestEdit(int dispID) { return Interop.S_OK; } #endregion # region IHtmlEditDesigner internal HtmlFrameSet.FrameWindow RelatedFrameWindow = null; private string lastFrameName = String.Empty; private Interop.IHTMLWindow2 window = null; private int returnCode = Interop.S_FALSE; private WindowsEvents winEvents; private string lastEventType; public int PreHandleEvent(int dispId, Interop.IHTMLEventObj e) { returnCode = Interop.S_FALSE; Interop.IHTMLElement el = e.srcElement; if (e.srcElement != null) { lastEventType = e.type; Control element = htmlEditor.GenericElementFactory.CreateElement(el); returnCode = this.htmlEditor.InvokeHtmlEvent(e, element); if (returnCode == Interop.S_OK || (element is IElement && !htmlEditor.DesignModeEnabled)) { e.cancelBubble = true; e.returnValue = Interop.S_OK; } else { if (returnCode == Interop.S_FALSE && dispId == DispId.KEYDOWN && htmlEditor.DesignModeEnabled) { switch (e.keyCode) { case DEL: if (this.htmlEditor.InternalShortcutKeys) { try { this.htmlEditor.Exec(Interop.IDM.DELETE); } finally { returnCode = Interop.S_OK; } } break; default: break; } } } } return returnCode; } public int PostHandleEvent(int dispId, Interop.IHTMLEventObj e) { return returnCode; } public int TranslateAccelerator(int dispId, Interop.IHTMLEventObj e) { return Interop.S_FALSE; } public int PostEditorEventNotify(int dispId, Interop.IHTMLEventObj e) { HandlePostEvents(dispId, e); return Interop.S_FALSE; } private 
void HandlePostEvents(int dispId, Interop.IHTMLEventObj e) { // For spellchecker and other late bound event sinks htmlEditor.InvokePostEditorEvent(new PostEditorEventArgs(e)); if (e.srcElement != null) { if (dispId == DispId.KEYDOWN || dispId == DispId.MOUSEUP) { // We check the current scope only if the caret is visible for the user if (dispId == DispId.KEYDOWN) { //Application.DoEvents(); IElement currentElement = this.htmlEditor.Window.GetElementFromCaret() as IElement; if (currentElement != null) { this.htmlEditor.InvokeHtmlElementChanged(currentElement.GetBaseElement(), HtmlElementChangedType.Key); } else { this.htmlEditor.InvokeHtmlElementChanged(htmlEditor.GetBodyElement().GetBaseElement(), HtmlElementChangedType.Key); } } // if a mouse click was handled the event source has the element if (dispId == DispId.MOUSEUP) { this.htmlEditor.InvokeHtmlElementChanged(e.srcElement, HtmlElementChangedType.Mouse); } } } } #endregion #region IDocHostShowUI Members int Interop.IDocHostShowUI.ShowMessage(IntPtr hwnd, string lpStrText, string lpstrCaption, uint dwType, string lpStrHelpFile, uint dwHelpContext, out uint lpresult) { // dwType 48 == Alert() // 33 == confirm() lpresult = (uint)Interop.MBID.OK; ShowMessageEventArgs e = new ShowMessageEventArgs(lpStrText, lpstrCaption, dwType); ((HtmlWindow)htmlEditor.Window).OnScriptMessage(e); if (e.Cancel) return Interop.S_OK; else return Interop.S_FALSE; } int Interop.IDocHostShowUI.ShowHelp(IntPtr hwnd, string lpHelpFile, uint uCommand, uint dwData, Interop.POINT ptMouse, object pDispatchObjectHit) { return Interop.S_OK; } #endregion } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

// NOTE(review): auto-generated xUnit conformance tests exercising 'dynamic' in unsafe
// contexts. Original newlines were lost; formatting restored here, tokens unchanged.
// Each namespace is one self-contained scenario; the //<Area>/<Title>/<Expects> comment
// blocks are test-harness metadata and must be preserved verbatim.

using Xunit;

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.anonytype01.anonytype01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.anonytype01.anonytype01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // anonymous type
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class UC
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic x = new { P = new UC() } ;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array01.array01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array01.array01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // array initializer : usafe array initializer with dynamic
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d1 = new US();
            dynamic d2 = new US();
            US[] array = { d1, d2, new US()} ;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array02.array02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.array02.array02;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // array initializer : dynamic array initializer with unsafe
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic[] array = { new US(), new US()} ;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.attribute01.attribute01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.attribute01.attribute01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // attribute
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(20,17\).*CS0649</Expects>
    using System;

    [AttributeUsage(AttributeTargets.Class)]
    public class MyAttr : System.Attribute
    {
    }

    [MyAttr]
    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d1 = new US();
            US u = d1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection01.collection01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection01.collection01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // collection initializer : dynamic collection initializer with unsafe
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System.Collections.Generic;

    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            List<dynamic> col = new List<dynamic>
            {
                new US(), new US()}

            ;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection02.collection02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection02.collection02;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // collection initializer : unsafe type collection initializer with dynamic
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System.Collections.Generic;

    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new US();
            List<US> col = new List<US>
            {
                d, d
            }

            ;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection03.collection03
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.collection03.collection03;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // collection initializer : unsafe type collection initializer with dynamic
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(17,17\).*CS0649</Expects>
    using System;
    using System.Collections.Generic;
    using Microsoft.CSharp.RuntimeBinder;

    unsafe public class US
    {
        public int* p;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d1 = 1;
            dynamic d2 = "hi";
            try
            {
                // adding an int and a string where a US is required must fail to bind
                List<US> col = new List<US>
                {
                    d1, d2
                }

                ;
            }
            catch (RuntimeBinderException ex)
            {
                bool ret = ErrorVerifier.Verify(ErrorMessageId.BadArgTypes, ex.Message, "System.Collections.Generic.List<US>.Add(US)");
                if (ret)
                    return 0;
            }

            return 1;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor01.ctor01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor01.ctor01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // ctor - pointer as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe class C
    {
        public int* p;
        public C(int* q)
        {
            p = q;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            //int num = 5;
            //int* p = &num;
            //dynamic d = new C(p);
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor02.ctor02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor02.ctor02;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // ctor - dynamic as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe class C
    {
        public dynamic p;
        public C(dynamic q)
        {
            p = q;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            int num = 5;
            dynamic d = new C(num);
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor03.ctor03
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.ctor03.ctor03;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // ctor - mixed dynamic and pointer as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe class C
    {
        public dynamic d;
        public int* p;
        public C(dynamic x, int* y)
        {
            d = x;
            p = y;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            int num = 5;
            int* p = &num;
            dynamic d = new C(num, p);
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dlgate01.dlgate01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dlgate01.dlgate01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type</Title>
    // <Description>
    // delegate
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    internal unsafe delegate void Foo(int* p);
    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = (Foo)Test.Bar;
            return 0;
        }

        public static void Bar(int* q)
        {
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dtor01.dtor01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.dtor01.dtor01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // dtor
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        unsafe ~C()
        {
            int num = 5;
            int* ptr = &num;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new C();
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.evnt01.evnt01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.evnt01.evnt01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type</Title>
    // <Description>
    // delegate
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    internal unsafe delegate void Foo(int* p);
    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = (Foo)Test.Bar;
            return 0;
        }

        public static void Bar(int* q)
        {
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.explicit01.explicit01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.explicit01.explicit01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // explicit conversion
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
        public static explicit operator int (US u)
        {
            return 1;
        }

        public static explicit operator US(int i)
        {
            return new US();
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            US u = new US();
            dynamic d = (int)u;
            dynamic x = u;
            int i = (int)x;
            if (i != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.expressiontree01.expressiontree01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.expressiontree01.expressiontree01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // expression tree
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(15,17\).*CS0649</Expects>
    using System;
    using System.Linq.Expressions;

    public unsafe struct UC
    {
        public int* p;
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Expression<Func<dynamic, UC>> f = x => new UC();
            dynamic dyn = 10;
            f.Compile()(dyn);
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.field01.field01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.field01.field01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // filed (static & non-static)
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        public unsafe int* p;
        public unsafe static char* q;
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new C();
            return 0;
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.fieldinit01.fieldinit01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.fieldinit01.fieldinit01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title>unsafe conext</Title>
    // <Description>
    // dynamic in field initializer
    // </Description>
    // <RelatedBug></RelatedBug>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        public dynamic field = 10;
    }

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            C c = new C();
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.implicit01.implicit01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.implicit01.implicit01;

    unsafe
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // implicit conversion
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(12,17\).*CS0649</Expects>
    public class US
    {
        public int* p;
        public static implicit operator int (US u)
        {
            return 1;
        }

        public static implicit operator US(int i)
        {
            return new US();
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            US u = new US();
            dynamic x = u;
            int i = x;
            if (i != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer02.indexer02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer02.indexer02;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // indexer - dynamic as index
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        public const int field = 10;
        unsafe public int* this[int[] index]
        {
            get
            {
                fixed (int* p = index)
                {
                    return p;
                }
            }
        }
    }

    static public class D
    {
        public static int field = 1;
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            C d = new C();
            int[] array = new[]
            {
                1, 2, 3
            }

            ;
            int* x = ((C)d)[array];
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer04.indexer04
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.indexer04.indexer04;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // indexer - pointer as return value
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class Unsafe
    {
        unsafe public int* this[int index]
        {
            get
            {
                int temp = 10;
                return &temp;
            }

            set
            {
            }
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            dynamic d = new Unsafe();
            bool ret = true;
            try
            {
                // dynamic dispatch on a pointer-returning indexer must raise UnsafeNeeded
                var p = d[1];
            }
            catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException ex)
            {
                ret = ErrorVerifier.Verify(ErrorMessageId.UnsafeNeeded, ex.Message);
                if (ret)
                    return 0;
            }

            return 1;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.integeregererface02.integeregererface02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.integeregererface02.integeregererface02;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type</Title>
    // <Description>
    // interface - method with dynamic
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public unsafe interface IF
    {
        void Foo(dynamic p);
    }

    unsafe public class C : IF
    {
        public void Foo(dynamic p)
        {
        }
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            IF i = new C();
            dynamic d = i;
            d.Foo(i);
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda01.lambda01
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda01.lambda01;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // lambda expression
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System;

    unsafe public class UC
    {
        public int* p;
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Func<dynamic, UC> f1 = x => new UC();
            f1(1);
            Func<UC, dynamic> f2 = x => x;
            f2(new UC());
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda02.lambda02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.lambda02.lambda02;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // lambda expression
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(14,17\).*CS0649</Expects>
    using System;

    unsafe public class UC
    {
        public int* p;
    }

    unsafe public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Func<int, int> f = x =>
            {
                int* p = &x;
                return *p;
            }

            ;
            dynamic dyn = 10;
            int result = f(dyn);
            if (result == 10)
                return 0;
            return 1;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method02.method02
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method02.method02;
    // <Area> dynamic in unsafe code </Area>
    // <Title> unsafe type </Title>
    // <Description>
    // method (static & non-static) - dynamic as arg
    // </Description>
    //<Expects Status=success></Expects>
    // <Code>
    public class C
    {
        unsafe public int Foo(dynamic p)
        {
            return 1;
        }

        unsafe public static int Bar(dynamic p)
        {
            return 2;
        }
    }

    public unsafe class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
Assert.Equal(0, MainMethod(null)); } public static int MainMethod(string[] args) { dynamic d = new C(); int num = 5; int result = d.Foo(num) + C.Bar(d); if (result != 3) return 1; return 0; } } } namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method05.method05 { using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method05.method05; // <Area> dynamic in unsafe code </Area> // <Title> unsafe type </Title> // <Description> // method (static & non-static) - dynamic as return type // </Description> //<Expects Status=success></Expects> // <Code> public class C { public static int field = 10; unsafe public dynamic Foo() { return 1; } unsafe public static dynamic Bar() { return 2; } } public unsafe class Test { [Fact] public static void DynamicCSharpRunTest() { Assert.Equal(0, MainMethod(null)); } public static int MainMethod(string[] args) { dynamic d = new C(); d.Foo(); C.Bar(); return 0; } } } namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method07.method07 { using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method07.method07; unsafe // <Area> dynamic in unsafe code </Area> // <Title> unsafe type </Title> // <Description> // method - unsafe type as arg : extension method // </Description> //<Expects Status=success></Expects> // <Code> //<Expects Status=warning>\(12,17\).*CS0649</Expects> public class US { public int* ptr; } static public class Ext { public static void Foo(this US u, dynamic d) { } } public class Test { [Fact] public static void DynamicCSharpRunTest() { Assert.Equal(0, MainMethod(null)); } public static int MainMethod(string[] args) { US u = new US(); u.Foo(u); return 0; } } // </Code> } namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method08.method08 { using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.method08.method08; // <Area> dynamic in unsafe code </Area> // <Title> unsafe type </Title> // <Description> // method - OPTIONAL param // 
</Description> // <RelatedBug></RelatedBug> //<Expects Status=success></Expects> // <Code> using System; using Microsoft.CSharp.RuntimeBinder; public unsafe class Test { public void Foo(void* ptr = null) { } [Fact] public static void DynamicCSharpRunTest() { Assert.Equal(0, MainMethod(null)); } public static int MainMethod(string[] args) { dynamic d = new Test(); bool ret = true; try { d.Foo(); } catch (RuntimeBinderException ex) { ret = ErrorVerifier.Verify(ErrorMessageId.UnsafeNeeded, ex.Message); if (ret) return 0; } return 1; } } // </Code> } namespace ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.objinit01.objinit01 { using ManagedTests.DynamicCSharp.Conformance.dynamic.unsfe.basic.objinit01.objinit01; // <Area> dynamic in unsafe code </Area> // <Title> unsafe type </Title> // <Description> // object initializer // </Description> //<Expects Status=success></Expects> // <Code> using System.Collections.Generic; unsafe public class US { public int* p; } unsafe public class Test { [Fact] public static void DynamicCSharpRunTest() { Assert.Equal(0, MainMethod(null)); } public static int MainMethod(string[] args) { int num = 10; dynamic u = new US { p = &num } ; return 0; } } // </Code> }
using Lucene.Net.Support;
using System.Diagnostics;
using System.Reflection;

namespace Lucene.Net.Util.Fst
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
    using PagedGrowableWriter = Lucene.Net.Util.Packed.PagedGrowableWriter;

    /// <summary>
    /// Open-addressed hash table used to dedup states (lookup already-frozen
    /// states): maps an unfrozen (under-construction) node to an equivalent
    /// already-frozen node so identical suffixes share storage in the FST.
    /// </summary>
    internal sealed class NodeHash<T>
    {
        // Each slot holds the address of a frozen node, or 0 when empty.
        private PagedGrowableWriter table;

        // Number of occupied slots.
        private long count;

        // Table size minus one; the table size is always a power of two.
        private long mask;

        private readonly FST<T> fst;
        private readonly FST.Arc<T> scratchArc = new FST.Arc<T>();
        private readonly FST.BytesReader input;

        // LUCENENET specific - optimize the Hash methods
        // by only calling Collections.GetHashCode() if the value is a reference type
        private readonly static bool tIsValueType = typeof(T).IsValueType;

        public NodeHash(FST<T> fst, FST.BytesReader input)
        {
            // Start with 16 slots (mask 15); Rehash() grows as needed.
            table = new PagedGrowableWriter(16, 1 << 30, 8, PackedInt32s.COMPACT);
            mask = 15;
            this.fst = fst;
            this.input = input;
        }

        /// <summary>
        /// Compares an unfrozen node against the frozen node stored at
        /// <paramref name="address"/>, arc by arc. True only when every arc
        /// matches and both nodes have the same number of arcs.
        /// </summary>
        private bool NodesEqual(Builder.UnCompiledNode<T> node, long address)
        {
            fst.ReadFirstRealTargetArc(address, scratchArc, input);

            // Fast reject: the fixed-array arc encoding records the arc count up front.
            if (scratchArc.BytesPerArc != 0 && node.NumArcs != scratchArc.NumArcs)
            {
                return false;
            }

            for (int i = 0; i < node.NumArcs; i++)
            {
                Builder.Arc<T> expected = node.Arcs[i];
                bool differs = expected.Label != scratchArc.Label
                    || !expected.Output.Equals(scratchArc.Output)
                    || ((Builder.CompiledNode)expected.Target).Node != scratchArc.Target
                    || !expected.NextFinalOutput.Equals(scratchArc.NextFinalOutput)
                    || expected.IsFinal != scratchArc.IsFinal;
                if (differs)
                {
                    return false;
                }

                if (scratchArc.IsLast)
                {
                    // Frozen node has no more arcs; equal only if this was the
                    // unfrozen node's last arc too.
                    return i == node.NumArcs - 1;
                }

                fst.ReadNextRealArc(scratchArc, input);
            }

            // Unfrozen node ran out of arcs before the frozen node did.
            return false;
        }

        /// <summary>
        /// hash code for an unfrozen node. this must be identical
        /// to the frozen case (below)!!
        /// </summary>
        private long Hash(Builder.UnCompiledNode<T> node)
        {
            const int PRIME = 31;
            long hash = 0;

            // TODO: maybe if number of arcs is high we can safely subsample?
            for (int i = 0; i < node.NumArcs; i++)
            {
                Builder.Arc<T> arc = node.Arcs[i];
                hash = PRIME * hash + arc.Label;

                long target = ((Builder.CompiledNode)arc.Target).Node;
                hash = PRIME * hash + (int)(target ^ (target >> 32));

                // LUCENENET specific - optimize the Hash methods
                // by only calling Collections.GetHashCode() if the value is a reference type
                hash = PRIME * hash + (tIsValueType ? arc.Output.GetHashCode() : Collections.GetHashCode(arc.Output));
                hash = PRIME * hash + (tIsValueType ? arc.NextFinalOutput.GetHashCode() : Collections.GetHashCode(arc.NextFinalOutput));

                if (arc.IsFinal)
                {
                    hash += 17;
                }
            }

            // Mask off the sign bit so the result is non-negative.
            return hash & long.MaxValue;
        }

        /// <summary>
        /// hash code for a frozen node; mirrors the unfrozen overload above.
        /// </summary>
        private long Hash(long node)
        {
            const int PRIME = 31;
            long hash = 0;

            fst.ReadFirstRealTargetArc(node, scratchArc, input);
            while (true)
            {
                hash = PRIME * hash + scratchArc.Label;
                hash = PRIME * hash + (int)(scratchArc.Target ^ (scratchArc.Target >> 32));

                // LUCENENET specific - optimize the Hash methods
                // by only calling Collections.GetHashCode() if the value is a reference type
                hash = PRIME * hash + (tIsValueType ? scratchArc.Output.GetHashCode() : Collections.GetHashCode(scratchArc.Output));
                hash = PRIME * hash + (tIsValueType ? scratchArc.NextFinalOutput.GetHashCode() : Collections.GetHashCode(scratchArc.NextFinalOutput));

                if (scratchArc.IsFinal)
                {
                    hash += 17;
                }
                if (scratchArc.IsLast)
                {
                    break;
                }
                fst.ReadNextRealArc(scratchArc, input);
            }

            return hash & long.MaxValue;
        }

        /// <summary>
        /// Returns the address of a frozen node equivalent to
        /// <paramref name="nodeIn"/>, freezing and inserting it first when no
        /// equivalent node exists yet.
        /// </summary>
        public long Add(Builder.UnCompiledNode<T> nodeIn)
        {
            long h = Hash(nodeIn);
            long slot = h & mask;
            int probe = 0;
            while (true)
            {
                long existing = table.Get(slot);
                if (existing == 0)
                {
                    // Empty slot: freeze the node and record its address.
                    long node = fst.AddNode(nodeIn);
                    long hashNode = Hash(node);
                    Debug.Assert(hashNode == h, "frozenHash=" + hashNode + " vs h=" + h);
                    count++;
                    table.Set(slot, node);

                    // Rehash at 2/3 occupancy:
                    if (count > 2 * table.Count / 3)
                    {
                        Rehash();
                    }
                    return node;
                }
                if (NodesEqual(nodeIn, existing))
                {
                    // same node is already here
                    return existing;
                }

                // quadratic probe
                slot = (slot + (++probe)) & mask;
            }
        }

        /// <summary>
        /// called only by rehash: reinserts an existing frozen node's address
        /// into the freshly grown table.
        /// </summary>
        private void AddNew(long address)
        {
            long slot = Hash(address) & mask;
            int probe = 0;
            while (table.Get(slot) != 0)
            {
                // quadratic probe
                slot = (slot + (++probe)) & mask;
            }
            table.Set(slot, address);
        }

        /// <summary>
        /// Doubles the table size and reinserts every occupied slot.
        /// </summary>
        private void Rehash()
        {
            PagedGrowableWriter oldTable = table;
            table = new PagedGrowableWriter(2 * oldTable.Count, 1 << 30, PackedInt32s.BitsRequired(count), PackedInt32s.COMPACT);
            mask = table.Count - 1;
            for (long idx = 0; idx < oldTable.Count; idx++)
            {
                long address = oldTable.Get(idx);
                if (address != 0)
                {
                    AddNew(address);
                }
            }
        }
    }
}
#region license // Copyright (c) 2004, Rodrigo B. de Oliveira (rbo@acm.org) // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // * Neither the name of Rodrigo B. de Oliveira nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF // THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #endregion using System; using System.Collections.Generic; using antlr; namespace Boo.Lang.Parser.Util { /// <summary> /// Process white space agnostic tokens to generate INDENT, DEDENT /// virtual tokens as expected by the standard grammar. 
/// </summary>
public class WSATokenStreamFilter : TokenStream
{
	// Characters treated as line separators when deciding whether buffered
	// whitespace contained a newline.
	static readonly char[] NewLineCharArray = new char[] { '\r', '\n' };

	/// <summary>
	/// token input stream.
	/// </summary>
	protected TokenStream _istream;

	/// <summary>
	/// last non whitespace token for accurate location information
	/// </summary>
	protected IToken _lastEnqueuedToken;

	/// <summary>
	/// Flags the current QQ expression to need a DEDENT when closing it.
	/// </summary>
	protected bool _lastQQIndented;

	/// <summary>
	/// tokens waiting to be consumed
	/// </summary>
	protected Queue<IToken> _pendingTokens;

	// Accumulates the text of skipped whitespace tokens between two
	// meaningful tokens; inspected by BufferHasNewLine().
	System.Text.StringBuilder _buffer = new System.Text.StringBuilder();

	/// <summary>
	/// Wraps the given token stream. Throws ArgumentNullException when
	/// <paramref name="istream"/> is null.
	/// </summary>
	public WSATokenStreamFilter(TokenStream istream)
	{
		if (null == istream)
		{
			throw new ArgumentNullException("istream");
		}
		_istream = istream;
		_pendingTokens = new Queue<IToken>();
	}

	/// <summary>
	/// The wrapped (unfiltered) token stream.
	/// </summary>
	public TokenStream InnerStream
	{
		get { return _istream; }
	}

	// Clears the whitespace accumulation buffer.
	void ResetBuffer()
	{
		_buffer.Length = 0;
	}

	/// <summary>
	/// Returns the next filtered token. When the pending queue is empty,
	/// pulls the next non-whitespace token from the inner stream and runs it
	/// through ProcessNextToken, which enqueues one or more virtual tokens.
	/// </summary>
	public IToken nextToken()
	{
		IToken token;
		if (_pendingTokens.Count == 0)
		{
			token = BufferUntilNextNonWhiteSpaceToken();
			ProcessNextToken(token);
		}
		token = _pendingTokens.Dequeue();
		return token;
	}

	// True when the whitespace buffered since the last meaningful token
	// contained at least one newline character.
	bool BufferHasNewLine()
	{
		if (_buffer.Length == 0) return false;
		var text = _buffer.ToString();
		string[] lines = text.Split(NewLineCharArray);
		return lines.Length > 1;
	}

	/// <summary>
	/// Core state machine: translates a raw token into zero or more queued
	/// tokens, injecting virtual EOL/INDENT/DEDENT/PASS tokens so the
	/// whitespace-agnostic input matches what the standard grammar expects.
	/// Note: may recurse via the look-ahead token it buffers.
	/// </summary>
	void ProcessNextToken(IToken token)
	{
		// New lines are converted to EOS unless they come after
		// indents or a dot (member reference)
		if (!IsLastIndent() && !IsLastDot() && BufferHasNewLine())
		{
			EnqueueEOS(token);
		}

		if (token.Type == BooLexer.COLON)
		{
			Enqueue(token);

			// If whitespace is not being skipped assume it's a block
			var next = BufferUntilNextNonWhiteSpaceToken();
			if (_buffer.Length > 0)
			{
				// Special case for docstrings
				if (next.Type == BooLexer.TRIPLE_QUOTED_STRING)
				{
					ProcessNextToken(next);
					EnqueueIndent(next);
					return;
				}
				EnqueueIndent(token);
			}
			ProcessNextToken(next);
		}
		else if (IsEnding(token.Type))
		{
			IToken next = null;
			// Disambiguate OR/ELSE: they end a block only when NOT followed
			// by a colon (which would start a new block instead).
			if (IsAmbiguous(token.Type))
			{
				next = BufferUntilNextNonWhiteSpaceToken();
				if (next.Type != BooLexer.COLON)
				{
					// Not an ending keyword, just process it as normal
					Enqueue(token);
					ProcessNextToken(next);
					return;
				}
			}

			// Inject a `pass` if there are no statements in a block
			if (IsLastIndent())
			{
				Enqueue(CreateToken(token, BooLexer.PASS, "pass"));
			}

			// Dedent the block
			EnqueueEOS(token);
			EnqueueDedent(token);
			if (token.Type != BooLexer.END)
				Enqueue(token);

			// Process the look-ahead token we used to disambiguate
			if (null != next)
				ProcessNextToken(next);
		}
		else if (token.Type == BooLexer.QQ_BEGIN)
		{
			Enqueue(token);

			// If follows a new line we handle it as a block
			var next = BufferUntilNextNonWhiteSpaceToken();
			_lastQQIndented = BufferHasNewLine();
			if (_lastQQIndented)
			{
				EnqueueIndent(token);
			}
			ProcessNextToken(next);
		}
		else if (token.Type == BooLexer.QQ_END)
		{
			// Close the matching virtual INDENT opened at QQ_BEGIN, if any.
			// NOTE(review): _lastQQIndented is a single flag, so nested QQ
			// expressions appear to share it — confirm nesting is unsupported
			// or handled upstream.
			if (_lastQQIndented)
				EnqueueDedent(token);
			Enqueue(token);
		}
		else if (token.Type == Token.EOF_TYPE)
		{
			// EOF also signals the end of any running statement
			EnqueueEOS(token);
			Enqueue(token);
		}
		else
		{
			Enqueue(token);
		}
	}

	// True when the most recently enqueued token was a virtual INDENT.
	bool IsLastIndent()
	{
		return _lastEnqueuedToken != null && _lastEnqueuedToken.Type == BooLexer.INDENT;
	}

	// True when the most recently enqueued token was a DOT (member reference).
	bool IsLastDot()
	{
		return _lastEnqueuedToken != null && _lastEnqueuedToken.Type == BooLexer.DOT;
	}

	// OR and ELSE can either end a block or start one; they need look-ahead.
	static bool IsAmbiguous(int type)
	{
		return type == BooLexer.OR || type == BooLexer.ELSE;
	}

	// Keywords that (potentially) terminate a block.
	static bool IsEnding(int type)
	{
		return type == BooLexer.END
			|| type == BooLexer.ELSE
			|| type == BooLexer.ELIF
			|| type == BooLexer.EXCEPT
			|| type == BooLexer.ENSURE
			|| type == BooLexer.THEN
			|| type == BooLexer.OR;
	}

	/// <summary>
	/// Reads from the inner stream until a non-whitespace, non-skip token is
	/// found, accumulating the skipped whitespace text into _buffer.
	/// </summary>
	IToken BufferUntilNextNonWhiteSpaceToken()
	{
		ResetBuffer();
		IToken token = null;
		while (true)
		{
			token = _istream.nextToken();
			if (token.Type == Token.SKIP) continue;
			if (token.Type == BooLexer.WS)
			{
				_buffer.Append(token.getText());
				continue;
			}
			break;
		}
		return token;
	}

	// Enqueues a token and remembers it for the IsLast* checks above.
	void Enqueue(IToken token)
	{
		_pendingTokens.Enqueue(token);
		_lastEnqueuedToken = token;
	}

	void EnqueueIndent(IToken prototype)
	{
		Enqueue(CreateToken(prototype, BooLexer.INDENT, "<INDENT>"));
	}

	void EnqueueDedent(IToken prototype)
	{
		Enqueue(CreateToken(prototype, BooLexer.DEDENT, "<DEDENT>"));
	}

	void EnqueueEOS(IToken prototype)
	{
		Enqueue(CreateToken(prototype, BooLexer.EOL, "<EOL>"));
	}

	/// <summary>
	/// Creates a virtual token positioned just after the prototype token, so
	/// error locations stay accurate.
	/// </summary>
	static IToken CreateToken(IToken prototype, int newTokenType, string newTokenText)
	{
		return new BooToken(newTokenType, newTokenText,
			prototype.getFilename(),
			prototype.getLine(),
			prototype.getColumn()+SafeGetLength(prototype.getText()));
	}

	// Length of a possibly-null string (tokens may carry null text).
	static int SafeGetLength(string s)
	{
		return s == null ? 0 : s.Length;
	}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Net.Http;
using System.Net.Security;
using System.Runtime.InteropServices;
using System.Security.Authentication;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Win32.SafeHandles;

internal static partial class Interop
{
    /// <summary>
    /// Managed wrappers around the native OpenSSL shim: context allocation,
    /// handshake, encrypt/decrypt, and certificate plumbing.
    /// </summary>
    internal static partial class OpenSsl
    {
        // Kept in a static field so the delegate passed to native code is not
        // collected while OpenSSL still holds the callback pointer.
        private static Ssl.SslCtxSetVerifyCallback s_verifyClientCertificate = VerifyClientCertificate;

        #region internal methods

        /// <summary>
        /// Returns the channel binding token for the given kind, or null for
        /// kinds other than Unique (parity with the Windows implementation).
        /// </summary>
        internal static SafeChannelBindingHandle QueryChannelBinding(SafeSslHandle context, ChannelBindingKind bindingType)
        {
            Debug.Assert(
                bindingType != ChannelBindingKind.Endpoint,
                "Endpoint binding should be handled by EndpointChannelBindingToken");

            SafeChannelBindingHandle bindingHandle;
            switch (bindingType)
            {
                case ChannelBindingKind.Unique:
                    bindingHandle = new SafeChannelBindingHandle(bindingType);
                    QueryUniqueChannelBinding(context, bindingHandle);
                    break;

                default:
                    // Keeping parity with windows, we should return null in this case.
                    bindingHandle = null;
                    break;
            }

            return bindingHandle;
        }

        /// <summary>
        /// Builds a new SSL context for the requested protocols/policy, loads
        /// the certificate and private key when provided, and (for servers
        /// requiring client certs) installs the verify callback and CA list.
        /// Throws SslException when any native setup step fails.
        /// </summary>
        internal static SafeSslHandle AllocateSslContext(SslProtocols protocols, SafeX509Handle certHandle, SafeEvpPKeyHandle certKeyHandle, EncryptionPolicy policy, bool isServer, bool remoteCertRequired)
        {
            SafeSslHandle context = null;

            IntPtr method = GetSslMethod(protocols);

            using (SafeSslContextHandle innerContext = Ssl.SslCtxCreate(method))
            {
                if (innerContext.IsInvalid)
                {
                    throw CreateSslException(SR.net_allocate_ssl_context_failed);
                }

                // Configure allowed protocols. It's ok to use DangerousGetHandle here without AddRef/Release as we just
                // create the handle, it's rooted by the using, no one else has a reference to it, etc.
                Ssl.SetProtocolOptions(innerContext.DangerousGetHandle(), protocols);

                // The logic in SafeSslHandle.Disconnect is simple because we are doing a quiet
                // shutdown (we aren't negotiating for session close to enable later session
                // restoration).
                //
                // If you find yourself wanting to remove this line to enable bidirectional
                // close-notify, you'll probably need to rewrite SafeSslHandle.Disconnect().
                // https://www.openssl.org/docs/manmaster/ssl/SSL_shutdown.html
                Ssl.SslCtxSetQuietShutdown(innerContext);

                if (!Ssl.SetEncryptionPolicy(innerContext, policy))
                {
                    throw new PlatformNotSupportedException(SR.Format(SR.net_ssl_encryptionpolicy_notsupported, policy));
                }

                bool hasCertificateAndKey =
                    certHandle != null && !certHandle.IsInvalid
                    && certKeyHandle != null && !certKeyHandle.IsInvalid;

                if (hasCertificateAndKey)
                {
                    SetSslCertificate(innerContext, certHandle, certKeyHandle);
                }

                if (remoteCertRequired)
                {
                    Debug.Assert(isServer, "isServer flag should be true");
                    Ssl.SslCtxSetVerify(innerContext, s_verifyClientCertificate);

                    //update the client CA list
                    UpdateCAListFromRootStore(innerContext);
                }

                context = SafeSslHandle.Create(innerContext, isServer);
                Debug.Assert(context != null, "Expected non-null return value from SafeSslHandle.Create");
                if (context.IsInvalid)
                {
                    context.Dispose();
                    throw CreateSslException(SR.net_allocate_ssl_context_failed);
                }

                if (hasCertificateAndKey)
                {
                    // Send the intermediate certificates along with the leaf so the
                    // peer can build the chain; AddRef keeps certHandle alive while
                    // the X509Certificate2 wrapper uses its raw pointer.
                    bool hasCertReference = false;
                    try
                    {
                        certHandle.DangerousAddRef(ref hasCertReference);
                        using (X509Certificate2 cert = new X509Certificate2(certHandle.DangerousGetHandle()))
                        {
                            using (X509Chain chain = TLSCertificateExtensions.BuildNewChain(cert, includeClientApplicationPolicy: false))
                            {
                                if (chain != null && !Ssl.AddExtraChainCertificates(context, chain))
                                    throw CreateSslException(SR.net_ssl_use_cert_failed);
                            }
                        }
                    }
                    finally
                    {
                        if (hasCertReference)
                            certHandle.DangerousRelease();
                    }
                }
            }

            return context;
        }

        /// <summary>
        /// Feeds received bytes (if any) into the input BIO, advances the
        /// handshake, and drains any bytes OpenSSL wants sent to the peer into
        /// <paramref name="sendBuf"/>. Returns true when the handshake is
        /// complete. Throws SslException for errors other than WANT_READ.
        /// </summary>
        internal static bool DoSslHandshake(SafeSslHandle context, byte[] recvBuf, int recvOffset, int recvCount, out byte[] sendBuf, out int sendCount)
        {
            sendBuf = null;
            sendCount = 0;
            if ((recvBuf != null) && (recvCount > 0))
            {
                BioWrite(context.InputBio, recvBuf, recvOffset, recvCount);
            }

            int retVal = Ssl.SslDoHandshake(context);

            if (retVal != 1)
            {
                Exception innerError;
                Ssl.SslErrorCode error = GetSslError(context, retVal, out innerError);

                // WANT_READ just means more peer data is needed; anything else is fatal.
                if ((retVal != -1) || (error != Ssl.SslErrorCode.SSL_ERROR_WANT_READ))
                {
                    throw new SslException(SR.Format(SR.net_ssl_handshake_failed_error, error), innerError);
                }
            }

            sendCount = Crypto.BioCtrlPending(context.OutputBio);
            if (sendCount > 0)
            {
                sendBuf = new byte[sendCount];

                try
                {
                    sendCount = BioRead(context.OutputBio, sendBuf, sendCount);
                }
                finally
                {
                    if (sendCount <= 0)
                    {
                        sendBuf = null;
                        sendCount = 0;
                    }
                }
            }

            bool stateOk = Ssl.IsSslStateOK(context);
            if (stateOk)
            {
                context.MarkHandshakeCompleted();
            }
            return stateOk;
        }

        /// <summary>
        /// Encrypts count bytes of input via SSL_write and drains the resulting
        /// ciphertext from the output BIO into <paramref name="output"/>
        /// (grown when too small). Returns the number of ciphertext bytes, or 0
        /// with errorCode set for ZERO_RETURN/WANT_READ; throws otherwise.
        /// </summary>
        internal static int Encrypt(SafeSslHandle context, byte[] input, int offset, int count, ref byte[] output, out Ssl.SslErrorCode errorCode)
        {
            Debug.Assert(input != null);
            Debug.Assert(offset >= 0);
            Debug.Assert(count > 0);
            Debug.Assert(offset <= input.Length);
            Debug.Assert(input.Length - offset >= count);

            errorCode = Ssl.SslErrorCode.SSL_ERROR_NONE;

            int retVal;
            unsafe
            {
                fixed (byte* fixedBuffer = input)
                {
                    retVal = Ssl.SslWrite(context, fixedBuffer + offset, count);
                }
            }

            if (retVal != count)
            {
                Exception innerError;
                errorCode = GetSslError(context, retVal, out innerError);
                retVal = 0;

                switch (errorCode)
                {
                    // indicate end-of-file
                    case Ssl.SslErrorCode.SSL_ERROR_ZERO_RETURN:
                    case Ssl.SslErrorCode.SSL_ERROR_WANT_READ:
                        break;

                    default:
                        throw new SslException(SR.Format(SR.net_ssl_encrypt_failed, errorCode), innerError);
                }
            }
            else
            {
                int capacityNeeded = Crypto.BioCtrlPending(context.OutputBio);

                if (output == null || output.Length < capacityNeeded)
                {
                    output = new byte[capacityNeeded];
                }

                retVal = BioRead(context.OutputBio, output, capacityNeeded);
            }

            return retVal;
        }

        /// <summary>
        /// Writes count ciphertext bytes into the input BIO, then decrypts
        /// in place into <paramref name="outBuffer"/> via SSL_read. Returns the
        /// plaintext byte count, or 0 with errorCode set (WANT_READ is upgraded
        /// to SSL_ERROR_RENEGOTIATE when a renegotiation is pending).
        /// </summary>
        internal static int Decrypt(SafeSslHandle context, byte[] outBuffer, int count, out Ssl.SslErrorCode errorCode)
        {
            errorCode = Ssl.SslErrorCode.SSL_ERROR_NONE;

            int retVal = BioWrite(context.InputBio, outBuffer, 0, count);

            if (retVal == count)
            {
                // NOTE: outBuffer is reused as both ciphertext source and
                // plaintext destination; SSL_read overwrites it.
                retVal = Ssl.SslRead(context, outBuffer, outBuffer.Length);

                if (retVal > 0)
                {
                    count = retVal;
                }
            }

            if (retVal != count)
            {
                Exception innerError;
                errorCode = GetSslError(context, retVal, out innerError);
                retVal = 0;

                switch (errorCode)
                {
                    // indicate end-of-file
                    case Ssl.SslErrorCode.SSL_ERROR_ZERO_RETURN:
                        break;

                    case Ssl.SslErrorCode.SSL_ERROR_WANT_READ:
                        // update error code to renegotiate if renegotiate is pending, otherwise make it SSL_ERROR_WANT_READ
                        errorCode = Ssl.IsSslRenegotiatePending(context) ?
                                    Ssl.SslErrorCode.SSL_ERROR_RENEGOTIATE :
                                    Ssl.SslErrorCode.SSL_ERROR_WANT_READ;
                        break;

                    default:
                        throw new SslException(SR.Format(SR.net_ssl_decrypt_failed, errorCode), innerError);
                }
            }

            return retVal;
        }

        /// <summary>
        /// Returns the peer's leaf certificate handle.
        /// </summary>
        internal static SafeX509Handle GetPeerCertificate(SafeSslHandle context)
        {
            return Ssl.SslGetPeerCertificate(context);
        }

        /// <summary>
        /// Returns the peer's certificate chain handle.
        /// </summary>
        internal static SafeSharedX509StackHandle GetPeerCertificateChain(SafeSslHandle context)
        {
            return Ssl.SslGetPeerCertChain(context);
        }

        #endregion

        #region private methods

        /// <summary>
        /// Fills the binding handle with the tls-unique value: the Finished
        /// message we received from the peer (or sent, depending on role and
        /// session reuse). Throws when OpenSSL returns an empty value.
        /// </summary>
        private static void QueryUniqueChannelBinding(SafeSslHandle context, SafeChannelBindingHandle bindingHandle)
        {
            bool sessionReused = Ssl.SslSessionReused(context);
            int certHashLength = context.IsServer ^ sessionReused ?
                                 Ssl.SslGetPeerFinished(context, bindingHandle.CertHashPtr, bindingHandle.Length) :
                                 Ssl.SslGetFinished(context, bindingHandle.CertHashPtr, bindingHandle.Length);

            if (0 == certHashLength)
            {
                throw CreateSslException(SR.net_ssl_get_channel_binding_token_failed);
            }

            bindingHandle.SetCertHashLength(certHashLength);
        }

        /// <summary>
        /// Maps the requested protocol flags to an OpenSSL method pointer.
        /// A version-specific method is chosen only when exactly one protocol
        /// is enabled; otherwise the version-flexible SSLv23_method is used.
        /// </summary>
        private static IntPtr GetSslMethod(SslProtocols protocols)
        {
#pragma warning disable 0618 // Ssl2, Ssl3 are deprecated.
            bool ssl2 = (protocols & SslProtocols.Ssl2) == SslProtocols.Ssl2;
            bool ssl3 = (protocols & SslProtocols.Ssl3) == SslProtocols.Ssl3;
#pragma warning restore
            bool tls10 = (protocols & SslProtocols.Tls) == SslProtocols.Tls;
            bool tls11 = (protocols & SslProtocols.Tls11) == SslProtocols.Tls11;
            bool tls12 = (protocols & SslProtocols.Tls12) == SslProtocols.Tls12;

            IntPtr method = Ssl.SslMethods.SSLv23_method; // default
            string methodName = "SSLv23_method";

            if (!ssl2)
            {
                if (!ssl3)
                {
                    if (!tls11 && !tls12)
                    {
                        method = Ssl.SslMethods.TLSv1_method;
                        methodName = "TLSv1_method";
                    }
                    else if (!tls10 && !tls12)
                    {
                        method = Ssl.SslMethods.TLSv1_1_method;
                        methodName = "TLSv1_1_method";
                    }
                    else if (!tls10 && !tls11)
                    {
                        method = Ssl.SslMethods.TLSv1_2_method;
                        methodName = "TLSv1_2_method";
                    }
                }
                else if (!tls10 && !tls11 && !tls12)
                {
                    method = Ssl.SslMethods.SSLv3_method;
                    methodName = "SSLv3_method";
                }
            }

            // A method pointer can be zero when this OpenSSL build omits that
            // protocol version.
            if (IntPtr.Zero == method)
            {
                throw new SslException(SR.Format(SR.net_get_ssl_method_failed, methodName));
            }

            return method;
        }

        private static int VerifyClientCertificate(int preverify_ok, IntPtr x509_ctx_ptr)
        {
            // Full validation is handled after the handshake in VerifyCertificateProperties and the
            // user callback. It's also up to those handlers to decide if a null certificate
            // is appropriate. So just return success to tell OpenSSL that the cert is acceptable,
            // we'll process it after the handshake finishes.
            const int OpenSslSuccess = 1;
            return OpenSslSuccess;
        }

        /// <summary>
        /// Advertises the acceptable client CA issuer names, collected from the
        /// machine and user Root stores (deduplicated by issuer name).
        /// </summary>
        private static void UpdateCAListFromRootStore(SafeSslContextHandle context)
        {
            using (SafeX509NameStackHandle nameStack = Crypto.NewX509NameStack())
            {
                //maintaining the HashSet of Certificate's issuer name to keep track of duplicates
                HashSet<string> issuerNameHashSet = new HashSet<string>();

                //Enumerate Certificates from LocalMachine and CurrentUser root store
                AddX509Names(nameStack, StoreLocation.LocalMachine, issuerNameHashSet);
                AddX509Names(nameStack, StoreLocation.CurrentUser, issuerNameHashSet);

                Ssl.SslCtxSetClientCAList(context, nameStack);

                // The handle ownership has been transferred into the CTX.
                nameStack.SetHandleAsInvalid();
            }
        }

        /// <summary>
        /// Pushes the issuer name of every not-yet-seen Root-store certificate
        /// at <paramref name="storeLocation"/> onto the native name stack.
        /// </summary>
        private static void AddX509Names(SafeX509NameStackHandle nameStack, StoreLocation storeLocation, HashSet<string> issuerNameHashSet)
        {
            using (var store = new X509Store(StoreName.Root, storeLocation))
            {
                store.Open(OpenFlags.ReadOnly);

                foreach (var certificate in store.Certificates)
                {
                    //Check if issuer name is already present
                    //Avoiding duplicate names
                    if (!issuerNameHashSet.Add(certificate.Issuer))
                    {
                        continue;
                    }

                    using (SafeX509Handle certHandle = Crypto.X509UpRef(certificate.Handle))
                    {
                        using (SafeX509NameHandle nameHandle = Crypto.DuplicateX509Name(Crypto.X509GetIssuerName(certHandle)))
                        {
                            if (Crypto.PushX509NameStackField(nameStack, nameHandle))
                            {
                                // The handle ownership has been transferred into the STACK_OF(X509_NAME).
                                nameHandle.SetHandleAsInvalid();
                            }
                            else
                            {
                                throw new CryptographicException(SR.net_ssl_x509Name_push_failed_error);
                            }
                        }
                    }
                }
            }
        }

        // Reads exactly count bytes from the BIO into buffer; throws on short read.
        private static int BioRead(SafeBioHandle bio, byte[] buffer, int count)
        {
            Debug.Assert(buffer != null);
            Debug.Assert(count >= 0);
            Debug.Assert(buffer.Length >= count);

            int bytes = Crypto.BioRead(bio, buffer, count);
            if (bytes != count)
            {
                throw CreateSslException(SR.net_ssl_read_bio_failed_error);
            }
            return bytes;
        }

        // Writes exactly count bytes from buffer into the BIO; throws on short write.
        private static int BioWrite(SafeBioHandle bio, byte[] buffer, int offset, int count)
        {
            Debug.Assert(buffer != null);
            Debug.Assert(offset >= 0);
            Debug.Assert(count >= 0);
            Debug.Assert(buffer.Length >= offset + count);

            int bytes;
            unsafe
            {
                fixed (byte* bufPtr = buffer)
                {
                    bytes = Ssl.BioWrite(bio, bufPtr + offset, count);
                }
            }

            if (bytes != count)
            {
                throw CreateSslException(SR.net_ssl_write_bio_failed_error);
            }
            return bytes;
        }

        /// <summary>
        /// Translates an SSL_* return value into an SslErrorCode and, when
        /// possible, a more specific inner exception describing the cause.
        /// </summary>
        private static Ssl.SslErrorCode GetSslError(SafeSslHandle context, int result, out Exception innerError)
        {
            ErrorInfo lastErrno = Sys.GetLastErrorInfo(); // cache it before we make more P/Invoke calls, just in case we need it

            Ssl.SslErrorCode retVal = Ssl.SslGetError(context, result);
            switch (retVal)
            {
                case Ssl.SslErrorCode.SSL_ERROR_SYSCALL:
                    // Some I/O error occurred
                    innerError =
                        Crypto.ErrPeekError() != 0 ? Crypto.CreateOpenSslCryptographicException() : // crypto error queue not empty
                        result == 0 ? new EndOfStreamException() : // end of file that violates protocol
                        result == -1 && lastErrno.Error != Error.SUCCESS ? new IOException(lastErrno.GetErrorMessage(), lastErrno.RawErrno) : // underlying I/O error
                        null; // no additional info available
                    break;

                case Ssl.SslErrorCode.SSL_ERROR_SSL:
                    // OpenSSL failure occurred.  The error queue contains more details.
                    innerError = Interop.Crypto.CreateOpenSslCryptographicException();
                    break;

                default:
                    // No additional info available.
                    innerError = null;
                    break;
            }
            return retVal;
        }

        /// <summary>
        /// Loads the certificate and private key into the context and verifies
        /// that they match; throws SslException on any native failure.
        /// </summary>
        private static void SetSslCertificate(SafeSslContextHandle contextPtr, SafeX509Handle certPtr, SafeEvpPKeyHandle keyPtr)
        {
            Debug.Assert(certPtr != null && !certPtr.IsInvalid, "certPtr != null && !certPtr.IsInvalid");
            Debug.Assert(keyPtr != null && !keyPtr.IsInvalid, "keyPtr != null && !keyPtr.IsInvalid");

            int retVal = Ssl.SslCtxUseCertificate(contextPtr, certPtr);

            if (1 != retVal)
            {
                throw CreateSslException(SR.net_ssl_use_cert_failed);
            }

            retVal = Ssl.SslCtxUsePrivateKey(contextPtr, keyPtr);

            if (1 != retVal)
            {
                throw CreateSslException(SR.net_ssl_use_private_key_failed);
            }

            //check private key
            retVal = Ssl.SslCtxCheckPrivateKey(contextPtr);

            if (1 != retVal)
            {
                throw CreateSslException(SR.net_ssl_check_private_key_failed);
            }
        }

        /// <summary>
        /// Creates an SslException from the current OpenSSL error queue,
        /// formatting the native reason string into the given message.
        /// </summary>
        internal static SslException CreateSslException(string message)
        {
            ulong errorVal = Crypto.ErrGetError();
            string msg = SR.Format(message, Marshal.PtrToStringAnsi(Crypto.ErrReasonErrorString(errorVal)));
            return new SslException(msg, (int)errorVal);
        }

        #endregion

        #region Internal class

        /// <summary>
        /// Exception type for failures surfaced by this interop layer; HResult
        /// carries the native OpenSSL error code when available.
        /// </summary>
        internal sealed class SslException : Exception
        {
            public SslException(string inputMessage)
                : base(inputMessage)
            {
            }

            public SslException(string inputMessage, Exception ex)
                : base(inputMessage, ex)
            {
            }

            public SslException(string inputMessage, int error)
                : this(inputMessage)
            {
                HResult = error;
            }

            public SslException(int error)
                : this(SR.Format(SR.net_generic_operation_failed, error))
            {
                HResult = error;
            }
        }

        #endregion
    }
}
using NBitcoin;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using WalletWasabi.Blockchain.Keys;
using WalletWasabi.Tests.Helpers;
using Xunit;

namespace WalletWasabi.Tests.UnitTests.BlockchainAnalysis
{
	public class PubKeyReuseAnonScoreTests
	{
		[Fact]
		public void AddressReusePunishment()
		{
			// When the same key shows up on both sides of the analysis, the output
			// side must not gain anonymity — and reuse itself gets punished.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();
			var reusedKey = BitcoinFactory.CreateHdPubKey(keyManager);

			var ownInputs = new[] { (Money.Coins(1.1m), 100, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var ownOutputs = new[] { (Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, reusedKey) };
			var tx = BitcoinFactory.CreateSmartTransaction(9, Enumerable.Repeat(Money.Coins(1m), 9), ownInputs, ownOutputs);

			// Give the reused key something smaller than 109 (the anonymity set the output would otherwise reach).
			reusedKey.AnonymitySet = 30;

			analyzer.Analyze(tx);

			// Strictly below 30 on both sides: reuse is punished, not merely capped.
			Assert.All(tx.WalletInputs, coin => Assert.True(coin.HdPubKey.AnonymitySet < 30));
			Assert.True(tx.WalletOutputs.First().HdPubKey.AnonymitySet < 30);
		}

		[Fact]
		public void SelfSpendReuse()
		{
			// Same punishment applies to a self-spend with no foreign participants.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();
			var reusedKey = BitcoinFactory.CreateHdPubKey(keyManager);

			var ownInputs = new[] { (Money.Coins(1.1m), 100, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var ownOutputs = new[] { (Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, reusedKey) };
			var tx = BitcoinFactory.CreateSmartTransaction(0, Enumerable.Empty<Money>(), ownInputs, ownOutputs);

			reusedKey.AnonymitySet = 30;

			analyzer.Analyze(tx);

			// Strictly below 30 everywhere, because reuse also gets punishment.
			Assert.All(tx.WalletInputs, coin => Assert.True(coin.HdPubKey.AnonymitySet < 30));
			Assert.True(tx.WalletOutputs.First().HdPubKey.AnonymitySet < 30);
		}

		[Fact]
		public void AddressReuseIrrelevantInNormalSpend()
		{
			// A normal spend exposes ownership of the inputs and the change anyway,
			// so every anonymity set collapses to 1 regardless of address reuse.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();
			var sharedKey = BitcoinFactory.CreateHdPubKey(keyManager);

			var ownInputs = new[]
			{
				(Money.Coins(1.1m), 100, sharedKey),
				(Money.Coins(1.2m), 100, sharedKey),
				(Money.Coins(1.3m), 100, sharedKey),
				(Money.Coins(1.4m), 100, sharedKey)
			};
			var ownOutputs = new[] { (Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var tx = BitcoinFactory.CreateSmartTransaction(0, Enumerable.Repeat(Money.Coins(1m), 9), ownInputs, ownOutputs);

			analyzer.Analyze(tx);

			Assert.All(tx.WalletInputs, coin => Assert.Equal(1, coin.HdPubKey.AnonymitySet));
			Assert.Equal(1, tx.WalletOutputs.First().HdPubKey.AnonymitySet);
		}

		[Fact]
		public void InputSideAddressReuseHaveNoConsolidationPunishmentInSelfSpend()
		{
			// Consolidating coins on an already-reused key cannot hurt more than the reuse itself did.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();
			var sharedKey = BitcoinFactory.CreateHdPubKey(keyManager);

			var ownInputs = new[]
			{
				(Money.Coins(1.1m), 100, sharedKey),
				(Money.Coins(1.2m), 100, sharedKey),
				(Money.Coins(1.3m), 100, sharedKey),
				(Money.Coins(1.4m), 100, sharedKey)
			};
			var ownOutputs = new[] { (Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var tx = BitcoinFactory.CreateSmartTransaction(0, Enumerable.Empty<Money>(), ownInputs, ownOutputs);

			analyzer.Analyze(tx);

			// Anonymity sets survive the consolidation unchanged.
			Assert.All(tx.WalletInputs, coin => Assert.Equal(100, coin.HdPubKey.AnonymitySet));
			Assert.Equal(100, tx.WalletOutputs.First().HdPubKey.AnonymitySet);
		}

		[Fact]
		public void InputSideAddressReuseHaveNoConsolidationPunishmentInCoinJoin()
		{
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();
			var sharedKey = BitcoinFactory.CreateHdPubKey(keyManager);

			var ownInputs = new[]
			{
				(Money.Coins(1.1m), 100, sharedKey),
				(Money.Coins(1.2m), 100, sharedKey),
				(Money.Coins(1.3m), 100, sharedKey),
				(Money.Coins(1.4m), 100, sharedKey)
			};
			var ownOutputs = new[] { (Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var tx = BitcoinFactory.CreateSmartTransaction(9, Enumerable.Repeat(Money.Coins(1m), 9), ownInputs, ownOutputs);

			analyzer.Analyze(tx);

			// Inputs keep their 100; the coinjoin output gains the 9 equal foreign outputs on top.
			Assert.All(tx.WalletInputs, coin => Assert.Equal(100, coin.HdPubKey.AnonymitySet));
			Assert.Equal(109, tx.WalletOutputs.First().HdPubKey.AnonymitySet);
		}

		[Fact]
		public void InputOutputSideAddress()
		{
			// The same key on input and output side: the output neither gains nor loses anonymity.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var sharedKey = BitcoinFactory.CreateHdPubKey(ServiceFactory.CreateKeyManager());

			var ownInputs = new[] { (Money.Coins(1.1m), 100, sharedKey) };
			var ownOutputs = new[] { (Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, sharedKey) };
			var tx = BitcoinFactory.CreateSmartTransaction(9, Enumerable.Repeat(Money.Coins(1m), 9), ownInputs, ownOutputs);

			analyzer.Analyze(tx);

			Assert.All(tx.WalletInputs, coin => Assert.Equal(100, coin.HdPubKey.AnonymitySet));
			Assert.Equal(100, tx.WalletOutputs.First().HdPubKey.AnonymitySet);
		}

		[Fact]
		public void InputOutputSidePreviouslyUsedAddress()
		{
			// Reuse on the output side must drag the input anonymity sets down as well.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var reusedKey = BitcoinFactory.CreateHdPubKey(ServiceFactory.CreateKeyManager());

			var ownInputs = new[] { (Money.Coins(1.1m), 100, BitcoinFactory.CreateHdPubKey(ServiceFactory.CreateKeyManager())) };
			var ownOutputs = new[] { (Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, reusedKey) };
			var tx = BitcoinFactory.CreateSmartTransaction(9, Enumerable.Repeat(Money.Coins(1m), 9), ownInputs, ownOutputs);

			reusedKey.AnonymitySet = 30;

			analyzer.Analyze(tx);

			Assert.True(tx.WalletOutputs.First().HdPubKey.AnonymitySet < 30);
			Assert.All(tx.WalletInputs, coin => Assert.True(coin.HdPubKey.AnonymitySet < 30));
		}

		[Fact]
		public void OutputSideAddressReusePunished()
		{
			// Two own outputs landing on one key get consolidated and punished.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();
			var sharedKey = BitcoinFactory.CreateHdPubKey(keyManager);

			var foreignOutputs = Enumerable.Repeat(Money.Coins(1m), 9).Concat(Enumerable.Repeat(Money.Coins(2m), 7));
			var ownInputs = new[] { (Money.Coins(1.1m), 100, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var ownOutputs = new[]
			{
				(Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, sharedKey),
				(Money.Coins(2m), HdPubKey.DefaultHighAnonymitySet, sharedKey)
			};
			var tx = BitcoinFactory.CreateSmartTransaction(9, foreignOutputs, ownInputs, ownOutputs);

			analyzer.Analyze(tx);

			Assert.All(tx.WalletInputs, coin => Assert.Equal(100, coin.HdPubKey.AnonymitySet));

			// Without reuse the two levels would reach 109 and 106; reuse pulls both below 106.
			Assert.All(tx.WalletOutputs.Select(coin => coin.HdPubKey.AnonymitySet), anonSet => Assert.True(anonSet < 106));
		}

		[Fact]
		public void OutputSideAddressReuseDoesntPunishedMoreThanInheritance()
		{
			// However harsh the reuse punishment, outputs never fall below the inherited input anonymity set.
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();
			var sharedKey = BitcoinFactory.CreateHdPubKey(keyManager);

			var foreignOutputs = Enumerable.Repeat(Money.Coins(1m), 9)
				.Concat(Enumerable.Repeat(Money.Coins(2m), 8))
				.Concat(Enumerable.Repeat(Money.Coins(3m), 7))
				.Concat(Enumerable.Repeat(Money.Coins(4m), 6))
				.Concat(Enumerable.Repeat(Money.Coins(5m), 5))
				.Concat(Enumerable.Repeat(Money.Coins(6m), 4));
			var ownInputs = new[] { (Money.Coins(1.1m), 100, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var ownOutputs = new[]
			{
				(Money.Coins(1m), HdPubKey.DefaultHighAnonymitySet, sharedKey),
				(Money.Coins(2m), HdPubKey.DefaultHighAnonymitySet, sharedKey),
				(Money.Coins(3m), HdPubKey.DefaultHighAnonymitySet, sharedKey),
				(Money.Coins(4m), HdPubKey.DefaultHighAnonymitySet, sharedKey),
				(Money.Coins(5m), HdPubKey.DefaultHighAnonymitySet, sharedKey),
				(Money.Coins(6m), HdPubKey.DefaultHighAnonymitySet, sharedKey)
			};
			var tx = BitcoinFactory.CreateSmartTransaction(9, foreignOutputs, ownInputs, ownOutputs);

			analyzer.Analyze(tx);

			Assert.All(tx.WalletInputs, coin => Assert.Equal(100, coin.HdPubKey.AnonymitySet));

			// 100 is the input anonymity set, so no output may sink below it.
			Assert.All(tx.WalletOutputs.Select(coin => coin.HdPubKey.AnonymitySet), anonSet => Assert.True(anonSet >= 100));
		}

		[Fact]
		public void OutputSideAddressReuseBySomeoneElse()
		{
			// Another participant reusing their own address must not contribute to our anonymity gain.
			// https://github.com/zkSNACKs/WalletWasabi/pull/4724/commits/6f5893ca57e35eadb6e20f164bdf0696bb14eea1#r530847724
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var keyManager = ServiceFactory.CreateKeyManager();

			var equalOutputAmount = Money.Coins(1m);
			var reusedTxOut = new TxOut(equalOutputAmount, new Key());
			var foreignOutputs = Common.Repeat(() => new TxOut(equalOutputAmount, new Key()), 7).Concat(new[] { reusedTxOut, reusedTxOut });

			var ownInputs = new[] { (Money.Coins(1.1m), 1, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var ownOutputs = new[] { (equalOutputAmount, HdPubKey.DefaultHighAnonymitySet, BitcoinFactory.CreateHdPubKey(keyManager)) };
			var tx = BitcoinFactory.CreateSmartTransaction(9, foreignOutputs, ownInputs, ownOutputs);

			analyzer.Analyze(tx);

			Assert.All(tx.WalletInputs, coin => Assert.Equal(1, coin.HdPubKey.AnonymitySet));

			// Would be 10 without the duplicate TxOut; the pair of reused scripts only counts once, so 8.
			Assert.Equal(8, tx.WalletOutputs.First().HdPubKey.AnonymitySet);
		}

		[Fact]
		public void CoinJoinSend()
		{
			var analyzer = ServiceFactory.CreateBlockchainAnalyzer();
			var tx = BitcoinFactory.CreateSmartTransaction(2, 2, 40, 0);

			// Analyze must not throw when the wallet owns no outputs at all.
			analyzer.Analyze(tx);
		}
	}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Security;

namespace System.IO
{
    // Exposes instance methods for creating, moving, deleting and enumerating
    // through directories and subdirectories.
    public sealed class DirectoryInfo : FileSystemInfo
    {
        // Creates a DirectoryInfo from a user-supplied (possibly relative) path.
        [System.Security.SecuritySafeCritical]
        public DirectoryInfo(String path)
        {
            if (path == null)
                throw new ArgumentNullException("path");
            Contract.EndContractBlock();

            // An empty-ish/current-dir path is displayed as "." instead of the raw input.
            OriginalPath = PathHelpers.ShouldReviseDirectoryPathToCurrent(path) ? "." : path;
            FullPath = PathHelpers.GetFullPathInternal(path);
            DisplayPath = GetDisplayName(OriginalPath, FullPath);
        }

        // Internal constructor taking an already fully-qualified path; skips normalization.
        [System.Security.SecuritySafeCritical]
        internal DirectoryInfo(String fullPath, IFileSystemObject fileSystemObject) : base(fileSystemObject)
        {
            Debug.Assert(PathInternal.GetRootLength(fullPath) > 0, "fullPath must be fully qualified!");

            // Fast path when we know a DirectoryInfo exists.
            OriginalPath = Path.GetFileName(fullPath);

            FullPath = fullPath;
            DisplayPath = GetDisplayName(OriginalPath, FullPath);
        }

        // The name of the directory (last path component), or "." for the current directory.
        public override String Name
        {
            get
            {
                // DisplayPath is dir name for coreclr
                Debug.Assert(GetDirName(FullPath) == DisplayPath || DisplayPath == ".");
                return DisplayPath;
            }
        }

        // The parent directory, or null when this is a root.
        public DirectoryInfo Parent
        {
            [System.Security.SecuritySafeCritical]
            get
            {
                string s = FullPath;

                // FullPath might end in either "parent\child" or "parent\child\", and in either case we want
                // the parent of child, not the child. Trim off an ending directory separator if there is one,
                // but don't mangle the root.
                if (!PathHelpers.IsRoot(s))
                {
                    s = PathHelpers.TrimEndingDirectorySeparator(s);
                }

                string parentName = Path.GetDirectoryName(s);
                return parentName != null ?
                    new DirectoryInfo(parentName, null) :
                    null;
            }
        }

        // Creates (on disk) a subdirectory of this directory and returns its DirectoryInfo.
        [System.Security.SecuritySafeCritical]
        public DirectoryInfo CreateSubdirectory(String path)
        {
            if (path == null)
                throw new ArgumentNullException("path");
            Contract.EndContractBlock();

            return CreateSubdirectoryHelper(path);
        }

        [System.Security.SecurityCritical]  // auto-generated
        private DirectoryInfo CreateSubdirectoryHelper(String path)
        {
            Contract.Requires(path != null);

            PathHelpers.ThrowIfEmptyOrRootedPath(path);

            String newDirs = Path.Combine(FullPath, path);
            String fullPath = Path.GetFullPath(newDirs);

            // The combined path must still live under this directory; reject escapes like "..\..".
            if (0 != String.Compare(FullPath, 0, fullPath, 0, FullPath.Length, PathInternal.GetComparison()))
            {
                throw new ArgumentException(SR.Format(SR.Argument_InvalidSubPath, path, DisplayPath), "path");
            }

            FileSystem.Current.CreateDirectory(fullPath);

            // Check for read permission to directory we hand back by calling this constructor.
            return new DirectoryInfo(fullPath);
        }

        // Creates this directory on disk (no-op if it already exists).
        [System.Security.SecurityCritical]
        public void Create()
        {
            FileSystem.Current.CreateDirectory(FullPath);
        }

        // Tests if the given path refers to an existing DirectoryInfo on disk.
        //
        // Your application must have Read permission to the directory's
        // contents.
        //
        public override bool Exists
        {
            [System.Security.SecuritySafeCritical]  // auto-generated
            get
            {
                try
                {
                    // Any failure to query the file system is reported as "does not exist".
                    return FileSystemObject.Exists;
                }
                catch
                {
                    return false;
                }
            }
        }

        // Returns an array of Files in the current DirectoryInfo matching the
        // given search criteria (ie, "*.txt").
        [SecurityCritical]
        public FileInfo[] GetFiles(String searchPattern)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            Contract.EndContractBlock();

            return InternalGetFiles(searchPattern, SearchOption.TopDirectoryOnly);
        }

        // Returns an array of Files in the current DirectoryInfo matching the
        // given search criteria (ie, "*.txt").
        public FileInfo[] GetFiles(String searchPattern, SearchOption searchOption)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
                throw new ArgumentOutOfRangeException("searchOption", SR.ArgumentOutOfRange_Enum);
            Contract.EndContractBlock();

            return InternalGetFiles(searchPattern, searchOption);
        }

        // Returns an array of Files in the current DirectoryInfo matching the
        // given search criteria (ie, "*.txt").
        private FileInfo[] InternalGetFiles(String searchPattern, SearchOption searchOption)
        {
            Contract.Requires(searchPattern != null);
            Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);

            // Eagerly materializes the lazy enumeration into an array.
            IEnumerable<FileInfo> enumerable = (IEnumerable<FileInfo>)FileSystem.Current.EnumerateFileSystemInfos(FullPath, searchPattern, searchOption, SearchTarget.Files);
            return EnumerableHelpers.ToArray(enumerable);
        }

        // Returns an array of Files in the DirectoryInfo specified by path
        public FileInfo[] GetFiles()
        {
            return InternalGetFiles("*", SearchOption.TopDirectoryOnly);
        }

        // Returns an array of Directories in the current directory.
        public DirectoryInfo[] GetDirectories()
        {
            return InternalGetDirectories("*", SearchOption.TopDirectoryOnly);
        }

        // Returns an array of strongly typed FileSystemInfo entries in the path with the
        // given search criteria (ie, "*.txt").
        public FileSystemInfo[] GetFileSystemInfos(String searchPattern)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            Contract.EndContractBlock();

            return InternalGetFileSystemInfos(searchPattern, SearchOption.TopDirectoryOnly);
        }

        // Returns an array of strongly typed FileSystemInfo entries in the path with the
        // given search criteria (ie, "*.txt").
        public FileSystemInfo[] GetFileSystemInfos(String searchPattern, SearchOption searchOption)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
                throw new ArgumentOutOfRangeException("searchOption", SR.ArgumentOutOfRange_Enum);
            Contract.EndContractBlock();

            return InternalGetFileSystemInfos(searchPattern, searchOption);
        }

        // Returns an array of strongly typed FileSystemInfo entries in the path with the
        // given search criteria (ie, "*.txt").
        private FileSystemInfo[] InternalGetFileSystemInfos(String searchPattern, SearchOption searchOption)
        {
            Contract.Requires(searchPattern != null);
            Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);

            // SearchTarget.Both yields files and directories in a single pass.
            IEnumerable<FileSystemInfo> enumerable = FileSystem.Current.EnumerateFileSystemInfos(FullPath, searchPattern, searchOption, SearchTarget.Both);
            return EnumerableHelpers.ToArray(enumerable);
        }

        // Returns an array of strongly typed FileSystemInfo entries which will contain a listing
        // of all the files and directories.
        public FileSystemInfo[] GetFileSystemInfos()
        {
            return InternalGetFileSystemInfos("*", SearchOption.TopDirectoryOnly);
        }

        // Returns an array of Directories in the current DirectoryInfo matching the
        // given search criteria (ie, "System*" could match the System & System32
        // directories).
        public DirectoryInfo[] GetDirectories(String searchPattern)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            Contract.EndContractBlock();

            return InternalGetDirectories(searchPattern, SearchOption.TopDirectoryOnly);
        }

        // Returns an array of Directories in the current DirectoryInfo matching the
        // given search criteria (ie, "System*" could match the System & System32
        // directories).
        public DirectoryInfo[] GetDirectories(String searchPattern, SearchOption searchOption)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
                throw new ArgumentOutOfRangeException("searchOption", SR.ArgumentOutOfRange_Enum);
            Contract.EndContractBlock();

            return InternalGetDirectories(searchPattern, searchOption);
        }

        // Returns an array of Directories in the current DirectoryInfo matching the
        // given search criteria (ie, "System*" could match the System & System32
        // directories).
        private DirectoryInfo[] InternalGetDirectories(String searchPattern, SearchOption searchOption)
        {
            Contract.Requires(searchPattern != null);
            Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);

            IEnumerable<DirectoryInfo> enumerable = (IEnumerable<DirectoryInfo>)FileSystem.Current.EnumerateFileSystemInfos(FullPath, searchPattern, searchOption, SearchTarget.Directories);
            return EnumerableHelpers.ToArray(enumerable);
        }

        // Lazily enumerates all immediate subdirectories.
        public IEnumerable<DirectoryInfo> EnumerateDirectories()
        {
            return InternalEnumerateDirectories("*", SearchOption.TopDirectoryOnly);
        }

        // Lazily enumerates immediate subdirectories matching searchPattern.
        public IEnumerable<DirectoryInfo> EnumerateDirectories(String searchPattern)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            Contract.EndContractBlock();

            return InternalEnumerateDirectories(searchPattern, SearchOption.TopDirectoryOnly);
        }

        // Lazily enumerates subdirectories matching searchPattern, optionally recursing.
        public IEnumerable<DirectoryInfo> EnumerateDirectories(String searchPattern, SearchOption searchOption)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
                throw new ArgumentOutOfRangeException("searchOption", SR.ArgumentOutOfRange_Enum);
            Contract.EndContractBlock();

            return InternalEnumerateDirectories(searchPattern, searchOption);
        }

        private IEnumerable<DirectoryInfo> InternalEnumerateDirectories(String searchPattern, SearchOption searchOption)
        {
            Contract.Requires(searchPattern != null);
            Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);

            return (IEnumerable<DirectoryInfo>)FileSystem.Current.EnumerateFileSystemInfos(FullPath, searchPattern, searchOption, SearchTarget.Directories);
        }

        // Lazily enumerates all files directly inside this directory.
        public IEnumerable<FileInfo> EnumerateFiles()
        {
            return InternalEnumerateFiles("*", SearchOption.TopDirectoryOnly);
        }

        // Lazily enumerates files matching searchPattern in this directory.
        public IEnumerable<FileInfo> EnumerateFiles(String searchPattern)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            Contract.EndContractBlock();

            return InternalEnumerateFiles(searchPattern, SearchOption.TopDirectoryOnly);
        }

        // Lazily enumerates files matching searchPattern, optionally recursing.
        public IEnumerable<FileInfo> EnumerateFiles(String searchPattern, SearchOption searchOption)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
                throw new ArgumentOutOfRangeException("searchOption", SR.ArgumentOutOfRange_Enum);
            Contract.EndContractBlock();

            return InternalEnumerateFiles(searchPattern, searchOption);
        }

        private IEnumerable<FileInfo> InternalEnumerateFiles(String searchPattern, SearchOption searchOption)
        {
            Contract.Requires(searchPattern != null);
            Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);

            return (IEnumerable<FileInfo>)FileSystem.Current.EnumerateFileSystemInfos(FullPath, searchPattern, searchOption, SearchTarget.Files);
        }

        // Lazily enumerates all files and subdirectories directly inside this directory.
        public IEnumerable<FileSystemInfo> EnumerateFileSystemInfos()
        {
            return InternalEnumerateFileSystemInfos("*", SearchOption.TopDirectoryOnly);
        }

        // Lazily enumerates files and subdirectories matching searchPattern.
        public IEnumerable<FileSystemInfo> EnumerateFileSystemInfos(String searchPattern)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            Contract.EndContractBlock();

            return InternalEnumerateFileSystemInfos(searchPattern, SearchOption.TopDirectoryOnly);
        }

        // Lazily enumerates files and subdirectories matching searchPattern, optionally recursing.
        public IEnumerable<FileSystemInfo> EnumerateFileSystemInfos(String searchPattern, SearchOption searchOption)
        {
            if (searchPattern == null)
                throw new ArgumentNullException("searchPattern");
            if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
                throw new ArgumentOutOfRangeException("searchOption", SR.ArgumentOutOfRange_Enum);
            Contract.EndContractBlock();

            return InternalEnumerateFileSystemInfos(searchPattern, searchOption);
        }

        private IEnumerable<FileSystemInfo> InternalEnumerateFileSystemInfos(String searchPattern, SearchOption searchOption)
        {
            Contract.Requires(searchPattern != null);
            Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);

            return FileSystem.Current.EnumerateFileSystemInfos(FullPath, searchPattern, searchOption, SearchTarget.Both);
        }

        // Returns the root portion of the given path. The resulting string
        // consists of those rightmost characters of the path that constitute the
        // root of the path. Possible patterns for the resulting string are: An
        // empty string (a relative path on the current drive), "\" (an absolute
        // path on the current drive), "X:" (a relative path on a given drive,
        // where X is the drive letter), "X:\" (an absolute path on a given drive),
        // and "\\server\share" (a UNC path for a given server and share name).
        // The resulting string is null if path is null.
        //
        public DirectoryInfo Root
        {
            [System.Security.SecuritySafeCritical]
            get
            {
                String rootPath = Path.GetPathRoot(FullPath);

                return new DirectoryInfo(rootPath);
            }
        }

        // Moves this directory (and its contents) to the destination path.
        // Source and destination must differ and share the same root.
        [System.Security.SecuritySafeCritical]
        public void MoveTo(String destDirName)
        {
            if (destDirName == null)
                throw new ArgumentNullException("destDirName");
            if (destDirName.Length == 0)
                throw new ArgumentException(SR.Argument_EmptyFileName, "destDirName");
            Contract.EndContractBlock();

            // Normalize both paths to have a trailing separator so comparisons are symmetric.
            String fullDestDirName = PathHelpers.GetFullPathInternal(destDirName);
            if (fullDestDirName[fullDestDirName.Length - 1] != Path.DirectorySeparatorChar)
                fullDestDirName = fullDestDirName + PathHelpers.DirectorySeparatorCharAsString;

            String fullSourcePath;
            if (FullPath.Length > 0 && FullPath[FullPath.Length - 1] == Path.DirectorySeparatorChar)
                fullSourcePath = FullPath;
            else
                fullSourcePath = FullPath + PathHelpers.DirectorySeparatorCharAsString;

            if (PathInternal.IsDirectoryTooLong(fullSourcePath))
                throw new PathTooLongException(SR.IO_PathTooLong);

            if (PathInternal.IsDirectoryTooLong(fullDestDirName))
                throw new PathTooLongException(SR.IO_PathTooLong);

            StringComparison pathComparison = PathInternal.GetComparison();
            if (String.Equals(fullSourcePath, fullDestDirName, pathComparison))
                throw new IOException(SR.IO_SourceDestMustBeDifferent);

            String sourceRoot = Path.GetPathRoot(fullSourcePath);
            String destinationRoot = Path.GetPathRoot(fullDestDirName);

            if (!String.Equals(sourceRoot, destinationRoot, pathComparison))
                throw new IOException(SR.IO_SourceDestMustHaveSameRoot);

            FileSystem.Current.MoveDirectory(FullPath, fullDestDirName);

            // Update this instance to point at the new location.
            FullPath = fullDestDirName;
            OriginalPath = destDirName;
            DisplayPath = GetDisplayName(OriginalPath, FullPath);

            // Flush any cached information about the directory.
            Invalidate();
        }

        // Deletes this directory; fails if it is not empty.
        [System.Security.SecuritySafeCritical]
        public override void Delete()
        {
            FileSystem.Current.RemoveDirectory(FullPath, false);
        }

        // Deletes this directory, optionally removing all contents recursively.
        [System.Security.SecuritySafeCritical]
        public void Delete(bool recursive)
        {
            FileSystem.Current.RemoveDirectory(FullPath, recursive);
        }

        // Returns the fully qualified path
        public override String ToString()
        {
            return DisplayPath;
        }

        // Chooses the user-visible name: "." for a current-dir path, otherwise the directory name.
        private static String GetDisplayName(String originalPath, String fullPath)
        {
            Debug.Assert(originalPath != null);
            Debug.Assert(fullPath != null);

            return PathHelpers.ShouldReviseDirectoryPathToCurrent(originalPath) ?
                "." :
                GetDirName(fullPath);
        }

        // Returns the last path component of fullPath, or fullPath itself for a root.
        private static String GetDirName(String fullPath)
        {
            Debug.Assert(fullPath != null);

            return PathHelpers.IsRoot(fullPath) ?
                fullPath :
                Path.GetFileName(PathHelpers.TrimEndingDirectorySeparator(fullPath));
        }
    }
}
using System; using System.Collections.Generic; using System.Linq; using GitTfs.Core; using GitTfs.Core.TfsInterop; using GitTfs.Util; using Xunit; using Rhino.Mocks; using Rhino.Mocks.Constraints; namespace GitTfs.Test.Core { public class ChangeSieveTests : BaseTest { #region Base fixture // Sets up a ChangeSieve for testing. public abstract class BaseFixture { public BaseFixture() { // Make this remote act like it's mapped to $/Project Remote.Stub(r => r.GetPathInGitRepo(null)) .Constraints(Is.Anything()) .Do(new Function<string, string>(path => path != null && path.StartsWith("$/Project/") ? path.Replace("$/Project/", "") : null)); // Make this remote ignore any path that includes "ignored". Remote.Stub(r => r.ShouldSkip(null)) .Constraints(Is.Anything()) .Do(new Function<string, bool>(s => s.Contains("ignored"))); } private ChangeSieve _changeSieve; public ChangeSieve Subject { get { return _changeSieve ?? (_changeSieve = new ChangeSieve(Changeset, new PathResolver(Remote, "", InitialTree))); } } private Dictionary<string, GitObject> _initialTree; public virtual Dictionary<string, GitObject> InitialTree { get { return _initialTree ?? (_initialTree = new Dictionary<string, GitObject>(StringComparer.InvariantCultureIgnoreCase)); } } private FakeChangeset _changeset; public virtual FakeChangeset Changeset { get { return _changeset ?? (_changeset = new FakeChangeset()); } } public class FakeChangeset : IChangeset { public IVersionControlServer VersionControlServer { get; set; } public int ChangesetId { get; set; } public string Comment { get; set; } public DateTime CreationDate { get; set; } public string Committer { get; set; } public IChange[] Changes { get; set; } public void Get(ITfsWorkspace workspace, IEnumerable<IChange> changes, Action<Exception> ignorableErrorHandler) { throw new NotImplementedException(); } } private IGitTfsRemote _remote; public virtual IGitTfsRemote Remote { get { return _remote ?? 
(_remote = BuildRemote()); } } protected virtual IGitTfsRemote BuildRemote() { return _mocks.StrictMock<IGitTfsRemote>(); } protected MockRepository _mocks = new MockRepository(); public MockRepository Mocks { get { return _mocks; } } } // A base class for ChangeSieve test classes. public class Base<FixtureClass> : IDisposable where FixtureClass : BaseFixture, new() { protected readonly FixtureClass BaseFixture; protected ChangeSieve Subject { get { return BaseFixture.Subject; } } protected IChange[] Changes { get { return BaseFixture.Changeset.Changes; } } public Base() { BaseFixture = new FixtureClass(); BaseFixture.Mocks.ReplayAll(); } public void Dispose() { BaseFixture.Mocks.VerifyAll(); } protected void AssertChanges(IEnumerable<ApplicableChange> actualChanges, params ApplicableChange[] expectedChanges) { Assert.Equal(Stringify(expectedChanges), Stringify(actualChanges)); } private string Stringify(IEnumerable<ApplicableChange> changes) { return string.Join("\n", changes.Select(c => "" + c.Type + ":" + c.GitPath)); } } // A stub IChange implementation. 
private class FakeChange : IChange, IItem, IVersionControlServer { public static IChange Add(string serverItem, TfsChangeType additionalChange = 0, int deletionId = 0) { return new FakeChange(TfsChangeType.Add | additionalChange, TfsItemType.File, serverItem, deletionId); } public static IChange Edit(string serverItem, TfsChangeType additionalChange = 0) { return new FakeChange(TfsChangeType.Edit | additionalChange, TfsItemType.File, serverItem); } public static IChange Delete(string serverItem, TfsChangeType additionalChange = 0) { return new FakeChange(TfsChangeType.Delete | additionalChange, TfsItemType.File, serverItem); } public static IChange Rename(string serverItem, string from, TfsChangeType additionalChange = 0, int deletionId = 0) { return new FakeChange(TfsChangeType.Rename | additionalChange, TfsItemType.File, serverItem, deletionId, from); } public static IChange AddDir(string serverItem, TfsChangeType additionalChange = 0) { return new FakeChange(TfsChangeType.Add | additionalChange, TfsItemType.Folder, serverItem); } public static IChange Branch(string serverItem) { return new FakeChange(TfsChangeType.Branch, TfsItemType.File, serverItem); } public static IChange Merge(string serverItem) { return new FakeChange(TfsChangeType.Merge, TfsItemType.File, serverItem); } public static IChange MergeNewFile(string serverItem) { return new FakeChange(TfsChangeType.Merge | TfsChangeType.Branch, TfsItemType.File, serverItem); } public static IChange DeleteDir(string serverItem) { return new FakeChange(TfsChangeType.Delete, TfsItemType.Folder, serverItem); } private const int ChangesetId = 10; private readonly TfsChangeType _tfsChangeType; private readonly TfsItemType _tfsItemType; private readonly string _serverItem; private readonly int _deletionId; private readonly string _renamedFrom; private readonly int _itemId; private static int _maxItemId = 0; private FakeChange(TfsChangeType tfsChangeType, TfsItemType itemType, string serverItem, int deletionId = 0, 
string renamedFrom = null) { _tfsChangeType = tfsChangeType; _tfsItemType = itemType; _serverItem = serverItem; _deletionId = deletionId; _renamedFrom = renamedFrom; _itemId = ++_maxItemId; } TfsChangeType IChange.ChangeType { get { return _tfsChangeType; } } IItem IChange.Item { get { return this; } } IVersionControlServer IItem.VersionControlServer { get { return this; } } int IItem.ChangesetId { get { return ChangesetId; } } string IItem.ServerItem { get { return _serverItem; } } int IItem.DeletionId { get { return _deletionId; ; } } TfsItemType IItem.ItemType { get { return _tfsItemType; } } int IItem.ItemId { get { return _itemId; } } long IItem.ContentLength { get { throw new NotImplementedException(); } } TemporaryFile IItem.DownloadFile() { throw new NotImplementedException(); } IItem IVersionControlServer.GetItem(int itemId, int changesetNumber) { if (itemId == _itemId && changesetNumber == ChangesetId - 1 && _tfsChangeType.HasFlag(TfsChangeType.Rename)) return new PreviousItem(_renamedFrom); throw new NotImplementedException(); } private class PreviousItem : IItem { private readonly string _oldName; public PreviousItem(string oldName) { _oldName = oldName; } IVersionControlServer IItem.VersionControlServer { get { throw new NotImplementedException(); } } int IItem.ChangesetId { get { throw new NotImplementedException(); } } string IItem.ServerItem { get { return _oldName; } } int IItem.DeletionId { get { throw new NotImplementedException(); } } TfsItemType IItem.ItemType { get { throw new NotImplementedException(); } } int IItem.ItemId { get { throw new NotImplementedException(); } } long IItem.ContentLength { get { throw new NotImplementedException(); } } TemporaryFile IItem.DownloadFile() { throw new NotImplementedException(); } } IItem IVersionControlServer.GetItem(string itemPath, int changesetNumber) { throw new NotImplementedException(); } IItem[] IVersionControlServer.GetItems(string itemPath, int changesetNumber, TfsRecursionType recursionType) { 
throw new NotImplementedException(); } IEnumerable<IChangeset> IVersionControlServer.QueryHistory(string path, int version, int deletionId, TfsRecursionType recursion, string user, int versionFrom, int versionTo, int maxCount, bool includeChanges, bool slotMode, bool includeDownloadInfo) { throw new NotImplementedException(); } } #endregion public class WithNoChanges : Base<WithNoChanges.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[0]; } } [Fact] public void HasEmptyChangesToFetch() { Assert.Empty(Subject.GetChangesToFetch()); } [Fact] public void HasEmptyChangesToApply() { AssertChanges(Subject.GetChangesToApply() /* expect an empty list */); } } public class WithAddsAndDeletes : Base<WithAddsAndDeletes.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[] { /*0*/FakeChange.Add("$/Project/file1.txt"), /*1*/FakeChange.Delete("$/Project/file2.txt"), /*2*/FakeChange.Add("$/Project/file3.txt"), /*3*/FakeChange.Delete("$/Project/file4.txt"), /*4*/FakeChange.Rename("$/Project/file5.txt", from: "$/Project/oldfile5.txt"), }; } } [Fact] public void FetchesAllChanges() { var fetchChanges = Subject.GetChangesToFetch().ToArray(); Assert.Equal(5, fetchChanges.Length); Assert.Contains(Changes[0], fetchChanges); Assert.Contains(Changes[1], fetchChanges); Assert.Contains(Changes[2], fetchChanges); Assert.Contains(Changes[3], fetchChanges); Assert.Contains(Changes[4], fetchChanges); } [Fact] public void SplitsRenamesAndPutsDeletesFirst() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Delete("file2.txt"), ApplicableChange.Delete("file4.txt"), ApplicableChange.Delete("oldfile5.txt"), ApplicableChange.Update("file1.txt"), ApplicableChange.Update("file3.txt"), ApplicableChange.Update("file5.txt")); } } public class WithIgnoredThings : Base<WithIgnoredThings.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[] { 
FakeChange.Add("$/Project/0-ignored.txt"), FakeChange.Delete("$/Project/1-ignored.txt"), FakeChange.Add("$/Project/2-included.txt"), FakeChange.Delete("$/Project/3-included.txt"), FakeChange.Rename("$/Project/4-ignored.txt", from: "$/Project/4-wasignored.txt"), FakeChange.Rename("$/Project/5-ignored.txt", from: "$/Project/5-wasincluded.txt"), FakeChange.Rename("$/Project/6-included.txt", from: "$/Project/6-wasignored.txt"), }; } } [Fact] public void FetchesAllExceptIgnored() { var fetchChanges = Subject.GetChangesToFetch().ToArray(); Assert.Equal(3, fetchChanges.Length); Assert.Contains(Changes[2], fetchChanges); Assert.Contains(Changes[3], fetchChanges); Assert.Contains(Changes[6], fetchChanges); } [Fact] public void AppliesDeletesFirst() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Delete("1-ignored.txt"), ApplicableChange.Delete("3-included.txt"), ApplicableChange.Delete("4-wasignored.txt"), ApplicableChange.Delete("5-wasincluded.txt"), ApplicableChange.Delete("6-wasignored.txt"), ApplicableChange.Update("2-included.txt"), ApplicableChange.Update("6-included.txt")); } } public class SkipDeletedThings : Base<SkipDeletedThings.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[] { FakeChange.Rename("$/Project/file1.txt", from: "$/Project/oldfile1.txt", deletionId: 33), FakeChange.Add("$/Project/deletedfile1.txt", deletionId: 33), // this seems like nonsense. 
}; } } [Fact] public void DoesNotApplyDeletedRenamedFile() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Delete("oldfile1.txt")); } } public class DirsAndPathsOutsideTheProject : Base<DirsAndPathsOutsideTheProject.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[] { FakeChange.AddDir("$/Project/dir1"), FakeChange.Add("$/Project2/outsidefile.txt"), FakeChange.Rename("$/Project2/movedoutside.txt", from: "$/Project/startedinside.txt"), FakeChange.Rename("$/Project/movedinside.txt", from: "$/Project2/startedoutside.txt"), }; } } [Fact] public void DoesNotFetchFilesOutside() { Assert.Equal(new string[] { "$/Project/dir1", "$/Project/movedinside.txt" }, Subject.GetChangesToFetch().Select(c => c.Item.ServerItem)); } [Fact] public void OnlyAppliesChangesInsideTheProject() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Delete("startedinside.txt"), ApplicableChange.Update("movedinside.txt")); } } public class WithExistingItems : Base<WithExistingItems.Fixture> { public class Fixture : BaseFixture { public Fixture() { InitialTree.Add("dir1", new GitObject { Path = "dir1" }); InitialTree.Add("dir1/file1.exe", new GitObject { Path = "dir1/file1.exe", Mode = "100755".ToFileMode() }); InitialTree.Add("dir1/file4.txt", new GitObject { Path = "dir1/file4.txt", Mode = "100644".ToFileMode() }); InitialTree.Add("dir2", new GitObject { Path = "dir2" }); InitialTree.Add("dir2/file2.txt", new GitObject { Path = "dir2/file2.txt" }); Changeset.Changes = new IChange[] { FakeChange.Add("$/Project/DIR2/file3.txt"), FakeChange.Delete("$/Project/DIR2/FILE2.txt"), FakeChange.Edit("$/Project/dir1/file1.exe"), FakeChange.Edit("$/Project/dir1/file4.txt"), }; } } [Fact] public void UpdatesPathCasing() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Delete("dir2/file2.txt"), ApplicableChange.Update("dir2/file3.txt"), ApplicableChange.Update("dir1/file1.exe"), ApplicableChange.Update("dir1/file4.txt")); } 
[Fact] public void PreservesFileMode() { var toApply = Subject.GetChangesToApply().ToArray(); Assert.Equal("100644", toApply[1].Mode.ToModeString()); // new file Assert.Equal("100755", toApply[2].Mode.ToModeString()); // existing executable file Assert.Equal("100644", toApply[3].Mode.ToModeString()); // existing normal file } } public class SkipBranchedThings : Base<SkipBranchedThings.Fixture> { public class Fixture : BaseFixture { public Fixture() { InitialTree.Add("file6.txt", new GitObject() { Commit = "SHA" }); Changeset.Changes = new[] { /*0*/FakeChange.Branch("$/Project/file6.txt"), // Do not include, because it was there before. /*1*/FakeChange.Branch("$/Project/file7.txt"), // Include, because it was not there before. /*2*/FakeChange.Edit("$/Project/file8.txt", TfsChangeType.Branch), // Include, because it's not just branched. /*3*/FakeChange.Rename("$/Project/file9.txt", from: "$/Project/oldfile9.txt", additionalChange: TfsChangeType.Branch), // Include, because it's not just branched. }; } } [Fact] public void DoesNotFetchBranchedFile() { var fetchChanges = Subject.GetChangesToFetch().ToArray(); Assert.Equal(3, fetchChanges.Length); // one is missing // Changes[0] (branch of file6.txt) is missing Assert.Contains(Changes[1], fetchChanges); Assert.Contains(Changes[2], fetchChanges); Assert.Contains(Changes[3], fetchChanges); } [Fact] public void DoesNotApplyBranchedFile() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Delete("oldfile9.txt"), ApplicableChange.Update("file7.txt"), ApplicableChange.Update("file8.txt"), ApplicableChange.Update("file9.txt")); } } public class SkipMergedThings : Base<SkipMergedThings.Fixture> { public class Fixture : BaseFixture { public Fixture() { InitialTree.Add("file6.txt", new GitObject() { Commit = "SHA" }); InitialTree.Add("file7.txt", new GitObject() { Commit = "SHA" }); Changeset.Changes = new[] { /*0*/FakeChange.Merge("$/Project/file6.txt"), // Do not include, because it was there before. 
/*1*/FakeChange.MergeNewFile("$/Project/file7.txt"), // Do not include, because it was in branch before. /*2*/FakeChange.Merge("$/Project/file8.txt"), // Include, because it was not there before. /*3*/FakeChange.Edit("$/Project/file9.txt", TfsChangeType.Merge), // Include, because it's not just branched. /*4*/FakeChange.Rename("$/Project/file10.txt", from: "$/Project/oldfile10.txt", additionalChange: TfsChangeType.Merge), // Include, because it's not just branched. }; } } [Fact] public void DoesNotFetchBranchedFile() { var fetchChanges = Subject.GetChangesToFetch().ToArray(); Assert.Equal(3, fetchChanges.Length); // one is missing // Changes[0] (branch of file6.txt) is missing // Changes[1] (branch of file7.txt) is missing Assert.Contains(Changes[2], fetchChanges); Assert.Contains(Changes[3], fetchChanges); Assert.Contains(Changes[4], fetchChanges); } [Fact] public void DoesNotApplyBranchedFile() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Delete("oldfile10.txt"), ApplicableChange.Update("file8.txt"), ApplicableChange.Update("file9.txt"), ApplicableChange.Update("file10.txt")); } } public class WithDeleteMainFolderBranchAndSubItems : Base<WithDeleteMainFolderBranchAndSubItems.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[] { FakeChange.Delete("$/Project/file1.txt"), FakeChange.Delete("$/Project/file2.txt"), FakeChange.Delete("$/Project/file3.txt"), FakeChange.DeleteDir("$/Project/"), FakeChange.Delete("$/Project/file4.txt"), FakeChange.Delete("$/Project/file5.txt"), }; } } [Fact] public void WhenMainBranchFolderIsDeleted_ThenKeepFileInGitCommitByDoingNothing() { Assert.Empty(Subject.GetChangesToApply()); } [Fact] public void DoNotFetch() { // Because we're not going to apply changes, don't waste time fetching any. 
Assert.Empty(Subject.GetChangesToFetch()); } } public class WithDeleteOtherFolder : Base<WithDeleteOtherFolder.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[] { FakeChange.Edit("$/Project/file1.txt"), FakeChange.DeleteDir("$/Projec"), FakeChange.Delete("$/Projec/file.txt"), FakeChange.DeleteDir("$/Project2"), FakeChange.Delete("$/Project2/file.txt"), }; } } [Fact] public void IncludesChangesInThisProject() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Update("file1.txt")); } [Fact] public void FetchesChangesInThisProject() { Assert.Equal(new string[] { "$/Project/file1.txt" }, Subject.GetChangesToFetch().Select(c => c.Item.ServerItem)); } } public class RenamedFromDeleted : Base<RenamedFromDeleted.Fixture> { public class Fixture : BaseFixture { public Fixture() { Changeset.Changes = new IChange[] { new RenamedFromDeletedChange("$/Project/file1.txt"), }; } } [Fact] public void FetchesItemRenamedAfterDelete() { AssertChanges(Subject.GetChangesToApply(), ApplicableChange.Update("file1.txt")); } // A Change/Item that only throws an exception when you try to query its history. public class RenamedFromDeletedChange : IChange, IItem, IVersionControlServer { // This is the interesting part of this implementation. // The TFS client throws an exception of type Microsoft.TeamFoundation.VersionControl.Client.ItemNotFoundException. // ChangeSieve doesn't have a reference to the TFS client libs, so it can't catch that exact exception. // This class, therefore, throws an exception that is of a type that ChangeSieve can't specifically catch. 
IEnumerable<IChangeset> IVersionControlServer.QueryHistory(string path, int version, int deletionId, TfsRecursionType recursion, string user, int versionFrom, int versionTo, int maxCount, bool includeChanges, bool slotMode, bool includeDownloadInfo) { throw new AnExceptionTypeThatYouCannotCatch(); } private class AnExceptionTypeThatYouCannotCatch : Exception { } // The rest of the implementation is pretty straight-forward. private readonly string _serverItem; // Accept a name so that the name is more obviously matched between the Fixture // and the assertion. public RenamedFromDeletedChange(string serverItem) { _serverItem = serverItem; } TfsChangeType IChange.ChangeType { get { return TfsChangeType.Rename; } } IItem IChange.Item { get { return this; } } IVersionControlServer IItem.VersionControlServer { get { return this; } } int IItem.ChangesetId { get { return 100; } } string IItem.ServerItem { get { return _serverItem; } } int IItem.DeletionId { get { return 0; } } TfsItemType IItem.ItemType { get { return TfsItemType.File; } } int IItem.ItemId { get { return 200; } } long IItem.ContentLength { get { return 0; } } TemporaryFile IItem.DownloadFile() { return null; } IItem IVersionControlServer.GetItem(int itemId, int changesetNumber) { return null; } IItem IVersionControlServer.GetItem(string itemPath, int changesetNumber) { return null; } IItem[] IVersionControlServer.GetItems(string itemPath, int changesetNumber, TfsRecursionType recursionType) { return null; } } } } }
using J2N.Text;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Diagnostics;
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;

namespace Lucene.Net.Analysis
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Automaton = Lucene.Net.Util.Automaton.Automaton;
    using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata;
    using BasicOperations = Lucene.Net.Util.Automaton.BasicOperations;

    // Tests for graph-shaped TokenStreams (tokens with posLength > 1) and for
    // TokenStreamToAutomaton's conversion of such streams into automata.
    [TestFixture]
    public class TestGraphTokenizers : BaseTokenStreamTestCase
    {
        // Makes a graph TokenStream from the string; separate
        // positions with single space, multiple tokens at the same
        // position with /, and add optional position length with
        // :.  EG "a b c" is a simple chain, "a/x b c" adds 'x'
        // over 'a' at position 0 with posLen=1, "a/x:3 b c" adds
        // 'x' over a with posLen=3.  Tokens are in normal-form!
        // So, offsets are computed based on the first token at a
        // given position.  NOTE: each token must be a single
        // character!  We assume this when computing offsets...

        // NOTE: all input tokens must be length 1!!!  This means
        // you cannot turn on MockCharFilter when random
        // testing...

        private class GraphTokenizer : Tokenizer
        {
            // Tokens parsed from the whole input; built lazily on first IncrementToken.
            internal IList<Token> tokens;
            // Index of the next token to emit.
            internal int upto;
            // Total number of chars read from the input (used for end offset in End()).
            internal int inputLength;

            internal readonly ICharTermAttribute termAtt;
            internal readonly IOffsetAttribute offsetAtt;
            internal readonly IPositionIncrementAttribute posIncrAtt;
            internal readonly IPositionLengthAttribute posLengthAtt;

            public GraphTokenizer(TextReader input)
                : base(input)
            {
                termAtt = AddAttribute<ICharTermAttribute>();
                offsetAtt = AddAttribute<IOffsetAttribute>();
                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
                posLengthAtt = AddAttribute<IPositionLengthAttribute>();
            }

            public override void Reset()
            {
                base.Reset();
                // Discard parsed tokens; they are rebuilt from the (new) input
                // on the next IncrementToken call.
                tokens = null;
                upto = 0;
            }

            public sealed override bool IncrementToken()
            {
                if (tokens == null)
                {
                    FillTokens();
                }
                //System.out.println("graphTokenizer: incr upto=" + upto + " vs " + tokens.size());
                if (upto == tokens.Count)
                {
                    //System.out.println("  END @ " + tokens.size());
                    return false;
                }
                Token t = tokens[upto++];
                //System.out.println("  return token=" + t);
                ClearAttributes();
                termAtt.Append(t.ToString());
                offsetAtt.SetOffset(t.StartOffset, t.EndOffset);
                posIncrAtt.PositionIncrement = t.PositionIncrement;
                posLengthAtt.PositionLength = t.PositionLength;
                return true;
            }

            public override void End()
            {
                base.End();

                // NOTE: somewhat... hackish, but we need this to
                // satisfy BTSTC:

                int lastOffset;
                if (tokens != null && tokens.Count > 0)
                {
                    lastOffset = tokens[tokens.Count - 1].EndOffset;
                }
                else
                {
                    lastOffset = 0;
                }
                offsetAtt.SetOffset(CorrectOffset(lastOffset), CorrectOffset(inputLength));
            }

            // Reads the entire input and parses it into Tokens per the mini
            // syntax described on the class: space separates positions, '/'
            // stacks tokens at one position, ':N' sets posLength.
            internal virtual void FillTokens()
            {
                StringBuilder sb = new StringBuilder();
                char[] buffer = new char[256];
                while (true)
                {
                    int count = m_input.Read(buffer, 0, buffer.Length);

                    //.NET TextReader.Read(buff, int, int) returns 0, not -1 on no chars
                    // but in some cases, such as MockCharFilter, it overloads read and returns -1
                    // so we should handle both 0 and -1 values
                    if (count <= 0)
                    {
                        break;
                    }
                    sb.Append(buffer, 0, count);
                    //System.out.println("got count=" + count);
                }
                //System.out.println("fillTokens: " + sb);

                inputLength = sb.Length;

                string[] parts = sb.ToString().Split(' ').TrimEnd();

                tokens = new List<Token>();
                int pos = 0;
                int maxPos = -1;
                int offset = 0;
                //System.out.println("again");
                foreach (string part in parts)
                {
                    string[] overlapped = part.Split('/').TrimEnd();
                    bool firstAtPos = true;
                    int minPosLength = int.MaxValue;
                    foreach (string part2 in overlapped)
                    {
                        int colonIndex = part2.IndexOf(':');
                        string token;
                        int posLength;
                        if (colonIndex != -1)
                        {
                            token = part2.Substring(0, colonIndex);
                            posLength = Convert.ToInt32(part2.Substring(1 + colonIndex));
                        }
                        else
                        {
                            token = part2;
                            posLength = 1;
                        }
                        maxPos = Math.Max(maxPos, pos + posLength);
                        minPosLength = Math.Min(minPosLength, posLength);
                        // Each position spans 2 chars of synthetic offset (char + separator).
                        Token t = new Token(token, offset, offset + 2 * posLength - 1);
                        t.PositionLength = posLength;
                        t.PositionIncrement = firstAtPos ? 1 : 0;
                        firstAtPos = false;
                        //System.out.println("  add token=" + t + " startOff=" + t.StartOffset + " endOff=" + t.EndOffset);
                        tokens.Add(t);
                    }
                    pos += minPosLength;
                    offset = 2 * pos;
                }
                if (Debugging.AssertsEnabled) Debugging.Assert(maxPos <= pos, "input string mal-formed: posLength>1 tokens hang over the end");
            }
        }

        [Test]
        public virtual void TestMockGraphTokenFilterBasic()
        {
            for (int iter = 0; iter < 10 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                    TokenStream t2 = new MockGraphTokenFilter(Random, t);
                    return new TokenStreamComponents(t, t2);
                });

                CheckAnalysisConsistency(Random, a, false, "a b c d e f g h i j k");
            }
        }

        [Test]
        public virtual void TestMockGraphTokenFilterOnGraphInput()
        {
            for (int iter = 0; iter < 100 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new GraphTokenizer(reader);
                    TokenStream t2 = new MockGraphTokenFilter(Random, t);
                    return new TokenStreamComponents(t, t2);
                });

                CheckAnalysisConsistency(Random, a, false, "a/x:3 c/y:2 d e f/z:4 g h i j k");
            }
        }

        // Just deletes (leaving hole) token 'a':
        private sealed class RemoveATokens : TokenFilter
        {
            // Accumulated position increment from deleted 'a' tokens; added to
            // the next surviving token (or to End()) so the hole is preserved.
            internal int pendingPosInc;

            internal readonly ICharTermAttribute termAtt;
            internal readonly IPositionIncrementAttribute posIncAtt;

            public RemoveATokens(TokenStream @in)
                : base(@in)
            {
                termAtt = AddAttribute<ICharTermAttribute>();
                posIncAtt = AddAttribute<IPositionIncrementAttribute>();
            }

            public override void Reset()
            {
                base.Reset();
                pendingPosInc = 0;
            }

            public override void End()
            {
                base.End();
                posIncAtt.PositionIncrement = pendingPosInc + posIncAtt.PositionIncrement;
            }

            public override bool IncrementToken()
            {
                while (true)
                {
                    bool gotOne = m_input.IncrementToken();
                    if (!gotOne)
                    {
                        return false;
                    }
                    else if (termAtt.ToString().Equals("a", StringComparison.Ordinal))
                    {
                        // Drop this token but remember its position increment.
                        pendingPosInc += posIncAtt.PositionIncrement;
                    }
                    else
                    {
                        posIncAtt.PositionIncrement = pendingPosInc + posIncAtt.PositionIncrement;
                        pendingPosInc = 0;
                        return true;
                    }
                }
            }
        }

        [Test]
        public virtual void TestMockGraphTokenFilterBeforeHoles()
        {
            for (int iter = 0; iter < 100 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                    TokenStream t2 = new MockGraphTokenFilter(Random, t);
                    TokenStream t3 = new RemoveATokens(t2);
                    return new TokenStreamComponents(t, t3);
                });

                Random random = Random;
                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k");
                CheckAnalysisConsistency(random, a, false, "x y a b c d e f g h i j k");
                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a");
                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a x y");
            }
        }

        [Test]
        public virtual void TestMockGraphTokenFilterAfterHoles()
        {
            for (int iter = 0; iter < 100 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                    TokenStream t2 = new RemoveATokens(t);
                    TokenStream t3 = new MockGraphTokenFilter(Random, t2);
                    return new TokenStreamComponents(t, t3);
                });

                Random random = Random;
                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k");
                CheckAnalysisConsistency(random, a, false, "x y a b c d e f g h i j k");
                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a");
                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a x y");
            }
        }

        [Test]
        public virtual void TestMockGraphTokenFilterRandom()
        {
            for (int iter = 0; iter < 10 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                    TokenStream t2 = new MockGraphTokenFilter(Random, t);
                    return new TokenStreamComponents(t, t2);
                });

                Random random = Random;
                CheckRandomData(random, a, 5, AtLeast(100));
            }
        }

        // Two MockGraphTokenFilters
        [Test]
        public virtual void TestDoubleMockGraphTokenFilterRandom()
        {
            for (int iter = 0; iter < 10 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                    TokenStream t1 = new MockGraphTokenFilter(Random, t);
                    TokenStream t2 = new MockGraphTokenFilter(Random, t1);
                    return new TokenStreamComponents(t, t2);
                });

                Random random = Random;
                CheckRandomData(random, a, 5, AtLeast(100));
            }
        }

        // Smoke test: the single-arg MockTokenizer constructor works.
        [Test]
        public void TestMockTokenizerCtor()
        {
            var sr = new StringReader("Hello");
            var mt = new MockTokenizer(sr);
        }

        [Test]
        public virtual void TestMockGraphTokenFilterBeforeHolesRandom()
        {
            for (int iter = 0; iter < 10 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                    TokenStream t1 = new MockGraphTokenFilter(Random, t);
                    TokenStream t2 = new MockHoleInjectingTokenFilter(Random, t1);
                    return new TokenStreamComponents(t, t2);
                });

                Random random = Random;
                CheckRandomData(random, a, 5, AtLeast(100));
            }
        }

        [Test]
        public virtual void TestMockGraphTokenFilterAfterHolesRandom()
        {
            for (int iter = 0; iter < 10 * RandomMultiplier; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }

                // Make new analyzer each time, because MGTF has fixed
                // seed:
                Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
                {
                    Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                    TokenStream t1 = new MockHoleInjectingTokenFilter(Random, t);
                    TokenStream t2 = new MockGraphTokenFilter(Random, t1);
                    return new TokenStreamComponents(t, t2);
                });

                Random random = Random;
                CheckRandomData(random, a, 5, AtLeast(100));
            }
        }

        // Helper: a Token with the given posInc/posLength and zero offsets.
        private static Token Token(string term, int posInc, int posLength)
        {
            Token t = new Token(term, 0, 0);
            t.PositionIncrement = posInc;
            t.PositionLength = posLength;
            return t;
        }

        // Helper: a Token with explicit offsets as well.
        private static Token Token(string term, int posInc, int posLength, int startOffset, int endOffset)
        {
            Token t = new Token(term, startOffset, endOffset);
            t.PositionIncrement = posInc;
            t.PositionLength = posLength;
            return t;
        }

        [Test]
        public virtual void TestSingleToken()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton expected = BasicAutomata.MakeString("abc");
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestMultipleHoles()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("b", 3, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton expected = Join(S2a("a"), SEP_A, HOLE_A, SEP_A, HOLE_A, SEP_A, S2a("b"));
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestSynOverMultipleHoles()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("x", 0, 3), Token("b", 3, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton a1 = Join(S2a("a"), SEP_A, HOLE_A, SEP_A, HOLE_A, SEP_A, S2a("b"));
            Automaton a2 = Join(S2a("x"), SEP_A, S2a("b"));
            Automaton expected = BasicOperations.Union(a1, a2);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        // for debugging!
        /*
        private static void toDot(Automaton a) throws IOException {
          final String s = a.toDot();
          Writer w = new OutputStreamWriter(new FileOutputStream("/x/tmp/out.dot"));
          w.write(s);
          w.close();
          System.out.println("TEST: saved to /x/tmp/out.dot");
        }
        */

        // Single-char automata for the position separator and hole markers
        // that TokenStreamToAutomaton inserts between positions.
        private static readonly Automaton SEP_A = BasicAutomata.MakeChar(TokenStreamToAutomaton.POS_SEP);
        private static readonly Automaton HOLE_A = BasicAutomata.MakeChar(TokenStreamToAutomaton.HOLE);

        // Concatenates the strings' automata with SEP_A between each pair.
        private Automaton Join(params string[] strings)
        {
            IList<Automaton> @as = new List<Automaton>();
            foreach (string s in strings)
            {
                @as.Add(BasicAutomata.MakeString(s));
                @as.Add(SEP_A);
            }
            // Drop the trailing separator.
            @as.RemoveAt(@as.Count - 1);
            return BasicOperations.Concatenate(@as);
        }

        private Automaton Join(params Automaton[] @as)
        {
            return BasicOperations.Concatenate(@as);
        }

        private Automaton S2a(string s)
        {
            return BasicAutomata.MakeString(s);
        }

        [Test]
        public virtual void TestTwoTokens()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("def", 1, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton expected = Join("abc", "def");

            //toDot(actual);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestHole()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("def", 2, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);

            Automaton expected = Join(S2a("abc"), SEP_A, HOLE_A, SEP_A, S2a("def"));

            //toDot(actual);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestOverlappedTokensSausage()
        {
            // Two tokens on top of each other (sausage):
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("xyz", 0, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton a1 = BasicAutomata.MakeString("abc");
            Automaton a2 = BasicAutomata.MakeString("xyz");
            Automaton expected = BasicOperations.Union(a1, a2);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestOverlappedTokensLattice()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("xyz", 0, 2), Token("def", 1, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton a1 = BasicAutomata.MakeString("xyz");
            Automaton a2 = Join("abc", "def");

            Automaton expected = BasicOperations.Union(a1, a2);
            //toDot(actual);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestSynOverHole()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("X", 0, 2), Token("b", 2, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton a1 = BasicOperations.Union(Join(S2a("a"), SEP_A, HOLE_A), BasicAutomata.MakeString("X"));
            Automaton expected = BasicOperations.Concatenate(a1, Join(SEP_A, S2a("b")));
            //toDot(actual);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestSynOverHole2()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("xyz", 1, 1), Token("abc", 0, 3), Token("def", 2, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton expected = BasicOperations.Union(Join(S2a("xyz"), SEP_A, HOLE_A, SEP_A, S2a("def")), BasicAutomata.MakeString("abc"));
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestOverlappedTokensLattice2()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("xyz", 0, 3), Token("def", 1, 1), Token("ghi", 1, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton a1 = BasicAutomata.MakeString("xyz");
            Automaton a2 = Join("abc", "def", "ghi");
            Automaton expected = BasicOperations.Union(a1, a2);
            //toDot(actual);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        [Test]
        public virtual void TestToDot()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1, 0, 4) });
            StringWriter w = new StringWriter();
            (new TokenStreamToDot("abcd", ts, (TextWriter)(w))).ToDot();
            Assert.IsTrue(w.ToString().IndexOf("abc / abcd", StringComparison.Ordinal) != -1);
        }

        [Test]
        public virtual void TestStartsWithHole()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 2, 1) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton expected = Join(HOLE_A, SEP_A, S2a("abc"));
            //toDot(actual);
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }

        // TODO: testEndsWithHole... but we need posInc to set in TS.end()

        [Test]
        public virtual void TestSynHangingOverEnd()
        {
            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("X", 0, 10) });
            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
            Automaton expected = BasicOperations.Union(BasicAutomata.MakeString("a"), BasicAutomata.MakeString("X"));
            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
        }
    }
}
using System;
using System.Threading;

namespace Amib.Threading
{
    #region Delegates

    /// <summary>
    /// A delegate that represents the method to run as the work item
    /// </summary>
    /// <param name="state">A state object for the method to run</param>
    public delegate object WorkItemCallback(object state);

    /// <summary>
    /// A delegate to call after the WorkItemCallback completed
    /// </summary>
    /// <param name="wir">The work item result object</param>
    public delegate void PostExecuteWorkItemCallback(IWorkItemResult wir);

    /// <summary>
    /// A delegate to call after the WorkItemCallback completed
    /// </summary>
    /// <param name="wir">The work item result object</param>
    public delegate void PostExecuteWorkItemCallback<TResult>(IWorkItemResult<TResult> wir);

    /// <summary>
    /// A delegate to call when a WorkItemsGroup becomes idle
    /// </summary>
    /// <param name="workItemsGroup">A reference to the WorkItemsGroup that became idle</param>
    public delegate void WorkItemsGroupIdleHandler(IWorkItemsGroup workItemsGroup);

    /// <summary>
    /// A delegate to call after a thread is created, but before
    /// its first use.
    /// </summary>
    public delegate void ThreadInitializationHandler();

    /// <summary>
    /// A delegate to call when a thread is about to exit, after
    /// it no longer belongs to the pool.
    /// </summary>
    public delegate void ThreadTerminationHandler();

    #endregion

    #region WorkItem Priority

    /// <summary>
    /// Defines the available priorities of a work item.
    /// The higher the priority a work item has, the sooner
    /// it will be executed.
    /// </summary>
    public enum WorkItemPriority
    {
        Lowest,
        BelowNormal,
        Normal,
        AboveNormal,
        Highest,
    }

    #endregion

    #region IWorkItemsGroup interface

    /// <summary>
    /// IWorkItemsGroup interface
    /// Created by SmartThreadPool.CreateWorkItemsGroup()
    /// </summary>
    public interface IWorkItemsGroup
    {
        /// <summary>
        /// Get/Set the name of the WorkItemsGroup
        /// </summary>
        string Name { get; set; }

        /// <summary>
        /// Get/Set the maximum number of work items that execute concurrently on the thread pool
        /// </summary>
        int Concurrency { get; set; }

        /// <summary>
        /// Get the number of work items waiting in the queue.
        /// </summary>
        int WaitingCallbacks { get; }

        /// <summary>
        /// Get an array with all the state objects of the currently running items.
        /// The array represents a snapshot, and taking it impacts performance.
        /// </summary>
        object[] GetStates();

        /// <summary>
        /// Get the WorkItemsGroup start information
        /// </summary>
        WIGStartInfo WIGStartInfo { get; }

        /// <summary>
        /// Starts to execute work items
        /// </summary>
        void Start();

        /// <summary>
        /// Cancel all the work items.
        /// Same as Cancel(false)
        /// </summary>
        void Cancel();

        /// <summary>
        /// Cancel all work items using thread abortion
        /// </summary>
        /// <param name="abortExecution">True to stop work items by raising ThreadAbortException</param>
        void Cancel(bool abortExecution);

        /// <summary>
        /// Wait for all work items to complete.
        /// </summary>
        void WaitForIdle();

        /// <summary>
        /// Wait for all work items to complete, until timeout expired
        /// </summary>
        /// <param name="timeout">How long to wait for the work items to complete</param>
        /// <returns>Returns true if work items completed within the timeout, otherwise false.</returns>
        bool WaitForIdle(TimeSpan timeout);

        /// <summary>
        /// Wait for all work items to complete, until timeout expired
        /// </summary>
        /// <param name="millisecondsTimeout">How long to wait for the work items to complete in milliseconds</param>
        /// <returns>Returns true if work items completed within the timeout, otherwise false.</returns>
        bool WaitForIdle(int millisecondsTimeout);

        /// <summary>
        /// IsIdle is true when there are no work items running or queued.
        /// </summary>
        bool IsIdle { get; }

        /// <summary>
        /// This event is fired when all work items are completed.
        /// (When IsIdle changes to true)
        /// This event only works on a WorkItemsGroup. On SmartThreadPool
        /// it throws the NotImplementedException.
        /// </summary>
        event WorkItemsGroupIdleHandler OnIdle;

        #region QueueWorkItem

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <param name="workItemPriority">The priority of the work item</param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback, WorkItemPriority workItemPriority);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <param name="state">
        /// The context object of the work item. Used for passing arguments to the work item.
        /// </param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback, object state);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <param name="state">
        /// The context object of the work item. Used for passing arguments to the work item.
        /// </param>
        /// <param name="workItemPriority">The work item priority</param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback, object state, WorkItemPriority workItemPriority);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <param name="state">
        /// The context object of the work item. Used for passing arguments to the work item.
        /// </param>
        /// <param name="postExecuteWorkItemCallback">
        /// A delegate to call after the callback completion
        /// </param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback, object state, PostExecuteWorkItemCallback postExecuteWorkItemCallback);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <param name="state">
        /// The context object of the work item. Used for passing arguments to the work item.
        /// </param>
        /// <param name="postExecuteWorkItemCallback">
        /// A delegate to call after the callback completion
        /// </param>
        /// <param name="workItemPriority">The work item priority</param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback, object state, PostExecuteWorkItemCallback postExecuteWorkItemCallback, WorkItemPriority workItemPriority);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <param name="state">
        /// The context object of the work item. Used for passing arguments to the work item.
        /// </param>
        /// <param name="postExecuteWorkItemCallback">
        /// A delegate to call after the callback completion
        /// </param>
        /// <param name="callToPostExecute">Indicates on which cases to call to the post execute callback</param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback, object state, PostExecuteWorkItemCallback postExecuteWorkItemCallback, CallToPostExecute callToPostExecute);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="callback">A callback to execute</param>
        /// <param name="state">
        /// The context object of the work item. Used for passing arguments to the work item.
        /// </param>
        /// <param name="postExecuteWorkItemCallback">
        /// A delegate to call after the callback completion
        /// </param>
        /// <param name="callToPostExecute">Indicates on which cases to call to the post execute callback</param>
        /// <param name="workItemPriority">The work item priority</param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemCallback callback, object state, PostExecuteWorkItemCallback postExecuteWorkItemCallback, CallToPostExecute callToPostExecute, WorkItemPriority workItemPriority);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="workItemInfo">Work item info</param>
        /// <param name="callback">A callback to execute</param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemInfo workItemInfo, WorkItemCallback callback);

        /// <summary>
        /// Queue a work item
        /// </summary>
        /// <param name="workItemInfo">Work item information</param>
        /// <param name="callback">A callback to execute</param>
        /// <param name="state">
        /// The context object of the work item. Used for passing arguments to the work item.
        /// </param>
        /// <returns>Returns a work item result</returns>
        IWorkItemResult QueueWorkItem(WorkItemInfo workItemInfo, WorkItemCallback callback, object state);

        #endregion

        #region QueueWorkItem(Action<...>)

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult object, but its GetResult() will always return null</returns>
        IWorkItemResult QueueWorkItem(Action action);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult object, but its GetResult() will always return null</returns>
        IWorkItemResult QueueWorkItem<T>(Action<T> action, T arg);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult object, but its GetResult() will always return null</returns>
        IWorkItemResult QueueWorkItem<T1, T2>(Action<T1, T2> action, T1 arg1, T2 arg2);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult object, but its GetResult() will always return null</returns>
        IWorkItemResult QueueWorkItem<T1, T2, T3>(Action<T1, T2, T3> action, T1 arg1, T2 arg2, T3 arg3);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult object, but its GetResult() will always return null</returns>
        IWorkItemResult QueueWorkItem<T1, T2, T3, T4>(Action<T1, T2, T3, T4> action, T1 arg1, T2 arg2, T3 arg3, T4 arg4);

        #endregion

        #region QueueWorkItem(Func<...>)

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult&lt;TResult&gt; object.
        /// its GetResult() returns a TResult object</returns>
        IWorkItemResult<TResult> QueueWorkItem<TResult>(Func<TResult> func);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult&lt;TResult&gt; object.
        /// its GetResult() returns a TResult object</returns>
        IWorkItemResult<TResult> QueueWorkItem<T, TResult>(Func<T, TResult> func, T arg);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult&lt;TResult&gt; object.
        /// its GetResult() returns a TResult object</returns>
        IWorkItemResult<TResult> QueueWorkItem<T1, T2, TResult>(Func<T1, T2, TResult> func, T1 arg1, T2 arg2);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult&lt;TResult&gt; object.
        /// its GetResult() returns a TResult object</returns>
        IWorkItemResult<TResult> QueueWorkItem<T1, T2, T3, TResult>(Func<T1, T2, T3, TResult> func, T1 arg1, T2 arg2, T3 arg3);

        /// <summary>
        /// Queue a work item.
        /// </summary>
        /// <returns>Returns a IWorkItemResult&lt;TResult&gt; object.
        /// its GetResult() returns a TResult object</returns>
        IWorkItemResult<TResult> QueueWorkItem<T1, T2, T3, T4, TResult>(Func<T1, T2, T3, T4, TResult> func, T1 arg1, T2 arg2, T3 arg3, T4 arg4);

        #endregion
    }

    #endregion

    #region CallToPostExecute enumerator

    [Flags]
    public enum CallToPostExecute
    {
        /// <summary>
        /// Never call to the PostExecute call back
        /// </summary>
        Never = 0x00,

        /// <summary>
        /// Call to the PostExecute only when the work item is cancelled
        /// </summary>
        WhenWorkItemCanceled = 0x01,

        /// <summary>
        /// Call to the PostExecute only when the work item is not cancelled
        /// </summary>
        WhenWorkItemNotCanceled = 0x02,

        /// <summary>
        /// Always call to the PostExecute
        /// </summary>
        Always = WhenWorkItemCanceled | WhenWorkItemNotCanceled,
    }

    #endregion

    #region IWorkItemResult interface

    /// <summary>
    /// The common interface of IWorkItemResult and IWorkItemResult&lt;T&gt;
    /// </summary>
    public interface IWaitableResult
    {
        /// <summary>
        /// This method is intended for internal use.
        /// </summary>
        /// <returns></returns>
        IWorkItemResult GetWorkItemResult();

        /// <summary>
        /// This method is intended for internal use.
        /// </summary>
        /// <returns></returns>
        IWorkItemResult<TResult> GetWorkItemResultT<TResult>();
    }

    /// <summary>
    /// IWorkItemResult interface.
    /// Created when a WorkItemCallback work item is queued.
    /// </summary>
    public interface IWorkItemResult : IWorkItemResult<object>
    {
    }

    /// <summary>
    /// IWorkItemResult&lt;TResult&gt; interface.
    /// Created when a Func&lt;T&gt; work item is queued.
    /// </summary>
    public interface IWorkItemResult<TResult> : IWaitableResult
    {
        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits.
        /// </summary>
        /// <returns>The result of the work item</returns>
        TResult GetResult();

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout.
        /// </summary>
        /// <returns>The result of the work item</returns>
        /// On timeout throws WorkItemTimeoutException
        TResult GetResult(
            int millisecondsTimeout,
            bool exitContext);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout.
        /// </summary>
        /// <returns>The result of the work item</returns>
        /// On timeout throws WorkItemTimeoutException
        TResult GetResult(
            TimeSpan timeout,
            bool exitContext);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout or until the cancelWaitHandle is signaled.
        /// </summary>
        /// <param name="millisecondsTimeout">Timeout in milliseconds, or -1 for infinite</param>
        /// <param name="exitContext">
        /// true to exit the synchronization domain for the context before the wait (if in a synchronized context), and reacquire it; otherwise, false.
        /// </param>
        /// <param name="cancelWaitHandle">A cancel wait handle to interrupt the blocking if needed</param>
        /// <returns>The result of the work item</returns>
        /// On timeout throws WorkItemTimeoutException
        /// On cancel throws WorkItemCancelException
        TResult GetResult(
            int millisecondsTimeout,
            bool exitContext,
            WaitHandle cancelWaitHandle);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout or until the cancelWaitHandle is signaled.
        /// </summary>
        /// <returns>The result of the work item</returns>
        /// On timeout throws WorkItemTimeoutException
        /// On cancel throws WorkItemCancelException
        TResult GetResult(
            TimeSpan timeout,
            bool exitContext,
            WaitHandle cancelWaitHandle);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits.
        /// </summary>
        /// <param name="e">Filled with the exception if one was thrown</param>
        /// <returns>The result of the work item</returns>
        TResult GetResult(out Exception e);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout.
        /// </summary>
        /// <param name="millisecondsTimeout"></param>
        /// <param name="exitContext"></param>
        /// <param name="e">Filled with the exception if one was thrown</param>
        /// <returns>The result of the work item</returns>
        /// On timeout throws WorkItemTimeoutException
        TResult GetResult(
            int millisecondsTimeout,
            bool exitContext,
            out Exception e);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout.
        /// </summary>
        /// <param name="exitContext"></param>
        /// <param name="e">Filled with the exception if one was thrown</param>
        /// <param name="timeout"></param>
        /// <returns>The result of the work item</returns>
        /// On timeout throws WorkItemTimeoutException
        TResult GetResult(
            TimeSpan timeout,
            bool exitContext,
            out Exception e);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout or until the cancelWaitHandle is signaled.
        /// </summary>
        /// <param name="millisecondsTimeout">Timeout in milliseconds, or -1 for infinite</param>
        /// <param name="exitContext">
        /// true to exit the synchronization domain for the context before the wait (if in a synchronized context), and reacquire it; otherwise, false.
        /// </param>
        /// <param name="cancelWaitHandle">A cancel wait handle to interrupt the blocking if needed</param>
        /// <param name="e">Filled with the exception if one was thrown</param>
        /// <returns>The result of the work item</returns>
        /// On timeout throws WorkItemTimeoutException
        /// On cancel throws WorkItemCancelException
        TResult GetResult(
            int millisecondsTimeout,
            bool exitContext,
            WaitHandle cancelWaitHandle,
            out Exception e);

        /// <summary>
        /// Get the result of the work item.
        /// If the work item didn't run yet then the caller waits until timeout or until the cancelWaitHandle is signaled.
        /// </summary>
        /// <returns>The result of the work item</returns>
        /// <param name="cancelWaitHandle"></param>
        /// <param name="e">Filled with the exception if one was thrown</param>
        /// <param name="timeout"></param>
        /// <param name="exitContext"></param>
        /// On timeout throws WorkItemTimeoutException
        /// On cancel throws WorkItemCancelException
        TResult GetResult(
            TimeSpan timeout,
            bool exitContext,
            WaitHandle cancelWaitHandle,
            out Exception e);

        /// <summary>
        /// Gets an indication whether the asynchronous operation has completed.
        /// </summary>
        bool IsCompleted { get; }

        /// <summary>
        /// Gets an indication whether the asynchronous operation has been canceled.
        /// </summary>
        bool IsCanceled { get; }

        /// <summary>
        /// Gets the user-defined object that contains context data
        /// for the work item method.
        /// </summary>
        object State { get; }

        /// <summary>
        /// Same as Cancel(false).
        /// </summary>
        bool Cancel();

        /// <summary>
        /// Cancel the work item execution.
        /// If the work item is in the queue then it won't execute
        /// If the work item is completed, it will remain completed
        /// If the work item is in progress then the user can check the SmartThreadPool.IsWorkItemCanceled
        /// property to check if the work item has been cancelled. If the abortExecution is set to true then
        /// the Smart Thread Pool will send an AbortException to the running thread to stop the execution
        /// of the work item. When an in progress work item is canceled its GetResult will throw WorkItemCancelException.
        /// If the work item is already cancelled it will remain cancelled
        /// </summary>
        /// <param name="abortExecution">When true send an AbortException to the executing thread.</param>
        /// <returns>Returns true if the work item was not completed, otherwise false.</returns>
        bool Cancel(bool abortExecution);

        /// <summary>
        /// Get the work item's priority
        /// </summary>
        WorkItemPriority WorkItemPriority { get; }

        /// <summary>
        /// Return the result, same as GetResult()
        /// </summary>
        TResult Result { get; }

        /// <summary>
        /// Returns the exception if one occurred, otherwise returns null.
        /// </summary>
        object Exception { get; }
    }

    #endregion

    #region .NET 3.5

    // All these delegates are built into .NET 3.5
    // Comment/Remove them when compiling to .NET 3.5 to avoid ambiguity.

    public delegate void Action();
    public delegate void Action<T1, T2>(T1 arg1, T2 arg2);
    public delegate void Action<T1, T2, T3>(T1 arg1, T2 arg2, T3 arg3);
    public delegate void Action<T1, T2, T3, T4>(T1 arg1, T2 arg2, T3 arg3, T4 arg4);

    public delegate TResult Func<TResult>();
    public delegate TResult Func<T, TResult>(T arg1);
    public delegate TResult Func<T1, T2, TResult>(T1 arg1, T2 arg2);
    public delegate TResult Func<T1, T2, T3, TResult>(T1 arg1, T2 arg2, T3 arg3);
    public delegate TResult Func<T1, T2, T3, T4, TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4);

    #endregion
}
using System;
using System.Globalization;
#if !BUILDTASK
using Avalonia.Animation.Animators;
#endif

namespace Avalonia.Media
{
    /// <summary>
    /// An ARGB color.
    /// </summary>
#if !BUILDTASK
    public
#endif
    readonly struct Color : IEquatable<Color>
    {
        // Registers the animator for Color-typed properties once, the first time
        // the type is used. Skipped in the BUILDTASK build, which has no animation.
        static Color()
        {
#if !BUILDTASK
            Animation.Animation.RegisterAnimator<ColorAnimator>(prop => typeof(Color).IsAssignableFrom(prop.PropertyType));
#endif
        }

        /// <summary>
        /// Gets the Alpha component of the color.
        /// </summary>
        public byte A { get; }

        /// <summary>
        /// Gets the Red component of the color.
        /// </summary>
        public byte R { get; }

        /// <summary>
        /// Gets the Green component of the color.
        /// </summary>
        public byte G { get; }

        /// <summary>
        /// Gets the Blue component of the color.
        /// </summary>
        public byte B { get; }

        /// <summary>
        /// Initializes a new <see cref="Color"/> from alpha, red, green and blue components.
        /// </summary>
        /// <param name="a">The alpha component.</param>
        /// <param name="r">The red component.</param>
        /// <param name="g">The green component.</param>
        /// <param name="b">The blue component.</param>
        public Color(byte a, byte r, byte g, byte b)
        {
            A = a;
            R = r;
            G = g;
            B = b;
        }

        /// <summary>
        /// Creates a <see cref="Color"/> from alpha, red, green and blue components.
        /// </summary>
        /// <param name="a">The alpha component.</param>
        /// <param name="r">The red component.</param>
        /// <param name="g">The green component.</param>
        /// <param name="b">The blue component.</param>
        /// <returns>The color.</returns>
        public static Color FromArgb(byte a, byte r, byte g, byte b)
        {
            return new Color(a, r, g, b);
        }

        /// <summary>
        /// Creates a <see cref="Color"/> from red, green and blue components.
        /// The alpha component is set to 0xff (fully opaque).
        /// </summary>
        /// <param name="r">The red component.</param>
        /// <param name="g">The green component.</param>
        /// <param name="b">The blue component.</param>
        /// <returns>The color.</returns>
        public static Color FromRgb(byte r, byte g, byte b)
        {
            return new Color(0xff, r, g, b);
        }

        /// <summary>
        /// Creates a <see cref="Color"/> from an integer.
        /// The byte layout is 0xAARRGGBB (alpha in the most significant byte),
        /// the inverse of <see cref="ToUint32"/>.
        /// </summary>
        /// <param name="value">The integer value.</param>
        /// <returns>The color.</returns>
        public static Color FromUInt32(uint value)
        {
            return new Color(
                (byte)((value >> 24) & 0xff),
                (byte)((value >> 16) & 0xff),
                (byte)((value >> 8) & 0xff),
                (byte)(value & 0xff)
            );
        }

        /// <summary>
        /// Parses a color string.
        /// </summary>
        /// <param name="s">The color string.</param>
        /// <returns>The <see cref="Color"/>.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="s"/> is null.</exception>
        /// <exception cref="FormatException">Thrown when <paramref name="s"/> is not a valid color string.</exception>
        public static Color Parse(string s)
        {
            if (s is null)
            {
                throw new ArgumentNullException(nameof(s));
            }

            if (TryParse(s, out Color color))
            {
                return color;
            }

            throw new FormatException($"Invalid color string: '{s}'.");
        }

        /// <summary>
        /// Parses a color string.
        /// </summary>
        /// <param name="s">The color string.</param>
        /// <returns>The <see cref="Color"/>.</returns>
        /// <exception cref="FormatException">Thrown when <paramref name="s"/> is not a valid color string.</exception>
        public static Color Parse(ReadOnlySpan<char> s)
        {
            if (TryParse(s, out Color color))
            {
                return color;
            }

            throw new FormatException($"Invalid color string: '{s.ToString()}'.");
        }

        /// <summary>
        /// Parses a color string. Accepts '#'-prefixed hex forms and known color names.
        /// </summary>
        /// <param name="s">The color string.</param>
        /// <param name="color">The parsed color</param>
        /// <returns>The status of the operation.</returns>
        public static bool TryParse(string s, out Color color)
        {
            color = default;

            if (s is null)
            {
                return false;
            }

            if (s.Length == 0)
            {
                return false;
            }

            // Try the hex form first; fall through to the named-color lookup on failure.
            if (s[0] == '#' && TryParseInternal(s.AsSpan(), out color))
            {
                return true;
            }

            var knownColor = KnownColors.GetKnownColor(s);

            if (knownColor != KnownColor.None)
            {
                color = knownColor.ToColor();

                return true;
            }

            return false;
        }

        /// <summary>
        /// Parses a color string. Accepts '#'-prefixed hex forms and known color names.
        /// </summary>
        /// <param name="s">The color string.</param>
        /// <param name="color">The parsed color</param>
        /// <returns>The status of the operation.</returns>
        public static bool TryParse(ReadOnlySpan<char> s, out Color color)
        {
            if (s.Length == 0)
            {
                color = default;

                return false;
            }

            if (s[0] == '#')
            {
                return TryParseInternal(s, out color);
            }

            var knownColor = KnownColors.GetKnownColor(s.ToString());

            if (knownColor != KnownColor.None)
            {
                color = knownColor.ToColor();

                return true;
            }

            color = default;

            return false;
        }

        // Parses a '#'-prefixed hex color: #RGB, #ARGB, #RRGGBB or #AARRGGBB.
        private static bool TryParseInternal(ReadOnlySpan<char> s, out Color color)
        {
            static bool TryParseCore(ReadOnlySpan<char> input, ref Color color)
            {
                var alphaComponent = 0u;

                // A 6-digit value has no alpha digits; force the alpha byte to 0xff.
                if (input.Length == 6)
                {
                    alphaComponent = 0xff000000;
                }
                else if (input.Length != 8)
                {
                    return false;
                }

                // TODO: (netstandard 2.1) Can use allocation free parsing.
                if (!uint.TryParse(input.ToString(), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var parsed))
                {
                    return false;
                }

                color = FromUInt32(parsed | alphaComponent);

                return true;
            }

            color = default;

            ReadOnlySpan<char> input = s.Slice(1);

            // Handle shorthand cases like #FFF (RGB) or #FFFF (ARGB).
            if (input.Length == 3 || input.Length == 4)
            {
                var extendedLength = 2 * input.Length;
#if !BUILDTASK
                Span<char> extended = stackalloc char[extendedLength];
#else
                char[] extended = new char[extendedLength];
#endif
                // Duplicate each hex digit, e.g. #FAB -> #FFAABB.
                for (int i = 0; i < input.Length; i++)
                {
                    extended[2 * i + 0] = input[i];
                    extended[2 * i + 1] = input[i];
                }

                return TryParseCore(extended, ref color);
            }

            return TryParseCore(input, ref color);
        }

        /// <summary>
        /// Returns the string representation of the color: the known color name
        /// when one matches, otherwise the "#aarrggbb" hex form.
        /// </summary>
        /// <returns>
        /// The string representation of the color.
        /// </returns>
        public override string ToString()
        {
            uint rgb = ToUint32();

            return KnownColors.GetKnownColorName(rgb) ?? $"#{rgb:x8}";
        }

        /// <summary>
        /// Returns the integer representation of the color, laid out as 0xAARRGGBB.
        /// </summary>
        /// <returns>
        /// The integer representation of the color.
        /// </returns>
        public uint ToUint32()
        {
            return ((uint)A << 24) | ((uint)R << 16) | ((uint)G << 8) | (uint)B;
        }

        /// <summary>
        /// Check if two colors are equal.
        /// </summary>
        public bool Equals(Color other)
        {
            return A == other.A && R == other.R && G == other.G && B == other.B;
        }

        /// <summary>
        /// Check for equality against a boxed <see cref="Color"/>.
        /// </summary>
        public override bool Equals(object obj)
        {
            return obj is Color other && Equals(other);
        }

        /// <summary>
        /// Returns a hash code combining all four channel values.
        /// </summary>
        public override int GetHashCode()
        {
            unchecked
            {
                int hashCode = A.GetHashCode();
                hashCode = (hashCode * 397) ^ R.GetHashCode();
                hashCode = (hashCode * 397) ^ G.GetHashCode();
                hashCode = (hashCode * 397) ^ B.GetHashCode();
                return hashCode;
            }
        }

        public static bool operator ==(Color left, Color right)
        {
            return left.Equals(right);
        }

        public static bool operator !=(Color left, Color right)
        {
            return !left.Equals(right);
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: booking/rpc/restriction_svc.proto #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace HOLMS.Types.Booking.RPC { /// <summary>Holder for reflection information generated from booking/rpc/restriction_svc.proto</summary> public static partial class RestrictionSvcReflection { #region Descriptor /// <summary>File descriptor for booking/rpc/restriction_svc.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static RestrictionSvcReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "CiFib29raW5nL3JwYy9yZXN0cmljdGlvbl9zdmMucHJvdG8SF2hvbG1zLnR5", "cGVzLmJvb2tpbmcucnBjGhtnb29nbGUvcHJvdG9idWYvZW1wdHkucHJvdG8a", "KnByaW1pdGl2ZS9wYl9pbmNsdXNpdmVfb3BzZGF0ZV9yYW5nZS5wcm90bxoe", "Ym9va2luZy9kYXRlX3Jlc3RyaWN0aW9uLnByb3RvIl4KGFJlc3RyaWN0aW9u", "U3ZjR2V0UmVxdWVzdBJCCgpkYXRlX3JhbmdlGAEgASgLMi4uaG9sbXMudHlw", "ZXMucHJpbWl0aXZlLlBiSW5jbHVzaXZlT3BzZGF0ZVJhbmdlIlcKGVJlc3Ry", "aWN0aW9uU3ZjR2V0UmVzcG9uc2USOgoMcmVzdHJpY3Rpb25zGAEgAygLMiQu", "aG9sbXMudHlwZXMuYm9va2luZy5EYXRlUmVzdHJpY3Rpb24iWQobUmVzdHJp", "Y3Rpb25TdmNVcGRhdGVSZXF1ZXN0EjoKDHJlc3RyaWN0aW9ucxgBIAMoCzIk", "LmhvbG1zLnR5cGVzLmJvb2tpbmcuRGF0ZVJlc3RyaWN0aW9uMuIBCg5SZXN0", "cmljdGlvblN2YxJsCgNHZXQSMS5ob2xtcy50eXBlcy5ib29raW5nLnJwYy5S", "ZXN0cmljdGlvblN2Y0dldFJlcXVlc3QaMi5ob2xtcy50eXBlcy5ib29raW5n", "LnJwYy5SZXN0cmljdGlvblN2Y0dldFJlc3BvbnNlEmIKElVwZGF0ZVJlc3Ry", "aWN0aW9ucxI0LmhvbG1zLnR5cGVzLmJvb2tpbmcucnBjLlJlc3RyaWN0aW9u", "U3ZjVXBkYXRlUmVxdWVzdBoWLmdvb2dsZS5wcm90b2J1Zi5FbXB0eUInWgti", "b29raW5nL3JwY6oCF0hPTE1TLlR5cGVzLkJvb2tpbmcuUlBDYgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new 
pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.EmptyReflection.Descriptor, global::HOLMS.Types.Primitive.PbInclusiveOpsdateRangeReflection.Descriptor, global::HOLMS.Types.Booking.DateRestrictionReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Booking.RPC.RestrictionSvcGetRequest), global::HOLMS.Types.Booking.RPC.RestrictionSvcGetRequest.Parser, new[]{ "DateRange" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Booking.RPC.RestrictionSvcGetResponse), global::HOLMS.Types.Booking.RPC.RestrictionSvcGetResponse.Parser, new[]{ "Restrictions" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Booking.RPC.RestrictionSvcUpdateRequest), global::HOLMS.Types.Booking.RPC.RestrictionSvcUpdateRequest.Parser, new[]{ "Restrictions" }, null, null, null) })); } #endregion } #region Messages public sealed partial class RestrictionSvcGetRequest : pb::IMessage<RestrictionSvcGetRequest> { private static readonly pb::MessageParser<RestrictionSvcGetRequest> _parser = new pb::MessageParser<RestrictionSvcGetRequest>(() => new RestrictionSvcGetRequest()); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<RestrictionSvcGetRequest> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::HOLMS.Types.Booking.RPC.RestrictionSvcReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RestrictionSvcGetRequest() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RestrictionSvcGetRequest(RestrictionSvcGetRequest other) : this() { 
// ---------------------------------------------------------------------------
// Generated protocol-buffer message code for the restriction-service RPC
// surface.  This span completes RestrictionSvcGetRequest (declared above this
// chunk) and defines RestrictionSvcGetResponse and RestrictionSvcUpdateRequest.
// ---------------------------------------------------------------------------
// Completes the RestrictionSvcGetRequest copy constructor.
DateRange = other.dateRange_ != null ? other.DateRange.Clone() : null;
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public RestrictionSvcGetRequest Clone() {
  return new RestrictionSvcGetRequest(this);
}

/// <summary>Field number for the "date_range" field.</summary>
public const int DateRangeFieldNumber = 1;
private global::HOLMS.Types.Primitive.PbInclusiveOpsdateRange dateRange_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::HOLMS.Types.Primitive.PbInclusiveOpsdateRange DateRange {
  get { return dateRange_; }
  set { dateRange_ = value; }
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
  return Equals(other as RestrictionSvcGetRequest);
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(RestrictionSvcGetRequest other) {
  if (ReferenceEquals(other, null)) {
    return false;
  }
  if (ReferenceEquals(other, this)) {
    return true;
  }
  if (!object.Equals(DateRange, other.DateRange)) return false;
  return true;
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
  int hashCode = 1;
  if (dateRange_ != null) hashCode ^= DateRange.GetHashCode();
  return hashCode;
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
  return pb::JsonFormatter.ToDiagnosticString(this);
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
  if (dateRange_ != null) {
    // Raw tag 10 = field 1, wire type 2 (length-delimited).
    output.WriteRawTag(10);
    output.WriteMessage(DateRange);
  }
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
  int totalSize = 0;
  if (dateRange_ != null) {
    totalSize += 1 + pb::CodedOutputStream.ComputeMessageSize(DateRange);
  }
  return totalSize;
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(RestrictionSvcGetRequest other) {
  if (other == null) {
    return;
  }
  if (other.dateRange_ != null) {
    if (dateRange_ == null) {
      dateRange_ = new global::HOLMS.Types.Primitive.PbInclusiveOpsdateRange();
    }
    DateRange.MergeFrom(other.DateRange);
  }
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
  uint fieldTag;
  while ((fieldTag = input.ReadTag()) != 0) {
    switch (fieldTag) {
      default:
        input.SkipLastField();
        break;
      case 10: {
        if (dateRange_ == null) {
          dateRange_ = new global::HOLMS.Types.Primitive.PbInclusiveOpsdateRange();
        }
        input.ReadMessage(dateRange_);
        break;
      }
    }
  }
}

}

/// <summary>Response message carrying the date restrictions matched by a get request.</summary>
public sealed partial class RestrictionSvcGetResponse : pb::IMessage<RestrictionSvcGetResponse> {
  private static readonly pb::MessageParser<RestrictionSvcGetResponse> _parser =
      new pb::MessageParser<RestrictionSvcGetResponse>(() => new RestrictionSvcGetResponse());

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<RestrictionSvcGetResponse> Parser { get { return _parser; } }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::HOLMS.Types.Booking.RPC.RestrictionSvcReflection.Descriptor.MessageTypes[1]; }
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public RestrictionSvcGetResponse() {
    OnConstruction();
  }

  partial void OnConstruction();

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public RestrictionSvcGetResponse(RestrictionSvcGetResponse other) : this() {
    restrictions_ = other.restrictions_.Clone();
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public RestrictionSvcGetResponse Clone() {
    return new RestrictionSvcGetResponse(this);
  }

  /// <summary>Field number for the "restrictions" field.</summary>
  public const int RestrictionsFieldNumber = 1;
  private static readonly pb::FieldCodec<global::HOLMS.Types.Booking.DateRestriction> _repeated_restrictions_codec
      = pb::FieldCodec.ForMessage(10, global::HOLMS.Types.Booking.DateRestriction.Parser);
  private readonly pbc::RepeatedField<global::HOLMS.Types.Booking.DateRestriction> restrictions_ =
      new pbc::RepeatedField<global::HOLMS.Types.Booking.DateRestriction>();

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public pbc::RepeatedField<global::HOLMS.Types.Booking.DateRestriction> Restrictions {
    get { return restrictions_; }
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as RestrictionSvcGetResponse);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(RestrictionSvcGetResponse other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (!restrictions_.Equals(other.restrictions_)) return false;
    return true;
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hashCode = 1;
    hashCode ^= restrictions_.GetHashCode();
    return hashCode;
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    restrictions_.WriteTo(output, _repeated_restrictions_codec);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int totalSize = 0;
    totalSize += restrictions_.CalculateSize(_repeated_restrictions_codec);
    return totalSize;
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(RestrictionSvcGetResponse other) {
    if (other == null) {
      return;
    }
    restrictions_.Add(other.restrictions_);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint fieldTag;
    while ((fieldTag = input.ReadTag()) != 0) {
      switch (fieldTag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          restrictions_.AddEntriesFrom(input, _repeated_restrictions_codec);
          break;
        }
      }
    }
  }

}

/// <summary>Request message carrying the date restrictions to write via an update call.</summary>
public sealed partial class RestrictionSvcUpdateRequest : pb::IMessage<RestrictionSvcUpdateRequest> {
  private static readonly pb::MessageParser<RestrictionSvcUpdateRequest> _parser =
      new pb::MessageParser<RestrictionSvcUpdateRequest>(() => new RestrictionSvcUpdateRequest());

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<RestrictionSvcUpdateRequest> Parser { get { return _parser; } }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::HOLMS.Types.Booking.RPC.RestrictionSvcReflection.Descriptor.MessageTypes[2]; }
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public RestrictionSvcUpdateRequest() {
    OnConstruction();
  }

  partial void OnConstruction();

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public RestrictionSvcUpdateRequest(RestrictionSvcUpdateRequest other) : this() {
    restrictions_ = other.restrictions_.Clone();
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public RestrictionSvcUpdateRequest Clone() {
    return new RestrictionSvcUpdateRequest(this);
  }

  /// <summary>Field number for the "restrictions" field.</summary>
  public const int RestrictionsFieldNumber = 1;
  private static readonly pb::FieldCodec<global::HOLMS.Types.Booking.DateRestriction> _repeated_restrictions_codec
      = pb::FieldCodec.ForMessage(10, global::HOLMS.Types.Booking.DateRestriction.Parser);
  private readonly pbc::RepeatedField<global::HOLMS.Types.Booking.DateRestriction> restrictions_ =
      new pbc::RepeatedField<global::HOLMS.Types.Booking.DateRestriction>();

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public pbc::RepeatedField<global::HOLMS.Types.Booking.DateRestriction> Restrictions {
    get { return restrictions_; }
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as RestrictionSvcUpdateRequest);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(RestrictionSvcUpdateRequest other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (!restrictions_.Equals(other.restrictions_)) return false;
    return true;
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hashCode = 1;
    hashCode ^= restrictions_.GetHashCode();
    return hashCode;
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    restrictions_.WriteTo(output, _repeated_restrictions_codec);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int totalSize = 0;
    totalSize += restrictions_.CalculateSize(_repeated_restrictions_codec);
    return totalSize;
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(RestrictionSvcUpdateRequest other) {
    if (other == null) {
      return;
    }
    restrictions_.Add(other.restrictions_);
  }

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint fieldTag;
    while ((fieldTag = input.ReadTag()) != 0) {
      switch (fieldTag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          restrictions_.AddEntriesFrom(input, _repeated_restrictions_codec);
          break;
        }
      }
    }
  }

}

#endregion

}

#endregion Designer generated code
// // IntervalHeap.cs // // Author: // Stephane Delcroix <sdelcroix@src.gnome.org> // // Copyright (C) 2007-2008 Novell, Inc. // Copyright (C) 2007-2008 Stephane Delcroix // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // /*************************************************************************** * IntervalHeap.cs * * Copyright (C) 2006 Novell, Inc. 
* Written by Aaron Bockover <aaron@abock.org> ****************************************************************************/ /* THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW: * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
*/

using System;
using System.Collections;
using System.Collections.Generic;

namespace Banshee.Kernel
{
    /// <summary>
    /// A max-priority queue over items of type <typeparamref name="T"/>.
    /// Higher <c>priority</c> values are popped first.  Despite the name, the
    /// implementation is a plain binary heap stored in a flat array of
    /// (item, priority) pairs.  Not thread-safe; wrap with
    /// <see cref="Synchronized"/> for concurrent use.
    /// </summary>
    public class IntervalHeap<T> : ICollection<T>, ICollection, IEnumerable<T>, IEnumerable
    {
        private const int MIN_CAPACITY = 16;

        private int count;        // number of live entries in 'heap'
        private int generation;   // bumped on every mutation to invalidate enumerators
        private Interval [] heap;

        public IntervalHeap()
        {
            Clear();
        }

        /// <summary>Removes and returns the highest-priority item.</summary>
        /// <exception cref="InvalidOperationException">The heap is empty.</exception>
        public virtual T Pop()
        {
            if(count == 0) {
                throw new InvalidOperationException();
            }

            T item = heap[0].Item;
            // Re-seat the last entry starting from the vacated root.
            MoveDown(0, heap[--count]);
            generation++;

            return item;
        }

        /// <summary>Returns the highest-priority item without removing it.</summary>
        /// <exception cref="InvalidOperationException">The heap is empty.</exception>
        public virtual T Peek()
        {
            if(count == 0) {
                throw new InvalidOperationException();
            }

            return heap[0].Item;
        }

        /// <summary>Inserts <paramref name="item"/> with the given priority.</summary>
        public virtual void Push(T item, int priority)
        {
            if(item == null) {
                throw new ArgumentNullException("item");
            }

            if(count == heap.Length) {
                OptimalArrayResize(ref heap, 1);
            }

            MoveUp(++count - 1, new Interval(item, priority));
            generation++;
        }

        /// <summary>Empties the heap and resets its backing store.</summary>
        public virtual void Clear()
        {
            // BUG FIX: 'count' was previously not reset here, so a cleared heap
            // still reported its old Count and Pop/Peek read default entries
            // from the fresh backing array.
            count = 0;
            generation = 0;
            heap = new Interval[MIN_CAPACITY];
        }

        void ICollection.CopyTo(Array array, int index)
        {
            if(array == null) {
                throw new ArgumentNullException("array");
            }

            if(index < 0) {
                throw new ArgumentOutOfRangeException("index");
            }

            // BUG FIX: Array.Copy from Interval[] into the caller's array threw
            // ArrayTypeMismatchException; copy the payload items element-wise.
            for(int i = 0; i < count; i++) {
                array.SetValue(heap[i].Item, index + i);
            }
        }

        public virtual void CopyTo(T [] array, int index)
        {
            if(array == null) {
                throw new ArgumentNullException("array");
            }

            if(index < 0) {
                throw new ArgumentOutOfRangeException("index");
            }

            // BUG FIX: Array.Copy from Interval[] into T[] threw
            // ArrayTypeMismatchException; copy the payload items element-wise.
            for(int i = 0; i < count; i++) {
                array[index + i] = heap[i].Item;
            }
        }

        /// <summary>Linear-scan membership test (O(n)).</summary>
        public virtual bool Contains(T item)
        {
            if(item == null) {
                throw new ArgumentNullException("item");
            }

            return FindItemHeapIndex(item) >= 0;
        }

        /// <summary>ICollection-style add; inserts with priority 0.</summary>
        public virtual void Add(T item)
        {
            if(item == null) {
                throw new ArgumentNullException("item");
            }

            Push(item, 0);
        }

        /// <summary>Removes the first entry equal to <paramref name="item"/>, if any.</summary>
        public virtual bool Remove(T item)
        {
            if(item == null) {
                throw new ArgumentNullException("item");
            }

            int index = FindItemHeapIndex(item);

            if(index < 0) {
                return false;
            }

            MoveDown(index, heap[--count]);
            generation++;

            return true;
        }

        /// <summary>Shrinks the backing array when it is less than ~90% full.</summary>
        public virtual void TrimExcess()
        {
            if(count < heap.Length * 0.9) {
                Array.Resize(ref heap, count);
            }
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        public virtual IEnumerator<T> GetEnumerator()
        {
            return new IntervalHeapEnumerator(this);
        }

        /// <summary>Returns a lock-protected wrapper around <paramref name="heap"/>.</summary>
        public static IntervalHeap<T> Synchronized(IntervalHeap<T> heap)
        {
            if(heap == null) {
                throw new ArgumentNullException("heap");
            }

            return new SyncIntervalHeap(heap);
        }

        private int FindItemHeapIndex(T item)
        {
            for(int i = 0; i < count; i++) {
                if(item.Equals(heap[i].Item)) {
                    return i;
                }
            }

            return -1;
        }

        private static int GetLeftChildIndex(int index)
        {
            return index * 2 + 1;
        }

        private static int GetParentIndex(int index)
        {
            return (index - 1) / 2;
        }

        // grow array to nearest minimum power of two
        private static void OptimalArrayResize(ref Interval [] array, int grow)
        {
            int new_capacity = array.Length == 0 ? 1 : array.Length;
            int min_capacity = array.Length == 0 ? MIN_CAPACITY : array.Length + grow;

            while(new_capacity < min_capacity) {
                new_capacity <<= 1;
            }

            Array.Resize(ref array, new_capacity);
        }

        // Bubble 'node' up from 'index' until its parent has a >= priority.
        private void MoveUp(int index, Interval node)
        {
            int parent_index = GetParentIndex(index);

            while(index > 0 && heap[parent_index].Priority < node.Priority) {
                heap[index] = heap[parent_index];
                index = parent_index;
                parent_index = GetParentIndex(index);
            }

            heap[index] = node;
        }

        // Sift the hole at 'index' to the bottom along the larger-child path,
        // then bubble 'node' up into place (bottom-up heapify trick).
        private void MoveDown(int index, Interval node)
        {
            int child_index = GetLeftChildIndex(index);

            while(child_index < count) {
                if(child_index + 1 < count &&
                    heap[child_index].Priority < heap[child_index + 1].Priority) {
                    child_index++;
                }

                heap[index] = heap[child_index];
                index = child_index;
                child_index = GetLeftChildIndex(index);
            }

            MoveUp(index, node);
        }

        public virtual int Count {
            get { return count; }
        }

        public bool IsReadOnly {
            get { return false; }
        }

        public virtual object SyncRoot {
            get { return this; }
        }

        public virtual bool IsSynchronized {
            get { return false; }
        }

        // One heap slot: the stored item plus its ordering priority.
        private struct Interval
        {
            private T item;
            private int priority;

            public Interval(T item, int priority)
            {
                this.item = item;
                this.priority = priority;
            }

            public T Item {
                get { return item; }
            }

            public int Priority {
                get { return priority; }
            }

            public override int GetHashCode ()
            {
                return priority.GetHashCode () ^ item.GetHashCode ();
            }
        }

        // Thread-safe decorator: every operation locks the wrapped heap.
        private sealed class SyncIntervalHeap : IntervalHeap<T>
        {
            private IntervalHeap<T> heap;

            internal SyncIntervalHeap(IntervalHeap<T> heap)
            {
                this.heap = heap;
            }

            public override int Count {
                get { lock(heap) { return heap.Count; } }
            }

            public override bool IsSynchronized {
                get { return true; }
            }

            public override object SyncRoot {
                get { return heap.SyncRoot; }
            }

            public override void Clear()
            {
                lock(heap) { heap.Clear(); }
            }

            public override bool Contains(T item)
            {
                lock(heap) { return heap.Contains(item); }
            }

            public override T Pop()
            {
                lock(heap) { return heap.Pop(); }
            }

            public override T Peek()
            {
                lock(heap) { return heap.Peek(); }
            }

            public override void Push(T item, int priority)
            {
                lock(heap) { heap.Push(item, priority); }
            }

            public override void Add(T item)
            {
                lock(heap) { heap.Add(item); }
            }

            public override bool Remove(T item)
            {
                lock(heap) { return heap.Remove(item); }
            }

            public override void TrimExcess()
            {
                lock(heap) { heap.TrimExcess(); }
            }

            public override void CopyTo(T [] array, int index)
            {
                lock(heap) { heap.CopyTo(array, index); }
            }

            public override IEnumerator<T> GetEnumerator()
            {
                lock(heap) { return new IntervalHeapEnumerator(this); }
            }
        }

        // Fail-fast enumerator: throws if the heap mutates (generation changes)
        // while enumeration is in progress.  Iterates in storage order, not
        // priority order.
        private sealed class IntervalHeapEnumerator : IEnumerator<T>, IEnumerator
        {
            private IntervalHeap<T> heap;
            private int index;
            private int generation;

            public IntervalHeapEnumerator(IntervalHeap<T> heap)
            {
                this.heap = heap;
                Reset();
            }

            public void Reset()
            {
                generation = heap.generation;
                index = -1;
            }

            public void Dispose()
            {
                heap = null;
            }

            public bool MoveNext()
            {
                if(generation != heap.generation) {
                    throw new InvalidOperationException();
                }

                if(index + 1 == heap.count) {
                    return false;
                }

                index++;
                return true;
            }

            object IEnumerator.Current {
                get { return Current; }
            }

            public T Current {
                get {
                    if(generation != heap.generation) {
                        throw new InvalidOperationException();
                    }

                    return heap.heap[index].Item;
                }
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;
using System.Diagnostics;
using System.Linq;
using System.Linq.Expressions;
using System.Threading;
using System.Threading.Tasks;
using Netfox.Core.Collections;
using Netfox.Core.Database;
using Netfox.Core.Extensions;

namespace EntityFramework.InMemory
{
    /// <summary>
    /// Non-generic in-memory stand-in for <see cref="DbSet"/> used in tests.
    /// Operations are backed by a plain <see cref="IList"/> instead of a database;
    /// query-related members are intentionally unimplemented.
    /// </summary>
    public class TestDbSet : DbSet, IQueryable, IDbAsyncEnumerable
    {
        private readonly Type _type;   // CLR element type this set represents
        private readonly IList _data;  // backing store, shared with TestDbSet<TEntity>

        public IList TestDbSetGeneric { get; }

        public TestDbSet(IList testDbSetGeneric, Type type)
        {
            this._type = type;
            this._data = testDbSetGeneric;
            this.TestDbSetGeneric = testDbSetGeneric;
        }

        #region Overrides of DbSet
        public override object Find(params object[] keyValues) => throw new NotImplementedException();
        public override Task<object> FindAsync(params object[] keyValues) => throw new NotImplementedException();
        public override Task<object> FindAsync(CancellationToken cancellationToken, params object[] keyValues) => throw new NotImplementedException();

        /// <summary>In-memory attach is indistinguishable from add.</summary>
        public override object Attach(object entity)
        {
            this.Add(entity);
            return entity;
        }

        public override object Add(object entity)
        {
            this._data.Add(entity);
            return entity;
        }

        private static IEntity GetIModel(object entity) => entity as IEntity;

        public override IEnumerable AddRange(IEnumerable entities)
        {
            dynamic data = this._data; //TODO Use interface
            var addRange = entities as object[] ?? entities.Cast<object>().ToArray();
            data.AddRange(addRange);
            return addRange;
        }

        public override object Remove(object entity)
        {
            // NOTE(review): this passes the entity's Id to IList.Remove on a list
            // that Add() populates with entities, not Ids — unless the backing
            // list implements remove-by-key semantics this is a no-op.  Verify
            // against the concrete IList implementations handed to the ctor.
            this._data.Remove(GetIModel(entity).Id);
            return entity;
        }

        public override IEnumerable RemoveRange(IEnumerable entities)
        {
            var removeRange = entities as object[] ?? entities.Cast<object>().ToArray();
            foreach (var entity in removeRange)
            {
                this.Remove(entity);
            }
            return removeRange;
        }

        public override object Create() => Activator.CreateInstance(this._type);
        public override object Create(Type derivedEntityType) => Activator.CreateInstance(derivedEntityType);
        public override DbSqlQuery SqlQuery(string sql, params object[] parameters) => throw new NotImplementedException();
        public override bool Equals(object obj) => throw new NotImplementedException();
        public override int GetHashCode() => throw new NotImplementedException();
        public override IList Local => this._data;
        #endregion

        #region Overrides of DbQuery
        public override DbQuery Include(string path) => throw new NotImplementedException();
        public override DbQuery AsNoTracking() => throw new NotImplementedException();
#pragma warning disable 672
        public override DbQuery AsStreaming() => throw new NotImplementedException();
#pragma warning restore 672
        public override string ToString() => throw new NotImplementedException();
        public override Type ElementType => this._type;
        #endregion

        #region Implementation of IDbAsyncEnumerable
        public IDbAsyncEnumerator GetAsyncEnumerator() => throw new NotImplementedException();
        #endregion

        #region Implementation of IQueryable
        public Expression Expression => throw new NotImplementedException();
        Type IQueryable.ElementType => this.ElementType;
        public IQueryProvider Provider => throw new NotImplementedException();
        #endregion

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() => this._data.GetEnumerator();
    }

    /// <summary>
    /// Generic in-memory <see cref="DbSet{TEntity}"/> for tests.  Keeps an
    /// observable backing list (<see cref="Data"/>) plus a hash-set index
    /// (<see cref="DataHashSet"/>) used to deduplicate adds, and invokes the
    /// supplied callbacks on add/remove so callers can simulate cascading writes.
    /// </summary>
    public class TestDbSet<TEntity> : DbSet<TEntity>, IQueryable, IEnumerable<TEntity>, IDbAsyncEnumerable<TEntity>, IList, INotifyCollectionChanged
        where TEntity : class
    {
        public InterfaceListWrapper<TEntity> Data { get; }

        // Fast membership index; must stay in sync with Data for the
        // dedup guards in Add/AddRange/Remove to be meaningful.
        private ConcurrentHashSet<TEntity> DataHashSet { get; } = new ConcurrentHashSet<TEntity>();

        private readonly IQueryable _query;

        public Action<TEntity> OnAddAction { get; }
        public Action<IEnumerable<TEntity>> OnAddBulkAction { get; }
        public Action<TEntity> OnRemoveAction { get; }

        public TestDbSet(Action<TEntity> addCallback, Action<IEnumerable<TEntity>> addBulkCallback, Action<TEntity> removeCallback)
            : this(addCallback, addBulkCallback, removeCallback, null) { }

        public TestDbSet(Action<TEntity> addCallback, Action<IEnumerable<TEntity>> addBulkCallback, Action<TEntity> removeCallback, TestDbSet dbSet = null)
        {
            if (dbSet == null)
            {
                this.Data = new InterfaceListWrapper<TEntity>(new ConcurrentObservableCollection<TEntity>());
            }
            else
            {
                // Share the non-generic set's backing store so both views see the same data.
                this.Data = new InterfaceListWrapper<TEntity>(dbSet.TestDbSetGeneric);
            }
            // Restrict the queryable view to TEntity and its subclasses.
            this._query = this.Data.AsQueryable().Where(p => (p.GetType().IsSubclassOf(typeof(TEntity))) || (p.GetType() == typeof(TEntity)));

            this.OnAddAction = addCallback;
            this.OnAddBulkAction = addBulkCallback;
            this.OnRemoveAction = removeCallback;
        }

        /// <summary>Adds <paramref name="item"/> unless it is null or already tracked.</summary>
        public override TEntity Add(TEntity item)
        {
            if ((item == null) || (this.DataHashSet.Contains(item))) { return item; }
            this.Data.Add(item);
            this.DataHashSet.Add(item);
            this.OnAddAction?.Invoke(item);
            return item;
        }

        public IEnumerable<TEntity> AddRange(IEnumerable items)
        {
            return this.AddRange(items, true);
        }

        /// <summary>
        /// Bulk add with per-concrete-type grouping; already-tracked items are skipped.
        /// </summary>
        public IEnumerable<TEntity> AddRange(IEnumerable items, bool insertReferencedObjects)
        {
            var typedItems = items.Cast<TEntity>();
            if (typedItems.Any(i => i == null)) Debugger.Break();
            var itemsByType = typedItems.GroupBy(item => item.GetType(), item => item, (type, homoItems) => new { type, homoItems });
            foreach (var homoItems in itemsByType.Select(i => i.homoItems.Where(ii => !this.DataHashSet.Contains(ii))))
            {
                var enumerable = homoItems as TEntity[] ?? homoItems.ToArray();
                if (!enumerable.Any()) continue;
                this.Data.AddRange(enumerable);
                this.DataHashSet.AddRange(enumerable);
                if (insertReferencedObjects) this.OnAddBulkAction?.Invoke(enumerable);
            }
            return typedItems;
        }

        public override IEnumerable<TEntity> AddRange(IEnumerable<TEntity> items)
        {
            return this.AddRange(items, true);
        }

        /// <summary>
        /// Typed bulk add.  CONSISTENCY FIX: this overload previously neither
        /// skipped already-tracked items nor registered new ones in
        /// <see cref="DataHashSet"/>, unlike the untyped overload and
        /// <see cref="Add"/> — leaving items invisible to the dedup/removal index.
        /// </summary>
        public IEnumerable<TEntity> AddRange(IEnumerable<TEntity> items, bool insertReferencedObjects)
        {
            var addRange = items as TEntity[] ?? items.ToArray();
            var itemsByType = addRange.GroupBy(item => item.GetType(), item => item, (type, homoItems) => new { type, homoItems });
            foreach (var homoItems in itemsByType.Select(i => i.homoItems.Where(ii => !this.DataHashSet.Contains(ii))))
            {
                var enumerable = homoItems as TEntity[] ?? homoItems.ToArray();
                if (!enumerable.Any()) continue;
                this.Data.AddRange(enumerable);
                this.DataHashSet.AddRange(enumerable);
                if (insertReferencedObjects) this.OnAddBulkAction?.Invoke(enumerable);
            }
            return addRange;
        }

        /// <summary>Removes a tracked item; no-op for null or untracked items.</summary>
        public override TEntity Remove(TEntity item)
        {
            // BUG FIX: the guard was an inverted copy-paste of Add()'s guard —
            // it returned early exactly when the item WAS tracked, so tracked
            // items could never be removed.  Also purge the hash-set index so
            // the item can be re-added later.
            if ((item == null) || (!this.DataHashSet.Contains(item))) { return item; }
            this.Data.Remove(item);
            this.DataHashSet.Remove(item);
            this.OnRemoveAction?.Invoke(item);
            return item;
        }

        public override TEntity Attach(TEntity item)
        {
            this.Data.Add(item);
            return item;
        }

        public override TEntity Create()
        {
            return Activator.CreateInstance<TEntity>();
        }

        public override TDerivedEntity Create<TDerivedEntity>()
        {
            return Activator.CreateInstance<TDerivedEntity>();
        }

        public override ObservableCollection<TEntity> Local => this.Data.Local;

        Type IQueryable.ElementType => this._query.ElementType;
        Expression IQueryable.Expression => this._query.Expression;
        IQueryProvider IQueryable.Provider => new TestDbAsyncQueryProvider<TEntity>(this._query.Provider);

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return this.Data.GetEnumerator();
        }

        IEnumerator<TEntity> IEnumerable<TEntity>.GetEnumerator()
        {
            return this.Data.GetEnumerator();
        }

        IDbAsyncEnumerator<TEntity> IDbAsyncEnumerable<TEntity>.GetAsyncEnumerator()
        {
            return new TestDbAsyncEnumerator<TEntity>(this.Data.GetEnumerator());
        }

        public static implicit operator DbSet(TestDbSet<TEntity> entry)
        {
            return entry as DbSet<TEntity>;
        }

        #region Implementation of ICollection
        public void CopyTo(Array array, int index)
        {
            ((ICollection) this.Data).CopyTo(array, index);
        }

        public int Count => this.Data.Count();
        public object SyncRoot => ((ICollection) this.Data).SyncRoot;
        public bool IsSynchronized => ((ICollection) this.Data).IsSynchronized;
        #endregion

        #region Implementation of IList
        int IList.Add(object value)
        {
            if (value == null) { return 0; }
            if (value.GetType().IsGenericType)
            {
                // A generic collection was passed: add its elements individually.
                var list = value as IList;
                if (list == null) { return 0; }
                foreach (var item in list) { this.Add(item as TEntity); }
            }
            else
            {
                this.Add(value as TEntity);
            }
            return 0;
        }

        bool IList.Contains(object value)
        {
            return ((IList) this.Data).Contains(value);
        }

        void IList.Clear()
        {
            this.Data.Clear();
        }

        int IList.IndexOf(object value)
        {
            return ((IList) this.Data).IndexOf(value);
        }

        void IList.Insert(int index, object value)
        {
            ((IList) this.Data).Insert(index, value);
        }

        void IList.Remove(object value)
        {
            if (value == null) { return; }
            if (value.GetType().IsGenericType)
            {
                // A generic collection was passed: remove its elements individually.
                var list = value as IList;
                if (list == null) { return; }
                foreach (var item in list) { this.Remove(item as TEntity); }
            }
            else
            {
                this.Remove(value as TEntity);
            }
        }

        void IList.RemoveAt(int index)
        {
            this.Data.RemoveAt(index);
        }

        object IList.this[int index]
        {
            get => ((IList) this.Data)[index];
            set => ((IList) this.Data)[index] = value;
        }

        bool IList.IsReadOnly => ((IList) this.Data).IsReadOnly;
        bool IList.IsFixedSize => ((IList) this.Data).IsFixedSize;
        #endregion

        #region Implementation of INotifyCollectionChanged
        // Forward change notifications from the observable backing collection.
        public event NotifyCollectionChangedEventHandler CollectionChanged
        {
            add => this.Local.CollectionChanged += value;
            remove => this.Local.CollectionChanged -= value;
        }
        #endregion
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

namespace Network.Tests.Tests
{
    using System.Collections.Generic;
    using System.Linq;
    using System.Net;
    using Microsoft.Azure.Management.Network;
    using Microsoft.Azure.Management.Network.Models;
    using Microsoft.Azure.Management.Resources;
    using Microsoft.Azure.Management.Resources.Models;
    using Microsoft.Azure.Test;
    using Microsoft.Rest.ClientRuntime.Azure.TestFramework;
    using Networks.Tests.Helpers;
    using ResourceGroups.Tests;
    using Xunit;
    using Microsoft.Azure.Test.HttpRecorder;

    /// <summary>
    /// Recorded scenario tests for the RouteTables resource provider operations
    /// (CRUD on route tables, route management, and subnet association).
    /// </summary>
    public class RouteTableTests
    {
        public RouteTableTests()
        {
            HttpMockServer.RecordsDirectory = "SessionRecords";
        }

        /// <summary>Create / get / list / delete round trip for a route table with no routes.</summary>
        [Fact(Skip="Disable tests")]
        public void EmptyRouteTableTest()
        {
            var resourceHandler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };
            var networkHandler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };

            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var resourcesClient = ResourcesManagementTestUtilities.GetResourceManagementClientWithHandler(context, resourceHandler);
                var networkManagementClient = NetworkManagementTestUtilities.GetNetworkManagementClientWithHandler(context, networkHandler);

                var location = NetworkManagementTestUtilities.GetResourceLocation(resourcesClient, "Microsoft.Network/routeTables");

                string resourceGroupName = TestUtilities.GenerateName("csmrg");
                resourcesClient.ResourceGroups.CreateOrUpdate(
                    resourceGroupName,
                    new ResourceGroup { Location = location });

                string routeTableName = TestUtilities.GenerateName();

                var routeTable = new RouteTable() { Location = location, };

                // Put RouteTable
                var putRouteTableResponse = networkManagementClient.RouteTables.CreateOrUpdate(
                    resourceGroupName, routeTableName, routeTable);
                Assert.Equal("Succeeded", putRouteTableResponse.ProvisioningState);

                // Get RouteTable and verify it came back empty
                var getRouteTableResponse = networkManagementClient.RouteTables.Get(
                    resourceGroupName, routeTableName);
                Assert.Equal(routeTableName, getRouteTableResponse.Name);
                Assert.False(getRouteTableResponse.Routes.Any());

                // List RouteTable and verify the single entry matches the GET
                var listRouteTableResponse = networkManagementClient.RouteTables.List(resourceGroupName);
                Assert.Single(listRouteTableResponse);
                Assert.Equal(getRouteTableResponse.Name, listRouteTableResponse.First().Name);
                Assert.Equal(getRouteTableResponse.Id, listRouteTableResponse.First().Id);

                // Delete RouteTable
                networkManagementClient.RouteTables.Delete(resourceGroupName, routeTableName);

                // Verify delete
                listRouteTableResponse = networkManagementClient.RouteTables.List(resourceGroupName);
                Assert.Empty(listRouteTableResponse);
            }
        }

        /// <summary>Exercises route add / update / remove within a route table.</summary>
        [Fact(Skip="Disable tests")]
        public void RouteTableApiTest()
        {
            var resourceHandler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };
            var networkHandler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };

            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var resourcesClient = ResourcesManagementTestUtilities.GetResourceManagementClientWithHandler(context, resourceHandler);
                var networkManagementClient = NetworkManagementTestUtilities.GetNetworkManagementClientWithHandler(context, networkHandler);

                var location = NetworkManagementTestUtilities.GetResourceLocation(resourcesClient, "Microsoft.Network/routeTables");

                string resourceGroupName = TestUtilities.GenerateName("csmrg");
                resourcesClient.ResourceGroups.CreateOrUpdate(
                    resourceGroupName,
                    new ResourceGroup { Location = location });

                string routeTableName = TestUtilities.GenerateName();
                string route1Name = TestUtilities.GenerateName();
                string route2Name = TestUtilities.GenerateName();

                var routeTable = new RouteTable() { Location = location, };
                routeTable.Routes = new List<Route>();

                // Add a route pointing at a virtual appliance
                var route1 = new Route()
                {
                    AddressPrefix = "192.168.1.0/24",
                    Name = route1Name,
                    NextHopIpAddress = "23.108.1.1",
                    NextHopType = RouteNextHopType.VirtualAppliance
                };
                routeTable.Routes.Add(route1);

                // Put RouteTable
                var putRouteTableResponse = networkManagementClient.RouteTables.CreateOrUpdate(
                    resourceGroupName, routeTableName, routeTable);
                Assert.Equal("Succeeded", putRouteTableResponse.ProvisioningState);

                // Get RouteTable and verify route1 round-tripped
                var getRouteTableResponse = networkManagementClient.RouteTables.Get(
                    resourceGroupName, routeTableName);
                Assert.Equal(routeTableName, getRouteTableResponse.Name);
                Assert.Equal(1, getRouteTableResponse.Routes.Count);
                Assert.Equal(route1Name, getRouteTableResponse.Routes[0].Name);
                Assert.Equal("192.168.1.0/24", getRouteTableResponse.Routes[0].AddressPrefix);
                Assert.Equal("23.108.1.1", getRouteTableResponse.Routes[0].NextHopIpAddress);
                Assert.Equal(RouteNextHopType.VirtualAppliance, getRouteTableResponse.Routes[0].NextHopType);

                // Add another route (no next-hop IP needed for VnetLocal)
                var route2 = new Route()
                {
                    AddressPrefix = "10.0.1.0/24",
                    Name = route2Name,
                    NextHopType = RouteNextHopType.VnetLocal
                };
                getRouteTableResponse.Routes.Add(route2);

                networkManagementClient.RouteTables.CreateOrUpdate(resourceGroupName, routeTableName, getRouteTableResponse);
                getRouteTableResponse = networkManagementClient.RouteTables.Get(resourceGroupName, routeTableName);
                Assert.Equal(routeTableName, getRouteTableResponse.Name);
                Assert.Equal(2, getRouteTableResponse.Routes.Count);
                Assert.Equal(route2Name, getRouteTableResponse.Routes[1].Name);
                Assert.Equal("10.0.1.0/24", getRouteTableResponse.Routes[1].AddressPrefix);
                Assert.True(string.IsNullOrEmpty(getRouteTableResponse.Routes[1].NextHopIpAddress));
                Assert.Equal(RouteNextHopType.VnetLocal, getRouteTableResponse.Routes[1].NextHopType);

                // Delete the first route and verify only route2 remains
                getRouteTableResponse.Routes.RemoveAt(0);

                networkManagementClient.RouteTables.CreateOrUpdate(resourceGroupName, routeTableName, getRouteTableResponse);
                getRouteTableResponse = networkManagementClient.RouteTables.Get(resourceGroupName, routeTableName);
                Assert.Equal(routeTableName, getRouteTableResponse.Name);
                Assert.Equal(1, getRouteTableResponse.Routes.Count);
                Assert.Equal(route2Name, getRouteTableResponse.Routes[0].Name);
                Assert.Equal("10.0.1.0/24", getRouteTableResponse.Routes[0].AddressPrefix);
                Assert.True(string.IsNullOrEmpty(getRouteTableResponse.Routes[0].NextHopIpAddress));
                Assert.Equal(RouteNextHopType.VnetLocal, getRouteTableResponse.Routes[0].NextHopType);

                // Delete RouteTable
                networkManagementClient.RouteTables.Delete(resourceGroupName, routeTableName);

                // Verify delete
                var listRouteTableResponse = networkManagementClient.RouteTables.List(resourceGroupName);
                Assert.Empty(listRouteTableResponse);
            }
        }

        /// <summary>Associates a route table with a subnet and verifies the back-reference.</summary>
        [Fact(Skip="Disable tests")]
        public void SubnetRouteTableTest()
        {
            var resourceHandler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };
            var networkHandler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };

            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var resourcesClient = ResourcesManagementTestUtilities.GetResourceManagementClientWithHandler(context, resourceHandler);
                var networkManagementClient = NetworkManagementTestUtilities.GetNetworkManagementClientWithHandler(context, networkHandler);

                var location = NetworkManagementTestUtilities.GetResourceLocation(resourcesClient, "Microsoft.Network/routeTables");

                string resourceGroupName = TestUtilities.GenerateName("csmrg");
                resourcesClient.ResourceGroups.CreateOrUpdate(
                    resourceGroupName,
                    new ResourceGroup { Location = location });

                string routeTableName = TestUtilities.GenerateName();
                string route1Name = TestUtilities.GenerateName();

                var routeTable = new RouteTable() { Location = location, };
                routeTable.Routes = new List<Route>();

                var route1 = new Route()
                {
                    AddressPrefix = "192.168.1.0/24",
                    Name = route1Name,
                    NextHopIpAddress = "23.108.1.1",
                    NextHopType = RouteNextHopType.VirtualAppliance
                };
                routeTable.Routes.Add(route1);

                // Put RouteTable
                var putRouteTableResponse = networkManagementClient.RouteTables.CreateOrUpdate(
                    resourceGroupName, routeTableName, routeTable);
                Assert.Equal("Succeeded", putRouteTableResponse.ProvisioningState);

                // Get RouteTable
                var getRouteTableResponse = networkManagementClient.RouteTables.Get(
                    resourceGroupName, routeTableName);

                // Verify that the subnet reference is null before any association exists
                Assert.Null(getRouteTableResponse.Subnets);

                // Create Vnet with subnet and add a route table
                string vnetName = TestUtilities.GenerateName();
                string subnetName = TestUtilities.GenerateName();

                var vnet = new VirtualNetwork()
                {
                    Location = location,
                    AddressSpace = new AddressSpace()
                    {
                        AddressPrefixes = new List<string>() { "10.0.0.0/16", }
                    },
                    DhcpOptions = new DhcpOptions()
                    {
                        DnsServers = new List<string>() { "10.1.1.1", "10.1.2.4" }
                    },
                    Subnets = new List<Subnet>()
                    {
                        new Subnet()
                        {
                            Name = subnetName,
                            AddressPrefix = "10.0.0.0/24",
                            RouteTable = new RouteTable()
                            {
                                Id = getRouteTableResponse.Id,
                            }
                        }
                    }
                };

                var putVnetResponse = networkManagementClient.VirtualNetworks.CreateOrUpdate(resourceGroupName, vnetName, vnet);
                Assert.Equal("Succeeded", putVnetResponse.ProvisioningState);

                var getSubnetResponse = networkManagementClient.Subnets.Get(resourceGroupName, vnetName, subnetName);
                Assert.Equal(getSubnetResponse.RouteTable.Id, getRouteTableResponse.Id);

                // Get RouteTable again and verify the subnet back-reference appeared
                getRouteTableResponse = networkManagementClient.RouteTables.Get(resourceGroupName, routeTableName);
                Assert.Equal(1, getRouteTableResponse.Subnets.Count);
                Assert.Equal(getSubnetResponse.Id, getRouteTableResponse.Subnets[0].Id);
            }
        }
    }
}
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code  must  retain  the  above copyright notice, this
//   list of conditions and the following disclaimer.
//
// - Redistributions in binary form  must  reproduce the  above  copyright  notice,
//   this list of conditions  and  the  following  disclaimer in  the documentation
//   and/or other materials provided with the distribution.
//
// - Neither  the  name  of  the  Outercurve Foundation  nor   the   names  of  its
//   contributors may be used to endorse or  promote  products  derived  from  this
//   software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Windows.Forms;
using System.Threading;

namespace WebsitePanel.Installer.Controls
{
    /// <summary>
    /// Animated progress icon: a small UserControl that cycles through the frames
    /// of an <see cref="ImageList"/> on a background thread to show activity.
    /// </summary>
    [ToolboxItem(true)]
    public class ProgressIcon : System.Windows.Forms.UserControl
    {
        // Background animation thread; null when no animation is running.
        private Thread thread = null;
        // Index (into 'images') of the frame currently painted by OnPaint.
        private int currentFrame = 0;
        // Milliseconds slept between frames in threadFunc.
        private int delayInterval = 50;
        // Extra milliseconds slept after each full loop (0 = no extra pause).
        private int pause = 0;
        // Number of loops to run; 0 means loop forever (see threadFunc).
        private int loopCount = 0;
        // Loops completed so far; reset to 0 by StopAnimation.
        private int currentLoop = 0;
        // First frame index of the animation range.
        private int firstFrame = 0;
        // Last frame index of the animation range.
        // NOTE(review): set to 13 while InitializeComponent loads only 13 images
        // (indices 0..12); threadFunc also clamps against images.Images.Count,
        // so no out-of-range access occurs, but 12 was presumably intended.
        private int lastFrame = 13;
        // Designer-managed frame source.
        private ImageList images;
        private IContainer components;

        /// <summary>Initializes a new instance of the <b>ProgressIcon</b> class.
        /// </summary>
        public ProgressIcon()
        {
            // NOTE(review): this assigns the *static* Control property, disabling
            // cross-thread-call checks for the entire process, not just this
            // control. It is needed because threadFunc calls Refresh() from the
            // background thread; a safer fix would marshal via Invoke/BeginInvoke.
            CheckForIllegalCrossThreadCalls = false;
            InitializeComponent();

            // Enable double buffering and full repaint-on-resize to avoid flicker.
            this.SetStyle(ControlStyles.UserPaint, true);
            this.SetStyle(ControlStyles.AllPaintingInWmPaint, true);
            this.SetStyle(ControlStyles.DoubleBuffer, true);
            this.SetStyle(ControlStyles.ResizeRedraw, true);
        }

        #region Dispose
        /// <summary>Clean up any resources being used.</summary>
        /// <param name="disposing"><see langword="true"/> to release both managed
        /// and unmanaged resources; <see langword="false"/> to release
        /// only unmanaged resources.</param>
        protected override void Dispose( bool disposing )
        {
            if( disposing )
            {
                if( components != null )
                    components.Dispose();
                // NOTE(review): Thread.Abort is obsolete and throws
                // PlatformNotSupportedException on .NET Core/5+; this code is
                // .NET Framework-era. A cooperative stop flag would be safer.
                if( thread != null )
                    thread.Abort();
            }
            base.Dispose( disposing );
        }
        #endregion

        #region Component Designer generated code
        /// <summary>Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(ProgressIcon));
            this.images = new System.Windows.Forms.ImageList(this.components);
            this.SuspendLayout();
            //
            // images
            //
            this.images.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("images.ImageStream")));
            this.images.TransparentColor = System.Drawing.Color.Transparent;
            this.images.Images.SetKeyName(0, "ProgressImage00.bmp");
            this.images.Images.SetKeyName(1, "ProgressImage01.bmp");
            this.images.Images.SetKeyName(2, "ProgressImage02.bmp");
            this.images.Images.SetKeyName(3, "ProgressImage03.bmp");
            this.images.Images.SetKeyName(4, "ProgressImage04.bmp");
            this.images.Images.SetKeyName(5, "ProgressImage05.bmp");
            this.images.Images.SetKeyName(6, "ProgressImage06.bmp");
            this.images.Images.SetKeyName(7, "ProgressImage07.bmp");
            this.images.Images.SetKeyName(8, "ProgressImage08.bmp");
            this.images.Images.SetKeyName(9, "ProgressImage09.bmp");
            this.images.Images.SetKeyName(10, "ProgressImage10.bmp");
            this.images.Images.SetKeyName(11, "ProgressImage11.bmp");
            this.images.Images.SetKeyName(12, "ProgressImage12.bmp");
            //
            // ProgressIcon
            //
            this.Name = "ProgressIcon";
            this.Size = new System.Drawing.Size(30, 30);
            this.ResumeLayout(false);
        }
        #endregion

        /// <summary>Starts animation from the beginning.
        /// Any animation already in progress is stopped first.
        /// </summary>
        public void StartAnimation()
        {
            StopAnimation();
            CheckRange(); // Check the first and the last frames

            thread = new Thread( new ThreadStart( threadFunc ) );
            thread.IsBackground = true; // don't keep the process alive
            thread.Start();
        }

        /// <summary>Stops animation not changing current frame number.
        /// Also resets the completed-loop counter.
        /// </summary>
        public void StopAnimation()
        {
            if( thread != null )
            {
                // NOTE(review): Thread.Abort — see remark in Dispose.
                thread.Abort();
                thread = null;
            }
            currentLoop = 0;
        }

        /// <summary>Displays the specified frame.</summary>
        /// <param name="frame">An index of the image stored in the <see cref="ImageList"/>.
        /// Out-of-range values fall back to frame 0.</param>
        public void ShowFrame2(int frame)
        {
            StopAnimation();
            if( frame >= 0 && frame < images.Images.Count )
                currentFrame = frame;
            else
                currentFrame = 0;
            Refresh();
        }

        /// <summary>Occurs when the control is redrawn.</summary>
        /// <param name="e">A <see cref="PaintEventArgs"/> that contains
        /// the event data.</param>
        /// <remarks>The <b>OnPaint</b> method draws current image from
        /// the <see cref="ImageList"/> if exists; otherwise it draws a
        /// crossed rectangle as a placeholder.</remarks>
        protected override void OnPaint(PaintEventArgs e)
        {
            // Draw a crossed rectangle if there is no frame to display
            if( images == null || currentFrame < 0 || currentFrame >= images.Images.Count )
            {
                if( this.Size.Width == 0 || this.Size.Height == 0 )
                    return;
                Pen pen = new Pen( SystemColors.ControlText );
                e.Graphics.DrawRectangle( pen, 0, 0, this.Size.Width-1, this.Size.Height-1 );
                e.Graphics.DrawLine( pen, 0, 0, this.Size.Width, this.Size.Height );
                e.Graphics.DrawLine( pen, 0, this.Size.Height-1, this.Size.Width-1, 0 );
                pen.Dispose();
            }
            else
            {
                // Draw the current frame, scaled to the control's client size.
                e.Graphics.DrawImage( images.Images[currentFrame], 0, 0, this.Size.Width, this.Size.Height );
            }
        }

        /// <summary>The method to be invoked when the thread begins executing.
        /// Advances through frames [firstFrame..lastFrame], redrawing the control
        /// and sleeping between frames, until aborted or loopCount is reached.
        /// </summary>
        private void threadFunc()
        {
            bool wasPause = false;

            currentFrame = firstFrame;
            while( thread != null && thread.IsAlive )
            {
                Refresh(); // Redraw the current frame (cross-thread; see ctor note)
                wasPause = false;
                if( images != null )
                {
                    currentFrame++;
                    // Wrap when we pass either the configured range end or the
                    // actual image count (whichever comes first).
                    if( currentFrame > lastFrame || currentFrame >= images.Images.Count )
                    {
                        if( pause > 0 ) // Sleep after every loop
                        {
                            Thread.Sleep( pause );
                            wasPause = true;
                        }
                        currentFrame = firstFrame;
                        if( loopCount != 0 ) // 0 is infinitive loop
                        {
                            currentLoop++;
                        }
                    }
                    if( loopCount != 0 && currentLoop >= loopCount )
                    {
                        StopAnimation(); // The loop is completed (aborts this thread)
                    }
                }
                if( !wasPause ) // That prevents summation (pause + delayInterval)
                    Thread.Sleep( delayInterval );
            }
        }

        /// <summary>Check if the last frame is no less than the first one.
        /// Otherwise, swap them.</summary>
        private void CheckRange()
        {
            if( lastFrame < firstFrame )
            {
                int tmp = firstFrame;
                firstFrame = lastFrame;
                lastFrame = tmp;
            }
        }
    }
}
//-----------------------------------------------------------------------
// <copyright file="GraphStageTimersSpec.cs" company="Akka.NET Project">
//     Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
//     Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Threading.Tasks;
using Akka.Actor;
using Akka.Streams.Dsl;
using Akka.Streams.Implementation.Fusing;
using Akka.Streams.Stage;
using Akka.Streams.TestKit;
using Akka.Streams.TestKit.Tests;
using Akka.TestKit;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;

// ReSharper disable InvokeAsExtensionMethod

namespace Akka.Streams.Tests.Dsl
{
    /// <summary>
    /// Verifies the timer facilities of <see cref="TimerGraphStageLogic"/>:
    /// single-shot timers, resubmission, cancellation, repeated timers, and
    /// error propagation from <c>OnTimer</c>.
    /// </summary>
    public class GraphStageTimersSpec : AkkaSpec
    {
        private ActorMaterializer Materializer { get; }

        public GraphStageTimersSpec(ITestOutputHelper helper) : base(helper)
        {
            var settings = ActorMaterializerSettings.Create(Sys);
            Materializer = ActorMaterializer.Create(Sys, settings);
        }

        /// <summary>
        /// Materializes a never-completing source through a <see cref="TestStage"/>
        /// and returns its side channel once the stage has registered its async
        /// callback. Messages Tell'd to the channel are injected into the stage.
        /// </summary>
        private SideChannel SetupIsolatedStage()
        {
            var channel = new SideChannel();
            // Source.Maybe materializes a TaskCompletionSource; completing it via
            // SideChannel.StopStage shuts the stream down.
            var stopPromise =
                Source.Maybe<int>()
                    .Via(new TestStage(TestActor, channel, this))
                    .To(Sink.Ignore<int>())
                    .Run(Materializer);
            channel.StopPromise = stopPromise;
            // Wait until the stage's PreStart has published its async callback.
            AwaitCondition(()=>channel.IsReady);
            return channel;
        }

        [Fact]
        public void GraphStage_timer_support_must_receive_single_shot_timer()
        {
            var driver = SetupIsolatedStage();

            Within(TimeSpan.FromSeconds(2), () =>
            {
                // Tick must arrive no earlier than 500ms and no later than 1s.
                Within(TimeSpan.FromMilliseconds(500), TimeSpan.FromSeconds(1), () =>
                {
                    driver.Tell(TestSingleTimer.Instance);
                    ExpectMsg(new Tick(1));
                });
                // Single-shot: no further ticks.
                ExpectNoMsg(TimeSpan.FromSeconds(1));
            });

            driver.StopStage();
        }

        [Fact]
        public void GraphStage_timer_support_must_resubmit_single_shot_timer()
        {
            var driver = SetupIsolatedStage();

            Within(TimeSpan.FromSeconds(2.5), () =>
            {
                Within(TimeSpan.FromMilliseconds(500), TimeSpan.FromSeconds(1), () =>
                {
                    driver.Tell(TestSingleTimerResubmit.Instance);
                    ExpectMsg(new Tick(1));
                });
                // The stage reschedules itself once from OnTimer (see TestStage.Logic).
                Within(TimeSpan.FromSeconds(1), () => ExpectMsg(new Tick(2)));

                ExpectNoMsg(TimeSpan.FromSeconds(1));
            });

            driver.StopStage();
        }

        [Fact]
        public void GraphStage_timer_support_must_correctly_cancel_a_named_timer()
        {
            var driver = SetupIsolatedStage();

            driver.Tell(TestCancelTimer.Instance);
            // Ack is sent after the 1ms timer has been cancelled and a 500ms
            // replacement scheduled under the same key.
            Within(TimeSpan.FromMilliseconds(500), () => ExpectMsg<TestCancelTimerAck>());
            Within(TimeSpan.FromMilliseconds(300), TimeSpan.FromSeconds(1), () => ExpectMsg(new Tick(1)));

            ExpectNoMsg(TimeSpan.FromSeconds(1));

            driver.StopStage();
        }

        [Fact]
        public void GraphStage_timer_support_must_receive_and_cancel_a_repeated_timer()
        {
            var driver = SetupIsolatedStage();

            driver.Tell(TestRepeatedTimer.Instance);
            // The stage cancels its repeated timer after the 5th tick.
            var seq = ReceiveWhile(TimeSpan.FromSeconds(2), o => (Tick)o);
            seq.Should().HaveCount(5);
            ExpectNoMsg(TimeSpan.FromSeconds(1));

            driver.StopStage();
        }

        [Fact]
        public void GraphStage_timer_support_must_produce_scheduled_ticks_as_expected()
        {
            this.AssertAllStagesStopped(() =>
            {
                var upstream = TestPublisher.CreateProbe<int>(this);
                var downstream = TestSubscriber.CreateProbe<int>(this);

                Source.FromPublisher(upstream)
                    .Via(new TestStage2())
                    .RunWith(Sink.FromSubscriber(downstream), Materializer);

                // TestStage2 pushes its own tick counter and stops after 3 ticks,
                // so only 3 of the 5 requested elements arrive.
                downstream.Request(5);
                downstream.ExpectNext(1, 2, 3);
                downstream.ExpectNoMsg(TimeSpan.FromSeconds(1));

                upstream.SendComplete();
                downstream.ExpectComplete();
            }, Materializer);
        }

        [Fact]
        public void GraphStage_timer_support_must_propagate_error_if_OnTimer_throws_an_Exception()
        {
            this.AssertAllStagesStopped(() =>
            {
                var exception = new TestException("Expected exception to the rule");
                var upstream = TestPublisher.CreateProbe<int>(this);
                var downstream = TestSubscriber.CreateProbe<int>(this);

                Source.FromPublisher(upstream)
                    .Via(new ThrowStage(exception))
                    .RunWith(Sink.FromSubscriber(downstream), Materializer);

                downstream.Request(1);
                // Exceptions thrown from OnTimer must fail the stream.
                downstream.ExpectError().Should().Be(exception);
            }, Materializer);
        }

        #region Test classes

        // Command messages injected into TestStage via the side channel.
        // All are stateless singletons.

        private sealed class TestSingleTimer
        {
            public static readonly TestSingleTimer Instance = new TestSingleTimer();

            private TestSingleTimer()
            {
            }
        }

        private sealed class TestSingleTimerResubmit
        {
            public static readonly TestSingleTimerResubmit Instance = new TestSingleTimerResubmit();

            private TestSingleTimerResubmit()
            {
            }
        }

        private sealed class TestCancelTimer
        {
            public static readonly TestCancelTimer Instance = new TestCancelTimer();

            private TestCancelTimer()
            {
            }
        }

        private sealed class TestCancelTimerAck
        {
            public static readonly TestCancelTimerAck Instance = new TestCancelTimerAck();

            private TestCancelTimerAck()
            {
            }
        }

        private sealed class TestRepeatedTimer
        {
            public static readonly TestRepeatedTimer Instance = new TestRepeatedTimer();

            private TestRepeatedTimer()
            {
            }
        }

        /// <summary>Sequence-numbered tick message with value equality.</summary>
        private sealed class Tick
        {
            public int N { get; }

            public Tick(int n)
            {
                N = n;
            }

            public override bool Equals(object obj)
            {
                var t = obj as Tick;
                return t != null && Equals(t);
            }

            private bool Equals(Tick other) => N == other.N;

            public override int GetHashCode() => N;
        }

        /// <summary>
        /// Out-of-band channel into a running stage: the stage publishes its
        /// async callback here (volatile — written by the stage thread, read by
        /// the test thread) and the test injects messages through it.
        /// </summary>
        private sealed class SideChannel
        {
            public volatile Action<object> AsyncCallback;
            public volatile TaskCompletionSource<int> StopPromise;

            public bool IsReady => AsyncCallback != null;
            public void Tell(object message) => AsyncCallback(message);
            // Completing the Source.Maybe promise terminates the stream.
            public void StopStage() => StopPromise.TrySetResult(-1);
        }

        /// <summary>
        /// Pass-through stage whose logic reacts to the Test* command messages by
        /// scheduling/cancelling named timers and reports each tick to a probe.
        /// </summary>
        private sealed class TestStage : SimpleLinearGraphStage<int>
        {
            private sealed class Logic : TimerGraphStageLogic
            {
                private const string TestSingleTimerKey = "TestSingleTimer";
                private const string TestSingleTimerResubmitKey = "TestSingleTimerResubmit";
                private const string TestCancelTimerKey = "TestCancelTimer";
                private const string TestRepeatedTimerKey = "TestRepeatedTimer";
                private readonly TestStage _stage;
                private int _tickCount = 1;

                public Logic(TestStage stage) : base(stage.Shape)
                {
                    _stage = stage;

                    SetHandler(stage.Inlet, onPush: () => Push(stage.Outlet, Grab(stage.Inlet)));

                    SetHandler(stage.Outlet, onPull: () => Pull(stage.Inlet));
                }

                // Publish the async callback so the test can inject messages.
                public override void PreStart()
                    => _stage._sideChanngel.AsyncCallback = GetAsyncCallback<object>(OnTestEvent);

                private void OnTestEvent(object message)
                {
                    message.Match()
                        .With<TestSingleTimer>(() => ScheduleOnce(TestSingleTimerKey, Dilated(500)))
                        .With<TestSingleTimerResubmit>(() => ScheduleOnce(TestSingleTimerResubmitKey, Dilated(500)))
                        .With<TestCancelTimer>(() =>
                        {
                            ScheduleOnce(TestCancelTimerKey, Dilated(1));
                            // Likely in mailbox but we cannot guarantee
                            CancelTimer(TestCancelTimerKey);
                            _stage._probe.Tell(TestCancelTimerAck.Instance);
                            ScheduleOnce(TestCancelTimerKey, Dilated(500));
                        })
                        .With<TestRepeatedTimer>(() => ScheduleRepeatedly(TestRepeatedTimerKey, Dilated(100)));
                }

                // Scale the delay by the TestKit's time factor (for slow CI hosts).
                private TimeSpan Dilated(int milliseconds)
                    => _stage._testKit.Dilated(TimeSpan.FromMilliseconds(milliseconds));

                protected internal override void OnTimer(object timerKey)
                {
                    var tick = new Tick(_tickCount++);
                    _stage._probe.Tell(tick);

                    // Resubmit exactly once; stop the repeated timer after 5 ticks.
                    if (timerKey.Equals(TestSingleTimerResubmitKey) && tick.N == 1)
                        ScheduleOnce(TestSingleTimerResubmitKey, Dilated(500));
                    else if (timerKey.Equals(TestRepeatedTimerKey) && tick.N == 5)
                        CancelTimer(TestRepeatedTimerKey);
                }
            }

            private readonly IActorRef _probe;
            // NOTE(review): field name is a pre-existing typo ("Channgel") — kept
            // to avoid touching code in a documentation pass.
            private readonly SideChannel _sideChanngel;
            private readonly TestKitBase _testKit;

            public TestStage(IActorRef probe, SideChannel sideChanngel, TestKitBase testKit)
            {
                _probe = probe;
                _sideChanngel = sideChanngel;
                _testKit = testKit;
            }

            protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);
        }

        /// <summary>
        /// Stage that ignores upstream and instead emits its own tick counter
        /// (1, 2, 3) from a repeated timer, cancelling after the third tick.
        /// </summary>
        private sealed class TestStage2 : SimpleLinearGraphStage<int>
        {
            private sealed class Logic : TimerGraphStageLogic
            {
                private const string TimerKey = "tick";
                private readonly TestStage2 _stage;
                private int _tickCount;

                public Logic(TestStage2 stage) : base(stage.Shape)
                {
                    _stage = stage;

                    SetHandler(stage.Inlet, onPush: DoNothing,
                        onUpstreamFinish: CompleteStage,
                        onUpstreamFailure: FailStage);

                    SetHandler(stage.Outlet, onPull: DoNothing, onDownstreamFinish: CompleteStage);
                }

                public override void PreStart() => ScheduleRepeatedly(TimerKey, TimeSpan.FromMilliseconds(100));

                protected internal override void OnTimer(object timerKey)
                {
                    _tickCount++;
                    // Only push when downstream has demand; ticks without demand
                    // are counted but dropped.
                    if(IsAvailable(_stage.Outlet))
                        Push(_stage.Outlet, _tickCount);
                    if(_tickCount == 3)
                        CancelTimer(TimerKey);
                }
            }

            protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);
        }

        /// <summary>Stage whose timer handler throws, to test failure propagation.</summary>
        private sealed class ThrowStage : SimpleLinearGraphStage<int>
        {
            private sealed class Logic : TimerGraphStageLogic
            {
                private readonly ThrowStage _stage;

                public Logic(ThrowStage stage) : base(stage.Shape)
                {
                    _stage = stage;

                    SetHandler(stage.Outlet, onPull: () => Pull(stage.Inlet));

                    SetHandler(stage.Inlet, onPush: DoNothing);
                }

                public override void PreStart() => ScheduleOnce("tick", TimeSpan.FromMilliseconds(100));

                protected internal override void OnTimer(object timerKey)
                {
                    // Intentionally fail: the spec asserts this error reaches downstream.
                    throw _stage._exception;
                }
            }

            private readonly Exception _exception;

            public ThrowStage(Exception exception)
            {
                _exception = exception;
            }

            protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Text;

namespace ICSimulator
{
    /// <summary>
    /// A buffered-flit holder used by Router_AFC's per-channel MinHeaps.
    /// Ordering delegates to the Oldest-First flit ranking so the heap head is
    /// always the highest-priority flit.
    /// </summary>
    public class AFCBufferSlot : IComparable
    {
        Flit m_f;

        // The flit currently held in this slot (slots are pooled and reused).
        public Flit flit { get { return m_f; } set { m_f = value; } }

        public AFCBufferSlot(Flit f)
        {
            m_f = f;
        }

        /// <summary>Compares by flit rank; smaller means higher priority.</summary>
        /// <exception cref="ArgumentException">If compared to a non-AFCBufferSlot.</exception>
        public int CompareTo(object o)
        {
            if (o is AFCBufferSlot)
                return Router_Flit_OldestFirst._rank(m_f, (o as AFCBufferSlot).m_f);
            else
                throw new ArgumentException("bad comparison");
        }
    }

    /// <summary>
    /// Link-utilization estimator: a sliding-window average mixed into an EWMA.
    /// Window size and EWMA history weight come from Config.
    /// </summary>
    public class AFCUtilAvg
    {
        double m_avg;         // EWMA of the window averages
        double m_window_sum;  // running sum of samples currently in the window
        double[] m_window;    // circular sample buffer
        int m_window_ptr;     // next write position in the circular buffer

        public AFCUtilAvg()
        {
            m_window = new double[Config.afc_avg_window];
            m_window_ptr = 0;
            m_window_sum = 0;
            m_avg = 0;
        }

        /// <summary>Adds one utilization sample and updates the running average.</summary>
        public void Add(double util)
        {
            // add new sample to window and update sum (evicting the oldest)
            m_window_sum -= m_window[m_window_ptr];
            m_window[m_window_ptr] = util;
            m_window_sum += util;
            m_window_ptr = (m_window_ptr + 1) % Config.afc_avg_window;

            // mix window-average into EWMA
            m_avg = Config.afc_ewma_history * m_avg +
                (1 - Config.afc_ewma_history) * (m_window_sum / Config.afc_avg_window);
        }

        public double Avg { get { return m_avg; } }
    }

    /// <summary>
    /// Adaptive Flow Control router: switches between a bufferless (deflection)
    /// mode and a buffered (heap-arbitrated) mode based on smoothed link
    /// utilization, with gossip-induced switching when downstream buffers fill.
    /// Physical channels are indexed 0-3 (network) and 4 (local inject/eject).
    /// </summary>
    public class Router_AFC : Router
    {
        // injectSlot is from Node; holds at most one flit awaiting injection
        // while in bufferless mode.
        protected Flit m_injectSlot;

        // buffers, indexed by physical channel and virtual network
        protected MinHeap<AFCBufferSlot>[,] m_buf;
        int m_buf_occupancy;

        // buffers active? (true = buffered mode, false = bufferless/deflection)
        protected bool m_buffered;

        public Router_AFC(Coord myCoord)
            : base(myCoord)
        {
            m_injectSlot = null;
            // 5 PCs: four network directions plus the local port.
            m_buf = new MinHeap<AFCBufferSlot>[5, Config.afc_vnets];
            for (int pc = 0; pc < 5; pc++)
                for (int i = 0; i < Config.afc_vnets; i++)
                    m_buf[pc, i] = new MinHeap<AFCBufferSlot>();
            m_buffered = false;
            m_buf_occupancy = 0;
        }

        /// <summary>Neighbor router in the given direction, or null at an edge.</summary>
        protected Router_AFC getNeigh(int dir)
        {
            return neigh[dir] as Router_AFC;
        }

        // accept one ejected flit into rxbuf
        protected void acceptFlit(Flit f)
        {
            statsEjectFlit(f);
            if (f.packet.nrOfArrivedFlits + 1 == f.packet.nrOfFlits)
                statsEjectPacket(f.packet);
            m_n.receiveFlit(f);
        }

        /// <summary>
        /// Bufferless-mode ejection: removes and returns the highest-ranked
        /// locally-destined flit from the input links, or null if none.
        /// </summary>
        Flit ejectLocal()
        {
            // eject locally-destined flit (highest-ranked, if multiple)
            Flit ret = null;

            int flitsTryToEject = 0;
            for (int dir = 0; dir < 4; dir++)
                if (linkIn[dir] != null && linkIn[dir].Out != null &&
                    linkIn[dir].Out.dest.ID == ID)
                    flitsTryToEject ++;
            Simulator.stats.flitsTryToEject[flitsTryToEject].Add();

            int bestDir = -1;
            for (int dir = 0; dir < 4; dir++)
                if (linkIn[dir] != null && linkIn[dir].Out != null &&
                    linkIn[dir].Out.state != Flit.State.Placeholder &&
                    linkIn[dir].Out.dest.ID == ID &&
                    (ret == null || rank(linkIn[dir].Out, ret) < 0))
                {
                    ret = linkIn[dir].Out;
                    bestDir = dir;
                }
            if (bestDir != -1) linkIn[bestDir].Out = null;

            return ret;
        }

        // keep these as member vars so we don't have to allocate on every step
        // (why can't we have arrays on the stack like in C?)
        Flit[] input = new Flit[4]; // keep this as a member var so we don't
        AFCBufferSlot[] requesters = new AFCBufferSlot[5];
        int[] requester_dir = new int[5];

        // Free-list of slot objects, recycled to avoid per-cycle allocation.
        Queue<AFCBufferSlot> m_freeAFCSlots = new Queue<AFCBufferSlot>();

        // Obtain a slot for f, reusing a pooled one when available.
        AFCBufferSlot getFreeBufferSlot(Flit f)
        {
            if (m_freeAFCSlots.Count > 0)
            {
                AFCBufferSlot s = m_freeAFCSlots.Dequeue();
                s.flit = f;
                return s;
            }
            else
                return new AFCBufferSlot(f);
        }
        void returnFreeBufferSlot(AFCBufferSlot s)
        {
            m_freeAFCSlots.Enqueue(s);
        }

        void switchBufferless()
        {
            m_buffered = false;
        }

        void switchBuffered()
        {
            m_buffered = true;
            // Drain the single bufferless inject slot into the buffers so the
            // pending flit is not stranded across the mode switch.
            if (m_injectSlot != null)
            {
                InjectFlit(m_injectSlot);
                m_injectSlot = null;
            }
        }

        AFCUtilAvg m_util_avg = new AFCUtilAvg();

        /// <summary>
        /// One router cycle: update the utilization average, decide the mode
        /// (bufferless vs. buffered, possibly forced or gossip-induced), then
        /// route flits under the active mode.
        /// </summary>
        protected override void _doStep()
        {
            // Sample current input-link utilization.
            int flit_count = 0;
            for (int dir = 0; dir < 4; dir++)
                if (linkIn[dir] != null && linkIn[dir].Out != null)
                    flit_count++;
            m_util_avg.Add((double)flit_count / neighbors);

            Simulator.stats.afc_avg.Add(m_util_avg.Avg);
            Simulator.stats.afc_avg_bysrc[ID].Add(m_util_avg.Avg);

            bool old_status = m_buffered;
            bool new_status = old_status;
            bool gossip_induced = false;

            if (Config.afc_force)
            {
                new_status = Config.afc_force_buffered;
            }
            else
            {
                // Hysteresis: switch up above one threshold, down below another,
                // and only back to bufferless once the buffers have drained.
                if (!m_buffered && (m_util_avg.Avg > Config.afc_buf_threshold))
                    new_status = true;

                if (m_buffered && (m_util_avg.Avg < Config.afc_bless_threshold) &&
                        m_buf_occupancy == 0)
                    new_status = false;

                // check at least one free slot in downstream routers; if not, gossip-induced switch
                for (int n = 0; n < 4; n++)
                {
                    Router_AFC nr = getNeigh(n);
                    if (nr == null) continue;
                    int oppDir = (n + 2) % 4;
                    for (int vnet = 0; vnet < Config.afc_vnets; vnet++)
                    {
                        int occupancy = nr.m_buf[oppDir, vnet].Count;
                        if ((capacity(vnet) - occupancy) < 2)
                        {
                            gossip_induced = true;
                            break;
                        }
                    }
                }

                if (gossip_induced) new_status = true;
            }

            // perform switching and stats accumulation
            if (old_status && !new_status)
            {
                switchBufferless();
                Simulator.stats.afc_switch.Add();
                Simulator.stats.afc_switch_bless.Add();
                Simulator.stats.afc_switch_bysrc[ID].Add();
                Simulator.stats.afc_switch_bless_bysrc[ID].Add();
            }
            if (!old_status && new_status)
            {
                switchBuffered();
                Simulator.stats.afc_switch.Add();
                Simulator.stats.afc_switch_buf.Add();
                Simulator.stats.afc_switch_bysrc[ID].Add();
                Simulator.stats.afc_switch_buf_bysrc[ID].Add();
            }

            if (m_buffered)
            {
                Simulator.stats.afc_buffered.Add();
                Simulator.stats.afc_buffered_bysrc[ID].Add();
                if (gossip_induced)
                {
                    Simulator.stats.afc_gossip.Add();
                    Simulator.stats.afc_gossip_bysrc[ID].Add();
                }
            }
            else
            {
                Simulator.stats.afc_bless.Add();
                Simulator.stats.afc_bless_bysrc[ID].Add();
            }

            if (m_buffered)
            {
                // ---- Buffered mode: enqueue inputs, then heap-based arbitration ----
                Simulator.stats.afc_buf_enabled.Add();
                Simulator.stats.afc_buf_enabled_bysrc[ID].Add();
                Simulator.stats.afc_buf_occupancy.Add(m_buf_occupancy);
                Simulator.stats.afc_buf_occupancy_bysrc[ID].Add(m_buf_occupancy);

                // grab inputs into buffers
                for (int dir = 0; dir < 4; dir++)
                {
                    if (linkIn[dir] != null && linkIn[dir].Out != null)
                    {
                        Flit f = linkIn[dir].Out;
                        linkIn[dir].Out = null;
                        AFCBufferSlot slot = getFreeBufferSlot(f);
                        f.enterBuffer = Simulator.CurrentRound;
                        m_buf[dir, f.packet.getClass()].Enqueue(slot);
                        m_buf_occupancy++;

                        Simulator.stats.afc_buf_write.Add();
                        Simulator.stats.afc_buf_write_bysrc[ID].Add();
                    }
                }

                // perform arbitration: (i) collect heads of each virtual-net
                // heap (which represents many VCs) to obtain a single requester
                // per physical channel; (ii) request outputs among these
                // requesters based on DOR; (iii) select a single winner
                // per output

                for (int i = 0; i < 5; i++)
                {
                    requesters[i] = null;
                    requester_dir[i] = -1;
                }

                // find the highest-priority vnet head for each input PC
                for (int pc = 0; pc < 5; pc++)
                    for (int vnet = 0; vnet < Config.afc_vnets; vnet++)
                        if (m_buf[pc, vnet].Count > 0)
                        {
                            AFCBufferSlot top = m_buf[pc, vnet].Peek();
                            PreferredDirection pd = determineDirection(top.flit, coord);
                            // Dimension-order routing: X first, then Y.
                            int outdir = (pd.xDir != Simulator.DIR_NONE) ?
                                pd.xDir : pd.yDir;
                            if (outdir == Simulator.DIR_NONE)
                                outdir = 4; // local ejection

                            // skip if (i) not local ejection and (ii)
                            // destination router is buffered and (iii)
                            // no credits left to destination router
                            if (outdir != 4)
                            {
                                Router_AFC nrouter = (Router_AFC)neigh[outdir];
                                int ndir = (outdir + 2) % 4;
                                if (nrouter.m_buf[ndir, vnet].Count >= capacity(vnet) &&
                                        nrouter.m_buffered)
                                    continue;
                            }

                            // otherwise, contend for top requester from this
                            // physical channel
                            if (requesters[pc] == null ||
                                    top.CompareTo(requesters[pc]) < 0)
                            {
                                requesters[pc] = top;
                                requester_dir[pc] = outdir;
                            }
                        }

                // find the highest-priority requester for each output, and pop
                // it from its heap
                for (int outdir = 0; outdir < 5; outdir++)
                {
                    AFCBufferSlot top = null;
                    AFCBufferSlot top2 = null;
                    int flitsTryToEject = 0;
                    int top_indir = -1;
                    int top_indir2 = -1;
                    for (int req = 0; req < 5; req++)
                        if (requesters[req] != null &&
                                requester_dir[req] == outdir)
                        {
                            if (outdir == 4)
                                flitsTryToEject ++;
                            if (top == null ||
                                    requesters[req].CompareTo(top) < 0)
                            {
                                top = requesters[req];
                                top_indir = req;
                            }
                        }
                    if (outdir == 4)
                        Simulator.stats.flitsTryToEject[flitsTryToEject].Add();

                    // Optionally allow a second ejection winner per cycle.
                    if (Config.meshEjectTrial == 2 && outdir == 4 && top_indir != -1) // ejectTwice
                        for (int req = 0; req < 5; req ++)
                            if (requesters[req] != null &&
                                    requester_dir[req] == outdir && req != top_indir)
                                if (top2 == null)
                                {
                                    top2 = requesters[req];
                                    top_indir2 = req;
                                }
                    if (top_indir != -1 && top_indir2 != -1)
                    {
                        if (top.flit.packet == top2.flit.packet)
                            Simulator.stats.ejectsFromSamePacket.Add(1);
                        else
                            Simulator.stats.ejectsFromSamePacket.Add(0);
                    }

                    if (top_indir != -1)
                    {
                        m_buf[top_indir, top.flit.packet.getClass()].Dequeue();
                        // Same-round dequeue counts as a buffer bypass.
                        if (top.flit.enterBuffer == Simulator.CurrentRound)
                            Simulator.stats.afc_bufferBypass.Add(1);
                        Simulator.stats.afc_buf_read.Add();
                        Simulator.stats.afc_buf_read_bysrc[ID].Add();
                        Simulator.stats.afc_xbar.Add();
                        Simulator.stats.afc_xbar_bysrc[ID].Add();

                        if (top_indir == 4)
                            statsInjectFlit(top.flit);

                        // propagate to next router (or eject)
                        if (outdir == 4)
                            acceptFlit(top.flit);
                        else
                            linkOut[outdir].In = top.flit;

                        returnFreeBufferSlot(top);
                        m_buf_occupancy--;
                    }
                    if (outdir == 4 && top_indir2 != -1)
                    {
                        m_buf[top_indir2, top2.flit.packet.getClass()].Dequeue();
                        if (top2.flit.enterBuffer == Simulator.CurrentRound)
                            Simulator.stats.afc_bufferBypass.Add(1);
                        acceptFlit(top2.flit);
                        // NOTE(review): unlike the first winner, top2's slot is not
                        // returned to the pool and m_buf_occupancy is not decremented
                        // here — verify whether that is intentional.
                    }
                }
            }
            else
            {
                // ---- Bufferless (deflection) mode ----
                for (int i = 0; i < Config.meshEjectTrial; i++)
                {
                    Flit eject = ejectLocal();
                    if (eject != null)
                        acceptFlit(eject);
                }

                for (int i = 0; i < 4; i++) input[i] = null;

                // grab inputs into a local array so we can sort
                int c = 0;
                for (int dir = 0; dir < 4; dir++)
                    if (linkIn[dir] != null && linkIn[dir].Out != null)
                    {
                        input[c++] = linkIn[dir].Out;
                        linkIn[dir].Out.inDir = dir;
                        linkIn[dir].Out = null;
                    }

                // sometimes network-meddling such as flit-injection can put unexpected
                // things in outlinks...
                int outCount = 0;
                for (int dir = 0; dir < 4; dir++)
                    if (linkOut[dir] != null && linkOut[dir].In != null)
                        outCount++;

                bool wantToInject = m_injectSlot != null;
                bool canInject = (c + outCount) < neighbors;
                bool starved = wantToInject && !canInject;

                if (starved)
                {
                    Flit starvedFlit = m_injectSlot;
                    Simulator.controller.reportStarve(coord.ID);
                    statsStarve(starvedFlit);
                }
                if (canInject && wantToInject)
                {
                    Flit inj = null;
                    if (m_injectSlot != null)
                    {
                        inj = m_injectSlot;
                        m_injectSlot = null;
                    }
                    else
                        throw new Exception("trying to inject a null flit");

                    input[c++] = inj;

                    statsInjectFlit(inj);
                }

                // inline bubble sort is faster for this size than Array.Sort()
                // sort input[] by descending priority. rank(a,b) < 0 iff a has higher priority.
                for (int i = 0; i < 4; i++)
                    for (int j = i + 1; j < 4; j++)
                        if (input[j] != null &&
                            (input[i] == null ||
                             rank(input[j], input[i]) < 0))
                        {
                            Flit t = input[i];
                            input[i] = input[j];
                            input[j] = t;
                        }

                // assign outputs in priority order; losers get deflected
                for (int i = 0; i < 4 && input[i] != null; i++)
                {
                    PreferredDirection pd = determineDirection(input[i], coord);
                    int outDir = -1;

                    if (pd.xDir != Simulator.DIR_NONE &&
                        linkOut[pd.xDir].In == null)
                    {
                        linkOut[pd.xDir].In = input[i];
                        outDir = pd.xDir;
                    }
                    else if (pd.yDir != Simulator.DIR_NONE &&
                             linkOut[pd.yDir].In == null)
                    {
                        linkOut[pd.yDir].In = input[i];
                        outDir = pd.yDir;
                    }
                    // deflect!
                    else
                    {
                        input[i].Deflected = true;
                        int dir = 0;
                        if (Config.randomize_defl) dir = Simulator.rand.Next(4); // randomize deflection dir (so no bias)
                        for (int count = 0; count < 4; count++, dir = (dir + 1) % 4)
                            if (linkOut[dir] != null && linkOut[dir].In == null)
                            {
                                linkOut[dir].In = input[i];
                                outDir = dir;
                                break;
                            }

                        if (outDir == -1) throw new Exception(
                                String.Format("Ran out of outlinks in arbitration at node {0} on input {1} cycle {2} flit {3} c {4} neighbors {5} outcount {6}", coord, i, Simulator.CurrentRound, input[i], c, neighbors, outCount));
                    }
                }
            }
        }

        /// <summary>
        /// Whether a flit can be injected this cycle: buffered mode needs a free
        /// local-port buffer slot; bufferless mode needs the inject slot empty.
        /// </summary>
        public override bool canInjectFlit(Flit f)
        {
            int cl = f.packet.getClass();

            if (m_buffered)
                return m_buf[4, cl].Count < capacity(cl);
            else
                return m_injectSlot == null;
        }

        /// <summary>
        /// Injects a flit from the local node: into the local-port buffer heap
        /// (buffered mode) or the single inject slot (bufferless mode).
        /// </summary>
        /// <exception cref="Exception">If the inject slot is already occupied.</exception>
        public override void InjectFlit(Flit f)
        {
            Simulator.stats.afc_vnet[f.packet.getClass()].Add();

            if (m_buffered)
            {
                AFCBufferSlot slot = getFreeBufferSlot(f);
                f.enterBuffer = Simulator.CurrentRound;
                m_buf[4, f.packet.getClass()].Enqueue(slot);
                m_buf_occupancy++;

                Simulator.stats.afc_buf_write.Add();
                Simulator.stats.afc_buf_write_bysrc[ID].Add();
            }
            else
            {
                if (m_injectSlot != null)
                    throw new Exception("Trying to inject twice in one cycle");

                m_injectSlot = f;
            }
        }

        // Per-vnet buffer capacity (currently a single shared config value).
        int capacity(int cl)
        {
            // in the future, we might size each virtual network differently; for now,
            // we use just one virtual network (since there is no receiver backpressure)
            return Config.afc_buf_per_vnet;
        }

        public override void flush()
        {
            // Drop any pending bufferless injection.
            m_injectSlot = null;
        }

        protected virtual bool needFlush(Flit f) { return false; }
    }
}
using System; using System.Diagnostics.CodeAnalysis; using System.IO; using System.Threading; using System.Threading.Tasks; namespace THNETII.Common.IO { /// <summary> /// A wrapper Stream implementation around another <see cref="Stream"/> instance that supports automatic /// copying of read and written data into a secondary Copy-Stream. /// </summary> /// <remarks> /// Usage of this type is useful in situations where a non-seekable stream is consumed, but applications need to keep a record /// of the processed data. /// </remarks> public class CopyIOStream : Stream { /// <summary> /// Creates a new stream wrapper around the specified base stream, writing data to the copy stream on every read operation. /// </summary> /// <param name="baseStream">The origin stream to wrap. Must not be <see langword="null"/>.</param> /// <param name="copyStream">The copy stream to which data from the base stream will be written on read operations.</param> /// <returns>A new initialized <see cref="CopyIOStream"/> instance that wraps around the specified base stream.</returns> /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception> public static CopyIOStream CreateReadCopy(Stream baseStream, Stream copyStream) => CreateReadCopy(baseStream, copyStream, closeStreams: true); /// <summary> /// Creates a new stream wrapper around the specified base stream, writing data to the copy stream on every read operation. /// </summary> /// <param name="baseStream">The origin stream to wrap. 
Must not be <see langword="null"/>.</param>
        /// <param name="copyStream">The copy stream to which data from the base stream will be written on read operations.</param>
        /// <param name="closeStreams">A boolean value that indicates whether to close the base- and copy-stream when the wrapper is closed.</param>
        /// <returns>A new initialized <see cref="CopyIOStream"/> instance that wraps around the specified base stream.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        public static CopyIOStream CreateReadCopy(Stream baseStream, Stream copyStream, bool closeStreams) =>
            CreateReadCopy(baseStream, copyStream, closeStreams, closeStreams);

        private static CopyIOStream CreateReadCopy(Stream baseStream, Stream copyStream, bool closeBaseStream, bool closeCopyStream) =>
            new CopyIOStream(baseStream, copyStream, null, closeBaseStream, closeCopyStream, false);

        /// <summary>
        /// Creates a new stream wrapper around the specified base stream, writing data to the copy stream on every write operation.
        /// </summary>
        /// <param name="baseStream">The origin stream to wrap. Must not be <see langword="null"/>.</param>
        /// <param name="copyStream">The copy stream to which data from the base stream will be written on write operations.</param>
        /// <returns>A new initialized <see cref="CopyIOStream"/> instance that wraps around the specified base stream.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        public static CopyIOStream CreateWriteCopy(Stream baseStream, Stream copyStream) =>
            CreateWriteCopy(baseStream, copyStream, closeStreams: true);

        /// <summary>
        /// Creates a new stream wrapper around the specified base stream, writing data to the copy stream on every write operation.
        /// </summary>
        /// <param name="baseStream">The origin stream to wrap. Must not be <see langword="null"/>.</param>
        /// <param name="copyStream">The copy stream to which data from the base stream will be written on write operations.</param>
        /// <param name="closeStreams">A boolean value that indicates whether to close the base- and copy-stream when the wrapper is closed.</param>
        /// <returns>A new initialized <see cref="CopyIOStream"/> instance that wraps around the specified base stream.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        public static CopyIOStream CreateWriteCopy(Stream baseStream, Stream copyStream, bool closeStreams) =>
            CreateWriteCopy(baseStream, copyStream, closeStreams, closeStreams);

        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        private static CopyIOStream CreateWriteCopy(Stream baseStream, Stream copyStream, bool closeBaseStream, bool closeCopyStream) =>
            new CopyIOStream(baseStream, null, copyStream, closeBaseStream, false, closeCopyStream);

        /// <summary>
        /// Creates a new stream wrapper around the specified base stream, writing data to either the read-copy stream or write-copy stream on every IO operation.
        /// </summary>
        /// <param name="baseStream">The origin stream to wrap. Must not be <see langword="null"/>.</param>
        /// <param name="readCopy">The copy stream to which data from the base stream will be written on read operations.</param>
        /// <param name="writeCopy">The copy stream to which data from the base stream will be written on write operations.</param>
        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        public CopyIOStream(Stream baseStream, Stream readCopy, Stream writeCopy) :
            this(baseStream, readCopy, writeCopy, closeStreams: true)
        { }

        /// <summary>
        /// Creates a new stream wrapper around the specified base stream, writing data to either the read-copy stream or write-copy stream on every IO operation.
        /// </summary>
        /// <param name="baseStream">The origin stream to wrap. Must not be <see langword="null"/>.</param>
        /// <param name="readCopy">The copy stream to which data from the base stream will be written on read operations.</param>
        /// <param name="writeCopy">The copy stream to which data from the base stream will be written on write operations.</param>
        /// <param name="closeStreams">A boolean value that indicates whether to close the base- and copy-streams when the wrapper is closed.</param>
        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        public CopyIOStream(Stream baseStream, Stream readCopy, Stream writeCopy, bool closeStreams) :
            this(baseStream, readCopy, writeCopy, closeStreams, closeStreams, closeStreams)
        { }

        /// <summary>
        /// Creates a new stream wrapper around the specified base stream, writing data to either the read-copy stream or write-copy stream on every IO operation.
        /// </summary>
        /// <param name="baseStream">The origin stream to wrap. Must not be <see langword="null"/>.</param>
        /// <param name="readCopy">The copy stream to which data from the base stream will be written on read operations.</param>
        /// <param name="writeCopy">The copy stream to which data from the base stream will be written on write operations.</param>
        /// <param name="closeBaseStream">A boolean value that indicates whether to close the base stream when the wrapper is closed.</param>
        /// <param name="closeCopyStreams">A boolean value that indicates whether to close the copy streams when the wrapper is closed.</param>
        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        public CopyIOStream(Stream baseStream, Stream readCopy, Stream writeCopy, bool closeBaseStream, bool closeCopyStreams) :
            this(baseStream, readCopy, writeCopy, closeBaseStream, closeCopyStreams, closeCopyStreams)
        { }

        /// <summary>
        /// Creates a new stream wrapper around the specified base stream, writing data to either the read-copy stream or write-copy stream on every IO operation.
        /// </summary>
        /// <param name="baseStream">The origin stream to wrap. Must not be <see langword="null"/>.</param>
        /// <param name="readCopy">The copy stream to which data from the base stream will be written on read operations.</param>
        /// <param name="writeCopy">The copy stream to which data from the base stream will be written on write operations.</param>
        /// <param name="closeBaseStream">A boolean value that indicates whether to close the base stream when the wrapper is closed.</param>
        /// <param name="closeReadCopy">A boolean value that indicates whether to close the read-copy stream when the wrapper is closed.</param>
        /// <param name="closeWriteCopy">A boolean value that indicates whether to close the write-copy stream when the wrapper is closed.</param>
        /// <exception cref="ArgumentNullException"><paramref name="baseStream"/> is <see langword="null"/>.</exception>
        public CopyIOStream(Stream baseStream, Stream? readCopy, Stream? writeCopy, bool closeBaseStream, bool closeReadCopy, bool closeWriteCopy)
        {
            BaseStream = baseStream ?? throw new ArgumentNullException(nameof(baseStream));
            CloseBaseStream = closeBaseStream;
            ReadCopy = readCopy;
            // A close flag is only meaningful when the corresponding copy stream exists.
            CloseReadCopy = !(readCopy is null) && closeReadCopy;
            WriteCopy = writeCopy;
            CloseWriteCopy = !(writeCopy is null) && closeWriteCopy;
        }

        /// <summary>
        /// Gets the origin stream on which IO operations are performed when IO operations on this instance are called.
        /// </summary>
        /// <value>The non-<see langword="null"/> <see cref="Stream"/> instance that was passed to the <see cref="CopyIOStream"/> constructor when this instance was created.</value>
        public Stream BaseStream { get; }

        /// <summary>
        /// Gets the stream to which the wrapper writes data that is read from the origin stream.
        /// </summary>
        /// <value>
        /// The <see cref="Stream"/> instance that was passed to the <see cref="CopyIOStream"/> constructor when this instance was created;
        /// or <see langword="null"/> if the wrapper does not copy data on read operations.
        /// </value>
        public Stream? ReadCopy { get; }

        /// <summary>
        /// Gets the stream to which the wrapper writes data that is written to the origin stream.
        /// </summary>
        /// <value>
        /// The <see cref="Stream"/> instance that was passed to the <see cref="CopyIOStream"/> constructor when this instance was created;
        /// or <see langword="null"/> if the wrapper does not copy data on write operations.
        /// </value>
        public Stream? WriteCopy { get; }

        /// <summary>
        /// Gets a value that determines whether the origin stream is closed when this instance is closed.
        /// </summary>
        /// <value><see langword="true"/> if <see cref="BaseStream"/> is closed when <see cref="Stream.Dispose()"/> is called; <see langword="false"/>, if <see cref="BaseStream"/> is left open when this instance is disposed.</value>
        public bool CloseBaseStream { get; }

        /// <summary>
        /// Gets a value that determines whether the read-copy stream is closed when this instance is closed.
        /// </summary>
        /// <value><see langword="true"/> if <see cref="ReadCopy"/> is closed when <see cref="Stream.Dispose()"/> is called; <see langword="false"/>, if <see cref="ReadCopy"/> is left open when this instance is disposed.</value>
        public bool CloseReadCopy { get; }

        /// <summary>
        /// Gets a value that determines whether the write-copy stream is closed when this instance is closed.
        /// </summary>
        /// <value><see langword="true"/> if <see cref="WriteCopy"/> is closed when <see cref="Stream.Dispose()"/> is called; <see langword="false"/>, if <see cref="WriteCopy"/> is left open when this instance is disposed.</value>
        public bool CloseWriteCopy { get; }

        /// <summary>
        /// Gets a value indicating whether the stream wrapper supports reading.
        /// </summary>
        /// <value><see langword="true"/> if the origin stream is readable and the read-copy stream is either <see langword="null"/> or writable; otherwise <see langword="false"/>.</value>
        public override bool CanRead => BaseStream.CanRead && (ReadCopy?.CanWrite ?? true);

        /// <summary>
        /// Gets a value indicating whether the stream wrapper supports seeking.
        /// <para>The <see cref="CopyIOStream"/> wrapper class does not support seeking, even if the wrapped origin stream does. This property always returns <see langword="false"/></para>
        /// </summary>
        /// <value><see langword="false"/> as <see cref="CopyIOStream"/> does not support seeking.</value>
        /// <remarks>
        /// Seeking is not supported by the <see cref="CopyIOStream"/> class to ensure consistent write and read copies of the origin stream.
        /// However, if <see cref="BaseStream"/> supports seeking, <see cref="Stream.Seek(long, SeekOrigin)"/> can still be called on the origin stream.
        /// </remarks>
        public override bool CanSeek => false;

        /// <summary>
        /// Gets a value indicating whether the stream wrapper supports writing.
        /// </summary>
        /// <value><see langword="true"/> if the origin stream is writable and the write-copy stream is either <see langword="null"/> or writable; otherwise <see langword="false"/>.</value>
        public override bool CanWrite => BaseStream.CanWrite && (WriteCopy?.CanWrite ?? true);

        /// <summary>
        /// Gets a value that determines whether the stream wrapper can time out.
        /// </summary>
        /// <value><see langword="true"/> if the origin stream or either one of the copy streams can time out; otherwise <see langword="false"/>.</value>
        public override bool CanTimeout => BaseStream.CanTimeout || (ReadCopy?.CanTimeout ?? false) || (WriteCopy?.CanTimeout ?? false);

        /// <summary>
        /// Gets the length, in bytes, of the origin stream.
        /// </summary>
        /// <value>A long value representing the length of the stream in bytes.</value>
        /// <exception cref="NotSupportedException">The origin stream does not support seeking.</exception>
        /// <exception cref="ObjectDisposedException">Methods were called after the origin stream was closed.</exception>
        public override long Length => BaseStream.Length;

        /// <summary>
        /// Gets the position within the origin stream.
        /// Setting this property is not supported and will cause a <see cref="NotSupportedException"/> to be thrown.
        /// </summary>
        /// <value>The current position within the stream as a <see cref="long"/> valued offset from the beginning of the stream.</value>
        /// <exception cref="IOException">An I/O error occurred.</exception>
        /// <exception cref="NotSupportedException">The origin stream does not support seeking.</exception>
        /// <exception cref="ObjectDisposedException">Methods were called after the origin stream was closed.</exception>
        public override long Position
        {
            get => BaseStream.Position;
            set => throw new NotSupportedException();
        }

        /// <summary>
        /// Gets or sets a value, in milliseconds, that determines how long the wrapper will attempt to read from the origin stream
        /// before timing out.
        /// </summary>
        /// <value>The <see cref="Stream.ReadTimeout"/> of the origin stream or the <see cref="Stream.WriteTimeout"/> of the read-copy stream, whichever is smallest.</value>
        public override int ReadTimeout
        {
            get => GetReadTimeout();
            set
            {
                if (BaseStream.CanTimeout)
                    BaseStream.ReadTimeout = value;
                // The read copy is written to, so its *write* timeout applies.
                if (ReadCopy?.CanTimeout ?? false)
                    ReadCopy.WriteTimeout = value;
            }
        }

        /// <summary>
        /// Gets or sets a value, in milliseconds, that determines how long the wrapper will attempt to write to the origin stream
        /// before timing out.
        /// </summary>
        /// <value>The <see cref="Stream.WriteTimeout"/> of the origin stream or the <see cref="Stream.WriteTimeout"/> of the write-copy stream, whichever is smallest.</value>
        public override int WriteTimeout
        {
            get => GetWriteTimeout();
            set
            {
                if (BaseStream.CanTimeout)
                    BaseStream.WriteTimeout = value;
                if (WriteCopy?.CanTimeout ?? false)
                    WriteCopy.WriteTimeout = value;
            }
        }

        // 0 = not disposed, 1 = disposed; toggled atomically so the streams are closed at most once.
        private int f_disposed = 0;

        /// <inheritdoc cref="Stream.Dispose(bool)"/>
        protected override void Dispose(bool disposing)
        {
            // Managed resources are only released on the Dispose(true) path; the
            // Interlocked exchange guarantees each stream is closed at most once.
            if (disposing && Interlocked.Exchange(ref f_disposed, 1) == 0)
            {
                if (CloseBaseStream)
                    BaseStream.Dispose();
                if (CloseReadCopy)
                    ReadCopy?.Dispose();
                if (CloseWriteCopy)
                    WriteCopy?.Dispose();
            }
            base.Dispose(disposing);
        }

        /// <inheritdoc cref="Stream.Flush"/>
        public override void Flush()
        {
            BaseStream.Flush();
            ReadCopy?.Flush();
            WriteCopy?.Flush();
        }

        /// <inheritdoc cref="Stream.FlushAsync(CancellationToken)"/>
        public override Task FlushAsync(CancellationToken cancellationToken)
        {
            // Flush all participating streams concurrently; only await the streams that exist.
            if (!(ReadCopy is null))
            {
                if (!(WriteCopy is null))
                    return Task.WhenAll(BaseStream.FlushAsync(cancellationToken), ReadCopy.FlushAsync(cancellationToken), WriteCopy.FlushAsync(cancellationToken));
                return Task.WhenAll(BaseStream.FlushAsync(cancellationToken), ReadCopy.FlushAsync(cancellationToken));
            }
            else if (!(WriteCopy is null))
                return Task.WhenAll(BaseStream.FlushAsync(cancellationToken), WriteCopy.FlushAsync(cancellationToken));
            return BaseStream.FlushAsync(cancellationToken);
        }

        /// <inheritdoc cref="Stream.Read(byte[], int, int)"/>
        public override int Read(byte[] buffer, int offset, int count)
        {
            var bytesRead = BaseStream.Read(buffer, offset, count);
            // Only the bytes actually read are mirrored into the read copy.
            ReadCopy?.Write(buffer, offset, bytesRead);
            return bytesRead;
        }

        /// <inheritdoc cref="Stream.ReadAsync(byte[], int, int, CancellationToken)"/>
        public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            var bytesRead = await BaseStream.ReadAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
            if (!(ReadCopy is null))
                await ReadCopy.WriteAsync(buffer, offset, bytesRead, cancellationToken).ConfigureAwait(false);
            return bytesRead;
        }

        /// <inheritdoc cref="Stream.Write(byte[], int, int)"/>
        public override void Write(byte[] buffer, int offset, int count)
        {
            BaseStream.Write(buffer, offset, count);
            WriteCopy?.Write(buffer, offset, count);
        }

        /// <inheritdoc cref="Stream.WriteAsync(byte[], int, int, CancellationToken)"/>
        public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            if (!(WriteCopy is null))
                return Task.WhenAll(BaseStream.WriteAsync(buffer, offset, count, cancellationToken), WriteCopy.WriteAsync(buffer, offset, count, cancellationToken));
            return BaseStream.WriteAsync(buffer, offset, count, cancellationToken);
        }

        /// <summary>
        /// Always throws a <see cref="NotSupportedException"/>.
        /// </summary>
        /// <param name="offset">Ignored. A byte offset relative to the <paramref name="origin"/> parameter.</param>
        /// <param name="origin">Ignored. A value of type <see cref="SeekOrigin"/> indicating the reference point used to obtain the new position.</param>
        /// <returns>The new position within the current stream.</returns>
        /// <remarks>
        /// Instances derived from <see cref="CopyIOStream"/> by default do not support seeking in order to keep the read and write copies consistent.
        /// However, if the source stream is seekable you can call <see cref="Stream.Seek"/> on the <see cref="BaseStream"/> member. Note that doing so
        /// can potentially create garbled or discontinuous content in the read and/or write copies on subsequent I/O operations.
        /// </remarks>
        /// <exception cref="NotSupportedException">Instances of <see cref="CopyIOStream"/> do not support seeking.</exception>
        public override long Seek(long offset, SeekOrigin origin) =>
            throw CreateNotSupportedException(nameof(Seek));

        /// <summary>
        /// Always throws a <see cref="NotSupportedException"/>.
        /// </summary>
        /// <param name="value">Ignored. The desired length of the current stream in bytes.</param>
        /// <remarks>
        /// Instances derived from <see cref="CopyIOStream"/> by default do not support setting the length.
        /// However, if the underlying streams support it, you can call <see cref="Stream.SetLength"/> individually on the
        /// <see cref="BaseStream"/>, <see cref="ReadCopy"/> and <see cref="WriteCopy"/> members. Note that doing so
        /// must take into account that <see cref="SetLength"/> might not succeed on all three members.
        /// </remarks>
        /// <exception cref="NotSupportedException">Instances of <see cref="CopyIOStream"/> do not support setting the length of the stream.</exception>
        public override void SetLength(long value) =>
            throw CreateNotSupportedException(nameof(SetLength));

        private int GetReadTimeout() => GetTimeout(s => s.ReadTimeout, ReadCopy);

        private int GetWriteTimeout() => GetTimeout(s => s.WriteTimeout, WriteCopy);

        // Returns the effective timeout: the smaller of the base stream timeout (selected by
        // timeoutGetter) and the copy stream's write timeout, treating Infinite as the largest value.
        private int GetTimeout(Func<Stream, int> timeoutGetter, Stream? copyStream)
        {
            if (BaseStream.CanTimeout)
            {
                // Casts to uint to ensure that negative timeout values (infinite timeouts)
                // are valued greater than a specified non-negative timeout.
                uint baseTimeout = unchecked((uint)timeoutGetter(BaseStream));
                if (copyStream?.CanTimeout ?? false)
                {
                    // Copy streams are always written to, so their WriteTimeout is the relevant limit.
                    uint copyTimeout = unchecked((uint)copyStream.WriteTimeout);
                    uint minimumTimeout = Math.Min(baseTimeout, copyTimeout);
                    return unchecked((int)minimumTimeout);
                }
                else
                    return unchecked((int)baseTimeout);
            }
            else if (copyStream?.CanTimeout ?? false)
                return copyStream.WriteTimeout;
            else
                throw CreateNotSupportedException("Timeout");
        }

        [SuppressMessage("Microsoft.Globalization", "CA1305", Justification = "String Formatting not affected by Globalization.")]
        private NotSupportedException CreateNotSupportedException(string memberName) =>
            new NotSupportedException($"This operation is not supported on the {GetType()} type. Invoke the {memberName} member on the underlying stream.");
    }
}
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Xml;
using System.IO;

/// <summary>
/// Loads user-supplied Collada (.dae) models from the application data folder at runtime
/// and caches the resulting GameObjects so repeat requests only instantiate a clone.
/// </summary>
public class CustomContentDirector : MonoBehaviour
{
    // Accumulates triangle indices for a mesh as polygons are triangulated.
    class TriangleSet
    {
        public int[] indices;

        public TriangleSet()
        {
            indices = null;
        }

        // Appends the given indices to the accumulated index array.
        public void AddIndices(int[] value)
        {
            int oldCount = (indices == null) ? 0 : indices.Length;
            int[] newIndices = new int[oldCount + value.Length];
            if (oldCount > 0)
            {
                Array.Copy(indices, newIndices, oldCount);
            }
            Array.Copy(value, 0, newIndices, oldCount, value.Length);
            indices = newIndices;
        }
    }

    // A unique (vertex, normal, texcoord) index triple; -1 marks an absent component.
    class IndexGroup
    {
        public int vertIndex;
        public int normIndex;
        public int texcoordIndex;

        public IndexGroup(int v, int n, int uv)
        {
            vertIndex = v;
            normIndex = n;
            texcoordIndex = uv;
        }
    }

    // Deduplicating list of IndexGroup triples. Collada indexes vertices, normals and
    // texcoords independently; Unity needs one index stream, so every unique triple
    // becomes one Unity vertex.
    class IndexGroupList
    {
        // List-backed storage; the original array-per-Add reallocation was O(n^2).
        List<IndexGroup> _groups;

        public IndexGroupList()
        {
            _groups = new List<IndexGroup>();
        }

        public int Count
        {
            get { return _groups.Count; }
        }

        // Returns the index of the matching triple, adding a new entry if none exists.
        // NOTE(review): lookup is linear; acceptable for small meshes, consider a
        // dictionary keyed on the triple if large models become slow.
        public int GetGroup(int vertIndex, int normIndex, int texcoordIndex)
        {
            for (int i = 0; i < _groups.Count; i++)
            {
                if (_groups[i].vertIndex == vertIndex &&
                    _groups[i].normIndex == normIndex &&
                    _groups[i].texcoordIndex == texcoordIndex)
                {
                    return i;
                }
            }
            return Add(vertIndex, normIndex, texcoordIndex);
        }

        public IndexGroup GetGroup(int index)
        {
            return _groups[index];
        }

        // Appends a triple unconditionally and returns its index.
        public int Add(int vertIndex, int normIndex, int texcoordIndex)
        {
            _groups.Add(new IndexGroup(vertIndex, normIndex, texcoordIndex));
            return _groups.Count - 1;
        }
    }

    /// <summary>
    /// Cache of known models
    /// </summary>
    static Dictionary<string, GameObject> _cachedModels = new Dictionary<string, GameObject>();

    /// <summary>
    /// Returns the platform-specific folder (with trailing slash) where user content is stored.
    /// </summary>
    public static string GetApplicationDataPath()
    {
        string dataPath = "";
#if UNITY_IPHONE
        // On iOS, user documents live in <app>/Documents rather than inside the app bundle.
        string fileNameBase = Application.dataPath.Substring(0, Application.dataPath.LastIndexOf('/'));
        dataPath = fileNameBase.Substring(0, fileNameBase.LastIndexOf('/')) + "/Documents/";
#elif UNITY_ANDROID
        dataPath = Application.persistentDataPath + "/";
#else
        dataPath = Application.dataPath + "/";
#endif
        return dataPath;
    }

    /// <summary>
    /// Lists the files in a folder relative to the application data path,
    /// returning names relative to that folder.
    /// </summary>
    public static string[] GetFilesInFolder(string relFolder)
    {
        string fullPath = GetApplicationDataPath() + relFolder;
        Debug.Log("Searching " + fullPath + " for files...");
        string[] result = Directory.GetFiles(fullPath);
        // Strip the relative folder from the filenames.
        // NOTE(review): the +1 assumes relFolder has no trailing separator - confirm with callers.
        for (int i = 0; i < result.Length; i++)
        {
            result[i] = result[i].Substring(fullPath.Length + 1);
        }
        // All done
        return result;
    }

    /// <summary>
    /// Loads a custom Collada model from the application data path, builds Unity meshes
    /// for each geometry, caches the result, and returns an instantiated clone.
    /// </summary>
    public static GameObject LoadModel(string fileName)
    {
        // If we get here, we're loading a custom model. If it's in the cache, return what's there.
        if (_cachedModels.ContainsKey(fileName))
        {
            return (GameObject)GameObject.Instantiate(_cachedModels[fileName]);
        }

        // If we get here, this is our first request for this filename. Load the mesh.
        // (We currently only support the Collada format)
        string meshFileName = CustomContentDirector.GetApplicationDataPath() + fileName;
        GameObject o = new GameObject(); // This will be the return value
        Debug.Log("Loading custom model " + meshFileName + "...");
        try
        {
            // Open a stream to the file
            using (FileStream fileStream = new FileStream(meshFileName, FileMode.Open))
            {
                // Load the stream into an Xml document parser
                XmlDocument doc = new XmlDocument();
                doc.Load(fileStream);

                // Now deserialize the document into a XmlCollada.XmlColladaSchema object. While
                // the naming could be better, this object allows easy access to all the components
                // in the Collada mesh.
                XmlCollada.XmlColladaSchema collada = new XmlCollada.XmlColladaSchema(doc);
                XmlCollada.Scene scene = collada.Scene;
                XmlCollada.Instance_Visual_Scene instanceVisualScene = scene.InstanceVisualScene;
                XmlCollada.Visual_Scene visualScene = collada.LibraryVisualScenes.GetVisualScene(instanceVisualScene.URL.Trim('#'));
                XmlCollada.XmlColladaList geometryNodeList = visualScene.GetInstanceGeometryNodes();

                // We don't support these; but when we decide to, these are here for us
                //XmlCollada.XmlColladaList materialNodeList = collada.LibraryMaterials.Materials;
                //XmlCollada.XmlColladaList imageNodeList = collada.LibraryImages.Images;

                // Navigate through the schema to build all geometries
                for (int i = 0; i < geometryNodeList.Count; i++)
                {
                    XmlCollada.Node node = (XmlCollada.Node)geometryNodeList.GetAt(i);
                    XmlCollada.XmlColladaList transforms = node.Transforms;
                    XmlCollada.Instance_Geometry instanceGeometry = node.InstanceGeometry;
                    XmlCollada.Geometry geometry = collada.LibraryGeometries.GetGeometry(instanceGeometry.URL.Trim('#'));
                    XmlCollada.Mesh colladaMesh = geometry.Mesh;
                    XmlCollada.XmlColladaList xmlGeometries = colladaMesh.GetXmlGeometries();

                    // Build a matrix based on all geometry transforms
                    for (int j = 0; j < transforms.Count; j++)
                    {
                        // TODO: Build a matrix based on the transforms
                    }

                    // Now load all the polygon lists
                    for (int j = 0; j < xmlGeometries.Count; j++)
                    {
                        XmlCollada.XmlGeometry xmlGeometry = (XmlCollada.XmlGeometry)xmlGeometries.GetAt(j);
                        XmlCollada.XmlColladaList inputs = xmlGeometry.GetInputs();
                        TriangleSet triangleSet = new TriangleSet();

                        // Create one Unity game object per polygon list
                        GameObject unityNode = new GameObject();
                        Mesh unityMesh = new Mesh();
                        unityNode.AddComponent<MeshFilter>().mesh = unityMesh;
                        unityNode.AddComponent<MeshRenderer>();
                        unityNode.transform.parent = o.transform;
                        unityNode.name = node.Name;

                        // Get the material (Currently unsupported)
                        //string instanceMaterialTarget = node.InstanceGeometry.GetBoundMaterialTarget(xmlGeometry.Material);
                        //XmlCollada.Material material = collada.LibraryMaterials.GetMaterial(instanceMaterialTarget.Trim('#'));
                        //XmlCollada.Effect effect = collada.LibraryEffects.GetEffect(material._instanceEffect.URL.Trim('#'));
                        //XmlCollada.XmlShaderElement shader = effect.ProfileCommon.Technique.Shader;

                        // Now load recognized input components for this polygon list
                        Vector3[] vertices = null;
                        Vector3[] normals = null;
                        Vector2[] texcoords = null;
                        int verticesPOffset = -1;
                        int normalsPOffset = -1;
                        int texcoordsPOffset = -1;
                        int maxInputOffset = 0;
                        for (int k = 0; k < inputs.Count; k++)
                        {
                            XmlCollada.Input input = (XmlCollada.Input)inputs.GetAt(k);
                            if (input.Offset > maxInputOffset)
                            {
                                maxInputOffset = input.Offset;
                            }
                            XmlCollada.Source source;
                            if (input.Semantic == "VERTEX")
                            {
                                // VERTEX inputs indirect through the <vertices> element to the real source.
                                source = (XmlCollada.Source)colladaMesh.GetSource(colladaMesh.Vertices.Input.Source.Trim('#'));
                            }
                            else
                            {
                                source = (XmlCollada.Source)colladaMesh.GetSource(input.Source.Trim('#'));
                            }
                            XmlCollada.Float_Array floatArray = source.Float_Array;
                            XmlCollada.Technique_Common techniqueCommon = (XmlCollada.Technique_Common)source.Technique_Common;
                            XmlCollada.Accessor accessor = techniqueCommon.Accessor;
                            XmlCollada.XmlColladaList paramsList = accessor.GetParamsList();

                            // Calculate the number of named parameters
                            int namedParamCount = 0;
                            for (int l = 0; l < paramsList.Count; l++)
                            {
                                XmlCollada.Param param = (XmlCollada.Param)paramsList.GetAt(l);
                                if (param.Name.Length > 0)
                                {
                                    namedParamCount++;
                                }
                            }

                            // Build the coordinate list from all the accessor elements
                            float[] floatValues = new float[accessor.Count * namedParamCount];
                            int curFloatValue = 0;
                            for (int l = 0; l < accessor.Count; l++)
                            {
                                // Do for all parameters
                                int m0 = accessor.Offset + l * accessor.Stride;
                                for (int m = 0; m < paramsList.Count; m++)
                                {
                                    XmlCollada.Param param = (XmlCollada.Param)paramsList.GetAt(m);
                                    if (param.Name.Length > 0)
                                    {
                                        floatValues[curFloatValue++] = floatArray.Values[m0 + m];
                                    }
                                }
                            }

                            if (null == vertices && "VERTEX" == input.Semantic.ToUpper())
                            {
                                vertices = new Vector3[accessor.Count];
                                for (int l = 0; l < accessor.Count; l++)
                                {
                                    vertices[l] = new Vector3();
                                    vertices[l].x = floatValues[l * namedParamCount];
                                    if (namedParamCount > 1)
                                    {
                                        vertices[l].y = floatValues[l * namedParamCount + 1];
                                    }
                                    if (namedParamCount > 2)
                                    {
                                        vertices[l].z = floatValues[l * namedParamCount + 2];
                                    }
                                }
                                verticesPOffset = input.Offset;
                            }
                            else if (null == normals && "NORMAL" == input.Semantic.ToUpper())
                            {
                                normals = new Vector3[accessor.Count];
                                for (int l = 0; l < accessor.Count; l++)
                                {
                                    normals[l] = new Vector3();
                                    normals[l].x = floatValues[l * namedParamCount];
                                    if (namedParamCount > 1)
                                    {
                                        normals[l].y = floatValues[l * namedParamCount + 1];
                                    }
                                    if (namedParamCount > 2)
                                    {
                                        normals[l].z = floatValues[l * namedParamCount + 2];
                                    }
                                }
                                normalsPOffset = input.Offset;
                            }
                            else if (null == texcoords && "TEXCOORD" == input.Semantic.ToUpper())
                            {
                                texcoords = new Vector2[accessor.Count];
                                for (int l = 0; l < accessor.Count; l++)
                                {
                                    texcoords[l] = new Vector2();
                                    texcoords[l].x = floatValues[l * namedParamCount];
                                    if (namedParamCount > 1)
                                    {
                                        texcoords[l].y = floatValues[l * namedParamCount + 1];
                                    }
                                }
                                texcoordsPOffset = input.Offset;
                            }
                        } // for (int k = 0; k < inputs.Count; k++)

                        // Now that all the input components (vertex lists) have been loaded, we load the actual face
                        // and index information. Loading is different for polygons and triangles.
                        if (xmlGeometry.root == "polylist" || xmlGeometry.root == "triangles")
                        {
                            // Make a first pass at reading in the polygons. Because there can be a different
                            // number of vertex indices than normal indices than UV indices, we have to create
                            // our own index group based on the unique pairs of those elements.
                            //
                            // Note: The stride of <p> is equal to one more than the highest input offset.
                            //
                            IndexGroupList indexGroupList = new IndexGroupList();
                            int[] vcounts = xmlGeometry.GetVCount();
                            int[] p = xmlGeometry.GetP();
                            int pIndex = 0;

                            // A triangle list is no different than a polygon list where the vcount is undefined. We'll make
                            // it up here and assign three vertices per triangle.
                            if (xmlGeometry.root == "triangles")
                            {
                                vcounts = new int[xmlGeometry.Count];
                                for (int k = 0; k < xmlGeometry.Count; k++)
                                {
                                    vcounts[k] = 3;
                                }
                            }

                            // Do for all elements in vcount (one element is one polygon)
                            for (int k = 0; k < vcounts.Length; k++)
                            {
                                int vertexCount = vcounts[k];
                                int[] polyVertIndices = new int[vertexCount];
                                int[] polyNormIndices = (normalsPOffset >= 0) ? new int[vertexCount] : null;
                                int[] polyTexIndices = (texcoordsPOffset >= 0) ? new int[vertexCount] : null;

                                // Do for all vertices for this polygon
                                for (int l = 0; l < vertexCount; l++)
                                {
                                    polyVertIndices[l] = p[pIndex + verticesPOffset];
                                    if (normalsPOffset >= 0)
                                    {
                                        polyNormIndices[l] = p[pIndex + normalsPOffset];
                                    }
                                    if (texcoordsPOffset >= 0)
                                    {
                                        polyTexIndices[l] = p[pIndex + texcoordsPOffset];
                                    }
                                    pIndex += maxInputOffset + 1;
                                }

                                // At this point we have all our indices; however we still have to deal with the disjointed
                                // nature of the indices (e.g. 8 vertex indices, 24 normal vertices). This is where our index
                                // group list comes in. Add every unique combination of vertex, normal, and uv indices into
                                // a single array, and build our triangle list while we're at it (fan triangulation).
                                int[] triangleIndices = new int[(vertexCount - 2) * 3];
                                for (int l = 0; l < vertexCount - 2; l++)
                                {
                                    triangleIndices[l * 3] = indexGroupList.GetGroup(
                                        polyVertIndices[0],
                                        ((polyNormIndices != null) ? polyNormIndices[0] : -1),
                                        ((polyTexIndices != null) ? polyTexIndices[0] : -1));
                                    triangleIndices[l * 3 + 1] = indexGroupList.GetGroup(
                                        polyVertIndices[l + 1],
                                        ((polyNormIndices != null) ? polyNormIndices[l + 1] : -1),
                                        ((polyTexIndices != null) ? polyTexIndices[l + 1] : -1));
                                    triangleIndices[l * 3 + 2] = indexGroupList.GetGroup(
                                        polyVertIndices[l + 2],
                                        ((polyNormIndices != null) ? polyNormIndices[l + 2] : -1),
                                        ((polyTexIndices != null) ? polyTexIndices[l + 2] : -1));
                                }

                                // Add the triangles to the generic mesh
                                triangleSet.AddIndices(triangleIndices);
                            }

                            // Now we need to assign all the accumulated vertices, normals, and texcoords to the generic mesh
                            Vector3[] v = new Vector3[indexGroupList.Count];
                            Vector3[] n = (null != normals) ? new Vector3[indexGroupList.Count] : null;
                            Vector2[] uv = (null != texcoords) ? new Vector2[indexGroupList.Count] : null;
                            for (int k = 0; k < indexGroupList.Count; k++)
                            {
                                IndexGroup group = indexGroupList.GetGroup(k);
                                v[k] = vertices[group.vertIndex];
                                if (null != normals && group.normIndex >= 0)
                                {
                                    n[k] = normals[group.normIndex];
                                }
                                if (null != texcoords && group.texcoordIndex >= 0)
                                {
                                    uv[k] = texcoords[group.texcoordIndex];
                                }
                            }
                            unityMesh.vertices = v;
                            unityMesh.normals = n;
                            unityMesh.uv = uv;
                            unityMesh.triangles = triangleSet.indices;

                            // Assign a default material
                            unityNode.renderer.material = new Material(Shader.Find("Diffuse"));
                            unityNode.renderer.material.color = new Color(0.3f, 0.3f, 0.3f, 1);

                            // Now add the triangle set to the geometry
                            //genericMeshGeometry.AddTriangleSet(triangleSet);
                        } // if (xmlGeometry.root == "polylist" || xmlGeometry.root == "triangles")
                        //genericMesh.AddGeometry(genericMeshGeometry);
                    }
                } // for (int i = 0; i < geometryNodeList.Count; i++)
            }

            // Now add the mesh to our cache and return it
            o.name = "prefab_" + fileName;
            o.SetActiveRecursively(false);
            DontDestroyOnLoad(o);
            _cachedModels.Add(fileName, o);
        }
        catch (Exception e)
        {
            // NOTE(review): on failure the partially-built (uncached) object is still
            // instantiated and returned below - confirm callers tolerate an empty model.
            Debug.LogError(e.ToString());
        }
        return (GameObject)GameObject.Instantiate(o);
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Runtime.InteropServices;

using Internal.Runtime.CompilerServices;

namespace System
{
    internal static partial class Number
    {
        /// <summary>
        /// A fixed-capacity, allocation-free unsigned big integer used by the floating-point
        /// formatting code. The magnitude is stored little-endian as up to
        /// <see cref="MaxBlockCount"/> 32-bit blocks in an inline fixed buffer; <c>_length</c>
        /// is the number of significant blocks (0 represents the value zero).
        /// </summary>
        [StructLayout(LayoutKind.Sequential, Pack = 1)]
        internal unsafe ref struct BigInteger
        {
            // Large enough for the intermediate values produced while scaling doubles
            // (powers of ten up to 10^256 multiplied together).
            private const int MaxBlockCount = 35;

            // 10^0 through 10^7. The low 3 bits of an exponent are handled with a single
            // uint multiply via this table; larger powers come from s_Pow10BigNumTable.
            private static readonly uint[] s_Pow10UInt32Table = new uint[]
            {
                1,          // 10^0
                10,         // 10^1
                100,        // 10^2
                1000,       // 10^3
                10000,      // 10^4
                100000,     // 10^5
                1000000,    // 10^6
                10000000,   // 10^7
            };

            // Start offsets into s_Pow10BigNumTable of the packed BigInteger images for
            // 10^8, 10^16, 10^32, 10^64, 10^128 and 10^256 respectively.
            private static readonly int[] s_Pow10BigNumTableIndices = new int[]
            {
                0,          // 10^8
                2,          // 10^16
                5,          // 10^32
                10,         // 10^64
                18,         // 10^128
                33,         // 10^256
            };

            // Packed BigInteger images: each entry is a _length value followed by that many
            // little-endian 32-bit blocks. Pow10 reinterprets slices of this array as
            // BigInteger values in place (see GetBlocksPointer / Unsafe.AsPointer usage).
            private static readonly uint[] s_Pow10BigNumTable = new uint[]
            {
                // 10^8
                1,          // _length
                100000000,  // _blocks

                // 10^16
                2,          // _length
                0x6FC10000, // _blocks
                0x002386F2,

                // 10^32
                4,          // _length
                0x00000000, // _blocks
                0x85ACEF81,
                0x2D6D415B,
                0x000004EE,

                // 10^64
                7,          // _length
                0x00000000, // _blocks
                0x00000000,
                0xBF6A1F01,
                0x6E38ED64,
                0xDAA797ED,
                0xE93FF9F4,
                0x00184F03,

                // 10^128
                14,         // _length
                0x00000000, // _blocks
                0x00000000,
                0x00000000,
                0x00000000,
                0x2E953E01,
                0x03DF9909,
                0x0F1538FD,
                0x2374E42F,
                0xD3CFF5EC,
                0xC404DC08,
                0xBCCDB0DA,
                0xA6337F19,
                0xE91F2603,
                0x0000024E,

                // 10^256
                27,         // _length
                0x00000000, // _blocks
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x982E7C01,
                0xBED3875B,
                0xD8D99F72,
                0x12152F87,
                0x6BDE50C6,
                0xCF4A6E70,
                0xD595D80F,
                0x26B2716E,
                0xADC666B0,
                0x1D153624,
                0x3C42D35A,
                0x63FF540E,
                0xCC5573C0,
                0x65F9EF17,
                0x55BC28F2,
                0x80DCC7F7,
                0xF46EEDDC,
                0x5FDCEFCE,
                0x000553F7,

                // Trailing blocks to ensure MaxBlockCount
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
                0x00000000,
            };

            // De Bruijn sequence lookup used by BitScanReverse.
            private static readonly uint[] s_MultiplyDeBruijnBitPosition = new uint[]
            {
                0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30,
                8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31,
            };

            private int _length;
            private fixed uint _blocks[MaxBlockCount];

            /// <summary>Initializes the value from a 32-bit unsigned integer (length 0 for zero).</summary>
            public BigInteger(uint value)
            {
                _blocks[0] = value;
                _length = (value == 0) ? 0 : 1;
            }

            /// <summary>
            /// Initializes the value from a 64-bit unsigned integer.
            /// NOTE(review): unlike the uint constructor, value == 0 yields _length == 1
            /// (a single zero block) rather than 0 — preserved as-is since callers may
            /// rely on it; confirm before normalizing.
            /// </summary>
            public BigInteger(ulong value)
            {
                var lower = (uint)(value);
                var upper = (uint)(value >> 32);

                _blocks[0] = lower;
                _blocks[1] = upper;

                _length = (upper == 0) ? 1 : 2;
            }

            /// <summary>Returns the index of the highest set bit of <paramref name="mask"/> (0-31).</summary>
            public static uint BitScanReverse(uint mask)
            {
                // This comes from the Stanford Bit Widdling Hacks by Sean Eron Anderson:
                // http://graphics.stanford.edu/~seander/bithacks.html#IntegerLogDeBruijn

                mask |= (mask >> 1); // first round down to one less than a power of 2
                mask |= (mask >> 2);
                mask |= (mask >> 4);
                mask |= (mask >> 8);
                mask |= (mask >> 16);

                uint index = (mask * 0x07C4ACDD) >> 27;
                return s_MultiplyDeBruijnBitPosition[(int)(index)];
            }

            /// <summary>
            /// Compares two magnitudes; returns negative, zero, or positive when
            /// <paramref name="lhs"/> is less than, equal to, or greater than <paramref name="rhs"/>.
            /// </summary>
            public static int Compare(ref BigInteger lhs, ref BigInteger rhs)
            {
                Debug.Assert(unchecked((uint)(lhs._length)) <= MaxBlockCount);
                Debug.Assert(unchecked((uint)(rhs._length)) <= MaxBlockCount);

                int lhsLength = lhs._length;
                int rhsLength = rhs._length;

                // Lengths are kept free of leading zero blocks, so a longer value is larger.
                int lengthDelta = (lhsLength - rhsLength);

                if (lengthDelta != 0)
                {
                    return lengthDelta;
                }

                if (lhsLength == 0)
                {
                    Debug.Assert(rhsLength == 0);
                    return 0;
                }

                // Same length: compare blocks from most to least significant.
                for (int index = (lhsLength - 1); index >= 0; index--)
                {
                    long delta = (long)(lhs._blocks[index]) - rhs._blocks[index];

                    if (delta != 0)
                    {
                        return delta > 0 ? 1 : -1;
                    }
                }

                return 0;
            }

            /// <summary>
            /// Divides <paramref name="dividend"/> by <paramref name="divisor"/> in place,
            /// leaving the remainder in <paramref name="dividend"/> and returning the single-digit
            /// quotient. Callers must first normalize via <see cref="PrepareHeuristicDivide"/> so
            /// the quotient estimate is off by at most one.
            /// </summary>
            public static uint HeuristicDivide(ref BigInteger dividend, ref BigInteger divisor)
            {
                int divisorLength = divisor._length;

                if (dividend._length < divisorLength)
                {
                    return 0;
                }

                // This is an estimated quotient. Its error should be less than 2.
                // Reference inequality:
                // a/b - floor(floor(a)/(floor(b) + 1)) < 2
                int lastIndex = (divisorLength - 1);
                uint quotient = dividend._blocks[lastIndex] / (divisor._blocks[lastIndex] + 1);

                if (quotient != 0)
                {
                    // Now we use our estimated quotient to update each block of dividend.
                    // dividend = dividend - divisor * quotient
                    int index = 0;

                    ulong borrow = 0;
                    ulong carry = 0;

                    do
                    {
                        ulong product = ((ulong)(divisor._blocks[index]) * quotient) + carry;
                        carry = product >> 32;

                        ulong difference = (ulong)(dividend._blocks[index]) - (uint)(product) - borrow;
                        borrow = (difference >> 32) & 1;

                        dividend._blocks[index] = (uint)(difference);

                        index++;
                    }
                    while (index < divisorLength);

                    // Remove all leading zero blocks from dividend
                    while ((divisorLength > 0) && (dividend._blocks[divisorLength - 1] == 0))
                    {
                        divisorLength--;
                    }

                    dividend._length = divisorLength;
                }

                // If the dividend is still larger than the divisor, we overshot our estimate quotient.
                // To correct, we increment the quotient and subtract one more divisor from the dividend
                // (Because we guaranteed the error range).
                if (Compare(ref dividend, ref divisor) >= 0)
                {
                    quotient++;

                    // dividend = dividend - divisor
                    int index = 0;
                    ulong borrow = 0;

                    do
                    {
                        ulong difference = (ulong)(dividend._blocks[index]) - divisor._blocks[index] - borrow;
                        borrow = (difference >> 32) & 1;

                        dividend._blocks[index] = (uint)(difference);

                        index++;
                    }
                    while (index < divisorLength);

                    // Remove all leading zero blocks from dividend
                    while ((divisorLength > 0) && (dividend._blocks[divisorLength - 1] == 0))
                    {
                        divisorLength--;
                    }

                    dividend._length = divisorLength;
                }

                return quotient;
            }

            /// <summary>Returns floor(log2(value)) for a non-zero 32-bit value.</summary>
            public static uint LogBase2(uint value)
            {
                Debug.Assert(value != 0);
                return BitScanReverse(value);
            }

            /// <summary>Returns floor(log2(value)) for a non-zero 64-bit value.</summary>
            public static uint LogBase2(ulong value)
            {
                Debug.Assert(value != 0);

                uint upper = (uint)(value >> 32);

                if (upper != 0)
                {
                    return 32 + LogBase2(upper);
                }

                return LogBase2((uint)(value));
            }

            /// <summary>
            /// Computes result = lhs * value. <paramref name="result"/> may alias
            /// <paramref name="lhs"/> (in-place multiply).
            /// </summary>
            public static void Multiply(ref BigInteger lhs, uint value, ref BigInteger result)
            {
                if (lhs.IsZero() || (value == 1))
                {
                    result.SetValue(ref lhs);
                    return;
                }

                if (value == 0)
                {
                    result.SetZero();
                    return;
                }

                int lhsLength = lhs._length;
                int index = 0;
                ulong carry = 0;

                while (index < lhsLength)
                {
                    ulong product = ((ulong)(lhs._blocks[index]) * value) + carry;
                    carry = product >> 32;
                    result._blocks[index] = (uint)(product);

                    index++;
                }

                // BUGFIX: the length must be assigned, not accumulated. The previous code did
                // `result._length += (lhsLength + 1)`, which doubled the length for the in-place
                // case and left the length of a freshly-zeroed result unset when there was no carry.
                if (carry != 0)
                {
                    Debug.Assert(unchecked((uint)(lhsLength)) + 1 <= MaxBlockCount);
                    result._blocks[index] = (uint)(carry);
                    result._length = (lhsLength + 1);
                }
                else
                {
                    result._length = lhsLength;
                }
            }

            /// <summary>
            /// Computes result = lhs * rhs via schoolbook multiplication.
            /// <paramref name="result"/> must not alias either operand.
            /// </summary>
            public static void Multiply(ref BigInteger lhs, ref BigInteger rhs, ref BigInteger result)
            {
                if (lhs.IsZero() || rhs.IsOne())
                {
                    result.SetValue(ref lhs);
                    return;
                }

                if (rhs.IsZero())
                {
                    result.SetZero();
                    return;
                }

                // Iterate over the shorter operand in the outer loop (fewer partial products).
                ref readonly BigInteger large = ref lhs;
                int largeLength = lhs._length;

                ref readonly BigInteger small = ref rhs;
                int smallLength = rhs._length;

                if (largeLength < smallLength)
                {
                    large = ref rhs;
                    largeLength = rhs._length;

                    small = ref lhs;
                    smallLength = lhs._length;
                }

                int maxResultLength = smallLength + largeLength;
                Debug.Assert(unchecked((uint)(maxResultLength)) <= MaxBlockCount);

                // Zero out result internal blocks.
                Buffer.ZeroMemory((byte*)(result.GetBlocksPointer()), (maxResultLength * sizeof(uint)));

                int smallIndex = 0;
                int resultStartIndex = 0;

                while (smallIndex < smallLength)
                {
                    // Multiply each block of large BigNum.
                    if (small._blocks[smallIndex] != 0)
                    {
                        int largeIndex = 0;
                        int resultIndex = resultStartIndex;

                        ulong carry = 0;

                        do
                        {
                            ulong product = result._blocks[resultIndex] + ((ulong)(small._blocks[smallIndex]) * large._blocks[largeIndex]) + carry;
                            carry = product >> 32;
                            result._blocks[resultIndex] = (uint)(product);

                            resultIndex++;
                            largeIndex++;
                        }
                        while (largeIndex < largeLength);

                        result._blocks[resultIndex] = (uint)(carry);
                    }

                    smallIndex++;
                    resultStartIndex++;
                }

                // Trim at most one leading zero block (the top block is the only one that can be zero).
                if ((maxResultLength > 0) && (result._blocks[maxResultLength - 1] == 0))
                {
                    result._length = (maxResultLength - 1);
                }
                else
                {
                    result._length = maxResultLength;
                }
            }

            /// <summary>Computes result = 10^exponent.</summary>
            public static void Pow10(uint exponent, ref BigInteger result)
            {
                // We leverage two arrays - s_Pow10UInt32Table and s_Pow10BigNumTable to speed up the Pow10 calculation.
                //
                // s_Pow10UInt32Table stores the results of 10^0 to 10^7.
                // s_Pow10BigNumTable stores the results of 10^8, 10^16, 10^32, 10^64, 10^128 and 10^256.
                //
                // For example, let's say exp = 0b111111. We can split the exp to two parts, one is small exp,
                // which 10^smallExp can be represented as uint, another part is 10^bigExp, which must be represented as BigNum.
                // So the result should be 10^smallExp * 10^bigExp.
                //
                // Calculating 10^smallExp is simple, we just lookup the 10^smallExp from s_Pow10UInt32Table.
                // But here's a bad news: although uint can represent 10^9, exp 9's binary representation is 1001.
                // That means 10^(1011), 10^(1101), 10^(1111) all cannot be stored as uint, we cannot easily say something like:
                // "Any bits <= 3 is small exp, any bits > 3 is big exp". So instead of involving 10^8, 10^9 to s_Pow10UInt32Table,
                // consider 10^8 and 10^9 as a bigNum, so they fall into s_Pow10BigNumTable. Now we can have a simple rule:
                // "Any bits <= 3 is small exp, any bits > 3 is big exp".
                //
                // For 0b111111, we first calculate 10^(smallExp), which is 10^(7), now we can shift right 3 bits, prepare to calculate the bigExp part,
                // the exp now becomes 0b000111.
                //
                // Apparently the lowest bit of bigExp should represent 10^8 because we have already shifted 3 bits for smallExp,
                // so s_Pow10BigNumTable[0] = 10^8. Now let's shift exp right 1 bit, the lowest bit should represent 10^(8 * 2) = 10^16,
                // and so on...
                //
                // That's why we just need the values of s_Pow10BigNumTable be power of 2.
                //
                // More details of this implementation can be found at: https://github.com/dotnet/coreclr/pull/12894#discussion_r128890596

                // Two scratch values are ping-ponged between "current value" and "product" roles
                // via ref-local reassignment, avoiding copies of the fixed buffers.
                BigInteger temp1 = new BigInteger(s_Pow10UInt32Table[exponent & 0x7]);
                ref BigInteger lhs = ref temp1;

                BigInteger temp2 = new BigInteger(0);
                ref BigInteger product = ref temp2;

                exponent >>= 3;
                uint index = 0;

                while (exponent != 0)
                {
                    // If the current bit is set, multiply it with the corresponding power of 10
                    if ((exponent & 1) != 0)
                    {
                        // Reinterpret the packed table slice in place as a BigInteger (length + blocks).
                        ref BigInteger rhs = ref *(BigInteger*)(Unsafe.AsPointer(ref s_Pow10BigNumTable[s_Pow10BigNumTableIndices[index]]));
                        Multiply(ref lhs, ref rhs, ref product);

                        // Swap to the next temporary
                        ref BigInteger temp = ref product;
                        product = ref lhs;
                        lhs = ref temp;
                    }

                    // Advance to the next bit
                    ++index;
                    exponent >>= 1;
                }

                result.SetValue(ref lhs);
            }

            /// <summary>
            /// Shifts both operands left by the same amount so the divisor's top block lands in
            /// a range that makes <see cref="HeuristicDivide"/>'s quotient estimate accurate.
            /// </summary>
            public static void PrepareHeuristicDivide(ref BigInteger dividend, ref BigInteger divisor)
            {
                uint hiBlock = divisor._blocks[divisor._length - 1];

                if ((hiBlock < 8) || (hiBlock > 429496729))
                {
                    // Inspired by http://www.ryanjuckett.com/programming/printing-floating-point-numbers/
                    // Perform a bit shift on all values to get the highest block of the divisor into
                    // the range [8,429496729]. We are more likely to make accurate quotient estimations
                    // in heuristicDivide() with higher divisor values so
                    // we shift the divisor to place the highest bit at index 27 of the highest block.
                    // This is safe because (2^28 - 1) = 268435455 which is less than 429496729. This means
                    // that all values with a highest bit at index 27 are within range.
                    uint hiBlockLog2 = LogBase2(hiBlock);
                    uint shift = (59 - hiBlockLog2) % 32;

                    divisor.ShiftLeft(shift);
                    dividend.ShiftLeft(shift);
                }
            }

            /// <summary>
            /// Computes output = output + (input &lt;&lt; shift), appending the shifted 64-bit
            /// value as new high blocks of <paramref name="output"/>.
            /// </summary>
            public static void ShiftLeft(ulong input, uint shift, ref BigInteger output)
            {
                if (shift == 0)
                {
                    return;
                }

                uint blocksToShift = Math.DivRem(shift, 32, out uint remainingBitsToShift);

                if (blocksToShift > 0)
                {
                    // If blocks shifted, we should fill the corresponding blocks with zero.
                    output.ExtendBlocks(0, blocksToShift);
                }

                if (remainingBitsToShift == 0)
                {
                    // We shift 32 * n (n >= 1) bits. No remaining bits.
                    output.ExtendBlock((uint)(input));

                    uint highBits = (uint)(input >> 32);

                    if (highBits != 0)
                    {
                        output.ExtendBlock(highBits);
                    }
                }
                else
                {
                    // BUGFIX: extract the high position bits from the full 64-bit input. The previous
                    // code shifted the 32-bit truncation `(uint)(input)` by (64 - r); C# masks a uint
                    // shift count to 5 bits, so that actually shifted by (32 - r) and produced the
                    // wrong value (contradicting the comment below).
                    // Extract the high position bits which would be shifted out of range.
                    uint highPositionBits = (uint)(input >> (int)(64 - remainingBitsToShift));

                    // Shift the input. The result should be stored to current block.
                    ulong shiftedInput = input << (int)(remainingBitsToShift);
                    output.ExtendBlock((uint)(shiftedInput));

                    // BUGFIX: the middle block is the high half of the *shifted* input, not of the
                    // original input. It must also be emitted whenever a more significant block
                    // follows, even if it is itself zero, or the high block would land one position
                    // too low.
                    uint highBits = (uint)(shiftedInput >> 32);

                    if ((highBits != 0) || (highPositionBits != 0))
                    {
                        output.ExtendBlock(highBits);
                    }

                    if (highPositionBits != 0)
                    {
                        // If the high position bits is not 0, we should store them to next block.
                        output.ExtendBlock(highPositionBits);
                    }
                }
            }

            /// <summary>Appends one block as the new most significant block.</summary>
            public void ExtendBlock(uint blockValue)
            {
                _blocks[_length] = blockValue;
                _length++;
            }

            /// <summary>
            /// Appends <paramref name="blockCount"/> blocks: zeros followed by
            /// <paramref name="blockValue"/> as the new most significant block.
            /// </summary>
            public void ExtendBlocks(uint blockValue, uint blockCount)
            {
                Debug.Assert(blockCount > 0);

                if (blockCount == 1)
                {
                    ExtendBlock(blockValue);
                    return;
                }

                Buffer.ZeroMemory((byte*)(GetBlocksPointer() + _length), ((blockCount - 1) * sizeof(uint)));
                _length += (int)(blockCount);
                _blocks[_length - 1] = blockValue;
            }

            /// <summary>True when the value is exactly one.</summary>
            public bool IsOne()
            {
                return (_length == 1)
                    && (_blocks[0] == 1);
            }

            /// <summary>True when the value is zero (no significant blocks).</summary>
            public bool IsZero()
            {
                return _length == 0;
            }

            /// <summary>Multiplies this value by <paramref name="value"/> in place.</summary>
            public void Multiply(uint value)
            {
                Multiply(ref this, value, ref this);
            }

            /// <summary>Multiplies this value by <paramref name="value"/>, via a scratch result.</summary>
            public void Multiply(ref BigInteger value)
            {
                var result = new BigInteger(0);
                Multiply(ref this, ref value, ref result);

                Buffer.Memcpy((byte*)(GetBlocksPointer()), (byte*)(result.GetBlocksPointer()), (result._length) * sizeof(uint));
                _length = result._length;
            }

            /// <summary>Multiplies this value by ten in place ((x &lt;&lt; 3) + (x &lt;&lt; 1)).</summary>
            public void Multiply10()
            {
                if (IsZero())
                {
                    return;
                }

                int index = 0;
                int length = _length;
                ulong carry = 0;

                while (index < length)
                {
                    var block = (ulong)(_blocks[index]);
                    ulong product = (block << 3) + (block << 1) + carry;
                    carry = product >> 32;
                    _blocks[index] = (uint)(product);

                    index++;
                }

                if (carry != 0)
                {
                    Debug.Assert(unchecked((uint)(_length)) + 1 <= MaxBlockCount);
                    _blocks[index] = (uint)(carry);
                    _length += 1;
                }
            }

            /// <summary>Sets the value from a 32-bit unsigned integer (length is always 1).</summary>
            public void SetUInt32(uint value)
            {
                _blocks[0] = value;
                _length = 1;
            }

            /// <summary>Sets the value from a 64-bit unsigned integer (length 1 or 2).</summary>
            public void SetUInt64(ulong value)
            {
                var lower = (uint)(value);
                var upper = (uint)(value >> 32);

                _blocks[0] = lower;
                _blocks[1] = upper;

                _length = (upper == 0) ? 1 : 2;
            }

            /// <summary>Copies the value of <paramref name="rhs"/> into this instance.</summary>
            public void SetValue(ref BigInteger rhs)
            {
                int rhsLength = rhs._length;
                Buffer.Memcpy((byte*)(GetBlocksPointer()), (byte*)(rhs.GetBlocksPointer()), (rhsLength * sizeof(uint)));
                _length = rhsLength;
            }

            /// <summary>Sets the value to zero.</summary>
            public void SetZero()
            {
                _length = 0;
            }

            /// <summary>Shifts this value left by <paramref name="shift"/> bits, in place.</summary>
            public void ShiftLeft(uint shift)
            {
                // Process blocks high to low so that we can safely process in place
                var length = _length;

                if ((length == 0) || (shift == 0))
                {
                    return;
                }

                uint blocksToShift = Math.DivRem(shift, 32, out uint remainingBitsToShift);

                // Copy blocks from high to low
                int readIndex = (length - 1);
                int writeIndex = readIndex + (int)(blocksToShift);

                // Check if the shift is block aligned
                if (remainingBitsToShift == 0)
                {
                    Debug.Assert(writeIndex < MaxBlockCount);

                    while (readIndex >= 0)
                    {
                        _blocks[writeIndex] = _blocks[readIndex];
                        readIndex--;
                        writeIndex--;
                    }

                    _length += (int)(blocksToShift);

                    // Zero the remaining low blocks
                    Buffer.ZeroMemory((byte*)(GetBlocksPointer()), (blocksToShift * sizeof(uint)));
                }
                else
                {
                    // We need an extra block for the partial shift
                    writeIndex++;
                    Debug.Assert(writeIndex < MaxBlockCount);

                    // Set the length to hold the shifted blocks
                    _length = writeIndex + 1;

                    // Output the initial blocks
                    uint lowBitsShift = (32 - remainingBitsToShift);
                    uint highBits = 0;
                    uint block = _blocks[readIndex];
                    uint lowBits = block >> (int)(lowBitsShift);

                    while (readIndex > 0)
                    {
                        _blocks[writeIndex] = highBits | lowBits;
                        highBits = block << (int)(remainingBitsToShift);

                        --readIndex;
                        --writeIndex;

                        block = _blocks[readIndex];
                        lowBits = block >> (int)lowBitsShift;
                    }

                    // Output the final blocks
                    _blocks[writeIndex] = highBits | lowBits;
                    _blocks[writeIndex - 1] = block << (int)(remainingBitsToShift);

                    // Zero the remaining low blocks
                    Buffer.ZeroMemory((byte*)(GetBlocksPointer()), (blocksToShift * sizeof(uint)));

                    // Check if the terminating block has no set bits
                    if (_blocks[_length - 1] == 0)
                    {
                        _length--;
                    }
                }
            }

            private uint* GetBlocksPointer()
            {
                // This is safe to do since we are a ref struct
                return (uint*)(Unsafe.AsPointer(ref _blocks[0]));
            }
        }
    }
}
/***************************************************************************
*  Camera.cs
*
*  Copyright (C) 2007 Alan McGovern
*  Written by Alan McGovern <alan.mcgovern@gmail.com>
****************************************************************************/

/* THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

using System;
using System.Collections.Generic;
using LibGPhoto2;

namespace Gphoto2
{
    /// <summary>
    /// This represents an MTP device
    /// </summary>
    public class Camera : IDisposable
    {
        // NOTE(review): misspelling of "Separator" — public field, so renaming would
        // break consumers; left as-is.
        public static char DirectorySeperator = '/';

        private Abilities abilities;
        private CameraAbilities baseAbilities;
        private LibGPhoto2.Camera camera;          // null until Connect() succeeds, reset by Disconnect()
        private bool connected;
        private bool disposed;
        private LibGPhoto2.Context context;
        private List<Gphoto2.FileSystem> fileSystems;
        private string musicPath;                  // "" until DetectPaths() finds a match
        private string playlistPath;               // "" until DetectPaths() finds a match
        private string photosPath;                 // "" until DetectPaths() finds a match
        private PortInfo port;
        private int usbBusNumber;
        private int usbDeviceNumber;

        /// <value>
        /// The functions which the camera supports
        /// </value>
        public Abilities Abilities
        {
            get { return this.abilities; }
        }

        /// <value>
        /// The camera that this object is wrapping
        /// </value>
        internal LibGPhoto2.Camera Device
        {
            get { return camera; }
        }

        /// <value>
        /// The context which we need to pass libgphoto for every operation
        /// </value>
        internal LibGPhoto2.Context Context
        {
            get { return context; }
        }

        /// <value>
        /// True if the device has been connected to
        /// </value>
        public bool Connected
        {
            get { return connected; }
        }

        /// <value>
        /// True if the device has been disposed
        /// </value>
        public bool Disposed
        {
            get { return disposed; }
        }

        /// <value>
        /// The list of all filesystems on the device
        /// </value>
        public List<FileSystem> FileSystems
        {
            get { return fileSystems; }
        }

        /// <value>
        /// Folder used for music uploads; empty string if none was detected.
        /// </value>
        public string MusicFolder
        {
            get { return musicPath; }
        }

        /// <value>
        /// Folder used for playlist uploads; empty string if none was detected.
        /// </value>
        public string PlaylistFolder
        {
            get { return playlistPath; }
        }

        /// <value>
        /// Folder used for photo uploads; empty string if none was detected.
        /// </value>
        public string PhotoFolder
        {
            get { return photosPath; }
        }

        /// <value>
        /// The name of the device
        /// </value>
        public string Name
        {
            get { return baseAbilities.model; }
        }

        /// <value>
        /// The product id of the device
        /// </value>
        public int Product
        {
            get { return baseAbilities.usb_product; }
        }

        /// <value>
        /// The number of the UsbBus that the device is connected to
        /// </value>
        public int UsbBusNumber
        {
            get { return usbBusNumber; }
        }

        /// <value>
        /// The number of the UsbPort that the device is connected to
        /// </value>
        public int UsbDeviceNumber
        {
            get { return usbDeviceNumber; }
        }

        /// <value>
        /// The vendor ID for the device
        /// </value>
        public int Vendor
        {
            get { return baseAbilities.usb_vendor; }
        }

        // Private: instances are created only by Detect(). The port path is expected to
        // look like "usb:BUS,DEVICE" — Substring(4) strips the "usb:" prefix before
        // splitting out the bus and device numbers.
        private Camera (CameraAbilities abilities, PortInfo port, Context context)
        {
            string[] parts = port.Path.Substring(4).Split(',');

            this.abilities = new Abilities(abilities);
            this.baseAbilities = abilities;
            this.context = context;
            this.musicPath = "";
            this.photosPath = "";
            this.playlistPath = "";
            this.port = port;
            this.usbBusNumber = int.Parse(parts[0]);
            this.usbDeviceNumber = int.Parse(parts[1]);
        }

        // Guard used at the top of every public operation: throws if disposed, and
        // enforces the expected connection state (alreadyConnected == true means the
        // caller requires an active connection; false means it requires none).
        private void CheckConnected(bool alreadyConnected)
        {
            if(this.Disposed)
                throw new ObjectDisposedException(typeof(Camera).Name);

            if(alreadyConnected && !Connected)
                throw new GPhotoException(ErrorCode.GeneralError, "Camera has not been connected to yet");
            if(!alreadyConnected && Connected)
                throw new GPhotoException(ErrorCode.GeneralError, "Camera has already been connected to");
        }

        /// <summary>
        /// Connect to the device
        /// </summary>
        public void Connect()
        {
            CheckConnected(false);

            camera = new LibGPhoto2.Camera();
            camera.SetAbilities(baseAbilities);
            camera.SetPortInfo(port);
            camera.Init(context);

            try
            {
                // Enumerate the device's storages and wrap each one; if anything fails
                // we must Exit() the native camera so it is not left initialized.
                LibGPhoto2.CameraStorageInformation[] storages = camera.GetStorageInformation(Context);
                fileSystems = new List<FileSystem>(storages.Length);
                for (int i = 0; i < storages.Length; i++)
                    fileSystems.Add(new FileSystem(this, storages[i]));

                DetectPaths();
            }
            catch
            {
                camera.Exit(context);
                throw;
            }

            // Only mark connected once initialization fully succeeded.
            connected = true;
        }

        private void DetectPaths()
        {
            // Get all the folders in the root directory (i.e. /store_00010001) and
            // case insensitive compare them to find the correct place to upload music,
            // playlists and pictures.
            StringComparison c = StringComparison.OrdinalIgnoreCase;
            foreach (FileSystem fs in FileSystems)
            {
                foreach (string s in fs.GetFolders())
                {
                    // First match wins for each category; later filesystems cannot
                    // overwrite an already-detected path.
                    if (musicPath == "")
                    {
                        if(s.Equals("Music", c) || s.Equals("My Music", c))
                            musicPath = s;
                    }
                    if (playlistPath == "")
                    {
                        if(s.Equals("Playlist", c) || s.Equals("Playlists", c))
                            playlistPath = s;
                    }
                    if (photosPath == "")
                    {
                        if (s.Equals("Photo", c) || s.Equals("Photos", c))
                            photosPath = s;
                    }
                }
            }
        }

        /// <summary>
        /// Disconnect from the device
        /// </summary>
        public void Disconnect()
        {
            CheckConnected(true);
            connected = false;

            try
            {
                // Exit() shuts the native session down; the using block then disposes
                // the managed wrapper even if Exit() throws.
                using (camera)
                    camera.Exit(context);
            }
            finally
            {
                camera = null;
            }
        }

        /// <summary>
        /// Detects all usable cameras which are connected to the system
        /// </summary>
        /// <returns>A list containing all cameras which can be connected to</returns>
        public static List<Camera> Detect()
        {
            if (Utilities.Is64Bit && Environment.OSVersion.Platform != PlatformID.Unix)
            {
                Console.WriteLine("A 64bit windows system has been detected. This is not supported");
                Console.WriteLine("due to the complexity of interoperating with libgphoto2");
                Console.WriteLine("as it exposes variable length 'long' types in it's API.");
                Console.WriteLine("The API is unlikely to change before version 3 of the library");
                Console.WriteLine("The current status of this can be found on the libgphoto2");
                Console.WriteLine("mailing list. A detailed explanation can be found in the");
                Console.WriteLine("README file for libgphoto2-sharp");
                return new List<Camera>();
            }

            List<Camera> cameras = new List<Camera>();
            // NOTE(review): this Context is shared by every returned Camera and is
            // intentionally not disposed here — the cameras keep using it.
            Context c = new Context();

            using (CameraAbilitiesList abilities = new CameraAbilitiesList())
            using (PortInfoList portInfoList = new PortInfoList())
            using (CameraList cameraList = new CameraList())
            {
                // Get the list of all devices that are currently supported
                abilities.Load(c);

                // Get the list of all the (usb?) ports that are currently available
                portInfoList.Load();

                // Create the list of all the connected devices which can be used
                abilities.Detect(portInfoList, cameraList, c);

                // Scan through all the detected cameras and remove any duplicates
                using (CameraList cams = RemoveDuplicates(cameraList))
                {
                    int count = cams.Count();
                    for(int i = 0; i < count; i++)
                    {
                        CameraAbilities ability = abilities.GetAbilities(abilities.LookupModel(cams.GetName(i)));
                        PortInfo portInfo = portInfoList.GetInfo(portInfoList.LookupPath(cams.GetValue(i)));
                        cameras.Add(new Gphoto2.Camera(ability, portInfo, c));
                    }
                }
            }

            return cameras;
        }

        /// <summary>
        /// Disposes of the device
        /// </summary>
        public void Dispose()
        {
            if(Disposed)
                return;

            // This just makes sure that we have disconnected
            if(Connected)
                Disconnect();

            disposed = true;
        }

        /// <summary>
        /// Disconnects from the device, then reconnects again
        /// </summary>
        public void Reconnect()
        {
            Disconnect();
            Connect();
        }

        // FIXME: The actual conditions for ignoring 'usb:' ones is
        // when it is the only entry for that device. I'm not 100% how
        // to handle to of the same device when they are represented by
        // 'usb:' as opposed to the fully qualified name
        private static CameraList RemoveDuplicates(CameraList cameras)
        {
            CameraList list = new CameraList();

            try
            {
                int count = cameras.Count();

                for(int i=0; i < count; i++)
                {
                    string name = cameras.GetName(i);
                    string value = cameras.GetValue(i);

                    // Bare 'usb:' entries are unqualified duplicates of fully
                    // qualified 'usb:BUS,DEV' entries — skip them.
                    if(value == "usb:")
                        continue;

                    list.Append(name, value);
                }
            }
            catch
            {
                // Ownership of the new list never transfers on failure.
                list.Dispose();
                throw;
            }

            return list;
        }
    }
}
// Copyright 2004-2009 Castle Project - http://www.castleproject.org/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace AspectSharp.Lang.Tests
{
    using System;
    using antlr;
    using NUnit.Framework;
    using AspectSharp.Lang.AST;

    /// <summary>
    /// Exercises the aspect-declaration grammar: target strategies
    /// (single type, assignable, custom matcher, namespace) and mixin includes.
    /// </summary>
    [TestFixture]
    public class ParserAspectTestCase : ParserTestCaseBase
    {
        [Test]
        public void ParsingAspectEmptyDeclaration()
        {
            // A bare aspect declaration targets a single fully-qualified type.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for MyNamespace.MyType \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            Assert.IsNotNull(configuration);
            Assert.IsNotNull(configuration.Aspects);
            Assert.AreEqual(1, configuration.Aspects.Count);

            AspectDefinition aspect = configuration.Aspects[0];
            Assert.IsNotNull(aspect);
            Assert.AreEqual("XPTO", aspect.Name);
            Assert.AreEqual("MyNamespace.MyType", aspect.TargetType.SingleType.TypeName);
            Assert.AreEqual(TargetStrategyEnum.SingleType, aspect.TargetType.TargetStrategy);
        }

        [Test]
        public void ParsingAspectForAssignable()
        {
            // '[ assignableFrom(...) ]' selects every type assignable from the argument.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for [ assignableFrom(Customer) ] \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            Assert.IsNotNull(configuration);
            Assert.IsNotNull(configuration.Aspects);
            Assert.AreEqual(1, configuration.Aspects.Count);

            AspectDefinition aspect = configuration.Aspects[0];
            Assert.IsNotNull(aspect);
            Assert.AreEqual("XPTO", aspect.Name);
            Assert.AreEqual(TargetStrategyEnum.Assignable, aspect.TargetType.TargetStrategy);
            Assert.AreEqual("Customer", aspect.TargetType.AssignType.TypeName);
        }

        [Test]
        [ExpectedException(typeof (MismatchedTokenException))]
        public void InvalidAspectForAssignable()
        {
            // assignableFrom requires a type argument; an empty list is a parse error.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for [ assignableFrom() ] \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
        }

        [Test]
        public void ParsingAspectForCustomMatcher()
        {
            // '[ customMatcher(...) ]' delegates target selection to a user type.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for [ customMatcher(MyMatcher) ] \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            Assert.IsNotNull(configuration);
            Assert.IsNotNull(configuration.Aspects);
            Assert.AreEqual(1, configuration.Aspects.Count);

            AspectDefinition aspect = configuration.Aspects[0];
            Assert.IsNotNull(aspect);
            Assert.AreEqual("XPTO", aspect.Name);
            Assert.AreEqual(TargetStrategyEnum.Custom, aspect.TargetType.TargetStrategy);
            Assert.AreEqual("MyMatcher", aspect.TargetType.CustomMatcherType.TypeName);
        }

        [Test]
        public void ParsingAspectForNamespace()
        {
            // A bracketed dotted name with no qualifier targets a namespace root.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for [ my.namespace.types ] \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            Assert.IsNotNull(configuration);
            Assert.IsNotNull(configuration.Aspects);
            Assert.AreEqual(1, configuration.Aspects.Count);

            AspectDefinition aspect = configuration.Aspects[0];
            Assert.IsNotNull(aspect);
            Assert.AreEqual("XPTO", aspect.Name);
            Assert.AreEqual(TargetStrategyEnum.Namespace, aspect.TargetType.TargetStrategy);
            Assert.AreEqual("my.namespace.types", aspect.TargetType.NamespaceRoot);
            Assert.IsFalse(aspect.TargetType.IncludeSubNamespace);
        }

        [Test]
        public void ParsingAspectForNamespaceWithExcludes()
        {
            // 'excludes(A;B)' removes listed types from the namespace target set.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for [ my.namespace.types excludes(Customer;Author) ] \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            Assert.IsNotNull(configuration);
            Assert.IsNotNull(configuration.Aspects);
            Assert.AreEqual(1, configuration.Aspects.Count);

            AspectDefinition aspect = configuration.Aspects[0];
            Assert.IsNotNull(aspect);
            Assert.AreEqual("XPTO", aspect.Name);
            Assert.AreEqual(TargetStrategyEnum.Namespace, aspect.TargetType.TargetStrategy);
            Assert.AreEqual("my.namespace.types", aspect.TargetType.NamespaceRoot);
            Assert.IsFalse(aspect.TargetType.IncludeSubNamespace);
            Assert.AreEqual(2, aspect.TargetType.Excludes.Count);
        }

        [Test]
        [ExpectedException(typeof (MismatchedTokenException))]
        public void InvalidAspectForCustomMatcher()
        {
            // customMatcher requires a type argument; an empty list is a parse error.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for [ customMatcher() ] \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
        }

        [Test]
        public void ParsingAspectEmptyDeclarationWithFullType()
        {
            // 'in <assembly>' pins the target type to a specific assembly.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for MyNamespace.MyType in My.New.Assembly \r\n" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            Assert.IsNotNull(configuration);
            Assert.IsNotNull(configuration.Aspects);
            Assert.AreEqual(1, configuration.Aspects.Count);

            AspectDefinition aspect = configuration.Aspects[0];
            Assert.IsNotNull(aspect);
            Assert.AreEqual("XPTO", aspect.Name);
            Assert.AreEqual("MyNamespace.MyType", aspect.TargetType.SingleType.TypeName);
            Assert.AreEqual("My.New.Assembly", aspect.TargetType.SingleType.AssemblyReference.AssemblyName);
        }

        [Test]
        public void ParsingAspectWithMixinRefDeclaration()
        {
            // A quoted include is a link reference to a mixin declared elsewhere.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for MyNamespace.MyType \r\n" +
                "" +
                "   include \"customer\"" +
                "" +
                "" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            AspectDefinition aspect = configuration.Aspects[0];
            Assert.AreEqual(1, aspect.Mixins.Count);

            MixinDefinition mixin = aspect.Mixins[0];
            Assert.AreEqual(TargetTypeEnum.Link, mixin.TypeReference.TargetType);
            Assert.AreEqual("customer", mixin.TypeReference.LinkRef);
        }

        [Test]
        public void ParsingAspectWithSingleMixinDeclaration()
        {
            // An unquoted include names the mixin type and its assembly directly.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for MyNamespace.MyType \r\n" +
                "" +
                "   include MyNamespace.Type in MyAssembly " +
                "" +
                "" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            AspectDefinition aspect = configuration.Aspects[0];
            Assert.AreEqual(1, aspect.Mixins.Count);

            MixinDefinition mixin = aspect.Mixins[0];
            Assert.AreEqual(TargetTypeEnum.Type, mixin.TypeReference.TargetType);
            Assert.AreEqual("MyNamespace.Type", mixin.TypeReference.TypeName);
            Assert.AreEqual("MyAssembly", mixin.TypeReference.AssemblyReference.AssemblyName);
        }

        [Test]
        public void ParsingAspectWithAFewMixinDeclarations()
        {
            // Multiple includes accumulate in declaration order.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for MyNamespace.MyType \r\n" +
                "" +
                "   include MyNamespace.Type1 in MyAssembly1 " +
                "   include MyNamespace.Type2 in MyAssembly2 " +
                "   include MyNamespace.Type3 in MyAssembly3 " +
                "" +
                "" +
                "end");

            EngineConfiguration configuration = aspectParser.Parse();
            AspectDefinition aspect = configuration.Aspects[0];
            Assert.AreEqual(3, aspect.Mixins.Count);

            MixinDefinition mixin = aspect.Mixins[0];
            Assert.AreEqual(TargetTypeEnum.Type, mixin.TypeReference.TargetType);
            Assert.AreEqual("MyNamespace.Type1", mixin.TypeReference.TypeName);
            Assert.AreEqual("MyAssembly1", mixin.TypeReference.AssemblyReference.AssemblyName);

            mixin = aspect.Mixins[1];
            Assert.AreEqual(TargetTypeEnum.Type, mixin.TypeReference.TargetType);
            Assert.AreEqual("MyNamespace.Type2", mixin.TypeReference.TypeName);
            Assert.AreEqual("MyAssembly2", mixin.TypeReference.AssemblyReference.AssemblyName);

            mixin = aspect.Mixins[2];
            Assert.AreEqual(TargetTypeEnum.Type, mixin.TypeReference.TargetType);
            Assert.AreEqual("MyNamespace.Type3", mixin.TypeReference.TypeName);
            Assert.AreEqual("MyAssembly3", mixin.TypeReference.AssemblyReference.AssemblyName);
        }

        [Test]
        [ExpectedException(typeof (NoViableAltException))]
        public void ParsingInvalidAspectEmptyDeclaration()
        {
            // A declaration without the closing 'end' keyword cannot be parsed.
            AspectParser aspectParser = CreateParser(
                "aspect XPTO for MyNamespace.MyType \r\n" +
                "");

            aspectParser.Parse();
        }
    }
}
// 
// Copyright (c) Microsoft and contributors.  All rights reserved.
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// 
// See the License for the specific language governing permissions and
// limitations under the License.
// 

// Warning: This code was generated by a tool.
// 
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure;
using Microsoft.WindowsAzure.Common;
using Microsoft.WindowsAzure.Common.Internals;
using Microsoft.WindowsAzure.Management.Monitoring.Metrics;
using Microsoft.WindowsAzure.Management.Monitoring.Metrics.Models;
using Newtonsoft.Json.Linq;

namespace Microsoft.WindowsAzure.Management.Monitoring.Metrics
{
    // NOTE(review): auto-generated (Hyak) service-operations class; do not hand-edit
    // the request/response plumbing — regenerate instead.
    internal partial class MetricValueOperations : IServiceOperations<MetricsClient>, Microsoft.WindowsAzure.Management.Monitoring.Metrics.IMetricValueOperations
    {
        /// <summary>
        /// Initializes a new instance of the MetricValueOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        internal MetricValueOperations(MetricsClient client)
        {
            this._client = client;
        }
        
        private MetricsClient _client;
        
        /// <summary>
        /// Gets a reference to the
        /// Microsoft.WindowsAzure.Management.Monitoring.Metrics.MetricsClient.
        /// </summary>
        public MetricsClient Client
        {
            get { return this._client; }
        }
        
        /// <summary>
        /// The List Metric Value operation lists the metric value sets for the
        /// resource metrics.
        /// </summary>
        /// <param name='resourceId'>
        /// Required. The id of the resource.
        /// </param>
        /// <param name='metricNames'>
        /// Required. The names of the metrics.
        /// </param>
        /// <param name='metricNamespace'>
        /// Optional. The namespace of the metrics.
        /// </param>
        /// <param name='timeGrain'>
        /// Required. The time grain of the metrics.
        /// </param>
        /// <param name='startTime'>
        /// Required. The start time (in UTC) of the metrics.
        /// </param>
        /// <param name='endTime'>
        /// Required. The end time (in UTC) of the metrics.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The List Metric values operation response.
        /// </returns>
        /// <exception cref="ArgumentNullException">resourceId or metricNames is null.</exception>
        /// <exception cref="ArgumentException">metricNames is empty.</exception>
        public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.Monitoring.Metrics.Models.MetricValueListResponse> ListAsync(string resourceId, IList<string> metricNames, string metricNamespace, TimeSpan timeGrain, DateTime startTime, DateTime endTime, CancellationToken cancellationToken)
        {
            // Validate
            if (resourceId == null)
            {
                throw new ArgumentNullException("resourceId");
            }
            if (metricNames == null)
            {
                throw new ArgumentNullException("metricNames");
            }
            if (metricNames.Count <= 0)
            {
                throw new ArgumentException("metricNames cannot be empty.");
            }
            
            // Tracing
            bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = Tracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceId", resourceId);
                tracingParameters.Add("metricNames", metricNames);
                tracingParameters.Add("metricNamespace", metricNamespace);
                tracingParameters.Add("timeGrain", timeGrain);
                tracingParameters.Add("startTime", startTime);
                tracingParameters.Add("endTime", endTime);
                Tracing.Enter(invocationId, this, "ListAsync", tracingParameters);
            }
            
            // Construct URL
            // NOTE(review): generator quirk — the first query parameter is appended
            // with '&' right after '?', producing "query?&resourceId=". Servers
            // tolerate this; left as generated.
            string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/monitoring/metricvalues/query?";
            url = url + "&resourceId=" + Uri.EscapeDataString(resourceId.Trim());
            if (metricNamespace != null)
            {
                url = url + "&namespace=" + Uri.EscapeDataString(metricNamespace != null ? metricNamespace.Trim() : "");
            }
            url = url + "&names=" + Uri.EscapeDataString(string.Join(",", metricNames));
            url = url + "&timeGrain=" + Uri.EscapeDataString(TypeConversion.To8601String(timeGrain));
            // "{0:O}" = ISO 8601 round-trip format; times are normalized to UTC first.
            url = url + "&startTime=" + Uri.EscapeDataString(string.Format(CultureInfo.InvariantCulture, "{0:O}", startTime.ToUniversalTime()));
            url = url + "&endTime=" + Uri.EscapeDataString(string.Format(CultureInfo.InvariantCulture, "{0:O}", endTime.ToUniversalTime()));
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");
            
            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Get;
                httpRequest.RequestUri = new Uri(url);
                
                // Set Headers
                httpRequest.Headers.Add("Accept", "application/json");
                httpRequest.Headers.Add("x-ms-version", "2013-10-01");
                
                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                
                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        Tracing.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        Tracing.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Anything other than 200 OK becomes a CloudException carrying the body.
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            Tracing.Error(invocationId, ex);
                        }
                        throw ex;
                    }
                    
                    // Create Result
                    MetricValueListResponse result = null;
                    
                    // Deserialize Response: walk the JSON payload defensively — every
                    // property is optional and null-checked before assignment.
                    cancellationToken.ThrowIfCancellationRequested();
                    string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    result = new MetricValueListResponse();
                    JToken responseDoc = null;
                    if (string.IsNullOrEmpty(responseContent) == false)
                    {
                        responseDoc = JToken.Parse(responseContent);
                    }
                    
                    if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                    {
                        MetricValueSetCollection metricValueSetCollectionInstance = new MetricValueSetCollection();
                        result.MetricValueSetCollection = metricValueSetCollectionInstance;
                        
                        JToken valueArray = responseDoc["Value"];
                        if (valueArray != null && valueArray.Type != JTokenType.Null)
                        {
                            foreach (JToken valueValue in ((JArray)valueArray))
                            {
                                MetricValueSet metricValueSetInstance = new MetricValueSet();
                                metricValueSetCollectionInstance.Value.Add(metricValueSetInstance);
                                
                                JToken nameValue = valueValue["Name"];
                                if (nameValue != null && nameValue.Type != JTokenType.Null)
                                {
                                    string nameInstance = ((string)nameValue);
                                    metricValueSetInstance.Name = nameInstance;
                                }
                                
                                JToken namespaceValue = valueValue["Namespace"];
                                if (namespaceValue != null && namespaceValue.Type != JTokenType.Null)
                                {
                                    string namespaceInstance = ((string)namespaceValue);
                                    metricValueSetInstance.Namespace = namespaceInstance;
                                }
                                
                                JToken displayNameValue = valueValue["DisplayName"];
                                if (displayNameValue != null && displayNameValue.Type != JTokenType.Null)
                                {
                                    string displayNameInstance = ((string)displayNameValue);
                                    metricValueSetInstance.DisplayName = displayNameInstance;
                                }
                                
                                JToken unitValue = valueValue["Unit"];
                                if (unitValue != null && unitValue.Type != JTokenType.Null)
                                {
                                    string unitInstance = ((string)unitValue);
                                    metricValueSetInstance.Unit = unitInstance;
                                }
                                
                                JToken primaryAggregationValue = valueValue["PrimaryAggregation"];
                                if (primaryAggregationValue != null && primaryAggregationValue.Type != JTokenType.Null)
                                {
                                    string primaryAggregationInstance = ((string)primaryAggregationValue);
                                    metricValueSetInstance.PrimaryAggregation = primaryAggregationInstance;
                                }
                                
                                JToken timeGrainValue = valueValue["TimeGrain"];
                                if (timeGrainValue != null && timeGrainValue.Type != JTokenType.Null)
                                {
                                    // TimeGrain comes back as an ISO 8601 duration string.
                                    TimeSpan timeGrainInstance = TypeConversion.From8601TimeSpan(((string)timeGrainValue));
                                    metricValueSetInstance.TimeGrain = timeGrainInstance;
                                }
                                
                                JToken startTimeValue = valueValue["StartTime"];
                                if (startTimeValue != null && startTimeValue.Type != JTokenType.Null)
                                {
                                    DateTime startTimeInstance = ((DateTime)startTimeValue);
                                    metricValueSetInstance.StartTime = startTimeInstance;
                                }
                                
                                JToken endTimeValue = valueValue["EndTime"];
                                if (endTimeValue != null && endTimeValue.Type != JTokenType.Null)
                                {
                                    DateTime endTimeInstance = ((DateTime)endTimeValue);
                                    metricValueSetInstance.EndTime = endTimeInstance;
                                }
                                
                                JToken metricValuesArray = valueValue["MetricValues"];
                                if (metricValuesArray != null && metricValuesArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken metricValuesValue in ((JArray)metricValuesArray))
                                    {
                                        MetricValue metricValueInstance = new MetricValue();
                                        metricValueSetInstance.MetricValues.Add(metricValueInstance);
                                        
                                        JToken timestampValue = metricValuesValue["Timestamp"];
                                        if (timestampValue != null && timestampValue.Type != JTokenType.Null)
                                        {
                                            DateTime timestampInstance = ((DateTime)timestampValue);
                                            metricValueInstance.Timestamp = timestampInstance;
                                        }
                                        
                                        JToken averageValue = metricValuesValue["Average"];
                                        if (averageValue != null && averageValue.Type != JTokenType.Null)
                                        {
                                            double averageInstance = ((double)averageValue);
                                            metricValueInstance.Average = averageInstance;
                                        }
                                        
                                        JToken minimumValue = metricValuesValue["Minimum"];
                                        if (minimumValue != null && minimumValue.Type != JTokenType.Null)
                                        {
                                            double minimumInstance = ((double)minimumValue);
                                            metricValueInstance.Minimum = minimumInstance;
                                        }
                                        
                                        JToken maximumValue = metricValuesValue["Maximum"];
                                        if (maximumValue != null && maximumValue.Type != JTokenType.Null)
                                        {
                                            double maximumInstance = ((double)maximumValue);
                                            metricValueInstance.Maximum = maximumInstance;
                                        }
                                        
                                        JToken totalValue = metricValuesValue["Total"];
                                        if (totalValue != null && totalValue.Type != JTokenType.Null)
                                        {
                                            double totalInstance = ((double)totalValue);
                                            metricValueInstance.Total = totalInstance;
                                        }
                                        
                                        JToken annotationValue = metricValuesValue["Annotation"];
                                        if (annotationValue != null && annotationValue.Type != JTokenType.Null)
                                        {
                                            string annotationInstance = ((string)annotationValue);
                                            metricValueInstance.Annotation = annotationInstance;
                                        }
                                        
                                        JToken countValue = metricValuesValue["Count"];
                                        if (countValue != null && countValue.Type != JTokenType.Null)
                                        {
                                            int countInstance = ((int)countValue);
                                            metricValueInstance.Count = countInstance;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }
                    
                    if (shouldTrace)
                    {
                        Tracing.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    // Always dispose the response, even when an exception is thrown above.
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
    }
}
using System;
using System.Text;
using System.Reflection;

/// <summary>
/// Parses an AS 2805 (Australian ISO 8583 variant) financial message from raw bytes.
/// Each optional data element (DE) is stored in its own public field; fields left
/// null/zero were not present in the message bitmap.
/// </summary>
class As2805Message
{
    #region Header fields and Data Element variables
    public int mti = 0;                               // Message Type Indicator (4 BCD digits)
    public byte[] primaryBitmap = { 0, 0, 0, 0, 0, 0, 0, 0 };
    public byte[] de1_SecondaryBitmap = { 0, 0, 0, 0, 0, 0, 0, 0 }; // DE1: Bitmap, extended (b64)
    public string de2_PAN;                  // DE2: PAN (n19)
    public string de3_ProcCode;             // DE3: Processing Code (n6)
    public string de4_AmtTxn;               // DE4: Amount, Txn (n12)
    public string de5_AmtSettle;            // DE5: Amount, Settlement (n12)
    public string de6_AmtCardhBill;         // DE6: Amount, Cardholder billing (n12)
    public string de7_TransDttm;            // DE7: Transmission date and time (n10)
    public string de8_AmtCardhBillFee;      // DE8: Amount, Cardholder billing fee (n8)
    public string de9_ConvRateSettle;       // DE9: Conversion rate, settlement (n8)
    public string de10_ConvRateCardhBill;   // DE10: Conversion rate, cardh bill (n8)
    public string de11_STAN;                // DE11: STAN (n6)
    public string de12_TimeLocal;           // DE12: Time, Local txn (n6)
    public string de13_DateLocal;           // DE13: Date, Local txn (n4)
    public string de14_DateExpiry;          // DE14: Date, expiry (n4)
    public string de15_DateSetl;            // DE15: Date, Settlement (n4)
    public string de16_DateConv;            // DE16: Date, Conversion (n4)
    public string de17_DateCapt;            // DE17: Date, Capture (n4)
    public string de18_MerchType;           // DE18: Merchants type (n4)
    public string de19_AcqInstCtryCode;     // DE19: Acquiring Inst Count code (n3)
    public string de20_PriAccNumExtCtryCode; // DE20: Prim Acc Num Ext, Count Code (n3)
    public string de21_FwdInstCtryCode;     // DE21: Forwarding inst count code (n3)
    public string de22_PosEntryMode;        // DE22: POS entry mode (n3)
    public string de23_CardSeqNo;           // DE23: Card Seq No (n3)
    public string de24_NetIntlId;           // DE24: Net Intl Id (n3)
    public string de25_PosCondCode;         // DE25: POS Cond Code (n2)
    public string de26_PosPinCaptCode;      // DE26: POS PIN Capture code (n2)
    public string de27_AuthIdRespLen;       // DE27: Auth Ident Resp Len (n1)
    public string de28_AmtTxnFee;           // DE28: Amount, Txn fee (xn8)
    public string de29_AmtSettleFee;        // DE29: Amount, Settlment fee (xn8)
    public string de30_AmtTxnProcFee;       // DE30: Amount, Txn Proc Fee (xn8)
    public string de31_AmtSettleProcFee;    // DE31: Amount, Setl Proc Fee (xn8)
    public string de32_AcqInstIdCode;       // DE32: Acq Inst Id Code (n11)
    public string de33_FwdInstIdCode;       // DE33: Fwd Inst Id code (n11)
    public string de34_PanExt;              // DE34: PAN, Extended (ns28)
    public byte[] de35_Track2;              // DE35: Track 2 (z37)
    public byte[] de36_Track3;              // DE36: Track 3 (z104)
    public string de37_RetRefNo;            // DE37: RRN (an12)
    public string de38_AuthIdentResp;       // DE38: Auth identification response (an6)
    public string de39_RespCode;            // DE39: Response code (an2)
    public string de40_ServRestrCode;       // DE40: Service Restr Code (an3)
    public string de41_CardAcptTermId;      // DE41: TID (orig ans) (an8)
    public string de42_CardAcptIdCode;      // DE42: Card Acceptor Identification Code (orig ans) (an15)
    public string de43_CardAcptNameLoc;     // DE43: Card acpt name/loc (orig ans) (an40)
    public byte[] de44_AddtRespData;        // DE44: Addtl resp data (ans25)
    public byte[] de45_Track1;              // DE45: Track 1 (ans76)
    public byte[] de46_AddtlDataIso;        // DE46: Addtl data - ISO (ans999)
    public byte[] de47_AddtlDataNat;        // DE47: Addtl data - National (ans999)
    public byte[] de48_AddtlDataPriv;       // DE48: Addtl data - Private (ans999)
    public string de49_CurCodeTxn;          // DE49: Cur code, txn (a or n3)
    public string de50_CurCodeSettle;       // DE50: Cur code, setl (a or n3)
    public string de51_CurCodeCardhBill;    // DE51: Cur code, cardh bill (a or n3)
    public byte[] de52_PinData;             // DE52: Pin data (b64)
    public string de53_SecControlInfo;      // DE53: Security related control info (n16)
    public string de54_AddtlAmts;           // DE54: Additional amounts (an120)
    public byte[] de55_ResIso;              // DE55: ICC Data (Reserved ISO) (ans999)
    public byte[] de56_ResIso;              // DE56: Reserved ISO (ans999)
    public string de57_AmtCash;             // DE57: Amount, Cash oz only (n12)
    public string de58_BalanceLedger;       // DE58: Ledger balance oz only (n12)
    public string de59_BalanceCleared;      // DE59: Acct balance, cleared funds oz only (n12)
    public string de60_PreswipeStatus;      // DE60: Preswipe status (Reserved Private, normally ans 999) (an1)
    public byte[] de61_ResPriv;             // DE61: Reserved Private (ans999)
    public byte[] de62_ResPriv;             // DE62: Reserved Private (ans999)
    public byte[] de63_ResPriv;             // DE63: Reserved Private (ans999)
    public byte[] de64_MAC;                 // DE64: MAC (b64)
    public byte[] de65_Bitmap;              // DE65: Bit map, extended 2 (b64)
    public string de66_SettleCode;          // DE66: Settlement code (n1)
    public string de67_ExtPayCode;          // DE67: Ext payment code (n2)
    public string de68_RecvInstCtryCode;    // DE68: Recv inst count code (n3)
    public string de69_SettleInstCtryCode;  // DE69: Setl Inst Count code (n3)
    public string de70_NetMgtInfoCode;      // DE70: Net mgt info code (n3)
    public string de71_MessageNo;           // DE71: Message No (n4)
    public string de72_MessageNoLast;       // DE72: Message No Last (n4)
    public string de73_DateAction;          // DE73: Date, Action (n6)
    public string de74_CreditsNo;           // DE74: Credits, Num (n10)
    public string de75_CreditRevsNo;        // DE75: Credit revs, num (n10)
    public string de76_DebitsNo;            // DE76: Debits, num (n10)
    public string de77_DebitRevsNo;         // DE77: Debit revs, num (n10)
    public string de78_TransfersNo;         // DE78: Transfers, num (n10)
    public string de79_TransferRevsNo;      // DE79: Transfer revs, num (n10)
    public string de80_InquiriesNo;         // DE80: Inquiries, num (n10)
    public string de81_AuthsNo;             // DE81: Auths, num (n10)
    public string de82_CreditsProcFeeAmt;   // DE82: Credits, proc fee amt (n12)
    public string de83_CreditsTxnFeeAmt;    // DE83: Credits, transaction fee amt (n12)
    public string de84_DebitsProcFeeAmt;    // DE84: Debits, proc fee amt (n12)
    public string de85_DebitsTxnFeeAmt;     // DE85: Debits, transaction fee amt (n12)
    public string de86_CreditsAmt;          // DE86: Credits, amt (n16)
    public string de87_CreditRevsAmt;       // DE87: Credit revs, amt (n16)
    public string de88_DebitsAmt;           // DE88: Debits, amt (n16)
    public string de89_DebitRevsAmt;        // DE89: Debit revs, amount (n16)
    public string de90_OrigDataElem;        // DE90: Original data elements (n42)
    public string de91_FileUpdateCode;      // DE91: File update code (an1)
    public string de92_FileSecCode;         // DE92: File security code (an2)
    public string de93_RespInd;             // DE93: Response indicator (an5)
    public string de94_ServInd;             // DE94: Service indicator (an7)
    public string de95_ReplAmts;            // DE95: Replacement amounts (an42)
    public byte[] de96_MsgSecCode;          // DE96: Message Security code (b64)
    public string de97_AmtNetSetl;          // DE97: Amount, net settlement (xn16)
    public byte[] de98_Payee;               // DE98: Payee (ans25)
    public string de99_SettleInstIdCode;    // DE99: Setl inst id code (n11)
    public string de100_RecvInstIdCode;     // DE100: Recv inst id code (n11)
    public string de101_FileName;           // DE101: File name (normally ans) (an17)
    public byte[] de102_AcctId1;            // DE102: Account id 1 (ans28)
    public byte[] de103_AcctId2;            // DE103: Account id 2 (ans28)
    public byte[] de104_TxnDesc;            // DE104: Txn description (ans100)
    public byte[] de105_ResvIso;            // DE105: Reserved for iso use (ans999)
    public byte[] de106_ResvIso;            // DE106: Reserved for iso use (ans999)
    public byte[] de107_ResvIso;            // DE107: Reserved for iso use (ans999)
    public byte[] de108_ResvIso;            // DE108: Reserved for iso use (ans999)
    public byte[] de109_ResvIso;            // DE109: Reserved for iso use (ans999)
    public byte[] de110_ResvIso;            // DE110: Reserved for iso use (ans999)
    public byte[] de111_ResvIso;            // DE111: Reserved for iso use (ans999)
    public byte[] de112_ResvNat;            // DE112: Reserved for national use (ans999)
    public byte[] de113_ResvNat;            // DE113: Reserved for national use (ans999)
    public byte[] de114_ResvNat;            // DE114: Reserved for national use (ans999)
    public byte[] de115_ResvNat;            // DE115: Reserved for national use (ans999)
    public byte[] de116_ResvNat;            // DE116: Reserved for national use (ans999)
    public string de117_CardStatUpdCode;    // DE117: Card status update code (oz only) (an2)
    public string de118_TotalCashNo;        // DE118: Cash, total number oz only (n10)
    public string de119_TotalCashAmt;       // DE119: Cash, total amount oz only (n16)
    public byte[] de120_ResvPriv;           // DE120: Reserved for private use (ans999)
    public byte[] de121_ResvPriv;           // DE121: Reserved for private use (ans999)
    public byte[] de122_ResvPriv;           // DE122: Reserved for private use (ans999)
    public byte[] de123_ResvPriv;           // DE123: Reserved for private use (ans999)
    public byte[] de124_ResvPriv;           // DE124: Reserved for private use (ans999)
    public byte[] de125_ResvPriv;           // DE125: Reserved for private use (ans999)
    public byte[] de126_ResvPriv;           // DE126: Reserved for private use (ans999)
    public byte[] de127_ResvPriv;           // DE127: Reserved for private use (ans999)
    public byte[] de128_MAC;                // DE128: MAC (b64)
    #endregion

    #region Parser
    /// <summary>
    /// Creates an As2805Message object by parsing the given message.
    /// The message should start with the first 2 bytes of the MTI.
    /// Data elements must appear in bitmap order; each set bit consumes the
    /// next field from the buffer, so the if-chain order below is significant.
    /// </summary>
    public As2805Message(byte[] buf)
    {
        // Read the header fields: MTI is 2 bytes of packed BCD (4 digits).
        mti = (buf[0] >> 4) * 1000 + (buf[0] & 0xf) * 100 + (buf[1] >> 4) * 10 + (buf[1] & 0xf);
        primaryBitmap = B(buf, 2, 8);

        // This keeps track of how much of the buffer we've read so far
        // (2 bytes MTI + 8 bytes primary bitmap).
        int offset = 10;

        // Read the fields according to which ones are set in the bitmap.
        // Bit 1 (MSB of byte 0) = DE1 ... bit 64 (LSB of byte 7) = DE64.
        if ((primaryBitmap[0] & 128) > 0) de1_SecondaryBitmap = ReadFixRaw(buf, ref offset, 8);
        if ((primaryBitmap[0] & 64) > 0) de2_PAN = ReadVar2Packed(buf, ref offset, 19);
        if ((primaryBitmap[0] & 32) > 0) de3_ProcCode = ReadFixPacked(buf, ref offset, 6);
        if ((primaryBitmap[0] & 16) > 0) de4_AmtTxn = ReadFixPacked(buf, ref offset, 12);
        if ((primaryBitmap[0] & 8) > 0) de5_AmtSettle = ReadFixPacked(buf, ref offset, 12);
        if ((primaryBitmap[0] & 4) > 0) de6_AmtCardhBill = ReadFixPacked(buf, ref offset, 12);
        if ((primaryBitmap[0] & 2) > 0) de7_TransDttm = ReadFixPacked(buf, ref offset, 10);
        if ((primaryBitmap[0] & 1) > 0) de8_AmtCardhBillFee = ReadFixPacked(buf, ref offset, 8);
        if ((primaryBitmap[1] & 128) > 0) de9_ConvRateSettle = ReadFixPacked(buf, ref offset, 8);
        if ((primaryBitmap[1] & 64) > 0) de10_ConvRateCardhBill = ReadFixPacked(buf, ref offset, 8);
        if ((primaryBitmap[1] & 32) > 0) de11_STAN = ReadFixPacked(buf, ref offset, 6);
        if ((primaryBitmap[1] & 16) > 0) de12_TimeLocal = ReadFixPacked(buf, ref offset, 6);
        if ((primaryBitmap[1] & 8) > 0) de13_DateLocal = ReadFixPacked(buf, ref offset, 4);
        if ((primaryBitmap[1] & 4) > 0) de14_DateExpiry = ReadFixPacked(buf, ref offset, 4);
        if ((primaryBitmap[1] & 2) > 0) de15_DateSetl = ReadFixPacked(buf, ref offset, 4);
        if ((primaryBitmap[1] & 1) > 0) de16_DateConv = ReadFixPacked(buf, ref offset, 4);
        if ((primaryBitmap[2] & 128) > 0) de17_DateCapt = ReadFixPacked(buf, ref offset, 4);
        if ((primaryBitmap[2] & 64) > 0) de18_MerchType = ReadFixPacked(buf, ref offset, 4);
        if ((primaryBitmap[2] & 32) > 0) de19_AcqInstCtryCode = ReadFixPacked(buf, ref offset, 3);
        if ((primaryBitmap[2] & 16) > 0) de20_PriAccNumExtCtryCode = ReadFixPacked(buf, ref offset, 3);
        if ((primaryBitmap[2] & 8) > 0) de21_FwdInstCtryCode = ReadFixPacked(buf, ref offset, 3);
        if ((primaryBitmap[2] & 4) > 0) de22_PosEntryMode = ReadFixPacked(buf, ref offset, 3);
        if ((primaryBitmap[2] & 2) > 0) de23_CardSeqNo = ReadFixPacked(buf, ref offset, 3);
        if ((primaryBitmap[2] & 1) > 0) de24_NetIntlId = ReadFixPacked(buf, ref offset, 3);
        if ((primaryBitmap[3] & 128) > 0) de25_PosCondCode = ReadFixPacked(buf, ref offset, 2);
        if ((primaryBitmap[3] & 64) > 0) de26_PosPinCaptCode = ReadFixPacked(buf, ref offset, 2);
        if ((primaryBitmap[3] & 32) > 0) de27_AuthIdRespLen = ReadFixPacked(buf, ref offset, 1);
        if ((primaryBitmap[3] & 16) > 0) de28_AmtTxnFee = ReadFixPacked(buf, ref offset, 8);
        if ((primaryBitmap[3] & 8) > 0) de29_AmtSettleFee = ReadFixPacked(buf, ref offset, 8);
        if ((primaryBitmap[3] & 4) > 0) de30_AmtTxnProcFee = ReadFixPacked(buf, ref offset, 8);
        if ((primaryBitmap[3] & 2) > 0) de31_AmtSettleProcFee = ReadFixPacked(buf, ref offset, 8);
        if ((primaryBitmap[3] & 1) > 0) de32_AcqInstIdCode = ReadVar2Packed(buf, ref offset, 11);
        if ((primaryBitmap[4] & 128) > 0) de33_FwdInstIdCode = ReadVar2Packed(buf, ref offset, 11);
        if ((primaryBitmap[4] & 64) > 0) de34_PanExt = S(ReadVar2Raw(buf, ref offset, 28));
        if ((primaryBitmap[4] & 32) > 0) de35_Track2 = ReadVar2Raw(buf, ref offset, 37);
        if ((primaryBitmap[4] & 16) > 0) de36_Track3 = ReadVar3Raw(buf, ref offset, 104);
        if ((primaryBitmap[4] & 8) > 0) de37_RetRefNo = S(ReadFixRaw(buf, ref offset, 12));
        if ((primaryBitmap[4] & 4) > 0) de38_AuthIdentResp = S(ReadFixRaw(buf, ref offset, 6));
        if ((primaryBitmap[4] & 2) > 0) de39_RespCode = S(ReadFixRaw(buf, ref offset, 2));
        if ((primaryBitmap[4] & 1) > 0) de40_ServRestrCode = S(ReadFixRaw(buf, ref offset, 3));
        if ((primaryBitmap[5] & 128) > 0) de41_CardAcptTermId = S(ReadFixRaw(buf, ref offset, 8));
        if ((primaryBitmap[5] & 64) > 0) de42_CardAcptIdCode = S(ReadFixRaw(buf, ref offset, 15));
        if ((primaryBitmap[5] & 32) > 0) de43_CardAcptNameLoc = S(ReadFixRaw(buf, ref offset, 40));
        if ((primaryBitmap[5] & 16) > 0) de44_AddtRespData = ReadVar2Raw(buf, ref offset, 25);
        if ((primaryBitmap[5] & 8) > 0) de45_Track1 = ReadVar2Raw(buf, ref offset, 76);
        if ((primaryBitmap[5] & 4) > 0) de46_AddtlDataIso = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[5] & 2) > 0) de47_AddtlDataNat = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[5] & 1) > 0) de48_AddtlDataPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[6] & 128) > 0) de49_CurCodeTxn = S(ReadFixRaw(buf, ref offset, 3));
        if ((primaryBitmap[6] & 64) > 0) de50_CurCodeSettle = S(ReadFixRaw(buf, ref offset, 3));
        if ((primaryBitmap[6] & 32) > 0) de51_CurCodeCardhBill = S(ReadFixRaw(buf, ref offset, 3));
        if ((primaryBitmap[6] & 16) > 0) de52_PinData = ReadFixRaw(buf, ref offset, 8);
        if ((primaryBitmap[6] & 8) > 0) de53_SecControlInfo = ReadFixPacked(buf, ref offset, 16);
        if ((primaryBitmap[6] & 4) > 0) de54_AddtlAmts = S(ReadVar3Raw(buf, ref offset, 120));
        if ((primaryBitmap[6] & 2) > 0) de55_ResIso = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[6] & 1) > 0) de56_ResIso = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[7] & 128) > 0) de57_AmtCash = ReadFixPacked(buf, ref offset, 12);
        if ((primaryBitmap[7] & 64) > 0) de58_BalanceLedger = ReadFixPacked(buf, ref offset, 12);
        if ((primaryBitmap[7] & 32) > 0) de59_BalanceCleared = ReadFixPacked(buf, ref offset, 12);
        if ((primaryBitmap[7] & 16) > 0) de60_PreswipeStatus = S(ReadVar3Raw(buf, ref offset, 1));
        if ((primaryBitmap[7] & 8) > 0) de61_ResPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[7] & 4) > 0) de62_ResPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[7] & 2) > 0) de63_ResPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((primaryBitmap[7] & 1) > 0) de64_MAC = ReadFixRaw(buf, ref offset, 8);

        // Secondary bitmap (DE1) gates DE65-DE128.
        // NOTE(review): de1_SecondaryBitmap defaults to all-zero, so these reads
        // are safely skipped when DE1 was absent.
        if ((de1_SecondaryBitmap[0] & 128) > 0) de65_Bitmap = ReadFixRaw(buf, ref offset, 8);
        if ((de1_SecondaryBitmap[0] & 64) > 0) de66_SettleCode = ReadFixPacked(buf, ref offset, 1);
        if ((de1_SecondaryBitmap[0] & 32) > 0) de67_ExtPayCode = ReadFixPacked(buf, ref offset, 2);
        if ((de1_SecondaryBitmap[0] & 16) > 0) de68_RecvInstCtryCode = ReadFixPacked(buf, ref offset, 3);
        if ((de1_SecondaryBitmap[0] & 8) > 0) de69_SettleInstCtryCode = ReadFixPacked(buf, ref offset, 3);
        if ((de1_SecondaryBitmap[0] & 4) > 0) de70_NetMgtInfoCode = ReadFixPacked(buf, ref offset, 3);
        if ((de1_SecondaryBitmap[0] & 2) > 0) de71_MessageNo = ReadFixPacked(buf, ref offset, 4);
        if ((de1_SecondaryBitmap[0] & 1) > 0) de72_MessageNoLast = ReadFixPacked(buf, ref offset, 4);
        if ((de1_SecondaryBitmap[1] & 128) > 0) de73_DateAction = ReadFixPacked(buf, ref offset, 6);
        if ((de1_SecondaryBitmap[1] & 64) > 0) de74_CreditsNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[1] & 32) > 0) de75_CreditRevsNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[1] & 16) > 0) de76_DebitsNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[1] & 8) > 0) de77_DebitRevsNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[1] & 4) > 0) de78_TransfersNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[1] & 2) > 0) de79_TransferRevsNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[1] & 1) > 0) de80_InquiriesNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[2] & 128) > 0) de81_AuthsNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[2] & 64) > 0) de82_CreditsProcFeeAmt = ReadFixPacked(buf, ref offset, 12);
        if ((de1_SecondaryBitmap[2] & 32) > 0) de83_CreditsTxnFeeAmt = ReadFixPacked(buf, ref offset, 12);
        if ((de1_SecondaryBitmap[2] & 16) > 0) de84_DebitsProcFeeAmt = ReadFixPacked(buf, ref offset, 12);
        if ((de1_SecondaryBitmap[2] & 8) > 0) de85_DebitsTxnFeeAmt = ReadFixPacked(buf, ref offset, 12);
        if ((de1_SecondaryBitmap[2] & 4) > 0) de86_CreditsAmt = ReadFixPacked(buf, ref offset, 16);
        if ((de1_SecondaryBitmap[2] & 2) > 0) de87_CreditRevsAmt = ReadFixPacked(buf, ref offset, 16);
        if ((de1_SecondaryBitmap[2] & 1) > 0) de88_DebitsAmt = ReadFixPacked(buf, ref offset, 16);
        if ((de1_SecondaryBitmap[3] & 128) > 0) de89_DebitRevsAmt = ReadFixPacked(buf, ref offset, 16);
        if ((de1_SecondaryBitmap[3] & 64) > 0) de90_OrigDataElem = ReadFixPacked(buf, ref offset, 42);
        if ((de1_SecondaryBitmap[3] & 32) > 0) de91_FileUpdateCode = S(ReadFixRaw(buf, ref offset, 1));
        if ((de1_SecondaryBitmap[3] & 16) > 0) de92_FileSecCode = S(ReadFixRaw(buf, ref offset, 2));
        if ((de1_SecondaryBitmap[3] & 8) > 0) de93_RespInd = S(ReadFixRaw(buf, ref offset, 5));
        if ((de1_SecondaryBitmap[3] & 4) > 0) de94_ServInd = S(ReadFixRaw(buf, ref offset, 7));
        if ((de1_SecondaryBitmap[3] & 2) > 0) de95_ReplAmts = S(ReadFixRaw(buf, ref offset, 42));
        if ((de1_SecondaryBitmap[3] & 1) > 0) de96_MsgSecCode = ReadFixRaw(buf, ref offset, 8);
        if ((de1_SecondaryBitmap[4] & 128) > 0) de97_AmtNetSetl = ReadFixPacked(buf, ref offset, 16);
        if ((de1_SecondaryBitmap[4] & 64) > 0) de98_Payee = ReadVar2Raw(buf, ref offset, 25);
        if ((de1_SecondaryBitmap[4] & 32) > 0) de99_SettleInstIdCode = ReadVar2Packed(buf, ref offset, 11);
        if ((de1_SecondaryBitmap[4] & 16) > 0) de100_RecvInstIdCode = ReadVar2Packed(buf, ref offset, 11);
        if ((de1_SecondaryBitmap[4] & 8) > 0) de101_FileName = S(ReadVar2Raw(buf, ref offset, 17));
        if ((de1_SecondaryBitmap[4] & 4) > 0) de102_AcctId1 = ReadVar2Raw(buf, ref offset, 28);
        if ((de1_SecondaryBitmap[4] & 2) > 0) de103_AcctId2 = ReadVar2Raw(buf, ref offset, 28);
        if ((de1_SecondaryBitmap[4] & 1) > 0) de104_TxnDesc = ReadVar3Raw(buf, ref offset, 100);
        if ((de1_SecondaryBitmap[5] & 128) > 0) de105_ResvIso = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[5] & 64) > 0) de106_ResvIso = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[5] & 32) > 0) de107_ResvIso = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[5] & 16) > 0) de108_ResvIso = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[5] & 8) > 0) de109_ResvIso = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[5] & 4) > 0) de110_ResvIso = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[5] & 2) > 0) de111_ResvIso = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[5] & 1) > 0) de112_ResvNat = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[6] & 128) > 0) de113_ResvNat = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[6] & 64) > 0) de114_ResvNat = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[6] & 32) > 0) de115_ResvNat = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[6] & 16) > 0) de116_ResvNat = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[6] & 8) > 0) de117_CardStatUpdCode = S(ReadFixRaw(buf, ref offset, 2));
        if ((de1_SecondaryBitmap[6] & 4) > 0) de118_TotalCashNo = ReadFixPacked(buf, ref offset, 10);
        if ((de1_SecondaryBitmap[6] & 2) > 0) de119_TotalCashAmt = ReadFixPacked(buf, ref offset, 16);
        if ((de1_SecondaryBitmap[6] & 1) > 0) de120_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 128) > 0) de121_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 64) > 0) de122_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 32) > 0) de123_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 16) > 0) de124_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 8) > 0) de125_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 4) > 0) de126_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 2) > 0) de127_ResvPriv = ReadVar3Raw(buf, ref offset, 999);
        if ((de1_SecondaryBitmap[7] & 1) > 0) de128_MAC = ReadFixRaw(buf, ref offset, 8);
    }
    #endregion

    /// <summary>
    /// Format the fields as a nice string using reflection.
    /// Only non-null fields are printed; byte arrays are hex-dumped.
    /// </summary>
    public override string ToString()
    {
        StringBuilder sb = new StringBuilder();
        foreach (FieldInfo i in this.GetType().GetFields())
        {
            object val = i.GetValue(this);
            if (val != null)
            {
                if (val is byte[])
                    sb.AppendFormat("{0}: {1}\r\n", i.Name, BitConverter.ToString((byte[])val));
                else
                    sb.AppendFormat("{0}: {1}\r\n", i.Name, val);
            }
        }
        return sb.ToString();
    }

    #region Internal helpers
    /// <summary>
    /// Converts 0-15 to '0-9A-F'
    /// </summary>
    char XtoC(int x)
    {
        if (x < 10) return (char)('0' + x);
        return (char)('A' + x);
    }

    /// <summary>
    /// Grab bytes from a buffer
    /// </summary>
    byte[] B(byte[] buf, int off, int len)
    {
        byte[] a = new byte[len];
        Array.Copy(buf, off, a, 0, len);
        return a;
    }

    /// <summary>
    /// Converts a buffer to a string
    /// </summary>
    string S(byte[] buf)
    {
        return Encoding.ASCII.GetString(buf);
    }

    // Fixed-length raw field: copy len bytes and advance the cursor.
    byte[] ReadFixRaw(byte[] buf, ref int offset, int len)
    {
        byte[] val = B(buf, offset, len);
        offset += len;
        return val;
    }

    // LLVAR raw field: 2 ASCII digits give the length, then that many bytes.
    // ('unused' is the spec maximum length; it is not enforced here.)
    byte[] ReadVar2Raw(byte[] buf, ref int offset, int unused)
    {
        int len = (buf[offset] - 0x30) * 10 + buf[offset + 1] - 0x30;
        int oldoffset = offset;
        offset += len + 2;
        return B(buf, oldoffset + 2, len);
    }

    // LLLVAR raw field: 3 ASCII digits give the length, then that many bytes.
    byte[] ReadVar3Raw(byte[] buf, ref int offset, int unused)
    {
        int len = (buf[offset]-0x30)*100 + (buf[offset+1]-0x30)*10 + buf[offset+2]-0x30;
        int oldoffset = offset;
        offset += len + 3;
        return B(buf, oldoffset + 3, len);
    }

    /// <summary>
    /// Reads an LLVARn
    /// eg input: 31 31 12 34 56 78 91 2F
    /// 31 31 is ascii '11' which means 11 numbers
    /// Those 11 numbers are binary packed decimal = 1234567891
    /// The 'F' at the end is filler.
    /// </summary>
    string ReadVar2Packed(byte[] buf, ref int offset, int unused)
    {
        int len = (buf[offset] - 0x30) * 10 + (buf[offset + 1] - 0x30);
        int bytes = (len + 1) / 2; // The +1 is so it rounds up
        byte b;
        StringBuilder sb = new StringBuilder(len);
        if (len % 2 == 0) // Even number of chars, so there's no padding at the end
        {
            for (int i = 0; i < bytes; i++)
            {
                b = buf[offset + 2 + i];
                sb.Append(XtoC(b >> 4)).Append(XtoC(b & 0xf));
            }
        }
        else
        {
            int i;
            for (i = 0; i < bytes - 1; i++)
            {
                b = buf[offset + 2 + i];
                sb.Append(XtoC(b >> 4)).Append(XtoC(b & 0xf));
            }
            sb.Append(XtoC(buf[offset + 2 + i] >> 4)); // Get the last char
        }
        offset += bytes + 2;
        return sb.ToString();
    }
/// </summary> string ReadVar3Packed(byte[] buf, ref int offset, int unused) { int len = (buf[offset] - 0x30) * 100 + (buf[offset + 1] - 0x30) * 10 + (buf[offset + 2] - 0x30); int bytes = (len + 1) / 2; // The +1 is so it rounds up byte b; StringBuilder sb = new StringBuilder(len); if (len % 2 == 0) // Even number of chars, so there's no padding at the end { for (int i = 0; i < bytes; i++) { b = buf[offset + 3 + i]; sb.Append(XtoC(b >> 4)).Append(XtoC(b & 0xf)); } } else { int i; for (i = 0; i < bytes - 1; i++) { b = buf[offset + 3 + i]; sb.Append(XtoC(b >> 4)).Append(XtoC(b & 0xf)); } sb.Append(XtoC(buf[offset + 3 + i] >> 4)); // Get the last char } offset += bytes + 3; return sb.ToString(); } /// <summary> /// Parse a fixed length packed [hexi]decimal field /// If it is an odd number of digits, it skips the first nibble /// eg for input 0x01 23 of length 3, it grabs the "123" /// </summary> string ReadFixPacked(byte[] buf, ref int offset, int len) { int bytes = (len + 1) / 2; // The +1 is so it rounds up byte b; StringBuilder sb = new StringBuilder(len); if (len % 2 == 0) // Even number of chars, so there's no padding at the end { for (int i = 0; i < bytes; i++) { b = buf[offset + i]; sb.Append(XtoC(b >> 4)).Append(XtoC(b & 0xf)); } } else // Odd number { int i; sb.Append(XtoC(buf[offset] & 0xf)); // Get the first char from the second nibble for (i = 1; i < bytes; i++) { b = buf[offset + i]; sb.Append(XtoC(b >> 4)).Append(XtoC(b & 0xf)); } } offset += bytes; return sb.ToString(); } #endregion } And here's example code to use it: using System; class Program { static void Main(string[] args) { // Raw bytes for an AS2805 message byte[] message = new byte[] { 0x02, 0x00, 0x80, 0x38, 0x00, 0x01, 0x02, 0xC1, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x65, 0x43, 0x21, 0x13, 0x58, 0x40, 0x04, 0x19, 0x31, 0x31, 0x00, 0x08, 0x76, 0x54, 0x32, 0x1F, 0x30, 0x30, 0x38, 0x34, 0x30, 0x37, 0x31, 0x32, 0x33, 0x34, 0x37, 0x37, 0x37, 0x30, 0x30, 0x30, 0x31, 0x32, 
0x33, 0x32, 0x31, 0x32, 0x33, 0x32, 0x31, 0x30, 0x30, 0x38, 0x8C, 0xA0, 0xA6, 0x42, 0x0C, 0x5C, 0xA6, 0x28, 0x01, 0x23}; // Parse the bytes into an instance of the message class As2805Message m = new As2805Message(message); // Display the parsed fields Console.WriteLine(m.ToString()); Console.WriteLine("Press a key"); Console.ReadKey(); } }
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using System.Runtime.CompilerServices;

// Runtime regression test for Thread.Abort interacting with finally blocks.
// Each test_0_* method returns 0 on success; non-zero bits identify which
// invariant failed. NoInlining keeps each method a distinct stack frame so
// the abort/EH machinery is exercised exactly as written — do not restructure.
class Driver {
	static int result;            // accumulated failure bits (0 == pass)
	static bool finally_done;     // set when the finally block ran to completion
	static ManualResetEvent handle; // signals the main thread that the worker reached the finally
	static Thread thread;         // worker thread to be aborted
	static object broken;         // always null — used to raise a real NullReferenceException

	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	static void ThrowE () {
		// Dereferences a null field to throw a genuine NullReferenceException.
		broken.ToString ();
	}

	static bool InterruptRequested () {
		// True once Thread.Abort has been requested on the current thread.
		return (Thread.CurrentThread.ThreadState & ThreadState.AbortRequested) == ThreadState.AbortRequested;
	}

	// Spins until an abort is requested (or ~1s elapses), then verifies that a
	// nested try/catch inside the protected region still catches a normal
	// NullReferenceException while the abort is pending.
	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	public static void SleepLoop () {
		for (int i = 0; i < 10; ++i) {
			Console.WriteLine ("step {0} - {1}", i, Thread.CurrentThread.ThreadState);
			if (InterruptRequested ())
				break;
			Thread.Sleep (100);
		}
		if (!InterruptRequested ())
			result |= 0x1; // abort was never requested in time
		try {
			ThrowE ();
		} catch (Exception e) {
			Console.WriteLine ("caught/0 {0} from inside the prot block", e.GetType ());
			if (!(e is NullReferenceException))
				result |= 0x2; // wrong exception type surfaced inside the finally
		}
	}

	// Cancels a pending abort via Thread.ResetAbort.
	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	public static void CancelAbort () {
		object lk = new object ();
		Console.WriteLine ("step 0 - {0}", Thread.CurrentThread.ThreadState);
		//lock (lk) { Monitor.Wait (lk, 100); }
		Console.WriteLine ("step 1 - {0}", Thread.CurrentThread.ThreadState);
		Thread.ResetAbort ();
	}

	/////////////////////////////////////////////////////
	// Scenario 0: abort arrives while a finally block is running after a throw.
	// Expectation (Mono/.NET Framework semantics — verify against runtime docs):
	// the finally completes, then the original ArgumentException propagates to
	// the inner catch; the abort surfaces as ThreadAbortException in the guard.
	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	static void InnerFromEH0 () {
		thread = Thread.CurrentThread;
		MethodInfo mi = typeof (Driver).GetMethod ("SleepLoop");
		try {
			try {
				throw new ArgumentException ();
			} finally {
				handle.Set ();     // let the main thread issue the Abort now
				SleepLoop ();      // wait for the abort while inside the finally
				Console.WriteLine ("done");
				finally_done = true;
			}
			// Unreachable: the try always throws. Kept deliberately so the
			// 0x10 bit flags an impossible control-flow path.
			Console.WriteLine ("After finally");
			result |= 0x10;
		} catch (Exception e) {
			if (!(e is ArgumentException))
				result |= 0x4;
			Console.WriteLine ("caught/1 a {0} while on {1} res {2}", e.GetType (), Thread.CurrentThread.ThreadState, result);
		}
	}

	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	static void GuardFromEH0 () {
		try {
			InnerFromEH0 ();
		} catch (Exception e) {
			if (!(e is ThreadAbortException))
				result |= 0x8;
			Console.WriteLine ("caught/2 a {0} while on {1} res {2}", e.GetType (), Thread.CurrentThread.ThreadState, result);
		}
	}

	public static int test_0_abort_finally_after_throw () {
		finally_done = false;
		result = 0;
		Action ac = GuardFromEH0;
		handle = new ManualResetEvent (false);
		var res = ac.BeginInvoke (null, null); // run the scenario on a threadpool thread
		handle.WaitOne ();                     // wait until the worker is inside the finally
		Console.WriteLine ("aborting");
		thread.Abort ();
		Console.WriteLine ("aborted");
		res.AsyncWaitHandle.WaitOne ();
		Console.WriteLine ("waited");
		if (!finally_done)
			result |= 0x100; // finally never completed
		return result;
	}

	/////////////////////////////////////////////////////
	// Scenario 1: same as scenario 0 but the finally calls Thread.ResetAbort,
	// so no ThreadAbortException should ever reach the guard (0x8 if one does).
	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	static void InnerFromEH1 () {
		thread = Thread.CurrentThread;
		MethodInfo mi = typeof (Driver).GetMethod ("SleepLoop");
		try {
			try {
				throw new ArgumentException ();
			} finally {
				handle.Set ();
				SleepLoop ();
				CancelAbort ();   // swallow the pending abort
				Console.WriteLine ("done");
				finally_done = true;
			}
			// Unreachable (try always throws); 0x10 flags an impossible path.
			Console.WriteLine ("After finally");
			result |= 0x10;
		} catch (Exception e) {
			if (!(e is ArgumentException))
				result |= 0x4;
			Console.WriteLine ("caught/3 a {0} while on {1} res {2}", e.GetType (), Thread.CurrentThread.ThreadState, result);
		}
	}

	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	static void GuardFromEH1 () {
		try {
			InnerFromEH1 ();
		} catch (Exception e) {
			// Abort was cancelled, so any exception reaching here is a failure.
			result |= 0x8;
			Console.WriteLine ("caught/4 a {0} while on {1}", e.GetType (), Thread.CurrentThread.ThreadState);
		}
	}

	public static int test_0_abort_finally_and_cancel () {
		finally_done = false;
		result = 0;
		Action ac = GuardFromEH1;
		handle = new ManualResetEvent (false);
		var res = ac.BeginInvoke (null, null);
		handle.WaitOne ();
		Console.WriteLine ("aborting");
		thread.Abort ();
		Console.WriteLine ("aborted");
		res.AsyncWaitHandle.WaitOne ();
		Console.WriteLine ("waited");
		if (!finally_done)
			result |= 0x100;
		return result;
	}

	/////////////////////////////////////////////////////
	// Scenario 2: abort arrives while the finally runs after a *non-throwing*
	// try; the ThreadAbortException itself should be what propagates.
	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	static void InnerFromEH () {
		thread = Thread.CurrentThread;
		MethodInfo mi = typeof (Driver).GetMethod ("SleepLoop");
		try {
			try {
				Console.WriteLine ("try block");
			} finally {
				handle.Set ();
				SleepLoop ();
				Console.WriteLine ("done");
				finally_done = true;
			}
			// Only reachable if the abort somehow did not interrupt control flow.
			Console.WriteLine ("After finally");
			result |= 0x10;
		} catch (Exception e) {
			if (!(e is ThreadAbortException))
				result |= 0x4;
			Console.WriteLine ("caught/5 a {0} while on {1} res {2}", e.GetType (), Thread.CurrentThread.ThreadState, result);
		}
	}

	[MethodImplAttribute (MethodImplOptions.NoInlining)]
	static void GuardFromEH () {
		try {
			InnerFromEH ();
		} catch (Exception e) {
			if (!(e is ThreadAbortException))
				result |= 0x8;
			Console.WriteLine ("caught/6 a {0} while on {1} res {2}", e.GetType (), Thread.CurrentThread.ThreadState, result);
		}
	}

	public static int test_0_finally_after_try () {
		AppDomain.CurrentDomain.UnhandledException += (obj, sender) => {
			Console.WriteLine ("Unhandled {0}", sender.ExceptionObject);
		};
		finally_done = false;
		result = 0;
		Action ac = GuardFromEH;
		handle = new ManualResetEvent (false);
		var res = ac.BeginInvoke (null, null);
		handle.WaitOne ();
		Console.WriteLine ("aborting");
		thread.Abort ();
		Console.WriteLine ("aborted");
		res.AsyncWaitHandle.WaitOne ();
		Console.WriteLine ("waited");
		if (!finally_done)
			result |= 0x100;
		return result;
	}

	/////////////////////////////////////////////////////

	static int Main (string[] args) {
		AppDomain.CurrentDomain.UnhandledException += (obj, sender) => {
			Console.WriteLine ("Unhandled {0}", sender.ExceptionObject);
		};
		// TestDriver is the Mono test harness; it invokes every test_* method.
		return TestDriver.RunTests (typeof (Driver), args);
	}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Globalization;
using System.Text;
using System.Diagnostics;

namespace System
{
    // Low-level percent-encoding/decoding helpers shared by the Uri machinery.
    // Heavy use of unsafe pointers and manual buffer growth; statement order and
    // index arithmetic here are load-bearing — treat with care.
    internal static class UriHelper
    {
        private static readonly char[] s_hexUpperChars = { '0', '1', '2', '3', '4', '5', '6', '7',
                                                    '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
        // http://host/Path/Path/File?Query is the base of
        //      - http://host/Path/Path/File/ ... (those "File" words may be different in semantic but anyway)
        //      - http://host/Path/Path/#Fragment
        //      - http://host/Path/Path/?Query
        //      - http://host/Path/Path/MoreDir/ ...
        //      - http://host/Path/Path/OtherFile?Query
        //      - http://host/Path/Path/Fl
        //      - http://host/Path/Path/
        //
        //  It is not a base for
        //      - http://host/Path/Path         (that last "Path" is not considered as a directory)
        //      - http://host/Path/Path?Query
        //      - http://host/Path/Path#Fragment
        //      - http://host/Path/Path2/
        //      - http://host/Path/Path2/MoreDir
        //      - http://host/Path/File
        //
        // ASSUMES that strings like http://host/Path/Path/MoreDir/../../  have been canonicalized before going to this method.
        // ASSUMES that back slashes already have been converted if applicable.
        //
        internal static unsafe bool TestForSubPath(char* selfPtr, ushort selfLength, char* otherPtr, ushort otherLength,
            bool ignoreCase)
        {
            ushort i = 0;
            char chSelf;
            char chOther;

            // Tracks whether the current (partial) segment matched so far.
            bool AllSameBeforeSlash = true;

            for (; i < selfLength && i < otherLength; ++i)
            {
                chSelf = *(selfPtr + i);
                chOther = *(otherPtr + i);

                if (chSelf == '?' || chSelf == '#')
                {
                    // survived so far and selfPtr does not have any more path segments
                    return true;
                }

                // If selfPtr terminates a path segment, so must otherPtr
                if (chSelf == '/')
                {
                    if (chOther != '/')
                    {
                        // comparison has failed
                        return false;
                    }
                    // plus the segments must be the same
                    if (!AllSameBeforeSlash)
                    {
                        // comparison has failed
                        return false;
                    }
                    //so far so good
                    AllSameBeforeSlash = true;
                    continue;
                }

                // if otherPtr terminates then selfPtr must not have any more path segments
                if (chOther == '?' || chOther == '#')
                {
                    break;
                }

                if (!ignoreCase)
                {
                    if (chSelf != chOther)
                    {
                        AllSameBeforeSlash = false;
                    }
                }
                else
                {
                    if (char.ToLowerInvariant(chSelf) != char.ToLowerInvariant(chOther))
                    {
                        AllSameBeforeSlash = false;
                    }
                }
            }

            // If self is longer then it must not have any more path segments
            for (; i < selfLength; ++i)
            {
                if ((chSelf = *(selfPtr + i)) == '?' || chSelf == '#')
                {
                    return true;
                }
                if (chSelf == '/')
                {
                    return false;
                }
            }
            //survived by getting to the end of selfPtr
            return true;
        }

        // - forceX characters are always escaped if found
        // - rsvd character will remain unescaped
        //
        // start    - starting offset from input
        // end      - the exclusive ending offset in input
        // destPos  - starting offset in dest for output, on return this will be an exclusive "end" in the output.
        //
        // In case "dest" has lack of space it will be reallocated by preserving the _whole_ content up to current destPos
        //
        // Returns null if nothing has to be escaped AND passed dest was null, otherwise the resulting array with the updated destPos
        //
        private const short c_MaxAsciiCharsReallocate = 40;
        private const short c_MaxUnicodeCharsReallocate = 40;
        private const short c_MaxUTF_8BytesPerUnicodeChar = 4;
        private const short c_EncodedCharsPerByte = 3; // a byte escapes to "%XX"

        internal unsafe static char[] EscapeString(string input, int start, int end, char[] dest, ref int destPos,
            bool isUriString, char force1, char force2, char rsvd)
        {
            if (end - start >= Uri.c_MaxUriBufferSize)
                throw new UriFormatException(SR.net_uri_SizeLimit);

            int i = start;
            int prevInputPos = start;
            byte* bytes = stackalloc byte[c_MaxUnicodeCharsReallocate * c_MaxUTF_8BytesPerUnicodeChar];    // 40*4=160

            fixed (char* pStr = input)
            {
                for (; i < end; ++i)
                {
                    char ch = pStr[i];

                    // a Unicode ?
                    if (ch > '\x7F')
                    {
                        // Gather a run of non-ASCII chars (up to the reallocation window)
                        // and UTF-8 encode them in one GetBytes call.
                        short maxSize = (short)Math.Min(end - i, (int)c_MaxUnicodeCharsReallocate - 1);

                        short count = 1;
                        for (; count < maxSize && pStr[i + count] > '\x7f'; ++count)
                            ;

                        // Is the last a high surrogate?
                        if (pStr[i + count - 1] >= 0xD800 && pStr[i + count - 1] <= 0xDBFF)
                        {
                            // Should be a rare case where the app tries to feed an invalid Unicode surrogates pair
                            if (count == 1 || count == end - i)
                                throw new UriFormatException(SR.net_uri_BadString);
                            // need to grab one more char as a Surrogate except when it's a bogus input
                            ++count;
                        }

                        dest = EnsureDestinationSize(pStr, dest, i,
                            (short)(count * c_MaxUTF_8BytesPerUnicodeChar * c_EncodedCharsPerByte),
                            c_MaxUnicodeCharsReallocate * c_MaxUTF_8BytesPerUnicodeChar * c_EncodedCharsPerByte,
                            ref destPos, prevInputPos);

                        short numberOfBytes = (short)Encoding.UTF8.GetBytes(pStr + i, count, bytes,
                            c_MaxUnicodeCharsReallocate * c_MaxUTF_8BytesPerUnicodeChar);

                        // This is the only exception that built in UriParser can throw after a Uri ctor.
                        // Should not happen unless the app tries to feed an invalid Unicode String
                        if (numberOfBytes == 0)
                            throw new UriFormatException(SR.net_uri_BadString);

                        i += (count - 1);

                        for (count = 0; count < numberOfBytes; ++count)
                            EscapeAsciiChar((char)bytes[count], dest, ref destPos);

                        prevInputPos = i + 1;
                    }
                    else if (ch == '%' && rsvd == '%')
                    {
                        // Means we don't reEncode '%' but check for the possible escaped sequence
                        dest = EnsureDestinationSize(pStr, dest, i, c_EncodedCharsPerByte,
                            c_MaxAsciiCharsReallocate * c_EncodedCharsPerByte, ref destPos, prevInputPos);
                        if (i + 2 < end && EscapedAscii(pStr[i + 1], pStr[i + 2]) != Uri.c_DummyChar)
                        {
                            // leave it escaped
                            dest[destPos++] = '%';
                            dest[destPos++] = pStr[i + 1];
                            dest[destPos++] = pStr[i + 2];
                            i += 2;
                        }
                        else
                        {
                            // bare '%' not followed by two hex digits -> escape it
                            EscapeAsciiChar('%', dest, ref destPos);
                        }
                        prevInputPos = i + 1;
                    }
                    else if (ch == force1 || ch == force2)
                    {
                        dest = EnsureDestinationSize(pStr, dest, i, c_EncodedCharsPerByte,
                            c_MaxAsciiCharsReallocate * c_EncodedCharsPerByte, ref destPos, prevInputPos);
                        EscapeAsciiChar(ch, dest, ref destPos);
                        prevInputPos = i + 1;
                    }
                    else if (ch != rsvd && (isUriString ? !IsReservedUnreservedOrHash(ch) : !IsUnreserved(ch)))
                    {
                        dest = EnsureDestinationSize(pStr, dest, i, c_EncodedCharsPerByte,
                            c_MaxAsciiCharsReallocate * c_EncodedCharsPerByte, ref destPos, prevInputPos);
                        EscapeAsciiChar(ch, dest, ref destPos);
                        prevInputPos = i + 1;
                    }
                }

                if (prevInputPos != i)
                {
                    // need to fill up the dest array ?
                    if (prevInputPos != start || dest != null)
                        dest = EnsureDestinationSize(pStr, dest, i, 0, 0, ref destPos, prevInputPos);
                }
            }

            return dest;
        }

        //
        // ensure destination array has enough space and contains all the needed input stuff
        //
        private unsafe static char[] EnsureDestinationSize(char* pStr, char[] dest, int currentInputPos,
            short charsToAdd, short minReallocateChars, ref int destPos, int prevInputPos)
        {
            if ((object)dest == null || dest.Length < destPos + (currentInputPos - prevInputPos) + charsToAdd)
            {
                // allocating or reallocating array by ensuring enough space based on maxCharsToAdd.
                char[] newresult = new char[destPos + (currentInputPos - prevInputPos) + minReallocateChars];

                if ((object)dest != null && destPos != 0)
                    Buffer.BlockCopy(dest, 0, newresult, 0, destPos << 1); // << 1: char count -> byte count
                dest = newresult;
            }

            // ensuring we copied everything from the input string left before last escaping
            while (prevInputPos != currentInputPos)
                dest[destPos++] = pStr[prevInputPos++];
            return dest;
        }

        //
        // This method will assume that any good Escaped Sequence will be unescaped in the output
        // - Assumes Dest.Length - destPosition >= end-start
        // - UnescapeLevel controls various modes of operation
        // - Any "bad" escape sequence will remain as is or '%' will be escaped.
        // - destPosition tells the starting index in dest for placing the result.
        //   On return destPosition tells the last character + 1 position in the "dest" array.
        // - The control chars and chars passed in rsvdX parameters may be re-escaped depending on UnescapeLevel
        // - It is a RARE case when Unescape actually needs escaping some characters mentioned above.
        //   For this reason it returns a char[] that is usually the same ref as the input "dest" value.
        //
        internal unsafe static char[] UnescapeString(string input, int start, int end, char[] dest,
            ref int destPosition, char rsvd1, char rsvd2, char rsvd3, UnescapeMode unescapeMode, UriParser syntax,
            bool isQuery)
        {
            fixed (char* pStr = input)
            {
                return UnescapeString(pStr, start, end, dest, ref destPosition, rsvd1, rsvd2, rsvd3, unescapeMode,
                    syntax, isQuery);
            }
        }

        internal unsafe static char[] UnescapeString(char* pStr, int start, int end, char[] dest, ref int destPosition,
            char rsvd1, char rsvd2, char rsvd3, UnescapeMode unescapeMode, UriParser syntax, bool isQuery)
        {
            byte[] bytes = null;
            byte escapedReallocations = 0;
            bool escapeReserved = false;
            int next = start;
            bool iriParsing = Uri.IriParsingStatic(syntax)
                                && ((unescapeMode & UnescapeMode.EscapeUnescape) == UnescapeMode.EscapeUnescape);

            while (true)
            {
                // we may need to re-pin dest[]
                fixed (char* pDest = dest)
                {
                    if ((unescapeMode & UnescapeMode.EscapeUnescape) == UnescapeMode.CopyOnly)
                    {
                        while (start < end)
                            pDest[destPosition++] = pStr[start++];
                        return dest;
                    }

                    while (true)
                    {
                        char ch = (char)0;

                        // Scan forward to the next character that needs work ('%' or a char to re-escape).
                        for (; next < end; ++next)
                        {
                            if ((ch = pStr[next]) == '%')
                            {
                                if ((unescapeMode & UnescapeMode.Unescape) == 0)
                                {
                                    // re-escape, don't check anything else
                                    escapeReserved = true;
                                }
                                else if (next + 2 < end)
                                {
                                    ch = EscapedAscii(pStr[next + 1], pStr[next + 2]);
                                    // Unescape a good sequence if full unescape is requested
                                    if (unescapeMode >= UnescapeMode.UnescapeAll)
                                    {
                                        if (ch == Uri.c_DummyChar)
                                        {
                                            if (unescapeMode >= UnescapeMode.UnescapeAllOrThrow)
                                            {
                                                // Should be a rare case where the app tries to feed an invalid escaped sequence
                                                throw new UriFormatException(SR.net_uri_BadString);
                                            }
                                            continue;
                                        }
                                    }
                                    // re-escape % from an invalid sequence
                                    else if (ch == Uri.c_DummyChar)
                                    {
                                        if ((unescapeMode & UnescapeMode.Escape) != 0)
                                            escapeReserved = true;
                                        else
                                            continue;   // we should throw instead but since v1.0 would just print '%'
                                    }
                                    // Do not unescape '%' itself unless full unescape is requested
                                    else if (ch == '%')
                                    {
                                        next += 2;
                                        continue;
                                    }
                                    // Do not unescape a reserved char unless full unescape is requested
                                    else if (ch == rsvd1 || ch == rsvd2 || ch == rsvd3)
                                    {
                                        next += 2;
                                        continue;
                                    }
                                    // Do not unescape a dangerous char unless it's V1ToStringFlags mode
                                    else if ((unescapeMode & UnescapeMode.V1ToStringFlag) == 0 && IsNotSafeForUnescape(ch))
                                    {
                                        next += 2;
                                        continue;
                                    }
                                    else if (iriParsing && ((ch <= '\x9F' && IsNotSafeForUnescape(ch)) ||
                                                            (ch > '\x9F' && !IriHelper.CheckIriUnicodeRange(ch, isQuery))))
                                    {
                                        // check if unescaping gives a char outside iri range
                                        // if it does then keep it escaped
                                        next += 2;
                                        continue;
                                    }
                                    // unescape escaped char or escape %
                                    break;
                                }
                                else if (unescapeMode >= UnescapeMode.UnescapeAll)
                                {
                                    if (unescapeMode >= UnescapeMode.UnescapeAllOrThrow)
                                    {
                                        // Should be a rare case where the app tries to feed an invalid escaped sequence
                                        throw new UriFormatException(SR.net_uri_BadString);
                                    }
                                    // keep a '%' as part of a bogus sequence
                                    continue;
                                }
                                else
                                {
                                    escapeReserved = true;
                                }
                                // escape (escapeReserved==true) or otherwise unescape the sequence
                                break;
                            }
                            else if ((unescapeMode & (UnescapeMode.Unescape | UnescapeMode.UnescapeAll))
                                == (UnescapeMode.Unescape | UnescapeMode.UnescapeAll))
                            {
                                continue;
                            }
                            else if ((unescapeMode & UnescapeMode.Escape) != 0)
                            {
                                // Could actually escape some of the characters
                                if (ch == rsvd1 || ch == rsvd2 || ch == rsvd3)
                                {
                                    // found an unescaped reserved character -> escape it
                                    escapeReserved = true;
                                    break;
                                }
                                else if ((unescapeMode & UnescapeMode.V1ToStringFlag) == 0
                                    && (ch <= '\x1F' || (ch >= '\x7F' && ch <= '\x9F')))
                                {
                                    // found an unescaped reserved character -> escape it
                                    escapeReserved = true;
                                    break;
                                }
                            }
                        }

                        //copy off previous characters from input
                        while (start < next)
                            pDest[destPosition++] = pStr[start++];

                        if (next != end)
                        {
                            if (escapeReserved)
                            {
                                //escape that char
                                // Since this should be _really_ rare case, reallocate with constant size increase of 30 rsvd-type characters.
                                if (escapedReallocations == 0)
                                {
                                    escapedReallocations = 30;
                                    char[] newDest = new char[dest.Length + escapedReallocations * 3];
                                    fixed (char* pNewDest = newDest)
                                    {
                                        for (int i = 0; i < destPosition; ++i)
                                            pNewDest[i] = pDest[i];
                                    }
                                    dest = newDest;
                                    // re-pin new dest[] array
                                    goto dest_fixed_loop_break;
                                }
                                else
                                {
                                    --escapedReallocations;
                                    EscapeAsciiChar(pStr[next], dest, ref destPosition);
                                    escapeReserved = false;
                                    start = ++next;
                                    continue;
                                }
                            }

                            // unescaping either one Ascii or possibly multiple Unicode
                            if (ch <= '\x7F')
                            {
                                //ASCII
                                dest[destPosition++] = ch;
                                next += 3;
                                start = next;
                                continue;
                            }

                            // Unicode: collect the whole run of escaped UTF-8 bytes, then decode.
                            int byteCount = 1;

                            // lazy initialization of max size, will reuse the array for next sequences
                            if ((object)bytes == null)
                                bytes = new byte[end - next];

                            bytes[0] = (byte)ch;
                            next += 3;
                            while (next < end)
                            {
                                // Check on exit criterion
                                if ((ch = pStr[next]) != '%' || next + 2 >= end)
                                    break;

                                // already made sure we have 3 characters in str
                                ch = EscapedAscii(pStr[next + 1], pStr[next + 2]);

                                //invalid hex sequence ?
                                if (ch == Uri.c_DummyChar)
                                    break;
                                // character is not part of a UTF-8 sequence ?
                                else if (ch < '\x80')
                                    break;
                                else
                                {
                                    //a UTF-8 sequence
                                    bytes[byteCount++] = (byte)ch;
                                    next += 3;
                                }
                            }

                            // Empty fallbacks: invalid sequences produce no chars instead of U+FFFD,
                            // so they can be detected and kept escaped by MatchUTF8Sequence below.
                            Encoding noFallbackCharUTF8 = Encoding.GetEncoding(
                                Encoding.UTF8.CodePage,
                                new EncoderReplacementFallback(""),
                                new DecoderReplacementFallback(""));

                            char[] unescapedChars = new char[bytes.Length];
                            int charCount = noFallbackCharUTF8.GetChars(bytes, 0, byteCount, unescapedChars, 0);

                            start = next;

                            // match exact bytes
                            // Do not unescape chars not allowed by Iri
                            // need to check for invalid utf sequences that may not have given any chars
                            MatchUTF8Sequence(pDest, dest, ref destPosition, unescapedChars, charCount, bytes,
                                byteCount, isQuery, iriParsing);
                        }

                        if (next == end)
                            goto done;
                    }
                dest_fixed_loop_break:;
                }
            }

        done:
            return dest;
        }

        //
        // Need to check for invalid utf sequences that may not have given any chars.
        // We got the unescaped chars, we then reencode them and match off the bytes
        // to get the invalid sequence bytes that we just copy off
        //
        internal static unsafe void MatchUTF8Sequence(char* pDest, char[] dest, ref int destOffset,
            char[] unescapedChars, int charCount, byte[] bytes, int byteCount, bool isQuery, bool iriParsing)
        {
            int count = 0;
            fixed (char* unescapedCharsPtr = unescapedChars)
            {
                for (int j = 0; j < charCount; ++j)
                {
                    bool isHighSurr = char.IsHighSurrogate(unescapedCharsPtr[j]);
                    byte[] encodedBytes = Encoding.UTF8.GetBytes(unescapedChars, j, isHighSurr ? 2 : 1);
                    int encodedBytesLength = encodedBytes.Length;

                    // we have to keep unicode chars outside Iri range escaped
                    bool inIriRange = false;
                    if (iriParsing)
                    {
                        if (!isHighSurr)
                            inIriRange = IriHelper.CheckIriUnicodeRange(unescapedChars[j], isQuery);
                        else
                        {
                            bool surrPair = false;
                            inIriRange = IriHelper.CheckIriUnicodeRange(unescapedChars[j], unescapedChars[j + 1],
                                ref surrPair, isQuery);
                        }
                    }

                    while (true)
                    {
                        // Escape any invalid bytes that were before this character
                        while (bytes[count] != encodedBytes[0])
                        {
                            Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                            EscapeAsciiChar((char)bytes[count++], dest, ref destOffset);
                        }

                        // check if all bytes match
                        bool allBytesMatch = true;
                        int k = 0;
                        for (; k < encodedBytesLength; ++k)
                        {
                            if (bytes[count + k] != encodedBytes[k])
                            {
                                allBytesMatch = false;
                                break;
                            }
                        }

                        if (allBytesMatch)
                        {
                            count += encodedBytesLength;
                            if (iriParsing)
                            {
                                if (!inIriRange)
                                {
                                    // need to keep chars not allowed as escaped
                                    for (int l = 0; l < encodedBytes.Length; ++l)
                                    {
                                        Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                        EscapeAsciiChar((char)encodedBytes[l], dest, ref destOffset);
                                    }
                                }
                                else if (!Uri.IsBidiControlCharacter(unescapedCharsPtr[j]))
                                {
                                    //copy chars
                                    Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                    pDest[destOffset++] = unescapedCharsPtr[j];
                                    if (isHighSurr)
                                    {
                                        Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                        pDest[destOffset++] = unescapedCharsPtr[j + 1];
                                    }
                                }
                            }
                            else
                            {
                                //copy chars
                                Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                pDest[destOffset++] = unescapedCharsPtr[j];
                                if (isHighSurr)
                                {
                                    Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                    pDest[destOffset++] = unescapedCharsPtr[j + 1];
                                }
                            }
                            break; // break out of while (true) since we've matched this char bytes
                        }
                        else
                        {
                            // copy bytes till place where bytes dont match
                            for (int l = 0; l < k; ++l)
                            {
                                Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                EscapeAsciiChar((char)bytes[count++], dest, ref destOffset);
                            }
                        }
                    }

                    if (isHighSurr) j++;
                }
            }

            // Include any trailing invalid sequences
            while (count < byteCount)
            {
                Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                EscapeAsciiChar((char)bytes[count++], dest, ref destOffset);
            }
        }

        // Writes "%XX" (uppercase hex) for ch into "to" at pos, advancing pos by 3.
        internal static void EscapeAsciiChar(char ch, char[] to, ref int pos)
        {
            to[pos++] = '%';
            to[pos++] = s_hexUpperChars[(ch & 0xf0) >> 4];
            to[pos++] = s_hexUpperChars[ch & 0xf];
        }

        // Decodes two hex digit chars into the byte value they encode,
        // or returns Uri.c_DummyChar if either char is not a hex digit.
        internal static char EscapedAscii(char digit, char next)
        {
            if (!(((digit >= '0') && (digit <= '9'))
                || ((digit >= 'A') && (digit <= 'F'))
                || ((digit >= 'a') && (digit <= 'f'))))
            {
                return Uri.c_DummyChar;
            }

            int res = (digit <= '9')
                ? ((int)digit - (int)'0')
                : (((digit <= 'F')
                ? ((int)digit - (int)'A')
                : ((int)digit - (int)'a'))
                   + 10);

            if (!(((next >= '0') && (next <= '9'))
                || ((next >= 'A') && (next <= 'F'))
                || ((next >= 'a') && (next <= 'f'))))
            {
                return Uri.c_DummyChar;
            }

            return (char)((res << 4) + ((next <= '9')
                    ? ((int)next - (int)'0')
                    : (((next <= 'F')
                        ? ((int)next - (int)'A')
                        : ((int)next - (int)'a'))
                       + 10)));
        }

        // Do not unescape these in safe mode:
        // 1) reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | ","
        // 2) excluded = control | "#" | "%" | "\"
        //
        // That will still give plenty characters unescaped by SafeUnescaped mode such as
        // 1) Unicode characters
        // 2) Unreserved = alphanum | "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
        // 3) DelimitersAndUnwise = "<" | ">" | <"> | "{" | "}" | "|" | "^" | "[" | "]" | "`"
        internal static bool IsNotSafeForUnescape(char ch)
        {
            if (ch <= '\x1F' || (ch >= '\x7F' && ch <= '\x9F'))
                return true;
            // (ch | '\x2') != '>' excludes '<' and '>' from the ';'..'@' range check
            // (';'..'@' covers ; < = > ? @; OR-ing with 0x2 maps both '<' and '>' to '>').
            else if ((ch >= ';' && ch <= '@' && (ch | '\x2') != '>') ||
                     (ch >= '#' && ch <= '&') ||
                     ch == '+' || ch == ',' || ch == '/' || ch == '\\')
                return true;

            return false;
        }

        private const string RFC2396ReservedMarks = @";/?:@&=+$,";
        private const string RFC3986ReservedMarks = @":/?#[]@!$&'()*+,;=";
        private const string RFC2396UnreservedMarks = @"-_.!~*'()";
        private const string RFC3986UnreservedMarks = @"-._~";

        private static unsafe bool IsReservedUnreservedOrHash(char c)
        {
            if (IsUnreserved(c))
            {
                return true;
            }
            return (RFC3986ReservedMarks.IndexOf(c) >= 0);
        }

        internal static unsafe bool IsUnreserved(char c)
        {
            if (Uri.IsAsciiLetterOrDigit(c))
            {
                return true;
            }
            return (RFC3986UnreservedMarks.IndexOf(c) >= 0);
        }

        internal static bool Is3986Unreserved(char c)
        {
            if (Uri.IsAsciiLetterOrDigit(c))
            {
                return true;
            }
            return (RFC3986UnreservedMarks.IndexOf(c) >= 0);
        }
    }
}
// // ListView_Model.cs // // Author: // Aaron Bockover <abockover@novell.com> // // Copyright (C) 2007-2008 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Reflection;
using Gtk;

namespace Hyena.Data.Gui
{
    // Model-handling portion of ListView<T>: attaches/detaches an
    // IListModel<T>, keeps the vertical scroll position across reloads, and
    // resolves per-row "opaque"/"bold" flags by reflecting on the item type.
    public partial class ListView<T> : ListViewBase
    {
#pragma warning disable 0067
        public event EventHandler ModelChanged;
        public event EventHandler ModelReloaded;
#pragma warning restore 0067

        // Convenience overload: bind a model and scroll to the top.
        public void SetModel (IListModel<T> model)
        {
            SetModel (model, 0.0);
        }

        // Swaps the backing model: unhooks handlers from the old model, hooks
        // the new one, applies the controller's (default) sort when the model
        // is ISortable, then refreshes the view at the requested scroll offset
        // and raises ModelChanged.
        public virtual void SetModel (IListModel<T> value, double vpos)
        {
            if (model == value) {
                return;
            }

            if (model != null) {
                model.Cleared -= OnModelClearedHandler;
                model.Reloaded -= OnModelReloadedHandler;
            }

            model = value;

            if (model != null) {
                model.Cleared += OnModelClearedHandler;
                model.Reloaded += OnModelReloadedHandler;
                selection_proxy.Selection = model.Selection;
                IsEverReorderable = model.CanReorder;
            }

            ISortable sortable = model as ISortable;
            if (sortable != null) {
                ISortableColumn sort_column = ColumnController.SortColumn ?? ColumnController.DefaultSortColumn;
                if (sort_column != null) {
                    sortable.Sort (sort_column);
                    RecalculateColumnSizes ();
                    RegenerateColumnCache ();
                    InvalidateHeader ();
                    // A user-visible sort disables manual row reordering.
                    IsReorderable = sortable.SortColumn == null || sortable.SortColumn.SortType == SortType.None;
                }
            }

            if (ViewLayout != null) {
                ViewLayout.Model = Model;
            }

            RefreshViewForModel (vpos);

            var handler = ModelChanged;
            if (handler != null) {
                handler (this, EventArgs.Empty);
            }
        }

        // Recomputes adjustments and restores the scroll position.
        // vpos == null means "derive a sensible position" (used on reload);
        // a concrete value scrolls there explicitly.
        private void RefreshViewForModel (double? vpos)
        {
            if (Model == null) {
                UpdateAdjustments ();
                QueueDraw ();
                return;
            }

            if (ViewLayout != null) {
                ViewLayout.ModelUpdated ();
            }

            UpdateAdjustments ();

            if (vpos != null) {
                ScrollToY ((double) vpos);
            } else if (Model.Count <= ItemsInView) {
                // If our view fits all rows at once, make sure we're scrolled to the top
                ScrollToY (0.0);
            } else if (vadjustment != null) {
                ScrollToY (vadjustment.Value);
            }

            if (Parent is ScrolledWindow) {
                Parent.QueueDraw ();
            }
        }

        private void OnModelClearedHandler (object o, EventArgs args)
        {
            OnModelCleared ();
        }

        private void OnModelReloadedHandler (object o, EventArgs args)
        {
            OnModelReloaded ();

            var handler = ModelReloaded;
            if (handler != null) {
                handler (this, EventArgs.Empty);
            }
        }

        private void OnColumnControllerUpdatedHandler (object o, EventArgs args)
        {
            OnColumnControllerUpdated ();
        }

        protected virtual void OnModelCleared ()
        {
            RefreshViewForModel (null);
        }

        protected virtual void OnModelReloaded ()
        {
            RefreshViewForModel (null);
        }

        private IListModel<T> model;
        public virtual IListModel<T> Model {
            get { return model; }
        }

        // Reflection cache for the row "opaque" (sensitive) flag.
        private string row_opaque_property_name = "Sensitive";
        private PropertyInfo row_opaque_property_info;
        bool row_opaque_property_invalid = false;

        // Name of a bool property on the row item that controls whether the
        // row renders sensitive; changing it clears the cached PropertyInfo
        // and redraws the list.
        public string RowOpaquePropertyName {
            get { return row_opaque_property_name; }
            set {
                if (value == row_opaque_property_name) {
                    return;
                }

                row_opaque_property_name = value;
                row_opaque_property_info = null;
                row_opaque_property_invalid = false;

                InvalidateList ();
            }
        }

        private bool IsRowOpaque (object item)
        {
            if (item == null || row_opaque_property_invalid) {
                return true;
            }

            // Re-resolve when the item type changes; if the property is
            // missing or not a bool, mark the lookup invalid so reflection
            // is not retried for every row.
            if (row_opaque_property_info == null || row_opaque_property_info.ReflectedType != item.GetType ()) {
                row_opaque_property_info = item.GetType ().GetProperty (row_opaque_property_name);
                if (row_opaque_property_info == null || row_opaque_property_info.PropertyType != typeof (bool)) {
                    row_opaque_property_info = null;
                    row_opaque_property_invalid = true;
                    return true;
                }
            }

            return (bool)row_opaque_property_info.GetValue (item, null);
        }

        // Reflection cache for the row "bold" flag (same pattern as above).
        private string row_bold_property_name = "IsBold";
        private PropertyInfo row_bold_property_info;
        bool row_bold_property_invalid = false;

        public string RowBoldPropertyName {
            get { return row_bold_property_name; }
            set {
                if (value == row_bold_property_name) {
                    return;
                }

                row_bold_property_name = value;
                row_bold_property_info = null;
                row_bold_property_invalid = false;

                InvalidateList ();
            }
        }

        private bool IsRowBold (object item)
        {
            if (item == null || row_bold_property_invalid) {
                return false;
            }

            if (row_bold_property_info == null || row_bold_property_info.ReflectedType != item.GetType ()) {
                row_bold_property_info = item.GetType ().GetProperty (row_bold_property_name);
                if (row_bold_property_info == null || row_bold_property_info.PropertyType != typeof (bool)) {
                    row_bold_property_info = null;
                    row_bold_property_invalid = true;
                    return false;
                }
            }

            return (bool)row_bold_property_info.GetValue (item, null);
        }
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

using NPOI.POIFS.Properties;
using NPOI.Util;
using System.IO;
using System;
using System.Collections.Generic;

namespace NPOI.POIFS.FileSystem
{
    /**
     * This class provides methods to read a DocumentEntry managed by a
     * {@link NPOIFSFileSystem} instance. Reads are served from an iterator of
     * ByteBuffer blocks supplied by the underlying NPOIFSDocument.
     */
    public class NDocumentInputStream : DocumentInputStream//DocumentReader
    {
        /** current offset into the Document */
        private int _current_offset;
        /** current block count */
        private int _current_block_count;

        /** current marked offset into the Document (used by mark and Reset) */
        private int _marked_offset;
        /** and the block count for it */
        private int _marked_offset_count;

        /** the Document's size */
        private int _document_size;

        /** have we been closed? */
        private bool _closed;

        /** the actual Document */
        private NPOIFSDocument _document;

        // Iterator over the document's storage blocks, and the block
        // currently being consumed.
        private IEnumerator<ByteBuffer> _data;
        private ByteBuffer _buffer;

        /**
         * Create an InputStream from the specified DocumentEntry
         *
         * @param document the DocumentEntry to be read
         *
         * @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
         *                been deleted?)
         */
        public NDocumentInputStream(DocumentEntry document)
        {
            if (!(document is DocumentNode))
            {
                throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
            }
            _current_offset = 0;
            _current_block_count = 0;
            _marked_offset = 0;
            _marked_offset_count = 0;
            _document_size = document.Size;
            _closed = false;

            // Resolve the backing NPOIFSDocument through the node's parent
            // directory so we can stream its blocks.
            DocumentNode doc = (DocumentNode)document;
            DocumentProperty property = (DocumentProperty)doc.Property;
            _document = new NPOIFSDocument(
                    property,
                    ((DirectoryNode)doc.Parent).NFileSystem
            );
            _data = _document.GetBlockIterator();
        }

        /**
         * Create an InputStream from the specified Document
         *
         * @param document the Document to be read
         */
        public NDocumentInputStream(NPOIFSDocument document)
        {
            _current_offset = 0;
            _current_block_count = 0;
            _marked_offset = 0;
            _marked_offset_count = 0;
            _document_size = document.Size;
            _closed = false;
            _document = document;
            _data = _document.GetBlockIterator();
        }

        // Number of bytes still readable before end-of-document.
        public override int Available()
        {
            if (_closed)
            {
                throw new InvalidOperationException("cannot perform requested operation on a closed stream");
            }
            return _document_size - _current_offset;
        }

        // Marks the stream closed; no underlying resources are released here.
        public override void Close()
        {
            _closed = true;
        }

        // Records the current position for a later Reset(); the read limit is
        // ignored. The block count is stored minus one because the current
        // block will be re-fetched on Reset.
        public override void Mark(int ignoredReadlimit)
        {
            _marked_offset = _current_offset;
            _marked_offset_count = Math.Max(0, _current_block_count - 1);
        }

        // Reads a single byte, or EOF at end-of-document.
        public override int Read()
        {
            DieIfClosed();
            if (atEOD())
            {
                return EOF;
            }

            byte[] b = new byte[1];
            int result = Read(b, 0, 1);
            if (result >= 0)
            {
                // NOTE(review): byte is unsigned in C#, so b[0] < 0 can never
                // be true; this branch is dead code carried over from the Java
                // original and is harmless.
                if (b[0] < 0)
                {
                    return b[0] + 256;
                }
                return b[0];
            }
            return result;
        }

        // Reads up to len bytes into b at off; returns the count actually
        // read, or EOF at end-of-document.
        public override int Read(byte[] b, int off, int len)
        {
            DieIfClosed();
            if (b == null)
            {
                throw new ArgumentException("buffer must not be null");
            }
            if (off < 0 || len < 0 || b.Length < off + len)
            {
                throw new IndexOutOfRangeException("can't read past buffer boundaries");
            }
            if (len == 0)
            {
                return 0;
            }
            if (atEOD())
            {
                return EOF;
            }
            int limit = Math.Min(Available(), len);
            ReadFully(b, off, limit);
            return limit;
        }

        /**
         * Repositions this stream to the position at the time the mark() method was
         * last called on this input stream. If mark() has not been called this
         * method repositions the stream to its beginning.
         */
        public override void Reset()
        {
            // Special case for Reset to the start
            if (_marked_offset == 0 && _marked_offset_count == 0)
            {
                _current_block_count = _marked_offset_count;
                _current_offset = _marked_offset;
                _data = _document.GetBlockIterator();
                _buffer = null;
                return;
            }

            // Start again, then wind on to the required block
            _data = _document.GetBlockIterator();
            _current_offset = 0;
            for (int i = 0; i < _marked_offset_count; i++)
            {
                _data.MoveNext();
                _buffer = _data.Current;
                _current_offset += _buffer.Remain;
            }

            _current_block_count = _marked_offset_count;

            // Do we need to position within it?
            if (_current_offset != _marked_offset)
            {
                // Grab the right block
                _data.MoveNext();
                _buffer = _data.Current;
                _current_block_count++;

                // Skip to the right place in it
                // (It should be positioned already at the start of the block,
                // we need to Move further inside the block)
                int skipBy = _marked_offset - _current_offset;
                _buffer.Position = (_buffer.Position + skipBy);
            }

            // All done
            _current_offset = _marked_offset;
        }

        // Skips forward by up to n bytes (clamped to the document size) by
        // reading into a throwaway buffer; returns the bytes skipped.
        public override long Skip(long n)
        {
            DieIfClosed();
            if (n < 0)
            {
                return 0;
            }
            int new_offset = _current_offset + (int)n;

            if (new_offset < _current_offset)
            {
                // wrap around in Converting a VERY large long to an int
                new_offset = _document_size;
            }
            else if (new_offset > _document_size)
            {
                new_offset = _document_size;
            }

            long rval = new_offset - _current_offset;

            // TODO Do this better
            byte[] Skip = new byte[(int)rval];
            ReadFully(Skip);
            return rval;
        }

        private void DieIfClosed()
        {
            if (_closed)
            {
                throw new IOException("cannot perform requested operation on a closed stream");
            }
        }

        // True once every byte of the document has been consumed.
        private bool atEOD()
        {
            return _current_offset == _document_size;
        }

        // Throws when fewer than requestedSize bytes remain, or the stream is
        // closed. (Name misspelling preserved: identifier is part of the code.)
        private void CheckAvaliable(int requestedSize)
        {
            if (_closed)
            {
                throw new InvalidOperationException("cannot perform requested operation on a closed stream");
            }
            if (requestedSize > _document_size - _current_offset)
            {
                throw new Exception("Buffer underrun - requested " + requestedSize
                        + " bytes but " + (_document_size - _current_offset) + " was available");
            }
        }

        // Copies exactly len bytes into buf, pulling new blocks from the
        // iterator as the current one is exhausted.
        public override void ReadFully(byte[] buf, int off, int len)
        {
            CheckAvaliable(len);

            int read = 0;
            while (read < len)
            {
                //  if (_buffer == null || _buffer.remaining() == 0)
                if (_buffer == null || _buffer.Remain == 0)
                {
                    _current_block_count++;
                    //_buffer = _data.next();
                    _data.MoveNext();
                    _buffer = _data.Current;
                }

                int limit = Math.Min(len - read, _buffer.Remain);
                _buffer.Read(buf, off + read, limit);

                _current_offset += limit;
                read += limit;
            }
        }

        public override int ReadByte()
        {
            return ReadUByte();
        }

        public override double ReadDouble()
        {
            return BitConverter.Int64BitsToDouble(ReadLong());
        }

        // Little-endian 8-byte read.
        public override long ReadLong()
        {
            CheckAvaliable(SIZE_LONG);
            byte[] data = new byte[SIZE_LONG];
            ReadFully(data, 0, SIZE_LONG);
            return LittleEndian.GetLong(data, 0);
        }

        public override void ReadFully(byte[] buf)
        {
            ReadFully(buf, 0, buf.Length);
        }

        // Little-endian 2-byte signed read.
        public override short ReadShort()
        {
            CheckAvaliable(SIZE_SHORT);
            byte[] data = new byte[SIZE_SHORT];
            ReadFully(data, 0, SIZE_SHORT);
            return LittleEndian.GetShort(data);
        }

        // Little-endian 4-byte read.
        public override int ReadInt()
        {
            CheckAvaliable(SIZE_INT);
            byte[] data = new byte[SIZE_INT];
            ReadFully(data, 0, SIZE_INT);
            return LittleEndian.GetInt(data);
        }

        // Little-endian 2-byte unsigned read.
        public override int ReadUShort()
        {
            CheckAvaliable(SIZE_SHORT);
            byte[] data = new byte[SIZE_SHORT];
            ReadFully(data, 0, SIZE_SHORT);
            return LittleEndian.GetUShort(data);
        }

        // Single unsigned byte read.
        public override int ReadUByte()
        {
            CheckAvaliable(1);
            byte[] data = new byte[1];
            ReadFully(data, 0, 1);
            // NOTE(review): byte is unsigned in C#, so data[0] >= 0 always
            // holds and the "+ 256" fallback below is unreachable (Java port
            // artifact).
            if (data[0] >= 0)
                return data[0];
            return data[0] + 256;
        }

        // Total document size in bytes (not the remaining count).
        public override long Length
        {
            get
            {
                if (_closed)
                {
                    throw new InvalidOperationException("cannot perform requested operation on a closed stream");
                }
                return _document_size; //- _current_offset;
            }
        }

        // Current byte offset; the setter trusts the caller and does not
        // reposition the block iterator.
        public override long Position
        {
            get
            {
                if (_closed)
                {
                    throw new InvalidOperationException("cannot perform requested operation on a closed stream");
                }
                return _current_offset;
            }
            set
            {
                _current_offset = (int)value;
            }
        }

        // NOTE(review): this is not standard Stream.Seek semantics -- a zero
        // offset rewinds via Reset(), any other offset only records a mark;
        // the origin argument is ignored and 0 is always returned. Confirm
        // against DocumentInputStream's contract before relying on it.
        public override long Seek(long offset, SeekOrigin origin)
        {
            if (offset == 0)
            {
                Reset();
            }
            else
            {
                Mark((int)offset);
            }
            return 0;
        }
    }
}
using System.Collections.Generic;
using Microsoft.AspNetCore.Modules;
using Microsoft.Extensions.Logging;
using Orchard.ContentManagement.Handlers;
using Orchard.ContentManagement.MetaData;

namespace Orchard.ContentManagement.Drivers.Coordinators
{
    /// <summary>
    /// This component coordinates how parts are affecting content items.
    /// For each content-lifecycle event it resolves the parts declared on the
    /// item's type definition and dispatches the event to every
    /// <see cref="IContentPartHandler"/> for each resolved part.
    /// </summary>
    public class ContentPartHandlerCoordinator : ContentHandlerBase
    {
        private readonly IContentPartFactory _contentPartFactory;
        private readonly IEnumerable<IContentPartHandler> _partHandlers;
        private readonly IContentDefinitionManager _contentDefinitionManager;
        private readonly IContentFieldFactory _contentFieldFactory;

        public ContentPartHandlerCoordinator(
            IContentPartFactory contentPartFactory,
            IEnumerable<IContentPartHandler> partHandlers,
            IContentFieldFactory contentFieldFactory,
            IContentDefinitionManager contentDefinitionManager,
            ILogger<ContentPartHandlerCoordinator> logger)
        {
            _contentPartFactory = contentPartFactory;
            _contentFieldFactory = contentFieldFactory;
            _partHandlers = partHandlers;
            _contentDefinitionManager = contentDefinitionManager;
            Logger = logger;
        }

        public ILogger Logger { get; set; }

        public override void Activating(ActivatingContentContext context)
        {
            // This method is called on New()
            // Adds all the parts to a content item based on the content type definition.

            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;

                // We create the part from its known type or from a generic one
                var part = _contentPartFactory.CreateContentPart(partName) ?? new ContentPart();
                _partHandlers.Invoke(handler => handler.Activating(context, part), Logger);
                context.Builder.Weld(typePartDefinition.Name, part);
            }
        }

        public override void Activated(ActivatedContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                // NOTE(review): unlike the other overrides, this looks the
                // part up by partName rather than typePartDefinition.Name --
                // confirm this is intentional for named parts.
                var part = context.ContentItem.Get(partType, partName) as ContentPart;

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Activated(context, part), Logger);
                }
            }
        }

        public override void Creating(CreateContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart;

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Creating(context, part), Logger);
                }
            }
        }

        public override void Created(CreateContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart;

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Created(context, part), Logger);
                }
            }
        }

        public override void Initializing(InitializingContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart;

                // NOTE(review): no null check here, unlike every sibling
                // override -- handlers may receive a null part. Confirm
                // whether that is intended.
                _partHandlers.Invoke(handler => handler.Initializing(context, part), Logger);
            }
        }

        public override void Initialized(InitializingContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart;

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Initialized(context, part), Logger);
                }
            }
        }

        public override void Loading(LoadContentContext context)
        {
            // This method is called on Get()
            // Adds all the missing parts to a content item based on the content type definition.

            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                if (!context.ContentItem.Has(partName))
                {
                    // NOTE(review): the freshly created part is passed to the
                    // Loading handlers but never welded onto the content item
                    // here (compare Activating) -- verify this is deliberate.
                    var part = _contentPartFactory.CreateContentPart(partName) ?? new ContentPart();
                    _partHandlers.Invoke(handler => handler.Loading(context, part), Logger);
                }

                // Weld any fields declared on the part definition that the
                // part instance does not carry yet.
                foreach (var partFieldDefinition in typePartDefinition.PartDefinition.Fields)
                {
                    var part = context.ContentItem.Get<ContentPart>(typePartDefinition.Name);
                    var fieldName = partFieldDefinition.Name;
                    if (!part.Has(fieldName))
                    {
                        var field = _contentFieldFactory.CreateContentField(partFieldDefinition.FieldDefinition.Name);
                        if (field != null)
                        {
                            context.ContentItem.Get<ContentPart>(typePartDefinition.Name).Weld(fieldName, field);
                        }
                    }
                }
            }
        }

        public override void Loaded(LoadContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart;

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Loaded(context, part), Logger);
                }
            }
        }

        public override void Publishing(PublishContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Publishing(context, part), Logger);
                }
            }
        }

        public override void Published(PublishContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Published(context, part), Logger);
                }
            }
        }

        public override void Removing(RemoveContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Removing(context, part), Logger);
                }
            }
        }

        public override void Removed(RemoveContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Removed(context, part), Logger);
                }
            }
        }

        public override void Unpublishing(PublishContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Unpublishing(context, part), Logger);
                }
            }
        }

        public override void Unpublished(PublishContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Unpublished(context, part), Logger);
                }
            }
        }

        public override void Updating(UpdateContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Updating(context, part), Logger);
                }
            }
        }

        public override void Updated(UpdateContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart; ; // NOTE(review): stray empty statement

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.Updated(context, part), Logger);
                }
            }
        }

        public override void Versioning(VersionContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                // Building part comes from the new version, existing part
                // from the current item; both must resolve for dispatch.
                var buildingPart = context.BuildingContentItem.Get(partType, partName) as ContentPart;
                var existingPart = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart;

                if (buildingPart != null && existingPart != null)
                {
                    _partHandlers.Invoke(handler => handler.Versioning(context, existingPart, buildingPart), Logger);
                }
            }
        }

        public override void Versioned(VersionContentContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                // NOTE(review): direct casts here (not 'as' like Versioning)
                // would throw InvalidCastException on a type mismatch.
                var buildingPart = (ContentPart)context.BuildingContentItem.Get(partType, partName);
                var existingPart = (ContentPart)context.ContentItem.Get(partType, typePartDefinition.Name);

                if (buildingPart != null && existingPart != null)
                {
                    _partHandlers.Invoke(handler => handler.Versioned(context, existingPart, buildingPart), Logger);
                }
            }
        }

        public override void GetContentItemAspect(ContentItemAspectContext context)
        {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(context.ContentItem.ContentType);
            if (contentTypeDefinition == null)
                return;

            foreach (var typePartDefinition in contentTypeDefinition.Parts)
            {
                var partName = typePartDefinition.PartDefinition.Name;
                var partType = _contentPartFactory.GetContentPartType(partName) ?? typeof(ContentPart);
                var part = context.ContentItem.Get(partType, typePartDefinition.Name) as ContentPart;

                if (part != null)
                {
                    _partHandlers.Invoke(handler => handler.GetContentItemAspect(context, part), Logger);
                }
            }
        }
    }
}
using System; using System.Collections; using System.IO; using System.Text; using NUnit.Framework; using Org.BouncyCastle.Asn1; using Org.BouncyCastle.Asn1.Cms; using Org.BouncyCastle.Cms; using Org.BouncyCastle.Crypto; using Org.BouncyCastle.Pkcs; using Org.BouncyCastle.Security; using Org.BouncyCastle.Utilities; using Org.BouncyCastle.Utilities.Encoders; using Org.BouncyCastle.Utilities.IO; using Org.BouncyCastle.Utilities.Test; using Org.BouncyCastle.X509; using Org.BouncyCastle.X509.Store; namespace Org.BouncyCastle.Cms.Tests { [TestFixture] public class SignedDataTest { private const string OrigDN = "O=Bouncy Castle, C=AU"; private static IAsymmetricCipherKeyPair origKP; private static X509Certificate origCert; private const string SignDN = "CN=Bob, OU=Sales, O=Bouncy Castle, C=AU"; private static IAsymmetricCipherKeyPair signKP; private static X509Certificate signCert; private const string ReciDN = "CN=Doug, OU=Sales, O=Bouncy Castle, C=AU"; // private static IAsymmetricCipherKeyPair reciKP; // private static X509Certificate reciCert; private static X509Crl signCrl; private static IAsymmetricCipherKeyPair signGostKP; private static X509Certificate signGostCert; private static IAsymmetricCipherKeyPair signDsaKP; private static X509Certificate signDsaCert; private static IAsymmetricCipherKeyPair signECGostKP; private static X509Certificate signECGostCert; private static IAsymmetricCipherKeyPair signECDsaKP; private static X509Certificate signECDsaCert; private static IAsymmetricCipherKeyPair OrigKP { get { return origKP == null ? (origKP = CmsTestUtil.MakeKeyPair()) : origKP; } } private static IAsymmetricCipherKeyPair SignKP { get { return signKP == null ? (signKP = CmsTestUtil.MakeKeyPair()) : signKP; } } // private static IAsymmetricCipherKeyPair ReciKP // { // get { return reciKP == null ? (reciKP = CmsTestUtil.MakeKeyPair()) : reciKP; } // } private static IAsymmetricCipherKeyPair SignGostKP { get { return signGostKP == null ? 
(signGostKP = CmsTestUtil.MakeGostKeyPair()) : signGostKP; } } private static IAsymmetricCipherKeyPair SignDsaKP { get { return signDsaKP == null ? (signDsaKP = CmsTestUtil.MakeDsaKeyPair()) : signDsaKP; } } private static IAsymmetricCipherKeyPair SignECGostKP { get { return signECGostKP == null ? (signECGostKP = CmsTestUtil.MakeECGostKeyPair()) : signECGostKP; } } private static IAsymmetricCipherKeyPair SignECDsaKP { get { return signECDsaKP == null ? (signECDsaKP = CmsTestUtil.MakeECDsaKeyPair()) : signECDsaKP; } } private static X509Certificate OrigCert { get { return origCert == null ? (origCert = CmsTestUtil.MakeCertificate(OrigKP, OrigDN, OrigKP, OrigDN)) : origCert; } } private static X509Certificate SignCert { get { return signCert == null ? (signCert = CmsTestUtil.MakeCertificate(SignKP, SignDN, OrigKP, OrigDN)) : signCert; } } // private static X509Certificate ReciCert // { // get { return reciCert == null ? (reciCert = CmsTestUtil.MakeCertificate(ReciKP, ReciDN, SignKP, SignDN)) : reciCert; } // } private static X509Crl SignCrl { get { return signCrl == null ? (signCrl = CmsTestUtil.MakeCrl(SignKP)) : signCrl; } } private static X509Certificate SignGostCert { get { return signGostCert == null ? (signGostCert = CmsTestUtil.MakeCertificate(SignGostKP, SignDN, OrigKP, OrigDN)) : signGostCert; } } private static X509Certificate SignECGostCert { get { return signECGostCert == null ? (signECGostCert = CmsTestUtil.MakeCertificate(SignECGostKP, SignDN, OrigKP, OrigDN)) : signECGostCert; } } private static X509Certificate SignDsaCert { get { return signDsaCert == null ? (signDsaCert = CmsTestUtil.MakeCertificate(SignDsaKP, SignDN, OrigKP, OrigDN)) : signDsaCert; } } private static X509Certificate SignECDsaCert { get { return signECDsaCert == null ? 
					(signECDsaCert = CmsTestUtil.MakeCertificate(SignECDsaKP, SignDN, OrigKP, OrigDN))
					: signECDsaCert;
			}
		}

		// Base64-encoded sample content used by TestUnsortedAttributes.
		private static readonly byte[] disorderedMessage = Base64.Decode(
			"SU9fc3RkaW5fdXNlZABfX2xpYmNfc3RhcnRfbWFpbgBnZXRob3N0aWQAX19n" +
			"bW9uX3M=");

		// Base64-encoded CMS SignedData whose attribute set is not in canonical
		// (sorted) order; consumed by TestUnsortedAttributes.
		private static readonly byte[] disorderedSet = Base64.Decode(
			"MIIYXQYJKoZIhvcNAQcCoIIYTjCCGEoCAQExCzAJBgUrDgMCGgUAMAsGCSqG" +
			"SIb3DQEHAaCCFqswggJUMIIBwKADAgECAgMMg6wwCgYGKyQDAwECBQAwbzEL" +
			"MAkGA1UEBhMCREUxPTA7BgNVBAoUNFJlZ3VsaWVydW5nc2JlaMhvcmRlIGbI" +
			"dXIgVGVsZWtvbW11bmlrYXRpb24gdW5kIFBvc3QxITAMBgcCggYBCgcUEwEx" +
			"MBEGA1UEAxQKNFItQ0EgMTpQTjAiGA8yMDAwMDMyMjA5NDM1MFoYDzIwMDQw" +
			"MTIxMTYwNDUzWjBvMQswCQYDVQQGEwJERTE9MDsGA1UEChQ0UmVndWxpZXJ1" +
			"bmdzYmVoyG9yZGUgZsh1ciBUZWxla29tbXVuaWthdGlvbiB1bmQgUG9zdDEh" +
			"MAwGBwKCBgEKBxQTATEwEQYDVQQDFAo1Ui1DQSAxOlBOMIGhMA0GCSqGSIb3" +
			"DQEBAQUAA4GPADCBiwKBgQCKHkFTJx8GmoqFTxEOxpK9XkC3NZ5dBEKiUv0I" +
			"fe3QMqeGMoCUnyJxwW0k2/53duHxtv2yHSZpFKjrjvE/uGwdOMqBMTjMzkFg" +
			"19e9JPv061wyADOucOIaNAgha/zFt9XUyrHF21knKCvDNExv2MYIAagkTKaj" +
			"LMAw0bu1J0FadQIFAMAAAAEwCgYGKyQDAwECBQADgYEAgFauXpoTLh3Z3pT/" +
			"3bhgrxO/2gKGZopWGSWSJPNwq/U3x2EuctOJurj+y2inTcJjespThflpN+7Q" +
			"nvsUhXU+jL2MtPlObU0GmLvWbi47cBShJ7KElcZAaxgWMBzdRGqTOdtMv+ev" +
			"2t4igGF/q71xf6J2c3pTLWr6P8s6tzLfOCMwggJDMIIBr6ADAgECAgQAuzyu" +
			"MAoGBiskAwMBAgUAMG8xCzAJBgNVBAYTAkRFMT0wOwYDVQQKFDRSZWd1bGll" +
			"cnVuZ3NiZWjIb3JkZSBmyHVyIFRlbGVrb21tdW5pa2F0aW9uIHVuZCBQb3N0" +
			"MSEwDAYHAoIGAQoHFBMBMTARBgNVBAMUCjVSLUNBIDE6UE4wIhgPMjAwMTA4" +
			"MjAwODA4MjBaGA8yMDA1MDgyMDA4MDgyMFowSzELMAkGA1UEBhMCREUxEjAQ" +
			"BgNVBAoUCVNpZ250cnVzdDEoMAwGBwKCBgEKBxQTATEwGAYDVQQDFBFDQSBT" +
			"SUdOVFJVU1QgMTpQTjCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAhV12" +
			"N2WhlR6f+3CXP57GrBM9la5Vnsu2b92zv5MZqQOPeEsYbZqDCFkYg1bSwsDE" +
			"XsGVQqXdQNAGUaapr/EUVVN+hNZ07GcmC1sPeQECgUkxDYjGi4ihbvzxlahj" +
			"L4nX+UTzJVBfJwXoIvJ+lMHOSpnOLIuEL3SRhBItvRECxN0CAwEAAaMSMBAw" +
			"DgYDVR0PAQH/BAQDAgEGMAoGBiskAwMBAgUAA4GBACDc9Pc6X8sK1cerphiV" +
			"LfFv4kpZb9ev4WPy/C6987Qw1SOTElhZAmxaJQBqmDHWlQ63wj1DEqswk7hG" +
			"LrvQk/iX6KXIn8e64uit7kx6DHGRKNvNGofPjr1WelGeGW/T2ZJKgmPDjCkf" +
			"sIKt2c3gwa2pDn4mmCz/DStUIqcPDbqLMIICVTCCAcGgAwIBAgIEAJ16STAK" +
			"BgYrJAMDAQIFADBvMQswCQYDVQQGEwJERTE9MDsGA1UEChQ0UmVndWxpZXJ1" +
			"bmdzYmVoyG9yZGUgZsh1ciBUZWxla29tbXVuaWthdGlvbiB1bmQgUG9zdDEh" +
			"MAwGBwKCBgEKBxQTATEwEQYDVQQDFAo1Ui1DQSAxOlBOMCIYDzIwMDEwMjAx" +
			"MTM0NDI1WhgPMjAwNTAzMjIwODU1NTFaMG8xCzAJBgNVBAYTAkRFMT0wOwYD" +
			"VQQKFDRSZWd1bGllcnVuZ3NiZWjIb3JkZSBmyHVyIFRlbGVrb21tdW5pa2F0" +
			"aW9uIHVuZCBQb3N0MSEwDAYHAoIGAQoHFBMBMTARBgNVBAMUCjZSLUNhIDE6" +
			"UE4wgaEwDQYJKoZIhvcNAQEBBQADgY8AMIGLAoGBAIOiqxUkzVyqnvthihnl" +
			"tsE5m1Xn5TZKeR/2MQPStc5hJ+V4yptEtIx+Fn5rOoqT5VEVWhcE35wdbPvg" +
			"JyQFn5msmhPQT/6XSGOlrWRoFummXN9lQzAjCj1sgTcmoLCVQ5s5WpCAOXFw" +
			"VWu16qndz3sPItn3jJ0F3Kh3w79NglvPAgUAwAAAATAKBgYrJAMDAQIFAAOB" +
			"gQBpSRdnDb6AcNVaXSmGo6+kVPIBhot1LzJOGaPyDNpGXxd7LV4tMBF1U7gr" +
			"4k1g9BO6YiMWvw9uiTZmn0CfV8+k4fWEuG/nmafRoGIuay2f+ILuT+C0rnp1" +
			"4FgMsEhuVNJJAmb12QV0PZII+UneyhAneZuQQzVUkTcVgYxogxdSOzCCAlUw" +
			"ggHBoAMCAQICBACdekowCgYGKyQDAwECBQAwbzELMAkGA1UEBhMCREUxPTA7" +
			"BgNVBAoUNFJlZ3VsaWVydW5nc2JlaMhvcmRlIGbIdXIgVGVsZWtvbW11bmlr" +
			"YXRpb24gdW5kIFBvc3QxITAMBgcCggYBCgcUEwExMBEGA1UEAxQKNlItQ2Eg" +
			"MTpQTjAiGA8yMDAxMDIwMTEzNDcwN1oYDzIwMDUwMzIyMDg1NTUxWjBvMQsw" +
			"CQYDVQQGEwJERTE9MDsGA1UEChQ0UmVndWxpZXJ1bmdzYmVoyG9yZGUgZsh1" +
			"ciBUZWxla29tbXVuaWthdGlvbiB1bmQgUG9zdDEhMAwGBwKCBgEKBxQTATEw" +
			"EQYDVQQDFAo1Ui1DQSAxOlBOMIGhMA0GCSqGSIb3DQEBAQUAA4GPADCBiwKB" +
			"gQCKHkFTJx8GmoqFTxEOxpK9XkC3NZ5dBEKiUv0Ife3QMqeGMoCUnyJxwW0k" +
			"2/53duHxtv2yHSZpFKjrjvE/uGwdOMqBMTjMzkFg19e9JPv061wyADOucOIa" +
			"NAgha/zFt9XUyrHF21knKCvDNExv2MYIAagkTKajLMAw0bu1J0FadQIFAMAA" +
			"AAEwCgYGKyQDAwECBQADgYEAV1yTi+2gyB7sUhn4PXmi/tmBxAfe5oBjDW8m" +
			"gxtfudxKGZ6l/FUPNcrSc5oqBYxKWtLmf3XX87LcblYsch617jtNTkMzhx9e" +
			"qxiD02ufcrxz2EVt0Akdqiz8mdVeqp3oLcNU/IttpSrcA91CAnoUXtDZYwb/" +
			"gdQ4FI9l3+qo/0UwggJVMIIBwaADAgECAgQAxIymMAoGBiskAwMBAgUAMG8x" +
			"CzAJBgNVBAYTAkRFMT0wOwYDVQQKFDRSZWd1bGllcnVuZ3NiZWjIb3JkZSBm" +
			"yHVyIFRlbGVrb21tdW5pa2F0aW9uIHVuZCBQb3N0MSEwDAYHAoIGAQoHFBMB" +
			"MTARBgNVBAMUCjZSLUNhIDE6UE4wIhgPMjAwMTEwMTUxMzMxNThaGA8yMDA1" +
			"MDYwMTA5NTIxN1owbzELMAkGA1UEBhMCREUxPTA7BgNVBAoUNFJlZ3VsaWVy" +
			"dW5nc2JlaMhvcmRlIGbIdXIgVGVsZWtvbW11bmlrYXRpb24gdW5kIFBvc3Qx" +
			"ITAMBgcCggYBCgcUEwExMBEGA1UEAxQKN1ItQ0EgMTpQTjCBoTANBgkqhkiG" +
			"9w0BAQEFAAOBjwAwgYsCgYEAiokD/j6lEP4FexF356OpU5teUpGGfUKjIrFX" +
			"BHc79G0TUzgVxqMoN1PWnWktQvKo8ETaugxLkP9/zfX3aAQzDW4Zki6x6GDq" +
			"fy09Agk+RJvhfbbIzRkV4sBBco0n73x7TfG/9NTgVr/96U+I+z/1j30aboM6" +
			"9OkLEhjxAr0/GbsCBQDAAAABMAoGBiskAwMBAgUAA4GBAHWRqRixt+EuqHhR" +
			"K1kIxKGZL2vZuakYV0R24Gv/0ZR52FE4ECr+I49o8FP1qiGSwnXB0SwjuH2S" +
			"iGiSJi+iH/MeY85IHwW1P5e+bOMvEOFhZhQXQixOD7totIoFtdyaj1XGYRef" +
			"0f2cPOjNJorXHGV8wuBk+/j++sxbd/Net3FtMIICVTCCAcGgAwIBAgIEAMSM" +
			"pzAKBgYrJAMDAQIFADBvMQswCQYDVQQGEwJERTE9MDsGA1UEChQ0UmVndWxp" +
			"ZXJ1bmdzYmVoyG9yZGUgZsh1ciBUZWxla29tbXVuaWthdGlvbiB1bmQgUG9z" +
			"dDEhMAwGBwKCBgEKBxQTATEwEQYDVQQDFAo3Ui1DQSAxOlBOMCIYDzIwMDEx" +
			"MDE1MTMzNDE0WhgPMjAwNTA2MDEwOTUyMTdaMG8xCzAJBgNVBAYTAkRFMT0w" +
			"OwYDVQQKFDRSZWd1bGllcnVuZ3NiZWjIb3JkZSBmyHVyIFRlbGVrb21tdW5p" +
			"a2F0aW9uIHVuZCBQb3N0MSEwDAYHAoIGAQoHFBMBMTARBgNVBAMUCjZSLUNh" +
			"IDE6UE4wgaEwDQYJKoZIhvcNAQEBBQADgY8AMIGLAoGBAIOiqxUkzVyqnvth" +
			"ihnltsE5m1Xn5TZKeR/2MQPStc5hJ+V4yptEtIx+Fn5rOoqT5VEVWhcE35wd" +
			"bPvgJyQFn5msmhPQT/6XSGOlrWRoFummXN9lQzAjCj1sgTcmoLCVQ5s5WpCA" +
			"OXFwVWu16qndz3sPItn3jJ0F3Kh3w79NglvPAgUAwAAAATAKBgYrJAMDAQIF" +
			"AAOBgQBi5W96UVDoNIRkCncqr1LLG9vF9SGBIkvFpLDIIbcvp+CXhlvsdCJl" +
			"0pt2QEPSDl4cmpOet+CxJTdTuMeBNXxhb7Dvualog69w/+K2JbPhZYxuVFZs" +
			"Zh5BkPn2FnbNu3YbJhE60aIkikr72J4XZsI5DxpZCGh6xyV/YPRdKSljFjCC" +
			"AlQwggHAoAMCAQICAwyDqzAKBgYrJAMDAQIFADBvMQswCQYDVQQGEwJERTE9" +
			"MDsGA1UEChQ0UmVndWxpZXJ1bmdzYmVoyG9yZGUgZsh1ciBUZWxla29tbXVu" +
			"aWthdGlvbiB1bmQgUG9zdDEhMAwGBwKCBgEKBxQTATEwEQYDVQQDFAo1Ui1D" +
			"QSAxOlBOMCIYDzIwMDAwMzIyMDk0MTI3WhgPMjAwNDAxMjExNjA0NTNaMG8x" +
			"CzAJBgNVBAYTAkRFMT0wOwYDVQQKFDRSZWd1bGllcnVuZ3NiZWjIb3JkZSBm" +
			"yHVyIFRlbGVrb21tdW5pa2F0aW9uIHVuZCBQb3N0MSEwDAYHAoIGAQoHFBMB" +
			"MTARBgNVBAMUCjRSLUNBIDE6UE4wgaEwDQYJKoZIhvcNAQEBBQADgY8AMIGL" +
			"AoGBAI8x26tmrFJanlm100B7KGlRemCD1R93PwdnG7svRyf5ZxOsdGrDszNg" +
			"xg6ouO8ZHQMT3NC2dH8TvO65Js+8bIyTm51azF6clEg0qeWNMKiiXbBXa+ph" +
			"hTkGbXiLYvACZ6/MTJMJ1lcrjpRF7BXtYeYMcEF6znD4pxOqrtbf9z5hAgUA" +
			"wAAAATAKBgYrJAMDAQIFAAOBgQB99BjSKlGPbMLQAgXlvA9jUsDNhpnVm3a1" +
			"YkfxSqS/dbQlYkbOKvCxkPGA9NBxisBM8l1zFynVjJoy++aysRmcnLY/sHaz" +
			"23BF2iU7WERy18H3lMBfYB6sXkfYiZtvQZcWaO48m73ZBySuiV3iXpb2wgs/" +
			"Cs20iqroAWxwq/W/9jCCAlMwggG/oAMCAQICBDsFZ9UwCgYGKyQDAwECBQAw" +
			"bzELMAkGA1UEBhMCREUxITAMBgcCggYBCgcUEwExMBEGA1UEAxQKNFItQ0Eg" +
			"MTpQTjE9MDsGA1UEChQ0UmVndWxpZXJ1bmdzYmVoyG9yZGUgZsh1ciBUZWxl" +
			"a29tbXVuaWthdGlvbiB1bmQgUG9zdDAiGA8xOTk5MDEyMTE3MzUzNFoYDzIw" +
			"MDQwMTIxMTYwMDAyWjBvMQswCQYDVQQGEwJERTE9MDsGA1UEChQ0UmVndWxp" +
			"ZXJ1bmdzYmVoyG9yZGUgZsh1ciBUZWxla29tbXVuaWthdGlvbiB1bmQgUG9z" +
			"dDEhMAwGBwKCBgEKBxQTATEwEQYDVQQDFAozUi1DQSAxOlBOMIGfMA0GCSqG" +
			"SIb3DQEBAQUAA4GNADCBiQKBgI4B557mbKQg/AqWBXNJhaT/6lwV93HUl4U8" +
			"u35udLq2+u9phns1WZkdM3gDfEpL002PeLfHr1ID/96dDYf04lAXQfombils" +
			"of1C1k32xOvxjlcrDOuPEMxz9/HDAQZA5MjmmYHAIulGI8Qg4Tc7ERRtg/hd" +
			"0QX0/zoOeXoDSEOBAgTAAAABMAoGBiskAwMBAgUAA4GBAIyzwfT3keHI/n2P" +
			"LrarRJv96mCohmDZNpUQdZTVjGu5VQjVJwk3hpagU0o/t/FkdzAjOdfEw8Ql" +
			"3WXhfIbNLv1YafMm2eWSdeYbLcbB5yJ1od+SYyf9+tm7cwfDAcr22jNRBqx8" +
			"wkWKtKDjWKkevaSdy99sAI8jebHtWz7jzydKMIID9TCCA16gAwIBAgICbMcw" +
			"DQYJKoZIhvcNAQEFBQAwSzELMAkGA1UEBhMCREUxEjAQBgNVBAoUCVNpZ250" +
			"cnVzdDEoMAwGBwKCBgEKBxQTATEwGAYDVQQDFBFDQSBTSUdOVFJVU1QgMTpQ" +
			"TjAeFw0wNDA3MzAxMzAyNDZaFw0wNzA3MzAxMzAyNDZaMDwxETAPBgNVBAMM" +
			"CFlhY29tOlBOMQ4wDAYDVQRBDAVZYWNvbTELMAkGA1UEBhMCREUxCjAIBgNV" +
			"BAUTATEwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAIWzLlYLQApocXIp" +
			"pgCCpkkOUVLgcLYKeOd6/bXAnI2dTHQqT2bv7qzfUnYvOqiNgYdF13pOYtKg" +
			"XwXMTNFL4ZOI6GoBdNs9TQiZ7KEWnqnr2945HYx7UpgTBclbOK/wGHuCdcwO" +
			"x7juZs1ZQPFG0Lv8RoiV9s6HP7POqh1sO0P/AgMBAAGjggH1MIIB8TCBnAYD" +
			"VR0jBIGUMIGRgBQcZzNghfnXoXRm8h1+VITC5caNRqFzpHEwbzELMAkGA1UE" +
			"BhMCREUxPTA7BgNVBAoUNFJlZ3VsaWVydW5nc2JlaMhvcmRlIGbIdXIgVGVs" +
			"ZWtvbW11bmlrYXRpb24gdW5kIFBvc3QxITAMBgcCggYBCgcUEwExMBEGA1UE" +
			"AxQKNVItQ0EgMTpQToIEALs8rjAdBgNVHQ4EFgQU2e5KAzkVuKaM9I5heXkz" +
			"bcAIuR8wDgYDVR0PAQH/BAQDAgZAMBIGA1UdIAQLMAkwBwYFKyQIAQEwfwYD" +
			"VR0fBHgwdjB0oCygKoYobGRhcDovL2Rpci5zaWdudHJ1c3QuZGUvbz1TaWdu" +
			"dHJ1c3QsYz1kZaJEpEIwQDEdMBsGA1UEAxMUQ1JMU2lnblNpZ250cnVzdDE6" +
			"UE4xEjAQBgNVBAoTCVNpZ250cnVzdDELMAkGA1UEBhMCREUwYgYIKwYBBQUH" +
			"AQEEVjBUMFIGCCsGAQUFBzABhkZodHRwOi8vZGlyLnNpZ250cnVzdC5kZS9T" +
			"aWdudHJ1c3QvT0NTUC9zZXJ2bGV0L2h0dHBHYXRld2F5LlBvc3RIYW5kbGVy" +
			"MBgGCCsGAQUFBwEDBAwwCjAIBgYEAI5GAQEwDgYHAoIGAQoMAAQDAQH/MA0G" +
			"CSqGSIb3DQEBBQUAA4GBAHn1m3GcoyD5GBkKUY/OdtD6Sj38LYqYCF+qDbJR" +
			"6pqUBjY2wsvXepUppEler+stH8mwpDDSJXrJyuzf7xroDs4dkLl+Rs2x+2tg" +
			"BjU+ABkBDMsym2WpwgA8LCdymmXmjdv9tULxY+ec2pjSEzql6nEZNEfrU8nt" +
			"ZCSCavgqW4TtMYIBejCCAXYCAQEwUTBLMQswCQYDVQQGEwJERTESMBAGA1UE" +
			"ChQJU2lnbnRydXN0MSgwDAYHAoIGAQoHFBMBMTAYBgNVBAMUEUNBIFNJR05U" +
			"UlVTVCAxOlBOAgJsxzAJBgUrDgMCGgUAoIGAMBgGCSqGSIb3DQEJAzELBgkq" +
			"hkiG9w0BBwEwIwYJKoZIhvcNAQkEMRYEFIYfhPoyfGzkLWWSSLjaHb4HQmaK" +
			"MBwGCSqGSIb3DQEJBTEPFw0wNTAzMjQwNzM4MzVaMCEGBSskCAYFMRgWFi92" +
			"YXIvZmlsZXMvdG1wXzEvdGVzdDEwDQYJKoZIhvcNAQEFBQAEgYA2IvA8lhVz" +
			"VD5e/itUxbFboKxeKnqJ5n/KuO/uBCl1N14+7Z2vtw1sfkIG+bJdp3OY2Cmn" +
			"mrQcwsN99Vjal4cXVj8t+DJzFG9tK9dSLvD3q9zT/GQ0kJXfimLVwCa4NaSf" +
			"Qsu4xtG0Rav6bCcnzabAkKuNNvKtH8amSRzk870DBg==");

		// Base64-encoded CMS SignedData carrying counter-signature structures
		// (consumer not visible in this chunk of the file).
		private static readonly byte[] xtraCounterSig = Base64.Decode(
			"MIIR/AYJKoZIhvcNAQcCoIIR7TCCEekCAQExCzAJBgUrDgMCGgUAMBoGCSqG" +
			"SIb3DQEHAaANBAtIZWxsbyB3b3JsZKCCDnkwggTPMIIDt6ADAgECAgRDnYD3" +
			"MA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNVBAYTAklUMRowGAYDVQQKExFJbi5U" +
			"ZS5TLkEuIFMucC5BLjEtMCsGA1UEAxMkSW4uVGUuUy5BLiAtIENlcnRpZmlj" +
			"YXRpb24gQXV0aG9yaXR5MB4XDTA4MDkxMjExNDMxMloXDTEwMDkxMjExNDMx" +
			"MlowgdgxCzAJBgNVBAYTAklUMSIwIAYDVQQKDBlJbnRlc2EgUy5wLkEuLzA1" +
			"MjYyODkwMDE0MSowKAYDVQQLDCFCdXNpbmVzcyBDb2xsYWJvcmF0aW9uICYg" +
			"U2VjdXJpdHkxHjAcBgNVBAMMFU1BU1NJTUlMSUFOTyBaSUNDQVJESTERMA8G" +
			"A1UEBAwIWklDQ0FSREkxFTATBgNVBCoMDE1BU1NJTUlMSUFOTzEcMBoGA1UE" +
			"BRMTSVQ6WkNDTVNNNzZIMTRMMjE5WTERMA8GA1UELhMIMDAwMDI1ODUwgaAw" +
			"DQYJKoZIhvcNAQEBBQADgY4AMIGKAoGBALeJTjmyFgx1SIP6c2AuB/kuyHo5" +
			"j/prKELTALsFDimre/Hxr3wOSet1TdQfFzU8Lu+EJqgfV9cV+cI1yeH1rZs7" +
			"lei7L3tX/VR565IywnguX5xwvteASgWZr537Fkws50bvTEMyYOj1Tf3FZvZU" +
			"z4n4OD39KI4mfR9i1eEVIxR3AgQAizpNo4IBoTCCAZ0wHQYDVR0RBBYwFIES" +
			"emljY2FyZGlAaW50ZXNhLml0MC8GCCsGAQUFBwEDBCMwITAIBgYEAI5GAQEw" +
			"CwYGBACORgEDAgEUMAgGBgQAjkYBBDBZBgNVHSAEUjBQME4GBgQAizABATBE" +
			"MEIGCCsGAQUFBwIBFjZodHRwOi8vZS10cnVzdGNvbS5pbnRlc2EuaXQvY2Ff" +
			"cHViYmxpY2EvQ1BTX0lOVEVTQS5odG0wDgYDVR0PAQH/BAQDAgZAMIGDBgNV" +
			"HSMEfDB6gBQZCQOW0bjFWBt+EORuxPagEgkQqKFcpFowWDELMAkGA1UEBhMC" +
			"SVQxGjAYBgNVBAoTEUluLlRlLlMuQS4gUy5wLkEuMS0wKwYDVQQDEyRJbi5U" +
			"ZS5TLkEuIC0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHmCBDzRARMwOwYDVR0f" +
			"BDQwMjAwoC6gLIYqaHR0cDovL2UtdHJ1c3Rjb20uaW50ZXNhLml0L0NSTC9J" +
			"TlRFU0EuY3JsMB0GA1UdDgQWBBTf5ItL8KmQh541Dxt7YxcWI1254TANBgkq" +
			"hkiG9w0BAQUFAAOCAQEAgW+uL1CVWQepbC/wfCmR6PN37Sueb4xiKQj2mTD5" +
			"UZ5KQjpivy/Hbuf0NrfKNiDEhAvoHSPC31ebGiKuTMFNyZPHfPEUnyYGSxea" +
			"2w837aXJFr6utPNQGBRi89kH90sZDlXtOSrZI+AzJJn5QK3F9gjcayU2NZXQ" +
			"MJgRwYmFyn2w4jtox+CwXPQ9E5XgxiMZ4WDL03cWVXDLX00EOJwnDDMUNTRI" +
			"m9Zv+4SKTNlfFbi9UTBqWBySkDzAelsfB2U61oqc2h1xKmCtkGMmN9iZT+Qz" +
			"ZC/vaaT+hLEBFGAH2gwFrYc4/jTBKyBYeU1vsAxsibIoTs1Apgl6MH75qPDL" +
			"BzCCBM8wggO3oAMCAQICBEOdgPcwDQYJKoZIhvcNAQEFBQAwWDELMAkGA1UE" +
			"BhMCSVQxGjAYBgNVBAoTEUluLlRlLlMuQS4gUy5wLkEuMS0wKwYDVQQDEyRJ" +
			"bi5UZS5TLkEuIC0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwOTEy" +
			"MTE0MzEyWhcNMTAwOTEyMTE0MzEyWjCB2DELMAkGA1UEBhMCSVQxIjAgBgNV" +
			"BAoMGUludGVzYSBTLnAuQS4vMDUyNjI4OTAwMTQxKjAoBgNVBAsMIUJ1c2lu" +
			"ZXNzIENvbGxhYm9yYXRpb24gJiBTZWN1cml0eTEeMBwGA1UEAwwVTUFTU0lN" +
			"SUxJQU5PIFpJQ0NBUkRJMREwDwYDVQQEDAhaSUNDQVJESTEVMBMGA1UEKgwM" +
			"TUFTU0lNSUxJQU5PMRwwGgYDVQQFExNJVDpaQ0NNU003NkgxNEwyMTlZMREw" +
			"DwYDVQQuEwgwMDAwMjU4NTCBoDANBgkqhkiG9w0BAQEFAAOBjgAwgYoCgYEA" +
			"t4lOObIWDHVIg/pzYC4H+S7IejmP+msoQtMAuwUOKat78fGvfA5J63VN1B8X" +
			"NTwu74QmqB9X1xX5wjXJ4fWtmzuV6Lsve1f9VHnrkjLCeC5fnHC+14BKBZmv" +
			"nfsWTCznRu9MQzJg6PVN/cVm9lTPifg4Pf0ojiZ9H2LV4RUjFHcCBACLOk2j" +
			"ggGhMIIBnTAdBgNVHREEFjAUgRJ6aWNjYXJkaUBpbnRlc2EuaXQwLwYIKwYB" +
			"BQUHAQMEIzAhMAgGBgQAjkYBATALBgYEAI5GAQMCARQwCAYGBACORgEEMFkG" +
			"A1UdIARSMFAwTgYGBACLMAEBMEQwQgYIKwYBBQUHAgEWNmh0dHA6Ly9lLXRy" +
			"dXN0Y29tLmludGVzYS5pdC9jYV9wdWJibGljYS9DUFNfSU5URVNBLmh0bTAO" +
			"BgNVHQ8BAf8EBAMCBkAwgYMGA1UdIwR8MHqAFBkJA5bRuMVYG34Q5G7E9qAS" +
			"CRCooVykWjBYMQswCQYDVQQGEwJJVDEaMBgGA1UEChMRSW4uVGUuUy5BLiBT" +
			"LnAuQS4xLTArBgNVBAMTJEluLlRlLlMuQS4gLSBDZXJ0aWZpY2F0aW9uIEF1" +
			"dGhvcml0eYIEPNEBEzA7BgNVHR8ENDAyMDCgLqAshipodHRwOi8vZS10cnVz" +
			"dGNvbS5pbnRlc2EuaXQvQ1JML0lOVEVTQS5jcmwwHQYDVR0OBBYEFN/ki0vw" +
			"qZCHnjUPG3tjFxYjXbnhMA0GCSqGSIb3DQEBBQUAA4IBAQCBb64vUJVZB6ls" +
			"L/B8KZHo83ftK55vjGIpCPaZMPlRnkpCOmK/L8du5/Q2t8o2IMSEC+gdI8Lf" +
			"V5saIq5MwU3Jk8d88RSfJgZLF5rbDzftpckWvq6081AYFGLz2Qf3SxkOVe05" +
			"Ktkj4DMkmflArcX2CNxrJTY1ldAwmBHBiYXKfbDiO2jH4LBc9D0TleDGIxnh" +
			"YMvTdxZVcMtfTQQ4nCcMMxQ1NEib1m/7hIpM2V8VuL1RMGpYHJKQPMB6Wx8H" +
			"ZTrWipzaHXEqYK2QYyY32JlP5DNkL+9ppP6EsQEUYAfaDAWthzj+NMErIFh5" +
			"TW+wDGyJsihOzUCmCXowfvmo8MsHMIIEzzCCA7egAwIBAgIEQ52A9zANBgkq" +
			"hkiG9w0BAQUFADBYMQswCQYDVQQGEwJJVDEaMBgGA1UEChMRSW4uVGUuUy5B" +
			"LiBTLnAuQS4xLTArBgNVBAMTJEluLlRlLlMuQS4gLSBDZXJ0aWZpY2F0aW9u" +
			"IEF1dGhvcml0eTAeFw0wODA5MTIxMTQzMTJaFw0xMDA5MTIxMTQzMTJaMIHY" +
			"MQswCQYDVQQGEwJJVDEiMCAGA1UECgwZSW50ZXNhIFMucC5BLi8wNTI2Mjg5" +
			"MDAxNDEqMCgGA1UECwwhQnVzaW5lc3MgQ29sbGFib3JhdGlvbiAmIFNlY3Vy" +
			"aXR5MR4wHAYDVQQDDBVNQVNTSU1JTElBTk8gWklDQ0FSREkxETAPBgNVBAQM" +
			"CFpJQ0NBUkRJMRUwEwYDVQQqDAxNQVNTSU1JTElBTk8xHDAaBgNVBAUTE0lU" +
			"OlpDQ01TTTc2SDE0TDIxOVkxETAPBgNVBC4TCDAwMDAyNTg1MIGgMA0GCSqG" +
			"SIb3DQEBAQUAA4GOADCBigKBgQC3iU45shYMdUiD+nNgLgf5Lsh6OY/6ayhC" +
			"0wC7BQ4pq3vx8a98DknrdU3UHxc1PC7vhCaoH1fXFfnCNcnh9a2bO5Xouy97" +
			"V/1UeeuSMsJ4Ll+ccL7XgEoFma+d+xZMLOdG70xDMmDo9U39xWb2VM+J+Dg9" +
			"/SiOJn0fYtXhFSMUdwIEAIs6TaOCAaEwggGdMB0GA1UdEQQWMBSBEnppY2Nh" +
			"cmRpQGludGVzYS5pdDAvBggrBgEFBQcBAwQjMCEwCAYGBACORgEBMAsGBgQA" +
			"jkYBAwIBFDAIBgYEAI5GAQQwWQYDVR0gBFIwUDBOBgYEAIswAQEwRDBCBggr" +
			"BgEFBQcCARY2aHR0cDovL2UtdHJ1c3Rjb20uaW50ZXNhLml0L2NhX3B1YmJs" +
			"aWNhL0NQU19JTlRFU0EuaHRtMA4GA1UdDwEB/wQEAwIGQDCBgwYDVR0jBHww" +
			"eoAUGQkDltG4xVgbfhDkbsT2oBIJEKihXKRaMFgxCzAJBgNVBAYTAklUMRow" +
			"GAYDVQQKExFJbi5UZS5TLkEuIFMucC5BLjEtMCsGA1UEAxMkSW4uVGUuUy5B" +
			"LiAtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ80QETMDsGA1UdHwQ0MDIw" +
			"MKAuoCyGKmh0dHA6Ly9lLXRydXN0Y29tLmludGVzYS5pdC9DUkwvSU5URVNB" +
			"LmNybDAdBgNVHQ4EFgQU3+SLS/CpkIeeNQ8be2MXFiNdueEwDQYJKoZIhvcN" +
			"AQEFBQADggEBAIFvri9QlVkHqWwv8Hwpkejzd+0rnm+MYikI9pkw+VGeSkI6" +
			"Yr8vx27n9Da3yjYgxIQL6B0jwt9XmxoirkzBTcmTx3zxFJ8mBksXmtsPN+2l" +
			"yRa+rrTzUBgUYvPZB/dLGQ5V7Tkq2SPgMySZ+UCtxfYI3GslNjWV0DCYEcGJ" +
			"hcp9sOI7aMfgsFz0PROV4MYjGeFgy9N3FlVwy19NBDicJwwzFDU0SJvWb/uE" +
			"ikzZXxW4vVEwalgckpA8wHpbHwdlOtaKnNodcSpgrZBjJjfYmU/kM2Qv72mk" +
			"/oSxARRgB9oMBa2HOP40wSsgWHlNb7AMbImyKE7NQKYJejB++ajwywcxggM8" +
			"MIIDOAIBATBgMFgxCzAJBgNVBAYTAklUMRowGAYDVQQKExFJbi5UZS5TLkEu" +
			"IFMucC5BLjEtMCsGA1UEAxMkSW4uVGUuUy5BLiAtIENlcnRpZmljYXRpb24g" +
			"QXV0aG9yaXR5AgRDnYD3MAkGBSsOAwIaBQAwDQYJKoZIhvcNAQEBBQAEgYB+" +
			"lH2cwLqc91mP8prvgSV+RRzk13dJdZvdoVjgQoFrPhBiZCNIEoHvIhMMA/sM" +
			"X6euSRZk7EjD24FasCEGYyd0mJVLEy6TSPmuW+wWz/28w3a6IWXBGrbb/ild" +
			"/CJMkPgLPGgOVD1WDwiNKwfasiQSFtySf5DPn3jFevdLeMmEY6GCAjIwggEV" +
			"BgkqhkiG9w0BCQYxggEGMIIBAgIBATBgMFgxCzAJBgNVBAYTAklUMRowGAYD" +
			"VQQKExFJbi5UZS5TLkEuIFMucC5BLjEtMCsGA1UEAxMkSW4uVGUuUy5BLiAt" +
			"IENlcnRpZmljYXRpb24gQXV0aG9yaXR5AgRDnYD3MAkGBSsOAwIaBQAwDQYJ" +
			"KoZIhvcNAQEBBQAEgYBHlOULfT5GDigIvxP0qZOy8VbpntmzaPF55VV4buKV" +
			"35J+uHp98gXKp0LrHM69V5IRKuyuQzHHFBqsXxsRI9o6KoOfgliD9Xc+BeMg" +
			"dKzQhBhBYoFREq8hQM0nSbqDNHYAQyNHMzUA/ZQUO5dlFuH8Dw3iDYAhNtfd" +
			"PrlchKJthDCCARUGCSqGSIb3DQEJBjGCAQYwggECAgEBMGAwWDELMAkGA1UE" +
			"BhMCSVQxGjAYBgNVBAoTEUluLlRlLlMuQS4gUy5wLkEuMS0wKwYDVQQDEyRJ" +
			"bi5UZS5TLkEuIC0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkCBEOdgPcwCQYF" +
			"Kw4DAhoFADANBgkqhkiG9w0BAQEFAASBgEeU5Qt9PkYOKAi/E/Spk7LxVume" +
			"2bNo8XnlVXhu4pXfkn64en3yBcqnQusczr1XkhEq7K5DMccUGqxfGxEj2joq" +
			"g5+CWIP1dz4F4yB0rNCEGEFigVESryFAzSdJuoM0dgBDI0czNQD9lBQ7l2UW" +
			"4fwPDeINgCE2190+uVyEom2E");

		// Verifies every signer in the signed data against its matching certificate
		// from the embedded certificate store. When contentDigest is non-null, also
		// checks each signer's computed content digest equals it.
		private void VerifySignatures(
			CmsSignedData	s,
			byte[]			contentDigest)
		{
			IX509Store x509Certs = s.GetCertificates("Collection");
			SignerInformationStore signers = s.GetSignerInfos();
			ICollection c = signers.GetSigners();

			foreach (SignerInformation signer in c)
			{
				// Look up the certificate matching this signer's identifier;
				// the first (and expected only) match is used.
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.IsTrue(signer.Verify(cert));

				if (contentDigest != null)
				{
					Assert.IsTrue(Arrays.AreEqual(contentDigest, signer.GetContentDigest()));
				}
			}
		}

		// Convenience overload: verify signatures without checking content digests.
		private void VerifySignatures(
			CmsSignedData s)
		{
			VerifySignatures(s, null);
		}

		// Checks that a signature can be verified "detached": the CmsSignedData is
		// rebuilt from a table of precomputed digests plus the encoded signature,
		// without the original content.
		[Test]
		public void TestDetachedVerification()
		{
			byte[] data = Encoding.ASCII.GetBytes("Hello World!");
			CmsProcessable msg = new CmsProcessableByteArray(data);

			IList certList = new ArrayList();
			certList.Add(OrigCert);
			certList.Add(SignCert);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			// Two signers over the same content, one per digest algorithm.
			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1);
			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestMD5);

			gen.AddCertificates(x509Certs);

			CmsSignedData s = gen.Generate(msg);

			// Rebuild from precomputed digests keyed by digest OID (detached content).
			IDictionary hashes = new Hashtable();
			hashes.Add(CmsSignedDataGenerator.DigestSha1, CalculateHash("SHA1", data));
			hashes.Add(CmsSignedDataGenerator.DigestMD5, CalculateHash("MD5", data));

			s = new CmsSignedData(hashes, s.GetEncoded());

			VerifySignatures(s, null);
		}

		// Computes the digest of data using the named digest algorithm.
		private byte[] CalculateHash(
			string	digestName,
			byte[]	data)
		{
			IDigest digest = DigestUtilities.GetDigest(digestName);
			digest.BlockUpdate(data, 0, data.Length);
			return DigestUtilities.DoFinal(digest);
		}

		// Builds an encapsulated signature with two signers (SHA-1 and MD5),
		// round-trips it through its encoding, then checks the signer count, the
		// signed message-digest attribute of each signer, signer lookup by
		// SignerID, and regeneration from the existing signer store.
		[Test]
		public void TestSha1AndMD5WithRsaEncapsulatedRepeated()
		{
			IList certList = new ArrayList();
			CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!"));

			certList.Add(OrigCert);
			certList.Add(SignCert);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1);
			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestMD5);

			gen.AddCertificates(x509Certs);

			CmsSignedData s = gen.Generate(msg, true);

			// Re-parse from the encoding so everything below exercises the
			// round-tripped structure, not the in-memory one.
			s = new CmsSignedData(ContentInfo.GetInstance(Asn1Object.FromByteArray(s.GetEncoded())));

			x509Certs = s.GetCertificates("Collection");

			SignerInformationStore signers = s.GetSignerInfos();

			Assert.AreEqual(2, signers.Count);

			SignerID sid = null;

			ICollection c = signers.GetSigners();

			foreach (SignerInformation signer in c)
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				sid = signer.SignerID;

				Assert.IsTrue(signer.Verify(cert));

				//
				// check content digest
				//
				byte[] contentDigest = (byte[])gen.GetGeneratedDigests()[signer.DigestAlgOid];

				AttributeTable table = signer.SignedAttributes;
				Asn1.Cms.Attribute hash = table[CmsAttributes.MessageDigest];

				// The signed message-digest attribute must carry the digest the
				// generator computed for this signer's algorithm.
				Assert.IsTrue(Arrays.AreEqual(contentDigest, ((Asn1OctetString)hash.AttrValues[0]).GetOctets()));
			}

			// Both signers share the same SignerID, so a lookup by it returns both.
			c = signers.GetSigners(sid);

			Assert.AreEqual(2, c.Count);

			//
			// try using existing signer
			//
			gen = new CmsSignedDataGenerator();

			gen.AddSigners(s.GetSignerInfos());

			gen.AddCertificates(s.GetCertificates("Collection"));
			gen.AddCrls(s.GetCrls("Collection"));

			s = gen.Generate(msg, true);

			s = new
				CmsSignedData(ContentInfo.GetInstance(Asn1Object.FromByteArray(s.GetEncoded())));

			x509Certs = s.GetCertificates("Collection");

			signers = s.GetSignerInfos();
			c = signers.GetSigners();

			Assert.AreEqual(2, c.Count);

			foreach (SignerInformation signer in c)
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.AreEqual(true, signer.Verify(cert));
			}

			CheckSignerStoreReplacement(s, signers);
		}

		// NB: C# build doesn't support "no attributes" version of CmsSignedDataGenerator.Generate
		//		[Test]
		//		public void TestSha1WithRsaNoAttributes()
		//		{
		//			IList certList = new ArrayList();
		//			CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello world!"));
		//
		//			certList.Add(OrigCert);
		//			certList.Add(SignCert);
		//
		//			IX509Store x509Certs = X509StoreFactory.Create(
		//				"Certificate/Collection",
		//				new X509CollectionStoreParameters(certList));
		//
		//			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();
		//
		//			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1);
		//
		//			gen.AddCertificates(x509Certs);
		//
		//			CmsSignedData s = gen.Generate(CmsSignedDataGenerator.Data, msg, false, false);
		//
		//			//
		//			// compute expected content digest
		//			//
		//			IDigest md = DigestUtilities.GetDigest("SHA1");
		//
		//			byte[] testBytes = Encoding.ASCII.GetBytes("Hello world!");
		//			md.BlockUpdate(testBytes, 0, testBytes.Length);
		//			byte[] hash = DigestUtilities.DoFinal(md);
		//
		//			VerifySignatures(s, hash);
		//		}

		// Signs supplying the signed-attribute table (containing the precomputed
		// message-digest attribute) explicitly, then verifies the detached result.
		[Test]
		public void TestSha1WithRsaAndAttributeTable()
		{
			byte[] testBytes = Encoding.ASCII.GetBytes("Hello world!");
			IList certList = new ArrayList();
			CmsProcessable msg = new CmsProcessableByteArray(testBytes);

			certList.Add(OrigCert);
			certList.Add(SignCert);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			CmsSignedDataGenerator gen
				= new CmsSignedDataGenerator();

			// Precompute the content digest ourselves and supply it as the
			// message-digest signed attribute rather than letting the generator
			// compute it.
			IDigest md = DigestUtilities.GetDigest("SHA1");
			md.BlockUpdate(testBytes, 0, testBytes.Length);
			byte[] hash = DigestUtilities.DoFinal(md);

			Asn1.Cms.Attribute attr = new Asn1.Cms.Attribute(CmsAttributes.MessageDigest,
				new DerSet(new DerOctetString(hash)));

			Asn1EncodableVector v = new Asn1EncodableVector(attr);

			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1,
				new AttributeTable(v), null);

			gen.AddCertificates(x509Certs);

			CmsSignedData s = gen.Generate(CmsSignedDataGenerator.Data, null, false);

			//
			// the signature is detached, so need to add msg before passing on
			//
			s = new CmsSignedData(msg, s.GetEncoded());

			//
			// compute expected content digest
			//
			VerifySignatures(s, hash);
		}

		[Test]
		public void TestSha1WithRsaEncapsulated()
		{
			EncapsulatedTest(SignKP, SignCert, CmsSignedDataGenerator.DigestSha1);
		}

		[Test]
		public void TestSha1WithRsaEncapsulatedSubjectKeyID()
		{
			SubjectKeyIDTest(SignKP, SignCert, CmsSignedDataGenerator.DigestSha1);
		}

		[Test]
		public void TestSha1WithRsaPss()
		{
			rsaPssTest("SHA1", CmsSignedDataGenerator.DigestSha1);
		}

		[Test]
		public void TestSha224WithRsaPss()
		{
			rsaPssTest("SHA224", CmsSignedDataGenerator.DigestSha224);
		}

		[Test]
		public void TestSha256WithRsaPss()
		{
			rsaPssTest("SHA256", CmsSignedDataGenerator.DigestSha256);
		}

		[Test]
		public void TestSha384WithRsaPss()
		{
			rsaPssTest("SHA384", CmsSignedDataGenerator.DigestSha384);
		}

		[Test]
		public void TestSha224WithRsaEncapsulated()
		{
			EncapsulatedTest(SignKP, SignCert, CmsSignedDataGenerator.DigestSha224);
		}

		[Test]
		public void TestSha256WithRsaEncapsulated()
		{
			EncapsulatedTest(SignKP, SignCert, CmsSignedDataGenerator.DigestSha256);
		}

		[Test]
		public void TestRipeMD128WithRsaEncapsulated()
		{
			EncapsulatedTest(SignKP, SignCert, CmsSignedDataGenerator.DigestRipeMD128);
		}

		[Test]
		public void TestRipeMD160WithRsaEncapsulated()
		{
			EncapsulatedTest(SignKP, SignCert, CmsSignedDataGenerator.DigestRipeMD160);
		}

		[Test]
		public void TestRipeMD256WithRsaEncapsulated()
		{
			EncapsulatedTest(SignKP, SignCert, CmsSignedDataGenerator.DigestRipeMD256);
		}

		[Test]
		public void TestECDsaEncapsulated()
		{
			EncapsulatedTest(SignECDsaKP, SignECDsaCert, CmsSignedDataGenerator.DigestSha1);
		}

		[Test]
		public void TestECDsaEncapsulatedSubjectKeyID()
		{
			SubjectKeyIDTest(SignECDsaKP, SignECDsaCert, CmsSignedDataGenerator.DigestSha1);
		}

		[Test]
		public void TestECDsaSha224Encapsulated()
		{
			EncapsulatedTest(SignECDsaKP, SignECDsaCert, CmsSignedDataGenerator.DigestSha224);
		}

		[Test]
		public void TestECDsaSha256Encapsulated()
		{
			EncapsulatedTest(SignECDsaKP, SignECDsaCert, CmsSignedDataGenerator.DigestSha256);
		}

		[Test]
		public void TestECDsaSha384Encapsulated()
		{
			EncapsulatedTest(SignECDsaKP, SignECDsaCert, CmsSignedDataGenerator.DigestSha384);
		}

		[Test]
		public void TestECDsaSha512Encapsulated()
		{
			EncapsulatedTest(SignECDsaKP, SignECDsaCert, CmsSignedDataGenerator.DigestSha512);
		}

		// Round-trips the EC key pair through its DER-encoded (SPKI / PKCS#8)
		// forms before signing — the C# analogue of the Java KeyFactory test
		// preserved in the comments below.
		[Test]
		public void TestECDsaSha512EncapsulatedWithKeyFactoryAsEC()
		{
//			X509EncodedKeySpec pubSpec = new X509EncodedKeySpec(_signEcDsaKP.getPublic().getEncoded());
			byte[] pubEnc = SubjectPublicKeyInfoFactory.CreateSubjectPublicKeyInfo(SignECDsaKP.Public).GetDerEncoded();
//			PKCS8EncodedKeySpec privSpec = new PKCS8EncodedKeySpec(_signEcDsaKP.Private.getEncoded());
			byte[] privEnc = PrivateKeyInfoFactory.CreatePrivateKeyInfo(SignECDsaKP.Private).GetDerEncoded();
//			KeyFactory keyFact = KeyFactory.GetInstance("EC", "BC");
//			KeyPair kp = new KeyPair(keyFact.generatePublic(pubSpec), keyFact.generatePrivate(privSpec));
			IAsymmetricCipherKeyPair kp = new AsymmetricCipherKeyPair(
				PublicKeyFactory.CreateKey(pubEnc),
				PrivateKeyFactory.CreateKey(privEnc));

			EncapsulatedTest(kp, SignECDsaCert, CmsSignedDataGenerator.DigestSha512);
		}

		[Test]
		public void TestDsaEncapsulated()
		{
			EncapsulatedTest(SignDsaKP, SignDsaCert, CmsSignedDataGenerator.DigestSha1);
		}

		[Test]
		public void TestDsaEncapsulatedSubjectKeyID()
		{
			SubjectKeyIDTest(SignDsaKP, SignDsaCert, CmsSignedDataGenerator.DigestSha1);
		}

		[Test]
		public void
			TestGost3411WithGost3410Encapsulated()
		{
			EncapsulatedTest(SignGostKP, SignGostCert, CmsSignedDataGenerator.DigestGost3411);
		}

		[Test]
		public void TestGost3411WithECGost3410Encapsulated()
		{
			EncapsulatedTest(SignECGostKP, SignECGostCert, CmsSignedDataGenerator.DigestGost3411);
		}

		// Adds two counter-signatures to a signer and checks that both verify and
		// that neither carries a content-type signed attribute.
		[Test]
		public void TestSha1WithRsaCounterSignature()
		{
			IList certList = new ArrayList();
			IList crlList = new ArrayList();
			CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!"));

			certList.Add(SignCert);
			certList.Add(OrigCert);

			crlList.Add(SignCrl);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));
			IX509Store x509Crls = X509StoreFactory.Create(
				"CRL/Collection",
				new X509CollectionStoreParameters(crlList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			gen.AddSigner(SignKP.Private, SignCert, CmsSignedDataGenerator.DigestSha1);

			gen.AddCertificates(x509Certs);
			gen.AddCrls(x509Crls);

			CmsSignedData s = gen.Generate(msg, true);

			SignerInformation origSigner = (SignerInformation) new ArrayList(s.GetSignerInfos().GetSigners())[0];

			// Counter-sign the same original signer twice, stacking the results.
			SignerInformationStore counterSigners1 = gen.GenerateCounterSigners(origSigner);
			SignerInformationStore counterSigners2 = gen.GenerateCounterSigners(origSigner);

			SignerInformation signer1 = SignerInformation.AddCounterSigners(origSigner, counterSigners1);
			SignerInformation signer2 = SignerInformation.AddCounterSigners(signer1, counterSigners2);

			SignerInformationStore cs = signer2.GetCounterSignatures();
			ICollection csSigners = cs.GetSigners();
			Assert.AreEqual(2, csSigners.Count);

			foreach (SignerInformation cSigner in csSigners)
			{
				ICollection certCollection = x509Certs.GetMatches(cSigner.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				// Counter-signatures are expected to have no content-type attribute.
				Assert.IsNull(cSigner.SignedAttributes[Asn1.Pkcs.PkcsObjectIdentifiers.Pkcs9AtContentType]);
				Assert.IsTrue(cSigner.Verify(cert));
			}
		}
		// Signs with RSASSA-PSS using the given digest OID and verifies the result
		// against a locally computed content digest.
		private void rsaPssTest(
			string	digestName,
			string	digestOID)
		{
			IList certList = new ArrayList();
			byte[] msgBytes = Encoding.ASCII.GetBytes("Hello World!");
			CmsProcessable msg = new CmsProcessableByteArray(msgBytes);

			certList.Add(OrigCert);
			certList.Add(SignCert);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.EncryptionRsaPss, digestOID);

			gen.AddCertificates(x509Certs);

			CmsSignedData s = gen.Generate(CmsSignedDataGenerator.Data, msg, false);

			//
			// compute expected content digest
			//
			IDigest md = DigestUtilities.GetDigest(digestName);

			md.BlockUpdate(msgBytes, 0, msgBytes.Length);
			byte[] expectedDigest = DigestUtilities.DoFinal(md);

			VerifySignatures(s, expectedDigest);
		}

		// Signs identifying the signer by subject key identifier instead of
		// issuer/serial, round-trips the structure, verifies signers and CRLs,
		// then regenerates from the existing signer store.
		private void SubjectKeyIDTest(
			IAsymmetricCipherKeyPair	signaturePair,
			X509Certificate				signatureCert,
			string						digestAlgorithm)
		{
			IList certList = new ArrayList();
			IList crlList = new ArrayList();
			CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!"));

			certList.Add(signatureCert);
			certList.Add(OrigCert);

			crlList.Add(SignCrl);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));
			IX509Store x509Crls = X509StoreFactory.Create(
				"CRL/Collection",
				new X509CollectionStoreParameters(crlList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			// Identify the signer by the subject key id derived from its public key.
			gen.AddSigner(signaturePair.Private,
				CmsTestUtil.CreateSubjectKeyId(signatureCert.GetPublicKey()).GetKeyIdentifier(),
				digestAlgorithm);

			gen.AddCertificates(x509Certs);
			gen.AddCrls(x509Crls);

			CmsSignedData s = gen.Generate(msg, true);

			// Expect SignedData version 3 when signers use subject-key-id identifiers.
			Assert.AreEqual(3, s.Version);

			MemoryStream bIn = new MemoryStream(s.GetEncoded(), false);
			Asn1InputStream aIn = new Asn1InputStream(bIn);

			s = new CmsSignedData(ContentInfo.GetInstance(aIn.ReadObject()));

			x509Certs =
				s.GetCertificates("Collection");
			x509Crls = s.GetCrls("Collection");

			SignerInformationStore signers = s.GetSignerInfos();

			foreach (SignerInformation signer in signers.GetSigners())
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.IsTrue(signer.Verify(cert));
			}

			//
			// check for CRLs
			//
			ArrayList crls = new ArrayList(x509Crls.GetMatches(null));

			Assert.AreEqual(1, crls.Count);

			Assert.IsTrue(crls.Contains(SignCrl));

			//
			// try using existing signer
			//
			gen = new CmsSignedDataGenerator();

			gen.AddSigners(s.GetSignerInfos());

			gen.AddCertificates(s.GetCertificates("Collection"));
			gen.AddCrls(s.GetCrls("Collection"));

			s = gen.Generate(msg, true);

			bIn = new MemoryStream(s.GetEncoded(), false);
			aIn = new Asn1InputStream(bIn);

			s = new CmsSignedData(ContentInfo.GetInstance(aIn.ReadObject()));

			x509Certs = s.GetCertificates("Collection");
			x509Crls = s.GetCrls("Collection");

			signers = s.GetSignerInfos();

			foreach (SignerInformation signer in signers.GetSigners())
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.IsTrue(signer.Verify(cert));
			}

			CheckSignerStoreReplacement(s, signers);
		}

		// Generates an encapsulated signature with the given key pair, certificate
		// and digest, round-trips it through its encoding, verifies signers and
		// CRLs, then regenerates from the existing signer store.
		private void EncapsulatedTest(
			IAsymmetricCipherKeyPair	signaturePair,
			X509Certificate				signatureCert,
			string						digestAlgorithm)
		{
			IList certList = new ArrayList();
			IList crlList = new ArrayList();
			CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!"));

			certList.Add(signatureCert);
			certList.Add(OrigCert);

			crlList.Add(SignCrl);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));
			IX509Store x509Crls = X509StoreFactory.Create(
				"CRL/Collection",
				new X509CollectionStoreParameters(crlList));
			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			gen.AddSigner(signaturePair.Private, signatureCert, digestAlgorithm);

			gen.AddCertificates(x509Certs);
			gen.AddCrls(x509Crls);

			CmsSignedData s = gen.Generate(msg, true);

			// Round-trip through the encoding before verifying.
			s = new CmsSignedData(ContentInfo.GetInstance(Asn1Object.FromByteArray(s.GetEncoded())));

			x509Certs = s.GetCertificates("Collection");
			x509Crls = s.GetCrls("Collection");

			SignerInformationStore signers = s.GetSignerInfos();
			ICollection c = signers.GetSigners();

			foreach (SignerInformation signer in c)
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.IsTrue(signer.Verify(cert));
			}

			//
			// check for CRLs
			//
			ArrayList crls = new ArrayList(x509Crls.GetMatches(null));

			Assert.AreEqual(1, crls.Count);

			Assert.IsTrue(crls.Contains(SignCrl));

			//
			// try using existing signer
			//
			gen = new CmsSignedDataGenerator();

			gen.AddSigners(s.GetSignerInfos());

			gen.AddCertificates(s.GetCertificates("Collection"));
			gen.AddCrls(s.GetCrls("Collection"));

			s = gen.Generate(msg, true);

			s = new CmsSignedData(ContentInfo.GetInstance(Asn1Object.FromByteArray(s.GetEncoded())));

			x509Certs = s.GetCertificates("Collection");
			x509Crls = s.GetCrls("Collection");

			signers = s.GetSignerInfos();
			c = signers.GetSigners();

			foreach (SignerInformation signer in c)
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.IsTrue(signer.Verify(cert));
			}

			CheckSignerStoreReplacement(s, signers);
		}

		//
		// signerInformation store replacement test.
		//
		// Replaces the signer store of a signed-data object via
		// CmsSignedData.ReplaceSigners and re-verifies every signer.
		private void CheckSignerStoreReplacement(
			CmsSignedData			orig,
			SignerInformationStore	signers)
		{
			CmsSignedData s = CmsSignedData.ReplaceSigners(orig, signers);

			IX509Store x509Certs = s.GetCertificates("Collection");

			signers = s.GetSignerInfos();
			ICollection c = signers.GetSigners();

			foreach (SignerInformation signer in c)
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.IsTrue(signer.Verify(cert));
			}
		}

		// Verifies a captured signature whose attribute set is stored out of
		// canonical order (the disorderedMessage/disorderedSet fixtures).
		[Test]
		public void TestUnsortedAttributes()
		{
			CmsSignedData s = new CmsSignedData(new CmsProcessableByteArray(disorderedMessage), disorderedSet);

			IX509Store x509Certs = s.GetCertificates("Collection");

			SignerInformationStore signers = s.GetSignerInfos();
			ICollection c = signers.GetSigners();

			foreach (SignerInformation signer in c)
			{
				ICollection certCollection = x509Certs.GetMatches(signer.SignerID);

				IEnumerator certEnum = certCollection.GetEnumerator();
				certEnum.MoveNext();
				X509Certificate cert = (X509Certificate) certEnum.Current;

				Assert.IsTrue(signer.Verify(cert));
			}
		}

		// Generates a signature over null content and checks it still verifies
		// after a round trip through its encoding.
		[Test]
		public void TestNullContentWithSigner()
		{
			IList certList = new ArrayList();

			certList.Add(OrigCert);
			certList.Add(SignCert);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1);

			gen.AddCertificates(x509Certs);

			CmsSignedData s = gen.Generate(null, false);

			s = new CmsSignedData(ContentInfo.GetInstance(Asn1Object.FromByteArray(s.GetEncoded())));

			VerifySignatures(s);
		}

		// Adds an attribute certificate to the signed data, checks it comes back
		// from the store, then replaces the certificate store and re-verifies.
		[Test]
		public void TestWithAttributeCertificate()
		{
			IList certList = new ArrayList();
			CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!"));

			certList.Add(SignDsaCert);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1);

			gen.AddCertificates(x509Certs);

			IX509AttributeCertificate attrCert = CmsTestUtil.GetAttributeCertificate();

			ArrayList attrCerts = new ArrayList();
			attrCerts.Add(attrCert);

			IX509Store store = X509StoreFactory.Create(
				"AttributeCertificate/Collection",
				new X509CollectionStoreParameters(attrCerts));

			gen.AddAttributeCertificates(store);

			CmsSignedData sd = gen.Generate(msg);

			// Expect version 4 once an attribute certificate is present.
			Assert.AreEqual(4, sd.Version);

			store = sd.GetAttributeCertificates("Collection");

			ArrayList coll = new ArrayList(store.GetMatches(null));

			Assert.AreEqual(1, coll.Count);

			Assert.IsTrue(coll.Contains(attrCert));

			//
			// create new certstore
			//
			certList = new ArrayList();
			certList.Add(OrigCert);
			certList.Add(SignCert);

			x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			//
			// replace certs
			//
			sd = CmsSignedData.ReplaceCertificatesAndCrls(sd, x509Certs, null, null);

			VerifySignatures(sd);
		}

		// Builds a signature, swaps in a new certificate store via
		// ReplaceCertificatesAndCrls, and checks the signature still verifies.
		[Test]
		public void TestCertStoreReplacement()
		{
			IList certList = new ArrayList();
			CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!"));

			certList.Add(SignDsaCert);

			IX509Store x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			CmsSignedDataGenerator gen = new CmsSignedDataGenerator();

			gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1);

			gen.AddCertificates(x509Certs);

			CmsSignedData sd = gen.Generate(msg);

			//
			// create new certstore
			//
			certList = new ArrayList();
			certList.Add(OrigCert);
			certList.Add(SignCert);

			x509Certs = X509StoreFactory.Create(
				"Certificate/Collection",
				new X509CollectionStoreParameters(certList));

			//
			// replace certs
			//
			sd = CmsSignedData.ReplaceCertificatesAndCrls(sd, x509Certs, null, null);

			VerifySignatures(sd);
		}

		[Test]
public void TestEncapsulatedCertStoreReplacement() { IList certList = new ArrayList(); CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!")); certList.Add(SignDsaCert); IX509Store x509Certs = X509StoreFactory.Create( "Certificate/Collection", new X509CollectionStoreParameters(certList)); CmsSignedDataGenerator gen = new CmsSignedDataGenerator(); gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1); gen.AddCertificates(x509Certs); CmsSignedData sd = gen.Generate(msg, true); // // create new certstore // certList = new ArrayList(); certList.Add(OrigCert); certList.Add(SignCert); x509Certs = X509StoreFactory.Create( "Certificate/Collection", new X509CollectionStoreParameters(certList)); // // replace certs // sd = CmsSignedData.ReplaceCertificatesAndCrls(sd, x509Certs, null, null); VerifySignatures(sd); } [Test] public void TestCertOrdering1() { IList certList = new ArrayList(); CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!")); certList.Add(OrigCert); certList.Add(SignCert); certList.Add(SignDsaCert); IX509Store x509Certs = X509StoreFactory.Create( "Certificate/Collection", new X509CollectionStoreParameters(certList)); CmsSignedDataGenerator gen = new CmsSignedDataGenerator(); gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1); gen.AddCertificates(x509Certs); CmsSignedData sd = gen.Generate(msg, true); x509Certs = sd.GetCertificates("Collection"); ArrayList a = new ArrayList(x509Certs.GetMatches(null)); Assert.AreEqual(3, a.Count); Assert.AreEqual(OrigCert, a[0]); Assert.AreEqual(SignCert, a[1]); Assert.AreEqual(SignDsaCert, a[2]); } [Test] public void TestCertOrdering2() { IList certList = new ArrayList(); CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!")); certList.Add(SignCert); certList.Add(SignDsaCert); certList.Add(OrigCert); IX509Store x509Certs = X509StoreFactory.Create( "Certificate/Collection", new 
X509CollectionStoreParameters(certList)); CmsSignedDataGenerator gen = new CmsSignedDataGenerator(); gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1); gen.AddCertificates(x509Certs); CmsSignedData sd = gen.Generate(msg, true); x509Certs = sd.GetCertificates("Collection"); ArrayList a = new ArrayList(x509Certs.GetMatches(null)); Assert.AreEqual(3, a.Count); Assert.AreEqual(SignCert, a[0]); Assert.AreEqual(SignDsaCert, a[1]); Assert.AreEqual(OrigCert, a[2]); } [Test] public void TestSignerStoreReplacement() { IList certList = new ArrayList(); CmsProcessable msg = new CmsProcessableByteArray(Encoding.ASCII.GetBytes("Hello World!")); certList.Add(OrigCert); certList.Add(SignCert); IX509Store x509Certs = X509StoreFactory.Create( "Certificate/Collection", new X509CollectionStoreParameters(certList)); CmsSignedDataGenerator gen = new CmsSignedDataGenerator(); gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha1); gen.AddCertificates(x509Certs); CmsSignedData original = gen.Generate(msg, true); // // create new Signer // gen = new CmsSignedDataGenerator(); gen.AddSigner(OrigKP.Private, OrigCert, CmsSignedDataGenerator.DigestSha224); gen.AddCertificates(x509Certs); CmsSignedData newSD = gen.Generate(msg, true); // // replace signer // CmsSignedData sd = CmsSignedData.ReplaceSigners(original, newSD.GetSignerInfos()); IEnumerator signerEnum = sd.GetSignerInfos().GetSigners().GetEnumerator(); signerEnum.MoveNext(); SignerInformation signer = (SignerInformation) signerEnum.Current; Assert.AreEqual(CmsSignedDataGenerator.DigestSha224, signer.DigestAlgOid); // we use a parser here as it requires the digests to be correct in the digest set, if it // isn't we'll get a NullPointerException CmsSignedDataParser sp = new CmsSignedDataParser(sd.GetEncoded()); sp.GetSignedContent().Drain(); VerifySignatures(sp); } [Test] public void TestEncapsulatedSamples() { doTestSample("PSSSignDataSHA1Enc.sig"); doTestSample("PSSSignDataSHA256Enc.sig"); 
doTestSample("PSSSignDataSHA512Enc.sig"); } [Test] public void TestSamples() { doTestSample("PSSSignData.data", "PSSSignDataSHA1.sig"); doTestSample("PSSSignData.data", "PSSSignDataSHA256.sig"); doTestSample("PSSSignData.data", "PSSSignDataSHA512.sig"); } [Test] public void TestCounterSig() { CmsSignedData sig = new CmsSignedData(GetInput("counterSig.p7m")); SignerInformationStore ss = sig.GetSignerInfos(); ArrayList signers = new ArrayList(ss.GetSigners()); SignerInformationStore cs = ((SignerInformation)signers[0]).GetCounterSignatures(); ArrayList csSigners = new ArrayList(cs.GetSigners()); Assert.AreEqual(1, csSigners.Count); foreach (SignerInformation cSigner in csSigners) { ArrayList certCollection = new ArrayList( sig.GetCertificates("Collection").GetMatches(cSigner.SignerID)); X509Certificate cert = (X509Certificate)certCollection[0]; Assert.IsNull(cSigner.SignedAttributes[Asn1.Pkcs.PkcsObjectIdentifiers.Pkcs9AtContentType]); Assert.IsTrue(cSigner.Verify(cert)); } VerifySignatures(sig); } private void doTestSample( string sigName) { CmsSignedData sig = new CmsSignedData(GetInput(sigName)); VerifySignatures(sig); } private void doTestSample( string messageName, string sigName) { CmsSignedData sig = new CmsSignedData( new CmsProcessableByteArray(GetInput(messageName)), GetInput(sigName)); VerifySignatures(sig); } private byte[] GetInput( string name) { return Streams.ReadAll(SimpleTest.GetTestDataAsStream("cms.sigs." 
+ name)); } [Test] public void TestForMultipleCounterSignatures() { CmsSignedData sd = new CmsSignedData(xtraCounterSig); foreach (SignerInformation sigI in sd.GetSignerInfos().GetSigners()) { SignerInformationStore counter = sigI.GetCounterSignatures(); IList sigs = new ArrayList(counter.GetSigners()); Assert.AreEqual(2, sigs.Count); } } private void VerifySignatures( CmsSignedDataParser sp) { IX509Store x509Certs = sp.GetCertificates("Collection"); SignerInformationStore signers = sp.GetSignerInfos(); foreach (SignerInformation signer in signers.GetSigners()) { ICollection certCollection = x509Certs.GetMatches(signer.SignerID); IEnumerator certEnum = certCollection.GetEnumerator(); certEnum.MoveNext(); X509Certificate cert = (X509Certificate)certEnum.Current; Assert.IsTrue(signer.Verify(cert)); } } } }
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/iam/v1/policy.proto
// NOTE(review): generated file — do not hand-edit; regenerate from policy.proto instead.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Cloud.Iam.V1 {

  /// <summary>Holder for reflection information generated from google/iam/v1/policy.proto</summary>
  public static partial class PolicyReflection {

    #region Descriptor
    /// <summary>File descriptor for google/iam/v1/policy.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static PolicyReflection() {
      // Serialized FileDescriptorProto for policy.proto, base64-encoded.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "Chpnb29nbGUvaWFtL3YxL3BvbGljeS5wcm90bxINZ29vZ2xlLmlhbS52MRoc",
            "Z29vZ2xlL2FwaS9hbm5vdGF0aW9ucy5wcm90byJRCgZQb2xpY3kSDwoHdmVy",
            "c2lvbhgBIAEoBRIoCghiaW5kaW5ncxgEIAMoCzIWLmdvb2dsZS5pYW0udjEu",
            "QmluZGluZxIMCgRldGFnGAMgASgMIigKB0JpbmRpbmcSDAoEcm9sZRgBIAEo",
            "CRIPCgdtZW1iZXJzGAIgAygJIkIKC1BvbGljeURlbHRhEjMKDmJpbmRpbmdf",
            "ZGVsdGFzGAEgAygLMhsuZ29vZ2xlLmlhbS52MS5CaW5kaW5nRGVsdGEilwEK",
            "DEJpbmRpbmdEZWx0YRIyCgZhY3Rpb24YASABKA4yIi5nb29nbGUuaWFtLnYx",
            "LkJpbmRpbmdEZWx0YS5BY3Rpb24SDAoEcm9sZRgCIAEoCRIOCgZtZW1iZXIY",
            "AyABKAkiNQoGQWN0aW9uEhYKEkFDVElPTl9VTlNQRUNJRklFRBAAEgcKA0FE",
            "RBABEgoKBlJFTU9WRRACQoMBChFjb20uZ29vZ2xlLmlhbS52MUILUG9saWN5",
            "UHJvdG9QAVowZ29vZ2xlLmdvbGFuZy5vcmcvZ2VucHJvdG8vZ29vZ2xlYXBp",
            "cy9pYW0vdjE7aWFt+AEBqgITR29vZ2xlLkNsb3VkLklhbS5WMcoCE0dvb2ds",
            "ZVxDbG91ZFxJYW1cVjFiBnByb3RvMw=="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { global::Google.Api.AnnotationsReflection.Descriptor, },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Iam.V1.Policy), global::Google.Cloud.Iam.V1.Policy.Parser, new[]{ "Version", "Bindings", "Etag" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Iam.V1.Binding), global::Google.Cloud.Iam.V1.Binding.Parser, new[]{ "Role", "Members" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Iam.V1.PolicyDelta), global::Google.Cloud.Iam.V1.PolicyDelta.Parser, new[]{ "BindingDeltas" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Iam.V1.BindingDelta), global::Google.Cloud.Iam.V1.BindingDelta.Parser, new[]{ "Action", "Role", "Member" }, null, new[]{ typeof(global::Google.Cloud.Iam.V1.BindingDelta.Types.Action) }, null)
          }));
    }
    #endregion

  }
  #region Messages
  /// <summary>
  /// Defines an Identity and Access Management (IAM) policy. It is used to
  /// specify access control policies for Cloud Platform resources.
  ///
  /// A `Policy` consists of a list of `bindings`. A `Binding` binds a list of
  /// `members` to a `role`, where the members can be user accounts, Google groups,
  /// Google domains, and service accounts. A `role` is a named list of permissions
  /// defined by IAM.
  ///
  /// **Example**
  ///
  ///     {
  ///       "bindings": [
  ///         {
  ///           "role": "roles/owner",
  ///           "members": [
  ///             "user:mike@example.com",
  ///             "group:admins@example.com",
  ///             "domain:google.com",
  ///             "serviceAccount:my-other-app@appspot.gserviceaccount.com",
  ///           ]
  ///         },
  ///         {
  ///           "role": "roles/viewer",
  ///           "members": ["user:sean@example.com"]
  ///         }
  ///       ]
  ///     }
  ///
  /// For a description of IAM and its features, see the
  /// [IAM developer's guide](https://cloud.google.com/iam).
  /// </summary>
  public sealed partial class Policy : pb::IMessage<Policy> {
    private static readonly pb::MessageParser<Policy> _parser = new pb::MessageParser<Policy>(() => new Policy());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<Policy> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Iam.V1.PolicyReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Policy() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Policy(Policy other) : this() {
      version_ = other.version_;
      bindings_ = other.bindings_.Clone();
      etag_ = other.etag_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Policy Clone() {
      return new Policy(this);
    }

    /// <summary>Field number for the "version" field.</summary>
    public const int VersionFieldNumber = 1;
    private int version_;
    /// <summary>
    /// Version of the `Policy`. The default version is 0.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int Version {
      get { return version_; }
      set {
        version_ = value;
      }
    }

    /// <summary>Field number for the "bindings" field.</summary>
    public const int BindingsFieldNumber = 4;
    private static readonly pb::FieldCodec<global::Google.Cloud.Iam.V1.Binding> _repeated_bindings_codec
        = pb::FieldCodec.ForMessage(34, global::Google.Cloud.Iam.V1.Binding.Parser);
    private readonly pbc::RepeatedField<global::Google.Cloud.Iam.V1.Binding> bindings_ = new pbc::RepeatedField<global::Google.Cloud.Iam.V1.Binding>();
    /// <summary>
    /// Associates a list of `members` to a `role`.
    /// Multiple `bindings` must not be specified for the same `role`.
    /// `bindings` with no members will result in an error.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<global::Google.Cloud.Iam.V1.Binding> Bindings {
      get { return bindings_; }
    }

    /// <summary>Field number for the "etag" field.</summary>
    public const int EtagFieldNumber = 3;
    private pb::ByteString etag_ = pb::ByteString.Empty;
    /// <summary>
    /// `etag` is used for optimistic concurrency control as a way to help
    /// prevent simultaneous updates of a policy from overwriting each other.
    /// It is strongly suggested that systems make use of the `etag` in the
    /// read-modify-write cycle to perform policy updates in order to avoid race
    /// conditions: An `etag` is returned in the response to `getIamPolicy`, and
    /// systems are expected to put that etag in the request to `setIamPolicy` to
    /// ensure that their change will be applied to the same version of the policy.
    ///
    /// If no `etag` is provided in the call to `setIamPolicy`, then the existing
    /// policy is overwritten blindly.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pb::ByteString Etag {
      get { return etag_; }
      set {
        etag_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as Policy);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(Policy other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Version != other.Version) return false;
      if (!bindings_.Equals(other.bindings_)) return false;
      if (Etag != other.Etag) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Version != 0) hash ^= Version.GetHashCode();
      hash ^= bindings_.GetHashCode();
      if (Etag.Length != 0) hash ^= Etag.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Fields are emitted in ascending field-number order: version (1), etag (3), bindings (4).
      if (Version != 0) {
        output.WriteRawTag(8);
        output.WriteInt32(Version);
      }
      if (Etag.Length != 0) {
        output.WriteRawTag(26);
        output.WriteBytes(Etag);
      }
      bindings_.WriteTo(output, _repeated_bindings_codec);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Version != 0) {
        size += 1 + pb::CodedOutputStream.ComputeInt32Size(Version);
      }
      size += bindings_.CalculateSize(_repeated_bindings_codec);
      if (Etag.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(Etag);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(Policy other) {
      if (other == null) {
        return;
      }
      if (other.Version != 0) {
        Version = other.Version;
      }
      // Repeated fields are concatenated, not replaced, per protobuf merge semantics.
      bindings_.Add(other.bindings_);
      if (other.Etag.Length != 0) {
        Etag = other.Etag;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 8: {
            Version = input.ReadInt32();
            break;
          }
          case 26: {
            Etag = input.ReadBytes();
            break;
          }
          case 34: {
            bindings_.AddEntriesFrom(input, _repeated_bindings_codec);
            break;
          }
        }
      }
    }

  }

  /// <summary>
  /// Associates `members` with a `role`.
  /// </summary>
  public sealed partial class Binding : pb::IMessage<Binding> {
    private static readonly pb::MessageParser<Binding> _parser = new pb::MessageParser<Binding>(() => new Binding());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<Binding> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Iam.V1.PolicyReflection.Descriptor.MessageTypes[1]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Binding() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Binding(Binding other) : this() {
      role_ = other.role_;
      members_ = other.members_.Clone();
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Binding Clone() {
      return new Binding(this);
    }

    /// <summary>Field number for the "role" field.</summary>
    public const int RoleFieldNumber = 1;
    private string role_ = "";
    /// <summary>
    /// Role that is assigned to `members`.
    /// For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
    /// Required
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Role {
      get { return role_; }
      set {
        role_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "members" field.</summary>
    public const int MembersFieldNumber = 2;
    private static readonly pb::FieldCodec<string> _repeated_members_codec
        = pb::FieldCodec.ForString(18);
    private readonly pbc::RepeatedField<string> members_ = new pbc::RepeatedField<string>();
    /// <summary>
    /// Specifies the identities requesting access for a Cloud Platform resource.
    /// `members` can have the following values:
    ///
    /// * `allUsers`: A special identifier that represents anyone who is
    ///    on the internet; with or without a Google account.
    ///
    /// * `allAuthenticatedUsers`: A special identifier that represents anyone
    ///    who is authenticated with a Google account or a service account.
    ///
    /// * `user:{emailid}`: An email address that represents a specific Google
    ///    account. For example, `alice@gmail.com` or `joe@example.com`.
    ///
    /// * `serviceAccount:{emailid}`: An email address that represents a service
    ///    account. For example, `my-other-app@appspot.gserviceaccount.com`.
    ///
    /// * `group:{emailid}`: An email address that represents a Google group.
    ///    For example, `admins@example.com`.
    ///
    /// * `domain:{domain}`: A Google Apps domain name that represents all the
    ///    users of that domain. For example, `google.com` or `example.com`.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<string> Members {
      get { return members_; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as Binding);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(Binding other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Role != other.Role) return false;
      if (!members_.Equals(other.members_)) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Role.Length != 0) hash ^= Role.GetHashCode();
      hash ^= members_.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (Role.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Role);
      }
      members_.WriteTo(output, _repeated_members_codec);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Role.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Role);
      }
      size += members_.CalculateSize(_repeated_members_codec);
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(Binding other) {
      if (other == null) {
        return;
      }
      if (other.Role.Length != 0) {
        Role = other.Role;
      }
      members_.Add(other.members_);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            Role = input.ReadString();
            break;
          }
          case 18: {
            members_.AddEntriesFrom(input, _repeated_members_codec);
            break;
          }
        }
      }
    }

  }

  /// <summary>
  /// The difference delta between two policies.
  /// </summary>
  public sealed partial class PolicyDelta : pb::IMessage<PolicyDelta> {
    private static readonly pb::MessageParser<PolicyDelta> _parser = new pb::MessageParser<PolicyDelta>(() => new PolicyDelta());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<PolicyDelta> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Iam.V1.PolicyReflection.Descriptor.MessageTypes[2]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public PolicyDelta() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public PolicyDelta(PolicyDelta other) : this() {
      bindingDeltas_ = other.bindingDeltas_.Clone();
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public PolicyDelta Clone() {
      return new PolicyDelta(this);
    }

    /// <summary>Field number for the "binding_deltas" field.</summary>
    public const int BindingDeltasFieldNumber = 1;
    private static readonly pb::FieldCodec<global::Google.Cloud.Iam.V1.BindingDelta> _repeated_bindingDeltas_codec
        = pb::FieldCodec.ForMessage(10, global::Google.Cloud.Iam.V1.BindingDelta.Parser);
    private readonly pbc::RepeatedField<global::Google.Cloud.Iam.V1.BindingDelta> bindingDeltas_ = new pbc::RepeatedField<global::Google.Cloud.Iam.V1.BindingDelta>();
    /// <summary>
    /// The delta for Bindings between two policies.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<global::Google.Cloud.Iam.V1.BindingDelta> BindingDeltas {
      get { return bindingDeltas_; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as PolicyDelta);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(PolicyDelta other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (!bindingDeltas_.Equals(other.bindingDeltas_)) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      hash ^= bindingDeltas_.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      bindingDeltas_.WriteTo(output, _repeated_bindingDeltas_codec);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      size += bindingDeltas_.CalculateSize(_repeated_bindingDeltas_codec);
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(PolicyDelta other) {
      if (other == null) {
        return;
      }
      bindingDeltas_.Add(other.bindingDeltas_);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            bindingDeltas_.AddEntriesFrom(input, _repeated_bindingDeltas_codec);
            break;
          }
        }
      }
    }

  }

  /// <summary>
  /// One delta entry for Binding. Each individual change (only one member in each
  /// entry) to a binding will be a separate entry.
  /// </summary>
  public sealed partial class BindingDelta : pb::IMessage<BindingDelta> {
    private static readonly pb::MessageParser<BindingDelta> _parser = new pb::MessageParser<BindingDelta>(() => new BindingDelta());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<BindingDelta> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Iam.V1.PolicyReflection.Descriptor.MessageTypes[3]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public BindingDelta() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public BindingDelta(BindingDelta other) : this() {
      action_ = other.action_;
      role_ = other.role_;
      member_ = other.member_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public BindingDelta Clone() {
      return new BindingDelta(this);
    }

    /// <summary>Field number for the "action" field.</summary>
    public const int ActionFieldNumber = 1;
    private global::Google.Cloud.Iam.V1.BindingDelta.Types.Action action_ = 0;
    /// <summary>
    /// The action that was performed on a Binding.
    /// Required
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Iam.V1.BindingDelta.Types.Action Action {
      get { return action_; }
      set {
        action_ = value;
      }
    }

    /// <summary>Field number for the "role" field.</summary>
    public const int RoleFieldNumber = 2;
    private string role_ = "";
    /// <summary>
    /// Role that is assigned to `members`.
    /// For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
    /// Required
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Role {
      get { return role_; }
      set {
        role_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "member" field.</summary>
    public const int MemberFieldNumber = 3;
    private string member_ = "";
    /// <summary>
    /// A single identity requesting access for a Cloud Platform resource.
    /// Follows the same format of Binding.members.
    /// Required
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Member {
      get { return member_; }
      set {
        member_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as BindingDelta);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(BindingDelta other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Action != other.Action) return false;
      if (Role != other.Role) return false;
      if (Member != other.Member) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Action != 0) hash ^= Action.GetHashCode();
      if (Role.Length != 0) hash ^= Role.GetHashCode();
      if (Member.Length != 0) hash ^= Member.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (Action != 0) {
        output.WriteRawTag(8);
        output.WriteEnum((int) Action);
      }
      if (Role.Length != 0) {
        output.WriteRawTag(18);
        output.WriteString(Role);
      }
      if (Member.Length != 0) {
        output.WriteRawTag(26);
        output.WriteString(Member);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Action != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Action);
      }
      if (Role.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Role);
      }
      if (Member.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Member);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(BindingDelta other) {
      if (other == null) {
        return;
      }
      if (other.Action != 0) {
        Action = other.Action;
      }
      if (other.Role.Length != 0) {
        Role = other.Role;
      }
      if (other.Member.Length != 0) {
        Member = other.Member;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 8: {
            action_ = (global::Google.Cloud.Iam.V1.BindingDelta.Types.Action) input.ReadEnum();
            break;
          }
          case 18: {
            Role = input.ReadString();
            break;
          }
          case 26: {
            Member = input.ReadString();
            break;
          }
        }
      }
    }

    #region Nested types
    /// <summary>Container for nested types declared in the BindingDelta message type.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static partial class Types {
      /// <summary>
      /// The type of action performed on a Binding in a policy.
      /// </summary>
      public enum Action {
        /// <summary>
        /// Unspecified.
        /// </summary>
        [pbr::OriginalName("ACTION_UNSPECIFIED")] Unspecified = 0,
        /// <summary>
        /// Addition of a Binding.
        /// </summary>
        [pbr::OriginalName("ADD")] Add = 1,
        /// <summary>
        /// Removal of a Binding.
        /// </summary>
        [pbr::OriginalName("REMOVE")] Remove = 2,
      }
    }
    #endregion

  }

  #endregion

}

#endregion Designer generated code
using System; using System.Drawing; using System.Collections; using System.ComponentModel; using System.Windows.Forms; using System.Data; using System.IO; using System.Configuration; using DowReplayManager.NET.Readers; using DowReplayManager.NET.Types; using DowReplayManager.NET.Code; using DowReplayManager.NET.Handlers; namespace DowReplayManager.NET { /// <summary> /// Summary description for frmMain. /// </summary> public class frmMain : System.Windows.Forms.Form { private System.Windows.Forms.Label label1; private System.Windows.Forms.TextBox txtSearch; private System.Windows.Forms.ColumnHeader chPlayerGames; private System.Windows.Forms.ColumnHeader chPlayerRank; private System.Windows.Forms.TreeView tvCategories; private System.Windows.Forms.ColumnHeader chMapRank; private System.Windows.Forms.ColumnHeader chMapGames; private StoreReader storeReader; private System.Windows.Forms.ColumnHeader chReplayName; private System.Windows.Forms.ListView lvReplays; private System.Windows.Forms.ListView lvPlayers; private System.Windows.Forms.ListView lvMaps; private System.Windows.Forms.ColumnHeader chReplayDateAdded; private System.Windows.Forms.ColumnHeader chReplayDateModified; private System.Windows.Forms.ColumnHeader chReplayFile; private System.Windows.Forms.Button btnClearSearch; private System.Windows.Forms.ColumnHeader chPlayerName; private DataView dvPlayers; private DataView dvMaps; private DataView dvReplays; private ReplayManager replayManager; private System.Windows.Forms.ColumnHeader chMapName; private System.Windows.Forms.ContextMenu cmPReplay; private System.Windows.Forms.MenuItem cmPRename; private System.Windows.Forms.MenuItem cmPFileRename; private System.Windows.Forms.MenuItem cmPAvailable; private System.Windows.Forms.MenuItem menuItem6; private System.Windows.Forms.ContextMenu cmPCat; private int selectedCategory; private System.Windows.Forms.MenuItem menuItem4; private System.Windows.Forms.MenuItem menuItem7; private System.Windows.Forms.MenuItem 
menuItem8; private System.Windows.Forms.MenuItem miPCatRename; private System.Windows.Forms.MenuItem miPCatNew; private System.Windows.Forms.MenuItem miPCatDelete; private System.Windows.Forms.MenuItem cmPReplayView; private System.Windows.Forms.MenuItem menuItem2; private System.Windows.Forms.NotifyIcon niMain; private System.Windows.Forms.ContextMenu cmNotify; private System.Windows.Forms.MenuItem menuItem3; private System.Windows.Forms.MenuItem miShow; private System.Windows.Forms.MenuItem miMaskRename; private System.ComponentModel.IContainer components; private System.Windows.Forms.MenuItem cmPDelete; private System.Windows.Forms.StatusBarPanel sbpMessage; private System.Windows.Forms.Splitter splitStatusMain; private System.Windows.Forms.Panel pnlMain; private System.Windows.Forms.Panel pnlTopToolbar; private System.Windows.Forms.Splitter splitter2; private System.Windows.Forms.Panel pnlReplayDetail; private System.Windows.Forms.Splitter splitter3; private System.Windows.Forms.Panel pnlCategories; private System.Windows.Forms.Splitter splitter4; private System.Windows.Forms.Panel pnlViewMain; private System.Windows.Forms.Splitter splitter5; private System.Windows.Forms.Panel pnlViewTop; private System.Windows.Forms.Splitter splitter6; private System.Windows.Forms.Panel pnlMainTopRight; private System.Windows.Forms.ToolBar tbMain; private System.Windows.Forms.Splitter splitter1; private System.Windows.Forms.Panel panel1; private System.Windows.Forms.StatusBar sbMain; private System.Windows.Forms.StatusBarPanel sbpNumReplays; private System.Windows.Forms.StatusBarPanel sbpBlank; private Logging log = null; public frmMain() { InitializeComponent(); log = new Logging("DoWRM.log"); log.Open(); if (ConfigurationSettings.AppSettings["LogLevel"] != null) log.LogLevel = Convert.ToInt32(ConfigurationSettings.AppSettings["LogLevel"]); log.Write(LogType.Info, 5, "Logging started..."); //create the replay manager log.Write(LogType.Info, 5, "Create ReplayManager object"); 
// Create the replay manager rooted at the executable's folder, archiving into Replays.zip.
replayManager = new ReplayManager(Application.ExecutablePath, "Replays.zip");
replayManager.log = log;

//poll for new replays in the playback folder
log.Write(LogType.Info, 5, "Polling replay folder (" + replayManager.DoWPlaybackFolder + ")...");
object[] replayHashes = replayManager.PollPlaybackFolder(false);
// NOTE(review): the "replayHashes != null" check below implies PollPlaybackFolder may
// return null, in which case this .Length access would throw — confirm its contract.
log.Write(LogType.Info, 5, "Found " + replayHashes.Length.ToString() + " replays");

// Load the persistent category/replay store from disk.
log.Write(LogType.Info, 5, "Create StoreReader object");
storeReader = new StoreReader("cat.dat", "rec.dat");
storeReader.log = log;
storeReader.Read();

// Determine which of the polled replays are not yet in the store.
object[] newreplays = null;
if (replayHashes != null)
{
    newreplays = storeReader.HasHashes(replayHashes);
    log.Write(LogType.Info, 5, "Found " + newreplays.Length.ToString() + " NEW replay(s)");
}

// Add any replays we don't already have to the file store.
if (newreplays != null && newreplays.Length > 0)
{
    log.Write(LogType.Info, 5, "Adding replay(s) to store...");
    replayManager.StoreReader = storeReader;
    replayManager.Archive = ReplayManager.ArchiveType.FileStore;
    replayManager.AddReplays(newreplays);
}

// Record which replays are currently present ("Available") in the playback folder.
log.Write(LogType.Info, 5, "Check for Available replays...");
Methods.htAvailable = replayManager.GetAvailable();
if (Methods.htAvailable != null)
    log.Write(LogType.Info, 5, "Found " + Methods.htAvailable.Count.ToString() + " Available replays");

// Build the data views and bind them to the category tree and the three list views.
log.Write(LogType.Info, 5, "Creating and populating views...");
Methods.PopulateCategoriesView(tvCategories, storeReader);
dvPlayers = Methods.CreatePlayerDataView(storeReader, storeReader.Replays);
Methods.PopulatePlayersView(lvPlayers, dvPlayers, null, "Games DESC");
dvMaps = Methods.CreateMapDataView(storeReader, storeReader.Replays);
Methods.PopulateMapsView(lvMaps, dvMaps, null, "Games DESC");
dvReplays = Methods.CreateReplayDataView(storeReader, storeReader.Replays);
Methods.PopulateReplaysView(lvReplays, dvReplays, null, "Added DESC");

//set the replay number in the status bar
System.Text.StringBuilder sNumReplays = new
System.Text.StringBuilder();
sNumReplays.Append("Replays: ");
sNumReplays.Append(storeReader.Replays.Replay.Count.ToString());
sbpNumReplays.Text = sNumReplays.ToString();
// Commit pending DataSet changes (original author note: "dont think i need these").
storeReader.Replays.AcceptChanges();
storeReader.Categories.AcceptChanges();
}

/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose( bool disposing )
{
    // NOTE(review): log.Close() runs unconditionally — including on the
    // disposing==false (finalizer) path and on repeated Dispose calls.
    // Confirm Logging.Close() is idempotent, or move it inside the
    // "if (disposing)" guard.
    log.Close();
    if( disposing )
    {
        if (components != null)
        {
            components.Dispose();
        }
    }
    base.Dispose( disposing );
}

#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
    this.components = new System.ComponentModel.Container();
    System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(frmMain));
    this.tvCategories = new System.Windows.Forms.TreeView();
    this.cmsPCategories = new System.Windows.Forms.ContextMenuStrip(this.components);
    this.miNewCategory = new System.Windows.Forms.ToolStripMenuItem();
    this.toolStripSeparator4 = new System.Windows.Forms.ToolStripSeparator();
    this.miRenameCategory = new System.Windows.Forms.ToolStripMenuItem();
    this.miDeleteCategory = new System.Windows.Forms.ToolStripMenuItem();
    this.toolStripSeparator1 = new System.Windows.Forms.ToolStripSeparator();
    this.makeDefaultToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
    this.cmPCat = new System.Windows.Forms.ContextMenu();
    this.miPCatRename = new System.Windows.Forms.MenuItem();
    this.menuItem4 = new System.Windows.Forms.MenuItem();
    this.miPCatNew = new System.Windows.Forms.MenuItem();
    this.miPCatDelete = new System.Windows.Forms.MenuItem();
    this.menuItem7 = new System.Windows.Forms.MenuItem();
    this.menuItem8 = new System.Windows.Forms.MenuItem();
    this.label1 = new System.Windows.Forms.Label();
    this.txtSearch = new System.Windows.Forms.TextBox();
    this.lvReplays = new
System.Windows.Forms.ListView(); this.chReplayName = new System.Windows.Forms.ColumnHeader(); this.chReplayDateModified = new System.Windows.Forms.ColumnHeader(); this.chReplayDateAdded = new System.Windows.Forms.ColumnHeader(); this.chReplayFile = new System.Windows.Forms.ColumnHeader(); this.cmsPReplays = new System.Windows.Forms.ContextMenuStrip(this.components); this.miAvailableReplay = new System.Windows.Forms.ToolStripMenuItem(); this.toolStripSeparator3 = new System.Windows.Forms.ToolStripSeparator(); this.miRenameReplay = new System.Windows.Forms.ToolStripMenuItem(); this.replayRenameToolStripMenuItem1 = new System.Windows.Forms.ToolStripMenuItem(); this.customRenameToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.toolStripSeparator2 = new System.Windows.Forms.ToolStripSeparator(); this.miDeleteReplay = new System.Windows.Forms.ToolStripMenuItem(); this.cmPReplay = new System.Windows.Forms.ContextMenu(); this.cmPAvailable = new System.Windows.Forms.MenuItem(); this.menuItem6 = new System.Windows.Forms.MenuItem(); this.cmPReplayView = new System.Windows.Forms.MenuItem(); this.menuItem2 = new System.Windows.Forms.MenuItem(); this.cmPDelete = new System.Windows.Forms.MenuItem(); this.cmPRename = new System.Windows.Forms.MenuItem(); this.cmPFileRename = new System.Windows.Forms.MenuItem(); this.miMaskRename = new System.Windows.Forms.MenuItem(); this.lvPlayers = new System.Windows.Forms.ListView(); this.chPlayerRank = new System.Windows.Forms.ColumnHeader(); this.chPlayerName = new System.Windows.Forms.ColumnHeader(); this.chPlayerGames = new System.Windows.Forms.ColumnHeader(); this.lvMaps = new System.Windows.Forms.ListView(); this.chMapRank = new System.Windows.Forms.ColumnHeader(); this.chMapName = new System.Windows.Forms.ColumnHeader(); this.chMapGames = new System.Windows.Forms.ColumnHeader(); this.btnClearSearch = new System.Windows.Forms.Button(); this.niMain = new System.Windows.Forms.NotifyIcon(this.components); this.cmNotify = 
new System.Windows.Forms.ContextMenu(); this.miShow = new System.Windows.Forms.MenuItem(); this.menuItem3 = new System.Windows.Forms.MenuItem(); this.sbMain = new System.Windows.Forms.StatusBar(); this.sbpMessage = new System.Windows.Forms.StatusBarPanel(); this.sbpNumReplays = new System.Windows.Forms.StatusBarPanel(); this.sbpBlank = new System.Windows.Forms.StatusBarPanel(); this.splitStatusMain = new System.Windows.Forms.Splitter(); this.pnlMain = new System.Windows.Forms.Panel(); this.pnlViewMain = new System.Windows.Forms.Panel(); this.pnlViewTop = new System.Windows.Forms.Panel(); this.pnlMainTopRight = new System.Windows.Forms.Panel(); this.splitter6 = new System.Windows.Forms.Splitter(); this.splitter5 = new System.Windows.Forms.Splitter(); this.splitter4 = new System.Windows.Forms.Splitter(); this.pnlCategories = new System.Windows.Forms.Panel(); this.splitter3 = new System.Windows.Forms.Splitter(); this.pnlReplayDetail = new System.Windows.Forms.Panel(); this.splitter2 = new System.Windows.Forms.Splitter(); this.pnlTopToolbar = new System.Windows.Forms.Panel(); this.tbMain = new System.Windows.Forms.ToolBar(); this.splitter1 = new System.Windows.Forms.Splitter(); this.panel1 = new System.Windows.Forms.Panel(); this.cmsPCategories.SuspendLayout(); this.cmsPReplays.SuspendLayout(); ((System.ComponentModel.ISupportInitialize)(this.sbpMessage)).BeginInit(); ((System.ComponentModel.ISupportInitialize)(this.sbpNumReplays)).BeginInit(); ((System.ComponentModel.ISupportInitialize)(this.sbpBlank)).BeginInit(); this.pnlMain.SuspendLayout(); this.pnlViewMain.SuspendLayout(); this.pnlViewTop.SuspendLayout(); this.pnlMainTopRight.SuspendLayout(); this.pnlCategories.SuspendLayout(); this.pnlTopToolbar.SuspendLayout(); this.panel1.SuspendLayout(); this.SuspendLayout(); // // tvCategories // this.tvCategories.AllowDrop = true; this.tvCategories.BackColor = System.Drawing.Color.White; this.tvCategories.ContextMenuStrip = this.cmsPCategories; this.tvCategories.Dock = 
System.Windows.Forms.DockStyle.Fill; this.tvCategories.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.tvCategories.ForeColor = System.Drawing.Color.Black; this.tvCategories.LabelEdit = true; this.tvCategories.Location = new System.Drawing.Point(0, 0); this.tvCategories.Name = "tvCategories"; this.tvCategories.ShowRootLines = false; this.tvCategories.Size = new System.Drawing.Size(153, 293); this.tvCategories.TabIndex = 0; this.tvCategories.DragDrop += new System.Windows.Forms.DragEventHandler(this.tvCategories_DragDrop); this.tvCategories.AfterLabelEdit += new System.Windows.Forms.NodeLabelEditEventHandler(this.tvCategories_AfterLabelEdit); this.tvCategories.AfterSelect += new System.Windows.Forms.TreeViewEventHandler(this.tvCategories_AfterSelect); this.tvCategories.DragEnter += new System.Windows.Forms.DragEventHandler(this.tvCategories_DragEnter); this.tvCategories.BeforeLabelEdit += new System.Windows.Forms.NodeLabelEditEventHandler(this.tvCategories_BeforeLabelEdit); // // cmsPCategories // this.cmsPCategories.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { this.miNewCategory, this.toolStripSeparator4, this.miRenameCategory, this.miDeleteCategory, this.toolStripSeparator1, this.makeDefaultToolStripMenuItem}); this.cmsPCategories.LayoutStyle = System.Windows.Forms.ToolStripLayoutStyle.Table; this.cmsPCategories.Name = "cmsPCategories"; this.cmsPCategories.Size = new System.Drawing.Size(167, 104); // // miNewCategory // this.miNewCategory.Name = "miNewCategory"; this.miNewCategory.Size = new System.Drawing.Size(166, 22); this.miNewCategory.Text = "New Category..."; this.miNewCategory.Click += new System.EventHandler(this.miNewCategoryClick); // // toolStripSeparator4 // this.toolStripSeparator4.Name = "toolStripSeparator4"; this.toolStripSeparator4.Size = new System.Drawing.Size(163, 6); // // miRenameCategory // this.miRenameCategory.Name = "miRenameCategory"; 
this.miRenameCategory.Size = new System.Drawing.Size(166, 22); this.miRenameCategory.Text = "Rename"; this.miRenameCategory.Click += new System.EventHandler(this.miRenameCategoryClick); // // miDeleteCategory // this.miDeleteCategory.Name = "miDeleteCategory"; this.miDeleteCategory.Size = new System.Drawing.Size(166, 22); this.miDeleteCategory.Text = "Delete"; this.miDeleteCategory.Click += new System.EventHandler(this.miDeleteCategoryClick); // // toolStripSeparator1 // this.toolStripSeparator1.Name = "toolStripSeparator1"; this.toolStripSeparator1.Size = new System.Drawing.Size(163, 6); // // makeDefaultToolStripMenuItem // this.makeDefaultToolStripMenuItem.Enabled = false; this.makeDefaultToolStripMenuItem.Name = "makeDefaultToolStripMenuItem"; this.makeDefaultToolStripMenuItem.Size = new System.Drawing.Size(166, 22); this.makeDefaultToolStripMenuItem.Text = "Make Default"; // // cmPCat // this.cmPCat.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] { this.miPCatRename, this.menuItem4, this.miPCatNew, this.miPCatDelete, this.menuItem7, this.menuItem8}); // // miPCatRename // this.miPCatRename.DefaultItem = true; this.miPCatRename.Index = 0; this.miPCatRename.Text = "Rename..."; this.miPCatRename.Click += new System.EventHandler(this.miPCatRename_Click); // // menuItem4 // this.menuItem4.Index = 1; this.menuItem4.Text = "-"; // // miPCatNew // this.miPCatNew.Index = 2; this.miPCatNew.Text = "New Category..."; this.miPCatNew.Click += new System.EventHandler(this.miPNewCat_Click); // // miPCatDelete // this.miPCatDelete.Index = 3; this.miPCatDelete.Text = "Delete Category..."; this.miPCatDelete.Click += new System.EventHandler(this.miPCatDelete_Click); // // menuItem7 // this.menuItem7.Index = 4; this.menuItem7.Text = "-"; // // menuItem8 // this.menuItem8.Enabled = false; this.menuItem8.Index = 5; this.menuItem8.Text = "Make Default"; // // label1 // this.label1.Location = new System.Drawing.Point(165, 9); this.label1.Name = "label1"; this.label1.Size = new 
System.Drawing.Size(48, 16); this.label1.TabIndex = 1; this.label1.Text = "Search:"; // // txtSearch // this.txtSearch.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.txtSearch.BackColor = System.Drawing.Color.White; this.txtSearch.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.txtSearch.ForeColor = System.Drawing.Color.Black; this.txtSearch.Location = new System.Drawing.Point(220, 3); this.txtSearch.Name = "txtSearch"; this.txtSearch.Size = new System.Drawing.Size(414, 21); this.txtSearch.TabIndex = 0; this.txtSearch.TextChanged += new System.EventHandler(this.txtSearch_TextChanged); // // lvReplays // this.lvReplays.AllowColumnReorder = true; this.lvReplays.BackColor = System.Drawing.Color.White; this.lvReplays.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] { this.chReplayName, this.chReplayDateModified, this.chReplayDateAdded, this.chReplayFile}); this.lvReplays.ContextMenuStrip = this.cmsPReplays; this.lvReplays.Dock = System.Windows.Forms.DockStyle.Bottom; this.lvReplays.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.lvReplays.ForeColor = System.Drawing.Color.Black; this.lvReplays.FullRowSelect = true; this.lvReplays.Location = new System.Drawing.Point(0, 134); this.lvReplays.Name = "lvReplays"; this.lvReplays.Size = new System.Drawing.Size(539, 159); this.lvReplays.TabIndex = 5; this.lvReplays.UseCompatibleStateImageBehavior = false; this.lvReplays.View = System.Windows.Forms.View.Details; this.lvReplays.DoubleClick += new System.EventHandler(this.lvReplays_DoubleClick); this.lvReplays.SelectedIndexChanged += new System.EventHandler(this.lvReplays_SelectedIndexChanged); this.lvReplays.KeyDown += new 
System.Windows.Forms.KeyEventHandler(this.lvReplays_KeyDown); this.lvReplays.ColumnClick += new System.Windows.Forms.ColumnClickEventHandler(this.lvReplays_ColumnClick); this.lvReplays.ItemDrag += new System.Windows.Forms.ItemDragEventHandler(this.lvReplays_ItemDrag); // // chReplayName // this.chReplayName.Text = "Name"; this.chReplayName.Width = 197; // // chReplayDateModified // this.chReplayDateModified.Text = "Last Modified"; this.chReplayDateModified.Width = 126; // // chReplayDateAdded // this.chReplayDateAdded.Text = "Date Added"; this.chReplayDateAdded.Width = 125; // // chReplayFile // this.chReplayFile.Text = "Filename"; this.chReplayFile.Width = 380; // // cmsPReplays // this.cmsPReplays.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { this.miAvailableReplay, this.toolStripSeparator3, this.miRenameReplay, this.replayRenameToolStripMenuItem1, this.customRenameToolStripMenuItem, this.toolStripSeparator2, this.miDeleteReplay}); this.cmsPReplays.Name = "cmsPReplays"; this.cmsPReplays.Size = new System.Drawing.Size(176, 148); // // miAvailableReplay // this.miAvailableReplay.CheckOnClick = true; this.miAvailableReplay.Name = "miAvailableReplay"; this.miAvailableReplay.Size = new System.Drawing.Size(175, 22); this.miAvailableReplay.Text = "Available"; this.miAvailableReplay.Click += new System.EventHandler(this.miAvailableReplayClick); // // toolStripSeparator3 // this.toolStripSeparator3.Name = "toolStripSeparator3"; this.toolStripSeparator3.Size = new System.Drawing.Size(172, 6); // // miRenameReplay // this.miRenameReplay.Name = "miRenameReplay"; this.miRenameReplay.Size = new System.Drawing.Size(175, 22); this.miRenameReplay.Text = "Rename"; this.miRenameReplay.Click += new System.EventHandler(this.miRenameReplayClick); // // replayRenameToolStripMenuItem1 // this.replayRenameToolStripMenuItem1.Enabled = false; this.replayRenameToolStripMenuItem1.Name = "replayRenameToolStripMenuItem1"; this.replayRenameToolStripMenuItem1.Size = new 
System.Drawing.Size(175, 22); this.replayRenameToolStripMenuItem1.Text = "Replay Rename..."; this.replayRenameToolStripMenuItem1.Visible = false; // // customRenameToolStripMenuItem // this.customRenameToolStripMenuItem.Enabled = false; this.customRenameToolStripMenuItem.Name = "customRenameToolStripMenuItem"; this.customRenameToolStripMenuItem.Size = new System.Drawing.Size(175, 22); this.customRenameToolStripMenuItem.Text = "Custom Rename..."; this.customRenameToolStripMenuItem.Visible = false; // // toolStripSeparator2 // this.toolStripSeparator2.Name = "toolStripSeparator2"; this.toolStripSeparator2.Size = new System.Drawing.Size(172, 6); // // miDeleteReplay // this.miDeleteReplay.Name = "miDeleteReplay"; this.miDeleteReplay.Size = new System.Drawing.Size(175, 22); this.miDeleteReplay.Text = "Delete"; this.miDeleteReplay.Click += new System.EventHandler(this.miDeleteReplayClick); // // cmPReplay // this.cmPReplay.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] { this.cmPAvailable, this.menuItem6, this.cmPReplayView, this.menuItem2, this.cmPDelete, this.cmPRename, this.cmPFileRename, this.miMaskRename}); // // cmPAvailable // this.cmPAvailable.Index = 0; this.cmPAvailable.Text = "Available"; this.cmPAvailable.Click += new System.EventHandler(this.cmPAvailable_Click); // // menuItem6 // this.menuItem6.Index = 1; this.menuItem6.Text = "-"; // // cmPReplayView // this.cmPReplayView.DefaultItem = true; this.cmPReplayView.Index = 2; this.cmPReplayView.Text = "View"; this.cmPReplayView.Click += new System.EventHandler(this.cmPReplayView_Click); // // menuItem2 // this.menuItem2.Index = 3; this.menuItem2.Text = "-"; // // cmPDelete // this.cmPDelete.Index = 4; this.cmPDelete.Text = "Delete"; this.cmPDelete.Click += new System.EventHandler(this.cmPDelete_Click); // // cmPRename // this.cmPRename.Index = 5; this.cmPRename.Text = "Rename..."; this.cmPRename.Click += new System.EventHandler(this.cmPRename_Click); // // cmPFileRename // this.cmPFileRename.Index = 6; 
this.cmPFileRename.Text = "File Rename..."; this.cmPFileRename.Click += new System.EventHandler(this.cmPFileRename_Click); // // miMaskRename // this.miMaskRename.Index = 7; this.miMaskRename.Text = "Mask Rename..."; this.miMaskRename.Visible = false; this.miMaskRename.Click += new System.EventHandler(this.miMaskRename_Click); // // lvPlayers // this.lvPlayers.BackColor = System.Drawing.Color.White; this.lvPlayers.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] { this.chPlayerRank, this.chPlayerName, this.chPlayerGames}); this.lvPlayers.Dock = System.Windows.Forms.DockStyle.Left; this.lvPlayers.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.lvPlayers.ForeColor = System.Drawing.Color.Black; this.lvPlayers.FullRowSelect = true; this.lvPlayers.Location = new System.Drawing.Point(0, 0); this.lvPlayers.MultiSelect = false; this.lvPlayers.Name = "lvPlayers"; this.lvPlayers.Size = new System.Drawing.Size(262, 131); this.lvPlayers.TabIndex = 6; this.lvPlayers.UseCompatibleStateImageBehavior = false; this.lvPlayers.View = System.Windows.Forms.View.Details; this.lvPlayers.SelectedIndexChanged += new System.EventHandler(this.lvPlayers_SelectedIndexChanged); this.lvPlayers.ColumnClick += new System.Windows.Forms.ColumnClickEventHandler(this.lvPlayers_ColumnClick); // // chPlayerRank // this.chPlayerRank.Text = "#"; this.chPlayerRank.Width = 19; // // chPlayerName // this.chPlayerName.Text = "Player"; this.chPlayerName.Width = 118; // // chPlayerGames // this.chPlayerGames.Text = "Games"; this.chPlayerGames.TextAlign = System.Windows.Forms.HorizontalAlignment.Right; this.chPlayerGames.Width = 47; // // lvMaps // this.lvMaps.BackColor = System.Drawing.Color.White; this.lvMaps.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] { this.chMapRank, this.chMapName, this.chMapGames}); this.lvMaps.Dock = System.Windows.Forms.DockStyle.Fill; this.lvMaps.Font = new 
System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.lvMaps.ForeColor = System.Drawing.Color.Black; this.lvMaps.FullRowSelect = true; this.lvMaps.Location = new System.Drawing.Point(0, 0); this.lvMaps.MultiSelect = false; this.lvMaps.Name = "lvMaps"; this.lvMaps.Size = new System.Drawing.Size(274, 131); this.lvMaps.TabIndex = 7; this.lvMaps.UseCompatibleStateImageBehavior = false; this.lvMaps.View = System.Windows.Forms.View.Details; this.lvMaps.SelectedIndexChanged += new System.EventHandler(this.lvMaps_SelectedIndexChanged); this.lvMaps.ColumnClick += new System.Windows.Forms.ColumnClickEventHandler(this.lvMaps_ColumnClick); // // chMapRank // this.chMapRank.Text = "#"; this.chMapRank.Width = 20; // // chMapName // this.chMapName.Text = "Map"; this.chMapName.Width = 138; // // chMapGames // this.chMapGames.Text = "Games"; this.chMapGames.Width = 48; // // btnClearSearch // this.btnClearSearch.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); this.btnClearSearch.Location = new System.Drawing.Point(639, 6); this.btnClearSearch.Name = "btnClearSearch"; this.btnClearSearch.Size = new System.Drawing.Size(48, 18); this.btnClearSearch.TabIndex = 8; this.btnClearSearch.Text = "Clear"; this.btnClearSearch.Click += new System.EventHandler(this.btnClearSearch_Click); // // niMain // this.niMain.ContextMenu = this.cmNotify; this.niMain.Text = "Replay Manager"; this.niMain.Visible = true; // // cmNotify // this.cmNotify.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] { this.miShow, this.menuItem3}); // // miShow // this.miShow.Index = 0; this.miShow.Text = "Show"; this.miShow.Click += new System.EventHandler(this.miShow_Click); // // menuItem3 // this.menuItem3.Index = 1; this.menuItem3.Text = "About"; // // sbMain // this.sbMain.Location = new System.Drawing.Point(0, 492); this.sbMain.Name = "sbMain"; 
this.sbMain.Panels.AddRange(new System.Windows.Forms.StatusBarPanel[] { this.sbpMessage, this.sbpNumReplays, this.sbpBlank}); this.sbMain.RightToLeft = System.Windows.Forms.RightToLeft.No; this.sbMain.ShowPanels = true; this.sbMain.Size = new System.Drawing.Size(695, 22); this.sbMain.TabIndex = 9; // // sbpMessage // this.sbpMessage.AutoSize = System.Windows.Forms.StatusBarPanelAutoSize.Spring; this.sbpMessage.BorderStyle = System.Windows.Forms.StatusBarPanelBorderStyle.Raised; this.sbpMessage.Name = "sbpMessage"; this.sbpMessage.Width = 601; // // sbpNumReplays // this.sbpNumReplays.Alignment = System.Windows.Forms.HorizontalAlignment.Center; this.sbpNumReplays.AutoSize = System.Windows.Forms.StatusBarPanelAutoSize.Contents; this.sbpNumReplays.BorderStyle = System.Windows.Forms.StatusBarPanelBorderStyle.Raised; this.sbpNumReplays.Name = "sbpNumReplays"; this.sbpNumReplays.Text = "Replays: "; this.sbpNumReplays.Width = 58; // // sbpBlank // this.sbpBlank.BorderStyle = System.Windows.Forms.StatusBarPanelBorderStyle.None; this.sbpBlank.Name = "sbpBlank"; this.sbpBlank.Width = 20; // // splitStatusMain // this.splitStatusMain.Dock = System.Windows.Forms.DockStyle.Bottom; this.splitStatusMain.Enabled = false; this.splitStatusMain.Location = new System.Drawing.Point(0, 489); this.splitStatusMain.Name = "splitStatusMain"; this.splitStatusMain.Size = new System.Drawing.Size(695, 3); this.splitStatusMain.TabIndex = 10; this.splitStatusMain.TabStop = false; // // pnlMain // this.pnlMain.Controls.Add(this.pnlViewMain); this.pnlMain.Controls.Add(this.splitter4); this.pnlMain.Controls.Add(this.pnlCategories); this.pnlMain.Controls.Add(this.splitter3); this.pnlMain.Controls.Add(this.pnlReplayDetail); this.pnlMain.Controls.Add(this.splitter2); this.pnlMain.Controls.Add(this.pnlTopToolbar); this.pnlMain.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlMain.Location = new System.Drawing.Point(0, 0); this.pnlMain.Name = "pnlMain"; this.pnlMain.Size = new System.Drawing.Size(695, 
464); this.pnlMain.TabIndex = 11; // // pnlViewMain // this.pnlViewMain.Controls.Add(this.pnlViewTop); this.pnlViewMain.Controls.Add(this.splitter5); this.pnlViewMain.Controls.Add(this.lvReplays); this.pnlViewMain.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlViewMain.Location = new System.Drawing.Point(156, 32); this.pnlViewMain.Name = "pnlViewMain"; this.pnlViewMain.Size = new System.Drawing.Size(539, 293); this.pnlViewMain.TabIndex = 8; // // pnlViewTop // this.pnlViewTop.Controls.Add(this.pnlMainTopRight); this.pnlViewTop.Controls.Add(this.splitter6); this.pnlViewTop.Controls.Add(this.lvPlayers); this.pnlViewTop.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlViewTop.Location = new System.Drawing.Point(0, 0); this.pnlViewTop.Name = "pnlViewTop"; this.pnlViewTop.Size = new System.Drawing.Size(539, 131); this.pnlViewTop.TabIndex = 7; // // pnlMainTopRight // this.pnlMainTopRight.Controls.Add(this.lvMaps); this.pnlMainTopRight.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlMainTopRight.Location = new System.Drawing.Point(265, 0); this.pnlMainTopRight.Name = "pnlMainTopRight"; this.pnlMainTopRight.Size = new System.Drawing.Size(274, 131); this.pnlMainTopRight.TabIndex = 8; // // splitter6 // this.splitter6.Location = new System.Drawing.Point(262, 0); this.splitter6.Name = "splitter6"; this.splitter6.Size = new System.Drawing.Size(3, 131); this.splitter6.TabIndex = 7; this.splitter6.TabStop = false; // // splitter5 // this.splitter5.Dock = System.Windows.Forms.DockStyle.Bottom; this.splitter5.Location = new System.Drawing.Point(0, 131); this.splitter5.Name = "splitter5"; this.splitter5.Size = new System.Drawing.Size(539, 3); this.splitter5.TabIndex = 6; this.splitter5.TabStop = false; // // splitter4 // this.splitter4.Location = new System.Drawing.Point(153, 32); this.splitter4.Name = "splitter4"; this.splitter4.Size = new System.Drawing.Size(3, 293); this.splitter4.TabIndex = 7; this.splitter4.TabStop = false; // // pnlCategories // 
this.pnlCategories.Controls.Add(this.tvCategories); this.pnlCategories.Dock = System.Windows.Forms.DockStyle.Left; this.pnlCategories.Location = new System.Drawing.Point(0, 32); this.pnlCategories.Name = "pnlCategories"; this.pnlCategories.Size = new System.Drawing.Size(153, 293); this.pnlCategories.TabIndex = 6; // // splitter3 // this.splitter3.Dock = System.Windows.Forms.DockStyle.Bottom; this.splitter3.Location = new System.Drawing.Point(0, 325); this.splitter3.Name = "splitter3"; this.splitter3.Size = new System.Drawing.Size(695, 3); this.splitter3.TabIndex = 5; this.splitter3.TabStop = false; // // pnlReplayDetail // this.pnlReplayDetail.Dock = System.Windows.Forms.DockStyle.Bottom; this.pnlReplayDetail.Location = new System.Drawing.Point(0, 328); this.pnlReplayDetail.Name = "pnlReplayDetail"; this.pnlReplayDetail.Size = new System.Drawing.Size(695, 136); this.pnlReplayDetail.TabIndex = 4; // // splitter2 // this.splitter2.Dock = System.Windows.Forms.DockStyle.Top; this.splitter2.Enabled = false; this.splitter2.Location = new System.Drawing.Point(0, 29); this.splitter2.Name = "splitter2"; this.splitter2.Size = new System.Drawing.Size(695, 3); this.splitter2.TabIndex = 3; this.splitter2.TabStop = false; // // pnlTopToolbar // this.pnlTopToolbar.Controls.Add(this.label1); this.pnlTopToolbar.Controls.Add(this.txtSearch); this.pnlTopToolbar.Controls.Add(this.btnClearSearch); this.pnlTopToolbar.Dock = System.Windows.Forms.DockStyle.Top; this.pnlTopToolbar.Location = new System.Drawing.Point(0, 0); this.pnlTopToolbar.Name = "pnlTopToolbar"; this.pnlTopToolbar.Size = new System.Drawing.Size(695, 29); this.pnlTopToolbar.TabIndex = 0; // // tbMain // this.tbMain.Appearance = System.Windows.Forms.ToolBarAppearance.Flat; this.tbMain.ButtonSize = new System.Drawing.Size(16, 16); this.tbMain.DropDownArrows = true; this.tbMain.Location = new System.Drawing.Point(0, 0); this.tbMain.Name = "tbMain"; this.tbMain.ShowToolTips = true; this.tbMain.Size = new 
System.Drawing.Size(695, 22); this.tbMain.TabIndex = 12; // // splitter1 // this.splitter1.Dock = System.Windows.Forms.DockStyle.Top; this.splitter1.Location = new System.Drawing.Point(0, 22); this.splitter1.Name = "splitter1"; this.splitter1.Size = new System.Drawing.Size(695, 3); this.splitter1.TabIndex = 13; this.splitter1.TabStop = false; // // panel1 // this.panel1.Controls.Add(this.pnlMain); this.panel1.Dock = System.Windows.Forms.DockStyle.Fill; this.panel1.Location = new System.Drawing.Point(0, 25); this.panel1.Name = "panel1"; this.panel1.Size = new System.Drawing.Size(695, 464); this.panel1.TabIndex = 14; // // frmMain // this.AllowDrop = true; this.AutoScaleBaseSize = new System.Drawing.Size(5, 14); this.ClientSize = new System.Drawing.Size(695, 514); this.Controls.Add(this.panel1); this.Controls.Add(this.splitter1); this.Controls.Add(this.tbMain); this.Controls.Add(this.splitStatusMain); this.Controls.Add(this.sbMain); this.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon"))); this.Name = "frmMain"; this.Text = "Dawn of War Replay Manager v1.0 - by Shiver"; this.Load += new System.EventHandler(this.frmMain_Load); this.cmsPCategories.ResumeLayout(false); this.cmsPReplays.ResumeLayout(false); ((System.ComponentModel.ISupportInitialize)(this.sbpMessage)).EndInit(); ((System.ComponentModel.ISupportInitialize)(this.sbpNumReplays)).EndInit(); ((System.ComponentModel.ISupportInitialize)(this.sbpBlank)).EndInit(); this.pnlMain.ResumeLayout(false); this.pnlViewMain.ResumeLayout(false); this.pnlViewTop.ResumeLayout(false); this.pnlMainTopRight.ResumeLayout(false); this.pnlCategories.ResumeLayout(false); this.pnlTopToolbar.ResumeLayout(false); this.pnlTopToolbar.PerformLayout(); this.panel1.ResumeLayout(false); this.ResumeLayout(false); this.PerformLayout(); } private System.Windows.Forms.ToolStripMenuItem 
miAvailableReplay;
private System.Windows.Forms.ToolStripMenuItem miRenameReplay;
private System.Windows.Forms.ToolStripMenuItem miDeleteReplay;
private System.Windows.Forms.ToolStripMenuItem miDeleteCategory;
private System.Windows.Forms.ToolStripMenuItem miRenameCategory;
private System.Windows.Forms.ToolStripMenuItem miNewCategory;
private System.Windows.Forms.ToolStripSeparator toolStripSeparator2;
private System.Windows.Forms.ToolStripMenuItem customRenameToolStripMenuItem;
private System.Windows.Forms.ToolStripMenuItem replayRenameToolStripMenuItem1;
private System.Windows.Forms.ToolStripSeparator toolStripSeparator3;
// Context menu shown on the replay list (lvReplays).
private System.Windows.Forms.ContextMenuStrip cmsPReplays;
private System.Windows.Forms.ToolStripMenuItem makeDefaultToolStripMenuItem;
private System.Windows.Forms.ToolStripSeparator toolStripSeparator1;
private System.Windows.Forms.ToolStripSeparator toolStripSeparator4;
// Context menu shown on the category tree (tvCategories).
private System.Windows.Forms.ContextMenuStrip cmsPCategories;
#endregion

/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
    Application.Run(new frmMain());
}

// Designer-wired Load handler; intentionally empty.
private void frmMain_Load(object sender, System.EventArgs e)
{
}

/// <summary>
/// Fetches the tree node using the category tag ID.
/// Recursive depth-first search over the node collection and its children.
/// </summary>
/// <param name="tag">Tag to search for</param>
/// <param name="nodes">Node collection that is searched</param>
/// <returns>A TreeNode object if successful or null</returns>
private TreeNode GetNodeByTag(int tag, TreeNodeCollection nodes)
{
    for (int rooti = 0; rooti < nodes.Count; rooti++)
    {
        // Tags are compared via their string representations.
        if (nodes[rooti].Tag.ToString() == tag.ToString())
            return nodes[rooti];
        else if (nodes[rooti].GetNodeCount(true) > 0)
        {
            // Recurse into children; return as soon as a match is found.
            TreeNode node = GetNodeByTag(tag, nodes[rooti].Nodes);
            if (node != null)
                return node;
        }
    }
    // No node with a matching tag anywhere in this subtree.
    return null;
}

/// <summary>
/// Adds the categories to the main TreeView.
/// </summary>
/// <param name="cats">A CategoryStore object to use as a source</param>
private void addCategories(CategoryStore cats)
{
    TreeNode node;
    foreach (CategoryStore.CategoryRow row in cats.Category)
    {
        node = new TreeNode();
        node.Tag = row.ID;
        node.Text = row.Name;
        if (row.ParentID != 0)
        {
            // Child category: attach under its parent node.
            // NOTE(review): if the parent has not been added yet (pNode == null)
            // the category is silently dropped — presumably rows arrive in
            // parent-before-child order; confirm against CategoryStore.
            TreeNode pNode = GetNodeByTag(row.ParentID, tvCategories.Nodes);
            if (pNode != null)
                pNode.Nodes.Add(node);
        }
        else
        {
            // ParentID 0 marks a root-level category.
            tvCategories.Nodes.Add(node);
        }
    }
    tvCategories.ExpandAll();
}

/// <summary>
/// Apply a global filter according to the category that we are selected on.
/// </summary>
/// <param name="CategoryID">ID of the category to filter the replay views by</param>
/// <returns>Returns the number of replays in the current category or -1 if none</returns>
private int FilterCategories(int CategoryID)
{
    if (CategoryID != 1) // category ID 1 is reserved for "All Replays"
    {
        //fetch all the replayids for the category
        object[] replayids = Methods.GetReplaysByCategoryID(dvReplays, CategoryID);
        //kind of lame, but it generates the expression (1x long string) for all the replayids to match
        System.Text.StringBuilder s_Replayids = new System.Text.StringBuilder();;
        if (replayids.Length > 0)
        {
            foreach (object replayid in replayids)
            {
                // First ID opens the OR-group; subsequent IDs are OR-ed on.
                if
(s_Replayids.Length == 0)
                    s_Replayids.Append("(ReplayID = " + replayid);
                else
                    s_Replayids.Append(" OR ReplayID = " + replayid);
            }
            s_Replayids.Append(")");
            //apply the global filter to show only the found replay ids
            Methods.GlobalReplayFilter(dvReplays, dvPlayers, dvMaps, s_Replayids.ToString());
        }
        else
            return -1; //no replays in that category
    }
    else
    {
        //showing all replays... remove global filters
        Methods.GlobalReplayFilter(dvReplays, dvPlayers, dvMaps, null);
    }
    return dvReplays.Table.Rows.Count;
}

// Category selected in the tree: remember it, reset the search box, and
// re-filter/repopulate the three list views (replays / players / maps).
private void tvCategories_AfterSelect(object sender, System.Windows.Forms.TreeViewEventArgs e)
{
    selectedCategory = (int)e.Node.Tag;
    //reset the search options. could be slow to refilter after a new category select.
    txtSearch.Text = "";
    int result = FilterCategories((int)e.Node.Tag);
    if (result > 0)
    {
        //repopulate the list views with the right replays
        Methods.PopulateReplaysView(lvReplays, dvReplays, "", "Added DESC");
        Methods.PopulatePlayersView(lvPlayers, dvPlayers, "", "Games DESC");
        Methods.PopulateMapsView(lvMaps, dvMaps, "", "Games DESC");
    }
    else
    {
        //if nothing is found... clear the lists
        lvReplays.Items.Clear();
        lvPlayers.Items.Clear();
        lvMaps.Items.Clear();
    }
}

// Clears the search box; the TextChanged handler below re-filters the views.
private void btnClearSearch_Click(object sender, System.EventArgs e)
{
    txtSearch.Text = "";
}

// Live prefix search: filters maps by map name, players by player name and
// replays by replay name as the user types.
// NOTE(review): the text is concatenated straight into a LIKE filter
// expression — a quote character typed in the box would break the filter;
// confirm whether Populate*View sanitizes it.
private void txtSearch_TextChanged(object sender, System.EventArgs e)
{
    string search_filter = "LIKE '" + txtSearch.Text + "%'";
    Methods.PopulateMapsView(lvMaps, dvMaps, "Map " + search_filter, "Games DESC");
    Methods.PopulatePlayersView(lvPlayers, dvPlayers, "Player " + search_filter, "Games DESC");
    Methods.PopulateReplaysView(lvReplays, dvReplays, "Name " + search_filter, "Added DESC");
}

// Double-click on a replay: load the replay file for the last selected row
// and show it in the replay viewer dialog.
private void lvReplays_DoubleClick(object sender, System.EventArgs e)
{
    //show the last selected replay
    frmReplayView rv = new frmReplayView();
    string filename = Methods.GetReplayFilenameByReplayID(dvReplays, (int)lvReplays.SelectedItems[lvReplays.SelectedItems.Count - 1].Tag);
    ReplayReader reader = new ReplayReader(replayManager.ReplayManagerFilePath + @"\" + filename);
    rv.Replay = reader.Read();
    rv.ShowDialog();
}

// Player selected: restrict the replay list to that player's replays by
// building an "ReplayID = a OR ReplayID = b ..." filter expression.
private void lvPlayers_SelectedIndexChanged(object sender, System.EventArgs e)
{
    if (lvPlayers.SelectedItems.Count > 0)
    {
        int iname = chPlayerName.Index;
        object[] replayids = Methods.GetReplaysByPlayerName(dvPlayers, lvPlayers.SelectedItems[0].SubItems[iname].Text);
        System.Text.StringBuilder s_Replayids = new System.Text.StringBuilder();;
        foreach (object replayid in replayids)
        {
            if (s_Replayids.Length == 0)
                s_Replayids.Append("ReplayID = " + replayid);
            else
                s_Replayids.Append(" OR ReplayID = " + replayid);
        }
        // An empty replayids array yields an empty filter string here.
        Methods.PopulateReplaysView(lvReplays, dvReplays, s_Replayids.ToString(), "Added DESC");
    }
}

// Map selected: same pattern as the player handler — restrict the replay
// list to replays played on the selected map.
private void lvMaps_SelectedIndexChanged(object sender, System.EventArgs e)
{
    if (lvMaps.SelectedItems.Count > 0)
    {
        int iname = chMapName.Index;
        object[] replayids = Methods.GetReplaysByMapName(dvMaps, lvMaps.SelectedItems[0].SubItems[iname].Text);
        System.Text.StringBuilder s_Replayids = new System.Text.StringBuilder();
        if (replayids.Length > 0)
        {
            foreach (object replayid in replayids)
            {
                if
(s_Replayids.Length == 0)
                    s_Replayids.Append("ReplayID = " + replayid);
                else
                    s_Replayids.Append(" OR ReplayID = " + replayid);
            }
        }
        Methods.PopulateReplaysView(lvReplays, dvReplays, s_Replayids.ToString(), "Added DESC");
    }
}

// NOTE(review): appears to be a stub — it walks the selected replays and
// computes each tag but never uses it.
private void cmPCategory_Click(object sender, System.EventArgs e)
{
    if (lvReplays.SelectedItems.Count > 0)
    {
        for (int index = 0; index < lvReplays.SelectedItems.Count; index++)
        {
            int itag = (int)lvReplays.SelectedItems[index].Tag;
        }
    }
}

// Begins a drag of the selected replays; the drag data is the array of
// their Tag values (replay IDs), consumed by tvCategories_DragDrop.
private void lvReplays_ItemDrag(object sender, System.Windows.Forms.ItemDragEventArgs e)
{
    ArrayList list = new ArrayList();
    foreach (ListViewItem item in lvReplays.SelectedItems)
    {
        list.Add(item.Tag);
    }
    if (list.Count > 0)
        lvReplays.DoDragDrop(list.ToArray(), DragDropEffects.Move);
}

// Drop of replay IDs onto a category node: reassigns those replays to the
// target category, rebuilds the replay view and refreshes all lists.
private void tvCategories_DragDrop(object sender, System.Windows.Forms.DragEventArgs e)
{
    //fetch the categoryid that the item was dragged to
    // NOTE(review): GetNodeAt can return null if the drop lands outside a
    // node; the cast below would then throw — confirm intended.
    TreeNode node = tvCategories.GetNodeAt( tvCategories.PointToClient(new Point(e.X, e.Y)));
    int itag = (int)node.Tag;
    if (itag >= 0)
    {
        Methods.AlterReplayCategory((object[])e.Data.GetData(typeof(object[])), itag, storeReader);
        dvReplays = Methods.CreateReplayDataView(storeReader, storeReader.Replays);
        int result = FilterCategories(selectedCategory);
        if (result > 0)
        {
            //repopulate the listviews with the updated category information
            Methods.PopulateReplaysView(lvReplays, dvReplays, "", "Added DESC");
            Methods.PopulatePlayersView(lvPlayers, dvPlayers, "", "Games DESC");
            Methods.PopulateMapsView(lvMaps, dvMaps, "", "Games DESC");
        }
        else
        {
            //if no replays left... clear the list
            lvReplays.Items.Clear();
            lvPlayers.Items.Clear();
            lvMaps.Items.Clear();
        }
    }
}

// Accept any drag entering the category tree as a Move.
private void tvCategories_DragEnter(object sender, System.Windows.Forms.DragEventArgs e)
{
    e.Effect = DragDropEffects.Move;
}

// Column header clicked on the players list: toggle the sort direction for
// the clicked column (Rank sorts by Games, like the Games column).
private void lvPlayers_ColumnClick(object sender, System.Windows.Forms.ColumnClickEventArgs e)
{
    int cPlayerGames = chPlayerGames.Index;
    int cPlayerName = chPlayerName.Index;
    int cPlayerRank = chPlayerRank.Index;
    //get the column and order it's currently sorting by
    string[] split = dvPlayers.Sort.Split(' ');
    string column = split[0];
    string order = "";
    if (split.Length >= 2)
        order = split[1];
    //rank column was clicked
    if (e.Column == cPlayerRank)
    {
        if (column.ToUpper() == "GAMES")
        {
            // Toggle ascending/descending.
            if (order.ToUpper() == "DESC")
                dvPlayers.Sort = "Games";
            else
                dvPlayers.Sort = "Games DESC";
        }
        else
        {
            dvPlayers.Sort = "Games DESC";
        }
        Methods.PopulatePlayersView(lvPlayers, dvPlayers, dvPlayers.RowFilter, dvPlayers.Sort);
    }
    else if (e.Column == cPlayerName) //player name clicked
    {
        // NOTE(review): column.ToUpper() can never equal the mixed-case
        // "Player", so the first branch is dead — harmless here because
        // both branches apply the identical toggle.
        if (column.ToUpper() == "Player")
        {
            if (order.ToUpper() == "DESC")
                dvPlayers.Sort = "Player";
            else
                dvPlayers.Sort = "Player DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvPlayers.Sort = "Player";
            else
                dvPlayers.Sort = "Player DESC";
        }
        Methods.PopulatePlayersView(lvPlayers, dvPlayers, dvPlayers.RowFilter, dvPlayers.Sort);
    }
    else if (e.Column == cPlayerGames) //num games clicked
    {
        if (column.ToUpper() == "GAMES")
        {
            if (order.ToUpper() == "DESC")
                dvPlayers.Sort = "Games";
            else
                dvPlayers.Sort = "Games DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvPlayers.Sort = "Games";
            else
                dvPlayers.Sort = "Games DESC";
        }
        Methods.PopulatePlayersView(lvPlayers, dvPlayers, dvPlayers.RowFilter, dvPlayers.Sort);
    }
}

// Column header clicked on the maps list: same toggle pattern as the
// players list above.
private void lvMaps_ColumnClick(object sender, System.Windows.Forms.ColumnClickEventArgs e)
{
    int cMapName = chMapName.Index;
    int cMapGames = chMapGames.Index;
    int cMapRank = chMapRank.Index;
    //get the column and order it's currently sorting by
    string[] split = dvMaps.Sort.Split(' ');;
    string column =
split[0];
    string order = "";
    if (split.Length >= 2)
        order = split[1];
    if (e.Column == cMapName)
    {
        // NOTE(review): as in lvPlayers_ColumnClick, both branches apply
        // the same toggle, so the dead comparison is harmless.
        if (column.ToUpper() == "MAP")
        {
            if (order.ToUpper() == "DESC")
                dvMaps.Sort = "Map";
            else
                dvMaps.Sort = "Map DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvMaps.Sort = "Map";
            else
                dvMaps.Sort = "Map DESC";
        }
        Methods.PopulateMapsView(lvMaps, dvMaps, dvMaps.RowFilter, dvMaps.Sort);
    }
    else if (e.Column == cMapGames)
    {
        if (column.ToUpper() == "GAMES")
        {
            if (order.ToUpper() == "DESC")
                dvMaps.Sort = "Games";
            else
                dvMaps.Sort = "Games DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvMaps.Sort = "Games";
            else
                dvMaps.Sort = "Games DESC";
        }
        Methods.PopulateMapsView(lvMaps, dvMaps, dvMaps.RowFilter, dvMaps.Sort);
    }
    else if (e.Column == cMapRank)
    {
        // Rank column: just repopulate with the current sort unchanged.
        Methods.PopulateMapsView(lvMaps, dvMaps, dvMaps.RowFilter, dvMaps.Sort);
    }
}

// Column header clicked on the replay list: toggle sort direction for the
// Name / Modified / Added / Filename columns.
private void lvReplays_ColumnClick(object sender, System.Windows.Forms.ColumnClickEventArgs e)
{
    int cReplayName = chReplayName.Index;
    int cReplayModified = chReplayDateModified.Index;
    int cReplayAdded = chReplayDateAdded.Index;
    int cReplayFilename = chReplayFile.Index;
    //get the column and order it's currently sorting by
    string[] split = dvReplays.Sort.Split(' ');;
    string column = split[0];
    string order = "";
    if (split.Length >= 2)
        order = split[1];
    if (e.Column == cReplayName)
    {
        if (column.ToUpper() == "NAME")
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Name";
            else
                dvReplays.Sort = "Name DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Name";
            else
                dvReplays.Sort = "Name DESC";
        }
        Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
    }
    else if (e.Column == cReplayModified)
    {
        if (column.ToUpper() == "MODIFIED")
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Modified";
            else
                dvReplays.Sort = "Modified DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Modified";
            else
                dvReplays.Sort = "Modified DESC";
        }
        Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
    }
    else if (e.Column == cReplayAdded)
    {
        if (column.ToUpper() == "ADDED")
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Added";
            else
                dvReplays.Sort = "Added DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Added";
            else
                dvReplays.Sort = "Added DESC";
        }
        Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
    }
    else if (e.Column == cReplayFilename)
    {
        if (column.ToUpper() == "FILENAME")
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Filename";
            else
                dvReplays.Sort = "Filename DESC";
        }
        else
        {
            if (order.ToUpper() == "DESC")
                dvReplays.Sort = "Filename";
            else
                dvReplays.Sort = "Filename DESC";
        }
        Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
    }
}

// Old context-menu rename handler; body disabled (superseded by
// miRenameCategoryClick below).
private void miPCatRename_Click(object sender, System.EventArgs e)
{
    /* tvCategories.SelectedNode.BeginEdit(); */
}

// Before a category label edit starts: block editing of permanent
// (built-in) categories and make the edited node the selection.
private void tvCategories_BeforeLabelEdit(object sender, System.Windows.Forms.NodeLabelEditEventArgs e)
{
    //check if we can edit this first...
    if (storeReader.IsPermanent((int)e.Node.Tag))
        tvCategories.SelectedNode.EndEdit(true);
    tvCategories.SelectedNode = e.Node;
}

// After a category label edit: persist the rename unless the edit was
// cancelled, left empty, or the category is permanent.
private void tvCategories_AfterLabelEdit(object sender, System.Windows.Forms.NodeLabelEditEventArgs e)
{
    if (!e.CancelEdit && e.Label != null)
    {
        int itag = (int)e.Node.Tag;
        if (!storeReader.IsPermanent(itag))
            Methods.RenameCategory(storeReader, itag, e.Label);
    }
}

// Old "new category" handler; body disabled (superseded by
// miNewCategoryClick below).
private void miPNewCat_Click(object sender, System.EventArgs e)
{
    /*
    frmInput input = new frmInput();
    input.Question = StoreReader.STORE_CATEGORYNEW_INPUT;
    input.Caption = "Create New Category...";
    DialogResult result = input.ShowDialog(this);
    if (result == DialogResult.OK)
    {
        Methods.CreateNewCategory(storeReader, input.Input, (int)tvCategories.SelectedNode.Tag);
        Methods.PopulateCategoriesView(tvCategories, storeReader);
    }
    */
}

// Old "delete category" handler; body disabled (superseded by
// miDeleteCategoryClick below).
private void miPCatDelete_Click(object sender, System.EventArgs e)
{
    /*
    //TODO: Replace this null checking with a right click select on a node...
if (tvCategories.SelectedNode != null)
    {
        if (!storeReader.IsPermanent((int)tvCategories.SelectedNode.Tag))
        {
            string msg = StoreReader.STORE_CATEGORYDELETE_CONFIRM.Replace("{0}", "'" + tvCategories.SelectedNode.Text + "'");
            DialogResult result = MessageBox.Show(this, msg, "Confirm Delete", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (result == DialogResult.Yes)
            {
                Methods.DeleteCategory(storeReader, (int)tvCategories.SelectedNode.Tag);
                Methods.PopulateCategoriesView(tvCategories, storeReader);
            }
        }
        else
        {
            string error = StoreReader.STORE_CATEGORYDELETE_PERM.Replace("{0}", "'" + tvCategories.SelectedNode.Text + "'");
            MessageBox.Show(this, error, "Delete failed!", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
    }
    */
}

// Rename the replay's file on disk (and in the store) via an input dialog.
// Requires exactly one selected replay; shows an error box otherwise.
private void cmPFileRename_Click(object sender, System.EventArgs e)
{
    if (lvReplays.SelectedItems.Count == 1)
    {
        ReplayStore.ReplayRow row = storeReader.GetReplayByID((int)lvReplays.SelectedItems[0].Tag);
        frmInput input = new frmInput();
        string question = StoreReader.STORE_REPLAYRENAME_INPUT.Replace("{0}", row.Name);;
        input.Question = question;
        input.Caption = "Filename Rename...";
        input.Input = row.Filename;
        input.MaxLength = 255;
        DialogResult result = input.ShowDialog(this);
        if (result == DialogResult.OK)
        {
            // Rename on disk first; only update the store if that succeeds.
            if (replayManager.RenameReplayFile(row.Filename, input.Input))
            {
                Methods.RenameReplayFile(storeReader, row.ID, input.Input);
                dvReplays = Methods.CreateReplayDataView(storeReader, storeReader.Replays);
                Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
            }
            else
            {
                string error = ReplayManager.MANAGER_REPLAYFILERENAME_FAIL.Replace("{0}", row.Filename);
                error = error.Replace("{1}", input.Input);
                MessageBox.Show(this, error, "Rename failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
    }
    else
        MessageBox.Show(this, StoreReader.STORE_REPLAYFILERENAME_SELECT, "Rename failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
}

// Old "rename replay" handler; body disabled (superseded by
// miRenameReplayClick below).
private void cmPRename_Click(object sender, System.EventArgs e)
{
    /*
    Replay replay = null;
    if (lvReplays.SelectedItems.Count == 1)
    {
        ReplayStore.ReplayRow row = storeReader.GetReplayByID((int)lvReplays.SelectedItems[0].Tag);
        frmInput input = new frmInput();
        string question = StoreReader.STORE_REPLAYNAMERENAME_INPUT.Replace("{0}", row.Name);;
        input.Question = question;
        input.Caption = "Replay Rename...";
        input.Input = row.Name;
        input.MaxLength = 255;
        DialogResult result = input.ShowDialog(this);
        if (result == DialogResult.OK)
        {
            ReplayReader reader = new ReplayReader(replayManager.ReplayManagerFilePath + @"\" + row.Filename);
            replay = reader.Read();
            if (replay != null && replayManager.RenameReplay(replay, input.Input))
            {
                Methods.RenameReplay(storeReader, row.ID, input.Input);
                dvReplays = Methods.CreateReplayDataView(storeReader, storeReader.Replays);
                Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
            }
            else
            {
                string error = ReplayManager.MANAGER_REPLAYFILERENAME_FAIL.Replace("{0}", row.Filename);
                error = error.Replace("{1}", input.Input);
                MessageBox.Show(this, error, "Rename failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
    }
    else
        MessageBox.Show(this, StoreReader.STORE_REPLAYFILERENAME_SELECT, "Rename failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
    */
}

// Old "make available" handler; body disabled (superseded by
// miAvailableReplayClick below).
private void cmPAvailable_Click(object sender, System.EventArgs e)
{
    /*
    log.Write(LogType.Info, 5, "Toggling Available for " + lvReplays.SelectedIndices.Count.ToString() + " selected replay(s)");
    try
    {
        foreach (int index in lvReplays.SelectedIndices)
        {
            log.Write(LogType.Info, 5, "Getting ReplayRow for ReplayID=" + lvReplays.Items[index].Tag.ToString());
            ReplayStore.ReplayRow row = storeReader.GetReplayByID((int)lvReplays.Items[index].Tag);
            if (!replayManager.MakeAvailable(replayManager.ReplayManagerFilePath + @"\" + row.Filename))
            {
                string error = ReplayManager.MANAGER_REPLAYAVAIL_FAIL.Replace("{0}", row.Filename);
                MessageBox.Show(this, error, "Make available failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            Methods.htAvailable = replayManager.GetAvailable();
            if (!Methods.htAvailable.ContainsKey(row.Filename))
                lvReplays.Items[index].BackColor = System.Drawing.Color.Empty;
            else
                lvReplays.Items[index].BackColor = System.Drawing.Color.PowderBlue;
        }
    }
    catch(Exception x)
    {
        log.Write(LogType.Error, 1, "cmPAvailable_Click(): " + x.StackTrace);
    }
    */
}

// Restores the main window (used when coming back from the tray/taskbar).
private void miShow_Click(object sender, System.EventArgs e)
{
    this.Show();
    this.ShowInTaskbar = true;
}

// Context-menu "view replay": delegates to the double-click handler.
private void cmPReplayView_Click(object sender, System.EventArgs e)
{
    lvReplays_DoubleClick(lvReplays, null);
}

// Keyboard shortcuts on the replay list: Enter opens the replay viewer;
// Delete is reserved but not implemented yet.
private void lvReplays_KeyDown(object sender, System.Windows.Forms.KeyEventArgs e)
{
    switch (e.KeyCode)
    {
        case Keys.Enter:
            lvReplays_DoubleClick(lvReplays, null);
            break;
        case Keys.Delete:
            //Methods.DeleteReplay(lvReplays, dvReplays, int id);
            break;
        default:
            break;
    }
}

// Opens the mask-based batch rename dialog.
private void miMaskRename_Click(object sender, System.EventArgs e)
{
    frmMaskRename mask = new frmMaskRename();
    mask.ShowDialog();
}

private void lvReplays_SelectedIndexChanged(object sender, System.EventArgs e)
{
    //TODO: need to check or uncheck available
}

// Old "delete replay" handler; body disabled (superseded by
// miDeleteReplayClick below).
private void cmPDelete_Click(object sender, System.EventArgs e)
{
    /*
    ArrayList list = new ArrayList();
    foreach (ListViewItem item in lvReplays.SelectedItems)
    {
        list.Add(item.Tag);
    }
    if (list.Count > 0)
    {
        object[] filenames = Methods.DeleteReplay(storeReader, list.ToArray());
        if (filenames.Length > 0)
        {
            //delete files in both the Replays folder and Playback
            replayManager.DeleteReplays(filenames);
            //if we were able to save the store etc, reload the views
            dvReplays = Methods.CreateReplayDataView(storeReader, storeReader.Replays);
            Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
            dvMaps = Methods.CreateMapDataView(storeReader, storeReader.Replays);
            Methods.PopulateMapsView(lvMaps, dvMaps, dvMaps.RowFilter, dvMaps.Sort);
            dvPlayers = Methods.CreatePlayerDataView(storeReader, storeReader.Replays);
            Methods.PopulatePlayersView(lvPlayers, dvPlayers, dvPlayers.RowFilter, dvPlayers.Sort);
        }
        else
        {
//failed
        }
    }
    */
}

// Prompts for a name and creates a new category under the currently
// selected tree node, then refreshes the category tree.
void miNewCategoryClick(object sender, System.EventArgs e)
{
    frmInput input = new frmInput();
    input.Question = StoreReader.STORE_CATEGORYNEW_INPUT;
    input.Caption = "Create New Category...";
    DialogResult result = input.ShowDialog(this);
    if (result == DialogResult.OK)
    {
        Methods.CreateNewCategory(storeReader, input.Input, (int)tvCategories.SelectedNode.Tag);
        Methods.PopulateCategoriesView(tvCategories, storeReader);
    }
}

// Starts an in-place label edit on the selected category node; the
// tvCategories_After/BeforeLabelEdit handlers do the validation/persist.
void miRenameCategoryClick(object sender, System.EventArgs e)
{
    tvCategories.SelectedNode.BeginEdit();
}

// Deletes the selected category after confirmation, unless it is one of
// the permanent built-in categories.
void miDeleteCategoryClick(object sender, System.EventArgs e)
{
    //TODO: Replace this null checking with a right click select on a node...
    if (tvCategories.SelectedNode != null)
    {
        if (!storeReader.IsPermanent((int)tvCategories.SelectedNode.Tag))
        {
            string msg = StoreReader.STORE_CATEGORYDELETE_CONFIRM.Replace("{0}", "'" + tvCategories.SelectedNode.Text + "'");
            DialogResult result = MessageBox.Show(this, msg, "Confirm Delete", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (result == DialogResult.Yes)
            {
                Methods.DeleteCategory(storeReader, (int)tvCategories.SelectedNode.Tag);
                Methods.PopulateCategoriesView(tvCategories, storeReader);
            }
        }
        else
        {
            string error = StoreReader.STORE_CATEGORYDELETE_PERM.Replace("{0}", "'" + tvCategories.SelectedNode.Text + "'");
            MessageBox.Show(this, error, "Delete failed!", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
    }
}

// Toggles "available" (copied into the game's Playback folder, per
// ReplayManager.MakeAvailable) for each selected replay, and recolors the
// rows to reflect the new availability state.
void miAvailableReplayClick(object sender, System.EventArgs e)
{
    log.Write(LogType.Info, 5, "Toggling Available for " + lvReplays.SelectedIndices.Count.ToString() + " selected replay(s)");
    try
    {
        foreach (int index in lvReplays.SelectedIndices)
        {
            log.Write(LogType.Info, 5, "Getting ReplayRow for ReplayID=" + lvReplays.Items[index].Tag.ToString());
            ReplayStore.ReplayRow row = storeReader.GetReplayByID((int)lvReplays.Items[index].Tag);
            if (!replayManager.MakeAvailable(replayManager.ReplayManagerFilePath + @"\" + row.Filename))
            {
                string error = ReplayManager.MANAGER_REPLAYAVAIL_FAIL.Replace("{0}", row.Filename);
                MessageBox.Show(this, error, "Make available failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            // Refresh the availability table and recolor the row:
            // PowderBlue = available, default = not available.
            Methods.htAvailable = replayManager.GetAvailable();
            if (!Methods.htAvailable.ContainsKey(row.Filename))
                lvReplays.Items[index].BackColor = System.Drawing.Color.Empty;
            else
                lvReplays.Items[index].BackColor = System.Drawing.Color.PowderBlue;
        }
    }
    catch(Exception x)
    {
        // NOTE(review): the log label still says "cmPAvailable_Click()" —
        // leftover from the handler this code was moved from.
        log.Write(LogType.Error, 1, "cmPAvailable_Click(): " + x.StackTrace);
    }
}

// Deletes the selected replays from the store and from disk, then rebuilds
// and repopulates all three views.
void miDeleteReplayClick(object sender, System.EventArgs e)
{
    ArrayList list = new ArrayList();
    foreach (ListViewItem item in lvReplays.SelectedItems)
    {
        list.Add(item.Tag);
    }
    if (list.Count > 0)
    {
        object[] filenames = Methods.DeleteReplay(storeReader, list.ToArray());
        if (filenames.Length > 0)
        {
            //delete files in both the Replays folder and Playback
            replayManager.DeleteReplays(filenames);
            //if we were able to save the store etc, reload the views
            dvReplays = Methods.CreateReplayDataView(storeReader, storeReader.Replays);
            Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
            dvMaps = Methods.CreateMapDataView(storeReader, storeReader.Replays);
            Methods.PopulateMapsView(lvMaps, dvMaps, dvMaps.RowFilter, dvMaps.Sort);
            dvPlayers = Methods.CreatePlayerDataView(storeReader, storeReader.Replays);
            Methods.PopulatePlayersView(lvPlayers, dvPlayers, dvPlayers.RowFilter, dvPlayers.Sort);
        }
        else
        {
            //failed
        }
    }
}

// Renames the replay's in-file display name (not its filename): re-reads
// the replay, writes the new name via ReplayManager, updates the store and
// refreshes the replay view. Requires exactly one selected replay.
void miRenameReplayClick(object sender, System.EventArgs e)
{
    Replay replay = null;
    if (lvReplays.SelectedItems.Count == 1)
    {
        ReplayStore.ReplayRow row = storeReader.GetReplayByID((int)lvReplays.SelectedItems[0].Tag);
        frmInput input = new frmInput();
        string question = StoreReader.STORE_REPLAYNAMERENAME_INPUT.Replace("{0}", row.Name);;
        input.Question = question;
        input.Caption = "Replay Rename...";
        input.Input = row.Name;
        input.MaxLength = 255;
        DialogResult result = input.ShowDialog(this);
        if (result == DialogResult.OK)
        {
            ReplayReader reader = new ReplayReader(replayManager.ReplayManagerFilePath + @"\" + row.Filename);
            replay = reader.Read();
            if (replay != null && replayManager.RenameReplay(replay, input.Input))
            {
                Methods.RenameReplay(storeReader, row.ID, input.Input);
                dvReplays = Methods.CreateReplayDataView(storeReader, storeReader.Replays);
                Methods.PopulateReplaysView(lvReplays, dvReplays, dvReplays.RowFilter, dvReplays.Sort);
            }
            else
            {
                string error = ReplayManager.MANAGER_REPLAYFILERENAME_FAIL.Replace("{0}", row.Filename);
                error = error.Replace("{1}", input.Input);
                MessageBox.Show(this, error, "Rename failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
    }
    else
        MessageBox.Show(this, StoreReader.STORE_REPLAYFILERENAME_SELECT, "Rename failed!", MessageBoxButtons.OK, MessageBoxIcon.Error);
}
}
}
//*************************************************** //* This file was generated by tool //* SharpKit //* At: 29/08/2012 03:59:39 p.m. //*************************************************** using SharpKit.JavaScript; namespace Ext.container { #region AbstractContainer /// <inheritdocs /> /// <summary> /// <p><strong>NOTE</strong> This is a private utility class for internal use by the framework. Don't rely on its existence.</p><p>An abstract base class which provides shared methods for Containers across the Sencha product line.</p> /// <p>Please refer to sub class's documentation</p> /// </summary> [JsType(JsMode.Prototype, Export=false, OmitOptionalParameters=true)] public partial class AbstractContainer : Ext.Component { /// <summary> /// A string component id or the numeric index of the component that should be /// initially activated within the container's layout on render. For example, /// activeItem: 'item-1' or activeItem: 0 (index 0 = the first item in the /// container's collection). activeItem only applies to layout styles that can /// display items one at a time (like Ext.layout.container.Card and /// Ext.layout.container.Fit). /// </summary> public object activeItem; /// <summary> /// If true the container will automatically destroy any contained component that is removed /// from it, else destruction must be handled manually. /// Defaults to: <c>true</c> /// </summary> public bool autoDestroy; /// <summary> /// An array of events that, when fired, should be bubbled to any parent container. /// See Ext.util.Observable.enableBubble. /// Defaults to: <c>[&quot;add&quot;, &quot;remove&quot;]</c> /// </summary> public JsString bubbleEvents; /// <summary> /// The default xtype of child Components to create in this Container when /// a child item is specified as a raw configuration object, rather than as an instantiated Component. 
/// Defaults to: <c>&quot;panel&quot;</c> /// </summary> public JsString defaultType; /// <summary> /// This option is a means of applying default settings to all added items whether added /// through the items config or via the add or insert methods. /// Defaults are applied to both config objects and instantiated components conditionally /// so as not to override existing properties in the item (see <see cref="Ext.ExtContext.applyIf">Ext.applyIf</see>). /// If the defaults option is specified as a function, then the function will be called /// using this Container as the scope (<c>this</c> reference) and passing the added item as /// the first parameter. Any resulting object from that call is then applied to the item /// as default properties. /// For example, to automatically apply padding to the body of each of a set of /// contained <see cref="Ext.panel.Panel">Ext.panel.Panel</see> items, you could pass: /// <c>defaults: {bodyStyle:'padding:15px'}</c>. /// Usage: /// <code>defaults: { // defaults are applied to items, not the container /// autoScroll: true /// }, /// items: [ /// // default will not be applied here, panel1 will be autoScroll: false /// { /// xtype: 'panel', /// id: 'panel1', /// autoScroll: false /// }, /// // this component will have autoScroll: true /// new <see cref="Ext.panel.Panel">Ext.panel.Panel</see>({ /// id: 'panel2' /// }) /// ] /// </code> /// </summary> public object defaults; /// <summary> /// True to move any component to the detachedBody when the component is /// removed from this container. This option is only applicable when the component is not destroyed while /// being removed, see autoDestroy and remove. If this option is set to false, the DOM /// of the component will remain in the current place until it is explicitly moved. 
/// Defaults to: <c>true</c> /// </summary> public bool detachOnRemove; /// <summary> /// Important: In order for child items to be correctly sized and /// positioned, typically a layout manager must be specified through /// the layout configuration option. /// The sizing and positioning of child <see cref="Ext.container.AbstractContainerConfig.items">items</see> is the responsibility of /// the Container's layout manager which creates and manages the type of layout /// you have in mind. For example: /// If the <see cref="Ext.container.AbstractContainerConfig.layout">layout</see> configuration is not explicitly specified for /// a general purpose container (e.g. Container or Panel) the /// <see cref="Ext.layout.container.Auto">default layout manager</see> will be used /// which does nothing but render child components sequentially into the /// Container (no sizing or positioning will be performed in this situation). /// <strong>layout</strong> may be specified as either as an Object or as a String: /// Specify as an Object /// Example usage: /// <code>layout: { /// type: 'vbox', /// align: 'left' /// } /// </code> /// <li><p><strong>type</strong></p> /// <p>The layout type to be used for this container. 
If not specified, /// a default <see cref="Ext.layout.container.Auto">Ext.layout.container.Auto</see> will be created and used.</p> /// <p>Valid layout <c>type</c> values are:</p> /// <ul> /// <li><see cref="Ext.layout.container.Auto">Auto</see> - <strong>Default</strong></li> /// <li><see cref="Ext.layout.container.Card">card</see></li> /// <li><see cref="Ext.layout.container.Fit">fit</see></li> /// <li><see cref="Ext.layout.container.HBox">hbox</see></li> /// <li><see cref="Ext.layout.container.VBox">vbox</see></li> /// <li><see cref="Ext.layout.container.Anchor">anchor</see></li> /// <li><see cref="Ext.layout.container.Table">table</see></li> /// </ul> /// </li> /// <li><p>Layout specific configuration properties</p> /// <p>Additional layout specific configuration properties may also be /// specified. For complete details regarding the valid config options for /// each layout type, see the layout class corresponding to the <c>type</c> /// specified.</p></li> /// Specify as a String /// Example usage: /// <code>layout: 'vbox' /// </code> /// <li><p><strong>layout</strong></p> /// <p>The layout <c>type</c> to be used for this container (see list /// of valid layout type values above).</p> /// <p>Additional layout specific configuration properties. For complete /// details regarding the valid config options for each layout type, see the /// layout class corresponding to the <c>layout</c> specified.</p></li> /// Configuring the default layout type /// <code>If a certain Container class has a default layout (For example a <see cref="Ext.toolbar.Toolbar">Toolbar</see> /// with a default `Box` layout), then to simply configure the default layout, /// use an object, but without the `type` property: /// xtype: 'toolbar', /// layout: { /// pack: 'center' /// } /// </code> /// </summary> public object layout; /// <summary> /// If true, suspend calls to doLayout. Useful when batching multiple adds to a container /// and not passing them as multiple arguments or an array. 
/// Defaults to: <c>false</c> /// </summary> public bool suspendLayout; /// <summary> /// The MixedCollection containing all the child items of this container. /// </summary> public Ext.util.AbstractMixedCollection items{get;set;} /// <summary> /// The number of container layout calls made on this object. /// Defaults to: <c>0</c> /// </summary> private JsNumber layoutCounter{get;set;} /// <summary> /// Adds Component(s) to this Container. /// Description: /// <li>Fires the <see cref="Ext.container.AbstractContainerEvents.beforeadd">beforeadd</see> event before adding.</li> /// <li>The Container's <see cref="Ext.container.AbstractContainerConfig.defaults">default config values</see> will be applied /// accordingly (see <c><see cref="Ext.container.AbstractContainerConfig.defaults">defaults</see></c> for details).</li> /// <li>Fires the <c><see cref="Ext.container.AbstractContainerEvents.add">add</see></c> event after the component has been added.</li> /// Notes: /// If the Container is <strong>already rendered</strong> when <c>add</c> /// is called, it will render the newly added Component into its content area. /// <strong>If</strong> the Container was configured with a size-managing <see cref="Ext.container.AbstractContainerConfig.layout">layout</see> manager, /// the Container will recalculate its internal layout at this time too. /// Note that the default layout manager simply renders child Components sequentially /// into the content area and thereafter performs no sizing. /// If adding multiple new child Components, pass them as an array to the <c>add</c> method, /// so that only one layout recalculation is performed. /// <code>tb = new <see cref="Ext.toolbar.Toolbar">Ext.toolbar.Toolbar</see>({ /// renderTo: document.body /// }); // toolbar is rendered /// // add multiple items. 
/// // (<see cref="Ext.container.AbstractContainerConfig.defaultType">defaultType</see> for <see cref="Ext.toolbar.Toolbar">Toolbar</see> is 'button') /// tb.add([{text:'Button 1'}, {text:'Button 2'}]); /// </code> /// To inject components between existing ones, use the <see cref="Ext.container.AbstractContainer.insert">insert</see> method. /// Warning: /// Components directly managed by the BorderLayout layout manager may not be removed /// or added. See the Notes for <see cref="Ext.layout.container.Border">BorderLayout</see> for /// more details. /// </summary> /// <param name="component"><p>Either one or more Components to add or an Array of Components to add. /// See <c><see cref="Ext.container.AbstractContainerConfig.items">items</see></c> for additional information.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see>[]/<see cref="Ext.Component">Ext.Component</see></span><div><p>The Components that were added.</p> /// </div> /// </returns> public object[] add(params object[] component){return null;} /// <summary> /// Invoked after the Container has laid out (and rendered if necessary) /// its child Components. /// <p>This is a <i>template method</i>. /// a hook into the functionality of this class. /// Feel free to override it in child classes.</p> /// </summary> /// <param name="layout"> /// </param> protected virtual void afterLayout(Ext.layout.container.Container layout){} /// <summary> /// Occurs before componentLayout is run. Returning false from this method /// will prevent the containerLayout from being executed. /// <p>This is a <i>template method</i>. /// a hook into the functionality of this class. /// Feel free to override it in child classes.</p> /// </summary> protected virtual void beforeLayout(){} /// <summary> /// Cascades down the component/container heirarchy from this component (passed in /// the first call), calling the specified function with each component. 
The scope /// (this reference) of the function call will be the scope provided or the current /// component. The arguments to the function will be the args provided or the current /// component. If the function returns false at any point, the cascade is stopped on /// that branch. /// </summary> /// <param name="fn"><p>The function to call</p> /// </param> /// <param name="scope"><p>The scope of the function (defaults to current component)</p> /// </param> /// <param name="args"><p>The args to call the function with. The current component /// always passed as the last argument.</p> /// </param> /// <returns> /// <span><see cref="Ext.container.Container">Ext.Container</see></span><div><p>this</p> /// </div> /// </returns> public Ext.container.Container cascade(System.Delegate fn, object scope=null, object args=null){return null;} /// <summary> /// Retrieves the first direct child of this container which matches the passed selector. /// The passed in selector must comply with an Ext.ComponentQuery selector. /// </summary> /// <param name="selector"><p>An <see cref="Ext.ComponentQuery">Ext.ComponentQuery</see> selector. If no selector is /// specified, the first child will be returned.</p> /// </param> public AbstractContainer child(object selector=null){return null;} /// <summary> /// Manually force this container's layout to be recalculated. The framework uses this internally to refresh layouts /// form most cases. /// </summary> /// <returns> /// <span><see cref="Ext.container.Container">Ext.container.Container</see></span><div><p>this</p> /// </div> /// </returns> public Ext.container.Container doLayout(){return null;} /// <summary> /// Retrieves the first descendant of this container which matches the passed selector. /// The passed in selector must comply with an Ext.ComponentQuery selector. /// </summary> /// <param name="selector"><p>An <see cref="Ext.ComponentQuery">Ext.ComponentQuery</see> selector. 
If no selector is /// specified, the first child will be returned.</p> /// </param> public AbstractContainer down(object selector=null){return null;} /// <summary> /// Gets a list of child components to enable/disable when the container is /// enabled/disabled /// </summary> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see>[]</span><div><p>Items to be enabled/disabled</p> /// </div> /// </returns> private Ext.Component[] getChildItemsToDisable(){return null;} /// <summary> /// Examines this container's items property and gets a direct child /// component of this container. /// </summary> /// <param name="comp"><p>This parameter may be any of the following:</p> /// <ul> /// <li>a <strong>String</strong> : representing the <see cref="Ext.ComponentConfig.itemId">itemId</see> /// or <see cref="Ext.ComponentConfig.id">id</see> of the child component.</li> /// <li>a <strong>Number</strong> : representing the position of the child component /// within the <see cref="Ext.container.AbstractContainer.items">items</see> <strong>property</strong></li> /// </ul> /// <p>For additional information see <see cref="Ext.util.MixedCollection.get">Ext.util.MixedCollection.get</see>.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see></span><div><p>The component (if found).</p> /// </div> /// </returns> public Ext.Component getComponent(object comp){return null;} /// <summary> /// Returns the layout instance currently associated with this Container. /// If a layout has not been instantiated yet, that is done first /// </summary> /// <returns> /// <span><see cref="Ext.layout.container.Container">Ext.layout.container.Container</see></span><div><p>The layout</p> /// </div> /// </returns> public Ext.layout.container.Container getLayout(){return null;} /// <summary> /// Inserts a Component into this Container at a specified index. 
Fires the /// beforeadd event before inserting, then fires the add /// event after the Component has been inserted. /// </summary> /// <param name="index"><p>The index at which the Component will be inserted /// into the Container's items collection</p> /// </param> /// <param name="component"><p>The child Component to insert.</p> /// <p>Ext uses lazy rendering, and will only render the inserted Component should /// it become necessary.</p> /// <p>A Component config object may be passed in order to avoid the overhead of /// constructing a real Component object if lazy rendering might mean that the /// inserted Component will not be rendered immediately. To take advantage of /// this 'lazy instantiation', set the <see cref="Ext.ComponentConfig.xtype">Ext.Component.xtype</see> config /// property to the registered type of the Component wanted.</p> /// <p>For a list of all available xtypes, see <see cref="Ext.Component">Ext.Component</see>.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see></span><div><p>component The Component (or config object) that was /// inserted with the Container's default config values applied.</p> /// </div> /// </returns> public Ext.Component insert(JsNumber index, Ext.Component component){return null;} /// <summary> /// Determines whether this Container is an ancestor of the passed Component. /// This will return true if the passed Component is anywhere within the subtree /// beneath this Container. 
/// </summary> /// <param name="possibleDescendant"><p>The Component to test for presence /// within this Container's subtree.</p> /// </param> public void isAncestor(Ext.Component possibleDescendant){} /// <summary> /// Moves a Component within the Container /// </summary> /// <param name="fromIdx"><p>The index the Component you wish to move is currently at.</p> /// </param> /// <param name="toIdx"><p>The new index for the Component.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see></span><div><p>component The Component (or config object) that was moved.</p> /// </div> /// </returns> public Ext.Component move(JsNumber fromIdx, JsNumber toIdx){return null;} /// <summary> /// This method is invoked after a new Component has been added. It /// is passed the Component which has been added. This method may /// be used to update any internal structure which may depend upon /// the state of the child items. /// <p>This is a <i>template method</i>. /// a hook into the functionality of this class. /// Feel free to override it in child classes.</p> /// </summary> /// <param name="component"> /// </param> /// <param name="position"> /// </param> protected virtual void onAdd(Ext.Component component, JsNumber position){} /// <summary> /// This method is invoked before adding a new child Component. It /// is passed the new Component, and may be used to modify the /// Component, or prepare the Container in some way. Returning /// false aborts the add operation. /// <p>This is a <i>template method</i>. /// a hook into the functionality of this class. /// Feel free to override it in child classes.</p> /// </summary> /// <param name="item"> /// </param> protected virtual void onBeforeAdd(Ext.Component item){} /// <summary> /// This method is invoked after a new Component has been /// removed. It is passed the Component which has been /// removed. 
This method may be used to update any internal /// structure which may depend upon the state of the child items. /// <p>This is a <i>template method</i>. /// a hook into the functionality of this class. /// Feel free to override it in child classes.</p> /// </summary> /// <param name="component"> /// </param> /// <param name="autoDestroy"> /// </param> protected virtual void onRemove(Ext.Component component, bool autoDestroy){} /// <summary> /// Retrieves all descendant components which match the passed selector. /// Executes an Ext.ComponentQuery.query using this container as its root. /// </summary> /// <param name="selector"><p>Selector complying to an <see cref="Ext.ComponentQuery">Ext.ComponentQuery</see> selector. /// If no selector is specified all items will be returned.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see>[]</span><div><p>Components which matched the selector</p> /// </div> /// </returns> public Ext.Component[] query(object selector=null){return null;} /// <summary> /// Retrieves all descendant components which match the passed function. /// The function should return false for components that are to be /// excluded from the selection. /// </summary> /// <param name="fn"><p>The matcher function. It will be called with a single argument, /// the component being tested.</p> /// </param> /// <param name="scope"><p>The scope in which to run the function. If not specified, /// it will default to the active component.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see>[]</span><div><p>Components matched by the passed function</p> /// </div> /// </returns> public Ext.Component[] queryBy(System.Delegate fn, object scope=null){return null;} /// <summary> /// Finds a component at any level under this container matching the id/itemId. 
/// This is a shorthand for calling ct.down('#' + id); /// </summary> /// <param name="id"><p>The id to find</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see></span><div><p>The matching id, null if not found</p> /// </div> /// </returns> public Ext.Component queryById(JsString id){return null;} /// <summary> /// Removes a component from this container. Fires the beforeremove event /// before removing, then fires the remove event after the component has /// been removed. /// </summary> /// <param name="component"><p>The component reference or id to remove.</p> /// </param> /// <param name="autoDestroy"><p>True to automatically invoke the removed Component's /// <see cref="Ext.Component">Ext.Component.destroy</see> function.</p> /// <p>Defaults to the value of this Container's <see cref="Ext.container.AbstractContainerConfig.autoDestroy">autoDestroy</see> config.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see></span><div><p>component The Component that was removed.</p> /// </div> /// </returns> public Ext.Component remove(object component, object autoDestroy=null){return null;} /// <summary> /// Removes all components from this container. /// </summary> /// <param name="autoDestroy"><p>True to automatically invoke the removed /// Component's <see cref="Ext.Component">Ext.Component.destroy</see> function. 
/// Defaults to the value of this Container's <see cref="Ext.container.AbstractContainerConfig.autoDestroy">autoDestroy</see> config.</p> /// </param> /// <returns> /// <span><see cref="Ext.Component">Ext.Component</see>[]</span><div><p>Array of the removed components</p> /// </div> /// </returns> public Ext.Component[] removeAll(object autoDestroy=null){return null;} public AbstractContainer(AbstractContainerConfig config){} public AbstractContainer(){} public AbstractContainer(params object[] args){} } #endregion #region AbstractContainerConfig /// <inheritdocs /> [JsType(JsMode.Json, Export=false, OmitOptionalParameters=true)] public partial class AbstractContainerConfig : Ext.ComponentConfig { /// <summary> /// A string component id or the numeric index of the component that should be /// initially activated within the container's layout on render. For example, /// activeItem: 'item-1' or activeItem: 0 (index 0 = the first item in the /// container's collection). activeItem only applies to layout styles that can /// display items one at a time (like Ext.layout.container.Card and /// Ext.layout.container.Fit). /// </summary> public object activeItem; /// <summary> /// If true the container will automatically destroy any contained component that is removed /// from it, else destruction must be handled manually. /// Defaults to: <c>true</c> /// </summary> public bool autoDestroy; /// <summary> /// An array of events that, when fired, should be bubbled to any parent container. /// See Ext.util.Observable.enableBubble. /// Defaults to: <c>[&quot;add&quot;, &quot;remove&quot;]</c> /// </summary> public JsString bubbleEvents; /// <summary> /// The default xtype of child Components to create in this Container when /// a child item is specified as a raw configuration object, rather than as an instantiated Component. 
/// Defaults to: <c>&quot;panel&quot;</c> /// </summary> public JsString defaultType; /// <summary> /// This option is a means of applying default settings to all added items whether added /// through the items config or via the add or insert methods. /// Defaults are applied to both config objects and instantiated components conditionally /// so as not to override existing properties in the item (see <see cref="Ext.ExtContext.applyIf">Ext.applyIf</see>). /// If the defaults option is specified as a function, then the function will be called /// using this Container as the scope (<c>this</c> reference) and passing the added item as /// the first parameter. Any resulting object from that call is then applied to the item /// as default properties. /// For example, to automatically apply padding to the body of each of a set of /// contained <see cref="Ext.panel.Panel">Ext.panel.Panel</see> items, you could pass: /// <c>defaults: {bodyStyle:'padding:15px'}</c>. /// Usage: /// <code>defaults: { // defaults are applied to items, not the container /// autoScroll: true /// }, /// items: [ /// // default will not be applied here, panel1 will be autoScroll: false /// { /// xtype: 'panel', /// id: 'panel1', /// autoScroll: false /// }, /// // this component will have autoScroll: true /// new <see cref="Ext.panel.Panel">Ext.panel.Panel</see>({ /// id: 'panel2' /// }) /// ] /// </code> /// </summary> public object defaults; /// <summary> /// True to move any component to the detachedBody when the component is /// removed from this container. This option is only applicable when the component is not destroyed while /// being removed, see autoDestroy and remove. If this option is set to false, the DOM /// of the component will remain in the current place until it is explicitly moved. 
/// Defaults to: <c>true</c> /// </summary> public bool detachOnRemove; /// <summary> /// A single item, or an array of child Components to be added to this container /// <strong>Unless configured with a <see cref="Ext.container.AbstractContainerConfig.layout">layout</see>, a Container simply renders child /// Components serially into its encapsulating element and performs no sizing or /// positioning upon them.</strong> /// Example: /// <code>// specifying a single item /// items: {...}, /// layout: 'fit', // The single items is sized to fit /// // specifying multiple items /// items: [{...}, {...}], /// layout: 'hbox', // The items are arranged horizontally /// </code> /// Each item may be: /// <li>A <see cref="Ext.Component">Component</see></li> /// <li>A Component configuration object</li> /// If a configuration object is specified, the actual type of Component to be /// instantiated my be indicated by using the <see cref="Ext.ComponentConfig.xtype">xtype</see> option. /// Every Component class has its own <see cref="Ext.ComponentConfig.xtype">xtype</see>. /// If an <see cref="Ext.ComponentConfig.xtype">xtype</see> is not explicitly specified, the /// <see cref="Ext.container.AbstractContainerConfig.defaultType">defaultType</see> for the Container is used, which by default is usually <c>panel</c>. /// Notes: /// Ext uses lazy rendering. Child Components will only be rendered /// should it become necessary. Items are automatically laid out when they are first /// shown (no sizing is done while hidden), or in response to a <see cref="Ext.container.AbstractContainer.doLayout">doLayout</see> call. /// Do not specify <see cref="Ext.panel.PanelConfig.contentEl">contentEl</see> or /// <see cref="Ext.panel.PanelConfig.html">html</see> with <c>items</c>. 
/// </summary> public object items; /// <summary> /// Important: In order for child items to be correctly sized and /// positioned, typically a layout manager must be specified through /// the layout configuration option. /// The sizing and positioning of child <see cref="Ext.container.AbstractContainerConfig.items">items</see> is the responsibility of /// the Container's layout manager which creates and manages the type of layout /// you have in mind. For example: /// If the <see cref="Ext.container.AbstractContainerConfig.layout">layout</see> configuration is not explicitly specified for /// a general purpose container (e.g. Container or Panel) the /// <see cref="Ext.layout.container.Auto">default layout manager</see> will be used /// which does nothing but render child components sequentially into the /// Container (no sizing or positioning will be performed in this situation). /// <strong>layout</strong> may be specified as either as an Object or as a String: /// Specify as an Object /// Example usage: /// <code>layout: { /// type: 'vbox', /// align: 'left' /// } /// </code> /// <li><p><strong>type</strong></p> /// <p>The layout type to be used for this container. 
If not specified, /// a default <see cref="Ext.layout.container.Auto">Ext.layout.container.Auto</see> will be created and used.</p> /// <p>Valid layout <c>type</c> values are:</p> /// <ul> /// <li><see cref="Ext.layout.container.Auto">Auto</see> - <strong>Default</strong></li> /// <li><see cref="Ext.layout.container.Card">card</see></li> /// <li><see cref="Ext.layout.container.Fit">fit</see></li> /// <li><see cref="Ext.layout.container.HBox">hbox</see></li> /// <li><see cref="Ext.layout.container.VBox">vbox</see></li> /// <li><see cref="Ext.layout.container.Anchor">anchor</see></li> /// <li><see cref="Ext.layout.container.Table">table</see></li> /// </ul> /// </li> /// <li><p>Layout specific configuration properties</p> /// <p>Additional layout specific configuration properties may also be /// specified. For complete details regarding the valid config options for /// each layout type, see the layout class corresponding to the <c>type</c> /// specified.</p></li> /// Specify as a String /// Example usage: /// <code>layout: 'vbox' /// </code> /// <li><p><strong>layout</strong></p> /// <p>The layout <c>type</c> to be used for this container (see list /// of valid layout type values above).</p> /// <p>Additional layout specific configuration properties. For complete /// details regarding the valid config options for each layout type, see the /// layout class corresponding to the <c>layout</c> specified.</p></li> /// Configuring the default layout type /// <code>If a certain Container class has a default layout (For example a <see cref="Ext.toolbar.Toolbar">Toolbar</see> /// with a default `Box` layout), then to simply configure the default layout, /// use an object, but without the `type` property: /// xtype: 'toolbar', /// layout: { /// pack: 'center' /// } /// </code> /// </summary> public object layout; /// <summary> /// If true, suspend calls to doLayout. Useful when batching multiple adds to a container /// and not passing them as multiple arguments or an array. 
/// Defaults to: <c>false</c> /// </summary> public bool suspendLayout; public AbstractContainerConfig(params object[] args){} } #endregion #region AbstractContainerEvents /// <inheritdocs /> [JsType(JsMode.Json, Export=false, OmitOptionalParameters=true)] public partial class AbstractContainerEvents : Ext.ComponentEvents { /// <summary> /// Fires after any Ext.Component is added or inserted into the container. /// <strong>This event bubbles:</strong> 'add' will also be fired when Component is added to any of /// the child containers or their childern or ... /// </summary> /// <param name="this"> /// </param> /// <param name="component"><p>The component that was added</p> /// </param> /// <param name="index"><p>The index at which the component was added to the container's items collection</p> /// </param> /// <param name="eOpts"><p>The options object passed to <see cref="Ext.util.Observable.addListener">Ext.util.Observable.addListener</see>.</p> /// </param> public void add(Ext.container.Container @this, Ext.Component component, JsNumber index, object eOpts){} /// <summary> /// Fires when the components in this container are arranged by the associated layout manager. /// </summary> /// <param name="this"> /// </param> /// <param name="layout"><p>The ContainerLayout implementation for this container</p> /// </param> /// <param name="eOpts"><p>The options object passed to <see cref="Ext.util.Observable.addListener">Ext.util.Observable.addListener</see>.</p> /// </param> public void afterlayout(Ext.container.Container @this, Ext.layout.container.Container layout, object eOpts){} /// <summary> /// Fires before any Ext.Component is added or inserted into the container. /// A handler can return false to cancel the add. 
/// </summary> /// <param name="this"> /// </param> /// <param name="component"><p>The component being added</p> /// </param> /// <param name="index"><p>The index at which the component will be added to the container's items collection</p> /// </param> /// <param name="eOpts"><p>The options object passed to <see cref="Ext.util.Observable.addListener">Ext.util.Observable.addListener</see>.</p> /// </param> public void beforeadd(Ext.container.Container @this, Ext.Component component, JsNumber index, object eOpts){} /// <summary> /// Fires before any Ext.Component is removed from the container. A handler can return /// false to cancel the remove. /// </summary> /// <param name="this"> /// </param> /// <param name="component"><p>The component being removed</p> /// </param> /// <param name="eOpts"><p>The options object passed to <see cref="Ext.util.Observable.addListener">Ext.util.Observable.addListener</see>.</p> /// </param> public void beforeremove(Ext.container.Container @this, Ext.Component component, object eOpts){} /// <summary> /// Fires after any Ext.Component is removed from the container. /// <strong>This event bubbles:</strong> 'remove' will also be fired when Component is removed from any of /// the child containers or their children or ... /// </summary> /// <param name="this"> /// </param> /// <param name="component"><p>The component that was removed</p> /// </param> /// <param name="eOpts"><p>The options object passed to <see cref="Ext.util.Observable.addListener">Ext.util.Observable.addListener</see>.</p> /// </param> public void remove(Ext.container.Container @this, Ext.Component component, object eOpts){} public AbstractContainerEvents(params object[] args){} } #endregion }
// Copyright (c) 2007-2012 Andrej Repin aka Gremlin2 // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Collections.Generic; using System.Globalization; using System.Security.Cryptography; using System.Text; using System.Xml; using System.Text.RegularExpressions; using FB2Fix.Sgml; namespace FB2Fix.ObjectModel { public class FictionBook { private static Regex librusecIdPattern; private readonly XmlDocument document; private DocumentInfoNode documentInfo; private TitleInfoNode titleInfo; private TitleInfoNode srcTitleInfo; private PublishInfoNode publishInfo; private XmlNode documentInfoNode; private XmlNode titleInfoNode; private XmlNode srcTitleInfoNode; private XmlNode publishInfoNode; private XmlNode descriptionNode; private List<CustomInfoNode> customInfos; private Fb2FixStatus documentStatus; private ModificationType modificationType; private DateTime containerDateTime; static FictionBook() { librusecIdPattern = new Regex(@"^\w{3}\s\w{3}\s\d{1,2}\s\d{2}:\d{2}:\d{2}\s\d{4}$"); } public DocumentInfoNode DocumentInfo { get { return this.documentInfo; } } public TitleInfoNode TitleInfo { get { return this.titleInfo; } } public TitleInfoNode SrcTitleInfo { get { return this.srcTitleInfo; } } public PublishInfoNode PublishInfo { get { return this.publishInfo; } } public XmlNode DescriptionNode { get { return this.descriptionNode; } } public XmlDocument Document { get { return this.document; } } public Fb2FixStatus DocumentStatus { get { return 
this.documentStatus; } set { if(this.documentStatus != value) { ChangeDocumentStatus(value); this.documentStatus = value; } } } public ModificationType ModificationType { get { return this.modificationType; } set { this.modificationType |= value; } } public bool Modified { get { return (this.modificationType != ModificationType.None); } } public float Version { get { return this.documentInfo.Version ?? 0.0f; } set { if((this.documentInfo.Version ?? 0.0) != value) { ChangeDocumentVersion(value); this.documentInfo.Version = value; } } } public DateTime ContainerDateTime { get { return this.containerDateTime; } set { this.containerDateTime = value; } } public FictionBook(XmlDocument document) { if (document == null) { throw new ArgumentNullException("document"); } this.document = document; this.documentStatus = Fb2FixStatus.None; XmlNode statusInfoNode = document.SelectSingleNode("//FictionBook/description/custom-info[@info-type='fb2fix-status']"); if (statusInfoNode != null && statusInfoNode.NodeType == XmlNodeType.Element) { if (String.IsNullOrEmpty(statusInfoNode.InnerText)) { try { this.documentStatus = (Fb2FixStatus)Enum.Parse(typeof(Fb2FixStatus), statusInfoNode.InnerText, true); } catch (ArgumentException) { } } } this.documentInfoNode = document.SelectSingleNode("//FictionBook/description/document-info"); if (this.documentInfoNode != null && this.documentInfoNode.NodeType == XmlNodeType.Element) { documentInfo = new DocumentInfoNode(); documentInfo.Load(this.documentInfoNode as XmlElement); } this.titleInfoNode = document.SelectSingleNode("//FictionBook/description/title-info"); if (this.titleInfoNode != null && this.titleInfoNode.NodeType == XmlNodeType.Element) { titleInfo = new TitleInfoNode(); titleInfo.Load(this.titleInfoNode as XmlElement); } if (titleInfo == null) { throw new InvalidFictionBookFormatException(); } this.srcTitleInfoNode = document.SelectSingleNode("//FictionBook/description/src-title-info"); if (this.srcTitleInfoNode != null && 
this.srcTitleInfoNode.NodeType == XmlNodeType.Element) { srcTitleInfo = new TitleInfoNode(); srcTitleInfo.Load(this.srcTitleInfoNode as XmlElement); } this.publishInfoNode = document.SelectSingleNode("//FictionBook/description/publish-info"); if (this.publishInfoNode != null && this.publishInfoNode.NodeType == XmlNodeType.Element) { publishInfo = new PublishInfoNode(); publishInfo.Load(this.publishInfoNode as XmlElement); } this.descriptionNode = document.SelectSingleNode("//FictionBook/description"); if (this.descriptionNode == null) { throw new InvalidFictionBookFormatException(); } XmlNodeList nodes = document.SelectNodes("//FictionBook/description/custom-info"); customInfos = new List<CustomInfoNode>(nodes.Count); foreach (XmlNode node in nodes) { if (node.NodeType == XmlNodeType.Element) { CustomInfoNode item = new CustomInfoNode(); item.Load((XmlElement) node); if (!String.IsNullOrEmpty(item.InfoType)) { switch (item.InfoType) { case "fb2fix-status": case "librusec-id": case "previous-id": continue; } } item.XmlNode = node; customInfos.Add(item); } } this.modificationType = ModificationType.None; this.containerDateTime = DateTime.Now; } private void ChangeDocumentStatus(Fb2FixStatus status) { XmlNode statusInfoNode = document.SelectSingleNode("//FictionBook/description/custom-info[@info-type='fb2fix-status']"); if (statusInfoNode != null && statusInfoNode.NodeType == XmlNodeType.Element) { statusInfoNode.InnerText = Enum.GetName(typeof(Fb2FixStatus), status); } else { XmlElement xmlStatusNode = document.CreateElement("custom-info"); xmlStatusNode.InnerText = Enum.GetName(typeof(Fb2FixStatus), Fb2FixStatus.Passed); XmlAttribute statusAttr = document.CreateAttribute("info-type"); statusAttr.Value = "fb2fix-status"; xmlStatusNode.Attributes.Append(statusAttr); this.descriptionNode.AppendChild(xmlStatusNode); } } private void ChangeDocumentVersion(float version) { XmlNode versionInfoNode = 
document.SelectSingleNode("//FictionBook/description/document-info/version"); if (versionInfoNode != null && versionInfoNode.NodeType == XmlNodeType.Element) { versionInfoNode.InnerText = DocumentInfoNode.FormatVersion(version); } else { throw new InvalidFictionBookFormatException(); } } private static string ComputeDocumentId(string value) { byte[] hash; StringBuilder documentId = new StringBuilder(40); MD5CryptoServiceProvider hashProvider = new MD5CryptoServiceProvider(); hash = hashProvider.ComputeHash(Encoding.UTF8.GetBytes(value)); documentId.Append("fb2-"); for (int index = 0; index < hash.Length; index++) { switch (index) { case 4: case 6: case 8: case 10: documentId.Append("-"); documentId.Append(hash[index].ToString("X2")); break; default: documentId.Append(hash[index].ToString("X2")); break; } } return documentId.ToString(); } private void CheckAuthorInfo(IEnumerable<AuthorInfoNode> authors) { foreach (AuthorInfoNode author in authors) { if (author.FirstName == null && author.LastName != null) { author.FirstName = String.Empty; this.modificationType |= ModificationType.Description; } else if (author.FirstName != null && author.LastName == null) { author.LastName = String.Empty; this.modificationType |= ModificationType.Description; } else if ((String.IsNullOrEmpty(author.FirstName) && String.IsNullOrEmpty(author.LastName)) && String.IsNullOrEmpty(author.NickName)) { author.NickName = "FB2Fix"; this.modificationType |= ModificationType.Description; } } } public void CheckDocumentHeader(Fb2FixArguments options) { foreach (TitleInfoNode infoNode in new TitleInfoNode[] { titleInfo, srcTitleInfo }) { if (infoNode == null) { continue; } if (infoNode.Genres.Count == 0) { infoNode.Genres.Add(new GenreInfoNode("nonfiction")); this.modificationType |= ModificationType.Description; } CheckAuthorInfo(infoNode.Authors); if (infoNode.BookTitle == null) { if (publishInfo != null && !String.IsNullOrEmpty(publishInfo.BookName)) { infoNode.BookTitle = 
publishInfo.BookName; this.modificationType |= ModificationType.Description; } else if(titleInfo != null && !String.IsNullOrEmpty(titleInfo.BookTitle)) { infoNode.BookTitle = titleInfo.BookTitle; this.modificationType |= ModificationType.Description; } else { throw new InvalidFictionBookFormatException(); } } if (infoNode.Lang == null) { infoNode.Lang = "ru"; this.modificationType |= ModificationType.Description; } CheckAuthorInfo(infoNode.Translators); } if (options.mapGenres) { foreach (TitleInfoNode infoNode in new TitleInfoNode[] { titleInfo, srcTitleInfo }) { if (infoNode == null) { continue; } foreach (GenreInfoNode genre in infoNode.Genres) { if(!genre.IsEmpty()) { if(GenreTable.Table.MapTable.ContainsKey(genre.Genre)) { genre.Genre = GenreTable.Table.MapTable[genre.Genre]; this.modificationType |= ModificationType.Description; } } } Set<GenreInfoNode> genres = new Set<GenreInfoNode>(); foreach (GenreInfoNode genre in infoNode.Genres) { genres.Add(genre); } infoNode.Genres.Clear(); infoNode.Genres.AddRange(genres); } } if (documentInfo == null) { documentInfo = new DocumentInfoNode(); AuthorInfoNode documentAuthor = new AuthorInfoNode(); documentAuthor.NickName = "FB2Fix"; documentInfo.Authors.Add(documentAuthor); documentInfo.Id = ComputeDocumentId(document.DocumentElement.InnerText); documentInfo.Date = DateTime.Now; documentInfo.ProgramUsed = "FB2Fix"; documentInfo.Version = 0.0f; this.modificationType |= ModificationType.DocumentInfo; } else { CheckAuthorInfo(documentInfo.Authors); if (documentInfo.Date == null) { documentInfo.Date = DateTime.Now; this.modificationType |= ModificationType.Description; } if (documentInfo.Version == null) { documentInfo.Version = 0.0f; this.modificationType |= ModificationType.DocumentInfo; } if (!options.regenerateId) { string programUsed = this.documentInfo.ProgramUsed ?? 
String.Empty;

//if (String.Compare(documentInfo.ProgramUsed, "LibRusEc kit", true, CultureInfo.InvariantCulture) == 0)
// Documents produced by "LibRusEc kit" carry a timestamp-style id; preserve it
// as a librusec-id custom-info entry and replace it with a stable hash-based id.
if (programUsed.IndexOf("LibRusEc kit", StringComparison.InvariantCultureIgnoreCase) >= 0 && !String.IsNullOrEmpty(documentInfo.Id))
{
    //@"^\w{3}\s\w{3}\s\d{1,2}\s\d{2}:\d{2}:\d{2}\s\d{4}$"
    if (librusecIdPattern.Match(documentInfo.Id).Success)
    {
        if (document.SelectSingleNode("//FictionBook/description/custom-info[@info-type='librusec-id']") == null)
        {
            XmlElement xmlLibRusEcId = document.CreateElement("custom-info");
            xmlLibRusEcId.InnerText = documentInfo.Id;

            XmlAttribute attr = document.CreateAttribute("info-type");
            attr.Value = "librusec-id";
            xmlLibRusEcId.Attributes.Append(attr);

            this.descriptionNode.AppendChild(xmlLibRusEcId);

            documentInfo.Id = ComputeDocumentId(document.DocumentElement.InnerText);
            this.modificationType |= ModificationType.Description;
        }
    }
}
}
else
{
    // BUGFIX: the condition was previously String.IsNullOrEmpty(documentInfo.Id),
    // which saved an *empty* string as the "previous-id" and threw away any real
    // old id. The old id should only be preserved when it is non-empty.
    if (!String.IsNullOrEmpty(documentInfo.Id))
    {
        XmlElement xmlPreviousId = this.document.SelectSingleNode("//FictionBook/description/custom-info[@info-type='previous-id']") as XmlElement;
        if (xmlPreviousId == null)
        {
            xmlPreviousId = document.CreateElement("custom-info");

            XmlAttribute attr = document.CreateAttribute("info-type");
            attr.Value = "previous-id";
            xmlPreviousId.Attributes.Append(attr);

            this.descriptionNode.AppendChild(xmlPreviousId);
        }
        xmlPreviousId.InnerText = documentInfo.Id;
    }

    documentInfo.Id = ComputeDocumentId(document.DocumentElement.InnerText);
    this.modificationType |= ModificationType.Description;
}

// Last resort: a document must always end up with some id.
if (String.IsNullOrEmpty(documentInfo.Id))
{
    documentInfo.Id = ComputeDocumentId(document.DocumentElement.InnerText);
    this.modificationType |= ModificationType.Description;
}

CheckAuthorInfo(documentInfo.Publishers);
}

// Re-serialize the (possibly repaired) header nodes back into the document.
XmlElement xmlNewTitleInfo = document.CreateElement("title-info");
xmlNewTitleInfo = titleInfo.Store(document, xmlNewTitleInfo);

this.descriptionNode.ReplaceChild(xmlNewTitleInfo, titleInfoNode);
titleInfoNode = xmlNewTitleInfo;

if (srcTitleInfo != null)
{
XmlElement xmlNewSrcTitleInfo = document.CreateElement("src-title-info");
xmlNewSrcTitleInfo = srcTitleInfo.Store(document, xmlNewSrcTitleInfo);

this.descriptionNode.ReplaceChild(xmlNewSrcTitleInfo, srcTitleInfoNode);
srcTitleInfoNode = xmlNewSrcTitleInfo;
}

XmlElement xmlNewDocumentInfo = document.CreateElement("document-info");
xmlNewDocumentInfo = documentInfo.Store(document, xmlNewDocumentInfo);

if (documentInfoNode == null)
{
    // <document-info> did not exist: insert it in schema order, i.e. right
    // after <src-title-info> when present, otherwise after <title-info>.
    if (srcTitleInfoNode == null)
    {
        this.descriptionNode.InsertAfter(xmlNewDocumentInfo, titleInfoNode);
    }
    else
    {
        this.descriptionNode.InsertAfter(xmlNewDocumentInfo, srcTitleInfoNode);
    }
}
else
{
    this.descriptionNode.ReplaceChild(xmlNewDocumentInfo, documentInfoNode);
}

if (publishInfo != null)
{
    XmlElement xmlNewPublishInfo = document.CreateElement("publish-info");
    xmlNewPublishInfo = publishInfo.Store(document, xmlNewPublishInfo);

    // Store() may return null when there is nothing worth serializing;
    // in that case the stale node is simply removed.
    if (xmlNewPublishInfo != null)
    {
        this.descriptionNode.ReplaceChild(xmlNewPublishInfo, publishInfoNode);
    }
    else
    {
        this.descriptionNode.RemoveChild(publishInfoNode);
    }
}

foreach (CustomInfoNode customInfoNode in customInfos)
{
    XmlElement element = document.CreateElement("custom-info");
    element = customInfoNode.Store(document, element);

    if (element != null)
    {
        // BUGFIX: XmlNode.ReplaceChild takes (newChild, oldChild). The
        // arguments were swapped, which made ReplaceChild throw at runtime
        // because the freshly created 'element' is not yet a child of
        // descriptionNode. Every other ReplaceChild call in this method
        // already uses the (new, old) order.
        this.descriptionNode.ReplaceChild(element, customInfoNode.XmlNode);
    }
    else
    {
        this.descriptionNode.RemoveChild(customInfoNode.XmlNode);
    }
}
}
}
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.ComponentModel.Composition.Extensibility; using System.ComponentModel.Composition.Factories; using System.ComponentModel.Composition.Primitives; using System.Linq; using System.UnitTesting; using Microsoft.Internal; using Xunit; namespace System.ComponentModel.Composition.ReflectionModel { public class ReflectionComposablePartTests { [Fact] [ActiveIssue(25498, TestPlatforms.AnyUnix)] // System.Reflection.ReflectionTypeLoadException : Unable to load one or more of the requested types.Retrieve the LoaderExceptions property for more information. public void Constructor1_DefinitionAsDefinitionArgument_ShouldSetOriginProperty() { var expectations = Expectations.GetAttributedDefinitions(); foreach (var e in expectations) { var definition = (ICompositionElement)new ReflectionComposablePart(e); Assert.Same(e, definition.Origin); } } [Fact] public void Constructor1_NullAsDefinitionArgument_ShouldThrowArgumentNull() { Assert.Throws<ArgumentNullException>("definition", () => { new ReflectionComposablePart((ReflectionComposablePartDefinition)null); }); } [Fact] public void Constructor2_NullAsAttributedPartArgument_ShouldThrowArgumentNull() { Assert.Throws<ArgumentNullException>("attributedPart", () => { new ReflectionComposablePart(PartDefinitionFactory.CreateAttributed(), (object)null); }); } [Fact] public void Constructor2_ValueTypeAsAttributedPartArgument_ShouldThrowArgument() { Assert.Throws<ArgumentException>("attributedPart", () => { new ReflectionComposablePart(PartDefinitionFactory.CreateAttributed(), 42); }); } [Fact] public void Constructor1_AttributedComposablePartDefintion_ShouldProduceValidObject() { var definition = PartDefinitionFactory.CreateAttributed(typeof(MyExport)); var part = new 
ReflectionComposablePart(definition); Assert.Equal(definition, part.Definition); Assert.NotNull(part.Metadata); Assert.False(part is IDisposable); } [Fact] public void Constructor1_AttributedComposablePartDefintion_Disposable_ShouldProduceValidObject() { var definition = PartDefinitionFactory.CreateAttributed(typeof(DisposablePart)); var part = new DisposableReflectionComposablePart(definition); Assert.Equal(definition, part.Definition); Assert.NotNull(part.Metadata); Assert.True(part is IDisposable); } [Fact] public void Constructor1_Type_ShouldProduceValidObject() { var part = new ReflectionComposablePart(PartDefinitionFactory.CreateAttributed(typeof(MyExport))); } [Fact] public void Constructor1_Object_ShouldProduceValidObject() { var part = new ReflectionComposablePart(PartDefinitionFactory.CreateAttributed(typeof(MyExport)), new MyExport()); } [Fact] public void Metadata_WhenDisposed_ShouldThrowObjectDisposed() { var part = CreateDefaultDisposablePart(); ((IDisposable)part).Dispose(); ExceptionAssert.ThrowsDisposed(part, () => { var metadata = part.Metadata; }); } [Fact] public void ImportDefinitions_WhenDisposed_ShouldThrowObjectDisposed() { var part = CreateDefaultDisposablePart(); ((IDisposable)part).Dispose(); ExceptionAssert.ThrowsDisposed(part, () => { var definitions = part.ImportDefinitions; }); } [Fact] public void ExportDefinitions_WhenDisposed_ShouldThrowObjectDisposed() { var part = CreateDefaultDisposablePart(); ((IDisposable)part).Dispose(); ExceptionAssert.ThrowsDisposed(part, () => { var definitions = part.ExportDefinitions; }); } [Fact] public void OnComposed_WhenDisposed_ShouldThrowObjectDisposed() { var part = CreateDefaultDisposablePart(); ((IDisposable)part).Dispose(); ExceptionAssert.ThrowsDisposed(part, () => { part.Activate(); }); } [Fact] public void OnComposed_MissingPostImportsOnInstance_ShouldThrowComposition() { var part = CreatePart(new MySharedPartExport()); // Dev10:484204 - This used to cause a failure but after we made // 
ReflectionComposablePart internal we needed to back remove this // validation for post imports to make declarative composition work. //part.Activate().VerifyFailure(CompositionIssueId.ImportNotSetOnPart); part.Activate(); } [Fact] public void OnComposed_ProperlyComposed_ShouldSucceed() { var import = new TrivialImporter(); var export = new TrivialExporter(); var part = CreatePart(import); var importDef = part.ImportDefinitions.First(); part.SetImport(importDef, CreateSimpleExports(export)); part.Activate(); Assert.True(export.done, "OnImportsSatisfied should have been called"); } [Fact] public void OnComposed_UnhandledExceptionThrowInOnImportsSatisfied_ShouldThrowComposablePart() { var part = CreatePart(typeof(ExceptionDuringINotifyImport)); var definition = part.ImportDefinitions.First(); part.SetImport(definition, CreateSimpleExports(21)); CompositionAssert.ThrowsPart<NotImplementedException>(RetryMode.DoNotRetry, () => { part.Activate(); }); } [Fact] public void SetImport_WhenDisposed_ShouldThrowObjectDisposed() { var part = CreateDefaultDisposablePart(); var definition = part.ImportDefinitions.First(); ((IDisposable)part).Dispose(); ExceptionAssert.ThrowsDisposed(part, () => { part.SetImport(definition, Enumerable.Empty<Export>()); }); } [Fact] public void SetImport_NullAsImportDefinitionArgument_ShouldThrowArgumentNull() { var part = CreateDefaultPart(); Assert.Throws<ArgumentNullException>("definition", () => { part.SetImport((ImportDefinition)null, Enumerable.Empty<Export>()); }); } [Fact] public void SetImport_NullAsExportsArgument_ShouldThrowArgumentNull() { var part = CreatePart(typeof(MySharedPartExport)); var import = part.ImportDefinitions.First(); Assert.Throws<ArgumentNullException>("exports", () => { part.SetImport(import, (IEnumerable<Export>)null); }); } [Fact] public void SetImport_ExportsArrayWithNullElementAsExportsArgument_ShouldThrowArgument() { var part = CreatePart(typeof(MySharedPartExport)); var definition = 
part.ImportDefinitions.First(); Assert.Throws<ArgumentException>("exports", () => { part.SetImport(definition, new Export[] { null }); }); } [Fact] public void SetImport_WrongDefinitionAsDefinitionArgument_ShouldThrowArgument() { var part = CreateDefaultPart(); var definition = ImportDefinitionFactory.Create(); Assert.Throws<ArgumentException>("definition", () => { part.SetImport(definition, Enumerable.Empty<Export>()); }); } [Fact] public void SetImport_SetNonRecomposableDefinitionAsDefinitionArgumentAfterOnComposed_ShouldThrowInvalidOperation() { var part = CreatePartWithNonRecomposableImport(); var definition = part.ImportDefinitions.First(); part.SetImport(definition, Enumerable.Empty<Export>()); part.Activate(); ExceptionAssert.Throws<InvalidOperationException>(() => { part.SetImport(definition, Enumerable.Empty<Export>()); }); } [Fact] public void SetImport_ZeroOrOneDefinitionAsDefinitionArgumentAndTwoExportsAsExportsArgument_ShouldThrowArgument() { var part = CreatePartWithZeroOrOneImport(); var definition = part.ImportDefinitions.First(); var exports = ExportFactory.Create("Import", 2); Assert.Throws<ArgumentException>("exports", () => { part.SetImport(definition, exports); }); } [Fact] public void SetImport_ExactlyOneDefinitionAsDefinitionArgumentAndTwoExportsAsExportsArgument_ShouldThrowArgument() { var part = CreatePartWithExactlyOneImport(); var definition = part.ImportDefinitions.First(); var exports = ExportFactory.Create("Import", 2); Assert.Throws<ArgumentException>("exports", () => { part.SetImport(definition, exports); }); } [Fact] public void SetImport_ExactlyOneDefinitionAsDefinitionArgumentAndEmptyExportsAsExportsArgument_ShouldThrowArgument() { var part = CreatePartWithExactlyOneImport(); var definition = part.ImportDefinitions.First(); var exports = Enumerable.Empty<Export>(); Assert.Throws<ArgumentException>("exports", () => { part.SetImport(definition, exports); }); } [Fact] public void 
SetImport_WrongTypeExportGiven_ShouldThrowComposablePart() { var part = CreatePart(new MySharedPartExport()); var import = part.ImportDefinitions.First(); CompositionAssert.ThrowsPart(() => { part.SetImport(import, CreateSimpleExports("21")); }); } [Fact] public void SetImport_SetPostValueAndSetAgainOnInstance_ShouldSetProperty() { var import = new MySharedPartExport(); var part = CreatePart(import); var importDef = part.ImportDefinitions.First(); part.SetImport(importDef, CreateSimpleExports(21)); Assert.NotEqual(21, import.Value); part.Activate(); Assert.Equal(21, import.Value); part.SetImport(importDef, CreateSimpleExports(42)); Assert.NotEqual(42, import.Value); part.Activate(); Assert.Equal(42, import.Value); } [Fact] public void GetExportedValue_WhenDisposed_ShouldThrowObjectDisposed() { var part = CreateDefaultDisposablePart(); var definition = part.ExportDefinitions.First(); ((IDisposable)part).Dispose(); ExceptionAssert.ThrowsDisposed(part, () => { part.GetExportedValue(definition); }); } [Fact] public void GetExportedValue_NullAsDefinitionArgument_ShouldThrowArgumentNull() { var part = CreateDefaultPart(); Assert.Throws<ArgumentNullException>("definition", () => { part.GetExportedValue((ExportDefinition)null); }); } [Fact] public void GetExportedValue_WrongDefinitionAsDefinitionArgument_ShouldThrowArgument() { var part = CreateDefaultPart(); var definition = ExportDefinitionFactory.Create(); Assert.Throws<ArgumentException>("definition", () => { part.GetExportedValue(definition); }); } [Fact] public void GetExportedValue_MissingPrerequisiteImport_ShouldThrowInvalidOperation() { var part = CreatePart(typeof(SimpleConstructorInjectedObject)); var definition = part.ExportDefinitions.First(); ExceptionAssert.Throws<InvalidOperationException>(() => { part.GetExportedValue(definition); }); } [Fact] [ActiveIssue(484204)] public void GetExportedValue_MissingPostImports_ShouldThrowComposition() { var part = CreatePart(typeof(MySharedPartExport)); // Signal that 
the composition should be finished part.Activate(); var definition = part.ExportDefinitions.First(); // Dev10:484204 - This used to cause a failure but after we made // ReflectionComposablePart internal we needed to back remove this // validation for post imports to make declarative composition work. CompositionAssert.ThrowsError(ErrorId.ImportNotSetOnPart, () => { part.GetExportedValue(definition); }); } [Fact] public void GetExportedValue_NoConstructorOnDefinition_ShouldThrowComposablePart() { var part = CreatePart(typeof(ClassWithNoMarkedOrDefaultConstructor)); var definition = part.ExportDefinitions.First(); CompositionAssert.ThrowsPart(() => { part.GetExportedValue(definition); }); } [Fact] public void GetExportedValue_UnhandledExceptionThrowInConstructor_ShouldThrowComposablePart() { var part = CreatePart(typeof(ExportWithExceptionDuringConstruction)); var definition = part.ExportDefinitions.First(); CompositionAssert.ThrowsPart<NotImplementedException>(() => { part.GetExportedValue(definition); }); } [Fact] public void GetExportedValue_GetObjectAfterSetPreImport_ShouldGetValue() { var part = CreatePart(typeof(SimpleConstructorInjectedObject)); var import = part.ImportDefinitions.First(); part.SetImport(import, CreateSimpleExports(21)); part.Activate(); var definition = part.ExportDefinitions.First(); var exportObject = (SimpleConstructorInjectedObject)part.GetExportedValue(definition); Assert.Equal(21, exportObject.CISimpleValue); } [Fact] public void GetExportedValue_GetObjectAfterSetPostImport_ShouldGetValue() { var part = CreatePart(typeof(MySharedPartExport)); var import = part.ImportDefinitions.First(); part.SetImport(import, CreateSimpleExports(21)); part.Activate(); var definition = part.ExportDefinitions.First(); var exportObject = (MySharedPartExport)part.GetExportedValue(definition); Assert.NotNull(exportObject); Assert.Equal(21, exportObject.Value); } [Fact] public void GetExportedValue_CallMultipleTimes_ShouldReturnSame() { var part = 
CreatePart(typeof(MySharedPartExport)); var import = part.ImportDefinitions.First(); part.SetImport(import, CreateSimpleExports(21)); part.Activate(); var definition = part.ExportDefinitions.First(); var exportedValue1 = part.GetExportedValue(definition); var exportedValue2 = part.GetExportedValue(definition); Assert.Same(exportedValue1, exportedValue2); } [Fact] public void GetExportedValue_FromStaticClass_ShouldReturnExport() { var part = CreatePart(typeof(StaticExportClass)); var definition = part.ExportDefinitions.First(); var exportObject = (string)part.GetExportedValue(definition); Assert.Equal("StaticString", exportObject); } [Fact] public void GetExportedValue_OptionalPostNotGiven_ShouldReturnValidObject() { var part = CreatePart(typeof(ClassWithOptionalPostImport)); part.Activate(); var definition = part.ExportDefinitions.First(); var exportObject = (ClassWithOptionalPostImport)part.GetExportedValue(definition); Assert.Null(exportObject.Formatter); } [Fact] public void GetExportedValue_OptionalPreNotGiven_ShouldReturnValidObject() { var part = CreatePart(typeof(ClassWithOptionalPreImport)); part.Activate(); var definition = part.ExportDefinitions.First(); var exportedValue = (ClassWithOptionalPreImport)part.GetExportedValue(definition); Assert.Null(exportedValue.Formatter); } [Fact] [ActiveIssue(25498, TestPlatforms.AnyUnix)] // System.Reflection.ReflectionTypeLoadException : Unable to load one or more of the requested types.Retrieve the LoaderExceptions property for more information. public void ICompositionElementDisplayName_ShouldReturnTypeDisplayName() { var expectations = Expectations.GetAttributedTypes(); foreach (var e in expectations) { var part = (ICompositionElement)CreatePart(e); Assert.Equal(e.GetDisplayName(), part.DisplayName); } } [Fact] [ActiveIssue(25498, TestPlatforms.AnyUnix)] // System.Reflection.ReflectionTypeLoadException : Unable to load one or more of the requested types. Retrieve the LoaderExceptions property for more information. 
public void ToString_ShouldReturnICompositionElementDisplayName()
{
    var expectations = Expectations.GetAttributedTypes();
    foreach (var e in expectations)
    {
        var part = (ICompositionElement)CreatePart(e);

        Assert.Equal(part.DisplayName, part.ToString());
    }
}

[PartNotDiscoverable]
public class PropertyExporter
{
    [Export]
    public object Property { get { return new object(); } }
}

[PartNotDiscoverable]
public class FieldExporter
{
    [Export]
    public object Field = null;
}

[PartNotDiscoverable]
public class MethodExporter
{
    [Export("Method")]
    public void Method() { }
}

[PartNotDiscoverable]
[Export]
public class TypeExporter
{
}

[Fact]
public void GetExportedObjectAlwaysReturnsSameReference_ForProperty()
{
    var cp = CreatePart(new PropertyExporter());
    var ed = cp.ExportDefinitions.Single();

    var eo1 = cp.GetExportedValue(ed);
    var eo2 = cp.GetExportedValue(ed);

    Assert.Same(eo1, eo2);
}

[Fact]
public void GetExportedObjectAlwaysReturnsSameReference_ForField()
{
    var exporter = new FieldExporter();
    // BUGFIX: the part must wrap 'exporter' — the instance whose Field is
    // mutated below. Previously a second, unrelated FieldExporter instance
    // was passed to CreatePart, so the mutations never reached the part
    // under test and the export-caching behavior was not actually exercised.
    var cp = CreatePart(exporter);
    var ed = cp.ExportDefinitions.Single();

    exporter.Field = new object();
    var eo1 = cp.GetExportedValue(ed);

    exporter.Field = new object();
    var eo2 = cp.GetExportedValue(ed);

    Assert.Same(eo1, eo2);
}

[Fact]
public void GetExportedObjectAlwaysReturnsSameReference_ForMethod()
{
    var cp = CreatePart(new MethodExporter());
    var ed = cp.ExportDefinitions.Single();

    var eo1 = cp.GetExportedValue(ed);
    var eo2 = cp.GetExportedValue(ed);

    Assert.Same(eo1, eo2);
}

[Fact]
public void GetExportedObjectAlwaysReturnsSameReference_ForType()
{
    var cp = CreatePart(new TypeExporter());
    var ed = cp.ExportDefinitions.Single();

    var eo1 = cp.GetExportedValue(ed);
    var eo2 = cp.GetExportedValue(ed);

    Assert.Same(eo1, eo2);
}

[PartNotDiscoverable]
public class MethodWithoutContractName
{
    [Export]
    public void MethodWithoutContractNameNotAllowed() { }
}

public interface IContract
{
}

[AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)]
public class
CustomImportAttributeInvalidTarget : ImportAttribute { public CustomImportAttributeInvalidTarget() : base(typeof(IContract)) { } } [PartNotDiscoverable] public class ImportWithCustomImport { [CustomImport] IContract ImportWithCustomAttributeImport { get; set; } } [PartNotDiscoverable] public class ImportWithCustomImportInvalidTarget { [CustomImportAttributeInvalidTarget] void InvalidImport() { } } [Fact] public void ImportDefinitions_ImportWithCustomAttributeImports() { var part = CreatePart(typeof(ImportWithCustomImport)); Assert.Equal(1, part.ImportDefinitions.Count()); ContractBasedImportDefinition import = part.ImportDefinitions.First() as ContractBasedImportDefinition; Assert.NotNull(import); Assert.Equal(AttributedModelServices.GetContractName(typeof(IContract)), import.ContractName); Assert.Equal(AttributedModelServices.GetTypeIdentity(typeof(IContract)), import.RequiredTypeIdentity); } [Fact] public void ImportDefinitions_ImportWithCustomImportInvalidTarget_ShouldbeIgnored() { var part = CreatePart(typeof(ImportWithCustomImportInvalidTarget)); Assert.Equal(0, part.ImportDefinitions.Count()); } [PartNotDiscoverable] public class ImportManyWithCustomImportMany { [CustomImportMany] IContract ImportManyWithCustomAttributeImportMany { get; set; } } [PartNotDiscoverable] public class ImportManyWithCustomImportManyInvalidTarget { [CustomImportMany] void InvalidImportMany() { } } [Fact] public void ImportDefinitions_ImportManyWithCustomAttributeImportManys() { var part = CreatePart(typeof(ImportManyWithCustomImportMany)); Assert.Equal(1, part.ImportDefinitions.Count()); ContractBasedImportDefinition import = part.ImportDefinitions.First() as ContractBasedImportDefinition; Assert.NotNull(import); Assert.Equal(AttributedModelServices.GetContractName(typeof(IContract)), import.ContractName); Assert.Equal(AttributedModelServices.GetTypeIdentity(typeof(IContract)), import.RequiredTypeIdentity); } [Fact] public void 
ImportDefinitions_ImportManyWithCustomImportManyInvalidTarget_ShouldbeIgnored() { var part = CreatePart(typeof(ImportManyWithCustomImportManyInvalidTarget)); Assert.Equal(0, part.ImportDefinitions.Count()); } [AttributeUsage(AttributeTargets.Constructor, AllowMultiple = false, Inherited = false)] public class CustomImportingConstructorAttribute : ImportingConstructorAttribute { public CustomImportingConstructorAttribute() : base() { } } [AttributeUsage(AttributeTargets.Constructor, AllowMultiple = true, Inherited = false)] public class CustomImportingConstructorAllowMultipleAttribute : ImportingConstructorAttribute { public CustomImportingConstructorAllowMultipleAttribute() : base() { } } [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)] public class CustomImportingConstructorInvalidTargetAttribute : ImportingConstructorAttribute { public CustomImportingConstructorInvalidTargetAttribute() : base() { } } [PartNotDiscoverable] public class ImportingConstructorWithCustomImportingConstructor { [CustomImportingConstructor] ImportingConstructorWithCustomImportingConstructor([Import] IContract argument) { } } [PartNotDiscoverable] public class ImportingConstructorWithCustomImportingConstructorAllowMultiple { [CustomImportingConstructorAllowMultiple] [CustomImportingConstructorAllowMultiple] ImportingConstructorWithCustomImportingConstructorAllowMultiple([Import] IContract argument) { } } [PartNotDiscoverable] public class ImportingConstructorWithCustomImportingConstructorInvalidTarget { [CustomImportingConstructorInvalidTarget] void InvalidImportingConstructor() { } } [Fact] public void ImportDefinitions_ImportingConstructorWithCustomAttributeImportingConstructors() { var part = CreatePart(typeof(ImportingConstructorWithCustomImportingConstructor)); Assert.Equal(1, part.ImportDefinitions.Count()); ContractBasedImportDefinition import = part.ImportDefinitions.First() as ContractBasedImportDefinition; Assert.NotNull(import); 
Assert.Equal(AttributedModelServices.GetContractName(typeof(IContract)), import.ContractName); Assert.Equal(AttributedModelServices.GetTypeIdentity(typeof(IContract)), import.RequiredTypeIdentity); } [Fact] public void ImportDefinitions_ImportingConstructorWithCustomAttributeImportingConstructorsWithAllowMultiple_ShouldNotThrowInvalidOperation() { var part = CreatePart(typeof(ImportingConstructorWithCustomImportingConstructorAllowMultiple)); Assert.Equal(1, part.ImportDefinitions.Count()); ContractBasedImportDefinition import = part.ImportDefinitions.First() as ContractBasedImportDefinition; Assert.NotNull(import); Assert.Equal(AttributedModelServices.GetContractName(typeof(IContract)), import.ContractName); Assert.Equal(AttributedModelServices.GetTypeIdentity(typeof(IContract)), import.RequiredTypeIdentity); } [Fact] public void ImportDefinitions_ImportingConstructorWithCustomImportingConstructorInvalidTarget_ShouldbeIgnored() { var part = CreatePart(typeof(ImportingConstructorWithCustomImportingConstructorInvalidTarget)); Assert.Equal(0, part.ImportDefinitions.Count()); } private Export[] CreateSimpleExports(object value) { var export = ExportFactory.Create("NoContract", () => value); return new Export[] { export }; } private ReflectionComposablePart CreatePartWithExport() { return CreatePart(typeof(StaticExportClass)); } private ReflectionComposablePart CreatePartWithNonRecomposableImport() { return CreatePart(typeof(SingleImportWithAllowDefault)); } private ReflectionComposablePart CreatePartWithZeroOrOneImport() { return CreatePart(typeof(SingleImportWithAllowDefault)); } private ReflectionComposablePart CreatePartWithExactlyOneImport() { return CreatePart(typeof(SingleImport)); } private ReflectionComposablePart CreateDefaultPart() { return CreatePart(new object()); } [PartNotDiscoverable] [Export] public class DisposablePart : IDisposable { [Import(AllowDefault = true)] public int Foo { get; set; } public void Dispose() { } } private ReflectionComposablePart 
CreateDefaultDisposablePart() { return CreatePart(typeof(DisposablePart)); } private ReflectionComposablePart CreatePart(object instance) { if (instance is Type) { var definition = PartDefinitionFactory.CreateAttributed((Type)instance); return (ReflectionComposablePart)definition.CreatePart(); } else { var definition = PartDefinitionFactory.CreateAttributed(instance.GetType()); return new ReflectionComposablePart(definition, instance); } } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.IO;
using System.Collections.Generic;

using Internal.IL.Stubs;
using Internal.TypeSystem;
using Internal.Metadata.NativeFormat.Writer;

using ILCompiler.Metadata;
using ILCompiler.DependencyAnalysis;

using Debug = System.Diagnostics.Debug;

namespace ILCompiler
{
    /// <summary>
    /// This class is responsible for managing native metadata to be emitted into the compiled
    /// module. It applies a policy that every type/method emitted shall be reflectable.
    /// </summary>
    public class CompilerGeneratedMetadataManager : MetadataManager
    {
        private GeneratedTypesAndCodeMetadataPolicy _metadataPolicy;

        public CompilerGeneratedMetadataManager(CompilationModuleGroup group, CompilerTypeSystemContext typeSystemContext)
            : base(group, typeSystemContext)
        {
            _metadataPolicy = new GeneratedTypesAndCodeMetadataPolicy(this);
        }

        // Type/method definitions and modules observed while code was generated;
        // these sets drive which metadata records are emitted in ComputeMetadata.
        private HashSet<MetadataType> _typeDefinitionsGenerated = new HashSet<MetadataType>();
        private HashSet<MethodDesc> _methodDefinitionsGenerated = new HashSet<MethodDesc>();
        private HashSet<ModuleDesc> _modulesSeen = new HashSet<ModuleDesc>();
        // Cache of reflection-invoke thunks, keyed by the shape of the target signature
        // so methods with the same shape share one thunk.
        private Dictionary<DynamicInvokeMethodSignature, MethodDesc> _dynamicInvokeThunks = new Dictionary<DynamicInvokeMethodSignature, MethodDesc>();

        // Records the type definition (and its owning module) for every generated
        // defined type; non-definition instantiations are passed through to the base.
        protected override void AddGeneratedType(TypeDesc type)
        {
            if (type.IsDefType && type.IsTypeDefinition)
            {
                var mdType = type as MetadataType;
                if (mdType != null)
                {
                    _modulesSeen.Add(mdType.Module);
                    _typeDefinitionsGenerated.Add(mdType);
                }
            }

            base.AddGeneratedType(type);
        }

        public override IEnumerable<ModuleDesc> GetCompilationModulesWithMetadata()
        {
            return _modulesSeen;
        }

        // Records the typical definition of every generated method, and makes sure
        // the owning type is tracked as well.
        protected override void AddGeneratedMethod(MethodDesc method)
        {
            AddGeneratedType(method.OwningType);
            _methodDefinitionsGenerated.Add(method.GetTypicalMethodDefinition());
base.AddGeneratedMethod(method); } public override bool IsReflectionBlocked(MetadataType type) { return _metadataPolicy.IsBlocked(type); } protected override void ComputeMetadata(out byte[] metadataBlob, out List<MetadataMapping<MetadataType>> typeMappings, out List<MetadataMapping<MethodDesc>> methodMappings, out List<MetadataMapping<FieldDesc>> fieldMappings) { var transformed = MetadataTransform.Run(new GeneratedTypesAndCodeMetadataPolicy(this), _modulesSeen); // TODO: DeveloperExperienceMode: Use transformed.Transform.HandleType() to generate // TypeReference records for _typeDefinitionsGenerated that don't have metadata. // (To be used in MissingMetadataException messages) // Generate metadata blob var writer = new MetadataWriter(); writer.ScopeDefinitions.AddRange(transformed.Scopes); var ms = new MemoryStream(); writer.Write(ms); metadataBlob = ms.ToArray(); typeMappings = new List<MetadataMapping<MetadataType>>(); methodMappings = new List<MetadataMapping<MethodDesc>>(); fieldMappings = new List<MetadataMapping<FieldDesc>>(); // Generate type definition mappings foreach (var definition in _typeDefinitionsGenerated) { MetadataRecord record = transformed.GetTransformedTypeDefinition(definition); // Reflection requires that we maintain type identity. Even if we only generated a TypeReference record, // if there is an EEType for it, we also need a mapping table entry for it. if (record == null) record = transformed.GetTransformedTypeReference(definition); if (record != null) typeMappings.Add(new MetadataMapping<MetadataType>(definition, writer.GetRecordHandle(record))); } foreach (var method in GetCompiledMethods()) { if (method.IsCanonicalMethod(CanonicalFormKind.Specific)) { // Canonical methods are not interesting. 
continue; } MetadataRecord record = transformed.GetTransformedMethodDefinition(method.GetTypicalMethodDefinition()); if (record != null) methodMappings.Add(new MetadataMapping<MethodDesc>(method, writer.GetRecordHandle(record))); } foreach (var eetypeGenerated in GetTypesWithEETypes()) { if (eetypeGenerated.IsGenericDefinition) continue; foreach (FieldDesc field in eetypeGenerated.GetFields()) { Field record = transformed.GetTransformedFieldDefinition(field.GetTypicalFieldDefinition()); if (record != null) fieldMappings.Add(new MetadataMapping<FieldDesc>(field, writer.GetRecordHandle(record))); } } } /// <summary> /// Is there a reflection invoke stub for a method that is invokable? /// </summary> public override bool HasReflectionInvokeStubForInvokableMethod(MethodDesc method) { return true; } /// <summary> /// Gets a stub that can be used to reflection-invoke a method with a given signature. /// </summary> public override MethodDesc GetReflectionInvokeStub(MethodDesc method) { TypeSystemContext context = method.Context; var sig = method.Signature; // Get a generic method that can be used to invoke method with this shape. 
MethodDesc thunk; var lookupSig = new DynamicInvokeMethodSignature(sig); if (!_dynamicInvokeThunks.TryGetValue(lookupSig, out thunk)) { thunk = new DynamicInvokeMethodThunk(_compilationModuleGroup.GeneratedAssembly.GetGlobalModuleType(), lookupSig); _dynamicInvokeThunks.Add(lookupSig, thunk); } return InstantiateDynamicInvokeMethodForMethod(thunk, method); } private struct GeneratedTypesAndCodeMetadataPolicy : IMetadataPolicy { private CompilerGeneratedMetadataManager _parent; private ExplicitScopeAssemblyPolicyMixin _explicitScopeMixin; private Dictionary<MetadataType, bool> _isAttributeCache; public GeneratedTypesAndCodeMetadataPolicy(CompilerGeneratedMetadataManager parent) { _parent = parent; _explicitScopeMixin = new ExplicitScopeAssemblyPolicyMixin(); MetadataType systemAttributeType = parent._typeSystemContext.SystemModule.GetType("System", "Attribute", false); _isAttributeCache = new Dictionary<MetadataType, bool>(); _isAttributeCache.Add(systemAttributeType, true); } public bool GeneratesMetadata(FieldDesc fieldDef) { return _parent._typeDefinitionsGenerated.Contains((MetadataType)fieldDef.OwningType); } public bool GeneratesMetadata(MethodDesc methodDef) { return _parent._methodDefinitionsGenerated.Contains(methodDef); } public bool GeneratesMetadata(MetadataType typeDef) { // Metadata consistency: if a nested type generates metadata, the containing type is // required to generate metadata, or metadata generation will fail. foreach (var nested in typeDef.GetNestedTypes()) { if (GeneratesMetadata(nested)) return true; } return _parent._typeDefinitionsGenerated.Contains(typeDef); } public bool IsBlocked(MetadataType typeDef) { // If an attribute type would generate metadata in this blob (had we compiled it), consider it blocked. // Otherwise we end up with an attribute that is an unresolvable TypeRef and we would get a TypeLoadException // when enumerating attributes on anything that has it. 
// Blocked = we will not generate metadata for it here, the type lives in this
// compilation group, and it derives from System.Attribute.
return !GeneratesMetadata(typeDef)
    && _parent._compilationModuleGroup.ContainsType(typeDef)
    && IsAttributeType(typeDef);
}

/// <summary>
/// Determines whether <paramref name="type"/> derives from System.Attribute by
/// walking the base-type chain. Results are memoized per type; the cache is
/// seeded with System.Attribute itself in the constructor.
/// </summary>
private bool IsAttributeType(MetadataType type)
{
    bool isAttribute;
    if (_isAttributeCache.TryGetValue(type, out isAttribute))
        return isAttribute;

    MetadataType baseType = type.MetadataBaseType;
    isAttribute = (baseType != null) && IsAttributeType(baseType);

    _isAttributeCache.Add(type, isAttribute);
    return isAttribute;
}

/// <summary>
/// Delegates module resolution to the explicit-scope policy mixin.
/// </summary>
public ModuleDesc GetModuleOfType(MetadataType typeDef)
{
    return _explicitScopeMixin.GetModuleOfType(typeDef);
}
}
}
}
// ***********************************************************************
// Copyright (c) 2008-2015 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************

using System;
using System.Collections.Generic;
using System.Reflection;
using NUnit.Compatibility;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal;
using NUnit.Framework.Internal.Builders;

namespace NUnit.Framework
{
    /// <summary>
    /// TestCaseAttribute is used to mark parameterized test cases
    /// and provide them with their arguments.
    /// </summary>
    [AttributeUsage(AttributeTargets.Method, AllowMultiple = true, Inherited = false)]
    public class TestCaseAttribute : NUnitAttribute, ITestBuilder, ITestCaseData, IImplyFixture
    {
        #region Constructors

        /// <summary>
        /// Construct a TestCaseAttribute with a list of arguments.
        /// This constructor is not CLS-Compliant
        /// </summary>
        /// <param name="arguments">The arguments for the test case; a null array
        /// is treated as a single null argument.</param>
        public TestCaseAttribute(params object[] arguments)
        {
            RunState = RunState.Runnable;

            // C# binds TestCase(null) to this overload with a null array;
            // the intent in that case is a single null argument.
            if (arguments == null)
                Arguments = new object[] { null };
            else
                Arguments = arguments;

            Properties = new PropertyBag();
        }

        /// <summary>
        /// Construct a TestCaseAttribute with a single argument
        /// </summary>
        /// <param name="arg"></param>
        public TestCaseAttribute(object arg)
        {
            RunState = RunState.Runnable;
            Arguments = new object[] { arg };
            Properties = new PropertyBag();
        }

        /// <summary>
        /// Construct a TestCaseAttribute with two arguments
        /// </summary>
        /// <param name="arg1"></param>
        /// <param name="arg2"></param>
        public TestCaseAttribute(object arg1, object arg2)
        {
            RunState = RunState.Runnable;
            Arguments = new object[] { arg1, arg2 };
            Properties = new PropertyBag();
        }

        /// <summary>
        /// Construct a TestCaseAttribute with three arguments
        /// </summary>
        /// <param name="arg1"></param>
        /// <param name="arg2"></param>
        /// <param name="arg3"></param>
        public TestCaseAttribute(object arg1, object arg2, object arg3)
        {
            RunState = RunState.Runnable;
            Arguments = new object[] { arg1, arg2, arg3 };
            Properties = new PropertyBag();
        }

        #endregion

        #region ITestData Members

        /// <summary>
        /// Gets or sets the name of the test.
        /// </summary>
        /// <value>The name of the test.</value>
        public string TestName { get; set; }

        /// <summary>
        /// Gets or sets the RunState of this test case.
        /// </summary>
        public RunState RunState { get; private set; }

        /// <summary>
        /// Gets the list of arguments to a test case
        /// </summary>
        public object[] Arguments { get; private set; }

        /// <summary>
        /// Gets the properties of the test case
        /// </summary>
        public IPropertyBag Properties { get; private set; }

        #endregion

        #region ITestCaseData Members

        /// <summary>
        /// Gets or sets the expected result. Setting this property also
        /// sets <see cref="HasExpectedResult"/> to true.
        /// </summary>
        /// <value>The result.</value>
        public object ExpectedResult
        {
            get { return _expectedResult; }
            set
            {
                _expectedResult = value;
                HasExpectedResult = true;
            }
        }
        private object _expectedResult;

        /// <summary>
        /// Returns true if the expected result has been set
        /// </summary>
        public bool HasExpectedResult { get; private set; }

        #endregion

        #region Other Properties

        /// <summary>
        /// Gets or sets the description.
        /// </summary>
        /// <value>The description.</value>
        public string Description
        {
            get { return Properties.Get(PropertyNames.Description) as string; }
            set { Properties.Set(PropertyNames.Description, value); }
        }

        /// <summary>
        /// The author of this test
        /// </summary>
        public string Author
        {
            get { return Properties.Get(PropertyNames.Author) as string; }
            set { Properties.Set(PropertyNames.Author, value); }
        }

        /// <summary>
        /// The type that this test is testing
        /// </summary>
        public Type TestOf
        {
            get { return _testOf; }
            set
            {
                _testOf = value;
                // NOTE(review): a null value throws NullReferenceException here;
                // presumably callers never assign null — confirm before guarding.
                Properties.Set(PropertyNames.TestOf, value.FullName);
            }
        }
        private Type _testOf;

        /// <summary>
        /// Gets or sets the reason for ignoring the test
        /// </summary>
        public string Ignore
        {
            get { return IgnoreReason; }
            set { IgnoreReason = value; }
        }

        /// <summary>
        /// Gets or sets a value indicating whether this <see cref="NUnit.Framework.TestCaseAttribute"/> is explicit.
        /// </summary>
        /// <value>
        /// <c>true</c> if explicit; otherwise, <c>false</c>.
        /// </value>
        public bool Explicit
        {
            get { return RunState == RunState.Explicit; }
            set { RunState = value ? RunState.Explicit : RunState.Runnable; }
        }

        /// <summary>
        /// Gets or sets the reason for not running the test.
        /// </summary>
        /// <value>The reason.</value>
        public string Reason
        {
            get { return Properties.Get(PropertyNames.SkipReason) as string; }
            set { Properties.Set(PropertyNames.SkipReason, value); }
        }

        /// <summary>
        /// Gets or sets the ignore reason. When set to a non-null
        /// non-empty value, the test is marked as ignored.
        /// </summary>
        /// <value>The ignore reason.</value>
        public string IgnoreReason
        {
            get { return Reason; }
            set
            {
                // Setting any reason (even null/empty, despite the summary above)
                // transitions the test to the Ignored state.
                RunState = RunState.Ignored;
                Reason = value;
            }
        }

#if !PORTABLE
        /// <summary>
        /// Comma-delimited list of platforms to run the test for
        /// </summary>
        public string IncludePlatform { get; set; }

        /// <summary>
        /// Comma-delimited list of platforms to not run the test for
        /// </summary>
        public string ExcludePlatform { get; set; }
#endif

        /// <summary>
        /// Gets and sets the category for this test case.
        /// May be a comma-separated list of categories.
        /// </summary>
        public string Category
        {
            get { return Properties.Get(PropertyNames.Category) as string; }
            set
            {
                foreach (string cat in value.Split(new char[] { ',' }))
                    Properties.Add(PropertyNames.Category, cat);
            }
        }

        #endregion

        #region Helper Methods

        /// <summary>
        /// Builds the TestCaseParameters for this attribute as applied to
        /// <paramref name="method"/>, performing params/optional-argument
        /// fix-ups and NUnit's special argument conversions. Any exception
        /// thrown during construction is captured in the returned parameters
        /// so the test is reported as non-runnable rather than crashing
        /// discovery.
        /// </summary>
        private TestCaseParameters GetParametersForTestCase(IMethodInfo method)
        {
            TestCaseParameters parms;

            try
            {
#if NETCF
                var tmethod = method.MakeGenericMethodEx(Arguments);
                if (tmethod == null)
                    throw new NotSupportedException("Cannot determine generic types from probing");
                method = tmethod;
#endif

                IParameterInfo[] parameters = method.GetParameters();
                int argsNeeded = parameters.Length;

                parms = new TestCaseParameters(this);
                SpecialArgumentsHandling(parms, parameters);
                int argsProvided = parms.Arguments.Length;

                // Special handling when sole argument is an object[]: wrap the
                // provided arguments so they arrive as a single array argument.
                if (argsNeeded == 1 && method.GetParameters()[0].ParameterType == typeof(object[]))
                {
                    if (argsProvided > 1 ||
                        argsProvided == 1 && parms.Arguments[0].GetType() != typeof(object[]))
                    {
                        parms.Arguments = new object[] { parms.Arguments };
                    }
                }

                if (argsProvided == argsNeeded)
                    PerformSpecialConversions(parms.Arguments, parameters);
            }
            catch (Exception ex)
            {
                parms = new TestCaseParameters(ex);
            }

            return parms;
        }

        /// <summary>
        /// Special handling for params arguments
        /// Special handling for optional parameters
        /// </summary>
        /// <param name="parms">The parameters obtained</param>
        /// <param name="parameters">The parameters expected by the test method.</param>
        public static void SpecialArgumentsHandling(TestCaseParameters parms, IParameterInfo[] parameters)
        {
            int argsNeeded = parameters.Length;
            int argsProvided = parms.Arguments.Length;

            // Special handling for params arguments: gather trailing provided
            // arguments (or wrap a single mismatched one) into the params array.
            if (argsNeeded > 0 && argsProvided >= argsNeeded - 1)
            {
                IParameterInfo lastParameter = parameters[argsNeeded - 1];
                Type lastParameterType = lastParameter.ParameterType;
                Type elementType = lastParameterType.GetElementType();

                if (lastParameterType.IsArray && lastParameter.IsDefined<ParamArrayAttribute>(false))
                {
                    if (argsProvided == argsNeeded)
                    {
                        // Exactly one argument in the params position: wrap it in a
                        // one-element array unless it already is the array type.
                        Type lastArgumentType = parms.Arguments[argsProvided - 1].GetType();
                        if (!lastParameterType.GetTypeInfo().IsAssignableFrom(lastArgumentType.GetTypeInfo()))
                        {
                            Array array = Array.CreateInstance(elementType, 1);
                            array.SetValue(parms.Arguments[argsProvided - 1], 0);
                            parms.Arguments[argsProvided - 1] = array;
                        }
                    }
                    else
                    {
                        // Zero or many arguments in the params position: copy the
                        // fixed arguments, then pack the remainder into one array.
                        object[] newArglist = new object[argsNeeded];
                        for (int i = 0; i < argsNeeded && i < argsProvided; i++)
                            newArglist[i] = parms.Arguments[i];

                        int length = argsProvided - argsNeeded + 1;
                        Array array = Array.CreateInstance(elementType, length);
                        for (int i = 0; i < length; i++)
                            array.SetValue(parms.Arguments[argsNeeded + i - 1], i);

                        newArglist[argsNeeded - 1] = array;
                        parms.Arguments = newArglist;
                        argsProvided = argsNeeded;
                    }
                }
            }

#if !NETCF
            // Special handling for optional parameters: fill unsupplied trailing
            // optional parameters with Type.Missing so reflection invocation
            // substitutes their declared defaults.
            if (argsProvided < argsNeeded)
            {
                object[] newArgList = new object[parameters.Length];
                Array.Copy(parms.Arguments, newArgList, argsProvided);

                for (var i = parms.Arguments.Length; i < parameters.Length; i++)
                {
                    if (parameters[i].IsOptional)
                        newArgList[i] = Type.Missing;
                    else
                        // The loop starts at parms.Arguments.Length, so index i can
                        // never be covered by the supplied arguments: a non-optional
                        // parameter here always means too few arguments were given.
                        // (An unreachable re-copy branch was removed from this spot.)
                        throw new TargetParameterCountException("Incorrect number of parameters specified for TestCase");
                }
                parms.Arguments = newArgList;
            }
#endif
        }

        /// <summary>
        /// Performs several special conversions allowed by NUnit in order to
        /// permit arguments with types that cannot be used in the constructor
        /// of an Attribute such as TestCaseAttribute or to simplify their use.
        /// </summary>
        /// <param name="arglist">The arguments to be converted</param>
        /// <param name="parameters">The ParameterInfo array for the method</param>
        private static void PerformSpecialConversions(object[] arglist, IParameterInfo[] parameters)
        {
            for (int i = 0; i < arglist.Length; i++)
            {
                object arg = arglist[i];
                Type targetType = parameters[i].ParameterType;

                if (arg == null)
                    continue;

                // SpecialValue.Null stands in for null, which cannot appear
                // directly in attribute arguments in some contexts.
                if (arg is SpecialValue && (SpecialValue)arg == SpecialValue.Null)
                {
                    arglist[i] = null;
                    continue;
                }

                if (targetType.IsAssignableFrom(arg.GetType()))
                    continue;

#if !PORTABLE
                if (arg is DBNull)
                {
                    arglist[i] = null;
                    continue;
                }
#endif

                // Widen/narrow only the combinations that attribute arguments
                // commonly force (int literals for small integral/nullable
                // targets, string/double/int for decimal, string for DateTime).
                bool convert = false;

                if (targetType == typeof(short) || targetType == typeof(byte) || targetType == typeof(sbyte) ||
                    targetType == typeof(short?) || targetType == typeof(byte?) || targetType == typeof(sbyte?) ||
                    targetType == typeof(double?))
                {
                    convert = arg is int;
                }
                else if (targetType == typeof(decimal) || targetType == typeof(decimal?))
                {
                    convert = arg is double || arg is string || arg is int;
                }
                else if (targetType == typeof(DateTime) || targetType == typeof(DateTime?))
                {
                    convert = arg is string;
                }

                if (convert)
                {
                    // For nullable targets, convert to the underlying type.
                    Type convertTo = targetType.GetTypeInfo().IsGenericType &&
                        targetType.GetGenericTypeDefinition() == typeof(Nullable<>)
                            ? targetType.GetGenericArguments()[0]
                            : targetType;
                    arglist[i] = Convert.ChangeType(arg, convertTo, System.Globalization.CultureInfo.InvariantCulture);
                }
                else
                // Convert.ChangeType doesn't work for TimeSpan from string
                if ((targetType == typeof(TimeSpan) || targetType == typeof(TimeSpan?)) && arg is string)
                {
                    arglist[i] = TimeSpan.Parse((string)arg);
                }
            }
        }

        #endregion

        #region ITestBuilder Members

        /// <summary>
        /// Construct one or more TestMethods from a given MethodInfo,
        /// using available parameter data.
        /// </summary>
        /// <param name="method">The MethodInfo for which tests are to be constructed.</param>
        /// <param name="suite">The suite to which the tests will be added.</param>
        /// <returns>One or more TestMethods</returns>
        public IEnumerable<TestMethod> BuildFrom(IMethodInfo method, Test suite)
        {
            TestMethod test = new NUnitTestCaseBuilder().BuildTestMethod(method, suite, GetParametersForTestCase(method));

#if !PORTABLE
            // Skip (rather than fail) tests whose platform constraints exclude
            // the current platform, unless they are already non-runnable/ignored.
            if (test.RunState != RunState.NotRunnable &&
                test.RunState != RunState.Ignored)
            {
                PlatformHelper platformHelper = new PlatformHelper();
                if (!platformHelper.IsPlatformSupported(this))
                {
                    test.RunState = RunState.Skipped;
                    test.Properties.Add(PropertyNames.SkipReason, platformHelper.Reason);
                }
            }
#endif

            yield return test;
        }

        #endregion
    }
}