context
stringlengths
2.52k
185k
gt
stringclasses
1 value
/* * Copyright (c) Citrix Systems, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1) Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections; using System.Collections.Generic; using CookComputing.XmlRpc; namespace XenAPI { /// <summary> /// A type of virtual GPU /// First published in XenServer 6.2 SP1 Tech-Preview. 
/// </summary> public partial class VGPU_type : XenObject<VGPU_type> { public VGPU_type() { } public VGPU_type(string uuid, string vendor_name, string model_name, long framebuffer_size, long max_heads, long max_resolution_x, long max_resolution_y, List<XenRef<PGPU>> supported_on_PGPUs, List<XenRef<PGPU>> enabled_on_PGPUs, List<XenRef<VGPU>> VGPUs, List<XenRef<GPU_group>> supported_on_GPU_groups, List<XenRef<GPU_group>> enabled_on_GPU_groups) { this.uuid = uuid; this.vendor_name = vendor_name; this.model_name = model_name; this.framebuffer_size = framebuffer_size; this.max_heads = max_heads; this.max_resolution_x = max_resolution_x; this.max_resolution_y = max_resolution_y; this.supported_on_PGPUs = supported_on_PGPUs; this.enabled_on_PGPUs = enabled_on_PGPUs; this.VGPUs = VGPUs; this.supported_on_GPU_groups = supported_on_GPU_groups; this.enabled_on_GPU_groups = enabled_on_GPU_groups; } /// <summary> /// Creates a new VGPU_type from a Proxy_VGPU_type. /// </summary> /// <param name="proxy"></param> public VGPU_type(Proxy_VGPU_type proxy) { this.UpdateFromProxy(proxy); } public override void UpdateFrom(VGPU_type update) { uuid = update.uuid; vendor_name = update.vendor_name; model_name = update.model_name; framebuffer_size = update.framebuffer_size; max_heads = update.max_heads; max_resolution_x = update.max_resolution_x; max_resolution_y = update.max_resolution_y; supported_on_PGPUs = update.supported_on_PGPUs; enabled_on_PGPUs = update.enabled_on_PGPUs; VGPUs = update.VGPUs; supported_on_GPU_groups = update.supported_on_GPU_groups; enabled_on_GPU_groups = update.enabled_on_GPU_groups; } internal void UpdateFromProxy(Proxy_VGPU_type proxy) { uuid = proxy.uuid == null ? null : (string)proxy.uuid; vendor_name = proxy.vendor_name == null ? null : (string)proxy.vendor_name; model_name = proxy.model_name == null ? null : (string)proxy.model_name; framebuffer_size = proxy.framebuffer_size == null ? 
0 : long.Parse((string)proxy.framebuffer_size); max_heads = proxy.max_heads == null ? 0 : long.Parse((string)proxy.max_heads); max_resolution_x = proxy.max_resolution_x == null ? 0 : long.Parse((string)proxy.max_resolution_x); max_resolution_y = proxy.max_resolution_y == null ? 0 : long.Parse((string)proxy.max_resolution_y); supported_on_PGPUs = proxy.supported_on_PGPUs == null ? null : XenRef<PGPU>.Create(proxy.supported_on_PGPUs); enabled_on_PGPUs = proxy.enabled_on_PGPUs == null ? null : XenRef<PGPU>.Create(proxy.enabled_on_PGPUs); VGPUs = proxy.VGPUs == null ? null : XenRef<VGPU>.Create(proxy.VGPUs); supported_on_GPU_groups = proxy.supported_on_GPU_groups == null ? null : XenRef<GPU_group>.Create(proxy.supported_on_GPU_groups); enabled_on_GPU_groups = proxy.enabled_on_GPU_groups == null ? null : XenRef<GPU_group>.Create(proxy.enabled_on_GPU_groups); } public Proxy_VGPU_type ToProxy() { Proxy_VGPU_type result_ = new Proxy_VGPU_type(); result_.uuid = (uuid != null) ? uuid : ""; result_.vendor_name = (vendor_name != null) ? vendor_name : ""; result_.model_name = (model_name != null) ? model_name : ""; result_.framebuffer_size = framebuffer_size.ToString(); result_.max_heads = max_heads.ToString(); result_.max_resolution_x = max_resolution_x.ToString(); result_.max_resolution_y = max_resolution_y.ToString(); result_.supported_on_PGPUs = (supported_on_PGPUs != null) ? Helper.RefListToStringArray(supported_on_PGPUs) : new string[] {}; result_.enabled_on_PGPUs = (enabled_on_PGPUs != null) ? Helper.RefListToStringArray(enabled_on_PGPUs) : new string[] {}; result_.VGPUs = (VGPUs != null) ? Helper.RefListToStringArray(VGPUs) : new string[] {}; result_.supported_on_GPU_groups = (supported_on_GPU_groups != null) ? Helper.RefListToStringArray(supported_on_GPU_groups) : new string[] {}; result_.enabled_on_GPU_groups = (enabled_on_GPU_groups != null) ? 
Helper.RefListToStringArray(enabled_on_GPU_groups) : new string[] {}; return result_; } /// <summary> /// Creates a new VGPU_type from a Hashtable. /// </summary> /// <param name="table"></param> public VGPU_type(Hashtable table) { uuid = Marshalling.ParseString(table, "uuid"); vendor_name = Marshalling.ParseString(table, "vendor_name"); model_name = Marshalling.ParseString(table, "model_name"); framebuffer_size = Marshalling.ParseLong(table, "framebuffer_size"); max_heads = Marshalling.ParseLong(table, "max_heads"); max_resolution_x = Marshalling.ParseLong(table, "max_resolution_x"); max_resolution_y = Marshalling.ParseLong(table, "max_resolution_y"); supported_on_PGPUs = Marshalling.ParseSetRef<PGPU>(table, "supported_on_PGPUs"); enabled_on_PGPUs = Marshalling.ParseSetRef<PGPU>(table, "enabled_on_PGPUs"); VGPUs = Marshalling.ParseSetRef<VGPU>(table, "VGPUs"); supported_on_GPU_groups = Marshalling.ParseSetRef<GPU_group>(table, "supported_on_GPU_groups"); enabled_on_GPU_groups = Marshalling.ParseSetRef<GPU_group>(table, "enabled_on_GPU_groups"); } public bool DeepEquals(VGPU_type other) { if (ReferenceEquals(null, other)) return false; if (ReferenceEquals(this, other)) return true; return Helper.AreEqual2(this._uuid, other._uuid) && Helper.AreEqual2(this._vendor_name, other._vendor_name) && Helper.AreEqual2(this._model_name, other._model_name) && Helper.AreEqual2(this._framebuffer_size, other._framebuffer_size) && Helper.AreEqual2(this._max_heads, other._max_heads) && Helper.AreEqual2(this._max_resolution_x, other._max_resolution_x) && Helper.AreEqual2(this._max_resolution_y, other._max_resolution_y) && Helper.AreEqual2(this._supported_on_PGPUs, other._supported_on_PGPUs) && Helper.AreEqual2(this._enabled_on_PGPUs, other._enabled_on_PGPUs) && Helper.AreEqual2(this._VGPUs, other._VGPUs) && Helper.AreEqual2(this._supported_on_GPU_groups, other._supported_on_GPU_groups) && Helper.AreEqual2(this._enabled_on_GPU_groups, other._enabled_on_GPU_groups); } public override 
string SaveChanges(Session session, string opaqueRef, VGPU_type server) { if (opaqueRef == null) { System.Diagnostics.Debug.Assert(false, "Cannot create instances of this type on the server"); return ""; } else { throw new InvalidOperationException("This type has no read/write properties"); } } /// <summary> /// Get a record containing the current state of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static VGPU_type get_record(Session session, string _vgpu_type) { return new VGPU_type((Proxy_VGPU_type)session.proxy.vgpu_type_get_record(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get a reference to the VGPU_type instance with the specified UUID. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_uuid">UUID of object to return</param> public static XenRef<VGPU_type> get_by_uuid(Session session, string _uuid) { return XenRef<VGPU_type>.Create(session.proxy.vgpu_type_get_by_uuid(session.uuid, (_uuid != null) ? _uuid : "").parse()); } /// <summary> /// Get the uuid field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static string get_uuid(Session session, string _vgpu_type) { return (string)session.proxy.vgpu_type_get_uuid(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse(); } /// <summary> /// Get the vendor_name field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. 
/// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static string get_vendor_name(Session session, string _vgpu_type) { return (string)session.proxy.vgpu_type_get_vendor_name(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse(); } /// <summary> /// Get the model_name field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static string get_model_name(Session session, string _vgpu_type) { return (string)session.proxy.vgpu_type_get_model_name(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse(); } /// <summary> /// Get the framebuffer_size field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static long get_framebuffer_size(Session session, string _vgpu_type) { return long.Parse((string)session.proxy.vgpu_type_get_framebuffer_size(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the max_heads field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static long get_max_heads(Session session, string _vgpu_type) { return long.Parse((string)session.proxy.vgpu_type_get_max_heads(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the max_resolution_x field of the given VGPU_type. /// First published in XenServer 6.2 SP1. 
/// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static long get_max_resolution_x(Session session, string _vgpu_type) { return long.Parse((string)session.proxy.vgpu_type_get_max_resolution_x(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the max_resolution_y field of the given VGPU_type. /// First published in XenServer 6.2 SP1. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static long get_max_resolution_y(Session session, string _vgpu_type) { return long.Parse((string)session.proxy.vgpu_type_get_max_resolution_y(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the supported_on_PGPUs field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static List<XenRef<PGPU>> get_supported_on_PGPUs(Session session, string _vgpu_type) { return XenRef<PGPU>.Create(session.proxy.vgpu_type_get_supported_on_pgpus(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the enabled_on_PGPUs field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static List<XenRef<PGPU>> get_enabled_on_PGPUs(Session session, string _vgpu_type) { return XenRef<PGPU>.Create(session.proxy.vgpu_type_get_enabled_on_pgpus(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the VGPUs field of the given VGPU_type. /// First published in XenServer 6.2 SP1 Tech-Preview. 
/// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static List<XenRef<VGPU>> get_VGPUs(Session session, string _vgpu_type) { return XenRef<VGPU>.Create(session.proxy.vgpu_type_get_vgpus(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the supported_on_GPU_groups field of the given VGPU_type. /// First published in XenServer 6.2 SP1. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static List<XenRef<GPU_group>> get_supported_on_GPU_groups(Session session, string _vgpu_type) { return XenRef<GPU_group>.Create(session.proxy.vgpu_type_get_supported_on_gpu_groups(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Get the enabled_on_GPU_groups field of the given VGPU_type. /// First published in XenServer 6.2 SP1. /// </summary> /// <param name="session">The session</param> /// <param name="_vgpu_type">The opaque_ref of the given vgpu_type</param> public static List<XenRef<GPU_group>> get_enabled_on_GPU_groups(Session session, string _vgpu_type) { return XenRef<GPU_group>.Create(session.proxy.vgpu_type_get_enabled_on_gpu_groups(session.uuid, (_vgpu_type != null) ? _vgpu_type : "").parse()); } /// <summary> /// Return a list of all the VGPU_types known to the system. /// First published in XenServer 6.2 SP1 Tech-Preview. /// </summary> /// <param name="session">The session</param> public static List<XenRef<VGPU_type>> get_all(Session session) { return XenRef<VGPU_type>.Create(session.proxy.vgpu_type_get_all(session.uuid).parse()); } /// <summary> /// Get all the VGPU_type Records at once, in a single XML RPC call /// First published in XenServer 6.2 SP1 Tech-Preview. 
/// </summary> /// <param name="session">The session</param> public static Dictionary<XenRef<VGPU_type>, VGPU_type> get_all_records(Session session) { return XenRef<VGPU_type>.Create<Proxy_VGPU_type>(session.proxy.vgpu_type_get_all_records(session.uuid).parse()); } /// <summary> /// Unique identifier/object reference /// </summary> public virtual string uuid { get { return _uuid; } set { if (!Helper.AreEqual(value, _uuid)) { _uuid = value; Changed = true; NotifyPropertyChanged("uuid"); } } } private string _uuid; /// <summary> /// Name of VGPU vendor /// </summary> public virtual string vendor_name { get { return _vendor_name; } set { if (!Helper.AreEqual(value, _vendor_name)) { _vendor_name = value; Changed = true; NotifyPropertyChanged("vendor_name"); } } } private string _vendor_name; /// <summary> /// Model name associated with the VGPU type /// </summary> public virtual string model_name { get { return _model_name; } set { if (!Helper.AreEqual(value, _model_name)) { _model_name = value; Changed = true; NotifyPropertyChanged("model_name"); } } } private string _model_name; /// <summary> /// Framebuffer size of the VGPU type, in bytes /// </summary> public virtual long framebuffer_size { get { return _framebuffer_size; } set { if (!Helper.AreEqual(value, _framebuffer_size)) { _framebuffer_size = value; Changed = true; NotifyPropertyChanged("framebuffer_size"); } } } private long _framebuffer_size; /// <summary> /// Maximum number of displays supported by the VGPU type /// </summary> public virtual long max_heads { get { return _max_heads; } set { if (!Helper.AreEqual(value, _max_heads)) { _max_heads = value; Changed = true; NotifyPropertyChanged("max_heads"); } } } private long _max_heads; /// <summary> /// Maximum resultion (width) supported by the VGPU type /// First published in XenServer 6.2 SP1. 
/// </summary> public virtual long max_resolution_x { get { return _max_resolution_x; } set { if (!Helper.AreEqual(value, _max_resolution_x)) { _max_resolution_x = value; Changed = true; NotifyPropertyChanged("max_resolution_x"); } } } private long _max_resolution_x; /// <summary> /// Maximum resoltion (height) supported by the VGPU type /// First published in XenServer 6.2 SP1. /// </summary> public virtual long max_resolution_y { get { return _max_resolution_y; } set { if (!Helper.AreEqual(value, _max_resolution_y)) { _max_resolution_y = value; Changed = true; NotifyPropertyChanged("max_resolution_y"); } } } private long _max_resolution_y; /// <summary> /// List of PGPUs that support this VGPU type /// </summary> public virtual List<XenRef<PGPU>> supported_on_PGPUs { get { return _supported_on_PGPUs; } set { if (!Helper.AreEqual(value, _supported_on_PGPUs)) { _supported_on_PGPUs = value; Changed = true; NotifyPropertyChanged("supported_on_PGPUs"); } } } private List<XenRef<PGPU>> _supported_on_PGPUs; /// <summary> /// List of PGPUs that have this VGPU type enabled /// </summary> public virtual List<XenRef<PGPU>> enabled_on_PGPUs { get { return _enabled_on_PGPUs; } set { if (!Helper.AreEqual(value, _enabled_on_PGPUs)) { _enabled_on_PGPUs = value; Changed = true; NotifyPropertyChanged("enabled_on_PGPUs"); } } } private List<XenRef<PGPU>> _enabled_on_PGPUs; /// <summary> /// List of VGPUs of this type /// </summary> public virtual List<XenRef<VGPU>> VGPUs { get { return _VGPUs; } set { if (!Helper.AreEqual(value, _VGPUs)) { _VGPUs = value; Changed = true; NotifyPropertyChanged("VGPUs"); } } } private List<XenRef<VGPU>> _VGPUs; /// <summary> /// List of GPU groups in which at least one PGPU supports this VGPU type /// First published in XenServer 6.2 SP1. 
/// </summary> public virtual List<XenRef<GPU_group>> supported_on_GPU_groups { get { return _supported_on_GPU_groups; } set { if (!Helper.AreEqual(value, _supported_on_GPU_groups)) { _supported_on_GPU_groups = value; Changed = true; NotifyPropertyChanged("supported_on_GPU_groups"); } } } private List<XenRef<GPU_group>> _supported_on_GPU_groups; /// <summary> /// List of GPU groups in which at least one have this VGPU type enabled /// First published in XenServer 6.2 SP1. /// </summary> public virtual List<XenRef<GPU_group>> enabled_on_GPU_groups { get { return _enabled_on_GPU_groups; } set { if (!Helper.AreEqual(value, _enabled_on_GPU_groups)) { _enabled_on_GPU_groups = value; Changed = true; NotifyPropertyChanged("enabled_on_GPU_groups"); } } } private List<XenRef<GPU_group>> _enabled_on_GPU_groups; } }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Diagnostics; using System.Runtime.CompilerServices; using System.Text; namespace System.IO { /// <summary>Contains internal path helpers that are shared between many projects.</summary> internal static partial class PathInternal { internal const string ExtendedPathPrefix = @"\\?\"; internal const string UncPathPrefix = @"\\"; internal const string UncExtendedPrefixToInsert = @"?\UNC\"; internal const string UncExtendedPathPrefix = @"\\?\UNC\"; internal const string DevicePathPrefix = @"\\.\"; internal const int MaxShortPath = 260; internal const int MaxShortDirectoryPath = 248; internal const int MaxLongPath = short.MaxValue; internal static readonly char[] InvalidPathChars = { '\"', '<', '>', '|', '\0', (char)1, (char)2, (char)3, (char)4, (char)5, (char)6, (char)7, (char)8, (char)9, (char)10, (char)11, (char)12, (char)13, (char)14, (char)15, (char)16, (char)17, (char)18, (char)19, (char)20, (char)21, (char)22, (char)23, (char)24, (char)25, (char)26, (char)27, (char)28, (char)29, (char)30, (char)31 }; internal static readonly char[] InvalidPathCharsWithAdditionalChecks = // This is used by HasIllegalCharacters { '\"', '<', '>', '|', '\0', (char)1, (char)2, (char)3, (char)4, (char)5, (char)6, (char)7, (char)8, (char)9, (char)10, (char)11, (char)12, (char)13, (char)14, (char)15, (char)16, (char)17, (char)18, (char)19, (char)20, (char)21, (char)22, (char)23, (char)24, (char)25, (char)26, (char)27, (char)28, (char)29, (char)30, (char)31, '*', '?' 
}; /// <summary> /// Returns true if the given character is a valid drive letter /// </summary> internal static bool IsValidDriveChar(char value) { return ((value >= 'A' && value <= 'Z') || (value >= 'a' && value <= 'z')); } /// <summary> /// Returns true if the path is too long /// </summary> internal static bool IsPathTooLong(string fullPath) { // We'll never know precisely what will fail as paths get changed internally in Windows and // may grow to exceed MaxExtendedPath. We'll only try to catch ones we know will absolutely // fail. if (fullPath.Length < MaxLongPath - UncExtendedPathPrefix.Length) { // We won't push it over MaxLongPath return false; } // We need to check if we have a prefix to account for one being implicitly added. if (IsExtended(fullPath)) { // We won't prepend, just check return fullPath.Length >= MaxLongPath; } if (fullPath.StartsWith(UncPathPrefix, StringComparison.Ordinal)) { return fullPath.Length + UncExtendedPrefixToInsert.Length >= MaxLongPath; } return fullPath.Length + ExtendedPathPrefix.Length >= MaxLongPath; } /// <summary> /// Returns true if the directory is too long /// </summary> internal static bool IsDirectoryTooLong(string fullPath) { return IsPathTooLong(fullPath); } /// <summary> /// Adds the extended path prefix (\\?\) if not already present, IF the path is not relative, /// AND the path is more than 259 characters. (> MAX_PATH + null) /// </summary> internal static string EnsureExtendedPrefixOverMaxPath(string path) { if (path != null && path.Length >= MaxShortPath) { return EnsureExtendedPrefix(path); } else { return path; } } /// <summary> /// Adds the extended path prefix (\\?\) if not already present and if the path is not relative or a device (\\.\). 
/// </summary> internal static string EnsureExtendedPrefix(string path) { if (IsExtended(path) || IsRelative(path) || IsDevice(path)) return path; // Given \\server\share in longpath becomes \\?\UNC\server\share if (path.StartsWith(UncPathPrefix, StringComparison.OrdinalIgnoreCase)) return path.Insert(2, PathInternal.UncExtendedPrefixToInsert); return PathInternal.ExtendedPathPrefix + path; } /// <summary> /// Adds the extended path prefix (\\?\) if not already present and if the path is not relative or a device (\\.\). /// </summary> internal static void EnsureExtendedPrefix(StringBuilder path) { if (IsExtended(path) || IsRelative(path) || IsDevice(path)) return; // Given \\server\share in longpath becomes \\?\UNC\server\share if (path.StartsWithOrdinal(UncPathPrefix)) { path.Insert(2, PathInternal.UncExtendedPrefixToInsert); return; } path.Insert(0, PathInternal.ExtendedPathPrefix); } /// <summary> /// Removes the extended path prefix (\\?\) if present. /// </summary> internal static string RemoveExtendedPrefix(string path) { if (!IsExtended(path)) return path; // Given \\?\UNC\server\share we return \\server\share if (IsExtendedUnc(path)) return path.Remove(2, 6); return path.Substring(4); } /// <summary> /// Removes the extended path prefix (\\?\) if present. 
/// </summary> internal static StringBuilder RemoveExtendedPrefix(StringBuilder path) { if (!IsExtended(path)) return path; // Given \\?\UNC\server\share we return \\server\share if (IsExtendedUnc(path)) return path.Remove(2, 6); return path.Remove(0, 4); } /// <summary> /// Returns true if the path uses the device syntax (\\.\) /// </summary> internal static bool IsDevice(string path) { return path != null && path.StartsWith(DevicePathPrefix, StringComparison.Ordinal); } /// <summary> /// Returns true if the path uses the device syntax (\\.\) /// </summary> internal static bool IsDevice(StringBuilder path) { return path != null && path.StartsWithOrdinal(DevicePathPrefix); } /// <summary> /// Returns true if the path uses the extended syntax (\\?\) /// </summary> internal static bool IsExtended(string path) { return path != null && path.StartsWith(ExtendedPathPrefix, StringComparison.Ordinal); } /// <summary> /// Returns true if the path uses the extended syntax (\\?\) /// </summary> internal static bool IsExtended(StringBuilder path) { return path != null && path.StartsWithOrdinal(ExtendedPathPrefix); } /// <summary> /// Returns true if the path uses the extended UNC syntax (\\?\UNC\) /// </summary> internal static bool IsExtendedUnc(string path) { return path != null && path.StartsWith(UncExtendedPathPrefix, StringComparison.Ordinal); } /// <summary> /// Returns true if the path uses the extended UNC syntax (\\?\UNC\) /// </summary> internal static bool IsExtendedUnc(StringBuilder path) { return path != null && path.StartsWithOrdinal(UncExtendedPathPrefix); } /// <summary> /// Returns a value indicating if the given path contains invalid characters (", &lt;, &gt;, | /// NUL, or any ASCII char whose integer representation is in the range of 1 through 31), /// optionally checking for ? and *. 
/// </summary>
internal static bool HasIllegalCharacters(string path, bool checkAdditional = false)
{
    Debug.Assert(path != null);

    // See: http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
    // Question mark is a normal part of extended path syntax (\\?\)
    int startIndex = PathInternal.IsExtended(path) ? ExtendedPathPrefix.Length : 0;
    return path.IndexOfAny(checkAdditional ? InvalidPathCharsWithAdditionalChecks : InvalidPathChars, startIndex) >= 0;
}

/// <summary>
/// Only check for ? and *.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static bool HasAdditionalIllegalCharacters(string path)
{
    // FIX: removed the spurious 'unsafe' modifier - this method uses no pointer code.
    // Wildcard characters inside the extended prefix itself ("\\?\") are legal, so the
    // scan starts after it.
    int startIndex = PathInternal.IsExtended(path) ? ExtendedPathPrefix.Length : 0;
    for (int i = startIndex; i < path.Length; i++)
    {
        char currentChar = path[i];
        if (currentChar == '*' || currentChar == '?')
            return true;
    }
    return false;
}

/// <summary>
/// Gets the length of the root of the path (drive, share, etc.).
/// </summary>
internal static unsafe int GetRootLength(string path)
{
    fixed (char* value = path)
    {
        return (int)GetRootLength(value, (ulong)path.Length);
    }
}

/// <summary>
/// Core implementation of <see cref="GetRootLength(string)"/> operating on a raw character
/// buffer. Handles simple rooted paths (\foo), drive paths (C:, C:\), UNC paths
/// (\\Server\Share) and their extended-syntax equivalents (\\?\C:\, \\?\UNC\Server\Share).
/// </summary>
/// <param name="path">Pointer to the first character of the path.</param>
/// <param name="pathLength">Number of characters in the path.</param>
/// <returns>The number of characters that make up the root of the path (0 if not rooted).</returns>
private static unsafe ulong GetRootLength(char* path, ulong pathLength)
{
    ulong i = 0;
    ulong volumeSeparatorLength = 2;  // Length to the colon "C:"
    ulong uncRootLength = 2;          // Length to the start of the server name "\\"

    bool extendedSyntax = StartsWithOrdinal(path, pathLength, ExtendedPathPrefix);
    bool extendedUncSyntax = StartsWithOrdinal(path, pathLength, UncExtendedPathPrefix);
    if (extendedSyntax)
    {
        // Shift the position we look for the root from to account for the extended prefix
        if (extendedUncSyntax)
        {
            // "\\" -> "\\?\UNC\"
            uncRootLength = (ulong)UncExtendedPathPrefix.Length;
        }
        else
        {
            // "C:" -> "\\?\C:"
            volumeSeparatorLength += (ulong)ExtendedPathPrefix.Length;
        }
    }

    if ((!extendedSyntax || extendedUncSyntax) && pathLength > 0 && IsDirectorySeparator(path[0]))
    {
        // UNC or simple rooted path (e.g. "\foo", NOT "\\?\C:\foo")
        i = 1; // Drive rooted (\foo) is one character
        if (extendedUncSyntax || (pathLength > 1 && IsDirectorySeparator(path[1])))
        {
            // UNC (\\?\UNC\ or \\), scan past the next two directory separators at most
            // (e.g. to \\?\UNC\Server\Share or \\Server\Share\)
            i = uncRootLength;
            int n = 2; // Maximum separators to skip
            while (i < pathLength && (!IsDirectorySeparator(path[i]) || --n > 0)) i++;
        }
    }
    else if (pathLength >= volumeSeparatorLength && path[volumeSeparatorLength - 1] == Path.VolumeSeparatorChar)
    {
        // Path is at least longer than where we expect a colon, and has a colon (\\?\A:, A:)
        // If the colon is followed by a directory separator, move past it
        i = volumeSeparatorLength;
        if (pathLength >= volumeSeparatorLength + 1 && IsDirectorySeparator(path[volumeSeparatorLength])) i++;
    }
    return i;
}

/// <summary>
/// Returns true if the first <paramref name="value"/>.Length characters of
/// <paramref name="source"/> exactly match <paramref name="value"/> (ordinal,
/// character-by-character comparison).
/// </summary>
private static unsafe bool StartsWithOrdinal(char* source, ulong sourceLength, string value)
{
    if (sourceLength < (ulong)value.Length) return false;
    for (int i = 0; i < value.Length; i++)
    {
        if (value[i] != source[i]) return false;
    }
    return true;
}

// FIX: an orphaned, duplicated "Gets the length of the root of the path" XML summary
// (attached to no member) was removed here.

/// <summary>
/// Returns true if the path specified is relative to the current drive or working directory.
/// Returns false if the path is fixed to a specific drive or UNC path. This method does no
/// validation of the path (URIs will be returned as relative as a result).
/// </summary>
/// <remarks>
/// Handles paths that use the alternate directory separator. It is a frequent mistake to
/// assume that rooted paths (Path.IsPathRooted) are not relative. This isn't the case.
/// "C:a" is drive relative- meaning that it will be resolved against the current directory
/// for C: (rooted, but relative). "C:\a" is rooted and not relative (the current directory
/// will not be used to modify the path).
/// </remarks>
internal static bool IsRelative(string path)
{
    if (path.Length < 2)
    {
        // It isn't fixed, it must be relative. There is no way to specify a fixed
        // path with one character (or less).
        return true;
    }

    if (IsDirectorySeparator(path[0]))
    {
        // There is no valid way to specify a relative path with two initial slashes
        return !IsDirectorySeparator(path[1]);
    }

    // The only way to specify a fixed path that doesn't begin with two slashes
    // is the drive, colon, slash format- i.e. C:\
    return !((path.Length >= 3)
        && (path[1] == Path.VolumeSeparatorChar)
        && IsDirectorySeparator(path[2]));
}

/// <summary>
/// Returns true if the path specified is relative to the current drive or working directory.
/// Returns false if the path is fixed to a specific drive or UNC path. This method does no
/// validation of the path (URIs will be returned as relative as a result).
/// </summary>
/// <remarks>
/// Handles paths that use the alternate directory separator. It is a frequent mistake to
/// assume that rooted paths (Path.IsPathRooted) are not relative. This isn't the case.
/// "C:a" is drive relative- meaning that it will be resolved against the current directory
/// for C: (rooted, but relative). "C:\a" is rooted and not relative (the current directory
/// will not be used to modify the path).
/// </remarks>
internal static bool IsRelative(StringBuilder path)
{
    if (path.Length < 2)
    {
        // It isn't fixed, it must be relative. There is no way to specify a fixed
        // path with one character (or less).
        return true;
    }

    if (IsDirectorySeparator(path[0]))
    {
        // There is no valid way to specify a relative path with two initial slashes
        return !IsDirectorySeparator(path[1]);
    }

    // The only way to specify a fixed path that doesn't begin with two slashes
    // is the drive, colon, slash format- i.e. C:\
    return !((path.Length >= 3)
        && (path[1] == Path.VolumeSeparatorChar)
        && IsDirectorySeparator(path[2]));
}

/// <summary>
/// Returns the characters to skip at the start of the path if it starts with space(s) and a drive or UNC.
/// (examples are " C:", " \\")
/// This is a legacy behavior of Path.GetFullPath().
/// </summary>
internal static int PathStartSkip(string path)
{
    int startIndex = 0;
    while (startIndex < path.Length && path[startIndex] == ' ') startIndex++;

    if (startIndex > 0)
    {
        if (startIndex + 1 < path.Length
            && ((PathInternal.IsDirectorySeparator(path[startIndex]) && PathInternal.IsDirectorySeparator(path[startIndex + 1]))
                || (path[startIndex + 1] == ':' && PathInternal.IsValidDriveChar(path[startIndex]))))
        {
            // Go ahead and skip spaces as we're either " C:" or " \\"
        }
        else
        {
            // Not one of the cases we're looking for, go back to the beginning
            startIndex = 0;
        }
    }
    return startIndex;
}

/// <summary>
/// True if the given character is a directory separator.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static bool IsDirectorySeparator(char c)
{
    return c == Path.DirectorySeparatorChar || c == Path.AltDirectorySeparatorChar;
}
}
}
/*
 * Copyright 2002-2015 Drew Noakes
 *
 * Modified by Yakov Danilov <yakodani@gmail.com> for Imazen LLC (Ported from Java to C#)
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * More information about this project is available at:
 *
 *    https://drewnoakes.com/code/exif/
 *    https://github.com/drewnoakes/metadata-extractor
 */
using System.Collections.Generic;
using JetBrains.Annotations;
using Sharpen;

namespace Com.Drew.Metadata.Photoshop
{
    /// <summary>Holds the metadata found in the APPD segment of a JPEG file saved by Photoshop.</summary>
    /// <author>Drew Noakes https://drewnoakes.com</author>
    /// <author>Yuri Binev</author>
    public class PhotoshopDirectory : Com.Drew.Metadata.Directory
    {
        // Tag identifiers are Photoshop image resource IDs (the 8BIM resource numbers).
        public const int TagChannelsRowsColumnsDepthMode = unchecked((int)(0x03E8));

        public const int TagMacPrintInfo = unchecked((int)(0x03E9));

        public const int TagXml = unchecked((int)(0x03EA));

        public const int TagIndexedColorTable = unchecked((int)(0x03EB));

        public const int TagResolutionInfo = unchecked((int)(0x03ED));

        public const int TagAlphaChannels = unchecked((int)(0x03EE));

        public const int TagDisplayInfoObsolete = unchecked((int)(0x03EF));

        public const int TagCaption = unchecked((int)(0x03F0));

        public const int TagBorderInformation = unchecked((int)(0x03F1));

        public const int TagBackgroundColor = unchecked((int)(0x03F2));

        public const int TagPrintFlags = unchecked((int)(0x03F3));

        public const int TagGrayscaleAndMultichannelHalftoningInformation = unchecked((int)(0x03F4));

        public const int TagColorHalftoningInformation = unchecked((int)(0x03F5));

        public const int TagDuotoneHalftoningInformation = unchecked((int)(0x03F6));

        public const int TagGrayscaleAndMultichannelTransferFunction = unchecked((int)(0x03F7));

        public const int TagColorTransferFunctions = unchecked((int)(0x03F8));

        public const int TagDuotoneTransferFunctions = unchecked((int)(0x03F9));

        public const int TagDuotoneImageInformation = unchecked((int)(0x03FA));

        public const int TagEffectiveBlackAndWhiteValues = unchecked((int)(0x03FB));

        public const int TagEpsOptions = unchecked((int)(0x03FD));

        public const int TagQuickMaskInformation = unchecked((int)(0x03FE));

        public const int TagLayerStateInformation = unchecked((int)(0x0400));

        public const int TagLayersGroupInformation = unchecked((int)(0x0402));

        public const int TagIptc = unchecked((int)(0x0404));

        public const int TagImageModeForRawFormatFiles = unchecked((int)(0x0405));

        public const int TagJpegQuality = unchecked((int)(0x0406));

        public const int TagGridAndGuidesInformation = unchecked((int)(0x0408));

        public const int TagThumbnailOld = unchecked((int)(0x0409));

        public const int TagCopyright = unchecked((int)(0x040A));

        public const int TagUrl = unchecked((int)(0x040B));

        public const int TagThumbnail = unchecked((int)(0x040C));

        public const int TagGlobalAngle = unchecked((int)(0x040D));

        public const int TagIccProfileBytes = unchecked((int)(0x040F));

        public const int TagWatermark = unchecked((int)(0x0410));

        public const int TagIccUntaggedProfile = unchecked((int)(0x0411));

        public const int TagEffectsVisible = unchecked((int)(0x0412));

        public const int TagSpotHalftone = unchecked((int)(0x0413));

        public const int TagSeedNumber = unchecked((int)(0x0414));

        public const int TagUnicodeAlphaNames = unchecked((int)(0x0415));

        public const int TagIndexedColorTableCount = unchecked((int)(0x0416));

        public const int TagTransparencyIndex = unchecked((int)(0x0417));

        public const int TagGlobalAltitude = unchecked((int)(0x0419));

        public const int TagSlices = unchecked((int)(0x041A));

        public const int TagWorkflowUrl = unchecked((int)(0x041B));

        public const int TagJumpToXpep = unchecked((int)(0x041C));

        public const int TagAlphaIdentifiers = unchecked((int)(0x041D));

        public const int TagUrlList = unchecked((int)(0x041E));

        public const int TagVersion = unchecked((int)(0x0421));

        public const int TagExifData1 = unchecked((int)(0x0422));

        public const int TagExifData3 = unchecked((int)(0x0423));

        public const int TagXmpData = unchecked((int)(0x0424));

        public const int TagCaptionDigest = unchecked((int)(0x0425));

        public const int TagPrintScale = unchecked((int)(0x0426));

        public const int TagPixelAspectRatio = unchecked((int)(0x0428));

        public const int TagLayerComps = unchecked((int)(0x0429));

        public const int TagAlternateDuotoneColors = unchecked((int)(0x042A));

        public const int TagAlternateSpotColors = unchecked((int)(0x042B));

        public const int TagLayerSelectionIds = unchecked((int)(0x042D));

        public const int TagHdrToningInfo = unchecked((int)(0x042E));

        public const int TagPrintInfo = unchecked((int)(0x042F));

        public const int TagLayerGroupsEnabledId = unchecked((int)(0x0430));

        public const int TagColorSamplers = unchecked((int)(0x0431));

        public const int TagMeasurementScale = unchecked((int)(0x0432));

        public const int TagTimelineInformation = unchecked((int)(0x0433));

        public const int TagSheetDisclosure = unchecked((int)(0x0434));

        public const int TagDisplayInfo = unchecked((int)(0x0435));

        public const int TagOnionSkins = unchecked((int)(0x0436));

        public const int TagCountInformation = unchecked((int)(0x0438));

        public const int TagPrintInfo2 = unchecked((int)(0x043A));

        public const int TagPrintStyle = unchecked((int)(0x043B));

        public const int TagMacNsprintinfo = unchecked((int)(0x043C));

        public const int TagWinDevmode = unchecked((int)(0x043D));

        public const int TagAutoSaveFilePath = unchecked((int)(0x043E));

        public const int TagAutoSaveFormat = unchecked((int)(0x043F));

        public const int TagPathSelectionState = unchecked((int)(0x0440));

        public const int TagClippingPathName = unchecked((int)(0x0BB7));

        public const int TagOriginPathInfo = unchecked((int)(0x0BB8));

        public const int TagImageReadyVariablesXml = unchecked((int)(0x1B58));

        public const int TagImageReadyDataSets = unchecked((int)(0x1B59));

        public const int TagLightroomWorkflow = unchecked((int)(0x1F40));

        public const int TagPrintFlagsInfo = unchecked((int)(0x2710));

        [NotNull]
        protected internal static readonly Dictionary<int?, string> _tagNameMap = new Dictionary<int?, string>();

        static PhotoshopDirectory()
        {
            // OBSOLETE                                  0x03FC
            // OBSOLETE                                  0x03FF
            // Working path (not saved)                  0x0401
            // OBSOLETE                                  0x0403
            // OBSOLETE                                  0x040E
            // CLIPPING PATHS                            0x07D0 -> 0x0BB6
            // PLUG IN RESOURCES                         0x0FA0 -> 0x1387
            _tagNameMap.Put(TagChannelsRowsColumnsDepthMode, "Channels, Rows, Columns, Depth, Mode");
            _tagNameMap.Put(TagMacPrintInfo, "Mac Print Info");
            _tagNameMap.Put(TagXml, "XML Data");
            _tagNameMap.Put(TagIndexedColorTable, "Indexed Color Table");
            _tagNameMap.Put(TagResolutionInfo, "Resolution Info");
            _tagNameMap.Put(TagAlphaChannels, "Alpha Channels");
            _tagNameMap.Put(TagDisplayInfoObsolete, "Display Info (Obsolete)");
            _tagNameMap.Put(TagCaption, "Caption");
            _tagNameMap.Put(TagBorderInformation, "Border Information");
            _tagNameMap.Put(TagBackgroundColor, "Background Color");
            _tagNameMap.Put(TagPrintFlags, "Print Flags");
            _tagNameMap.Put(TagGrayscaleAndMultichannelHalftoningInformation, "Grayscale and Multichannel Halftoning Information");
            _tagNameMap.Put(TagColorHalftoningInformation, "Color Halftoning Information");
            _tagNameMap.Put(TagDuotoneHalftoningInformation, "Duotone Halftoning Information");
            _tagNameMap.Put(TagGrayscaleAndMultichannelTransferFunction, "Grayscale and Multichannel Transfer Function");
            _tagNameMap.Put(TagColorTransferFunctions, "Color Transfer Functions");
            _tagNameMap.Put(TagDuotoneTransferFunctions, "Duotone Transfer Functions");
            _tagNameMap.Put(TagDuotoneImageInformation, "Duotone Image Information");
            _tagNameMap.Put(TagEffectiveBlackAndWhiteValues, "Effective Black and White Values");
            _tagNameMap.Put(TagEpsOptions, "EPS Options");
            _tagNameMap.Put(TagQuickMaskInformation, "Quick Mask Information");
            _tagNameMap.Put(TagLayerStateInformation, "Layer State Information");
            _tagNameMap.Put(TagLayersGroupInformation, "Layers Group Information");
            _tagNameMap.Put(TagIptc, "IPTC-NAA Record");
            _tagNameMap.Put(TagImageModeForRawFormatFiles, "Image Mode for Raw Format Files");
            _tagNameMap.Put(TagJpegQuality, "JPEG Quality");
            _tagNameMap.Put(TagGridAndGuidesInformation, "Grid and Guides Information");
            _tagNameMap.Put(TagThumbnailOld, "Photoshop 4.0 Thumbnail");
            _tagNameMap.Put(TagCopyright, "Copyright Flag");
            _tagNameMap.Put(TagUrl, "URL");
            _tagNameMap.Put(TagThumbnail, "Thumbnail Data");
            _tagNameMap.Put(TagGlobalAngle, "Global Angle");
            _tagNameMap.Put(TagIccProfileBytes, "ICC Profile Bytes");
            _tagNameMap.Put(TagWatermark, "Watermark");
            _tagNameMap.Put(TagIccUntaggedProfile, "ICC Untagged Profile");
            _tagNameMap.Put(TagEffectsVisible, "Effects Visible");
            _tagNameMap.Put(TagSpotHalftone, "Spot Halftone");
            _tagNameMap.Put(TagSeedNumber, "Seed Number");
            _tagNameMap.Put(TagUnicodeAlphaNames, "Unicode Alpha Names");
            _tagNameMap.Put(TagIndexedColorTableCount, "Indexed Color Table Count");
            _tagNameMap.Put(TagTransparencyIndex, "Transparency Index");
            _tagNameMap.Put(TagGlobalAltitude, "Global Altitude");
            _tagNameMap.Put(TagSlices, "Slices");
            _tagNameMap.Put(TagWorkflowUrl, "Workflow URL");
            _tagNameMap.Put(TagJumpToXpep, "Jump To XPEP");
            _tagNameMap.Put(TagAlphaIdentifiers, "Alpha Identifiers");
            _tagNameMap.Put(TagUrlList, "URL List");
            _tagNameMap.Put(TagVersion, "Version Info");
            _tagNameMap.Put(TagExifData1, "EXIF Data 1");
            _tagNameMap.Put(TagExifData3, "EXIF Data 3");
            _tagNameMap.Put(TagXmpData, "XMP Data");
            _tagNameMap.Put(TagCaptionDigest, "Caption Digest");
            _tagNameMap.Put(TagPrintScale, "Print Scale");
            _tagNameMap.Put(TagPixelAspectRatio, "Pixel Aspect Ratio");
            _tagNameMap.Put(TagLayerComps, "Layer Comps");
            _tagNameMap.Put(TagAlternateDuotoneColors, "Alternate Duotone Colors");
            _tagNameMap.Put(TagAlternateSpotColors, "Alternate Spot Colors");
            _tagNameMap.Put(TagLayerSelectionIds, "Layer Selection IDs");
            _tagNameMap.Put(TagHdrToningInfo, "HDR Toning Info");
            _tagNameMap.Put(TagPrintInfo, "Print Info");
            _tagNameMap.Put(TagLayerGroupsEnabledId, "Layer Groups Enabled ID");
            _tagNameMap.Put(TagColorSamplers, "Color Samplers");
            _tagNameMap.Put(TagMeasurementScale, "Measurement Scale");
            _tagNameMap.Put(TagTimelineInformation, "Timeline Information");
            _tagNameMap.Put(TagSheetDisclosure, "Sheet Disclosure");
            _tagNameMap.Put(TagDisplayInfo, "Display Info");
            _tagNameMap.Put(TagOnionSkins, "Onion Skins");
            _tagNameMap.Put(TagCountInformation, "Count information");
            _tagNameMap.Put(TagPrintInfo2, "Print Info 2");
            _tagNameMap.Put(TagPrintStyle, "Print Style");
            _tagNameMap.Put(TagMacNsprintinfo, "Mac NSPrintInfo");
            _tagNameMap.Put(TagWinDevmode, "Win DEVMODE");
            _tagNameMap.Put(TagAutoSaveFilePath, "Auto Save File Path");
            _tagNameMap.Put(TagAutoSaveFormat, "Auto Save Format");
            _tagNameMap.Put(TagPathSelectionState, "Path Selection State");
            _tagNameMap.Put(TagClippingPathName, "Clipping Path Name");
            _tagNameMap.Put(TagOriginPathInfo, "Origin Path Info");
            _tagNameMap.Put(TagImageReadyVariablesXml, "Image Ready Variables XML");
            _tagNameMap.Put(TagImageReadyDataSets, "Image Ready Data Sets");
            _tagNameMap.Put(TagLightroomWorkflow, "Lightroom Workflow");
            _tagNameMap.Put(TagPrintFlagsInfo, "Print Flags Information");
        }

        public PhotoshopDirectory()
        {
            this.SetDescriptor(new PhotoshopDescriptor(this));
        }

        [NotNull]
        public override string GetName()
        {
            return "Photoshop";
        }

        [NotNull]
        protected internal override Dictionary<int?, string> GetTagNameMap()
        {
            return _tagNameMap;
        }

        /// <summary>
        /// Returns the thumbnail's image bytes (with the 28-byte thumbnail resource header
        /// stripped), or null if no thumbnail resource is stored, or if the stored resource
        /// is too short to contain any image data after the header.
        /// </summary>
        [CanBeNull]
        public virtual sbyte[] GetThumbnailBytes()
        {
            sbyte[] storedBytes = GetByteArray(Com.Drew.Metadata.Photoshop.PhotoshopDirectory.TagThumbnail);
            if (storedBytes == null)
            {
                storedBytes = GetByteArray(Com.Drew.Metadata.Photoshop.PhotoshopDirectory.TagThumbnailOld);
            }
            if (storedBytes == null)
            {
                return null;
            }
            // FIX: guard against a truncated resource. Without this, a resource shorter
            // than the 28-byte header would produce a negative array size and throw,
            // defeating the [CanBeNull] best-effort contract of this accessor.
            if (storedBytes.Length <= 28)
            {
                return null;
            }
            int thumbSize = storedBytes.Length - 28;
            sbyte[] thumbBytes = new sbyte[thumbSize];
            System.Array.Copy(storedBytes, 28, thumbBytes, 0, thumbSize);
            return thumbBytes;
        }
    }
}
using System;
using System.Runtime.InteropServices;
using System.Text;
using System.Collections.Generic;
using OpenHome.Net.Core;

namespace OpenHome.Net.Device.Providers
{
    public interface IDvProviderOpenhomeOrgSubscriptionLongPoll1 : IDisposable
    {
    }

    /// <summary>
    /// Provider for the openhome.org:SubscriptionLongPoll:1 UPnP service
    /// </summary>
    public class DvProviderOpenhomeOrgSubscriptionLongPoll1 : DvProvider, IDisposable, IDvProviderOpenhomeOrgSubscriptionLongPoll1
    {
        // GC handle allocated for this instance; its IntPtr form is passed to the native
        // layer with each enabled action so callbacks can be routed back to this object.
        // It is freed in Dispose().
        private GCHandle iGch;
        // Delegates are held in fields so they are not garbage collected while the native
        // layer still holds function pointers to them.
        private ActionDelegate iDelegateSubscribe;
        private ActionDelegate iDelegateUnsubscribe;
        private ActionDelegate iDelegateRenew;
        private ActionDelegate iDelegateGetPropertyUpdates;

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="aDevice">Device which owns this provider</param>
        protected DvProviderOpenhomeOrgSubscriptionLongPoll1(DvDevice aDevice)
            : base(aDevice, "openhome.org", "SubscriptionLongPoll", 1)
        {
            iGch = GCHandle.Alloc(this);
        }

        /// <summary>
        /// Signal that the action Subscribe is supported.
        /// </summary>
        /// <remarks>The action's availability will be published in the device's service.xml.
        /// Subscribe must be overridden if this is called.</remarks>
        protected void EnableActionSubscribe()
        {
            OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("Subscribe");
            List<String> allowedValues = new List<String>();
            action.AddInputParameter(new ParameterString("ClientId", allowedValues));
            action.AddInputParameter(new ParameterString("Udn", allowedValues));
            action.AddInputParameter(new ParameterString("Service", allowedValues));
            action.AddInputParameter(new ParameterUint("RequestedDuration"));
            action.AddOutputParameter(new ParameterString("Sid", allowedValues));
            action.AddOutputParameter(new ParameterUint("Duration"));
            iDelegateSubscribe = new ActionDelegate(DoSubscribe);
            EnableAction(action, iDelegateSubscribe, GCHandle.ToIntPtr(iGch));
        }

        /// <summary>
        /// Signal that the action Unsubscribe is supported.
        /// </summary>
        /// <remarks>The action's availability will be published in the device's service.xml.
        /// Unsubscribe must be overridden if this is called.</remarks>
        protected void EnableActionUnsubscribe()
        {
            OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("Unsubscribe");
            List<String> allowedValues = new List<String>();
            action.AddInputParameter(new ParameterString("Sid", allowedValues));
            iDelegateUnsubscribe = new ActionDelegate(DoUnsubscribe);
            EnableAction(action, iDelegateUnsubscribe, GCHandle.ToIntPtr(iGch));
        }

        /// <summary>
        /// Signal that the action Renew is supported.
        /// </summary>
        /// <remarks>The action's availability will be published in the device's service.xml.
        /// Renew must be overridden if this is called.</remarks>
        protected void EnableActionRenew()
        {
            OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("Renew");
            List<String> allowedValues = new List<String>();
            action.AddInputParameter(new ParameterString("Sid", allowedValues));
            action.AddInputParameter(new ParameterUint("RequestedDuration"));
            action.AddOutputParameter(new ParameterUint("Duration"));
            iDelegateRenew = new ActionDelegate(DoRenew);
            EnableAction(action, iDelegateRenew, GCHandle.ToIntPtr(iGch));
        }

        /// <summary>
        /// Signal that the action GetPropertyUpdates is supported.
        /// </summary>
        /// <remarks>The action's availability will be published in the device's service.xml.
        /// GetPropertyUpdates must be overridden if this is called.</remarks>
        protected void EnableActionGetPropertyUpdates()
        {
            OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("GetPropertyUpdates");
            List<String> allowedValues = new List<String>();
            action.AddInputParameter(new ParameterString("ClientId", allowedValues));
            action.AddOutputParameter(new ParameterString("Updates", allowedValues));
            iDelegateGetPropertyUpdates = new ActionDelegate(DoGetPropertyUpdates);
            EnableAction(action, iDelegateGetPropertyUpdates, GCHandle.ToIntPtr(iGch));
        }

        /// <summary>
        /// Subscribe action.
        /// </summary>
        /// <remarks>Will be called when the device stack receives an invocation of the
        /// Subscribe action for the owning device.
        ///
        /// Must be implemented iff EnableActionSubscribe was called.</remarks>
        /// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
        /// <param name="aClientId"></param>
        /// <param name="aUdn"></param>
        /// <param name="aService"></param>
        /// <param name="aRequestedDuration"></param>
        /// <param name="aSid"></param>
        /// <param name="aDuration"></param>
        protected virtual void Subscribe(IDvInvocation aInvocation, string aClientId, string aUdn, string aService, uint aRequestedDuration, out string aSid, out uint aDuration)
        {
            throw (new ActionDisabledError());
        }

        /// <summary>
        /// Unsubscribe action.
        /// </summary>
        /// <remarks>Will be called when the device stack receives an invocation of the
        /// Unsubscribe action for the owning device.
        ///
        /// Must be implemented iff EnableActionUnsubscribe was called.</remarks>
        /// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
        /// <param name="aSid"></param>
        protected virtual void Unsubscribe(IDvInvocation aInvocation, string aSid)
        {
            throw (new ActionDisabledError());
        }

        /// <summary>
        /// Renew action.
        /// </summary>
        /// <remarks>Will be called when the device stack receives an invocation of the
        /// Renew action for the owning device.
        ///
        /// Must be implemented iff EnableActionRenew was called.</remarks>
        /// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
        /// <param name="aSid"></param>
        /// <param name="aRequestedDuration"></param>
        /// <param name="aDuration"></param>
        protected virtual void Renew(IDvInvocation aInvocation, string aSid, uint aRequestedDuration, out uint aDuration)
        {
            throw (new ActionDisabledError());
        }

        /// <summary>
        /// GetPropertyUpdates action.
        /// </summary>
        /// <remarks>Will be called when the device stack receives an invocation of the
        /// GetPropertyUpdates action for the owning device.
        ///
        /// Must be implemented iff EnableActionGetPropertyUpdates was called.</remarks>
        /// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
        /// <param name="aClientId"></param>
        /// <param name="aUpdates"></param>
        protected virtual void GetPropertyUpdates(IDvInvocation aInvocation, string aClientId, out string aUpdates)
        {
            throw (new ActionDisabledError());
        }

        // Native callback for Subscribe: recover the provider instance from the GC handle,
        // unmarshal the invocation arguments, dispatch to the subclass override, then
        // marshal the response. Returns 0 on success, -1 on a reported error.
        private static int DoSubscribe(IntPtr aPtr, IntPtr aInvocation)
        {
            GCHandle gch = GCHandle.FromIntPtr(aPtr);
            DvProviderOpenhomeOrgSubscriptionLongPoll1 self = (DvProviderOpenhomeOrgSubscriptionLongPoll1)gch.Target;
            DvInvocation invocation = new DvInvocation(aInvocation);
            string clientId;
            string udn;
            string service;
            uint requestedDuration;
            string sid;
            uint duration;
            try
            {
                invocation.ReadStart();
                clientId = invocation.ReadString("ClientId");
                udn = invocation.ReadString("Udn");
                service = invocation.ReadString("Service");
                requestedDuration = invocation.ReadUint("RequestedDuration");
                invocation.ReadEnd();
                self.Subscribe(invocation, clientId, udn, service, requestedDuration, out sid, out duration);
            }
            catch (ActionError e)
            {
                invocation.ReportActionError(e, "Subscribe");
                return -1;
            }
            catch (PropertyUpdateError)
            {
                invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "Subscribe" }));
                return -1;
            }
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Subscribe" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError or PropertyUpdateError should be thrown by actions");
                return -1;
            }
            try
            {
                invocation.WriteStart();
                invocation.WriteString("Sid", sid);
                invocation.WriteUint("Duration", duration);
                invocation.WriteEnd();
            }
            catch (ActionError)
            {
                return -1;
            }
            catch (System.Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Subscribe" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError can be thrown by action response writer");
            }
            return 0;
        }

        // Native callback for Unsubscribe; see DoSubscribe for the dispatch pattern.
        private static int DoUnsubscribe(IntPtr aPtr, IntPtr aInvocation)
        {
            GCHandle gch = GCHandle.FromIntPtr(aPtr);
            DvProviderOpenhomeOrgSubscriptionLongPoll1 self = (DvProviderOpenhomeOrgSubscriptionLongPoll1)gch.Target;
            DvInvocation invocation = new DvInvocation(aInvocation);
            string sid;
            try
            {
                invocation.ReadStart();
                sid = invocation.ReadString("Sid");
                invocation.ReadEnd();
                self.Unsubscribe(invocation, sid);
            }
            catch (ActionError e)
            {
                invocation.ReportActionError(e, "Unsubscribe");
                return -1;
            }
            catch (PropertyUpdateError)
            {
                invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "Unsubscribe" }));
                return -1;
            }
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Unsubscribe" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError or PropertyUpdateError should be thrown by actions");
                return -1;
            }
            try
            {
                invocation.WriteStart();
                invocation.WriteEnd();
            }
            catch (ActionError)
            {
                return -1;
            }
            catch (System.Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Unsubscribe" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError can be thrown by action response writer");
            }
            return 0;
        }

        // Native callback for Renew; see DoSubscribe for the dispatch pattern.
        private static int DoRenew(IntPtr aPtr, IntPtr aInvocation)
        {
            GCHandle gch = GCHandle.FromIntPtr(aPtr);
            DvProviderOpenhomeOrgSubscriptionLongPoll1 self = (DvProviderOpenhomeOrgSubscriptionLongPoll1)gch.Target;
            DvInvocation invocation = new DvInvocation(aInvocation);
            string sid;
            uint requestedDuration;
            uint duration;
            try
            {
                invocation.ReadStart();
                sid = invocation.ReadString("Sid");
                requestedDuration = invocation.ReadUint("RequestedDuration");
                invocation.ReadEnd();
                self.Renew(invocation, sid, requestedDuration, out duration);
            }
            catch (ActionError e)
            {
                invocation.ReportActionError(e, "Renew");
                return -1;
            }
            catch (PropertyUpdateError)
            {
                invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "Renew" }));
                return -1;
            }
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Renew" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError or PropertyUpdateError should be thrown by actions");
                return -1;
            }
            try
            {
                invocation.WriteStart();
                invocation.WriteUint("Duration", duration);
                invocation.WriteEnd();
            }
            catch (ActionError)
            {
                return -1;
            }
            catch (System.Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Renew" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError can be thrown by action response writer");
            }
            return 0;
        }

        // Native callback for GetPropertyUpdates; see DoSubscribe for the dispatch pattern.
        private static int DoGetPropertyUpdates(IntPtr aPtr, IntPtr aInvocation)
        {
            GCHandle gch = GCHandle.FromIntPtr(aPtr);
            DvProviderOpenhomeOrgSubscriptionLongPoll1 self = (DvProviderOpenhomeOrgSubscriptionLongPoll1)gch.Target;
            DvInvocation invocation = new DvInvocation(aInvocation);
            string clientId;
            string updates;
            try
            {
                invocation.ReadStart();
                clientId = invocation.ReadString("ClientId");
                invocation.ReadEnd();
                self.GetPropertyUpdates(invocation, clientId, out updates);
            }
            catch (ActionError e)
            {
                invocation.ReportActionError(e, "GetPropertyUpdates");
                return -1;
            }
            catch (PropertyUpdateError)
            {
                invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "GetPropertyUpdates" }));
                return -1;
            }
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "GetPropertyUpdates" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError or PropertyUpdateError should be thrown by actions");
                return -1;
            }
            try
            {
                invocation.WriteStart();
                invocation.WriteString("Updates", updates);
                invocation.WriteEnd();
            }
            catch (ActionError)
            {
                return -1;
            }
            catch (System.Exception e)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "GetPropertyUpdates" });
                System.Diagnostics.Debug.WriteLine(" Only ActionError can be thrown by action response writer");
            }
            return 0;
        }

        /// <summary>
        /// Must be called for each class instance. Must be called before Core.Library.Close().
        /// </summary>
        public virtual void Dispose()
        {
            if (DisposeProvider())
                iGch.Free();
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

#region StyleCop Suppression - generated code
// NOTE(review): this file appears to be generated (see the region name above) -
// prefer changing the generator/template over hand-editing this file.
using System;
using System.ComponentModel;
using System.Windows;
using System.Windows.Input;

namespace Microsoft.Management.UI.Internal
{
    [Localizability(LocalizationCategory.None)]
    partial class AddFilterRulePicker
    {
        //
        // CancelAddFilterRules routed command
        //
        /// <summary>
        /// Closes the picker and unchecks all items in the panel.
        /// </summary>
        public static readonly RoutedCommand CancelAddFilterRulesCommand = new RoutedCommand("CancelAddFilterRules",typeof(AddFilterRulePicker));

        static private void CancelAddFilterRulesCommand_CommandExecuted(object sender, ExecutedRoutedEventArgs e)
        {
            AddFilterRulePicker obj = (AddFilterRulePicker) sender;
            obj.OnCancelAddFilterRulesExecuted( e );
        }

        /// <summary>
        /// Called when CancelAddFilterRules executes.
        /// </summary>
        /// <remarks>
        /// Closes the picker and unchecks all items in the panel.
        /// </remarks>
        protected virtual void OnCancelAddFilterRulesExecuted(ExecutedRoutedEventArgs e)
        {
            OnCancelAddFilterRulesExecutedImplementation(e);
        }

        partial void OnCancelAddFilterRulesExecutedImplementation(ExecutedRoutedEventArgs e);

        //
        // OkAddFilterRules routed command
        //
        /// <summary>
        /// Closes the picker and calls AddFilterRulesCommand with the collection of checked items from the picker.
        /// </summary>
        public static readonly RoutedCommand OkAddFilterRulesCommand = new RoutedCommand("OkAddFilterRules",typeof(AddFilterRulePicker));

        static private void OkAddFilterRulesCommand_CommandCanExecute(object sender, CanExecuteRoutedEventArgs e)
        {
            AddFilterRulePicker obj = (AddFilterRulePicker) sender;
            obj.OnOkAddFilterRulesCanExecute( e );
        }

        static private void OkAddFilterRulesCommand_CommandExecuted(object sender, ExecutedRoutedEventArgs e)
        {
            AddFilterRulePicker obj = (AddFilterRulePicker) sender;
            obj.OnOkAddFilterRulesExecuted( e );
        }

        /// <summary>
        /// Called to determine if OkAddFilterRules can execute.
        /// </summary>
        protected virtual void OnOkAddFilterRulesCanExecute(CanExecuteRoutedEventArgs e)
        {
            OnOkAddFilterRulesCanExecuteImplementation(e);
        }

        partial void OnOkAddFilterRulesCanExecuteImplementation(CanExecuteRoutedEventArgs e);

        /// <summary>
        /// Called when OkAddFilterRules executes.
        /// </summary>
        /// <remarks>
        /// Closes the picker and calls AddFilterRulesCommand with the collection of checked items from the picker.
        /// </remarks>
        protected virtual void OnOkAddFilterRulesExecuted(ExecutedRoutedEventArgs e)
        {
            OnOkAddFilterRulesExecutedImplementation(e);
        }

        partial void OnOkAddFilterRulesExecutedImplementation(ExecutedRoutedEventArgs e);

        //
        // AddFilterRulesCommand dependency property
        //
        /// <summary>
        /// Identifies the AddFilterRulesCommand dependency property.
        /// </summary>
        public static readonly DependencyProperty AddFilterRulesCommandProperty = DependencyProperty.Register( "AddFilterRulesCommand", typeof(ICommand), typeof(AddFilterRulePicker), new PropertyMetadata( null, AddFilterRulesCommandProperty_PropertyChanged) );

        /// <summary>
        /// Gets or sets the command used to communicate that the action has occurred.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets the command used to communicate that the action has occurred.")]
        [Localizability(LocalizationCategory.None)]
        public ICommand AddFilterRulesCommand
        {
            get
            {
                return (ICommand) GetValue(AddFilterRulesCommandProperty);
            }
            set
            {
                SetValue(AddFilterRulesCommandProperty,value);
            }
        }

        static private void AddFilterRulesCommandProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            AddFilterRulePicker obj = (AddFilterRulePicker) o;
            obj.OnAddFilterRulesCommandChanged( new PropertyChangedEventArgs<ICommand>((ICommand)e.OldValue, (ICommand)e.NewValue) );
        }

        /// <summary>
        /// Occurs when AddFilterRulesCommand property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<ICommand>> AddFilterRulesCommandChanged;

        /// <summary>
        /// Called when AddFilterRulesCommand property changes.
        /// </summary>
        protected virtual void OnAddFilterRulesCommandChanged(PropertyChangedEventArgs<ICommand> e)
        {
            OnAddFilterRulesCommandChangedImplementation(e);
            RaisePropertyChangedEvent(AddFilterRulesCommandChanged, e);
        }

        partial void OnAddFilterRulesCommandChangedImplementation(PropertyChangedEventArgs<ICommand> e);

        //
        // AddFilterRulesCommandTarget dependency property
        //
        /// <summary>
        /// Identifies the AddFilterRulesCommandTarget dependency property.
        /// </summary>
        public static readonly DependencyProperty AddFilterRulesCommandTargetProperty = DependencyProperty.Register( "AddFilterRulesCommandTarget", typeof(IInputElement), typeof(AddFilterRulePicker), new PropertyMetadata( null, AddFilterRulesCommandTargetProperty_PropertyChanged) );

        /// <summary>
        /// Gets or sets a target of the Command.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets a target of the Command.")]
        [Localizability(LocalizationCategory.None)]
        public IInputElement AddFilterRulesCommandTarget
        {
            get
            {
                return (IInputElement) GetValue(AddFilterRulesCommandTargetProperty);
            }
            set
            {
                SetValue(AddFilterRulesCommandTargetProperty,value);
            }
        }

        static private void AddFilterRulesCommandTargetProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            AddFilterRulePicker obj = (AddFilterRulePicker) o;
            obj.OnAddFilterRulesCommandTargetChanged( new PropertyChangedEventArgs<IInputElement>((IInputElement)e.OldValue, (IInputElement)e.NewValue) );
        }

        /// <summary>
        /// Occurs when AddFilterRulesCommandTarget property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<IInputElement>> AddFilterRulesCommandTargetChanged;

        /// <summary>
        /// Called when AddFilterRulesCommandTarget property changes.
        /// </summary>
        protected virtual void OnAddFilterRulesCommandTargetChanged(PropertyChangedEventArgs<IInputElement> e)
        {
            OnAddFilterRulesCommandTargetChangedImplementation(e);
            RaisePropertyChangedEvent(AddFilterRulesCommandTargetChanged, e);
        }

        partial void OnAddFilterRulesCommandTargetChangedImplementation(PropertyChangedEventArgs<IInputElement> e);

        //
        // IsOpen dependency property
        //
        /// <summary>
        /// Identifies the IsOpen dependency property.
        /// </summary>
        public static readonly DependencyProperty IsOpenProperty = DependencyProperty.Register( "IsOpen", typeof(bool), typeof(AddFilterRulePicker), new PropertyMetadata( BooleanBoxes.FalseBox, IsOpenProperty_PropertyChanged) );

        /// <summary>
        /// Gets or sets a value indicating whether the Popup is visible.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets a value indicating whether the Popup is visible.")]
        [Localizability(LocalizationCategory.None)]
        public bool IsOpen
        {
            get
            {
                return (bool) GetValue(IsOpenProperty);
            }
            set
            {
                SetValue(IsOpenProperty,BooleanBoxes.Box(value));
            }
        }

        static private void IsOpenProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            AddFilterRulePicker obj = (AddFilterRulePicker) o;
            obj.OnIsOpenChanged( new PropertyChangedEventArgs<bool>((bool)e.OldValue, (bool)e.NewValue) );
        }

        /// <summary>
        /// Occurs when IsOpen property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<bool>> IsOpenChanged;

        /// <summary>
        /// Called when IsOpen property changes.
        /// </summary>
        protected virtual void OnIsOpenChanged(PropertyChangedEventArgs<bool> e)
        {
            OnIsOpenChangedImplementation(e);
            RaisePropertyChangedEvent(IsOpenChanged, e);
        }

        partial void OnIsOpenChangedImplementation(PropertyChangedEventArgs<bool> e);

        /// <summary>
        /// Called when a property changes.
        /// </summary>
        private void RaisePropertyChangedEvent<T>(EventHandler<PropertyChangedEventArgs<T>> eh, PropertyChangedEventArgs<T> e)
        {
            if (eh != null)
            {
                eh(this,e);
            }
        }

        //
        // Static constructor
        //

        /// <summary>
        /// Called when the type is initialized.
        /// </summary>
        static AddFilterRulePicker()
        {
            DefaultStyleKeyProperty.OverrideMetadata(typeof(AddFilterRulePicker), new FrameworkPropertyMetadata(typeof(AddFilterRulePicker)));
            CommandManager.RegisterClassCommandBinding( typeof(AddFilterRulePicker), new CommandBinding( AddFilterRulePicker.CancelAddFilterRulesCommand, CancelAddFilterRulesCommand_CommandExecuted ));
            CommandManager.RegisterClassCommandBinding( typeof(AddFilterRulePicker), new CommandBinding( AddFilterRulePicker.OkAddFilterRulesCommand, OkAddFilterRulesCommand_CommandExecuted, OkAddFilterRulesCommand_CommandCanExecute ));
            StaticConstructorImplementation();
        }

        static partial void StaticConstructorImplementation();
    }
}
#endregion
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Composition;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.CSharp.Utilities;
using Microsoft.CodeAnalysis.Editor.CSharp.Formatting.Indentation;
using Microsoft.CodeAnalysis.Editor.Implementation.Formatting.Indentation;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Formatting.Rules;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Editor.CSharp.Formatting
{
    /// <summary>
    /// C# implementation of <see cref="IEditorFormattingService"/>: computes the text changes
    /// for format-document, format-on-paste, format-on-return and format-on-typed-character.
    /// </summary>
    [ExportLanguageService(typeof(IEditorFormattingService), LanguageNames.CSharp), Shared]
    internal partial class CSharpEditorFormattingService : IEditorFormattingService
    {
        // All the characters that might potentially trigger formatting when typed
        private readonly char[] _supportedChars = ";{}#nte:)".ToCharArray();

        public CSharpEditorFormattingService()
        {
        }

        public bool SupportsFormatDocument => true;
        public bool SupportsFormatOnPaste => true;
        public bool SupportsFormatSelection => true;
        public bool SupportsFormatOnReturn => true;

        /// <summary>
        /// Returns true when typing <paramref name="ch"/> should trigger automatic formatting,
        /// based on the smart-indent and auto-formatting feature options.
        /// </summary>
        public bool SupportsFormattingOnTypedCharacter(Document document, char ch)
        {
            // Performance: This method checks several options to determine if we should do smart
            // indent, none of which are controlled by editorconfig. Instead of calling
            // document.GetOptionsAsync we can use the Workspace's global options and thus save the
            // work of attempting to read in the editorconfig file.
            var options = document.Project.Solution.Workspace.Options;
            var smartIndentOn = options.GetOption(FormattingOptions.SmartIndent, LanguageNames.CSharp) == FormattingOptions.IndentStyle.Smart;

            // We consider the proper placement of a close curly when it is typed at the start of the
            // line to be a smart-indentation operation. As such, even if "format on typing" is off,
            // if "smart indent" is on, we'll still format this. (However, we won't touch anything
            // else in the block this close curly belongs to.).
            //
            // TODO(cyrusn): Should we expose an option for this? Personally, i don't think so.
            // If a user doesn't want this behavior, they can turn off 'smart indent' and control
            // everything themselves.
            if (ch == '}' && smartIndentOn)
            {
                return true;
            }

            // If format-on-typing is not on, then we don't support formatting on any other characters.
            var autoFormattingOnTyping = options.GetOption(FeatureOnOffOptions.AutoFormattingOnTyping, LanguageNames.CSharp);
            if (!autoFormattingOnTyping)
            {
                return false;
            }

            if (ch == '}' && !options.GetOption(FeatureOnOffOptions.AutoFormattingOnCloseBrace, LanguageNames.CSharp))
            {
                return false;
            }

            if (ch == ';' && !options.GetOption(FeatureOnOffOptions.AutoFormattingOnSemicolon, LanguageNames.CSharp))
            {
                return false;
            }

            // don't auto format after these keys if smart indenting is not on.
            if ((ch == '#' || ch == 'n') && !smartIndentOn)
            {
                return false;
            }

            return _supportedChars.Contains(ch);
        }

        /// <summary>
        /// Format-document / format-selection: formats the given span (or the whole file when null).
        /// </summary>
        public async Task<IList<TextChange>> GetFormattingChangesAsync(Document document, TextSpan? textSpan, CancellationToken cancellationToken)
        {
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var options = await document.GetOptionsAsync(cancellationToken).ConfigureAwait(false);

            var span = textSpan ?? new TextSpan(0, root.FullSpan.Length);
            var formattingSpan = CommonFormattingHelpers.GetFormattingSpan(root, span);
            return Formatter.GetFormattedTextChanges(root, SpecializedCollections.SingletonEnumerable(formattingSpan), document.Project.Solution.Workspace, options, cancellationToken);
        }

        /// <summary>
        /// Format-on-paste: formats the pasted span with the default rules plus PasteFormattingRule.
        /// </summary>
        public async Task<IList<TextChange>> GetFormattingChangesOnPasteAsync(Document document, TextSpan textSpan, CancellationToken cancellationToken)
        {
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var options = await document.GetOptionsAsync(cancellationToken).ConfigureAwait(false);

            var formattingSpan = CommonFormattingHelpers.GetFormattingSpan(root, textSpan);
            var service = document.GetLanguageService<ISyntaxFormattingService>();
            if (service == null)
            {
                return SpecializedCollections.EmptyList<TextChange>();
            }

            // Paste rule goes first so it takes precedence over the defaults.
            var rules = new List<IFormattingRule>() { new PasteFormattingRule() };
            rules.AddRange(service.GetDefaultFormattingRules());

            return Formatter.GetFormattedTextChanges(root, SpecializedCollections.SingletonEnumerable(formattingSpan), document.Project.Solution.Workspace, options, rules, cancellationToken);
        }

        // Host-specific rules (e.g. venus/razor) are prepended to the default rule set.
        private IEnumerable<IFormattingRule> GetFormattingRules(Document document, int position)
        {
            var workspace = document.Project.Solution.Workspace;
            var formattingRuleFactory = workspace.Services.GetService<IHostDependentFormattingRuleFactoryService>();
            return formattingRuleFactory.CreateRule(document, position).Concat(Formatter.GetDefaultFormattingRules(document));
        }

        /// <summary>
        /// Format-on-return: returns null when nothing should be formatted.
        /// </summary>
        public async Task<IList<TextChange>> GetFormattingChangesOnReturnAsync(Document document, int caretPosition, CancellationToken cancellationToken)
        {
            var options = await document.GetOptionsAsync(cancellationToken).ConfigureAwait(false);
            if (!options.GetOption(FeatureOnOffOptions.AutoFormattingOnReturn))
            {
                return null;
            }

            var formattingRules = this.GetFormattingRules(document, caretPosition);

            // first, find the token user just typed.
            SyntaxToken token = await GetTokenBeforeTheCaretAsync(document, caretPosition, cancellationToken).ConfigureAwait(false);

            if (token.IsMissing)
            {
                return null;
            }

            string text = null;
            if (IsInvalidToken(token, ref text))
            {
                return null;
            }

            // Check to see if the token is ')' and also the parent is a using statement. If not, bail
            if (TokenShouldNotFormatOnReturn(token))
            {
                return null;
            }

            // if formatting the range fails, at least format the single token below
            var changes = await FormatRangeAsync(document, token, formattingRules, cancellationToken).ConfigureAwait(false);
            if (changes.Count > 0)
            {
                return changes;
            }

            // if we can't, do normal smart indentation
            return await FormatTokenAsync(document, token, formattingRules, cancellationToken).ConfigureAwait(false);
        }

        private static bool TokenShouldNotFormatOnReturn(SyntaxToken token)
        {
            return !token.IsKind(SyntaxKind.CloseParenToken) || !token.Parent.IsKind(SyntaxKind.UsingStatement);
        }

        private static async Task<bool> TokenShouldNotFormatOnTypeCharAsync(
            SyntaxToken token, CancellationToken cancellationToken)
        {
            // If the token is a ) we only want to format if it's the close paren
            // of a using statement. That way if we have nested usings, the inner
            // using will align with the outer one when the user types the close paren.
            if (token.IsKind(SyntaxKind.CloseParenToken) && !token.Parent.IsKind(SyntaxKind.UsingStatement))
            {
                return true;
            }

            // If the token is a : we only want to format if it's a labeled statement
            // or case. When the colon is typed we'll want to immediately have those
            // statements snap to their appropriate indentation level.
            if (token.IsKind(SyntaxKind.ColonToken) && !(token.Parent.IsKind(SyntaxKind.LabeledStatement) || token.Parent is SwitchLabelSyntax))
            {
                return true;
            }

            // Only format an { if it is the first token on a line. We don't want to
            // mess with it if it's inside a line.
            if (token.IsKind(SyntaxKind.OpenBraceToken))
            {
                var text = await token.SyntaxTree.GetTextAsync(cancellationToken).ConfigureAwait(false);
                if (!token.IsFirstTokenOnLine(text))
                {
                    return true;
                }
            }

            return false;
        }

        /// <summary>
        /// Format-on-typed-character: returns null when the typed character should not trigger formatting.
        /// </summary>
        public async Task<IList<TextChange>> GetFormattingChangesAsync(Document document, char typedChar, int caretPosition, CancellationToken cancellationToken)
        {
            var formattingRules = this.GetFormattingRules(document, caretPosition);

            // first, find the token user just typed.
            var token = await GetTokenBeforeTheCaretAsync(document, caretPosition, cancellationToken).ConfigureAwait(false);

            if (token.IsMissing ||
                !ValidSingleOrMultiCharactersTokenKind(typedChar, token.Kind()) ||
                token.IsKind(SyntaxKind.EndOfFileToken, SyntaxKind.None))
            {
                return null;
            }

            var service = document.GetLanguageService<ISyntaxFactsService>();
            if (service != null && service.IsInNonUserCode(token.SyntaxTree, caretPosition, cancellationToken))
            {
                return null;
            }

            var shouldNotFormat = await TokenShouldNotFormatOnTypeCharAsync(token, cancellationToken).ConfigureAwait(false);
            if (shouldNotFormat)
            {
                return null;
            }

            // don't attempt to format on close brace if autoformat on close brace feature is off, instead just smart indent
            var options = await document.GetOptionsAsync(cancellationToken).ConfigureAwait(false);
            var autoFormattingCloseBraceOff = !options.GetOption(FeatureOnOffOptions.AutoFormattingOnCloseBrace) ||
                !options.GetOption(FeatureOnOffOptions.AutoFormattingOnTyping);

            bool smartIndentOnly = token.IsKind(SyntaxKind.CloseBraceToken) && autoFormattingCloseBraceOff;

            if (smartIndentOnly)
            {
                // if we're only doing smart indent, then ignore all edits to this token that occur before
                // the span of the token. They're irrelevant and may screw up other code the user doesn't
                // want touched.
                var tokenEdits = await FormatTokenAsync(document, token, formattingRules, cancellationToken).ConfigureAwait(false);
                var filteredEdits = tokenEdits.Where(t => t.Span.Start >= token.FullSpan.Start).ToList();
                return filteredEdits;
            }

            // if formatting the range fails, at least format the single token below
            var changes = await FormatRangeAsync(document, token, formattingRules, cancellationToken).ConfigureAwait(false);
            if (changes.Count > 0)
            {
                return changes;
            }

            return await FormatTokenAsync(document, token, formattingRules, cancellationToken).ConfigureAwait(false);
        }

        // Finds the token at (caret - 1), looking inside trivia so tokens in directives are found too.
        private static async Task<SyntaxToken> GetTokenBeforeTheCaretAsync(Document document, int caretPosition, CancellationToken cancellationToken)
        {
            var tree = await document.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);

            var position = Math.Max(0, caretPosition - 1);
            var root = await tree.GetRootAsync(cancellationToken).ConfigureAwait(false);
            var token = root.FindToken(position, findInsideTrivia: true);
            return token;
        }

        // Smart-indent fallback: formats just the single token.
        private async Task<IList<TextChange>> FormatTokenAsync(Document document, SyntaxToken token, IEnumerable<IFormattingRule> formattingRules, CancellationToken cancellationToken)
        {
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var options = await document.GetOptionsAsync(cancellationToken).ConfigureAwait(false);
            var formatter = CreateSmartTokenFormatter(options, formattingRules, root);
            var changes = await formatter.FormatTokenAsync(document.Project.Solution.Workspace, token, cancellationToken).ConfigureAwait(false);
            return changes;
        }

        private ISmartTokenFormatter CreateSmartTokenFormatter(OptionSet optionSet, IEnumerable<IFormattingRule> formattingRules, SyntaxNode root)
        {
            return new SmartTokenFormatter(optionSet, formattingRules, (CompilationUnitSyntax)root);
        }

        // Formats the syntactic range that ends at endToken (e.g. the statement/block the user just closed).
        // Returns an empty list when no suitable, valid range can be determined.
        private async Task<IList<TextChange>> FormatRangeAsync(
            Document document, SyntaxToken endToken, IEnumerable<IFormattingRule> formattingRules,
            CancellationToken cancellationToken)
        {
            if (!IsEndToken(endToken))
            {
                return SpecializedCollections.EmptyList<TextChange>();
            }

            var tokenRange = FormattingRangeHelper.FindAppropriateRange(endToken);
            if (tokenRange == null || tokenRange.Value.Item1.Equals(tokenRange.Value.Item2))
            {
                return SpecializedCollections.EmptyList<TextChange>();
            }

            if (IsInvalidTokenKind(tokenRange.Value.Item1) || IsInvalidTokenKind(tokenRange.Value.Item2))
            {
                return SpecializedCollections.EmptyList<TextChange>();
            }

            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var options = await document.GetOptionsAsync(cancellationToken).ConfigureAwait(false);
            var formatter = new SmartTokenFormatter(options, formattingRules, (CompilationUnitSyntax)root);

            var changes = formatter.FormatRange(document.Project.Solution.Workspace, tokenRange.Value.Item1, tokenRange.Value.Item2, cancellationToken);
            return changes;
        }

        // An open brace never ends a formattable range.
        private bool IsEndToken(SyntaxToken endToken)
        {
            if (endToken.IsKind(SyntaxKind.OpenBraceToken))
            {
                return false;
            }

            return true;
        }

        private bool ValidSingleOrMultiCharactersTokenKind(char typedChar, SyntaxKind kind)
        {
            // We'll autoformat on n, t, e, only if they are the last character of the below
            // keywords.
            switch (typedChar)
            {
                case ('n'):
                    // #region / #endregion
                    return kind == SyntaxKind.RegionKeyword || kind == SyntaxKind.EndRegionKeyword;
                case ('t'):
                    // select (LINQ query)
                    return kind == SyntaxKind.SelectKeyword;
                case ('e'):
                    // where (LINQ query)
                    return kind == SyntaxKind.WhereKeyword;
                default:
                    return true;
            }
        }

        // True when the token is unformattable, or its single-character text doesn't match what was typed.
        private bool IsInvalidToken(char typedChar, SyntaxToken token)
        {
            string text = null;
            if (IsInvalidToken(token, ref text))
            {
                return true;
            }

            return text[0] != typedChar;
        }

        // On success (returns false), 'text' is set to the token's single-character text.
        private bool IsInvalidToken(SyntaxToken token, ref string text)
        {
            if (IsInvalidTokenKind(token))
            {
                return true;
            }

            text = token.ToString();
            if (text.Length != 1)
            {
                return true;
            }

            return false;
        }

        private bool IsInvalidTokenKind(SyntaxToken token)
        {
            // invalid token to be formatted
            return token.IsKind(SyntaxKind.None) ||
                token.IsKind(SyntaxKind.EndOfDirectiveToken) ||
                token.IsKind(SyntaxKind.EndOfFileToken);
        }
    }
}
using UnityEngine; using UnityEditor; using System.Collections.Generic; public class NGUIRootCreator : UICreateNewUIWizard{ public static GameObject CreateNewUI(){ return UICreateNewUIWizard.CreateNewUI(UICreateNewUIWizard.camType); } }; public class NGUIFontCreator { private static string fontPrefabPath; private static Object fontPrefab; public static void CreateDynamic(string preName, PsdLayerToNGUI.Data data, PsdLayerCommandParser.Control c) { #if UNITY_3_5 #else if (data.trueTypeFont == null){ Debug.LogError("True Type Font doens't set"); return; } NGUIFontCreator.fontPrefabPath = preName + "_Font_" + c.fontSize + ".prefab"; NGUISettings.ambigiousFont = data.trueTypeFont; NGUISettings.fontSize = c.fontSize; #endif } public static void PrepareBitmap(string preName, PsdLayerToNGUI.Data data){ NGUIFontCreator.fontPrefabPath = preName + "_Font.prefab"; #if UNITY_3_4 NGUIFontCreator.fontPrefab = EditorUtility.CreateEmptyPrefab(NGUIFontCreator.fontPrefabPath); #else NGUIFontCreator.fontPrefab = PrefabUtility.CreateEmptyPrefab(NGUIFontCreator.fontPrefabPath); #endif } public static void CreateBitmap(PsdLayerToNGUI.Data data){ // Create a new game object for the font var name = data.fontTexture.name; var go = new GameObject(name); var bitmapFont = go.AddComponent<UIFont>(); NGUISettings.ambigiousFont = bitmapFont; NGUISettings.fontData = data.fontData; NGUISettings.fontTexture = data.fontTexture; { BMFontReader.Load(bitmapFont.bmFont, NGUITools.GetHierarchy(bitmapFont.gameObject), NGUISettings.fontData.bytes); bitmapFont.spriteName = NGUISettings.fontTexture.name; bitmapFont.atlas = NGUISettings.atlas; } // Update the prefab #if UNITY_3_4 EditorUtility.ReplacePrefab(go, NGUIFontCreator.fontPrefab); #else PrefabUtility.ReplacePrefab(go, NGUIFontCreator.fontPrefab); #endif GameObject.DestroyImmediate(go); AssetDatabase.Refresh(); // Select the atlas go = AssetDatabase.LoadAssetAtPath(fontPrefabPath, typeof(GameObject)) as GameObject; data.fontPrefab = 
go.GetComponent<UIFont>(); data.fontPrefab.spriteName = name; NGUISettings.ambigiousFont = data.fontPrefab; } }; public class NGUIWidgetCreator { // UICreateWidgetWizard public static string mButton = ""; public static string mImage0 = ""; public static string mImage1 = ""; public static string mImage2 = ""; public static string mImage3 = ""; public static string mSliderBG = ""; public static string mSliderFG = ""; public static string mSliderTB = ""; public static string mCheckBG = ""; public static string mCheck = ""; public static string mInputBG = ""; public static string mListFG = ""; public static string mListBG = ""; public static string mListHL = ""; public static string mScrollBG = ""; public static string mScrollFG = ""; static Color mColor = Color.white; static bool mScrollCL = true; static UIScrollBar.FillDirection mFillDir = UIScrollBar.FillDirection.LeftToRight; /// <summary> /// Convenience function -- creates the "Add To" button and the parent object field to the right of it. /// </summary> private static bool ShouldCreate (GameObject go, bool isValid) { return true; } /// <summary> /// Label creation function. /// </summary> public static void CreateLabel (GameObject go) { if (ShouldCreate(go, NGUISettings.ambigiousFont != null)) { UILabel lbl = NGUITools.AddWidget<UILabel>(go); lbl.ambigiousFont = NGUISettings.ambigiousFont; lbl.fontSize = NGUISettings.fontSize; lbl.color = mColor; lbl.AssumeNaturalSize(); Selection.activeGameObject = lbl.gameObject; } } /// <summary> /// Sprite creation function. /// </summary> public static void CreateSprite (GameObject go, string field, bool slicedSprite) { if (ShouldCreate(go, NGUISettings.atlas != null)) { var sprite = NGUITools.AddWidget<UISprite>(go); sprite.type = slicedSprite ? 
UISprite.Type.Sliced : UISprite.Type.Simple; sprite.name = sprite.name + " (" + field + ")"; sprite.atlas = NGUISettings.atlas; sprite.spriteName = field; sprite.pivot = NGUISettings.pivot; sprite.MakePixelPerfect(); Selection.activeGameObject = sprite.gameObject; } } /// <summary> /// UI Texture doesn't do anything other than creating the widget. /// </summary> public static void CreateSimpleTexture (GameObject go) { if (ShouldCreate(go, true)) { UITexture tex = NGUITools.AddWidget<UITexture>(go); Selection.activeGameObject = tex.gameObject; } } /// <summary> /// Button creation function. /// </summary> public static void CreateButton (GameObject go) { if (ShouldCreate(go, NGUISettings.atlas != null)) { int depth = 0;//NGUITools.CalculateNextDepth(go); go = NGUITools.AddChild(go); go.name = "Button"; UISprite bg = NGUITools.AddWidget<UISprite>(go); bg.type = UISprite.Type.Sliced; bg.name = "Background"; bg.depth = depth; bg.atlas = NGUISettings.atlas; bg.spriteName = mButton; bg.width = 200; bg.height = 50; bg.MakePixelPerfect(); if (NGUISettings.ambigiousFont != null) { UILabel lbl = NGUITools.AddWidget<UILabel>(go); lbl.ambigiousFont = NGUISettings.ambigiousFont; lbl.text = go.name; lbl.AssumeNaturalSize(); } // Add a collider NGUITools.AddWidgetCollider(go); // Add the scripts go.AddComponent<UIButton>().tweenTarget = bg != null ? bg.gameObject : null; go.AddComponent<UIPlaySound>(); Selection.activeGameObject = go; } } /// <summary> /// Button creation function. /// </summary> public static void CreateImageButton (GameObject go) { if (ShouldCreate(go, NGUISettings.atlas != null)) { int depth = 0;//NGUITools.CalculateNextDepth(go); go = NGUITools.AddChild(go); go.name = "Image Button"; UISpriteData sp = NGUISettings.atlas.GetSprite(mImage0); UISprite sprite = NGUITools.AddWidget<UISprite>(go); sprite.type = sp.hasBorder ? 
UISprite.Type.Sliced : UISprite.Type.Simple; sprite.name = "Background"; sprite.depth = depth; sprite.atlas = NGUISettings.atlas; sprite.spriteName = mImage0; sprite.transform.localScale = new Vector3(150f, 40f, 1f); sprite.MakePixelPerfect(); if (NGUISettings.ambigiousFont != null) { UILabel lbl = NGUITools.AddWidget<UILabel>(go); lbl.ambigiousFont = NGUISettings.ambigiousFont; lbl.text = go.name; lbl.AssumeNaturalSize(); } // Add a collider NGUITools.AddWidgetCollider(go); // Add the scripts UIImageButton ib = go.AddComponent<UIImageButton>(); ib.target = sprite; ib.normalSprite = mImage0; ib.hoverSprite = mImage1; ib.pressedSprite = mImage2; ib.disabledSprite = mImage3; go.AddComponent<UIPlaySound>(); Selection.activeGameObject = go; } } /// <summary> /// Toggle creation function. /// </summary> public static void CreateToggle (GameObject go) { if (ShouldCreate(go, NGUISettings.atlas != null)) { int depth = 0;//NGUITools.CalculateNextDepth(go); go = NGUITools.AddChild(go); go.name = "Toggle"; UISprite bg = NGUITools.AddWidget<UISprite>(go); bg.type = UISprite.Type.Sliced; bg.name = "1-Background"; bg.depth = depth; bg.atlas = NGUISettings.atlas; bg.spriteName = mCheckBG; bg.width = 26; bg.height = 26; bg.MakePixelPerfect(); UISprite fg = NGUITools.AddWidget<UISprite>(go); fg.name = "2-Checkmark"; fg.atlas = NGUISettings.atlas; fg.spriteName = mCheck; fg.MakePixelPerfect(); if (NGUISettings.ambigiousFont != null) { UILabel lbl = NGUITools.AddWidget<UILabel>(go); lbl.ambigiousFont = NGUISettings.ambigiousFont; lbl.text = go.name; lbl.pivot = UIWidget.Pivot.Left; lbl.transform.localPosition = new Vector3(16f, 0f, 0f); lbl.MakePixelPerfect(); } // Add a collider NGUITools.AddWidgetCollider(go); // Add the scripts go.AddComponent<UIToggle>().activeSprite = fg; go.AddComponent<UIButton>().tweenTarget = bg.gameObject; go.AddComponent<UIButtonScale>().tweenTarget = bg.transform; go.AddComponent<UIPlaySound>(); Selection.activeGameObject = go; } } /// <summary> /// 
Scroll bar template. /// </summary> public static void CreateScrollBar (GameObject go) { if (ShouldCreate(go, NGUISettings.atlas != null)) { int depth = 0;//NGUITools.CalculateNextDepth(go); go = NGUITools.AddChild(go); go.name = "Scroll Bar"; UISprite bg = NGUITools.AddWidget<UISprite>(go); bg.type = UISprite.Type.Sliced; bg.name = "Background"; bg.depth = depth; bg.atlas = NGUISettings.atlas; bg.spriteName = mScrollBG; Vector4 border = bg.border; bg.width = Mathf.RoundToInt(400f + border.x + border.z); bg.height = Mathf.RoundToInt(14f + border.y + border.w); bg.MakePixelPerfect(); UISprite fg = NGUITools.AddWidget<UISprite>(go); fg.type = UISprite.Type.Sliced; fg.name = "Foreground"; fg.atlas = NGUISettings.atlas; fg.spriteName = mScrollFG; UIScrollBar sb = go.AddComponent<UIScrollBar>(); sb.foregroundWidget = fg; sb.backgroundWidget = bg; sb.fillDirection = mFillDir; sb.barSize = 0.3f; sb.value = 0.3f; sb.ForceUpdate(); if (mScrollCL) { NGUITools.AddWidgetCollider(bg.gameObject); NGUITools.AddWidgetCollider(fg.gameObject); } Selection.activeGameObject = go; } } /// <summary> /// Progress bar creation function. /// </summary> public static void CreateSlider (GameObject go, bool slider) { if (ShouldCreate(go, NGUISettings.atlas != null)) { int depth = 0;//NGUITools.CalculateNextDepth(go); go = NGUITools.AddChild(go); go.name = slider ? "Slider" : "Progress Bar"; // Background sprite UISpriteData bgs = NGUISettings.atlas.GetSprite(mSliderBG); UISprite back = (UISprite)NGUITools.AddWidget<UISprite>(go); back.type = bgs.hasBorder ? 
UISprite.Type.Sliced : UISprite.Type.Simple; back.name = "Background"; back.depth = depth; back.pivot = UIWidget.Pivot.Left; back.atlas = NGUISettings.atlas; back.spriteName = mSliderBG; back.width = 200; back.height = 30; back.transform.localPosition = Vector3.zero; back.MakePixelPerfect(); // Fireground sprite UISpriteData fgs = NGUISettings.atlas.GetSprite(mSliderFG); UISprite front = NGUITools.AddWidget<UISprite>(go); front.type = fgs.hasBorder ? UISprite.Type.Sliced : UISprite.Type.Simple; front.name = "Foreground"; front.pivot = UIWidget.Pivot.Left; front.atlas = NGUISettings.atlas; front.spriteName = mSliderFG; front.width = 200; front.height = 30; front.transform.localPosition = Vector3.zero; front.MakePixelPerfect(); // Add a collider if (slider) NGUITools.AddWidgetCollider(go); // Add the slider script UISlider uiSlider = go.AddComponent<UISlider>(); uiSlider.foregroundWidget = front; // Thumb sprite if (slider) { UISpriteData tbs = NGUISettings.atlas.GetSprite(mSliderTB); UISprite thb = NGUITools.AddWidget<UISprite>(go); thb.type = tbs.hasBorder ? UISprite.Type.Sliced : UISprite.Type.Simple; thb.name = "Thumb"; thb.atlas = NGUISettings.atlas; thb.spriteName = mSliderTB; thb.width = 20; thb.height = 40; thb.transform.localPosition = new Vector3(200f, 0f, 0f); thb.MakePixelPerfect(); NGUITools.AddWidgetCollider(thb.gameObject); thb.gameObject.AddComponent<UIButtonColor>(); thb.gameObject.AddComponent<UIButtonScale>(); uiSlider.thumb = thb.transform; } uiSlider.value = 1f; // Select the slider Selection.activeGameObject = go; } } /// <summary> /// Input field creation function. 
/// </summary> public static void CreateInput (GameObject go, bool isPassword) { if (ShouldCreate(go, NGUISettings.atlas != null && NGUISettings.ambigiousFont != null)) { int depth = 0;//NGUITools.CalculateNextDepth(go); go = NGUITools.AddChild(go); go.name = "Input"; int padding = 3; UISprite bg = NGUITools.AddWidget<UISprite>(go); bg.type = UISprite.Type.Sliced; bg.name = "Background"; bg.depth = depth; bg.atlas = NGUISettings.atlas; bg.spriteName = mInputBG; bg.pivot = UIWidget.Pivot.Left; bg.width = 400; if (NGUISettings.ambigiousFont != null) bg.height = NGUISettings.fontSize + padding * 2; else bg.height = 32; bg.transform.localPosition = Vector3.zero; bg.MakePixelPerfect(); UILabel lbl = NGUITools.AddWidget<UILabel>(go); lbl.ambigiousFont = NGUISettings.ambigiousFont; lbl.pivot = UIWidget.Pivot.Left; lbl.transform.localPosition = new Vector3(padding, 0f, 0f); lbl.multiLine = false; lbl.supportEncoding = false; lbl.width = Mathf.RoundToInt(400f - padding * 2f); lbl.text = "You can type here"; lbl.AssumeNaturalSize(); // Add a collider to the background NGUITools.AddWidgetCollider(go); // Add an input script to the background and have it point to the label UIInput input = go.AddComponent<UIInput>(); input.label = lbl; input.inputType = isPassword ? UIInput.InputType.Password : UIInput.InputType.Standard; // Update the selection Selection.activeGameObject = go; } } /// <summary> /// Create a popup list or a menu. /// </summary> public static void CreatePopup (GameObject go, bool isDropDown) { if (ShouldCreate(go, NGUISettings.atlas != null && NGUISettings.ambigiousFont != null)) { int depth = 0;//NGUITools.CalculateNextDepth(go); go = NGUITools.AddChild(go); go.name = isDropDown ? 
"Popup List" : "Popup Menu"; UISpriteData sphl = NGUISettings.atlas.GetSprite(mListHL); UISpriteData spfg = NGUISettings.atlas.GetSprite(mListFG); Vector2 hlPadding = new Vector2(Mathf.Max(4f, sphl.paddingLeft), Mathf.Max(4f, sphl.paddingTop)); Vector2 fgPadding = new Vector2(Mathf.Max(4f, spfg.paddingLeft), Mathf.Max(4f, spfg.paddingTop)); // Background sprite UISprite sprite = NGUITools.AddSprite(go, NGUISettings.atlas, mListFG); sprite.depth = depth; sprite.atlas = NGUISettings.atlas; sprite.pivot = UIWidget.Pivot.Left; sprite.width = Mathf.RoundToInt(150f + fgPadding.x * 2f); sprite.height = Mathf.RoundToInt(NGUISettings.fontSize + fgPadding.y * 2f); sprite.transform.localPosition = Vector3.zero; sprite.MakePixelPerfect(); // Text label UILabel lbl = NGUITools.AddWidget<UILabel>(go); lbl.ambigiousFont = NGUISettings.ambigiousFont; lbl.fontSize = NGUISettings.fontSize; lbl.text = go.name; lbl.pivot = UIWidget.Pivot.Left; lbl.cachedTransform.localPosition = new Vector3(fgPadding.x, 0f, 0f); lbl.MakePixelPerfect(); // Add a collider NGUITools.AddWidgetCollider(go); // Add the popup list UIPopupList list = go.AddComponent<UIPopupList>(); list.atlas = NGUISettings.atlas; list.ambigiousFont = NGUISettings.ambigiousFont; list.backgroundSprite = mListBG; list.highlightSprite = mListHL; list.padding = hlPadding; if (isDropDown) EventDelegate.Add(list.onChange, lbl.SetCurrentSelection); for (int i = 0; i < 5; ++i) list.items.Add(isDropDown ? 
("List Option " + i) : ("Menu Option " + i)); // Add the scripts go.AddComponent<UIButton>().tweenTarget = sprite.gameObject; go.AddComponent<UIPlaySound>(); Selection.activeGameObject = go; } } }; public class NGUIAtlasMaker { // UIAtlasMaker public class SpriteEntry : UISpriteData { // Sprite texture -- original texture or a temporary texture public Texture2D tex; // Whether the texture is temporary and should be deleted public bool temporaryTexture = false; } public static void CreateAtlas(string currentPath) { string prefabPath = ""; string matPath = ""; { if (string.IsNullOrEmpty(prefabPath)) prefabPath = currentPath + ".prefab"; if (string.IsNullOrEmpty(matPath)) matPath = currentPath + ".mat"; } // Try to load the material Material mat = AssetDatabase.LoadAssetAtPath(matPath, typeof(Material)) as Material; // If the material doesn't exist, create it if (mat == null) { Shader shader = Shader.Find("Unlit/Transparent Colored"); mat = new Material(shader); // Save the material AssetDatabase.CreateAsset(mat, matPath); AssetDatabase.Refresh(); // Load the material so it's usable mat = AssetDatabase.LoadAssetAtPath(matPath, typeof(Material)) as Material; } GameObject go = AssetDatabase.LoadAssetAtPath(prefabPath, typeof(GameObject)) as GameObject; { // Create a new prefab for the atlas #if UNITY_3_4 Object prefab = (go != null) ? go : EditorUtility.CreateEmptyPrefab(prefabPath); #else Object prefab = (go != null) ? 
go : PrefabUtility.CreateEmptyPrefab(prefabPath); #endif // Create a new game object for the atlas go = new GameObject(System.IO.Path.GetFileName(currentPath)); go.AddComponent<UIAtlas>().spriteMaterial = mat; // Update the prefab #if UNITY_3_4 EditorUtility.ReplacePrefab(go, prefab); #else PrefabUtility.ReplacePrefab(go, prefab); #endif GameObject.DestroyImmediate(go); AssetDatabase.SaveAssets(); AssetDatabase.Refresh(); // Select the atlas go = AssetDatabase.LoadAssetAtPath(prefabPath, typeof(GameObject)) as GameObject; NGUISettings.atlas = go.GetComponent<UIAtlas>(); } } public static void UpdateAtlas(List<Texture> textures, bool appendSprites) { // Create a list of sprites using the collected textures List<NGUIAtlasMaker.SpriteEntry> sprites = NGUIAtlasMaker.CreateSprites(textures); if (sprites.Count > 0) { // Extract sprites from the atlas, filling in the missing pieces if (appendSprites) NGUIAtlasMaker.ExtractSprites(NGUISettings.atlas, sprites); // NOTE: It doesn't seem to be possible to undo writing to disk, and there also seems to be no way of // detecting an Undo event. Without either of these it's not possible to restore the texture saved to disk, // so the undo process doesn't work right. Because of this I'd rather disable it altogether until a solution is found. // The ability to undo this action is always useful //NGUIEditorTools.RegisterUndo("Update Atlas", UISettings.atlas, UISettings.atlas.texture, UISettings.atlas.material); // Update the atlas NGUIAtlasMaker.UpdateAtlas(NGUISettings.atlas, sprites); } } /// <summary> /// Create a list of sprites using the specified list of textures. 
	/// </summary>

	public static List<SpriteEntry> CreateSprites (List<Texture> textures)
	{
		List<SpriteEntry> list = new List<SpriteEntry>();

		foreach (Texture tex in textures)
		{
			// Re-import the texture as readable so GetPixels32 works
			Texture2D oldTex = NGUIEditorTools.ImportTexture(tex, true, false, true);
			if (oldTex == null) continue;

			// If we aren't doing trimming, just use the texture as-is
			if (!NGUISettings.atlasTrimming && !NGUISettings.atlasPMA)
			{
				SpriteEntry sprite = new SpriteEntry();
				sprite.SetRect(0, 0, oldTex.width, oldTex.height);
				sprite.tex = oldTex;
				sprite.name = oldTex.name;
				sprite.temporaryTexture = false;
				list.Add(sprite);
				continue;
			}

			// If we want to trim transparent pixels, there is more work to be done
			Color32[] pixels = oldTex.GetPixels32();

			int xmin = oldTex.width;
			int xmax = 0;
			int ymin = oldTex.height;
			int ymax = 0;
			int oldWidth = oldTex.width;
			int oldHeight = oldTex.height;

			// Find solid pixels (pixels with non-zero alpha) to compute the trimmed bounds
			if (NGUISettings.atlasTrimming)
			{
				for (int y = 0, yw = oldHeight; y < yw; ++y)
				{
					for (int x = 0, xw = oldWidth; x < xw; ++x)
					{
						Color32 c = pixels[y * xw + x];

						if (c.a != 0)
						{
							if (y < ymin) ymin = y;
							if (y > ymax) ymax = y;
							if (x < xmin) xmin = x;
							if (x > xmax) xmax = x;
						}
					}
				}
			}
			else
			{
				xmin = 0;
				xmax = oldWidth - 1;
				ymin = 0;
				ymax = oldHeight - 1;
			}

			int newWidth = (xmax - xmin) + 1;
			int newHeight = (ymax - ymin) + 1;

			// A fully transparent texture yields newWidth/newHeight <= 0 and is skipped
			if (newWidth > 0 && newHeight > 0)
			{
				SpriteEntry sprite = new SpriteEntry();
				sprite.x = 0;
				sprite.y = 0;
				sprite.width = oldTex.width;
				sprite.height = oldTex.height;

				// If the dimensions match, then nothing was actually trimmed
				if (!NGUISettings.atlasPMA && (newWidth == oldWidth && newHeight == oldHeight))
				{
					sprite.tex = oldTex;
					sprite.name = oldTex.name;
					sprite.temporaryTexture = false;
				}
				else
				{
					// Copy the non-trimmed texture data into a temporary buffer
					Color32[] newPixels = new Color32[newWidth * newHeight];

					for (int y = 0; y < newHeight; ++y)
					{
						for (int x = 0; x < newWidth; ++x)
						{
							int newIndex = y * newWidth + x;
							int oldIndex = (ymin + y) * oldWidth + (xmin + x);
							// Optionally pre-multiply alpha while copying
							if (NGUISettings.atlasPMA) newPixels[newIndex] = NGUITools.ApplyPMA(pixels[oldIndex]);
							else newPixels[newIndex] = pixels[oldIndex];
						}
					}

					// Create a new texture
					sprite.temporaryTexture = true;
					sprite.name = oldTex.name;
					sprite.tex = new Texture2D(newWidth, newHeight);
					sprite.tex.SetPixels32(newPixels);
					sprite.tex.Apply();

					// Remember the padding offset so the sprite can be drawn as if untrimmed
					sprite.SetPadding(xmin, ymin, oldWidth - newWidth - xmin, oldHeight - newHeight - ymin);
				}
				list.Add(sprite);
			}
		}
		return list;
	}

	/// <summary>
	/// Extract sprites from the atlas, adding them to the list.
	/// </summary>

	public static void ExtractSprites (UIAtlas atlas, List<SpriteEntry> finalSprites)
	{
		// Make the atlas texture readable
		Texture2D atlasTex = NGUIEditorTools.ImportTexture(atlas.texture, true, false, !atlas.premultipliedAlpha);

		if (atlasTex != null)
		{
			// Pixels are read lazily -- only if at least one sprite needs extracting
			Color32[] oldPixels = null;
			int oldWidth = atlasTex.width;
			int oldHeight = atlasTex.height;
			List<UISpriteData> existingSprites = atlas.spriteList;

			foreach (UISpriteData es in existingSprites)
			{
				bool found = false;

				foreach (SpriteEntry fs in finalSprites)
				{
					if (es.name == fs.name)
					{
						// Sprite is being replaced: keep its border settings
						fs.CopyBorderFrom(es);
						found = true;
						break;
					}
				}

				if (!found)
				{
					// Read the atlas
					if (oldPixels == null) oldPixels = atlasTex.GetPixels32();

					int xmin = Mathf.Clamp(es.x, 0, oldWidth);
					int ymin = Mathf.Clamp(es.y, 0, oldHeight);
					int newWidth = Mathf.Clamp(es.width, 0, oldWidth);
					int newHeight = Mathf.Clamp(es.height, 0, oldHeight);
					if (newWidth == 0 || newHeight == 0) continue;

					Color32[] newPixels = new Color32[newWidth * newHeight];

					// Sprite coordinates are top-down while GetPixels32 is bottom-up,
					// hence the vertical flip on both indices
					for (int y = 0; y < newHeight; ++y)
					{
						for (int x = 0; x < newWidth; ++x)
						{
							int newIndex = (newHeight - 1 - y) * newWidth + x;
							int oldIndex = (oldHeight - 1 - (ymin + y)) * oldWidth + (xmin + x);
							newPixels[newIndex] = oldPixels[oldIndex];
						}
					}

					// Create a new sprite backed by a temporary texture
					SpriteEntry sprite = new SpriteEntry();
					sprite.CopyFrom(es);
					sprite.SetRect(0, 0, newWidth, newHeight);
					sprite.temporaryTexture = true;
					sprite.tex = new Texture2D(newWidth, newHeight);
					sprite.tex.SetPixels32(newPixels);
					sprite.tex.Apply();
					finalSprites.Add(sprite);
				}
			}
		}

		// The atlas no longer needs to be readable
		NGUIEditorTools.ImportTexture(atlas.texture, false, false, !atlas.premultipliedAlpha);
	}

	/// <summary>
	/// Update the sprite atlas, keeping only the sprites that are on the specified list.
	/// </summary>

	public static void UpdateAtlas (UIAtlas atlas, List<SpriteEntry> sprites)
	{
		if (sprites.Count > 0)
		{
			// Combine all sprites into a single texture and save it
			if (UpdateTexture(atlas, sprites))
			{
				// Replace the sprites within the atlas
				ReplaceSprites(atlas, sprites);
			}
			// Release the temporary textures
			ReleaseSprites(sprites);
			return;
		}
		else
		{
			// No sprites left: clear the atlas and delete its texture asset
			atlas.spriteList.Clear();
			string path = NGUIEditorTools.GetSaveableTexturePath(atlas);
			atlas.spriteMaterial.mainTexture = null;
			if (!string.IsNullOrEmpty(path)) AssetDatabase.DeleteAsset(path);
		}
		atlas.MarkAsChanged();

		Selection.activeGameObject = (NGUISettings.atlas != null) ? NGUISettings.atlas.gameObject : null;
	}

	/// <summary>
	/// Combine all sprites into a single texture and save it to disk.
	/// </summary>

	static bool UpdateTexture (UIAtlas atlas, List<SpriteEntry> sprites)
	{
		// Get the texture for the atlas
		Texture2D tex = atlas.texture as Texture2D;
		string oldPath = (tex != null) ? AssetDatabase.GetAssetPath(tex.GetInstanceID()) : "";
		string newPath = NGUIEditorTools.GetSaveableTexturePath(atlas);

		// Clear the read-only flag in texture file attributes
		if (System.IO.File.Exists(newPath))
		{
#if !UNITY_4_1 && !UNITY_4_0 && !UNITY_3_5
			// Respect version control: bail out if the file hasn't been checked out
			if (!AssetDatabase.IsOpenForEdit(newPath))
			{
				Debug.LogError(newPath + " is not editable. Did you forget to do a check out?");
				return false;
			}
#endif
			System.IO.FileAttributes newPathAttrs = System.IO.File.GetAttributes(newPath);
			newPathAttrs &= ~System.IO.FileAttributes.ReadOnly;
			System.IO.File.SetAttributes(newPath, newPathAttrs);
		}

		bool newTexture = (tex == null || oldPath != newPath);

		if (newTexture)
		{
			// Create a new texture for the atlas
			tex = new Texture2D(1, 1, TextureFormat.ARGB32, false);
		}
		else
		{
			// Make the atlas readable so we can save it
			tex = NGUIEditorTools.ImportTexture(oldPath, true, false, !atlas.premultipliedAlpha);
		}

		// Pack the sprites into this texture
		if (PackTextures(tex, sprites))
		{
			byte[] bytes = tex.EncodeToPNG();
			System.IO.File.WriteAllBytes(newPath, bytes);
			bytes = null;

			// Load the texture we just saved as a Texture2D
			AssetDatabase.SaveAssets();
			AssetDatabase.Refresh();
			tex = NGUIEditorTools.ImportTexture(newPath, false, true, !atlas.premultipliedAlpha);

			// Update the atlas texture
			if (newTexture)
			{
				if (tex == null) Debug.LogError("Failed to load the created atlas saved as " + newPath);
				else atlas.spriteMaterial.mainTexture = tex;
				ReleaseSprites(sprites);
				AssetDatabase.SaveAssets();
				AssetDatabase.Refresh();
			}
			return true;
		}
		else
		{
			// Packing failed: restore the original (non-readable) import settings
			if (!newTexture) NGUIEditorTools.ImportTexture(oldPath, false, true, !atlas.premultipliedAlpha);

			//Debug.LogError("Operation canceled: The selected sprites can't fit into the atlas.\n" +
			//	"Keep large sprites outside the atlas (use UITexture), and/or use multiple atlases instead.");
			EditorUtility.DisplayDialog("Operation Canceled", "The selected sprites can't fit into the atlas.\n" +
				"Keep large sprites outside the atlas (use UITexture), and/or use multiple atlases instead", "OK");
			return false;
		}
	}

	/// <summary>
	/// Replace the sprites within the atlas.
/// </summary> static void ReplaceSprites (UIAtlas atlas, List<SpriteEntry> sprites) { // Get the list of sprites we'll be updating List<UISpriteData> spriteList = atlas.spriteList; List<UISpriteData> kept = new List<UISpriteData>(); // Run through all the textures we added and add them as sprites to the atlas for (int i = 0; i < sprites.Count; ++i) { SpriteEntry se = sprites[i]; UISpriteData sprite = AddSprite(spriteList, se); kept.Add(sprite); } // Remove unused sprites for (int i = spriteList.Count; i > 0; ) { UISpriteData sp = spriteList[--i]; if (!kept.Contains(sp)) spriteList.RemoveAt(i); } // Sort the sprites so that they are alphabetical within the atlas atlas.SortAlphabetically(); atlas.MarkAsChanged(); } /// <summary> /// Release all temporary textures created for the sprites. /// </summary> static void ReleaseSprites (List<SpriteEntry> sprites) { foreach (SpriteEntry se in sprites) { if (se.temporaryTexture) { NGUITools.Destroy(se.tex); se.tex = null; } } Resources.UnloadUnusedAssets(); } /// <summary> /// Used to sort the sprites by pixels used /// </summary> static int Compare (SpriteEntry a, SpriteEntry b) { // A is null b is not b is greater so put it at the front of the list if (a == null && b != null) return 1; // A is not null b is null a is greater so put it at the front of the list if (a == null && b != null) return -1; // Get the total pixels used for each sprite int aPixels = a.width * a.height; int bPixels = b.width * b.height; if (aPixels > bPixels) return -1; else if (aPixels < bPixels) return 1; return 0; } /// <summary> /// Pack all of the specified sprites into a single texture, updating the outer and inner rects of the sprites as needed. 
	/// </summary>

	static bool PackTextures (Texture2D tex, List<SpriteEntry> sprites)
	{
		Texture2D[] textures = new Texture2D[sprites.Count];
		Rect[] rects;

		// Older Unity versions don't expose the real GPU limit
#if UNITY_3_5 || UNITY_4_0
		int maxSize = 4096;
#else
		int maxSize = SystemInfo.maxTextureSize;
#endif

		// Cap atlas size on mobile platforms
#if UNITY_ANDROID || UNITY_IPHONE
		maxSize = Mathf.Min(maxSize, NGUISettings.allow4096 ? 4096 : 2048);
#endif
		if (NGUISettings.unityPacking)
		{
			// Let Unity's built-in packer lay out the textures
			for (int i = 0; i < sprites.Count; ++i) textures[i] = sprites[i].tex;
			rects = tex.PackTextures(textures, NGUISettings.atlasPadding, maxSize);
		}
		else
		{
			// NGUI's own packer: pack largest sprites first
			sprites.Sort(Compare);
			for (int i = 0; i < sprites.Count; ++i) textures[i] = sprites[i].tex;
			rects = UITexturePacker.PackTextures(tex, textures, 4, 4, NGUISettings.atlasPadding, maxSize);
		}

		for (int i = 0; i < sprites.Count; ++i)
		{
			Rect rect = NGUIMath.ConvertToPixels(rects[i], tex.width, tex.height, true);

			// Make sure that we don't shrink the textures
			if (Mathf.RoundToInt(rect.width) != textures[i].width) return false;

			SpriteEntry se = sprites[i];
			se.x = Mathf.RoundToInt(rect.x);
			se.y = Mathf.RoundToInt(rect.y);
			se.width = Mathf.RoundToInt(rect.width);
			se.height = Mathf.RoundToInt(rect.height);
		}
		return true;
	}

	/// <summary>
	/// Convenience function: the atlas name stored in the editor settings.
	/// </summary>

	static string atlasName
	{
		get { return NGUISettings.GetString("NGUI Atlas Name", null); }
		set { NGUISettings.SetString("NGUI Atlas Name", value); }
	}

	/// <summary>
	/// Helper function that creates a single sprite list from both the atlas's sprites as well as selected textures.
/// Dictionary value meaning: /// 0 = No change /// 1 = Update /// 2 = Add /// </summary> Dictionary<string, int> GetSpriteList (List<Texture> textures) { Dictionary<string, int> spriteList = new Dictionary<string, int>(); if (NGUISettings.atlas != null && NGUISettings.atlas.name == atlasName) { BetterList<string> spriteNames = NGUISettings.atlas.GetListOfSprites(); foreach (string sp in spriteNames) spriteList.Add(sp, 0); } // If we have textures to work with, include them as well if (textures.Count > 0) { List<string> texNames = new List<string>(); foreach (Texture tex in textures) texNames.Add(tex.name); texNames.Sort(); foreach (string tex in texNames) { if (spriteList.ContainsKey(tex)) spriteList[tex] = 1; else spriteList.Add(tex, 2); } } return spriteList; } /// <summary> /// Add a new sprite to the atlas, given the texture it's coming from and the packed rect within the atlas. /// </summary> static UISpriteData AddSprite (List<UISpriteData> sprites, SpriteEntry se) { // See if this sprite already exists foreach (UISpriteData sp in sprites) { if (sp.name == se.name) { sp.CopyFrom(se); return sp; } } UISpriteData sprite = new UISpriteData(); sprite.CopyFrom(se); sprites.Add(sprite); return sprite; } };
using System;
using System.IO;
using System.Security.Cryptography;

namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
	/// <summary>
	/// An input buffer customised for use by <see cref="InflaterInputStream"/>
	/// </summary>
	/// <remarks>
	/// The buffer supports decryption of incoming data.
	/// </remarks>
	public class InflaterInputBuffer
	{
		#region Constructors
		/// <summary>
		/// Initialise a new instance of <see cref="InflaterInputBuffer"/> with a default buffer size
		/// </summary>
		/// <param name="stream">The stream to buffer.</param>
		public InflaterInputBuffer(Stream stream) : this(stream, 4096)
		{
		}

		/// <summary>
		/// Initialise a new instance of <see cref="InflaterInputBuffer"/>
		/// </summary>
		/// <param name="stream">The stream to buffer.</param>
		/// <param name="bufferSize">The size to use for the buffer</param>
		/// <remarks>A minimum buffer size of 1KB is permitted.  Lower sizes are treated as 1KB.</remarks>
		public InflaterInputBuffer(Stream stream, int bufferSize)
		{
			inputStream = stream;
			if (bufferSize < 1024)
			{
				bufferSize = 1024;
			}
			rawData = new byte[bufferSize];
			// Until a crypto transform is installed, clear text aliases the raw buffer
			clearText = rawData;
		}
		#endregion

		/// <summary>
		/// Get the number of bytes of data in the <see cref="RawData"/> buffer.
		/// </summary>
		public int RawLength
		{
			get { return rawLength; }
		}

		/// <summary>
		/// Get the contents of the raw data buffer.
		/// </summary>
		/// <remarks>This may contain encrypted data.</remarks>
		public byte[] RawData
		{
			get { return rawData; }
		}

		/// <summary>
		/// Get the number of useable bytes in <see cref="ClearText"/>
		/// </summary>
		public int ClearTextLength
		{
			get { return clearTextLength; }
		}

		/// <summary>
		/// Get the contents of the clear text buffer.
		/// </summary>
		public byte[] ClearText
		{
			get { return clearText; }
		}

		/// <summary>
		/// Get/set the number of bytes available
		/// </summary>
		public int Available
		{
			get { return available; }
			set { available = value; }
		}

		/// <summary>
		/// Call <see cref="Inflater.SetInput(byte[], int, int)"/> passing the current clear text buffer contents.
		/// </summary>
		/// <param name="inflater">The inflater to set input for.</param>
		public void SetInflaterInput(Inflater inflater)
		{
			if (available > 0)
			{
				// Hand the unconsumed tail of the clear-text buffer to the inflater
				inflater.SetInput(clearText, clearTextLength - available, available);
				available = 0;
			}
		}

		/// <summary>
		/// Fill the buffer from the underlying input stream.
		/// </summary>
		public void Fill()
		{
			rawLength = 0;
			int toRead = rawData.Length;

			// Loop because Stream.Read may return fewer bytes than requested
			while (toRead > 0)
			{
				int count = inputStream.Read(rawData, rawLength, toRead);
				if (count <= 0)
				{
					break;
				}
				rawLength += count;
				toRead -= count;
			}

			// Decrypt in place (into clearText) when a transform is installed
			if (cryptoTransform != null)
			{
				clearTextLength = cryptoTransform.TransformBlock(rawData, 0, rawLength, clearText, 0);
			}
			else
			{
				clearTextLength = rawLength;
			}

			available = clearTextLength;
		}

		/// <summary>
		/// Read a buffer directly from the input stream
		/// </summary>
		/// <param name="buffer">The buffer to fill</param>
		/// <returns>Returns the number of bytes read.</returns>
		public int ReadRawBuffer(byte[] buffer)
		{
			return ReadRawBuffer(buffer, 0, buffer.Length);
		}

		/// <summary>
		/// Read a buffer directly from the input stream
		/// </summary>
		/// <param name="outBuffer">The buffer to read into</param>
		/// <param name="offset">The offset to start reading data into.</param>
		/// <param name="length">The number of bytes to read.</param>
		/// <returns>Returns the number of bytes read.</returns>
		public int ReadRawBuffer(byte[] outBuffer, int offset, int length)
		{
			if (length < 0)
			{
				throw new ArgumentOutOfRangeException(nameof(length));
			}

			int currentOffset = offset;
			int currentLength = length;

			while (currentLength > 0)
			{
				if (available <= 0)
				{
					Fill();
					// NOTE: returns 0 on EOF even if some bytes were already copied --
					// the partial progress is discarded by design
					if (available <= 0)
					{
						return 0;
					}
				}
				int toCopy = Math.Min(currentLength, available);
				System.Array.Copy(rawData, rawLength - (int)available, outBuffer, currentOffset, toCopy);
				currentOffset += toCopy;
				currentLength -= toCopy;
				available -= toCopy;
			}
			return length;
		}

		/// <summary>
		/// Read clear text data from the input stream.
		/// </summary>
		/// <param name="outBuffer">The buffer to add data to.</param>
		/// <param name="offset">The offset to start adding data at.</param>
		/// <param name="length">The number of bytes to read.</param>
		/// <returns>Returns the number of bytes actually read.</returns>
		public int ReadClearTextBuffer(byte[] outBuffer, int offset, int length)
		{
			if (length < 0)
			{
				throw new ArgumentOutOfRangeException(nameof(length));
			}

			int currentOffset = offset;
			int currentLength = length;

			while (currentLength > 0)
			{
				if (available <= 0)
				{
					Fill();
					// Same all-or-nothing EOF behaviour as ReadRawBuffer above
					if (available <= 0)
					{
						return 0;
					}
				}

				int toCopy = Math.Min(currentLength, available);
				Array.Copy(clearText, clearTextLength - (int)available, outBuffer, currentOffset, toCopy);
				currentOffset += toCopy;
				currentLength -= toCopy;
				available -= toCopy;
			}
			return length;
		}

		/// <summary>
		/// Read a <see cref="byte"/> from the input stream.
		/// </summary>
		/// <returns>Returns the byte read.</returns>
		public int ReadLeByte()
		{
			if (available <= 0)
			{
				Fill();
				if (available <= 0)
				{
					throw new ZipException("EOF in header");
				}
			}
			byte result = rawData[rawLength - available];
			available -= 1;
			return result;
		}

		/// <summary>
		/// Read an <see cref="short"/> in little endian byte order.
		/// </summary>
		/// <returns>The short value read case to an int.</returns>
		public int ReadLeShort()
		{
			return ReadLeByte() | (ReadLeByte() << 8);
		}

		/// <summary>
		/// Read an <see cref="int"/> in little endian byte order.
		/// </summary>
		/// <returns>The int value read.</returns>
		public int ReadLeInt()
		{
			return ReadLeShort() | (ReadLeShort() << 16);
		}

		/// <summary>
		/// Read a <see cref="long"/> in little endian byte order.
		/// </summary>
		/// <returns>The long value read.</returns>
		public long ReadLeLong()
		{
			// Cast the low half to uint first so sign extension doesn't corrupt the result
			return (uint)ReadLeInt() | ((long)ReadLeInt() << 32);
		}

		/// <summary>
		/// Get/set the <see cref="ICryptoTransform"/> to apply to any data.
		/// </summary>
		/// <remarks>Set this value to null to have no transform applied.</remarks>
		public ICryptoTransform CryptoTransform
		{
			set
			{
				cryptoTransform = value;
				if (cryptoTransform != null)
				{
					// Raw and clear buffers must diverge once decryption is active;
					// allocate the dedicated clear-text buffer lazily
					if (rawData == clearText)
					{
						if (internalClearText == null)
						{
							internalClearText = new byte[rawData.Length];
						}
						clearText = internalClearText;
					}
					clearTextLength = rawLength;
					if (available > 0)
					{
						// Decrypt whatever is still pending in the raw buffer
						cryptoTransform.TransformBlock(rawData, rawLength - available, available, clearText, rawLength - available);
					}
				}
				else
				{
					// No transform: clear text aliases the raw buffer again
					clearText = rawData;
					clearTextLength = rawLength;
				}
			}
		}

		#region Instance Fields
		int rawLength;
		byte[] rawData;

		int clearTextLength;
		byte[] clearText;
		byte[] internalClearText;

		int available;

		ICryptoTransform cryptoTransform;
		Stream inputStream;
		#endregion
	}

	/// <summary>
	/// This filter stream is used to decompress data compressed using the "deflate"
	/// format. The "deflate" format is described in RFC 1951.
	///
	/// This stream may form the basis for other decompression filters, such
	/// as the <see cref="ICSharpCode.SharpZipLib.GZip.GZipInputStream">GZipInputStream</see>.
	///
	/// Author of the original java version : John Leuner.
	/// </summary>
	public class InflaterInputStream : Stream
	{
		#region Constructors
		/// <summary>
		/// Create an InflaterInputStream with the default decompressor
		/// and a default buffer size of 4KB.
		/// </summary>
		/// <param name = "baseInputStream">
		/// The InputStream to read bytes from
		/// </param>
		public InflaterInputStream(Stream baseInputStream)
			: this(baseInputStream, new Inflater(), 4096)
		{
		}

		/// <summary>
		/// Create an InflaterInputStream with the specified decompressor
		/// and a default buffer size of 4KB.
		/// </summary>
		/// <param name = "baseInputStream">
		/// The source of input data
		/// </param>
		/// <param name = "inf">
		/// The decompressor used to decompress data read from baseInputStream
		/// </param>
		public InflaterInputStream(Stream baseInputStream, Inflater inf)
			: this(baseInputStream, inf, 4096)
		{
		}

		/// <summary>
		/// Create an InflaterInputStream with the specified decompressor
		/// and the specified buffer size.
		/// </summary>
		/// <param name = "baseInputStream">
		/// The InputStream to read bytes from
		/// </param>
		/// <param name = "inflater">
		/// The decompressor to use
		/// </param>
		/// <param name = "bufferSize">
		/// Size of the buffer to use
		/// </param>
		public InflaterInputStream(Stream baseInputStream, Inflater inflater, int bufferSize)
		{
			// Argument checks are performed in this order; callers may depend on
			// which exception surfaces first when several arguments are invalid
			if (baseInputStream == null)
			{
				throw new ArgumentNullException(nameof(baseInputStream));
			}

			if (inflater == null)
			{
				throw new ArgumentNullException(nameof(inflater));
			}

			if (bufferSize <= 0)
			{
				throw new ArgumentOutOfRangeException(nameof(bufferSize));
			}

			this.baseInputStream = baseInputStream;
			this.inf = inflater;

			inputBuffer = new InflaterInputBuffer(baseInputStream, bufferSize);
		}
		#endregion

		/// <summary>
		/// Gets or sets a flag indicating ownership of underlying stream.
		/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
		/// </summary>
		/// <remarks>The default value is true.</remarks>
		public bool IsStreamOwner { get; set; } = true;

		/// <summary>
		/// Skip specified number of bytes of uncompressed data
		/// </summary>
		/// <param name ="count">
		/// Number of bytes to skip
		/// </param>
		/// <returns>
		/// The number of bytes skipped, zero if the end of
		/// stream has been reached
		/// </returns>
		/// <exception cref="ArgumentOutOfRangeException">
		/// <paramref name="count">The number of bytes</paramref> to skip is less than or equal to zero.
		/// </exception>
		public long Skip(long count)
		{
			if (count <= 0)
			{
				throw new ArgumentOutOfRangeException(nameof(count));
			}

			// v0.80 Skip by seeking if underlying stream supports it...
			if (baseInputStream.CanSeek)
			{
				baseInputStream.Seek(count, SeekOrigin.Current);
				return count;
			}
			else
			{
				// Otherwise drain the stream through a small scratch buffer
				int length = 2048;
				if (count < length)
				{
					length = (int)count;
				}

				byte[] tmp = new byte[length];
				int readCount = 1;
				long toSkip = count;

				while ((toSkip > 0) && (readCount > 0))
				{
					if (toSkip < length)
					{
						length = (int)toSkip;
					}

					readCount = baseInputStream.Read(tmp, 0, length);
					toSkip -= readCount;
				}

				return count - toSkip;
			}
		}

		/// <summary>
		/// Clear any cryptographic state.
		/// </summary>
		protected void StopDecrypting()
		{
			inputBuffer.CryptoTransform = null;
		}

		/// <summary>
		/// Returns 0 once the end of the stream (EOF) has been reached.
		/// Otherwise returns 1.
		/// </summary>
		public virtual int Available
		{
			get
			{
				return inf.IsFinished ? 0 : 1;
			}
		}

		/// <summary>
		/// Fills the buffer with more data to decompress.
		/// </summary>
		/// <exception cref="SharpZipBaseException">
		/// Stream ends early
		/// </exception>
		protected void Fill()
		{
			// Protect against redundant calls
			if (inputBuffer.Available <= 0)
			{
				inputBuffer.Fill();
				if (inputBuffer.Available <= 0)
				{
					throw new SharpZipBaseException("Unexpected EOF");
				}
			}
			inputBuffer.SetInflaterInput(inf);
		}

		#region Stream Overrides
		/// <summary>
		/// Gets a value indicating whether the current stream supports reading
		/// </summary>
		public override bool CanRead
		{
			get
			{
				return baseInputStream.CanRead;
			}
		}

		/// <summary>
		/// Gets a value of false indicating seeking is not supported for this stream.
		/// </summary>
		public override bool CanSeek
		{
			get
			{
				return false;
			}
		}

		/// <summary>
		/// Gets a value of false indicating that this stream is not writeable.
		/// </summary>
		public override bool CanWrite
		{
			get
			{
				return false;
			}
		}

		/// <summary>
		/// A value representing the length of the stream in bytes.
/// </summary> public override long Length { get { //return inputBuffer.RawLength; throw new NotSupportedException("InflaterInputStream Length is not supported"); } } /// <summary> /// The current position within the stream. /// Throws a NotSupportedException when attempting to set the position /// </summary> /// <exception cref="NotSupportedException">Attempting to set the position</exception> public override long Position { get { return baseInputStream.Position; } set { throw new NotSupportedException("InflaterInputStream Position not supported"); } } /// <summary> /// Flushes the baseInputStream /// </summary> public override void Flush() { baseInputStream.Flush(); } /// <summary> /// Sets the position within the current stream /// Always throws a NotSupportedException /// </summary> /// <param name="offset">The relative offset to seek to.</param> /// <param name="origin">The <see cref="SeekOrigin"/> defining where to seek from.</param> /// <returns>The new position in the stream.</returns> /// <exception cref="NotSupportedException">Any access</exception> public override long Seek(long offset, SeekOrigin origin) { throw new NotSupportedException("Seek not supported"); } /// <summary> /// Set the length of the current stream /// Always throws a NotSupportedException /// </summary> /// <param name="value">The new length value for the stream.</param> /// <exception cref="NotSupportedException">Any access</exception> public override void SetLength(long value) { throw new NotSupportedException("InflaterInputStream SetLength not supported"); } /// <summary> /// Writes a sequence of bytes to stream and advances the current position /// This method always throws a NotSupportedException /// </summary> /// <param name="buffer">Thew buffer containing data to write.</param> /// <param name="offset">The offset of the first byte to write.</param> /// <param name="count">The number of bytes to write.</param> /// <exception cref="NotSupportedException">Any access</exception> 
public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException("InflaterInputStream Write not supported"); } /// <summary> /// Writes one byte to the current stream and advances the current position /// Always throws a NotSupportedException /// </summary> /// <param name="value">The byte to write.</param> /// <exception cref="NotSupportedException">Any access</exception> public override void WriteByte(byte value) { throw new NotSupportedException("InflaterInputStream WriteByte not supported"); } /// <summary> /// Closes the input stream. When <see cref="IsStreamOwner"></see> /// is true the underlying stream is also closed. /// </summary> protected override void Dispose(bool disposing) { if (!isClosed) { isClosed = true; if (IsStreamOwner) { baseInputStream.Dispose(); } } } /// <summary> /// Reads decompressed data into the provided buffer byte array /// </summary> /// <param name ="buffer"> /// The array to read and decompress data into /// </param> /// <param name ="offset"> /// The offset indicating where the data should be placed /// </param> /// <param name ="count"> /// The number of bytes to decompress /// </param> /// <returns>The number of bytes read. 
		/// Zero signals the end of stream</returns>
		/// <exception cref="SharpZipBaseException">
		/// Inflater needs a dictionary
		/// </exception>
		public override int Read(byte[] buffer, int offset, int count)
		{
			if (inf.IsNeedingDictionary)
			{
				throw new SharpZipBaseException("Need a dictionary");
			}

			int remainingBytes = count;
			while (true)
			{
				int bytesRead = inf.Inflate(buffer, offset, remainingBytes);
				offset += bytesRead;
				remainingBytes -= bytesRead;

				if (remainingBytes == 0 || inf.IsFinished)
				{
					break;
				}

				if (inf.IsNeedingInput)
				{
					// Feed the inflater more compressed data
					Fill();
				}
				else if (bytesRead == 0)
				{
					// No progress, no input requested, not finished: corrupt stream
					throw new ZipException("Dont know what to do");
				}
			}
			return count - remainingBytes;
		}
		#endregion

		#region Instance Fields
		/// <summary>
		/// Decompressor for this stream
		/// </summary>
		protected Inflater inf;

		/// <summary>
		/// <see cref="InflaterInputBuffer">Input buffer</see> for this stream.
		/// </summary>
		protected InflaterInputBuffer inputBuffer;

		/// <summary>
		/// Base stream the inflater reads from.
		/// </summary>
		private Stream baseInputStream;

		/// <summary>
		/// The compressed size
		/// </summary>
		protected long csize;

		/// <summary>
		/// Flag indicating whether this instance has been closed or not.
		/// </summary>
		bool isClosed;
		#endregion
	}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

#pragma warning disable 436

using System;
using System.IO;
using System.Collections.Generic;
using System.Text;
using Microsoft.Build.Engine.UnitTests;
using Microsoft.Build.Framework;
using Microsoft.Build.Logging;
using Microsoft.Build.Shared;
using EventSourceSink = Microsoft.Build.BackEnd.Logging.EventSourceSink;
using Project = Microsoft.Build.Evaluation.Project;
using Xunit;

namespace Microsoft.Build.UnitTests
{
    public class FileLogger_Tests
    {
        /// <summary>
        /// Basic test of the file logger. Writes to a log file in the temp directory.
        /// </summary>
        [Fact]
        public void Basic()
        {
            FileLogger fileLogger = new FileLogger();
            string logFile = FileUtilities.GetTemporaryFile();
            fileLogger.Parameters = "verbosity=Normal;logfile=" + logFile;
            Project project = ObjectModelHelpers.CreateInMemoryProject(@"
                <Project ToolsVersion=`msbuilddefaulttoolsversion` xmlns=`msbuildnamespace`>
                    <Target Name=`Build`>
                        <Message Text=`Hello world from the FileLogger`/>
                    </Target>
                </Project>
                ");
            project.Build(fileLogger);
            project.ProjectCollection.UnregisterAllLoggers();
            string log = File.ReadAllText(logFile);
            Assert.Contains("Hello world from the FileLogger", log); // "Log should have contained message"
            File.Delete(logFile);
        }

        /// <summary>
        /// Basic case of logging a message to a file
        /// Verify it logs and encoding is ANSI
        /// </summary>
        [Fact]
        public void BasicNoExistingFile()
        {
            string log = null;

            try
            {
                log = GetTempFilename();
                SetUpFileLoggerAndLogMessage("logfile=" + log, new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                VerifyFileContent(log, "message here");

                // First raw byte should be the ANSI 'm' of "message here" (no BOM).
                byte[] content = ReadRawBytes(log);
                Assert.Equal((byte)109, content[0]); // 'm'
            }
            finally
            {
                if (null != log) File.Delete(log);
            }
        }

        /// <summary>
        /// Invalid file should error nicely
        /// </summary>
        [Fact]
        [Trait("Category", "netcore-osx-failing")]
        [Trait("Category", "netcore-linux-failing")]
        [Trait("Category", "mono-osx-failing")]
        public void InvalidFile()
        {
            Assert.Throws<LoggerException>(() =>
            {
                string log = null;

                try
                {
                    SetUpFileLoggerAndLogMessage("logfile=||invalid||", new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                }
                finally
                {
                    if (null != log) File.Delete(log);
                }
            }
           );
        }

        /// <summary>
        /// Specific verbosity overrides global verbosity
        /// </summary>
        [Fact]
        public void SpecificVerbosity()
        {
            string log = null;

            try
            {
                log = GetTempFilename();
                FileLogger fl = new FileLogger();
                EventSourceSink es = new EventSourceSink();
                fl.Parameters = "verbosity=diagnostic;logfile=" + log; // diagnostic specific setting
                fl.Verbosity = LoggerVerbosity.Quiet; // quiet global setting
                fl.Initialize(es);
                fl.MessageHandler(null, new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                fl.Shutdown();

                // expect message to appear because diagnostic not quiet verbosity was used
                VerifyFileContent(log, "message here");
            }
            finally
            {
                if (null != log) File.Delete(log);
            }
        }

        /// <summary>
        /// Test the short hand verbosity settings for the file logger
        /// </summary>
        [Fact]
        public void ValidVerbosities()
        {
            string[] verbositySettings = new string[] { "Q", "quiet", "m", "minimal", "N", "normal", "d", "detailed", "diag", "DIAGNOSTIC" };
            LoggerVerbosity[] verbosityEnumerations = new LoggerVerbosity[] { LoggerVerbosity.Quiet, LoggerVerbosity.Quiet,
                                                                              LoggerVerbosity.Minimal, LoggerVerbosity.Minimal,
                                                                              LoggerVerbosity.Normal, LoggerVerbosity.Normal,
                                                                              LoggerVerbosity.Detailed, LoggerVerbosity.Detailed,
                                                                              LoggerVerbosity.Diagnostic, LoggerVerbosity.Diagnostic };
            for (int i = 0; i < verbositySettings.Length; i++)
            {
                FileLogger fl = new FileLogger();
                fl.Parameters = "verbosity=" + verbositySettings[i] + ";";
                EventSourceSink es = new EventSourceSink();
                fl.Initialize(es);
                fl.Shutdown();
                // FIX: xUnit's Assert.Equal takes (expected, actual); the original passed them swapped,
                // which produces misleading failure messages.
                Assert.Equal(verbosityEnumerations[i], fl.Verbosity);
            }

            // Do the same using the v shorthand
            for (int i = 0; i < verbositySettings.Length; i++)
            {
                FileLogger fl = new FileLogger();
                fl.Parameters = "v=" + verbositySettings[i] + ";";
                EventSourceSink es = new EventSourceSink();
                fl.Initialize(es);
                fl.Shutdown();
                Assert.Equal(verbosityEnumerations[i], fl.Verbosity);
            }
        }

        /// <summary>
        /// Invalid verbosity setting
        /// </summary>
        [Fact]
        public void InvalidVerbosity()
        {
            Assert.Throws<LoggerException>(() =>
            {
                FileLogger fl = new FileLogger();
                fl.Parameters = "verbosity=CookiesAndCream";
                EventSourceSink es = new EventSourceSink();
                fl.Initialize(es);
            }
           );
        }

        /// <summary>
        /// Invalid encoding setting
        /// </summary>
        [Fact]
        public void InvalidEncoding()
        {
            Assert.Throws<LoggerException>(() =>
            {
                string log = null;

                try
                {
                    log = GetTempFilename();
                    FileLogger fl = new FileLogger();
                    EventSourceSink es = new EventSourceSink();
                    fl.Parameters = "encoding=foo;logfile=" + log;
                    fl.Initialize(es);
                }
                finally
                {
                    if (null != log) File.Delete(log);
                }
            }
           );
        }

        /// <summary>
        /// Valid encoding setting
        /// </summary>
        [Fact]
        public void ValidEncoding()
        {
            string log = null;

            try
            {
                log = GetTempFilename();
                SetUpFileLoggerAndLogMessage("encoding=utf-16;logfile=" + log, new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                byte[] content = ReadRawBytes(log);

                // FF FE is the BOM for UTF16
                Assert.Equal((byte)255, content[0]);
                Assert.Equal((byte)254, content[1]);
            }
            finally
            {
                if (null != log) File.Delete(log);
            }
        }

        /// <summary>
        /// Valid encoding setting
        /// </summary>
        [Fact]
        public void ValidEncoding2()
        {
            string log = null;

            try
            {
                log = GetTempFilename();
                SetUpFileLoggerAndLogMessage("encoding=utf-8;logfile=" + log, new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                byte[] content = ReadRawBytes(log);

                // EF BB BF is the BOM for UTF8
                Assert.Equal((byte)239, content[0]);
                Assert.Equal((byte)187, content[1]);
                Assert.Equal((byte)191, content[2]);
            }
            finally
            {
                if (null != log) File.Delete(log);
            }
        }

        /// <summary>
        /// Read the raw byte content of a file
        /// </summary>
        /// <param name="log">Path of the file to read.</param>
        /// <returns>Every byte of the file, in order.</returns>
        private byte[] ReadRawBytes(string log)
        {
            byte[] content;
            using (FileStream stream = new FileStream(log, FileMode.Open))
            {
                content = new byte[stream.Length];

                for (int i = 0; i < stream.Length; i++)
                {
                    content[i] = (byte)stream.ReadByte();
                }
            }

            return content;
        }

        /// <summary>
        /// Logging a message to a file that already exists should overwrite it
        /// </summary>
        [Fact]
        public void BasicExistingFileNoAppend()
        {
            string log = null;

            try
            {
                log = GetTempFilename();
                WriteContentToFile(log);
                SetUpFileLoggerAndLogMessage("logfile=" + log, new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                // Pre-existing content must be gone: only the logged message remains.
                VerifyFileContent(log, "message here");
            }
            finally
            {
                if (null != log) File.Delete(log);
            }
        }

        /// <summary>
        /// Logging to a file that already exists, with "append" set, should append
        /// </summary>
        [Fact]
        public void BasicExistingFileAppend()
        {
            string log = null;

            try
            {
                log = GetTempFilename();
                WriteContentToFile(log);
                SetUpFileLoggerAndLogMessage("append;logfile=" + log, new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                VerifyFileContent(log, "existing content\nmessage here");
            }
            finally
            {
                if (null != log) File.Delete(log);
            }
        }

        /// <summary>
        /// Logging to a file in a directory that doesn't exists
        /// </summary>
        [Fact]
        public void BasicNoExistingDirectory()
        {
            string directory = Path.Combine(ObjectModelHelpers.TempProjectDir, Guid.NewGuid().ToString("N"));
            string log = Path.Combine(directory, "build.log");
            Assert.False(Directory.Exists(directory));
            Assert.False(File.Exists(log));

            try
            {
                SetUpFileLoggerAndLogMessage("logfile=" + log, new BuildMessageEventArgs("message here", null, null, MessageImportance.High));
                VerifyFileContent(log, "message here");
            }
            finally
            {
                ObjectModelHelpers.DeleteDirectory(directory);
            }
        }

        /// <summary>
        /// When only warnings and/or errors are requested and none occur, the log file should be empty.
        /// </summary>
        [Theory]
        [InlineData("warningsonly")]
        [InlineData("errorsonly")]
        [InlineData("errorsonly;warningsonly")]
        public void EmptyErrorLogUsingWarningsErrorsOnly(string loggerOption)
        {
            using (var env = TestEnvironment.Create())
            {
                var logFile = env.CreateFile(".log").Path;

                // Note: Only the ParallelConsoleLogger supports this scenario (log file empty on no error/warn). We
                // need to explicitly enable it here with the 'ENABLEMPLOGGING' flag.
                FileLogger fileLogger = new FileLogger { Parameters = $"{loggerOption};logfile={logFile};ENABLEMPLOGGING" };

                Project project = ObjectModelHelpers.CreateInMemoryProject(@"
                <Project ToolsVersion=`msbuilddefaulttoolsversion` xmlns=`msbuildnamespace`>
                    <Target Name=`Build`>
                        <Message Text=`Hello world from the FileLogger`/>
                    </Target>
                </Project>");
                project.Build(fileLogger);
                project.ProjectCollection.UnregisterAllLoggers();

                // File should exist and be 0 length (no summary information, etc.)
                // FIX: reuse the FileInfo we already created instead of constructing a second one
                // for the same path.
                var result = new FileInfo(logFile);
                Assert.True(result.Exists);
                Assert.Equal(0, result.Length);
            }
        }

        /// <summary>
        /// File logger is writing the verbosity level as soon as the build starts.
        /// </summary>
        [Theory]
        [InlineData(LoggerVerbosity.Quiet, false)]
        [InlineData(LoggerVerbosity.Minimal, false)]
        [InlineData(LoggerVerbosity.Normal, true)]
        [InlineData(LoggerVerbosity.Detailed, true)]
        [InlineData(LoggerVerbosity.Diagnostic, true)]
        public void LogVerbosityMessage(LoggerVerbosity loggerVerbosity, bool shouldContain)
        {
            using (var testEnvironment = TestEnvironment.Create())
            {
                var fileLogger = new FileLogger
                {
                    Verbosity = loggerVerbosity
                };

                var logFile = testEnvironment.CreateFile(".log");
                fileLogger.Parameters = "logfile=" + logFile.Path;

                Project project = ObjectModelHelpers.CreateInMemoryProject(@"
                <Project ToolsVersion=`msbuilddefaulttoolsversion` xmlns=`msbuildnamespace`>
                    <Target Name=`Build` />
                </Project>
                ");
                project.Build(fileLogger);
                project.ProjectCollection.UnregisterAllLoggers();

                string log = File.ReadAllText(logFile.Path);
                var message = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("LogLoggerVerbosity", loggerVerbosity);

                if (shouldContain)
                {
                    Assert.Contains(message, log);
                }
                else
                {
                    Assert.DoesNotContain(message, log);
                }
            }
        }

        /// <summary>
        /// Gets a filename for a nonexistent temporary file.
        /// </summary>
        /// <returns>Path to a temp-directory file that does not currently exist.</returns>
        private string GetTempFilename()
        {
            string path = FileUtilities.GetTemporaryFile();
            // Delete the placeholder so the logger itself creates the file.
            File.Delete(path);
            return path;
        }

        /// <summary>
        /// Writes a string to a file.
        /// </summary>
        /// <param name="log">Path of the file to (over)write.</param>
        private void WriteContentToFile(string log)
        {
            using (StreamWriter sw = FileUtilities.OpenWrite(log, false))
            {
                sw.WriteLine("existing content");
            }
        }

        /// <summary>
        /// Creates a FileLogger, sets its parameters and initializes it,
        /// logs a message to it, and calls shutdown
        /// </summary>
        /// <param name="parameters">Parameter string passed straight to the logger.</param>
        /// <param name="message">Message event to raise through the logger.</param>
        private void SetUpFileLoggerAndLogMessage(string parameters, BuildMessageEventArgs message)
        {
            FileLogger fl = new FileLogger();
            EventSourceSink es = new EventSourceSink();
            fl.Parameters = parameters;
            fl.Initialize(es);
            fl.MessageHandler(null, message);
            fl.Shutdown();
            // FIX: removed a redundant trailing "return;" — it is a no-op at the end of a void method.
        }

        /// <summary>
        /// Verifies that a file contains exactly the expected content.
        /// </summary>
        /// <param name="file">Path of the log file to inspect.</param>
        /// <param name="expectedContent">Expected content, lines separated by '\n'.</param>
        private void VerifyFileContent(string file, string expectedContent)
        {
            string actualContent;
            using (StreamReader sr = FileUtilities.OpenRead(file))
            {
                actualContent = sr.ReadToEnd();
            }

            // Compare line by line, ignoring blank lines and surrounding whitespace.
            string[] actualLines = actualContent.Split(MSBuildConstants.NewlineChar, StringSplitOptions.RemoveEmptyEntries);
            string[] expectedLines = expectedContent.Split(MSBuildConstants.NewlineChar, StringSplitOptions.RemoveEmptyEntries);

            Assert.Equal(expectedLines.Length, actualLines.Length);

            for (int i = 0; i < expectedLines.Length; i++)
            {
                Assert.Equal(expectedLines[i].Trim(), actualLines[i].Trim());
            }
        }

        #region DistributedLogger
        /// <summary>
        /// Check the ability of the distributed logger to correctly tell its internal file logger where to log the file
        /// </summary>
        [Fact]
        public void DistributedFileLoggerParameters()
        {
            DistributedFileLogger fileLogger = new DistributedFileLogger();
            try
            {
                // Default log file name with no parameters: msbuild<NodeId>.log in the current directory.
                fileLogger.NodeId = 0;
                fileLogger.Initialize(new EventSourceSink());
                Assert.Equal(0, string.Compare(fileLogger.InternalFilelogger.Parameters, "ForceNoAlign;ShowEventId;ShowCommandLine;logfile=msbuild0.log;", StringComparison.OrdinalIgnoreCase));
                fileLogger.Shutdown();

                // Explicit log file: the node id is inserted before the extension.
                fileLogger.NodeId = 3;
                fileLogger.Parameters = "logfile=" + Path.Combine(Directory.GetCurrentDirectory(), "mylogfile.log");
                fileLogger.Initialize(new EventSourceSink());
                Assert.Equal(0, string.Compare(fileLogger.InternalFilelogger.Parameters, "ForceNoAlign;ShowEventId;ShowCommandLine;logfile=" + Path.Combine(Directory.GetCurrentDirectory(), "mylogfile3.log") + ";", StringComparison.OrdinalIgnoreCase));
                fileLogger.Shutdown();

                fileLogger.NodeId = 4;
                fileLogger.Parameters = "logfile=" + Path.Combine(Directory.GetCurrentDirectory(), "mylogfile.log");
                fileLogger.Initialize(new EventSourceSink());
                Assert.Equal(0, string.Compare(fileLogger.InternalFilelogger.Parameters, "ForceNoAlign;ShowEventId;ShowCommandLine;logfile=" + Path.Combine(Directory.GetCurrentDirectory(), "mylogfile4.log") + ";", StringComparison.OrdinalIgnoreCase));
                fileLogger.Shutdown();

                // Log file in a subdirectory keeps the directory, still inserts the node id.
                Directory.CreateDirectory(Path.Combine(Directory.GetCurrentDirectory(), "tempura"));
                fileLogger.NodeId = 1;
                fileLogger.Parameters = "logfile=" + Path.Combine(Directory.GetCurrentDirectory(), "tempura", "mylogfile.log");
                fileLogger.Initialize(new EventSourceSink());
                Assert.Equal(0, string.Compare(fileLogger.InternalFilelogger.Parameters, "ForceNoAlign;ShowEventId;ShowCommandLine;logfile=" + Path.Combine(Directory.GetCurrentDirectory(), "tempura", "mylogfile1.log") + ";", StringComparison.OrdinalIgnoreCase));
                fileLogger.Shutdown();
            }
            finally
            {
                if (Directory.Exists(Path.Combine(Directory.GetCurrentDirectory(), "tempura")))
                {
                    File.Delete(Path.Combine(Directory.GetCurrentDirectory(), "tempura", "mylogfile1.log"));
                    FileUtilities.DeleteWithoutTrailingBackslash(Path.Combine(Directory.GetCurrentDirectory(), "tempura"));
                }
                File.Delete(Path.Combine(Directory.GetCurrentDirectory(), "mylogfile0.log"));
                File.Delete(Path.Combine(Directory.GetCurrentDirectory(), "mylogfile3.log"));
                File.Delete(Path.Combine(Directory.GetCurrentDirectory(), "mylogfile4.log"));
            }
        }

        /// <summary>
        /// An empty "logfile=" parameter on re-initialization should raise a LoggerException.
        /// </summary>
        [Fact]
        public void DistributedLoggerNullEmpty()
        {
            Assert.Throws<LoggerException>(() =>
            {
                DistributedFileLogger fileLogger = new DistributedFileLogger();
                fileLogger.NodeId = 0;
                fileLogger.Initialize(new EventSourceSink());

                fileLogger.NodeId = 1;
                fileLogger.Parameters = "logfile=";
                fileLogger.Initialize(new EventSourceSink());
                Assert.True(false);
            }
           );
        }
        #endregion
    }
}
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Linq; using NUnit.Framework; using Python.Runtime; using QuantConnect.Data; using QuantConnect.Data.Custom.IconicTypes; using QuantConnect.Data.Market; using QuantConnect.Indicators; using QuantConnect.Python; namespace QuantConnect.Tests.Common.Data { [TestFixture] public class SliceTests { [Test] public void AccessesByDataType() { var now = DateTime.UtcNow; var tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = now }; var unlinkedData = new UnlinkedData { Symbol = Symbols.SPY, Time = now }; var quoteBar = new QuoteBar { Symbol = Symbols.SPY, Time = now }; var tick = new Tick(now, Symbols.SPY, 1.1m, 2.1m) {TickType = TickType.Trade}; var openInterest = new OpenInterest(now, Symbols.SPY, 1); var split = new Split(Symbols.SPY, now, 1, 1, SplitType.SplitOccurred); var delisting = new Delisting(Symbols.SPY, now, 1, DelistingType.Delisted); var slice = new Slice(now, new BaseData[] {quoteBar, tradeBar, unlinkedData, tick, split, delisting, openInterest }); Assert.AreEqual(slice.Get(typeof(TradeBar))[Symbols.SPY], tradeBar); Assert.AreEqual(slice.Get(typeof(UnlinkedData))[Symbols.SPY], unlinkedData); Assert.AreEqual(slice.Get(typeof(QuoteBar))[Symbols.SPY], quoteBar); Assert.AreEqual(slice.Get(typeof(Tick))[Symbols.SPY], 
tick); Assert.AreEqual(slice.Get(typeof(Split))[Symbols.SPY], split); Assert.AreEqual(slice.Get(typeof(Delisting))[Symbols.SPY], delisting); Assert.AreEqual(slice.Get(typeof(OpenInterest))[Symbols.SPY], openInterest); } [Test] public void AccessesBaseBySymbol() { IndicatorDataPoint tick = new IndicatorDataPoint(Symbols.SPY, DateTime.Now, 1); Slice slice = new Slice(DateTime.Now, new[] { tick }); IndicatorDataPoint data = slice[tick.Symbol]; Assert.AreEqual(tick, data); } [Test] public void AccessesTradeBarBySymbol() { TradeBar tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now }; Slice slice = new Slice(DateTime.Now, new[] { tradeBar }); TradeBar data = slice[tradeBar.Symbol]; Assert.AreEqual(tradeBar, data); } [Test] public void EquitiesIgnoreQuoteBars() { var quoteBar = new QuoteBar { Symbol = Symbols.SPY, Time = DateTime.Now }; var slice = new Slice(DateTime.Now, new[] { quoteBar }); Assert.IsFalse(slice.HasData); Assert.IsTrue(slice.ToList().Count == 0); Assert.IsFalse(slice.ContainsKey(Symbols.SPY)); Assert.Throws<KeyNotFoundException>(() => { var data = slice[Symbols.SPY]; }); Assert.AreEqual(0, slice.Count); var tickQuoteBar = new Tick { Symbol = Symbols.SPY, Time = DateTime.Now, TickType = TickType.Quote }; slice = new Slice(DateTime.Now, new[] { tickQuoteBar }); Assert.IsFalse(slice.HasData); Assert.IsTrue(slice.ToList().Count == 0); Assert.IsFalse(slice.ContainsKey(Symbols.SPY)); Assert.Throws<KeyNotFoundException>(() => { var data = slice[Symbols.SPY]; }); Assert.AreEqual(0, slice.Count); } [Test] public void AccessesTradeBarCollection() { TradeBar tradeBar1 = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now }; TradeBar tradeBar2 = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now }; Slice slice = new Slice(DateTime.Now, new[] { tradeBar1, tradeBar2 }); TradeBars tradeBars = slice.Bars; Assert.AreEqual(2, tradeBars.Count); } [Test] public void AccessesTicksBySymbol() { Tick tick1 = new Tick { Time = DateTime.Now, Symbol = 
Symbols.SPY, Value = 1m, Quantity = 2m }; Tick tick2 = new Tick { Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1.1m, Quantity = 2.1m }; Slice slice = new Slice(DateTime.Now, new[] { tick1, tick2 }); List<Tick> data = slice[tick1.Symbol]; Assert.IsInstanceOf(typeof(List<Tick>), data); Assert.AreEqual(2, data.Count); } [Test] public void AccessesTicksCollection() { Tick tick1 = new Tick { Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1, Quantity = 2 }; Tick tick2 = new Tick { Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1.1m, Quantity = 2.1m }; Tick tick3 = new Tick { Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1, Quantity = 2 }; Tick tick4 = new Tick { Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1.1m, Quantity = 2.1m }; Slice slice = new Slice(DateTime.Now, new[] { tick1, tick2, tick3, tick4 }); Ticks ticks = slice.Ticks; Assert.AreEqual(2, ticks.Count); Assert.AreEqual(2, ticks[Symbols.SPY].Count); Assert.AreEqual(2, ticks[Symbols.AAPL].Count); } [Test] public void DifferentCollectionsAreCorrectlyGeneratedSameSymbol() { var quoteBar = new QuoteBar(DateTime.Now, Symbols.SPY, new Bar(3100, 3100, 3100, 3100), 0, new Bar(3101, 3101, 3101, 3101), 0, Time.OneMinute); var tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now }; var slice = new Slice(DateTime.Now, new BaseData[] { quoteBar, tradeBar }); Assert.AreEqual(1, slice.QuoteBars.Count); Assert.AreEqual(1, slice.Bars.Count); Assert.AreEqual(1, slice.Get<QuoteBar>().Count); Assert.AreEqual(1, slice.Get<TradeBar>().Count); } [Test] public void AccessesCustomGenericallyByTypeOtherTypesPresent() { var tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now }; var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now }; var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, tradeBar }); var unlinkedData = slice.Get<UnlinkedData>(); Assert.AreEqual(1, unlinkedData.Count); } [Test] public void 
AccessesCustomGenericallyByType() { var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now }; var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }); var unlinkedData = slice.Get<UnlinkedData>(); Assert.AreEqual(2, unlinkedData.Count); } [Test] public void AccessesTickGenericallyByType() { Tick TickSpy = new Tick { Symbol = Symbols.SPY, Time = DateTime.Now }; Tick TickAapl = new Tick { Symbol = Symbols.AAPL, Time = DateTime.Now }; Slice slice = new Slice(DateTime.Now, new[] { TickSpy, TickAapl }); DataDictionary<Tick> TickData = slice.Get<Tick>(); Assert.AreEqual(2, TickData.Count); } [Test] public void AccessesTradeBarGenericallyByType() { TradeBar TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now }; TradeBar TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now }; Slice slice = new Slice(DateTime.Now, new[] { TradeBarSpy, TradeBarAapl }); DataDictionary<TradeBar> TradeBarData = slice.Get<TradeBar>(); Assert.AreEqual(2, TradeBarData.Count); } [Test] public void AccessesGenericallyByTypeAndSymbol() { var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now }; var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }); var unlinkedData = slice.Get<UnlinkedData>(Symbols.SPY); Assert.AreEqual(unlinkedDataSpy, unlinkedData); } [Test] public void PythonGetCustomData() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * def Test(slice): data = slice.Get(UnlinkedData) return data").GetAttr("Test"); var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 }; var 
slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }); var data = test(new PythonSlice(slice)); Assert.AreEqual(2, (int)data.Count); Assert.AreEqual(10, (int)data[Symbols.SPY].Value); Assert.AreEqual(11, (int)data[Symbols.AAPL].Value); } } [Test] public void PythonEnumerationWorks() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * def Test(slice): for dataPoint in slice: return dataPoint").GetAttr("Test"); var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 }; var slice = new Slice(DateTime.Now, new[] { unlinkedDataAapl }); var data = test(new PythonSlice(slice)) as PyObject; var keyValuePair = data.As<KeyValuePair<Symbol, BaseData>>(); Assert.IsNotNull(keyValuePair); Assert.AreEqual(11, keyValuePair.Value.Value); } } [Test] public void PythonGetBySymbolCustomData() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * from QuantConnect.Tests import * def Test(slice): data = slice.Get(UnlinkedData) value = data[Symbols.AAPL].Value if value != 11: raise Exception('Unexpected value')").GetAttr("Test"); var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 }; var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }); Assert.DoesNotThrow(() => test(new PythonSlice(slice))); } } [Test] public void PythonGetAndSymbolCustomData() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * from QuantConnect.Tests import * def Test(slice): data = slice.Get(UnlinkedData, Symbols.AAPL) value = data.Value if value != 11: raise Exception('Unexpected value')").GetAttr("Test"); var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 }; 
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 }; var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }); Assert.DoesNotThrow(() => test(new PythonSlice(slice))); } } [Test] public void PythonGetTradeBar() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * def Test(slice): data = slice.Get(TradeBar) return data").GetAttr("Test"); var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 8 }; var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 }; var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 }; var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, TradeBarAapl, unlinkedDataAapl, TradeBarSpy }); var data = test(new PythonSlice(slice)); Assert.AreEqual(2, (int)data.Count); Assert.AreEqual(8, (int)data[Symbols.SPY].Value); Assert.AreEqual(9, (int)data[Symbols.AAPL].Value); } } [Test] public void PythonGetBySymbolOpenInterest() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * from QuantConnect.Tests import * def Test(slice): data = slice.Get(OpenInterest) value = data[Symbols.AAPL].Value if value != 33: raise Exception('Unexpected value')").GetAttr("Test"); var now = DateTime.UtcNow; var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = now, Value = 8 }; var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = now, Value = 9 }; var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = now, Value = 10 }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = now, Value = 11 }; var openInterest = new OpenInterest(now, Symbols.AAPL, 33); var slice = new Slice(now, new BaseData[] { unlinkedDataSpy, 
TradeBarAapl, unlinkedDataAapl, TradeBarSpy, openInterest }); Assert.DoesNotThrow(() => test(new PythonSlice(slice))); } } [Test] public void PythonGetBySymbolTradeBar() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * from QuantConnect.Tests import * def Test(slice): data = slice.Get(TradeBar) value = data[Symbols.AAPL].Value if value != 9: raise Exception('Unexpected value')").GetAttr("Test"); var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 8 }; var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 }; var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 }; var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, TradeBarAapl, unlinkedDataAapl, TradeBarSpy }); Assert.DoesNotThrow(() => test(new PythonSlice(slice))); } } [Test] public void PythonGetAndSymbolTradeBar() { using (Py.GIL()) { dynamic test = PythonEngine.ModuleFromString("testModule", @" from AlgorithmImports import * from QuantConnect.Tests import * def Test(slice): data = slice.Get(TradeBar, Symbols.AAPL) value = data.Value if value != 9: raise Exception('Unexpected value')").GetAttr("Test"); var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 8 }; var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 }; var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 }; var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 }; var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, TradeBarAapl, unlinkedDataAapl, TradeBarSpy }); Assert.DoesNotThrow(() => test(new PythonSlice(slice))); } } [Test] public void PythonGetCustomData_Iterate_IndexedLinkedData() { using 
(Py.GIL())
            {
                // NOTE(review): this method's header lies before this chunk — fragment kept as-is.
                // The Python function iterates slice.Get(IndexedLinkedData) and expects exactly two
                // custom points (SPY and AAPL); the interleaved TradeBar must be filtered out.
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *
from QuantConnect.Data.Custom.IconicTypes import *
from QuantConnect.Logging import *

def Test(slice):
    data = slice.Get(IndexedLinkedData)
    count = 0
    for singleData in data:
        Log.Trace(str(singleData))
        count += 1
    if count != 2:
        raise Exception('Unexpected value')").GetAttr("Test");

                var indexedLinkedDataSpy = new IndexedLinkedData
                {
                    Symbol = Symbols.SPY,
                    Time = DateTime.Now,
                    Value = 10
                };
                var tradeBarAapl = new TradeBar
                {
                    Symbol = Symbols.AAPL,
                    Time = DateTime.Now,
                    Value = 9
                };
                var indexedLinkedDataAapl = new IndexedLinkedData
                {
                    Symbol = Symbols.AAPL,
                    Time = DateTime.Now,
                    Value = 11
                };

                var slice = new Slice(DateTime.Now, new BaseData[] { indexedLinkedDataSpy, tradeBarAapl, indexedLinkedDataAapl });

                Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
            }
        }

        /// <summary>
        /// When the slice holds no IndexedLinkedData, Python-side iteration over the result of
        /// slice.Get(IndexedLinkedData) (and over .Values) must yield nothing — twice, to also
        /// cover re-fetching the same type.
        /// </summary>
        [Test]
        public void PythonGetCustomData_Iterate_IndexedLinkedData_Empty()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *
from QuantConnect.Data.Custom.IconicTypes import *

def Test(slice):
    data = slice.Get(IndexedLinkedData)
    for singleData in data:
        raise Exception('Unexpected iteration')
    for singleData in data.Values:
        raise Exception('Unexpected iteration')
    data = slice.Get(IndexedLinkedData)
    for singleData in data:
        raise Exception('Unexpected iteration')
    for singleData in data.Values:
        raise Exception('Unexpected iteration')").GetAttr("Test");

                var tradeBarAapl = new TradeBar
                {
                    Symbol = Symbols.AAPL,
                    Time = DateTime.Now,
                    Value = 9
                };

                var slice = new Slice(DateTime.Now, new List<BaseData> { tradeBarAapl });

                Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
            }
        }

        /// <summary>
        /// Iterating slice.Get(UnlinkedData) from Python counts exactly the two UnlinkedData
        /// points that were put in the slice.
        /// </summary>
        [Test]
        public void PythonGetCustomData_Iterate()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice):
    data = slice.Get(UnlinkedData)
    count = 0
    for singleData in data:
        count += 1
    if count != 2:
        raise Exception('Unexpected value')").GetAttr("Test");

                var unlinkedDataSpy = new UnlinkedData
                {
                    Symbol = Symbols.SPY,
                    Time = DateTime.Now,
                    Value = 10
                };
                var unlinkedDataAapl = new UnlinkedData
                {
                    Symbol = Symbols.AAPL,
                    Time = DateTime.Now,
                    Value = 11
                };

                var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl });

                Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
            }
        }

        /// <summary>
        /// Enumerating a slice built from four ticks (two per symbol) yields all four entries.
        /// </summary>
        [Test]
        public void EnumeratorDoesNotThrowWithTicks()
        {
            var slice = new Slice(DateTime.Now, new[]
            {
                new Tick {Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1, Quantity = 2},
                new Tick{Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1.1m, Quantity = 2.1m},
                new Tick{Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1, Quantity = 2},
                new Tick{Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1.1m, Quantity = 2.1m}
            });

            Assert.AreEqual(4, slice.Count());
        }

        /// <summary>
        /// A symbol may carry both a TradeBar and a QuoteBar in the same slice; Get&lt;T&gt; must
        /// return the right bar for each type — both when the typed collections are passed to the
        /// Slice constructor explicitly and when the slice is built from the raw data array alone.
        /// </summary>
        [Test]
        public void AccessesTradeBarAndQuoteBarForSameSymbol()
        {
            var tradeBar = new TradeBar(DateTime.Now, Symbols.BTCUSD,
                3000, 3000, 3000, 3000, 100, Time.OneMinute);

            var quoteBar = new QuoteBar(DateTime.Now, Symbols.BTCUSD,
                    new Bar(3100, 3100, 3100, 3100), 0,
                    new Bar(3101, 3101, 3101, 3101), 0,
                    Time.OneMinute);

            var tradeBars = new TradeBars { { Symbols.BTCUSD, tradeBar } };
            var quoteBars = new QuoteBars { { Symbols.BTCUSD, quoteBar } };

            var slice = new Slice(DateTime.Now, new BaseData[] { tradeBar, quoteBar }, tradeBars, quoteBars, null, null, null, null, null, null, null);

            var tradeBarData = slice.Get<TradeBar>();
            Assert.AreEqual(1, tradeBarData.Count);
            Assert.AreEqual(3000, tradeBarData[Symbols.BTCUSD].Close);

            var quoteBarData = slice.Get<QuoteBar>();
            Assert.AreEqual(1, quoteBarData.Count);
            Assert.AreEqual(3100, quoteBarData[Symbols.BTCUSD].Bid.Close);
            Assert.AreEqual(3101, quoteBarData[Symbols.BTCUSD].Ask.Close);

            // Same checks when only the flat data array is supplied.
            slice = new Slice(DateTime.Now, new BaseData[] { tradeBar, quoteBar });

            tradeBarData = slice.Get<TradeBar>();
            Assert.AreEqual(1, tradeBarData.Count);
            Assert.AreEqual(3000, tradeBarData[Symbols.BTCUSD].Close);

            quoteBarData = slice.Get<QuoteBar>();
            Assert.AreEqual(1, quoteBarData.Count);
            Assert.AreEqual(3100, quoteBarData[Symbols.BTCUSD].Bid.Close);
            Assert.AreEqual(3101, quoteBarData[Symbols.BTCUSD].Ask.Close);
        }

        /// <summary>Python dict.clear() on a slice must throw — the slice is read-only.</summary>
        [Test]
        public void PythonSlice_clear()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice):
    slice.clear()").GetAttr("Test");

                Assert.Throws<PythonException>(() => test(GetPythonSlice()), "Slice is read-only: cannot clear the collection");
            }
        }

        /// <summary>Python dict.popitem() on a slice must throw — the slice is read-only.</summary>
        [Test]
        public void PythonSlice_popitem()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice):
    slice.popitem()").GetAttr("Test");

                Assert.Throws<PythonException>(() => test(GetPythonSlice()), "Slice is read-only: cannot pop an item from the collection");
            }
        }

        /// <summary>Python dict.pop(key) on a slice must throw — the slice is read-only.</summary>
        [Test]
        public void PythonSlice_pop()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol):
    slice.pop(symbol)").GetAttr("Test");

                Assert.Throws<PythonException>(() => test(GetPythonSlice(), Symbols.SPY),
                    $"Slice is read-only: cannot pop the value for {Symbols.SPY} from the collection");
            }
        }

        /// <summary>Python dict.pop(key, default) on a slice must also throw.</summary>
        [Test]
        public void PythonSlice_pop_default()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol, default_value):
    slice.pop(symbol, default_value)").GetAttr("Test");

                Assert.Throws<PythonException>(() => test(GetPythonSlice(), Symbols.SPY, null),
                    $"Slice is read-only: cannot pop the value for {Symbols.SPY} from the collection");
            }
        }

        /// <summary>Python dict.update(...) on the slice itself must throw (read-only).</summary>
        [Test]
        public void PythonSlice_update_fails()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol):
    item = { symbol: 1 }
    slice.update(item)").GetAttr("Test");

                Assert.Throws<PythonException>(() => test(GetPythonSlice(), Symbols.SPY), "Slice is read-only: cannot update the collection");
            }
        }

        /// <summary>
        /// Unlike the slice itself, its Bars collection IS mutable from Python: update() replaces
        /// the stored bar, observable from C# afterwards.
        /// </summary>
        [Test]
        public void PythonSlice_update_success()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol, bar):
    item = { symbol: bar }
    slice.Bars.update(item)").GetAttr("Test");

                var expected = new TradeBar();
                var pythonSlice = GetPythonSlice();
                Assert.DoesNotThrow(() => test(pythonSlice, Symbols.SPY, expected));
                Assert.AreEqual(expected, pythonSlice.Bars[Symbols.SPY]);
            }
        }

        /// <summary>'symbol in slice' works for both the C# Slice and the PythonSlice wrapper.</summary>
        [Test]
        public void PythonSlice_contains()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *
AddReference(""QuantConnect.Tests"")
from QuantConnect.Tests.Common.Data import *

def Test(slice, symbol):
    return symbol in slice").GetAttr("Test");

                bool result = false;
                Assert.DoesNotThrow(() => result = test(GetSlice(), Symbols.SPY));
                Assert.IsTrue(result);

                result = false;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
                Assert.IsTrue(result);
            }
        }

        /// <summary>
        /// Manual benchmark comparing the Python dict protocol (__contains__, __len__, keys,
        /// values, get, items) against the equivalent C# members. Reports timings via
        /// Assert.Ignore; normally disabled.
        /// </summary>
        [Test, Ignore("Performance test")]
        public void PythonSlice_performance()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *
AddReference(""QuantConnect.Tests"")
from QuantConnect.Tests.Common.Data import *

def Test(slice, symbol):
    msg = '__contains__'
    if 'SPY' in slice:
        msg += ' Py'
        now = datetime.now()
        for i in range(0,1000000):
            result = 'SPY' in slice
        span1 = (datetime.now()-now).total_seconds()
    if slice.ContainsKey('SPY'):
        msg += ' C#\n'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.ContainsKey('SPY')
        span2 = (datetime.now()-now).total_seconds()
        msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'

    msg += '\n\n__len__'
    if len(slice) > 0:
        msg += ' Py'
        now = datetime.now()
        for i in range(0,1000000):
            result = len(slice)
        span1 = (datetime.now()-now).total_seconds()
    if slice.Count > 0:
        msg += ' C#\n'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.Count
        span2 = (datetime.now()-now).total_seconds()
        msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'

    msg += '\n\nkeys()'
    if len(slice.keys()) > 0:
        msg += ' Py'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.keys()
        span1 = (datetime.now()-now).total_seconds()
    if len(slice.Keys) > 0:
        msg += ' C#\n'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.Keys
        span2 = (datetime.now()-now).total_seconds()
        msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'

    msg += '\n\nvalues()'
    if len(slice.values()) > 0:
        msg += ' Py'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.values()
        span1 = (datetime.now()-now).total_seconds()
    if len(slice.Values) > 0:
        msg += ' C#\n'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.Values
        span2 = (datetime.now()-now).total_seconds()
        msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'

    msg += '\n\nget()'
    if slice.get(symbol):
        msg += ' Py'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.get(symbol)
        span1 = (datetime.now()-now).total_seconds()
    dummy = None
    if slice.TryGetValue(symbol, dummy):
        msg += ' C#\n'
        now = datetime.now()
        for i in range(0,1000000):
            result = slice.TryGetValue(symbol, dummy)
        span2 = (datetime.now()-now).total_seconds()
        msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'

    msg += '\n\nitems()'
    if slice.items():
        msg += ' Py'
        now = datetime.now()
        for i in range(0,1000000):
            result = list(slice.items())
        span1 = (datetime.now()-now).total_seconds()
    msg += ' C#\n'
    now = datetime.now()
    for i in range(0,1000000):
        result = [x for x in slice]
    span2 = (datetime.now()-now).total_seconds()
    msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'
    return msg").GetAttr("Test");

                var message = string.Empty;
                Assert.DoesNotThrow(() => message = test(GetPythonSlice(), Symbols.SPY));
                Assert.Ignore(message);
            }
        }

        /// <summary>len(slice) reports the number of distinct symbols (2 in the fixture).</summary>
        [Test]
        public void PythonSlice_len()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *
AddReference(""QuantConnect.Tests"")
from QuantConnect.Tests.Common.Data import *

def Test(slice, symbol):
    return len(slice)").GetAttr("Test");

                var result = -1;
                Assert.DoesNotThrow(() => result = test(GetSlice(), Symbols.SPY));
                Assert.AreEqual(2, result);

                result = -1;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
                Assert.AreEqual(2, result);
            }
        }

        /// <summary>slice.copy() produces a real dict that can be iterated with items().</summary>
        [Test]
        public void PythonSlice_copy()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol):
    copy = slice.copy()
    return ', '.join([f'{k}: {v.Value}' for k,v in copy.items()])").GetAttr("Test");

                var result = string.Empty;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
                Assert.AreEqual("SPY R735QTJ8XC9X: 10.0, AAPL R735QTJ8XC9X: 11.0", result);
            }
        }

        /// <summary>slice.items() yields (symbol, data) pairs like a Python dict.</summary>
        [Test]
        public void PythonSlice_items()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice):
    return ', '.join([f'{k}: {v.Value}' for k,v in slice.items()])").GetAttr("Test");

                var result = string.Empty;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice()));
                Assert.AreEqual("SPY R735QTJ8XC9X: 10.0, AAPL R735QTJ8XC9X: 11.0", result);
            }
        }

        /// <summary>slice.keys() matches the C#-side Keys collection.</summary>
        [Test]
        public void PythonSlice_keys()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice):
    return slice.keys()").GetAttr("Test");

                var slice = GetPythonSlice();
                var result = new List<Symbol>();
                Assert.DoesNotThrow(() => result = test(slice));
                foreach (var key in slice.Keys)
                {
                    Assert.IsTrue(result.Contains(key));
                }
            }
        }

        /// <summary>slice.values() matches the C#-side Values collection.</summary>
        [Test]
        public void PythonSlice_values()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice):
    return slice.values()").GetAttr("Test");

                var slice = GetPythonSlice();
                var result = new List<BaseData>();
                Assert.DoesNotThrow(() => result = test(slice));
                foreach (var value in slice.Values)
                {
                    Assert.IsTrue(result.Contains(value));
                }
            }
        }

        /// <summary>slice.fromkeys(keys) builds a new dict restricted to the given symbols.</summary>
        [Test]
        public void PythonSlice_fromkeys()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, keys):
    newDict = slice.fromkeys(keys)
    return ', '.join([f'{k}: {v.Value}' for k,v in newDict.items()])").GetAttr("Test");

                var result = string.Empty;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), new[] { Symbols.SPY }));
                Assert.AreEqual("SPY R735QTJ8XC9X: 10.0", result);
            }
        }

        /// <summary>
        /// slice.fromkeys(keys, default) uses the supplied default for symbols absent from the
        /// slice (EURUSD is not in the fixture).
        /// </summary>
        [Test]
        public void PythonSlice_fromkeys_default()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, keys, default_value):
    newDict = slice.fromkeys(keys, default_value)
    return ', '.join([f'{k}: {v.Value}' for k,v in newDict.items()])").GetAttr("Test");

                var result = string.Empty;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), new[] { Symbols.EURUSD }, new Tick()));
                Assert.AreEqual("EURUSD 8G: 0.0", result);
            }
        }

        /// <summary>slice.get(symbol) returns the stored data when the key is present.</summary>
        [Test]
        public void PythonSlice_get_success()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol):
    return slice.get(symbol)").GetAttr("Test");

                var pythonSlice = GetPythonSlice();
                dynamic expected = pythonSlice[Symbols.SPY];
                PyObject result = null;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY ));
                BaseData actual;
                Assert.IsTrue(result.TryConvert(out actual));
                Assert.AreEqual(expected.Symbol, actual.Symbol);
                Assert.AreEqual(expected.Value, actual.Value);
            }
        }

        /// <summary>slice.get(symbol, default) returns the default for a missing key.</summary>
        [Test]
        public void PythonSlice_get_default()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol, default_value):
    return slice.get(symbol, default_value)").GetAttr("Test");

                var pythonSlice = GetPythonSlice();
                var expected = new QuoteBar
                {
                    Symbol = Symbols.EURUSD,
                    Time = DateTime.Now,
                    Value = 9
                };
                PyObject result = null;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.EURUSD, expected));
                BaseData actual;
                Assert.IsTrue(result.TryConvert(out actual));
                Assert.AreEqual(expected.Symbol, actual.Symbol);
                Assert.AreEqual(expected.Value, actual.Value);
            }
        }

        /// <summary>slice.get(symbol) with no default returns None for a missing key.</summary>
        [Test]
        public void PythonSlice_get_NoneIfKeyNotFound()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol):
    return slice.get(symbol)").GetAttr("Test");

                Assert.IsNull(test(GetPythonSlice(), Symbols.EURUSD));
            }
        }

        /// <summary>slice.setdefault(symbol) returns the existing value for a present key.</summary>
        [Test]
        public void PythonSlice_setdefault_success()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol):
    return slice.setdefault(symbol)").GetAttr("Test");

                var pythonSlice = GetPythonSlice();
                dynamic expected = pythonSlice[Symbols.SPY];
                PyObject result = null;
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
                BaseData actual;
                Assert.IsTrue(result.TryConvert(out actual));
                Assert.AreEqual(expected.Symbol, actual.Symbol);
                Assert.AreEqual(expected.Value, actual.Value);
            }
        }

        /// <summary>
        /// slice.setdefault(symbol, default) also returns the existing value when the key is
        /// present — no write is attempted, so no read-only error is raised.
        /// </summary>
        [Test]
        public void PythonSlice_setdefault_default_success()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol, default_value):
    return slice.setdefault(symbol, default_value)").GetAttr("Test");

                var value = new Tick();
                var pythonSlice = GetPythonSlice();
                dynamic expected = pythonSlice[Symbols.SPY];
                PyObject result = null;
                // Since SPY is found, no need to set the default. Therefore it does not throw.
                Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY, value));
                BaseData actual;
                Assert.IsTrue(result.TryConvert(out actual));
                Assert.AreEqual(expected.Symbol, actual.Symbol);
                Assert.AreEqual(expected.Value, actual.Value);
            }
        }

        /// <summary>
        /// slice.setdefault for a missing key would have to mutate the slice, which is read-only,
        /// so it throws.
        /// </summary>
        [Test]
        public void PythonSlice_setdefault_keynotfound()
        {
            using (Py.GIL())
            {
                dynamic test = PythonEngine.ModuleFromString("testModule",
                    @"
from AlgorithmImports import *

def Test(slice, symbol):
    return slice.setdefault(symbol)").GetAttr("Test");

                var symbol = Symbols.EURUSD;
                Assert.Throws<PythonException>(() => test(GetPythonSlice(), symbol),
                    $"Slice is read-only: cannot set default value to for {symbol}");
            }
        }

        /// <summary>
        /// Shared fixture: SPY custom data (10), AAPL TradeBar (9) and AAPL custom data (11).
        /// SymbolCache is cleared first so string-based symbol lookups start from a known state.
        /// </summary>
        private Slice GetSlice()
        {
            SymbolCache.Clear();
            var indexedLinkedDataSpy = new IndexedLinkedData
            {
                Symbol = Symbols.SPY,
                Time = DateTime.Now,
                Value = 10
            };
            var tradeBarAapl = new TradeBar
            {
                Symbol = Symbols.AAPL,
                Time = DateTime.Now,
                Value = 9
            };
            var indexedLinkedDataAapl = new IndexedLinkedData
            {
                Symbol = Symbols.AAPL,
                Time = DateTime.Now,
                Value = 11
            };
            return new Slice(DateTime.Now, new BaseData[] { indexedLinkedDataSpy, tradeBarAapl, indexedLinkedDataAapl });
        }

        // Same fixture wrapped in the Python-facing slice adapter.
        private PythonSlice GetPythonSlice() => new PythonSlice(GetSlice());
    }

    /// <summary>Simple type with a public array field, used to exercise Python interop with arrays.</summary>
    public class PublicArrayTest
    {
        public int[] items;

        public PublicArrayTest()
        {
            items = new int[5] { 0, 1, 2, 3, 4 };
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Xml.Schema
{
    using System.Collections;
    using System.ComponentModel;
    using System.Xml.Serialization;

    /// <summary>
    /// Represents the XML Schema complexType element. Holds both the authored content
    /// (ContentModel/Particle/Attributes) and the compiler-produced state
    /// (ContentTypeParticle, AttributeUses, AttributeWildcard, BlockResolved).
    /// </summary>
    public class XmlSchemaComplexType : XmlSchemaType
    {
        private XmlSchemaDerivationMethod _block = XmlSchemaDerivationMethod.None;

        // Authored (pre-compilation) content.
        private XmlSchemaContentModel _contentModel;
        private XmlSchemaParticle _particle;
        private XmlSchemaObjectCollection _attributes;
        private XmlSchemaAnyAttribute _anyAttribute;

        // Post-compilation state; reset by ClearCompiledState() when the type is cloned.
        private XmlSchemaParticle _contentTypeParticle = XmlSchemaParticle.Empty;
        private XmlSchemaDerivationMethod _blockResolved;
        private XmlSchemaObjectTable _localElements;
        private XmlSchemaObjectTable _attributeUses;
        private XmlSchemaAnyAttribute _attributeWildcard;

        // Singleton built-in types, created once in the static constructor.
        private static XmlSchemaComplexType s_anyTypeLax;
        private static XmlSchemaComplexType s_anyTypeSkip;
        private static XmlSchemaComplexType s_untypedAnyType;

        //additional info for Partial validation
        private byte _pvFlags;
        private const byte wildCardMask = 0x01;
        private const byte isMixedMask = 0x02;
        private const byte isAbstractMask = 0x04;
        //const byte dupDeclMask = 0x08;

        static XmlSchemaComplexType()
        {
            s_anyTypeLax = CreateAnyType(XmlSchemaContentProcessing.Lax);
            s_anyTypeSkip = CreateAnyType(XmlSchemaContentProcessing.Skip);

            // Create xdt:untypedAny
            s_untypedAnyType = new XmlSchemaComplexType();
            s_untypedAnyType.SetQualifiedName(new XmlQualifiedName("untypedAny", XmlReservedNs.NsXQueryDataType));
            s_untypedAnyType.IsMixed = true;
            s_untypedAnyType.SetContentTypeParticle(s_anyTypeLax.ContentTypeParticle);
            s_untypedAnyType.SetContentType(XmlSchemaContentType.Mixed);

            s_untypedAnyType.ElementDecl = SchemaElementDecl.CreateAnyTypeElementDecl();
            s_untypedAnyType.ElementDecl.SchemaType = s_untypedAnyType;
            s_untypedAnyType.ElementDecl.ContentValidator = AnyTypeContentValidator;
        }

        /// <summary>
        /// Builds the built-in xs:anyType: mixed content, a single optional-to-unbounded
        /// wildcard element, and a wildcard attribute, both with the given processContents.
        /// </summary>
        private static XmlSchemaComplexType CreateAnyType(XmlSchemaContentProcessing processContents)
        {
            XmlSchemaComplexType localAnyType = new XmlSchemaComplexType();
            localAnyType.SetQualifiedName(DatatypeImplementation.QnAnyType);

            XmlSchemaAny anyElement = new XmlSchemaAny();
            anyElement.MinOccurs = decimal.Zero;
            anyElement.MaxOccurs = decimal.MaxValue;
            anyElement.ProcessContents = processContents;
            anyElement.BuildNamespaceList(null);
            XmlSchemaSequence seq = new XmlSchemaSequence();
            seq.Items.Add(anyElement);

            localAnyType.SetContentTypeParticle(seq);
            localAnyType.SetContentType(XmlSchemaContentType.Mixed);

            localAnyType.ElementDecl = SchemaElementDecl.CreateAnyTypeElementDecl();
            localAnyType.ElementDecl.SchemaType = localAnyType;

            //Create contentValidator for Any
            ParticleContentValidator contentValidator = new ParticleContentValidator(XmlSchemaContentType.Mixed);
            contentValidator.Start();
            contentValidator.OpenGroup();
            contentValidator.AddNamespaceList(anyElement.NamespaceList, anyElement);
            contentValidator.AddStar();
            contentValidator.CloseGroup();
            ContentValidator anyContentValidator = contentValidator.Finish(true);
            localAnyType.ElementDecl.ContentValidator = anyContentValidator;

            XmlSchemaAnyAttribute anyAttribute = new XmlSchemaAnyAttribute();
            anyAttribute.ProcessContents = processContents;
            anyAttribute.BuildNamespaceList(null);
            localAnyType.SetAttributeWildcard(anyAttribute);
            localAnyType.ElementDecl.AnyAttribute = anyAttribute;
            return localAnyType;
        }

        public XmlSchemaComplexType()
        {
        }

        [XmlIgnore]
        internal static XmlSchemaComplexType AnyType
        {
            get { return s_anyTypeLax; }
        }

        [XmlIgnore]
        internal static XmlSchemaComplexType UntypedAnyType
        {
            get { return s_untypedAnyType; }
        }

        [XmlIgnore]
        internal static XmlSchemaComplexType AnyTypeSkip
        {
            get { return s_anyTypeSkip; }
        }

        internal static ContentValidator AnyTypeContentValidator
        {
            get
            {
                return s_anyTypeLax.ElementDecl.ContentValidator;
            }
        }

        // Backed by a bit in _pvFlags rather than a bool field to keep the object small.
        [XmlAttribute("abstract"), DefaultValue(false)]
        public bool IsAbstract
        {
            get { return (_pvFlags & isAbstractMask) != 0; }
            set
            {
                if (value)
                {
                    _pvFlags = (byte)(_pvFlags | isAbstractMask);
                }
                else
                {
                    _pvFlags = (byte)(_pvFlags & ~isAbstractMask);
                }
            }
        }

        [XmlAttribute("block"), DefaultValue(XmlSchemaDerivationMethod.None)]
        public XmlSchemaDerivationMethod Block
        {
            get { return _block; }
            set { _block = value; }
        }

        // Bit-packed like IsAbstract.
        [XmlAttribute("mixed"), DefaultValue(false)]
        public override bool IsMixed
        {
            get { return (_pvFlags & isMixedMask) != 0; }
            set
            {
                if (value)
                {
                    _pvFlags = (byte)(_pvFlags | isMixedMask);
                }
                else
                {
                    _pvFlags = (byte)(_pvFlags & ~isMixedMask);
                }
            }
        }

        [XmlElement("simpleContent", typeof(XmlSchemaSimpleContent)),
         XmlElement("complexContent", typeof(XmlSchemaComplexContent))]
        public XmlSchemaContentModel ContentModel
        {
            get { return _contentModel; }
            set { _contentModel = value; }
        }

        [XmlElement("group", typeof(XmlSchemaGroupRef)),
         XmlElement("choice", typeof(XmlSchemaChoice)),
         XmlElement("all", typeof(XmlSchemaAll)),
         XmlElement("sequence", typeof(XmlSchemaSequence))]
        public XmlSchemaParticle Particle
        {
            get { return _particle; }
            set { _particle = value; }
        }

        // Lazily created so empty complex types pay no allocation cost.
        [XmlElement("attribute", typeof(XmlSchemaAttribute)),
         XmlElement("attributeGroup", typeof(XmlSchemaAttributeGroupRef))]
        public XmlSchemaObjectCollection Attributes
        {
            get
            {
                if (_attributes == null)
                {
                    _attributes = new XmlSchemaObjectCollection();
                }
                return _attributes;
            }
        }

        [XmlElement("anyAttribute")]
        public XmlSchemaAnyAttribute AnyAttribute
        {
            get { return _anyAttribute; }
            set { _anyAttribute = value; }
        }

        [XmlIgnore]
        public XmlSchemaContentType ContentType
        {
            get { return SchemaContentType; }
        }

        [XmlIgnore]
        public XmlSchemaParticle ContentTypeParticle
        {
            get { return _contentTypeParticle; }
        }

        [XmlIgnore]
        public XmlSchemaDerivationMethod BlockResolved
        {
            get { return _blockResolved; }
        }

        [XmlIgnore]
        public XmlSchemaObjectTable AttributeUses
        {
            get
            {
                if (_attributeUses == null)
                {
                    _attributeUses = new XmlSchemaObjectTable();
                }
                return _attributeUses;
            }
        }

        [XmlIgnore]
        public XmlSchemaAnyAttribute AttributeWildcard
        {
            get { return _attributeWildcard; }
        }

        [XmlIgnore]
        internal XmlSchemaObjectTable LocalElements
        {
            get
            {
                if (_localElements == null)
                {
                    _localElements = new XmlSchemaObjectTable();
                }
                return _localElements;
            }
        }

        internal void SetContentTypeParticle(XmlSchemaParticle value)
        {
            _contentTypeParticle = value;
        }

        internal void SetBlockResolved(XmlSchemaDerivationMethod value)
        {
            _blockResolved = value;
        }

        internal void SetAttributeWildcard(XmlSchemaAnyAttribute value)
        {
            _attributeWildcard = value;
        }

        internal bool HasWildCard
        {
            get { return (_pvFlags & wildCardMask) != 0; }
            set
            {
                if (value)
                {
                    _pvFlags = (byte)(_pvFlags | wildCardMask);
                }
                else
                {
                    _pvFlags = (byte)(_pvFlags & ~wildCardMask);
                }
            }
        }

        // Base type name extracted from whichever extension/restriction variant the
        // content model holds; empty means the type derives from anyType.
        internal override XmlQualifiedName DerivedFrom
        {
            get
            {
                if (_contentModel == null)
                {
                    // type derived from anyType
                    return XmlQualifiedName.Empty;
                }
                if (_contentModel.Content is XmlSchemaComplexContentRestriction)
                    return ((XmlSchemaComplexContentRestriction)_contentModel.Content).BaseTypeName;
                else if (_contentModel.Content is XmlSchemaComplexContentExtension)
                    return ((XmlSchemaComplexContentExtension)_contentModel.Content).BaseTypeName;
                else if (_contentModel.Content is XmlSchemaSimpleContentRestriction)
                    return ((XmlSchemaSimpleContentRestriction)_contentModel.Content).BaseTypeName;
                else if (_contentModel.Content is XmlSchemaSimpleContentExtension)
                    return ((XmlSchemaSimpleContentExtension)_contentModel.Content).BaseTypeName;
                else
                    return XmlQualifiedName.Empty;
            }
        }

        internal void SetAttributes(XmlSchemaObjectCollection newAttributes)
        {
            _attributes = newAttributes;
        }

        /// <summary>
        /// Counts ID-typed, non-prohibited attribute uses.
        /// findAll=true asks "more than one?" (duplicate-ID error); false asks "at least one?".
        /// </summary>
        internal bool ContainsIdAttribute(bool findAll)
        {
            int idCount = 0;
            foreach (XmlSchemaAttribute attribute in this.AttributeUses.Values)
            {
                if (attribute.Use != XmlSchemaUse.Prohibited)
                {
                    XmlSchemaDatatype datatype = attribute.Datatype;
                    if (datatype != null && datatype.TypeCode == XmlTypeCode.Id)
                    {
                        idCount++;
                        if (idCount > 1)
                        { //two or more attributes is error
                            break;
                        }
                    }
                }
            }
            return findAll ? (idCount > 1) : (idCount > 0);
        }

        internal override XmlSchemaObject Clone()
        {
            System.Diagnostics.Debug.Fail("Should never call Clone() on XmlSchemaComplexType. Call Clone(XmlSchema) instead.");
            return Clone(null);
        }

        /// <summary>
        /// Clones the type for inclusion into a chameleon schema. Starts from a memberwise
        /// copy, then deep-clones every QName-bearing piece (base type names, attribute/group
        /// refs, particles) so namespace resolution in the including schema cannot corrupt the
        /// original, and finally resets all compiled state.
        /// </summary>
        internal XmlSchemaObject Clone(XmlSchema parentSchema)
        {
            XmlSchemaComplexType complexType = (XmlSchemaComplexType)MemberwiseClone();

            //Deep clone the QNames as these will be updated on chameleon includes
            if (complexType.ContentModel != null)
            { //simpleContent or complexContent
                XmlSchemaSimpleContent simpleContent = complexType.ContentModel as XmlSchemaSimpleContent;
                if (simpleContent != null)
                {
                    XmlSchemaSimpleContent newSimpleContent = (XmlSchemaSimpleContent)simpleContent.Clone();
                    XmlSchemaSimpleContentExtension simpleExt = simpleContent.Content as XmlSchemaSimpleContentExtension;
                    if (simpleExt != null)
                    {
                        XmlSchemaSimpleContentExtension newSimpleExt = (XmlSchemaSimpleContentExtension)simpleExt.Clone();
                        newSimpleExt.BaseTypeName = simpleExt.BaseTypeName.Clone();
                        newSimpleExt.SetAttributes(CloneAttributes(simpleExt.Attributes));
                        newSimpleContent.Content = newSimpleExt;
                    }
                    else
                    { //simpleContent.Content is XmlSchemaSimpleContentRestriction
                        XmlSchemaSimpleContentRestriction simpleRest = (XmlSchemaSimpleContentRestriction)simpleContent.Content;
                        XmlSchemaSimpleContentRestriction newSimpleRest = (XmlSchemaSimpleContentRestriction)simpleRest.Clone();
                        newSimpleRest.BaseTypeName = simpleRest.BaseTypeName.Clone();
                        newSimpleRest.SetAttributes(CloneAttributes(simpleRest.Attributes));
                        newSimpleContent.Content = newSimpleRest;
                    }

                    complexType.ContentModel = newSimpleContent;
                }
                else
                { // complexType.ContentModel is XmlSchemaComplexContent
                    XmlSchemaComplexContent complexContent = (XmlSchemaComplexContent)complexType.ContentModel;
                    XmlSchemaComplexContent newComplexContent = (XmlSchemaComplexContent)complexContent.Clone();
                    XmlSchemaComplexContentExtension complexExt = complexContent.Content as XmlSchemaComplexContentExtension;
                    if (complexExt != null)
                    {
                        XmlSchemaComplexContentExtension newComplexExt = (XmlSchemaComplexContentExtension)complexExt.Clone();
                        newComplexExt.BaseTypeName = complexExt.BaseTypeName.Clone();
                        newComplexExt.SetAttributes(CloneAttributes(complexExt.Attributes));
                        if (HasParticleRef(complexExt.Particle, parentSchema))
                        {
                            newComplexExt.Particle = CloneParticle(complexExt.Particle, parentSchema);
                        }
                        newComplexContent.Content = newComplexExt;
                    }
                    else
                    { // complexContent.Content is XmlSchemaComplexContentRestriction
                        XmlSchemaComplexContentRestriction complexRest = complexContent.Content as XmlSchemaComplexContentRestriction;
                        XmlSchemaComplexContentRestriction newComplexRest = (XmlSchemaComplexContentRestriction)complexRest.Clone();
                        newComplexRest.BaseTypeName = complexRest.BaseTypeName.Clone();
                        newComplexRest.SetAttributes(CloneAttributes(complexRest.Attributes));
                        if (HasParticleRef(newComplexRest.Particle, parentSchema))
                        {
                            newComplexRest.Particle = CloneParticle(newComplexRest.Particle, parentSchema);
                        }
                        newComplexContent.Content = newComplexRest;
                    }

                    complexType.ContentModel = newComplexContent;
                }
            }
            else
            { //equals XmlSchemaComplexContent with baseType is anyType
                if (HasParticleRef(complexType.Particle, parentSchema))
                {
                    complexType.Particle = CloneParticle(complexType.Particle, parentSchema);
                }
                complexType.SetAttributes(CloneAttributes(complexType.Attributes));
            }
            complexType.ClearCompiledState();
            return complexType;
        }

        private void ClearCompiledState()
        {
            //Re-set post-compiled state for cloned object
            _attributeUses = null;
            _localElements = null;
            _attributeWildcard = null;
            _contentTypeParticle = XmlSchemaParticle.Empty;
            _blockResolved = XmlSchemaDerivationMethod.None;
        }

        /// <summary>
        /// Returns a copy of the collection with every QName-bearing entry (group refs,
        /// attributes with ref/type names) cloned; returns the original collection unchanged
        /// when nothing in it needs cloning.
        /// </summary>
        internal static XmlSchemaObjectCollection CloneAttributes(XmlSchemaObjectCollection attributes)
        {
            if (HasAttributeQNameRef(attributes))
            {
                XmlSchemaObjectCollection newAttributes = attributes.Clone();
                XmlSchemaAttributeGroupRef attributeGroupRef;
                XmlSchemaAttributeGroupRef newAttGroupRef;
                XmlSchemaObject xso;
                XmlSchemaAttribute att;

                for (int i = 0; i < attributes.Count; i++)
                {
                    xso = attributes[i];
                    attributeGroupRef = xso as XmlSchemaAttributeGroupRef;
                    if (attributeGroupRef != null)
                    {
                        newAttGroupRef = (XmlSchemaAttributeGroupRef)attributeGroupRef.Clone();
                        newAttGroupRef.RefName = attributeGroupRef.RefName.Clone();
                        newAttributes[i] = newAttGroupRef;
                    }
                    else
                    { //Its XmlSchemaAttribute
                        att = xso as XmlSchemaAttribute;
                        if (!att.RefName.IsEmpty || !att.SchemaTypeName.IsEmpty)
                        {
                            newAttributes[i] = att.Clone();
                        }
                    }
                }
                return newAttributes;
            }
            return attributes;
        }

        private static XmlSchemaObjectCollection CloneGroupBaseParticles(XmlSchemaObjectCollection groupBaseParticles, XmlSchema parentSchema)
        {
            XmlSchemaObjectCollection newParticles = groupBaseParticles.Clone();

            for (int i = 0; i < groupBaseParticles.Count; i++)
            {
                XmlSchemaParticle p = (XmlSchemaParticle)groupBaseParticles[i];
                newParticles[i] = CloneParticle(p, parentSchema);
            }
            return newParticles;
        }

        /// <summary>
        /// Recursively clones the QName-bearing parts of a particle tree: group bases
        /// (choice/sequence/all) have their items cloned, group refs have their RefName cloned,
        /// and elements are cloned when they reference by name or would inherit the chameleon
        /// target namespace (effective qualified form).
        /// </summary>
        internal static XmlSchemaParticle CloneParticle(XmlSchemaParticle particle, XmlSchema parentSchema)
        {
            XmlSchemaGroupBase groupBase = particle as XmlSchemaGroupBase;
            if (groupBase != null)
            { //Choice or sequence
                XmlSchemaGroupBase newGroupBase = groupBase;

                XmlSchemaObjectCollection newGroupbaseParticles = CloneGroupBaseParticles(groupBase.Items, parentSchema);
                newGroupBase = (XmlSchemaGroupBase)groupBase.Clone();
                newGroupBase.SetItems(newGroupbaseParticles);
                return newGroupBase;
            }
            else if (particle is XmlSchemaGroupRef)
            { // group ref
                XmlSchemaGroupRef newGroupRef = (XmlSchemaGroupRef)particle.Clone();
                newGroupRef.RefName = newGroupRef.RefName.Clone();
                return newGroupRef;
            }
            else
            {
                XmlSchemaElement oldElem = particle as XmlSchemaElement;
                // If the particle is an element and one of the following is true:
                //   - it references another element by name
                //   - it references its type by name
                //   - it's form (effective) is qualified (meaning it will inherint namespace from chameleon includes if that happens)
                // then the element itself needs to be cloned.
                if (oldElem != null && (!oldElem.RefName.IsEmpty || !oldElem.SchemaTypeName.IsEmpty ||
                    GetResolvedElementForm(parentSchema, oldElem) == XmlSchemaForm.Qualified))
                {
                    XmlSchemaElement newElem = (XmlSchemaElement)oldElem.Clone(parentSchema);
                    return newElem;
                }
            }
            return particle;
        }

        // This method returns the effective value of the "element form" for the specified element in the specified
        // parentSchema. Element form is either qualified, unqualified or none. If it's qualified it means that
        // if the element doesn't declare its own namespace the targetNamespace of the schema is used instead.
        // The element form can be either specified on the element itself via the "form" attribute or
        // if that one is not present its inheritted from the value of the elementFormDefault attribute on the owning
        // schema.
        private static XmlSchemaForm GetResolvedElementForm(XmlSchema parentSchema, XmlSchemaElement element)
        {
            if (element.Form == XmlSchemaForm.None && parentSchema != null)
            {
                return parentSchema.ElementFormDefault;
            }
            else
            {
                return element.Form;
            }
        }

        /// <summary>
        /// True when the particle tree contains anything CloneParticle would need to clone —
        /// used as a cheap pre-check so Clone(XmlSchema) can skip the deep copy entirely.
        /// </summary>
        internal static bool HasParticleRef(XmlSchemaParticle particle, XmlSchema parentSchema)
        {
            XmlSchemaGroupBase groupBase = particle as XmlSchemaGroupBase;
            if (groupBase != null)
            {
                bool foundRef = false;
                int i = 0;
                while (i < groupBase.Items.Count && !foundRef)
                {
                    XmlSchemaParticle p = (XmlSchemaParticle)groupBase.Items[i++];
                    if (p is XmlSchemaGroupRef)
                    {
                        foundRef = true;
                    }
                    else
                    {
                        XmlSchemaElement elem = p as XmlSchemaElement;
                        // This is the same condition as in the CloneParticle method
                        //   that's on purpose. This method is used to determine if we need to clone the whole particle.
                        //   If we do, then the CloneParticle is called and it will try to clone only
                        //   those elements which need cloning - and those are the ones matching this condition.
                        if (elem != null && (!elem.RefName.IsEmpty || !elem.SchemaTypeName.IsEmpty ||
                            GetResolvedElementForm(parentSchema, elem) == XmlSchemaForm.Qualified))
                        {
                            foundRef = true;
                        }
                        else
                        {
                            foundRef = HasParticleRef(p, parentSchema);
                        }
                    }
                }
                return foundRef;
            }
            else if (particle is XmlSchemaGroupRef)
            {
                return true;
            }
            return false;
        }

        /// <summary>
        /// True when the attribute collection contains an attribute-group ref or an attribute
        /// that references another attribute or its type by name.
        /// </summary>
        internal static bool HasAttributeQNameRef(XmlSchemaObjectCollection attributes)
        {
            for (int i = 0; i < attributes.Count; ++i)
            {
                if (attributes[i] is XmlSchemaAttributeGroupRef)
                {
                    return true;
                }
                else
                {
                    XmlSchemaAttribute attribute = attributes[i] as XmlSchemaAttribute;
                    if (!attribute.RefName.IsEmpty || !attribute.SchemaTypeName.IsEmpty)
                    {
                        return true;
                    }
                }
            }
            return false;
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Text; using log4net; using Nini.Config; using Mono.Addins; using OpenMetaverse; using OpenSim.Framework; using OpenSim.Framework.Console; using OpenSim.Framework.Servers; using OpenSim.Framework.Servers.HttpServer; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using Caps=OpenSim.Framework.Capabilities.Caps; namespace OpenSim.Region.CoreModules.Framework { [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "CapabilitiesModule")] public class CapabilitiesModule : INonSharedRegionModule, ICapabilitiesModule { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private string m_showCapsCommandFormat = " {0,-38} {1,-60}\n"; protected Scene m_scene; /// <summary> /// Each agent has its own capabilities handler. /// </summary> protected Dictionary<uint, Caps> m_capsObjects = new Dictionary<uint, Caps>(); protected Dictionary<UUID, string> m_capsPaths = new Dictionary<UUID, string>(); protected Dictionary<UUID, Dictionary<ulong, string>> m_childrenSeeds = new Dictionary<UUID, Dictionary<ulong, string>>(); public void Initialise(IConfigSource source) { } public void AddRegion(Scene scene) { m_scene = scene; m_scene.RegisterModuleInterface<ICapabilitiesModule>(this); MainConsole.Instance.Commands.AddCommand( "Comms", false, "show caps list", "show caps list", "Shows list of registered capabilities for users.", HandleShowCapsListCommand); MainConsole.Instance.Commands.AddCommand( "Comms", false, "show caps stats by user", "show caps stats by user [<first-name> <last-name>]", "Shows statistics on capabilities use by user.", "If a user name is given, then prints a detailed breakdown of caps use ordered by number of requests received.", HandleShowCapsStatsByUserCommand); MainConsole.Instance.Commands.AddCommand( "Comms", false, "show 
caps stats by cap", "show caps stats by cap [<cap-name>]", "Shows statistics on capabilities use by capability.", "If a capability name is given, then prints a detailed breakdown of use by each user.", HandleShowCapsStatsByCapCommand); } public void RegionLoaded(Scene scene) { } public void RemoveRegion(Scene scene) { m_scene.UnregisterModuleInterface<ICapabilitiesModule>(this); } public void PostInitialise() { } public void Close() {} public string Name { get { return "Capabilities Module"; } } public Type ReplaceableInterface { get { return null; } } public void CreateCaps(UUID agentId, uint circuitCode) { int ts = Util.EnvironmentTickCount(); /* this as no business here... * must be done elsewhere ( and is ) int flags = m_scene.GetUserFlags(agentId); m_log.ErrorFormat("[CreateCaps]: banCheck {0} ", Util.EnvironmentTickCountSubtract(ts)); if (m_scene.RegionInfo.EstateSettings.IsBanned(agentId, flags)) return; */ Caps caps; String capsObjectPath = GetCapsPath(agentId); lock (m_capsObjects) { if (m_capsObjects.ContainsKey(circuitCode)) { Caps oldCaps = m_capsObjects[circuitCode]; if (capsObjectPath == oldCaps.CapsObjectPath) { // m_log.WarnFormat( // "[CAPS]: Reusing caps for agent {0} in region {1}. Old caps path {2}, new caps path {3}. ", // agentId, m_scene.RegionInfo.RegionName, oldCaps.CapsObjectPath, capsObjectPath); return; } else { // not reusing add extra melanie cleanup // Remove tge handlers. They may conflict with the // new object created below oldCaps.DeregisterHandlers(); // Better safe ... should not be needed but also // no big deal m_capsObjects.Remove(circuitCode); } } // m_log.DebugFormat( // "[CAPS]: Adding capabilities for agent {0} in {1} with path {2}", // agentId, m_scene.RegionInfo.RegionName, capsObjectPath); caps = new Caps(MainServer.Instance, m_scene.RegionInfo.ExternalHostName, (MainServer.Instance == null) ? 
0: MainServer.Instance.Port, capsObjectPath, agentId, m_scene.RegionInfo.RegionName); m_log.DebugFormat("[CreateCaps]: new caps agent {0}, circuit {1}, path {2}, time {3} ",agentId, circuitCode,caps.CapsObjectPath, Util.EnvironmentTickCountSubtract(ts)); m_capsObjects[circuitCode] = caps; } m_scene.EventManager.TriggerOnRegisterCaps(agentId, caps); // m_log.ErrorFormat("[CreateCaps]: end {0} ", Util.EnvironmentTickCountSubtract(ts)); } public void RemoveCaps(UUID agentId, uint circuitCode) { m_log.DebugFormat("[CAPS]: Remove caps for agent {0} in region {1}", agentId, m_scene.RegionInfo.RegionName); lock (m_childrenSeeds) { if (m_childrenSeeds.ContainsKey(agentId)) { m_childrenSeeds.Remove(agentId); } } lock (m_capsObjects) { if (m_capsObjects.ContainsKey(circuitCode)) { m_capsObjects[circuitCode].DeregisterHandlers(); m_scene.EventManager.TriggerOnDeregisterCaps(agentId, m_capsObjects[circuitCode]); m_capsObjects.Remove(circuitCode); } else { foreach (KeyValuePair<uint, Caps> kvp in m_capsObjects) { if (kvp.Value.AgentID == agentId) { kvp.Value.DeregisterHandlers(); m_scene.EventManager.TriggerOnDeregisterCaps(agentId, kvp.Value); m_capsObjects.Remove(kvp.Key); return; } } m_log.WarnFormat( "[CAPS]: Received request to remove CAPS handler for root agent {0} in {1}, but no such CAPS handler found!", agentId, m_scene.RegionInfo.RegionName); } } } public Caps GetCapsForUser(uint circuitCode) { lock (m_capsObjects) { if (m_capsObjects.ContainsKey(circuitCode)) { return m_capsObjects[circuitCode]; } } return null; } public void ActivateCaps(uint circuitCode) { lock (m_capsObjects) { if (m_capsObjects.ContainsKey(circuitCode)) { m_capsObjects[circuitCode].Activate(); } } } public void SetAgentCapsSeeds(AgentCircuitData agent) { lock (m_capsPaths) m_capsPaths[agent.AgentID] = agent.CapsPath; lock (m_childrenSeeds) m_childrenSeeds[agent.AgentID] = ((agent.ChildrenCapSeeds == null) ? 
new Dictionary<ulong, string>() : agent.ChildrenCapSeeds); } public string GetCapsPath(UUID agentId) { lock (m_capsPaths) { if (m_capsPaths.ContainsKey(agentId)) { return m_capsPaths[agentId]; } } return null; } public Dictionary<ulong, string> GetChildrenSeeds(UUID agentID) { Dictionary<ulong, string> seeds = null; lock (m_childrenSeeds) if (m_childrenSeeds.TryGetValue(agentID, out seeds)) return seeds; return new Dictionary<ulong, string>(); } public void DropChildSeed(UUID agentID, ulong handle) { Dictionary<ulong, string> seeds; lock (m_childrenSeeds) { if (m_childrenSeeds.TryGetValue(agentID, out seeds)) { seeds.Remove(handle); } } } public string GetChildSeed(UUID agentID, ulong handle) { Dictionary<ulong, string> seeds; string returnval; lock (m_childrenSeeds) { if (m_childrenSeeds.TryGetValue(agentID, out seeds)) { if (seeds.TryGetValue(handle, out returnval)) return returnval; } } return null; } public void SetChildrenSeed(UUID agentID, Dictionary<ulong, string> seeds) { //m_log.DebugFormat(" !!! 
Setting child seeds in {0} to {1}", m_scene.RegionInfo.RegionName, seeds.Count); lock (m_childrenSeeds) m_childrenSeeds[agentID] = seeds; } public void DumpChildrenSeeds(UUID agentID) { m_log.Info("================ ChildrenSeed "+m_scene.RegionInfo.RegionName+" ================"); lock (m_childrenSeeds) { foreach (KeyValuePair<ulong, string> kvp in m_childrenSeeds[agentID]) { uint x, y; Util.RegionHandleToRegionLoc(kvp.Key, out x, out y); m_log.Info(" >> "+x+", "+y+": "+kvp.Value); } } } private void HandleShowCapsListCommand(string module, string[] cmdParams) { if (SceneManager.Instance.CurrentScene != null && SceneManager.Instance.CurrentScene != m_scene) return; StringBuilder capsReport = new StringBuilder(); capsReport.AppendFormat("Region {0}:\n", m_scene.RegionInfo.RegionName); lock (m_capsObjects) { foreach (KeyValuePair<uint, Caps> kvp in m_capsObjects) { capsReport.AppendFormat("** Circuit {0}:\n", kvp.Key); Caps caps = kvp.Value; for (IDictionaryEnumerator kvp2 = caps.CapsHandlers.GetCapsDetails(false, null).GetEnumerator(); kvp2.MoveNext(); ) { Uri uri = new Uri(kvp2.Value.ToString()); capsReport.AppendFormat(m_showCapsCommandFormat, kvp2.Key, uri.PathAndQuery); } foreach (KeyValuePair<string, PollServiceEventArgs> kvp2 in caps.GetPollHandlers()) capsReport.AppendFormat(m_showCapsCommandFormat, kvp2.Key, kvp2.Value.Url); foreach (KeyValuePair<string, string> kvp3 in caps.ExternalCapsHandlers) capsReport.AppendFormat(m_showCapsCommandFormat, kvp3.Key, kvp3.Value); } } MainConsole.Instance.Output(capsReport.ToString()); } private void HandleShowCapsStatsByCapCommand(string module, string[] cmdParams) { if (SceneManager.Instance.CurrentScene != null && SceneManager.Instance.CurrentScene != m_scene) return; if (cmdParams.Length != 5 && cmdParams.Length != 6) { MainConsole.Instance.Output("Usage: show caps stats by cap [<cap-name>]"); return; } StringBuilder sb = new StringBuilder(); sb.AppendFormat("Region {0}:\n", m_scene.Name); if (cmdParams.Length == 5) { 
BuildSummaryStatsByCapReport(sb); } else if (cmdParams.Length == 6) { BuildDetailedStatsByCapReport(sb, cmdParams[5]); } MainConsole.Instance.Output(sb.ToString()); } private void BuildDetailedStatsByCapReport(StringBuilder sb, string capName) { /* sb.AppendFormat("Capability name {0}\n", capName); ConsoleDisplayTable cdt = new ConsoleDisplayTable(); cdt.AddColumn("User Name", 34); cdt.AddColumn("Req Received", 12); cdt.AddColumn("Req Handled", 12); cdt.Indent = 2; Dictionary<string, int> receivedStats = new Dictionary<string, int>(); Dictionary<string, int> handledStats = new Dictionary<string, int>(); m_scene.ForEachScenePresence( sp => { Caps caps = m_scene.CapsModule.GetCapsForUser(sp.UUID); if (caps == null) return; Dictionary<string, IRequestHandler> capsHandlers = caps.CapsHandlers.GetCapsHandlers(); IRequestHandler reqHandler; if (capsHandlers.TryGetValue(capName, out reqHandler)) { receivedStats[sp.Name] = reqHandler.RequestsReceived; handledStats[sp.Name] = reqHandler.RequestsHandled; } else { PollServiceEventArgs pollHandler = null; if (caps.TryGetPollHandler(capName, out pollHandler)) { receivedStats[sp.Name] = pollHandler.RequestsReceived; handledStats[sp.Name] = pollHandler.RequestsHandled; } } } ); foreach (KeyValuePair<string, int> kvp in receivedStats.OrderByDescending(kp => kp.Value)) { cdt.AddRow(kvp.Key, kvp.Value, handledStats[kvp.Key]); } sb.Append(cdt.ToString()); */ } private void BuildSummaryStatsByCapReport(StringBuilder sb) { /* ConsoleDisplayTable cdt = new ConsoleDisplayTable(); cdt.AddColumn("Name", 34); cdt.AddColumn("Req Received", 12); cdt.AddColumn("Req Handled", 12); cdt.Indent = 2; Dictionary<string, int> receivedStats = new Dictionary<string, int>(); Dictionary<string, int> handledStats = new Dictionary<string, int>(); m_scene.ForEachScenePresence( sp => { Caps caps = m_scene.CapsModule.GetCapsForUser(sp.UUID); if (caps == null) return; foreach (IRequestHandler reqHandler in caps.CapsHandlers.GetCapsHandlers().Values) { string 
reqName = reqHandler.Name ?? ""; if (!receivedStats.ContainsKey(reqName)) { receivedStats[reqName] = reqHandler.RequestsReceived; handledStats[reqName] = reqHandler.RequestsHandled; } else { receivedStats[reqName] += reqHandler.RequestsReceived; handledStats[reqName] += reqHandler.RequestsHandled; } } foreach (KeyValuePair<string, PollServiceEventArgs> kvp in caps.GetPollHandlers()) { string name = kvp.Key; PollServiceEventArgs pollHandler = kvp.Value; if (!receivedStats.ContainsKey(name)) { receivedStats[name] = pollHandler.RequestsReceived; handledStats[name] = pollHandler.RequestsHandled; } else { receivedStats[name] += pollHandler.RequestsReceived; handledStats[name] += pollHandler.RequestsHandled; } } } ); foreach (KeyValuePair<string, int> kvp in receivedStats.OrderByDescending(kp => kp.Value)) cdt.AddRow(kvp.Key, kvp.Value, handledStats[kvp.Key]); sb.Append(cdt.ToString()); */ } private void HandleShowCapsStatsByUserCommand(string module, string[] cmdParams) { /* if (SceneManager.Instance.CurrentScene != null && SceneManager.Instance.CurrentScene != m_scene) return; if (cmdParams.Length != 5 && cmdParams.Length != 7) { MainConsole.Instance.Output("Usage: show caps stats by user [<first-name> <last-name>]"); return; } StringBuilder sb = new StringBuilder(); sb.AppendFormat("Region {0}:\n", m_scene.Name); if (cmdParams.Length == 5) { BuildSummaryStatsByUserReport(sb); } else if (cmdParams.Length == 7) { string firstName = cmdParams[5]; string lastName = cmdParams[6]; ScenePresence sp = m_scene.GetScenePresence(firstName, lastName); if (sp == null) return; BuildDetailedStatsByUserReport(sb, sp); } MainConsole.Instance.Output(sb.ToString()); */ } private void BuildDetailedStatsByUserReport(StringBuilder sb, ScenePresence sp) { /* sb.AppendFormat("Avatar name {0}, type {1}\n", sp.Name, sp.IsChildAgent ? 
"child" : "root"); ConsoleDisplayTable cdt = new ConsoleDisplayTable(); cdt.AddColumn("Cap Name", 34); cdt.AddColumn("Req Received", 12); cdt.AddColumn("Req Handled", 12); cdt.Indent = 2; Caps caps = m_scene.CapsModule.GetCapsForUser(sp.UUID); if (caps == null) return; List<CapTableRow> capRows = new List<CapTableRow>(); foreach (IRequestHandler reqHandler in caps.CapsHandlers.GetCapsHandlers().Values) capRows.Add(new CapTableRow(reqHandler.Name, reqHandler.RequestsReceived, reqHandler.RequestsHandled)); foreach (KeyValuePair<string, PollServiceEventArgs> kvp in caps.GetPollHandlers()) capRows.Add(new CapTableRow(kvp.Key, kvp.Value.RequestsReceived, kvp.Value.RequestsHandled)); foreach (CapTableRow ctr in capRows.OrderByDescending(ctr => ctr.RequestsReceived)) cdt.AddRow(ctr.Name, ctr.RequestsReceived, ctr.RequestsHandled); sb.Append(cdt.ToString()); */ } private void BuildSummaryStatsByUserReport(StringBuilder sb) { /* ConsoleDisplayTable cdt = new ConsoleDisplayTable(); cdt.AddColumn("Name", 32); cdt.AddColumn("Type", 5); cdt.AddColumn("Req Received", 12); cdt.AddColumn("Req Handled", 12); cdt.Indent = 2; m_scene.ForEachScenePresence( sp => { Caps caps = m_scene.CapsModule.GetCapsForUser(sp.UUID); if (caps == null) return; Dictionary<string, IRequestHandler> capsHandlers = caps.CapsHandlers.GetCapsHandlers(); int totalRequestsReceived = 0; int totalRequestsHandled = 0; foreach (IRequestHandler reqHandler in capsHandlers.Values) { totalRequestsReceived += reqHandler.RequestsReceived; totalRequestsHandled += reqHandler.RequestsHandled; } Dictionary<string, PollServiceEventArgs> capsPollHandlers = caps.GetPollHandlers(); foreach (PollServiceEventArgs handler in capsPollHandlers.Values) { totalRequestsReceived += handler.RequestsReceived; totalRequestsHandled += handler.RequestsHandled; } cdt.AddRow(sp.Name, sp.IsChildAgent ? 
"child" : "root", totalRequestsReceived, totalRequestsHandled); } ); sb.Append(cdt.ToString()); */ } private class CapTableRow { public string Name { get; set; } public int RequestsReceived { get; set; } public int RequestsHandled { get; set; } public CapTableRow(string name, int requestsReceived, int requestsHandled) { Name = name; RequestsReceived = requestsReceived; RequestsHandled = requestsHandled; } } } }
// 
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
// 
// All rights reserved.
// 
// Redistribution and use in source and binary forms, with or without 
// modification, are permitted provided that the following conditions 
// are met:
// 
// * Redistributions of source code must retain the above copyright notice, 
//   this list of conditions and the following disclaimer. 
// 
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution. 
// 
// * Neither the name of Jaroslaw Kowalski nor the names of its 
//   contributors may be used to endorse or promote products derived from this
//   software without specific prior written permission. 
// 
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 
// THE POSSIBILITY OF SUCH DAMAGE.
// 

namespace NLog.Layouts
{
    using System;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.Globalization;
    using System.Text;
    using NLog.Config;

    /// <summary>
    /// A specialized layout that renders CSV-formatted events.
    /// </summary>
    [Layout("CsvLayout")]
    [ThreadAgnostic]
    [AppDomainFixedOutput]
    public class CsvLayout : LayoutWithHeaderAndFooter
    {
        // Column delimiter resolved from Delimiter/CustomColumnDelimiter
        // during InitializeLayout().
        private string actualColumnDelimiter;

        // QuoteChar doubled; used to escape embedded quote characters.
        private string doubleQuoteChar;

        // Characters that trigger quoting in Auto mode: the quote char,
        // CR, LF and the active column delimiter.
        private char[] quotableCharacters;

        /// <summary>
        /// Initializes a new instance of the <see cref="CsvLayout"/> class.
        /// </summary>
        public CsvLayout()
        {
            this.Columns = new List<CsvColumn>();
            this.WithHeader = true;
            this.Delimiter = CsvColumnDelimiterMode.Auto;
            this.Quoting = CsvQuotingMode.Auto;
            this.QuoteChar = "\"";
            this.Layout = this;
            this.Header = new CsvHeaderLayout(this);
            this.Footer = null;
        }

        /// <summary>
        /// Gets the array of parameters to be passed.
        /// </summary>
        /// <docgen category='CSV Options' order='10' />
        [ArrayParameter(typeof(CsvColumn), "column")]
        public IList<CsvColumn> Columns { get; private set; }

        /// <summary>
        /// Gets or sets a value indicating whether CSV should include header.
        /// </summary>
        /// <value>A value of <c>true</c> if CSV should include header; otherwise, <c>false</c>.</value>
        /// <docgen category='CSV Options' order='10' />
        public bool WithHeader { get; set; }

        /// <summary>
        /// Gets or sets the column delimiter.
        /// </summary>
        /// <docgen category='CSV Options' order='10' />
        [DefaultValue("Auto")]
        public CsvColumnDelimiterMode Delimiter { get; set; }

        /// <summary>
        /// Gets or sets the quoting mode.
        /// </summary>
        /// <docgen category='CSV Options' order='10' />
        [DefaultValue("Auto")]
        public CsvQuotingMode Quoting { get; set; }

        /// <summary>
        /// Gets or sets the quote Character.
        /// </summary>
        /// <docgen category='CSV Options' order='10' />
        [DefaultValue("\"")]
        public string QuoteChar { get; set; }

        /// <summary>
        /// Gets or sets the custom column delimiter value (valid when ColumnDelimiter is set to 'Custom').
        /// </summary>
        /// <docgen category='CSV Options' order='10' />
        public string CustomColumnDelimiter { get; set; }

        /// <summary>
        /// Initializes the layout: resolves the actual delimiter string and
        /// precomputes the quoting helpers.
        /// </summary>
        protected override void InitializeLayout()
        {
            base.InitializeLayout();
            if (!this.WithHeader)
            {
                this.Header = null;
            }

            switch (this.Delimiter)
            {
                case CsvColumnDelimiterMode.Auto:
                    // Auto follows the current culture's list separator.
                    this.actualColumnDelimiter = CultureInfo.CurrentCulture.TextInfo.ListSeparator;
                    break;

                case CsvColumnDelimiterMode.Comma:
                    this.actualColumnDelimiter = ",";
                    break;

                case CsvColumnDelimiterMode.Semicolon:
                    this.actualColumnDelimiter = ";";
                    break;

                case CsvColumnDelimiterMode.Pipe:
                    this.actualColumnDelimiter = "|";
                    break;

                case CsvColumnDelimiterMode.Tab:
                    this.actualColumnDelimiter = "\t";
                    break;

                case CsvColumnDelimiterMode.Space:
                    this.actualColumnDelimiter = " ";
                    break;

                case CsvColumnDelimiterMode.Custom:
                    this.actualColumnDelimiter = this.CustomColumnDelimiter;
                    break;
            }

            this.quotableCharacters = (this.QuoteChar + "\r\n" + this.actualColumnDelimiter).ToCharArray();
            this.doubleQuoteChar = this.QuoteChar + this.QuoteChar;
        }

        /// <summary>
        /// Formats the log event for write.
        /// </summary>
        /// <param name="logEvent">The log event to be formatted.</param>
        /// <returns>A string representation of the log event.</returns>
        protected override string GetFormattedMessage(LogEventInfo logEvent)
        {
            string cachedValue;
            if (logEvent.TryGetCachedLayoutValue(this, out cachedValue))
            {
                return cachedValue;
            }

            var sb = new StringBuilder();

            //Memory profiling pointed out that using a foreach-loop was allocating
            //an Enumerator. Switching to a for-loop avoids the memory allocation.
            for (int i = 0; i < this.Columns.Count; i++)
            {
                this.AppendColumnValue(sb, i, this.Columns[i].Layout.Render(logEvent));
            }

            return logEvent.AddCachedLayoutValue(this, sb.ToString());
        }

        /// <summary>
        /// Renders the header row: the column names, delimited and quoted
        /// with exactly the same rules as data rows.
        /// </summary>
        private string GetHeader()
        {
            var sb = new StringBuilder();

            //Memory profiling pointed out that using a foreach-loop was allocating
            //an Enumerator. Switching to a for-loop avoids the memory allocation.
            for (int i = 0; i < this.Columns.Count; i++)
            {
                this.AppendColumnValue(sb, i, this.Columns[i].Name);
            }

            return sb.ToString();
        }

        /// <summary>
        /// Appends one column value to <paramref name="sb"/>, prefixing the
        /// column delimiter for every column but the first and applying the
        /// configured quoting/escaping rules.  Shared by
        /// <see cref="GetFormattedMessage"/> and <see cref="GetHeader"/> so
        /// data rows and the header row always agree.
        /// </summary>
        /// <param name="sb">Target buffer.</param>
        /// <param name="columnIndex">Zero-based column position.</param>
        /// <param name="text">Raw column text (rendered value or column name).</param>
        private void AppendColumnValue(StringBuilder sb, int columnIndex, string text)
        {
            if (columnIndex != 0)
            {
                sb.Append(this.actualColumnDelimiter);
            }

            bool useQuoting;
            switch (this.Quoting)
            {
                case CsvQuotingMode.Nothing:
                    useQuoting = false;
                    break;

                case CsvQuotingMode.All:
                    useQuoting = true;
                    break;

                default:
                case CsvQuotingMode.Auto:
                    // Quote only when the text contains the quote char, a
                    // newline or the delimiter.
                    useQuoting = text.IndexOfAny(this.quotableCharacters) >= 0;
                    break;
            }

            if (useQuoting)
            {
                sb.Append(this.QuoteChar);
                sb.Append(text.Replace(this.QuoteChar, this.doubleQuoteChar));
                sb.Append(this.QuoteChar);
            }
            else
            {
                sb.Append(text);
            }
        }

        /// <summary>
        /// Header for CSV layout.
        /// </summary>
        [ThreadAgnostic]
        private class CsvHeaderLayout : Layout
        {
            private CsvLayout parent;

            /// <summary>
            /// Initializes a new instance of the <see cref="CsvHeaderLayout"/> class.
            /// </summary>
            /// <param name="parent">The parent.</param>
            public CsvHeaderLayout(CsvLayout parent)
            {
                this.parent = parent;
            }

            /// <summary>
            /// Renders the layout for the specified logging event by invoking layout renderers.
            /// </summary>
            /// <param name="logEvent">The logging event.</param>
            /// <returns>The rendered layout.</returns>
            protected override string GetFormattedMessage(LogEventInfo logEvent)
            {
                string cached;
                if (logEvent.TryGetCachedLayoutValue(this, out cached))
                {
                    return cached;
                }

                return logEvent.AddCachedLayoutValue(this, this.parent.GetHeader());
            }
        }
    }
}
// ****************************************************************
// Copyright 2008, Charlie Poole
// This is free software licensed under the NUnit license. You may
// obtain a copy of the license at http://nunit.org.
// ****************************************************************

using System;
using System.IO;
using System.Reflection;
using BF = System.Reflection.BindingFlags;

namespace NUnit.Core
{
	/// <summary>
	/// Proxy class for operations on a real log4net appender,
	/// allowing NUnit to work with multiple versions of log4net
	/// and to fail gracefully if no log4net assembly is present.
	/// All log4net access is via reflection so no compile-time
	/// dependency on log4net exists.
	/// </summary>
	public class Log4NetCapture : TextCapture
	{
		/// <summary>
		/// The TextWriter to which text is redirected
		/// </summary>
		private TextWriter writer;

		/// <summary>
		/// The threshold for capturing text. A value of "Off"
		/// means that no text is captured. A value of "All"
		/// should be taken to mean the highest possible level
		/// of verbosity supported by the derived class. The
		/// meaning of any other values is determined by the
		/// derived class.
		/// </summary>
		private LoggingThreshold threshold = LoggingThreshold.Off;

		// Reflection handles into the loaded log4net assembly; all remain
		// null when log4net cannot be loaded (see InitializeTypes).
		private Assembly log4netAssembly;
		private Type appenderType;
		private Type basicConfiguratorType;
		private object appender;
		// Set once InitializeTypes has run (successfully or not), so the
		// Assembly.Load attempt is made at most once.
		private bool isInitialized;

		// Layout codes that work for versions from
		// log4net 1.2.0.30714 to 1.2.10:
		//
		//   %a = domain friendly name
		//   %c = logger name (%c{1} = last component )
		//   %d = date and time
		//   %d{ABSOLUTE} = time only
		//   %l = source location of the error
		//   %m = message
		//   %n = newline
		//   %p = level
		//   %r = elapsed milliseconds since program start
		//   %t = thread
		//   %x = nested diagnostic content (NDC)
		private static readonly string logFormat =
			"%d{ABSOLUTE} %-5p [%4t] %c{1} [%x]- %m%n";

		/// <summary>
		/// Gets or sets the TextWriter to which text is redirected
		/// when captured. The value may only be changed when the
		/// logging threshold is set to "Off"
		/// </summary>
		public override TextWriter Writer
		{
			get { return writer; }
			set
			{
				// Changing the writer mid-capture would lose output, so it
				// is only allowed while capture is disabled.
				if (threshold != LoggingThreshold.Off)
					throw new System.InvalidOperationException(
						"Writer may not be changed while capture is enabled");

				writer = value;
			}
		}

		/// <summary>
		/// Gets or sets the capture threshold value, which represents
		/// the degree of verbosity of the output text stream.
		/// Derived classes may supply multiple levels of capture but
		/// must retain the use of the "Off" setting to represent
		/// no logging.
		/// </summary>
		public override LoggingThreshold Threshold
		{
			get { return threshold; }
			set
			{
				if (value != threshold)
				{
					bool turnOff = value == LoggingThreshold.Off;
					//bool turnOn = threshold == LoggingThreshold.Off;

					// NOTE(review): StopCapture is intentionally not called
					// when turning off — the appender keeps its writer and is
					// merely left at the old level until StartCapture runs
					// again.  Confirm this is the intended lifecycle.
					//if (turnOff) StopCapture();

					threshold = value;

					if (!turnOff)
						StartCapture();
				}
			}
		}

		// Pushes the current Threshold and Writer into the log4net appender
		// and (re)configures it.  Falls back to "Error" if the threshold
		// name is unknown to this log4net version.
		private void StartCapture()
		{
			if (IsLog4netAvailable)
			{
				string threshold = Threshold.ToString();
				if ( !SetLoggingThreshold( threshold ) )
					SetLoggingThreshold( "Error" );

				SetAppenderTextWriter( this.Writer );
				ConfigureAppender();
			}
		}

		// Re-applies the current threshold without touching the writer.
		// NOTE(review): no caller is visible in this file — possibly dead.
		private void ResumeCapture()
		{
			if (IsLog4netAvailable)
			{
				SetLoggingThreshold(Threshold.ToString());
				ConfigureAppender();
			}
		}

		// Flushes pending output and silences the appender by setting its
		// threshold to "Off".  NOTE(review): only referenced from the
		// commented-out line in the Threshold setter — currently unused.
		private void StopCapture()
		{
			if ( writer != null )
				writer.Flush();

			if ( appender != null )
			{
				SetLoggingThreshold( "Off" );
				//SetAppenderTextWriter( null );
			}
		}

		#region Private Properties and Methods
		// True when log4net was loaded and both required types were found.
		// Triggers the one-time reflection setup on first access.
		private bool IsLog4netAvailable
		{
			get
			{
				if (!isInitialized)
					InitializeTypes();

				return log4netAssembly != null && basicConfiguratorType != null && appenderType != null;
			}
		}

		// One-time reflection bootstrap: loads log4net, resolves the
		// TextWriterAppender and BasicConfigurator types and creates the
		// appender.  All failures are swallowed deliberately so that a
		// missing/incompatible log4net only disables capture.
		private void InitializeTypes()
		{
			try
			{
				log4netAssembly = Assembly.Load("log4net");

				if (log4netAssembly != null)
				{
					// throwOnError=false, ignoreCase=false: null on failure.
					appenderType = log4netAssembly.GetType(
						"log4net.Appender.TextWriterAppender", false, false);

					basicConfiguratorType = log4netAssembly.GetType(
						"log4net.Config.BasicConfigurator", false, false);

					appender = TryCreateAppender();
					if (appender != null)
						SetAppenderLogFormat(logFormat);
				}
			}
			catch
			{
				// Silent by design: any reflection/load failure simply
				// leaves IsLog4netAvailable false.
			}
			finally
			{
				isInitialized = true;
			}
		}

		/// <summary>
		/// Attempt to create a TextWriterAppender using reflection,
		/// failing silently if it is not possible.
		/// </summary>
		private object TryCreateAppender()
		{
			ConstructorInfo ctor = appenderType.GetConstructor( Type.EmptyTypes );
			object appender = ctor.Invoke( new object[0] );

			return appender;
		}

		// Builds a log4net PatternLayout from the format string and assigns
		// it to the appender's Layout property, all via reflection; each
		// step bails out quietly if the member is missing in this version.
		private void SetAppenderLogFormat( string logFormat )
		{
			Type patternLayoutType = log4netAssembly.GetType(
				"log4net.Layout.PatternLayout", false, false );
			if ( patternLayoutType == null ) return;

			ConstructorInfo ctor = patternLayoutType.GetConstructor( new Type[] { typeof(string) } );
			if ( ctor != null )
			{
				object patternLayout = ctor.Invoke( new object[] { logFormat } );

				if ( patternLayout != null )
				{
					PropertyInfo prop = appenderType.GetProperty( "Layout", BF.Public | BF.Instance | BF.SetProperty );
					if ( prop != null )
						prop.SetValue( appender, patternLayout, null );
				}
			}
		}

		// Sets the appender's Threshold property to the named log4net level
		// (looked up case-insensitively on the level type).  Returns false
		// when the property or the named level does not exist, letting the
		// caller fall back to another level.
		private bool SetLoggingThreshold( string threshold )
		{
			PropertyInfo prop = appenderType.GetProperty( "Threshold", BF.Public | BF.Instance | BF.SetProperty );
			if ( prop == null ) return false;

			Type levelType = prop.PropertyType;
			FieldInfo levelField = levelType.GetField( threshold, BF.Public | BF.Static | BF.IgnoreCase );
			if ( levelField == null ) return false;

			object level = levelField.GetValue( null );
			prop.SetValue( appender, level, null );
			return true;
		}

		// Points the appender's Writer property at the given TextWriter.
		private void SetAppenderTextWriter( TextWriter writer )
		{
			PropertyInfo prop = appenderType.GetProperty( "Writer", BF.Instance | BF.Public | BF.SetProperty );
			if ( prop != null )
				prop.SetValue( appender, writer, null );
		}

		// Invokes static BasicConfigurator.Configure(appender) so log4net
		// routes events to our appender.
		private void ConfigureAppender()
		{
			MethodInfo configureMethod = basicConfiguratorType.GetMethod( "Configure", new Type[] { appenderType } );
			if ( configureMethod != null )
				configureMethod.Invoke( null, new object[] { appender } );
		}
		#endregion
	}
}
/*
 * Copyright (c) 2014 All Rights Reserved by the SDL Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Management.Automation;
using System.Xml;
using System.Xml.Linq;
using Trisoft.ISHRemote.Objects;
using Trisoft.ISHRemote.Objects.Public;
using Trisoft.ISHRemote.Exceptions;
using Trisoft.ISHRemote.HelperClasses;

namespace Trisoft.ISHRemote.Cmdlets.Settings
{
    /// <summary>
    /// <para type="synopsis">This cmdlet compares two IshTypeFieldDefinition sets.</para>
    /// <para type="description">This cmdlet compares two IshTypeFieldDefinition sets allowing system compares (or even a compare with the deprecated TriDKXmlSetup format). The result is an IshTypeFieldDefinition with a Compare property indicating equal, different, left only or right only.
    /// Note that fields FDESCRIPTION and FCHANGES can have either type String or LongText, you can consider this false positives - this will only affect field filter operators.</para>
    /// </summary>
    /// <example>
    /// <code>
    /// $ishSessionA = New-IshSession -WsBaseUrl "https://example.com/ISHWSPROD/" -PSCredential "Admin"
    /// $ishSessionB = New-IshSession -WsBaseUrl "https://example.com/ISHWSTEST/" -PSCredential "Admin"
    /// Compare-IshTypeFieldDefinition -LeftIshSession $ishSessionA -RightIshSession $ishSessionB
    /// </code>
    /// <para>Compares incoming IshSession entries that are not equal, so indicating differences, left only and right only.</para>
    /// </example>
    /// <example>
    /// <code>
    /// $ishSessionA = New-IshSession -WsBaseUrl "https://example.com/ISHWSPROD/" -PSCredential "Admin"
    /// $ishSessionB = New-IshSession -WsBaseUrl "https://example.com/ISHWSTEST/" -PSCredential "Admin"
    /// Compare-IshTypeFieldDefinition -LeftIshSession $ishSessionA -RightIshSession $ishSessionB -ExcludeLeftUnique
    /// </code>
    /// <para>Compares incoming IshSession entries that are not equal, so indicating differences and right only changes compared to the $ishSessionA reference.</para>
    /// </example>
    /// <example>
    /// <code>
    /// $ishSession = New-IshSession -WsBaseUrl "https://example.com/ISHWSPROD/" -PSCredential "Admin"
    /// $ishTypeFieldDefinitions = Get-IshTypeFieldDefinition -TriDKXmlSetupFilePath $tempFilePath
    /// Compare-IshTypeFieldDefinition -LeftIshTypeFieldDefinition $ishTypeFieldDefinitions -RightIshSession $ishSession -ExcludeLeftUnique
    /// </code>
    /// <para>Compares provided reference TriDKXmlSetup export file with incoming IshSession and that lists differences and right only changes.</para>
    /// </example>
    /// <example>
    /// <code>
    /// $ishSession = New-IshSession -WsBaseUrl "https://example.com/ISHWSPROD/" -PSCredential "Admin"
    /// $ishTypeFieldDefinitions = Get-IshTypeFieldDefinition
    /// Compare-IshTypeFieldDefinition -LeftIshSession $ishSession -RightIshTypeFieldDefinition $ishTypeFieldDefinitions -IncludeIdentical -ExcludeDifferent
    /// </code>
    /// <para>Compares incoming IshSession and IshTypeFieldDefinitions (TriDKXmlSetup export file made available through Resource entry). The IncludeIdentical flag will also return matching rows, while the ExcludeDifferent flag will not return rows with differences.</para>
    /// </example>
    /// <example>
    /// <code>
    /// $ishSession = New-IshSession -WsBaseUrl "https://example.com/ISHWSPROD/" -PSCredential "Admin"
    /// $ishTypeFieldDefinitions = Get-IshTypeFieldDefinition
    /// Compare-IshTypeFieldDefinition -LeftIshTypeFieldDefinition $ishTypeFieldDefinitions -RightIshSession $ishSession -IncludeIdentical |
    /// Where-Object -Property Name -NotLike "FTEST*" |
    /// Out-GridView
    /// </code>
    /// <para>Compares reference IshTypeFieldDefinitions (TriDKXmlSetup export file made available through Resource entry) with incoming IshSession.
    /// The IncludeIdentical flag will also return matching rows, while the Where-Object clause filters out fields with a certain name.
    /// The PowerShell Out-GridView does a nice visual rendering in PowerShell ISE.</para>
    /// </example>
    [Cmdlet(VerbsData.Compare, "IshTypeFieldDefinition", SupportsShouldProcess = false)]
    [OutputType(typeof(IshTypeFieldDefinitionCompare))]
    public sealed class CompareIshTypeFieldDefinition : SettingsCmdlet
    {
        /// <summary>
        /// <para type="description">The reference object of type IshSession</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        [ValidateNotNullOrEmpty]
        public IshSession LeftIshSession { get; set; }

        /// <summary>
        /// <para type="description">The reference object of type IshTypeFieldDefinition array</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        [ValidateNotNullOrEmpty]
        public IshTypeFieldDefinition[] LeftIshTypeFieldDefinition { get; set; }

        /// <summary>
        /// <para type="description">The difference object of type IshSession</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        [ValidateNotNullOrEmpty]
        public IshSession RightIshSession { get; set; }

        /// <summary>
        /// <para type="description">The difference object of type IshTypeFieldDefinition array</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        [ValidateNotNullOrEmpty]
        public IshTypeFieldDefinition[] RightIshTypeFieldDefinition { get; set; }

        /// <summary>
        /// <para type="description">Display characteristics of compared objects that are equal.
        /// By default, only characteristics that differ between the left and right objects are displayed.</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        public SwitchParameter IncludeIdentical { get; set; }

        /// <summary>
        /// <para type="description">Stop displaying characteristics of compared objects that are different.</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        public SwitchParameter ExcludeDifferent { get; set; }

        /// <summary>
        /// <para type="description">Stop displaying characteristics of compared objects that are unique in the left reference object.</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        public SwitchParameter ExcludeLeftUnique { get; set; }

        /// <summary>
        /// <para type="description">Stop displaying characteristics of compared objects that are unique in the right difference object.</para>
        /// </summary>
        [Parameter(Mandatory = false, ValueFromPipelineByPropertyName = false)]
        public SwitchParameter ExcludeRightUnique { get; set; }

        // Field setups built from the Left.../Right... parameters during ProcessRecord.
        private IshTypeFieldSetup _leftIshTypeFieldSetup;
        private IshTypeFieldSetup _rightIshTypeFieldSetup;
        // Union of all left and right definition keys, sorted so the compare output order is predictable.
        private SortedDictionary<string, IshTypeFieldDefinition> _sortedSuperList;

        /// <summary>
        /// Validates that a Left and a Right input was supplied (session or definition array),
        /// builds both IshTypeFieldSetup instances, and runs the compare. Terminating errors
        /// are surfaced as ErrorRecords per cmdlet convention.
        /// </summary>
        protected override void ProcessRecord()
        {
            try
            {
                // Left side: the IshSession parameter wins over the explicit definition array.
                if (LeftIshSession != null)
                {
                    _leftIshTypeFieldSetup = new IshTypeFieldSetup(Logger, LeftIshSession.IshTypeFieldDefinition);
                }
                else if (LeftIshTypeFieldDefinition != null)
                {
                    _leftIshTypeFieldSetup = new IshTypeFieldSetup(Logger, LeftIshTypeFieldDefinition.ToList<IshTypeFieldDefinition>());
                }
                else
                {
                    throw new ArgumentException($"Missing incoming Left... parameter.");
                }
                WriteDebug($"Comparing _leftIshTypeFieldSetup[{_leftIshTypeFieldSetup.IshTypeFieldDefinition.Count}]");

                // Right side: same precedence as the left side.
                if (RightIshSession != null)
                {
                    _rightIshTypeFieldSetup = new IshTypeFieldSetup(Logger, RightIshSession.IshTypeFieldDefinition);
                }
                else if (RightIshTypeFieldDefinition != null)
                {
                    _rightIshTypeFieldSetup = new IshTypeFieldSetup(Logger, RightIshTypeFieldDefinition.ToList<IshTypeFieldDefinition>());
                }
                else
                {
                    throw new ArgumentException($"Missing incoming Right... parameter.");
                }
                WriteDebug($"Comparing _rightIshTypeFieldSetup[{_rightIshTypeFieldSetup.IshTypeFieldDefinition.Count}]");

                Compare();
            }
            catch (TrisoftAutomationException trisoftAutomationException)
            {
                ThrowTerminatingError(new ErrorRecord(trisoftAutomationException, base.GetType().Name, ErrorCategory.InvalidOperation, null));
            }
            catch (Exception exception)
            {
                ThrowTerminatingError(new ErrorRecord(exception, base.GetType().Name, ErrorCategory.NotSpecified, null));
            }
        }

        /// <summary>
        /// Walks the sorted superlist of keys, looks each key up on both sides, and writes
        /// IshTypeFieldDefinitionCompare objects to the pipeline according to the
        /// IncludeIdentical/ExcludeDifferent/ExcludeLeftUnique/ExcludeRightUnique switches.
        /// For a difference, the left row is written before the right row.
        /// </summary>
        private void Compare()
        {
            // created on sorted superlist of ISHType/Level/FieldName so that compares are happening in a predictable order
            CreateSuperSortedList();
            foreach (string key in _sortedSuperList.Keys)
            {
                IshTypeFieldDefinition left = _leftIshTypeFieldSetup.GetValue(key);
                IshTypeFieldDefinition right = _rightIshTypeFieldSetup.GetValue(key);
                if ((left != null) && (right != null))
                {
                    // Key present on both sides: identical or different.
                    int compareResult = left.CompareTo(right);
                    if ((compareResult == 0) && IncludeIdentical)
                    {
                        WriteObject(new IshTypeFieldDefinitionCompare(left, IshTypeFieldDefinitionCompare.Compare.Identical));
                    }
                    if ((compareResult != 0) && !ExcludeDifferent)
                    {
                        WriteObject(new IshTypeFieldDefinitionCompare(left, IshTypeFieldDefinitionCompare.Compare.LeftDifferent));
                        WriteObject(new IshTypeFieldDefinitionCompare(right, IshTypeFieldDefinitionCompare.Compare.RightDifferent));
                    }
                }
                else if ((left != null) && !ExcludeLeftUnique)
                {
                    WriteObject(new IshTypeFieldDefinitionCompare(left, IshTypeFieldDefinitionCompare.Compare.LeftOnly));
                }
                else if ((right != null) && !ExcludeRightUnique)
                {
                    WriteObject(new IshTypeFieldDefinitionCompare(right, IshTypeFieldDefinitionCompare.Compare.RightOnly));
                }
            }
            // for each entry in the superlist
            //   lookupLeft and lookupRight, and promote each to IshTypeFieldDefinitionCompare (with left or right enum)
            //   if (lookupLeft != null && lookupRight != null)
            //     CompareIshTypeFieldDefinition()
            //     if CompareIshTypeFieldDefinition() == true && IncludeIdentical
            //       WriteObject with equal enum
            //     if CompareIshTypeFieldDefinition() == false && !ExcludeDifferent
            //       WriteObject with diff enum
            //   else if (lookupLeft != null && !ExcludeLeftUnique)
            //     WriteObject with left enum
            //   else if (lookupRight != null && !ExcludeRightUnique)
            //     WriteObject with right enum
            //   else
            //     WriteDebug($"Comparing , in the sorted list but lookups fails.")
        }

        /// <summary>
        /// Builds _sortedSuperList as the union of the left and right definition keys;
        /// left entries are added first and right entries only when the key is not
        /// already present, so the stored value for a shared key is the left definition.
        /// </summary>
        private void CreateSuperSortedList()
        {
            _sortedSuperList = new SortedDictionary<string, IshTypeFieldDefinition>();
            foreach (var entry in _leftIshTypeFieldSetup.IshTypeFieldDefinition)
            {
                _sortedSuperList.Add(entry.Key, entry);
            }
            foreach (var entry in _rightIshTypeFieldSetup.IshTypeFieldDefinition)
            {
                if (!_sortedSuperList.ContainsKey(entry.Key))
                {
                    _sortedSuperList.Add(entry.Key, entry);
                }
            }
            WriteDebug($"Comparing _sortedSuperList[{_sortedSuperList.Keys.Count}]");
        }
    }
}
/********************************************************************++
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * --********************************************************************/

using System.Management.Automation.Internal;
using System.Management.Automation.Runspaces;
using System.Management.Automation.Runspaces.Internal;
using System.Management.Automation.Remoting.Client;

using Dbg = System.Management.Automation.Diagnostics;

namespace System.Management.Automation.Remoting
{
    /// <summary>
    /// Implements ClientRemoteSessionDataStructureHandler: wires a client remote
    /// session's transport manager events to the session state machine, performs
    /// negotiation, URI redirection, key exchange, and dispatches received data to
    /// the session, runspace pool, or powershell layers.
    /// </summary>
    internal class ClientRemoteSessionDSHandlerImpl : ClientRemoteSessionDataStructureHandler, IDisposable
    {
        [TraceSourceAttribute("CRSDSHdlerImpl", "ClientRemoteSessionDSHandlerImpl")]
        private static PSTraceSource s_trace = PSTraceSource.GetTracer("CRSDSHdlerImpl", "ClientRemoteSessionDSHandlerImpl");
        private const string resBaseName = "remotingerroridstrings";
        private BaseClientSessionTransportManager _transportManager;
        private ClientRemoteSessionDSHandlerStateMachine _stateMachine;
        private ClientRemoteSession _session;
        private RunspaceConnectionInfo _connectionInfo;
        // used for connection redirection.
        private Uri _redirectUri;
        // Remaining redirections allowed; only set (from WSManConnectionInfo) when the transport is WSMan.
        private int _maxUriRedirectionCount;
        private bool _isCloseCalled;
        // Guards _isCloseCalled and the close/redirect transitions.
        private object _syncObject = new object();
        private PSRemotingCryptoHelper _cryptoHelper;
        private ClientRemoteSession.URIDirectionReported _uriRedirectionHandler;

        internal override BaseClientSessionTransportManager TransportManager
        {
            get
            {
                return _transportManager;
            }
        }

        /// <summary>
        /// Creates a command transport manager for <paramref name="cmd"/> on top of the
        /// session transport manager and subscribes this handler to its data.
        /// </summary>
        internal override BaseClientCommandTransportManager CreateClientCommandTransportManager(
            System.Management.Automation.Runspaces.Internal.ClientRemotePowerShell cmd,
            bool noInput)
        {
            BaseClientCommandTransportManager cmdTransportMgr =
                _transportManager.CreateClientCommandTransportManager(_connectionInfo, cmd, noInput);
            // listen to data ready events.
            cmdTransportMgr.DataReceived += DispatchInputQueueData;

            return cmdTransportMgr;
        }

        #region constructors

        /// <summary>
        /// Creates an instance of ClientRemoteSessionDSHandlerImpl: builds the state
        /// machine, creates the session transport manager from the connection info, and
        /// hooks all transport events. URI redirection is only enabled for WSMan
        /// connections.
        /// </summary>
        internal ClientRemoteSessionDSHandlerImpl(ClientRemoteSession session,
            PSRemotingCryptoHelper cryptoHelper,
            RunspaceConnectionInfo connectionInfo,
            ClientRemoteSession.URIDirectionReported uriRedirectionHandler)
        {
            Dbg.Assert(_maxUriRedirectionCount >= 0, "maxUriRedirectionCount cannot be less than 0.");

            if (session == null)
            {
                throw PSTraceSource.NewArgumentNullException("session");
            }

            _session = session;

            //Create state machine
            _stateMachine = new ClientRemoteSessionDSHandlerStateMachine();
            _stateMachine.StateChanged += HandleStateChanged;

            _connectionInfo = connectionInfo;

            // Create transport manager
            _cryptoHelper = cryptoHelper;
            _transportManager = _connectionInfo.CreateClientSessionTransportManager(
                _session.RemoteRunspacePoolInternal.InstanceId,
                _session.RemoteRunspacePoolInternal.Name,
                cryptoHelper);

            _transportManager.DataReceived += DispatchInputQueueData;
            _transportManager.WSManTransportErrorOccured += HandleTransportError;
            _transportManager.CloseCompleted += HandleCloseComplete;
            _transportManager.DisconnectCompleted += HandleDisconnectComplete;
            _transportManager.ReconnectCompleted += HandleReconnectComplete;

            _transportManager.RobustConnectionNotification +=
                new EventHandler<ConnectionStatusEventArgs>(HandleRobustConnectionNotification);

            WSManConnectionInfo wsmanConnectionInfo = _connectionInfo as WSManConnectionInfo;
            if (null != wsmanConnectionInfo)
            {
                // only WSMan transport supports redirection

                // store the uri redirection handler and authmechanism
                // for uri redirection.
                _uriRedirectionHandler = uriRedirectionHandler;
                _maxUriRedirectionCount = wsmanConnectionInfo.MaximumConnectionRedirectionCount;
            }
        }

        #endregion constructors

        #region create

        /// <summary>
        /// Makes a create call asynchronously.
        /// </summary>
        internal override void CreateAsync()
        {
            // errors are reported through WSManTransportErrorOccured event on
            // the transport manager.
            _transportManager.CreateCompleted += HandleCreateComplete;
            _transportManager.CreateAsync();
        }

        /// <summary>
        /// This callback is called on complete of async connect call
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private void HandleCreateComplete(object sender, EventArgs args)
        {
            // This is a no-op at the moment..as we dont need to inform anything to
            // state machine here..StateMachine must already have reached NegotiationSent
            // state and waiting for Negotiation Received which will happen only from
            // DataReceived event.
        }

        private void HandleConnectComplete(object sender, EventArgs args)
        {
            //No-OP. Once the negotiation messages are exchanged and the session gets into established state,
            //it will take care of spawning the receive operation on the connected session

            // There is however a caveat.
            // A rogue remote server, if it does not send the required negotiation data in the Connect Response,
            // then the state machine can never get into the established state and the runspace can never get into a opened state
            // Living with this for now.
        }

        #endregion create

        #region disconnect

        internal override void DisconnectAsync()
        {
            _transportManager.DisconnectAsync();
        }

        private void HandleDisconnectComplete(object sender, EventArgs args)
        {
            //Set statemachine event
            RemoteSessionStateMachineEventArgs disconnectCompletedArg = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.DisconnectCompleted);
            StateMachine.RaiseEvent(disconnectCompletedArg);
        }

        #endregion disconnect

        #region RobustConnection events

        /// <summary>
        /// Maps transport robust-connection notifications onto state machine events.
        /// </summary>
        private void HandleRobustConnectionNotification(object sender, ConnectionStatusEventArgs e)
        {
            RemoteSessionStateMachineEventArgs eventArgument = null;
            switch (e.Notification)
            {
                case ConnectionStatus.AutoDisconnectStarting:
                    eventArgument = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.RCDisconnectStarted);
                    break;

                case ConnectionStatus.AutoDisconnectSucceeded:
                    eventArgument = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.DisconnectCompleted,
                        new RuntimeException(
                            StringUtil.Format(RemotingErrorIdStrings.RCAutoDisconnectingError,
                                _session.RemoteRunspacePoolInternal.ConnectionInfo.ComputerName)));
                    break;

                case ConnectionStatus.InternalErrorAbort:
                    eventArgument = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.FatalError);
                    break;
            }

            if (eventArgument != null)
            {
                StateMachine.RaiseEvent(eventArgument);
            }
        }

        #endregion

        #region reconnect

        internal override void ReconnectAsync()
        {
            _transportManager.ReconnectAsync();
        }

        private void HandleReconnectComplete(object sender, EventArgs args)
        {
            //Set statemachine event
            RemoteSessionStateMachineEventArgs reconnectCompletedArg = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.ReconnectCompleted);
            StateMachine.RaiseEvent(reconnectCompletedArg);
        }

        #endregion reconnect

        #region close

        /// <summary>
        /// Close the connection asynchronously. Idempotent: subsequent calls after the
        /// first are no-ops (guarded by _isCloseCalled under _syncObject).
        /// </summary>
        internal override void CloseConnectionAsync()
        {
            lock (_syncObject)
            {
                if (_isCloseCalled)
                {
                    return;
                }

                _transportManager.CloseAsync();
                _isCloseCalled = true;
            }
        }

        private void HandleCloseComplete(object sender, EventArgs args)
        {
            // This event gets raised only when the connection is closed successfully.

            RemoteSessionStateMachineEventArgs closeCompletedArg = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.CloseCompleted);
            _stateMachine.RaiseEvent(closeCompletedArg);
        }

        #endregion close

        #region negotiation

        /// <summary>
        /// Sends the negotiation package asynchronously: raises NegotiationSendCompleted
        /// first, then starts the transport create/connect for the given session state.
        /// </summary>
        internal override void SendNegotiationAsync(RemoteSessionState sessionState)
        {
            // This state change is made before the call to CreateAsync to ensure the state machine
            // is prepared for a NegotiationReceived response.  Otherwise a race condition can
            // occur when the transport NegotiationReceived arrives too soon, breaking the session.
            // This race condition was observed for OutOfProc transport when reusing the OutOfProc process.

            //this will change StateMachine to NegotiationSent.
            RemoteSessionStateMachineEventArgs negotiationSendCompletedArg =
                new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.NegotiationSendCompleted);
            _stateMachine.RaiseEvent(negotiationSendCompletedArg);

            if (sessionState == RemoteSessionState.NegotiationSending)
            {
                _transportManager.CreateAsync();
            }
            else if (sessionState == RemoteSessionState.NegotiationSendingOnConnect)
            {
                _transportManager.ConnectCompleted += HandleConnectComplete;
                _transportManager.ConnectAsync();
            }
            else
            {
                Dbg.Assert(false, "SendNegotiationAsync called in unexpected session state");
            }
        }

        internal override event EventHandler<RemoteSessionNegotiationEventArgs> NegotiationReceived;

        #endregion negotiation

        #region state change

        /// <summary>
        /// This event indicates that the connection state has changed.
        /// </summary>
        internal override event EventHandler<RemoteSessionStateEventArgs> ConnectionStateChanged;

        /// <summary>
        /// Reacts to state machine state changes: enqueues negotiation data, notifies
        /// upper layers, kicks off negotiation/close/disconnect/reconnect, and starts
        /// receiving data once Established (WSMan transport only).
        /// </summary>
        private void HandleStateChanged(object sender, RemoteSessionStateEventArgs arg)
        {
            if (arg == null)
            {
                throw PSTraceSource.NewArgumentNullException("arg");
            }

            // Enqueue session related negotiation packets first
            if ((arg.SessionStateInfo.State == RemoteSessionState.NegotiationSending) || (arg.SessionStateInfo.State == RemoteSessionState.NegotiationSendingOnConnect))
            {
                HandleNegotiationSendingStateChange();
            }

            // this will enable top-layers to enqueue any packets during NegotiationSending and
            // during other states.
            ConnectionStateChanged.SafeInvoke(this, arg);

            if ((arg.SessionStateInfo.State == RemoteSessionState.NegotiationSending) || (arg.SessionStateInfo.State == RemoteSessionState.NegotiationSendingOnConnect))
            {
                SendNegotiationAsync(arg.SessionStateInfo.State);
            }

            //once session is established.. start receiving data (if not already done and only applies to wsmanclientsessionTM)
            if (arg.SessionStateInfo.State == RemoteSessionState.Established)
            {
                WSManClientSessionTransportManager tm = _transportManager as WSManClientSessionTransportManager;
                if (tm != null)
                {
                    tm.AdjustForProtocolVariations(_session.ServerProtocolVersion);
                    tm.StartReceivingData();
                }
            }

            // Close the transport manager only after powershell's close their transports
            // Powershell's close their transport using the ConnectionStateChanged event notification.
            if (arg.SessionStateInfo.State == RemoteSessionState.ClosingConnection)
            {
                CloseConnectionAsync();
            }

            //process disconnect
            if (arg.SessionStateInfo.State == RemoteSessionState.Disconnecting)
            {
                DisconnectAsync();
            }

            //process reconnect
            if (arg.SessionStateInfo.State == RemoteSessionState.Reconnecting)
            {
                ReconnectAsync();
            }
        }

        /// <summary>
        /// Combining negotiation packet + runspace creation and then doing transportManager.ConnectAsync().
        /// This will save us 2 network calls by doing all the work in one network call.
        /// </summary>
        private void HandleNegotiationSendingStateChange()
        {
            RemoteSessionCapability clientCapability = _session.Context.ClientCapability;
            Dbg.Assert(clientCapability.RemotingDestination == RemotingDestination.Server, "Expected clientCapability.RemotingDestination == RemotingDestination.Server");

            //Encode and send the negotiation reply
            RemoteDataObject data = RemotingEncoder.GenerateClientSessionCapability(
                clientCapability, _session.RemoteRunspacePoolInternal.InstanceId);
            RemoteDataObject<PSObject> dataAsPSObject = RemoteDataObject<PSObject>.CreateFrom(
                data.Destination, data.DataType, data.RunspacePoolId, data.PowerShellId, (PSObject)data.Data);
            _transportManager.DataToBeSentCollection.Add<PSObject>(dataAsPSObject);
        }

        #endregion state change

        internal override ClientRemoteSessionDSHandlerStateMachine StateMachine
        {
            get
            {
                return _stateMachine;
            }
        }

        #region URI Redirection

        /// <summary>
        /// Transport reported an error saying that uri is redirected. This method
        /// will perform the redirection to the new URI by doing the following:
        /// 1. Close the current transport manager to clean resources
        /// 2. Raise a warning that URI is getting redirected.
        /// 3. Using the new URI, ask the same transport manager to redirect
        /// Step 1 is performed here. Step2-3 is performed in another method
        /// </summary>
        /// <param name="newURIString"></param>
        /// <exception cref="ArgumentNullException">
        /// newURIString is a null reference.
        /// </exception>
        /// <exception cref="UriFormatException">
        /// uriString is empty.
        /// The scheme specified in uriString is invalid.
        /// uriString contains too many slashes.
        /// The password specified in uriString is invalid.
        /// The host name specified in uriString is invalid.
        /// </exception>
        private void PerformURIRedirection(string newURIString)
        {
            _redirectUri = new Uri(newURIString);

            // make sure connection is not closed while we are handling the redirection.
            lock (_syncObject)
            {
                // if connection is closed by the user..no need to redirect
                if (_isCloseCalled)
                {
                    return;
                }

                // clear our current close complete & Error handlers
                _transportManager.CloseCompleted -= HandleCloseComplete;
                _transportManager.WSManTransportErrorOccured -= HandleTransportError;

                // perform other steps only after transport manager is closed.
                _transportManager.CloseCompleted += HandleTransportCloseCompleteForRedirection;
                // Handle errors happened while redirecting differently..We need to reset the
                // original handlers in this case.
                _transportManager.WSManTransportErrorOccured += HandleTransportErrorForRedirection;
                _transportManager.PrepareForRedirection();
            }
        }

        private void HandleTransportCloseCompleteForRedirection(object source, EventArgs args)
        {
            _transportManager.CloseCompleted -= HandleTransportCloseCompleteForRedirection;
            _transportManager.WSManTransportErrorOccured -= HandleTransportErrorForRedirection;

            // reattach the close complete and error handlers
            _transportManager.CloseCompleted += HandleCloseComplete;
            _transportManager.WSManTransportErrorOccured += HandleTransportError;

            PerformURIRedirectionStep2(_redirectUri);
        }

        private void HandleTransportErrorForRedirection(object sender, TransportErrorOccuredEventArgs e)
        {
            _transportManager.CloseCompleted -= HandleTransportCloseCompleteForRedirection;
            _transportManager.WSManTransportErrorOccured -= HandleTransportErrorForRedirection;

            // reattach the close complete and error handlers
            _transportManager.CloseCompleted += HandleCloseComplete;
            _transportManager.WSManTransportErrorOccured += HandleTransportError;

            HandleTransportError(sender, e);
        }

        /// <summary>
        /// This is step 2 of URI redirection. This is called after the current transport manager
        /// is closed. This is usually called from the close complete callback.
        /// </summary>
        /// <param name="newURI"></param>
        private void PerformURIRedirectionStep2(System.Uri newURI)
        {
            Dbg.Assert(null != newURI, "Uri cannot be null");
            lock (_syncObject)
            {
                // if connection is closed by the user..no need to redirect
                if (_isCloseCalled)
                {
                    return;
                }

                // raise warning to report the redirection
                if (null != _uriRedirectionHandler)
                {
                    _uriRedirectionHandler(newURI);
                }

                // start a new connection
                _transportManager.Redirect(newURI, _connectionInfo);
            }
        }

        #endregion

        #region data handling

        /// <summary>
        /// Handler which handles transport errors: consumes URI-redirect exceptions
        /// (while redirections remain), otherwise maps the failing transport method to a
        /// state machine failure event.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        internal void HandleTransportError(object sender, TransportErrorOccuredEventArgs e)
        {
            Dbg.Assert(e != null, "HandleTransportError expects non-null eventargs");
            // handle uri redirections
            PSRemotingTransportRedirectException redirectException = e.Exception as PSRemotingTransportRedirectException;

            if ((redirectException != null) && (_maxUriRedirectionCount > 0))
            {
                Exception exception = null;

                try
                {
                    // honor max redirection count given by the user.
                    _maxUriRedirectionCount--;
                    PerformURIRedirection(redirectException.RedirectLocation);
                    return;
                }
                catch (ArgumentNullException argumentException)
                {
                    exception = argumentException;
                }
                catch (UriFormatException uriFormatException)
                {
                    exception = uriFormatException;
                }
                // if we are here, there must be an exception constructing a uri
                if (null != exception)
                {
                    PSRemotingTransportException newException =
                        new PSRemotingTransportException(PSRemotingErrorId.RedirectedURINotWellFormatted, RemotingErrorIdStrings.RedirectedURINotWellFormatted,
                            _session.Context.RemoteAddress.OriginalString,
                            redirectException.RedirectLocation);
                    newException.TransportMessage = e.Exception.TransportMessage;
                    e.Exception = newException;
                }
            }

            RemoteSessionEvent sessionEvent = RemoteSessionEvent.ConnectFailed;

            switch (e.ReportingTransportMethod)
            {
                case TransportMethodEnum.CreateShellEx:
                    sessionEvent = RemoteSessionEvent.ConnectFailed;
                    break;
                case TransportMethodEnum.SendShellInputEx:
                case TransportMethodEnum.CommandInputEx:
                    sessionEvent = RemoteSessionEvent.SendFailed;
                    break;
                case TransportMethodEnum.ReceiveShellOutputEx:
                case TransportMethodEnum.ReceiveCommandOutputEx:
                    sessionEvent = RemoteSessionEvent.ReceiveFailed;
                    break;
                case TransportMethodEnum.CloseShellOperationEx:
                    sessionEvent = RemoteSessionEvent.CloseFailed;
                    break;
                case TransportMethodEnum.DisconnectShellEx:
                    sessionEvent = RemoteSessionEvent.DisconnectFailed;
                    break;
                case TransportMethodEnum.ReconnectShellEx:
                    sessionEvent = RemoteSessionEvent.ReconnectFailed;
                    break;
            }

            RemoteSessionStateMachineEventArgs errorArgs =
                new RemoteSessionStateMachineEventArgs(sessionEvent, e.Exception);
            _stateMachine.RaiseEvent(errorArgs);
        }

        /// <summary>
        /// Dispatches data when it arrives from the input queue
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="dataArg">
        /// arg which contains the data received from input queue
        /// </param>
        internal void DispatchInputQueueData(object sender, RemoteDataEventArgs dataArg)
        {
            if (dataArg == null)
            {
                throw PSTraceSource.NewArgumentNullException("dataArg");
            }

            RemoteDataObject<PSObject> rcvdData = dataArg.ReceivedData;

            if (rcvdData == null)
            {
                throw PSTraceSource.NewArgumentException("dataArg");
            }

            RemotingDestination destination = rcvdData.Destination;

            if ((destination & RemotingDestination.Client) != RemotingDestination.Client)
            {
                // Messages not flagged for the client are a protocol violation.
                throw new PSRemotingDataStructureException(RemotingErrorIdStrings.RemotingDestinationNotForMe, RemotingDestination.Client, destination);
            }

            RemotingTargetInterface targetInterface = rcvdData.TargetInterface;
            switch (targetInterface)
            {
                case RemotingTargetInterface.Session:
                    {
                        //Messages for session can cause statemachine state to change.
                        //These messages are first processed by Sessiondata structure handler and depending
                        //on the type of message, appropriate event is raised in state machine
                        ProcessSessionMessages(dataArg);
                        break;
                    }

                case RemotingTargetInterface.RunspacePool:
                case RemotingTargetInterface.PowerShell:
                    //Non Session messages do not change the state of the statemachine.
                    //However instead of forwarding them to Runspace/pipeline here, an
                    //event is raised in state machine which verifies that state is
                    //suitable for accepting these messages. If state is suitable statemachine
                    //will call DoMessageForwarding which will forward the messages appropriately
                    RemoteSessionStateMachineEventArgs msgRcvArg = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.MessageReceived, null);
                    if (StateMachine.CanByPassRaiseEvent(msgRcvArg))
                    {
                        ProcessNonSessionMessages(dataArg.ReceivedData);
                    }
                    else
                    {
                        StateMachine.RaiseEvent(msgRcvArg);
                    }
                    break;
                default:
                    {
                        Dbg.Assert(false, "we should not be encountering this");
                    }
                    break;
            }
        }

        // TODO: If this is not used remove this
        // internal override event EventHandler<RemoteDataEventArgs> DataReceived;

        /// <summary>
        /// This processes the object received from transport which are
        /// targeted for session
        /// </summary>
        /// <param name="arg">
        /// argument containing the data object
        /// </param>
        private void ProcessSessionMessages(RemoteDataEventArgs arg)
        {
            if (arg == null || arg.ReceivedData == null)
            {
                throw PSTraceSource.NewArgumentNullException("arg");
            }

            RemoteDataObject<PSObject> rcvdData = arg.ReceivedData;

            RemotingTargetInterface targetInterface = rcvdData.TargetInterface;
            Dbg.Assert(targetInterface == RemotingTargetInterface.Session, "targetInterface must be Session");

            RemotingDataType dataType = rcvdData.DataType;

            switch (dataType)
            {
                case RemotingDataType.CloseSession:
                    // Server asked us to close; raise Close with the reason attached.
                    PSRemotingDataStructureException reasonOfClose = new PSRemotingDataStructureException(RemotingErrorIdStrings.ServerRequestedToCloseSession);
                    RemoteSessionStateMachineEventArgs closeSessionArg = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.Close, reasonOfClose);
                    _stateMachine.RaiseEvent(closeSessionArg);
                    break;

                case RemotingDataType.SessionCapability:
                    RemoteSessionCapability capability = null;
                    try
                    {
                        capability = RemotingDecoder.GetSessionCapability(rcvdData.Data);
                    }
                    catch (PSRemotingDataStructureException dse)
                    {
                        // this will happen if expected properties are not
                        // received for session capability
                        throw new PSRemotingDataStructureException(RemotingErrorIdStrings.ClientNotFoundCapabilityProperties,
                            dse.Message, PSVersionInfo.BuildVersion, RemotingConstants.ProtocolVersion);
                    }

                    RemoteSessionStateMachineEventArgs capabilityArg = new RemoteSessionStateMachineEventArgs(RemoteSessionEvent.NegotiationReceived);
                    capabilityArg.RemoteSessionCapability = capability;
                    _stateMachine.RaiseEvent(capabilityArg);

                    RemoteSessionNegotiationEventArgs negotiationArg = new RemoteSessionNegotiationEventArgs(capability);
                    NegotiationReceived.SafeInvoke(this, negotiationArg);
                    break;

                case RemotingDataType.EncryptedSessionKey:
                    {
                        String encryptedSessionKey = RemotingDecoder.GetEncryptedSessionKey(rcvdData.Data);
                        EncryptedSessionKeyReceived.SafeInvoke(this, new RemoteDataEventArgs<string>(encryptedSessionKey));
                    }
                    break;

                case RemotingDataType.PublicKeyRequest:
                    {
                        PublicKeyRequestReceived.SafeInvoke(this, new RemoteDataEventArgs<string>(String.Empty));
                    }
                    break;

                default:
                    {
                        throw new PSRemotingDataStructureException(RemotingErrorIdStrings.ReceivedUnsupportedAction, dataType);
                    }
            }
        }

        /// <summary>
        /// This processes the object received from transport which are
        /// not targeted for session
        /// </summary>
        /// <param name="rcvdData">
        /// received data.
        /// </param>
        internal void ProcessNonSessionMessages(RemoteDataObject<PSObject> rcvdData)
        {
            // TODO: Consider changing to Dbg.Assert()
            if (rcvdData == null)
            {
                throw PSTraceSource.NewArgumentNullException("rcvdData");
            }

            RemotingTargetInterface targetInterface = rcvdData.TargetInterface;

            Guid clientRunspacePoolId;
            RemoteRunspacePoolInternal runspacePool;

            switch (targetInterface)
            {
                case RemotingTargetInterface.Session:
                    Dbg.Assert(false, "The session remote data is handled my session data structure handler, not here");
                    break;

                case RemotingTargetInterface.RunspacePool:
                    clientRunspacePoolId = rcvdData.RunspacePoolId;
                    runspacePool = _session.GetRunspacePool(clientRunspacePoolId);

                    if (runspacePool != null)
                    {
                        // GETBACK
                        runspacePool.DataStructureHandler.ProcessReceivedData(rcvdData);
                    }
                    else
                    {
                        // The runspace pool may have been removed on the client side,
                        // so, we should just ignore the message.
                        s_trace.WriteLine(@"Client received data for Runspace (id: {0}),
                            but the Runspace cannot be found", clientRunspacePoolId);
                    }
                    break;

                case RemotingTargetInterface.PowerShell:
                    clientRunspacePoolId = rcvdData.RunspacePoolId;
                    runspacePool = _session.GetRunspacePool(clientRunspacePoolId);

                    // NOTE(review): unlike the RunspacePool case above there is no null check here;
                    // presumably the protocol guarantees the pool exists for PowerShell-targeted
                    // data — confirm, otherwise this can throw NullReferenceException.
                    // GETBACK
                    runspacePool.DataStructureHandler.DispatchMessageToPowerShell(rcvdData);
                    break;

                default:
                    break;
            }
        }

        #endregion data handling

        #region IDisposable

        /// <summary>
        /// public method for dispose
        /// </summary>
        public void Dispose()
        {
            Dispose(true);

            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// release all resources
        /// </summary>
        /// <param name="disposing">if true, release all managed resources</param>
        protected void Dispose(bool disposing)
        {
            if (disposing)
            {
                _transportManager.Dispose();
            }
        }

        #endregion IDisposable

        #region Key Exchange

        internal override event EventHandler<RemoteDataEventArgs<string>> EncryptedSessionKeyReceived;

        internal override event EventHandler<RemoteDataEventArgs<string>> PublicKeyRequestReceived;

        /// <summary>
        /// Send the specified local public key to the remote end
        /// </summary>
        /// <param name="localPublicKey">local public key as a string</param>
        internal override void SendPublicKeyAsync(string localPublicKey)
        {
            _transportManager.DataToBeSentCollection.Add<object>(
                RemotingEncoder.GenerateMyPublicKey(_session.RemoteRunspacePoolInternal.InstanceId,
                    localPublicKey, RemotingDestination.Server));
        }

        /// <summary>
        /// Raise the public key received event
        /// </summary>
        /// <param name="receivedData">received data</param>
        /// <remarks>This method is a hook to be called
        /// from the transport manager</remarks>
        internal override void RaiseKeyExchangeMessageReceived(RemoteDataObject<PSObject> receivedData)
        {
            ProcessSessionMessages(new RemoteDataEventArgs(receivedData));
        }

        #endregion Key Exchange
    }
}
#region -- License Terms --
//
// MessagePack for CLI
//
// Copyright (C) 2010-2017 FUJIWARA, Yusuke
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion -- License Terms --

using System;
using System.Collections.Generic;
#if NETSTANDARD1_1
using Contract = MsgPack.MPContract;
#else
using System.Diagnostics.Contracts;
#endif // NETSTANDARD1_1
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;

using MsgPack.Serialization.AbstractSerializers;
using MsgPack.Serialization.Reflection;

namespace MsgPack.Serialization.EmittingSerializers
{
	/// <summary>
	///		Defines common features for emitters.
	/// </summary>
	internal sealed partial class SerializerEmitter
	{
		// Specification (names, target type) of the serializer being emitted.
		private readonly SerializerSpecification _specification;
		// True when the hosting module was created debuggable.
		private readonly bool _isDebuggable;
		private readonly ModuleBuilder _host;
		// Builder of the serializer type currently being emitted.
		private readonly TypeBuilder _typeBuilder;
		// Method/field caches keyed by name so each member is defined only once.
		private readonly Dictionary<string, MethodBuilder> _methodTable;
		private readonly Dictionary<string, FieldBuilder> _fieldTable;

		/// <summary>
		///		Initializes a new instance of the <see cref="SerializerEmitter"/> class.
		/// </summary>
		/// <param name="host">The host <see cref="ModuleBuilder"/>.</param>
		/// <param name="specification">The specification of the serializer.</param>
		/// <param name="baseClass">Type of the base class of the serializer.</param>
		/// <param name="isDebuggable">Set to <c>true</c> when <paramref name="host"/> is debuggable.</param>
		public SerializerEmitter( ModuleBuilder host, SerializerSpecification specification, Type baseClass, bool isDebuggable )
		{
			Contract.Requires( host != null );
			Contract.Requires( specification != null );
			Contract.Requires( baseClass != null );

			Tracer.Emit.TraceEvent( Tracer.EventType.DefineType, Tracer.EventId.DefineType, "Create {0}", specification.SerializerTypeFullName );

			this._methodTable = new Dictionary<string, MethodBuilder>();
			this._fieldTable = new Dictionary<string, FieldBuilder>();
			this._specification = specification;
			this._host = host;
			this._typeBuilder =
				host.DefineType(
					specification.SerializerTypeFullName,
					TypeAttributes.Sealed | TypeAttributes.Public | TypeAttributes.UnicodeClass | TypeAttributes.AutoLayout | TypeAttributes.BeforeFieldInit,
					baseClass
				);
#if DEBUG
			Contract.Assert( this._typeBuilder.BaseType != null, "baseType != null" );
#endif // DEBUG
			this._isDebuggable = isDebuggable;

#if DEBUG && !NET35 && !NETSTANDARD1_1 && !NETSTANDARD1_3 && !NETSTANDARD2_0
			// When IL dumping is enabled, prepare the dynamic assembly for dump output.
			if ( isDebuggable && SerializerDebugging.DumpEnabled )
			{
				SerializerDebugging.PrepareDump( host.Assembly as AssemblyBuilder );
			}
#endif // DEBUG && !NET35 && !NETSTANDARD1_1 && !NETSTANDARD1_3 && !NETSTANDARD2_0
		}

		#region -- Field --

		/// <summary>
		///		Registers specified field to the current emitting session.
		/// </summary>
		/// <param name="name">The name of the field.</param>
		/// <param name="type">The type of the field.</param>
		/// <returns><see cref="FieldBuilder"/>.</returns>
		public FieldBuilder RegisterField( string name, Type type )
		{
			// Idempotent: a field is defined once and cached by name thereafter.
			FieldBuilder field;
			if ( !this._fieldTable.TryGetValue( name, out field ) )
			{
				field = this.DefineInitonlyField( name, type );
				this._fieldTable.Add( name, field );
			}

			return field;
		}

		// Defines a private init-only (readonly) instance field on the emitted type.
		private FieldBuilder DefineInitonlyField( string name, Type type )
		{
			return this._typeBuilder.DefineField( name, type, FieldAttributes.Private | FieldAttributes.InitOnly );
		}

		#endregion -- Field --

		#region -- Method --

		/// <summary>
		///		Gets the IL generator to implement specified method override.
		/// </summary>
		/// <param name="methodName">The name of the method.</param>
		/// <returns>
		///		The IL generator to implement specified method override.
		///		This value will not be <c>null</c>.
		/// </returns>
		public ILMethodConctext DefineOverrideMethod( string methodName )
		{
			// Overrides copy their signature from the base method, so no
			// return/parameter types are supplied here.
			return this.DefineMethod( methodName, true, false, null, ReflectionAbstractions.EmptyTypes );
		}

		/// <summary>
		///		Gets the IL generator to implement specified private instance method.
		/// </summary>
		/// <param name="methodName">The name of the method.</param>
		/// <param name="isStatic"><c>true</c> for static method.</param>
		/// <param name="returnType">The type of the method return value.</param>
		/// <param name="parameterTypes">The types of the method parameters.</param>
		/// <returns>
		///		The IL generator to implement specified method override.
		///		This value will not be <c>null</c>.
		/// </returns>
		public ILMethodConctext DefinePrivateMethod( string methodName, bool isStatic, Type returnType, params Type[] parameterTypes )
		{
			return this.DefineMethod( methodName, false, isStatic, returnType, parameterTypes );
		}

		// Core method-definition routine shared by the two public entry points.
		// Unlike fields, redefinition of a method name is an error here.
		private ILMethodConctext DefineMethod( string methodName, bool isOverride, bool isStatic, Type returnType, Type[] parameterTypes )
		{
			if ( this._methodTable.ContainsKey( methodName ) )
			{
				throw new InvalidOperationException( String.Format( CultureInfo.CurrentCulture, "Method {0} is already defined.", methodName ) );
			}

#if DEBUG
			Contract.Assert( this._typeBuilder.BaseType != null, "this._typeBuilder.BaseType != null" );
#endif // DEBUG

			MethodBuilder builder;
			if ( isOverride )
			{
				// NOTE(review): GetRuntimeMethod is called with the name only —
				// presumably a project extension resolving by name alone; verify.
				var baseMethod = this._typeBuilder.BaseType.GetRuntimeMethod( methodName );
				// Seal the override and strip Abstract so the emitted type is concrete.
				builder =
					this._typeBuilder.DefineMethod(
						baseMethod.Name,
						( baseMethod.Attributes | MethodAttributes.Final ) & ( ~MethodAttributes.Abstract ),
						baseMethod.CallingConvention,
						baseMethod.ReturnType,
						baseMethod.GetParameterTypes()
					);
				this._typeBuilder.DefineMethodOverride( builder, baseMethod );
			}
			else
			{
				builder =
					this._typeBuilder.DefineMethod(
						methodName,
						MethodAttributes.Private | MethodAttributes.HideBySig | ( isStatic ? MethodAttributes.Static : 0 ),
						isStatic ? CallingConventions.Standard : CallingConventions.HasThis,
						returnType,
						parameterTypes
					);
			}

			this._methodTable[ methodName ] = builder;
			return new ILMethodConctext( this.GetILGenerator( builder, parameterTypes ), builder, parameterTypes );
		}

		#endregion -- Method --

		#region -- IL Generation --

		[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "parameterTypes", Justification = "For DEBUG build." )]
		private TracingILGenerator GetILGenerator( ConstructorBuilder builder, Type[] parameterTypes )
		{
#if DEBUG
			// Emit a human-readable ".ctor" header to the IL trace when tracing is on.
			if ( SerializerDebugging.TraceEnabled )
			{
				SerializerDebugging.ILTraceWriter.WriteLine();
				SerializerDebugging.ILTraceWriter.WriteLine(
					"{2} {3} {0}::.ctor({1}) {4}",
					this._typeBuilder.Name,
					String.Join( ", ", parameterTypes.Select( t => t.GetFullName() ).ToArray() ),
					builder.Attributes.ToILString(),
					builder.CallingConvention.ToILString(),
#if !NET35 && !NET40
					builder.MethodImplementationFlags.ToILString()
#else
					String.Empty
#endif // !NET35 && !NET40
				);
			}
#endif // DEBUG
			return new TracingILGenerator( builder, SerializerDebugging.ILTraceWriter, this._isDebuggable );
		}

		[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "parameterTypes", Justification = "For DEBUG build." )]
		private TracingILGenerator GetILGenerator( MethodBuilder builder, Type[] parameterTypes )
		{
#if DEBUG
			// Emit a human-readable method header to the IL trace when tracing is on.
			if ( SerializerDebugging.TraceEnabled )
			{
				SerializerDebugging.ILTraceWriter.WriteLine();
				SerializerDebugging.ILTraceWriter.WriteLine(
					"{4} {5} {3} {0}::{1}({2}) {6}",
					this._typeBuilder.Name,
					builder.Name,
					String.Join( ", ", parameterTypes.Select( t => t.GetFullName() ).ToArray() ),
					builder.ReturnType.GetFullName(),
					builder.Attributes.ToILString(),
					builder.CallingConvention.ToILString(),
#if !NET35 && !NET40
					builder.MethodImplementationFlags.ToILString()
#else
					String.Empty
#endif // !NET35 && !NET40
				);
			}
#endif // DEBUG
			return new TracingILGenerator( builder, SerializerDebugging.ILTraceWriter, this._isDebuggable );
		}

		#endregion -- IL Generation --

		#region -- Constructor --

		// Defines a constructor with the standard calling convention on the emitted type.
		private ConstructorBuilder DefineConstructor( MethodAttributes attributes, params Type[] parameterTypes )
		{
			return this._typeBuilder.DefineConstructor( attributes, CallingConventions.Standard, parameterTypes );
		}

		// Defines a constructor and immediately emits its body via the supplied
		// emitter callback (which receives the base type and a tracing IL generator).
		private ConstructorBuilder CreateConstructor( MethodAttributes attributes, Type[] parameterTypes, Action<Type, TracingILGenerator> emitter )
		{
			var builder = this.DefineConstructor( attributes, parameterTypes );
			emitter( this._typeBuilder.BaseType, this.GetILGenerator( builder, parameterTypes ) );
#if DEBUG
			if ( SerializerDebugging.TraceEnabled )
			{
				SerializerDebugging.FlushTraceData();
			}
#endif // DEBUG
			return builder;
		}

		#endregion -- Constructor --
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Diagnostics.Contracts;

namespace System.Globalization
{
    // Modern Persian calendar is a solar observation based calendar. Each new year begins on the day when the vernal equinox occurs before noon.
    // The epoch is the date of the vernal equinox prior to the epoch of the Islamic calendar (March 19, 622 Julian or March 22, 622 Gregorian)
    // There is no Persian year 0. Ordinary years have 365 days. Leap years have 366 days with the last month (Esfand) gaining the extra day.
    /*
     **  Calendar support range:
     **      Calendar    Minimum     Maximum
     **      ==========  ==========  ==========
     **      Gregorian   0622/03/22   9999/12/31
     **      Persian     0001/01/01   9378/10/13
     */
    [Serializable]
    public class PersianCalendar : Calendar
    {
        public static readonly int PersianEra = 1;

        // Epoch as an absolute day number (days since 1/1/0001 Gregorian).
        internal static long PersianEpoch = new DateTime(622, 3, 22).Ticks / GregorianCalendar.TicksPerDay;
        // Used to seed the new-year search near the middle of the target year.
        private const int ApproximateHalfYear = 180;

        internal const int DatePartYear = 0;
        internal const int DatePartDayOfYear = 1;
        internal const int DatePartMonth = 2;
        internal const int DatePartDay = 3;
        internal const int MonthsPerYear = 12;

        // Cumulative day counts at the start of each month (leap-year layout:
        // six 31-day months, five 30-day months, Esfand with 30).
        internal static int[] DaysToMonth = { 0, 31, 62, 93, 124, 155, 186, 216, 246, 276, 306, 336, 366 };

        internal const int MaxCalendarYear = 9378;
        internal const int MaxCalendarMonth = 10;
        internal const int MaxCalendarDay = 13;

        // Persian calendar (year: 1, month: 1, day:1 ) = Gregorian (year: 622, month: 3, day: 22)
        // This is the minimal Gregorian date that we support in the PersianCalendar.
        internal static DateTime minDate = new DateTime(622, 3, 22);
        internal static DateTime maxDate = DateTime.MaxValue;

        public override DateTime MinSupportedDateTime
        {
            get
            {
                return (minDate);
            }
        }

        public override DateTime MaxSupportedDateTime
        {
            get
            {
                return (maxDate);
            }
        }

        public override CalendarAlgorithmType AlgorithmType
        {
            get
            {
                return CalendarAlgorithmType.SolarCalendar;
            }
        }

        // Construct an instance of Persian calendar.
        public PersianCalendar()
        {
        }

        internal override CalendarId BaseCalendarID
        {
            get
            {
                return CalendarId.GREGORIAN;
            }
        }

        internal override CalendarId ID
        {
            get
            {
                return CalendarId.PERSIAN;
            }
        }

        /*=================================GetAbsoluteDatePersian==========================
        **Action: Gets the Absolute date for the given Persian date.  The absolute date means
        **       the number of days from January 1st, 1 A.D.
        **Returns:
        **Arguments:
        **Exceptions:
        ============================================================================*/
        private long GetAbsoluteDatePersian(int year, int month, int day)
        {
            if (year >= 1 && year <= MaxCalendarYear && month >= 1 && month <= 12)
            {
                int ordinalDay = DaysInPreviousMonths(month) + day - 1; // day is one based, make 0 based since this will be the number of days we add to beginning of year below
                int approximateDaysFromEpochForYearStart = (int)(CalendricalCalculationsHelper.MeanTropicalYearInDays * (year - 1));
                long yearStart = CalendricalCalculationsHelper.PersianNewYearOnOrBefore(PersianEpoch + approximateDaysFromEpochForYearStart + ApproximateHalfYear);
                yearStart += ordinalDay;
                return yearStart;
            }
            throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadYearMonthDay);
        }

        // Throws when ticks fall outside the supported Gregorian range.
        internal static void CheckTicksRange(long ticks)
        {
            if (ticks < minDate.Ticks || ticks > maxDate.Ticks)
            {
                throw new ArgumentOutOfRangeException(
                            "time",
                            String.Format(
                                CultureInfo.InvariantCulture,
                                SR.ArgumentOutOfRange_CalendarRange,
                                minDate,
                                maxDate));
            }
        }

        // Only CurrentEra (0) and PersianEra (1) are accepted.
        internal static void CheckEraRange(int era)
        {
            if (era != CurrentEra && era != PersianEra)
            {
                throw new ArgumentOutOfRangeException(nameof(era), SR.ArgumentOutOfRange_InvalidEraValue);
            }
        }

        internal static void CheckYearRange(int year, int era)
        {
            CheckEraRange(era);
            if (year < 1 || year > MaxCalendarYear)
            {
                throw new ArgumentOutOfRangeException(
                            nameof(year),
                            String.Format(
                                CultureInfo.CurrentCulture,
                                SR.ArgumentOutOfRange_Range,
                                1,
                                MaxCalendarYear));
            }
        }

        internal static void CheckYearMonthRange(int year, int month, int era)
        {
            CheckYearRange(year, era);
            // The final supported year is truncated after month 10.
            if (year == MaxCalendarYear)
            {
                if (month > MaxCalendarMonth)
                {
                    throw new ArgumentOutOfRangeException(
                                nameof(month),
                                String.Format(
                                    CultureInfo.CurrentCulture,
                                    SR.ArgumentOutOfRange_Range,
                                    1,
                                    MaxCalendarMonth));
                }
            }

            if (month < 1 || month > 12)
            {
                throw new ArgumentOutOfRangeException(nameof(month), SR.ArgumentOutOfRange_Month);
            }
        }

        // Maps a 1-based ordinal day within the year to its 1-based month.
        private static int MonthFromOrdinalDay(int ordinalDay)
        {
            Debug.Assert(ordinalDay <= 366);
            int index = 0;
            while (ordinalDay > DaysToMonth[index])
                index++;

            return index;
        }

        // Cumulative days in the months preceding the given 1-based month.
        private static int DaysInPreviousMonths(int month)
        {
            Debug.Assert(1 <= month && month <= 12);
            --month; // months are one based but for calculations use 0 based
            return DaysToMonth[month];
        }

        /*=================================GetDatePart==========================
        **Action: Returns a given date part of this <i>DateTime</i>. This method is used
        **       to compute the year, day-of-year, month, or day part.
        **Returns:
        **Arguments:
        **Exceptions:  ArgumentException if part is incorrect.
        ============================================================================*/
        internal int GetDatePart(long ticks, int part)
        {
            long NumDays;                 // The calculation buffer in number of days.

            CheckTicksRange(ticks);

            //
            //  Get the absolute date.  The absolute date is the number of days from January 1st, 1 A.D.
            //  1/1/0001 is absolute date 1.
            //
            NumDays = ticks / GregorianCalendar.TicksPerDay + 1;

            //
            //  Calculate the approximate Persian Year.
            //
            long yearStart = CalendricalCalculationsHelper.PersianNewYearOnOrBefore(NumDays);
            int y = (int)(Math.Floor(((yearStart - PersianEpoch) / CalendricalCalculationsHelper.MeanTropicalYearInDays) + 0.5)) + 1;
            Debug.Assert(y >= 1);

            if (part == DatePartYear)
            {
                return y;
            }

            //
            //  Calculate the Persian Month.
            //
            int ordinalDay = (int)(NumDays - CalendricalCalculationsHelper.GetNumberOfDays(this.ToDateTime(y, 1, 1, 0, 0, 0, 0, 1)));

            if (part == DatePartDayOfYear)
            {
                return ordinalDay;
            }

            int m = MonthFromOrdinalDay(ordinalDay);
            Debug.Assert(ordinalDay >= 1);
            Debug.Assert(m >= 1 && m <= 12);

            if (part == DatePartMonth)
            {
                return m;
            }

            int d = ordinalDay - DaysInPreviousMonths(m);
            Debug.Assert(1 <= d);
            Debug.Assert(d <= 31);

            //
            //  Calculate the Persian Day.
            //
            if (part == DatePartDay)
            {
                return (d);
            }

            // Incorrect part value.
            throw new InvalidOperationException(SR.InvalidOperation_DateTimeParsing);
        }

        // Returns the DateTime resulting from adding the given number of
        // months to the specified DateTime. The result is computed by incrementing
        // (or decrementing) the year and month parts of the specified DateTime by
        // value months, and, if required, adjusting the day part of the
        // resulting date downwards to the last day of the resulting month in the
        // resulting year. The time-of-day part of the result is the same as the
        // time-of-day part of the specified DateTime.
        //
        public override DateTime AddMonths(DateTime time, int months)
        {
            if (months < -120000 || months > 120000)
            {
                throw new ArgumentOutOfRangeException(
                            nameof(months),
                            String.Format(
                                CultureInfo.CurrentCulture,
                                SR.ArgumentOutOfRange_Range,
                                -120000,
                                120000));
            }
            Contract.EndContractBlock();

            // Get the date in Persian calendar.
            int y = GetDatePart(time.Ticks, DatePartYear);
            int m = GetDatePart(time.Ticks, DatePartMonth);
            int d = GetDatePart(time.Ticks, DatePartDay);

            // Normalize the 0-based month offset; the negative branch keeps the
            // month in [1, 12] while borrowing whole years.
            int i = m - 1 + months;
            if (i >= 0)
            {
                m = i % 12 + 1;
                y = y + i / 12;
            }
            else
            {
                m = 12 + (i + 1) % 12;
                y = y + (i - 11) / 12;
            }

            // Clamp the day to the target month's length.
            int days = GetDaysInMonth(y, m);
            if (d > days)
            {
                d = days;
            }

            long ticks = GetAbsoluteDatePersian(y, m, d) * TicksPerDay + time.Ticks % TicksPerDay;
            Calendar.CheckAddResult(ticks, MinSupportedDateTime, MaxSupportedDateTime);
            return (new DateTime(ticks));
        }

        // Returns the DateTime resulting from adding the given number of
        // years to the specified DateTime. The result is computed by incrementing
        // (or decrementing) the year part of the specified DateTime by value
        // years. If the month and day of the specified DateTime is 2/29, and if the
        // resulting year is not a leap year, the month and day of the resulting
        // DateTime becomes 2/28. Otherwise, the month, day, and time-of-day
        // parts of the result are the same as those of the specified DateTime.
        //
        public override DateTime AddYears(DateTime time, int years)
        {
            return (AddMonths(time, years * 12));
        }

        // Returns the day-of-month part of the specified DateTime. The returned
        // value is an integer between 1 and 31.
        //
        public override int GetDayOfMonth(DateTime time)
        {
            return (GetDatePart(time.Ticks, DatePartDay));
        }

        // Returns the day-of-week part of the specified DateTime. The returned value
        // is an integer between 0 and 6, where 0 indicates Sunday, 1 indicates
        // Monday, 2 indicates Tuesday, 3 indicates Wednesday, 4 indicates
        // Thursday, 5 indicates Friday, and 6 indicates Saturday.
        //
        public override DayOfWeek GetDayOfWeek(DateTime time)
        {
            return ((DayOfWeek)((int)(time.Ticks / TicksPerDay + 1) % 7));
        }

        // Returns the day-of-year part of the specified DateTime. The returned value
        // is an integer between 1 and 366.
        //
        public override int GetDayOfYear(DateTime time)
        {
            return (GetDatePart(time.Ticks, DatePartDayOfYear));
        }

        // Returns the number of days in the month given by the year and
        // month arguments.
        //
        public override int GetDaysInMonth(int year, int month, int era)
        {
            CheckYearMonthRange(year, month, era);

            // The truncated final year ends partway through month 10.
            if ((month == MaxCalendarMonth) && (year == MaxCalendarYear))
            {
                return MaxCalendarDay;
            }

            int daysInMonth = DaysToMonth[month] - DaysToMonth[month - 1];
            // Esfand loses its extra day in common years.
            if ((month == MonthsPerYear) && !IsLeapYear(year))
            {
                Debug.Assert(daysInMonth == 30);
                --daysInMonth;
            }
            return daysInMonth;
        }

        // Returns the number of days in the year given by the year argument for the current era.
        //
        public override int GetDaysInYear(int year, int era)
        {
            CheckYearRange(year, era);
            if (year == MaxCalendarYear)
            {
                return DaysToMonth[MaxCalendarMonth - 1] + MaxCalendarDay;
            }
            // Common years have 365 days.  Leap years have 366 days.
            return (IsLeapYear(year, CurrentEra) ? 366 : 365);
        }

        // Returns the era for the specified DateTime value.
        public override int GetEra(DateTime time)
        {
            CheckTicksRange(time.Ticks);
            return (PersianEra);
        }

        public override int[] Eras
        {
            get
            {
                return (new int[] { PersianEra });
            }
        }

        // Returns the month part of the specified DateTime. The returned value is an
        // integer between 1 and 12.
        //
        public override int GetMonth(DateTime time)
        {
            return (GetDatePart(time.Ticks, DatePartMonth));
        }

        // Returns the number of months in the specified year and era.
        public override int GetMonthsInYear(int year, int era)
        {
            CheckYearRange(year, era);
            if (year == MaxCalendarYear)
            {
                return MaxCalendarMonth;
            }
            return (12);
        }

        // Returns the year part of the specified DateTime. The returned value is an
        // integer between 1 and MaxCalendarYear.
        //
        public override int GetYear(DateTime time)
        {
            return (GetDatePart(time.Ticks, DatePartYear));
        }

        // Checks whether a given day in the specified era is a leap day. This method returns true if
        // the date is a leap day, or false if not.
        //
        public override bool IsLeapDay(int year, int month, int day, int era)
        {
            // The year/month/era value checking is done in GetDaysInMonth().
            int daysInMonth = GetDaysInMonth(year, month, era);

            if (day < 1 || day > daysInMonth)
            {
                throw new ArgumentOutOfRangeException(
                            nameof(day),
                            String.Format(
                                CultureInfo.CurrentCulture,
                                SR.ArgumentOutOfRange_Day,
                                daysInMonth,
                                month));
            }
            // Only Esfand 30 of a leap year is a leap day.
            return (IsLeapYear(year, era) && month == 12 && day == 30);
        }

        // Returns  the leap month in a calendar year of the specified era. This method returns 0
        // if this calendar does not have leap month, or this year is not a leap year.
        //
        public override int GetLeapMonth(int year, int era)
        {
            CheckYearRange(year, era);
            return (0);
        }

        // Checks whether a given month in the specified era is a leap month. This method returns true if
        // month is a leap month, or false if not.
        //
        public override bool IsLeapMonth(int year, int month, int era)
        {
            CheckYearMonthRange(year, month, era);
            return (false);
        }

        // Checks whether a given year in the specified era is a leap year. This method returns true if
        // year is a leap year, or false if not.
        //
        public override bool IsLeapYear(int year, int era)
        {
            CheckYearRange(year, era);

            if (year == MaxCalendarYear)
            {
                return false;
            }

            // A year is leap iff it spans 366 absolute days.
            return (GetAbsoluteDatePersian(year + 1, 1, 1) - GetAbsoluteDatePersian(year, 1, 1)) == 366;
        }

        // Returns the date and time converted to a DateTime value.  Throws an exception if the n-tuple is invalid.
        //
        public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
        {
            // The year/month/era checking is done in GetDaysInMonth().
            int daysInMonth = GetDaysInMonth(year, month, era);

            if (day < 1 || day > daysInMonth)
            {
                // BCLDebug.Log("year = " + year + ", month = " + month + ", day = " + day);
                throw new ArgumentOutOfRangeException(
                            nameof(day),
                            String.Format(
                                CultureInfo.CurrentCulture,
                                SR.ArgumentOutOfRange_Day,
                                daysInMonth,
                                month));
            }

            long lDate = GetAbsoluteDatePersian(year, month, day);

            if (lDate >= 0)
            {
                return (new DateTime(lDate * GregorianCalendar.TicksPerDay + TimeToTicks(hour, minute, second, millisecond)));
            }
            else
            {
                throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadYearMonthDay);
            }
        }

        private const int DEFAULT_TWO_DIGIT_YEAR_MAX = 1410;

        public override int TwoDigitYearMax
        {
            get
            {
                // Lazily initialized from the system setting on first access.
                if (twoDigitYearMax == -1)
                {
                    twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DEFAULT_TWO_DIGIT_YEAR_MAX);
                }
                return (twoDigitYearMax);
            }

            set
            {
                VerifyWritable();
                if (value < 99 || value > MaxCalendarYear)
                {
                    throw new ArgumentOutOfRangeException(
                                nameof(value),
                                String.Format(
                                    CultureInfo.CurrentCulture,
                                    SR.ArgumentOutOfRange_Range,
                                    99,
                                    MaxCalendarYear));
                }
                twoDigitYearMax = value;
            }
        }

        public override int ToFourDigitYear(int year)
        {
            if (year < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(year), SR.ArgumentOutOfRange_NeedNonNegNum);
            }
            Contract.EndContractBlock();

            if (year < 100)
            {
                return (base.ToFourDigitYear(year));
            }

            if (year > MaxCalendarYear)
            {
                throw new ArgumentOutOfRangeException(
                            nameof(year),
                            String.Format(
                                CultureInfo.CurrentCulture,
                                SR.ArgumentOutOfRange_Range,
                                1,
                                MaxCalendarYear));
            }
            return (year);
        }
    }
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

namespace Google.Cloud.Compute.V1.Snippets
{
    using Google.Api.Gax;
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading.Tasks;
    using lro = Google.LongRunning;

    /// <summary>Generated snippets.</summary>
    public sealed class AllGeneratedServiceAttachmentsClientSnippets
    {
        /// <summary>Snippet for AggregatedList</summary>
        public void AggregatedListRequestObject()
        {
            // Snippet: AggregatedList(AggregatedListServiceAttachmentsRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            AggregatedListServiceAttachmentsRequest request = new AggregatedListServiceAttachmentsRequest
            {
                OrderBy = "",
                Project = "",
                Filter = "",
                IncludeAllScopes = false,
                ReturnPartialSuccess = false,
            };
            // Make the request
            PagedEnumerable<ServiceAttachmentAggregatedList, KeyValuePair<string, ServiceAttachmentsScopedList>> response = serviceAttachmentsClient.AggregatedList(request);
            // Iterate over all response items, lazily performing RPCs as required
            foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ServiceAttachmentAggregatedList page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<KeyValuePair<string, ServiceAttachmentsScopedList>> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for AggregatedListAsync</summary>
        public async Task AggregatedListRequestObjectAsync()
        {
            // Snippet: AggregatedListAsync(AggregatedListServiceAttachmentsRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            AggregatedListServiceAttachmentsRequest request = new AggregatedListServiceAttachmentsRequest
            {
                OrderBy = "",
                Project = "",
                Filter = "",
                IncludeAllScopes = false,
                ReturnPartialSuccess = false,
            };
            // Make the request
            PagedAsyncEnumerable<ServiceAttachmentAggregatedList, KeyValuePair<string, ServiceAttachmentsScopedList>> response = serviceAttachmentsClient.AggregatedListAsync(request);
            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((KeyValuePair<string, ServiceAttachmentsScopedList> item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });
            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ServiceAttachmentAggregatedList page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<KeyValuePair<string, ServiceAttachmentsScopedList>> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for AggregatedList</summary>
        public void AggregatedList()
        {
            // Snippet: AggregatedList(string, string, int?, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            string project = "";
            // Make the request
            PagedEnumerable<ServiceAttachmentAggregatedList, KeyValuePair<string, ServiceAttachmentsScopedList>> response = serviceAttachmentsClient.AggregatedList(project);
            // Iterate over all response items, lazily performing RPCs as required
            foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ServiceAttachmentAggregatedList page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
Page<KeyValuePair<string, ServiceAttachmentsScopedList>> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for AggregatedListAsync</summary>
        public async Task AggregatedListAsync()
        {
            // Snippet: AggregatedListAsync(string, string, int?, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            // Make the request
            PagedAsyncEnumerable<ServiceAttachmentAggregatedList, KeyValuePair<string, ServiceAttachmentsScopedList>> response = serviceAttachmentsClient.AggregatedListAsync(project);
            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((KeyValuePair<string, ServiceAttachmentsScopedList> item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });
            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ServiceAttachmentAggregatedList page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<KeyValuePair<string, ServiceAttachmentsScopedList>> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (KeyValuePair<string, ServiceAttachmentsScopedList> item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for Delete</summary>
        public void DeleteRequestObject()
        {
            // Snippet: Delete(DeleteServiceAttachmentRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            DeleteServiceAttachmentRequest request = new DeleteServiceAttachmentRequest
            {
                RequestId = "",
                Region = "",
                Project = "",
                ServiceAttachment = "",
            };
            // Make the request
            lro::Operation<Operation, Operation> response = serviceAttachmentsClient.Delete(request);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = serviceAttachmentsClient.PollOnceDelete(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for DeleteAsync</summary>
        public async Task DeleteRequestObjectAsync()
        {
            // Snippet: DeleteAsync(DeleteServiceAttachmentRequest, CallSettings)
            // Additional: DeleteAsync(DeleteServiceAttachmentRequest, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            DeleteServiceAttachmentRequest request = new DeleteServiceAttachmentRequest
            {
                RequestId = "",
                Region = "",
                Project = "",
                ServiceAttachment = "",
            };
            // Make the request
            lro::Operation<Operation, Operation> response = await serviceAttachmentsClient.DeleteAsync(request);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await serviceAttachmentsClient.PollOnceDeleteAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for Delete</summary>
        public void Delete()
        {
            // Snippet: Delete(string, string, string, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string serviceAttachment = "";
            // Make the request
            lro::Operation<Operation, Operation> response = serviceAttachmentsClient.Delete(project, region, serviceAttachment);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = serviceAttachmentsClient.PollOnceDelete(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for DeleteAsync</summary>
        public async Task DeleteAsync()
        {
            // Snippet: DeleteAsync(string, string, string, CallSettings)
            // Additional: DeleteAsync(string, string, string, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string serviceAttachment = "";
            // Make the request
            lro::Operation<Operation, Operation> response = await serviceAttachmentsClient.DeleteAsync(project, region, serviceAttachment);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await serviceAttachmentsClient.PollOnceDeleteAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for Get</summary>
        public void GetRequestObject()
        {
            // Snippet: Get(GetServiceAttachmentRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            GetServiceAttachmentRequest request = new GetServiceAttachmentRequest
            {
                Region = "",
                Project = "",
                ServiceAttachment = "",
            };
            // Make the request
            ServiceAttachment response = serviceAttachmentsClient.Get(request);
            // End snippet
        }

        /// <summary>Snippet for GetAsync</summary>
        public async Task
GetRequestObjectAsync()
        {
            // Snippet: GetAsync(GetServiceAttachmentRequest, CallSettings)
            // Additional: GetAsync(GetServiceAttachmentRequest, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            GetServiceAttachmentRequest request = new GetServiceAttachmentRequest
            {
                Region = "",
                Project = "",
                ServiceAttachment = "",
            };
            // Make the request
            ServiceAttachment response = await serviceAttachmentsClient.GetAsync(request);
            // End snippet
        }

        /// <summary>Snippet for Get</summary>
        public void Get()
        {
            // Snippet: Get(string, string, string, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string serviceAttachment = "";
            // Make the request
            ServiceAttachment response = serviceAttachmentsClient.Get(project, region, serviceAttachment);
            // End snippet
        }

        /// <summary>Snippet for GetAsync</summary>
        public async Task GetAsync()
        {
            // Snippet: GetAsync(string, string, string, CallSettings)
            // Additional: GetAsync(string, string, string, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string serviceAttachment = "";
            // Make the request
            ServiceAttachment response = await serviceAttachmentsClient.GetAsync(project, region, serviceAttachment);
            // End snippet
        }

        /// <summary>Snippet for GetIamPolicy</summary>
        public void GetIamPolicyRequestObject()
        {
            // Snippet: GetIamPolicy(GetIamPolicyServiceAttachmentRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            GetIamPolicyServiceAttachmentRequest request = new GetIamPolicyServiceAttachmentRequest
            {
                Region = "",
                Resource = "",
                Project = "",
                OptionsRequestedPolicyVersion = 0,
            };
            // Make the request
            Policy response = serviceAttachmentsClient.GetIamPolicy(request);
            // End snippet
        }

        /// <summary>Snippet for GetIamPolicyAsync</summary>
        public async Task GetIamPolicyRequestObjectAsync()
        {
            // Snippet: GetIamPolicyAsync(GetIamPolicyServiceAttachmentRequest, CallSettings)
            // Additional: GetIamPolicyAsync(GetIamPolicyServiceAttachmentRequest, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            GetIamPolicyServiceAttachmentRequest request = new GetIamPolicyServiceAttachmentRequest
            {
                Region = "",
                Resource = "",
                Project = "",
                OptionsRequestedPolicyVersion = 0,
            };
            // Make the request
            Policy response = await serviceAttachmentsClient.GetIamPolicyAsync(request);
            // End snippet
        }

        /// <summary>Snippet for GetIamPolicy</summary>
        public void GetIamPolicy()
        {
            // Snippet: GetIamPolicy(string, string, string, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string resource = "";
            // Make the request
            Policy response = serviceAttachmentsClient.GetIamPolicy(project, region, resource);
            // End snippet
        }

        /// <summary>Snippet for GetIamPolicyAsync</summary>
        public async Task GetIamPolicyAsync()
        {
            // Snippet: GetIamPolicyAsync(string, string, string, CallSettings)
            // Additional: GetIamPolicyAsync(string, string, string, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string resource = "";
            // Make the request
            Policy response = await serviceAttachmentsClient.GetIamPolicyAsync(project, region, resource);
            // End snippet
        }

        /// <summary>Snippet for Insert</summary>
        public void InsertRequestObject()
        {
            // Snippet: Insert(InsertServiceAttachmentRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            InsertServiceAttachmentRequest request = new InsertServiceAttachmentRequest
            {
                RequestId = "",
                Region = "",
                Project = "",
                ServiceAttachmentResource = new ServiceAttachment(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = serviceAttachmentsClient.Insert(request);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = serviceAttachmentsClient.PollOnceInsert(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for InsertAsync</summary>
        public async Task InsertRequestObjectAsync()
        {
            // Snippet: InsertAsync(InsertServiceAttachmentRequest, CallSettings)
            // Additional: InsertAsync(InsertServiceAttachmentRequest, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            InsertServiceAttachmentRequest request = new InsertServiceAttachmentRequest
            {
                RequestId = "",
                Region = "",
                Project = "",
                ServiceAttachmentResource = new ServiceAttachment(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = await serviceAttachmentsClient.InsertAsync(request);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation>
completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await serviceAttachmentsClient.PollOnceInsertAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for Insert</summary>
        public void Insert()
        {
            // Snippet: Insert(string, string, ServiceAttachment, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            ServiceAttachment serviceAttachmentResource = new ServiceAttachment();
            // Make the request
            lro::Operation<Operation, Operation> response = serviceAttachmentsClient.Insert(project, region, serviceAttachmentResource);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = serviceAttachmentsClient.PollOnceInsert(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for InsertAsync</summary>
        public async Task InsertAsync()
        {
            // Snippet: InsertAsync(string, string, ServiceAttachment, CallSettings)
            // Additional: InsertAsync(string, string, ServiceAttachment, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            ServiceAttachment serviceAttachmentResource = new ServiceAttachment();
            // Make the request
            lro::Operation<Operation, Operation> response = await serviceAttachmentsClient.InsertAsync(project, region, serviceAttachmentResource);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await serviceAttachmentsClient.PollOnceInsertAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for List</summary>
        public void ListRequestObject()
        {
            // Snippet: List(ListServiceAttachmentsRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            ListServiceAttachmentsRequest request = new ListServiceAttachmentsRequest
            {
                Region = "",
                OrderBy = "",
                Project = "",
                Filter = "",
                ReturnPartialSuccess = false,
            };
            // Make the request
            PagedEnumerable<ServiceAttachmentList, ServiceAttachment> response = serviceAttachmentsClient.List(request);
            // Iterate over all response items, lazily performing RPCs as required
            foreach (ServiceAttachment item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ServiceAttachmentList page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (ServiceAttachment item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<ServiceAttachment> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (ServiceAttachment item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListAsync</summary>
        public async Task ListRequestObjectAsync()
        {
            // Snippet: ListAsync(ListServiceAttachmentsRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            ListServiceAttachmentsRequest request = new ListServiceAttachmentsRequest
            {
                Region = "",
                OrderBy = "",
                Project = "",
                Filter = "",
                ReturnPartialSuccess = false,
            };
            // Make the request
            PagedAsyncEnumerable<ServiceAttachmentList, ServiceAttachment> response = serviceAttachmentsClient.ListAsync(request);
            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((ServiceAttachment item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });
            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ServiceAttachmentList page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (ServiceAttachment item in page)
                {
// Do something with each item
                    Console.WriteLine(item);
                }
            });
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<ServiceAttachment> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (ServiceAttachment item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for List</summary>
        public void List()
        {
            // Snippet: List(string, string, string, int?, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            // Make the request
            PagedEnumerable<ServiceAttachmentList, ServiceAttachment> response = serviceAttachmentsClient.List(project, region);
            // Iterate over all response items, lazily performing RPCs as required
            foreach (ServiceAttachment item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ServiceAttachmentList page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (ServiceAttachment item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<ServiceAttachment> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (ServiceAttachment item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListAsync</summary>
        public async Task ListAsync()
        {
            // Snippet: ListAsync(string, string, string, int?, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            // Make the request
            PagedAsyncEnumerable<ServiceAttachmentList, ServiceAttachment> response = serviceAttachmentsClient.ListAsync(project, region);
            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((ServiceAttachment item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });
            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ServiceAttachmentList page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (ServiceAttachment item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<ServiceAttachment> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (ServiceAttachment item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for Patch</summary>
        public void PatchRequestObject()
        {
            // Snippet: Patch(PatchServiceAttachmentRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            PatchServiceAttachmentRequest request = new PatchServiceAttachmentRequest
            {
                RequestId = "",
                Region = "",
                Project = "",
                ServiceAttachment = "",
                ServiceAttachmentResource = new ServiceAttachment(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = serviceAttachmentsClient.Patch(request);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = serviceAttachmentsClient.PollOncePatch(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for PatchAsync</summary>
        public async Task PatchRequestObjectAsync()
        {
            // Snippet: PatchAsync(PatchServiceAttachmentRequest, CallSettings)
            // Additional: PatchAsync(PatchServiceAttachmentRequest, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            PatchServiceAttachmentRequest request = new PatchServiceAttachmentRequest
            {
                RequestId = "",
                Region = "",
                Project = "",
                ServiceAttachment = "",
                ServiceAttachmentResource = new ServiceAttachment(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = await serviceAttachmentsClient.PatchAsync(request);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await serviceAttachmentsClient.PollOncePatchAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for Patch</summary>
        public void Patch()
        {
            // Snippet: Patch(string, string, string, ServiceAttachment, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string serviceAttachment = "";
            ServiceAttachment serviceAttachmentResource = new ServiceAttachment();
            // Make the request
            lro::Operation<Operation, Operation> response = serviceAttachmentsClient.Patch(project, region, serviceAttachment, serviceAttachmentResource);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = serviceAttachmentsClient.PollOncePatch(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for PatchAsync</summary>
        public async Task PatchAsync()
        {
            // Snippet: PatchAsync(string, string, string, ServiceAttachment, CallSettings)
            // Additional: PatchAsync(string, string, string, ServiceAttachment, CancellationToken)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string serviceAttachment = "";
            ServiceAttachment serviceAttachmentResource = new ServiceAttachment();
            // Make the request
            lro::Operation<Operation, Operation> response = await serviceAttachmentsClient.PatchAsync(project, region, serviceAttachment, serviceAttachmentResource);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await serviceAttachmentsClient.PollOncePatchAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for SetIamPolicy</summary>
        public void SetIamPolicyRequestObject()
        {
            // Snippet: SetIamPolicy(SetIamPolicyServiceAttachmentRequest, CallSettings)
            // Create client
            ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create();
            // Initialize request argument(s)
            SetIamPolicyServiceAttachmentRequest request = new SetIamPolicyServiceAttachmentRequest
            {
                Region = "",
                Resource = "",
                Project =
"", RegionSetPolicyRequestResource = new RegionSetPolicyRequest(), }; // Make the request Policy response = serviceAttachmentsClient.SetIamPolicy(request); // End snippet } /// <summary>Snippet for SetIamPolicyAsync</summary> public async Task SetIamPolicyRequestObjectAsync() { // Snippet: SetIamPolicyAsync(SetIamPolicyServiceAttachmentRequest, CallSettings) // Additional: SetIamPolicyAsync(SetIamPolicyServiceAttachmentRequest, CancellationToken) // Create client ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync(); // Initialize request argument(s) SetIamPolicyServiceAttachmentRequest request = new SetIamPolicyServiceAttachmentRequest { Region = "", Resource = "", Project = "", RegionSetPolicyRequestResource = new RegionSetPolicyRequest(), }; // Make the request Policy response = await serviceAttachmentsClient.SetIamPolicyAsync(request); // End snippet } /// <summary>Snippet for SetIamPolicy</summary> public void SetIamPolicy() { // Snippet: SetIamPolicy(string, string, string, RegionSetPolicyRequest, CallSettings) // Create client ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create(); // Initialize request argument(s) string project = ""; string region = ""; string resource = ""; RegionSetPolicyRequest regionSetPolicyRequestResource = new RegionSetPolicyRequest(); // Make the request Policy response = serviceAttachmentsClient.SetIamPolicy(project, region, resource, regionSetPolicyRequestResource); // End snippet } /// <summary>Snippet for SetIamPolicyAsync</summary> public async Task SetIamPolicyAsync() { // Snippet: SetIamPolicyAsync(string, string, string, RegionSetPolicyRequest, CallSettings) // Additional: SetIamPolicyAsync(string, string, string, RegionSetPolicyRequest, CancellationToken) // Create client ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync(); // Initialize request argument(s) string project = ""; string region = ""; string 
resource = ""; RegionSetPolicyRequest regionSetPolicyRequestResource = new RegionSetPolicyRequest(); // Make the request Policy response = await serviceAttachmentsClient.SetIamPolicyAsync(project, region, resource, regionSetPolicyRequestResource); // End snippet } /// <summary>Snippet for TestIamPermissions</summary> public void TestIamPermissionsRequestObject() { // Snippet: TestIamPermissions(TestIamPermissionsServiceAttachmentRequest, CallSettings) // Create client ServiceAttachmentsClient serviceAttachmentsClient = ServiceAttachmentsClient.Create(); // Initialize request argument(s) TestIamPermissionsServiceAttachmentRequest request = new TestIamPermissionsServiceAttachmentRequest { Region = "", Resource = "", Project = "", TestPermissionsRequestResource = new TestPermissionsRequest(), }; // Make the request TestPermissionsResponse response = serviceAttachmentsClient.TestIamPermissions(request); // End snippet } /// <summary>Snippet for TestIamPermissionsAsync</summary> public async Task TestIamPermissionsRequestObjectAsync() { // Snippet: TestIamPermissionsAsync(TestIamPermissionsServiceAttachmentRequest, CallSettings) // Additional: TestIamPermissionsAsync(TestIamPermissionsServiceAttachmentRequest, CancellationToken) // Create client ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync(); // Initialize request argument(s) TestIamPermissionsServiceAttachmentRequest request = new TestIamPermissionsServiceAttachmentRequest { Region = "", Resource = "", Project = "", TestPermissionsRequestResource = new TestPermissionsRequest(), }; // Make the request TestPermissionsResponse response = await serviceAttachmentsClient.TestIamPermissionsAsync(request); // End snippet } /// <summary>Snippet for TestIamPermissions</summary> public void TestIamPermissions() { // Snippet: TestIamPermissions(string, string, string, TestPermissionsRequest, CallSettings) // Create client ServiceAttachmentsClient serviceAttachmentsClient = 
ServiceAttachmentsClient.Create(); // Initialize request argument(s) string project = ""; string region = ""; string resource = ""; TestPermissionsRequest testPermissionsRequestResource = new TestPermissionsRequest(); // Make the request TestPermissionsResponse response = serviceAttachmentsClient.TestIamPermissions(project, region, resource, testPermissionsRequestResource); // End snippet } /// <summary>Snippet for TestIamPermissionsAsync</summary> public async Task TestIamPermissionsAsync() { // Snippet: TestIamPermissionsAsync(string, string, string, TestPermissionsRequest, CallSettings) // Additional: TestIamPermissionsAsync(string, string, string, TestPermissionsRequest, CancellationToken) // Create client ServiceAttachmentsClient serviceAttachmentsClient = await ServiceAttachmentsClient.CreateAsync(); // Initialize request argument(s) string project = ""; string region = ""; string resource = ""; TestPermissionsRequest testPermissionsRequestResource = new TestPermissionsRequest(); // Make the request TestPermissionsResponse response = await serviceAttachmentsClient.TestIamPermissionsAsync(project, region, resource, testPermissionsRequestResource); // End snippet } } }
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using log4net;
using PacketDotNet.MiscUtil.Conversion;
using PacketDotNet.Utils;

namespace PacketDotNet
{
    /// <summary>
    /// An ICMPv6 packet.
    /// See http://en.wikipedia.org/wiki/ICMPv6
    /// </summary>
    [Serializable]
    public class ICMPv6Packet : InternetPacket
    {
#if DEBUG
        private static readonly ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
#else
        // NOTE: No need to warn about lack of use, the compiler won't
        // put any calls to 'log' here but we need 'log' to exist to compile
#pragma warning disable 0169, 0649
        private static readonly ILogInactive log;
#pragma warning restore 0169, 0649
#endif

        /// <value>
        /// The ICMPv6 message type, read from the type byte of the header.
        /// </value>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown by the getter when the raw byte is not a defined
        /// <see cref="ICMPv6Types"/> value.
        /// </exception>
        public virtual ICMPv6Types Type
        {
            get
            {
                var val = this.header.Bytes[this.header.Offset + ICMPv6Fields.TypePosition];
                if(Enum.IsDefined(typeof(ICMPv6Types), val))
                {
                    return (ICMPv6Types) val;
                }

                // Fix: use the (paramName, actualValue, message) overload; the original
                // passed the whole message as paramName, producing a garbled Message.
                throw new ArgumentOutOfRangeException(nameof(Type), val,
                    "Type of \"" + val + "\" is not defined in ICMPv6Types");
            }

            set { this.header.Bytes[this.header.Offset + ICMPv6Fields.TypePosition] = (byte) value; }
        }

        /// <summary> Fetch or set the ICMP code byte of the header. </summary>
        public virtual byte Code
        {
            get { return this.header.Bytes[this.header.Offset + ICMPv6Fields.CodePosition]; }
            set { this.header.Bytes[this.header.Offset + ICMPv6Fields.CodePosition] = value; }
        }

        /// <value>
        /// Checksum value, stored big-endian in the header.
        /// </value>
        public ushort Checksum
        {
            get
            {
                return EndianBitConverter.Big.ToUInt16(this.header.Bytes,
                                                       this.header.Offset + ICMPv6Fields.ChecksumPosition);
            }
            set
            {
                var theValue = value;
                EndianBitConverter.Big.CopyBytes(theValue, this.header.Bytes,
                                                 this.header.Offset + ICMPv6Fields.ChecksumPosition);
            }
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="bas">
        /// A <see cref="ByteArraySegment" /> holding the raw packet bytes
        /// </param>
        public ICMPv6Packet(ByteArraySegment bas)
        {
            log.Debug("");
            this.header = new ByteArraySegment(bas);
        }

        /// <summary>
        /// Constructor with parent packet
        /// </summary>
        /// <param name="bas">
        /// A <see cref="ByteArraySegment" />
        /// </param>
        /// <param name="ParentPacket">
        /// A <see cref="Packet" /> that encapsulates this one (expected to be an IPv6 packet)
        /// </param>
        public ICMPv6Packet(ByteArraySegment bas, Packet ParentPacket) : this(bas)
        {
            this.ParentPacket = ParentPacket;
        }

        /// <summary>
        /// Used to prevent a recursive stack overflow
        /// when recalculating in UpdateCalculatedValues()
        /// </summary>
        private bool skipUpdating;

        /// <summary>
        /// Recalculate the checksum over this packet combined with the
        /// IPv6 pseudo header provided by the parent packet.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the parent packet is missing or not an <see cref="IPv6Packet"/>;
        /// the pseudo header required for the ICMPv6 checksum cannot be built without it.
        /// </exception>
        public override void UpdateCalculatedValues()
        {
            if(this.skipUpdating)
            {
                return;
            }

            // prevent us from entering this routine twice:
            // retrieving the Bytes property below causes this routine to be
            // called recursively, which would otherwise overflow the stack
            this.skipUpdating = true;
            try
            {
                // start with this packet with a zeroed out checksum field
                this.Checksum = 0;
                var originalBytes = this.Bytes;

                // Fix: fail with a descriptive exception instead of a
                // NullReferenceException when the parent is absent or not IPv6
                var ipv6Parent = this.ParentPacket as IPv6Packet;
                if(ipv6Parent == null)
                {
                    throw new InvalidOperationException(
                        "Cannot calculate the ICMPv6 checksum without an IPv6Packet parent packet");
                }

                var bytesToChecksum = ipv6Parent.AttachPseudoIPHeader(originalBytes);

                // calculate the one's complement sum of the pseudo header plus packet bytes
                this.Checksum = (ushort) ChecksumUtils.OnesComplementSum(bytesToChecksum);
            }
            finally
            {
                // Fix: clear the skip flag even when an exception escapes; the original
                // left it set, silently disabling all subsequent checksum updates
                this.skipUpdating = false;
            }
        }

        /// <summary> Fetch ascii escape sequence of the color associated with this packet type.</summary>
        public override String Color
        {
            get { return AnsiEscapeSequences.LightBlue; }
        }

        /// <summary cref="Packet.ToString(StringOutputType)" />
        public override string ToString(StringOutputType outputFormat)
        {
            var buffer = new StringBuilder();
            var color = "";
            var colorEscape = "";

            if(outputFormat == StringOutputType.Colored || outputFormat == StringOutputType.VerboseColored)
            {
                color = this.Color;
                colorEscape = AnsiEscapeSequences.Reset;
            }

            if(outputFormat == StringOutputType.Normal || outputFormat == StringOutputType.Colored)
            {
                // build the output string
                // Fix: label the packet as ICMPv6 (the original said "ICMPPacket",
                // a copy/paste leftover from the v4 class)
                buffer.AppendFormat("{0}[ICMPv6Packet: Type={2}, Code={3}]{1}",
                    color,
                    colorEscape,
                    this.Type,
                    this.Code);
            }

            if(outputFormat == StringOutputType.Verbose || outputFormat == StringOutputType.VerboseColored)
            {
                // collect the properties and their value
                var properties = new Dictionary<string, string>();
                properties.Add("type", this.Type + " (" + (int) this.Type + ")");
                properties.Add("code", this.Code.ToString());
                // TODO: Implement a checksum verification for ICMPv6
                properties.Add("checksum", "0x" + this.Checksum.ToString("x"));
                // TODO: Implement ICMPv6 Option fields here?

                // calculate the padding needed to right-justify the property names
                var padLength = RandomUtils.LongestStringLength(new List<string>(properties.Keys));

                // build the output string
                buffer.AppendLine("ICMP: ******* ICMPv6 - \"Internet Control Message Protocol (Version 6)\"- offset=? length=" + this.TotalPacketLength);
                buffer.AppendLine("ICMP:");
                foreach(var property in properties)
                {
                    buffer.AppendLine("ICMP: " + property.Key.PadLeft(padLength) + " = " + property.Value);
                }
                buffer.AppendLine("ICMP:");
            }

            // append the base string output
            buffer.Append(base.ToString(outputFormat));

            return buffer.ToString();
        }

        /// <summary>
        /// Returns the ICMPv6Packet inside of Packet p or null if
        /// there is no encapsulated ICMPv6Packet
        /// </summary>
        /// <param name="p">
        /// A <see cref="Packet" />
        /// </param>
        /// <returns>
        /// A <see cref="ICMPv6Packet" />, or null when none is present
        /// </returns>
        [Obsolete("Use Packet.Extract() instead")]
        public static ICMPv6Packet GetEncapsulated(Packet p)
        {
            log.Debug("");

            if(p is InternetLinkLayerPacket)
            {
                var payload = InternetLinkLayerPacket.GetInnerPayload((InternetLinkLayerPacket) p);
                if(payload is IpPacket)
                {
                    var payload2 = payload.PayloadPacket;
                    if(payload2 is ICMPv6Packet)
                    {
                        return (ICMPv6Packet) payload2;
                    }
                }
            }
            return null;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using Microsoft.Internal;

namespace System.ComponentModel.Composition.Hosting
{
    /// <summary>
    /// AtomicComposition provides lightweight atomicCompositional semantics to enable temporary
    /// state to be managed for a series of nested atomicCompositions. Each atomicComposition maintains
    /// queryable state along with a sequence of actions necessary to complete the state when
    /// the atomicComposition is no longer in danger of being rolled back. State is completed or
    /// rolled back when the atomicComposition is disposed, depending on whether Complete() was
    /// called beforehand. The using(...) pattern in C# is a convenient mechanism for defining
    /// atomicComposition scopes.
    ///
    /// The least obvious aspects of AtomicComposition deal with nesting.
    ///
    /// Firstly, no complete actions are actually performed until the outermost atomicComposition is
    /// completed. Completing or rolling back nested atomicCompositions serves only to change which
    /// actions would be completed by the outer atomicComposition.
    ///
    /// Secondly, state is added in the form of queries associated with an object key. The
    /// key represents a unique object the state is being held on behalf of. The queries are
    /// accessed through the TryGetValue methods, which provide automatic chaining to execute
    /// queries across the target atomicComposition and its outer atomicCompositions as appropriate.
    ///
    /// Lastly, when a nested atomicComposition is created for a given outer the outer atomicComposition is locked.
    /// It remains locked until the inner atomicComposition is disposed or completed, preventing the addition of
    /// state, actions or other inner atomicCompositions.
    /// </summary>
    public class AtomicComposition : IDisposable
    {
        // Link to the enclosing (outer) atomicComposition, or null for the outermost scope.
        private readonly AtomicComposition _outerAtomicComposition;
        // Key/value state, stored as a flat array scanned linearly (counts are small).
        private KeyValuePair<object, object>[] _values;
        private int _valueCount = 0;
        // Deferred actions: complete actions run when the outermost scope completes,
        // revert actions run (in reverse order) when a scope is disposed uncompleted.
        private List<Action> _completeActionList;
        private List<Action> _revertActionList;
        private bool _isDisposed = false;
        private bool _isCompleted = false;
        // True while a nested atomicComposition is alive; locks this scope against changes.
        private bool _containsInnerAtomicComposition = false;

        /// <summary>
        /// Creates an outermost (un-nested) atomicComposition.
        /// </summary>
        public AtomicComposition()
            : this(null)
        {
        }

        /// <summary>
        /// Creates an atomicComposition nested inside <paramref name="outerAtomicComposition"/>
        /// (which may be null for an outermost scope).
        /// </summary>
        public AtomicComposition(AtomicComposition outerAtomicComposition)
        {
            // Lock the outer atomicComposition so that we can assume nothing changes except on
            // the innermost scope, and thereby optimize the query path
            if (outerAtomicComposition != null)
            {
                _outerAtomicComposition = outerAtomicComposition;
                _outerAtomicComposition.ContainsInnerAtomicComposition = true;
            }
        }

        /// <summary>
        /// Associates <paramref name="value"/> with <paramref name="key"/> in this scope,
        /// overwriting any value this scope already holds for the key.
        /// </summary>
        public void SetValue(object key, object value)
        {
            ThrowIfDisposed();
            ThrowIfCompleted();
            ThrowIfContainsInnerAtomicComposition();

            Requires.NotNull(key, nameof(key));

            SetValueInternal(key, value);
        }

        /// <summary>
        /// Looks up the value for <paramref name="key"/>, searching this scope and then
        /// each outer scope in turn.
        /// </summary>
        public bool TryGetValue<T>(object key, out T value)
        {
            return TryGetValue(key, false, out value);
        }

        /// <summary>
        /// Looks up the value for <paramref name="key"/>; when
        /// <paramref name="localAtomicCompositionOnly"/> is true only this scope is searched.
        /// </summary>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters")]
        public bool TryGetValue<T>(object key, bool localAtomicCompositionOnly, out T value)
        {
            ThrowIfDisposed();
            ThrowIfCompleted();

            Requires.NotNull(key, nameof(key));

            return TryGetValueInternal(key, localAtomicCompositionOnly, out value);
        }

        /// <summary>
        /// Registers an action to run when the outermost atomicComposition completes.
        /// </summary>
        public void AddCompleteAction(Action completeAction)
        {
            ThrowIfDisposed();
            ThrowIfCompleted();
            ThrowIfContainsInnerAtomicComposition();

            Requires.NotNull(completeAction, nameof(completeAction));

            if (_completeActionList == null)
            {
                _completeActionList = new List<Action>();
            }
            _completeActionList.Add(completeAction);
        }

        /// <summary>
        /// Registers an action to run if this atomicComposition (or an enclosing one)
        /// is rolled back; revert actions execute in reverse registration order.
        /// </summary>
        public void AddRevertAction(Action revertAction)
        {
            ThrowIfDisposed();
            ThrowIfCompleted();
            ThrowIfContainsInnerAtomicComposition();

            Requires.NotNull(revertAction, nameof(revertAction));

            if (_revertActionList == null)
            {
                _revertActionList = new List<Action>();
            }
            _revertActionList.Add(revertAction);
        }

        /// <summary>
        /// Marks this atomicComposition as successful. For the outermost scope the complete
        /// actions run immediately; for a nested scope the actions and state are transferred
        /// to the outer scope instead.
        /// </summary>
        public void Complete()
        {
            ThrowIfDisposed();
            ThrowIfCompleted();

            if (_outerAtomicComposition == null)
            {   // Execute all the complete actions
                FinalComplete();
            }
            else
            {   // Copy the actions and state to the outer atomicComposition
                CopyComplete();
            }
            _isCompleted = true;
        }

        /// <summary>
        /// Disposes the atomicComposition; if Complete() was not called first, all registered
        /// revert actions are executed (rollback).
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Core dispose logic: unlocks the outer scope and, when uncompleted, executes the
        /// revert actions in reverse order, aggregating any exceptions they throw.
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            ThrowIfDisposed();
            _isDisposed = true;

            if (_outerAtomicComposition != null)
            {
                _outerAtomicComposition.ContainsInnerAtomicComposition = false;
            }

            // Revert is always immediate and involves forgetting information and
            // executing any appropriate revert actions
            if (!_isCompleted)
            {
                if (_revertActionList != null)
                {
                    List<Exception> exceptions = null;

                    // Execute the revert actions in reverse order to ensure
                    // everything incrementally rolls back its state.
                    for (int i = _revertActionList.Count - 1; i >= 0; i--)
                    {
                        Action action = _revertActionList[i];
                        try
                        {
                            action();
                        }
                        catch(CompositionException)
                        {
                            // Deliberate policy: CompositionException propagates immediately,
                            // abandoning the remaining revert actions.
                            throw;
                        }
                        catch(Exception e)
                        {
                            // Collect other exceptions so the remaining revert actions still
                            // run; they are rethrown below wrapped in an AggregateException
                            // inside an InvalidOperationException.
                            if (exceptions == null)
                            {
                                exceptions = new List<Exception>();
                            }
                            exceptions.Add(e);
                        }
                    }
                    _revertActionList = null;
                    if(exceptions != null)
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_RevertAndCompleteActionsMustNotThrow, new AggregateException(exceptions));
                    }
                }
            }
        }

        // Completing the outermost scope is easy: just execute all the complete actions,
        // aggregating any exceptions they throw (same policy as Dispose's revert loop).
        private void FinalComplete()
        {
            if (_completeActionList != null)
            {
                List<Exception> exceptions = null;

                foreach (Action action in _completeActionList)
                {
                    try
                    {
                        action();
                    }
                    catch(CompositionException)
                    {
                        // Deliberate policy: CompositionException propagates immediately,
                        // abandoning the remaining complete actions.
                        throw;
                    }
                    catch(Exception e)
                    {
                        // Collect other exceptions so the remaining complete actions still
                        // run; they are rethrown below wrapped in an AggregateException
                        // inside an InvalidOperationException.
                        if (exceptions == null)
                        {
                            exceptions = new List<Exception>();
                        }
                        exceptions.Add(e);
                    }
                }
                _completeActionList = null;
                if(exceptions != null)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_RevertAndCompleteActionsMustNotThrow, new AggregateException(exceptions));
                }
            }
        }

        // Completing an inner scope means coalescing it into the outer scope: the complete
        // and revert actions are deferred until the outermost scope completes or any
        // intermediate scope rolls back.
        private void CopyComplete()
        {
            Assumes.NotNull(_outerAtomicComposition);

            // Unlock the outer scope so the transfers below are permitted.
            _outerAtomicComposition.ContainsInnerAtomicComposition = false;

            if (_completeActionList != null)
            {
                foreach (Action action in _completeActionList)
                {
                    _outerAtomicComposition.AddCompleteAction(action);
                }
            }

            if (_revertActionList != null)
            {
                foreach (Action action in _revertActionList)
                {
                    _outerAtomicComposition.AddRevertAction(action);
                }
            }

            // We can copy over existing atomicComposition entries because they're either already chained or
            // overwrite by design and can now be completed or rolled back together
            for (var index = 0; index < _valueCount; index++)
            {
                _outerAtomicComposition.SetValueInternal(
                    _values[index].Key, _values[index].Value);
            }
        }

        // Write-only flag guarding against more than one live inner scope at a time.
        private bool ContainsInnerAtomicComposition
        {
            set
            {
                if (value == true && _containsInnerAtomicComposition == true)
                {
                    throw new InvalidOperationException(SR.AtomicComposition_AlreadyNested);
                }
                _containsInnerAtomicComposition = value;
            }
        }

        // Linear scan of this scope's entries; on a miss, recurse to the outer scope
        // (unless localAtomicCompositionOnly is set) until the outermost scope is reached.
        private bool TryGetValueInternal<T>(object key, bool localAtomicCompositionOnly, out T value)
        {
            for (var index = 0; index < _valueCount; index++)
            {
                if (_values[index].Key == key)
                {
                    value = (T)_values[index].Value;
                    return true;
                }
            }

            // If there's no entry locally then recurse until we hit the outermost
            // scope, whereupon we give up and report failure
            if (!localAtomicCompositionOnly && _outerAtomicComposition != null)
            {
                return _outerAtomicComposition.TryGetValueInternal<T>(key, localAtomicCompositionOnly, out value);
            }

            value = default(T);
            return false;
        }

        // Stores key/value in this scope, overwriting an existing entry for the same key
        // (reference equality) and growing the backing array geometrically when full.
        private void SetValueInternal(object key, object value)
        {
            // Handle overwrites quickly
            for (var index = 0; index < _valueCount; index++)
            {
                if (_values[index].Key == key)
                {
                    _values[index] = new KeyValuePair<object,object>(key, value);
                    return;
                }
            }

            // Expand storage when needed: start at 5 entries, then double
            if (_values == null || _valueCount == _values.Length)
            {
                var newQueries = new KeyValuePair<object, object>[_valueCount == 0 ? 5 : _valueCount * 2];
                if (_values != null)
                {
                    Array.Copy(_values, newQueries, _valueCount);
                }
                _values = newQueries;
            }

            // Store a new entry
            _values[_valueCount] = new KeyValuePair<object, object>(key, value);
            _valueCount++;
            return;
        }

        // Guard: this scope is locked while an inner atomicComposition is alive.
        [DebuggerStepThrough]
        private void ThrowIfContainsInnerAtomicComposition()
        {
            if (_containsInnerAtomicComposition)
            {
                throw new InvalidOperationException(SR.AtomicComposition_PartOfAnotherAtomicComposition);
            }
        }

        // Guard: no further operations are permitted after Complete().
        [DebuggerStepThrough]
        private void ThrowIfCompleted()
        {
            if (_isCompleted)
            {
                throw new InvalidOperationException(SR.AtomicComposition_AlreadyCompleted);
            }
        }

        // Guard: no further operations (including a second Dispose) after disposal.
        [DebuggerStepThrough]
        private void ThrowIfDisposed()
        {
            if (_isDisposed)
            {
                throw ExceptionBuilder.CreateObjectDisposed(this);
            }
        }
    }
}
#region License /* ********************************************************************************** * Copyright (c) Roman Ivantsov * This source code is subject to terms and conditions of the MIT License * for Irony. A copy of the license can be found in the License.txt file * at the root of this distribution. * By using this source code in any fashion, you are agreeing to be bound by the terms of the * MIT License. * You must not remove this notice from this software. * **********************************************************************************/ #endregion //Authors: Roman Ivantsov - initial implementation and some later edits // Philipp Serr - implementation of advanced features for c#, python, VB using System; using System.Collections.Generic; using System.Globalization; using System.Text; using System.Diagnostics; namespace Irony.Parsing { using BigInteger = Microsoft.Scripting.Math.BigInteger; using Complex64 = Microsoft.Scripting.Math.Complex64; [Flags] public enum NumberOptions { None = 0, Default = None, AllowStartEndDot = 0x01, //python : http://docs.python.org/ref/floating.html IntOnly = 0x02, NoDotAfterInt = 0x04, //for use with IntOnly flag; essentially tells terminal to avoid matching integer if // it is followed by dot (or exp symbol) - leave to another terminal that will handle float numbers AllowSign = 0x08, DisableQuickParse = 0x10, AllowLetterAfter = 0x20, // allow number be followed by a letter or underscore; by default this flag is not set, so "3a" would not be // recognized as number followed by an identifier AllowUnderscore = 0x40, // Ruby allows underscore inside number: 1_234 //The following should be used with base-identifying prefixes Binary = 0x0100, //e.g. 
GNU GCC C Extension supports binary number literals Octal = 0x0200, Hex = 0x0400, } public class NumberLiteral : CompoundTerminalBase { //Flags for internal use public enum NumberFlagsInternal : short { HasDot = 0x1000, HasExp = 0x2000, } //nested helper class public class ExponentsTable : Dictionary<char, TypeCode> { } #region Public Consts //currently using TypeCodes for identifying numeric types public const TypeCode TypeCodeBigInt = (TypeCode)30; public const TypeCode TypeCodeImaginary = (TypeCode)31; #endregion #region constructors and initialization public NumberLiteral(string name) : this(name, NumberOptions.Default) { } public NumberLiteral(string name, NumberOptions flags, Type astNodeType) : this(name, flags) { base.AstNodeType = astNodeType; } public NumberLiteral(string name, NumberOptions flags, AstNodeCreator astNodeCreator) : this(name, flags) { base.AstNodeCreator = astNodeCreator; } public NumberLiteral(string name, NumberOptions flags) : base(name) { Options = flags; base.SetFlag(TermFlags.IsLiteral); } public void AddPrefix(string prefix, NumberOptions flags) { PrefixFlags.Add(prefix, (short) flags); Prefixes.Add(prefix); } public void AddExponentSymbols(string symbols, TypeCode floatType) { foreach(var exp in symbols) _exponentsTable[exp] = floatType; } #endregion #region Public fields/properties: ExponentSymbols, Suffixes public NumberOptions Options; public char DecimalSeparator = '.'; //Default types are assigned to literals without suffixes; first matching type used public TypeCode[] DefaultIntTypes = new TypeCode[] { TypeCode.Int32 }; public TypeCode DefaultFloatType = TypeCode.Double; private ExponentsTable _exponentsTable = new ExponentsTable(); private string _allExponentSymbols; public bool IsSet(NumberOptions option) { return (Options & option) != 0; } #endregion #region Private fields: _quickParseTerminators #endregion #region overrides public override void Init(GrammarData grammarData) { base.Init(grammarData); //Default Exponent 
symbols if table is empty if(_exponentsTable.Count == 0 && !IsSet(NumberOptions.IntOnly)) { _exponentsTable['e'] = DefaultFloatType; _exponentsTable['E'] = DefaultFloatType; } // collect all exponent symbols _allExponentSymbols = string.Empty; foreach(var exp in _exponentsTable.Keys) _allExponentSymbols += exp; if (this.EditorInfo == null) this.EditorInfo = new TokenEditorInfo(TokenType.Literal, TokenColor.Number, TokenTriggers.None); } public override IList<string> GetFirsts() { StringList result = new StringList(); result.AddRange(base.Prefixes); //we assume that prefix is always optional, so number can always start with plain digit result.AddRange(new string[] { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" }); // Python float numbers can start with a dot if (IsSet(NumberOptions.AllowStartEndDot)) result.Add(DecimalSeparator.ToString()); if (IsSet(NumberOptions.AllowSign)) result.AddRange(new string[] {"-", "+"} ); return result; } //Most numbers in source programs are just one-digit instances of 0, 1, 2, and maybe others until 9 // so we try to do a quick parse for these, without starting the whole general process protected override Token QuickParse(ParsingContext context, ISourceStream source) { if (IsSet(NumberOptions.DisableQuickParse)) return null; char current = source.PreviewChar; //it must be a digit followed by a terminator if (!char.IsDigit(current) || GrammarData.WhitespaceAndDelimiters.IndexOf(source.NextPreviewChar) < 0) return null; int iValue = current - '0'; object value = null; switch (DefaultIntTypes[0]) { case TypeCode.Int32: value = iValue; break; case TypeCode.UInt32: value = (UInt32)iValue; break; case TypeCode.Byte: value = (byte)iValue; break; case TypeCode.SByte: value = (sbyte) iValue; break; case TypeCode.Int16: value = (Int16)iValue; break; case TypeCode.UInt16: value = (UInt16)iValue; break; default: return null; } source.PreviewPosition++; return source.CreateToken(this.OutputTerminal, value); } protected override void 
InitDetails(ParsingContext context, CompoundTokenDetails details) { base.InitDetails(context, details); details.Flags = (short) this.Options; } protected override void ReadPrefix(ISourceStream source, CompoundTokenDetails details) { //check that is not a 0 followed by dot; //this may happen in Python for number "0.123" - we can mistakenly take "0" as octal prefix if (source.PreviewChar == '0' && source.NextPreviewChar == '.') return; base.ReadPrefix(source, details); }//method protected override bool ReadBody(ISourceStream source, CompoundTokenDetails details) { //remember start - it may be different from source.TokenStart, we may have skipped prefix int start = source.PreviewPosition; char current = source.PreviewChar; if (IsSet(NumberOptions.AllowSign) && (current == '-' || current == '+')) { details.Sign = current.ToString(); source.PreviewPosition++; } //Figure out digits set string digits = GetDigits(details); bool isDecimal = !details.IsSet((short) (NumberOptions.Binary | NumberOptions.Octal | NumberOptions.Hex)); bool allowFloat = !IsSet(NumberOptions.IntOnly); bool foundDigits = false; while (!source.EOF()) { current = source.PreviewChar; //1. If it is a digit, just continue going; the same for '_' if it is allowed if (digits.IndexOf(current) >= 0 || IsSet(NumberOptions.AllowUnderscore) && current == '_') { source.PreviewPosition++; foundDigits = true; continue; } //2. 
// Check if it is a dot in a float number.
bool isDot = current == DecimalSeparator;
if (allowFloat && isDot) {
    //If we had seen already a dot or exponent, don't accept this one;
    bool hasDotOrExp = details.IsSet((short) (NumberFlagsInternal.HasDot | NumberFlagsInternal.HasExp));
    if (hasDotOrExp) break; //from while loop
    //In python number literals (NumberAllowPointFloat) a point can be the first and last character,
    //We accept dot only if it is followed by a digit
    if (digits.IndexOf(source.NextPreviewChar) < 0 && !IsSet(NumberOptions.AllowStartEndDot))
        break; //from while loop
    details.Flags |= (int) NumberFlagsInternal.HasDot;
    source.PreviewPosition++;
    continue;
}
//3. Check if it is int number followed by dot or exp symbol
bool isExpSymbol = (details.ExponentSymbol == null) && _allExponentSymbols.IndexOf(current) >= 0;
if (!allowFloat && foundDigits && (isDot || isExpSymbol)) {
    //If no partial float allowed then return false - it is not integer, let float terminal recognize it as float
    if (IsSet(NumberOptions.NoDotAfterInt)) return false;
    //otherwise break, it is integer and we're done reading digits
    break;
}
//4. Only for decimals - check if it is (the first) exponent symbol
if (allowFloat && isDecimal && isExpSymbol) {
    char next = source.NextPreviewChar;
    bool nextIsSign = next == '-' || next == '+';
    bool nextIsDigit = digits.IndexOf(next) >= 0;
    if (!nextIsSign && !nextIsDigit)
        break; //Exponent should be followed by either sign or digit
    //ok, we've got real exponent
    details.ExponentSymbol = current.ToString(); //remember the exp char
    details.Flags |= (int) NumberFlagsInternal.HasExp;
    source.PreviewPosition++;
    if (nextIsSign)
        source.PreviewPosition++; //skip +/- explicitly so we don't have to deal with them on the next iteration
    continue;
}
//5. It is something else (not digit, not dot or exponent) - we're done
break; //from while loop
}//while
int end = source.PreviewPosition;
if (!foundDigits)
    return false;
details.Body = source.Text.Substring(start, end - start);
return true;
}

// Rejects a number token that is immediately followed by a letter/underscore
// (e.g. "123abc"), unless NumberOptions.AllowLetterAfter is enabled.
protected internal override void InvokeValidateToken(ParsingContext context) {
    if (!IsSet(NumberOptions.AllowLetterAfter)) {
        var current = context.Source.PreviewChar;
        if(char.IsLetter(current) || current == '_') {
            context.CurrentToken = context.Source.CreateErrorToken(Resources.ErrNoLetterAfterNum); // "Number cannot be followed by a letter."
        }
    }
    base.InvokeValidateToken(context);
}

// Converts the scanned token body into a concrete CLR value, trying the fast
// Int32/Double paths first and then walking the full list of candidate type codes.
protected override bool ConvertValue(CompoundTokenDetails details) {
    if (String.IsNullOrEmpty(details.Body)) {
        details.Error = Resources.ErrInvNumber; // "Invalid number.";
        return false;
    }
    AssignTypeCodes(details);
    //check for underscore
    if (IsSet(NumberOptions.AllowUnderscore) && details.Body.Contains("_"))
        details.Body = details.Body.Replace("_", string.Empty);
    //Try quick paths
    switch (details.TypeCodes[0]) {
        case TypeCode.Int32:
            if (QuickConvertToInt32(details)) return true;
            break;
        case TypeCode.Double:
            if (QuickConvertToDouble(details)) return true;
            break;
    }
    //Go full cycle
    details.Value = null;
    foreach (TypeCode typeCode in details.TypeCodes) {
        switch (typeCode) {
            case TypeCode.Single:
            case TypeCode.Double:
            case TypeCode.Decimal:
            case TypeCodeImaginary:
                return ConvertToFloat(typeCode, details);
            case TypeCode.SByte:
            case TypeCode.Byte:
            case TypeCode.Int16:
            case TypeCode.UInt16:
            case TypeCode.Int32:
            case TypeCode.UInt32:
            case TypeCode.Int64:
            case TypeCode.UInt64:
                if (details.Value == null) //if it is not done yet
                    TryConvertToUlong(details); //try to convert to ULong and place the result into details.Value field;
                if(TryCastToIntegerType(typeCode, details)) //now try to cast the ULong value to the target type
                    return true;
                break;
            case TypeCodeBigInt:
                if (ConvertToBigInteger(details)) return true;
                break;
        }//switch
    }
    return false;
}//method

// Decides which CLR type codes the token may convert to, based on the presence
// of a dot and/or exponent symbol; a suffix seen during scanning takes priority.
private void AssignTypeCodes(CompoundTokenDetails details) {
    //Type could be assigned when we read suffix; if so, just exit
    if (details.TypeCodes != null) return;
    //Decide on float types
    var hasDot = details.IsSet((short)(NumberFlagsInternal.HasDot));
    var hasExp = details.IsSet((short)(NumberFlagsInternal.HasExp));
    var isFloat = (hasDot || hasExp);
    if (!isFloat) {
        details.TypeCodes = DefaultIntTypes;
        return;
    }
    //so we have a float. If we have exponent symbol then use it to select type
    if (hasExp) {
        TypeCode code;
        if (_exponentsTable.TryGetValue(details.ExponentSymbol[0], out code)) {
            details.TypeCodes = new TypeCode[] {code};
            return;
        }
    }//if hasExp
    //Finally assign default float type
    details.TypeCodes = new TypeCode[] {DefaultFloatType};
}
#endregion

#region private utilities
// Fast path for small decimal/hex/octal/binary integers that fit in Int32.
private bool QuickConvertToInt32(CompoundTokenDetails details) {
    int radix = GetRadix(details);
    if (radix == 10 && details.Body.Length > 10) return false; //10 digits is maximum for int32; int32.MaxValue = 2 147 483 647
    try {
        //workaround for .Net FX bug: http://connect.microsoft.com/VisualStudio/feedback/ViewFeedback.aspx?FeedbackID=278448
        int iValue = 0;
        if (radix == 10)
            iValue = Convert.ToInt32(details.Body, CultureInfo.InvariantCulture);
        else
            iValue = Convert.ToInt32(details.Body, radix);
        details.Value = iValue;
        return true;
    } catch {
        // Any parse/overflow failure just means "take the slow path".
        return false;
    }
}//method

// Fast path for plain decimal fractions ("1.5") with no exponent and no non-decimal base.
private bool QuickConvertToDouble(CompoundTokenDetails details) {
    if (details.IsSet((short)(NumberOptions.Binary | NumberOptions.Octal | NumberOptions.Hex))) return false;
    if (details.IsSet((short)(NumberFlagsInternal.HasExp))) return false;
    if (DecimalSeparator != '.') return false;
    double dvalue;
    if (!double.TryParse(details.Body, NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out dvalue)) return false;
    details.Value = dvalue;
    return true;
}

// Full conversion to Single/Double/Decimal (or imaginary, stored as Complex64),
// normalizing custom exponent symbols and decimal separators to invariant form first.
private bool ConvertToFloat(TypeCode typeCode, CompoundTokenDetails details) {
    //only decimal numbers can be fractions
    if (details.IsSet((short)(NumberOptions.Binary | NumberOptions.Octal | NumberOptions.Hex))) {
        details.Error = Resources.ErrInvNumber; // "Invalid number.";
        return false;
    }
    string body = details.Body;
    //Some languages allow exp symbols other than E. Check if it is the case, and change it to E
    // - otherwise .NET conversion methods may fail
    if (details.IsSet((short)NumberFlagsInternal.HasExp) && details.ExponentSymbol.ToUpper() != "E")
        body = body.Replace(details.ExponentSymbol, "E");
    //'.' decimal seperator required by invariant culture
    if (details.IsSet((short)NumberFlagsInternal.HasDot) && DecimalSeparator != '.')
        body = body.Replace(DecimalSeparator, '.');
    switch (typeCode) {
        case TypeCode.Double:
        case TypeCodeImaginary:
            double dValue;
            if (!Double.TryParse(body, NumberStyles.Float, CultureInfo.InvariantCulture, out dValue)) return false;
            if (typeCode == TypeCodeImaginary)
                details.Value = new Complex64(0, dValue);
            else
                details.Value = dValue;
            return true;
        case TypeCode.Single:
            float fValue;
            if (!Single.TryParse(body, NumberStyles.Float, CultureInfo.InvariantCulture, out fValue)) return false;
            details.Value = fValue;
            return true;
        case TypeCode.Decimal:
            decimal decValue;
            if (!Decimal.TryParse(body, NumberStyles.Float, CultureInfo.InvariantCulture, out decValue)) return false;
            details.Value = decValue;
            return true;
    }//switch
    return false;
}

// Narrows the ULong already stored in details.Value down to the requested integer type.
private bool TryCastToIntegerType(TypeCode typeCode, CompoundTokenDetails details) {
    if (details.Value == null) return false;
    try {
        if (typeCode != TypeCode.UInt64)
            details.Value = Convert.ChangeType(details.Value, typeCode, CultureInfo.InvariantCulture);
        return true;
    } catch (Exception) {
        details.Error = string.Format(Resources.ErrCannotConvertValueToType, details.Value, typeCode.ToString());
        return false;
    }
}//method

// Parses the body into a UInt64 (honoring the radix) and stores it in details.Value.
private bool TryConvertToUlong(CompoundTokenDetails details) {
    try {
        int radix = GetRadix(details);
        //workaround for .Net FX bug: http://connect.microsoft.com/VisualStudio/feedback/ViewFeedback.aspx?FeedbackID=278448
        if (radix == 10)
            details.Value = Convert.ToUInt64(details.Body, CultureInfo.InvariantCulture);
        else
            details.Value = Convert.ToUInt64(details.Body, radix);
        return true;
    } catch(OverflowException) {
        // Overflow means the literal needs a wider type (e.g. BigInt); record and report.
        details.Error = string.Format(Resources.ErrCannotConvertValueToType, details.Value, TypeCode.UInt64.ToString());
        return false;
    }
}

// Converts an arbitrarily long integer literal into a BigInteger by splitting the
// digit string into UInt64-safe sections and folding them together (big-endian).
private bool ConvertToBigInteger(CompoundTokenDetails details) {
    //ignore leading zeros and sign
    details.Body = details.Body.TrimStart('+').TrimStart('-').TrimStart('0');
    if (string.IsNullOrEmpty(details.Body))
        details.Body = "0";
    int bodyLength = details.Body.Length;
    int radix = GetRadix(details);
    int wordLength = GetSafeWordLength(details);
    int sectionCount = GetSectionCount(bodyLength, wordLength);
    ulong[] numberSections = new ulong[sectionCount]; //big endian
    try {
        int startIndex = details.Body.Length - wordLength;
        for (int sectionIndex = sectionCount - 1; sectionIndex >= 0; sectionIndex--) {
            if (startIndex < 0) {
                // First (leftmost) section may be shorter than a full word.
                wordLength += startIndex;
                startIndex = 0;
            }
            //workaround for .Net FX bug: http://connect.microsoft.com/VisualStudio/feedback/ViewFeedback.aspx?FeedbackID=278448
            if (radix == 10)
                numberSections[sectionIndex] = Convert.ToUInt64(details.Body.Substring(startIndex, wordLength));
            else
                numberSections[sectionIndex] = Convert.ToUInt64(details.Body.Substring(startIndex, wordLength), radix);
            startIndex -= wordLength;
        }
    } catch {
        details.Error = Resources.ErrInvNumber;// "Invalid number.";
        return false;
    }
    //produce big integer
    ulong safeWordRadix = GetSafeWordRadix(details);
    BigInteger bigIntegerValue = numberSections[0];
    for (int i = 1; i < sectionCount; i++)
        bigIntegerValue = checked(bigIntegerValue * safeWordRadix + numberSections[i]);
    if (details.Sign == "-")
        bigIntegerValue = -bigIntegerValue;
    details.Value = bigIntegerValue;
    return true;
}

// Numeric base implied by the scanned option flags; decimal by default.
private int GetRadix(CompoundTokenDetails details) {
    if (details.IsSet((short)NumberOptions.Hex))
        return 16;
    if (details.IsSet((short)NumberOptions.Octal))
        return 8;
    if (details.IsSet((short)NumberOptions.Binary))
        return 2;
    return 10;
}

// Valid digit characters for the token's base.
private string GetDigits(CompoundTokenDetails details) {
    if (details.IsSet((short)NumberOptions.Hex))
        return Strings.HexDigits;
    if (details.IsSet((short)NumberOptions.Octal))
        return Strings.OctalDigits;
    if (details.IsSet((short)NumberOptions.Binary))
        return Strings.BinaryDigits;
    return Strings.DecimalDigits;
}

// Largest digit count per base that is guaranteed to fit into a UInt64 word.
private int GetSafeWordLength(CompoundTokenDetails details) {
    if (details.IsSet((short)NumberOptions.Hex))
        return 15;
    if (details.IsSet((short)NumberOptions.Octal))
        return 21; //maxWordLength 22
    if (details.IsSet((short)NumberOptions.Binary))
        return 63;
    return 19; //maxWordLength 20
}

// Number of safe-word sections needed to cover a digit string (ceiling division).
private int GetSectionCount(int stringLength, int safeWordLength) {
    int quotient = stringLength / safeWordLength;
    int remainder = stringLength - quotient * safeWordLength;
    return remainder == 0 ? quotient : quotient + 1;
}

//radix^safeWordLength
private ulong GetSafeWordRadix(CompoundTokenDetails details) {
    if (details.IsSet((short)NumberOptions.Hex))
        return 1152921504606846976;
    if (details.IsSet((short)NumberOptions.Octal))
        return 9223372036854775808;
    if (details.IsSet((short) NumberOptions.Binary))
        return 9223372036854775808;
    return 10000000000000000000;
}

// True for the integral TypeCode range SByte..UInt64.
private static bool IsIntegerCode(TypeCode code) {
    return (code >= TypeCode.SByte && code <= TypeCode.UInt64);
}
#endregion

}//class
}
//-----------------------------------------------------------------------
// <copyright file="AssemblyResolverTests.cs" company="SonarSource SA and Microsoft Corporation">
// Copyright (c) SonarSource SA and Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// </copyright>
//-----------------------------------------------------------------------
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.IO;
using SonarQube.Plugins.Test.Common;
using System.Reflection;
using SonarQube.Plugins.Common;
using Microsoft.CSharp;
using System.CodeDom.Compiler;

namespace SonarQube.Plugins.CommonTests
{
    /// <summary>
    /// Tests for <c>AssemblyResolver</c>: each test compiles a small assembly on the
    /// fly and checks whether it can (or cannot) be loaded with/without the resolver.
    /// </summary>
    [TestClass]
    public class AssemblyResolverTests
    {
        public TestContext TestContext { get; set; }

        #region Tests

        [TestMethod]
        public void AssemblyResolver_Creation()
        {
            // 1. Null logger
            AssertException.Expect<ArgumentNullException>(() => new AssemblyResolver(null, new string[] { this.TestContext.TestDeploymentDir }));

            // 2. Null paths
            AssertException.Expect<ArgumentException>(() => new AssemblyResolver(new TestLogger(), null));

            // 3. Empty paths
            AssertException.Expect<ArgumentException>(() => new AssemblyResolver(new TestLogger(), new string[] { }));
        }

        /// <summary>
        /// Tests the loading of an assembly with a single type and no dependencies. This should succeed even without the AssemblyResolver.
        /// </summary>
        [TestMethod]
        public void AssemblyResolver_NoImpactOnDefaultResolution()
        {
            // Arrange
            TestLogger logger = new TestLogger();
            string testFolder = TestUtils.CreateTestDirectory(this.TestContext);
            CompileSimpleAssembly("SimpleAssembly.dll", testFolder, logger);
            object simpleObject = null;

            // Act
            using (AssemblyResolver resolver = new AssemblyResolver(logger, testFolder))
            {
                // Look in every assembly under the supplied directory
                foreach (string assemblyPath in Directory.GetFiles(testFolder, "*.dll", SearchOption.AllDirectories))
                {
                    Assembly assembly = Assembly.LoadFile(assemblyPath);
                    foreach (Type type in assembly.GetExportedTypes())
                    {
                        if (!type.IsAbstract)
                        {
                            simpleObject = Activator.CreateInstance(type);
                        }
                    }
                }

                // Assert
                Assert.IsNotNull(simpleObject);
                Assert.AreEqual<string>("SimpleProgram", simpleObject.GetType().ToString());
                AssertResolverCaller(resolver);
            }
        }

        /// <summary>
        /// Tests the case where assembly resolution should fail correctly.
        /// </summary>
        [TestMethod]
        public void AssemblyResolver_NonExistentAssembly_ResolutionFails()
        {
            // Arrange
            TestLogger logger = new TestLogger();
            string testFolder = TestUtils.CreateTestDirectory(this.TestContext);

            // Act
            using (AssemblyResolver resolver = new AssemblyResolver(logger, testFolder))
            {
                AssertAssemblyLoadFails("nonexistent library");

                // Assert
                AssertResolverCaller(resolver);
            }
        }

        /// <summary>
        /// Tests the case where assembly resolution should succeed.
        /// </summary>
        [TestMethod]
        public void AssemblyResolver_ResolutionByFullAssemblyName_Succeeds()
        {
            // Arrange
            string testFolder = TestUtils.CreateTestDirectory(this.TestContext);
            Assembly testAssembly = CompileSimpleAssembly("SimpleAssemblyByFullName.dll", testFolder, new TestLogger());

            // Act
            Assembly resolvedAssembly = AssertAssemblyLoadSucceedsOnlyWithResolver("SimpleAssemblyByFullName, Version = 1.0.0.0, Culture = neutral, PublicKeyToken = null", testFolder);

            // Assert
            AssertExpectedAssemblyLoaded(testAssembly, resolvedAssembly);
        }

        /// <summary>
        /// Tests the case where assembly resolution should succeed.
        /// </summary>
        [TestMethod]
        public void AssemblyResolver_ResolutionByFileName_Succeeds()
        {
            // Arrange
            string testFolder = TestUtils.CreateTestDirectory(this.TestContext);
            Assembly testAssembly = CompileSimpleAssembly("SimpleAssemblyByFileName.dll", testFolder, new TestLogger());

            // Act
            Assembly resolvedAssembly = AssertAssemblyLoadSucceedsOnlyWithResolver("SimpleAssemblyByFileName.dll", testFolder);

            // Assert
            AssertExpectedAssemblyLoaded(testAssembly, resolvedAssembly);
        }

        /// <summary>
        /// Tests the case where assembly resolution should succeed.
        /// </summary>
        [TestMethod]
        public void AssemblyResolver_ResolutionByFullAssemblyNameWithSpace_Succeeds()
        {
            // Arrange
            string testFolder = TestUtils.CreateTestDirectory(this.TestContext);
            Assembly testAssembly = CompileSimpleAssembly("Space in Name ByFullName.dll", testFolder, new TestLogger());

            // Act
            Assembly resolvedAssembly = AssertAssemblyLoadSucceedsOnlyWithResolver("Space in Name ByFullName, Version = 1.0.0.0, Culture = neutral, PublicKeyToken = null", testFolder);

            // Assert
            AssertExpectedAssemblyLoaded(testAssembly, resolvedAssembly);
        }

        /// <summary>
        /// Tests the case where assembly resolution should succeed.
        /// </summary>
        [TestMethod]
        public void AssemblyResolver_ResolutionByFileNameWithSpace_Succeeds()
        {
            // Arrange
            string testFolder = TestUtils.CreateTestDirectory(this.TestContext);
            Assembly testAssembly = CompileSimpleAssembly("Space in Name ByFileName.dll", testFolder, new TestLogger());

            // Act
            Assembly resolvedAssembly = AssertAssemblyLoadSucceedsOnlyWithResolver("Space in Name ByFileName.dll", testFolder);

            // Assert
            AssertExpectedAssemblyLoaded(testAssembly, resolvedAssembly);
        }

        /// <summary>
        /// Tests the case where assembly resolution should succeed.
        /// </summary>
        [TestMethod]
        public void AssemblyResolver_VersionAssemblyRequested()
        {
            // Setup
            string testFolder = TestUtils.CreateTestDirectory(this.TestContext);
            Assembly testAssembly = CompileSimpleAssembly("VersionAsm1.dll", testFolder, new TestLogger(), "2.1.0.4");

            // 1. Search for a version that can be found -> succeeds
            Assembly resolvedAssembly = AssertAssemblyLoadSucceedsOnlyWithResolver("VersionAsm1, Version = 2.1.0.4, Culture = neutral, PublicKeyToken = null", testFolder);
            AssertExpectedAssemblyLoaded(testAssembly, resolvedAssembly);

            // 2. Search for a version that can't be found -> fails
            using (AssemblyResolver resolver = new AssemblyResolver(new TestLogger(), testFolder))
            {
                AssertAssemblyLoadFails("VersionAsm1, Version = 1.0.0.4, Culture = neutral, PublicKeyToken = null");
                AssertResolverCaller(resolver);
            }
        }

        #endregion

        #region Private methods

        // Compiles a one-type "SimpleProgram" assembly into asmFolder with the given version.
        private Assembly CompileSimpleAssembly(string assemblyFileName, string asmFolder, ILogger logger, string version = "1.0.0.0")
        {
            Directory.CreateDirectory(asmFolder);
            string fullAssemblyFilePath = Path.Combine(asmFolder, assemblyFileName);
            return CompileAssembly(@"public class SimpleProgram { public static void Main(string[] args) { System.Console.WriteLine(""Hello World""); } }", fullAssemblyFilePath, version, logger);
        }

        /// <summary>
        /// Compiles the supplied code into a new assembly
        /// </summary>
        private static Assembly CompileAssembly(string code, string outputFilePath, string asmVersion, ILogger logger)
        {
            // Prepend an AssemblyVersion attribute so callers can test version-specific resolution.
            string versionedCode = string.Format(System.Globalization.CultureInfo.InvariantCulture, @"[assembly:System.Reflection.AssemblyVersionAttribute(""{0}"")] {1}", asmVersion, code);

            CompilerResults result = null;
            using (CSharpCodeProvider provider = new CSharpCodeProvider())
            {
                CompilerParameters options = new CompilerParameters();
                options.OutputAssembly = outputFilePath;
                options.GenerateExecutable = true;
                options.GenerateInMemory = false;
                result = provider.CompileAssemblyFromSource(options, versionedCode);

                if (result.Errors.Count > 0)
                {
                    foreach (string item in result.Output)
                    {
                        logger.LogInfo(item);
                    }
                    Assert.Fail("Test setup error: failed to create dynamic assembly. See the test output for compiler output");
                }
            }
            return result.CompiledAssembly;
        }

        #endregion

        #region Checks

        private static void AssertAssemblyLoadFails(string asmRef)
        {
            AssertException.Expect<FileNotFoundException>(() => Assembly.Load(asmRef));
        }

        // Verifies the load fails without the resolver, then succeeds with one active.
        private Assembly AssertAssemblyLoadSucceedsOnlyWithResolver(string asmRef, string searchPath)
        {
            // Check the assembly load fails without the assembly resolver
            AssertAssemblyLoadFails(asmRef);

            // Act
            Assembly resolveResult;

            // Create a test logger that will only record output from the resolver
            // so we can check it has been called
            using (AssemblyResolver resolver = new AssemblyResolver(new TestLogger(), searchPath))
            {
                resolveResult = Assembly.Load(asmRef);

                // Assert
                AssertResolverCaller(resolver);
            }

            // Assert
            Assert.IsNotNull(resolveResult, "Failed to the load the assembly");
            return resolveResult;
        }

        private static void AssertResolverCaller(AssemblyResolver resolver)
        {
            Assert.IsTrue(resolver.ResolverCalled, "Expected the assembly resolver to have been called");
        }

        private static void AssertExpectedAssemblyLoaded(Assembly expected, Assembly resolved)
        {
            Assert.IsNotNull(resolved, "Resolved assembly should not be null");
            Assert.AreEqual(expected.Location, resolved.Location, "Failed to load the expected assembly");
        }

        #endregion
    }
}
// Copyright 2016 Serilog Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using Serilog.Debugging;
using Serilog.Events;
using Serilog.Formatting;
using Serilog.Sinks.PeriodicBatching;

namespace Serilog.Sinks.Splunk
{
    /// <summary>
    /// A sink to log to the Event Collector available in Splunk 6.3
    /// </summary>
    public class EventCollectorSink : PeriodicBatchingSink
    {
        // Queue bound used when the caller does not supply one.
        private const int DefaultQueueLimit = 100000;

        private readonly string _splunkHost;
        private readonly string _uriPath;
        private readonly ITextFormatter _jsonFormatter;
        private readonly EventCollectorClient _httpClient;

        /// <summary>
        /// Taken from Splunk.Logging.Common.
        /// Status codes that indicate a non-retryable application-level error:
        /// batches receiving these are discarded rather than retried.
        /// </summary>
        private static readonly HttpStatusCode[] HttpEventCollectorApplicationErrors =
        {
            HttpStatusCode.Forbidden,
            HttpStatusCode.MethodNotAllowed,
            HttpStatusCode.BadRequest
        };

        /// <summary>
        /// Creates a new instance of the sink
        /// </summary>
        /// <param name="splunkHost">The host of the Splunk instance with the Event collector configured</param>
        /// <param name="eventCollectorToken">The token to use when authenticating with the event collector</param>
        /// <param name="batchIntervalInSeconds">The interval in seconds that batching should occur</param>
        /// <param name="batchSizeLimit">The size of the batch when sending to the event collector</param>
        /// <param name="queueLimit">Maximum number of events in the queue</param>
        /// <param name="formatProvider">The format provider used when rendering the message</param>
        /// <param name="renderTemplate">Whether to render the message template</param>
        public EventCollectorSink(
            string splunkHost,
            string eventCollectorToken,
            int batchIntervalInSeconds = 5,
            int batchSizeLimit = 100,
            int? queueLimit = null,
            IFormatProvider formatProvider = null,
            bool renderTemplate = true)
            : this(
                splunkHost,
                eventCollectorToken,
                null, null, null, null, null,
                batchIntervalInSeconds,
                batchSizeLimit,
                queueLimit,
                formatProvider,
                renderTemplate)
        {
        }

        /// <summary>
        /// Creates a new instance of the sink
        /// </summary>
        /// <param name="splunkHost">The host of the Splunk instance with the Event collector configured</param>
        /// <param name="eventCollectorToken">The token to use when authenticating with the event collector</param>
        /// <param name="uriPath">Change the default endpoint of the Event Collector e.g. services/collector/event</param>
        /// <param name="source">The source of the event</param>
        /// <param name="sourceType">The source type of the event</param>
        /// <param name="host">The host of the event</param>
        /// <param name="index">The Splunk index to log to</param>
        /// <param name="batchIntervalInSeconds">The interval in seconds that batching should occur</param>
        /// <param name="batchSizeLimit">The size of the batch when sending to the event collector</param>
        /// <param name="queueLimit">Maximum number of events in the queue</param>
        /// <param name="formatProvider">The format provider used when rendering the message</param>
        /// <param name="renderTemplate">Whether to render the message template</param>
        /// <param name="messageHandler">The handler used to send HTTP requests</param>
        public EventCollectorSink(
            string splunkHost,
            string eventCollectorToken,
            string uriPath,
            string source,
            string sourceType,
            string host,
            string index,
            int batchIntervalInSeconds,
            int batchSizeLimit,
            int? queueLimit,
            IFormatProvider formatProvider = null,
            bool renderTemplate = true,
            HttpMessageHandler messageHandler = null)
            : this(
                splunkHost,
                eventCollectorToken,
                uriPath,
                batchIntervalInSeconds,
                batchSizeLimit,
                queueLimit,
                new SplunkJsonFormatter(renderTemplate, formatProvider, source, sourceType, host, index),
                messageHandler)
        {
        }

        /// <summary>
        /// Creates a new instance of the sink with Customfields
        /// </summary>
        /// <param name="splunkHost">The host of the Splunk instance with the Event collector configured</param>
        /// <param name="eventCollectorToken">The token to use when authenticating with the event collector</param>
        /// <param name="uriPath">Change the default endpoint of the Event Collector e.g. services/collector/event</param>
        /// <param name="source">The source of the event</param>
        /// <param name="sourceType">The source type of the event</param>
        /// <param name="host">The host of the event</param>
        /// <param name="index">The Splunk index to log to</param>
        /// <param name="fields">Add extra CustomExtraFields for Splunk to index</param>
        /// <param name="batchIntervalInSeconds">The interval in seconds that batching should occur</param>
        /// <param name="batchSizeLimit">The size of the batch when sending to the event collector</param>
        /// <param name="queueLimit">Maximum number of events in the queue</param>
        /// <param name="formatProvider">The format provider used when rendering the message</param>
        /// <param name="renderTemplate">Whether to render the message template</param>
        /// <param name="messageHandler">The handler used to send HTTP requests</param>
        public EventCollectorSink(
            string splunkHost,
            string eventCollectorToken,
            string uriPath,
            string source,
            string sourceType,
            string host,
            string index,
            CustomFields fields,
            int batchIntervalInSeconds,
            int batchSizeLimit,
            int? queueLimit,
            IFormatProvider formatProvider = null,
            bool renderTemplate = true,
            HttpMessageHandler messageHandler = null)
            // TODO: the JSON formatter is created here; we should make a way to test the formatter's output.
            : this(
                splunkHost,
                eventCollectorToken,
                uriPath,
                batchIntervalInSeconds,
                batchSizeLimit,
                queueLimit,
                new SplunkJsonFormatter(renderTemplate, formatProvider, source, sourceType, host, index, fields),
                messageHandler)
        {
        }

        /// <summary>
        /// Creates a new instance of the sink
        /// </summary>
        /// <param name="splunkHost">The host of the Splunk instance with the Event collector configured</param>
        /// <param name="eventCollectorToken">The token to use when authenticating with the event collector</param>
        /// <param name="uriPath">Change the default endpoint of the Event Collector e.g. services/collector/event</param>
        /// <param name="batchIntervalInSeconds">The interval in seconds that batching should occur</param>
        /// <param name="batchSizeLimit">The size of the batch when sending to the event collector</param>
        /// <param name="queueLimit">Maximum number of events in the queue</param>
        /// <param name="jsonFormatter">The text formatter used to render log events into a JSON format for consumption by Splunk</param>
        /// <param name="messageHandler">The handler used to send HTTP requests</param>
        public EventCollectorSink(
            string splunkHost,
            string eventCollectorToken,
            string uriPath,
            int batchIntervalInSeconds,
            int batchSizeLimit,
            int? queueLimit,
            ITextFormatter jsonFormatter,
            HttpMessageHandler messageHandler = null)
            : base(batchSizeLimit, TimeSpan.FromSeconds(batchIntervalInSeconds), queueLimit ?? DefaultQueueLimit)
        {
            _uriPath = uriPath;
            _splunkHost = splunkHost;
            _jsonFormatter = jsonFormatter;
            _httpClient = messageHandler != null
                ? new EventCollectorClient(eventCollectorToken, messageHandler)
                : new EventCollectorClient(eventCollectorToken);
        }

        /// <summary>
        /// Emit a batch of log events, running asynchronously.
        /// </summary>
        /// <param name="events">The events to emit.</param>
        /// <remarks>
        /// Override either <see cref="PeriodicBatchingSink.EmitBatch" /> or <see cref="PeriodicBatchingSink.EmitBatchAsync" />, not both.
        /// </remarks>
        protected override async Task EmitBatchAsync(IEnumerable<LogEvent> events)
        {
            // Concatenate all events of the batch into a single HTTP payload.
            var allEvents = new StringWriter();
            foreach (var logEvent in events)
            {
                _jsonFormatter.Format(logEvent, allEvents);
            }

            var request = new EventCollectorRequest(_splunkHost, allEvents.ToString(), _uriPath);
            var response = await _httpClient.SendAsync(request).ConfigureAwait(false);

            if (!response.IsSuccessStatusCode)
            {
                //Application Errors sent via HTTP Event Collector
                if (HttpEventCollectorApplicationErrors.Any(x => x == response.StatusCode))
                {
                    // By not throwing an exception here the PeriodicBatchingSink will assume the batch succeeded and not send it again.
                    SelfLog.WriteLine(
                        "A status code of {0} was received when attempting to send to {1}. The event has been discarded and will not be placed back in the queue.",
                        response.StatusCode.ToString(), _splunkHost);
                }
                else
                {
                    // EnsureSuccessStatusCode will throw an exception and the PeriodicBatchingSink will catch/log the exception and retry the batch.
                    response.EnsureSuccessStatusCode();
                }
            }
        }
    }
}
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Web;

namespace AjaxControlToolkit
{
    /// <summary>
    /// Server-side helper for AjaxFileUpload requests: validates the file extension
    /// against an allow-list and streams the posted multipart body into a temporary
    /// file, supporting chunked uploads and progress polling.
    /// </summary>
    public static class AjaxFileUploadHelper
    {
        const int ChunkSize = 1024 * 1024 * 4;      // read size for normal uploads (4 MB)
        const int ChunkSizeForPolling = 64 * 1024;  // smaller reads so polled progress updates often

        // Extensions accepted by default; ToolkitConfig.AdditionalUploadFileExtensions extends this list.
        static readonly string[] DefaultAllowedExtensions = {
            "7z", "aac", "avi", "bz2", "csv", "doc", "docx", "gif", "gz", "htm",
            "html", "jpeg", "jpg", "md", "mp3", "mp4", "ods", "odt", "ogg", "pdf",
            "png", "ppt", "pptx", "svg", "tar", "tgz", "txt", "xls", "xlsx", "xml", "zip"
        };

        /// <summary>Marks the upload identified by <paramref name="fileId"/> as aborted.</summary>
        public static void Abort(HttpContext context, string fileId)
        {
            (new AjaxFileUploadStates(context, fileId)).Abort = true;
        }

        /// <summary>
        /// Entry point for an upload request. Validates the file extension, then streams
        /// the request body to a temporary file via <see cref="ProcessStream"/>.
        /// </summary>
        /// <returns>true when the upload completed; false when it was aborted.</returns>
        /// <exception cref="Exception">Thrown when the file extension is not allowed.</exception>
        public static bool Process(HttpContext context)
        {
            var request = context.Request;
            var fileId = request.QueryString["fileId"];
            var fileName = request.QueryString["fileName"];
            var extension = Path.HasExtension(fileName) ? Path.GetExtension(fileName).Substring(1) : String.Empty;

            var allowedExtensions = DefaultAllowedExtensions.Union(ToolkitConfig.AdditionalUploadFileExtensions.Split(','));
            if(!allowedExtensions.Any(ext => String.Compare(ext, extension, StringComparison.InvariantCultureIgnoreCase) == 0))
                throw new Exception("File extension is not allowed.");

            var chunked = bool.Parse(request.QueryString["chunked"] ?? "false");
            var firstChunk = bool.Parse(request.QueryString["firstChunk"] ?? "false");
            var usePoll = bool.Parse(request.QueryString["usePoll"] ?? "false");

            // Prefer the bufferless stream (when the body has not already been read
            // classically, mode != 1) so large uploads are not buffered in memory.
            using(var stream = GetReadEntityBodyMode(request) != 1 ? request.GetBufferlessInputStream() : request.InputStream)
            {
                var success = ProcessStream(context, stream, fileId, fileName, chunked, firstChunk, usePoll);
                if(!success)
                    request.Form.Clear();
                return success;
            }
        }

        // Reads HttpRequest.ReadEntityBodyMode via 'dynamic' because the property is not
        // available on all framework versions; returns 0 when it cannot be read.
        static int GetReadEntityBodyMode(HttpRequest request)
        {
            try
            {
                return Convert.ToInt32((request as dynamic).ReadEntityBodyMode);
            }
            catch
            {
                return 0;
            }
        }

        /// <summary>
        /// Streams a multipart request body into the upload's temporary file. Buffers
        /// leading bytes until the multipart header can be parsed, then writes only the
        /// file payload (excluding the trailing boundary delimiter).
        /// </summary>
        /// <returns>true when the stream was fully processed; false when aborted.</returns>
        public static bool ProcessStream(HttpContext context, Stream source, string fileId, string fileName, bool chunked, bool isFirstChunk, bool usePoll)
        {
            FileHeaderInfo headerInfo = null;
            Stream destination = null;
            var states = new AjaxFileUploadStates(context, fileId);

            try
            {
                using(var tmpStream = new MemoryStream())
                {
                    var totalBytesRead = 0;
                    var done = false;
                    var fileLength = 0;

                    while(true)
                    {
                        if(states.Abort)
                            return false;

                        // Read one chunk; polling mode uses a smaller chunk for finer progress.
                        var chunkSize = usePoll ? ChunkSizeForPolling : ChunkSize;
                        if(chunkSize > source.Length)
                        {
                            chunkSize = (int)source.Length;
                            if(usePoll)
                                states.FileLength = chunkSize;
                        }

                        var chunk = new byte[chunkSize];
                        var index = 0;
                        while(index < chunk.Length)
                        {
                            var bytesRead = source.Read(chunk, index, chunk.Length - index);
                            if(bytesRead == 0)
                                break;

                            if(usePoll)
                                states.Uploaded += bytesRead;

                            index += bytesRead;
                        }
                        totalBytesRead += index;

                        if(index != 0)
                        {
                            if(headerInfo == null)
                            {
                                // Accumulate leading bytes until the multipart header parses.
                                tmpStream.Write(chunk, 0, index);
                                var firstBytes = tmpStream.ToArray();
                                headerInfo = MultipartFormDataParser.ParseHeaderInfo(firstBytes, Encoding.UTF8);

                                if(headerInfo != null)
                                {
                                    // Total payload = body minus header prefix and trailing boundary.
                                    fileLength = ((int)(source.Length - headerInfo.BoundaryDelimiterLength) - headerInfo.StartIndex);
                                    if(usePoll)
                                        states.FileLength = fileLength;

                                    // Only write file data, so not all buffered bytes are written.
                                    var lengthToWrite = totalBytesRead - headerInfo.StartIndex;
                                    if(lengthToWrite > fileLength)
                                    {
                                        lengthToWrite = fileLength;
                                        done = true;
                                    }

                                    var firstChunk = new byte[lengthToWrite];
                                    Buffer.BlockCopy(firstBytes, headerInfo.StartIndex, firstChunk, 0, lengthToWrite);

                                    var tmpFilePath = GetTempFilePath(fileId, fileName);
                                    AjaxFileUpload.CheckTempFilePath(tmpFilePath);
                                    CreateTempFilePathFolder(tmpFilePath);

                                    // Create a new file for the first (or only) chunk; append for later chunks.
                                    destination = (!chunked || isFirstChunk)
                                        ? new FileStream(tmpFilePath, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite)
                                        : new FileStream(tmpFilePath, FileMode.Append, FileAccess.Write, FileShare.ReadWrite);

                                    destination.Write(firstChunk, 0, lengthToWrite);
                                }
                            }
                            else
                            {
                                var length = index;

                                // Reached the end of the stream: drop the trailing boundary bytes.
                                if(destination.Length + index > fileLength)
                                {
                                    length -= headerInfo.BoundaryDelimiterLength;
                                    done = true;
                                }

                                destination.Write(chunk, 0, length);
                            }
                        }
                        else
                        {
                            break;
                        }

                        // A short read means there is nothing left; the upload is finished.
                        if(done || index != chunk.Length)
                            break;
                    }
                }

                return true;
            }
            finally
            {
                // BUG FIX: the original only closed 'destination' on the normal completion
                // branch; an exception or an abort mid-upload leaked the file handle and
                // kept the temp file locked. Dispose on every exit path instead.
                if(destination != null)
                    destination.Dispose();
            }
        }

        // Ensures the directory for the temp file exists.
        static void CreateTempFilePathFolder(string tmpFilePath)
        {
            var tempFolder = Path.GetDirectoryName(tmpFilePath);
            if(!Directory.Exists(tempFolder))
                Directory.CreateDirectory(tempFolder);
        }

        // Builds the temp file path for this upload, sanitizing characters that are
        // invalid in file names or dangerous in markup (<, >, &, ").
        private static string GetTempFilePath(string fileId, string fileName)
        {
            var tempFolder = AjaxFileUpload.GetTempFolder(fileId);
            var invalidChars = Path.GetInvalidFileNameChars()
                .Concat(new[] { '<', '>', '&', '"' })
                .Distinct()
                .ToArray();

            foreach (var invalidChar in invalidChars)
            {
                fileName = fileName.Replace(invalidChar, '-');
            }

            return Path.Combine(tempFolder, fileName) + Constants.UploadTempFileExtension;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Linq;
using System.Reflection;

using Xunit;

namespace System.Security.Cryptography.Encryption.Tests.Symmetric
{
    /// <summary>
    /// Exercises the SymmetricAlgorithm base class contract (key/IV/block-size/feedback-size
    /// validation and copy semantics) through a trivial concrete subclass.
    /// </summary>
    public static class TrivialTests
    {
        [Theory]
        [InlineData(-1)]
        [InlineData(0)]
        [InlineData(7)]
        [InlineData(48)]
        public static void TestBadFeedbackSize(int size)
        {
            using (var t = new Trivial { BlockSize = 40 })
            {
                Assert.Throws<CryptographicException>(() => t.FeedbackSize = size);
            }
        }

        [Fact]
        public static void TestFeedbackSizeDoesntErrorWhenBlockSizeChanged()
        {
            using (var t = new Trivial { BlockSize = 104 })
            {
                t.FeedbackSize = 96;
                t.BlockSize = 40;
                Assert.Equal(96, t.FeedbackSize);
            }
        }

        [Fact]
        public static void TestFeedbackSizeDoesntValidateFieldValue()
        {
            using (var t = new Trivial())
            {
                t.SetFeedbackSize(9);
                Assert.Equal(9, t.FeedbackSize);
            }
        }

        [Fact]
        public static void TestClearIsDispose()
        {
            // The obvious test would have been to .Clear() and then check that
            // next time we get a different autogenerated Key and IV -- but
            // occasionally by chance they would be the same.
            using (Trivial t = new Trivial())
            {
                var k = t.Key; // Initialize
                Assert.False(t.IsDisposed);
                t.Clear();
                Assert.True(t.IsDisposed);
            }
        }

        [Fact]
        public static void TestAutomaticKey()
        {
            using (Trivial t = new Trivial())
            {
                byte[] generatedKey = t.Key;
                Assert.Equal(generatedKey, Trivial.GeneratedKey);
                Assert.NotSame(generatedKey, Trivial.GeneratedKey);
            }
        }

        [Fact]
        public static void TestKey()
        {
            using (Trivial s = new Trivial())
            {
                Assert.Equal(0, s.KeySize);
                Assert.Throws<ArgumentNullException>(() => s.Key = null);

                // Testing KeySize and Key setter.
                int[] validKeySizes = { 40, 104, 152, 808, 816, 824, 832 };
                for (int keySize = -10; keySize < 200 * 8; keySize++)
                {
                    if (validKeySizes.Contains(keySize))
                    {
                        s.KeySize = keySize;
                        Assert.Equal(keySize, s.KeySize);
                    }
                    else
                    {
                        Assert.Throws<CryptographicException>(() => s.KeySize = keySize);
                    }

                    if (keySize >= 0)
                    {
                        int keySizeInBytes = keySize / 8;
                        byte[] key = GenerateRandom(keySizeInBytes);
                        if (validKeySizes.Contains(keySizeInBytes * 8))
                        {
                            s.SetKeySize(-1);
                            s.Key = key;
                            byte[] copyOfKey = s.Key;
                            Assert.Equal(key, copyOfKey);
                            Assert.Equal(key.Length * 8, s.KeySize);
                            Assert.NotSame(key, copyOfKey);
                        }
                        else
                        {
                            Assert.Throws<CryptographicException>(() => s.Key = key);
                        }
                    }
                }

                // Test overflow
                try
                {
                    byte[] hugeKey = new byte[536870917]; // value chosen so that when multiplied by 8 (bits) it overflows to the value 40
                    if (PlatformDetection.IsFullFramework)
                    {
                        // This change should be ported to netfx
                        s.Key = hugeKey;
                    }
                    else
                    {
                        Assert.Throws<CryptographicException>(() => s.Key = hugeKey);
                    }
                }
                catch (OutOfMemoryException) { } // in case there isn't enough memory at test-time to allocate the large array
            }
        }

        [Fact]
        public static void KeySize_Key_LooseCoupling()
        {
            using (Trivial t = new Trivial())
            {
                t.GenerateKey();

                // Set the KeySizeValue field, then confirm that get_Key doesn't check it at all.
                const int UnusualKeySize = 51;
                t.SetKeySize(UnusualKeySize);
                Assert.Equal(UnusualKeySize, t.KeySize);

                byte[] key = t.Key;

                // It doesn't equal it in bytes
                Assert.NotEqual(UnusualKeySize, key.Length);

                // It doesn't equal it in bits
                Assert.NotEqual(UnusualKeySize, key.Length * 8);
            }
        }

        [Fact]
        public static void KeySize_CurrentValue_NotGrandfathered()
        {
            using (Trivial t = new Trivial())
            {
                t.SetKeySize(525600);
                Assert.Throws<CryptographicException>(() => t.KeySize = t.KeySize);
            }
        }

        [Fact]
        public static void TestAutomaticIv()
        {
            using (Trivial t = new Trivial())
            {
                t.BlockSize = 5 * 8;
                byte[] generatedIv = t.IV;
                Assert.Equal(generatedIv, Trivial.GeneratedIV);
                Assert.NotSame(generatedIv, Trivial.GeneratedIV);
            }
        }

        [Fact]
        public static void TestIv()
        {
            using (Trivial s = new Trivial())
            {
                Assert.Throws<ArgumentNullException>(() => s.IV = null);

                // Testing IV property setter
                {
                    s.BlockSize = 5 * 8;
                    {
                        byte[] iv = GenerateRandom(5);
                        s.IV = iv;
                        byte[] copyOfIv = s.IV;
                        Assert.Equal(iv, copyOfIv);
                        // Consistency: use NotSame like the other copy-semantics tests in
                        // this class (equivalent to Assert.False(ReferenceEquals(...))).
                        Assert.NotSame(iv, copyOfIv);
                    }
                    {
                        byte[] iv = GenerateRandom(6);
                        Assert.Throws<CryptographicException>(() => s.IV = iv);
                    }
                }
            }
        }

        [Fact]
        public static void TestBlockSize()
        {
            using (Trivial s = new Trivial())
            {
                Assert.Equal(0, s.BlockSize);

                // Testing BlockSizeSetter.
                int[] validBlockSizes = { 40, 104, 152, 808, 816, 824, 832 };
                for (int blockSize = -10; blockSize < 200 * 8; blockSize++)
                {
                    if (validBlockSizes.Contains(blockSize))
                    {
                        s.BlockSize = blockSize;
                        Assert.Equal(blockSize, s.BlockSize);
                    }
                    else
                    {
                        Assert.Throws<CryptographicException>(() => s.BlockSize = blockSize);
                    }
                }
            }
            // (dead trailing "return;" removed - it was a no-op at the end of a void method)
        }

        [Fact]
        public static void GetCipherMode_NoValidation()
        {
            using (Trivial t = new Trivial())
            {
                t.SetCipherMode(24601);
                Assert.Equal(24601, (int)t.Mode);
            }
        }

        [Fact]
        public static void GetPaddingMode_NoValidation()
        {
            using (Trivial t = new Trivial())
            {
                t.SetPaddingMode(24601);
                Assert.Equal(24601, (int)t.Padding);
            }
        }

        [Fact]
        public static void LegalBlockSizes_CopiesData()
        {
            using (Trivial t = new Trivial())
            {
                KeySizes[] a = t.LegalBlockSizes;
                KeySizes[] b = t.LegalBlockSizes;
                Assert.NotSame(a, b);
            }
        }

        [Fact]
        public static void LegalKeySizes_CopiesData()
        {
            using (Trivial t = new Trivial())
            {
                KeySizes[] a = t.LegalKeySizes;
                KeySizes[] b = t.LegalKeySizes;
                Assert.NotSame(a, b);
            }
        }

        [Fact]
        public static void SetKey_Uses_LegalKeySizesProperty()
        {
            using (SymmetricAlgorithm s = new DoesNotSetKeySizesFields())
            {
                Assert.Throws<CryptographicException>(() => s.Key = Array.Empty<byte>());
                s.Key = new byte[16];
            }
        }

        [Fact]
        public static void SetKeySize_Uses_LegalKeySizesProperty()
        {
            using (SymmetricAlgorithm s = new DoesNotSetKeySizesFields())
            {
                Assert.Throws<CryptographicException>(() => s.KeySize = 0);
                s.KeySize = 128;
            }
        }

        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Throws NRE on netfx (https://github.com/dotnet/corefx/issues/18690)")]
        public static void SetBlockSize_Uses_LegalBlockSizesProperty()
        {
            using (SymmetricAlgorithm s = new DoesNotSetKeySizesFields())
            {
                Assert.Throws<CryptographicException>(() => s.BlockSize = 0);
                s.BlockSize = 8;
            }
        }

        // Test-only randomness; cryptographic quality is not needed here.
        private static byte[] GenerateRandom(int size)
        {
            byte[] data = new byte[size];
            Random r = new Random();
            for (int i = 0; i < size; i++)
            {
                data[i] = unchecked((byte)(r.Next()));
            }
            return data;
        }

        /// <summary>
        /// Minimal concrete SymmetricAlgorithm exposing the protected fields so the tests
        /// can poke values past the property setters' validation.
        /// </summary>
        private class Trivial : SymmetricAlgorithm
        {
            public Trivial()
            {
                // Desktop's SymmetricAlgorithm reads from the field overly aggressively,
                // but in Core it always reads from the property. By setting the field
                // we're still happy on Desktop tests, and we can validate the default
                // behavior of the LegalBlockSizes property.
                LegalBlockSizesValue = new KeySizes[]
                {
                    new KeySizes(5*8, -99*8, 0*8),
                    new KeySizes(13*8, 22*8, 6*8),
                    new KeySizes(101*8, 104*8, 1*8),
                };

                // Desktop's SymmetricAlgorithm reads from the property correctly, but
                // we'll set the field here, anyways, to validate the default behavior
                // of the LegalKeySizes property.
                LegalKeySizesValue = new KeySizes[]
                {
                    new KeySizes(5*8, -99*8, 0*8),
                    new KeySizes(13*8, 22*8, 6*8),
                    new KeySizes(101*8, 104*8, 1*8),
                };
            }

            // Dispose() nulls out KeyValue and IVValue, so this is an observable proxy for
            // "has been disposed".
            public bool IsDisposed
            {
                get { return KeyValue == null && IVValue == null; }
            }

            public override ICryptoTransform CreateDecryptor(byte[] rgbKey, byte[] rgbIV)
            {
                throw new CreateDecryptorNotImplementedException();
            }

            public override ICryptoTransform CreateEncryptor(byte[] rgbKey, byte[] rgbIV)
            {
                throw new CreateEncryptorNotImplementedException();
            }

            public override void GenerateIV()
            {
                IV = GeneratedIV;
            }

            public override void GenerateKey()
            {
                Key = GeneratedKey;
            }

            public void SetBlockSize(int blockSize)
            {
                BlockSizeValue = blockSize;
            }

            public void SetKeySize(int keySize)
            {
                KeySizeValue = keySize;
            }

            public void SetCipherMode(int anyValue)
            {
                ModeValue = (CipherMode)anyValue;
            }

            public void SetPaddingMode(int anyValue)
            {
                PaddingValue = (PaddingMode)anyValue;
            }

            public void SetFeedbackSize(int value)
            {
                FeedbackSizeValue = value;
            }

            public static readonly byte[] GeneratedKey = GenerateRandom(13);
            public static readonly byte[] GeneratedIV = GenerateRandom(5);
        }

        /// <summary>
        /// Subclass that overrides LegalKeySizes/LegalBlockSizes without touching the
        /// backing fields, proving the base class consults the properties.
        /// </summary>
        private class DoesNotSetKeySizesFields : SymmetricAlgorithm
        {
            public DoesNotSetKeySizesFields()
            {
                // Ensure the default values for the fields.
                Assert.Null(KeyValue);
                Assert.Null(IVValue);
                Assert.Null(LegalKeySizesValue);
                Assert.Null(LegalBlockSizesValue);
                Assert.Equal(0, KeySizeValue);
                Assert.Equal(0, BlockSizeValue);
                Assert.Equal(CipherMode.CBC, ModeValue);
                Assert.Equal(PaddingMode.PKCS7, PaddingValue);
            }

            public override ICryptoTransform CreateDecryptor(byte[] rgbKey, byte[] rgbIV)
            {
                throw new CreateDecryptorNotImplementedException();
            }

            public override ICryptoTransform CreateEncryptor(byte[] rgbKey, byte[] rgbIV)
            {
                throw new CreateEncryptorNotImplementedException();
            }

            public override void GenerateIV()
            {
                throw new GenerateIvNotImplementedException();
            }

            public override void GenerateKey()
            {
                throw new GenerateKeyNotImplementedException();
            }

            public override KeySizes[] LegalBlockSizes
            {
                get { return new[] { new KeySizes(8, 64, 8) }; }
            }

            public override KeySizes[] LegalKeySizes
            {
                get { return new[] { new KeySizes(64, 128, 8) }; }
            }
        }

        // Marker exceptions so a test failure pinpoints which virtual was unexpectedly called.
        private class GenerateIvNotImplementedException : Exception { }
        private class GenerateKeyNotImplementedException : Exception { }
        private class CreateDecryptorNotImplementedException : Exception { }
        private class CreateEncryptorNotImplementedException : Exception { }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Driver: runs every scenario for Avx2.Max(Vector256<UInt16>, Vector256<UInt16>),
        // or the unsupported-hardware scenario when Avx2 is unavailable.
        private static void MaxUInt16()
        {
            var test = new SimpleBinaryOpTest__MaxUInt16();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                if (Avx.IsSupported)
                {
                    // Validates passing a static member works, using pinning and Load
                    test.RunClsVarScenario_Load();
                }

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local class works
                test.RunClassLclFldScenario();

                if (Avx.IsSupported)
                {
                    // Validates passing the field of a local class works, using pinning and Load
                    test.RunClassLclFldScenario_Load();
                }

                // Validates passing an instance member of a class works
                test.RunClassFldScenario();

                if (Avx.IsSupported)
                {
                    // Validates passing an instance member of a class works, using pinning and Load
                    test.RunClassFldScenario_Load();
                }

                // Validates passing the field of a local struct works
                test.RunStructLclFldScenario();

                if (Avx.IsSupported)
                {
                    // Validates passing the field of a local struct works, using pinning and Load
                    test.RunStructLclFldScenario_Load();
                }

                // Validates passing an instance member of a struct works
                test.RunStructFldScenario();

                if (Avx.IsSupported)
                {
                    // Validates passing an instance member of a struct works, using pinning and Load
                    test.RunStructFldScenario_Load();
                }
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    // Test harness for Avx2.Max over Vector256<UInt16>: each RunXxx scenario feeds the
    // same input data through a different argument-passing shape and validates the output
    // element-wise against Math.Max.
    public sealed unsafe class SimpleBinaryOpTest__MaxUInt16
    {
        // Pinned, over-allocated byte buffers whose aligned interiors back the test
        // vectors, so both aligned and unaligned loads can be exercised.
        private struct DataTable
        {
            private byte[] inArray1;
            private byte[] inArray2;
            private byte[] outArray;

            private GCHandle inHandle1;
            private GCHandle inHandle2;
            private GCHandle outHandle;

            private ulong alignment;

            public DataTable(UInt16[] inArray1, UInt16[] inArray2, UInt16[] outArray, int alignment)
            {
                int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt16>();
                int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt16>();
                int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt16>();
                if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
                {
                    throw new ArgumentException("Invalid value of alignment");
                }

                // Over-allocate by 2x alignment so an aligned region of the required size
                // always exists inside each pinned buffer.
                this.inArray1 = new byte[alignment * 2];
                this.inArray2 = new byte[alignment * 2];
                this.outArray = new byte[alignment * 2];

                this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
                this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
                this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);

                this.alignment = (ulong)alignment;

                Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt16, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
                Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt16, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
            }

            public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
            public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
            public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);

            public void Dispose()
            {
                inHandle1.Free();
                inHandle2.Free();
                outHandle.Free();
            }

            // Rounds buffer up to the next multiple of expectedAlignment (a power of two).
            private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
            {
                return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
            }
        }

        // Struct wrapper used by the "struct field" scenarios.
        private struct TestStruct
        {
            public Vector256<UInt16> _fld1;
            public Vector256<UInt16> _fld2;

            public static TestStruct Create()
            {
                var testStruct = new TestStruct();

                for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
                for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

                return testStruct;
            }

            public void RunStructFldScenario(SimpleBinaryOpTest__MaxUInt16 testClass)
            {
                var result = Avx2.Max(_fld1, _fld2);

                Unsafe.Write(testClass._dataTable.outArrayPtr, result);
                testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
            }

            public void RunStructFldScenario_Load(SimpleBinaryOpTest__MaxUInt16 testClass)
            {
                fixed (Vector256<UInt16>* pFld1 = &_fld1)
                fixed (Vector256<UInt16>* pFld2 = &_fld2)
                {
                    var result = Avx2.Max(
                        Avx.LoadVector256((UInt16*)(pFld1)),
                        Avx.LoadVector256((UInt16*)(pFld2))
                    );

                    Unsafe.Write(testClass._dataTable.outArrayPtr, result);
                    testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
                }
            }
        }

        private static readonly int LargestVectorSize = 32;

        private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);
        private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);
        private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);

        private static UInt16[] _data1 = new UInt16[Op1ElementCount];
        private static UInt16[] _data2 = new UInt16[Op2ElementCount];

        private static Vector256<UInt16> _clsVar1;
        private static Vector256<UInt16> _clsVar2;

        private Vector256<UInt16> _fld1;
        private Vector256<UInt16> _fld2;

        private DataTable _dataTable;

        // Populates the static (class-variable) operands once.
        static SimpleBinaryOpTest__MaxUInt16()
        {
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _clsVar1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _clsVar2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
        }

        // Populates the instance fields and the pinned data table with fresh random data.
        public SimpleBinaryOpTest__MaxUInt16()
        {
            Succeeded = true;

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
            _dataTable = new DataTable(_data1, _data2, new UInt16[RetElementCount], LargestVectorSize);
        }

        public bool IsSupported => Avx2.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

            var result = Avx2.Max(
                Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray2Ptr)
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

            var result = Avx2.Max(
                Avx.LoadVector256((UInt16*)(_dataTable.inArray1Ptr)),
                Avx.LoadVector256((UInt16*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

            var result = Avx2.Max(
                Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray1Ptr)),
                Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

            var result = typeof(Avx2).GetMethod(nameof(Avx2.Max), new Type[] { typeof(Vector256<UInt16>), typeof(Vector256<UInt16>) })
                                     .Invoke(null, new object[] {
                                        Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray1Ptr),
                                        Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray2Ptr)
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

            var result = typeof(Avx2).GetMethod(nameof(Avx2.Max), new Type[] { typeof(Vector256<UInt16>), typeof(Vector256<UInt16>) })
                                     .Invoke(null, new object[] {
                                        Avx.LoadVector256((UInt16*)(_dataTable.inArray1Ptr)),
                                        Avx.LoadVector256((UInt16*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

            var result = typeof(Avx2).GetMethod(nameof(Avx2.Max), new Type[] { typeof(Vector256<UInt16>), typeof(Vector256<UInt16>) })
                                     .Invoke(null, new object[] {
                                        Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray1Ptr)),
                                        Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

            var result = Avx2.Max(
                _clsVar1,
                _clsVar2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));

            fixed (Vector256<UInt16>* pClsVar1 = &_clsVar1)
            fixed (Vector256<UInt16>* pClsVar2 = &_clsVar2)
            {
                var result = Avx2.Max(
                    Avx.LoadVector256((UInt16*)(pClsVar1)),
                    Avx.LoadVector256((UInt16*)(pClsVar2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
            }
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

            var op1 = Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray1Ptr);
            var op2 = Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray2Ptr);
            var result = Avx2.Max(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

            var op1 = Avx.LoadVector256((UInt16*)(_dataTable.inArray1Ptr));
            var op2 = Avx.LoadVector256((UInt16*)(_dataTable.inArray2Ptr));
            var result = Avx2.Max(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

            var op1 = Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray1Ptr));
            var op2 = Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray2Ptr));
            var result = Avx2.Max(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        public void RunClassLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

            var test = new SimpleBinaryOpTest__MaxUInt16();
            var result = Avx2.Max(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunClassLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));

            var test = new SimpleBinaryOpTest__MaxUInt16();

            fixed (Vector256<UInt16>* pFld1 = &test._fld1)
            fixed (Vector256<UInt16>* pFld2 = &test._fld2)
            {
                var result = Avx2.Max(
                    Avx.LoadVector256((UInt16*)(pFld1)),
                    Avx.LoadVector256((UInt16*)(pFld2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
            }
        }

        public void RunClassFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

            var result = Avx2.Max(_fld1, _fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
        }

        public void RunClassFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));

            fixed (Vector256<UInt16>* pFld1 = &_fld1)
            fixed (Vector256<UInt16>* pFld2 = &_fld2)
            {
                var result = Avx2.Max(
                    Avx.LoadVector256((UInt16*)(pFld1)),
                    Avx.LoadVector256((UInt16*)(pFld2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
            }
        }

        public void RunStructLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

            var test = TestStruct.Create();
            var result = Avx2.Max(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunStructLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));

            var test = TestStruct.Create();
            var result = Avx2.Max(
                Avx.LoadVector256((UInt16*)(&test._fld1)),
                Avx.LoadVector256((UInt16*)(&test._fld2))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunStructFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

            var test = TestStruct.Create();
            test.RunStructFldScenario(this);
        }

        public void RunStructFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));

            var test = TestStruct.Create();
            test.RunStructFldScenario_Load(this);
        }

        // On hardware without AVX2 the intrinsic must throw PlatformNotSupportedException.
        public void RunUnsupportedScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

            bool succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                succeeded = true;
            }

            if (!succeeded)
            {
                Succeeded = false;
            }
        }

        private void ValidateResult(Vector256<UInt16> op1, Vector256<UInt16> op2, void* result, [CallerMemberName] string method = "")
        {
            UInt16[] inArray1 = new UInt16[Op1ElementCount];
            UInt16[] inArray2 = new UInt16[Op2ElementCount];
            UInt16[] outArray = new UInt16[RetElementCount];

            Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), op1);
            Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), op2);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
        {
            UInt16[] inArray1 = new UInt16[Op1ElementCount];
            UInt16[] inArray2 = new UInt16[Op2ElementCount];
            UInt16[] outArray = new UInt16[RetElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Element-wise reference check against Math.Max; logs operands and flips Succeeded on mismatch.
        private void ValidateResult(UInt16[] left, UInt16[] right, UInt16[] result, [CallerMemberName] string method = "")
        {
            bool succeeded = true;

            if (result[0] != Math.Max(left[0], right[0]))
            {
                succeeded = false;
            }
            else
            {
                for (var i = 1; i < RetElementCount; i++)
                {
                    if (result[i] != Math.Max(left[i], right[i]))
                    {
                        succeeded = false;
                        break;
                    }
                }
            }

            if (!succeeded)
            {
                TestLibrary.TestFramework.LogInformation($"{nameof(Avx2)}.{nameof(Avx2.Max)}<UInt16>(Vector256<UInt16>, Vector256<UInt16>): {method} failed:");
                TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
                TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
                TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
                TestLibrary.TestFramework.LogInformation(string.Empty);

                Succeeded = false;
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace Linky.Sandbox.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    /// <remarks>
    /// Standard ASP.NET Web API HelpPage template class. Samples come from three places,
    /// in priority order: explicitly registered <see cref="ActionSamples"/>, explicitly
    /// registered <see cref="SampleObjects"/>, and finally objects produced by
    /// <see cref="SampleObjectFactories"/> and serialized through the media-type formatters.
    /// </remarks>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            // The ObjectGenerator-backed factory is the initial (and only default) entry;
            // callers may Insert(0, ...) to override it or Add(...) to append fallbacks.
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
            Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Directly-registered action samples (added above) take precedence
                        // over anything generated for the same media type.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters",
            Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            // An entry in ActualHttpMessageTypes overrides the type inferred from the
            // action signature ("*" matches the action regardless of parameter names).
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }

            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // NOTE(review): sync-over-async (.Wait()) is inherited from the MS HelpPage
                    // template; acceptable here because this runs at help-page render time only.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    // Pretty-print when the media type looks like XML or JSON; anything else
                    // is shown verbatim.
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Returns the innermost exception of a flattened AggregateException, or the
        // exception itself when it is not an aggregate.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // Request samples must be readable by the formatter; response samples must be writable.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Yields every registered action sample whose key matches the given controller,
        // action, direction, and parameter set (a key of { "*" } matches any parameters).
        // Name comparisons are case-insensitive.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw strings are wrapped in TextSample so the help page renders them as text;
        // everything else is passed through unchanged.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure.Management.Automation;
using Microsoft.Azure.Management.Automation.Models;
using Newtonsoft.Json.Linq;

namespace Microsoft.Azure.Management.Automation
{
    /// <summary>
    /// Service operation for automation agent registration information.  (see
    /// http://aka.ms/azureautomationsdk/agentregistrationoperations for more
    /// information)
    /// </summary>
    internal partial class AgentRegistrationOperation : IServiceOperations<AutomationManagementClient>, IAgentRegistrationOperation
    {
        /// <summary>
        /// Initializes a new instance of the AgentRegistrationOperation class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        internal AgentRegistrationOperation(AutomationManagementClient client)
        {
            this._client = client;
        }

        private AutomationManagementClient _client;

        /// <summary>
        /// Gets a reference to the
        /// Microsoft.Azure.Management.Automation.AutomationManagementClient.
        /// </summary>
        public AutomationManagementClient Client
        {
            get { return this._client; }
        }

        /// <summary>
        /// Retrieve the automation agent registration information.  (see
        /// http://aka.ms/azureautomationsdk/agentregistrationoperations for
        /// more information)
        /// </summary>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group
        /// </param>
        /// <param name='automationAccount'>
        /// Required. The automation account name.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response model for the get agent registration information
        /// operation.
        /// </returns>
        public async Task<AgentRegistrationGetResponse> GetAsync(string resourceGroupName, string automationAccount, CancellationToken cancellationToken)
        {
            // Validate
            if (resourceGroupName == null)
            {
                throw new ArgumentNullException("resourceGroupName");
            }
            if (automationAccount == null)
            {
                throw new ArgumentNullException("automationAccount");
            }

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("automationAccount", automationAccount);
                TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters);
            }

            // Construct URL
            // GET .../automationAccounts/{account}/agentRegistrationInformation?api-version=...
            string url = "";
            url = url + "/subscriptions/";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/resourceGroups/";
            url = url + Uri.EscapeDataString(resourceGroupName);
            url = url + "/providers/";
            if (this.Client.ResourceNamespace != null)
            {
                url = url + Uri.EscapeDataString(this.Client.ResourceNamespace);
            }
            url = url + "/automationAccounts/";
            url = url + Uri.EscapeDataString(automationAccount);
            url = url + "/agentRegistrationInformation";
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=2015-01-01-preview");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Get;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                httpRequest.Headers.Add("Accept", "application/json");
                httpRequest.Headers.Add("x-ms-version", "2014-06-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Anything other than 200 OK is surfaced as a CloudException.
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    AgentRegistrationGetResponse result = null;
                    // Deserialize Response
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new AgentRegistrationGetResponse();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }

                        // Manually walk the JSON payload (generated code does not use a serializer):
                        // { dscMetaConfiguration, endpoint, keys: { primary, secondary } }
                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            AgentRegistration agentRegistrationInstance = new AgentRegistration();
                            result.AgentRegistration = agentRegistrationInstance;

                            JToken dscMetaConfigurationValue = responseDoc["dscMetaConfiguration"];
                            if (dscMetaConfigurationValue != null && dscMetaConfigurationValue.Type != JTokenType.Null)
                            {
                                string dscMetaConfigurationInstance = ((string)dscMetaConfigurationValue);
                                agentRegistrationInstance.DscMetaConfiguration = dscMetaConfigurationInstance;
                            }

                            JToken endpointValue = responseDoc["endpoint"];
                            if (endpointValue != null && endpointValue.Type != JTokenType.Null)
                            {
                                string endpointInstance = ((string)endpointValue);
                                agentRegistrationInstance.Endpoint = endpointInstance;
                            }

                            JToken keysValue = responseDoc["keys"];
                            if (keysValue != null && keysValue.Type != JTokenType.Null)
                            {
                                AgentRegistrationKeys keysInstance = new AgentRegistrationKeys();
                                agentRegistrationInstance.Keys = keysInstance;

                                JToken primaryValue = keysValue["primary"];
                                if (primaryValue != null && primaryValue.Type != JTokenType.Null)
                                {
                                    string primaryInstance = ((string)primaryValue);
                                    keysInstance.Primary = primaryInstance;
                                }

                                JToken secondaryValue = keysValue["secondary"];
                                if (secondaryValue != null && secondaryValue.Type != JTokenType.Null)
                                {
                                    string secondaryInstance = ((string)secondaryValue);
                                    keysInstance.Secondary = secondaryInstance;
                                }
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }

        /// <summary>
        /// Regenerate a primary or secondary agent registration key  (see
        /// http://aka.ms/azureautomationsdk/agentregistrationoperations for
        /// more information)
        /// </summary>
        /// <param name='resourceGroupName'>
        /// Required. The name of the resource group
        /// </param>
        /// <param name='automationAccount'>
        /// Required. The automation account name.
        /// </param>
        /// <param name='keyName'>
        /// Required. The name of the agent registration key to be regenerated
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response model for the agent registration key regenerate
        /// operation.
        /// </returns>
        public async Task<AgentRegistrationRegenerateKeyResponse> RegenerateKeyAsync(string resourceGroupName, string automationAccount, AgentRegistrationRegenerateKeyParameter keyName, CancellationToken cancellationToken)
        {
            // Validate
            // NOTE: despite its name, 'keyName' is the full regenerate-key parameter
            // payload (generated naming carried over from the service spec).
            if (resourceGroupName == null)
            {
                throw new ArgumentNullException("resourceGroupName");
            }
            if (automationAccount == null)
            {
                throw new ArgumentNullException("automationAccount");
            }
            if (keyName == null)
            {
                throw new ArgumentNullException("keyName");
            }
            if (keyName.KeyName == null)
            {
                throw new ArgumentNullException("keyName.KeyName");
            }

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("automationAccount", automationAccount);
                tracingParameters.Add("keyName", keyName);
                TracingAdapter.Enter(invocationId, this, "RegenerateKeyAsync", tracingParameters);
            }

            // Construct URL
            // POST .../automationAccounts/{account}/agentRegistrationInformation/regenerateKey?api-version=...
            string url = "";
            url = url + "/subscriptions/";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/resourceGroups/";
            url = url + Uri.EscapeDataString(resourceGroupName);
            url = url + "/providers/";
            if (this.Client.ResourceNamespace != null)
            {
                url = url + Uri.EscapeDataString(this.Client.ResourceNamespace);
            }
            url = url + "/automationAccounts/";
            url = url + Uri.EscapeDataString(automationAccount);
            url = url + "/agentRegistrationInformation/regenerateKey";
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=2015-01-01-preview");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Post;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                httpRequest.Headers.Add("Accept", "application/json");
                httpRequest.Headers.Add("x-ms-version", "2014-06-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Serialize Request
                // Body shape: { keyName, name?, location?, tags? }
                string requestContent = null;
                JToken requestDoc = null;

                JObject agentRegistrationRegenerateKeyParameterValue = new JObject();
                requestDoc = agentRegistrationRegenerateKeyParameterValue;

                agentRegistrationRegenerateKeyParameterValue["keyName"] = keyName.KeyName;

                if (keyName.Name != null)
                {
                    agentRegistrationRegenerateKeyParameterValue["name"] = keyName.Name;
                }

                if (keyName.Location != null)
                {
                    agentRegistrationRegenerateKeyParameterValue["location"] = keyName.Location;
                }

                if (keyName.Tags != null)
                {
                    JObject tagsDictionary = new JObject();
                    foreach (KeyValuePair<string, string> pair in keyName.Tags)
                    {
                        string tagsKey = pair.Key;
                        string tagsValue = pair.Value;
                        tagsDictionary[tagsKey] = tagsValue;
                    }
                    agentRegistrationRegenerateKeyParameterValue["tags"] = tagsDictionary;
                }

                requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
                httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
                httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Anything other than 200 OK is surfaced as a CloudException.
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    AgentRegistrationRegenerateKeyResponse result = null;
                    // Deserialize Response
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new AgentRegistrationRegenerateKeyResponse();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }

                        // Same payload shape as GetAsync:
                        // { dscMetaConfiguration, endpoint, keys: { primary, secondary } }
                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            AgentRegistration agentRegistrationInstance = new AgentRegistration();
                            result.AgentRegistration = agentRegistrationInstance;

                            JToken dscMetaConfigurationValue = responseDoc["dscMetaConfiguration"];
                            if (dscMetaConfigurationValue != null && dscMetaConfigurationValue.Type != JTokenType.Null)
                            {
                                string dscMetaConfigurationInstance = ((string)dscMetaConfigurationValue);
                                agentRegistrationInstance.DscMetaConfiguration = dscMetaConfigurationInstance;
                            }

                            JToken endpointValue = responseDoc["endpoint"];
                            if (endpointValue != null && endpointValue.Type != JTokenType.Null)
                            {
                                string endpointInstance = ((string)endpointValue);
                                agentRegistrationInstance.Endpoint = endpointInstance;
                            }

                            JToken keysValue = responseDoc["keys"];
                            if (keysValue != null && keysValue.Type != JTokenType.Null)
                            {
                                AgentRegistrationKeys keysInstance = new AgentRegistrationKeys();
                                agentRegistrationInstance.Keys = keysInstance;

                                JToken primaryValue = keysValue["primary"];
                                if (primaryValue != null && primaryValue.Type != JTokenType.Null)
                                {
                                    string primaryInstance = ((string)primaryValue);
                                    keysInstance.Primary = primaryInstance;
                                }

                                JToken secondaryValue = keysValue["secondary"];
                                if (secondaryValue != null && secondaryValue.Type != JTokenType.Null)
                                {
                                    string secondaryInstance = ((string)secondaryValue);
                                    keysInstance.Secondary = secondaryInstance;
                                }
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.AcceptanceTestsHttp
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Models;

    /// <summary>
    /// Extension methods for HttpClientFailure.
    /// Each operation exists as a blocking wrapper and an async variant; the
    /// blocking wrappers call the async method directly and block on the
    /// returned task (safe here because the async paths use
    /// ConfigureAwait(false) throughout), avoiding the legacy
    /// Task.Factory.StartNew(...).Unwrap() thread-pool hop per call.
    /// </summary>
    public static partial class HttpClientFailureExtensions
    {
        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Head400(this IHttpClientFailure operations)
        {
            return operations.Head400Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Head400Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Head400WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Get400(this IHttpClientFailure operations)
        {
            return operations.Get400Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Get400Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Get400WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Put400(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Put400Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Put400Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Put400WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Patch400(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Patch400Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Patch400Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Patch400WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Post400(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Post400Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Post400Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Post400WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Delete400(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Delete400Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 400 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Delete400Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Delete400WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 401 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Head401(this IHttpClientFailure operations)
        {
            return operations.Head401Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 401 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Head401Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Head401WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 402 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Get402(this IHttpClientFailure operations)
        {
            return operations.Get402Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 402 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Get402Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Get402WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 403 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Get403(this IHttpClientFailure operations)
        {
            return operations.Get403Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 403 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Get403Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Get403WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 404 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Put404(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Put404Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 404 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Put404Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Put404WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 405 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Patch405(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Patch405Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 405 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Patch405Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Patch405WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 406 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Post406(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Post406Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 406 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Post406Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Post406WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 407 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Delete407(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Delete407Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 407 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Delete407Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Delete407WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 409 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Put409(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Put409Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 409 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Put409Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Put409WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 410 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Head410(this IHttpClientFailure operations)
        {
            return operations.Head410Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 410 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Head410Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Head410WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 411 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Get411(this IHttpClientFailure operations)
        {
            return operations.Get411Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 411 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Get411Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Get411WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 412 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Get412(this IHttpClientFailure operations)
        {
            return operations.Get412Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 412 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Get412Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Get412WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 413 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Put413(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Put413Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 413 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Put413Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Put413WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 414 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Patch414(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Patch414Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 414 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Patch414Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Patch414WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 415 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Post415(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Post415Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 415 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Post415Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Post415WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 416 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Get416(this IHttpClientFailure operations)
        {
            return operations.Get416Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 416 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Get416Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Get416WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 417 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        public static Error Delete417(this IHttpClientFailure operations, bool? booleanValue = default(bool?))
        {
            return operations.Delete417Async(booleanValue).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 417 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='booleanValue'>Simple boolean value true</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Delete417Async(this IHttpClientFailure operations, bool? booleanValue = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Delete417WithHttpMessagesAsync(booleanValue, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Return 429 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        public static Error Head429(this IHttpClientFailure operations)
        {
            return operations.Head429Async().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Return 429 status code - should be represented in the client as an error
        /// </summary>
        /// <param name='operations'>The operations group for this extension method.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        public static async Task<Error> Head429Async(this IHttpClientFailure operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.Head429WithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }
    }
}
using System;
using System.IO;

// ReSharper disable AssignNullToNotNullAttribute

namespace Thinktecture.IO.Adapters
{
	/// <summary>
	/// Adapter for <see cref="Path"/>. Every member is a direct, stateless
	/// pass-through to the corresponding static <see cref="Path"/> API.
	/// </summary>
	public class PathAdapter : IPath
	{
		/// <inheritdoc />
		public char DirectorySeparatorChar => Path.DirectorySeparatorChar;

		/// <inheritdoc />
		public char AltDirectorySeparatorChar => Path.AltDirectorySeparatorChar;

		/// <inheritdoc />
		public char VolumeSeparatorChar => Path.VolumeSeparatorChar;

		/// <inheritdoc />
		public char PathSeparator => Path.PathSeparator;

		/// <inheritdoc />
		public string? ChangeExtension(string? path, string? extension) => Path.ChangeExtension(path, extension);

		/// <inheritdoc />
		public string Combine(params string[] paths) => Path.Combine(paths);

		/// <inheritdoc />
		public string GetRelativePath(string relativeTo, string path) => Path.GetRelativePath(relativeTo, path);

		/// <inheritdoc />
		public ReadOnlySpan<char> GetDirectoryName(ReadOnlySpan<char> path) => Path.GetDirectoryName(path);

		/// <inheritdoc />
		public ReadOnlySpan<char> GetExtension(ReadOnlySpan<char> path) => Path.GetExtension(path);

		/// <inheritdoc />
		public ReadOnlySpan<char> GetFileName(ReadOnlySpan<char> path) => Path.GetFileName(path);

		/// <inheritdoc />
		public ReadOnlySpan<char> GetFileNameWithoutExtension(ReadOnlySpan<char> path) => Path.GetFileNameWithoutExtension(path);

		/// <inheritdoc />
		public bool IsPathFullyQualified(string path) => Path.IsPathFullyQualified(path);

		/// <inheritdoc />
		public bool IsPathFullyQualified(ReadOnlySpan<char> path) => Path.IsPathFullyQualified(path);

		/// <inheritdoc />
		public bool HasExtension(ReadOnlySpan<char> path) => Path.HasExtension(path);

		/// <inheritdoc />
		public string Join(ReadOnlySpan<char> path1, ReadOnlySpan<char> path2) => Path.Join(path1, path2);

		/// <inheritdoc />
		public string Join(ReadOnlySpan<char> path1, ReadOnlySpan<char> path2, ReadOnlySpan<char> path3) => Path.Join(path1, path2, path3);

#if NETCOREAPP || NET5_0
		/// <inheritdoc />
		public string Join(ReadOnlySpan<char> path1, ReadOnlySpan<char> path2, ReadOnlySpan<char> path3, ReadOnlySpan<char> path4) => Path.Join(path1, path2, path3, path4);

		/// <inheritdoc />
		public string Join(string? path1, string? path2) => Path.Join(path1, path2);

		/// <inheritdoc />
		public string Join(string? path1, string? path2, string? path3) => Path.Join(path1, path2, path3);

		/// <inheritdoc />
		public string Join(string? path1, string? path2, string? path3, string? path4) => Path.Join(path1, path2, path3, path4);

		/// <inheritdoc />
		public string Join(params string?[] paths) => Path.Join(paths);

		/// <inheritdoc />
		public string TrimEndingDirectorySeparator(string path) => Path.TrimEndingDirectorySeparator(path);

		/// <inheritdoc />
		public ReadOnlySpan<char> TrimEndingDirectorySeparator(ReadOnlySpan<char> path) => Path.TrimEndingDirectorySeparator(path);

		/// <inheritdoc />
		public bool EndsInDirectorySeparator(ReadOnlySpan<char> path) => Path.EndsInDirectorySeparator(path);

		/// <inheritdoc />
		public bool EndsInDirectorySeparator(string path) => Path.EndsInDirectorySeparator(path);
#endif

		/// <inheritdoc />
		public bool TryJoin(ReadOnlySpan<char> path1, ReadOnlySpan<char> path2, Span<char> destination, out int charsWritten) => Path.TryJoin(path1, path2, destination, out charsWritten);

		/// <inheritdoc />
		public bool TryJoin(ReadOnlySpan<char> path1, ReadOnlySpan<char> path2, ReadOnlySpan<char> path3, Span<char> destination, out int charsWritten) => Path.TryJoin(path1, path2, path3, destination, out charsWritten);

		/// <inheritdoc />
		public string GetFullPath(string path, string basePath) => Path.GetFullPath(path, basePath);

		/// <inheritdoc />
		public bool IsPathRooted(ReadOnlySpan<char> path) => Path.IsPathRooted(path);

		/// <inheritdoc />
		public ReadOnlySpan<char> GetPathRoot(ReadOnlySpan<char> path) => Path.GetPathRoot(path);

		/// <inheritdoc />
		public string? GetDirectoryName(string? path) => Path.GetDirectoryName(path);

		/// <inheritdoc />
		public string? GetExtension(string? path) => Path.GetExtension(path);

		/// <inheritdoc />
		public string? GetFileName(string? path) => Path.GetFileName(path);

		/// <inheritdoc />
		public string? GetFileNameWithoutExtension(string? path) => Path.GetFileNameWithoutExtension(path);

		/// <inheritdoc />
		public char[] GetInvalidFileNameChars() => Path.GetInvalidFileNameChars();

		/// <inheritdoc />
		public char[] GetInvalidPathChars() => Path.GetInvalidPathChars();

		/// <inheritdoc />
		public string? GetPathRoot(string? path) => Path.GetPathRoot(path);

		/// <inheritdoc />
		public string GetRandomFileName() => Path.GetRandomFileName();

		/// <inheritdoc />
		public bool HasExtension(string? path) => Path.HasExtension(path);

		/// <inheritdoc />
		public bool IsPathRooted(string? path) => Path.IsPathRooted(path);

		/// <inheritdoc />
		public string Combine(string path1, string path2) => Path.Combine(path1, path2);

		/// <inheritdoc />
		public string Combine(string path1, string path2, string path3) => Path.Combine(path1, path2, path3);

		/// <inheritdoc />
		public string Combine(string path1, string path2, string path3, string path4) => Path.Combine(path1, path2, path3, path4);

		/// <inheritdoc />
		public string GetFullPath(string path) => Path.GetFullPath(path);

		/// <inheritdoc />
		public string GetTempPath() => Path.GetTempPath();

		/// <inheritdoc />
		public string GetTempFileName() => Path.GetTempFileName();
	}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Composition;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.Utilities;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.LanguageServices.Implementation.F1Help;
using Roslyn.Utilities;

namespace Microsoft.VisualStudio.LanguageServices.CSharp.LanguageService
{
    /// <summary>
    /// Computes the F1 help keyword for the C# construct at the user's selection.
    /// Probes, in order: the touching token (symbols, keywords, preprocessor
    /// directives, operators), then the previous token, then trivia (#region
    /// messages and comments).
    /// </summary>
    [ExportLanguageService(typeof(IHelpContextService), LanguageNames.CSharp), Shared]
    internal class CSharpHelpContextService : AbstractHelpContextService
    {
        public override string Language { get { return "csharp"; } }

        public override string Product { get { return "csharp"; } }

        // Help keywords for C# keywords follow the "<text>_CSharpKeyword" convention.
        private static string Keyword(string text) { return text + "_CSharpKeyword"; }

        /// <summary>
        /// Returns the help term for the construct at <paramref name="span"/>,
        /// or <see cref="string.Empty"/> when nothing recognizable is found.
        /// </summary>
        public override async Task<string> GetHelpTermAsync(Document document, TextSpan span, CancellationToken cancellationToken)
        {
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>();

            // For now, find the token under the start of the selection.
            var syntaxTree = await document.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
            var token = syntaxTree.GetTouchingToken(span.Start, cancellationToken, findInsideTrivia: true);
            if (IsValid(token, span))
            {
                var semanticModel = await document.GetSemanticModelForSpanAsync(span, cancellationToken).ConfigureAwait(false);
                var result = TryGetText(token, semanticModel, document, syntaxFacts);
                if (string.IsNullOrEmpty(result))
                {
                    // Nothing for the touching token; fall back to the token just before it
                    // (handles a caret placed immediately after a construct).
                    var previousToken = token.GetPreviousToken();
                    if (IsValid(previousToken, span))
                    {
                        result = TryGetText(previousToken, semanticModel, document, syntaxFacts);
                    }
                }
                return result;
            }

            // Not on a token: look at trivia. A #region's message text maps to the
            // "#region" help keyword.
            var trivia = root.FindTrivia(span.Start, findInsideTrivia: true);
            if (trivia.Span.IntersectsWith(span) && trivia.Kind() == SyntaxKind.PreprocessingMessageTrivia &&
                trivia.Token.GetAncestor<RegionDirectiveTriviaSyntax>() != null)
            {
                return "#region";
            }

            if (trivia.MatchesKind(SyntaxKind.MultiLineDocumentationCommentTrivia,
                SyntaxKind.SingleLineDocumentationCommentTrivia,
                SyntaxKind.SingleLineCommentTrivia,
                SyntaxKind.MultiLineCommentTrivia))
            {
                // just find the first "word" that intersects with our position
                var text = await syntaxTree.GetTextAsync(cancellationToken).ConfigureAwait(false);
                int start = span.Start;
                int end = span.Start;
                while (start > 0 && syntaxFacts.IsIdentifierPartCharacter(text[start - 1]))
                {
                    start--;
                }

                // NOTE(review): the bound is `text.Length - 1`, so an identifier character in
                // the very last position of the file is never included — confirm intended.
                while (end < text.Length - 1 && syntaxFacts.IsIdentifierPartCharacter(text[end]))
                {
                    end++;
                }

                return text.GetSubText(TextSpan.FromBounds(start, end)).ToString();
            }

            return string.Empty;
        }

        private bool IsValid(SyntaxToken token, TextSpan span)
        {
            // If the token doesn't actually intersect with our position, give up
            // NOTE(review): EndIfDirectiveTrivia is a *trivia/node* kind compared against a
            // *token* kind here, so this disjunct may never be true — verify against Roslyn history.
            return token.Kind() == SyntaxKind.EndIfDirectiveTrivia || token.Span.IntersectsWith(span);
        }

        /// <summary>
        /// Tries each keyword/preprocessor/symbol/operator classifier in priority order;
        /// returns the first non-empty help term, else <see cref="string.Empty"/>.
        /// </summary>
        private string TryGetText(SyntaxToken token, SemanticModel semanticModel, Document document, ISyntaxFactsService syntaxFacts)
        {
            string text = null;
            if (TryGetTextForContextualKeyword(token, document, syntaxFacts, out text) ||
                TryGetTextForKeyword(token, document, syntaxFacts, out text) ||
                TryGetTextForPreProcessor(token, document, syntaxFacts, out text) ||
                TryGetTextForSymbol(token, semanticModel, document, out text) ||
                TryGetTextForOperator(token, document, out text))
            {
                return text;
            }

            return string.Empty;
        }

        /// <summary>
        /// Binds the token to a symbol and formats it as a documentation-style ID
        /// (e.g. "Namespace.Type.Member"). Handles generic type arguments and T?.
        /// </summary>
        private bool TryGetTextForSymbol(SyntaxToken token, SemanticModel semanticModel, Document document, out string text)
        {
            ISymbol symbol;
            if (token.Parent is TypeArgumentListSyntax)
            {
                // Inside a type-argument list, bind the whole generic name instead of the token.
                var genericName = token.GetAncestor<GenericNameSyntax>();
                symbol = semanticModel.GetSymbolInfo(genericName, CancellationToken.None).Symbol ?? semanticModel.GetTypeInfo(genericName, CancellationToken.None).Type;
            }
            else if (token.Parent is NullableTypeSyntax && token.IsKind(SyntaxKind.QuestionToken))
            {
                // The '?' of "T?" maps to Nullable<T>'s help page.
                text = "System.Nullable`1";
                return true;
            }
            else
            {
                var symbols = semanticModel.GetSymbols(token, document.Project.Solution.Workspace, bindLiteralsToUnderlyingType: true, cancellationToken: CancellationToken.None);
                symbol = symbols.FirstOrDefault();

                if (symbol == null)
                {
                    // Fall back to the member group (e.g. an overloaded method that failed to bind).
                    var bindableParent = document.GetLanguageService<ISyntaxFactsService>().GetBindableParent(token);
                    var overloads = semanticModel.GetMemberGroup(bindableParent);
                    symbol = overloads.FirstOrDefault();
                }
            }

            // Local: return the name if it's the declaration, otherwise the type
            if (symbol is ILocalSymbol && !symbol.DeclaringSyntaxReferences.Any(d => d.GetSyntax().DescendantTokens().Contains(token)))
            {
                symbol = ((ILocalSymbol)symbol).Type;
            }

            // Just use syntaxfacts for operators
            if (symbol is IMethodSymbol && ((IMethodSymbol)symbol).MethodKind == MethodKind.BuiltinOperator)
            {
                text = null;
                return false;
            }

            text = symbol != null ? Format(symbol) : null;
            return symbol != null;
        }

        /// <summary>Maps operator tokens ("::", "?:", "=>", "+=", ...) to their help keywords.</summary>
        private bool TryGetTextForOperator(SyntaxToken token, Document document, out string text)
        {
            var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>();
            if (syntaxFacts.IsOperator(token) || syntaxFacts.IsPredefinedOperator(token) || SyntaxFacts.IsAssignmentExpressionOperatorToken(token.Kind()))
            {
                text = Keyword(syntaxFacts.GetText(token.RawKind));
                return true;
            }

            if (token.IsKind(SyntaxKind.ColonColonToken))
            {
                text = "::_CSharpKeyword";
                return true;
            }

            if (token.Kind() == SyntaxKind.ColonToken && token.Parent is NameColonSyntax)
            {
                text = "cs_namedParameter";
                return true;
            }

            if (token.IsKind(SyntaxKind.QuestionToken) && token.Parent is ConditionalExpressionSyntax)
            {
                text = "?_CSharpKeyword";
                return true;
            }

            if (token.IsKind(SyntaxKind.EqualsGreaterThanToken))
            {
                text = "=>_CSharpKeyword";
                return true;
            }

            if (token.IsKind(SyntaxKind.PlusEqualsToken))
            {
                text = "+=_CSharpKeyword";
                return true;
            }

            if (token.IsKind(SyntaxKind.MinusEqualsToken))
            {
                text = "-=_CSharpKeyword";
                return true;
            }

            text = null;
            return false;
        }

        /// <summary>Maps preprocessor tokens to "#&lt;keyword&gt;" help terms.</summary>
        private bool TryGetTextForPreProcessor(SyntaxToken token, Document document, ISyntaxFactsService syntaxFacts, out string text)
        {
            if (syntaxFacts.IsPreprocessorKeyword(token))
            {
                text = "#" + token.Text;
                return true;
            }

            if (token.IsKind(SyntaxKind.EndOfDirectiveToken) && token.GetAncestor<RegionDirectiveTriviaSyntax>() != null)
            {
                text = "#region";
                return true;
            }

            text = null;
            return false;
        }

        /// <summary>
        /// Contextual keywords that need disambiguation by position:
        /// "partial" (method vs. type) and "where" (constraint vs. query clause).
        /// </summary>
        private bool TryGetTextForContextualKeyword(SyntaxToken token, Document document, ISyntaxFactsService syntaxFacts, out string text)
        {
            if (token.IsContextualKeyword())
            {
                switch (token.Kind())
                {
                    case SyntaxKind.PartialKeyword:
                        if (token.Parent.GetAncestorOrThis<MethodDeclarationSyntax>() != null)
                        {
                            text = "partialmethod_CSharpKeyword";
                            return true;
                        }
                        else if (token.Parent.GetAncestorOrThis<ClassDeclarationSyntax>() != null)
                        {
                            text = "partialtype_CSharpKeyword";
                            return true;
                        }

                        break;

                    case SyntaxKind.WhereKeyword:
                        if (token.Parent.GetAncestorOrThis<TypeParameterConstraintClauseSyntax>() != null)
                        {
                            text = "whereconstraint_CSharpKeyword";
                        }
                        else
                        {
                            text = "whereclause_CSharpKeyword";
                        }

                        return true;
                }
            }

            text = null;
            return false;
        }

        /// <summary>
        /// Plain keywords, plus identifier-shaped pseudo-keywords ("var", "dynamic")
        /// and "in" disambiguated between query from/join clauses.
        /// </summary>
        private bool TryGetTextForKeyword(SyntaxToken token, Document document, ISyntaxFactsService syntaxFacts, out string text)
        {
            if (token.Kind() == SyntaxKind.InKeyword)
            {
                if (token.GetAncestor<FromClauseSyntax>() != null)
                {
                    text = "from_CSharpKeyword";
                    return true;
                }

                if (token.GetAncestor<JoinClauseSyntax>() != null)
                {
                    text = "join_CSharpKeyword";
                    return true;
                }
            }

            if (token.IsKeyword())
            {
                text = Keyword(token.Text);
                return true;
            }

            // "var" is only special when used as the declared type of a variable.
            if (token.ValueText == "var" && token.IsKind(SyntaxKind.IdentifierToken) &&
                token.Parent.Parent is VariableDeclarationSyntax && token.Parent == ((VariableDeclarationSyntax)token.Parent.Parent).Type)
            {
                text = "var_CSharpKeyword";
                return true;
            }

            if (syntaxFacts.IsTypeNamedDynamic(token, token.Parent))
            {
                text = "dynamic_CSharpKeyword";
                return true;
            }

            text = null;
            return false;
        }

        /// <summary>
        /// Formats a type/namespace as a help ID: Nullable&lt;T&gt; collapses to
        /// "System.Nullable`1"; other generics get a "`arity" suffix.
        /// </summary>
        private string FormatTypeOrNamespace(INamespaceOrTypeSymbol symbol)
        {
            var displayString = symbol.ToDisplayString(TypeFormat);

            var type = symbol as ITypeSymbol;
            if (type != null && type.OriginalDefinition.SpecialType == SpecialType.System_Nullable_T)
            {
                return "System.Nullable`1";
            }

            if (symbol.GetTypeArguments().Any())
            {
                return string.Format("{0}`{1}", displayString, symbol.GetTypeArguments().Count());
            }

            return displayString;
        }

        /// <summary>
        /// Formats an arbitrary symbol: types/namespaces directly; aliases, locals and
        /// parameters by their type; constructors as "Type.#ctor"; generic methods with
        /// a "``arity" suffix; everything else as "ContainingType.Name".
        /// </summary>
        private string Format(ISymbol symbol)
        {
            if (symbol is ITypeSymbol || symbol is INamespaceSymbol)
            {
                return FormatTypeOrNamespace((INamespaceOrTypeSymbol)symbol);
            }

            if (symbol.MatchesKind(SymbolKind.Alias, SymbolKind.Local, SymbolKind.Parameter))
            {
                return Format(symbol.GetSymbolType());
            }

            var containingType = FormatTypeOrNamespace(symbol.ContainingType);
            var name = symbol.ToDisplayString(NameFormat);

            if (symbol.IsConstructor())
            {
                return string.Format("{0}.#ctor", containingType);
            }

            if (symbol.GetTypeArguments().Any())
            {
                return string.Format("{0}.{1}``{2}", containingType, name, symbol.GetTypeArguments().Count());
            }

            return string.Format("{0}.{1}", containingType, name);
        }
    }
}
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//

using System;
using System.Collections.Generic;
using System.IO;

namespace DiscUtils.Fat
{
    /// <summary>
    /// Stream over the cluster chain of a single FAT file. Clusters are resolved
    /// lazily through the FAT and cached in _knownClusters; the final entry in that
    /// list is always the chain's End-of-Chain marker (or EOC alone for an empty file).
    /// </summary>
    internal class ClusterStream : Stream
    {
        private readonly FileAccess _access;
        // One cluster's worth of data; holds the cluster identified by _currentCluster.
        private readonly byte[] _clusterBuffer;
        private readonly FileAllocationTable _fat;
        // Cache of the chain: cluster numbers in file order, terminated by the EOC marker.
        private readonly List<uint> _knownClusters;
        private readonly ClusterReader _reader;

        private bool _atEOF;
        // Cluster currently held in _clusterBuffer; uint.MaxValue means "none loaded".
        private uint _currentCluster;
        private uint _length;
        private long _position;

        internal ClusterStream(FatFileSystem fileSystem, FileAccess access, uint firstCluster, uint length)
        {
            _access = access;
            _reader = fileSystem.ClusterReader;
            _fat = fileSystem.Fat;
            _length = length;

            _knownClusters = new List<uint>();
            if (firstCluster != 0)
            {
                _knownClusters.Add(firstCluster);
            }
            else
            {
                // Empty file: the chain is just the EOC marker.
                _knownClusters.Add(FatBuffer.EndOfChain);
            }

            // uint.MaxValue is the caller's "unknown length" sentinel; derive it by
            // walking the chain.
            if (_length == uint.MaxValue)
            {
                _length = DetectLength();
            }

            _currentCluster = uint.MaxValue;
            _clusterBuffer = new byte[_reader.ClusterSize];
        }

        public override bool CanRead
        {
            get { return _access == FileAccess.Read || _access == FileAccess.ReadWrite; }
        }

        public override bool CanSeek
        {
            get { return true; }
        }

        public override bool CanWrite
        {
            get { return _access == FileAccess.ReadWrite || _access == FileAccess.Write; }
        }

        public override long Length
        {
            get { return _length; }
        }

        public override long Position
        {
            get { return _position; }

            set
            {
                if (value >= 0)
                {
                    _position = value;
                    _atEOF = false;
                }
                else
                {
                    throw new ArgumentOutOfRangeException(nameof(value), "Attempt to move before beginning of stream");
                }
            }
        }

        // Raised when the file's first cluster changes (first allocation, or truncate
        // to zero) so the owner can update the directory entry.
        public event FirstClusterChangedDelegate FirstClusterChanged;

        // Writes go through the FAT/ClusterReader directly, so there is nothing to flush here.
        public override void Flush() {}

        /// <summary>
        /// Reads up to count bytes at _position; returns 0 exactly once at EOF
        /// (subsequent EOF reads also return 0 via the _atEOF flag).
        /// </summary>
        public override int Read(byte[] buffer, int offset, int count)
        {
            if (!CanRead)
            {
                throw new IOException("Attempt to read from file not opened for read");
            }

            if (_position > _length)
            {
                throw new IOException("Attempt to read beyond end of file");
            }

            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count), "Attempt to read negative number of bytes");
            }

            int target = count;
            if (_length - _position < count)
            {
                target = (int)(_length - _position);
            }

            if (!TryLoadCurrentCluster())
            {
                if ((_position == _length || _position == DetectLength()) && !_atEOF)
                {
                    _atEOF = true;
                    return 0;
                }

                throw new IOException("Attempt to read beyond known clusters");
            }

            int numRead = 0;
            while (numRead < target)
            {
                int clusterOffset = (int)(_position % _reader.ClusterSize);
                int toCopy = Math.Min(_reader.ClusterSize - clusterOffset, target - numRead);
                Array.Copy(_clusterBuffer, clusterOffset, buffer, offset + numRead, toCopy);

                // Remember how many we've read in total
                numRead += toCopy;

                // Increment the position
                _position += toCopy;

                // Abort if we've hit the end of the file
                if (!TryLoadCurrentCluster())
                {
                    break;
                }
            }

            if (numRead == 0)
            {
                _atEOF = true;
            }

            return numRead;
        }

        // NOTE(review): unlike the Position setter, Seek does not reject a negative
        // resulting position — confirm callers never seek before the stream start.
        public override long Seek(long offset, SeekOrigin origin)
        {
            long newPos = offset;
            if (origin == SeekOrigin.Current)
            {
                newPos += _position;
            }
            else if (origin == SeekOrigin.End)
            {
                newPos += Length;
            }

            _position = newPos;
            _atEOF = false;
            return newPos;
        }

        /// <summary>
        /// Grows or shrinks the file: shrinking frees the tail of the FAT chain,
        /// growing allocates and wipes new clusters. Clamps _position into the new length.
        /// </summary>
        public override void SetLength(long value)
        {
            long desiredNumClusters = (value + _reader.ClusterSize - 1) / _reader.ClusterSize;
            long actualNumClusters = (_length + _reader.ClusterSize - 1) / _reader.ClusterSize;

            if (desiredNumClusters < actualNumClusters)
            {
                uint cluster;
                if (!TryGetClusterByPosition(value, out cluster))
                {
                    throw new IOException("Internal state corrupt - unable to find cluster");
                }

                // Terminate the chain at the last surviving cluster, then free the rest.
                uint firstToFree = _fat.GetNext(cluster);
                _fat.SetEndOfChain(cluster);
                _fat.FreeChain(firstToFree);

                while (_knownClusters.Count > desiredNumClusters)
                {
                    _knownClusters.RemoveAt(_knownClusters.Count - 1);
                }

                _knownClusters.Add(FatBuffer.EndOfChain);

                if (desiredNumClusters == 0)
                {
                    // File truncated to empty: first cluster becomes 0 in the dir entry.
                    FireFirstClusterAllocated(0);
                }
            }
            else if (desiredNumClusters > actualNumClusters)
            {
                uint cluster;
                while (!TryGetClusterByPosition(value, out cluster))
                {
                    cluster = ExtendChain();
                    _reader.WipeCluster(cluster);
                }
            }

            if (_length != value)
            {
                _length = (uint)value;
                if (_position > _length)
                {
                    _position = _length;
                }
            }
        }

        /// <summary>
        /// Writes count bytes at _position, extending the chain as needed;
        /// the FAT is flushed even if the write fails part-way.
        /// </summary>
        public override void Write(byte[] buffer, int offset, int count)
        {
            int bytesRemaining = count;

            if (!CanWrite)
            {
                throw new IOException("Attempting to write to file not opened for writing");
            }

            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count), count, "Attempting to write negative number of bytes");
            }

            if (offset > buffer.Length || offset + count > buffer.Length)
            {
                throw new ArgumentException("Attempt to write bytes outside of the buffer");
            }

            // TODO: Free space check...
            try
            {
                while (bytesRemaining > 0)
                {
                    // Extend the stream until it encompasses _position
                    uint cluster;
                    while (!TryGetClusterByPosition(_position, out cluster))
                    {
                        cluster = ExtendChain();
                        _reader.WipeCluster(cluster);
                    }

                    // Fill this cluster with as much data as we can (WriteToCluster preserves existing cluster
                    // data, if necessary)
                    int numWritten = WriteToCluster(cluster, (int)(_position % _reader.ClusterSize), buffer, offset, bytesRemaining);
                    offset += numWritten;
                    bytesRemaining -= numWritten;
                    _position += numWritten;
                }

                _length = (uint)Math.Max(_length, _position);
            }
            finally
            {
                _fat.Flush();
            }

            _atEOF = false;
        }

        /// <summary>
        /// Writes up to the next cluster boundary, making sure to preserve existing data in the cluster
        /// that falls outside of the updated range.
        /// </summary>
        /// <param name="cluster">The cluster to write to.</param>
        /// <param name="pos">The file position of the write (within the cluster).</param>
        /// <param name="buffer">The buffer with the new data.</param>
        /// <param name="offset">Offset into buffer of the first byte to write.</param>
        /// <param name="count">The maximum number of bytes to write.</param>
        /// <returns>The number of bytes written - either count, or the number that fit up to
        /// the cluster boundary.</returns>
        private int WriteToCluster(uint cluster, int pos, byte[] buffer, int offset, int count)
        {
            if (pos == 0 && count >= _reader.ClusterSize)
            {
                // Whole-cluster overwrite: no need to read the existing content first.
                _currentCluster = cluster;
                Array.Copy(buffer, offset, _clusterBuffer, 0, _reader.ClusterSize);
                WriteCurrentCluster();
                return _reader.ClusterSize;
            }

            // Partial cluster, so need to read existing cluster data first
            LoadCluster(cluster);

            int copyLength = Math.Min(count, _reader.ClusterSize - pos % _reader.ClusterSize);
            Array.Copy(buffer, offset, _clusterBuffer, pos, copyLength);
            WriteCurrentCluster();
            return copyLength;
        }

        /// <summary>
        /// Adds a new cluster to the end of the existing chain, by allocating a free cluster.
        /// </summary>
        /// <returns>The cluster allocated.</returns>
        /// <remarks>This method does not initialize the data in the cluster, the caller should
        /// perform a write to ensure the cluster data is in known state.</remarks>
        private uint ExtendChain()
        {
            // Sanity check - make sure the final known cluster is the EOC marker
            if (!_fat.IsEndOfChain(_knownClusters[_knownClusters.Count - 1]))
            {
                throw new IOException("Corrupt file system: final cluster isn't End-of-Chain");
            }

            uint cluster;
            if (!_fat.TryGetFreeCluster(out cluster))
            {
                throw new IOException("Out of disk space");
            }

            _fat.SetEndOfChain(cluster);
            if (_knownClusters.Count == 1)
            {
                // Chain was empty (only the EOC marker): this is the file's first cluster.
                FireFirstClusterAllocated(cluster);
            }
            else
            {
                _fat.SetNext(_knownClusters[_knownClusters.Count - 2], cluster);
            }

            // Replace the EOC entry with the new cluster and re-append the marker.
            _knownClusters[_knownClusters.Count - 1] = cluster;
            _knownClusters.Add(_fat.GetNext(cluster));

            return cluster;
        }

        private void FireFirstClusterAllocated(uint cluster)
        {
            if (FirstClusterChanged != null)
            {
                FirstClusterChanged(cluster);
            }
        }

        private bool TryLoadCurrentCluster()
        {
            return TryLoadClusterByPosition(_position);
        }

        private bool TryLoadClusterByPosition(long pos)
        {
            uint cluster;
            if (!TryGetClusterByPosition(pos, out cluster))
            {
                return false;
            }

            // Read the cluster, it's different to the one currently loaded
            if (cluster != _currentCluster)
            {
                _reader.ReadCluster(cluster, _clusterBuffer, 0);
                _currentCluster = cluster;
            }

            return true;
        }

        private void LoadCluster(uint cluster)
        {
            // Read the cluster, it's different to the one currently loaded
            if (cluster != _currentCluster)
            {
                _reader.ReadCluster(cluster, _clusterBuffer, 0);
                _currentCluster = cluster;
            }
        }

        private void WriteCurrentCluster()
        {
            _reader.WriteCluster(_currentCluster, _clusterBuffer, 0);
        }

        /// <summary>
        /// Maps a byte position to its cluster number, walking the FAT chain on demand.
        /// Returns false (cluster = uint.MaxValue or EOC) when pos is past the chain's end.
        /// </summary>
        private bool TryGetClusterByPosition(long pos, out uint cluster)
        {
            int index = (int)(pos / _reader.ClusterSize);

            if (_knownClusters.Count <= index)
            {
                if (!TryPopulateKnownClusters(index))
                {
                    cluster = uint.MaxValue;
                    return false;
                }
            }

            // Chain is shorter than the current stream position
            if (_knownClusters.Count <= index)
            {
                cluster = uint.MaxValue;
                return false;
            }

            cluster = _knownClusters[index];

            // This is the 'special' End-of-chain cluster identifer, so the stream position
            // is greater than the actual file length.
            if (_fat.IsEndOfChain(cluster))
            {
                return false;
            }

            return true;
        }

        // Extends the _knownClusters cache until it covers index (or the chain ends).
        private bool TryPopulateKnownClusters(int index)
        {
            uint lastKnown = _knownClusters[_knownClusters.Count - 1];
            while (!_fat.IsEndOfChain(lastKnown) && _knownClusters.Count <= index)
            {
                lastKnown = _fat.GetNext(lastKnown);
                _knownClusters.Add(lastKnown);
            }

            return _knownClusters.Count > index;
        }

        // Walks the whole chain; length = (cluster count) * cluster size, where the
        // EOC marker accounts for the "- 1".
        private uint DetectLength()
        {
            while (!_fat.IsEndOfChain(_knownClusters[_knownClusters.Count - 1]))
            {
                if (!TryPopulateKnownClusters(_knownClusters.Count))
                {
                    throw new IOException("Corrupt file stream - unable to discover end of cluster chain");
                }
            }

            return (uint)((_knownClusters.Count - 1) * (long)_reader.ClusterSize);
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using Mono.Addins;

namespace OpenSim.Region.CoreModules.Agent.AssetTransaction
{
    /// <summary>
    /// Region module that tracks per-agent asset upload transactions and routes
    /// UDP asset-upload and Xfer packets into them.
    /// </summary>
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "AssetTransactionModule")]
    public class AssetTransactionModule : INonSharedRegionModule,
            IAgentAssetTransactions
    {
//        private static readonly ILog m_log = LogManager.GetLogger(
//                MethodBase.GetCurrentMethod().DeclaringType);

        protected Scene m_Scene;
        private bool m_dumpAssetsToFile = false;

        /// <summary>
        /// Each agent has its own singleton collection of transactions
        /// </summary>
        /// All access is serialized by locking the dictionary itself.
        private Dictionary<UUID, AgentAssetTransactions> AgentTransactions =
            new Dictionary<UUID, AgentAssetTransactions>();

        #region IRegionModule Members

        public void Initialise(IConfigSource config)
        {
        }

        public void AddRegion(Scene scene)
        {
            m_Scene = scene;
            scene.RegisterModuleInterface<IAgentAssetTransactions>(this);
            scene.EventManager.OnNewClient += NewClient;
        }

        public void RegionLoaded(Scene scene)
        {
        }

        public void RemoveRegion(Scene scene)
        {
        }

        public void Close()
        {
        }

        public string Name
        {
            get { return "AgentTransactionModule"; }
        }

        public Type ReplaceableInterface
        {
            get { return typeof(IAgentAssetTransactions); }
        }

        #endregion

        /// <summary>
        /// Wire up the per-client packet handlers that feed this module.
        /// </summary>
        public void NewClient(IClientAPI client)
        {
            client.OnAssetUploadRequest += HandleUDPUploadRequest;
            client.OnXferReceive += HandleXfer;
        }

        #region AgentAssetTransactions

        /// <summary>
        /// Get the collection of asset transactions for the given user.
        /// If one does not already exist, it is created.
        /// </summary>
        /// <param name="userID"></param>
        /// <returns></returns>
        private AgentAssetTransactions GetUserTransactions(UUID userID)
        {
            lock (AgentTransactions)
            {
                // Single TryGetValue lookup instead of ContainsKey + Add + indexer.
                AgentAssetTransactions transactions;
                if (!AgentTransactions.TryGetValue(userID, out transactions))
                {
                    transactions = new AgentAssetTransactions(userID, m_Scene, m_dumpAssetsToFile);
                    AgentTransactions.Add(userID, transactions);
                }

                return transactions;
            }
        }

        /// <summary>
        /// Remove the given agent asset transactions. This should be called
        /// when a client is departing from a scene (and hence won't be making
        /// any more transactions here).
        /// </summary>
        /// <param name="userID"></param>
        public void RemoveAgentAssetTransactions(UUID userID)
        {
            // m_log.DebugFormat("Removing agent asset transactions structure for agent {0}", userID);

            lock (AgentTransactions)
            {
                // Remove is a no-op when the key is absent, so no existence check needed.
                AgentTransactions.Remove(userID);
            }
        }

        /// <summary>
        /// Create an inventory item from data that has been received through
        /// a transaction.
        /// This is called when new clothing or body parts are created.
        /// It may also be called in other situations.
        /// </summary>
        /// <param name="remoteClient"></param>
        /// <param name="transactionID"></param>
        /// <param name="folderID"></param>
        /// <param name="callbackID"></param>
        /// <param name="description"></param>
        /// <param name="name"></param>
        /// <param name="invType"></param>
        /// <param name="type"></param>
        /// <param name="wearableType"></param>
        /// <param name="nextOwnerMask"></param>
        public void HandleItemCreationFromTransaction(IClientAPI remoteClient, UUID transactionID, UUID folderID,
            uint callbackID, string description, string name, sbyte invType,
            sbyte type, byte wearableType, uint nextOwnerMask)
        {
            // m_log.DebugFormat(
            //     "[TRANSACTIONS MANAGER] Called HandleItemCreationFromTransaction with item {0}", name);

            AgentAssetTransactions transactions = GetUserTransactions(remoteClient.AgentId);

            transactions.RequestCreateInventoryItem(remoteClient, transactionID, folderID,
                callbackID, description, name, invType, type, wearableType, nextOwnerMask);
        }

        /// <summary>
        /// Update an inventory item with data that has been received through a
        /// transaction.
        ///
        /// This is called when clothing or body parts are updated (for
        /// instance, with new textures or colours). It may also be called in
        /// other situations.
        /// </summary>
        /// <param name="remoteClient"></param>
        /// <param name="transactionID"></param>
        /// <param name="item"></param>
        public void HandleItemUpdateFromTransaction(IClientAPI remoteClient, UUID transactionID,
            InventoryItemBase item)
        {
            // m_log.DebugFormat(
            //     "[TRANSACTIONS MANAGER] Called HandleItemUpdateFromTransaction with item {0}",
            //     item.Name);

            AgentAssetTransactions transactions = GetUserTransactions(remoteClient.AgentId);

            transactions.RequestUpdateInventoryItem(remoteClient, transactionID, item);
        }

        /// <summary>
        /// Update a task inventory item with data that has been received
        /// through a transaction.
        ///
        /// This is currently called when, for instance, a notecard in a prim
        /// is saved. The data is sent up through a single AssetUploadRequest.
        /// A subsequent UpdateTaskInventory then references the transaction
        /// and comes through this method.
        /// </summary>
        /// <param name="remoteClient"></param>
        /// <param name="part"></param>
        /// <param name="transactionID"></param>
        /// <param name="item"></param>
        public void HandleTaskItemUpdateFromTransaction(IClientAPI remoteClient, SceneObjectPart part,
            UUID transactionID, TaskInventoryItem item)
        {
            // m_log.DebugFormat(
            //     "[TRANSACTIONS MANAGER] Called HandleTaskItemUpdateFromTransaction with item {0}",
            //     item.Name);

            AgentAssetTransactions transactions = GetUserTransactions(remoteClient.AgentId);

            transactions.RequestUpdateTaskInventoryItem(remoteClient, part, transactionID, item);
        }

        /// <summary>
        /// Request that a client (agent) begin an asset transfer.
        /// </summary>
        /// <param name="remoteClient"></param>
        /// <param name="assetID"></param>
        /// <param name="transaction"></param>
        /// <param name="type"></param>
        /// <param name="data"></param>
        /// <param name="storeLocal"></param>
        /// <param name="tempFile"></param>
        public void HandleUDPUploadRequest(IClientAPI remoteClient, UUID assetID, UUID transaction, sbyte type,
            byte[] data, bool storeLocal, bool tempFile)
        {
            // m_log.Debug("HandleUDPUploadRequest - assetID: " + assetID.ToString() + " transaction: " + transaction.ToString() + " type: " + type.ToString() + " storelocal: " + storeLocal + " tempFile: " + tempFile);

            // Cast once rather than repeating (AssetType)type in every comparison.
            AssetType assetType = (AssetType)type;

            // Permanent uploads of chargeable asset types must pass the money module's
            // upload-charge check before a transaction is opened.
            if ((assetType == AssetType.Texture ||
                 assetType == AssetType.Sound ||
                 assetType == AssetType.TextureTGA ||
                 assetType == AssetType.Animation) &&
                tempFile == false)
            {
                Scene scene = (Scene)remoteClient.Scene;
                IMoneyModule mm = scene.RequestModuleInterface<IMoneyModule>();

                if (mm != null)
                {
                    if (!mm.UploadCovered(remoteClient, mm.UploadCharge))
                    {
                        remoteClient.SendAgentAlertMessage("Unable to upload asset. Insufficient funds.", false);
                        return;
                    }
                }
            }

            AgentAssetTransactions transactions = GetUserTransactions(remoteClient.AgentId);

            AssetXferUploader uploader = transactions.RequestXferUploader(transaction);

            if (uploader != null)
            {
                uploader.Initialise(remoteClient, assetID, transaction, type, data, storeLocal, tempFile);
            }
        }

        /// <summary>
        /// Handle asset transfer data packets received in response to the
        /// asset upload request in HandleUDPUploadRequest()
        /// </summary>
        /// <param name="remoteClient"></param>
        /// <param name="xferID"></param>
        /// <param name="packetID"></param>
        /// <param name="data"></param>
        public void HandleXfer(IClientAPI remoteClient, ulong xferID, uint packetID, byte[] data)
        {
            //m_log.Debug("xferID: " + xferID + "  packetID: " + packetID + " data!");
            AgentAssetTransactions transactions = GetUserTransactions(remoteClient.AgentId);

            transactions.HandleXfer(xferID, packetID, data);
        }

        #endregion
    }
}
// Copyright (c) 2006, ComponentAce // http://www.componentace.com // All rights reserved. // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: // Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. // Neither the name of ComponentAce nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. /* Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The names of the authors may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT, INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* * This program is based on zlib-1.1.3, so all credit should go authors * Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu) * and contributors of zlib. 
 */

using System;

namespace Cocos2D.Compression.Zlib
{
    /// <summary>
    /// Decodes the literal/length and distance codes of a DEFLATE stream for
    /// the inflate process. Ported from zlib-1.1.3 (see header); operates as a
    /// resumable state machine so it can suspend whenever input runs out or
    /// the output window fills, and continue on the next call to
    /// <see cref="proc"/>.
    /// </summary>
    internal sealed class InfCodes
    {
        // zlib return codes (mirror the ZStream constants).
        private const int Z_OK = 0;
        private const int Z_STREAM_END = 1;
        private const int Z_NEED_DICT = 2;
        private const int Z_ERRNO = - 1;
        private const int Z_STREAM_ERROR = - 2;
        private const int Z_DATA_ERROR = - 3;
        private const int Z_MEM_ERROR = - 4;
        private const int Z_BUF_ERROR = - 5;
        private const int Z_VERSION_ERROR = - 6;

        // State-machine modes. Prefix key:
        // "i:" = waiting for input, "o:" = waiting for output, "x:" = neither.
        private const int START = 0;   // x: set up for LEN
        private const int LEN = 1;     // i: get length/literal/eob next
        private const int LENEXT = 2;  // i: getting length extra (have base)
        private const int DIST = 3;    // i: get distance next
        private const int DISTEXT = 4; // i: getting distance extra
        private const int COPY = 5;    // o: copying bytes in window, waiting for space
        private const int LIT = 6;     // o: got literal, waiting for output space
        private const int WASH = 7;    // o: got eob, possibly still output waiting
        private const int END = 8;     // x: got eob and all data flushed
        private const int BADCODE = 9; // x: got error

        // inflate_mask[n] masks off the low n bits of the bit buffer.
        private static readonly int[] inflate_mask = new[]
        {
            0x00000000, 0x00000001, 0x00000003, 0x00000007, 0x0000000f,
            0x0000001f, 0x0000003f, 0x0000007f, 0x000000ff, 0x000001ff,
            0x000003ff, 0x000007ff, 0x00000fff, 0x00001fff, 0x00003fff,
            0x00007fff, 0x0000ffff
        };

        internal byte dbits;        // dtree bits decoded per branch
        internal int dist;          // distance back to copy from
        internal int[] dtree;       // distance tree
        internal int dtree_index;   // distance tree
        internal int get_Renamed;   // bits to get for extra
        internal byte lbits;        // ltree bits decoded per branch
        internal int len;           // length of the match currently being copied
        internal int lit;           // literal byte waiting for output space
        internal int[] ltree;       // literal/length/eob tree
        internal int ltree_index;   // literal/length/eob tree
        internal int mode;          // current inflate_codes mode
        internal int need;          // bits needed
        internal int[] tree;        // pointer into tree
        internal int tree_index;    // index of the current subtree within `tree`

        /// <summary>
        /// Creates a decoder with distinct literal/length and distance trees
        /// starting at the given offsets within their arrays.
        /// </summary>
        /// <param name="bl">Bits decoded per branch of the literal/length tree.</param>
        /// <param name="bd">Bits decoded per branch of the distance tree.</param>
        /// <param name="tl">Literal/length tree (3 ints per entry).</param>
        /// <param name="tl_index">Starting entry index into <paramref name="tl"/>.</param>
        /// <param name="td">Distance tree (3 ints per entry).</param>
        /// <param name="td_index">Starting entry index into <paramref name="td"/>.</param>
        /// <param name="z">Stream context (unused here; kept for zlib API parity).</param>
        internal InfCodes(int bl, int bd, int[] tl, int tl_index, int[] td, int td_index, ZStream z)
        {
            mode = START;
            lbits = (byte) bl;
            dbits = (byte) bd;
            ltree = tl;
            ltree_index = tl_index;
            dtree = td;
            dtree_index = td_index;
        }

        /// <summary>
        /// Creates a decoder whose trees both start at index 0 (used for the
        /// fixed Huffman tables).
        /// </summary>
        internal InfCodes(int bl, int bd, int[] tl, int[] td, ZStream z)
        {
            mode = START;
            lbits = (byte) bl;
            dbits = (byte) bd;
            ltree = tl;
            ltree_index = 0;
            dtree = td;
            dtree_index = 0;
        }

        /// <summary>
        /// Runs the decode state machine: consumes compressed input from
        /// <paramref name="z"/> and emits decoded bytes into the sliding
        /// window of <paramref name="s"/>, returning whenever input is
        /// exhausted, the window is full, the stream ends, or an error occurs.
        /// Before every return the local bit buffer and pointers are saved
        /// back into <paramref name="s"/>/<paramref name="z"/> (the zlib
        /// UPDATE macro) so a later call can resume exactly where it left off.
        /// </summary>
        /// <param name="s">Inflate blocks state owning the output window.</param>
        /// <param name="z">Stream holding input buffer and counters.</param>
        /// <param name="r">Result code carried in from the caller.</param>
        /// <returns>A Z_* status code as produced by <c>s.inflate_flush</c>.</returns>
        internal int proc(InfBlocks s, ZStream z, int r)
        {
            int j;      // temporary storage
            //int[] t;  // temporary pointer
            int tindex; // temporary pointer
            int e;      // extra bits or operation
            int b = 0;  // bit buffer
            int k = 0;  // bits in bit buffer
            int p = 0;  // input data pointer
            int n;      // bytes available there
            int q;      // output window write pointer
            int m;      // bytes to end of window or read pointer
            int f;      // pointer to copy strings from

            // copy input/output information to locals (UPDATE macro restores)
            p = z.next_in_index;
            n = z.avail_in;
            b = s.bitb;
            k = s.bitk;
            q = s.write;
            m = q < s.read ? s.read - q - 1 : s.end - q;

            // process input and output based on current state
            while (true)
            {
                switch (mode)
                {
                    // waiting for "i:"=input, "o:"=output, "x:"=nothing
                    case START: // x: set up for LEN
                        // Enough room (>= 258 out, >= 10 in) to use the
                        // unrolled fast path instead of the state machine.
                        if (m >= 258 && n >= 10)
                        {
                            s.bitb = b;
                            s.bitk = k;
                            z.avail_in = n;
                            z.total_in += p - z.next_in_index;
                            z.next_in_index = p;
                            s.write = q;
                            r = inflate_fast(lbits, dbits, ltree, ltree_index, dtree, dtree_index, s, z);

                            p = z.next_in_index;
                            n = z.avail_in;
                            b = s.bitb;
                            k = s.bitk;
                            q = s.write;
                            m = q < s.read ? s.read - q - 1 : s.end - q;

                            if (r != Z_OK)
                            {
                                mode = r == Z_STREAM_END ? WASH : BADCODE;
                                break;
                            }
                        }
                        need = lbits;
                        tree = ltree;
                        tree_index = ltree_index;

                        mode = LEN;
                        goto case LEN;

                    case LEN: // i: get length/literal/eob next
                        j = need;
                        // Refill the bit buffer until it holds j bits.
                        while (k < (j))
                        {
                            if (n != 0)
                                r = Z_OK;
                            else
                            {
                                // Out of input: save state and suspend.
                                s.bitb = b;
                                s.bitk = k;
                                z.avail_in = n;
                                z.total_in += p - z.next_in_index;
                                z.next_in_index = p;
                                s.write = q;
                                return s.inflate_flush(z, r);
                            }
                            n--;
                            b |= (z.next_in[p++] & 0xff) << k;
                            k += 8;
                        }

                        tindex = (tree_index + (b & inflate_mask[j])) * 3;

                        b = SupportClass.URShift(b, (tree[tindex + 1]));
                        k -= (tree[tindex + 1]);

                        e = tree[tindex];

                        if (e == 0)
                        {
                            // literal
                            lit = tree[tindex + 2];
                            mode = LIT;
                            break;
                        }
                        if ((e & 16) != 0)
                        {
                            // length
                            get_Renamed = e & 15;
                            len = tree[tindex + 2];
                            mode = LENEXT;
                            break;
                        }
                        if ((e & 64) == 0)
                        {
                            // next table
                            need = e;
                            tree_index = tindex / 3 + tree[tindex + 2];
                            break;
                        }
                        if ((e & 32) != 0)
                        {
                            // end of block
                            mode = WASH;
                            break;
                        }
                        mode = BADCODE; // invalid code
                        z.msg = "invalid literal/length code";
                        r = Z_DATA_ERROR;

                        s.bitb = b;
                        s.bitk = k;
                        z.avail_in = n;
                        z.total_in += p - z.next_in_index;
                        z.next_in_index = p;
                        s.write = q;
                        return s.inflate_flush(z, r);

                    case LENEXT: // i: getting length extra (have base)
                        j = get_Renamed;
                        while (k < (j))
                        {
                            if (n != 0)
                                r = Z_OK;
                            else
                            {
                                s.bitb = b;
                                s.bitk = k;
                                z.avail_in = n;
                                z.total_in += p - z.next_in_index;
                                z.next_in_index = p;
                                s.write = q;
                                return s.inflate_flush(z, r);
                            }
                            n--;
                            b |= (z.next_in[p++] & 0xff) << k;
                            k += 8;
                        }

                        len += (b & inflate_mask[j]);

                        b >>= j;
                        k -= j;

                        need = dbits;
                        tree = dtree;
                        tree_index = dtree_index;
                        mode = DIST;
                        goto case DIST;

                    case DIST: // i: get distance next
                        j = need;
                        while (k < (j))
                        {
                            if (n != 0)
                                r = Z_OK;
                            else
                            {
                                s.bitb = b;
                                s.bitk = k;
                                z.avail_in = n;
                                z.total_in += p - z.next_in_index;
                                z.next_in_index = p;
                                s.write = q;
                                return s.inflate_flush(z, r);
                            }
                            n--;
                            b |= (z.next_in[p++] & 0xff) << k;
                            k += 8;
                        }

                        tindex = (tree_index + (b & inflate_mask[j])) * 3;

                        b >>= tree[tindex + 1];
                        k -= tree[tindex + 1];

                        e = (tree[tindex]);
                        if ((e & 16) != 0)
                        {
                            // distance
                            get_Renamed = e & 15;
                            dist = tree[tindex + 2];
                            mode = DISTEXT;
                            break;
                        }
                        if ((e & 64) == 0)
                        {
                            // next table
                            need = e;
                            tree_index = tindex / 3 + tree[tindex + 2];
                            break;
                        }
                        mode = BADCODE; // invalid code
                        z.msg = "invalid distance code";
                        r = Z_DATA_ERROR;

                        s.bitb = b;
                        s.bitk = k;
                        z.avail_in = n;
                        z.total_in += p - z.next_in_index;
                        z.next_in_index = p;
                        s.write = q;
                        return s.inflate_flush(z, r);

                    case DISTEXT: // i: getting distance extra
                        j = get_Renamed;
                        while (k < (j))
                        {
                            if (n != 0)
                                r = Z_OK;
                            else
                            {
                                s.bitb = b;
                                s.bitk = k;
                                z.avail_in = n;
                                z.total_in += p - z.next_in_index;
                                z.next_in_index = p;
                                s.write = q;
                                return s.inflate_flush(z, r);
                            }
                            n--;
                            b |= (z.next_in[p++] & 0xff) << k;
                            k += 8;
                        }

                        dist += (b & inflate_mask[j]);

                        b >>= j;
                        k -= j;

                        mode = COPY;
                        goto case COPY;

                    case COPY: // o: copying bytes in window, waiting for space
                        f = q - dist;
                        while (f < 0)
                        {
                            // modulo window size-"while" instead
                            // of "if" handles invalid distances
                            f += s.end;
                        }
                        while (len != 0)
                        {
                            // No output space: try wrapping the window, then
                            // flushing; if still none, suspend.
                            if (m == 0)
                            {
                                if (q == s.end && s.read != 0)
                                {
                                    q = 0;
                                    m = q < s.read ? s.read - q - 1 : s.end - q;
                                }
                                if (m == 0)
                                {
                                    s.write = q;
                                    r = s.inflate_flush(z, r);
                                    q = s.write;
                                    m = q < s.read ? s.read - q - 1 : s.end - q;

                                    if (q == s.end && s.read != 0)
                                    {
                                        q = 0;
                                        m = q < s.read ? s.read - q - 1 : s.end - q;
                                    }

                                    if (m == 0)
                                    {
                                        s.bitb = b;
                                        s.bitk = k;
                                        z.avail_in = n;
                                        z.total_in += p - z.next_in_index;
                                        z.next_in_index = p;
                                        s.write = q;
                                        return s.inflate_flush(z, r);
                                    }
                                }
                            }

                            s.window[q++] = s.window[f++];
                            m--;

                            if (f == s.end)
                                f = 0;
                            len--;
                        }
                        mode = START;
                        break;

                    case LIT: // o: got literal, waiting for output space
                        if (m == 0)
                        {
                            if (q == s.end && s.read != 0)
                            {
                                q = 0;
                                m = q < s.read ? s.read - q - 1 : s.end - q;
                            }
                            if (m == 0)
                            {
                                s.write = q;
                                r = s.inflate_flush(z, r);
                                q = s.write;
                                m = q < s.read ? s.read - q - 1 : s.end - q;

                                if (q == s.end && s.read != 0)
                                {
                                    q = 0;
                                    m = q < s.read ? s.read - q - 1 : s.end - q;
                                }
                                if (m == 0)
                                {
                                    s.bitb = b;
                                    s.bitk = k;
                                    z.avail_in = n;
                                    z.total_in += p - z.next_in_index;
                                    z.next_in_index = p;
                                    s.write = q;
                                    return s.inflate_flush(z, r);
                                }
                            }
                        }
                        r = Z_OK;

                        s.window[q++] = (byte) lit;
                        m--;

                        mode = START;
                        break;

                    case WASH: // o: got eob, possibly more output
                        if (k > 7)
                        {
                            // return unused byte, if any
                            k -= 8;
                            n++;
                            p--; // can always return one
                        }

                        s.write = q;
                        r = s.inflate_flush(z, r);
                        q = s.write;
                        m = q < s.read ? s.read - q - 1 : s.end - q;

                        if (s.read != s.write)
                        {
                            s.bitb = b;
                            s.bitk = k;
                            z.avail_in = n;
                            z.total_in += p - z.next_in_index;
                            z.next_in_index = p;
                            s.write = q;
                            return s.inflate_flush(z, r);
                        }
                        mode = END;
                        goto case END;

                    case END:
                        r = Z_STREAM_END;
                        s.bitb = b;
                        s.bitk = k;
                        z.avail_in = n;
                        z.total_in += p - z.next_in_index;
                        z.next_in_index = p;
                        s.write = q;
                        return s.inflate_flush(z, r);

                    case BADCODE: // x: got error
                        r = Z_DATA_ERROR;

                        s.bitb = b;
                        s.bitk = k;
                        z.avail_in = n;
                        z.total_in += p - z.next_in_index;
                        z.next_in_index = p;
                        s.write = q;
                        return s.inflate_flush(z, r);

                    default:
                        r = Z_STREAM_ERROR;

                        s.bitb = b;
                        s.bitk = k;
                        z.avail_in = n;
                        z.total_in += p - z.next_in_index;
                        z.next_in_index = p;
                        s.write = q;
                        return s.inflate_flush(z, r);
                }
            }
        }

        /// <summary>
        /// Releases decoder resources. Intentionally a no-op in the managed
        /// port (the C original called ZFREE here).
        /// </summary>
        internal void free(ZStream z)
        {
            // ZFREE(z, c);
        }

        // Called with number of bytes left to write in window at least 258
        // (the maximum string length) and number of input bytes available
        // at least ten. The ten bytes are six bytes for the longest length/
        // distance pair plus four bytes for overloading the bit buffer.

        /// <summary>
        /// Fast-path decode loop: decodes length/distance pairs without the
        /// per-state bookkeeping of <see cref="proc"/>, under the caller's
        /// guarantee of &gt;= 258 bytes of window space and &gt;= 10 input
        /// bytes (see comment above).
        /// </summary>
        /// <returns>Z_OK to resume the normal loop, Z_STREAM_END on end of
        /// block, or Z_DATA_ERROR on an invalid code.</returns>
        internal int inflate_fast(int bl, int bd, int[] tl, int tl_index, int[] td, int td_index, InfBlocks s, ZStream z)
        {
            int t;        // temporary pointer
            int[] tp;     // temporary pointer
            int tp_index; // temporary pointer
            int e;        // extra bits or operation
            int b;        // bit buffer
            int k;        // bits in bit buffer
            int p;        // input data pointer
            int n;        // bytes available there
            int q;        // output window write pointer
            int m;        // bytes to end of window or read pointer
            int ml;       // mask for literal/length tree
            int md;       // mask for distance tree
            int c;        // bytes to copy
            int d;        // distance back to copy from
            int r;        // copy source pointer

            // load input, output, bit values
            p = z.next_in_index;
            n = z.avail_in;
            b = s.bitb;
            k = s.bitk;
            q = s.write;
            m = q < s.read ? s.read - q - 1 : s.end - q;

            // initialize masks
            ml = inflate_mask[bl];
            md = inflate_mask[bd];

            // do until not enough input or output space for fast loop
            do
            {
                // assume called with m >= 258 && n >= 10
                // get literal/length code
                while (k < (20))
                {
                    // max bits for literal/length code
                    n--;
                    b |= (z.next_in[p++] & 0xff) << k;
                    k += 8;
                }

                t = b & ml;
                tp = tl;
                tp_index = tl_index;
                if ((e = tp[(tp_index + t) * 3]) == 0)
                {
                    b >>= (tp[(tp_index + t) * 3 + 1]);
                    k -= (tp[(tp_index + t) * 3 + 1]);

                    s.window[q++] = (byte) tp[(tp_index + t) * 3 + 2];
                    m--;
                    continue;
                }
                do
                {
                    b >>= (tp[(tp_index + t) * 3 + 1]);
                    k -= (tp[(tp_index + t) * 3 + 1]);

                    if ((e & 16) != 0)
                    {
                        e &= 15;
                        c = tp[(tp_index + t) * 3 + 2] + (b & inflate_mask[e]);

                        b >>= e;
                        k -= e;

                        // decode distance base of block to copy
                        while (k < (15))
                        {
                            // max bits for distance code
                            n--;
                            b |= (z.next_in[p++] & 0xff) << k;
                            k += 8;
                        }

                        t = b & md;
                        tp = td;
                        tp_index = td_index;
                        e = tp[(tp_index + t) * 3];

                        do
                        {
                            b >>= (tp[(tp_index + t) * 3 + 1]);
                            k -= (tp[(tp_index + t) * 3 + 1]);

                            if ((e & 16) != 0)
                            {
                                // get extra bits to add to distance base
                                e &= 15;
                                while (k < (e))
                                {
                                    // get extra bits (up to 13)
                                    n--;
                                    b |= (z.next_in[p++] & 0xff) << k;
                                    k += 8;
                                }

                                d = tp[(tp_index + t) * 3 + 2] + (b & inflate_mask[e]);

                                b >>= (e);
                                k -= (e);

                                // do the copy
                                m -= c;
                                if (q >= d)
                                {
                                    // offset before dest
                                    // just copy
                                    r = q - d;
                                    if (q - r > 0 && 2 > (q - r))
                                    {
                                        s.window[q++] = s.window[r++];
                                        c--; // minimum count is three,
                                        s.window[q++] = s.window[r++];
                                        c--; // so unroll loop a little
                                    }
                                    else
                                    {
                                        Array.Copy(s.window, r, s.window, q, 2);
                                        q += 2;
                                        r += 2;
                                        c -= 2;
                                    }
                                }
                                else
                                {
                                    // else offset after destination
                                    r = q - d;
                                    do
                                    {
                                        r += s.end; // force pointer in window
                                    }
                                    while (r < 0); // covers invalid distances

                                    e = s.end - r;
                                    if (c > e)
                                    {
                                        // if source crosses,
                                        c -= e; // wrapped copy
                                        if (q - r > 0 && e > (q - r))
                                        {
                                            do
                                            {
                                                s.window[q++] = s.window[r++];
                                            }
                                            while (--e != 0);
                                        }
                                        else
                                        {
                                            Array.Copy(s.window, r, s.window, q, e);
                                            q += e;
                                            r += e;
                                            e = 0;
                                        }
                                        r = 0; // copy rest from start of window
                                    }
                                }

                                // copy all or what's left
                                if (q - r > 0 && c > (q - r))
                                {
                                    do
                                    {
                                        s.window[q++] = s.window[r++];
                                    }
                                    while (--c != 0);
                                }
                                else
                                {
                                    Array.Copy(s.window, r, s.window, q, c);
                                    q += c;
                                    r += c;
                                    c = 0;
                                }
                                break;
                            }
                            else if ((e & 64) == 0)
                            {
                                t += tp[(tp_index + t) * 3 + 2];
                                t += (b & inflate_mask[e]);
                                e = tp[(tp_index + t) * 3];
                            }
                            else
                            {
                                z.msg = "invalid distance code";

                                c = z.avail_in - n;
                                c = (k >> 3) < c ? k >> 3 : c;
                                n += c;
                                p -= c;
                                k -= (c << 3);

                                s.bitb = b;
                                s.bitk = k;
                                z.avail_in = n;
                                z.total_in += p - z.next_in_index;
                                z.next_in_index = p;
                                s.write = q;

                                return Z_DATA_ERROR;
                            }
                        }
                        while (true);
                        break;
                    }

                    if ((e & 64) == 0)
                    {
                        t += tp[(tp_index + t) * 3 + 2];
                        t += (b & inflate_mask[e]);
                        if ((e = tp[(tp_index + t) * 3]) == 0)
                        {
                            b >>= (tp[(tp_index + t) * 3 + 1]);
                            k -= (tp[(tp_index + t) * 3 + 1]);

                            s.window[q++] = (byte) tp[(tp_index + t) * 3 + 2];
                            m--;
                            break;
                        }
                    }
                    else if ((e & 32) != 0)
                    {
                        c = z.avail_in - n;
                        c = (k >> 3) < c ? k >> 3 : c;
                        n += c;
                        p -= c;
                        k -= (c << 3);

                        s.bitb = b;
                        s.bitk = k;
                        z.avail_in = n;
                        z.total_in += p - z.next_in_index;
                        z.next_in_index = p;
                        s.write = q;

                        return Z_STREAM_END;
                    }
                    else
                    {
                        z.msg = "invalid literal/length code";

                        c = z.avail_in - n;
                        c = (k >> 3) < c ? k >> 3 : c;
                        n += c;
                        p -= c;
                        k -= (c << 3);

                        s.bitb = b;
                        s.bitk = k;
                        z.avail_in = n;
                        z.total_in += p - z.next_in_index;
                        z.next_in_index = p;
                        s.write = q;

                        return Z_DATA_ERROR;
                    }
                }
                while (true);
            }
            while (m >= 258 && n >= 10);

            // not enough input or output--restore pointers and return
            c = z.avail_in - n;
            c = (k >> 3) < c ? k >> 3 : c;
            n += c;
            p -= c;
            k -= (c << 3);

            s.bitb = b;
            s.bitk = k;
            z.avail_in = n;
            z.total_in += p - z.next_in_index;
            z.next_in_index = p;
            s.write = q;

            return Z_OK;
        }
    }
}
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Newtonsoft.Json;

namespace IO.Swagger.Model
{
    /// <summary>
    /// A reminder configuration prompting a user to record a measurement for
    /// a tracked variable. Equality and hashing consider every serialized
    /// field; two instances are equal when all fields match.
    /// </summary>
    [DataContract]
    public class TrackingReminder : IEquatable<TrackingReminder>
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="TrackingReminder" /> class.
        /// </summary>
        public TrackingReminder()
        {
        }

        /// <summary>
        /// id
        /// </summary>
        /// <value>id</value>
        [DataMember(Name="id", EmitDefaultValue=false)]
        public int? Id { get; set; }

        /// <summary>
        /// client_id
        /// </summary>
        /// <value>client_id</value>
        [DataMember(Name="client_id", EmitDefaultValue=false)]
        public string ClientId { get; set; }

        /// <summary>
        /// ID of User
        /// </summary>
        /// <value>ID of User</value>
        [DataMember(Name="user_id", EmitDefaultValue=false)]
        public int? UserId { get; set; }

        /// <summary>
        /// Id for the variable to be tracked
        /// </summary>
        /// <value>Id for the variable to be tracked</value>
        [DataMember(Name="variable_id", EmitDefaultValue=false)]
        public int? VariableId { get; set; }

        /// <summary>
        /// Default value to use for the measurement when tracking
        /// </summary>
        /// <value>Default value to use for the measurement when tracking</value>
        [DataMember(Name="default_value", EmitDefaultValue=false)]
        public float? DefaultValue { get; set; }

        /// <summary>
        /// Earliest time of day at which reminders should appear
        /// </summary>
        /// <value>Earliest time of day at which reminders should appear</value>
        [DataMember(Name="reminder_start_time", EmitDefaultValue=false)]
        public string ReminderStartTime { get; set; }

        /// <summary>
        /// Latest time of day at which reminders should appear
        /// </summary>
        /// <value>Latest time of day at which reminders should appear</value>
        [DataMember(Name="reminder_end_time", EmitDefaultValue=false)]
        public string ReminderEndTime { get; set; }

        /// <summary>
        /// String identifier for the sound to accompany the reminder
        /// </summary>
        /// <value>String identifier for the sound to accompany the reminder</value>
        [DataMember(Name="reminder_sound", EmitDefaultValue=false)]
        public string ReminderSound { get; set; }

        /// <summary>
        /// Number of seconds between one reminder and the next
        /// </summary>
        /// <value>Number of seconds between one reminder and the next</value>
        [DataMember(Name="reminder_frequency", EmitDefaultValue=false)]
        public int? ReminderFrequency { get; set; }

        /// <summary>
        /// True if the reminders should appear as a popup notification
        /// </summary>
        /// <value>True if the reminders should appear as a popup notification</value>
        [DataMember(Name="pop_up", EmitDefaultValue=false)]
        public bool? PopUp { get; set; }

        /// <summary>
        /// True if the reminders should be delivered via SMS
        /// </summary>
        /// <value>True if the reminders should be delivered via SMS</value>
        [DataMember(Name="sms", EmitDefaultValue=false)]
        public bool? Sms { get; set; }

        /// <summary>
        /// True if the reminders should be delivered via email
        /// </summary>
        /// <value>True if the reminders should be delivered via email</value>
        [DataMember(Name="email", EmitDefaultValue=false)]
        public bool? Email { get; set; }

        /// <summary>
        /// True if the reminders should appear in the notification bar
        /// </summary>
        /// <value>True if the reminders should appear in the notification bar</value>
        [DataMember(Name="notification_bar", EmitDefaultValue=false)]
        public bool? NotificationBar { get; set; }

        /// <summary>
        /// ISO 8601 timestamp for the last time a reminder was sent
        /// </summary>
        /// <value>ISO 8601 timestamp for the last time a reminder was sent</value>
        [DataMember(Name="last_reminded", EmitDefaultValue=false)]
        public DateTime? LastReminded { get; set; }

        /// <summary>
        /// ISO 8601 timestamp for the last time a measurement was received for this user and variable
        /// </summary>
        /// <value>ISO 8601 timestamp for the last time a measurement was received for this user and variable</value>
        [DataMember(Name="last_tracked", EmitDefaultValue=false)]
        public DateTime? LastTracked { get; set; }

        /// <summary>
        /// When the record was first created. Use ISO 8601 datetime format
        /// </summary>
        /// <value>When the record was first created. Use ISO 8601 datetime format</value>
        [DataMember(Name="created_at", EmitDefaultValue=false)]
        public DateTime? CreatedAt { get; set; }

        /// <summary>
        /// When the record in the database was last updated. Use ISO 8601 datetime format
        /// </summary>
        /// <value>When the record in the database was last updated. Use ISO 8601 datetime format</value>
        [DataMember(Name="updated_at", EmitDefaultValue=false)]
        public DateTime? UpdatedAt { get; set; }

        // Display names for the fields, in the canonical order used by
        // ToString, Equals and GetHashCode. Must stay aligned with
        // ComponentValues().
        private static readonly string[] ComponentNames =
        {
            "Id", "ClientId", "UserId", "VariableId", "DefaultValue",
            "ReminderStartTime", "ReminderEndTime", "ReminderSound",
            "ReminderFrequency", "PopUp", "Sms", "Email", "NotificationBar",
            "LastReminded", "LastTracked", "CreatedAt", "UpdatedAt"
        };

        // Snapshot of every field (boxed) in the same canonical order as
        // ComponentNames; null entries represent unset nullable fields.
        private object[] ComponentValues()
        {
            return new object[]
            {
                Id, ClientId, UserId, VariableId, DefaultValue,
                ReminderStartTime, ReminderEndTime, ReminderSound,
                ReminderFrequency, PopUp, Sms, Email, NotificationBar,
                LastReminded, LastTracked, CreatedAt, UpdatedAt
            };
        }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class TrackingReminder {\n");
            object[] values = ComponentValues();
            for (int i = 0; i < values.Length; i++)
            {
                // StringBuilder.Append(object) renders null as nothing,
                // matching the generated-code behavior for unset fields.
                sb.Append("  ").Append(ComponentNames[i]).Append(": ").Append(values[i]).Append("\n");
            }
            sb.Append("}\n");
            return sb.ToString();
        }

        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="obj">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object obj)
        {
            // A failed cast yields null, which the typed overload rejects.
            return Equals(obj as TrackingReminder);
        }

        /// <summary>
        /// Returns true if TrackingReminder instances are equal
        /// </summary>
        /// <param name="other">Instance of TrackingReminder to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(TrackingReminder other)
        {
            if (other == null)
                return false;

            // Field-by-field comparison in canonical order; Object.Equals
            // handles the null/null and null/value cases for every field.
            object[] mine = ComponentValues();
            object[] theirs = other.ComponentValues();
            for (int i = 0; i < mine.Length; i++)
            {
                if (!Object.Equals(mine[i], theirs[i]))
                    return false;
            }
            return true;
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            // credit: http://stackoverflow.com/a/263416/677735
            unchecked // Overflow is fine, just wrap
            {
                int hash = 41;
                // Fold each non-null field in canonical order; skipping nulls
                // keeps the result stable for unset optional fields.
                foreach (object component in ComponentValues())
                {
                    if (component != null)
                        hash = hash * 57 + component.GetHashCode();
                }
                return hash;
            }
        }
    }
}
/* * Copyright (c) 2006-2008, openmetaverse.org * All rights reserved. * * - Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * - Neither the name of the openmetaverse.org nor the names * of its contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ using System; using System.Collections.Generic; using OpenMetaverse.StructuredData; namespace OpenMetaverse { public partial class Primitive : IEquatable<Primitive> { // Used for packing and unpacking parameters protected const float CUT_QUANTA = 0.00002f; protected const float SCALE_QUANTA = 0.01f; protected const float SHEAR_QUANTA = 0.01f; protected const float TAPER_QUANTA = 0.01f; protected const float REV_QUANTA = 0.015f; protected const float HOLLOW_QUANTA = 0.00002f; #region Subclasses /// <summary> /// Parameters used to construct a visual representation of a primitive /// </summary> public struct ConstructionData { private const byte PROFILE_MASK = 0x0F; private const byte HOLE_MASK = 0xF0; /// <summary></summary> public byte profileCurve; /// <summary></summary> public PathCurve PathCurve; /// <summary></summary> public float PathEnd; /// <summary></summary> public float PathRadiusOffset; /// <summary></summary> public float PathSkew; /// <summary></summary> public float PathScaleX; /// <summary></summary> public float PathScaleY; /// <summary></summary> public float PathShearX; /// <summary></summary> public float PathShearY; /// <summary></summary> public float PathTaperX; /// <summary></summary> public float PathTaperY; /// <summary></summary> public float PathBegin; /// <summary></summary> public float PathTwist; /// <summary></summary> public float PathTwistBegin; /// <summary></summary> public float PathRevolutions; /// <summary></summary> public float ProfileBegin; /// <summary></summary> public float ProfileEnd; /// <summary></summary> public float ProfileHollow; /// <summary></summary> public Material Material; /// <summary></summary> public byte State; /// <summary></summary> public PCode PCode; #region Properties /// <summary>Attachment point to an avatar</summary> public AttachmentPoint AttachmentPoint { get { return (AttachmentPoint)Utils.SwapWords(State); } set { State = (byte)Utils.SwapWords((byte)value); } } /// <summary></summary> 
public ProfileCurve ProfileCurve { get { return (ProfileCurve)(profileCurve & PROFILE_MASK); } set { profileCurve &= HOLE_MASK; profileCurve |= (byte)value; } } /// <summary></summary> public HoleType ProfileHole { get { return (HoleType)(profileCurve & HOLE_MASK); } set { profileCurve &= PROFILE_MASK; profileCurve |= (byte)value; } } /// <summary></summary> public Vector2 PathBeginScale { get { Vector2 begin = new Vector2(1f, 1f); if (PathScaleX > 1f) begin.X = 2f - PathScaleX; if (PathScaleY > 1f) begin.Y = 2f - PathScaleY; return begin; } } /// <summary></summary> public Vector2 PathEndScale { get { Vector2 end = new Vector2(1f, 1f); if (PathScaleX < 1f) end.X = PathScaleX; if (PathScaleY < 1f) end.Y = PathScaleY; return end; } } #endregion Properties /// <summary> /// Calculdates hash code for prim construction data /// </summary> /// <returns>The has</returns> public override int GetHashCode() { return profileCurve.GetHashCode() ^ PathCurve.GetHashCode() ^ PathEnd.GetHashCode() ^ PathRadiusOffset.GetHashCode() ^ PathSkew.GetHashCode() ^ PathScaleX.GetHashCode() ^ PathScaleY.GetHashCode() ^ PathShearX.GetHashCode() ^ PathShearY.GetHashCode() ^ PathTaperX.GetHashCode() ^ PathTaperY.GetHashCode() ^ PathBegin.GetHashCode() ^ PathTwist.GetHashCode() ^ PathTwistBegin.GetHashCode() ^ PathRevolutions.GetHashCode() ^ ProfileBegin.GetHashCode() ^ ProfileEnd.GetHashCode() ^ ProfileHollow.GetHashCode() ^ Material.GetHashCode() ^ State.GetHashCode() ^ PCode.GetHashCode(); } } /// <summary> /// Information on the flexible properties of a primitive /// </summary> public class FlexibleData { /// <summary></summary> public int Softness; /// <summary></summary> public float Gravity; /// <summary></summary> public float Drag; /// <summary></summary> public float Wind; /// <summary></summary> public float Tension; /// <summary></summary> public Vector3 Force; /// <summary> /// Default constructor /// </summary> public FlexibleData() { } /// <summary> /// /// </summary> /// <param 
name="data"></param> /// <param name="pos"></param> public FlexibleData(byte[] data, int pos) { if (data.Length >= 5) { Softness = ((data[pos] & 0x80) >> 6) | ((data[pos + 1] & 0x80) >> 7); Tension = (float)(data[pos++] & 0x7F) / 10.0f; Drag = (float)(data[pos++] & 0x7F) / 10.0f; Gravity = (float)(data[pos++] / 10.0f) - 10.0f; Wind = (float)data[pos++] / 10.0f; Force = new Vector3(data, pos); } else { Softness = 0; Tension = 0.0f; Drag = 0.0f; Gravity = 0.0f; Wind = 0.0f; Force = Vector3.Zero; } } /// <summary> /// /// </summary> /// <returns></returns> public byte[] GetBytes() { byte[] data = new byte[16]; int i = 0; // Softness is packed in the upper bits of tension and drag data[i] = (byte)((Softness & 2) << 6); data[i + 1] = (byte)((Softness & 1) << 7); data[i++] |= (byte)((byte)(Tension * 10.01f) & 0x7F); data[i++] |= (byte)((byte)(Drag * 10.01f) & 0x7F); data[i++] = (byte)((Gravity + 10.0f) * 10.01f); data[i++] = (byte)(Wind * 10.01f); Force.GetBytes().CopyTo(data, i); return data; } /// <summary> /// /// </summary> /// <returns></returns> public OSD GetOSD() { OSDMap map = new OSDMap(); map["simulate_lod"] = OSD.FromInteger(Softness); map["gravity"] = OSD.FromReal(Gravity); map["air_friction"] = OSD.FromReal(Drag); map["wind_sensitivity"] = OSD.FromReal(Wind); map["tension"] = OSD.FromReal(Tension); map["user_force"] = OSD.FromVector3(Force); return map; } public static FlexibleData FromOSD(OSD osd) { FlexibleData flex = new FlexibleData(); if (osd.Type == OSDType.Map) { OSDMap map = (OSDMap)osd; flex.Softness = map["simulate_lod"].AsInteger(); flex.Gravity = (float)map["gravity"].AsReal(); flex.Drag = (float)map["air_friction"].AsReal(); flex.Wind = (float)map["wind_sensitivity"].AsReal(); flex.Tension = (float)map["tension"].AsReal(); flex.Force = ((OSDArray)map["user_force"]).AsVector3(); } return flex; } public override int GetHashCode() { return Softness.GetHashCode() ^ Gravity.GetHashCode() ^ Drag.GetHashCode() ^ Wind.GetHashCode() ^ 
Tension.GetHashCode() ^ Force.GetHashCode(); } } /// <summary> /// Information on the light properties of a primitive /// </summary> public class LightData { /// <summary></summary> public Color4 Color; /// <summary></summary> public float Intensity; /// <summary></summary> public float Radius; /// <summary></summary> public float Cutoff; /// <summary></summary> public float Falloff; /// <summary> /// Default constructor /// </summary> public LightData() { } /// <summary> /// /// </summary> /// <param name="data"></param> /// <param name="pos"></param> public LightData(byte[] data, int pos) { if (data.Length - pos >= 16) { Color = new Color4(data, pos, false); Radius = Utils.BytesToFloat(data, pos + 4); Cutoff = Utils.BytesToFloat(data, pos + 8); Falloff = Utils.BytesToFloat(data, pos + 12); // Alpha in color is actually intensity Intensity = Color.A; Color.A = 1f; } else { Color = Color4.Black; Radius = 0f; Cutoff = 0f; Falloff = 0f; Intensity = 0f; } } /// <summary> /// /// </summary> /// <returns></returns> public byte[] GetBytes() { byte[] data = new byte[16]; // Alpha channel in color is intensity Color4 tmpColor = Color; tmpColor.A = Intensity; tmpColor.GetBytes().CopyTo(data, 0); Utils.FloatToBytes(Radius).CopyTo(data, 4); Utils.FloatToBytes(Cutoff).CopyTo(data, 8); Utils.FloatToBytes(Falloff).CopyTo(data, 12); return data; } public OSD GetOSD() { OSDMap map = new OSDMap(); map["color"] = OSD.FromColor4(Color); map["intensity"] = OSD.FromReal(Intensity); map["radius"] = OSD.FromReal(Radius); map["cutoff"] = OSD.FromReal(Cutoff); map["falloff"] = OSD.FromReal(Falloff); return map; } public static LightData FromOSD(OSD osd) { LightData light = new LightData(); if (osd.Type == OSDType.Map) { OSDMap map = (OSDMap)osd; light.Color = ((OSDArray)map["color"]).AsColor4(); light.Intensity = (float)map["intensity"].AsReal(); light.Radius = (float)map["radius"].AsReal(); light.Cutoff = (float)map["cutoff"].AsReal(); light.Falloff = (float)map["falloff"].AsReal(); } 
return light; } public override int GetHashCode() { return Color.GetHashCode() ^ Intensity.GetHashCode() ^ Radius.GetHashCode() ^ Cutoff.GetHashCode() ^ Falloff.GetHashCode(); } /// <summary> /// /// </summary> /// <returns></returns> public override string ToString() { return String.Format("Color: {0} Intensity: {1} Radius: {2} Cutoff: {3} Falloff: {4}", Color, Intensity, Radius, Cutoff, Falloff); } } /// <summary> /// Information on the light properties of a primitive as texture map /// </summary> public class LightImage { /// <summary></summary> public UUID LightTexture; /// <summary></summary> public Vector3 Params; /// <summary> /// Default constructor /// </summary> public LightImage() { } /// <summary> /// /// </summary> /// <param name="data"></param> /// <param name="pos"></param> public LightImage(byte[] data, int pos) { if (data.Length - pos >= 28) { LightTexture = new UUID(data, pos); Params = new Vector3(data, pos + 16); } else { LightTexture = UUID.Zero; Params = Vector3.Zero; } } /// <summary> /// /// </summary> /// <returns></returns> public byte[] GetBytes() { byte[] data = new byte[28]; // Alpha channel in color is intensity LightTexture.ToBytes(data, 0); Params.ToBytes(data, 16); return data; } public OSD GetOSD() { OSDMap map = new OSDMap(); map["texture"] = OSD.FromUUID(LightTexture); map["params"] = OSD.FromVector3(Params); return map; } public static LightImage FromOSD(OSD osd) { LightImage light = new LightImage(); if (osd.Type == OSDType.Map) { OSDMap map = (OSDMap)osd; light.LightTexture = map["texture"].AsUUID(); light.Params = map["params"].AsVector3(); } return light; } public override int GetHashCode() { return LightTexture.GetHashCode() ^ Params.GetHashCode(); } /// <summary> /// /// </summary> /// <returns></returns> public override string ToString() { return String.Format("LightTexture: {0} Params; {1]", LightTexture, Params); } } /// <summary> /// Information on the sculpt properties of a sculpted primitive /// </summary> public 
class SculptData { public UUID SculptTexture; private byte type; public SculptType Type { get { return (SculptType)(type & 7); } set { type = (byte)value; } } /// <summary> /// Render inside out (inverts the normals). /// </summary> public bool Invert { get { return ((type & (byte)SculptType.Invert) != 0); } } /// <summary> /// Render an X axis mirror of the sculpty. /// </summary> public bool Mirror { get { return ((type & (byte)SculptType.Mirror) != 0); } } /// <summary> /// Default constructor /// </summary> public SculptData() { } /// <summary> /// /// </summary> /// <param name="data"></param> /// <param name="pos"></param> public SculptData(byte[] data, int pos) { if (data.Length >= 17) { SculptTexture = new UUID(data, pos); type = data[pos + 16]; } else { SculptTexture = UUID.Zero; type = (byte)SculptType.None; } } public byte[] GetBytes() { byte[] data = new byte[17]; SculptTexture.GetBytes().CopyTo(data, 0); data[16] = type; return data; } public OSD GetOSD() { OSDMap map = new OSDMap(); map["texture"] = OSD.FromUUID(SculptTexture); map["type"] = OSD.FromInteger(type); return map; } public static SculptData FromOSD(OSD osd) { SculptData sculpt = new SculptData(); if (osd.Type == OSDType.Map) { OSDMap map = (OSDMap)osd; sculpt.SculptTexture = map["texture"].AsUUID(); sculpt.type = (byte)map["type"].AsInteger(); } return sculpt; } public override int GetHashCode() { return SculptTexture.GetHashCode() ^ type.GetHashCode(); } } /// <summary> /// Extended properties to describe an object /// </summary> public class ObjectProperties { /// <summary></summary> public UUID ObjectID; /// <summary></summary> public UUID CreatorID; /// <summary></summary> public UUID OwnerID; /// <summary></summary> public UUID GroupID; /// <summary></summary> public DateTime CreationDate; /// <summary></summary> public Permissions Permissions; /// <summary></summary> public int OwnershipCost; /// <summary></summary> public SaleType SaleType; /// <summary></summary> public int 
            SalePrice;
            /// <summary></summary>
            public byte AggregatePerms;
            /// <summary></summary>
            public byte AggregatePermTextures;
            /// <summary></summary>
            public byte AggregatePermTexturesOwner;
            /// <summary></summary>
            public ObjectCategory Category;
            /// <summary></summary>
            public short InventorySerial;
            /// <summary></summary>
            public UUID ItemID;
            /// <summary></summary>
            public UUID FolderID;
            /// <summary></summary>
            public UUID FromTaskID;
            /// <summary></summary>
            public UUID LastOwnerID;
            /// <summary>Object name</summary>
            public string Name;
            /// <summary>Object description</summary>
            public string Description;
            /// <summary></summary>
            public string TouchName;
            /// <summary></summary>
            public string SitName;
            /// <summary></summary>
            public UUID[] TextureIDs;

            /// <summary>
            /// Default constructor
            /// </summary>
            public ObjectProperties()
            {
                // Default the string properties to String.Empty rather than null
                Name = String.Empty;
                Description = String.Empty;
                TouchName = String.Empty;
                SitName = String.Empty;
            }

            /// <summary>
            /// Set the properties that are set in an ObjectPropertiesFamily packet
            /// </summary>
            /// <param name="props"><seealso cref="ObjectProperties"/> that has
            /// been partially filled by an ObjectPropertiesFamily packet</param>
            public void SetFamilyProperties(ObjectProperties props)
            {
                ObjectID = props.ObjectID;
                OwnerID = props.OwnerID;
                GroupID = props.GroupID;
                Permissions = props.Permissions;
                OwnershipCost = props.OwnershipCost;
                SaleType = props.SaleType;
                SalePrice = props.SalePrice;
                Category = props.Category;
                LastOwnerID = props.LastOwnerID;
                Name = props.Name;
                Description = props.Description;
            }

            // Serializes TextureIDs as a flat run of 16-byte UUIDs; a null or
            // empty array yields a zero-length buffer
            public byte[] GetTextureIDBytes()
            {
                if (TextureIDs == null || TextureIDs.Length == 0)
                    return Utils.EmptyBytes;

                byte[] bytes = new byte[16 * TextureIDs.Length];
                for (int i = 0; i < TextureIDs.Length; i++)
                    TextureIDs[i].ToBytes(bytes, 16 * i);

                return bytes;
            }
        }

        /// <summary>
        /// Describes physics attributes of the prim
        /// </summary>
        public class PhysicsProperties
        {
            /// <summary>Primitive's local ID</summary>
            public uint LocalID;
            /// <summary>Density (1000 for normal density)</summary>
            public float Density;
            /// <summary>Friction</summary>
            public float Friction;
            /// <summary>Gravity multiplier (1 for normal gravity) </summary>
            public float GravityMultiplier;
            /// <summary>Type of physics representation of this primitive in the simulator</summary>
            public PhysicsShapeType PhysicsShapeType;
            /// <summary>Restitution</summary>
            public float Restitution;

            /// <summary>
            /// Creates PhysicsProperties from OSD
            /// </summary>
            /// <param name="osd">OSDMap with incoming data</param>
            /// <returns>Deserialized PhysicsProperties object</returns>
            public static PhysicsProperties FromOSD(OSD osd)
            {
                PhysicsProperties ret = new PhysicsProperties();

                if (osd is OSDMap)
                {
                    OSDMap map = (OSDMap)osd;
                    ret.LocalID = map["LocalID"];
                    ret.Density = map["Density"];
                    ret.Friction = map["Friction"];
                    ret.GravityMultiplier = map["GravityMultiplier"];
                    ret.Restitution = map["Restitution"];
                    ret.PhysicsShapeType = (PhysicsShapeType)map["PhysicsShapeType"].AsInteger();
                }

                return ret;
            }

            /// <summary>
            /// Serializes PhysicsProperties to OSD
            /// </summary>
            /// <returns>OSDMap with serialized PhysicsProperties data</returns>
            public OSD GetOSD()
            {
                OSDMap map = new OSDMap(6);
                map["LocalID"] = LocalID;
                map["Density"] = Density;
                map["Friction"] = Friction;
                map["GravityMultiplier"] = GravityMultiplier;
                map["Restitution"] = Restitution;
                map["PhysicsShapeType"] = (int)PhysicsShapeType;
                return map;
            }
        }

        #endregion Subclasses

        #region Public Members

        /// <summary></summary>
        public UUID ID;
        /// <summary></summary>
        public UUID GroupID;
        /// <summary></summary>
        public uint LocalID;
        /// <summary></summary>
        public uint ParentID;
        /// <summary></summary>
        public ulong RegionHandle;
        /// <summary></summary>
        public PrimFlags Flags;
        /// <summary>Foliage type for this primitive. Only applicable if this
        /// primitive is foliage</summary>
        public Tree TreeSpecies;
        /// <summary>Unknown</summary>
        public byte[] ScratchPad;
        /// <summary></summary>
        public Vector3 Position;
        /// <summary></summary>
        public Vector3 Scale;
        /// <summary></summary>
        public Quaternion Rotation = Quaternion.Identity;
        /// <summary></summary>
        public Vector3 Velocity;
        /// <summary></summary>
        public Vector3 AngularVelocity;
        /// <summary></summary>
        public Vector3 Acceleration;
        /// <summary></summary>
        public Vector4 CollisionPlane;
        /// <summary></summary>
        public FlexibleData Flexible;
        /// <summary></summary>
        public LightData Light;
        /// <summary></summary>
        public LightImage LightMap;
        /// <summary></summary>
        public SculptData Sculpt;
        /// <summary></summary>
        public ClickAction ClickAction;
        /// <summary></summary>
        public UUID Sound;
        /// <summary>Identifies the owner if audio or a particle system is
        /// active</summary>
        public UUID OwnerID;
        /// <summary></summary>
        public SoundFlags SoundFlags;
        /// <summary></summary>
        public float SoundGain;
        /// <summary></summary>
        public float SoundRadius;
        /// <summary></summary>
        public string Text;
        /// <summary></summary>
        public Color4 TextColor;
        /// <summary></summary>
        public string MediaURL;
        /// <summary></summary>
        public JointType Joint;
        /// <summary></summary>
        public Vector3 JointPivot;
        /// <summary></summary>
        public Vector3 JointAxisOrAnchor;
        /// <summary></summary>
        public NameValue[] NameValues;
        /// <summary></summary>
        public ConstructionData PrimData;
        /// <summary></summary>
        public ObjectProperties Properties;
        /// <summary>Objects physics engine propertis</summary>
        public PhysicsProperties PhysicsProps;
        /// <summary>Extra data about primitive</summary>
        public object Tag;
        /// <summary>Indicates if prim is attached to an avatar</summary>
        public bool IsAttachment;
        /// <summary>Number of clients referencing this prim</summary>
        public int ActiveClients = 0;

        #endregion Public Members

        #region Properties

        /// <summary>Uses basic heuristics to estimate the primitive shape</summary>
        public PrimType Type
        {
            get
            {
                // A sculpt texture overrides the path/profile heuristics below
                if (Sculpt != null && Sculpt.Type != SculptType.None && Sculpt.SculptTexture != UUID.Zero)
                {
                    if (Sculpt.Type == SculptType.Mesh)
                        return PrimType.Mesh;
                    else
                        return PrimType.Sculpt;
                }

                bool linearPath = (PrimData.PathCurve == PathCurve.Line || PrimData.PathCurve == PathCurve.Flexible);
                float scaleY = PrimData.PathScaleY;

                if (linearPath)
                {
                    // Linear extrusion: profile shape alone decides the prim type
                    switch (PrimData.ProfileCurve)
                    {
                        case ProfileCurve.Circle:
                            return PrimType.Cylinder;
                        case ProfileCurve.Square:
                            return PrimType.Box;
                        case ProfileCurve.IsoTriangle:
                        case ProfileCurve.EqualTriangle:
                        case ProfileCurve.RightTriangle:
                            return PrimType.Prism;
                        case ProfileCurve.HalfCircle:
                        default:
                            return PrimType.Unknown;
                    }
                }
                else
                {
                    switch (PrimData.PathCurve)
                    {
                        case PathCurve.Flexible:
                            return PrimType.Unknown;
                        case PathCurve.Circle:
                            switch (PrimData.ProfileCurve)
                            {
                                case ProfileCurve.Circle:
                                    // Y path scale distinguishes sphere-like from torus-like
                                    if (scaleY > 0.75f)
                                        return PrimType.Sphere;
                                    else
                                        return PrimType.Torus;
                                case ProfileCurve.HalfCircle:
                                    return PrimType.Sphere;
                                case ProfileCurve.EqualTriangle:
                                    return PrimType.Ring;
                                case ProfileCurve.Square:
                                    if (scaleY <= 0.75f)
                                        return PrimType.Tube;
                                    else
                                        return PrimType.Unknown;
                                default:
                                    return PrimType.Unknown;
                            }
                        case PathCurve.Circle2:
                            if (PrimData.ProfileCurve == ProfileCurve.Circle)
                                return PrimType.Sphere;
                            else
                                return PrimType.Unknown;
                        default:
                            return PrimType.Unknown;
                    }
                }
            }
        }

        #endregion Properties

        #region Constructors

        /// <summary>
        /// Default constructor
        /// </summary>
        public Primitive()
        {
            // Default a few null property values to String.Empty
            Text = String.Empty;
            MediaURL = String.Empty;
        }

        // Copy constructor
        public Primitive(Primitive prim)
        {
            ID = prim.ID;
            GroupID = prim.GroupID;
            LocalID = prim.LocalID;
            ParentID = prim.ParentID;
            RegionHandle = prim.RegionHandle;
            Flags = prim.Flags;
            TreeSpecies = prim.TreeSpecies;
            if (prim.ScratchPad != null)
            {
                ScratchPad = new byte[prim.ScratchPad.Length];
                Buffer.BlockCopy(prim.ScratchPad, 0, ScratchPad, 0, ScratchPad.Length);
            }
            else
                ScratchPad = Utils.EmptyBytes;
            Position =
                prim.Position;
            Scale = prim.Scale;
            Rotation = prim.Rotation;
            Velocity = prim.Velocity;
            AngularVelocity = prim.AngularVelocity;
            Acceleration = prim.Acceleration;
            CollisionPlane = prim.CollisionPlane;
            Flexible = prim.Flexible;
            Light = prim.Light;
            LightMap = prim.LightMap;
            Sculpt = prim.Sculpt;
            ClickAction = prim.ClickAction;
            Sound = prim.Sound;
            OwnerID = prim.OwnerID;
            SoundFlags = prim.SoundFlags;
            SoundGain = prim.SoundGain;
            SoundRadius = prim.SoundRadius;
            Text = prim.Text;
            TextColor = prim.TextColor;
            MediaURL = prim.MediaURL;
            Joint = prim.Joint;
            JointPivot = prim.JointPivot;
            JointAxisOrAnchor = prim.JointAxisOrAnchor;
            if (prim.NameValues != null)
            {
                if (NameValues == null || NameValues.Length != prim.NameValues.Length)
                    NameValues = new NameValue[prim.NameValues.Length];
                Array.Copy(prim.NameValues, NameValues, prim.NameValues.Length);
            }
            else
                NameValues = null;
            PrimData = prim.PrimData;
            Properties = prim.Properties;
            // FIXME: Get a real copy constructor for TextureEntry instead of serializing to bytes and back
            if (prim.Textures != null)
            {
                byte[] textureBytes = prim.Textures.GetBytes();
                Textures = new TextureEntry(textureBytes, 0, textureBytes.Length);
            }
            else
            {
                Textures = null;
            }
            TextureAnim = prim.TextureAnim;
            ParticleSys = prim.ParticleSys;
        }

        #endregion Constructors

        #region Public Methods

        // Serializes this primitive's path/profile and common state to an OSD map
        public virtual OSD GetOSD()
        {
            OSDMap path = new OSDMap(14);
            path["begin"] = OSD.FromReal(PrimData.PathBegin);
            path["curve"] = OSD.FromInteger((int)PrimData.PathCurve);
            path["end"] = OSD.FromReal(PrimData.PathEnd);
            path["radius_offset"] = OSD.FromReal(PrimData.PathRadiusOffset);
            path["revolutions"] = OSD.FromReal(PrimData.PathRevolutions);
            path["scale_x"] = OSD.FromReal(PrimData.PathScaleX);
            path["scale_y"] = OSD.FromReal(PrimData.PathScaleY);
            path["shear_x"] = OSD.FromReal(PrimData.PathShearX);
            path["shear_y"] = OSD.FromReal(PrimData.PathShearY);
            path["skew"] = OSD.FromReal(PrimData.PathSkew);
            path["taper_x"] = OSD.FromReal(PrimData.PathTaperX);
            path["taper_y"] = OSD.FromReal(PrimData.PathTaperY);
            path["twist"] = OSD.FromReal(PrimData.PathTwist);
            path["twist_begin"] = OSD.FromReal(PrimData.PathTwistBegin);

            OSDMap profile = new OSDMap(4);
            profile["begin"] = OSD.FromReal(PrimData.ProfileBegin);
            profile["curve"] = OSD.FromInteger((int)PrimData.ProfileCurve);
            profile["hole"] = OSD.FromInteger((int)PrimData.ProfileHole);
            profile["end"] = OSD.FromReal(PrimData.ProfileEnd);
            profile["hollow"] = OSD.FromReal(PrimData.ProfileHollow);

            OSDMap volume = new OSDMap(2);
            volume["path"] = path;
            volume["profile"] = profile;

            OSDMap prim = new OSDMap(20);
            if (Properties != null)
            {
                prim["name"] = OSD.FromString(Properties.Name);
                prim["description"] = OSD.FromString(Properties.Description);
            }
            else
            {
                // No properties available; fall back to generic values
                prim["name"] = OSD.FromString("Object");
                prim["description"] = OSD.FromString(String.Empty);
            }
            prim["phantom"] = OSD.FromBoolean(((Flags & PrimFlags.Phantom) != 0));
            prim["physical"] = OSD.FromBoolean(((Flags & PrimFlags.Physics) != 0));
            prim["position"] = OSD.FromVector3(Position);
            prim["rotation"] = OSD.FromQuaternion(Rotation);
            prim["scale"] = OSD.FromVector3(Scale);
            prim["pcode"] = OSD.FromInteger((int)PrimData.PCode);
            prim["material"] = OSD.FromInteger((int)PrimData.Material);
            prim["shadows"] = OSD.FromBoolean(((Flags & PrimFlags.CastShadows) != 0));
            prim["state"] = OSD.FromInteger(PrimData.State);
            prim["id"] = OSD.FromUUID(ID);
            prim["localid"] = OSD.FromUInteger(LocalID);
            prim["parentid"] = OSD.FromUInteger(ParentID);
            prim["volume"] = volume;
            // Optional sections are only emitted when present
            if (Textures != null)
                prim["textures"] = Textures.GetOSD();
            if ((TextureAnim.Flags & TextureAnimMode.ANIM_ON) != 0)
                prim["texture_anim"] = TextureAnim.GetOSD();
            if (Light != null)
                prim["light"] = Light.GetOSD();
            if (LightMap != null)
                prim["light_image"] = LightMap.GetOSD();
            if (Flexible != null)
                prim["flex"] = Flexible.GetOSD();
            if (Sculpt != null)
                prim["sculpt"] = Sculpt.GetOSD();
            return prim;
        }

        // Deserializes a primitive from the OSD map produced by GetOSD
        public static Primitive FromOSD(OSD osd)
        {
            Primitive prim = new Primitive();
            Primitive.ConstructionData data;

            OSDMap map = (OSDMap)osd;
            OSDMap volume = (OSDMap)map["volume"];
            OSDMap path = (OSDMap)volume["path"];
            OSDMap profile = (OSDMap)volume["profile"];

            #region Path/Profile

            data.profileCurve = (byte)0;
            data.Material = (Material)map["material"].AsInteger();
            data.PCode = (PCode)map["pcode"].AsInteger();
            data.State = (byte)map["state"].AsInteger();

            data.PathBegin = (float)path["begin"].AsReal();
            data.PathCurve = (PathCurve)path["curve"].AsInteger();
            data.PathEnd = (float)path["end"].AsReal();
            data.PathRadiusOffset = (float)path["radius_offset"].AsReal();
            data.PathRevolutions = (float)path["revolutions"].AsReal();
            data.PathScaleX = (float)path["scale_x"].AsReal();
            data.PathScaleY = (float)path["scale_y"].AsReal();
            data.PathShearX = (float)path["shear_x"].AsReal();
            data.PathShearY = (float)path["shear_y"].AsReal();
            data.PathSkew = (float)path["skew"].AsReal();
            data.PathTaperX = (float)path["taper_x"].AsReal();
            data.PathTaperY = (float)path["taper_y"].AsReal();
            data.PathTwist = (float)path["twist"].AsReal();
            data.PathTwistBegin = (float)path["twist_begin"].AsReal();

            data.ProfileBegin = (float)profile["begin"].AsReal();
            data.ProfileEnd = (float)profile["end"].AsReal();
            data.ProfileHollow = (float)profile["hollow"].AsReal();
            data.ProfileCurve = (ProfileCurve)profile["curve"].AsInteger();
            data.ProfileHole = (HoleType)profile["hole"].AsInteger();

            #endregion Path/Profile

            prim.PrimData = data;

            if (map["phantom"].AsBoolean())
                prim.Flags |= PrimFlags.Phantom;
            if (map["physical"].AsBoolean())
                prim.Flags |= PrimFlags.Physics;
            if (map["shadows"].AsBoolean())
                prim.Flags |= PrimFlags.CastShadows;

            prim.ID = map["id"].AsUUID();
            prim.LocalID = map["localid"].AsUInteger();
            prim.ParentID = map["parentid"].AsUInteger();
            prim.Position = ((OSDArray)map["position"]).AsVector3();
            prim.Rotation = ((OSDArray)map["rotation"]).AsQuaternion();
            prim.Scale = ((OSDArray)map["scale"]).AsVector3();

            // Optional sections: only deserialize the keys that are present
            if (map["flex"])
                prim.Flexible = FlexibleData.FromOSD(map["flex"]);
            if (map["light"])
                prim.Light = LightData.FromOSD(map["light"]);
            if (map["light_image"])
                prim.LightMap = LightImage.FromOSD(map["light_image"]);
            if (map["sculpt"])
                prim.Sculpt = SculptData.FromOSD(map["sculpt"]);

            prim.Textures = TextureEntry.FromOSD(map["textures"]);
            if (map["texture_anim"])
                prim.TextureAnim = TextureAnimation.FromOSD(map["texture_anim"]);

            prim.Properties = new ObjectProperties();
            if (!string.IsNullOrEmpty(map["name"].AsString()))
            {
                prim.Properties.Name = map["name"].AsString();
            }
            if (!string.IsNullOrEmpty(map["description"].AsString()))
            {
                prim.Properties.Description = map["description"].AsString();
            }

            return prim;
        }

        // Parses the ExtraParams blob starting at pos. Each parameter is a
        // 2 byte type, a 4 byte length, then the payload. Returns the total
        // number of bytes consumed (1 for the count byte plus 6 + length each).
        public int SetExtraParamsFromBytes(byte[] data, int pos)
        {
            int i = pos;
            int totalLength = 1;

            if (data.Length == 0 || pos >= data.Length)
                return 0;

            byte extraParamCount = data[i++];

            for (int k = 0; k < extraParamCount; k++)
            {
                ExtraParamType type = (ExtraParamType)Utils.BytesToUInt16(data, i);
                i += 2;

                uint paramLength = Utils.BytesToUInt(data, i);
                i += 4;

                if (type == ExtraParamType.Flexible)
                    Flexible = new FlexibleData(data, i);
                else if (type == ExtraParamType.Light)
                    Light = new LightData(data, i);
                else if (type == ExtraParamType.LightImage)
                    LightMap = new LightImage(data, i);
                else if (type == ExtraParamType.Sculpt || type == ExtraParamType.Mesh)
                    Sculpt = new SculptData(data, i);

                i += (int)paramLength;
                totalLength += (int)paramLength + 6;
            }

            return totalLength;
        }

        // Serializes whichever extra params are set (flexible, light, light
        // image, sculpt) back into the ExtraParams format read above
        public byte[] GetExtraParamsBytes()
        {
            byte[] flexible = null;
            byte[] light = null;
            byte[] lightmap = null;
            byte[] sculpt = null;
            byte[] buffer = null;
            int size = 1;
            int pos = 0;
            byte count = 0;

            // First pass: serialize each present param and tally the total size
            if (Flexible != null)
            {
                flexible = Flexible.GetBytes();
                size += flexible.Length + 6;
                ++count;
            }
            if (Light != null)
            {
                light = Light.GetBytes();
                size += light.Length + 6;
                ++count;
            }
            if (LightMap != null)
            {
                lightmap = LightMap.GetBytes();
                size += lightmap.Length + 6;
                ++count;
            }
            if (Sculpt != null)
            {
                sculpt = Sculpt.GetBytes();
                size += sculpt.Length + 6;
                ++count;
            }

            buffer = new byte[size];
            buffer[0] = count;
            ++pos;

            if
            (flexible != null)
            {
                Buffer.BlockCopy(Utils.UInt16ToBytes((ushort)ExtraParamType.Flexible), 0, buffer, pos, 2);
                pos += 2;
                Buffer.BlockCopy(Utils.UIntToBytes((uint)flexible.Length), 0, buffer, pos, 4);
                pos += 4;
                Buffer.BlockCopy(flexible, 0, buffer, pos, flexible.Length);
                pos += flexible.Length;
            }
            if (light != null)
            {
                Buffer.BlockCopy(Utils.UInt16ToBytes((ushort)ExtraParamType.Light), 0, buffer, pos, 2);
                pos += 2;
                Buffer.BlockCopy(Utils.UIntToBytes((uint)light.Length), 0, buffer, pos, 4);
                pos += 4;
                Buffer.BlockCopy(light, 0, buffer, pos, light.Length);
                pos += light.Length;
            }
            if (lightmap != null)
            {
                Buffer.BlockCopy(Utils.UInt16ToBytes((ushort)ExtraParamType.LightImage), 0, buffer, pos, 2);
                pos += 2;
                Buffer.BlockCopy(Utils.UIntToBytes((uint)lightmap.Length), 0, buffer, pos, 4);
                pos += 4;
                Buffer.BlockCopy(lightmap, 0, buffer, pos, lightmap.Length);
                pos += lightmap.Length;
            }
            if (sculpt != null)
            {
                // Mesh re-uses the sculpt payload with a different param type
                if (Sculpt.Type == SculptType.Mesh)
                {
                    Buffer.BlockCopy(Utils.UInt16ToBytes((ushort)ExtraParamType.Mesh), 0, buffer, pos, 2);
                }
                else
                {
                    Buffer.BlockCopy(Utils.UInt16ToBytes((ushort)ExtraParamType.Sculpt), 0, buffer, pos, 2);
                }
                pos += 2;
                Buffer.BlockCopy(Utils.UIntToBytes((uint)sculpt.Length), 0, buffer, pos, 4);
                pos += 4;
                Buffer.BlockCopy(sculpt, 0, buffer, pos, sculpt.Length);
                pos += sculpt.Length;
            }

            return buffer;
        }

        #endregion Public Methods

        #region Overrides

        public override bool Equals(object obj)
        {
            return (obj is Primitive) ? this == (Primitive)obj : false;
        }

        public bool Equals(Primitive other)
        {
            return this == other;
        }

        public override string ToString()
        {
            switch (PrimData.PCode)
            {
                case PCode.Prim:
                    return String.Format("{0} ({1})", Type, ID);
                default:
                    return String.Format("{0} ({1})", PrimData.PCode, ID);
            }
        }

        public override int GetHashCode()
        {
            return
                Position.GetHashCode() ^
                Velocity.GetHashCode() ^
                Acceleration.GetHashCode() ^
                Rotation.GetHashCode() ^
                AngularVelocity.GetHashCode() ^
                ClickAction.GetHashCode() ^
                (Flexible != null ? Flexible.GetHashCode() : 0) ^
                (Light != null ? Light.GetHashCode() : 0) ^
                (Sculpt != null ? Sculpt.GetHashCode() : 0) ^
                Flags.GetHashCode() ^
                PrimData.Material.GetHashCode() ^
                MediaURL.GetHashCode() ^
                //TODO: NameValues?
                (Properties != null ? Properties.OwnerID.GetHashCode() : 0) ^
                ParentID.GetHashCode() ^
                PrimData.PathBegin.GetHashCode() ^
                PrimData.PathCurve.GetHashCode() ^
                PrimData.PathEnd.GetHashCode() ^
                PrimData.PathRadiusOffset.GetHashCode() ^
                PrimData.PathRevolutions.GetHashCode() ^
                PrimData.PathScaleX.GetHashCode() ^
                PrimData.PathScaleY.GetHashCode() ^
                PrimData.PathShearX.GetHashCode() ^
                PrimData.PathShearY.GetHashCode() ^
                PrimData.PathSkew.GetHashCode() ^
                PrimData.PathTaperX.GetHashCode() ^
                PrimData.PathTaperY.GetHashCode() ^
                PrimData.PathTwist.GetHashCode() ^
                PrimData.PathTwistBegin.GetHashCode() ^
                PrimData.PCode.GetHashCode() ^
                PrimData.ProfileBegin.GetHashCode() ^
                PrimData.ProfileCurve.GetHashCode() ^
                PrimData.ProfileEnd.GetHashCode() ^
                PrimData.ProfileHollow.GetHashCode() ^
                ParticleSys.GetHashCode() ^
                TextColor.GetHashCode() ^
                TextureAnim.GetHashCode() ^
                (Textures != null ? Textures.GetHashCode() : 0) ^
                SoundRadius.GetHashCode() ^
                Scale.GetHashCode() ^
                Sound.GetHashCode() ^
                PrimData.State.GetHashCode() ^
                Text.GetHashCode() ^
                TreeSpecies.GetHashCode();
        }

        #endregion Overrides

        #region Operators

        // Null-safe identity comparison: two prims are equal when their IDs match
        public static bool operator ==(Primitive lhs, Primitive rhs)
        {
            if ((Object)lhs == null || (Object)rhs == null)
            {
                return (Object)rhs == (Object)lhs;
            }
            return (lhs.ID == rhs.ID);
        }

        public static bool operator !=(Primitive lhs, Primitive rhs)
        {
            if ((Object)lhs == null || (Object)rhs == null)
            {
                return (Object)rhs != (Object)lhs;
            }
            return !(lhs.ID == rhs.ID);
        }

        #endregion Operators

        #region Parameter Packing Methods

        public static ushort PackBeginCut(float beginCut)
        {
            return (ushort)Math.Round(beginCut / CUT_QUANTA);
        }

        public static ushort PackEndCut(float endCut)
        {
            return (ushort)(50000 - (ushort)Math.Round(endCut / CUT_QUANTA));
        }

        public static byte PackPathScale(float pathScale)
        {
            return (byte)(200 - (byte)Math.Round(pathScale / SCALE_QUANTA));
        }

        public static sbyte PackPathShear(float pathShear)
        {
            return (sbyte)Math.Round(pathShear / SHEAR_QUANTA);
        }

        /// <summary>
        /// Packs PathTwist, PathTwistBegin, PathRadiusOffset, and PathSkew
        /// parameters in to signed eight bit values
        /// </summary>
        /// <param name="pathTwist">Floating point parameter to pack</param>
        /// <returns>Signed eight bit value containing the packed parameter</returns>
        public static sbyte PackPathTwist(float pathTwist)
        {
            return (sbyte)Math.Round(pathTwist / SCALE_QUANTA);
        }

        public static sbyte PackPathTaper(float pathTaper)
        {
            return (sbyte)Math.Round(pathTaper / TAPER_QUANTA);
        }

        public static byte PackPathRevolutions(float pathRevolutions)
        {
            return (byte)Math.Round((pathRevolutions - 1f) / REV_QUANTA);
        }

        public static ushort PackProfileHollow(float profileHollow)
        {
            return (ushort)Math.Round(profileHollow / HOLLOW_QUANTA);
        }

        #endregion Parameter Packing Methods

        #region Parameter Unpacking Methods

        public static float UnpackBeginCut(ushort beginCut)
        {
            return (float)beginCut * CUT_QUANTA;
        }

        public static float UnpackEndCut(ushort endCut)
        {
            return (float)(50000 - endCut) * CUT_QUANTA;
        }

        public static float UnpackPathScale(byte pathScale)
        {
            return (float)(200 - pathScale) * SCALE_QUANTA;
        }

        public static float UnpackPathShear(sbyte pathShear)
        {
            return (float)pathShear * SHEAR_QUANTA;
        }

        /// <summary>
        /// Unpacks PathTwist, PathTwistBegin, PathRadiusOffset, and PathSkew
        /// parameters from signed eight bit integers to floating point values
        /// </summary>
        /// <param name="pathTwist">Signed eight bit value to unpack</param>
        /// <returns>Unpacked floating point value</returns>
        public static float UnpackPathTwist(sbyte pathTwist)
        {
            return (float)pathTwist * SCALE_QUANTA;
        }

        public static float UnpackPathTaper(sbyte pathTaper)
        {
            return (float)pathTaper * TAPER_QUANTA;
        }

        public static float UnpackPathRevolutions(byte pathRevolutions)
        {
            return (float)pathRevolutions * REV_QUANTA + 1f;
        }

        public static float UnpackProfileHollow(ushort profileHollow)
        {
            return (float)profileHollow * HOLLOW_QUANTA;
        }

        #endregion Parameter Unpacking Methods
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void CompareNotEqualDouble() { var test = new SimpleBinaryOpTest__CompareNotEqualDouble(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); } // Validates passing a static member works test.RunClsVarScenario(); if (Sse2.IsSupported) { // Validates passing a static member works, using pinning and Load test.RunClsVarScenario_Load(); } // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates passing a local works, using Load 
test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); } // Validates passing the field of a local class works test.RunClassLclFldScenario(); if (Sse2.IsSupported) { // Validates passing the field of a local class works, using pinning and Load test.RunClassLclFldScenario_Load(); } // Validates passing an instance member of a class works test.RunClassFldScenario(); if (Sse2.IsSupported) { // Validates passing an instance member of a class works, using pinning and Load test.RunClassFldScenario_Load(); } // Validates passing the field of a local struct works test.RunStructLclFldScenario(); if (Sse2.IsSupported) { // Validates passing the field of a local struct works, using pinning and Load test.RunStructLclFldScenario_Load(); } // Validates passing an instance member of a struct works test.RunStructFldScenario(); if (Sse2.IsSupported) { // Validates passing an instance member of a struct works, using pinning and Load test.RunStructFldScenario_Load(); } } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__CompareNotEqualDouble { private struct DataTable { private byte[] inArray1; private byte[] inArray2; private byte[] outArray; private GCHandle inHandle1; private GCHandle inHandle2; private GCHandle outHandle; private ulong alignment; public DataTable(Double[] inArray1, Double[] inArray2, Double[] outArray, int alignment) { int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Double>(); int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Double>(); int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Double>(); if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray) { throw new ArgumentException("Invalid value of 
alignment"); } this.inArray1 = new byte[alignment * 2]; this.inArray2 = new byte[alignment * 2]; this.outArray = new byte[alignment * 2]; this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned); this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned); this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned); this.alignment = (ulong)alignment; Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Double, byte>(ref inArray1[0]), (uint)sizeOfinArray1); Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Double, byte>(ref inArray2[0]), (uint)sizeOfinArray2); } public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment); public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment); public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment); public void Dispose() { inHandle1.Free(); inHandle2.Free(); outHandle.Free(); } private static unsafe void* Align(byte* buffer, ulong expectedAlignment) { return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1)); } } private struct TestStruct { public Vector128<Double> _fld1; public Vector128<Double> _fld2; public static TestStruct Create() { var testStruct = new TestStruct(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); return testStruct; } public void RunStructFldScenario(SimpleBinaryOpTest__CompareNotEqualDouble testClass) { var 
result = Sse2.CompareNotEqual(_fld1, _fld2); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr); } public void RunStructFldScenario_Load(SimpleBinaryOpTest__CompareNotEqualDouble testClass) { fixed (Vector128<Double>* pFld1 = &_fld1) fixed (Vector128<Double>* pFld2 = &_fld2) { var result = Sse2.CompareNotEqual( Sse2.LoadVector128((Double*)(pFld1)), Sse2.LoadVector128((Double*)(pFld2)) ); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr); } } } private static readonly int LargestVectorSize = 16; private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double); private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double); private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double); private static Double[] _data1 = new Double[Op1ElementCount]; private static Double[] _data2 = new Double[Op2ElementCount]; private static Vector128<Double> _clsVar1; private static Vector128<Double> _clsVar2; private Vector128<Double> _fld1; private Vector128<Double> _fld2; private DataTable _dataTable; static SimpleBinaryOpTest__CompareNotEqualDouble() { for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); } public SimpleBinaryOpTest__CompareNotEqualDouble() { Succeeded = true; for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } 
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); } _dataTable = new DataTable(_data1, _data2, new Double[RetElementCount], LargestVectorSize); } public bool IsSupported => Sse2.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead)); var result = Sse2.CompareNotEqual( Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load)); var result = Sse2.CompareNotEqual( Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned)); var result = Sse2.CompareNotEqual( Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, 
_dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead)); var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareNotEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) }) .Invoke(null, new object[] { Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load)); var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareNotEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) }) .Invoke(null, new object[] { Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned)); var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareNotEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) }) .Invoke(null, new object[] { Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario)); var result = Sse2.CompareNotEqual( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } 
        // Same as RunClsVarScenario, but pins the class-level fields with `fixed` and reads them
        // through Sse2.LoadVector128 so the operands reach the intrinsic via explicit loads.
        public void RunClsVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));

            fixed (Vector128<Double>* pClsVar1 = &_clsVar1)
            fixed (Vector128<Double>* pClsVar2 = &_clsVar2)
            {
                var result = Sse2.CompareNotEqual(
                    Sse2.LoadVector128((Double*)(pClsVar1)),
                    Sse2.LoadVector128((Double*)(pClsVar2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
            }
        }

        // Operands held in local variables, materialised via Unsafe.Read from the data table.
        public void RunLclVarScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

            var op1 = Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr);
            var op2 = Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr);
            var result = Sse2.CompareNotEqual(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        // Operands held in local variables, materialised via (unaligned) LoadVector128.
        public void RunLclVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

            var op1 = Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr));
            var op2 = Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr));
            var result = Sse2.CompareNotEqual(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        // Operands held in local variables, materialised via LoadAlignedVector128
        // (the data table buffers are allocated with the required alignment elsewhere in this file).
        public void RunLclVarScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

            var op1 = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr));
            var op2 = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr));
            var result = Sse2.CompareNotEqual(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        // Operands read from instance fields of a freshly constructed local test object.
        public void RunClassLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

            var test = new SimpleBinaryOpTest__CompareNotEqualDouble();
            var result = Sse2.CompareNotEqual(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        // As RunClassLclFldScenario, but the fields are pinned and fed through LoadVector128.
        public void RunClassLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));

            var test = new SimpleBinaryOpTest__CompareNotEqualDouble();

            fixed (Vector128<Double>* pFld1 = &test._fld1)
            fixed (Vector128<Double>* pFld2 = &test._fld2)
            {
                var result = Sse2.CompareNotEqual(
                    Sse2.LoadVector128((Double*)(pFld1)),
                    Sse2.LoadVector128((Double*)(pFld2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
            }
        }

        // Operands read directly from this instance's fields.
        public void RunClassFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

            var result = Sse2.CompareNotEqual(_fld1, _fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
        }

        // As RunClassFldScenario, but this instance's fields are pinned and loaded explicitly.
        public void RunClassFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));

            fixed (Vector128<Double>* pFld1 = &_fld1)
            fixed (Vector128<Double>* pFld2 = &_fld2)
            {
                var result = Sse2.CompareNotEqual(
                    Sse2.LoadVector128((Double*)(pFld1)),
                    Sse2.LoadVector128((Double*)(pFld2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
            }
        }

        // Operands read from the fields of a local struct instance.
        public void RunStructLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

            var test = TestStruct.Create();
            var result = Sse2.CompareNotEqual(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        // As RunStructLclFldScenario, but the struct fields are loaded through their addresses.
        public void RunStructLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));

            var test = TestStruct.Create();
            var result = Sse2.CompareNotEqual(
                Sse2.LoadVector128((Double*)(&test._fld1)),
                Sse2.LoadVector128((Double*)(&test._fld2))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        // Delegates to TestStruct so the intrinsic call happens inside a struct method.
        public void RunStructFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

            var test = TestStruct.Create();
            test.RunStructFldScenario(this);
        }

        // Load-variant of RunStructFldScenario, also executed inside a struct method.
        public void RunStructFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));

            var test = TestStruct.Create();
            test.RunStructFldScenario_Load(this);
        }

        // On hardware without SSE2, the intrinsic must throw PlatformNotSupportedException;
        // the test fails if the basic scenario completes without throwing.
        public void RunUnsupportedScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

            bool succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                succeeded = true;
            }

            if (!succeeded)
            {
                Succeeded = false;
            }
        }

        // Copies the two vector operands and the raw result buffer into managed arrays,
        // then defers to the array-based overload for the actual check.
        private void ValidateResult(Vector128<Double> op1, Vector128<Double> op2, void* result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];
            Double[] outArray = new Double[RetElementCount];

            Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), op1);
            Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), op2);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Double>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Pointer-based overload: copies both inputs and the result out of unmanaged memory
        // before validating.
        private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];
            Double[] outArray = new Double[RetElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Double>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Double>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Double>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Checks each result lane against the comparison mask semantics: the lane's bit pattern
        // must be all-ones (-1 when viewed as Int64) where left != right, and all-zeros otherwise.
        // On mismatch, logs the operands and result and marks the test failed.
        private void ValidateResult(Double[] left, Double[] right, Double[] result, [CallerMemberName] string method = "")
        {
            bool succeeded = true;

            if (BitConverter.DoubleToInt64Bits(result[0]) != ((left[0] != right[0]) ? -1 : 0))
            {
                succeeded = false;
            }
            else
            {
                for (var i = 1; i < RetElementCount; i++)
                {
                    if (BitConverter.DoubleToInt64Bits(result[i]) != ((left[i] != right[i]) ? -1 : 0))
                    {
                        succeeded = false;
                        break;
                    }
                }
            }

            if (!succeeded)
            {
                TestLibrary.TestFramework.LogInformation($"{nameof(Sse2)}.{nameof(Sse2.CompareNotEqual)}<Double>(Vector128<Double>, Vector128<Double>): {method} failed:");
                TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
                TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
                TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
                TestLibrary.TestFramework.LogInformation(string.Empty);

                Succeeded = false;
            }
        }
    }
}
using System;

using Org.BouncyCastle.Crypto.Parameters;

namespace Org.BouncyCastle.Crypto.Engines
{
    /**
    * A class that provides Twofish encryption operations.
    *
    * This Java implementation is based on the Java reference
    * implementation provided by Bruce Schneier and developed
    * by Raif S. Naffah.
    */
    public sealed class TwofishEngine
        : IBlockCipher
    {
        // The two fixed 8-bit permutations q0/q1 from the Twofish specification
        // (named p0/p1 here). These feed both the MDS matrix precomputation and
        // the key-dependent S-box construction.
        private static readonly byte[,] P =  {
        {  // p0
            (byte) 0xA9, (byte) 0x67, (byte) 0xB3, (byte) 0xE8,
            (byte) 0x04, (byte) 0xFD, (byte) 0xA3, (byte) 0x76,
            (byte) 0x9A, (byte) 0x92, (byte) 0x80, (byte) 0x78,
            (byte) 0xE4, (byte) 0xDD, (byte) 0xD1, (byte) 0x38,
            (byte) 0x0D, (byte) 0xC6, (byte) 0x35, (byte) 0x98,
            (byte) 0x18, (byte) 0xF7, (byte) 0xEC, (byte) 0x6C,
            (byte) 0x43, (byte) 0x75, (byte) 0x37, (byte) 0x26,
            (byte) 0xFA, (byte) 0x13, (byte) 0x94, (byte) 0x48,
            (byte) 0xF2, (byte) 0xD0, (byte) 0x8B, (byte) 0x30,
            (byte) 0x84, (byte) 0x54, (byte) 0xDF, (byte) 0x23,
            (byte) 0x19, (byte) 0x5B, (byte) 0x3D, (byte) 0x59,
            (byte) 0xF3, (byte) 0xAE, (byte) 0xA2, (byte) 0x82,
            (byte) 0x63, (byte) 0x01, (byte) 0x83, (byte) 0x2E,
            (byte) 0xD9, (byte) 0x51, (byte) 0x9B, (byte) 0x7C,
            (byte) 0xA6, (byte) 0xEB, (byte) 0xA5, (byte) 0xBE,
            (byte) 0x16, (byte) 0x0C, (byte) 0xE3, (byte) 0x61,
            (byte) 0xC0, (byte) 0x8C, (byte) 0x3A, (byte) 0xF5,
            (byte) 0x73, (byte) 0x2C, (byte) 0x25, (byte) 0x0B,
            (byte) 0xBB, (byte) 0x4E, (byte) 0x89, (byte) 0x6B,
            (byte) 0x53, (byte) 0x6A, (byte) 0xB4, (byte) 0xF1,
            (byte) 0xE1, (byte) 0xE6, (byte) 0xBD, (byte) 0x45,
            (byte) 0xE2, (byte) 0xF4, (byte) 0xB6, (byte) 0x66,
            (byte) 0xCC, (byte) 0x95, (byte) 0x03, (byte) 0x56,
            (byte) 0xD4, (byte) 0x1C, (byte) 0x1E, (byte) 0xD7,
            (byte) 0xFB, (byte) 0xC3, (byte) 0x8E, (byte) 0xB5,
            (byte) 0xE9, (byte) 0xCF, (byte) 0xBF, (byte) 0xBA,
            (byte) 0xEA, (byte) 0x77, (byte) 0x39, (byte) 0xAF,
            (byte) 0x33, (byte) 0xC9, (byte) 0x62, (byte) 0x71,
            (byte) 0x81, (byte) 0x79, (byte) 0x09, (byte) 0xAD,
            (byte) 0x24, (byte) 0xCD, (byte) 0xF9, (byte) 0xD8,
            (byte) 0xE5, (byte) 0xC5, (byte) 0xB9, (byte) 0x4D,
            (byte) 0x44, (byte) 0x08, (byte) 0x86, (byte) 0xE7,
            (byte) 0xA1, (byte) 0x1D, (byte) 0xAA, (byte) 0xED,
            (byte) 0x06, (byte) 0x70, (byte) 0xB2, (byte) 0xD2,
            (byte) 0x41, (byte) 0x7B, (byte) 0xA0, (byte) 0x11,
            (byte) 0x31, (byte) 0xC2, (byte) 0x27, (byte) 0x90,
            (byte) 0x20, (byte) 0xF6, (byte) 0x60, (byte) 0xFF,
            (byte) 0x96, (byte) 0x5C, (byte) 0xB1, (byte) 0xAB,
            (byte) 0x9E, (byte) 0x9C, (byte) 0x52, (byte) 0x1B,
            (byte) 0x5F, (byte) 0x93, (byte) 0x0A, (byte) 0xEF,
            (byte) 0x91, (byte) 0x85, (byte) 0x49, (byte) 0xEE,
            (byte) 0x2D, (byte) 0x4F, (byte) 0x8F, (byte) 0x3B,
            (byte) 0x47, (byte) 0x87, (byte) 0x6D, (byte) 0x46,
            (byte) 0xD6, (byte) 0x3E, (byte) 0x69, (byte) 0x64,
            (byte) 0x2A, (byte) 0xCE, (byte) 0xCB, (byte) 0x2F,
            (byte) 0xFC, (byte) 0x97, (byte) 0x05, (byte) 0x7A,
            (byte) 0xAC, (byte) 0x7F, (byte) 0xD5, (byte) 0x1A,
            (byte) 0x4B, (byte) 0x0E, (byte) 0xA7, (byte) 0x5A,
            (byte) 0x28, (byte) 0x14, (byte) 0x3F, (byte) 0x29,
            (byte) 0x88, (byte) 0x3C, (byte) 0x4C, (byte) 0x02,
            (byte) 0xB8, (byte) 0xDA, (byte) 0xB0, (byte) 0x17,
            (byte) 0x55, (byte) 0x1F, (byte) 0x8A, (byte) 0x7D,
            (byte) 0x57, (byte) 0xC7, (byte) 0x8D, (byte) 0x74,
            (byte) 0xB7, (byte) 0xC4, (byte) 0x9F, (byte) 0x72,
            (byte) 0x7E, (byte) 0x15, (byte) 0x22, (byte) 0x12,
            (byte) 0x58, (byte) 0x07, (byte) 0x99, (byte) 0x34,
            (byte) 0x6E, (byte) 0x50, (byte) 0xDE, (byte) 0x68,
            (byte) 0x65, (byte) 0xBC, (byte) 0xDB, (byte) 0xF8,
            (byte) 0xC8, (byte) 0xA8, (byte) 0x2B, (byte) 0x40,
            (byte) 0xDC, (byte) 0xFE, (byte) 0x32, (byte) 0xA4,
            (byte) 0xCA, (byte) 0x10, (byte) 0x21, (byte) 0xF0,
            (byte) 0xD3, (byte) 0x5D, (byte) 0x0F, (byte) 0x00,
            (byte) 0x6F, (byte) 0x9D, (byte) 0x36, (byte) 0x42,
            (byte) 0x4A, (byte) 0x5E, (byte) 0xC1, (byte) 0xE0 },
        {  // p1
            (byte) 0x75, (byte) 0xF3, (byte) 0xC6, (byte) 0xF4,
            (byte) 0xDB, (byte) 0x7B, (byte) 0xFB, (byte) 0xC8,
            (byte) 0x4A, (byte) 0xD3, (byte) 0xE6, (byte) 0x6B,
            (byte) 0x45, (byte) 0x7D, (byte) 0xE8, (byte) 0x4B,
            (byte) 0xD6, (byte) 0x32, (byte) 0xD8, (byte) 0xFD,
            (byte) 0x37, (byte) 0x71, (byte) 0xF1, (byte) 0xE1,
            (byte) 0x30, (byte) 0x0F, (byte) 0xF8, (byte) 0x1B,
            (byte) 0x87, (byte) 0xFA, (byte) 0x06, (byte) 0x3F,
            (byte) 0x5E, (byte) 0xBA, (byte) 0xAE, (byte) 0x5B,
            (byte) 0x8A, (byte) 0x00, (byte) 0xBC, (byte) 0x9D,
            (byte) 0x6D, (byte) 0xC1, (byte) 0xB1, (byte) 0x0E,
            (byte) 0x80, (byte) 0x5D, (byte) 0xD2, (byte) 0xD5,
            (byte) 0xA0, (byte) 0x84, (byte) 0x07, (byte) 0x14,
            (byte) 0xB5, (byte) 0x90, (byte) 0x2C, (byte) 0xA3,
            (byte) 0xB2, (byte) 0x73, (byte) 0x4C, (byte) 0x54,
            (byte) 0x92, (byte) 0x74, (byte) 0x36, (byte) 0x51,
            (byte) 0x38, (byte) 0xB0, (byte) 0xBD, (byte) 0x5A,
            (byte) 0xFC, (byte) 0x60, (byte) 0x62, (byte) 0x96,
            (byte) 0x6C, (byte) 0x42, (byte) 0xF7, (byte) 0x10,
            (byte) 0x7C, (byte) 0x28, (byte) 0x27, (byte) 0x8C,
            (byte) 0x13, (byte) 0x95, (byte) 0x9C, (byte) 0xC7,
            (byte) 0x24, (byte) 0x46, (byte) 0x3B, (byte) 0x70,
            (byte) 0xCA, (byte) 0xE3, (byte) 0x85, (byte) 0xCB,
            (byte) 0x11, (byte) 0xD0, (byte) 0x93, (byte) 0xB8,
            (byte) 0xA6, (byte) 0x83, (byte) 0x20, (byte) 0xFF,
            (byte) 0x9F, (byte) 0x77, (byte) 0xC3, (byte) 0xCC,
            (byte) 0x03, (byte) 0x6F, (byte) 0x08, (byte) 0xBF,
            (byte) 0x40, (byte) 0xE7, (byte) 0x2B, (byte) 0xE2,
            (byte) 0x79, (byte) 0x0C, (byte) 0xAA, (byte) 0x82,
            (byte) 0x41, (byte) 0x3A, (byte) 0xEA, (byte) 0xB9,
            (byte) 0xE4, (byte) 0x9A, (byte) 0xA4, (byte) 0x97,
            (byte) 0x7E, (byte) 0xDA, (byte) 0x7A, (byte) 0x17,
            (byte) 0x66, (byte) 0x94, (byte) 0xA1, (byte) 0x1D,
            (byte) 0x3D, (byte) 0xF0, (byte) 0xDE, (byte) 0xB3,
            (byte) 0x0B, (byte) 0x72, (byte) 0xA7, (byte) 0x1C,
            (byte) 0xEF, (byte) 0xD1, (byte) 0x53, (byte) 0x3E,
            (byte) 0x8F, (byte) 0x33, (byte) 0x26, (byte) 0x5F,
            (byte) 0xEC, (byte) 0x76, (byte) 0x2A, (byte) 0x49,
            (byte) 0x81, (byte) 0x88, (byte) 0xEE, (byte) 0x21,
            (byte) 0xC4, (byte) 0x1A, (byte) 0xEB, (byte) 0xD9,
            (byte) 0xC5, (byte) 0x39, (byte) 0x99, (byte) 0xCD,
            (byte) 0xAD, (byte) 0x31, (byte) 0x8B, (byte) 0x01,
            (byte) 0x18, (byte) 0x23, (byte) 0xDD, (byte) 0x1F,
            (byte) 0x4E, (byte) 0x2D, (byte) 0xF9, (byte) 0x48,
            (byte) 0x4F, (byte) 0xF2, (byte) 0x65, (byte) 0x8E,
            (byte) 0x78, (byte) 0x5C, (byte) 0x58, (byte) 0x19,
            (byte) 0x8D, (byte) 0xE5, (byte) 0x98, (byte) 0x57,
            (byte) 0x67, (byte) 0x7F, (byte) 0x05, (byte) 0x64,
            (byte) 0xAF, (byte) 0x63, (byte) 0xB6, (byte) 0xFE,
            (byte) 0xF5, (byte) 0xB7, (byte) 0x3C, (byte) 0xA5,
            (byte) 0xCE, (byte) 0xE9, (byte) 0x68, (byte) 0x44,
            (byte) 0xE0, (byte) 0x4D, (byte) 0x43, (byte) 0x69,
            (byte) 0x29, (byte) 0x2E, (byte) 0xAC, (byte) 0x15,
            (byte) 0x59, (byte) 0xA8, (byte) 0x0A, (byte) 0x9E,
            (byte) 0x6E, (byte) 0x47, (byte) 0xDF, (byte) 0x34,
            (byte) 0x35, (byte) 0x6A, (byte) 0xCF, (byte) 0xDC,
            (byte) 0x22, (byte) 0xC9, (byte) 0xC0, (byte) 0x9B,
            (byte) 0x89, (byte) 0xD4, (byte) 0xED, (byte) 0xAB,
            (byte) 0x12, (byte) 0xA2, (byte) 0x0D, (byte) 0x52,
            (byte) 0xBB, (byte) 0x02, (byte) 0x2F, (byte) 0xA9,
            (byte) 0xD7, (byte) 0x61, (byte) 0x1E, (byte) 0xB4,
            (byte) 0x50, (byte) 0x04, (byte) 0xF6, (byte) 0xC2,
            (byte) 0x16, (byte) 0x25, (byte) 0x86, (byte) 0x56,
            (byte) 0x55, (byte) 0x09, (byte) 0xBE, (byte) 0x91 }
        };

        /**
        * Define the fixed p0/p1 permutations used in keyed S-box lookup.
        * By changing the following constant definitions, the S-boxes will
        * automatically get changed in the Twofish engine.
        */
        private const int P_00 = 1;
        private const int P_01 = 0;
        private const int P_02 = 0;
        private const int P_03 = P_01 ^ 1;
        private const int P_04 = 1;

        private const int P_10 = 0;
        private const int P_11 = 0;
        private const int P_12 = 1;
        private const int P_13 = P_11 ^ 1;
        private const int P_14 = 0;

        private const int P_20 = 1;
        private const int P_21 = 1;
        private const int P_22 = 0;
        private const int P_23 = P_21 ^ 1;
        private const int P_24 = 0;

        private const int P_30 = 0;
        private const int P_31 = 1;
        private const int P_32 = 1;
        private const int P_33 = P_31 ^ 1;
        private const int P_34 = 1;

        /* Primitive polynomial for GF(256) */
        private const int GF256_FDBK = 0x169;
        private const int GF256_FDBK_2 = GF256_FDBK / 2;
        private const int GF256_FDBK_4 = GF256_FDBK / 4;

        private const int RS_GF_FDBK = 0x14D; // field generator

        //====================================
        // Useful constants
        //====================================

        private const int ROUNDS = 16;
        private const int MAX_ROUNDS = 16;  // bytes = 128 bits
        private const int BLOCK_SIZE = 16;  // bytes = 128 bits
        private const int MAX_KEY_BITS = 256;

        // Subkey array layout: 4 input-whitening words, 4 output-whitening words,
        // then 2 round subkeys per round.
        private const int INPUT_WHITEN=0;
        private const int OUTPUT_WHITEN=INPUT_WHITEN+BLOCK_SIZE/4; // 4
        private const int ROUND_SUBKEYS=OUTPUT_WHITEN+BLOCK_SIZE/4;// 8
        private const int TOTAL_SUBKEYS=ROUND_SUBKEYS+2*MAX_ROUNDS;// 40

        private const int SK_STEP = 0x02020202;
        private const int SK_BUMP = 0x01010101;
        private const int SK_ROTL = 9;

        private bool encrypting;

        // Precomputed MDS matrix columns, one 32-bit entry per possible byte value.
        private int[] gMDS0 = new int[MAX_KEY_BITS];
        private int[] gMDS1 = new int[MAX_KEY_BITS];
        private int[] gMDS2 = new int[MAX_KEY_BITS];
        private int[] gMDS3 = new int[MAX_KEY_BITS];

        /**
        * gSubKeys[] and gSBox[] are eventually used in the
        * encryption and decryption methods.
        */
        private int[] gSubKeys;
        private int[] gSBox;

        // Number of 64-bit words in the supplied key (1..4).
        private int k64Cnt;

        private byte[] workingKey;

        public TwofishEngine()
        {
            // calculate the MDS matrix
            int[] m1 = new int[2];
            int[] mX = new int[2];
            int[] mY = new int[2];
            int j;

            for (int i=0; i< MAX_KEY_BITS ; i++)
            {
                // m1/mX/mY hold the byte and its 5B/EF multiples for both permutations
                j = P[0,i] & 0xff;
                m1[0] = j;
                mX[0] = Mx_X(j) & 0xff;
                mY[0] = Mx_Y(j) & 0xff;

                j = P[1,i] & 0xff;
                m1[1] = j;
                mX[1] = Mx_X(j) & 0xff;
                mY[1] = Mx_Y(j) & 0xff;

                gMDS0[i] = m1[P_00]       | mX[P_00] <<  8 |
                           mY[P_00] << 16 | mY[P_00] << 24;

                gMDS1[i] = mY[P_10]       | mY[P_10] <<  8 |
                           mX[P_10] << 16 | m1[P_10] << 24;

                gMDS2[i] = mX[P_20]       | mY[P_20] <<  8 |
                           m1[P_20] << 16 | mY[P_20] << 24;

                gMDS3[i] = mX[P_30]       | m1[P_30] <<  8 |
                           mY[P_30] << 16 | mX[P_30] << 24;
            }
        }

        /**
        * initialise a Twofish cipher.
        *
        * @param forEncryption whether or not we are for encryption.
        * @param parameters the parameters required to set up the cipher.
        * @exception ArgumentException if the parameters argument is
        * inappropriate.
        */
        public void Init(
            bool forEncryption,
            ICipherParameters parameters)
        {
            if (!(parameters is KeyParameter))
                throw new ArgumentException("invalid parameter passed to Twofish init - " + parameters.GetType().ToString());

            this.encrypting = forEncryption;
            this.workingKey = ((KeyParameter)parameters).GetKey();
            this.k64Cnt = (this.workingKey.Length / 8); // pre-padded ?
            SetKey(this.workingKey);
        }

        public string AlgorithmName
        {
            get { return "Twofish"; }
        }

        public bool IsPartialBlockOkay
        {
            get { return false; }
        }

        /**
        * Process one 16-byte block. Throws if Init has not been called,
        * or if either buffer is too short for a full block at its offset.
        */
        public int ProcessBlock(
            byte[]	input,
            int		inOff,
            byte[]	output,
            int		outOff)
        {
            if (workingKey == null)
                throw new InvalidOperationException("Twofish not initialised");

            Check.DataLength(input, inOff, BLOCK_SIZE, "input buffer too short");
            Check.OutputLength(output, outOff, BLOCK_SIZE, "output buffer too short");

            if (encrypting)
            {
                EncryptBlock(input, inOff, output, outOff);
            }
            else
            {
                DecryptBlock(input, inOff, output, outOff);
            }

            return BLOCK_SIZE;
        }

        // Re-runs the key schedule with the current key, restoring initial state.
        public void Reset()
        {
            if (this.workingKey != null)
            {
                SetKey(this.workingKey);
            }
        }

        public int GetBlockSize()
        {
            return BLOCK_SIZE;
        }

        //==================================
        // Private Implementation
        //==================================

        // Key schedule: derives the 40 round subkeys and fully expands the
        // key-dependent S-boxes (gSBox) from the supplied key bytes.
        private void SetKey(byte[] key)
        {
            int[] k32e = new int[MAX_KEY_BITS/64]; // 4
            int[] k32o = new int[MAX_KEY_BITS/64]; // 4

            int[] sBoxKeys = new int[MAX_KEY_BITS/64]; // 4
            gSubKeys = new int[TOTAL_SUBKEYS];

            if (k64Cnt < 1)
            {
                throw new ArgumentException("Key size less than 64 bits");
            }

            if (k64Cnt > 4)
            {
                throw new ArgumentException("Key size larger than 256 bits");
            }

            /*
            * k64Cnt is the number of 8 byte blocks (64 bit chunks)
            * that are in the input key.  The input key is a
            * maximum of 32 bytes ( 256 bits ), so the range
            * for k64Cnt is 1..4
            */
            for (int i=0,p=0; i<k64Cnt ; i++)
            {
                p = i* 8;

                k32e[i] = BytesTo32Bits(key, p);
                k32o[i] = BytesTo32Bits(key, p+4);

                // S-box keys are the RS-code residues, stored in reverse order
                sBoxKeys[k64Cnt-1-i] = RS_MDS_Encode(k32e[i], k32o[i]);
            }

            int q,A,B;
            for (int i=0; i < TOTAL_SUBKEYS / 2 ; i++)
            {
                q = i*SK_STEP;
                A = F32(q,         k32e);
                B = F32(q+SK_BUMP, k32o);
                B = B << 8 | (int)((uint)B >> 24);  // ROL 8
                A += B;
                gSubKeys[i*2] = A;
                A += B;
                gSubKeys[i*2 + 1] = A << SK_ROTL | (int)((uint)A >> (32-SK_ROTL));
            }

            /*
            * fully expand the table for speed
            */
            int k0 = sBoxKeys[0];
            int k1 = sBoxKeys[1];
            int k2 = sBoxKeys[2];
            int k3 = sBoxKeys[3];
            int b0, b1, b2, b3;
            gSBox = new int[4*MAX_KEY_BITS];
            for (int i=0; i<MAX_KEY_BITS; i++)
            {
                b0 = b1 = b2 = b3 = i;
                switch (k64Cnt & 3)
                {
                    case 1: // 64 bits of key
                        gSBox[i*2]       = gMDS0[(P[P_01,b0] & 0xff) ^ M_b0(k0)];
                        gSBox[i*2+1]     = gMDS1[(P[P_11,b1] & 0xff) ^ M_b1(k0)];
                        gSBox[i*2+0x200] = gMDS2[(P[P_21,b2] & 0xff) ^ M_b2(k0)];
                        gSBox[i*2+0x201] = gMDS3[(P[P_31,b3] & 0xff) ^ M_b3(k0)];
                        break;
                    case 0: // 256 bits of key
                        b0 = (P[P_04,b0] & 0xff) ^ M_b0(k3);
                        b1 = (P[P_14,b1] & 0xff) ^ M_b1(k3);
                        b2 = (P[P_24,b2] & 0xff) ^ M_b2(k3);
                        b3 = (P[P_34,b3] & 0xff) ^ M_b3(k3);
                        // fall through, having pre-processed b[0]..b[3] with k32[3]
                        goto case 3;
                    case 3: // 192 bits of key
                        b0 = (P[P_03,b0] & 0xff) ^ M_b0(k2);
                        b1 = (P[P_13,b1] & 0xff) ^ M_b1(k2);
                        b2 = (P[P_23,b2] & 0xff) ^ M_b2(k2);
                        b3 = (P[P_33,b3] & 0xff) ^ M_b3(k2);
                        // fall through, having pre-processed b[0]..b[3] with k32[2]
                        goto case 2;
                    case 2: // 128 bits of key
                        gSBox[i * 2] = gMDS0[(P[P_01, (P[P_02, b0] & 0xff) ^ M_b0(k1)] & 0xff) ^ M_b0(k0)];
                        gSBox[i*2+1] = gMDS1[(P[P_11,(P[P_12,b1] & 0xff) ^ M_b1(k1)] & 0xff) ^ M_b1(k0)];
                        gSBox[i*2+0x200] = gMDS2[(P[P_21,(P[P_22,b2] & 0xff) ^ M_b2(k1)] & 0xff) ^ M_b2(k0)];
                        gSBox[i * 2 + 0x201] = gMDS3[(P[P_31, (P[P_32, b3] & 0xff) ^ M_b3(k1)] & 0xff) ^ M_b3(k0)];
                        break;
                }
            }

            /*
            * the function exits having setup the gSBox with the
            * input key material.
            */
        }

        /**
        * Encrypt the given input starting at the given offset and place
        * the result in the provided buffer starting at the given offset.
        * The input will be an exact multiple of our blocksize.
        *
        * encryptBlock uses the pre-calculated gSBox[] and subKey[]
        * arrays.
        */
        private void EncryptBlock(
            byte[] src,
            int srcIndex,
            byte[] dst,
            int dstIndex)
        {
            // input whitening
            int x0 = BytesTo32Bits(src, srcIndex) ^ gSubKeys[INPUT_WHITEN];
            int x1 = BytesTo32Bits(src, srcIndex + 4) ^ gSubKeys[INPUT_WHITEN + 1];
            int x2 = BytesTo32Bits(src, srcIndex + 8) ^ gSubKeys[INPUT_WHITEN + 2];
            int x3 = BytesTo32Bits(src, srcIndex + 12) ^ gSubKeys[INPUT_WHITEN + 3];

            int k = ROUND_SUBKEYS;
            int t0, t1;
            for (int r = 0; r < ROUNDS; r +=2)
            {
                // two rounds per iteration; rotations applied per the Twofish F function
                t0 = Fe32_0(x0);
                t1 = Fe32_3(x1);
                x2 ^= t0 + t1 + gSubKeys[k++];
                x2 = (int)((uint)x2 >>1) | x2 << 31;
                x3 = (x3 << 1 | (int) ((uint)x3 >> 31)) ^ (t0 + 2*t1 + gSubKeys[k++]);

                t0 = Fe32_0(x2);
                t1 = Fe32_3(x3);
                x0 ^= t0 + t1 + gSubKeys[k++];
                x0 = (int) ((uint)x0 >>1) | x0 << 31;
                x1 = (x1 << 1 | (int)((uint)x1 >> 31)) ^ (t0 + 2*t1 + gSubKeys[k++]);
            }

            // output whitening (note the x2/x3/x0/x1 swap of halves)
            Bits32ToBytes(x2 ^ gSubKeys[OUTPUT_WHITEN], dst, dstIndex);
            Bits32ToBytes(x3 ^ gSubKeys[OUTPUT_WHITEN + 1], dst, dstIndex + 4);
            Bits32ToBytes(x0 ^ gSubKeys[OUTPUT_WHITEN + 2], dst, dstIndex + 8);
            Bits32ToBytes(x1 ^ gSubKeys[OUTPUT_WHITEN + 3], dst, dstIndex + 12);
        }

        /**
        * Decrypt the given input starting at the given offset and place
        * the result in the provided buffer starting at the given offset.
        * The input will be an exact multiple of our blocksize.
        */
        private void DecryptBlock(
            byte[] src,
            int srcIndex,
            byte[] dst,
            int dstIndex)
        {
            // undo output whitening
            int x2 = BytesTo32Bits(src, srcIndex) ^ gSubKeys[OUTPUT_WHITEN];
            int x3 = BytesTo32Bits(src, srcIndex+4) ^ gSubKeys[OUTPUT_WHITEN + 1];
            int x0 = BytesTo32Bits(src, srcIndex+8) ^ gSubKeys[OUTPUT_WHITEN + 2];
            int x1 = BytesTo32Bits(src, srcIndex+12) ^ gSubKeys[OUTPUT_WHITEN + 3];

            // walk the round subkeys backwards
            int k = ROUND_SUBKEYS + 2 * ROUNDS -1 ;
            int t0, t1;
            for (int r = 0; r< ROUNDS ; r +=2)
            {
                t0 = Fe32_0(x2);
                t1 = Fe32_3(x3);
                x1 ^= t0 + 2*t1 + gSubKeys[k--];
                x0 = (x0 << 1 | (int)((uint) x0 >> 31)) ^ (t0 + t1 + gSubKeys[k--]);
                x1 = (int) ((uint)x1 >>1) | x1 << 31;

                t0 = Fe32_0(x0);
                t1 = Fe32_3(x1);
                x3 ^= t0 + 2*t1 + gSubKeys[k--];
                x2 = (x2 << 1 | (int)((uint)x2 >> 31)) ^ (t0 + t1 + gSubKeys[k--]);
                x3 = (int)((uint)x3 >>1) | x3 << 31;
            }

            // undo input whitening
            Bits32ToBytes(x0 ^ gSubKeys[INPUT_WHITEN], dst, dstIndex);
            Bits32ToBytes(x1 ^ gSubKeys[INPUT_WHITEN + 1], dst, dstIndex + 4);
            Bits32ToBytes(x2 ^ gSubKeys[INPUT_WHITEN + 2], dst, dstIndex + 8);
            Bits32ToBytes(x3 ^ gSubKeys[INPUT_WHITEN + 3], dst, dstIndex + 12);
        }

        /*
        * TODO: This can be optimised and made cleaner by combining
        * the functionality in this function and applying it appropriately
        * to the creation of the subkeys during key setup.
        */
        // The h function of the key schedule: composes permutations keyed by the
        // appropriate number of key words (by key length), then MDS-multiplies.
        private int F32(int x, int[] k32)
        {
            int b0 = M_b0(x);
            int b1 = M_b1(x);
            int b2 = M_b2(x);
            int b3 = M_b3(x);
            int k0 = k32[0];
            int k1 = k32[1];
            int k2 = k32[2];
            int k3 = k32[3];

            int result = 0;
            switch (k64Cnt & 3)
            {
                case 1: // 64 bits of key
                    result =
                        gMDS0[(P[P_01,b0] & 0xff) ^ M_b0(k0)] ^
                        gMDS1[(P[P_11,b1] & 0xff) ^ M_b1(k0)] ^
                        gMDS2[(P[P_21,b2] & 0xff) ^ M_b2(k0)] ^
                        gMDS3[(P[P_31,b3] & 0xff) ^ M_b3(k0)];
                    break;
                case 0: /* 256 bits of key */
                    b0 = (P[P_04,b0] & 0xff) ^ M_b0(k3);
                    b1 = (P[P_14,b1] & 0xff) ^ M_b1(k3);
                    b2 = (P[P_24,b2] & 0xff) ^ M_b2(k3);
                    b3 = (P[P_34,b3] & 0xff) ^ M_b3(k3);
                    goto case 3;
                case 3: // 192 bits of key
                    b0 = (P[P_03,b0] & 0xff) ^ M_b0(k2);
                    b1 = (P[P_13,b1] & 0xff) ^ M_b1(k2);
                    b2 = (P[P_23,b2] & 0xff) ^ M_b2(k2);
                    b3 = (P[P_33,b3] & 0xff) ^ M_b3(k2);
                    goto case 2;
                case 2: // 128 bits of key
                    result =
                        gMDS0[(P[P_01,(P[P_02,b0]&0xff)^M_b0(k1)]&0xff)^M_b0(k0)] ^
                        gMDS1[(P[P_11,(P[P_12,b1]&0xff)^M_b1(k1)]&0xff)^M_b1(k0)] ^
                        gMDS2[(P[P_21,(P[P_22,b2]&0xff)^M_b2(k1)]&0xff)^M_b2(k0)] ^
                        gMDS3[(P[P_31,(P[P_32,b3]&0xff)^M_b3(k1)]&0xff)^M_b3(k0)];
                    break;
            }
            return result;
        }

        /**
        * Use (12, 8) Reed-Solomon code over GF(256) to produce
        * a key S-box 32-bit entity from 2 key material 32-bit
        * entities.
        *
        * @param    k0 first 32-bit entity
        * @param    k1 second 32-bit entity
        * @return     Remainder polynomial Generated using RS code
        */
        private int RS_MDS_Encode(int k0, int k1)
        {
            int r = k1;
            for (int i = 0 ; i < 4 ; i++) // shift 1 byte at a time
            {
                r = RS_rem(r);
            }
            r ^= k0;
            for (int i=0 ; i < 4 ; i++)
            {
                r = RS_rem(r);
            }
            return r;
        }

        /**
        * Reed-Solomon code parameters: (12,8) reversible code:
        * <p>
        * <pre>
        * G(x) = x^4 + (a+1/a)x^3 + ax^2 + (a+1/a)x + 1
        * </pre>
        * where a = primitive root of field generator 0x14D
        * </p>
        */
        private int RS_rem(int x)
        {
            int b = (int) (((uint)x >> 24) & 0xff);
            // g2 = b*a, g3 = b*(a + 1/a), both reduced mod the field generator
            int g2 = ((b << 1) ^ ((b & 0x80) != 0 ? RS_GF_FDBK : 0)) & 0xff;
            int g3 = ( (int)((uint)b >> 1) ^ ((b & 0x01) != 0 ? (int)((uint)RS_GF_FDBK >> 1) : 0)) ^ g2 ;
            return ((x << 8) ^ (g3 << 24) ^ (g2 << 16) ^ (g3 << 8) ^ b);
        }

        // One-bit LFSR step over GF(256) with feedback GF256_FDBK.
        private int LFSR1(int x)
        {
            return (x >> 1) ^
                    (((x & 0x01) != 0) ? GF256_FDBK_2 : 0);
        }

        // Two-bit LFSR step over GF(256) with feedback GF256_FDBK.
        private int LFSR2(int x)
        {
            return (x >> 2) ^
                    (((x & 0x02) != 0) ? GF256_FDBK_2 : 0) ^
                    (((x & 0x01) != 0) ? GF256_FDBK_4 : 0);
        }

        private int Mx_X(int x)
        {
            return x ^ LFSR2(x);
        } // 5B

        private int Mx_Y(int x)
        {
            return x ^ LFSR1(x) ^ LFSR2(x);
        } // EF

        // Byte extraction helpers: M_bN selects byte N (little-endian) of a 32-bit word.
        private int M_b0(int x)
        {
            return x & 0xff;
        }

        private int M_b1(int x)
        {
            return (int)((uint)x >> 8) & 0xff;
        }

        private int M_b2(int x)
        {
            return (int)((uint)x >> 16) & 0xff;
        }

        private int M_b3(int x)
        {
            return (int)((uint)x >> 24) & 0xff;
        }

        // g function via the expanded gSBox table, bytes taken in order b0..b3.
        private int Fe32_0(int x)
        {
            return gSBox[ 0x000 + 2*(x & 0xff) ] ^
                gSBox[ 0x001 + 2*((int)((uint)x >> 8) & 0xff) ] ^
                gSBox[ 0x200 + 2*((int)((uint)x >> 16) & 0xff) ] ^
                gSBox[ 0x201 + 2*((int)((uint)x >> 24) & 0xff) ];
        }

        // g function with the input rotated by one byte (bytes taken b3,b0,b1,b2).
        private int Fe32_3(int x)
        {
            return gSBox[ 0x000 + 2*((int)((uint)x >> 24) & 0xff) ] ^
                gSBox[ 0x001 + 2*(x & 0xff) ] ^
                gSBox[ 0x200 + 2*((int)((uint)x >> 8) & 0xff) ] ^
                gSBox[ 0x201 + 2*((int)((uint)x >> 16) & 0xff) ];
        }

        // Little-endian 32-bit read from a byte array.
        private int BytesTo32Bits(byte[] b, int p)
        {
            return ((b[p] & 0xff) ) |
                ((b[p+1] & 0xff) << 8) |
                ((b[p+2] & 0xff) << 16) |
                ((b[p+3] & 0xff) << 24);
        }

        // Little-endian 32-bit write into a byte array.
        private void Bits32ToBytes(int inData, byte[] b, int offset)
        {
            b[offset] = (byte)inData;
            b[offset + 1] = (byte)(inData >> 8);
            b[offset + 2] = (byte)(inData >> 16);
            b[offset + 3] = (byte)(inData >> 24);
        }
    }
}
// Copyright 2020 The Tilt Brush Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Collections.Generic; using System.Linq; using UnityEngine; #if UNITY_EDITOR using UnityEditor; #endif using UObject = UnityEngine.Object; namespace TiltBrush { #if UNITY_EDITOR public class SceneSurgery : EditorWindow { [MenuItem("Window/Scene Surgery")] static void OpenWindow() { EditorWindow.GetWindow<SceneSurgery>().Show(); } // GUI state static public GUIContent NAME_Color = new GUIContent("Color", "Brush color"); bool m_DoColor = false; Color m_Color = Color.white; static public GUIContent NAME_Brush = new GUIContent("Brush", "Brush guid"); bool m_DoBrush = false; string m_Brush = "<enter guid here>"; static GUIContent NAME_TimeOffset = new GUIContent( "Time offset (ms)", "Shift time of all control points by the specified amount"); bool m_DoTimeOffset = false; int m_TimeOffset = 0; static GUIContent NAME_TimeStart = new GUIContent( "Start time (ms, absolute)", "Absolute time of first stroke's first control point; other points are shifted/stretched to match"); bool m_DoTimeStart = false; int m_TimeStart = 0; static GUIContent NAME_TimeEnd = new GUIContent( "End time (ms, absolute)", "Absolute time of last stroke's last control point; other points are shifted/stretched to match"); bool m_DoTimeEnd = false; int m_TimeEnd = 1; static GUIContent NAME_CpOffset = new GUIContent( "Control point offset", "Push control points up to this far along their normal. 
Exact value is random per stroke."); bool m_DoCpOffset = false; float m_CpOffset = 2e-3f; static GUIContent NAME_ResequenceStrokes = new GUIContent( "Resequence (X axis)", "Adjust timestamps to make strokes sequential, ordered by head control point along X axis."); bool m_ResequenceStrokes = false; static GUIContent NAME_CpLiftIncrement = new GUIContent( "Control point increment", "Push control points this far along their normal, multiplied by distance along stroke"); bool m_DoCpLiftIncrement = false; float m_CpLiftIncrement = 1e-4f; bool m_ApplyOnSelect = false; // Internal state private bool m_haveSelection = false; // Better than Update() because it's only called at 10Hz private bool m_startCalled = false; void OnInspectorUpdate() { // Emulate GameObject.Start() functionality. if (! EditorApplication.isPlaying) { m_startCalled = false; } else { if (!m_startCalled && App.CurrentState == App.AppState.Standard) { m_startCalled = true; Start(); } } } // Called sort-of when GameObject Start() is called void Start() { m_ApplyOnSelect = false; if (BrushController.m_Instance != null) { BrushController.m_Instance.StrokeSelected += OnStrokeSelected; BrushController.m_Instance.BrushChanged += brushDescriptor => { m_Brush = brushDescriptor.m_Guid.ToString("D"); Repaint(); }; } if (PanelManager.m_Instance != null) { // TODO : This path has been deprecated. Repair this. 
//List<PanelManager.PanelData> allPanels = PanelManager.m_Instance.GetAllPanels(); //for (int i = 0; i < allPanels.Count; ++i) { // ColorPickerPanel colorsPanel = allPanels[i].m_Panel as ColorPickerPanel; // if (colorsPanel != null) { // colorsPanel.ColorUpdated += color => { // m_Color = (Color)color; // Repaint(); // }; // } //} } } void OnStrokeSelected(Stroke stroke) { // XXX: should fix the Dropper's event to pass along the sender object var sender = App.Config.m_Dropper; if (m_ApplyOnSelect && stroke != null) { Apply(new [] { stroke }); sender.DisableRequestExit_HackForSceneSurgeon(); } } /// Apply selected mutations to strokes associated with the passed transforms public void ApplyToTransforms(IEnumerable<Transform> transforms) { Apply(StrokesForTransforms(transforms).ToArray()); } /// Applies selected mutations to all strokes public void ApplyToAll() { Apply(SketchMemoryScript.AllStrokes().ToArray()); } /// Pushes non-identity transforms from GameObjects into strokes. /// Zeroes out the GameObject transforms as a side effect. public void BakeTransforms() { foreach (var mo in SketchMemoryScript.AllStrokes()) { BakeGameObjTransform(mo); } // Zeroing can't happen during baking, because a single transform may affect // multiple strokes. Also, we should take care to zero transforms for batches // that may not currently have any strokes. 
foreach (var t in AllStrokeTransforms()) { t.position = Vector3.zero; t.rotation = Quaternion.identity; t.localScale = Vector3.one; } SketchMemoryScript.m_Instance.Redraw(doSort: false); } public void RemoveOriginFromMeshBounds() { foreach (var t in AllStrokeTransforms()) { var mf = t.GetComponent<MeshFilter>(); if (mf != null) { mf.sharedMesh.bounds = BoundsWithoutOrigin(mf.sharedMesh.vertices); } } } // // Helpers // static IEnumerable<Transform> AllStrokeTransforms() { var transforms = new HashSet<Transform>(new ReferenceComparer<Transform>()); foreach (var canvas in App.Scene.AllCanvases) { transforms.UnionWith(from batch in canvas.BatchManager.AllBatches() select batch.transform); } transforms.UnionWith(from mo in SketchMemoryScript.AllStrokes() select TransformForStroke(mo)); return transforms; } public static IEnumerable<Stroke> StrokesForTransforms(IEnumerable<Transform> transforms) { foreach (var t in transforms) { Batch batch = t.GetComponent<Batch>(); if (batch != null) { foreach (var group in batch.m_Groups) { yield return group.m_Stroke; } } BaseBrushScript brush = t.GetComponent<BaseBrushScript>(); if (brush != null) { yield return brush.Stroke; } } } private static Transform TransformForStroke(Stroke stroke) { if (stroke.m_BatchSubset != null) { Batch batch = stroke.m_BatchSubset.m_ParentBatch; return batch.transform; } else if (stroke != null) { return stroke.m_Object.transform; } else { return null; } } static Bounds BoundsWithoutOrigin(Vector3[] vs) { Bounds b = new Bounds(); bool seenFirst = false; foreach (var v in vs) { if (v != Vector3.zero) { if (!seenFirst) { b = new Bounds(v, Vector3.zero); seenFirst = true; } else { b.Encapsulate(v); } } } return b; } /// Raises InvalidOperationException if no timestamps. 
public static void GetMinMaxTimes(
    Stroke[] strokes, out uint min, out uint max) {
  try {
    // min uses the first control point of each stroke, max the last.
    min = strokes.Select(stroke => stroke.m_ControlPoints[0].m_TimestampMs).Min();
    max = strokes.Select(stroke => stroke.m_ControlPoints.Last().m_TimestampMs).Max();
  } catch (NullReferenceException e) {
    // Strange that Linq throws NullReferenceException
    // Change to InvalidOperationException
    // NOTE(review): the NRE presumably comes from a stroke with a null
    // m_ControlPoints array; an empty strokes array makes Min()/Max() throw
    // InvalidOperationException directly, matching the doc comment above.
    throw new InvalidOperationException(e.ToString());
  }
}

//
// Mutators
//

/// Folds the stroke's canvas-space GameObject transform into its control
/// points (position, orientation) and brush scale. No-op when the transform
/// is identity. Does not zero the GameObject transform; see BakeTransforms.
private void BakeGameObjTransform(Stroke stroke) {
  TrTransform xf_CS = Coords.AsCanvas[TransformForStroke(stroke)];
  if (xf_CS == TrTransform.identity) { return; }
  var cps = stroke.m_ControlPoints;
  for (int i = 0; i < cps.Length; ++i) {
    var cp = xf_CS * TrTransform.TR(cps[i].m_Pos, cps[i].m_Orient);
    cps[i].m_Pos = cp.translation;
    cps[i].m_Orient = cp.rotation;
  }
  stroke.m_BrushScale *= xf_CS.scale;
}

/// Applies every enabled mutation (brush swap, retiming/resequencing, color,
/// control-point offset/lift) to the passed strokes, then redraws the sketch.
private void Apply(Stroke[] strokes) {
  // Resolve the target brush, if a brush swap was requested and the guid
  // string parses. Failure just logs and disables the swap.
  BrushDescriptor brush = null;
  if (m_DoBrush && !String.IsNullOrEmpty(m_Brush)) {
    try {
      var guid = new Guid(m_Brush);
      brush = BrushCatalog.m_Instance.GetBrush(guid);
      if (brush == null) {
        Debug.LogFormat("No Brush {0}", guid);
      }
    } catch (Exception e) {
      Debug.LogFormat("Invalid guid {0}: {1}", m_Brush, e);
      brush = null;
    }
  }

  // Compute the source time range [old0, old1] and the destination range
  // [new0, new1] for retiming. If there are no timestamps at all, retiming
  // is disabled.
  bool needsTimeAdjust = true;
  uint new0 = 0, new1 = 1; // invariant: new0 <= new1
  uint old0 = 0, old1 = 1; // invariant: old0 < old1
  try {
    GetMinMaxTimes(strokes, out old0, out old1);
    if (old0 == old1) {
      // our choice here determines whether the timestamp goes to new0 or new1
      old1 = old0 + 1;
    }
  } catch (InvalidOperationException) {
    needsTimeAdjust = false;
  }

  if (m_DoTimeOffset) {
    new0 = (uint)(old0 + m_TimeOffset);
    new1 = (uint)(old1 + m_TimeOffset);
  } else if (m_DoTimeStart && m_DoTimeEnd) {
    new0 = (uint) m_TimeStart;
    new1 = (uint) m_TimeEnd;
  } else if (m_DoTimeStart) {
    // Keep the original duration, anchored at the new start.
    new0 = (uint) m_TimeStart;
    new1 = (uint) (new0 + (old1 - old0));
  } else if (m_DoTimeEnd) {
    // Keep the original duration, anchored at the new end.
    new1 = (uint) m_TimeEnd;
    new0 = (uint) (new1 - (old1 - old0));
  } else {
    // No explicit retime requested; only adjust if resequencing.
    new0 = old0;
    new1 = old1;
    needsTimeAdjust = needsTimeAdjust && m_ResequenceStrokes;
  }
  if (new0 > new1) {
    Debug.LogFormat("Invalid retime {0} {1}", new0, new1);
    needsTimeAdjust = false;
  }

  uint pointIndex = 0;
  int pointCount = strokes.Select(obj => obj.m_ControlPoints.Length).Sum();
  float durationPerPointMs = (float)(new1 - new0) / pointCount;
  if (m_ResequenceStrokes) {
    // TODO: axis options
    // Resequencing orders strokes by the x position of their first point.
    strokes = strokes.OrderBy(obj => obj.m_ControlPoints[0].m_Pos.x).ToArray();
  }

  foreach (var stroke in strokes) {
    var cps = stroke.m_ControlPoints;
    if (cps.Length > 0 && needsTimeAdjust) {
      for (int i = 0; i < cps.Length; ++i) {
        if (m_ResequenceStrokes) {
          // Distribute timestamps evenly over all points, in stroke order.
          cps[i].m_TimestampMs = new0 + (uint)(pointIndex * durationPerPointMs);
          ++pointIndex;
        } else {
          // Use long to avoid overflow
          // Linear remap of [old0, old1] onto [new0, new1].
          long ts = cps[i].m_TimestampMs;
          ts = (ts - old0) * (new1 - new0) / (old1 - old0) + new0;
          cps[i].m_TimestampMs = (uint)ts;
        }
      }
    }
    if (m_DoColor) {
      stroke.m_Color = m_Color;
    }
    if (brush != null) {
      stroke.m_BrushGuid = brush.m_Guid;
    }
    if (m_DoCpOffset || m_DoCpLiftIncrement) {
      // Push control points along the stroke normal: a random per-stroke
      // offset plus a per-point increasing lift.
      float offset = m_DoCpOffset ? UnityEngine.Random.Range(0, m_CpOffset) : 0;
      float increment = m_DoCpLiftIncrement ? m_CpLiftIncrement : 0;
      for (int i = 0; i < cps.Length; ++i) {
        Vector3 norm = -(cps[i].m_Orient * Vector3.forward);
        cps[i].m_Pos += norm * (offset + i * increment);
      }
    }
  }

  SketchMemoryScript.m_Instance.Redraw(doSort: needsTimeAdjust);
}

//
// GUI code
//

/// Tracks whether the editor selection contains anything, and seeds the
/// time-start/end fields from the selected strokes' timestamp range.
void OnSelectionChange() {
  var transforms = Selection.transforms;
  m_haveSelection = (transforms.Length > 0);
  if (m_haveSelection) {
    var strokes = StrokesForTransforms(transforms).ToArray();
    uint min, max;
    try {
      GetMinMaxTimes(strokes, out min, out max);
    } catch (InvalidOperationException) {
      return;
    }
    m_TimeStart = (int)min;
    m_TimeEnd = (int)max;
    Repaint();
  }
}

/// IMGUI row: label + enable checkbox + Vector3 field (disabled when off).
public void ToggledVector(ref bool toggle, ref Vector3 val, GUIContent name) {
  EditorGUILayout.BeginHorizontal();
  EditorGUILayout.LabelField(name, GUILayout.Width(150), GUILayout.MaxWidth(150));
  toggle = EditorGUILayout.Toggle(toggle, GUILayout.ExpandWidth(false));
  GUI.enabled = toggle;
  val = EditorGUILayout.Vector3Field("", val, GUILayout.ExpandWidth(true));
  GUI.enabled = true;
  EditorGUILayout.EndHorizontal();
}

/// IMGUI row: label + enable checkbox + color field.
public void ToggledColor(ref bool toggle, ref Color val, GUIContent name) {
  EditorGUILayout.BeginHorizontal();
  EditorGUILayout.LabelField(name, GUILayout.Width(150), GUILayout.MaxWidth(150));
  toggle = EditorGUILayout.Toggle(toggle, GUILayout.ExpandWidth(false));
  val = EditorGUILayout.ColorField("", val, GUILayout.ExpandWidth(true));
  EditorGUILayout.EndHorizontal();
}

/// IMGUI row: label + enable checkbox + int field.
public void ToggledInt(ref bool toggle, ref int val, GUIContent name) {
  EditorGUILayout.BeginHorizontal();
  EditorGUILayout.LabelField(name, GUILayout.Width(150), GUILayout.MaxWidth(150));
  toggle = EditorGUILayout.Toggle(toggle, GUILayout.ExpandWidth(false));
  val = EditorGUILayout.IntField("", val, GUILayout.ExpandWidth(true));
  EditorGUILayout.EndHorizontal();
}

/// IMGUI row: label + enable checkbox + float field.
static public void Toggled(ref bool toggle, ref float val, GUIContent name) {
  EditorGUILayout.BeginHorizontal();
  EditorGUILayout.LabelField(name, GUILayout.Width(150), GUILayout.MaxWidth(150));
  toggle = EditorGUILayout.Toggle(toggle, GUILayout.ExpandWidth(false));
  val = EditorGUILayout.FloatField("", val, GUILayout.ExpandWidth(true));
  EditorGUILayout.EndHorizontal();
}

/// IMGUI row: label + enable checkbox + text field that normalizes valid
/// guids to "D" format; invalid text is kept as typed.
public void ToggledGuid(ref bool toggle, ref string val, GUIContent name) {
  EditorGUILayout.BeginHorizontal();
  EditorGUILayout.LabelField(name, GUILayout.Width(150), GUILayout.MaxWidth(150));
  toggle = EditorGUILayout.Toggle(toggle, GUILayout.ExpandWidth(false));
  GUI.enabled = toggle;
  string newval = EditorGUILayout.TextField(val, GUILayout.ExpandWidth(true));
  if (newval != val) {
    try {
      var guid = new Guid(newval);
      val = guid.ToString("D");
    } catch (Exception) {
      val = newval;
    }
  }
  GUI.enabled = true;
  EditorGUILayout.EndHorizontal();
}

/// Selects every stroke GameObject whose color matches the first stroke in
/// the current editor selection.
/// NOTE(review): First() throws InvalidOperationException when the selection
/// contains no strokes — confirm whether this is acceptable for an editor tool.
void FloodSelectByColor() {
  var targetColor = StrokesForTransforms(Selection.transforms).First().m_Color;
  var selection = new HashSet<GameObject>(new ReferenceComparer<GameObject>());
  selection.UnionWith(from stroke in SketchMemoryScript.AllStrokes()
                      where stroke.m_Color == targetColor
                      select TransformForStroke(stroke).gameObject);
  Selection.objects = selection.Cast<UObject>().ToArray();
}

void OnGUI() {
  EditorGUILayout.Space();
  // Easier to use unity manipulations than "bake transforms"
  ToggledColor(ref m_DoColor, ref m_Color, SceneSurgery.NAME_Color);
  ToggledGuid(ref m_DoBrush, ref m_Brush, SceneSurgery.NAME_Brush);
  ToggledInt(ref m_DoTimeOffset, ref m_TimeOffset, SceneSurgery.NAME_TimeOffset);
  ToggledInt(ref m_DoTimeStart, ref m_TimeStart, SceneSurgery.NAME_TimeStart);
  ToggledInt(ref m_DoTimeEnd, ref m_TimeEnd, SceneSurgery.NAME_TimeEnd);
  m_ResequenceStrokes = EditorGUILayout.Toggle(NAME_ResequenceStrokes, m_ResequenceStrokes);
  Toggled(ref m_DoCpOffset, ref m_CpOffset, SceneSurgery.NAME_CpOffset);
  Toggled(ref m_DoCpLiftIncrement, ref m_CpLiftIncrement, SceneSurgery.NAME_CpLiftIncrement);
  EditorGUILayout.Space();
  m_ApplyOnSelect = EditorGUILayout.Toggle("Use dropper to apply", m_ApplyOnSelect);
  EditorGUILayout.BeginHorizontal();
  {
    GUI.enabled = m_haveSelection;
    if (GUILayout.Button("Apply (editor selection)")) {
      ApplyToTransforms(UnityEditor.Selection.transforms);
    }
    GUI.enabled = true;
    if (GUILayout.Button("Apply (all)")) {
      ApplyToAll();
    }
  }
  EditorGUILayout.EndHorizontal();
  if (GUILayout.Button("Bake transforms")) {
    BakeTransforms();
  }
  if (GUILayout.Button("Remove origin from mesh bounds")) {
    RemoveOriginFromMeshBounds();
  }
  if (GUILayout.Button("Flood-select by color")) {
    // NOTE(review): this extra RemoveOriginFromMeshBounds() call looks like a
    // copy-paste from the button above — confirm whether fixing bounds before
    // flood-selecting is intentional (e.g. for scene-view framing) or a bug.
    RemoveOriginFromMeshBounds();
    FloodSelectByColor();
  }
  if (GUILayout.Button("Filter non-strokes from selection")) {
    // Batch or BaseBrushScript
    Selection.objects = (
        from t in Selection.transforms
        where (t.GetComponent<Batch>() != null ||
               t.GetComponent<BaseBrushScript>() != null)
        select t.gameObject).Cast<UObject>().ToArray();
  }
}
}
#endif
} // namespace TiltBrush
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
** Class:  LowLevelList<T>
**
** Private version of List<T> for internal System.Private.CoreLib use. This
** permits sharing more source between BCL and System.Private.CoreLib (as well as the
** fact that List<T> is just a useful class in general.)
**
** This does not strive to implement the full api surface area
** (but any portion it does implement should match the real List<T>'s
** behavior.)
**
** This file is a subset of System.Collections\System\Collections\Generics\List.cs
** and should be kept in sync with that file.
**
===========================================================*/

using System;
using System.Diagnostics;
using System.Diagnostics.Contracts;

namespace System.Collections.Generic
{
    // Implements a variable-size List that uses an array of objects to store the
    // elements. A List has a capacity, which is the allocated length
    // of the internal array. As elements are added to a List, the capacity
    // of the List is automatically increased as required by reallocating the
    // internal array.
    //
    /// <summary>
    /// LowLevelList with no interface implementation to minimize both code and data size
    /// Data size is smaller because there will be minimal virtual function table.
    /// Code size is smaller because only functions called will be in the binary.
    /// Use LowLevelListWithIList<T> for IList support
    /// </summary>
    [DebuggerDisplay("Count = {Count}")]
#if TYPE_LOADER_IMPLEMENTATION
    [System.Runtime.CompilerServices.ForceDictionaryLookups]
#endif
    internal class LowLevelList<T>
    {
        private const int _defaultCapacity = 4;

        protected T[] _items;
        [ContractPublicPropertyName("Count")]
        protected int _size;
        // Incremented on every structural mutation; enumerators snapshot it so
        // they can detect modification during enumeration (see
        // LowLevelListWithIList<T>.Enumerator below).
        protected int _version;

        // Constructs a List. The list is initially empty and has a capacity
        // of zero. Upon adding the first element to the list the capacity is
        // increased to 16, and then increased in multiples of two as required.
        public LowLevelList()
        {
            _items = Array.Empty<T>();
        }

        // Constructs a List with a given initial capacity. The list is
        // initially empty, but will have room for the given number of elements
        // before any reallocations are required.
        //
        public LowLevelList(int capacity)
        {
            if (capacity < 0)
                throw new ArgumentOutOfRangeException("capacity");
            Contract.EndContractBlock();

            if (capacity == 0)
                _items = Array.Empty<T>();
            else
                _items = new T[capacity];
        }

        // Constructs a List, copying the contents of the given collection. The
        // size and capacity of the new list will both be equal to the size of the
        // given collection.
        //
        public LowLevelList(IEnumerable<T> collection)
        {
            if (collection == null)
                throw new ArgumentNullException("collection");
            Contract.EndContractBlock();

            ICollection<T> c = collection as ICollection<T>;
            if (c != null)
            {
                // Known size: copy in one shot.
                int count = c.Count;
                if (count == 0)
                {
                    _items = Array.Empty<T>();
                }
                else
                {
                    _items = new T[count];
                    c.CopyTo(_items, 0);
                    _size = count;
                }
            }
            else
            {
                _size = 0;
                _items = Array.Empty<T>();
                // This enumerable could be empty. Let Add allocate a new array, if needed.
                // Note it will also go to _defaultCapacity first, not 1, then 2, etc.

                using (IEnumerator<T> en = collection.GetEnumerator())
                {
                    while (en.MoveNext())
                    {
                        Add(en.Current);
                    }
                }
            }
        }

        // Gets and sets the capacity of this list. The capacity is the size of
        // the internal array used to hold items. When set, the internal
        // array of the list is reallocated to the given capacity.
        //
        public int Capacity
        {
            get
            {
                Contract.Ensures(Contract.Result<int>() >= 0);
                return _items.Length;
            }
            set
            {
                if (value < _size)
                {
                    throw new ArgumentOutOfRangeException("value");
                }
                Contract.EndContractBlock();

                if (value != _items.Length)
                {
                    if (value > 0)
                    {
                        T[] newItems = new T[value];
                        Array.Copy(_items, 0, newItems, 0, _size);
                        _items = newItems;
                    }
                    else
                    {
                        _items = Array.Empty<T>();
                    }
                }
            }
        }

        // Read-only property describing how many elements are in the List.
        public int Count
        {
            get
            {
                Contract.Ensures(Contract.Result<int>() >= 0);
                return _size;
            }
        }

        // Sets or Gets the element at the given index.
        //
        public T this[int index]
        {
            get
            {
                // Following trick can reduce the range check by one
                // (a negative index wraps to a large uint and fails the single test).
                if ((uint)index >= (uint)_size)
                {
                    throw new ArgumentOutOfRangeException();
                }
                Contract.EndContractBlock();
                return _items[index];
            }

            set
            {
                if ((uint)index >= (uint)_size)
                {
                    throw new ArgumentOutOfRangeException();
                }
                Contract.EndContractBlock();
                _items[index] = value;
                _version++;
            }
        }

        // Adds the given object to the end of this list. The size of the list is
        // increased by one. If required, the capacity of the list is doubled
        // before adding the new element.
        //
        public void Add(T item)
        {
            if (_size == _items.Length) EnsureCapacity(_size + 1);
            _items[_size++] = item;
            _version++;
        }

        // Ensures that the capacity of this list is at least the given minimum
        // value. If the currect capacity of the list is less than min, the
        // capacity is increased to twice the current capacity or to min,
        // whichever is larger.
        private void EnsureCapacity(int min)
        {
            if (_items.Length < min)
            {
                int newCapacity = _items.Length == 0 ? _defaultCapacity : _items.Length * 2;
                // Allow the list to grow to maximum possible capacity (~2G elements) before encountering overflow.
                // Note that this check works even when _items.Length overflowed thanks to the (uint) cast
                //if ((uint)newCapacity > Array.MaxArrayLength) newCapacity = Array.MaxArrayLength;
                if (newCapacity < min) newCapacity = min;
                Capacity = newCapacity;
            }
        }

#if !TYPE_LOADER_IMPLEMENTATION
        // Adds the elements of the given collection to the end of this list. If
        // required, the capacity of the list is increased to twice the previous
        // capacity or the new size, whichever is larger.
        //
        public void AddRange(IEnumerable<T> collection)
        {
            Contract.Ensures(Count >= Contract.OldValue(Count));

            InsertRange(_size, collection);
        }

        // Clears the contents of List.
        public void Clear()
        {
            if (_size > 0)
            {
                Array.Clear(_items, 0, _size); // Don't need to doc this but we clear the elements so that the gc can reclaim the references.
                _size = 0;
            }
            _version++;
        }

        // Contains returns true if the specified element is in the List.
        // It does a linear, O(n) search.  Equality is determined by calling
        // item.Equals().
        //
        public bool Contains(T item)
        {
            if ((object)item == null)
            {
                // null never Equals anything, so scan for a null slot directly.
                for (int i = 0; i < _size; i++)
                    if ((object)_items[i] == null)
                        return true;
                return false;
            }
            else
            {
                int index = IndexOf(item);
                if (index >= 0)
                    return true;
                return false;
            }
        }

        // Copies a section of this list to the given array at the given index.
        //
        // The method uses the Array.Copy method to copy the elements.
        //
        public void CopyTo(int index, T[] array, int arrayIndex, int count)
        {
            if (_size - index < count)
            {
                throw new ArgumentException();
            }
            Contract.EndContractBlock();

            // Delegate rest of error checking to Array.Copy.
            Array.Copy(_items, index, array, arrayIndex, count);
        }

        public void CopyTo(T[] array, int arrayIndex)
        {
            // Delegate rest of error checking to Array.Copy.
            Array.Copy(_items, 0, array, arrayIndex, _size);
        }

        // Returns the index of the first occurrence of a given value in a range of
        // this list. The list is searched forwards from beginning to end.
        // The elements of the list are compared to the given value using the
        // Object.Equals method.
        //
        // This method uses the Array.IndexOf method to perform the
        // search.
        //
        public int IndexOf(T item)
        {
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            return Array.IndexOf(_items, item, 0, _size);
        }

        // Returns the index of the first occurrence of a given value in a range of
        // this list. The list is searched forwards, starting at index
        // index and ending at count number of elements. The
        // elements of the list are compared to the given value using the
        // Object.Equals method.
        //
        // This method uses the Array.IndexOf method to perform the
        // search.
        //
        public int IndexOf(T item, int index)
        {
            if (index > _size)
                throw new ArgumentOutOfRangeException("index");
            Contract.Ensures(Contract.Result<int>() >= -1);
            Contract.Ensures(Contract.Result<int>() < Count);
            Contract.EndContractBlock();
            return Array.IndexOf(_items, item, index, _size - index);
        }

        // Inserts an element into this list at a given index. The size of the list
        // is increased by one. If required, the capacity of the list is doubled
        // before inserting the new element.
        //
        public void Insert(int index, T item)
        {
            // Note that insertions at the end are legal.
            if ((uint)index > (uint)_size)
            {
                throw new ArgumentOutOfRangeException("index");
            }
            Contract.EndContractBlock();
            if (_size == _items.Length) EnsureCapacity(_size + 1);
            if (index < _size)
            {
                Array.Copy(_items, index, _items, index + 1, _size - index);
            }
            _items[index] = item;
            _size++;
            _version++;
        }

        // Inserts the elements of the given collection at a given index. If
        // required, the capacity of the list is increased to twice the previous
        // capacity or the new size, whichever is larger.  Ranges may be added
        // to the end of the list by setting index to the List's size.
        //
        public void InsertRange(int index, IEnumerable<T> collection)
        {
            if (collection == null)
            {
                throw new ArgumentNullException("collection");
            }

            if ((uint)index > (uint)_size)
            {
                throw new ArgumentOutOfRangeException("index");
            }
            Contract.EndContractBlock();

            ICollection<T> c = collection as ICollection<T>;
            if (c != null)
            {    // if collection is ICollection<T>
                int count = c.Count;
                if (count > 0)
                {
                    EnsureCapacity(_size + count);
                    if (index < _size)
                    {
                        Array.Copy(_items, index, _items, index + count, _size - index);
                    }

                    // If we're inserting a List into itself, we want to be able to deal with that.
                    // The gap has already been opened above, so the source data now lives in
                    // two pieces: [0, index) in place, and the old tail shifted to index + count.
                    if (this == c)
                    {
                        // Copy first part of _items to insert location
                        Array.Copy(_items, 0, _items, index, index);
                        // Copy last part of _items back to inserted location
                        Array.Copy(_items, index + count, _items, index * 2, _size - index);
                    }
                    else
                    {
                        T[] itemsToInsert = new T[count];
                        c.CopyTo(itemsToInsert, 0);
                        Array.Copy(itemsToInsert, 0, _items, index, count);
                    }
                    _size += count;
                }
            }
            else
            {
                using (IEnumerator<T> en = collection.GetEnumerator())
                {
                    while (en.MoveNext())
                    {
                        Insert(index++, en.Current);
                    }
                }
            }
            _version++;
        }

        // Removes the element at the given index. The size of the list is
        // decreased by one.
        //
        public bool Remove(T item)
        {
            int index = IndexOf(item);
            if (index >= 0)
            {
                RemoveAt(index);
                return true;
            }

            return false;
        }

        // This method removes all items which matches the predicate.
        // The complexity is O(n).
        public int RemoveAll(Predicate<T> match)
        {
            if (match == null)
            {
                throw new ArgumentNullException("match");
            }
            Contract.Ensures(Contract.Result<int>() >= 0);
            Contract.Ensures(Contract.Result<int>() <= Contract.OldValue(Count));
            Contract.EndContractBlock();

            int freeIndex = 0;   // the first free slot in items array

            // Find the first item which needs to be removed.
            while (freeIndex < _size && !match(_items[freeIndex])) freeIndex++;
            if (freeIndex >= _size) return 0;

            int current = freeIndex + 1;
            while (current < _size)
            {
                // Find the first item which needs to be kept.
                while (current < _size && match(_items[current])) current++;

                if (current < _size)
                {
                    // copy item to the free slot.
                    _items[freeIndex++] = _items[current++];
                }
            }

            // Clear the now-unused tail so the GC can reclaim references.
            Array.Clear(_items, freeIndex, _size - freeIndex);
            int result = _size - freeIndex;
            _size = freeIndex;
            _version++;
            return result;
        }

        // Removes the element at the given index. The size of the list is
        // decreased by one.
        //
        public void RemoveAt(int index)
        {
            if ((uint)index >= (uint)_size)
            {
                throw new ArgumentOutOfRangeException("index");
            }
            Contract.EndContractBlock();
            _size--;
            if (index < _size)
            {
                Array.Copy(_items, index + 1, _items, index, _size - index);
            }
            _items[_size] = default(T);
            _version++;
        }

        // ToArray returns a new Object array containing the contents of the List.
        // This requires copying the List, which is an O(n) operation.
        public T[] ToArray()
        {
            Contract.Ensures(Contract.Result<T[]>() != null);
            Contract.Ensures(Contract.Result<T[]>().Length == Count);

            T[] array = new T[_size];
            Array.Copy(_items, 0, array, 0, _size);
            return array;
        }
#endif
    }

#if !TYPE_LOADER_IMPLEMENTATION
    /// <summary>
    /// LowLevelList<T> with full IList<T> implementation
    /// </summary>
    internal sealed class LowLevelListWithIList<T> : LowLevelList<T>, IList<T>
    {
        public LowLevelListWithIList()
        {
        }

        public LowLevelListWithIList(int capacity)
            : base(capacity)
        {
        }

        // Is this List read-only?
        bool ICollection<T>.IsReadOnly
        {
            get
            {
                return false;
            }
        }

        /// <internalonly/>
        IEnumerator<T> IEnumerable<T>.GetEnumerator()
        {
            return new Enumerator(this);
        }

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return new Enumerator(this);
        }

        // Fail-fast enumerator: snapshots _version at construction and throws
        // InvalidOperationException if the list is structurally modified while
        // being enumerated (same contract as List<T>.Enumerator).
        private struct Enumerator : IEnumerator<T>, System.Collections.IEnumerator
        {
            private LowLevelListWithIList<T> _list;
            private int _index;
            private int _version;
            private T _current;

            internal Enumerator(LowLevelListWithIList<T> list)
            {
                _list = list;
                _index = 0;
                _version = list._version;
                _current = default(T);
            }

            public void Dispose()
            {
            }

            public bool MoveNext()
            {
                LowLevelListWithIList<T> localList = _list;

                if (_version == localList._version && ((uint)_index < (uint)localList._size))
                {
                    _current = localList._items[_index];
                    _index++;
                    return true;
                }
                return MoveNextRare();
            }

            // Slow path: either the list changed (throw) or we ran off the end.
            private bool MoveNextRare()
            {
                if (_version != _list._version)
                {
                    throw new InvalidOperationException();
                }

                _index = _list._size + 1;
                _current = default(T);
                return false;
            }

            public T Current
            {
                get
                {
                    return _current;
                }
            }

            object System.Collections.IEnumerator.Current
            {
                get
                {
                    // Before the first MoveNext or after the last, Current is undefined.
                    if (_index == 0 || _index == _list._size + 1)
                    {
                        throw new InvalidOperationException();
                    }
                    return Current;
                }
            }

            void System.Collections.IEnumerator.Reset()
            {
                if (_version != _list._version)
                {
                    throw new InvalidOperationException();
                }

                _index = 0;
                _current = default(T);
            }
        }
    }
#endif // !TYPE_LOADER_IMPLEMENTATION
}
using Xunit;

namespace Jint.Tests.Ecma
{
    // Generated ECMA-262 ch. 15.5.4.11 (String.prototype.replace) conformance
    // suite: each test simply runs the corresponding test262 script file; the
    // second RunTest argument indicates whether the script is in strict mode.
    public class Test_15_5_4_11 : EcmaTest
    {
        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ThisObjectUsedByTheReplacevalueFunctionOfAStringPrototypeReplaceInvocation()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/15.5.4.11-1.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheStringPrototypeReplaceLengthPropertyHasTheAttributeReadonly()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A10.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheLengthPropertyOfTheReplaceMethodIs2()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A11.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void CallReplacevaluePassingUndefinedAsTheThisValue()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A12.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T1.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue2()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T10.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue3()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T11.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue4()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T12.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue5()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T13.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue6()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T14.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue7()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T15.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue8()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T16.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue9()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T17.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue10()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T2.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue11()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T4.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue12()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T5.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue13()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T6.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue14()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T7.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue15()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T8.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceSearchvalueReplacevalue16()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A1_T9.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T1.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements2()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T10.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements3()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T2.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements4()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T3.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements5()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T4.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements6()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T5.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements7()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T6.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements8()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T7.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements9()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T8.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheReplacementsAreDoneLeftToRightAndOnceSuchArePlacementIsPerformedTheNewReplacementTextIsNotSubjectToFurtherReplacements10()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A2_T9.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ReplaceWithRegexpUidDReturns()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A3_T1.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ReplaceWithRegexpUidDReturns2()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A3_T2.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ReplaceWithRegexpUidDReturns3()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A3_T3.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ReplaceWithRegexpAZ09AndReplaceFunctionReturns()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A4_T1.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ReplaceWithRegexpAZ09AndReplaceFunctionReturns2()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A4_T2.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ReplaceWithRegexpAZ09AndReplaceFunctionReturns3()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A4_T3.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void ReplaceWithRegexpAZ09AndReplaceFunctionReturns4()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A4_T4.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void UseReplaceWithRegexpAsSearchvalueAndUseInReplacevalue()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A5_T1.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceHasNotPrototypeProperty()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A6.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void StringPrototypeReplaceCanTBeUsedAsConstructor()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A7.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheStringPrototypeReplaceLengthPropertyHasTheAttributeDontenum()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A8.js", false);
        }

        [Fact]
        [Trait("Category", "15.5.4.11")]
        public void TheStringPrototypeReplaceLengthPropertyHasTheAttributeDontdelete()
        {
			RunTest(@"TestCases/ch15/15.5/15.5.4/15.5.4.11/S15.5.4.11_A9.js", false);
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Xml;

using Nini.Config;
using log4net;
using OpenMetaverse;

using OpenSim.Framework;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using OpenSim.Framework.ServiceAuth;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Server.Handlers.Base;

using GridRegion = OpenSim.Services.Interfaces.GridRegion;

namespace OpenSim.Server.Handlers.MapImage
{
    /// <summary>
    /// Service connector that loads the map-image and (optional) grid service
    /// plugins from config and registers a POST /map stream handler.
    /// </summary>
    public class MapAddServiceConnector : ServiceConnector
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
        private IMapImageService m_MapService;
        private IGridService m_GridService;
        private string m_ConfigName = "MapImageService";

        public MapAddServiceConnector(IConfigSource config, IHttpServer server, string configName) :
                base(config, server, configName)
        {
            IConfig serverConfig = config.Configs[m_ConfigName];
            if (serverConfig == null)
                throw new Exception(String.Format("No section {0} in config file", m_ConfigName));

            // The map image service module is mandatory.
            string mapService = serverConfig.GetString("LocalServiceModule", String.Empty);
            if (mapService == String.Empty)
                throw new Exception("No LocalServiceModule in config file");

            Object[] args = new Object[] { config };
            m_MapService = ServerUtils.LoadPlugin<IMapImageService>(mapService, args);

            // The grid service is optional; when present it enables the
            // caller-IP-vs-registered-region check in the POST handler below.
            string gridService = serverConfig.GetString("GridService", String.Empty);
            if (gridService != string.Empty)
                m_GridService = ServerUtils.LoadPlugin<IGridService>(gridService, args);

            if (m_GridService != null)
                m_log.InfoFormat("[MAP IMAGE HANDLER]: GridService check is ON");
            else
                m_log.InfoFormat("[MAP IMAGE HANDLER]: GridService check is OFF");

            bool proxy = serverConfig.GetBoolean("HasProxy", false);
            IServiceAuth auth = ServiceAuth.Create(config, m_ConfigName);

            server.AddStreamHandler(new MapServerPostHandler(m_MapService, m_GridService, proxy, auth));
        }
    }

    /// <summary>
    /// Handles POST /map requests carrying a region's map tile
    /// (form-encoded X, Y, DATA and optional SCOPE fields).
    /// </summary>
    class MapServerPostHandler : BaseStreamHandler
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
        private IMapImageService m_MapService;
        private IGridService m_GridService;
        bool m_Proxy;

        public MapServerPostHandler(IMapImageService service, IGridService grid, bool proxy, IServiceAuth auth) :
            base("POST", "/map", auth)
        {
            m_MapService = service;
            m_GridService = grid;
            m_Proxy = proxy;
        }

        protected override byte[] ProcessRequest(string path, Stream requestData, IOSHttpRequest httpRequest, IOSHttpResponse httpResponse)
        {
//            m_log.DebugFormat("[MAP SERVICE IMAGE HANDLER]: Received {0}", path);
            StreamReader sr = new StreamReader(requestData);
            string body = sr.ReadToEnd();
            sr.Close();
            body = body.Trim();

            try
            {
                Dictionary<string, object> request = ServerUtils.ParseQueryString(body);

                // X, Y and DATA are required; anything else is a bad request.
                if (!request.ContainsKey("X") || !request.ContainsKey("Y") || !request.ContainsKey("DATA"))
                {
                    httpResponse.StatusCode = (int)OSHttpStatusCode.ClientErrorBadRequest;
                    return FailureResult("Bad request.");
                }
                int x = 0, y = 0;
//                UUID scopeID = new UUID("07f8d88e-cd5e-4239-a0ed-843f75d09992");
                // NOTE(review): scopeID is parsed here but not used in the
                // visible part of this method — confirm its use further down.
                UUID scopeID = UUID.Zero;
                Int32.TryParse(request["X"].ToString(), out x);
                Int32.TryParse(request["Y"].ToString(), out y);
                if (request.ContainsKey("SCOPE"))
                    UUID.TryParse(request["SCOPE"].ToString(), out scopeID);

                m_log.DebugFormat("[MAP ADD SERVER CONNECTOR]: Received map data for region at {0}-{1}", x, y);

//                string type = "image/jpeg";
//
//                if (request.ContainsKey("TYPE"))
//                    type = request["TYPE"].ToString();

                if (m_GridService != null)
                {
                    // Anti-spoofing check: the caller's IP must match the
                    // external endpoint of the region registered at (x, y).
                    System.Net.IPAddress ipAddr = GetCallerIP(httpRequest);
                    GridRegion r = m_GridService.GetRegionByPosition(UUID.Zero, (int)Util.RegionToWorldLoc((uint)x), (int)Util.RegionToWorldLoc((uint)y));
                    if (r != null)
                    {
                        if (r.ExternalEndPoint.Address.ToString() != ipAddr.ToString())
                        {
                            m_log.WarnFormat("[MAP IMAGE HANDLER]: IP address {0} may be trying to impersonate region in IP {1}", ipAddr, r.ExternalEndPoint.Address);
                            return FailureResult("IP address of caller does not match IP address of registered region");
                        }
                    }
else { m_log.WarnFormat("[MAP IMAGE HANDLER]: IP address {0} may be rogue. Region not found at coordinates {1}-{2}", ipAddr, x, y); return FailureResult("Region not found at given coordinates"); } } byte[] data = Convert.FromBase64String(request["DATA"].ToString()); string reason = string.Empty; bool result = m_MapService.AddMapTile((int)x, (int)y, data, scopeID, out reason); if (result) return SuccessResult(); else return FailureResult(reason); } catch (Exception e) { m_log.ErrorFormat("[MAP SERVICE IMAGE HANDLER]: Exception {0} {1}", e.Message, e.StackTrace); } return FailureResult("Unexpected server error"); } private byte[] SuccessResult() { XmlDocument doc = new XmlDocument(); XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); doc.AppendChild(xmlnode); XmlElement rootElement = doc.CreateElement("", "ServerResponse", ""); doc.AppendChild(rootElement); XmlElement result = doc.CreateElement("", "Result", ""); result.AppendChild(doc.CreateTextNode("Success")); rootElement.AppendChild(result); return Util.DocToBytes(doc); } private byte[] FailureResult(string msg) { XmlDocument doc = new XmlDocument(); XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); doc.AppendChild(xmlnode); XmlElement rootElement = doc.CreateElement("", "ServerResponse", ""); doc.AppendChild(rootElement); XmlElement result = doc.CreateElement("", "Result", ""); result.AppendChild(doc.CreateTextNode("Failure")); rootElement.AppendChild(result); XmlElement message = doc.CreateElement("", "Message", ""); message.AppendChild(doc.CreateTextNode(msg)); rootElement.AppendChild(message); return Util.DocToBytes(doc); } private System.Net.IPAddress GetCallerIP(IOSHttpRequest request) { if (!m_Proxy) return request.RemoteIPEndPoint.Address; // We're behind a proxy string xff = "X-Forwarded-For"; string xffValue = request.Headers[xff.ToLower()]; if (xffValue == null || (xffValue != null && xffValue == string.Empty)) xffValue = request.Headers[xff]; if (xffValue == null 
|| (xffValue != null && xffValue == string.Empty)) { m_log.WarnFormat("[MAP IMAGE HANDLER]: No XFF header"); return request.RemoteIPEndPoint.Address; } System.Net.IPEndPoint ep = Util.GetClientIPFromXFF(xffValue); if (ep != null) return ep.Address; // Oops return request.RemoteIPEndPoint.Address; } } }
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
using System.Management;
using System.Resources;

namespace HWD
{
    // Delegate used by detail controls to marshal ListViewItem additions.
    public delegate void addItemDelegate(ListViewItem lvitem);

    /// <summary>
    /// Modal "Details" dialog showing hardware/software/process/etc. tabs for
    /// a single remote machine, queried over WMI. The machine name is supplied
    /// via the public <c>name</c> field before the form is shown.
    /// </summary>
    public class Details : System.Windows.Forms.Form
    {
        private HWD.DetailsControls.PortScan portscan;
        private HWD.DetailsControls.Performance perf;
        private HWD.DetailsControls.SharedItems shareditems;
        private HWD.DetailsControls.EventLog eventlog;
        private HWD.DetailsControls.HotFix hotfixc;
        private HWD.DetailsControls.Process proc;
        private HWD.DetailsControls.Services services;
        private HWD.DetailsControls.Software software;
        private HWD.DetailsControls.Hardware hardware;

        public string name;          // target machine name; must be set before Load
        public string computer;
        public bool hotfixfile;      // when true the Hotfixes tab is shown

        // NOTE(review): credentials are held in plain static strings and shared
        // process-wide; consider a safer credential store.
        public static bool anotherLogin;
        public static string username;
        public static string password;
        public static string insys;  // machine currently being inspected (used by Consulta)

        private System.ComponentModel.IContainer components;
        private ResourceManager m_ResourceManager;
        private Crownwood.DotNetMagic.Controls.TabControl tabControl2;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage1;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage5;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage8;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage3;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage9;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage2;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage4;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage6;
        private Crownwood.DotNetMagic.Controls.TabPage tabPage7;
        private Crownwood.DotNetMagic.Controls.TitleBar titleBar1;
        private Crownwood.DotNetMagic.Controls.ButtonWithStyle button1;
        private Crownwood.DotNetMagic.Controls.ButtonWithStyle btnCancel;

        /// <summary>Injects the localization resource manager used by all tabs.</summary>
        public ResourceManager rsxmgr
        {
            set
            {
                this.m_ResourceManager = value;
            }
        }

        public Details()
        {
            // Reduce flicker while tab contents repaint.
            SetStyle(ControlStyles.DoubleBuffer, true);
            SetStyle(ControlStyles.AllPaintingInWmPaint, true);
            InitializeComponent();
        }

        /// <summary>Clean up any resources being used.</summary>
        protected override void Dispose( bool disposing )
        {
            if( disposing )
            {
                if(components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose( disposing );
        }

        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(Details));
            this.tabControl2 = new Crownwood.DotNetMagic.Controls.TabControl();
            this.tabPage1 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage5 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage2 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage8 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage3 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage9 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage4 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage6 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.tabPage7 = new Crownwood.DotNetMagic.Controls.TabPage();
            this.titleBar1 = new Crownwood.DotNetMagic.Controls.TitleBar();
            this.button1 = new Crownwood.DotNetMagic.Controls.ButtonWithStyle();
            this.btnCancel = new Crownwood.DotNetMagic.Controls.ButtonWithStyle();
            this.tabControl2.SuspendLayout();
            this.SuspendLayout();
            //
            // tabControl2
            //
            this.tabControl2.Appearance = Crownwood.DotNetMagic.Controls.VisualAppearance.MultiBox;
            this.tabControl2.BackColor = System.Drawing.SystemColors.Control;
            this.tabControl2.ButtonActiveColor = System.Drawing.Color.FromArgb(((System.Byte)(128)), ((System.Byte)(0)), ((System.Byte)(0)), ((System.Byte)(0)));
            this.tabControl2.ButtonInactiveColor = System.Drawing.Color.FromArgb(((System.Byte)(128)), ((System.Byte)(0)), ((System.Byte)(0)), ((System.Byte)(0)));
            this.tabControl2.Font = new System.Drawing.Font("Verdana", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.World);
            this.tabControl2.HotTextColor = System.Drawing.SystemColors.ActiveCaption;
            this.tabControl2.ImageList = null;
            this.tabControl2.Location = new System.Drawing.Point(0, 40);
            this.tabControl2.Name = "tabControl2";
            this.tabControl2.OfficeDockSides = false;
            this.tabControl2.OfficeHeaderBorder = true;
            this.tabControl2.OfficeStyle = Crownwood.DotNetMagic.Controls.OfficeStyle.LightEnhanced;
            this.tabControl2.SelectedIndex = 0;
            this.tabControl2.Size = new System.Drawing.Size(616, 320);
            this.tabControl2.TabIndex = 10;
            this.tabControl2.TabPages.AddRange(new Crownwood.DotNetMagic.Controls.TabPage[] {
                this.tabPage1,
                this.tabPage5,
                this.tabPage2,
                this.tabPage8,
                this.tabPage3,
                this.tabPage9,
                this.tabPage4,
                this.tabPage6,
                this.tabPage7});
            this.tabControl2.TextColor = System.Drawing.SystemColors.ControlText;
            this.tabControl2.TextInactiveColor = System.Drawing.Color.FromArgb(((System.Byte)(128)), ((System.Byte)(0)), ((System.Byte)(0)), ((System.Byte)(0)));
            this.tabControl2.TextTips = true;
            this.tabControl2.SelectionChanged += new Crownwood.DotNetMagic.Controls.SelectTabHandler(this.tabControl2_SelectionChanged);
            //
            // tabPage1
            //
            this.tabPage1.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage1.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage1.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage1.Location = new System.Drawing.Point(1, 1);
            this.tabPage1.Name = "tabPage1";
            this.tabPage1.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage1.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage1.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage1.Size = new System.Drawing.Size(614, 289);
            this.tabPage1.TabIndex = 3;
            this.tabPage1.Title = "Hardware";
            this.tabPage1.ToolTip = "Page";
            //
            // tabPage5
            //
            this.tabPage5.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage5.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage5.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage5.Location = new System.Drawing.Point(1, 1);
            this.tabPage5.Name = "tabPage5";
            this.tabPage5.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage5.Selected = false;
            this.tabPage5.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage5.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage5.Size = new System.Drawing.Size(614, 289);
            this.tabPage5.TabIndex = 4;
            this.tabPage5.Title = "Performance";
            this.tabPage5.ToolTip = "Page";
            //
            // tabPage2
            //
            this.tabPage2.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage2.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage2.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage2.Location = new System.Drawing.Point(1, 1);
            this.tabPage2.Name = "tabPage2";
            this.tabPage2.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage2.Selected = false;
            this.tabPage2.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage2.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage2.Size = new System.Drawing.Size(614, 289);
            this.tabPage2.TabIndex = 8;
            this.tabPage2.Title = "Software";
            this.tabPage2.ToolTip = "Page";
            //
            // tabPage8
            //
            this.tabPage8.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage8.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage8.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage8.Location = new System.Drawing.Point(1, 1);
            this.tabPage8.Name = "tabPage8";
            this.tabPage8.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage8.Selected = false;
            this.tabPage8.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage8.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage8.Size = new System.Drawing.Size(614, 289);
            this.tabPage8.TabIndex = 5;
            this.tabPage8.Title = "Hotfixes";
            this.tabPage8.ToolTip = "Page";
            //
            // tabPage3
            //
            this.tabPage3.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage3.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage3.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage3.Location = new System.Drawing.Point(1, 1);
            this.tabPage3.Name = "tabPage3";
            this.tabPage3.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage3.Selected = false;
            this.tabPage3.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage3.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage3.Size = new System.Drawing.Size(614, 289);
            this.tabPage3.TabIndex = 6;
            this.tabPage3.Title = "Processes";
            this.tabPage3.ToolTip = "Page";
            //
            // tabPage9
            //
            this.tabPage9.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage9.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage9.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage9.Location = new System.Drawing.Point(1, 1);
            this.tabPage9.Name = "tabPage9";
            this.tabPage9.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage9.Selected = false;
            this.tabPage9.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage9.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage9.Size = new System.Drawing.Size(614, 289);
            this.tabPage9.TabIndex = 7;
            this.tabPage9.Title = "Event Log";
            this.tabPage9.ToolTip = "Page";
            //
            // tabPage4
            //
            this.tabPage4.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage4.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage4.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage4.Location = new System.Drawing.Point(1, 1);
            this.tabPage4.Name = "tabPage4";
            this.tabPage4.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage4.Selected = false;
            this.tabPage4.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage4.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage4.Size = new System.Drawing.Size(614, 289);
            this.tabPage4.TabIndex = 9;
            this.tabPage4.Title = "Services";
            this.tabPage4.ToolTip = "Page";
            //
            // tabPage6
            //
            this.tabPage6.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage6.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage6.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage6.Location = new System.Drawing.Point(1, 1);
            this.tabPage6.Name = "tabPage6";
            this.tabPage6.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage6.Selected = false;
            this.tabPage6.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage6.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage6.Size = new System.Drawing.Size(614, 289);
            this.tabPage6.TabIndex = 10;
            this.tabPage6.Title = "Shared Items";
            this.tabPage6.ToolTip = "Page";
            //
            // tabPage7
            //
            this.tabPage7.InactiveBackColor = System.Drawing.Color.Empty;
            this.tabPage7.InactiveTextBackColor = System.Drawing.Color.Empty;
            this.tabPage7.InactiveTextColor = System.Drawing.Color.Empty;
            this.tabPage7.Location = new System.Drawing.Point(1, 1);
            this.tabPage7.Name = "tabPage7";
            this.tabPage7.SelectBackColor = System.Drawing.Color.Empty;
            this.tabPage7.Selected = false;
            this.tabPage7.SelectTextBackColor = System.Drawing.Color.Empty;
            this.tabPage7.SelectTextColor = System.Drawing.Color.Empty;
            this.tabPage7.Size = new System.Drawing.Size(614, 289);
            this.tabPage7.TabIndex = 11;
            this.tabPage7.Title = "Ports";
            this.tabPage7.ToolTip = "Page";
            //
            // titleBar1
            //
            this.titleBar1.GradientActiveColor = System.Drawing.Color.FromArgb(((System.Byte)(136)), ((System.Byte)(144)), ((System.Byte)(156)));
            this.titleBar1.GradientColoring = Crownwood.DotNetMagic.Controls.GradientColoring.LightBackToDarkBack;
            this.titleBar1.GradientDirection = Crownwood.DotNetMagic.Controls.GradientDirection.TopToBottom;
            this.titleBar1.GradientInactiveColor = System.Drawing.Color.FromArgb(((System.Byte)(230)), ((System.Byte)(231)), ((System.Byte)(228)));
            this.titleBar1.Icon = ((System.Drawing.Icon)(resources.GetObject("titleBar1.Icon")));
            this.titleBar1.Location = new System.Drawing.Point(0, 0);
            this.titleBar1.MouseOverColor = System.Drawing.Color.Empty;
            this.titleBar1.Name = "titleBar1";
            this.titleBar1.PostText = "Online";
            this.titleBar1.PreText = "Details";
            this.titleBar1.Size = new System.Drawing.Size(616, 40);
            this.titleBar1.TabIndex = 11;
            this.titleBar1.Text = "-";
            //
            // button1
            //
            this.button1.Location = new System.Drawing.Point(448, 368);
            this.button1.Name = "button1";
            this.button1.Size = new System.Drawing.Size(72, 32);
            this.button1.TabIndex = 12;
            this.button1.Text = "&OK";
            this.button1.Click += new System.EventHandler(this.button1_Click);
            //
            // btnCancel
            //
            this.btnCancel.Location = new System.Drawing.Point(536, 368);
            this.btnCancel.Name = "btnCancel";
            this.btnCancel.Size = new System.Drawing.Size(72, 32);
            this.btnCancel.TabIndex = 13;
            this.btnCancel.Text = "Cancel";
            this.btnCancel.Click += new System.EventHandler(this.btnCancel_Click);
            //
            // Details
            //
            this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
            this.BackColor = System.Drawing.SystemColors.Control;
            this.ClientSize = new System.Drawing.Size(616, 404);
            this.Controls.Add(this.btnCancel);
            this.Controls.Add(this.button1);
            this.Controls.Add(this.titleBar1);
            this.Controls.Add(this.tabControl2);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
            this.MaximizeBox = false;
            this.MaximumSize = new System.Drawing.Size(622, 432);
            this.MinimizeBox = false;
            this.MinimumSize = new System.Drawing.Size(622, 432);
            this.Name = "Details";
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "HWD - Details";
            this.Load += new System.EventHandler(this.Details_Load);
            this.tabControl2.ResumeLayout(false);
            this.ResumeLayout(false);
        }
        #endregion

        /// <summary>
        /// Probes WMI connectivity to the target machine and, if reachable,
        /// loads all the tab controls. Aborts the dialog on connection failure.
        /// </summary>
        private void Details_Load(object sender, System.EventArgs e)
        {
            this.titleBar1.Text = this.name;
            HWD.Details.insys = this.name;

            ConnectionOptions co = new ConnectionOptions();
            // Guard against a null username when alternate credentials were requested.
            if (HWD.Details.anotherLogin && !string.IsNullOrEmpty(HWD.Details.username))
            {
                co.Username = HWD.Details.username;
                co.Password = HWD.Details.password;
            }
            System.Management.ManagementScope ms = new System.Management.ManagementScope("\\\\" + this.name + "\\root\\cimv2", co);

            try
            {
                // Connectivity probe only; the result is intentionally unused.
                ManagementObjectCollection queryCollection = new ManagementObjectSearcher(ms, new ObjectQuery("SELECT * FROM Win32_NetworkAdapter")).Get();
            }
            catch
            {
                MessageBox.Show(this, "Failed connection to system.", "Warning", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                this.DialogResult = DialogResult.Abort;
            }

            this.btnCancel.Text = m_ResourceManager.GetString("dbtnCancel");
            this.LoadMods();
        }

        /// <summary>Updates the status text in the title bar and repaints immediately.</summary>
        private void ChangeStatus(string text)
        {
            this.titleBar1.PostText = text;
            this.Update();
        }

        /// <summary>
        /// Instantiates every detail control, docks it into its tab page, wires
        /// the localization resource manager and status callbacks. The Hotfixes
        /// tab is removed entirely when no hotfix file is available.
        /// </summary>
        private void LoadMods()
        {
            this.titleBar1.PostText = "Loading mods...";

            this.portscan = new HWD.DetailsControls.PortScan();
            this.portscan.Location = new System.Drawing.Point(0, 0);
            this.portscan.Name = "portscan";
            this.portscan.Size = new System.Drawing.Size(614, 289);
            this.portscan.Dock = DockStyle.Fill;
            this.tabPage7.Controls.Add(this.portscan);
            this.tabPage7.Text = m_ResourceManager.GetString("dtabPage7");
            this.portscan.rsxmgr = this.m_ResourceManager;

            this.perf = new HWD.DetailsControls.Performance();
            this.perf.Location = new System.Drawing.Point(0, 0);
            this.perf.Name = "perf";
            this.perf.Size = new System.Drawing.Size(614, 289);
            this.perf.Dock = DockStyle.Fill;
            this.tabPage5.Controls.Add(this.perf);
            this.tabPage5.Text = m_ResourceManager.GetString("dtabPage5");
            this.perf.rsxmgr = this.m_ResourceManager;

            this.shareditems = new HWD.DetailsControls.SharedItems();
            this.shareditems.Location = new System.Drawing.Point(0, 0);
            this.shareditems.Name = "shareditems";
            this.shareditems.Size = new System.Drawing.Size(614, 289);
            this.shareditems.Dock = DockStyle.Fill;
            this.tabPage6.Controls.Add(this.shareditems);
            this.tabPage6.Text = m_ResourceManager.GetString("dtabPage6");
            this.shareditems.rsxmgr = this.m_ResourceManager;

            this.eventlog = new HWD.DetailsControls.EventLog();
            this.eventlog.Location = new System.Drawing.Point(0, 0);
            this.eventlog.Name = "eventlog";
            this.eventlog.Size = new System.Drawing.Size(614, 289);
            this.eventlog.Dock = DockStyle.Fill;
            this.tabPage9.Controls.Add(this.eventlog);
            this.tabPage9.Text = m_ResourceManager.GetString("dtabPage9");
            this.eventlog.rsxmgr = this.m_ResourceManager;
            this.eventlog.ChangeStatus += new HWD.DetailsControls.EventLog.Status(this.ChangeStatus);

            if (this.hotfixfile)
            {
                this.hotfixc = new HWD.DetailsControls.HotFix();
                this.hotfixc.Location = new System.Drawing.Point(0, 0);
                this.hotfixc.Name = "hotfixc";
                this.hotfixc.Size = new System.Drawing.Size(614, 289);
                this.hotfixc.Dock = DockStyle.Fill;
                this.tabPage8.Controls.Add(this.hotfixc);
                this.tabPage8.Text = m_ResourceManager.GetString("dtabPage8");
                this.hotfixc.rsxmgr = this.m_ResourceManager;
                this.hotfixc.ChangeStatus += new HWD.DetailsControls.HotFix.Status(this.ChangeStatus);
            }
            else
            {
                this.tabControl2.TabPages.Remove(this.tabPage8);
            }

            // NOTE(review): unlike the other tabs, tabPage3's Text is never
            // localized and proc.rsxmgr is never set -- confirm whether the
            // Process control supports them before adding.
            this.proc = new HWD.DetailsControls.Process();
            this.proc.Location = new System.Drawing.Point(0, 0);
            this.proc.Name = "proc";
            this.proc.Size = new System.Drawing.Size(614, 289);
            this.proc.Dock = DockStyle.Fill;
            this.tabPage3.Controls.Add(this.proc);
            this.proc.ChangeStatus += new HWD.DetailsControls.Process.Status(this.ChangeStatus);

            this.services = new HWD.DetailsControls.Services();
            this.services.Location = new System.Drawing.Point(0, 0);
            // Fixed copy-paste bug: this control was previously named "portscan".
            this.services.Name = "services";
            this.services.Size = new System.Drawing.Size(614, 289);
            this.services.Dock = DockStyle.Fill;
            this.tabPage4.Controls.Add(this.services);
            this.tabPage4.Text = m_ResourceManager.GetString("dtabPage4");
            this.services.rsxmgr = this.m_ResourceManager;
            this.services.ChangeStatus += new HWD.DetailsControls.Services.Status(this.ChangeStatus);

            this.software = new HWD.DetailsControls.Software();
            this.software.Location = new System.Drawing.Point(0, 0);
            this.software.Name = "software";
            this.software.Size = new System.Drawing.Size(614, 289);
            this.software.Dock = DockStyle.Fill;
            this.tabPage2.Controls.Add(this.software);
            this.software.rsxmgr = this.m_ResourceManager;
            this.software.ChangeStatus += new HWD.DetailsControls.Software.Status(this.ChangeStatus);

            this.hardware = new HWD.DetailsControls.Hardware();
            this.hardware.Location = new System.Drawing.Point(0, 0);
            this.hardware.Name = "hardware";
            this.hardware.Size = new System.Drawing.Size(614, 289);
            this.hardware.Dock = DockStyle.Fill;
            this.tabPage1.Controls.Add(this.hardware);
            this.hardware.rsxmgr = this.m_ResourceManager;
            this.hardware.ChangeStatus += new HWD.DetailsControls.Hardware.Status(this.ChangeStatus);

            this.titleBar1.PostText = "Online";
        }

        private void button1_Click(object sender, System.EventArgs e)
        {
            this.Close();
        }

        /// <summary>
        /// Runs a WQL query against the machine currently stored in
        /// <see cref="insys"/>, applying the shared alternate credentials when
        /// configured. Returns null when the query fails (best-effort).
        /// </summary>
        public static System.Management.ManagementObjectCollection Consulta(string strQuery)
        {
            ManagementObjectCollection queryCollection;
            try
            {
                ConnectionOptions co = new ConnectionOptions();
                if (HWD.Details.anotherLogin && !string.IsNullOrEmpty(HWD.Details.username))
                {
                    co.Username = HWD.Details.username;
                    co.Password = HWD.Details.password;
                }
                System.Management.ManagementScope ms = new System.Management.ManagementScope("\\\\" + HWD.Details.insys + "\\root\\cimv2", co);
                queryCollection = new ManagementObjectSearcher(ms, new System.Management.ObjectQuery(strQuery)).Get();
            }
            catch
            {
                // Deliberate best-effort: callers treat null as "query failed".
                queryCollection = null;
            }
            return queryCollection;
        }

        private void btnCancel_Click(object sender, System.EventArgs e)
        {
            this.DialogResult = DialogResult.Cancel;
        }

        // Required by the designer-wired SelectionChanged event; intentionally empty.
        private void tabControl2_SelectionChanged(Crownwood.DotNetMagic.Controls.TabControl sender, Crownwood.DotNetMagic.Controls.TabPage oldPage, Crownwood.DotNetMagic.Controls.TabPage newPage)
        {
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ // // A barrier allows multiple tasks to cooperatively work on some algorithm in parallel. // A group of tasks cooperate by moving through a series of phases, where each in the group signals it has arrived at // the barrier in a given phase and implicitly waits for all others to arrive. // The same barrier can be used for multiple phases. // // =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- using System.Diagnostics; using System.Runtime.Serialization; using System.Security; namespace System.Threading { /// <summary> /// The exception that is thrown when the post-phase action of a <see cref="Barrier"/> fails. /// </summary> public class BarrierPostPhaseException : Exception { /// <summary> /// Initializes a new instance of the <see cref="BarrierPostPhaseException"/> class. /// </summary> public BarrierPostPhaseException() : this((string)null) { } /// <summary> /// Initializes a new instance of the <see cref="BarrierPostPhaseException"/> class with the specified inner exception. /// </summary> /// <param name="innerException">The exception that is the cause of the current exception.</param> public BarrierPostPhaseException(Exception innerException) : this(null, innerException) { } /// <summary> /// Initializes a new instance of the <see cref="BarrierPostPhaseException"/> class with a specified error message. /// </summary> /// <param name="message">A string that describes the exception.</param> public BarrierPostPhaseException(string message) : this(message, null) { } /// <summary> /// Initializes a new instance of the <see cref="BarrierPostPhaseException"/> class with a specified error message and inner exception. 
/// </summary> /// <param name="message">A string that describes the exception.</param> /// <param name="innerException">The exception that is the cause of the current exception.</param> public BarrierPostPhaseException(string message, Exception innerException) : base(message == null ? SR.BarrierPostPhaseException : message, innerException) { } /// <summary> /// Initializes a new instance of the BarrierPostPhaseException class with serialized data. /// </summary> /// <param name="info">The object that holds the serialized object data.</param> /// <param name="context">The contextual information about the source or destination.</param> protected BarrierPostPhaseException(SerializationInfo info, StreamingContext context) : base(info, context) { } } /// <summary> /// Enables multiple tasks to cooperatively work on an algorithm in parallel through multiple phases. /// </summary> /// <remarks> /// <para> /// A group of tasks cooperate by moving through a series of phases, where each in the group signals it /// has arrived at the <see cref="Barrier"/> in a given phase and implicitly waits for all others to /// arrive. The same <see cref="Barrier"/> can be used for multiple phases. /// </para> /// <para> /// All public and protected members of <see cref="Barrier"/> are thread-safe and may be used /// concurrently from multiple threads, with the exception of Dispose, which /// must only be used when all other operations on the <see cref="Barrier"/> have /// completed. 
/// </para> /// </remarks> [DebuggerDisplay("Participant Count={ParticipantCount},Participants Remaining={ParticipantsRemaining}")] public class Barrier : IDisposable { //This variable holds the basic barrier variables: // 1- The current participants count // 2- The total participants count // 3- The sense flag (true if the current phase is even, false otherwise) // The first 15 bits are for the total count which means the maximum participants for the barrier is about 32K // The 16th bit is dummy // The next 15th bit for the current // And the last highest bit is for the sense private volatile int _currentTotalCount; // Bitmask to extract the current count private const int CURRENT_MASK = 0x7FFF0000; // Bitmask to extract the total count private const int TOTAL_MASK = 0x00007FFF; // Bitmask to extract the sense flag private const int SENSE_MASK = unchecked((int)0x80000000); // The maximum participants the barrier can operate = 32767 ( 2 power 15 - 1 ) private const int MAX_PARTICIPANTS = TOTAL_MASK; // The current barrier phase // We don't need to worry about overflow, the max value is 2^63-1; If it starts from 0 at a // rate of 4 billion increments per second, it will takes about 64 years to overflow. 
private long _currentPhase; // dispose flag private bool _disposed; // Odd phases event private ManualResetEventSlim _oddEvent; // Even phases event private ManualResetEventSlim _evenEvent; // The execution context of the creator thread private ExecutionContext _ownerThreadContext; // The EC callback that invokes the post phase action [SecurityCritical] private static ContextCallback s_invokePostPhaseAction; // Post phase action after each phase private Action<Barrier> _postPhaseAction; // In case the post phase action throws an exception, wraps it in BarrierPostPhaseException private Exception _exception; // This is the ManagedThreadID of the postPhaseAction caller thread, this is used to determine if the SignalAndWait, Dispose or Add/RemoveParticipant caller thread is // the same thread as the postPhaseAction thread which means this method was called from the postPhaseAction which is illegal. // This value is captured before calling the action and reset back to zero after it. private int _actionCallerID; #region Properties /// <summary> /// Gets the number of participants in the barrier that haven't yet signaled /// in the current phase. /// </summary> /// <remarks> /// This could be 0 during a post-phase action delegate execution or if the /// ParticipantCount is 0. /// </remarks> public int ParticipantsRemaining { get { int currentTotal = _currentTotalCount; int total = (int)(currentTotal & TOTAL_MASK); int current = (int)((currentTotal & CURRENT_MASK) >> 16); return total - current; } } /// <summary> /// Gets the total number of participants in the barrier. /// </summary> public int ParticipantCount { get { return (int)(_currentTotalCount & TOTAL_MASK); } } /// <summary> /// Gets the number of the barrier's current phase. 
/// </summary> public long CurrentPhaseNumber { // use the new Volatile.Read/Write method because it is cheaper than Interlocked.Read on AMD64 architecture get { return Volatile.Read(ref _currentPhase); } internal set { Volatile.Write(ref _currentPhase, value); } } #endregion /// <summary> /// Initializes a new instance of the <see cref="Barrier"/> class. /// </summary> /// <param name="participantCount">The number of participating threads.</param> /// <exception cref="ArgumentOutOfRangeException"> <paramref name="participantCount"/> is less than 0 /// or greater than <see cref="T:System.Int16.MaxValue"/>.</exception> public Barrier(int participantCount) : this(participantCount, null) { } /// <summary> /// Initializes a new instance of the <see cref="Barrier"/> class. /// </summary> /// <param name="participantCount">The number of participating threads.</param> /// <param name="postPhaseAction">The <see cref="T:System.Action`1"/> to be executed after each /// phase.</param> /// <exception cref="T:System.ArgumentOutOfRangeException"> <paramref name="participantCount"/> is less than 0 /// or greater than <see cref="T:System.Int32.MaxValue"/>.</exception> /// <remarks> /// The <paramref name="postPhaseAction"/> delegate will be executed after /// all participants have arrived at the barrier in one phase. The participants /// will not be released to the next phase until the postPhaseAction delegate /// has completed execution. 
/// </remarks>
public Barrier(int participantCount, Action<Barrier> postPhaseAction)
{
    // the count must be non negative value
    if (participantCount < 0 || participantCount > MAX_PARTICIPANTS)
    {
        throw new ArgumentOutOfRangeException(nameof(participantCount), participantCount, SR.Barrier_ctor_ArgumentOutOfRange);
    }
    _currentTotalCount = (int)participantCount;
    _postPhaseAction = postPhaseAction;

    // Initialize the phase events. Phase 0 is even, so the even event starts
    // unset (participants of phase 0 will wait on it) and the odd event
    // starts set.
    _oddEvent = new ManualResetEventSlim(true);
    _evenEvent = new ManualResetEventSlim(false);

    // Capture the context if the post phase action is not null
    if (postPhaseAction != null)
    {
        _ownerThreadContext = ExecutionContext.Capture();
    }

    _actionCallerID = 0;
}

/// <summary>
/// Extract the three variables current, total and sense from a given big variable
/// </summary>
/// <param name="currentTotal">The integer variable that contains the other three variables</param>
/// <param name="current">The current participant count</param>
/// <param name="total">The total participants count</param>
/// <param name="sense">The sense flag</param>
private void GetCurrentTotal(int currentTotal, out int current, out int total, out bool sense)
{
    total = (int)(currentTotal & TOTAL_MASK);
    current = (int)((currentTotal & CURRENT_MASK) >> 16);
    // Sense bit clear => "true" sense (even phase); set => "false" sense.
    sense = (currentTotal & SENSE_MASK) == 0 ? true : false;
}

/// <summary>
/// Write the three variables current, total and the sense to the m_currentTotal
/// via a single CAS, so concurrent updaters never observe a half-written state.
/// </summary>
/// <param name="currentTotal">The old current total to compare</param>
/// <param name="current">The current participant count</param>
/// <param name="total">The total participants count</param>
/// <param name="sense">The sense flag</param>
/// <returns>True if the CAS succeeded, false otherwise</returns>
private bool SetCurrentTotal(int currentTotal, int current, int total, bool sense)
{
    // Pack: arrived count in the upper bits (<< 16), total in the low bits,
    // plus the sense bit — the inverse of GetCurrentTotal.
    int newCurrentTotal = (current << 16) | total;

    if (!sense)
    {
        newCurrentTotal |= SENSE_MASK;
    }

#pragma warning disable 0420
    return Interlocked.CompareExchange(ref _currentTotalCount, newCurrentTotal, currentTotal) == currentTotal;
#pragma warning restore 0420
}

/// <summary>
/// Notifies the <see cref="Barrier"/> that there will be an additional participant.
/// </summary>
/// <returns>The phase number of the barrier in which the new participants will first
/// participate.</returns>
/// <exception cref="T:System.InvalidOperationException">
/// Adding a participant would cause the barrier's participant count to
/// exceed <see cref="T:System.Int16.MaxValue"/>.
/// </exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action.
/// </exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public long AddParticipant()
{
    try
    {
        return AddParticipants(1);
    }
    catch (ArgumentOutOfRangeException)
    {
        // The only out-of-range failure for AddParticipants(1) is overflow of
        // the participant count; surface it as InvalidOperationException per
        // the documented contract of this overload.
        throw new InvalidOperationException(SR.Barrier_AddParticipants_Overflow_ArgumentOutOfRange);
    }
}

/// <summary>
/// Notifies the <see cref="Barrier"/> that there will be additional participants.
/// </summary>
/// <param name="participantCount">The number of additional participants to add to the
/// barrier.</param>
/// <returns>The phase number of the barrier in which the new participants will first
/// participate.</returns>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="participantCount"/> is less than
/// 0.</exception>
/// <exception cref="T:System.ArgumentOutOfRangeException">Adding <paramref name="participantCount"/> participants would cause the
/// barrier's participant count to exceed <see cref="T:System.Int16.MaxValue"/>.</exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action.
/// </exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public long AddParticipants(int participantCount)
{
    // check dispose
    ThrowIfDisposed();

    if (participantCount < 1)
    {
        throw new ArgumentOutOfRangeException(nameof(participantCount), participantCount,
            SR.Barrier_AddParticipants_NonPositive_ArgumentOutOfRange);
    }
    else if (participantCount > MAX_PARTICIPANTS) //overflow
    {
        throw new ArgumentOutOfRangeException(nameof(participantCount),
            SR.Barrier_AddParticipants_Overflow_ArgumentOutOfRange);
    }

    // in case of this is called from the PHA
    if (_actionCallerID != 0 && Environment.CurrentManagedThreadId == _actionCallerID)
    {
        throw new InvalidOperationException(SR.Barrier_InvalidOperation_CalledFromPHA);
    }

    SpinWait spinner = new SpinWait();
    long newPhase = 0;
    // CAS loop: keep re-reading the packed state and retrying until we manage
    // to publish the increased total atomically.
    while (true)
    {
        int currentTotal = _currentTotalCount;
        int total;
        int current;
        bool sense;
        GetCurrentTotal(currentTotal, out current, out total, out sense);
        if (participantCount + total > MAX_PARTICIPANTS) //overflow
        {
            throw new ArgumentOutOfRangeException(nameof(participantCount),
                SR.Barrier_AddParticipants_Overflow_ArgumentOutOfRange);
        }

        if (SetCurrentTotal(currentTotal, current, total + participantCount, sense))
        {
            // Calculating the first phase for that participant: if the current phase
            // already finished, return the next phase; else return the current phase.
            // The phase is considered finished when the observed sense no longer
            // matches the phase number's parity (the last arriver flips the sense
            // before incrementing the phase count), in which case currPhase + 1 is
            // returned; otherwise currPhase is returned.
            long currPhase = CurrentPhaseNumber;
            newPhase = (sense != (currPhase % 2 == 0)) ? currPhase + 1 : currPhase;

            // If this participant is going to join the next phase, which means the postPhaseAction is being running, this participant must wait until this is done
            // and its event is reset.
            // Without that, if the postPhaseAction takes a long time, the event that the current participant is going to wait on is still set
            // (FinishPhase didn't reset it yet) so it should wait until it is reset
            if (newPhase != currPhase)
            {
                // Wait on the opposite event
                if (sense)
                {
                    _oddEvent.Wait();
                }
                else
                {
                    _evenEvent.Wait();
                }
            }
            // This else fixes the race where the current phase has been finished and m_currentPhase has been updated, but the events have not been set/reset yet;
            // otherwise when this participant calls SignalAndWait it would wait on an already-set event even though the other participants have not arrived yet.
            else
            {
                if (sense && _evenEvent.IsSet)
                    _evenEvent.Reset();
                else if (!sense && _oddEvent.IsSet)
                    _oddEvent.Reset();
            }
            break;
        }
        spinner.SpinOnce();
    }
    return newPhase;
}

/// <summary>
/// Notifies the <see cref="Barrier"/> that there will be one less participant.
/// </summary>
/// <exception cref="T:System.InvalidOperationException">The barrier already has 0
/// participants.</exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action.
/// </exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public void RemoveParticipant()
{
    RemoveParticipants(1);
}

/// <summary>
/// Notifies the <see cref="Barrier"/> that there will be fewer participants.
/// </summary>
/// <param name="participantCount">The number of additional participants to remove from the barrier.</param>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="participantCount"/> is less than
/// 0.</exception>
/// <exception cref="T:System.InvalidOperationException">The barrier already has 0 participants.</exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action.
/// </exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public void RemoveParticipants(int participantCount)
{
    // check dispose
    ThrowIfDisposed();

    // Validate input
    if (participantCount < 1)
    {
        throw new ArgumentOutOfRangeException(nameof(participantCount), participantCount,
            SR.Barrier_RemoveParticipants_NonPositive_ArgumentOutOfRange);
    }

    // in case of this is called from the PHA
    if (_actionCallerID != 0 && Environment.CurrentManagedThreadId == _actionCallerID)
    {
        throw new InvalidOperationException(SR.Barrier_InvalidOperation_CalledFromPHA);
    }

    // CAS loop: retry until the decreased total is published atomically.
    SpinWait spinner = new SpinWait();
    while (true)
    {
        int currentTotal = _currentTotalCount;
        int total;
        int current;
        bool sense;
        GetCurrentTotal(currentTotal, out current, out total, out sense);

        if (total < participantCount)
        {
            throw new ArgumentOutOfRangeException(nameof(participantCount),
                SR.Barrier_RemoveParticipants_ArgumentOutOfRange);
        }

        if (total - participantCount < current)
        {
            throw new InvalidOperationException(SR.Barrier_RemoveParticipants_InvalidOperation);
        }
        // If the remaining participants = current participants, then finish the current phase
        // (the removed participants were the only ones not yet arrived).
        int remaingParticipants = total - participantCount; // NOTE(review): "remaing" is a long-standing typo in the identifier
        if (remaingParticipants > 0 && current == remaingParticipants)
        {
            // Publish arrived = 0 with flipped sense, then release the waiters.
            if (SetCurrentTotal(currentTotal, 0, total - participantCount, !sense))
            {
                FinishPhase(sense);
                break;
            }
        }
        else
        {
            if (SetCurrentTotal(currentTotal, current, total - participantCount, sense))
            {
                break;
            }
        }
        spinner.SpinOnce();
    }
}

/// <summary>
/// Signals that a participant has reached the <see cref="Barrier"/> and waits for all other
/// participants to reach the barrier as well.
/// </summary>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action, the barrier currently has 0 participants,
/// or the barrier is being used by more threads than are registered as participants.
/// </exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public void SignalAndWait()
{
    SignalAndWait(new CancellationToken());
}

/// <summary>
/// Signals that a participant has reached the <see cref="Barrier"/> and waits for all other
/// participants to reach the barrier, while observing a <see
/// cref="T:System.Threading.CancellationToken"/>.
/// </summary>
/// <param name="cancellationToken">The <see cref="T:System.Threading.CancellationToken"/> to
/// observe.</param>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action, the barrier currently has 0 participants,
/// or the barrier is being used by more threads than are registered as participants.
/// </exception>
/// <exception cref="T:System.OperationCanceledException"><paramref name="cancellationToken"/> has been
/// canceled.</exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public void SignalAndWait(CancellationToken cancellationToken)
{
    // Infinite wait cannot time out; assert that in debug builds only so the
    // release build pays nothing for the check.
#if DEBUG
    bool result =
#endif
    SignalAndWait(Timeout.Infinite, cancellationToken);
#if DEBUG
    Debug.Assert(result);
#endif
}

/// <summary>
/// Signals that a participant has reached the <see cref="Barrier"/> and waits for all other
/// participants to reach the barrier as well, using a
/// <see cref="T:System.TimeSpan"/> to measure the time interval.
/// </summary>
/// <param name="timeout">A <see cref="T:System.TimeSpan"/> that represents the number of
/// milliseconds to wait, or a <see cref="T:System.TimeSpan"/> that represents -1 milliseconds to
/// wait indefinitely.</param>
/// <returns>true if all other participants reached the barrier; otherwise, false.</returns>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="timeout"/>is a negative number
/// other than -1 milliseconds, which represents an infinite time-out, or it is greater than
/// <see cref="T:System.Int32.MaxValue"/>.</exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action, the barrier currently has 0 participants,
/// or the barrier is being used by more threads than are registered as participants.
/// </exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public Boolean SignalAndWait(TimeSpan timeout)
{
    return SignalAndWait(timeout, new CancellationToken());
}

/// <summary>
/// Signals that a participant has reached the <see cref="Barrier"/> and waits for all other
/// participants to reach the barrier as well, using a
/// <see cref="T:System.TimeSpan"/> to measure the time interval, while observing a <see
/// cref="T:System.Threading.CancellationToken"/>.
/// </summary>
/// <param name="timeout">A <see cref="T:System.TimeSpan"/> that represents the number of
/// milliseconds to wait, or a <see cref="T:System.TimeSpan"/> that represents -1 milliseconds to
/// wait indefinitely.</param>
/// <param name="cancellationToken">The <see cref="T:System.Threading.CancellationToken"/> to
/// observe.</param>
/// <returns>true if all other participants reached the barrier; otherwise, false.</returns>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="timeout"/>is a negative number
/// other than -1 milliseconds, which represents an infinite time-out.</exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action, the barrier currently has 0 participants,
/// or the barrier is being used by more threads than are registered as participants.
/// </exception>
/// <exception cref="T:System.OperationCanceledException"><paramref name="cancellationToken"/> has been
/// canceled.</exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public Boolean SignalAndWait(TimeSpan timeout, CancellationToken cancellationToken)
{
    // Range-check in the 64-bit domain before narrowing to an int of milliseconds.
    Int64 totalMilliseconds = (Int64)timeout.TotalMilliseconds;
    if (totalMilliseconds < -1 || totalMilliseconds > int.MaxValue)
    {
        throw new System.ArgumentOutOfRangeException(nameof(timeout), timeout,
            SR.Barrier_SignalAndWait_ArgumentOutOfRange);
    }
    return SignalAndWait((int)timeout.TotalMilliseconds, cancellationToken);
}

/// <summary>
/// Signals that a participant has reached the <see cref="Barrier"/> and waits for all other
/// participants to reach the barrier as well, using a
/// 32-bit signed integer to measure the time interval.
/// </summary>
/// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see
/// cref="Timeout.Infinite"/>(-1) to wait indefinitely.</param>
/// <returns>true if all other participants reached the barrier; otherwise, false.</returns>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="millisecondsTimeout"/> is a
/// negative number other than -1, which represents an infinite time-out.</exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action, the barrier currently has 0 participants,
/// or the barrier is being used by more threads than are registered as participants.
/// </exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public bool SignalAndWait(int millisecondsTimeout)
{
    return SignalAndWait(millisecondsTimeout, new CancellationToken());
}

/// <summary>
/// Signals that a participant has reached the barrier and waits for all other participants to reach
/// the barrier as well, using a
/// 32-bit signed integer to measure the time interval, while observing a <see
/// cref="T:System.Threading.CancellationToken"/>.
/// </summary>
/// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see
/// cref="Timeout.Infinite"/>(-1) to wait indefinitely.</param>
/// <param name="cancellationToken">The <see cref="T:System.Threading.CancellationToken"/> to
/// observe.</param>
/// <returns>true if all other participants reached the barrier; otherwise, false.</returns>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="millisecondsTimeout"/> is a
/// negative number other than -1, which represents an infinite time-out.</exception>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action, the barrier currently has 0 participants,
/// or the barrier is being used by more threads than are registered as participants.
/// </exception>
/// <exception cref="T:System.OperationCanceledException"><paramref name="cancellationToken"/> has been
/// canceled.</exception>
/// <exception cref="T:System.ObjectDisposedException">The current instance has already been
/// disposed.</exception>
public bool SignalAndWait(int millisecondsTimeout, CancellationToken cancellationToken)
{
    ThrowIfDisposed();
    cancellationToken.ThrowIfCancellationRequested();

    if (millisecondsTimeout < -1)
    {
        throw new System.ArgumentOutOfRangeException(nameof(millisecondsTimeout), millisecondsTimeout,
            SR.Barrier_SignalAndWait_ArgumentOutOfRange);
    }

    // in case of this is called from the PHA
    if (_actionCallerID != 0 && Environment.CurrentManagedThreadId == _actionCallerID)
    {
        throw new InvalidOperationException(SR.Barrier_InvalidOperation_CalledFromPHA);
    }

    // local variables to extract the basic barrier variables and update them.
    // They are declared here instead of inside the loop body because they will be used outside the loop.
    bool sense; // The sense of the barrier *before* the phase associated with this SignalAndWait call completes
    int total;
    int current;
    int currentTotal;
    long phase;
    SpinWait spinner = new SpinWait();
    while (true)
    {
        currentTotal = _currentTotalCount;
        GetCurrentTotal(currentTotal, out current, out total, out sense);
        phase = CurrentPhaseNumber;
        // throw if zero participants
        if (total == 0)
        {
            throw new InvalidOperationException(SR.Barrier_SignalAndWait_InvalidOperation_ZeroTotal);
        }
        // Try to detect if the number of threads for this phase exceeded the total number of participants or not
        // This can be detected if the current is zero which means all participants for that phase have arrived and the phase number is not changed yet
        if (current == 0 && sense != (CurrentPhaseNumber % 2 == 0))
        {
            throw new InvalidOperationException(SR.Barrier_SignalAndWait_InvalidOperation_ThreadsExceeded);
        }
        //This is the last thread, finish the phase
        if (current + 1 == total)
        {
            // Publish arrived = 0 with flipped sense; if the CAS wins, this
            // thread is solely responsible for running FinishPhase.
            if (SetCurrentTotal(currentTotal, 0, total, !sense))
            {
#if !uapaot
                if (CdsSyncEtwBCLProvider.Log.IsEnabled())
                {
                    CdsSyncEtwBCLProvider.Log.Barrier_PhaseFinished(sense, CurrentPhaseNumber);
                }
#endif
                FinishPhase(sense);
                return true;
            }
        }
        else if (SetCurrentTotal(currentTotal, current + 1, total, sense))
        {
            break;
        }

        spinner.SpinOnce();
    }

    // ** Perform the real wait **
    // select the correct event to wait on, based on the current sense.
    ManualResetEventSlim eventToWaitOn = (sense) ? _evenEvent : _oddEvent;

    bool waitWasCanceled = false;
    bool waitResult = false;
    try
    {
        waitResult = DiscontinuousWait(eventToWaitOn, millisecondsTimeout, cancellationToken, phase);
    }
    catch (OperationCanceledException)
    {
        waitWasCanceled = true;
    }
    catch (ObjectDisposedException)// in case a race happened where one of the threads returned from SignalAndWait and the current thread calls Wait on a disposed event
    {
        // make sure the current phase for this thread is already finished, otherwise propagate the exception
        if (phase < CurrentPhaseNumber)
            waitResult = true;
        else
            throw;
    }

    if (!waitResult)
    {
        //reset the spinLock to prepare it for the next loop
        spinner.Reset();

        //If the wait timeout expired and all other threads didn't reach the barrier yet, update the current count back
        while (true)
        {
            bool newSense;
            currentTotal = _currentTotalCount;
            GetCurrentTotal(currentTotal, out current, out total, out newSense);
            // If the timeout expired and the phase has just finished, return true and this is considered as a succeeded SignalAndWait
            //otherwise the timeout expired and the current phase has not been finished yet, return false
            //The phase is finished if the phase member variable is changed (incremented) or the sense has been changed
            // we have to use the statements in the comparison below for two cases:
            // 1- The sense is changed but the last thread didn't update the phase yet
            // 2- The phase is already incremented but the sense flipped twice due to the termination of the next phase
            if (phase < CurrentPhaseNumber || sense != newSense)
            {
                // The current phase has been finished, but we shouldn't return before the events are set/reset, otherwise this thread could start
                // the next phase while the appropriate event has not been reset yet, which could make it return immediately from the next phase SignalAndWait
                // before waiting for other threads
                WaitCurrentPhase(eventToWaitOn, phase);
                Debug.Assert(phase < CurrentPhaseNumber);
                break;
            }
            //The phase has not been finished yet, try to update the current count.
            if (SetCurrentTotal(currentTotal, current - 1, total, sense))
            {
                //if here, then the attempt to back out was successful.
                //throw (a fresh) OCE if cancellation woke the wait
                //or return false if it was the timeout that woke the wait.
                //
                if (waitWasCanceled)
                    throw new OperationCanceledException(SR.Common_OperationCanceled, cancellationToken);
                else
                    return false;
            }
            spinner.SpinOnce();
        }
    }

    // The post-phase action may have thrown on the finishing thread; every
    // released waiter observes and rethrows it as BarrierPostPhaseException.
    if (_exception != null)
        throw new BarrierPostPhaseException(_exception);

    return true;
}

/// <summary>
/// Finish the phase by invoking the post phase action, and setting the event, this must be called by the
/// last arrival thread
/// </summary>
/// <param name="observedSense">The current phase sense</param>
[SecuritySafeCritical]
private void FinishPhase(bool observedSense)
{
    // Execute the PHA in a try/finally block to reset the variables back in case it threw an exception
    if (_postPhaseAction != null)
    {
        try
        {
            // Capture the caller thread ID to check if Add/RemoveParticipant(s) is called from the PHA
            _actionCallerID = Environment.CurrentManagedThreadId;
            if (_ownerThreadContext != null)
            {
                var currentContext = _ownerThreadContext;

                // Lazily cache the static callback delegate on first use.
                ContextCallback handler = s_invokePostPhaseAction;
                if (handler == null)
                {
                    s_invokePostPhaseAction = handler = InvokePostPhaseAction;
                }
                ExecutionContext.Run(_ownerThreadContext, handler, this);
            }
            else
            {
                _postPhaseAction(this);
            }
            _exception = null; // reset the exception if it was set previously
        }
        catch (Exception ex)
        {
            _exception = ex;
        }
        finally
        {
            _actionCallerID = 0;
            // Release the waiters even on failure; they will observe _exception.
            SetResetEvents(observedSense);
            if (_exception != null)
                throw new BarrierPostPhaseException(_exception);
        }
    }
    else
    {
        SetResetEvents(observedSense);
    }
}

/// <summary>
/// Helper method to call the post phase action
/// </summary>
/// <param name="obj">The <see cref="Barrier"/> instance whose action is invoked.</param>
[SecurityCritical]
private static void InvokePostPhaseAction(object obj)
{
    var thisBarrier = (Barrier)obj;
    thisBarrier._postPhaseAction(thisBarrier);
}

/// <summary>
/// Sets the current phase event and resets the next phase event
/// </summary>
/// <param name="observedSense">The current phase sense</param>
private void SetResetEvents(bool observedSense)
{
    // Increment the phase count using the Volatile class because m_currentPhase is a 64 bit long type, which could cause a torn write on 32 bit machines
    CurrentPhaseNumber = CurrentPhaseNumber + 1;
    // Reset the event the NEXT phase will wait on before releasing the
    // current phase's waiters — the order matters.
    if (observedSense)
    {
        _oddEvent.Reset();
        _evenEvent.Set();
    }
    else
    {
        _evenEvent.Reset();
        _oddEvent.Set();
    }
}

/// <summary>
/// Wait until the current phase finishes completely by spinning until either the event is set,
/// or the phase count is incremented more than one time
/// </summary>
/// <param name="currentPhaseEvent">The current phase event</param>
/// <param name="observedPhase">The current phase for that thread</param>
private void WaitCurrentPhase(ManualResetEventSlim currentPhaseEvent, long observedPhase)
{
    //spin until either of these two conditions succeeds
    //1- The event is set
    //2- the phase count is incremented more than one time, this means the next phase is finished as well,
    //but the event will be reset again, so we check the phase count instead
    SpinWait spinner = new SpinWait();
    while (!currentPhaseEvent.IsSet && CurrentPhaseNumber - observedPhase <= 1)
    {
        spinner.SpinOnce();
    }
}

/// <summary>
/// The reason for discontinuous waiting instead of waiting directly on the event is to avoid the race where the sense is
/// changed twice because the next phase is finished (due to either RemoveParticipant being called or another thread joining
/// the next phase instead of the current thread) so the current thread would be stuck on the
/// event because it is reset back.
/// The maxWait and the shift numbers are arbitrarily chosen, there were no references picking them
/// </summary>
/// <param name="currentPhaseEvent">The current phase event</param>
/// <param name="totalTimeout">wait timeout in milliseconds</param>
/// <param name="token">cancellation token passed to SignalAndWait</param>
/// <param name="observedPhase">The current phase number for this thread</param>
/// <returns>True if the event is set or the phase number changed, false if the timeout expired</returns>
private bool DiscontinuousWait(ManualResetEventSlim currentPhaseEvent, int totalTimeout, CancellationToken token, long observedPhase)
{
    int maxWait = 100; // 100 ms
    int waitTimeCeiling = 10000; // 10 seconds
    // Wait in bounded slices, re-checking the phase between slices so a
    // double sense-flip cannot strand us on a re-reset event.
    while (observedPhase == CurrentPhaseNumber)
    {
        // the next wait time, the min of the maxWait and the totalTimeout
        int waitTime = totalTimeout == Timeout.Infinite ? maxWait : Math.Min(maxWait, totalTimeout);

        if (currentPhaseEvent.Wait(waitTime, token))
            return true;

        //update the total wait time
        if (totalTimeout != Timeout.Infinite)
        {
            totalTimeout -= waitTime;
            if (totalTimeout <= 0)
                return false;
        }

        //if the maxWait exceeded 10 seconds then we will stop increasing the maxWait time and keep it at 10 seconds, otherwise keep doubling it
        maxWait = maxWait >= waitTimeCeiling ? waitTimeCeiling : Math.Min(maxWait << 1, waitTimeCeiling);
    }

    //if we exited the loop because the observed phase doesn't match the current phase, then we have to spin to make sure
    //the event is set or the next phase is finished
    WaitCurrentPhase(currentPhaseEvent, observedPhase);

    return true;
}

/// <summary>
/// Releases all resources used by the current instance of <see cref="Barrier"/>.
/// </summary>
/// <exception cref="T:System.InvalidOperationException">
/// The method was invoked from within a post-phase action.
/// </exception>
/// <remarks>
/// Unlike most of the members of <see cref="Barrier"/>, Dispose is not thread-safe and may not be
/// used concurrently with other members of this instance.
/// </remarks>
public void Dispose()
{
    // in case of this is called from the PHA
    if (_actionCallerID != 0 && Environment.CurrentManagedThreadId == _actionCallerID)
    {
        throw new InvalidOperationException(SR.Barrier_InvalidOperation_CalledFromPHA);
    }
    Dispose(true);
    GC.SuppressFinalize(this);
}

/// <summary>
/// When overridden in a derived class, releases the unmanaged resources used by the
/// <see cref="Barrier"/>, and optionally releases the managed resources.
/// </summary>
/// <param name="disposing">true to release both managed and unmanaged resources; false to release
/// only unmanaged resources.</param>
/// <remarks>
/// Unlike most of the members of <see cref="Barrier"/>, Dispose is not thread-safe and may not be
/// used concurrently with other members of this instance.
/// </remarks>
protected virtual void Dispose(bool disposing)
{
    if (!_disposed)
    {
        if (disposing)
        {
            _oddEvent.Dispose();
            _evenEvent.Dispose();
        }
        _disposed = true;
    }
}

/// <summary>
/// Throw ObjectDisposedException if the barrier is disposed
/// </summary>
private void ThrowIfDisposed()
{
    if (_disposed)
    {
        throw new ObjectDisposedException("Barrier", SR.Barrier_Dispose);
    }
}
}
}
using System.Collections.Generic;
using System.Linq;

namespace NuGet.Client.VisualStudio.UI
{
    // View model for the package detail pane in solution-level package management.
    // Update flow (via property setters):
    //   SelectedAction -> SelectedVersion
    //   SelectedVersion -> ProjectList update
    public class PackageSolutionDetailControlModel : PackageDetailControlModel
    {
        // list of projects where the package is installed
        private List<PackageInstallationInfo> _projects;

        private VsSolution _solution;

        private List<string> _actions;

        public List<PackageInstallationInfo> Projects
        {
            get
            {
                return _projects;
            }
        }

        public List<string> Actions
        {
            get
            {
                return _actions;
            }
        }

        private string _selectedAction;

        // The currently selected action (install/uninstall/update/consolidate).
        // Setting it hides the install options for uninstall and rebuilds the
        // version list; CreateVersions in turn assigns SelectedVersion, which
        // cascades into OnSelectedVersionChanged (presumably raised by the base
        // class setter — TODO confirm) and so rebuilds the project list too.
        public string SelectedAction
        {
            get
            {
                return _selectedAction;
            }
            set
            {
                _selectedAction = value;
                InstallOptionsVisible = SelectedAction != Resources.Resources.Action_Uninstall;
                CreateVersions();
                OnPropertyChanged("SelectedAction");
            }
        }

        private bool _actionEnabled;

        // Indicates if the action button and preview button is enabled.
        public bool ActionEnabled
        {
            get
            {
                return _actionEnabled;
            }
            set
            {
                _actionEnabled = value;
                OnPropertyChanged("ActionEnabled");
            }
        }

        // Rebuilds the project list and swaps the displayed Package to the one
        // matching the newly selected version (null if no package has it).
        protected override void OnSelectedVersionChanged()
        {
            CreateProjectList();

            UiDetailedPackage selectedPackage = null;
            if (_allPackages.TryGetValue(SelectedVersion.Version, out selectedPackage))
            {
                Package = selectedPackage;
            }
            else
            {
                Package = null;
            }
        }

        // Rebuilds _versions for the current action:
        // - Consolidate/Uninstall: the distinct installed versions across the
        //   projects and the solution, newest first.
        // - Install/Update: all known versions, with the latest stable (if any)
        //   pinned to the top and a null entry acting as a visual separator.
        // The first entry (if any) becomes the SelectedVersion.
        // NOTE(review): the project query uses Id while the solution query uses
        // Package.Id — presumably equivalent; verify.
        private void CreateVersions()
        {
            if (_selectedAction == Resources.Resources.Action_Consolidate ||
                _selectedAction == Resources.Resources.Action_Uninstall)
            {
                var installedVersions = _solution.Projects
                    .Select(project => project.InstalledPackages.GetInstalledPackage(Id))
                    .ToList();
                installedVersions.Add(_solution.InstalledPackages.GetInstalledPackage(Package.Id));
                _versions = installedVersions.Where(package => package != null)
                    .OrderByDescending(p => p.Identity.Version)
                    .Select(package => new VersionForDisplay(package.Identity.Version, string.Empty))
                    .ToList();
            }
            else if (_selectedAction == Resources.Resources.Action_Install ||
                _selectedAction == Resources.Resources.Action_Update)
            {
                _versions = new List<VersionForDisplay>();
                var allVersions = _allPackages.Keys.OrderByDescending(v => v);
                var latestStableVersion = allVersions.FirstOrDefault(v => !v.IsPrerelease);
                if (latestStableVersion != null)
                {
                    _versions.Add(new VersionForDisplay(latestStableVersion, "Latest stable "));
                }

                // add a separator
                if (_versions.Count > 0)
                {
                    _versions.Add(null);
                }

                foreach (var version in allVersions)
                {
                    _versions.Add(new VersionForDisplay(version, string.Empty));
                }
            }

            if (_versions.Count > 0)
            {
                SelectedVersion = _versions[0];
            }

            OnPropertyChanged("Versions");
        }

        public PackageSolutionDetailControlModel(
            UiSearchResultPackage searchResultPackage,
            VsSolution solution)
            : base(searchResultPackage, installedVersion: null)
        {
            _solution = solution;
            SelectedVersion = new VersionForDisplay(Package.Version, null);
            CreateActions();
        }

        // Update is offered when the package is installed somewhere (project or
        // solution level) and more than one version is known.
        // NOTE(review): uses Id for projects but Package.Id for the solution —
        // presumably the same value; verify.
        private bool CanUpdate()
        {
            var canUpdateInProjects = _solution.Projects
                .Any(project =>
                {
                    return project.InstalledPackages.IsInstalled(Id) && _allPackages.Count >= 2;
                });

            var installedInSolution = _solution.InstalledPackages.IsInstalled(Package.Id);

            var canUpdateInSolution = installedInSolution && _allPackages.Count >= 2;

            return canUpdateInProjects || canUpdateInSolution;
        }

        // Install is offered when the package is not solution-level installed
        // and at least one project lacks it.
        private bool CanInstall()
        {
            var canInstallInProjects = _solution.Projects
                .Any(project =>
                {
                    return !project.InstalledPackages.IsInstalled(Package.Id);
                });

            var installedInSolution = _solution.InstalledPackages.IsInstalled(Package.Id);

            return !installedInSolution && canInstallInProjects;
        }

        // Uninstall is offered when the package is installed anywhere.
        private bool CanUninstall()
        {
            var canUninstallFromProjects = _solution.Projects
                .Any(project =>
                {
                    return project.InstalledPackages.IsInstalled(Package.Id);
                });

            var installedInSolution = _solution.InstalledPackages.IsInstalled(Package.Id);

            return installedInSolution || canUninstallFromProjects;
        }

        // Consolidate is offered when projects hold two or more distinct
        // installed versions of the package.
        private bool CanConsolidate()
        {
            var installedVersions = _solution.Projects
                .Select(project => project.InstalledPackages.GetInstalledPackage(Package.Id))
                .Where(package => package != null)
                .Select(package => package.Identity.Version)
                .Distinct();
            return installedVersions.Count() >= 2;
        }

        // indicates if the install options expander is visible or not
        bool _installOptionsVisible;

        public bool InstallOptionsVisible
        {
            get
            {
                return _installOptionsVisible;
            }
            set
            {
                if (_installOptionsVisible != value)
                {
                    _installOptionsVisible = value;
                    OnPropertyChanged("InstallOptionsVisible");
                }
            }
        }

        // Recomputes the available actions for the current package/solution
        // state, and selects the first one (which also rebuilds versions and
        // the project list through the SelectedAction setter chain).
        private void CreateActions()
        {
            // initialize actions
            _actions = new List<string>();

            if (CanUpdate())
            {
                _actions.Add(Resources.Resources.Action_Update);
            }

            if (CanInstall())
            {
                _actions.Add(Resources.Resources.Action_Install);
            }

            if (CanUninstall())
            {
                _actions.Add(Resources.Resources.Action_Uninstall);
            }

            if (CanConsolidate())
            {
                _actions.Add(Resources.Resources.Action_Consolidate);
            }

            if (_actions.Count > 0)
            {
                SelectedAction = _actions[0];
            }
            else
            {
                InstallOptionsVisible = false;
            }

            OnPropertyChanged("Actions");
        }

        // Rebuilds _projects according to the selected action, wires up
        // selection-change handlers, and refreshes ActionEnabled.
        private void CreateProjectList()
        {
            _projects = new List<PackageInstallationInfo>();

            if (_selectedAction == Resources.Resources.Action_Consolidate)
            {
                // project list contains projects that have the package installed.
                // The project with the same version installed is included, but disabled.
                foreach (var project in _solution.Projects)
                {
                    var installed = project.InstalledPackages.GetInstalledPackage(Package.Id);
                    if (installed != null)
                    {
                        var enabled = installed.Identity.Version != SelectedVersion.Version;
                        var info = new PackageInstallationInfo(project, installed.Identity.Version, enabled);
                        _projects.Add(info);
                    }
                }
            }
            else if (_selectedAction == Resources.Resources.Action_Update)
            {
                // project list contains projects/solution that have the package
                // installed. The project/solution with the same version installed
                // is included, but disabled.
                foreach (var project in _solution.Projects)
                {
                    var installed = project.InstalledPackages.GetInstalledPackage(Package.Id);
                    if (installed != null)
                    {
                        var enabled = installed.Identity.Version != SelectedVersion.Version;
                        var info = new PackageInstallationInfo(project, installed.Identity.Version, enabled);
                        _projects.Add(info);
                    }
                }

                var v = _solution.InstalledPackages.GetInstalledPackage(Package.Id);
                if (v != null)
                {
                    var enabled = v.Identity.Version != SelectedVersion.Version;
                    var info = new PackageInstallationInfo(
                        _solution.Name,
                        SelectedVersion.Version,
                        enabled,
                        _solution.Projects.First());
                    _projects.Add(info);
                }
            }
            else if (_selectedAction == Resources.Resources.Action_Install)
            {
                // project list contains projects that do not have the package installed.
                foreach (var project in _solution.Projects)
                {
                    var installed = project.InstalledPackages.GetInstalledPackage(Package.Id);
                    if (installed == null)
                    {
                        var info = new PackageInstallationInfo(project, null, enabled: true);
                        _projects.Add(info);
                    }
                }
            }
            else if (_selectedAction == Resources.Resources.Action_Uninstall)
            {
                // project list contains projects/solution that have the same version installed.
                foreach (var project in _solution.Projects)
                {
                    var installed = project.InstalledPackages.GetInstalledPackage(Package.Id);
                    if (installed != null &&
                        installed.Identity.Version == SelectedVersion.Version)
                    {
                        var info = new PackageInstallationInfo(project, installed.Identity.Version, enabled: true);
                        _projects.Add(info);
                    }
                }

                var v = _solution.InstalledPackages.GetInstalledPackage(Package.Id);
                if (v != null)
                {
                    var enabled = v.Identity.Version == SelectedVersion.Version;
                    var info = new PackageInstallationInfo(
                        _solution.Name,
                        SelectedVersion.Version,
                        enabled,
                        _solution.Projects.First());
                    _projects.Add(info);
                }
            }

            // Re-evaluate ActionEnabled whenever any row's checkbox changes.
            // NOTE(review): handlers are re-subscribed on every rebuild; the old
            // PackageInstallationInfo instances are discarded with the old list,
            // so no leak is expected — verify.
            foreach (var p in _projects)
            {
                p.SelectedChanged += (sender, e) =>
                {
                    ActionEnabled = _projects.Any(i => i.Selected);
                };
            }
            ActionEnabled = _projects.Any(i => i.Selected);
            OnPropertyChanged("Projects");
        }

        // Re-syncs the model after external state changes: resets the selected
        // version to the package's version and recomputes the action list.
        public void Refresh()
        {
            SelectedVersion = new VersionForDisplay(Package.Version, null);
            CreateActions();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Threading;
using Ionic.Zlib;
using System.IO;

namespace Ionic.Zlib
{
    // One input/output buffer pair plus a dedicated deflater, handed to a
    // threadpool worker by ParallelDeflateOutputStream. Fields are public
    // because the owning stream manipulates them directly.
    internal class WorkItem
    {
        public byte[] buffer;                 // raw (uncompressed) input block
        public byte[] compressed;             // deflated output for this block
        public int crc;                       // CRC32 of the input block
        public int index;                     // fixed slot index within the pool
        public int ordinal;                   // sequence number, for ordered output
        public int inputBytesAvailable;
        public int compressedBytesAvailable;
        public ZlibCodec compressor;

        public WorkItem(int size,
                        Ionic.Zlib.CompressionLevel compressLevel,
                        CompressionStrategy strategy,
                        int ix)
        {
            this.index = ix;
            this.buffer = new byte[size];

            // Deflate can expand incompressible input: reserve 5 bytes of
            // block overhead per 32k block, doubled as a margin of safety.
            int blocks = (size / 32768) + 1;
            int compressedCapacity = size + (blocks * 5 * 2);
            this.compressed = new byte[compressedCapacity];

            this.compressor = new ZlibCodec();
            this.compressor.InitializeDeflate(compressLevel, false);
            this.compressor.OutputBuffer = this.compressed;
            this.compressor.InputBuffer = this.buffer;
        }
    }

    /// <summary>
    ///   A class for compressing streams using the
    ///   Deflate algorithm with multiple threads.
    /// </summary>
    ///
    /// <remarks>
    /// <para>
    ///   This class performs DEFLATE compression through writing. For
    ///   more information on the Deflate algorithm, see IETF RFC 1951,
    ///   "DEFLATE Compressed Data Format Specification version 1.3."
    /// </para>
    ///
    /// <para>
    ///   This class is similar to <see cref="Ionic.Zlib.DeflateStream"/>, except
    ///   that this class is for compression only, and this implementation uses an
    ///   approach that employs multiple worker threads to perform the DEFLATE. On
    ///   a multi-cpu or multi-core computer, the performance of this class can be
    ///   significantly higher than the single-threaded DeflateStream, particularly
    ///   for larger streams. How large? Anything over 10mb is a good candidate
    ///   for parallel compression.
    /// </para>
    ///
    /// <para>
    ///   The tradeoff is that this class uses more memory and more CPU than the
    ///   vanilla DeflateStream, and also is less efficient as a compressor.
/// For
    /// large files the size of the compressed data stream can be less than 1%
    /// larger than the size of a compressed data stream from the vanilla
    /// DeflateStream. For smaller files the difference can be larger. The
    /// difference will also be larger if you set the BufferSize to be lower than
    /// the default value. Your mileage may vary. Finally, for small files, the
    /// ParallelDeflateOutputStream can be much slower than the vanilla
    /// DeflateStream, because of the overhead associated to using the thread
    /// pool.
    /// </para>
    ///
    /// </remarks>
    /// <seealso cref="Ionic.Zlib.DeflateStream" />
    public class ParallelDeflateOutputStream : System.IO.Stream
    {
        // 64k. NOTE(review): the old inline comment said "128k", but 64 * 1024 is 64k;
        // some XML docs below also claim a 128k default — confirm which is intended.
        private static readonly int IO_BUFFER_SIZE_DEFAULT = 64 * 1024;
        private static readonly int BufferPairsPerCore = 4;

        private System.Collections.Generic.List<WorkItem> _pool;   // all buffer pairs, allocated on first Write()
        private bool _leaveOpen;                  // if true, do not Close() _outStream on Close()
        private bool emitting;                    // re-entrancy guard for EmitPendingBuffers
        private System.IO.Stream _outStream;      // destination for compressed bytes
        private int _maxBufferPairs;              // upper bound on pool size (see MaxBufferPairs)
        private int _bufferSize = IO_BUFFER_SIZE_DEFAULT;
        private AutoResetEvent _newlyCompressedBlob;  // signaled by workers when a block finishes compressing
        //private ManualResetEvent _writingDone;
        //private ManualResetEvent _sessionReset;
        private object _outputLock = new object();    // serializes trace console output
        private bool _isClosed;
        private bool _firstWriteDone;             // pool is initialized lazily on the first Write()
        private int _currentlyFilling;            // pool index being filled by Write(), or -1
        private int _lastFilled;                  // ordinal of the most recently filled block
        private int _lastWritten;                 // ordinal of the most recently emitted block
        private int _latestCompressed;            // highest ordinal compressed so far
        private int _Crc32;                       // final CRC, set at flush-finish
        private Ionic.Crc.CRC32 _runningCrc;      // combined CRC over emitted blocks
        private object _latestLock = new object();    // protects _latestCompressed
        private System.Collections.Generic.Queue<int> _toWrite;   // indices compressed, awaiting ordered emit
        private System.Collections.Generic.Queue<int> _toFill;    // indices free for Write() to fill
        private Int64 _totalBytesProcessed;
        private Ionic.Zlib.CompressionLevel _compressLevel;
        private volatile Exception _pendingException;  // first exception raised on a worker thread
        private bool _handlingException;
        private object _eLock = new Object(); // protects _pendingException

        // This bitfield is used only when Trace is defined.
//private TraceBits _DesiredTrace = TraceBits.Write | TraceBits.WriteBegin |
        //TraceBits.WriteDone | TraceBits.Lifecycle | TraceBits.Fill | TraceBits.Flush |
        //TraceBits.Session;

        //private TraceBits _DesiredTrace = TraceBits.WriteBegin | TraceBits.WriteDone | TraceBits.Synch | TraceBits.Lifecycle | TraceBits.Session ;

        // Active trace mask; TraceOutput() emits only messages whose bits intersect this.
        private TraceBits _DesiredTrace = TraceBits.Session | TraceBits.Compress |
            TraceBits.WriteTake | TraceBits.WriteEnter | TraceBits.EmitEnter |
            TraceBits.EmitDone | TraceBits.EmitLock | TraceBits.EmitSkip |
            TraceBits.EmitBegin;

        /// <summary>
        ///   Create a ParallelDeflateOutputStream.
        /// </summary>
        /// <remarks>
        ///
        /// <para>
        ///   This stream compresses data written into it via the DEFLATE
        ///   algorithm (see RFC 1951), and writes out the compressed byte stream.
        /// </para>
        ///
        /// <para>
        ///   The instance will use the default compression level, the default
        ///   buffer sizes and the default number of threads and buffers per
        ///   thread.
        /// </para>
        ///
        /// <para>
        ///   This class is similar to <see cref="Ionic.Zlib.DeflateStream"/>,
        ///   except that this implementation uses an approach that employs
        ///   multiple worker threads to perform the DEFLATE. On a multi-cpu or
        ///   multi-core computer, the performance of this class can be
        ///   significantly higher than the single-threaded DeflateStream,
        ///   particularly for larger streams. How large? Anything over 10mb is
        ///   a good candidate for parallel compression.
        /// </para>
        ///
        /// </remarks>
        ///
        /// <example>
        ///
        /// This example shows how to use a ParallelDeflateOutputStream to compress
        /// data. It reads a file, compresses it, and writes the compressed data to
        /// a second, output file.
        ///
        /// <code>
        /// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
        /// int n= -1;
        /// String outputFile = fileToCompress + ".compressed";
        /// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
        /// {
        ///     using (var raw = System.IO.File.Create(outputFile))
        ///     {
        ///         using (Stream compressor = new ParallelDeflateOutputStream(raw))
        ///         {
        ///             while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
        ///             {
        ///                 compressor.Write(buffer, 0, n);
        ///             }
        ///         }
        ///     }
        /// }
        /// </code>
        /// <code lang="VB">
        /// Dim buffer As Byte() = New Byte(4096) {}
        /// Dim n As Integer = -1
        /// Dim outputFile As String = (fileToCompress &amp; ".compressed")
        /// Using input As Stream = File.OpenRead(fileToCompress)
        ///     Using raw As FileStream = File.Create(outputFile)
        ///         Using compressor As Stream = New ParallelDeflateOutputStream(raw)
        ///             Do While (n &lt;&gt; 0)
        ///                 If (n &gt; 0) Then
        ///                     compressor.Write(buffer, 0, n)
        ///                 End If
        ///                 n = input.Read(buffer, 0, buffer.Length)
        ///             Loop
        ///         End Using
        ///     End Using
        /// End Using
        /// </code>
        /// </example>
        /// <param name="stream">The stream to which compressed data will be written.</param>
        public ParallelDeflateOutputStream(System.IO.Stream stream)
            : this(stream, CompressionLevel.Default, CompressionStrategy.Default, false)
        {
        }

        /// <summary>
        ///   Create a ParallelDeflateOutputStream using the specified CompressionLevel.
        /// </summary>
        /// <remarks>
        ///   See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
        ///   constructor for example code.
        /// </remarks>
        /// <param name="stream">The stream to which compressed data will be written.</param>
        /// <param name="level">A tuning knob to trade speed for effectiveness.</param>
        public ParallelDeflateOutputStream(System.IO.Stream stream, CompressionLevel level)
            : this(stream, level, CompressionStrategy.Default, false)
        {
        }

        /// <summary>
        /// Create a ParallelDeflateOutputStream and specify whether to leave the captive stream open
        /// when the ParallelDeflateOutputStream is closed.
/// </summary>
        /// <remarks>
        ///   See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
        ///   constructor for example code.
        /// </remarks>
        /// <param name="stream">The stream to which compressed data will be written.</param>
        /// <param name="leaveOpen">
        ///    true if the application would like the stream to remain open after inflation/deflation.
        /// </param>
        public ParallelDeflateOutputStream(System.IO.Stream stream, bool leaveOpen)
            : this(stream, CompressionLevel.Default, CompressionStrategy.Default, leaveOpen)
        {
        }

        /// <summary>
        /// Create a ParallelDeflateOutputStream and specify whether to leave the captive stream open
        /// when the ParallelDeflateOutputStream is closed.
        /// </summary>
        /// <remarks>
        ///   See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
        ///   constructor for example code.
        /// </remarks>
        /// <param name="stream">The stream to which compressed data will be written.</param>
        /// <param name="level">A tuning knob to trade speed for effectiveness.</param>
        /// <param name="leaveOpen">
        ///    true if the application would like the stream to remain open after inflation/deflation.
        /// </param>
        public ParallelDeflateOutputStream(System.IO.Stream stream,
                                           CompressionLevel level,
                                           bool leaveOpen)
            // BUGFIX: this overload previously forwarded CompressionLevel.Default,
            // silently discarding the caller's requested compression level.
            : this(stream, level, CompressionStrategy.Default, leaveOpen)
        {
        }

        /// <summary>
        /// Create a ParallelDeflateOutputStream using the specified
        /// CompressionLevel and CompressionStrategy, and specifying whether to
        /// leave the captive stream open when the ParallelDeflateOutputStream is
        /// closed.
        /// </summary>
        /// <remarks>
        ///   See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
        ///   constructor for example code.
/// </remarks>
        /// <param name="stream">The stream to which compressed data will be written.</param>
        /// <param name="level">A tuning knob to trade speed for effectiveness.</param>
        /// <param name="strategy">
        ///   By tweaking this parameter, you may be able to optimize the compression for
        ///   data with particular characteristics.
        /// </param>
        /// <param name="leaveOpen">
        ///    true if the application would like the stream to remain open after inflation/deflation.
        /// </param>
        public ParallelDeflateOutputStream(System.IO.Stream stream,
                                           CompressionLevel level,
                                           CompressionStrategy strategy,
                                           bool leaveOpen)
        {
            TraceOutput(TraceBits.Lifecycle | TraceBits.Session, "-------------------------------------------------------");
            TraceOutput(TraceBits.Lifecycle | TraceBits.Session, "Create {0:X8}", this.GetHashCode());
            _outStream = stream;
            _compressLevel= level;
            Strategy = strategy;
            _leaveOpen = leaveOpen;
            // buffer pool is NOT allocated here; it is allocated lazily on the
            // first Write(), so MaxBufferPairs can still be changed before then.
            this.MaxBufferPairs = 16; // default
        }


        /// <summary>
        ///   The ZLIB strategy to be used during compression.
        /// </summary>
        public CompressionStrategy Strategy
        {
            get;
            private set;
        }

        /// <summary>
        ///   The maximum number of buffer pairs to use.
        /// </summary>
        ///
        /// <remarks>
        /// <para>
        ///   This property sets an upper limit on the number of memory buffer
        ///   pairs to create. The implementation of this stream allocates
        ///   multiple buffers to facilitate parallel compression. As each buffer
        ///   fills up, this stream uses <see
        ///   cref="System.Threading.ThreadPool.QueueUserWorkItem(WaitCallback)">
        ///   ThreadPool.QueueUserWorkItem()</see>
        ///   to compress those buffers in a background threadpool thread. After a
        ///   buffer is compressed, it is re-ordered and written to the output
        ///   stream.
        /// </para>
        ///
        /// <para>
        ///   A higher number of buffer pairs enables a higher degree of
        ///   parallelism, which tends to increase the speed of compression on
        ///   multi-cpu computers. On the other hand, a higher number of buffer
        ///   pairs also implies a larger memory consumption, more active worker
        ///   threads, and a higher cpu utilization for any compression. This
        ///   property enables the application to limit its memory consumption and
        ///   CPU utilization behavior depending on requirements.
        /// </para>
        ///
        /// <para>
        ///   For each compression "task" that occurs in parallel, there are 2
        ///   buffers allocated: one for input and one for output. This property
        ///   sets a limit for the number of pairs. The total amount of storage
        ///   space allocated for buffering will then be (N*S*2), where N is the
        ///   number of buffer pairs, S is the size of each buffer (<see
        ///   cref="BufferSize"/>). By default, DotNetZip allocates 4 buffer
        ///   pairs per CPU core, so if your machine has 4 cores, and you retain
        ///   the default buffer size of 128k, then the
        ///   ParallelDeflateOutputStream will use 4 * 4 * 2 * 128kb of buffer
        ///   memory in total, or 4mb, in blocks of 128kb. If you then set this
        ///   property to 8, then the number will be 8 * 2 * 128kb of buffer
        ///   memory, or 2mb.
        /// </para>
        ///
        /// <para>
        ///   CPU utilization will also go up with additional buffers, because a
        ///   larger number of buffer pairs allows a larger number of background
        ///   threads to compress in parallel. If you find that parallel
        ///   compression is consuming too much memory or CPU, you can adjust this
        ///   value downward.
        /// </para>
        ///
        /// <para>
        ///   The default value is 16. Different values may deliver better or
        ///   worse results, depending on your priorities and the dynamic
        ///   performance characteristics of your storage and compute resources.
        /// </para>
        ///
        /// <para>
        ///   This property is not the number of buffer pairs to use; it is an
        ///   upper limit. An illustration: Suppose you have an application that
        ///   uses the default value of this property (which is 16), and it runs
        ///   on a machine with 2 CPU cores.
/// In that case, DotNetZip will allocate
        ///   4 buffer pairs per CPU core, for a total of 8 pairs. The upper
        ///   limit specified by this property has no effect.
        /// </para>
        ///
        /// <para>
        ///   The application can set this value at any time, but it is effective
        ///   only before the first call to Write(), which is when the buffers are
        ///   allocated.
        /// </para>
        /// </remarks>
        public int MaxBufferPairs
        {
            get
            {
                return _maxBufferPairs;
            }
            set
            {
                if (value < 4)
                    // BUGFIX: was `new ArgumentException("MaxBufferPairs", "Value must be 4
                    // or greater.")`, which swaps ArgumentException's (message, paramName)
                    // arguments. ArgumentOutOfRangeException takes (paramName, message),
                    // matches the intent, stays catchable as ArgumentException for existing
                    // callers, and is consistent with the BufferSize setter below.
                    throw new ArgumentOutOfRangeException("MaxBufferPairs",
                                                          "Value must be 4 or greater.");
                _maxBufferPairs = value;
            }
        }

        /// <summary>
        ///   The size of the buffers used by the compressor threads.
        /// </summary>
        /// <remarks>
        ///
        /// <para>
        ///   The default buffer size is 128k. The application can set this value
        ///   at any time, but it is effective only before the first Write().
        /// </para>
        ///
        /// <para>
        ///   Larger buffer sizes implies larger memory consumption but allows
        ///   more efficient compression. Using smaller buffer sizes consumes less
        ///   memory but may result in less effective compression. For example,
        ///   using the default buffer size of 128k, the compression delivered is
        ///   within 1% of the compression delivered by the single-threaded <see
        ///   cref="Ionic.Zlib.DeflateStream"/>. On the other hand, using a
        ///   BufferSize of 8k can result in a compressed data stream that is 5%
        ///   larger than that delivered by the single-threaded
        ///   <c>DeflateStream</c>. Excessively small buffer sizes can also cause
        ///   the speed of the ParallelDeflateOutputStream to drop, because of
        ///   larger thread scheduling overhead dealing with many many small
        ///   buffers.
        /// </para>
        ///
        /// <para>
        ///   The total amount of storage space allocated for buffering will be
        ///   (N*S*2), where N is the number of buffer pairs, and S is the size of
        ///   each buffer (this property). There are 2 buffers used by the
        ///   compressor, one for input and one for output.
/// By default, DotNetZip
        ///   allocates 4 buffer pairs per CPU core, so if your machine has 4
        ///   cores, then the number of buffer pairs used will be 16. If you
        ///   accept the default value of this property, 128k, then the
        ///   ParallelDeflateOutputStream will use 16 * 2 * 128kb of buffer memory
        ///   in total, or 4mb, in blocks of 128kb. If you set this property to
        ///   64kb, then the number will be 16 * 2 * 64kb of buffer memory, or
        ///   2mb.
        /// </para>
        ///
        /// </remarks>
        public int BufferSize
        {
            get { return _bufferSize;}
            set
            {
                if (value < 1024)
                    throw new ArgumentOutOfRangeException("BufferSize",
                                                          "BufferSize must be greater than 1024 bytes");
                _bufferSize = value;
            }
        }

        /// <summary>
        /// The CRC32 for the data that was written out, prior to compression.
        /// </summary>
        /// <remarks>
        /// This value is meaningful only after a call to Close().
        /// </remarks>
        public int Crc32 { get { return _Crc32; } }


        /// <summary>
        /// The total number of uncompressed bytes processed by the ParallelDeflateOutputStream.
        /// </summary>
        /// <remarks>
        /// This value is meaningful only after a call to Close().
        /// </remarks>
        public Int64 BytesProcessed { get { return _totalBytesProcessed; } }

        // Allocates the pool of buffer pairs and resets all bookkeeping.
        // Called lazily from the first Write() (and again after Reset()), so
        // that MaxBufferPairs/BufferSize changes made after construction take
        // effect. Pool size = 4 pairs per core, capped at _maxBufferPairs.
        private void _InitializePoolOfWorkItems()
        {
            _toWrite = new Queue<int>();
            _toFill = new Queue<int>();
            _pool = new System.Collections.Generic.List<WorkItem>();
            int nTasks = BufferPairsPerCore * Environment.ProcessorCount;
            nTasks = Math.Min(nTasks, _maxBufferPairs);
            for(int i=0; i < nTasks; i++)
            {
                _pool.Add(new WorkItem(_bufferSize, _compressLevel, Strategy, i));
                _toFill.Enqueue(i);
            }

            _newlyCompressedBlob = new AutoResetEvent(false);
            _runningCrc = new Ionic.Crc.CRC32();
            _currentlyFilling = -1;
            _lastFilled = -1;
            _lastWritten = -1;
            _latestCompressed = -1;
        }


        /// <summary>
        ///   Write data to the stream.
        /// </summary>
        ///
        /// <remarks>
        ///
        /// <para>
        ///   To use the ParallelDeflateOutputStream to compress data, create a
        ///   ParallelDeflateOutputStream with CompressionMode.Compress, passing a
        ///   writable output stream.
/// Then call Write() on that
        ///   ParallelDeflateOutputStream, providing uncompressed data as input. The
        ///   data sent to the output stream will be the compressed form of the data
        ///   written.
        /// </para>
        ///
        /// <para>
        ///   To decompress data, use the <see cref="Ionic.Zlib.DeflateStream"/> class.
        /// </para>
        ///
        /// </remarks>
        /// <param name="buffer">The buffer holding data to write to the stream.</param>
        /// <param name="offset">the offset within that data array to find the first byte to write.</param>
        /// <param name="count">the number of bytes to write.</param>
        public override void Write(byte[] buffer, int offset, int count)
        {
            bool mustWait = false;

            // This method does this:
            //   0. handles any pending exceptions
            //   1. write any buffers that are ready to be written,
            //   2. fills a work buffer; when full, flip state to 'Filled',
            //   3. if more data to be written, goto step 1
            if (_isClosed)
                throw new InvalidOperationException();

            // dispense any exceptions that occurred on the BG threads
            if (_pendingException != null)
            {
                _handlingException = true;
                var pe = _pendingException;
                _pendingException = null;
                throw pe;
            }

            if (count == 0) return;

            if (!_firstWriteDone)
            {
                // Want to do this on first Write, first session, and not in the
                // constructor. We want to allow MaxBufferPairs to
                // change after construction, but before first Write.
                _InitializePoolOfWorkItems();
                _firstWriteDone = true;
            }

            do
            {
                // may need to make buffers available
                EmitPendingBuffers(false, mustWait);
                mustWait = false;

                // use current buffer, or get a new buffer to fill
                int ix = -1;
                if (_currentlyFilling >= 0)
                {
                    ix = _currentlyFilling;
                    TraceOutput(TraceBits.WriteTake, "Write notake wi({0}) lf({1})",
                                ix, _lastFilled);
                }
                else
                {
                    TraceOutput(TraceBits.WriteTake, "Write take?");
                    if (_toFill.Count == 0)
                    {
                        // no available buffers, so... need to emit
                        // compressed buffers.
                        mustWait = true;
                        continue;
                    }

                    ix = _toFill.Dequeue();
                    TraceOutput(TraceBits.WriteTake, "Write take wi({0}) lf({1})",
                                ix, _lastFilled);
                    ++_lastFilled; // TODO: consider rollover?
                }

                WorkItem workitem = _pool[ix];

                // how many bytes fit into the remaining space of this buffer
                int limit = ((workitem.buffer.Length - workitem.inputBytesAvailable) > count)
                    ? count
                    : (workitem.buffer.Length - workitem.inputBytesAvailable);

                workitem.ordinal = _lastFilled;

                TraceOutput(TraceBits.Write,
                            "Write lock wi({0}) ord({1}) iba({2})",
                            workitem.index,
                            workitem.ordinal,
                            workitem.inputBytesAvailable
                            );

                // copy from the provided buffer to our workitem, starting at
                // the tail end of whatever data we might have in there currently.
                Buffer.BlockCopy(buffer,
                                 offset,
                                 workitem.buffer,
                                 workitem.inputBytesAvailable,
                                 limit);

                count -= limit;
                offset += limit;
                workitem.inputBytesAvailable += limit;
                if (workitem.inputBytesAvailable == workitem.buffer.Length)
                {
                    // No need for interlocked.increment: the Write()
                    // method is documented as not multi-thread safe, so
                    // we can assume Write() calls come in from only one
                    // thread.
                    // NOTE(review): this format string has a {3} placeholder but only
                    // three arguments are supplied; in a Trace build this call would
                    // throw FormatException — confirm and fix the format string.
                    TraceOutput(TraceBits.Write,
                                "Write QUWI wi({0}) ord({1}) iba({2}) nf({3})",
                                workitem.index,
                                workitem.ordinal,
                                workitem.inputBytesAvailable );

                    if (!ThreadPool.QueueUserWorkItem( _DeflateOne, workitem ))
                        throw new Exception("Cannot enqueue workitem");

                    _currentlyFilling = -1; // will get a new buffer next time
                }
                else
                    _currentlyFilling = ix;

                if (count > 0)
                    TraceOutput(TraceBits.WriteEnter, "Write more");
            }
            while (count > 0);  // until no more to write

            TraceOutput(TraceBits.WriteEnter, "Write exit");
            return;
        }


        private void _FlushFinish()
        {
            // After writing a series of compressed buffers, each one closed
            // with Flush.Sync, we now write the final one as Flush.Finish,
            // and then stop.
byte[] buffer = new byte[128];
            // a fresh, empty codec: no further input, just emit the DEFLATE
            // trailer so the overall stream is a valid deflate stream.
            var compressor = new ZlibCodec();
            int rc = compressor.InitializeDeflate(_compressLevel, false);
            compressor.InputBuffer = null;
            compressor.NextIn = 0;
            compressor.AvailableBytesIn = 0;
            compressor.OutputBuffer = buffer;
            compressor.NextOut = 0;
            compressor.AvailableBytesOut = buffer.Length;
            rc = compressor.Deflate(FlushType.Finish);

            if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
                throw new Exception("deflating: " + compressor.Message);

            if (buffer.Length - compressor.AvailableBytesOut > 0)
            {
                TraceOutput(TraceBits.EmitBegin,
                            "Emit begin flush bytes({0})",
                            buffer.Length - compressor.AvailableBytesOut);

                _outStream.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut);

                TraceOutput(TraceBits.EmitDone, "Emit done flush");
            }

            compressor.EndDeflate();

            // final CRC is only valid once the trailer has been written
            _Crc32 = _runningCrc.Crc32Result;
        }


        // Compress-and-emit any partially filled buffer, then either emit all
        // remaining compressed blocks and write the trailer (lastInput) or just
        // emit whatever is ready. No-op while an emit is already in progress.
        private void _Flush(bool lastInput)
        {
            if (_isClosed)
                throw new InvalidOperationException();

            if (emitting) return;

            // compress any partial buffer
            if (_currentlyFilling >= 0)
            {
                WorkItem workitem = _pool[_currentlyFilling];
                _DeflateOne(workitem);
                _currentlyFilling = -1; // get a new buffer next Write()
            }

            if (lastInput)
            {
                EmitPendingBuffers(true, false);
                _FlushFinish();
            }
            else
            {
                EmitPendingBuffers(false, false);
            }
        }


        /// <summary>
        /// Flush the stream.
        /// </summary>
        public override void Flush()
        {
            // surface any exception raised on a background compressor thread
            if (_pendingException != null)
            {
                _handlingException = true;
                var pe = _pendingException;
                _pendingException = null;
                throw pe;
            }
            if (_handlingException)
                return;

            _Flush(false);
        }


        /// <summary>
        /// Close the stream.
        /// </summary>
        /// <remarks>
        /// You must call Close on the stream to guarantee that all of the data written in has
        /// been compressed, and the compressed data has been written out.
/// </remarks>
        public override void Close()
        {
            TraceOutput(TraceBits.Session, "Close {0:X8}", this.GetHashCode());

            // surface any exception raised on a background compressor thread
            if (_pendingException != null)
            {
                _handlingException = true;
                var pe = _pendingException;
                _pendingException = null;
                throw pe;
            }

            if (_handlingException)
                return;

            if (_isClosed) return;   // Close is idempotent

            _Flush(true);            // drain all buffers and write the deflate trailer

            if (!_leaveOpen)
                _outStream.Close();

            _isClosed= true;
        }



        // workitem 10030 - implement a new Dispose method

        /// <summary>Dispose the object</summary>
        /// <remarks>
        /// <para>
        ///   Because ParallelDeflateOutputStream is IDisposable, the
        ///   application must call this method when finished using the instance.
        /// </para>
        /// <para>
        ///   This method is generally called implicitly upon exit from
        ///   a <c>using</c> scope in C# (<c>Using</c> in VB).
        /// </para>
        /// </remarks>
        // NOTE(review): `new` hides Stream.Dispose() rather than overriding it;
        // callers holding a Stream reference will not reach this method — confirm
        // this shadowing is intentional.
        new public void Dispose()
        {
            TraceOutput(TraceBits.Lifecycle, "Dispose {0:X8}", this.GetHashCode());
            Close();
            _pool = null;
            Dispose(true);
        }



        /// <summary>The Dispose method</summary>
        /// <param name="disposing">
        ///   indicates whether the Dispose method was invoked by user code.
        /// </param>
        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);
        }


        /// <summary>
        ///   Resets the stream for use with another stream.
        /// </summary>
        /// <remarks>
        ///   Because the ParallelDeflateOutputStream is expensive to create, it
        ///   has been designed so that it can be recycled and re-used. You have
        ///   to call Close() on the stream first, then you can call Reset() on
        ///   it, to use it again on another stream.
        /// </remarks>
        ///
        /// <param name="stream">
        ///   The new output stream for this era.
/// </param>
        ///
        /// <example>
        /// <code>
        /// ParallelDeflateOutputStream deflater = null;
        /// foreach (var inputFile in listOfFiles)
        /// {
        ///     string outputFile = inputFile + ".compressed";
        ///     using (System.IO.Stream input = System.IO.File.OpenRead(inputFile))
        ///     {
        ///         using (var outStream = System.IO.File.Create(outputFile))
        ///         {
        ///             if (deflater == null)
        ///                 deflater = new ParallelDeflateOutputStream(outStream,
        ///                                                            CompressionLevel.Best,
        ///                                                            CompressionStrategy.Default,
        ///                                                            true);
        ///             deflater.Reset(outStream);
        ///
        ///             while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
        ///             {
        ///                 deflater.Write(buffer, 0, n);
        ///             }
        ///         }
        ///     }
        /// }
        /// </code>
        /// </example>
        public void Reset(Stream stream)
        {
            TraceOutput(TraceBits.Session, "-------------------------------------------------------");
            TraceOutput(TraceBits.Session, "Reset {0:X8} firstDone({1})", this.GetHashCode(), _firstWriteDone);

            // nothing to reset if the pool was never initialized
            if (!_firstWriteDone) return;

            // reset all status
            _toWrite.Clear();
            _toFill.Clear();
            foreach (var workitem in _pool)
            {
                _toFill.Enqueue(workitem.index);
                workitem.ordinal = -1;
            }

            _firstWriteDone = false;
            _totalBytesProcessed = 0L;
            _runningCrc = new Ionic.Crc.CRC32();
            _isClosed= false;
            _currentlyFilling = -1;
            _lastFilled = -1;
            _lastWritten = -1;
            _latestCompressed = -1;
            _outStream = stream;
        }


        // Drains compressed blocks to the output stream, in ordinal order.
        // doAll: keep going until every compressed block has been written
        //        (used by _Flush(true)).
        // mustWait: block until at least one newly compressed block is available
        //        (used by Write() when no fill buffer is free).
        private void EmitPendingBuffers(bool doAll, bool mustWait)
        {
            // When combining parallel deflation with a ZipSegmentedStream, it's
            // possible for the ZSS to throw from within this method. In that
            // case, Close/Dispose will be called on this stream, if this stream
            // is employed within a using or try/finally pair as required. But
            // this stream is unaware of the pending exception, so the Close()
            // method invokes this method AGAIN. This can lead to a deadlock.
            // Therefore, failfast if re-entering.
            if (emitting) return;
            emitting = true;

            if (doAll || mustWait)
                _newlyCompressedBlob.WaitOne();

            do
            {
                int firstSkip = -1;
                // -1 = wait indefinitely for the _toWrite lock; 0 = try once
                int millisecondsToWait = doAll ? 200 : (mustWait ? -1 : 0);
                int nextToWrite = -1;

                do
                {
                    if (Monitor.TryEnter(_toWrite, millisecondsToWait))
                    {
                        nextToWrite = -1;
                        try
                        {
                            if (_toWrite.Count > 0)
                                nextToWrite = _toWrite.Dequeue();
                        }
                        finally
                        {
                            Monitor.Exit(_toWrite);
                        }

                        if (nextToWrite >= 0)
                        {
                            WorkItem workitem = _pool[nextToWrite];
                            if (workitem.ordinal != _lastWritten + 1)
                            {
                                // out of order. requeue and try again.
                                TraceOutput(TraceBits.EmitSkip,
                                            "Emit skip wi({0}) ord({1}) lw({2}) fs({3})",
                                            workitem.index,
                                            workitem.ordinal,
                                            _lastWritten,
                                            firstSkip);

                                lock(_toWrite)
                                {
                                    _toWrite.Enqueue(nextToWrite);
                                }

                                if (firstSkip == nextToWrite)
                                {
                                    // We went around the list once.
                                    // None of the items in the list is the one we want.
                                    // Now wait for a compressor to signal again.
                                    _newlyCompressedBlob.WaitOne();
                                    firstSkip = -1;
                                }
                                else if (firstSkip == -1)
                                    firstSkip = nextToWrite;

                                continue;
                            }

                            firstSkip = -1;

                            TraceOutput(TraceBits.EmitBegin,
                                        "Emit begin wi({0}) ord({1}) cba({2})",
                                        workitem.index,
                                        workitem.ordinal,
                                        workitem.compressedBytesAvailable);

                            _outStream.Write(workitem.compressed, 0, workitem.compressedBytesAvailable);
                            _runningCrc.Combine(workitem.crc, workitem.inputBytesAvailable);
                            _totalBytesProcessed += workitem.inputBytesAvailable;
                            workitem.inputBytesAvailable = 0;

                            TraceOutput(TraceBits.EmitDone,
                                        "Emit done wi({0}) ord({1}) cba({2}) mtw({3})",
                                        workitem.index,
                                        workitem.ordinal,
                                        workitem.compressedBytesAvailable,
                                        millisecondsToWait);

                            _lastWritten = workitem.ordinal;
                            _toFill.Enqueue(workitem.index);

                            // don't wait next time through
                            if (millisecondsToWait == -1) millisecondsToWait = 0;
                        }
                    }
                    else
                        nextToWrite = -1;

                } while (nextToWrite >= 0);

            } while (doAll && (_lastWritten != _latestCompressed));

            emitting = false;
        }



#if OLD
        private void _PerpetualWriterMethod(object state)
        {
            TraceOutput(TraceBits.WriterThread, "_PerpetualWriterMethod START");

            try
            {
                do
                {
                    // wait for the next session
                    TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.WaitOne(begin) PWM");
                    _sessionReset.WaitOne();
                    TraceOutput(TraceBits.Synch |
TraceBits.WriterThread, "Synch _sessionReset.WaitOne(done) PWM"); if (_isDisposed) break; TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.Reset() PWM"); _sessionReset.Reset(); // repeatedly write buffers as they become ready WorkItem workitem = null; Ionic.Zlib.CRC32 c= new Ionic.Zlib.CRC32(); do { workitem = _pool[_nextToWrite % _pc]; lock(workitem) { if (_noMoreInputForThisSegment) TraceOutput(TraceBits.Write, "Write drain wi({0}) stat({1}) canuse({2}) cba({3})", workitem.index, workitem.status, (workitem.status == (int)WorkItem.Status.Compressed), workitem.compressedBytesAvailable); do { if (workitem.status == (int)WorkItem.Status.Compressed) { TraceOutput(TraceBits.WriteBegin, "Write begin wi({0}) stat({1}) cba({2})", workitem.index, workitem.status, workitem.compressedBytesAvailable); workitem.status = (int)WorkItem.Status.Writing; _outStream.Write(workitem.compressed, 0, workitem.compressedBytesAvailable); c.Combine(workitem.crc, workitem.inputBytesAvailable); _totalBytesProcessed += workitem.inputBytesAvailable; _nextToWrite++; workitem.inputBytesAvailable= 0; workitem.status = (int)WorkItem.Status.Done; TraceOutput(TraceBits.WriteDone, "Write done wi({0}) stat({1}) cba({2})", workitem.index, workitem.status, workitem.compressedBytesAvailable); Monitor.Pulse(workitem); break; } else { int wcycles = 0; // I've locked a workitem I cannot use. // Therefore, wake someone else up, and then release the lock. 
while (workitem.status != (int)WorkItem.Status.Compressed) { TraceOutput(TraceBits.WriteWait, "Write waiting wi({0}) stat({1}) nw({2}) nf({3}) nomore({4})", workitem.index, workitem.status, _nextToWrite, _nextToFill, _noMoreInputForThisSegment ); if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill) break; wcycles++; // wake up someone else Monitor.Pulse(workitem); // release and wait Monitor.Wait(workitem); if (workitem.status == (int)WorkItem.Status.Compressed) TraceOutput(TraceBits.WriteWait, "Write A-OK wi({0}) stat({1}) iba({2}) cba({3}) cyc({4})", workitem.index, workitem.status, workitem.inputBytesAvailable, workitem.compressedBytesAvailable, wcycles); } if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill) break; } } while (true); } if (_noMoreInputForThisSegment) TraceOutput(TraceBits.Write, "Write nomore nw({0}) nf({1}) break({2})", _nextToWrite, _nextToFill, (_nextToWrite == _nextToFill)); if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill) break; } while (true); // Finish: // After writing a series of buffers, closing each one with // Flush.Sync, we now write the final one as Flush.Finish, and // then stop. 
byte[] buffer = new byte[128];
                ZlibCodec compressor = new ZlibCodec();
                int rc = compressor.InitializeDeflate(_compressLevel, false);
                compressor.InputBuffer = null;
                compressor.NextIn = 0;
                compressor.AvailableBytesIn = 0;
                compressor.OutputBuffer = buffer;
                compressor.NextOut = 0;
                compressor.AvailableBytesOut = buffer.Length;
                rc = compressor.Deflate(FlushType.Finish);

                if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
                    throw new Exception("deflating: " + compressor.Message);

                if (buffer.Length - compressor.AvailableBytesOut > 0)
                {
                    TraceOutput(TraceBits.WriteBegin,
                                "Write begin flush bytes({0})",
                                buffer.Length - compressor.AvailableBytesOut);

                    _outStream.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut);

                    TraceOutput(TraceBits.WriteBegin, "Write done flush");
                }

                compressor.EndDeflate();

                _Crc32 = c.Crc32Result;

                // signal that writing is complete:
                TraceOutput(TraceBits.Synch, "Synch _writingDone.Set() PWM");
                _writingDone.Set();
            }
            while (true);
            }
            catch (System.Exception exc1)
            {
                lock(_eLock)
                {
                    // expose the exception to the main thread
                    // BUGFIX: was `!= null`, which could never store the first
                    // exception (the field starts out null).
                    if (_pendingException == null)
                        _pendingException = exc1;
                }
            }

            TraceOutput(TraceBits.WriterThread, "_PerpetualWriterMethod FINIS");
        }
#endif


        // Threadpool callback: CRC and deflate one filled buffer, record it as
        // compressed, queue it for ordered emission, and wake the emitter.
        private void _DeflateOne(Object wi)
        {
            // compress one buffer
            WorkItem workitem = (WorkItem) wi;
            try
            {
                Ionic.Crc.CRC32 crc = new Ionic.Crc.CRC32();

                // calc CRC on the buffer
                crc.SlurpBlock(workitem.buffer, 0, workitem.inputBytesAvailable);

                // deflate it
                DeflateOneSegment(workitem);

                // update status
                workitem.crc = crc.Crc32Result;
                TraceOutput(TraceBits.Compress,
                            "Compress wi({0}) ord({1}) len({2})",
                            workitem.index,
                            workitem.ordinal,
                            workitem.compressedBytesAvailable
                            );

                lock(_latestLock)
                {
                    if (workitem.ordinal > _latestCompressed)
                        _latestCompressed = workitem.ordinal;
                }
                lock (_toWrite)
                {
                    _toWrite.Enqueue(workitem.index);
                }
                _newlyCompressedBlob.Set();
            }
            catch (System.Exception exc1)
            {
                lock(_eLock)
                {
                    // expose the exception to the main thread
                    // BUGFIX: was `if (_pendingException != null)`, which is inverted:
                    // _pendingException starts out null, so the exception was NEVER
                    // captured and Write/Flush/Close could not rethrow it. Keep only
                    // the first exception.
                    if (_pendingException == null)
                        _pendingException = exc1;
                }
            }
        }


        // Deflate one buffer with the workitem's own codec: consume all input
        // with FlushType.None, then emit a Sync flush so the block ends on a
        // byte boundary and can be concatenated with the next block's output.
        private bool DeflateOneSegment(WorkItem workitem)
        {
            ZlibCodec compressor = workitem.compressor;
            compressor.ResetDeflate();
            compressor.NextIn = 0;

            compressor.AvailableBytesIn = workitem.inputBytesAvailable;

            // step 1: deflate the buffer
            compressor.NextOut = 0;
            compressor.AvailableBytesOut = workitem.compressed.Length;
            do
            {
                compressor.Deflate(FlushType.None);
            }
            while (compressor.AvailableBytesIn > 0 || compressor.AvailableBytesOut == 0);

            // step 2: flush (sync)
            compressor.Deflate(FlushType.Sync);

            workitem.compressedBytesAvailable= (int) compressor.TotalBytesOut;
            return true;
        }


        // Writes a trace line to the console, color-coded per thread.
        // Compiled away unless the "Trace" symbol is defined.
        [System.Diagnostics.ConditionalAttribute("Trace")]
        private void TraceOutput(TraceBits bits, string format, params object[] varParams)
        {
            if ((bits & _DesiredTrace) != 0)
            {
                lock(_outputLock)
                {
                    int tid = Thread.CurrentThread.GetHashCode();
#if !SILVERLIGHT
                    Console.ForegroundColor = (ConsoleColor) (tid % 8 + 8);
#endif
                    Console.Write("{0:000} PDOS ", tid);
                    Console.WriteLine(format, varParams);
#if !SILVERLIGHT
                    Console.ResetColor();
#endif
                }
            }
        }


        // used only when Trace is defined
        [Flags]
        enum TraceBits : uint
        {
            None         = 0,
            NotUsed1     = 1,
            EmitLock     = 2,
            EmitEnter    = 4,    // enter _EmitPending
            EmitBegin    = 8,    // begin to write out
            EmitDone     = 16,   // done writing out
            EmitSkip     = 32,   // writer skipping a workitem
            // NOTE(review): 58 = 2|8|16|32, which omits EmitEnter (4); the OR of all
            // Emit flags is 62 — confirm whether the omission is intentional.
            EmitAll      = 58,   // All Emit flags
            Flush        = 64,
            Lifecycle    = 128,  // constructor/disposer
            Session      = 256,  // Close/Reset
            Synch        = 512,  // thread synchronization
            Instance     = 1024, // instance settings
            Compress     = 2048, // compress task
            Write        = 4096, // filling buffers, when caller invokes Write()
            WriteEnter   = 8192, // upon entry to Write()
            WriteTake    = 16384,// on _toFill.Take()
            All          = 0xffffffff,
        }

        /// <summary>
        /// Indicates whether the stream supports Seek operations.
        /// </summary>
        /// <remarks>
        /// Always returns false.
/// </remarks> public override bool CanSeek { get { return false; } } /// <summary> /// Indicates whether the stream supports Read operations. /// </summary> /// <remarks> /// Always returns false. /// </remarks> public override bool CanRead { get {return false;} } /// <summary> /// Indicates whether the stream supports Write operations. /// </summary> /// <remarks> /// Returns true if the provided stream is writable. /// </remarks> public override bool CanWrite { get { return _outStream.CanWrite; } } /// <summary> /// Reading this property always throws a NotSupportedException. /// </summary> public override long Length { get { throw new NotSupportedException(); } } /// <summary> /// Returns the current position of the output stream. /// </summary> /// <remarks> /// <para> /// Because the output gets written by a background thread, /// the value may change asynchronously. Setting this /// property always throws a NotSupportedException. /// </para> /// </remarks> public override long Position { get { return _outStream.Position; } set { throw new NotSupportedException(); } } /// <summary> /// This method always throws a NotSupportedException. /// </summary> /// <param name="buffer"> /// The buffer into which data would be read, IF THIS METHOD /// ACTUALLY DID ANYTHING. /// </param> /// <param name="offset"> /// The offset within that data array at which to insert the /// data that is read, IF THIS METHOD ACTUALLY DID /// ANYTHING. /// </param> /// <param name="count"> /// The number of bytes to write, IF THIS METHOD ACTUALLY DID /// ANYTHING. /// </param> /// <returns>nothing.</returns> public override int Read(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } /// <summary> /// This method always throws a NotSupportedException. /// </summary> /// <param name="offset"> /// The offset to seek to.... /// IF THIS METHOD ACTUALLY DID ANYTHING. /// </param> /// <param name="origin"> /// The reference specifying how to apply the offset.... 
IF /// THIS METHOD ACTUALLY DID ANYTHING. /// </param> /// <returns>nothing. It always throws.</returns> public override long Seek(long offset, System.IO.SeekOrigin origin) { throw new NotSupportedException(); } /// <summary> /// This method always throws a NotSupportedException. /// </summary> /// <param name="value"> /// The new value for the stream length.... IF /// THIS METHOD ACTUALLY DID ANYTHING. /// </param> public override void SetLength(long value) { throw new NotSupportedException(); } } }
// -------------------------------------
//  Domain  : Avariceonline.com
//  Author  : Nicholas Ventimiglia
//  Product : Unity3d Foundation
//  Published : 2015
// -------------------------------------
using System;
using Foundation.Tasks;
using FullSerializer;
using UnityEngine;

namespace Foundation.Server
{
    /// <summary>
    /// Encapsulates Api Http Communication
    /// </summary>
    /// <remarks>
    /// Inherent to access your own strongly typed server side controllers
    /// </remarks>
    public abstract class ServiceClientBase
    {
        #region Shared

        protected ServerConfig Config
        {
            get { return ServerConfig.Instance; }
        }

        protected AccountService AccountService
        {
            get { return AccountService.Instance; }
        }

        public bool IsAuthenticated
        {
            get { return HttpService.IsAuthenticated; }
        }

        private HttpService _client;

        // Lazily-created HTTP transport shared by all requests of this client.
        protected HttpService Client
        {
            get
            {
                if (_client == null)
                {
                    _client = new HttpService();
                }
                return _client;
            }
        }

        public string ControllerName { get; protected set; }

        /// <summary>
        /// Unique Id for this application instance
        /// </summary>
        public static readonly string ClientId = Guid.NewGuid().ToString();

        public ServiceClientBase(string controllerName)
        {
            ControllerName = controllerName;
        }

        /// <summary>
        /// Checks the preconditions shared by every request.
        /// </summary>
        /// <returns>
        /// An error message if the request cannot be made
        /// (invalid configuration or no internet connection); otherwise null.
        /// </returns>
        private string GetPreconditionError()
        {
            if (!Config.IsValid)
                return "Invalid configuration.";

            // BUGFIX: the callback overloads previously reported
            // "Invalid configuration." here (copy-paste error); the task
            // overloads already used "Internet not reachable.".
            if (Application.internetReachability == NetworkReachability.NotReachable)
                return "Internet not reachable.";

            return null;
        }

        /// <summary>
        /// Joins the configured server path with a controller action,
        /// inserting a slash only when the path does not already end in one.
        /// </summary>
        private string GetUrl(string action)
        {
            return Config.Path.EndsWith("/")
                ? Config.Path + action
                : Config.Path + "/" + action;
        }

        #endregion

        #region Public Callback Method

        /// <summary>
        /// Posts a get request against a IQueryable OData data source
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="query">odata query</param>
        /// <param name="callback">handler for the response</param>
        /// <returns>found entity array of type T</returns>
        public void HttpPostAsync<T>(ODataQuery<T> query, Action<Response<T[]>> callback) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response<T[]>(new Exception(error)));
                return;
            }

            var action = string.Format("api/{1}/Query/{0}", query, ControllerName);
            Client.PostAsync(GetUrl(action), callback);
        }

        /// <summary>
        /// Posts a get request against a IQueryable OData data source
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="prefix">prefix to odata query</param>
        /// <param name="callback">handler for the response</param>
        /// <param name="query">odata query</param>
        public void HttpPostAsync<T>(string prefix, ODataQuery<T> query, Action<Response<T[]>> callback) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response<T[]>(new Exception(error)));
                return;
            }

            var action = string.Format("api/{0}/Query/{1}{2}", ControllerName, prefix, query);
            Client.PostAsync(GetUrl(action), callback);
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="callback">handler for the response</param>
        /// <param name="method">controller method to call</param>
        public void HttpPostAsync<T>(string method, Action<Response<T>> callback) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response<T>(new Exception(error)));
                return;
            }

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            Client.PostAsync(GetUrl(action), callback);
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="method">controller method to call</param>
        /// <param name="id">id parameter</param>
        /// <param name="callback">handler for the response</param>
        public void HttpPostAsync<T>(string method, string id, Action<Response<T>> callback) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response<T>(new Exception(error)));
                return;
            }

            var action = string.Format("api/{0}/{1}/{2}", ControllerName, method, id);
            Client.PostAsync(GetUrl(action), callback);
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="method">controller method to call</param>
        /// <param name="entity">dto</param>
        /// <param name="callback">handler for the response</param>
        public void HttpPostAsync<T>(string method, object entity, Action<Response<T>> callback) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response<T>(new Exception(error)));
                return;
            }

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            Client.PostAsync(GetUrl(action), JsonSerializer.Serialize(entity), callback);
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <param name="method">controller method to call</param>
        /// <param name="entity">dto</param>
        /// <param name="callback">handler for the response</param>
        public void HttpPostAsync(string method, object entity, Action<Response> callback)
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response(new Exception(error)));
                return;
            }

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            Client.PostAsync(GetUrl(action), JsonSerializer.Serialize(entity), callback);
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <param name="method">controller method to call</param>
        /// <param name="id">id parameter</param>
        /// <param name="callback">handler for the response</param>
        public void HttpPostAsync(string method, string id, Action<Response> callback)
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response(new Exception(error)));
                return;
            }

            var action = string.Format("api/{0}/{1}/{2}", ControllerName, method, id);
            Client.PostAsync(GetUrl(action), callback);
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <param name="method">controller method to call</param>
        /// <param name="callback">handler for the response</param>
        public void HttpPostAsync(string method, Action<Response> callback)
        {
            var error = GetPreconditionError();
            if (error != null)
            {
                callback(new Response(new Exception(error)));
                return;
            }

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            Client.PostAsync(GetUrl(action), callback);
        }

        #endregion

        #region Public Async Method

        /// <summary>
        /// Posts a get request against a IQueryable OData data source
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="query">odata query</param>
        /// <returns>found entity array of type T</returns>
        public UnityTask<T[]> HttpPostAsync<T>(ODataQuery<T> query) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask<T[]>(error);

            var action = string.Format("api/{1}/Query/{0}", query, ControllerName);
            return Client.PostAsync<T[]>(GetUrl(action));
        }

        /// <summary>
        /// Posts a get request against a IQueryable OData data source
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="prefix">prefix to odata query</param>
        /// <param name="query">odata query</param>
        /// <returns>found entity array of type T</returns>
        public UnityTask<T[]> HttpPostAsync<T>(string prefix, ODataQuery<T> query) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask<T[]>(error);

            var action = string.Format("api/{0}/Query/{1}{2}", ControllerName, prefix, query);
            return Client.PostAsync<T[]>(GetUrl(action));
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="method">controller method to call</param>
        /// <returns>response of type T</returns>
        public UnityTask<T> HttpPostAsync<T>(string method) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask<T>(error);

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            return Client.PostAsync<T>(GetUrl(action));
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="method">controller method to call</param>
        /// <param name="id">id paramater</param>
        /// <returns>response of type T</returns>
        public UnityTask<T> HttpPostAsync<T>(string method, string id) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask<T>(error);

            var action = string.Format("api/{0}/{1}/{2}", ControllerName, method, id);
            return Client.PostAsync<T>(GetUrl(action));
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <typeparam name="T">return type</typeparam>
        /// <param name="method">controller method to call</param>
        /// <param name="entity">dto</param>
        /// <returns>response of type T</returns>
        public UnityTask<T> HttpPostAsync<T>(string method, object entity) where T : class
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask<T>(error);

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            return Client.PostAsync<T>(GetUrl(action), JsonSerializer.Serialize(entity));
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <param name="method">controller method to call</param>
        /// <param name="entity">dto</param>
        /// <returns>Metadata</returns>
        public UnityTask HttpPostAsync(string method, object entity)
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask(error);

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            return Client.PostAsync(GetUrl(action), JsonSerializer.Serialize(entity));
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <param name="method">controller method to call</param>
        /// <param name="id">id paramater</param>
        /// <returns>Metadata</returns>
        public UnityTask HttpPostAsync(string method, string id)
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask(error);

            var action = string.Format("api/{0}/{1}/{2}", ControllerName, method, id);
            return Client.PostAsync(GetUrl(action));
        }

        /// <summary>
        /// Posts the HTTP request to the server
        /// </summary>
        /// <param name="method">controller method to call</param>
        /// <returns>Metadata</returns>
        public UnityTask HttpPostAsync(string method)
        {
            var error = GetPreconditionError();
            if (error != null)
                return UnityTask.FailedTask(error);

            var action = string.Format("api/{0}/{1}", ControllerName, method);
            return Client.PostAsync(GetUrl(action));
        }

        #endregion
    }
}
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Cloud.Compute.V1.Snippets { using Google.Api.Gax; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using lro = Google.LongRunning; /// <summary>Generated snippets.</summary> public sealed class GeneratedInterconnectAttachmentsClientSnippets { /// <summary>Snippet for AggregatedList</summary> public void AggregatedListRequestObject() { // Snippet: AggregatedList(AggregatedListInterconnectAttachmentsRequest, CallSettings) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create(); // Initialize request argument(s) AggregatedListInterconnectAttachmentsRequest request = new AggregatedListInterconnectAttachmentsRequest { OrderBy = "", Project = "", Filter = "", IncludeAllScopes = false, ReturnPartialSuccess = false, }; // Make the request PagedEnumerable<InterconnectAttachmentAggregatedList, KeyValuePair<string, InterconnectAttachmentsScopedList>> response = interconnectAttachmentsClient.AggregatedList(request); // Iterate over all response items, lazily performing RPCs as required foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (InterconnectAttachmentAggregatedList 
page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<KeyValuePair<string, InterconnectAttachmentsScopedList>> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for AggregatedListAsync</summary> public async Task AggregatedListRequestObjectAsync() { // Snippet: AggregatedListAsync(AggregatedListInterconnectAttachmentsRequest, CallSettings) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync(); // Initialize request argument(s) AggregatedListInterconnectAttachmentsRequest request = new AggregatedListInterconnectAttachmentsRequest { OrderBy = "", Project = "", Filter = "", IncludeAllScopes = false, ReturnPartialSuccess = false, }; // Make the request PagedAsyncEnumerable<InterconnectAttachmentAggregatedList, KeyValuePair<string, InterconnectAttachmentsScopedList>> response = interconnectAttachmentsClient.AggregatedListAsync(request); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((KeyValuePair<string, InterconnectAttachmentsScopedList> item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await 
response.AsRawResponses().ForEachAsync((InterconnectAttachmentAggregatedList page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<KeyValuePair<string, InterconnectAttachmentsScopedList>> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for AggregatedList</summary> public void AggregatedList() { // Snippet: AggregatedList(string, string, int?, CallSettings) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create(); // Initialize request argument(s) string project = ""; // Make the request PagedEnumerable<InterconnectAttachmentAggregatedList, KeyValuePair<string, InterconnectAttachmentsScopedList>> response = interconnectAttachmentsClient.AggregatedList(project); // Iterate over all response items, lazily performing RPCs as required foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (InterconnectAttachmentAggregatedList page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in page) { 
// Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<KeyValuePair<string, InterconnectAttachmentsScopedList>> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for AggregatedListAsync</summary> public async Task AggregatedListAsync() { // Snippet: AggregatedListAsync(string, string, int?, CallSettings) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync(); // Initialize request argument(s) string project = ""; // Make the request PagedAsyncEnumerable<InterconnectAttachmentAggregatedList, KeyValuePair<string, InterconnectAttachmentsScopedList>> response = interconnectAttachmentsClient.AggregatedListAsync(project); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((KeyValuePair<string, InterconnectAttachmentsScopedList> item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((InterconnectAttachmentAggregatedList page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; 
Page<KeyValuePair<string, InterconnectAttachmentsScopedList>> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (KeyValuePair<string, InterconnectAttachmentsScopedList> item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for Delete</summary> public void DeleteRequestObject() { // Snippet: Delete(DeleteInterconnectAttachmentRequest, CallSettings) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create(); // Initialize request argument(s) DeleteInterconnectAttachmentRequest request = new DeleteInterconnectAttachmentRequest { RequestId = "", Region = "", Project = "", InterconnectAttachment = "", }; // Make the request lro::Operation<Operation, Operation> response = interconnectAttachmentsClient.Delete(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = interconnectAttachmentsClient.PollOnceDelete(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteAsync</summary> public async Task DeleteRequestObjectAsync() { // Snippet: DeleteAsync(DeleteInterconnectAttachmentRequest, CallSettings) // Additional: 
DeleteAsync(DeleteInterconnectAttachmentRequest, CancellationToken) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync(); // Initialize request argument(s) DeleteInterconnectAttachmentRequest request = new DeleteInterconnectAttachmentRequest { RequestId = "", Region = "", Project = "", InterconnectAttachment = "", }; // Make the request lro::Operation<Operation, Operation> response = await interconnectAttachmentsClient.DeleteAsync(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await interconnectAttachmentsClient.PollOnceDeleteAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for Delete</summary> public void Delete() { // Snippet: Delete(string, string, string, CallSettings) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create(); // Initialize request argument(s) string project = ""; string region = ""; string interconnectAttachment = ""; // Make the request lro::Operation<Operation, Operation> response = interconnectAttachmentsClient.Delete(project, region, interconnectAttachment); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or 
get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = interconnectAttachmentsClient.PollOnceDelete(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteAsync</summary> public async Task DeleteAsync() { // Snippet: DeleteAsync(string, string, string, CallSettings) // Additional: DeleteAsync(string, string, string, CancellationToken) // Create client InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync(); // Initialize request argument(s) string project = ""; string region = ""; string interconnectAttachment = ""; // Make the request lro::Operation<Operation, Operation> response = await interconnectAttachmentsClient.DeleteAsync(project, region, interconnectAttachment); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await interconnectAttachmentsClient.PollOnceDeleteAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for Get</summary> public void GetRequestObject() { // Snippet: Get(GetInterconnectAttachmentRequest, CallSettings) // Create 
// NOTE(review): auto-generated API snippet code (Google.Cloud.Compute.V1).
// The enclosing snippets class and the opening of the first method below start
// before the visible range; this fragment begins inside that method's body.
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        GetInterconnectAttachmentRequest request = new GetInterconnectAttachmentRequest
        {
            Region = "",
            Project = "",
            InterconnectAttachment = "",
        };
        // Make the request
        InterconnectAttachment response = interconnectAttachmentsClient.Get(request);
        // End snippet
    }

    /// <summary>Snippet for GetAsync</summary>
    public async Task GetRequestObjectAsync()
    {
        // Snippet: GetAsync(GetInterconnectAttachmentRequest, CallSettings)
        // Additional: GetAsync(GetInterconnectAttachmentRequest, CancellationToken)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        GetInterconnectAttachmentRequest request = new GetInterconnectAttachmentRequest
        {
            Region = "",
            Project = "",
            InterconnectAttachment = "",
        };
        // Make the request
        InterconnectAttachment response = await interconnectAttachmentsClient.GetAsync(request);
        // End snippet
    }

    /// <summary>Snippet for Get</summary>
    public void Get()
    {
        // Snippet: Get(string, string, string, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        string interconnectAttachment = "";
        // Make the request
        InterconnectAttachment response = interconnectAttachmentsClient.Get(project, region, interconnectAttachment);
        // End snippet
    }

    /// <summary>Snippet for GetAsync</summary>
    public async Task GetAsync()
    {
        // Snippet: GetAsync(string, string, string, CallSettings)
        // Additional: GetAsync(string, string, string, CancellationToken)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        string interconnectAttachment = "";
        // Make the request
        InterconnectAttachment response = await interconnectAttachmentsClient.GetAsync(project, region, interconnectAttachment);
        // End snippet
    }

    /// <summary>Snippet for Insert</summary>
    public void InsertRequestObject()
    {
        // Snippet: Insert(InsertInterconnectAttachmentRequest, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        InsertInterconnectAttachmentRequest request = new InsertInterconnectAttachmentRequest
        {
            RequestId = "",
            Region = "",
            InterconnectAttachmentResource = new InterconnectAttachment(),
            Project = "",
            ValidateOnly = false,
        };
        // Make the request
        lro::Operation<Operation, Operation> response = interconnectAttachmentsClient.Insert(request);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = interconnectAttachmentsClient.PollOnceInsert(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }

    /// <summary>Snippet for InsertAsync</summary>
    public async Task InsertRequestObjectAsync()
    {
        // Snippet: InsertAsync(InsertInterconnectAttachmentRequest, CallSettings)
        // Additional: InsertAsync(InsertInterconnectAttachmentRequest, CancellationToken)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        InsertInterconnectAttachmentRequest request = new InsertInterconnectAttachmentRequest
        {
            RequestId = "",
            Region = "",
            InterconnectAttachmentResource = new InterconnectAttachment(),
            Project = "",
            ValidateOnly = false,
        };
        // Make the request
        lro::Operation<Operation, Operation> response = await interconnectAttachmentsClient.InsertAsync(request);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = await interconnectAttachmentsClient.PollOnceInsertAsync(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }

    /// <summary>Snippet for Insert</summary>
    public void Insert()
    {
        // Snippet: Insert(string, string, InterconnectAttachment, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        InterconnectAttachment interconnectAttachmentResource = new InterconnectAttachment();
        // Make the request
        lro::Operation<Operation, Operation> response = interconnectAttachmentsClient.Insert(project, region, interconnectAttachmentResource);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = interconnectAttachmentsClient.PollOnceInsert(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }

    /// <summary>Snippet for InsertAsync</summary>
    public async Task InsertAsync()
    {
        // Snippet: InsertAsync(string, string, InterconnectAttachment, CallSettings)
        // Additional: InsertAsync(string, string, InterconnectAttachment, CancellationToken)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        InterconnectAttachment interconnectAttachmentResource = new InterconnectAttachment();
        // Make the request
        lro::Operation<Operation, Operation> response = await interconnectAttachmentsClient.InsertAsync(project, region, interconnectAttachmentResource);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = await interconnectAttachmentsClient.PollOnceInsertAsync(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }

    /// <summary>Snippet for List</summary>
    public void ListRequestObject()
    {
        // Snippet: List(ListInterconnectAttachmentsRequest, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        ListInterconnectAttachmentsRequest request = new ListInterconnectAttachmentsRequest
        {
            Region = "",
            OrderBy = "",
            Project = "",
            Filter = "",
            ReturnPartialSuccess = false,
        };
        // Make the request
        PagedEnumerable<InterconnectAttachmentList, InterconnectAttachment> response = interconnectAttachmentsClient.List(request);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (InterconnectAttachment item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (InterconnectAttachmentList page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (InterconnectAttachment item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<InterconnectAttachment> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (InterconnectAttachment item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }

    /// <summary>Snippet for ListAsync</summary>
    public async Task ListRequestObjectAsync()
    {
        // Snippet: ListAsync(ListInterconnectAttachmentsRequest, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        ListInterconnectAttachmentsRequest request = new ListInterconnectAttachmentsRequest
        {
            Region = "",
            OrderBy = "",
            Project = "",
            Filter = "",
            ReturnPartialSuccess = false,
        };
        // Make the request
        PagedAsyncEnumerable<InterconnectAttachmentList, InterconnectAttachment> response = interconnectAttachmentsClient.ListAsync(request);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((InterconnectAttachment item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((InterconnectAttachmentList page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (InterconnectAttachment item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<InterconnectAttachment> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (InterconnectAttachment item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }

    /// <summary>Snippet for List</summary>
    public void List()
    {
        // Snippet: List(string, string, string, int?, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        // Make the request
        PagedEnumerable<InterconnectAttachmentList, InterconnectAttachment> response = interconnectAttachmentsClient.List(project, region);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (InterconnectAttachment item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (InterconnectAttachmentList page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (InterconnectAttachment item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<InterconnectAttachment> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (InterconnectAttachment item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }

    /// <summary>Snippet for ListAsync</summary>
    public async Task ListAsync()
    {
        // Snippet: ListAsync(string, string, string, int?, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        // Make the request
        PagedAsyncEnumerable<InterconnectAttachmentList, InterconnectAttachment> response = interconnectAttachmentsClient.ListAsync(project, region);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((InterconnectAttachment item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((InterconnectAttachmentList page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (InterconnectAttachment item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<InterconnectAttachment> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (InterconnectAttachment item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }

    /// <summary>Snippet for Patch</summary>
    public void PatchRequestObject()
    {
        // Snippet: Patch(PatchInterconnectAttachmentRequest, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        PatchInterconnectAttachmentRequest request = new PatchInterconnectAttachmentRequest
        {
            RequestId = "",
            Region = "",
            InterconnectAttachmentResource = new InterconnectAttachment(),
            Project = "",
            InterconnectAttachment = "",
        };
        // Make the request
        lro::Operation<Operation, Operation> response = interconnectAttachmentsClient.Patch(request);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = interconnectAttachmentsClient.PollOncePatch(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }

    /// <summary>Snippet for PatchAsync</summary>
    public async Task PatchRequestObjectAsync()
    {
        // Snippet: PatchAsync(PatchInterconnectAttachmentRequest, CallSettings)
        // Additional: PatchAsync(PatchInterconnectAttachmentRequest, CancellationToken)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        PatchInterconnectAttachmentRequest request = new PatchInterconnectAttachmentRequest
        {
            RequestId = "",
            Region = "",
            InterconnectAttachmentResource = new InterconnectAttachment(),
            Project = "",
            InterconnectAttachment = "",
        };
        // Make the request
        lro::Operation<Operation, Operation> response = await interconnectAttachmentsClient.PatchAsync(request);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = await interconnectAttachmentsClient.PollOncePatchAsync(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }

    /// <summary>Snippet for Patch</summary>
    public void Patch()
    {
        // Snippet: Patch(string, string, string, InterconnectAttachment, CallSettings)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = InterconnectAttachmentsClient.Create();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        string interconnectAttachment = "";
        InterconnectAttachment interconnectAttachmentResource = new InterconnectAttachment();
        // Make the request
        lro::Operation<Operation, Operation> response = interconnectAttachmentsClient.Patch(project, region, interconnectAttachment, interconnectAttachmentResource);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = interconnectAttachmentsClient.PollOncePatch(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }

    /// <summary>Snippet for PatchAsync</summary>
    public async Task PatchAsync()
    {
        // Snippet: PatchAsync(string, string, string, InterconnectAttachment, CallSettings)
        // Additional: PatchAsync(string, string, string, InterconnectAttachment, CancellationToken)
        // Create client
        InterconnectAttachmentsClient interconnectAttachmentsClient = await InterconnectAttachmentsClient.CreateAsync();
        // Initialize request argument(s)
        string project = "";
        string region = "";
        string interconnectAttachment = "";
        InterconnectAttachment interconnectAttachmentResource = new InterconnectAttachment();
        // Make the request
        lro::Operation<Operation, Operation> response = await interconnectAttachmentsClient.PatchAsync(project, region, interconnectAttachment, interconnectAttachmentResource);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = await interconnectAttachmentsClient.PollOncePatchAsync(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }
}
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using GitVersion;
using LibGit2Sharp;
using Shouldly;

/// <summary>
/// Base class for GitVersion integration-test fixtures. Wraps a real LibGit2Sharp
/// repository in a temp directory, records every repository action into a PlantUML
/// sequence diagram (emitted on <see cref="Dispose"/>), and asserts computed versions.
/// </summary>
public abstract class RepositoryFixtureBase : IDisposable
{
    // Maps branch name -> diagram participant alias (see Participant()).
    readonly Dictionary<string, string> participants = new Dictionary<string, string>();
    readonly Config configuration;
    readonly StringBuilder diagramBuilder;

    protected RepositoryFixtureBase(Func<string, IRepository> repoBuilder, Config configuration)
        : this(configuration, repoBuilder(PathHelper.GetTempPath()))
    {
    }

    protected RepositoryFixtureBase(Config configuration, IRepository repository)
    {
        ConfigurationProvider.ApplyDefaultsTo(configuration);
        diagramBuilder = new StringBuilder();
        diagramBuilder.AppendLine("@startuml");
        this.configuration = configuration;
        Repository = repository;
        Repository.Config.Set("user.name", "Test");
        Repository.Config.Set("user.email", "test@email.com");
        IsForTrackedBranchOnly = true;
    }

    public bool IsForTrackedBranchOnly { private get; set; }

    public IRepository Repository { get; private set; }

    /// <summary>Working directory of the repository, without a trailing backslash.</summary>
    public string RepositoryPath
    {
        get { return Repository.Info.WorkingDirectory.TrimEnd('\\'); }
    }

    public void Checkout(string branch)
    {
        Repository.Checkout(branch);
    }

    public void Activate(string branch)
    {
        diagramBuilder.AppendLineFormat("activate {0}", GetParticipant(branch));
    }

    public void Destroy(string branch)
    {
        diagramBuilder.AppendLineFormat("destroy {0}", GetParticipant(branch));
    }

    /// <summary>
    /// Registers a diagram participant. Throws if <paramref name="participant"/> was
    /// already registered (Dictionary.Add semantics).
    /// </summary>
    public void Participant(string participant, string @as = null)
    {
        participants.Add(participant, @as ?? participant);
        if (@as == null)
            diagramBuilder.AppendLineFormat("participant {0}", participant);
        else
            diagramBuilder.AppendLineFormat("participant \"{0}\" as {1}", participant, @as);
    }

    public void Divider(string text)
    {
        diagramBuilder.AppendLineFormat("== {0} ==", text);
    }

    public void NoteOver(string noteText, string startNode, string endNode = null, string prefix = null)
    {
        diagramBuilder.AppendLineFormat(
            prefix + @"note over {0}{1}
 {2}
end note",
            GetParticipant(startNode),
            endNode == null ? null : ", " + GetParticipant(endNode),
            noteText.Replace("\n", "\n "));
    }

    public void MakeATaggedCommit(string tag)
    {
        MakeACommit();
        ApplyTag(tag);
    }

    public void ApplyTag(string tag)
    {
        diagramBuilder.AppendLineFormat("{0} -> {0}: tag {1}", GetParticipant(Repository.Head.Name), tag);
        Repository.ApplyTag(tag);
    }

    /// <summary>Creates a branch off the current HEAD and checks it out.</summary>
    public void BranchTo(string branchName, string @as = null)
    {
        if (!participants.ContainsKey(branchName))
        {
            diagramBuilder.Append("create ");
            Participant(branchName, @as);
        }
        var branch = Repository.Head.Name;
        diagramBuilder.AppendLineFormat("{0} -> {1}: branch from {2}", GetParticipant(branch), GetParticipant(branchName), branch);
        Repository.Checkout(Repository.CreateBranch(branchName));
    }

    /// <summary>
    /// Creates a branch and checks it out, recording it in the diagram as branched
    /// from <paramref name="fromTag"/> on <paramref name="onBranch"/>.
    /// NOTE(review): only the diagram mentions the tag — the branch itself is created
    /// from the current HEAD, exactly like BranchTo(); callers must have checked out
    /// the tagged commit first. Confirm this is intentional.
    /// </summary>
    public void BranchToFromTag(string branchName, string fromTag, string onBranch, string @as = null)
    {
        if (!participants.ContainsKey(branchName))
        {
            diagramBuilder.Append("create ");
            Participant(branchName, @as);
        }
        diagramBuilder.AppendLineFormat("{0} -> {1}: branch from tag ({2})", GetParticipant(onBranch), GetParticipant(branchName), fromTag);
        Repository.Checkout(Repository.CreateBranch(branchName));
    }

    public void MakeACommit()
    {
        diagramBuilder.AppendLineFormat("{0} -> {0}: commit", GetParticipant(Repository.Head.Name));
        Repository.MakeACommit();
    }

    public void MergeNoFF(string mergeTarget)
    {
        diagramBuilder.AppendLineFormat("{0} -> {1}: merge", GetParticipant(mergeTarget), GetParticipant(Repository.Head.Name));
        Repository.MergeNoFF(mergeTarget, Constants.SignatureNow());
    }

    /// <summary>
    /// Asserts that GitVersion computes <paramref name="fullSemver"/> for the current
    /// state; dumps the repository graph to the console on failure before rethrowing.
    /// </summary>
    public void AssertFullSemver(string fullSemver, IRepository repository = null, string commitId = null)
    {
        Console.WriteLine("---------");
        try
        {
            var variables = GetVersion(repository, commitId);
            variables.FullSemVer.ShouldBe(fullSemver);
        }
        catch (Exception)
        {
            (repository ?? Repository).DumpGraph();
            throw;
        }
        if (commitId == null)
            diagramBuilder.AppendLineFormat("note over {0} #D3D3D3: {1}", GetParticipant(Repository.Head.Name), fullSemver);
    }

    string GetParticipant(string branch)
    {
        // Single lookup instead of ContainsKey + indexer.
        string alias;
        return participants.TryGetValue(branch, out alias) ? alias : branch;
    }

    VersionVariables GetVersion(IRepository repository = null, string commitId = null)
    {
        var gitVersionContext = new GitVersionContext(repository ?? Repository, configuration, IsForTrackedBranchOnly, commitId);
        // BUGFIX: the version computation must run INSIDE the try block. Previously
        // the try wrapped only "return variables;" after the value was already
        // computed, so the graph-dumping catch below was unreachable dead code.
        try
        {
            var executeGitVersion = ExecuteGitVersion(gitVersionContext);
            return VariableProvider.GetVariablesFor(
                executeGitVersion,
                gitVersionContext.Configuration.AssemblyVersioningScheme,
                gitVersionContext.Configuration.VersioningMode,
                gitVersionContext.Configuration.ContinuousDeploymentFallbackTag,
                gitVersionContext.IsCurrentCommitTagged);
        }
        catch (Exception)
        {
            Console.WriteLine("Test failing, dumping repository graph");
            gitVersionContext.Repository.DumpGraph();
            throw;
        }
    }

    SemanticVersion ExecuteGitVersion(GitVersionContext context)
    {
        var vf = new GitVersionFinder();
        return vf.FindVersion(context);
    }

    /// <summary>
    /// Disposes the repository, best-effort deletes its temp directory, and writes
    /// the accumulated PlantUML diagram to the trace output.
    /// </summary>
    public virtual void Dispose()
    {
        Repository.Dispose();

        try
        {
            DirectoryHelper.DeleteDirectory(RepositoryPath);
        }
        catch (Exception e)
        {
            // Best effort: leaking a temp directory must not fail the test run.
            Trace.WriteLine(string.Format("Failed to clean up repository path at {0}. Received exception: {1}", RepositoryPath, e.Message));
        }

        diagramBuilder.AppendLine("@enduml");
        Trace.WriteLine("**Visualisation of test:**");
        Trace.WriteLine(string.Empty);
        Trace.WriteLine(diagramBuilder.ToString());
    }

    /// <summary>Clones this fixture's repository into a fresh temp path.</summary>
    public LocalRepositoryFixture CloneRepository(Config config = null)
    {
        var localPath = PathHelper.GetTempPath();
        LibGit2Sharp.Repository.Clone(RepositoryPath, localPath);
        return new LocalRepositoryFixture(config ?? new Config(), new Repository(localPath));
    }
}
namespace DD.CBU.Compute.Api.Contracts.Network
{
    /// <remarks/>
    // NOTE(review): xsd.exe-generated XML-serialization contract. Member order,
    // attributes and the "*Specified" companion-property pattern are all significant
    // to XmlSerializer — do not hand-edit the shape of this type.
    [System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.18020")]
    [System.SerializableAttribute()]
    [System.Diagnostics.DebuggerStepThroughAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace="http://oec.api.opsource.net/schemas/network")]
    [System.Xml.Serialization.XmlRootAttribute("NetworkConfiguration", Namespace="http://oec.api.opsource.net/schemas/network", IsNullable=false)]
    public partial class NetworkConfigurationType
    {
        private NetworkType networkField;

        private string hostNameField;

        private int aggField;

        private bool aggFieldSpecified;

        private string locationField;

        private int contextField;

        private bool contextFieldSpecified;

        private int acePairField;

        private bool acePairFieldSpecified;

        private int intVlanField;

        private bool intVlanFieldSpecified;

        private int outVlanField;

        private bool outVlanFieldSpecified;

        private string publicSnatField;

        private string privateSnatField;

        private string publicNetField;

        private string privateNetField;

        private IpBlockType[] publicIpsField;

        private Ips privateIpsField;

        /// <remarks/>
        public NetworkType network
        {
            get { return this.networkField; }
            set { this.networkField = value; }
        }

        /// <remarks/>
        public string hostName
        {
            get { return this.hostNameField; }
            set { this.hostNameField = value; }
        }

        /// <remarks/>
        public int agg
        {
            get { return this.aggField; }
            set { this.aggField = value; }
        }

        /// <remarks/>
        // Serializer flag: the <agg> element is emitted only when this is true.
        [System.Xml.Serialization.XmlIgnoreAttribute()]
        public bool aggSpecified
        {
            get { return this.aggFieldSpecified; }
            set { this.aggFieldSpecified = value; }
        }

        /// <remarks/>
        public string location
        {
            get { return this.locationField; }
            set { this.locationField = value; }
        }

        /// <remarks/>
        public int context
        {
            get { return this.contextField; }
            set { this.contextField = value; }
        }

        /// <remarks/>
        [System.Xml.Serialization.XmlIgnoreAttribute()]
        public bool contextSpecified
        {
            get { return this.contextFieldSpecified; }
            set { this.contextFieldSpecified = value; }
        }

        /// <remarks/>
        public int acePair
        {
            get { return this.acePairField; }
            set { this.acePairField = value; }
        }

        /// <remarks/>
        [System.Xml.Serialization.XmlIgnoreAttribute()]
        public bool acePairSpecified
        {
            get { return this.acePairFieldSpecified; }
            set { this.acePairFieldSpecified = value; }
        }

        /// <remarks/>
        public int intVlan
        {
            get { return this.intVlanField; }
            set { this.intVlanField = value; }
        }

        /// <remarks/>
        [System.Xml.Serialization.XmlIgnoreAttribute()]
        public bool intVlanSpecified
        {
            get { return this.intVlanFieldSpecified; }
            set { this.intVlanFieldSpecified = value; }
        }

        /// <remarks/>
        public int outVlan
        {
            get { return this.outVlanField; }
            set { this.outVlanField = value; }
        }

        /// <remarks/>
        [System.Xml.Serialization.XmlIgnoreAttribute()]
        public bool outVlanSpecified
        {
            get { return this.outVlanFieldSpecified; }
            set { this.outVlanFieldSpecified = value; }
        }

        /// <remarks/>
        public string publicSnat
        {
            get { return this.publicSnatField; }
            set { this.publicSnatField = value; }
        }

        /// <remarks/>
        public string privateSnat
        {
            get { return this.privateSnatField; }
            set { this.privateSnatField = value; }
        }

        /// <remarks/>
        public string publicNet
        {
            get { return this.publicNetField; }
            set { this.publicNetField = value; }
        }

        /// <remarks/>
        public string privateNet
        {
            get { return this.privateNetField; }
            set { this.privateNetField = value; }
        }

        /// <remarks/>
        [System.Xml.Serialization.XmlArrayItemAttribute("IpBlock", IsNullable=false)]
        public IpBlockType[] publicIps
        {
            get { return this.publicIpsField; }
            set { this.publicIpsField = value; }
        }

        /// <remarks/>
        public Ips privateIps
        {
            get { return this.privateIpsField; }
            set { this.privateIpsField = value; }
        }
    }
}
// <copyright file="CommonDistributionTests.cs" company="Math.NET"> // Math.NET Numerics, part of the Math.NET Project // http://numerics.mathdotnet.com // http://github.com/mathnet/mathnet-numerics // http://mathnetnumerics.codeplex.com // // Copyright (c) 2009-2015 Math.NET // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. // </copyright> using System; using System.Collections.Generic; using System.Linq; using MathNet.Numerics.Distributions; using MathNet.Numerics.Random; using MathNet.Numerics.Statistics; using NUnit.Framework; namespace MathNet.Numerics.UnitTests.DistributionTests { /// <summary> /// This class will perform various tests on discrete and continuous univariate distributions. /// The multivariate distributions will implement these respective tests in their local unit /// test classes as they do not adhere to the same interfaces. 
    /// </summary>
    [TestFixture, Category("Distributions")]
    public class CommonDistributionTests
    {
        // Sample count sized so the VC bound below holds for the chosen tolerances.
        public const int NumberOfTestSamples = 3500000;
        public const int NumberOfHistogramBuckets = 100;
        public const double ErrorTolerance = 0.01;
        public const double ErrorProbability = 0.001;

        // Distributions under test; commented-out entries are deliberately excluded.
        readonly List<IDiscreteDistribution> _discreteDistributions = new List<IDiscreteDistribution>
        {
            new Bernoulli(0.6),
            new Binomial(0.7, 10),
            new Categorical(new[] { 0.7, 0.3 }),
            //new ConwayMaxwellPoisson(0.2, 0.4),
            new DiscreteUniform(1, 10),
            new Geometric(0.2),
            new Hypergeometric(20, 3, 5),
            //new NegativeBinomial(4, 0.6),
            //new Poisson(0.4),
            new Zipf(3.0, 10),
        };

        readonly List<IContinuousDistribution> _continuousDistributions = new List<IContinuousDistribution>
        {
            new Beta(1.0, 1.0),
            new BetaScaled(1.0, 1.5, 0.5, 2.0),
            new Cauchy(1.0, 1.0),
            new Chi(3.0),
            new ChiSquared(3.0),
            new ContinuousUniform(0.0, 1.0),
            new Erlang(3, 0.4),
            new Exponential(0.4),
            new FisherSnedecor(0.3, 0.4),
            new Gamma(1.0, 1.0),
            new InverseGamma(1.0, 1.0),
            new Laplace(1.0, 0.5),
            new LogNormal(1.0, 1.0),
            new Normal(0.0, 1.0),
            new Pareto(1.0, 0.5),
            new Rayleigh(0.8),
            new Stable(0.5, 1.0, 0.5, 1.0),
            new StudentT(0.0, 1.0, 5.0),
            new Triangular(0, 1, 0.7),
            new Weibull(1.0, 1.0),
        };

        // Every distribution must expose a non-null random source by default.
        [Test]
        public void ValidateThatUnivariateDistributionsHaveRandomSource()
        {
            foreach (var dd in _discreteDistributions)
            {
                Assert.IsNotNull(dd.RandomSource);
            }

            foreach (var cd in _continuousDistributions)
            {
                Assert.IsNotNull(cd.RandomSource);
            }
        }

        [Test]
        public void CanSetRandomSource()
        {
            foreach (var dd in _discreteDistributions)
            {
                dd.RandomSource = MersenneTwister.Default;
            }

            foreach (var cd in _continuousDistributions)
            {
                cd.RandomSource = MersenneTwister.Default;
            }
        }

        // Setting RandomSource to null must not throw and must fall back to a default.
        [Test]
        public void HasRandomSourceEvenAfterSetToNull()
        {
            foreach (var dd in _discreteDistributions)
            {
                Assert.DoesNotThrow(() => dd.RandomSource = null);
                Assert.IsNotNull(dd.RandomSource);
            }

            foreach (var cd in _continuousDistributions)
            {
                Assert.DoesNotThrow(() => cd.RandomSource = null);
                Assert.IsNotNull(cd.RandomSource);
            }
        }

        [Test, Category("LongRunning")]
        public void DiscreteSampleIsDistributedCorrectly()
        {
            foreach (var dist in _discreteDistributions)
            {
                // Fixed seed keeps this statistical test deterministic.
                dist.RandomSource = new SystemRandomSource(1, false);
                var samples = new int[NumberOfTestSamples];
                for (var i = 0; i < NumberOfTestSamples; i++)
                {
                    samples[i] = dist.Sample();
                }

                DiscreteVapnikChervonenkisTest(ErrorTolerance, ErrorProbability, samples, dist);
            }
        }

        [Test, Category("LongRunning")]
        public void DiscreteSampleSequenceIsDistributedCorrectly()
        {
            foreach (var dist in _discreteDistributions)
            {
                dist.RandomSource = new SystemRandomSource(1, false);
                var samples = dist.Samples().Take(NumberOfTestSamples).ToArray();
                DiscreteVapnikChervonenkisTest(ErrorTolerance, ErrorProbability, samples, dist);
            }
        }

        [Test, Category("LongRunning")]
        public void ContinuousSampleIsDistributedCorrectly()
        {
            foreach (var dist in _continuousDistributions)
            {
                dist.RandomSource = new SystemRandomSource(1, false);
                var samples = new double[NumberOfTestSamples];
                for (var i = 0; i < NumberOfTestSamples; i++)
                {
                    samples[i] = dist.Sample();
                }

                ContinuousVapnikChervonenkisTest(ErrorTolerance, ErrorProbability, samples, dist);
            }
        }

        [Test, Category("LongRunning")]
        public void ContinuousSampleSequenceIsDistributedCorrectly()
        {
            foreach (var dist in _continuousDistributions)
            {
                dist.RandomSource = new SystemRandomSource(1, false);
                var samples = dist.Samples().Take(NumberOfTestSamples).ToArray();
                ContinuousVapnikChervonenkisTest(ErrorTolerance, ErrorProbability, samples, dist);
            }
        }

        /// <summary>
        /// Vapnik Chervonenkis test.
        /// </summary>
        /// <param name="epsilon">The error we are willing to tolerate.</param>
        /// <param name="delta">The error probability we are willing to tolerate.</param>
        /// <param name="s">The samples to use for testing.</param>
        /// <param name="dist">The distribution we are testing.</param>
        public static void ContinuousVapnikChervonenkisTest(double epsilon, double delta, double[] s, IContinuousDistribution dist)
        {
            // Using VC-dimension, we can bound the probability of making an error when estimating empirical probability
            // distributions. We are using Theorem 2.41 in "All Of Nonparametric Statistics".
            // http://books.google.com/books?id=MRFlzQfRg7UC&lpg=PP1&dq=all%20of%20nonparametric%20statistics&pg=PA22#v=onepage&q=%22shatter%20coe%EF%AC%83cients%20do%20not%22&f=false
            // For intervals on the real line the VC-dimension is 2.
            Assert.Greater(s.Length, Math.Ceiling(32.0 * Math.Log(16.0 / delta) / epsilon / epsilon));

            var histogram = new Histogram(s, NumberOfHistogramBuckets);
            for (var i = 0; i < NumberOfHistogramBuckets; i++)
            {
                // Compare the model probability mass of each bucket to its empirical mass.
                var p = dist.CumulativeDistribution(histogram[i].UpperBound) - dist.CumulativeDistribution(histogram[i].LowerBound);
                var pe = histogram[i].Count/(double)s.Length;
                Assert.Less(Math.Abs(p - pe), epsilon, dist.ToString());
            }
        }

        /// <summary>
        /// Vapnik Chervonenkis test.
        /// </summary>
        /// <param name="epsilon">The error we are willing to tolerate.</param>
        /// <param name="delta">The error probability we are willing to tolerate.</param>
        /// <param name="s">The samples to use for testing.</param>
        /// <param name="dist">The distribution we are testing.</param>
        public static void DiscreteVapnikChervonenkisTest(double epsilon, double delta, int[] s, IDiscreteDistribution dist)
        {
            // Using VC-dimension, we can bound the probability of making an error when estimating empirical probability
            // distributions. We are using Theorem 2.41 in "All Of Nonparametric Statistics".
            // http://books.google.com/books?id=MRFlzQfRg7UC&lpg=PP1&dq=all%20of%20nonparametric%20statistics&pg=PA22#v=onepage&q=%22shatter%20coe%EF%AC%83cients%20do%20not%22&f=false
            // For intervals on the real line the VC-dimension is 2.
            Assert.Greater(s.Length, Math.Ceiling(32.0 * Math.Log(16.0 / delta) / epsilon / epsilon));

            // One histogram bin per integer value between the observed min and max.
            var min = s.Min();
            var max = s.Max();

            var histogram = new int[max - min + 1];
            for (int i = 0; i < s.Length; i++)
            {
                histogram[s[i] - min]++;
            }

            for (int i = 0; i < histogram.Length; i++)
            {
                // CDF(k) - CDF(k-1) gives the model mass at integer value k = i + min.
                var p = dist.CumulativeDistribution(i + min) - dist.CumulativeDistribution(i + min - 1.0);
                var pe = histogram[i]/(double)s.Length;
                Assert.Less(Math.Abs(p - pe), epsilon, dist.ToString());
            }
        }
    }
}
// Copyright 2006-2008 Splicer Project - http://www.codeplex.com/splicer/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using NUnit.Framework;
using Splicer.Timeline;

namespace Splicer.Renderer.Tests
{
    /// <summary>
    /// Tests for the NullRenderer (a renderer that discards its output),
    /// exercising the render life-cycle: event subscription, cancellation,
    /// completion state and the timeline XML produced for various media mixes.
    /// </summary>
    [TestFixture]
    public class NullRendererFixture : AbstractFixture
    {
        // A handler that is added and then removed again must NOT fire when
        // the render is started and cancelled.
        [Test]
        public void AddAndRemoveHandler()
        {
            bool eventTriggered = false;
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);
                using (var renderer = new NullRenderer(timeline))
                {
                    EventHandler handler = delegate { eventTriggered = true; };
                    renderer.RenderCompleted += handler;
                    renderer.RenderCompleted -= handler;
                    renderer.BeginRender(null, null);
                    renderer.Cancel();
                    Assert.IsFalse(eventTriggered);
                }
            }
        }

        // Cancelling before BeginRender must raise a SplicerException with
        // the "Graph not yet started" message.
        [Test]
        [ExpectedException(ExceptionType = typeof (SplicerException), ExpectedMessage = "Graph not yet started")]
        public void CancelBeforeStart()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);
                using (var renderer = new NullRenderer(timeline))
                {
                    renderer.Cancel();
                }
            }
        }

        // Cancelling a started render ends in the Canceled state and still
        // raises RenderCompleted.
        [Test]
        public void CancelRender()
        {
            bool eventTriggered = false;
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);
                using (var renderer = new NullRenderer(timeline))
                {
                    renderer.RenderCompleted += delegate { eventTriggered = true; };
                    renderer.BeginRender(null, null);
                    renderer.Cancel();
                    Assert.AreEqual(RendererState.Canceled, renderer.State);
                    Assert.IsTrue(eventTriggered);
                }
            }
        }

        // Mixed-media timeline (audio + video + still image); verifies the
        // generated timeline XML, including the image clip's relative placement.
        [Test]
        public void CanRenderAudioVideoAndImages()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);
                IGroup videoGroup = timeline.AddVideoGroup(24, 160, 100);
                ITrack videoTrack = videoGroup.AddTrack();
                videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 1);
                videoTrack.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 1);
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0"" /> </track> </group> <group type=""video"" bitdepth=""24"" width=""160"" height=""100"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""1"" src=""transitions.wmv"" mstart=""0"" /> <clip start=""1"" stop=""2"" src=""image1.jpg"" /> </track> </group> </timeline>");
                }
            }
        }

        // Audio-only timeline renders to the expected XML.
        [Test]
        public void RenderAudio()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);
                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/> </track> </group> </timeline>");
                }
            }
        }

        // Combined audio and video groups render to the expected XML.
        [Test]
        public void RenderAudioAndVideo()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup videoGroup = timeline.AddVideoGroup(24, 320, 240);
                ITrack videoTrack = videoGroup.AddTrack();
                videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 2);
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);
                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""transitions.wmv"" mstart=""0"" /> </track> </group> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0"" /> </track> </group> </timeline>");
                }
            }
        }

        // A blocking Render() run must finish in the GraphCompleted state and
        // raise RenderCompleted.
        [Test]
        public void RenderToCompletion()
        {
            bool eventTriggered = false;
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 1);
                using (var renderer = new NullRenderer(timeline))
                {
                    renderer.RenderCompleted += delegate { eventTriggered = true; };
                    renderer.Render();
                    Assert.AreEqual(RendererState.GraphCompleted, renderer.State);
                    Assert.IsTrue(eventTriggered);
                }
            }
        }

        // Video-only timeline renders to the expected XML.
        [Test]
        public void RenderVideo()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup videoGroup = timeline.AddVideoGroup(24, 320, 240);
                ITrack rootTrack = videoGroup.AddTrack();
                rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 2);
                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""transitions.wmv"" mstart=""0""/> </track> </group> </timeline>");
                }
            }
        }
    }
}
//////////////////////////
// used in both RawBencher and RawBencher.Core
//////////////////////////
using System;
using System.Collections.Generic;
#if !NETCOREAPP
using System.Configuration;
using AdventureWorks.Dal.Adapter.DatabaseSpecific;
#endif
using System.Data.SqlClient;
using System.Linq;
using System.Threading;
using Dapper;
#if NETCOREAPP
using Microsoft.Extensions.PlatformAbstractions;
#endif
using RawBencher.Benchers;
using SD.LLBLGen.Pro.ORMSupportClasses;

namespace RawBencher
{
    /// <summary>
    /// The actual bencher management code. Pass '/a' on the command line as argument to make the program exit automatically. If no argument
    /// is specified it will wait for ENTER after reporting the results.
    /// </summary>
    public class BenchController
    {
        private const int LoopAmount = 10; // number of timed runs per benchmark type per bencher.
        private const int IndividualKeysAmount = 100; // number of primary keys fetched one-by-one in the individual fetch benchmarks.
        private const int InsertSetSize = 1000; // insert this amount of elements during insert benchmarks.
        private const int InsertBatchSizeDefault = 100; // default batch size used by the set insert benchmarks.
        private const int ProfileLoopAmount = 1; // number of iterations performed while a profiler is attached.
        private const bool PerformSetInsertBenchmarks = true; // flag to signal whether the set insert benchmarks have to run. Not every bencher will perform this benchmark.
        private const bool PerformSetBenchmarks = true; // flag to signal whether the set fetch benchmarks have to be run.
        private const bool PerformIndividualBenchMarks = true; // flag to signal whether the single element fetch benchmarks have to be run.
        private const bool PerformEagerLoadBenchmarks = true; // flag to signal whether the eager load fetch benchmarks have to be run. Not every bencher will perform this benchmnark.
        private const bool PerformAsyncBenchmarks = true; // flag to signal whether the async fetch benchmarks have to be run. Not every bencher will perform this benchmark.
        private const bool ApplyAntiFloodForVMUsage = false; // set to true if your target DB server is hosted on a VM, otherwise set it to false. Used in individual fetch bench.
#if NETCOREAPP private static string ConnectionString = @"data source=ATHENA\SQLEXPRESS2017;initial catalog=AdventureWorks;integrated security=SSPI;persist security info=False;packet size=4096"; #else private static string ConnectionString = ConfigurationManager.ConnectionStrings["AdventureWorks.ConnectionString.SQL Server (SqlClient)"].ConnectionString; #endif private static string SqlSelectCommandText = @"SELECT [SalesOrderID],[RevisionNumber],[OrderDate],[DueDate],[ShipDate],[Status],[OnlineOrderFlag],[SalesOrderNumber],[PurchaseOrderNumber],[AccountNumber],[CustomerID],[SalesPersonID],[TerritoryID],[BillToAddressID],[ShipToAddressID],[ShipMethodID],[CreditCardID],[CreditCardApprovalCode],[CurrencyRateID],[SubTotal],[TaxAmt],[Freight],[TotalDue],[Comment],[rowguid],[ModifiedDate] FROM [Sales].[SalesOrderHeader]"; private static List<IBencher> RegisteredBenchers = new List<IBencher>(); private static List<int> KeysForIndividualFetches = new List<int>(); public static void Run(string[] args) { bool autoExit = false; if(args.Length > 0) { autoExit = args[0] == "/a"; } BenchController.InitConnectionString(); CacheController.RegisterCache(ConnectionString, new ResultsetCache()); RegisteredBenchers.Add(new HandCodedBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new HandCodedBencherUsingGetFieldValue() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new RepoDbRawSqlBencher() { ConnectionStringToUse = ConnectionString, CommandText = SqlSelectCommandText }); RegisteredBenchers.Add(new RepoDbPocoBencher() { ConnectionStringToUse = ConnectionString, CommandText = SqlSelectCommandText }); RegisteredBenchers.Add(new HandCodedBencherUsingBoxing() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new HandCodedBencherUsingBoxingGetValue() { CommandText = SqlSelectCommandText, ConnectionStringToUse 
= ConnectionString }); RegisteredBenchers.Add(new RawDbDataReaderBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new NPocoBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new LINQ2DBNormalBencher(ConnectionString)); RegisteredBenchers.Add(new LLBLGenProNoChangeTrackingRawSQLPocoBencher(ConnectionString) { CommandText = SqlSelectCommandText }); RegisteredBenchers.Add(new LLBLGenProNoChangeTrackingQuerySpecPocoBencher(ConnectionString)); RegisteredBenchers.Add(new LLBLGenProNoChangeTrackingLinqPocoBencher(ConnectionString)); RegisteredBenchers.Add(new LLBLGenProNoChangeTrackingBencher(ConnectionString)); RegisteredBenchers.Add(new LLBLGenProResultsetCachingBencher(ConnectionString)); RegisteredBenchers.Add(new LLBLGenProNormalBencher(ConnectionString)); RegisteredBenchers.Add(new LLBLGenProDTOBencher(ConnectionString)); RegisteredBenchers.Add(new DapperBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new ChainBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new OrmLiteBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new DataTableBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new ChainCompiledBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); #if NETCOREAPP // EF Core 3.x does support netstandard 2.0 but the benchers fail to build on .NET 4.8 so we'll skip them on netfx RegisteredBenchers.Add(new EntityFrameworkCoreNoChangeTrackingBencher() { ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new EntityFrameworkCoreNormalBencher() { ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new 
EntityFrameworkCoreDTOBencher() { ConnectionStringToUse = ConnectionString }); #else RegisteredBenchers.Add(new MassiveBencher()); RegisteredBenchers.Add(new NHibernateNormalBencher()); RegisteredBenchers.Add(new LinqToSqlNoChangeTrackingBencher()); RegisteredBenchers.Add(new LinqToSqlNormalBencher()); RegisteredBenchers.Add(new EntityFrameworkNoChangeTrackingBencher()); RegisteredBenchers.Add(new EntityFrameworkNormalBencher()); RegisteredBenchers.Add(new PetaPocoBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); RegisteredBenchers.Add(new PetaPocoFastBencher() { CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString }); #endif BenchController.DisplayHeader(); BenchController.WarmupDB(); BenchController.FetchKeysForIndividualFetches(); // Uncomment the line below if you want to profile a bencher. Specify the bencher instance and follow the guides on the screen. //ProfileBenchers(RegisteredBenchers.FirstOrDefault(b => b.GetType() == typeof(LINQ2DBNormalBencher))); BenchController.RunRegisteredBenchers(); BenchController.ReportResultStatistics(autoExit); } private static void DisplayHeader() { bool releaseBuild = true; #if DEBUG releaseBuild = false; #endif var conBuilder = new SqlConnectionStringBuilder(ConnectionString); string sqlServerVersion = "Unknown"; using(var conForHeader = new SqlConnection(ConnectionString)) { conForHeader.Open(); sqlServerVersion = conForHeader.ServerVersion; conForHeader.Close(); } Console.WriteLine("+-------------------------------------------------------------------------------------------"); Console.WriteLine("| Raw Data Access / ORM Benchmarks."); Console.WriteLine(@"| Code available at : https://github.com/FransBouma/RawDataAccessBencher"); Console.WriteLine("| Benchmarks run on : {0}", DateTime.Now.ToString("F")); Console.WriteLine("| Registered benchmarks :"); foreach(var bencher in RegisteredBenchers) { BenchController.DisplayBencherInfo(bencher, "| \t", 
suffixWithDashLine: false); } Console.WriteLine("| Run set benchmarks : {0}", PerformSetBenchmarks); Console.WriteLine("| Run individual fetch benchmarks: {0}", PerformIndividualBenchMarks); Console.WriteLine("| Number of set fetches : {0}", LoopAmount); Console.WriteLine("| Number of individual keys : {0}", IndividualKeysAmount); Console.WriteLine("| Release build : {0}", releaseBuild); Console.WriteLine("| Client OS : {0} ({1}bit)", Environment.OSVersion, Environment.Is64BitOperatingSystem ? "64" : "32"); Console.WriteLine("| Bencher runs as 64bit : {0}", Environment.Is64BitProcess); #if NETCOREAPP Console.WriteLine("| .NET Type : .NET Core"); Console.WriteLine("| CLR version : {0} {1}", PlatformServices.Default.Application.RuntimeFramework.Identifier, PlatformServices.Default.Application.RuntimeFramework.Version); #else Console.WriteLine("| .NET Type : .NET Full"); Console.WriteLine("| CLR version : {0}", Environment.Version); #endif Console.WriteLine("| Number of CPUs : {0}", Environment.ProcessorCount); Console.WriteLine("| Server used : {0}", conBuilder.DataSource); Console.WriteLine("| Catalog used : {0}", conBuilder.InitialCatalog); Console.WriteLine("| SQL Server version used : {0}", sqlServerVersion); Console.WriteLine("+-------------------------------------------------------------------------------------------\n"); } /// <summary> /// Displays a pre-amble so the user can attach the .net profiler, then runs the benchers specified and then displays a text to stop profiling. /// </summary> /// <param name="benchersToProfile">The benchers to profile.</param> private static void ProfileBenchers(params IBencher[] benchersToProfile) { // run the benchers before profiling. foreach(var b in benchersToProfile) { if(b == null) { Console.WriteLine("The bencher you are trying to profile hasn't been registered. 
Can't continue."); return; } Console.WriteLine("Running set benchmark for bencher '{0}' before profiling to warm up constructs", b.CreateFrameworkName()); b.PerformSetBenchmark(); b.PerformIndividualBenchMark(KeysForIndividualFetches); if(b.SupportsEagerLoading) { b.PerformEagerLoadBenchmark(); if(b.SupportsAsync) { b.PerformAsyncEagerLoadBenchmark(discardResults:true); } } if(b.SupportsInserts) { b.PerformInsertSetBenchmark(InsertSetSize, InsertBatchSizeDefault, discardResults: true); } } Console.WriteLine("Attach profiler and press ENTER to continue"); Console.ReadLine(); for(int i = 0; i < ProfileLoopAmount; i++) { foreach(var b in benchersToProfile) { if(PerformSetInsertBenchmarks && b.SupportsInserts) { Console.WriteLine("Running set insert benchmark for profile for bencher: {0}.", b.CreateFrameworkName()); b.PerformInsertSetBenchmark(InsertSetSize, InsertBatchSizeDefault); } if(PerformSetBenchmarks && b.SupportsSetFetch) { Console.WriteLine("Running set benchmark for profile for bencher: {0}. Change tracking: {1}", b.CreateFrameworkName(), b.UsesChangeTracking); b.PerformSetBenchmark(); } if(PerformIndividualBenchMarks && b.SupportsIndividualFetch) { Console.WriteLine("Running individual fetch benchmark for profile for bencher: {0}. Change tracking: {1}", b.CreateFrameworkName(), b.UsesChangeTracking); b.PerformIndividualBenchMark(KeysForIndividualFetches); } if(PerformEagerLoadBenchmarks && b.SupportsEagerLoading) { Console.WriteLine("Running eager load fetch benchmark for profile for bencher: {0}. Change tracking: {1}", b.CreateFrameworkName(), b.UsesChangeTracking); b.PerformEagerLoadBenchmark(); } if(PerformAsyncBenchmarks && b.SupportsEagerLoading && b.SupportsAsync) { Console.WriteLine("Running async eager load fetch benchmark for profile for bencher: {0}. Change tracking: {1}", b.CreateFrameworkName(), b.UsesChangeTracking); b.PerformAsyncEagerLoadBenchmark(discardResults: false); } } } Console.WriteLine("Done. Grab snapshot and stop profiler. 
Press ENTER to continue."); Console.ReadLine(); } private static void InitConnectionString() { #if !NETCOREAPP // Use the connection string from app.config instead of the static variable if the connection string exists var connectionStringFromConfig = ConfigurationManager.ConnectionStrings[DataAccessAdapter.ConnectionStringKeyName]; if(connectionStringFromConfig != null) { ConnectionString = string.IsNullOrEmpty(connectionStringFromConfig.ConnectionString) ? ConnectionString : connectionStringFromConfig.ConnectionString; } #endif } private static void FetchKeysForIndividualFetches() { using(var conn = new SqlConnection(ConnectionString)) { KeysForIndividualFetches = conn.Query<int>("select top {=count} SalesOrderId from AdventureWorks.Sales.SalesOrderHeader order by SalesOrderNumber", new {count = IndividualKeysAmount}).AsList(); } if(KeysForIndividualFetches.Count != IndividualKeysAmount) { throw new InvalidOperationException("Can't fetch the keys for the individual benchmarks"); } } private static void RunRegisteredBenchers() { Console.WriteLine("\nStarting benchmarks."); Console.WriteLine("===================================================================="); foreach(var bencher in RegisteredBenchers) { BenchController.DisplayBencherInfo(bencher); try { BenchController.RunBencher(bencher); } catch(Exception ex) { BencherUtils.DisplayException(ex); } } Console.WriteLine("\nPerforming memory measurement runs."); Console.WriteLine("===================================================================="); #if !NETCOREAPP AppDomain.MonitoringIsEnabled = true; #endif foreach(var bencher in RegisteredBenchers) { BenchController.DisplayBencherInfo(bencher); bencher.CollectMemoryAllocated = true; try { BenchController.RunMemoryAnalysisForBencher(bencher); } catch(Exception ex) { BencherUtils.DisplayException(ex); } bencher.CollectMemoryAllocated = false; } } private static void RunMemoryAnalysisForBencher(IBencher bencher) { Console.WriteLine("\nStarting bench runs..."); 
BenchResult result; if(PerformSetBenchmarks && bencher.SupportsSetFetch) { // set benches Console.WriteLine("Set fetches"); Console.WriteLine("-------------------------"); result = bencher.PerformSetBenchmark(discardResults:true); BenchController.ReportMemoryUsageSetResult(result); bencher.MemorySetBenchmarks = result.NumberOfBytesAllocated; // avoid having the GC collect in the middle of a run. BenchController.ForceGCCollect(); } if(PerformIndividualBenchMarks && bencher.SupportsIndividualFetch) { // individual benches Console.WriteLine("\nSingle element fetches"); Console.WriteLine("-------------------------"); result = bencher.PerformIndividualBenchMark(KeysForIndividualFetches, discardResults:true); BenchController.ReportMemoryUsageIndividualResult(result); bencher.MemoryIndividualBenchmarks = result.NumberOfBytesAllocated; // avoid having the GC collect in the middle of a run. BenchController.ForceGCCollect(); if(ApplyAntiFloodForVMUsage) { // sleep is to avoid hammering the network layer on the target server. If the target server is a VM, it might stall once or twice // during benching, which is not what we want at it can skew the results a lot. In a very short time, a lot of queries are executed // on the target server (LoopAmount * IndividualKeysAmount), which will hurt performance on VMs with very fast frameworks in some // cases in some runs (so more than 2 runs are slow). #pragma warning disable CS0162 Thread.Sleep(400); #pragma warning restore CS0162 } } if(PerformEagerLoadBenchmarks && bencher.SupportsEagerLoading) { // eager load benches Console.WriteLine("\nEager Load fetches"); Console.WriteLine("-------------------------"); result = bencher.PerformEagerLoadBenchmark(discardResults:true); BenchController.ReportMemoryUsageEagerLoadResult(result); bencher.MemoryEagerLoadBenchmarks = result.NumberOfBytesAllocated; // avoid having the GC collect in the middle of a run. 
BenchController.ForceGCCollect(); } if(PerformAsyncBenchmarks && bencher.SupportsEagerLoading && bencher.SupportsAsync) { // eager load benches Console.WriteLine("\nAsync eager Load fetches"); Console.WriteLine("-------------------------"); result = bencher.PerformAsyncEagerLoadBenchmark(discardResults: true); BenchController.ReportMemoryUsageEagerLoadResult(result); bencher.MemoryAsyncEagerLoadBenchmarks = result.NumberOfBytesAllocated; // avoid having the GC collect in the middle of a run. BenchController.ForceGCCollect(); } if(PerformSetInsertBenchmarks && bencher.SupportsInserts) { // set inserts Console.WriteLine("\nSet inserts"); Console.WriteLine("-------------------------"); result = bencher.PerformInsertSetBenchmark(InsertSetSize, InsertBatchSizeDefault, discardResults:true); BenchController.ReportMemoryUsageInsertSetResult(result); bencher.MemorySetInsertBenchmarks = result.NumberOfBytesAllocated; // avoid having the GC collect in the middle of a run. BenchController.ForceGCCollect(); } } private static void RunBencher(IBencher bencher) { bencher.ResetResults(); Console.WriteLine("First one warm-up run of each bench type to initialize constructs. 
Results will not be collected."); BenchResult result = null; if(PerformSetBenchmarks && bencher.SupportsSetFetch) { result = bencher.PerformSetBenchmark(discardResults: true); BenchController.ReportSetResult(result); } if(bencher.SupportsEagerLoading) { result = bencher.PerformEagerLoadBenchmark(discardResults: true); BenchController.ReportEagerLoadResult(result); if(PerformAsyncBenchmarks && bencher.SupportsAsync) { result = bencher.PerformAsyncEagerLoadBenchmark(discardResults: true); BenchController.ReportEagerLoadResult(result); } } if(PerformIndividualBenchMarks && bencher.SupportsIndividualFetch) { result = bencher.PerformIndividualBenchMark(KeysForIndividualFetches, discardResults: true); BenchController.ReportIndividualResult(result); } if(PerformSetInsertBenchmarks && bencher.SupportsInserts) { result = bencher.PerformInsertSetBenchmark(InsertSetSize, InsertBatchSizeDefault, discardResults: true); BenchController.ReportInsertSetResult(result); } Console.WriteLine("Doing a GC collect..."); BenchController.ForceGCCollect(); Console.WriteLine("Done."); Console.WriteLine("\nStarting bench runs..."); if(PerformSetBenchmarks && bencher.SupportsSetFetch) { // set benches Console.WriteLine("Set fetches"); Console.WriteLine("-------------------------"); for(int i = 0; i < LoopAmount; i++) { result = bencher.PerformSetBenchmark(); BenchController.ReportSetResult(result); // avoid having the GC collect in the middle of a run. BenchController.ForceGCCollect(); } } if(PerformIndividualBenchMarks && bencher.SupportsIndividualFetch) { // individual benches Console.WriteLine("\nSingle element fetches"); Console.WriteLine("-------------------------"); for(int i = 0; i < LoopAmount; i++) { result = bencher.PerformIndividualBenchMark(KeysForIndividualFetches); BenchController.ReportIndividualResult(result); // avoid having the GC collect in the middle of a run. 
BenchController.ForceGCCollect(); if(ApplyAntiFloodForVMUsage) { // sleep is to avoid hammering the network layer on the target server. If the target server is a VM, it might stall once or twice // during benching, which is not what we want at it can skew the results a lot. In a very short time, a lot of queries are executed // on the target server (LoopAmount * IndividualKeysAmount), which will hurt performance on VMs with very fast frameworks in some // cases in some runs (so more than 2 runs are slow). #pragma warning disable CS0162 Thread.Sleep(400); #pragma warning restore CS0162 } } } if(PerformEagerLoadBenchmarks && bencher.SupportsEagerLoading) { // eager load benches Console.WriteLine("\nEager Load fetches"); Console.WriteLine("-------------------------"); for(int i = 0; i < LoopAmount; i++) { result = bencher.PerformEagerLoadBenchmark(); BenchController.ReportEagerLoadResult(result); // avoid having the GC collect in the middle of a run. BenchController.ForceGCCollect(); } } if(PerformAsyncBenchmarks && bencher.SupportsEagerLoading && bencher.SupportsAsync) { // eager load benches Console.WriteLine("\nAsync eager Load fetches"); Console.WriteLine("-------------------------"); for(int i = 0; i < LoopAmount; i++) { result = bencher.PerformAsyncEagerLoadBenchmark(discardResults: false); BenchController.ReportEagerLoadResult(result); // avoid having the GC collect in the middle of a run. BenchController.ForceGCCollect(); } } if(PerformSetInsertBenchmarks && bencher.SupportsInserts) { // set insert benches Console.WriteLine("\nSet Inserts"); Console.WriteLine("-------------------------"); for(int i = 0; i < LoopAmount; i++) { result = bencher.PerformInsertSetBenchmark(InsertSetSize, InsertBatchSizeDefault); BenchController.ReportInsertSetResult(result); // avoid having the GC collect in the middle of a run. 
BenchController.ForceGCCollect(); } } } private static void WarmupDB() { IBencher dbWarmer = new DataTableBencher() {CommandText = SqlSelectCommandText, ConnectionStringToUse = ConnectionString}; Console.WriteLine("\nWarming up DB, DB client code and CLR"); Console.WriteLine("===================================================================="); BenchController.DisplayBencherInfo(dbWarmer); for(int i = 0; i < LoopAmount; i++) { var result = dbWarmer.PerformSetBenchmark(); BenchController.ReportSetResult(result); } } private static void DisplayBencherInfo(IBencher bencher) { BenchController.DisplayBencherInfo(bencher, "\n", suffixWithDashLine: true); } private static void DisplayBencherInfo(IBencher bencher, string linePrefix, bool suffixWithDashLine) { Console.Write(linePrefix); Console.WriteLine("{0}. Change tracking: {1}. Caching: {2}.", bencher.CreateFrameworkName(), bencher.UsesChangeTracking, bencher.UsesCaching); if(suffixWithDashLine) { Console.WriteLine("--------------------------------------------------------------------------------------------"); } } private static void ReportSetResult(BenchResult result) { Console.WriteLine("[{0:HH:mm:ss}] # of elements fetched: {1}.\tFetch took: {2:N2}ms.\tEnumerating result took: {3:N2}ms.", DateTime.Now, result.TotalNumberOfRowsAffected, result.ActionTimeInMilliseconds, result.EnumerationTimeInMilliseconds); } private static void ReportIndividualResult(BenchResult result) { Console.WriteLine("[{0:HH:mm:ss}] # of elements fetched individually: {1}.\tTotal time: {2:N2}ms.\tTime per element: {3:N2}ms", DateTime.Now, KeysForIndividualFetches.Count, result.ActionTimeInMilliseconds, result.ActionTimeInMilliseconds / KeysForIndividualFetches.Count); } private static void ReportEagerLoadResult(BenchResult result) { Console.WriteLine("[{0:HH:mm:ss}] # of elements fetched: {1} ({2}).\tFetch took: {3:N2}ms.", DateTime.Now, result.TotalNumberOfRowsAffected, string.Join(" + ", result.NumberOfRowsAffectedPerType.Select(kvp => 
kvp.Value).ToArray()), result.ActionTimeInMilliseconds);
}

/// <summary>Reports timing and allocated bytes for a set fetch memory run.</summary>
private static void ReportMemoryUsageSetResult(BenchResult result)
{
    Console.WriteLine("[{0:HH:mm:ss}] # of elements fetched: {1}.\tFetch took: {2:N2}ms.\tAllocated bytes: {3}.", DateTime.Now, result.TotalNumberOfRowsAffected, result.ActionTimeInMilliseconds, result.NumberOfBytesAllocated);
}

/// <summary>Reports timing and allocated bytes for an individual-fetch memory run.</summary>
private static void ReportMemoryUsageIndividualResult(BenchResult result)
{
    // NOTE(review): the label says "per element" but NumberOfBytesAllocated looks
    // like a per-run total here - confirm against the bencher implementations.
    Console.WriteLine("[{0:HH:mm:ss}] # of elements fetched individually: {1}.\tTotal time: {2:N2}ms.\tAllocated bytes per element: {3}.", DateTime.Now, KeysForIndividualFetches.Count, result.ActionTimeInMilliseconds, result.NumberOfBytesAllocated);
}

/// <summary>Reports timing and allocated bytes for an eager-load memory run.</summary>
private static void ReportMemoryUsageEagerLoadResult(BenchResult result)
{
    // Fixed: the format string previously used {3} twice ("Fetch took: {3:N2}ms. Allocated bytes: {3}.")
    // with only four arguments, so the allocated byte count was printed as the fetch time and the
    // actual ActionTimeInMilliseconds was never shown. Now {3} is the fetch time and {4} the
    // allocated bytes, consistent with the other memory reports above.
    Console.WriteLine("[{0:HH:mm:ss}] # of elements fetched: {1} ({2}).\tFetch took: {3:N2}ms.\tAllocated bytes: {4}.", DateTime.Now, result.TotalNumberOfRowsAffected,
                      string.Join(" + ", result.NumberOfRowsAffectedPerType.Select(kvp => kvp.Value).ToArray()), result.ActionTimeInMilliseconds, result.NumberOfBytesAllocated);
}

/// <summary>Reports the result of a set insert benchmark run.</summary>
private static void ReportInsertSetResult(BenchResult result)
{
    Console.WriteLine("[{0:HH:mm:ss}] # of elements inserted as set: {1} (batch size: {2}).\tSet insert took: {3:N2}ms.", DateTime.Now, result.TotalNumberOfRowsAffected, result.InsertBatchSize, result.ActionTimeInMilliseconds);
}

/// <summary>Reports timing and allocated bytes for a set insert memory run.</summary>
private static void ReportMemoryUsageInsertSetResult(BenchResult result)
{
    Console.WriteLine("[{0:HH:mm:ss}] # of elements inserted as set: {1}.\tSet insert took: {2:N2}ms.\tAllocated bytes: {3}.", DateTime.Now, result.TotalNumberOfRowsAffected, result.ActionTimeInMilliseconds, result.NumberOfBytesAllocated);
}

/// <summary>
/// Reports the resulting statistics (mean/standard deviation) to standard out
/// </summary>
/// <param name="autoExit">if set to <c>true</c> the method won't wait for ENTER to exit but will exit immediately.</param>
private static void ReportResultStatistics(bool autoExit)
{
    Console.WriteLine("\nResults per framework. Values are given as: 'mean (standard deviation)'");
    Console.WriteLine("==============================================================================");
    // Determine the column width from the longest framework name, and make sure
    // every bencher has its statistics calculated before anything is listed.
    int longestNameLength = 0;
    foreach(var bencher in RegisteredBenchers)
    {
        string name = bencher.CreateFrameworkName();
        if(name.Length > longestNameLength)
        {
            longestNameLength = name.Length;
        }
        bencher.CalculateStatistics();
    }
    if(PerformSetBenchmarks)
    {
        // Non-change tracking set fetches, no caching.
        var benchersToList = RegisteredBenchers.Where(b => b.SupportsSetFetch && !b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.SetFetchMean).ToList();
        if(benchersToList.Count > 0)
        {
            Console.WriteLine("Non-change tracking fetches, set fetches ({0} runs), no caching", LoopAmount);
            Console.WriteLine("------------------------------------------------------------------------------");
            foreach(var bencher in benchersToList)
            {
                Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms)\tEnum: {3:N2}ms ({4:N2}ms)", bencher.CreateFrameworkName(), bencher.SetFetchMean, bencher.SetFetchSD, bencher.EnumerationMean, bencher.EnumerationSD);
            }
            Console.WriteLine("\nMemory usage, per iteration");
            Console.WriteLine("------------------------------------------------------------------------------");
            foreach(var bencher in benchersToList.OrderBy(b=>b.MemorySetBenchmarks))
            {
                Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemorySetBenchmarks / 1024, bencher.MemorySetBenchmarks);
            }
        }
        // Change tracking set fetches, no caching.
        benchersToList = RegisteredBenchers.Where(b =>b.SupportsSetFetch && b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.SetFetchMean).ToList();
        if(benchersToList.Count > 0)
        {
            Console.WriteLine("\nChange tracking fetches, set fetches ({0} runs), no caching", LoopAmount);
            Console.WriteLine("------------------------------------------------------------------------------");
            foreach(var bencher in benchersToList)
            {
                Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms)\tEnum: {3:N2}ms ({4:N2}ms)",
bencher.CreateFrameworkName(), bencher.SetFetchMean, bencher.SetFetchSD, bencher.EnumerationMean, bencher.EnumerationSD); } Console.WriteLine("\nMemory usage, per iteration"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemorySetBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemorySetBenchmarks / 1024, bencher.MemorySetBenchmarks); } } } if(PerformIndividualBenchMarks) { var benchersToList = RegisteredBenchers.Where(b =>b.SupportsIndividualFetch && !b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.IndividualFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nNon-change tracking individual fetches ({0} elements, {1} runs), no caching", IndividualKeysAmount, LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms) per individual fetch", bencher.CreateFrameworkName(), bencher.IndividualFetchMean / IndividualKeysAmount, bencher.IndividualFetchSD / IndividualKeysAmount); } Console.WriteLine("\nMemory usage, per individual element"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemoryIndividualBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemoryIndividualBenchmarks / 1024, bencher.MemoryIndividualBenchmarks); } } benchersToList = RegisteredBenchers.Where(b => b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.IndividualFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nChange tracking individual fetches ({0} elements, {1} runs), no caching", IndividualKeysAmount, 
LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms) per individual fetch", bencher.CreateFrameworkName(), bencher.IndividualFetchMean / IndividualKeysAmount, bencher.IndividualFetchSD / IndividualKeysAmount); } Console.WriteLine("\nMemory usage, per individual element"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemoryIndividualBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemoryIndividualBenchmarks / 1024, bencher.MemoryIndividualBenchmarks); } } } if(PerformEagerLoadBenchmarks) { var benchersToList = RegisteredBenchers.Where(b => b.SupportsEagerLoading && !b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.EagerLoadFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nNon-change tracking fetches, eager load fetches, 3-node split graph, 1000 root elements ({0} runs), no caching", LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms)", bencher.CreateFrameworkName(), bencher.EagerLoadFetchMean, bencher.EagerLoadFetchSD); } Console.WriteLine("\nMemory usage, per iteration"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemoryEagerLoadBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemoryEagerLoadBenchmarks / 1024, bencher.MemoryEagerLoadBenchmarks); } } benchersToList = RegisteredBenchers.Where(b => 
b.SupportsEagerLoading && b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.EagerLoadFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nChange tracking fetches, eager load fetches, 3-node split graph, 1000 root elements ({0} runs), no caching", LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms)", bencher.CreateFrameworkName(), bencher.EagerLoadFetchMean, bencher.EagerLoadFetchSD); } Console.WriteLine("\nMemory usage, per iteration"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemoryEagerLoadBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemoryEagerLoadBenchmarks / 1024, bencher.MemoryEagerLoadBenchmarks); } } benchersToList = RegisteredBenchers.Where(b => b.SupportsEagerLoading && b.SupportsAsync && b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.EagerLoadFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nAsync change tracking fetches, eager load fetches, 3-node split graph, 1000 root elements ({0} runs), no caching", LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms)", bencher.CreateFrameworkName(), bencher.AsyncEagerLoadFetchMean, bencher.AsyncEagerLoadFetchSD); } Console.WriteLine("\nMemory usage, per iteration"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemoryAsyncEagerLoadBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} 
bytes)", bencher.CreateFrameworkName(), bencher.MemoryAsyncEagerLoadBenchmarks / 1024, bencher.MemoryAsyncEagerLoadBenchmarks); } } benchersToList = RegisteredBenchers.Where(b => b.SupportsEagerLoading && b.SupportsAsync && !b.UsesChangeTracking && !b.UsesCaching).OrderBy(b => b.EagerLoadFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nAsync Non-change tracking fetches, eager load fetches, 3-node split graph, 1000 root elements ({0} runs), no caching", LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms)", bencher.CreateFrameworkName(), bencher.AsyncEagerLoadFetchMean, bencher.AsyncEagerLoadFetchSD); } Console.WriteLine("\nMemory usage, per iteration"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemoryAsyncEagerLoadBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemoryAsyncEagerLoadBenchmarks / 1024, bencher.MemoryAsyncEagerLoadBenchmarks); } } } if(PerformSetBenchmarks) { var benchersToList = RegisteredBenchers.Where(b => b.UsesChangeTracking && b.UsesCaching).OrderBy(b => b.SetFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nChange tracking fetches, set fetches ({0} runs), caching", LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms)\tEnum: {3:N2}ms ({4:N2}ms)", bencher.CreateFrameworkName(), bencher.SetFetchMean, bencher.SetFetchSD, bencher.EnumerationMean, bencher.EnumerationSD); } Console.WriteLine("\nMemory usage, per iteration"); 
Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemorySetBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemorySetBenchmarks / 1024, bencher.MemorySetBenchmarks); } } } if(PerformIndividualBenchMarks) { var benchersToList = RegisteredBenchers.Where(b => b.UsesChangeTracking && b.UsesCaching).OrderBy(b => b.IndividualFetchMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nChange tracking individual fetches ({0} elements, {1} runs), caching", IndividualKeysAmount, LoopAmount); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:N2}ms ({2:N2}ms) per individual fetch", bencher.CreateFrameworkName(), bencher.IndividualFetchMean / IndividualKeysAmount, bencher.IndividualFetchSD / IndividualKeysAmount); } Console.WriteLine("\nMemory usage, per individual element"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemoryIndividualBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemoryIndividualBenchmarks / 1024, bencher.MemoryIndividualBenchmarks); } } } if(PerformSetInsertBenchmarks) { var benchersToList = RegisteredBenchers.Where(b => b.SupportsInserts).OrderBy(b => b.SetInsertMean).ToList(); if(benchersToList.Count > 0) { Console.WriteLine("\nSet inserts of {0} elements in one go ({1} runs with batchsize {2})", InsertSetSize, LoopAmount, InsertBatchSizeDefault); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList) { Console.WriteLine("{0,-" + 
longestNameLength + "} : {1:N2}ms ({2:N2}ms)", bencher.CreateFrameworkName(), bencher.SetInsertMean, bencher.SetInsertSD); } Console.WriteLine("\nMemory usage, per iteration"); Console.WriteLine("------------------------------------------------------------------------------"); foreach(var bencher in benchersToList.OrderBy(b=>b.MemorySetInsertBenchmarks)) { Console.WriteLine("{0,-" + longestNameLength + "} : {1:0,0} KB ({2:0,0} bytes)", bencher.CreateFrameworkName(), bencher.MemorySetInsertBenchmarks / 1024, bencher.MemorySetInsertBenchmarks); } } } Console.Write("\nComplete."); if(autoExit) { return; } Console.WriteLine(" Press enter to exit."); Console.ReadLine(); } private static void ForceGCCollect() { GC.Collect(); GC.WaitForPendingFinalizers(); GC.Collect(); } } }
#region File Description
//-----------------------------------------------------------------------------
// GameScreen.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#endregion

using System;
using System.IO;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Input.Touch;

namespace AuroraManagement
{
    /// <summary>
    /// Enum describes the screen transition state.
    /// </summary>
    public enum ScreenState
    {
        TransitionOn,
        Active,
        TransitionOff,
        Hidden,
    }


    /// <summary>
    /// A screen is a single layer with its own update and draw logic which can be
    /// stacked with other layers to build up a complete menu system. The main menu,
    /// the options menu, an "are you sure?" message box and the game itself are all
    /// individual screens managed by a ScreenManager.
    /// </summary>
    public abstract class GameScreen
    {
        /// <summary>
        /// Normally, when one screen is brought up over another, the screen underneath
        /// transitions off to make room. If this screen is only a small popup, screens
        /// underneath it do not need to bother transitioning off; this flag indicates that.
        /// </summary>
        public bool IsPopup
        {
            get { return _isPopup; }
            protected set { _isPopup = value; }
        }

        bool _isPopup = false;


        /// <summary>
        /// How long the screen takes to transition on when it is activated.
        /// </summary>
        public TimeSpan TransitionOnTime
        {
            get { return _transitionOnTime; }
            protected set { _transitionOnTime = value; }
        }

        TimeSpan _transitionOnTime = TimeSpan.Zero;


        /// <summary>
        /// How long the screen takes to transition off when it is deactivated.
        /// </summary>
        public TimeSpan TransitionOffTime
        {
            get { return _transitionOffTime; }
            protected set { _transitionOffTime = value; }
        }

        TimeSpan _transitionOffTime = TimeSpan.Zero;


        /// <summary>
        /// Current position of the screen transition, from zero (fully active,
        /// no transition) to one (transitioned fully off to nothing).
        /// </summary>
        public float TransitionPosition
        {
            get { return _transitionPosition; }
            protected set { _transitionPosition = value; }
        }

        float _transitionPosition = 1;


        /// <summary>
        /// Current alpha of the screen transition, from 1 (fully active, no
        /// transition) to 0 (transitioned fully off to nothing).
        /// </summary>
        public float TransitionAlpha
        {
            get { return 1f - TransitionPosition; }
        }


        /// <summary>
        /// The current screen transition state.
        /// </summary>
        public ScreenState ScreenState
        {
            get { return _screenState; }
            protected set { _screenState = value; }
        }

        ScreenState _screenState = ScreenState.TransitionOn;


        /// <summary>
        /// A screen may transition off either temporarily (another screen is covering it)
        /// or for good. This flag indicates the latter: when set, the screen removes
        /// itself from the manager as soon as its off-transition finishes.
        /// </summary>
        public bool IsExiting
        {
            get { return _isExiting; }
            protected internal set { _isExiting = value; }
        }

        bool _isExiting = false;


        /// <summary>
        /// Whether this screen is active and can respond to user input.
        /// </summary>
        public bool IsActive
        {
            get
            {
                return !_otherScreenHasFocus &&
                       (_screenState == ScreenState.TransitionOn ||
                        _screenState == ScreenState.Active);
            }
        }

        bool _otherScreenHasFocus;


        /// <summary>
        /// The manager that this screen belongs to.
        /// </summary>
        public ScreenManager ScreenManager
        {
            get { return _screenManager; }
            internal set { _screenManager = value; }
        }

        ScreenManager _screenManager;


        /// <summary>
        /// Index of the player currently controlling this screen, or null to accept
        /// input from any player. This locks the game to one player profile: the main
        /// menu responds to any gamepad, but whichever player makes a selection gains
        /// control of all subsequent screens until returning to the main menu.
        /// </summary>
        public PlayerIndex? ControllingPlayer
        {
            get { return _controllingPlayer; }
            internal set { _controllingPlayer = value; }
        }

        PlayerIndex? _controllingPlayer;


        /// <summary>
        /// Gestures the screen is interested in. Be as specific as possible to improve
        /// gesture-engine accuracy; most menus only need Tap or Tap + VerticalDrag.
        /// The ScreenManager applies these during screen changes, and all recognized
        /// gestures arrive via the InputState passed to HandleInput.
        /// </summary>
        public GestureType EnabledGestures
        {
            get { return _enabledGestures; }
            protected set
            {
                _enabledGestures = value;

                // The screen manager normally applies this during screen changes, but if
                // this screen is already active we must push the change to the TouchPanel
                // ourselves.
                if (ScreenState == ScreenState.Active)
                {
                    TouchPanel.EnabledGestures = value;
                }
            }
        }

        GestureType _enabledGestures = GestureType.None;


        /// <summary>
        /// Whether this screen participates in serialization. When true, the screen is
        /// recorded into the screen manager's state and its Serialize/Deserialize methods
        /// are invoked as appropriate; when false it is ignored. Defaults to true.
        /// </summary>
        public bool IsSerializable
        {
            get { return _isSerializable; }
            protected set { _isSerializable = value; }
        }

        bool _isSerializable = true;


        /// <summary>
        /// Activates the screen. Called when the screen is added to the screen manager
        /// or when the game resumes from being paused or tombstoned.
        /// </summary>
        /// <param name="instancePreserved">
        /// True if the game was preserved during deactivation, false if the screen is just
        /// being added or the game was tombstoned. Always false on Xbox and Windows.
        /// </param>
        public virtual void Activate(bool instancePreserved) { }


        /// <summary>
        /// Deactivates the screen. Called when the game is being deactivated due to
        /// pausing or tombstoning.
        /// </summary>
        public virtual void Deactivate() { }


        /// <summary>
        /// Unload content for the screen. Called when the screen is removed from the
        /// screen manager.
        /// </summary>
        public virtual void Unload() { }


        /// <summary>
        /// Runs screen logic such as advancing the transition position. Unlike
        /// HandleInput, this is called regardless of whether the screen is active,
        /// hidden, or mid-transition.
        /// </summary>
        public virtual void Update(GameTime gameTime, bool otherScreenHasFocus,
                                   bool coveredByOtherScreen)
        {
            _otherScreenHasFocus = otherScreenHasFocus;

            if (_isExiting)
            {
                // The screen is going away for good: transition off, then remove it.
                _screenState = ScreenState.TransitionOff;

                if (!UpdateTransition(gameTime, _transitionOffTime, 1))
                {
                    // Off-transition has finished; take the screen out of the manager.
                    ScreenManager.RemoveScreen(this);
                }
                return;
            }

            if (coveredByOtherScreen)
            {
                // Covered by another screen: transition off, then hide.
                _screenState = UpdateTransition(gameTime, _transitionOffTime, 1)
                                   ? ScreenState.TransitionOff
                                   : ScreenState.Hidden;
                return;
            }

            // Otherwise the screen should transition on and become active.
            _screenState = UpdateTransition(gameTime, _transitionOnTime, -1)
                               ? ScreenState.TransitionOn
                               : ScreenState.Active;
        }


        /// <summary>
        /// Advances the transition position in the given direction (-1 = on, 1 = off).
        /// Returns true while the transition is still in progress, false once finished.
        /// </summary>
        bool UpdateTransition(GameTime gameTime, TimeSpan time, int direction)
        {
            // A zero transition time means the transition completes in a single step.
            float delta = (time == TimeSpan.Zero)
                              ? 1
                              : (float)(gameTime.ElapsedGameTime.TotalMilliseconds /
                                        time.TotalMilliseconds);

            _transitionPosition += delta * direction;

            // Have we hit either end of the transition range?
            bool reachedStart = (direction < 0) && (_transitionPosition <= 0);
            bool reachedEnd = (direction > 0) && (_transitionPosition >= 1);

            if (reachedStart || reachedEnd)
            {
                _transitionPosition = MathHelper.Clamp(_transitionPosition, 0, 1);
                return false;
            }

            // Still busy transitioning.
            return true;
        }


        /// <summary>
        /// Allows the screen to handle user input. Unlike Update, this is only called
        /// when the screen is active and no other screen has the focus.
        /// </summary>
        public virtual void HandleInput(GameTime gameTime, InputState input) { }


        /// <summary>
        /// This is called when the screen should draw itself.
        /// </summary>
        public virtual void Draw(GameTime gameTime) { }


        /// <summary>
        /// Tells the screen to go away. Unlike ScreenManager.RemoveScreen, which kills
        /// the screen instantly, this respects the transition timings and gives the
        /// screen a chance to transition off gradually.
        /// </summary>
        public void ExitScreen()
        {
            if (TransitionOffTime == TimeSpan.Zero)
            {
                // Zero transition time: remove immediately.
                ScreenManager.RemoveScreen(this);
            }
            else
            {
                // Flag that it should transition off and then exit.
                _isExiting = true;
            }
        }
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) Under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You Under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed Under the License is distributed on an "AS Is" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations Under the License.
==================================================================== */

namespace NPOI.HSSF.Record
{

    using System;
    using System.Text;
    using System.Collections;
    using NPOI.DDF;
    using NPOI.Util;
    using System.Collections.Generic;
    using NPOI.HSSF.Util;

    /**
     * The escher container record is used to hold escher records. It is abstract and
     * must be subclassed for maximum benefit.
     *
     * The raw escher bytes are kept as-is in rawDataContainer and only parsed into
     * EscherRecord objects on demand (see Decode / ConvertRawBytesToEscherRecords);
     * while undecoded, Serialize simply echoes the raw bytes back out.
     *
     * @author Glen Stampoultzis (glens at apache.org)
     * @author Michael Zalewski (zalewski at optonline.net)
     */
    internal abstract class AbstractEscherHolderRecord : Record
    {
        // When true, records are eagerly deserialised into escher records on read;
        // when false (the default) the raw bytes are kept and decoded lazily.
        private static bool DESERIALISE;
        static AbstractEscherHolderRecord()
        {
            // Originally driven by the 'poi.deserialize.escher' app setting; hard-wired
            // to false here so reading never eagerly parses the escher stream.
            DESERIALISE = false;
        }

        private List<EscherRecord> escherRecords;
        private LazilyConcatenatedByteArray rawDataContainer = new LazilyConcatenatedByteArray();

        public AbstractEscherHolderRecord()
        {
            escherRecords = new List<EscherRecord>();
        }

        /**
         * Constructs an escher holder record and Sets its fields appropriately.
         *
         * @param in1 the RecordInputStream to Read the record from
         */
        public AbstractEscherHolderRecord(RecordInputStream in1)
        {
            escherRecords = new List<EscherRecord>();
            if (!DESERIALISE)
            {
                // Keep the raw bytes; they can be decoded later via Decode().
                rawDataContainer.Concatenate(in1.ReadRemainder());
            }
            else
            {
                byte[] data = in1.ReadAllContinuedRemainder();
                ConvertToEscherRecords(0, data.Length, data);
            }
        }

        /**
         * Parses the accumulated raw bytes into escher records.
         */
        protected void ConvertRawBytesToEscherRecords()
        {
            byte[] rawData = RawData;
            ConvertToEscherRecords(0, rawData.Length, rawData);
        }

        /**
         * Replaces the current escher record list with the records parsed from the
         * given byte range.
         */
        private void ConvertToEscherRecords(int offset, int size, byte[] data)
        {
            escherRecords.Clear();
            EscherRecordFactory recordFactory = new DefaultEscherRecordFactory();
            int pos = offset;
            while (pos < offset + size)
            {
                EscherRecord r = recordFactory.CreateRecord(data, pos);
                int bytesRead = r.FillFields(data, pos, recordFactory);
                escherRecords.Add(r);
                pos += bytesRead;
            }
        }

        public override String ToString()
        {
            StringBuilder buffer = new StringBuilder();
            String nl = Environment.NewLine;
            buffer.Append('[' + RecordName + ']' + nl);
            if (escherRecords.Count == 0)
                buffer.Append("No Escher Records Decoded" + nl);
            foreach (EscherRecord r in escherRecords)
            {
                buffer.Append(r.ToString());
            }
            buffer.Append("[/" + RecordName + ']' + nl);
            return buffer.ToString();
        }

        protected abstract String RecordName { get; }

        /**
         * Serializes this record (4 byte header followed by the payload) into data at
         * the given offset and returns the number of bytes written.
         */
        public override int Serialize(int offset, byte[] data)
        {
            // Write the record header (sid + payload length) once; the original code
            // redundantly wrote the identical header up to three times.
            LittleEndian.PutShort(data, 0 + offset, Sid);
            LittleEndian.PutShort(data, 2 + offset, (short)(RecordSize - 4));
            byte[] rawData = RawData;
            if (escherRecords.Count == 0 && rawData != null)
            {
                // Never decoded: echo the raw payload back out unchanged.
                Array.Copy(rawData, 0, data, 4 + offset, rawData.Length);
                return rawData.Length + 4;
            }
            int pos = offset + 4;
            foreach (EscherRecord r in escherRecords)
            {
                pos += r.Serialize(pos, data, new NullEscherSerializationListener());
            }
            return RecordSize;
        }

        /**
         * Size of record (including 4 byte header)
         */
        public override int RecordSize
        {
            get
            {
                byte[] rawData = RawData;
                if (escherRecords.Count == 0 && rawData != null)
                {
                    return rawData.Length + 4;
                }
                int size = 4;
                foreach (EscherRecord r in escherRecords)
                {
                    size += r.RecordSize;
                }
                return size;
            }
        }

        /**
         * Clone the current record via a re-serialise round trip. May only be used for
         * classes which don't have internal counts / ids in them; for those which do, a
         * full record-aware serialise is needed which allocates new ids / counts.
         */
        public override object Clone()
        {
            return CloneViaReserialise();
        }

        public void AddEscherRecord(int index, EscherRecord element)
        {
            escherRecords.Insert(index, element);
        }

        public bool AddEscherRecord(EscherRecord element)
        {
            escherRecords.Add(element);
            return true;
        }

        public List<EscherRecord> EscherRecords
        {
            get { return escherRecords; }
        }

        public void ClearEscherRecords()
        {
            escherRecords.Clear();
        }

        /**
         * If we have a EscherContainerRecord as one of our children (and most top level
         * escher holders do), then return that; otherwise null.
         */
        public EscherContainerRecord GetEscherContainer()
        {
            foreach (EscherRecord record in escherRecords)
            {
                if (record is EscherContainerRecord)
                {
                    return (EscherContainerRecord)record;
                }
            }
            return null;
        }

        /**
         * Descends into all our children, returning the first EscherRecord with the
         * given id, or null if none found.
         */
        public EscherRecord FindFirstWithId(short id)
        {
            return FindFirstWithId(id, EscherRecords);
        }

        private EscherRecord FindFirstWithId(short id, List<EscherRecord> records)
        {
            // Check at our level first.
            foreach (EscherRecord r in records)
            {
                if (r.RecordId == id)
                {
                    return r;
                }
            }

            // Then check our children in turn.
            foreach (EscherRecord r in records)
            {
                if (r.IsContainerRecord)
                {
                    EscherRecord found = FindFirstWithId(id, r.ChildRecords);
                    if (found != null)
                    {
                        return found;
                    }
                }
            }

            // Not found in this lot.
            return null;
        }

        public EscherRecord GetEscherRecord(int index)
        {
            return escherRecords[index];
        }

        /**
         * Big drawing Group records are split but it's easier to deal with them
         * as a whole Group so we need to join them toGether.
         */
        public void Join(AbstractEscherHolderRecord record)
        {
            rawDataContainer.Concatenate(record.RawData);
        }

        public void ProcessContinueRecord(byte[] record)
        {
            rawDataContainer.Concatenate(record);
        }

        /**
         * The raw (undecoded) escher bytes held by this record.
         */
        public byte[] RawData
        {
            get { return rawDataContainer.ToArray(); }
            set
            {
                rawDataContainer.Clear();
                rawDataContainer.Concatenate(value);
            }
        }

        /**
         * Convert raw data to escher records.
         */
        public void Decode()
        {
            byte[] rawData = RawData;
            ConvertToEscherRecords(0, rawData.Length, rawData);
        }
    }
}
// Copyright (c) Brock Allen & Dominick Baier. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.


using IdentityModel;
using IdentityServer4.Extensions;
using IdentityServer4.Models;
using IdentityServer4.Services;
using IdentityServer4.Stores;
using IdentityServer4.Validation;
using Microsoft.Extensions.Logging;
using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authentication;

namespace IdentityServer4.ResponseHandling
{
    /// <summary>
    /// The default token response generator. Turns an already-validated token
    /// request (client credentials, password, authorization code, refresh token,
    /// or extension grant) into a <see cref="TokenResponse"/> containing the
    /// access token and, where applicable, a refresh token and identity token.
    /// </summary>
    /// <seealso cref="IdentityServer4.ResponseHandling.ITokenResponseGenerator" />
    public class TokenResponseGenerator : ITokenResponseGenerator
    {
        /// <summary>
        /// The logger
        /// </summary>
        protected readonly ILogger Logger;

        /// <summary>
        /// The token service
        /// </summary>
        protected readonly ITokenService TokenService;

        /// <summary>
        /// The refresh token service
        /// </summary>
        protected readonly IRefreshTokenService RefreshTokenService;

        /// <summary>
        /// The resource store
        /// </summary>
        protected readonly IResourceStore Resources;

        /// <summary>
        /// The clients store
        /// </summary>
        protected readonly IClientStore Clients;

        /// <summary>
        /// The clock
        /// </summary>
        protected readonly ISystemClock Clock;

        /// <summary>
        /// Initializes a new instance of the <see cref="TokenResponseGenerator" /> class.
        /// </summary>
        /// <param name="clock">The clock.</param>
        /// <param name="tokenService">The token service.</param>
        /// <param name="refreshTokenService">The refresh token service.</param>
        /// <param name="resources">The resources.</param>
        /// <param name="clients">The clients.</param>
        /// <param name="logger">The logger.</param>
        public TokenResponseGenerator(ISystemClock clock, ITokenService tokenService, IRefreshTokenService refreshTokenService, IResourceStore resources, IClientStore clients, ILogger<TokenResponseGenerator> logger)
        {
            Clock = clock;
            TokenService = tokenService;
            RefreshTokenService = refreshTokenService;
            Resources = resources;
            Clients = clients;
            Logger = logger;
        }

        /// <summary>
        /// Processes the response by dispatching on the grant type of the
        /// validated request. Unknown grant types are treated as extension grants.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>The token response for the request's grant type.</returns>
        public virtual async Task<TokenResponse> ProcessAsync(TokenRequestValidationResult request)
        {
            switch (request.ValidatedRequest.GrantType)
            {
                case OidcConstants.GrantTypes.ClientCredentials:
                    return await ProcessClientCredentialsRequestAsync(request);
                case OidcConstants.GrantTypes.Password:
                    return await ProcessPasswordRequestAsync(request);
                case OidcConstants.GrantTypes.AuthorizationCode:
                    return await ProcessAuthorizationCodeRequestAsync(request);
                case OidcConstants.GrantTypes.RefreshToken:
                    return await ProcessRefreshTokenRequestAsync(request);
                default:
                    // Any non-standard grant type falls through to extension-grant handling.
                    return await ProcessExtensionGrantRequestAsync(request);
            }
        }

        /// <summary>
        /// Creates the response for a client credentials request.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>The token response.</returns>
        protected virtual Task<TokenResponse> ProcessClientCredentialsRequestAsync(TokenRequestValidationResult request)
        {
            Logger.LogTrace("Creating response for client credentials request");

            return ProcessTokenRequestAsync(request);
        }

        /// <summary>
        /// Creates the response for a password request.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>The token response.</returns>
        protected virtual Task<TokenResponse> ProcessPasswordRequestAsync(TokenRequestValidationResult request)
        {
            Logger.LogTrace("Creating response for password request");

            return ProcessTokenRequestAsync(request);
        }

        /// <summary>
        /// Creates the response for an authorization code request. In addition to
        /// the access/refresh token, emits an id_token when the code was issued
        /// for an OpenID Connect request.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>The token response.</returns>
        /// <exception cref="System.InvalidOperationException">Client does not exist anymore.</exception>
        protected virtual async Task<TokenResponse> ProcessAuthorizationCodeRequestAsync(TokenRequestValidationResult request)
        {
            Logger.LogTrace("Creating response for authorization code request");

            //////////////////////////
            // access token
            /////////////////////////
            (var accessToken, var refreshToken) = await CreateAccessTokenAsync(request.ValidatedRequest);
            var response = new TokenResponse
            {
                AccessToken = accessToken,
                AccessTokenLifetime = request.ValidatedRequest.AccessTokenLifetime,
                Custom = request.CustomResponse
            };

            //////////////////////////
            // refresh token
            /////////////////////////
            // Only present when the code requested the offline_access scope
            // (decided inside CreateAccessTokenAsync).
            if (refreshToken.IsPresent())
            {
                response.RefreshToken = refreshToken;
            }

            //////////////////////////
            // id token
            /////////////////////////
            if (request.ValidatedRequest.AuthorizationCode.IsOpenId)
            {
                // load the client that belongs to the authorization code
                // (re-checked here because the client may have been disabled or
                // deleted between code issuance and redemption)
                Client client = null;
                if (request.ValidatedRequest.AuthorizationCode.ClientId != null)
                {
                    client = await Clients.FindEnabledClientByIdAsync(request.ValidatedRequest.AuthorizationCode.ClientId);
                }
                if (client == null)
                {
                    throw new InvalidOperationException("Client does not exist anymore.");
                }

                var resources = await Resources.FindEnabledResourcesByScopeAsync(request.ValidatedRequest.AuthorizationCode.RequestedScopes);

                // AccessTokenToHash lets the token service compute the at_hash
                // claim over the freshly issued access token.
                var tokenRequest = new TokenCreationRequest
                {
                    Subject = request.ValidatedRequest.AuthorizationCode.Subject,
                    Resources = resources,
                    Nonce = request.ValidatedRequest.AuthorizationCode.Nonce,
                    AccessTokenToHash = response.AccessToken,
                    ValidatedRequest = request.ValidatedRequest
                };

                var idToken = await TokenService.CreateIdentityTokenAsync(tokenRequest);
                var jwt = await TokenService.CreateSecurityTokenAsync(idToken);
                response.IdentityToken = jwt;
            }

            return response;
        }

        /// <summary>
        /// Creates the response for a refresh token request. Either re-creates the
        /// access token (refreshing its claims) or re-issues the stored one with a
        /// new creation time/lifetime, then rotates/updates the refresh token handle.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>The token response.</returns>
        protected virtual async Task<TokenResponse> ProcessRefreshTokenRequestAsync(TokenRequestValidationResult request)
        {
            Logger.LogTrace("Creating response for refresh token request");

            var oldAccessToken = request.ValidatedRequest.RefreshToken.AccessToken;
            string accessTokenString;

            if (request.ValidatedRequest.Client.UpdateAccessTokenClaimsOnRefresh)
            {
                // Client opted into fresh claims: build a brand-new access token
                // from the subject and the scopes of the stored token.
                var subject = request.ValidatedRequest.RefreshToken.Subject;

                var creationRequest = new TokenCreationRequest
                {
                    Subject = subject,
                    ValidatedRequest = request.ValidatedRequest,
                    Resources = await Resources.FindEnabledResourcesByScopeAsync(oldAccessToken.Scopes)
                };

                var newAccessToken = await TokenService.CreateAccessTokenAsync(creationRequest);
                accessTokenString = await TokenService.CreateSecurityTokenAsync(newAccessToken);
            }
            else
            {
                // Re-issue the stored access token with a fresh creation time and
                // lifetime. NOTE(review): this mutates the token object held by the
                // refresh token grant in place — presumably persisted by
                // UpdateRefreshTokenAsync below; confirm against the store implementation.
                oldAccessToken.CreationTime = Clock.UtcNow.UtcDateTime;
                oldAccessToken.Lifetime = request.ValidatedRequest.AccessTokenLifetime;

                accessTokenString = await TokenService.CreateSecurityTokenAsync(oldAccessToken);
            }

            // May return a new handle (rotation) or the same one, depending on the
            // refresh token service's configuration.
            var handle = await RefreshTokenService.UpdateRefreshTokenAsync(request.ValidatedRequest.RefreshTokenHandle, request.ValidatedRequest.RefreshToken, request.ValidatedRequest.Client);

            return new TokenResponse
            {
                IdentityToken = await CreateIdTokenFromRefreshTokenRequestAsync(request.ValidatedRequest, accessTokenString),
                AccessToken = accessTokenString,
                AccessTokenLifetime = request.ValidatedRequest.AccessTokenLifetime,
                RefreshToken = handle,
                Custom = request.CustomResponse
            };
        }

        /// <summary>
        /// Creates the response for an extension grant request.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>The token response.</returns>
        protected virtual Task<TokenResponse> ProcessExtensionGrantRequestAsync(TokenRequestValidationResult request)
        {
            Logger.LogTrace("Creating response for extension grant request");

            return ProcessTokenRequestAsync(request);
        }

        /// <summary>
        /// Creates the response for a token request. Shared by the client
        /// credentials, password, and extension grant flows.
        /// </summary>
        /// <param name="validationResult">The validation result.</param>
        /// <returns>The token response.</returns>
        protected virtual async Task<TokenResponse> ProcessTokenRequestAsync(TokenRequestValidationResult validationResult)
        {
            (var accessToken, var refreshToken) = await CreateAccessTokenAsync(validationResult.ValidatedRequest);
            var response = new TokenResponse
            {
                AccessToken = accessToken,
                AccessTokenLifetime = validationResult.ValidatedRequest.AccessTokenLifetime,
                Custom = validationResult.CustomResponse
            };

            if (refreshToken.IsPresent())
            {
                response.RefreshToken = refreshToken;
            }

            return response;
        }

        /// <summary>
        /// Creates the access/refresh token pair. The refresh token element is
        /// null unless the offline_access scope was requested/granted.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>A tuple of (serialized access token, refresh token handle or null).</returns>
        /// <exception cref="System.InvalidOperationException">Client does not exist anymore.</exception>
        protected virtual async Task<(string accessToken, string refreshToken)> CreateAccessTokenAsync(ValidatedTokenRequest request)
        {
            TokenCreationRequest tokenRequest;
            bool createRefreshToken;

            if (request.AuthorizationCode != null)
            {
                // Authorization code flow: scopes/subject come from the stored code,
                // not the current request.
                createRefreshToken = request.AuthorizationCode.RequestedScopes.Contains(IdentityServerConstants.StandardScopes.OfflineAccess);

                // load the client that belongs to the authorization code
                Client client = null;
                if (request.AuthorizationCode.ClientId != null)
                {
                    client = await Clients.FindEnabledClientByIdAsync(request.AuthorizationCode.ClientId);
                }
                if (client == null)
                {
                    throw new InvalidOperationException("Client does not exist anymore.");
                }

                var resources = await Resources.FindEnabledResourcesByScopeAsync(request.AuthorizationCode.RequestedScopes);

                tokenRequest = new TokenCreationRequest
                {
                    Subject = request.AuthorizationCode.Subject,
                    Resources = resources,
                    ValidatedRequest = request
                };
            }
            else
            {
                // All other grants: use the scopes validated on this request.
                createRefreshToken = request.ValidatedScopes.ContainsOfflineAccessScope;

                tokenRequest = new TokenCreationRequest
                {
                    Subject = request.Subject,
                    Resources = request.ValidatedScopes.GrantedResources,
                    ValidatedRequest = request
                };
            }

            var at = await TokenService.CreateAccessTokenAsync(tokenRequest);
            var accessToken = await TokenService.CreateSecurityTokenAsync(at);

            if (createRefreshToken)
            {
                var refreshToken = await RefreshTokenService.CreateRefreshTokenAsync(tokenRequest.Subject, at, request.Client);
                return (accessToken, refreshToken);
            }

            return (accessToken, null);
        }

        /// <summary>
        /// Creates an id_token for a refresh token request if identity resources
        /// have been requested; returns null otherwise.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <param name="newAccessToken">The new access token.</param>
        /// <returns>The serialized id_token, or null when no identity resources are in scope.</returns>
        protected virtual async Task<string> CreateIdTokenFromRefreshTokenRequestAsync(ValidatedTokenRequest request, string newAccessToken)
        {
            var resources = await Resources.FindResourcesByScopeAsync(request.RefreshToken.Scopes);
            if (resources.IdentityResources.Any())
            {
                var oldAccessToken = request.RefreshToken.AccessToken;

                var tokenRequest = new TokenCreationRequest
                {
                    Subject = request.RefreshToken.Subject,
                    Resources = await Resources.FindEnabledResourcesByScopeAsync(oldAccessToken.Scopes),
                    ValidatedRequest = request,
                    // hash the freshly issued access token into the at_hash claim
                    AccessTokenToHash = newAccessToken
                };

                var idToken = await TokenService.CreateIdentityTokenAsync(tokenRequest);
                return await TokenService.CreateSecurityTokenAsync(idToken);
            }

            return null;
        }
    }
}
//
// Copyright (c) Microsoft Corporation.    All rights reserved.
//

namespace Microsoft.Zelig.CodeGeneration.IR
{
    using System;
    using System.Collections.Generic;

    using Microsoft.Zelig.Runtime.TypeSystem;

    /// <summary>
    /// Base class for IR expressions that denote storage locations (arguments,
    /// locals, exception slots, temporaries, physical registers). Carries an
    /// ordinal number used for sorting/identification and optional debug metadata.
    /// </summary>
    public abstract class VariableExpression : Expression
    {
        // Bit flags describing properties a variable may have.
        [Flags]
        public enum Property
        {
            AddressTaken     = 0x00000001,
            Volatile         = 0x00000002,
            PhysicalRegister = 0x00000004,
        }

        /// <summary>
        /// Source-level debug metadata for a variable: the owning method, the
        /// original name, its number, and whether it was a local (vs. argument).
        /// </summary>
        public sealed class DebugInfo
        {
            //
            // State
            //

            private MethodRepresentation m_context;
            private string               m_name;
            private int                  m_number;
            private bool                 m_isLocal;

            //
            // Constructor Methods
            //

            public DebugInfo( MethodRepresentation context ,
                              string               name    ,
                              int                  number  ,
                              bool                 isLocal )
            {
                m_context = context;
                m_name    = name;
                m_number  = number;
                m_isLocal = isLocal;
            }

            //
            // Helper Methods
            //

            // NOTE(review): the Push/Transform/Pop sequence and field order here
            // appear to be part of the persistence/transformation contract —
            // confirm before reordering.
            public void ApplyTransformation( TransformationContextForIR context )
            {
                context.Push( this );

                context.Transform( ref m_context );
                context.Transform( ref m_name    );
                context.Transform( ref m_number  );
                context.Transform( ref m_isLocal );

                context.Pop();
            }

            //
            // Access Methods
            //

            public MethodRepresentation Context
            {
                get
                {
                    return m_context;
                }
            }

            public string Name
            {
                get
                {
                    return m_name;
                }
            }

            public int Number
            {
                get
                {
                    return m_number;
                }
            }

            public bool IsLocal
            {
                get
                {
                    return m_isLocal;
                }
            }
        }

        //--//

        // Shared empty array to avoid allocating for the common "no variables" case.
        public static new readonly VariableExpression[] SharedEmptyArray = new VariableExpression[0];

        // Variable-kind ordinals used by SortVariables: arguments sort first,
        // then locals, exception slots, and temporaries.
        protected const int c_VariableKind_Argument  = 0;
        protected const int c_VariableKind_Local     = 1;
        protected const int c_VariableKind_Exception = 2;
        protected const int c_VariableKind_Temporary = 3;

        //
        // State
        //

        protected DebugInfo m_debugInfo;
        protected int       m_number;     // ordinal within its kind; -1 until assigned

        //
        // Constructor Methods
        //

        protected VariableExpression( TypeRepresentation type      ,
                                      DebugInfo          debugInfo ) : base( type )
        {
            m_debugInfo = debugInfo;
            m_number    = -1;
        }

        //--//

        //
        // Helper Methods
        //

        /// <summary>
        /// Returns a comparison that orders variables by kind, then by number.
        /// </summary>
        public static Comparison< VariableExpression > GetSorter()
        {
            return SortVariables;
        }

        private static int SortVariables( VariableExpression x ,
                                          VariableExpression y )
        {
            int xKind = x.GetVariableKind();
            int yKind = y.GetVariableKind();

            if(xKind < yKind) return -1;
            if(xKind > yKind) return  1;

            return x.Number - y.Number;
        }

        // Returns one of the c_VariableKind_* ordinals for sorting.
        public abstract int GetVariableKind();

        //--//

        // NOTE(review): transformation order (m_number before m_debugInfo) appears
        // to be part of the persistence contract — confirm before reordering.
        public override void ApplyTransformation( TransformationContextForIR context )
        {
            context.Push( this );

            base.ApplyTransformation( context );

            context.Transform( ref m_number    );
            context.Transform( ref m_debugInfo );

            context.Pop();
        }

        //--//

        /// <summary>
        /// If the expression is a variable, returns the variable it aliases
        /// (itself by default); otherwise returns null.
        /// </summary>
        public static VariableExpression ExtractAliased( Expression ex )
        {
            VariableExpression var = ex as VariableExpression;

            if(var != null)
            {
                return var.AliasedVariable;
            }

            return null;
        }

        /// <summary>
        /// Wraps a single variable in an array, or returns the shared empty array
        /// when the variable is null.
        /// </summary>
        public static VariableExpression[] ToArray( VariableExpression ex )
        {
            return ex != null ? new VariableExpression[] { ex } : SharedEmptyArray;
        }

        //
        // Access Methods
        //

        public IInliningPathAnnotation InliningPath { get; set; }

        public DebugInfo DebugName
        {
            get
            {
                return m_debugInfo;
            }
        }

        public int Number
        {
            get
            {
                return m_number;
            }

            set
            {
                m_number = value;
            }
        }

        // Conservative default: a variable's nullability is not known statically.
        public override CanBeNull CanBeNull
        {
            get
            {
                return CanBeNull.Unknown;
            }
        }

        public override bool CanTakeAddress
        {
            get
            {
                return true;
            }
        }

        // Subclasses that alias another variable override this; by default a
        // variable aliases itself.
        public virtual VariableExpression AliasedVariable
        {
            get
            {
                return this;
            }
        }

        public bool SkipReferenceCounting { get; set; }

        //--//

        //
        // Debug Methods
        //

        // Emits "<number>[_<index>]<identity>(<type> [<debug name>])".
        // NOTE(review): m_index and m_type come from the base Expression class —
        // not visible here.
        public override void InnerToString( System.Text.StringBuilder sb )
        {
            string name;
            string fmt;

            if (m_number >= 0)
            {
                sb.Append( m_number );
            }

            if(m_index >= 0)
            {
                sb.AppendFormat( "_{0}", m_index );
            }

            AppendIdentity( sb );

            if(m_debugInfo != null)
            {
                name = m_debugInfo.Name;
            }
            else
            {
                name = null;
            }

            if(name != null)
            {
                fmt = "({0} {1})";
            }
            else
            {
                fmt = "({0})";
            }

            sb.AppendFormat( fmt, m_type.FullNameWithAbbreviation, name );
        }
    }
}
#if !UNITY_EDITOR #if (UNITY_IPHONE && EVERYPLAY_IPHONE) #define EVERYPLAY_IPHONE_ENABLED #elif (UNITY_ANDROID && EVERYPLAY_ANDROID) #define EVERYPLAY_ANDROID_ENABLED #endif #endif #if EVERYPLAY_IPHONE_ENABLED || EVERYPLAY_ANDROID_ENABLED #define EVERYPLAY_BINDINGS_ENABLED #endif using UnityEngine; using System.Runtime.InteropServices; using System; using System.Collections.Generic; using System.Collections; using EveryplayMiniJSON; public class Everyplay : MonoBehaviour { // Enumerations public enum FaceCamPreviewOrigin { TopLeft = 0, TopRight, BottomLeft, BottomRight }; public enum UserInterfaceIdiom { Phone = 0, Tablet, iPhone = Phone, iPad = Tablet }; // Delegates and events public delegate void WasClosedDelegate(); public static event WasClosedDelegate WasClosed; public delegate void ReadyForRecordingDelegate(bool enabled); public static event ReadyForRecordingDelegate ReadyForRecording; public delegate void RecordingStartedDelegate(); public static event RecordingStartedDelegate RecordingStarted; public delegate void RecordingStoppedDelegate(); public static event RecordingStoppedDelegate RecordingStopped; public delegate void FaceCamSessionStartedDelegate(); public static event FaceCamSessionStartedDelegate FaceCamSessionStarted; public delegate void FaceCamRecordingPermissionDelegate(bool granted); public static event FaceCamRecordingPermissionDelegate FaceCamRecordingPermission; public delegate void FaceCamSessionStoppedDelegate(); public static event FaceCamSessionStoppedDelegate FaceCamSessionStopped; [Obsolete("Use ThumbnailTextureReadyDelegate(Texture2D texture,bool portrait) instead.")] public delegate void ThumbnailReadyAtTextureIdDelegate(int textureId, bool portrait); [Obsolete("Use ThumbnailTextureReady instead.")] public static event ThumbnailReadyAtTextureIdDelegate ThumbnailReadyAtTextureId; public delegate void ThumbnailTextureReadyDelegate(Texture2D texture, bool portrait); public static event ThumbnailTextureReadyDelegate 
ThumbnailTextureReady; public delegate void UploadDidStartDelegate(int videoId); public static event UploadDidStartDelegate UploadDidStart; public delegate void UploadDidProgressDelegate(int videoId, float progress); public static event UploadDidProgressDelegate UploadDidProgress; public delegate void UploadDidCompleteDelegate(int videoId); public static event UploadDidCompleteDelegate UploadDidComplete; public delegate void RequestReadyDelegate(string response); public delegate void RequestFailedDelegate(string error); // Private member variables private static string clientId; private static bool appIsClosing = false; // For some time we want to support calling Everyplay with the old SharedInstance. // This requires us to use a EveryplayLegacy instance wrapper. // We can deprecate SharedInstance and notify the user when // using the old way. After some time we can remove it totally. [Obsolete("Calling Everyplay with SharedInstance is deprecated, you may remove SharedInstance.")] public static EveryplayLegacy SharedInstance { get { // Reference to EveryplayInstance to make sure the real instance exists, also create a legacy wrapper if (EveryplayInstance != null) { if (everyplayLegacy == null) { // Add legacy wrapper only when SharedInstance is referenced everyplayLegacy = everyplayInstance.gameObject.AddComponent<EveryplayLegacy>(); } } return everyplayLegacy; } } private static EveryplayLegacy everyplayLegacy = null; // The real singleton, SharedInstance is for legacy support only private static Everyplay everyplayInstance = null; private static Everyplay EveryplayInstance { get { if (everyplayInstance == null && !appIsClosing) { EveryplaySettings settings = (EveryplaySettings) Resources.Load("EveryplaySettings"); if (settings != null) { if (settings.IsEnabled) { GameObject everyplayGameObject = new GameObject("Everyplay"); if (everyplayGameObject != null) { everyplayInstance = everyplayGameObject.AddComponent<Everyplay>(); if (everyplayInstance != null) { 
clientId = settings.clientId; // Initialize the native #if EVERYPLAY_IPHONE_ENABLED || EVERYPLAY_ANDROID_ENABLED InitEveryplay(settings.clientId, settings.clientSecret, settings.redirectURI); #endif // Add test buttons if requested if (settings.testButtonsEnabled) { AddTestButtons(everyplayGameObject); } DontDestroyOnLoad(everyplayGameObject); } } } } } return everyplayInstance; } } // Public static methods public static void Initialize() { // If everyplayInstance is not yet initialized, calling EveryplayInstance property getter will trigger the initialization if (EveryplayInstance == null) { Debug.Log("Unable to initialize Everyplay. Everyplay might be disabled for this platform or the app is closing."); } } public static void Show() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayShow(); #endif } } public static void ShowWithPath(string path) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayShowWithPath(path); #endif } } public static void PlayVideoWithURL(string url) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayPlayVideoWithURL(url); #endif } } public static void PlayVideoWithDictionary(Dictionary<string, object> dict) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayPlayVideoWithDictionary(Json.Serialize(dict)); #endif } } public static void MakeRequest(string method, string url, Dictionary<string, object> data, Everyplay.RequestReadyDelegate readyDelegate, Everyplay.RequestFailedDelegate failedDelegate) { if (EveryplayInstance != null) { EveryplayInstance.AsyncMakeRequest(method, url, data, readyDelegate, failedDelegate); } } public static string AccessToken() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayAccountAccessToken(); #endif } return null; } public static void ShowSharingModal() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayShowSharingModal(); #endif } } public static void 
StartRecording() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayStartRecording(); #endif } } public static void StopRecording() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayStopRecording(); #endif } } public static void PauseRecording() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayPauseRecording(); #endif } } public static void ResumeRecording() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayResumeRecording(); #endif } } public static bool IsRecording() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayIsRecording(); #endif } return false; } public static bool IsRecordingSupported() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayIsRecordingSupported(); #endif } return false; } public static bool IsPaused() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayIsPaused(); #endif } return false; } public static bool SnapshotRenderbuffer() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplaySnapshotRenderbuffer(); #endif } return false; } public static bool IsSupported() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayIsSupported(); #endif } return false; } public static bool IsSingleCoreDevice() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayIsSingleCoreDevice(); #endif } return false; } public static int GetUserInterfaceIdiom() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayGetUserInterfaceIdiom(); #endif } return 0; } public static void PlayLastRecording() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayPlayLastRecording(); #endif } } public static void SetMetadata(string key, object val) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED if (key != null && val != null) { 
Dictionary<string, object> dict = new Dictionary<string, object>(); dict.Add(key, val); EveryplaySetMetadata(Json.Serialize(dict)); } #endif } } public static void SetMetadata(Dictionary<string, object> dict) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED if (dict != null) { if (dict.Count > 0) { EveryplaySetMetadata(Json.Serialize(dict)); } } #endif } } public static void SetTargetFPS(int fps) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetTargetFPS(fps); #endif } } public static void SetMotionFactor(int factor) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetMotionFactor(factor); #endif } } public static void SetMaxRecordingMinutesLength(int minutes) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetMaxRecordingMinutesLength(minutes); #endif } } public static void SetLowMemoryDevice(bool state) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetLowMemoryDevice(state); #endif } } public static void SetDisableSingleCoreDevices(bool state) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetDisableSingleCoreDevices(state); #endif } } public static bool FaceCamIsVideoRecordingSupported() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayFaceCamIsVideoRecordingSupported(); #endif } return false; } public static bool FaceCamIsAudioRecordingSupported() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayFaceCamIsAudioRecordingSupported(); #endif } return false; } public static bool FaceCamIsHeadphonesPluggedIn() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayFaceCamIsHeadphonesPluggedIn(); #endif } return false; } public static bool FaceCamIsSessionRunning() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayFaceCamIsSessionRunning(); #endif } return false; } public static bool 
FaceCamIsRecordingPermissionGranted() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayFaceCamIsRecordingPermissionGranted(); #endif } return false; } public static float FaceCamAudioPeakLevel() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayFaceCamAudioPeakLevel(); #endif } return 0.0f; } public static float FaceCamAudioPowerLevel() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED return EveryplayFaceCamAudioPowerLevel(); #endif } return 0.0f; } public static void FaceCamSetMonitorAudioLevels(bool enabled) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetMonitorAudioLevels(enabled); #endif } } public static void FaceCamSetAudioOnly(bool audioOnly) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetAudioOnly(audioOnly); #endif } } public static void FaceCamSetPreviewVisible(bool visible) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewVisible(visible); #endif } } public static void FaceCamSetPreviewScaleRetina(bool autoScale) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewScaleRetina(autoScale); #endif } } public static void FaceCamSetPreviewSideWidth(int width) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewSideWidth(width); #endif } } public static void FaceCamSetPreviewBorderWidth(int width) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewBorderWidth(width); #endif } } public static void FaceCamSetPreviewPositionX(int x) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewPositionX(x); #endif } } public static void FaceCamSetPreviewPositionY(int y) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewPositionY(y); #endif } } public static void 
FaceCamSetPreviewBorderColor(float r, float g, float b, float a) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewBorderColor(r, g, b, a); #endif } } public static void FaceCamSetPreviewOrigin(Everyplay.FaceCamPreviewOrigin origin) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetPreviewOrigin((int) origin); #endif } } public static void FaceCamSetTargetTexture(Texture2D texture) { if (EveryplayInstance != null) { #if !UNITY_3_5 #if EVERYPLAY_IPHONE_ENABLED if (texture != null) { EveryplayFaceCamSetTargetTexture(texture.GetNativeTexturePtr()); EveryplayFaceCamSetTargetTextureWidth(texture.width); EveryplayFaceCamSetTargetTextureHeight(texture.height); } else { EveryplayFaceCamSetTargetTexture(System.IntPtr.Zero); } #elif EVERYPLAY_ANDROID_ENABLED if (texture != null) { EveryplayFaceCamSetTargetTextureId(texture.GetNativeTextureID()); EveryplayFaceCamSetTargetTextureWidth(texture.width); EveryplayFaceCamSetTargetTextureHeight(texture.height); } else { EveryplayFaceCamSetTargetTextureId(0); } #endif #endif } } [Obsolete("Use FaceCamSetTargetTexture(Texture2D texture) instead.")] public static void FaceCamSetTargetTextureId(int textureId) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetTargetTextureId(textureId); #endif } } [Obsolete("Defining texture width is no longer required when FaceCamSetTargetTexture(Texture2D texture) is used.")] public static void FaceCamSetTargetTextureWidth(int textureWidth) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetTargetTextureWidth(textureWidth); #endif } } [Obsolete("Defining texture height is no longer required when FaceCamSetTargetTexture(Texture2D texture) is used.")] public static void FaceCamSetTargetTextureHeight(int textureHeight) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamSetTargetTextureHeight(textureHeight); #endif } } public static 
void FaceCamStartSession() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamStartSession(); #endif } } public static void FaceCamRequestRecordingPermission() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamRequestRecordingPermission(); #endif } } public static void FaceCamStopSession() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayFaceCamStopSession(); #endif } } private static Texture2D currentThumbnailTargetTexture = null; public static void SetThumbnailTargetTexture(Texture2D texture) { if (EveryplayInstance != null) { currentThumbnailTargetTexture = texture; #if !UNITY_3_5 #if EVERYPLAY_IPHONE_ENABLED if (texture != null) { EveryplaySetThumbnailTargetTexture(currentThumbnailTargetTexture.GetNativeTexturePtr()); EveryplaySetThumbnailTargetTextureWidth(currentThumbnailTargetTexture.width); EveryplaySetThumbnailTargetTextureHeight(currentThumbnailTargetTexture.height); } else { EveryplaySetThumbnailTargetTexture(System.IntPtr.Zero); } #elif EVERYPLAY_ANDROID_ENABLED if (texture != null) { EveryplaySetThumbnailTargetTextureId(currentThumbnailTargetTexture.GetNativeTextureID()); EveryplaySetThumbnailTargetTextureWidth(currentThumbnailTargetTexture.width); EveryplaySetThumbnailTargetTextureHeight(currentThumbnailTargetTexture.height); } else { EveryplaySetThumbnailTargetTextureId(0); } #endif #endif } } [Obsolete("Use SetThumbnailTargetTexture(Texture2D texture) instead.")] public static void SetThumbnailTargetTextureId(int textureId) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetThumbnailTargetTextureId(textureId); #endif } } [Obsolete("Defining texture width is no longer required when SetThumbnailTargetTexture(Texture2D texture) is used.")] public static void SetThumbnailTargetTextureWidth(int textureWidth) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetThumbnailTargetTextureWidth(textureWidth); #endif } } 
[Obsolete("Defining texture height is no longer required when SetThumbnailTargetTexture(Texture2D texture) is used.")] public static void SetThumbnailTargetTextureHeight(int textureHeight) { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplaySetThumbnailTargetTextureHeight(textureHeight); #endif } } public static void TakeThumbnail() { if (EveryplayInstance != null) { #if EVERYPLAY_BINDINGS_ENABLED EveryplayTakeThumbnail(); #endif } } // Private static methods private static void RemoveAllEventHandlers() { WasClosed = null; ReadyForRecording = null; RecordingStarted = null; RecordingStopped = null; FaceCamSessionStarted = null; FaceCamRecordingPermission = null; FaceCamSessionStopped = null; #pragma warning disable 612, 618 ThumbnailReadyAtTextureId = null; #pragma warning restore 612, 618 ThumbnailTextureReady = null; UploadDidStart = null; UploadDidProgress = null; UploadDidComplete = null; } private static void AddTestButtons(GameObject gameObject) { Texture2D textureAtlas = (Texture2D) Resources.Load("everyplay-test-buttons", typeof(Texture2D)); if (textureAtlas != null) { EveryplayRecButtons recButtons = gameObject.AddComponent<EveryplayRecButtons>(); if (recButtons != null) { recButtons.atlasTexture = textureAtlas; } } } // Private instance methods private void AsyncMakeRequest(string method, string url, Dictionary<string, object> data, Everyplay.RequestReadyDelegate readyDelegate, Everyplay.RequestFailedDelegate failedDelegate) { StartCoroutine(MakeRequestEnumerator(method, url, data, readyDelegate, failedDelegate)); } private IEnumerator MakeRequestEnumerator(string method, string url, Dictionary<string, object> data, Everyplay.RequestReadyDelegate readyDelegate, Everyplay.RequestFailedDelegate failedDelegate) { if (data == null) { data = new Dictionary<string, object>(); } if (url.IndexOf("http") != 0) { if (url.IndexOf("/") != 0) { url = "/" + url; } url = "https://api.everyplay.com" + url; } method = method.ToLower(); #if UNITY_3_5 
|| UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3
        // Pre-Unity-4.5 WWW only accepts a Hashtable of request headers.
        Hashtable headers = new Hashtable();
#else
        Dictionary<string, string> headers = new Dictionary<string, string>();
#endif
        // Authenticate with the user's access token when available; otherwise
        // fall back to appending the client_id as a query parameter.
        string accessToken = AccessToken();
        if (accessToken != null) {
            headers["Authorization"] = "Bearer " + accessToken;
        }
        else {
            if (url.IndexOf("client_id") == -1) {
                if (url.IndexOf("?") == -1) {
                    url += "?";
                }
                else {
                    url += "&";
                }
                url += "client_id=" + clientId;
            }
        }
        // The real HTTP verb is tunneled inside the JSON payload as "_method".
        data.Add("_method", method);
        string dataString = Json.Serialize(data);
        byte[] dataArray = System.Text.Encoding.UTF8.GetBytes(dataString);
        headers["Accept"] = "application/json";
        headers["Content-Type"] = "application/json";
        headers["Data-Type"] = "json";
        headers["Content-Length"] = dataArray.Length.ToString();
        // Fire the request and wait for it to complete inside this coroutine,
        // then report the outcome through exactly one of the two delegates.
        WWW www = new WWW(url, dataArray, headers);
        yield return www;
        if (!string.IsNullOrEmpty(www.error) && failedDelegate != null) {
            failedDelegate("Everyplay error: " + www.error);
        }
        else if (string.IsNullOrEmpty(www.error) && readyDelegate != null) {
            readyDelegate(www.text);
        }
    }

    // Monobehaviour methods

    // Tear down the thumbnail target and all event handlers when the app exits.
    void OnApplicationQuit() {
        if (currentThumbnailTargetTexture != null) {
            SetThumbnailTargetTexture(null);
            currentThumbnailTargetTexture = null;
        }
        RemoveAllEventHandlers();
        appIsClosing = true;
        everyplayInstance = null;
    }

    // Private instance methods called by native

    // Native -> Unity: the Everyplay UI was dismissed.
    private void EveryplayHidden(string msg) {
        if (WasClosed != null) {
            WasClosed();
        }
    }

    // Native -> Unity: recording readiness changed; payload is {"enabled": bool}.
    private void EveryplayReadyForRecording(string jsonMsg) {
        if (ReadyForRecording != null) {
            Dictionary<string, object> dict = EveryplayDictionaryExtensions.JsonToDictionary(jsonMsg);
            bool enabled;
            if (EveryplayDictionaryExtensions.TryGetValue(dict, "enabled", out enabled)) {
                ReadyForRecording(enabled);
            }
        }
    }

    // Native -> Unity: a recording session started.
    private void EveryplayRecordingStarted(string msg) {
        if (RecordingStarted != null) {
            RecordingStarted();
        }
    }

    // Native -> Unity: a recording session stopped.
    private void EveryplayRecordingStopped(string msg) {
        if (RecordingStopped != null) {
            RecordingStopped();
        }
    }

    // Native -> Unity: a FaceCam session started.
    private void EveryplayFaceCamSessionStarted(string msg) {
        if (FaceCamSessionStarted != null) {
            FaceCamSessionStarted();
        }
    }

    // Native -> Unity: FaceCam permission result; payload is {"granted": bool}.
    private void EveryplayFaceCamRecordingPermission(string jsonMsg) {
        if (FaceCamRecordingPermission != null) {
            Dictionary<string, object> dict = EveryplayDictionaryExtensions.JsonToDictionary(jsonMsg);
            bool granted;
            if (EveryplayDictionaryExtensions.TryGetValue(dict, "granted", out granted)) {
                FaceCamRecordingPermission(granted);
            }
        }
    }

    // Native -> Unity: a FaceCam session stopped.
    private void EveryplayFaceCamSessionStopped(string msg) {
        if (FaceCamSessionStopped != null) {
            FaceCamSessionStopped();
        }
    }

    // Native -> Unity: a thumbnail was rendered into an OpenGL texture id.
    // Dispatches the deprecated id-based event and, when the id matches the
    // current target texture, the newer texture-based event as well.
    private void EveryplayThumbnailReadyAtTextureId(string jsonMsg) {
#pragma warning disable 612, 618
        if (ThumbnailReadyAtTextureId != null || ThumbnailTextureReady != null) {
            Dictionary<string, object> dict = EveryplayDictionaryExtensions.JsonToDictionary(jsonMsg);
            int textureId;
            bool portrait;
            if (EveryplayDictionaryExtensions.TryGetValue(dict, "textureId", out textureId) && EveryplayDictionaryExtensions.TryGetValue(dict, "portrait", out portrait)) {
                if (ThumbnailReadyAtTextureId != null) {
                    ThumbnailReadyAtTextureId(textureId, portrait);
                }
#if !UNITY_3_5
                if (ThumbnailTextureReady != null && currentThumbnailTargetTexture != null) {
                    if (currentThumbnailTargetTexture.GetNativeTextureID() == textureId) {
                        ThumbnailTextureReady(currentThumbnailTargetTexture, portrait);
                    }
                }
#endif
            }
        }
#pragma warning restore 612, 618
    }

    // Native -> Unity: a thumbnail was rendered; payload carries the native
    // texture pointer which is compared against the current target texture.
    private void EveryplayThumbnailTextureReady(string jsonMsg) {
#if !UNITY_3_5
        if (ThumbnailTextureReady != null) {
            Dictionary<string, object> dict = EveryplayDictionaryExtensions.JsonToDictionary(jsonMsg);
            long texturePtr;
            bool portrait;
            if (currentThumbnailTargetTexture != null && EveryplayDictionaryExtensions.TryGetValue(dict, "texturePtr", out texturePtr) && EveryplayDictionaryExtensions.TryGetValue(dict, "portrait", out portrait)) {
                long currentPtr = (long) currentThumbnailTargetTexture.GetNativeTexturePtr();
                if (currentPtr == texturePtr) {
                    ThumbnailTextureReady(currentThumbnailTargetTexture, portrait);
                }
            }
        }
#endif
    }

    // Native -> Unity: a video upload started; payload is {"videoId": int}.
    private void EveryplayUploadDidStart(string jsonMsg) {
        if (UploadDidStart != null) {
            Dictionary<string, object> dict = EveryplayDictionaryExtensions.JsonToDictionary(jsonMsg);
            int videoId;
            if (EveryplayDictionaryExtensions.TryGetValue(dict, "videoId", out videoId)) {
                UploadDidStart(videoId);
            }
        }
    }

    // Native -> Unity: upload progress; payload is {"videoId": int, "progress": double}.
    private void EveryplayUploadDidProgress(string jsonMsg) {
        if (UploadDidProgress != null) {
            Dictionary<string, object> dict = EveryplayDictionaryExtensions.JsonToDictionary(jsonMsg);
            int videoId;
            double progress;
            if (EveryplayDictionaryExtensions.TryGetValue(dict, "videoId", out videoId) && EveryplayDictionaryExtensions.TryGetValue(dict, "progress", out progress)) {
                UploadDidProgress(videoId, (float) progress);
            }
        }
    }

    // Native -> Unity: a video upload completed; payload is {"videoId": int}.
    private void EveryplayUploadDidComplete(string jsonMsg) {
        if (UploadDidComplete != null) {
            Dictionary<string, object> dict = EveryplayDictionaryExtensions.JsonToDictionary(jsonMsg);
            int videoId;
            if (EveryplayDictionaryExtensions.TryGetValue(dict, "videoId", out videoId)) {
                UploadDidComplete(videoId);
            }
        }
    }

    // Native calls
#if EVERYPLAY_IPHONE_ENABLED
    // iOS: P/Invoke bindings into the statically linked Everyplay library.
    [DllImport("__Internal")]
    public static extern void InitEveryplay(string clientId, string clientSecret, string redirectURI);
    [DllImport("__Internal")]
    private static extern void EveryplayShow();
    [DllImport("__Internal")]
    private static extern void EveryplayShowWithPath(string path);
    [DllImport("__Internal")]
    private static extern void EveryplayPlayVideoWithURL(string url);
    [DllImport("__Internal")]
    private static extern void EveryplayPlayVideoWithDictionary(string dic);
    [DllImport("__Internal")]
    private static extern string EveryplayAccountAccessToken();
    [DllImport("__Internal")]
    private static extern void EveryplayShowSharingModal();
    [DllImport("__Internal")]
    private static extern void EveryplayStartRecording();
    [DllImport("__Internal")]
    private static extern void EveryplayStopRecording();
    [DllImport("__Internal")]
    private static extern void EveryplayPauseRecording();
    [DllImport("__Internal")]
    private static extern void
    EveryplayResumeRecording();
    // Recording state queries and recording configuration (iOS native).
    [DllImport("__Internal")]
    private static extern bool EveryplayIsRecording();
    [DllImport("__Internal")]
    private static extern bool EveryplayIsRecordingSupported();
    [DllImport("__Internal")]
    private static extern bool EveryplayIsPaused();
    [DllImport("__Internal")]
    private static extern bool EveryplaySnapshotRenderbuffer();
    [DllImport("__Internal")]
    private static extern void EveryplayPlayLastRecording();
    [DllImport("__Internal")]
    private static extern void EveryplaySetMetadata(string json);
    [DllImport("__Internal")]
    private static extern void EveryplaySetTargetFPS(int fps);
    [DllImport("__Internal")]
    private static extern void EveryplaySetMotionFactor(int factor);
    [DllImport("__Internal")]
    private static extern void EveryplaySetMaxRecordingMinutesLength(int minutes);
    [DllImport("__Internal")]
    private static extern void EveryplaySetLowMemoryDevice(bool state);
    [DllImport("__Internal")]
    private static extern void EveryplaySetDisableSingleCoreDevices(bool state);
    [DllImport("__Internal")]
    private static extern bool EveryplayIsSupported();
    [DllImport("__Internal")]
    private static extern bool EveryplayIsSingleCoreDevice();
    [DllImport("__Internal")]
    private static extern int EveryplayGetUserInterfaceIdiom();
    // FaceCam capability queries and configuration (iOS native).
    [DllImport("__Internal")]
    private static extern bool EveryplayFaceCamIsVideoRecordingSupported();
    [DllImport("__Internal")]
    private static extern bool EveryplayFaceCamIsAudioRecordingSupported();
    [DllImport("__Internal")]
    private static extern bool EveryplayFaceCamIsHeadphonesPluggedIn();
    [DllImport("__Internal")]
    private static extern bool EveryplayFaceCamIsSessionRunning();
    [DllImport("__Internal")]
    private static extern bool EveryplayFaceCamIsRecordingPermissionGranted();
    [DllImport("__Internal")]
    private static extern float EveryplayFaceCamAudioPeakLevel();
    [DllImport("__Internal")]
    private static extern float EveryplayFaceCamAudioPowerLevel();
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetMonitorAudioLevels(bool enabled);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetAudioOnly(bool audioOnly);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewVisible(bool visible);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewScaleRetina(bool autoScale);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewSideWidth(int width);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewBorderWidth(int width);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewPositionX(int x);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewPositionY(int y);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewBorderColor(float r, float g, float b, float a);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetPreviewOrigin(int origin);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetTargetTexture(System.IntPtr texturePtr);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetTargetTextureId(int textureId);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetTargetTextureWidth(int textureWidth);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamSetTargetTextureHeight(int textureHeight);
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamStartSession();
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamRequestRecordingPermission();
    [DllImport("__Internal")]
    private static extern void EveryplayFaceCamStopSession();
    // Thumbnail target configuration (iOS native).
    [DllImport("__Internal")]
    private static extern void EveryplaySetThumbnailTargetTexture(System.IntPtr texturePtr);
    [DllImport("__Internal")]
    private static extern void EveryplaySetThumbnailTargetTextureId(int textureId);
    [DllImport("__Internal")]
    private static extern void EveryplaySetThumbnailTargetTextureWidth(int textureWidth);
    [DllImport("__Internal")]
    private static extern void EveryplaySetThumbnailTargetTextureHeight(int textureHeight);
    [DllImport("__Internal")]
    private static extern void EveryplayTakeThumbnail();
#elif EVERYPLAY_ANDROID_ENABLED
    // Android: all calls are routed through the EveryplayUnity3DWrapper Java object,
    // mirroring the iOS P/Invoke surface so callers are platform-agnostic.
    private static AndroidJavaObject everyplayUnity;

    // Creates the Java wrapper and initializes Everyplay with the current activity.
    public static void InitEveryplay(string clientId, string clientSecret, string redirectURI)
    {
        AndroidJavaClass jc = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        AndroidJavaObject activity = jc.GetStatic<AndroidJavaObject>("currentActivity");
        everyplayUnity = new AndroidJavaObject("com.everyplay.Everyplay.unity.EveryplayUnity3DWrapper");
        everyplayUnity.Call("initEveryplay", activity, clientId, clientSecret, redirectURI);
    }

    // Thin one-line JNI forwarders; names match the iOS externs above.
    public static void EveryplayShow() { everyplayUnity.Call<bool>("showEveryplay"); }
    public static void EveryplayShowWithPath(string path) { everyplayUnity.Call<bool>("showEveryplay", path); }
    public static void EveryplayPlayVideoWithURL(string url) { everyplayUnity.Call("playVideoWithURL", url); }
    public static void EveryplayPlayVideoWithDictionary(string dic) { everyplayUnity.Call("playVideoWithDictionary", dic); }
    public static string EveryplayAccountAccessToken() { return everyplayUnity.Call<string>("getAccessToken"); }
    public static void EveryplayShowSharingModal() { everyplayUnity.Call("showSharingModal"); }
    public static void EveryplayStartRecording() { everyplayUnity.Call("startRecording"); }
    public static void EveryplayStopRecording() { everyplayUnity.Call("stopRecording"); }
    public static void EveryplayPauseRecording() { everyplayUnity.Call("pauseRecording"); }
    public static void EveryplayResumeRecording() { everyplayUnity.Call("resumeRecording"); }
    public static bool EveryplayIsRecording() { return everyplayUnity.Call<bool>("isRecording"); }
    public static bool EveryplayIsRecordingSupported() { return everyplayUnity.Call<bool>("isRecordingSupported"); }
    public static bool EveryplayIsPaused() { return everyplayUnity.Call<bool>("isPaused"); }
    public static bool EveryplaySnapshotRenderbuffer() { return everyplayUnity.Call<bool>("snapshotRenderbuffer"); }
    public static void EveryplayPlayLastRecording() { everyplayUnity.Call("playLastRecording"); }
    public static void EveryplaySetMetadata(string json) { everyplayUnity.Call("setMetadata", json); }
    public static void EveryplaySetTargetFPS(int fps) { everyplayUnity.Call("setTargetFPS", fps); }
    public static void EveryplaySetMotionFactor(int factor) { everyplayUnity.Call("setMotionFactor", factor); }
    public static void EveryplaySetMaxRecordingMinutesLength(int minutes) { everyplayUnity.Call("setMaxRecordingMinutesLength", minutes); }
    public static void EveryplaySetLowMemoryDevice(bool state) { everyplayUnity.Call("setLowMemoryDevice", state ? 1 : 0); }
    public static void EveryplaySetDisableSingleCoreDevices(bool state) { everyplayUnity.Call("setDisableSingleCoreDevices", state ? 1 : 0); }
    public static bool EveryplayIsSupported() { return everyplayUnity.Call<bool>("isSupported"); }
    public static bool EveryplayIsSingleCoreDevice() { return everyplayUnity.Call<bool>("isSingleCoreDevice"); }
    public static int EveryplayGetUserInterfaceIdiom() { return everyplayUnity.Call<int>("getUserInterfaceIdiom"); }

    // FaceCam is not implemented on Android; these stubs keep the cross-platform
    // API surface identical (capability queries return false / 0, setters log).
    public static bool EveryplayFaceCamIsVideoRecordingSupported() { return false; }
    public static bool EveryplayFaceCamIsAudioRecordingSupported() { return false; }
    public static bool EveryplayFaceCamIsHeadphonesPluggedIn() { return false; }
    public static bool EveryplayFaceCamIsSessionRunning() { return false; }
    public static bool EveryplayFaceCamIsRecordingPermissionGranted() { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); return false; }
    public static float EveryplayFaceCamAudioPeakLevel() { return 0.0f; }
    public static float EveryplayFaceCamAudioPowerLevel() { return 0.0f; }
    public static void EveryplayFaceCamSetMonitorAudioLevels(bool enabled) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetAudioOnly(bool audioOnly) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewVisible(bool visible) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewScaleRetina(bool autoScale) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewSideWidth(int width) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewBorderWidth(int width) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewPositionX(int x) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewPositionY(int y) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewBorderColor(float r, float g, float b, float a) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetPreviewOrigin(int origin) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetTargetTextureId(int textureId) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    // NOTE(review): the parameter names below look swapped (Width takes 'textureHeight',
    // Height takes 'textureWidth') in the original source; preserved as-is — confirm
    // against the iOS declarations before renaming.
    public static void EveryplayFaceCamSetTargetTextureWidth(int textureHeight) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamSetTargetTextureHeight(int textureWidth) { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamStartSession() { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamRequestRecordingPermission() { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplayFaceCamStopSession() { Debug.Log(System.Reflection.MethodBase.GetCurrentMethod().Name + " not yet implemented"); }
    public static void EveryplaySetThumbnailTargetTextureId(int textureId) { everyplayUnity.Call("setThumbnailTargetTextureId", textureId); }
    public static void EveryplaySetThumbnailTargetTextureWidth(int textureWidth) { everyplayUnity.Call("setThumbnailTargetTextureWidth", textureWidth); }
    public static void EveryplaySetThumbnailTargetTextureHeight(int textureHeight) { everyplayUnity.Call("setThumbnailTargetTextureHeight", textureHeight); }
    public static void EveryplayTakeThumbnail() { everyplayUnity.Call("takeThumbnail"); }
#endif
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
using System.Threading;
#if !ES_BUILD_AGAINST_DOTNET_V35
using Contract = System.Diagnostics.Contracts.Contract;
#else
using Contract = Microsoft.Diagnostics.Contracts.Internal.Contract;
#endif

#if ES_BUILD_STANDALONE
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
    /// <summary>
    /// Tracks activities.  This is meant to be a singleton (accessed by the ActivityTracer.Instance static property)
    ///
    /// Logically this simply holds the m_current variable that holds the async local that holds the current ActivityInfo.
    /// An ActivityInfo represents an activity (which knows its creator and thus knows its path).
    ///
    /// Most of the magic is in the async local (it gets copied to new tasks)
    ///
    /// On every start event call OnStart
    ///
    ///     Guid activityID;
    ///     Guid relatedActivityID;
    ///     if (OnStart(activityName, out activityID, out relatedActivityID, ForceStop, options))
    ///         // Log Start event with activityID and relatedActivityID
    ///
    /// On every stop event call OnStop
    ///
    ///     Guid activityID;
    ///     if (OnStop(activityName, ref activityID ForceStop))
    ///         // Stop event with activityID
    ///
    /// On any normal event log the event with activityTracker.CurrentActivityId
    /// </summary>
    internal class ActivityTracker
    {
        /// <summary>
        /// Called on work item begins.  The activity name = providerName + activityName without 'Start' suffix.
        /// It updates CurrentActivityId to track.
        ///
        /// It returns true if the Start should be logged, otherwise (if it is illegal recursion) it returns false.
        ///
        /// The start event should use as its activity ID the CurrentActivityId AFTER calling this routine and its
        /// RelatedActivityID the CurrentActivityId BEFORE calling this routine (the creator).
        ///
        /// If activity tracing is not on, then activityId and relatedActivityId are not set
        /// </summary>
        public void OnStart(string providerName, string activityName, int task, ref Guid activityId, ref Guid relatedActivityId, EventActivityOptions options)
        {
            if (m_current == null)        // We are not enabled
            {
                // We used to rely on the TPL provider turning us on, but that has the disadvantage that you don't get Start-Stop tracking
                // until you use Tasks for the first time (which you may never do).  Thus we change it to pull rather than push for whether
                // we are enabled.
                if (m_checkedForEnable)
                    return;
                m_checkedForEnable = true;
                if (TplEtwProvider.Log.IsEnabled(EventLevel.Informational, TplEtwProvider.Keywords.TasksFlowActivityIds))
                    Enable();
                if (m_current == null)
                    return;
            }

            Contract.Assert((options & EventActivityOptions.Disable) == 0);

            var currentActivity = m_current.Value;
            var fullActivityName = NormalizeActivityName(providerName, activityName, task);

            var etwLog = TplEtwProvider.Log;
            if (etwLog.Debug)
            {
                etwLog.DebugFacilityMessage("OnStartEnter", fullActivityName);
                etwLog.DebugFacilityMessage("OnStartEnterActivityState", ActivityInfo.LiveActivities(currentActivity));
            }

            if (currentActivity != null)
            {
                // Stop activity tracking if we reached the maximum allowed depth
                if (currentActivity.m_level >= MAX_ACTIVITY_DEPTH)
                {
                    activityId = Guid.Empty;
                    relatedActivityId = Guid.Empty;
                    if (etwLog.Debug)
                        etwLog.DebugFacilityMessage("OnStartRET", "Fail");
                    return;
                }
                // Check for recursion, and force-stop any activities if the activity already started.
                if ((options & EventActivityOptions.Recursive) == 0)
                {
                    ActivityInfo existingActivity = FindActiveActivity(fullActivityName, currentActivity);
                    if (existingActivity != null)
                    {
                        OnStop(providerName, activityName, task, ref activityId);
                        currentActivity = m_current.Value;
                    }
                }
            }

            // Get a unique ID for this activity.
            long id;
            if (currentActivity == null)
                id = Interlocked.Increment(ref m_nextId);
            else
                id = Interlocked.Increment(ref currentActivity.m_lastChildID);

            // The previous ID is my 'causer' and becomes my related activity ID
            relatedActivityId = EventSource.CurrentThreadActivityId;

            // Add to the list of started but not stopped activities.
            ActivityInfo newActivity = new ActivityInfo(fullActivityName, id, currentActivity, relatedActivityId, options);
            m_current.Value = newActivity;

            // Remember the current ID so we can log it
            activityId = newActivity.ActivityId;

            if (etwLog.Debug)
            {
                etwLog.DebugFacilityMessage("OnStartRetActivityState", ActivityInfo.LiveActivities(newActivity));
                etwLog.DebugFacilityMessage1("OnStartRet", activityId.ToString(), relatedActivityId.ToString());
            }
        }

        /// <summary>
        /// Called when a work item stops.  The activity name = providerName + activityName without 'Stop' suffix.
        /// It updates m_current variable to track this fact.  The Stop event associated with stop should log the ActivityID associated with the event.
        ///
        /// If activity tracing is not on, then activityId and relatedActivityId are not set
        /// </summary>
        public void OnStop(string providerName, string activityName, int task, ref Guid activityId)
        {
            if (m_current == null)        // We are not enabled
                return;

            var fullActivityName = NormalizeActivityName(providerName, activityName, task);

            var etwLog = TplEtwProvider.Log;
            if (etwLog.Debug)
            {
                etwLog.DebugFacilityMessage("OnStopEnter", fullActivityName);
                etwLog.DebugFacilityMessage("OnStopEnterActivityState", ActivityInfo.LiveActivities(m_current.Value));
            }

            for (; ; )        // This is a retry loop.
            {
                ActivityInfo currentActivity = m_current.Value;
                ActivityInfo newCurrentActivity = null;        // if we have seen any live activities (orphans), at the first one we have seen.

                // Search to find the activity to stop in one pass.
                // (cont.) This ensures that we don't let one mistake
                // (stopping something that was not started) cause all active starts to be stopped
                // By first finding the target start to stop we are more robust.
                ActivityInfo activityToStop = FindActiveActivity(fullActivityName, currentActivity);

                // ignore stops where we can't find a start because we may have popped them previously.
                if (activityToStop == null)
                {
                    activityId = Guid.Empty;
                    // TODO add some logging about this. Basically could not find matching start.
                    if (etwLog.Debug)
                        etwLog.DebugFacilityMessage("OnStopRET", "Fail");
                    return;
                }

                activityId = activityToStop.ActivityId;

                // See if there are any orphans that need to be stopped.
                ActivityInfo orphan = currentActivity;
                while (orphan != activityToStop && orphan != null)
                {
                    if (orphan.m_stopped != 0)        // Skip dead activities.
                    {
                        orphan = orphan.m_creator;
                        continue;
                    }
                    if (orphan.CanBeOrphan())
                    {
                        // We can't pop anything after we see a valid orphan, remember this for later when we update m_current.
                        if (newCurrentActivity == null)
                            newCurrentActivity = orphan;
                    }
                    else
                    {
                        orphan.m_stopped = 1;
                        Contract.Assert(orphan.m_stopped != 0);
                    }
                    orphan = orphan.m_creator;
                }

                // try to Stop the activity atomically.  Other threads may be trying to do this as well.
                if (Interlocked.CompareExchange(ref activityToStop.m_stopped, 1, 0) == 0)
                {
                    // I succeeded stopping this activity.  Now we update our m_current pointer.

                    // If I haven't yet determined the new current activity, it is my creator.
                    if (newCurrentActivity == null)
                        newCurrentActivity = activityToStop.m_creator;

                    m_current.Value = newCurrentActivity;

                    if (etwLog.Debug)
                    {
                        etwLog.DebugFacilityMessage("OnStopRetActivityState", ActivityInfo.LiveActivities(newCurrentActivity));
                        etwLog.DebugFacilityMessage("OnStopRet", activityId.ToString());
                    }
                    return;
                }
                // We failed to stop it.  We must have hit a race to stop it.  Just start over and try again.
            }
        }

        /// <summary>
        /// Turns on activity tracking.  It is sticky, once on it stays on (race issues otherwise)
        /// </summary>
        [System.Security.SecuritySafeCritical]
        public void Enable()
        {
            if (m_current == null)
            {
                // Catch the not Implemented
                try
                {
                    m_current = new AsyncLocal<ActivityInfo>(ActivityChanging);
                }
                catch (NotImplementedException)
                {
#if (!ES_BUILD_PCL && ! PROJECTN)
                    // send message to debugger without delay
                    System.Diagnostics.Debugger.Log(0, null, "Activity Enabled() called but AsyncLocals Not Supported (pre V4.6). Ignoring Enable");
#endif
                }
            }
        }

        /// <summary>
        /// An activity tracker is a singleton, this is how you get the one and only instance.
        /// </summary>
        public static ActivityTracker Instance { get { return s_activityTrackerInstance; } }

        #region private
        /// <summary>
        /// The current activity ID.  Use this to log normal events.
        /// </summary>
        private Guid CurrentActivityId { get { return m_current.Value.ActivityId; } }

        /// <summary>
        /// Searches for an active (non-stopped) activity with the given name.  Returns null if not found.
        /// Walks the m_creator chain from startLocation toward the root.
        /// </summary>
        private ActivityInfo FindActiveActivity(string name, ActivityInfo startLocation)
        {
            var activity = startLocation;
            while (activity != null)
            {
                if (name == activity.m_name && activity.m_stopped == 0)
                    return activity;
                activity = activity.m_creator;
            }
            return null;
        }

        /// <summary>
        /// Strip out "Start" or "End" suffix from activity name and add providerName prefix.
        /// If 'task' it does not end in Start or Stop and Task is non-zero use that as the name of the activity
        /// </summary>
        private string NormalizeActivityName(string providerName, string activityName, int task)
        {
            if (activityName.EndsWith(EventSource.s_ActivityStartSuffix))
                activityName = activityName.Substring(0, activityName.Length - EventSource.s_ActivityStartSuffix.Length);
            else if (activityName.EndsWith(EventSource.s_ActivityStopSuffix))
                activityName = activityName.Substring(0, activityName.Length - EventSource.s_ActivityStopSuffix.Length);
            else if (task != 0)
                activityName = "task" + task.ToString();

            // We use provider name to distinguish between activities from different providers.
            return providerName + activityName;
        }

        // *******************************************************************************
        /// <summary>
        /// An ActivityInfo represents a particular activity.  It is almost read-only.  The only
        /// fields that change after creation are
        ///     m_lastChildID - used to generate unique IDs for the children activities and for the most part can be ignored.
        ///     m_stopped - indicates that this activity is dead
        /// This read-only-ness is important because an activity's m_creator chain forms the
        /// 'Path of creation' for the activity (which is also its unique ID) but is also used as
        /// the 'list of live parents' which indicate of those ancestors, which are alive (if they
        /// are not marked dead they are alive).
        /// </summary>
        private class ActivityInfo
        {
            public ActivityInfo(string name, long uniqueId, ActivityInfo creator, Guid activityIDToRestore, EventActivityOptions options)
            {
                m_name = name;
                m_eventOptions = options;
                m_creator = creator;
                m_uniqueId = uniqueId;
                m_level = creator != null ? creator.m_level + 1 : 0;
                m_activityIdToRestore = activityIDToRestore;

                // Create a nice GUID that encodes the chain of activities that started this one.
                CreateActivityPathGuid(out m_guid, out m_activityPathGuidOffset);
            }

            // The GUID that encodes this activity's creation path (see CreateActivityPathGuid).
            public Guid ActivityId
            {
                get
                {
                    return m_guid;
                }
            }

            // Human-readable "/id1/id2/..." creation path, built by walking the creator chain.
            public static string Path(ActivityInfo activityInfo)
            {
                if (activityInfo == null)
                    return ("");
                return Path(activityInfo.m_creator) + "/" + activityInfo.m_uniqueId;
            }

            public override string ToString()
            {
                string dead = "";
                if (m_stopped != 0)
                    dead = ",DEAD";
                return m_name + "(" + Path(this) + dead + ")";
            }

            // Semicolon-separated dump of 'list' and all its ancestors (for debug logging).
            public static string LiveActivities(ActivityInfo list)
            {
                if (list == null)
                    return "";
                return list.ToString() + ";" + LiveActivities(list.m_creator);
            }

            // An activity may be left live ("orphaned") when a descendant stops only
            // if it was started with the Detachable option.
            public bool CanBeOrphan()
            {
                if ((m_eventOptions & EventActivityOptions.Detachable) != 0)
                    return true;
                return false;
            }

            #region private
            #region CreateActivityPathGuid
            /// <summary>
            /// Logically every activity Path (see Path()) that describes the activities that caused this
            /// (rooted in an activity that predates activity tracking.
            ///
            /// We wish to encode this path in the Guid to the extent that we can.  Many of the paths have
            /// many small numbers in them and we take advantage of this in the encoding to output as long
            /// a path in the GUID as possible.
            ///
            /// Because of the possibility of GUID collision, we only use 96 of the 128 bits of the GUID
            /// for encoding the path.  The last 32 bits are a simple checksum (and random number) that
            /// identifies this as using the convention defined here.
            ///
            /// It returns both the GUID which has the path as well as the offset that points just beyond
            /// the end of the activity (so it can be appended to).  Note that if the end is in a nibble
            /// (it uses nibbles instead of bytes as the unit of encoding, then it will point at the unfinished
            /// byte (since the top nibble can't be zero you can determine if this is true by seeing if
            /// this byte is nonZero.  This offset is needed to efficiently create the ID for child activities.
            /// </summary>
            [System.Security.SecuritySafeCritical]
            private unsafe void CreateActivityPathGuid(out Guid idRet, out int activityPathGuidOffset)
            {
                fixed (Guid* outPtr = &idRet)
                {
                    int activityPathGuidOffsetStart = 0;
                    if (m_creator != null)
                    {
                        // Children extend the creator's GUID in place.
                        activityPathGuidOffsetStart = m_creator.m_activityPathGuidOffset;
                        idRet = m_creator.m_guid;
                    }
                    else
                    {
                        // TODO FIXME - differentiate between AD inside PCL
                        int appDomainID = 0;
#if (!ES_BUILD_STANDALONE && !PROJECTN)
                        appDomainID = System.Threading.Thread.GetDomainID();
#endif
                        // We start with the appdomain number to make this unique among appdomains.
                        activityPathGuidOffsetStart = AddIdToGuid(outPtr, activityPathGuidOffsetStart, (uint)appDomainID);
                    }

                    activityPathGuidOffset = AddIdToGuid(outPtr, activityPathGuidOffsetStart, (uint)m_uniqueId);

                    // If the path does not fit, Make a GUID by incrementing rather than as a path, keeping as much of the path as possible
                    if (12 < activityPathGuidOffset)
                        CreateOverflowGuid(outPtr);
                }
            }

            /// <summary>
            /// If we can't fit the activity Path into the GUID we come here.  What we do is simply
            /// generate a 4 byte number (s_nextOverflowId).  Then look for an ancestor that has
            /// sufficient space for this ID.  By doing this, we preserve the fact that this activity
            /// is a child (of unknown depth) from that ancestor.
            /// </summary>
            [System.Security.SecurityCritical]
            private unsafe void CreateOverflowGuid(Guid* outPtr)
            {
                // Search backwards for an ancestor that has sufficient space to put the ID.
                for (ActivityInfo ancestor = m_creator; ancestor != null; ancestor = ancestor.m_creator)
                {
                    if (ancestor.m_activityPathGuidOffset <= 10)        // we need at least 2 bytes.
                    {
                        uint id = unchecked((uint)Interlocked.Increment(ref ancestor.m_lastChildID));        // Get a unique ID
                        // Try to put the ID into the GUID
                        *outPtr = ancestor.m_guid;
                        int endId = AddIdToGuid(outPtr, ancestor.m_activityPathGuidOffset, id, true);

                        // Does it fit?
                        if (endId <= 12)
                            break;
                    }
                }
            }

            /// <summary>
            /// The encoding for a list of numbers used to make Activity GUIDs.  Basically
            /// we operate on nibbles (which are nice because they show up as hex digits).  The
            /// list is ended with a end nibble (0) and depending on the nibble value (Below)
            /// the value is either encoded into nibble itself or it can spill over into the
            /// bytes that follow.
            /// </summary>
            enum NumberListCodes : byte
            {
                End = 0x0,                  // ends the list.  No valid value has this prefix.
                LastImmediateValue = 0xA,

                PrefixCode = 0xB,           // all the 'long' encodings go here.  If the next nibble is MultiByte1-4
                                            // than this is a 'overflow' id.  Unlike the hierarchical IDs these are
                                            // allocated densely but don't tell you anything about nesting. we use
                                            // these when we run out of space in the GUID to store the path.

                MultiByte1 = 0xC,           // 1 byte follows.  If this Nibble is in the high bits, it the high bits of the number are stored in the low nibble.
                // commented out because the code does not explicitly reference the names (but they are logically defined).
                // MultiByte2 = 0xD,        // 2 bytes follow (we don't bother with the nibble optimization)
                // MultiByte3 = 0xE,        // 3 bytes follow (we don't bother with the nibble optimization)
                // MultiByte4 = 0xF,        // 4 bytes follow (we don't bother with the nibble optimization)
            }

            /// Add the activity id 'id' to the output Guid 'outPtr' starting at the offset 'whereToAddId'
            /// Thus if this number is 6 that is where 'id' will be added.  This will return 13 (12
            /// is the maximum number of bytes that fit in a GUID) if the path did not fit.
            /// If 'overflow' is true, then the number is encoded as an 'overflow number (which has a
            /// special (longer prefix) that indicates that this ID is allocated differently
            [System.Security.SecurityCritical]
            private static unsafe int AddIdToGuid(Guid* outPtr, int whereToAddId, uint id, bool overflow = false)
            {
                byte* ptr = (byte*)outPtr;
                byte* endPtr = ptr + 12;
                ptr += whereToAddId;
                if (endPtr <= ptr)
                    return 13;        // 12 means we might exactly fit, 13 means we definitely did not fit

                if (0 < id && id <= (uint)NumberListCodes.LastImmediateValue && !overflow)
                    WriteNibble(ref ptr, endPtr, id);
                else
                {
                    uint len = 4;
                    if (id <= 0xFF)
                        len = 1;
                    else if (id <= 0xFFFF)
                        len = 2;
                    else if (id <= 0xFFFFFF)
                        len = 3;

                    if (overflow)
                    {
                        if (endPtr <= ptr + 2)        // I need at least 2 bytes
                            return 13;

                        // Write out the prefix code nibble and the length nibble
                        WriteNibble(ref ptr, endPtr, (uint)NumberListCodes.PrefixCode);
                    }
                    // The rest is the same for overflow and non-overflow case
                    WriteNibble(ref ptr, endPtr, (uint)NumberListCodes.MultiByte1 + (len - 1));

                    // Do we have an odd nibble?  If so flush it or use it for the 12 byte case.
                    if (ptr < endPtr && *ptr != 0)
                    {
                        // If the value < 4096 we can use the nibble we are otherwise just outputting as padding.
                        if (id < 4096)
                        {
                            // Indicate this is a 1 byte multicode with 4 high order bits in the lower nibble.
                            *ptr = (byte)(((uint)NumberListCodes.MultiByte1 << 4) + (id >> 8));
                            id &= 0xFF;        // Now we only want the low order bits.
                        }
                        ptr++;
                    }

                    // Write out the bytes.
                    while (0 < len)
                    {
                        if (endPtr <= ptr)
                        {
                            ptr++;        // Indicate that we have overflowed
                            break;
                        }
                        *ptr++ = (byte)id;
                        id = (id >> 8);
                        --len;
                    }
                }

                // Compute the checksum
                uint* sumPtr = (uint*)outPtr;
                // We set the last DWORD the sum of the first 3 DWORDS in the GUID.
                // (cont.) This
                sumPtr[3] = sumPtr[0] + sumPtr[1] + sumPtr[2] + 0x599D99AD;        // This last number is a random number (it identifies us as us)
                return (int)(ptr - ((byte*)outPtr));
            }

            /// <summary>
            /// Write a single nibble 'value' (must be 0-15) to the byte buffer represented by *ptr.
            /// Will not go past 'endPtr'.  Also it assumes that we never write 0 so we can detect
            /// whether a nibble has already been written to ptr because it will be nonzero.
            /// Thus if it is non-zero it adds to the current byte, otherwise it advances and writes
            /// the new byte (in the high bits) of the next byte.
            /// </summary>
            [System.Security.SecurityCritical]
            private static unsafe void WriteNibble(ref byte* ptr, byte* endPtr, uint value)
            {
                Contract.Assert(0 <= value && value < 16);
                Contract.Assert(ptr < endPtr);

                if (*ptr != 0)
                    *ptr++ |= (byte)value;
                else
                    *ptr = (byte)(value << 4);
            }

            #endregion // CreateGuidForActivityPath

            readonly internal string m_name;                         // The name used in the 'start' and 'stop' APIs to help match up
            readonly long m_uniqueId;                                // a small number that makes this activity unique among its siblings
            internal readonly Guid m_guid;                           // Activity Guid, it is basically an encoding of the Path() (see CreateActivityPathGuid)
            internal readonly int m_activityPathGuidOffset;          // Keeps track of where in m_guid the causality path stops (used to generated child GUIDs)
            internal readonly int m_level;                           // current depth of the Path() of the activity (used to keep recursion under control)
            readonly internal EventActivityOptions m_eventOptions;   // Options passed to start.
            internal long m_lastChildID;                             // used to create a unique ID for my children activities
            internal int m_stopped;                                  // This work item has stopped
            readonly internal ActivityInfo m_creator;                // My parent (creator).  Forms the Path() for the activity.
            readonly internal Guid m_activityIdToRestore;            // The Guid to restore after a stop.
            #endregion
        }

        // This callback is used to initialize the m_current AsyncLocal Variable.
        // Its job is to keep the ETW Activity ID (part of thread local storage) in sync
        // with m_current.ActivityID
        void ActivityChanging(AsyncLocalValueChangedArgs<ActivityInfo> args)
        {
            ActivityInfo cur = args.CurrentValue;
            ActivityInfo prev = args.PreviousValue;

            // Are we popping off a value?  (we have a prev, and its creator is cur)
            // Then check if we should use the GUID at the time of the start event
            if (prev != null && prev.m_creator == cur)
            {
                // If the saved activity ID is not the same as the creator activity
                // that takes precedence (it means someone explicitly did a SetActivityID)
                // Set it to that and get out
                if (cur == null || prev.m_activityIdToRestore != cur.ActivityId)
                {
                    EventSource.SetCurrentThreadActivityId(prev.m_activityIdToRestore);
                    return;
                }
            }

            // OK we did not have an explicit SetActivityID set.  Then we should be
            // setting the activity to current ActivityInfo.  However that activity
            // might be dead, in which case we should skip it, so we never set
            // the ID to dead things.
            while (cur != null)
            {
                // We found a live activity (typically the first time), set it to that.
                if (cur.m_stopped == 0)
                {
                    EventSource.SetCurrentThreadActivityId(cur.ActivityId);
                    return;
                }
                cur = cur.m_creator;
            }
            // we can get here if there is no information on our activity stack (everything is dead)
            // currently we do nothing, as that seems better than setting to Guid.Empty.
        }

        /// <summary>
        /// Async local variables have the property that they are automatically copied whenever a task is created and used
        /// while that task is running.  Thus m_current 'flows' to any task that is caused by the current thread that
        /// last set it.
        ///
        /// This variable points to a linked list that represents all Activities that have started but have not stopped.
        /// </summary>
        AsyncLocal<ActivityInfo> m_current;
        bool m_checkedForEnable;

        // Singleton
        private static ActivityTracker s_activityTrackerInstance = new ActivityTracker();

        // Used to create unique IDs at the top level.  Not used for nested Ids (each activity has its own id generator)
        static long m_nextId = 0;

        private const ushort MAX_ACTIVITY_DEPTH = 100;        // Limit maximum depth of activities to be tracked at 100.
                                                              // This will avoid leaking memory in case of activities that are never stopped.

        #endregion
    }

#if ES_BUILD_STANDALONE || PROJECTN
    /******************************** SUPPORT *****************************/
    /// <summary>
    /// This is supplied by the framework.  It has the semantics that the value is copied to any new Tasks that is created
    /// by the current task.  Thus all causally related code gets this value.  Note that reads and writes to this VARIABLE
    /// (not what it points it) to this does not need to be protected by locks because it is inherently thread local (you always
    /// only get your thread local copy which means that you never have races.
    /// </summary>
    ///
#if ES_BUILD_STANDALONE
    [EventSource(Name = "Microsoft.Tasks.Nuget")]
#else
    [EventSource(Name = "System.Diagnostics.Tracing.TplEtwProvider")]
#endif
    internal class TplEtwProvider : EventSource
    {
        // Keywords gating the debug-only facility messages below.
        public class Keywords
        {
            public const EventKeywords TasksFlowActivityIds = (EventKeywords)0x80;
            public const EventKeywords Debug = (EventKeywords)0x20000;
        }

        public static TplEtwProvider Log = new TplEtwProvider();
        public bool Debug { get { return IsEnabled(EventLevel.Verbose, Keywords.Debug); } }

        public void DebugFacilityMessage(string Facility, string Message) { WriteEvent(1, Facility, Message); }
        public void DebugFacilityMessage1(string Facility, string Message, string Arg) { WriteEvent(2, Facility, Message, Arg); }
        public void SetActivityId(Guid Id) { WriteEvent(3, Id); }
    }
#endif

#if ES_BUILD_AGAINST_DOTNET_V35 || ES_BUILD_PCL || NO_ASYNC_LOCAL
    // In these cases we don't have any Async local support.  Do nothing.
internal sealed class AsyncLocalValueChangedArgs<T> { public T PreviousValue { get { return default(T); } } public T CurrentValue { get { return default(T); } } } internal sealed class AsyncLocal<T> { public AsyncLocal(Action<AsyncLocalValueChangedArgs<T>> valueChangedHandler) { throw new NotImplementedException("AsyncLocal only available on V4.6 and above"); } public T Value { get { return default(T); } set { } } } #endif }
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
using System;
using System.Diagnostics.Contracts;

namespace System
{
    /// <summary>
    /// Code-contract reference declarations for <see cref="System.Enum"/>.
    /// The method bodies are placeholders (they return default values); only the
    /// Contract.Requires / Contract.Ensures annotations are meaningful to the tools.
    /// </summary>
    public abstract class Enum
    {
#if !SILVERLIGHT
        [Pure]
        public static string Format(Type enumType, object value, string format)
        {
            Contract.Requires(enumType != null);
            Contract.Requires(value != null);
            Contract.Requires(format != null);
            Contract.Ensures(Contract.Result<string>() != null);

            return default(string);
        }
#endif

        [Pure]
        public static string GetName(Type enumType, object value)
        {
            Contract.Requires(enumType != null);
            Contract.Requires(value != null);
            Contract.Ensures(Contract.Result<string>() != null);

            return default(string);
        }

#if !SILVERLIGHT
        [Pure]
        public static string[] GetNames(Type enumType)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<string[]>() != null);

            return default(string[]);
        }
#endif

#if NETFRAMEWORK_4_0 || SILVERLIGHT_4_0 || SILVERLIGHT_5_0
        [Pure]
        extern public virtual TypeCode GetTypeCode();

        [Pure]
        extern public bool HasFlag(Enum flag);
#endif

        [Pure]
        extern public static bool IsDefined(Type enumType, object value);

        [Pure]
        public string ToString(string format)
        {
            Contract.Ensures(Contract.Result<string>() != null);

            return default(string);
        }

#if NETFRAMEWORK_4_0 || SILVERLIGHT_4_0 || SILVERLIGHT_5_0
        [Pure]
        extern public static bool TryParse<TEnum>(string value, out TEnum result) where TEnum : struct;

        [Pure]
        extern public static bool TryParse<TEnum>(string value, bool ignoreCase, out TEnum result) where TEnum : struct;
#endif

#if !SILVERLIGHT
        [Pure]
        public static object Parse(Type enumType, string value)
        {
            Contract.Requires(enumType != null);
            Contract.Requires(value != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }
#endif

        [Pure]
        public static object Parse(Type enumType, string value, bool ignoreCase)
        {
            Contract.Requires(enumType != null);
            Contract.Requires(value != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }

        [Pure]
        public static Type GetUnderlyingType(Type enumType)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<Type>() != null);

            return default(Type);
        }

#if !SILVERLIGHT
        // Summary:
        //     Retrieves an array of the values of the constants in the specified enumeration.
        //
        // Exceptions:
        //   System.ArgumentNullException: enumType is null.
        //   System.ArgumentException: enumType is not an System.Enum.
        [Pure]
        public static Array GetValues(Type enumType)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<Array>() != null);

            return default(Array);
        }
#endif

#if !SILVERLIGHT
        // Summary:
        //     Returns an instance of the specified enumeration type set to the specified
        //     8-bit unsigned integer value.
        //
        // Exceptions:
        //   System.ArgumentNullException: enumType is null.
        //   System.ArgumentException: enumType is not an System.Enum.
        [Pure]
        public static object ToObject(Type enumType, byte value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }

        // Summary:
        //     Returns an instance of the specified enumeration type set to the specified
        //     32-bit signed integer value.
        //
        // Exceptions:
        //   System.ArgumentNullException: enumType is null.
        //   System.ArgumentException: enumType is not an System.Enum.
        [Pure]
        public static object ToObject(Type enumType, int value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }

        // Summary:
        //     Returns an instance of the specified enumeration type set to the specified
        //     64-bit signed integer value.
        //
        // Exceptions:
        //   System.ArgumentNullException: enumType is null.
        //   System.ArgumentException: enumType is not an System.Enum.-or- value is not type System.SByte, System.Int16,
        //     System.Int32, System.Int64, System.Byte, System.UInt16, System.UInt32, or
        //     System.UInt64.
        [Pure]
        public static object ToObject(Type enumType, long value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }
#endif

        [Pure]
        public static object ToObject(Type enumType, object value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }

#if !SILVERLIGHT
        [Pure]
        public static object ToObject(Type enumType, sbyte value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }

        [Pure]
        public static object ToObject(Type enumType, short value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }

        [Pure]
        public static object ToObject(Type enumType, uint value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }
#endif

#if !SILVERLIGHT
        [Pure]
        public static object ToObject(Type enumType, ulong value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }
#endif

#if !SILVERLIGHT
        // Summary:
        //     Returns an instance of the specified enumeration type set to the specified
        //     16-bit unsigned integer value.
        //
        // Exceptions:
        //   System.ArgumentNullException: enumType is null.
        //   System.ArgumentException: enumType is not an System.Enum.
        [Pure]
        public static object ToObject(Type enumType, ushort value)
        {
            Contract.Requires(enumType != null);
            Contract.Ensures(Contract.Result<object>() != null);

            return default(object);
        }
#endif
    }
}
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using Csla;
using Csla.Data;

namespace Invoices.Business
{
    /// <summary>
    /// SupplierList (read only list).<br/>
    /// This is a generated <see cref="SupplierList"/> business object.
    /// This class is a root collection.
    /// </summary>
    /// <remarks>
    /// The items of the collection are <see cref="SupplierInfo"/> objects.
    /// NOTE: generated CSLA code — regenerate from the template rather than hand-editing.
    /// </remarks>
    [Serializable]
#if WINFORMS
    public partial class SupplierList : ReadOnlyBindingListBase<SupplierList, SupplierInfo>
#else
    public partial class SupplierList : ReadOnlyListBase<SupplierList, SupplierInfo>
#endif
    {
        #region Event handler properties

        [NotUndoable]
        private static bool _singleInstanceSavedHandler = true;

        /// <summary>
        /// Gets or sets a value indicating whether only a single instance should handle the Saved event.
        /// </summary>
        /// <value>
        /// <c>true</c> if only a single instance should handle the Saved event; otherwise, <c>false</c>.
        /// </value>
        public static bool SingleInstanceSavedHandler
        {
            get { return _singleInstanceSavedHandler; }
            set { _singleInstanceSavedHandler = value; }
        }

        #endregion

        #region Collection Business Methods

        /// <summary>
        /// Determines whether a <see cref="SupplierInfo"/> item is in the collection.
        /// </summary>
        /// <param name="supplierId">The SupplierId of the item to search for.</param>
        /// <returns><c>true</c> if the SupplierInfo is a collection item; otherwise, <c>false</c>.</returns>
        public bool Contains(int supplierId)
        {
            foreach (var supplierInfo in this)
            {
                if (supplierInfo.SupplierId == supplierId)
                {
                    return true;
                }
            }
            return false;
        }

        #endregion

        #region Private Fields

        // In-memory cache of the whole (unfiltered) list; see Cache Management Methods.
        private static SupplierList _list;

        #endregion

        #region Cache Management Methods

        /// <summary>
        /// Clears the in-memory SupplierList cache so it is reloaded on the next request.
        /// </summary>
        public static void InvalidateCache()
        {
            _list = null;
        }

        /// <summary>
        /// Used by async loaders to load the cache.
        /// </summary>
        /// <param name="list">The list to cache.</param>
        internal static void SetCache(SupplierList list)
        {
            _list = list;
        }

        // True when the unfiltered list is currently cached.
        internal static bool IsCached
        {
            get { return _list != null; }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Loads a <see cref="SupplierList"/> collection (from the cache when available).
        /// </summary>
        /// <returns>A reference to the fetched <see cref="SupplierList"/> collection.</returns>
        public static SupplierList GetSupplierList()
        {
            if (_list == null)
                _list = DataPortal.Fetch<SupplierList>();
            return _list;
        }

        /// <summary>
        /// Factory method. Loads a <see cref="SupplierList"/> collection, based on given parameters.
        /// Filtered fetches are never cached.
        /// </summary>
        /// <param name="name">The Name parameter of the SupplierList to fetch.</param>
        /// <returns>A reference to the fetched <see cref="SupplierList"/> collection.</returns>
        public static SupplierList GetSupplierList(string name)
        {
            return DataPortal.Fetch<SupplierList>(name);
        }

        /// <summary>
        /// Factory method. Asynchronously loads a <see cref="SupplierList"/> collection.
        /// The callback receives the cached list immediately when one exists.
        /// </summary>
        /// <param name="callback">The completion callback method.</param>
        public static void GetSupplierList(EventHandler<DataPortalResult<SupplierList>> callback)
        {
            if (_list == null)
                DataPortal.BeginFetch<SupplierList>((o, e) =>
                    {
                        _list = e.Object;
                        callback(o, e);
                    });
            else
                callback(null, new DataPortalResult<SupplierList>(_list, null, null));
        }

        /// <summary>
        /// Factory method. Asynchronously loads a <see cref="SupplierList"/> collection, based on given parameters.
        /// </summary>
        /// <param name="name">The Name parameter of the SupplierList to fetch.</param>
        /// <param name="callback">The completion callback method.</param>
        public static void GetSupplierList(string name, EventHandler<DataPortalResult<SupplierList>> callback)
        {
            DataPortal.BeginFetch<SupplierList>(name, callback);
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="SupplierList"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public SupplierList()
        {
            // Use factory methods and do not use direct creation.
            SupplierEditSaved.Register(this);
            var rlce = RaiseListChangedEvents;
            RaiseListChangedEvents = false;
            AllowNew = false;
            AllowEdit = false;
            AllowRemove = false;
            RaiseListChangedEvents = rlce;
        }

        #endregion

        #region Saved Event Handler

        /// <summary>
        /// Handle Saved events of <see cref="SupplierEdit"/> to update the list of <see cref="SupplierInfo"/> objects:
        /// inserts on IsNew, removes on IsDeleted, otherwise refreshes the matching item in place.
        /// </summary>
        /// <param name="sender">The sender of the event.</param>
        /// <param name="e">The <see cref="Csla.Core.SavedEventArgs"/> instance containing the event data.</param>
        internal void SupplierEditSavedHandler(object sender, Csla.Core.SavedEventArgs e)
        {
            var obj = (SupplierEdit)e.NewObject;
            if (((SupplierEdit)sender).IsNew)
            {
                // New supplier saved: append its info to the read-only list.
                IsReadOnly = false;
                var rlce = RaiseListChangedEvents;
                RaiseListChangedEvents = true;
                Add(SupplierInfo.LoadInfo(obj));
                RaiseListChangedEvents = rlce;
                IsReadOnly = true;
            }
            else if (((SupplierEdit)sender).IsDeleted)
            {
                // Supplier deleted: drop the matching item.
                for (int index = 0; index < this.Count; index++)
                {
                    var child = this[index];
                    if (child.SupplierId == obj.SupplierId)
                    {
                        IsReadOnly = false;
                        var rlce = RaiseListChangedEvents;
                        RaiseListChangedEvents = true;
                        this.RemoveItem(index);
                        RaiseListChangedEvents = rlce;
                        IsReadOnly = true;
                        break;
                    }
                }
            }
            else
            {
                // Supplier updated: refresh the matching item and raise a change notification.
                for (int index = 0; index < this.Count; index++)
                {
                    var child = this[index];
                    if (child.SupplierId == obj.SupplierId)
                    {
                        child.UpdatePropertiesOnSaved(obj);
#if !WINFORMS
                        var notifyCollectionChangedEventArgs =
                            new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Replace, child, child, index);
                        OnCollectionChanged(notifyCollectionChangedEventArgs);
#else
                        var listChangedEventArgs = new ListChangedEventArgs(ListChangedType.ItemChanged, index);
                        OnListChanged(listChangedEventArgs);
#endif
                        break;
                    }
                }
            }
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads a <see cref="SupplierList"/> collection from the database or from the cache.
        /// </summary>
        protected void DataPortal_Fetch()
        {
            if (IsCached)
            {
                LoadCachedList();
                return;
            }
            using (var ctx = ConnectionManager<SqlConnection>.GetManager(Database.InvoicesConnection, false))
            {
                GetQueryGetSupplierList();
                using (var cmd = new SqlCommand(getSupplierListInlineQuery, ctx.Connection))
                {
                    cmd.CommandType = CommandType.Text;
                    var args = new DataPortalHookArgs(cmd);
                    OnFetchPre(args);
                    LoadCollection(cmd);
                    OnFetchPost(args);
                }
            }
            _list = this;
        }

        // Copies the cached list's items into this (freshly fetched) instance.
        private void LoadCachedList()
        {
            IsReadOnly = false;
            var rlce = RaiseListChangedEvents;
            RaiseListChangedEvents = false;
            AddRange(_list);
            RaiseListChangedEvents = rlce;
            IsReadOnly = true;
        }

        /// <summary>
        /// Loads a <see cref="SupplierList"/> collection from the database, based on given criteria.
        /// </summary>
        /// <param name="name">The Name.</param>
        protected void DataPortal_Fetch(string name)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager(Database.InvoicesConnection, false))
            {
                GetQueryGetSupplierListByName(name);
                using (var cmd = new SqlCommand(getSupplierListByNameInlineQuery, ctx.Connection))
                {
                    cmd.CommandType = CommandType.Text;
                    cmd.Parameters.AddWithValue("@Name", name).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd, name);
                    OnFetchPre(args);
                    LoadCollection(cmd);
                    OnFetchPost(args);
                }
            }
        }

        // Executes the command and streams the rows into Fetch().
        private void LoadCollection(SqlCommand cmd)
        {
            using (var dr = new SafeDataReader(cmd.ExecuteReader()))
            {
                Fetch(dr);
            }
        }

        /// <summary>
        /// Loads all <see cref="SupplierList"/> collection items from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            IsReadOnly = false;
            var rlce = RaiseListChangedEvents;
            RaiseListChangedEvents = false;
            while (dr.Read())
            {
                Add(DataPortal.FetchChild<SupplierInfo>(dr));
            }
            RaiseListChangedEvents = rlce;
            IsReadOnly = true;
        }

        #endregion

        #region Inline queries fields and partial methods

        // Inline SQL text supplied by the partial GetQuery* methods below.
        [NotUndoable, NonSerialized]
        private string getSupplierListInlineQuery;
        [NotUndoable, NonSerialized]
        private string getSupplierListByNameInlineQuery;

        partial void GetQueryGetSupplierList();
        partial void GetQueryGetSupplierListByName(string name);

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        #endregion

        #region SupplierEditSaved nested class

        // TODO: edit "SupplierList.cs", uncomment the "OnDeserialized" method and add the following line:
        // TODO: SupplierEditSaved.Register(this);

        /// <summary>
        /// Nested class to manage the Saved events of <see cref="SupplierEdit"/>
        /// to update the list of <see cref="SupplierInfo"/> objects.
        /// Instances are tracked via WeakReference so registration does not keep lists alive.
        /// </summary>
        private static class SupplierEditSaved
        {
            private static List<WeakReference> _references;

            private static bool Found(object obj)
            {
                return _references.Any(reference => Equals(reference.Target, obj));
            }

            /// <summary>
            /// Registers a SupplierList instance to handle Saved events,
            /// to update the list of <see cref="SupplierInfo"/> objects.
            /// </summary>
            /// <param name="obj">The SupplierList instance.</param>
            public static void Register(SupplierList obj)
            {
                var mustRegister = _references == null;
                if (mustRegister)
                    _references = new List<WeakReference>();

                // Single-instance mode: the newest list replaces all earlier subscribers.
                if (SupplierList.SingleInstanceSavedHandler)
                    _references.Clear();

                if (!Found(obj))
                    _references.Add(new WeakReference(obj));

                // Subscribe to the static event only once, on first registration.
                if (mustRegister)
                    SupplierEdit.SupplierEditSaved += SupplierEditSavedHandler;
            }

            /// <summary>
            /// Handles Saved events of <see cref="SupplierEdit"/>.
            /// </summary>
            /// <param name="sender">The sender of the event.</param>
            /// <param name="e">The <see cref="Csla.Core.SavedEventArgs"/> instance containing the event data.</param>
            public static void SupplierEditSavedHandler(object sender, Csla.Core.SavedEventArgs e)
            {
                foreach (var reference in _references)
                {
                    if (reference.IsAlive)
                        ((SupplierList) reference.Target).SupplierEditSavedHandler(sender, e);
                }
            }

            /// <summary>
            /// Removes event handling and clears all registered SupplierList instances.
            /// </summary>
            public static void Unregister()
            {
                SupplierEdit.SupplierEditSaved -= SupplierEditSavedHandler;
                _references = null;
            }
        }

        #endregion

    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections; using System.Diagnostics; using System.Xml; namespace System.Configuration { [DebuggerDisplay("Count = {Count}")] public abstract class ConfigurationElementCollection : ConfigurationElement, ICollection { internal const string DefaultAddItemName = "add"; internal const string DefaultRemoveItemName = "remove"; internal const string DefaultClearItemsName = "clear"; private readonly IComparer _comparer; private string _addElement = DefaultAddItemName; private string _clearElement = DefaultClearItemsName; private bool _collectionCleared; private bool _emitClearTag; private int _inheritedCount; // Total number of inherited items private bool _modified; private bool _readOnly; private int _removedItemCount; // Number of items removed for this collection (not including parent) private string _removeElement = DefaultRemoveItemName; internal bool InternalAddToEnd = false; internal string InternalElementTagName = string.Empty; protected ConfigurationElementCollection() { } protected ConfigurationElementCollection(IComparer comparer) { if (comparer == null) throw new ArgumentNullException(nameof(comparer)); _comparer = comparer; } private ArrayList Items { get; } = new ArrayList(); protected internal string AddElementName { get { return _addElement; } set { _addElement = value; if (BaseConfigurationRecord.IsReservedAttributeName(value)) throw new ArgumentException(SR.Format(SR.Item_name_reserved, DefaultAddItemName, value)); } } protected internal string RemoveElementName { get { return _removeElement; } set { if (BaseConfigurationRecord.IsReservedAttributeName(value)) throw new ArgumentException(SR.Format(SR.Item_name_reserved, DefaultRemoveItemName, value)); _removeElement = value; } } protected internal string ClearElementName { get { return 
_clearElement; } set { if (BaseConfigurationRecord.IsReservedAttributeName(value)) throw new ArgumentException(SR.Format(SR.Item_name_reserved, DefaultClearItemsName, value)); _clearElement = value; } } public bool EmitClear { get { return _emitClearTag; } set { if (IsReadOnly()) throw new ConfigurationErrorsException(SR.Config_base_read_only); if (value) { CheckLockedElement(_clearElement, null); // has clear been locked? CheckLockedElement(_removeElement, null); // has remove been locked? Clear implies remove } _modified = true; _emitClearTag = value; } } protected virtual string ElementName => ""; internal string LockableElements { get { if ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) || (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate)) { string elementNames = "'" + AddElementName + "'"; // Must have an add if (RemoveElementName.Length != 0) elementNames += ", '" + RemoveElementName + "'"; if (ClearElementName.Length != 0) elementNames += ", '" + ClearElementName + "'"; return elementNames; } if (!string.IsNullOrEmpty(ElementName)) return "'" + ElementName + "'"; return string.Empty; } } protected virtual bool ThrowOnDuplicate => (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) || (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate); public virtual ConfigurationElementCollectionType CollectionType => ConfigurationElementCollectionType.AddRemoveClearMap; public int Count => Items.Count - _removedItemCount; public bool IsSynchronized => false; public object SyncRoot => null; void ICollection.CopyTo(Array arr, int index) { foreach (Entry entry in Items) if (entry.EntryType != EntryType.Removed) arr.SetValue(entry.Value, index++); } public IEnumerator GetEnumerator() { return GetEnumeratorImpl(); } internal override void AssociateContext(BaseConfigurationRecord configRecord) { base.AssociateContext(configRecord); foreach (Entry entry in Items) 
entry.Value?.AssociateContext(configRecord); } protected internal override bool IsModified() { if (_modified) return true; if (base.IsModified()) return true; foreach (Entry entry in Items) { if (entry.EntryType == EntryType.Removed) continue; ConfigurationElement elem = entry.Value; if (elem.IsModified()) return true; } return false; } protected internal override void ResetModified() { _modified = false; base.ResetModified(); foreach (Entry entry in Items) { if (entry.EntryType == EntryType.Removed) continue; ConfigurationElement elem = entry.Value; elem.ResetModified(); } } public override bool IsReadOnly() { return _readOnly; } protected internal override void SetReadOnly() { _readOnly = true; foreach (Entry entry in Items) { if (entry.EntryType == EntryType.Removed) continue; ConfigurationElement elem = entry.Value; elem.SetReadOnly(); } } internal virtual IEnumerator GetEnumeratorImpl() { return new Enumerator(Items, this); } internal IEnumerator GetElementsEnumerator() { // Return an enumerator over the collection's config elements. 
    // This is different than the std GetEnumerator because that one
    // can return a different set of items if overridden in a derived class.
    return new Enumerator(Items, this);
}

/// <summary>
/// Collections are equal when they are the same runtime type, have the same live
/// Count, and every element here has an equal element somewhere in the other
/// collection (order-insensitive containment check).
/// </summary>
public override bool Equals(object compareTo)
{
    if (compareTo == null || compareTo.GetType() != GetType()) return false;
    ConfigurationElementCollection compareToElem = (ConfigurationElementCollection)compareTo;
    if (Count != compareToElem.Count) return false;
    foreach (Entry thisEntry in Items)
    {
        bool found = false;
        foreach (Entry compareEntry in compareToElem.Items)
        {
            if (!Equals(thisEntry.Value, compareEntry.Value)) continue;
            found = true;
            break;
        }
        if (found == false)
        {
            // not in the collection must be different
            return false;
        }
    }
    return true;
}

/// <summary>XOR of all entry values' hash codes (order-insensitive, matching Equals).</summary>
public override int GetHashCode()
{
    int hHashCode = 0;
    foreach (Entry thisEntry in Items)
    {
        ConfigurationElement elem = thisEntry.Value;
        hHashCode ^= elem.GetHashCode();
    }
    return hHashCode;
}

/// <summary>
/// Rebuilds this collection as the delta between <paramref name="sourceElement"/> and
/// <paramref name="parentElement"/> for the given save mode: classifies each source entry
/// relative to the parent (see InheritedType) and emits add/remove pairs accordingly.
/// </summary>
protected internal override void Unmerge(ConfigurationElement sourceElement, ConfigurationElement parentElement,
    ConfigurationSaveMode saveMode)
{
    base.Unmerge(sourceElement, parentElement, saveMode);
    if (sourceElement == null) return;
    ConfigurationElementCollection parentCollection = parentElement as ConfigurationElementCollection;
    ConfigurationElementCollection sourceCollection = sourceElement as ConfigurationElementCollection;
    Hashtable inheritance = new Hashtable(); // key -> InheritedType classification
    _lockedAllExceptAttributesList = sourceElement._lockedAllExceptAttributesList;
    _lockedAllExceptElementsList = sourceElement._lockedAllExceptElementsList;
    _itemLockedFlag = sourceElement._itemLockedFlag;
    _lockedAttributesList = sourceElement._lockedAttributesList;
    _lockedElementsList = sourceElement._lockedElementsList;
    AssociateContext(sourceElement._configRecord);
    if (parentElement != null)
    {
        // Locks defined in the parent are unmerged against the source's locks.
        if (parentElement._lockedAttributesList != null)
        {
            _lockedAttributesList = UnMergeLockList(sourceElement._lockedAttributesList,
                parentElement._lockedAttributesList, saveMode);
        }
        if (parentElement._lockedElementsList != null)
        {
            _lockedElementsList = UnMergeLockList(sourceElement._lockedElementsList,
                parentElement._lockedElementsList, saveMode);
        }
        if (parentElement._lockedAllExceptAttributesList != null)
        {
            _lockedAllExceptAttributesList = UnMergeLockList(sourceElement._lockedAllExceptAttributesList,
                parentElement._lockedAllExceptAttributesList, saveMode);
        }
        if (parentElement._lockedAllExceptElementsList != null)
        {
            _lockedAllExceptElementsList = UnMergeLockList(sourceElement._lockedAllExceptElementsList,
                parentElement._lockedAllExceptElementsList, saveMode);
        }
    }
    if ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) ||
        (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
    {
        // When writing out portable configurations the <clear/> tag should be written
        _collectionCleared = sourceCollection._collectionCleared;
        EmitClear = ((saveMode == ConfigurationSaveMode.Full) && (_clearElement.Length != 0)) ||
            ((saveMode == ConfigurationSaveMode.Modified) && _collectionCleared) ||
            sourceCollection.EmitClear;
        // Seed the classification table with every live parent entry.
        if ((parentCollection != null) && (EmitClear != true))
        {
            foreach (Entry entry in parentCollection.Items)
                if (entry.EntryType != EntryType.Removed)
                    inheritance[entry.GetKey(this)] = InheritedType.InParent;
        }
        // Classify each live source entry against the parent.
        foreach (Entry entry in sourceCollection.Items)
        {
            if (entry.EntryType == EntryType.Removed) continue;
            if (inheritance.Contains(entry.GetKey(this)))
            {
                Entry parentEntry = (Entry)parentCollection.Items[parentCollection.RealIndexOf(entry.Value)];
                ConfigurationElement elem = entry.Value;
                if (elem.Equals(parentEntry.Value))
                {
                    // in modified mode we consider any change to be different than the parent
                    inheritance[entry.GetKey(this)] = InheritedType.InBothSame;
                    if (saveMode != ConfigurationSaveMode.Modified) continue;
                    if (elem.IsModified()) inheritance[entry.GetKey(this)] = InheritedType.InBothDiff;
                    else
                    {
                        if (elem.ElementPresent)
                        {
                            // This is when the source file contained the entry but it was an
                            // exact copy. We don't want to emit a remove so we treat it as
                            // a special case.
                            inheritance[entry.GetKey(this)] = InheritedType.InBothCopyNoRemove;
                        }
                    }
                }
                else
                {
                    inheritance[entry.GetKey(this)] = InheritedType.InBothDiff;
                    if ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate) &&
                        (entry.EntryType == EntryType.Added))
                    {
                        // Special case for defect number 529517: this allows the config to
                        // write out the same xml when no remove was present during deserialization.
                        inheritance[entry.GetKey(this)] = InheritedType.InBothCopyNoRemove;
                    }
                }
            }
            else
            {
                // not in parent
                inheritance[entry.GetKey(this)] = InheritedType.InSelf;
            }
        }
        // Emit a remove instruction for parent entries that differ or are absent here.
        if ((parentCollection != null) && (EmitClear != true))
        {
            foreach (Entry entry in parentCollection.Items)
            {
                if (entry.EntryType == EntryType.Removed) continue;
                InheritedType tp = (InheritedType)inheritance[entry.GetKey(this)];
                if ((tp != InheritedType.InParent) && (tp != InheritedType.InBothDiff)) continue;
                ConfigurationElement elem = CallCreateNewElement(entry.GetKey(this).ToString());
                elem.Reset(entry.Value); // copy this entry
                BaseAdd(elem, ThrowOnDuplicate, true); // Add it (so that it once existed in the temp)
                BaseRemove(entry.GetKey(this), false); // now remove it to force a remove instruction
            }
        }
        // Emit adds for entries that are new here or differ from the parent.
        foreach (Entry entry in sourceCollection.Items)
        {
            if (entry.EntryType == EntryType.Removed) continue;
            InheritedType tp = (InheritedType)inheritance[entry.GetKey(this)];
            if ((tp != InheritedType.InSelf) && (tp != InheritedType.InBothDiff) &&
                (tp != InheritedType.InBothCopyNoRemove))
                continue;
            ConfigurationElement elem = CallCreateNewElement(entry.GetKey(this).ToString());
            elem.Unmerge(entry.Value, null, saveMode);
            if (tp == InheritedType.InSelf) elem.RemoveAllInheritedLocks(); // If the key changed only local locks are kept
            BaseAdd(elem, ThrowOnDuplicate, true); // Add it
        }
    }
    else
    {
        if ((CollectionType != ConfigurationElementCollectionType.BasicMap) &&
            (CollectionType != ConfigurationElementCollectionType.BasicMapAlternate))
            return;
        // BasicMap path: only Added/Replaced source entries are considered.
        foreach (Entry entry in sourceCollection.Items)
        {
            bool foundKeyInParent = false;
            Entry parentEntrySaved = null;
            if ((entry.EntryType != EntryType.Added) && (entry.EntryType != EntryType.Replaced)) continue;
            bool inParent = false;
            if (parentCollection != null)
            {
                foreach (Entry parentEntry in parentCollection.Items)
                {
                    if (Equals(entry.GetKey(this), parentEntry.GetKey(this)))
                    {
                        // For basic map collections where the key is actually an element
                        // we do not want the merging behavior or data will not get written
                        // out for the properties if they match the first element deemed to be a parent.
                        // For example <allow verbs="NewVerb" users="*"/> would lose the users because
                        // an entry exists in the root element.
                        if (!IsElementName(entry.GetKey(this).ToString()))
                        {
                            // Elements which are not keyed by the element name need to be unmerged.
                            foundKeyInParent = true;
                            parentEntrySaved = parentEntry;
                        }
                    }
                    if (!Equals(entry.Value, parentEntry.Value)) continue;
                    foundKeyInParent = true;
                    inParent = true; // in parent and the same exact values
                    parentEntrySaved = parentEntry;
                    break;
                }
            }
            ConfigurationElement elem = CallCreateNewElement(entry.GetKey(this).ToString());
            if (!foundKeyInParent)
            {
                // Unmerge is similar to a reset when used like this,
                // except that it handles the different update modes
                // which Reset does not understand.
                elem.Unmerge(entry.Value, null, saveMode); // copy this entry
                BaseAdd(-1, elem, true); // Add it
            }
            else
            {
                ConfigurationElement sourceItem = entry.Value;
                if (inParent &&
                    ((saveMode != ConfigurationSaveMode.Modified) || !sourceItem.IsModified()) &&
                    (saveMode != ConfigurationSaveMode.Full))
                    continue;
                elem.Unmerge(entry.Value, parentEntrySaved.Value, saveMode);
                BaseAdd(-1, elem, true); // Add it
            }
        }
    }
}

/// <summary>
/// Replaces this collection's contents with copies of the parent collection's
/// Added/Replaced entries; afterwards every item counts as inherited.
/// </summary>
protected internal override void Reset(ConfigurationElement parentElement)
{
    ConfigurationElementCollection parentCollection = parentElement as ConfigurationElementCollection;
    ResetLockLists(parentElement);
    if (parentCollection != null)
    {
        foreach (Entry entry in parentCollection.Items)
        {
            ConfigurationElement elem = CallCreateNewElement(entry.GetKey(this).ToString());
            elem.Reset(entry.Value);
            if (((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) ||
                (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate)) &&
                ((entry.EntryType == EntryType.Added) || (entry.EntryType == EntryType.Replaced)))
            {
                // do not add removed items from the parent
                BaseAdd(elem, true, true); // This version combines dups and throws (unless overridden)
            }
            else
            {
                if ((CollectionType == ConfigurationElementCollectionType.BasicMap) ||
                    (CollectionType == ConfigurationElementCollectionType.BasicMapAlternate))
                    BaseAdd(-1, elem, true); // this version appends regardless of if it is a dup.
            }
        }
        _inheritedCount = Count; // After reset the count is the number of items actually inherited.
    }
}

public void CopyTo(ConfigurationElement[] array, int index)
{
    ((ICollection)this).CopyTo(array, index);
}

protected virtual void BaseAdd(ConfigurationElement element)
{
    BaseAdd(element, ThrowOnDuplicate);
}

protected internal void BaseAdd(ConfigurationElement element, bool throwIfExists)
{
    BaseAdd(element, throwIfExists, false);
}

/// <summary>
/// Core keyed add. If an entry with the same key exists, either throws (when
/// <paramref name="throwIfExists"/> and the values differ), replaces it in place,
/// or — for add-to-end semantics — deletes it and re-appends the new element.
/// <paramref name="ignoreLocks"/> is true during Reset/Unmerge so locked items can be copied.
/// </summary>
private void BaseAdd(ConfigurationElement element, bool throwIfExists, bool ignoreLocks)
{
    bool flagAsReplaced = false;
    bool localAddToEnd = InternalAddToEnd;
    if (IsReadOnly()) throw new ConfigurationErrorsException(SR.Config_base_read_only);
    if (LockItem && (ignoreLocks == false))
        throw new ConfigurationErrorsException(SR.Format(SR.Config_base_element_locked, _addElement));
    object key = GetElementKeyInternal(element);
    int iFoundItem = -1;
    for (int index = 0; index < Items.Count; index++)
    {
        Entry entry = (Entry)Items[index];
        if (!CompareKeys(key, entry.GetKey(this))) continue;
        if ((entry.Value != null) && entry.Value.LockItem && (ignoreLocks == false))
            throw new ConfigurationErrorsException(SR.Config_base_collection_item_locked);
        if ((entry.EntryType != EntryType.Removed) && throwIfExists)
        {
            if (!element.Equals(entry.Value))
            {
                throw new ConfigurationErrorsException(
                    SR.Format(SR.Config_base_collection_entry_already_exists, key),
                    element.PropertyFileName(""), element.PropertyLineNumber(""));
            }
            entry.Value = element;
            return;
        }
        if (entry.EntryType != EntryType.Added)
        {
            if (((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) ||
                (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate)) &&
                (entry.EntryType == EntryType.Removed) && (_removedItemCount > 0))
                _removedItemCount--; // account for the value
            entry.EntryType = EntryType.Replaced;
            flagAsReplaced = true;
        }
        if (localAddToEnd ||
            (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
        {
            iFoundItem = index;
            if (entry.EntryType == EntryType.Added)
            {
                // this is a special case for defect number 529517 to emulate Everett behavior
                localAddToEnd = true;
            }
            break;
        }
        // check to see if the element is trying to set a locked property.
        if (ignoreLocks == false)
        {
            element.HandleLockedAttributes(entry.Value);
            // copy the lock from the removed element before setting the new element
            element.MergeLocks(entry.Value);
        }
        entry.Value = element;
        _modified = true;
        return;
    }
    // Brand new item.
    if (iFoundItem >= 0)
    {
        Items.RemoveAt(iFoundItem);
        // if the item being removed was inherited adjust the count
        if ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate) &&
            (iFoundItem > Count + _removedItemCount - _inheritedCount))
            _inheritedCount--;
    }
    BaseAddInternal(localAddToEnd ? -1 : iFoundItem, element, flagAsReplaced, ignoreLocks);
    _modified = true;
}

/// <summary>Virtual index (skipping removed entries) of the element's key, or -1.</summary>
protected int BaseIndexOf(ConfigurationElement element)
{
    int index = 0;
    object key = GetElementKeyInternal(element);
    foreach (Entry entry in Items)
    {
        if (entry.EntryType == EntryType.Removed) continue;
        if (CompareKeys(key, entry.GetKey(this))) return index;
        index++;
    }
    return -1;
}

/// <summary>Physical index in Items (counting removed entries) of the element's key, or -1.</summary>
internal int RealIndexOf(ConfigurationElement element)
{
    int index = 0;
    object key = GetElementKeyInternal(element);
    foreach (Entry entry in Items)
    {
        if (CompareKeys(key, entry.GetKey(this))) return index;
        index++;
    }
    return -1;
}

/// <summary>
/// Physically inserts a new Entry at <paramref name="index"/> (-1 = append / before
/// inherited items for the Alternate layouts), enforcing lock checks and the
/// inherited-item ordering rules for each collection type.
/// </summary>
private void BaseAddInternal(int index, ConfigurationElement element, bool flagAsReplaced, bool ignoreLocks)
{
    // Allow the element to initialize itself after its
    // constructor has been run so that it may access
    // virtual methods.
    element.AssociateContext(_configRecord);
    element.CallInit();
    if (IsReadOnly()) throw new ConfigurationErrorsException(SR.Config_base_read_only);
    if (!ignoreLocks)
    {
        // during reset we ignore locks so we can copy the elements
        if ((CollectionType == ConfigurationElementCollectionType.BasicMap) ||
            (CollectionType == ConfigurationElementCollectionType.BasicMapAlternate))
        {
            if (BaseConfigurationRecord.IsReservedAttributeName(ElementName))
                throw new ArgumentException(SR.Format(SR.Basicmap_item_name_reserved, ElementName));
            CheckLockedElement(ElementName, null);
        }
        if ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) ||
            (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
            CheckLockedElement(_addElement, null);
    }
    if ((CollectionType == ConfigurationElementCollectionType.BasicMapAlternate) ||
        (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
    {
        if (index == -1)
        {
            // insert before inherited, but after any removed
            index = Count + _removedItemCount - _inheritedCount;
        }
        else
        {
            if ((index > Count + _removedItemCount - _inheritedCount) && (flagAsReplaced == false))
                throw new ConfigurationErrorsException(SR.Config_base_cannot_add_items_below_inherited_items);
        }
    }
    if ((CollectionType == ConfigurationElementCollectionType.BasicMap) &&
        (index >= 0) && (index < _inheritedCount))
        throw new ConfigurationErrorsException(SR.Config_base_cannot_add_items_above_inherited_items);
    EntryType entryType = flagAsReplaced == false ? EntryType.Added : EntryType.Replaced;
    object key = GetElementKeyInternal(element);
    if (index >= 0)
    {
        if (index > Items.Count)
            throw new ConfigurationErrorsException(SR.Format(SR.IndexOutOfRange, index));
        Items.Insert(index, new Entry(entryType, key, element));
    }
    else
    {
        Items.Add(new Entry(entryType, key, element));
    }
    _modified = true;
}

protected virtual void BaseAdd(int index, ConfigurationElement element)
{
    BaseAdd(index, element, false);
}

/// <summary>
/// Indexed add. For AddRemoveClearMap-style collections the caller's virtual index
/// is first translated to a physical index and duplicate keys are rejected
/// (unless the values are equal, which is a no-op).
/// </summary>
private void BaseAdd(int index, ConfigurationElement element, bool ignoreLocks)
{
    if (IsReadOnly()) throw new ConfigurationErrorsException(SR.Config_base_read_only);
    if (index < -1) throw new ConfigurationErrorsException(SR.Format(SR.IndexOutOfRange, index));
    if ((index != -1) &&
        ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) ||
        (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate)))
    {
        // If it's an AddRemoveClearMap*** collection, turn the index passed in into a real internal index
        int realIndex = 0;
        if (index > 0)
        {
            foreach (Entry entryfound in Items)
            {
                if (entryfound.EntryType != EntryType.Removed) index--;
                if (index == 0) break;
                realIndex++;
            }
            index = ++realIndex;
        }
        // check for duplicates
        object key = GetElementKeyInternal(element);
        foreach (Entry entry in Items)
        {
            if (!CompareKeys(key, entry.GetKey(this)) || (entry.EntryType == EntryType.Removed)) continue;
            if (!element.Equals(entry.Value))
            {
                throw new ConfigurationErrorsException(
                    SR.Format(SR.Config_base_collection_entry_already_exists, key),
                    element.PropertyFileName(""), element.PropertyLineNumber(""));
            }
            return;
        }
    }
    BaseAddInternal(index, element, false, ignoreLocks);
}
protected internal void BaseRemove(object key)
{
    BaseRemove(key, false);
}

/// <summary>
/// Removes the entry with the given key. For AddRemoveClearMap-style collections the
/// entry is only marked Removed (so a remove instruction is serialized); for BasicMap
/// collections it is physically removed, and inherited items may not be removed.
/// If the key is not present and this is an AddRemoveClearMap-style collection, a
/// Removed placeholder entry is recorded.
/// </summary>
private void BaseRemove(object key, bool throwIfMissing)
{
    if (IsReadOnly()) throw new ConfigurationErrorsException(SR.Config_base_read_only);
    int index = 0;
    foreach (Entry entry in Items)
    {
        if (CompareKeys(key, entry.GetKey(this)))
        {
            if (entry.Value == null) // A phoney delete is already present
            {
                if (throwIfMissing)
                {
                    throw new ConfigurationErrorsException(
                        SR.Format(SR.Config_base_collection_entry_not_found, key));
                }
                return;
            }
            if (entry.Value.LockItem)
                throw new ConfigurationErrorsException(SR.Format(SR.Config_base_attribute_locked, key));
            if (entry.Value.ElementPresent == false)
                CheckLockedElement(_removeElement, null); // has remove been locked?
            switch (entry.EntryType)
            {
                case EntryType.Added:
                    if ((CollectionType != ConfigurationElementCollectionType.AddRemoveClearMap) &&
                        (CollectionType != ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
                    {
                        // BasicMapAlternate keeps inherited items at the bottom;
                        // BasicMap keeps them at the top. Neither may be removed.
                        if (CollectionType == ConfigurationElementCollectionType.BasicMapAlternate)
                        {
                            if (index >= Count - _inheritedCount)
                            {
                                throw new ConfigurationErrorsException(
                                    SR.Config_base_cannot_remove_inherited_items);
                            }
                        }
                        if (CollectionType == ConfigurationElementCollectionType.BasicMap)
                        {
                            if (index < _inheritedCount)
                            {
                                throw new ConfigurationErrorsException(
                                    SR.Config_base_cannot_remove_inherited_items);
                            }
                        }
                        Items.RemoveAt(index);
                    }
                    else
                    {
                        // don't really remove it from the collection, just mark it removed
                        entry.EntryType = EntryType.Removed;
                        _removedItemCount++;
                    }
                    break;
                case EntryType.Removed:
                    if (throwIfMissing)
                        throw new ConfigurationErrorsException(SR.Config_base_collection_entry_already_removed);
                    break;
                default:
                    if ((CollectionType != ConfigurationElementCollectionType.AddRemoveClearMap) &&
                        (CollectionType != ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
                    {
                        throw new ConfigurationErrorsException(
                            SR.Config_base_collection_elements_may_not_be_removed);
                    }
                    entry.EntryType = EntryType.Removed;
                    _removedItemCount++;
                    break;
            }
            _modified = true;
            return;
        }
        index++;
    }
    // Note: because it is possible for removes to get orphaned by the API they will
    // not cause a throw from the base classes. The scenario is:
    //   Add an item in a parent level,
    //   remove the item in a child level,
    //   remove the item at the parent level.
    if (throwIfMissing)
        throw new ConfigurationErrorsException(SR.Format(SR.Config_base_collection_entry_not_found, key));
    if ((CollectionType != ConfigurationElementCollectionType.AddRemoveClearMap) &&
        (CollectionType != ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
        return;
    // Record a remove-only placeholder (null Value) so a <remove> is serialized.
    if (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate)
    {
        Items.Insert(Count + _removedItemCount - _inheritedCount,
            new Entry(EntryType.Removed, key, null));
    }
    else Items.Add(new Entry(EntryType.Removed, key, null));
    _removedItemCount++;
}

/// <summary>Live element with the given key, or null.</summary>
protected internal ConfigurationElement BaseGet(object key)
{
    foreach (Entry entry in Items)
        if (entry.EntryType != EntryType.Removed)
            if (CompareKeys(key, entry.GetKey(this)))
                return entry.Value;
    return null;
}

/// <summary>True when an entry with this key exists and is marked Removed.</summary>
protected internal bool BaseIsRemoved(object key)
{
    foreach (Entry entry in Items)
        if (CompareKeys(key, entry.GetKey(this)))
            return entry.EntryType == EntryType.Removed;
    return false;
}

/// <summary>Element at the given virtual index (removed entries are skipped).</summary>
protected internal ConfigurationElement BaseGet(int index)
{
    if (index < 0) throw new ConfigurationErrorsException(SR.Format(SR.IndexOutOfRange, index));
    int virtualIndex = 0;
    Entry entry = null;
    foreach (Entry entryfound in Items)
    {
        if ((virtualIndex == index) && (entryfound.EntryType != EntryType.Removed))
        {
            entry = entryfound;
            break;
        }
        if (entryfound.EntryType != EntryType.Removed) virtualIndex++;
    }
    if (entry != null) return entry.Value;
    throw new ConfigurationErrorsException(SR.Format(SR.IndexOutOfRange, index));
}

/// <summary>Keys of all live entries, in collection order.</summary>
protected internal object[] BaseGetAllKeys()
{
    object[] keys = new object[Count];
    int index = 0;
    foreach (Entry entry in Items)
    {
        if (entry.EntryType == EntryType.Removed) continue;
        keys[index] = entry.GetKey(this);
        index++;
    }
    return keys;
}

/// <summary>Key of the live entry at the given virtual index.</summary>
protected internal object BaseGetKey(int index)
{
    int virtualIndex = 0;
    Entry entry = null;
    if (index < 0) throw new ConfigurationErrorsException(SR.Format(SR.IndexOutOfRange, index));
    foreach (Entry entryfound in Items)
    {
        if ((virtualIndex == index) && (entryfound.EntryType != EntryType.Removed))
        {
            entry = entryfound;
            break;
        }
        if (entryfound.EntryType != EntryType.Removed) virtualIndex++;
    }
    // Entry entry = (Entry)_items[index];
    if (entry == null) throw new ConfigurationErrorsException(SR.Format(SR.IndexOutOfRange, index));
    return entry.GetKey(this);
}

/// <summary>
/// Clears all removable entries. Inherited items survive for BasicMap-style
/// collections; for AddRemoveClearMap-style collections everything goes (after a
/// lock check) and the inherited/removed counters are adjusted accordingly.
/// </summary>
protected internal void BaseClear()
{
    if (IsReadOnly()) throw new ConfigurationErrorsException(SR.Config_base_read_only);
    CheckLockedElement(_clearElement, null); // has clear been locked?
    CheckLockedElement(_removeElement, null); // has remove been locked? Clear implies remove
    _modified = true;
    _collectionCleared = true;
    if (((CollectionType == ConfigurationElementCollectionType.BasicMap) ||
        (CollectionType == ConfigurationElementCollectionType.BasicMapAlternate)) &&
        (_inheritedCount > 0))
    {
        int removeIndex = 0;
        if (CollectionType == ConfigurationElementCollectionType.BasicMapAlternate)
            removeIndex = 0; // Inherited items are at the bottom and cannot be removed
        if (CollectionType == ConfigurationElementCollectionType.BasicMap)
            removeIndex = _inheritedCount; // inherited items are at the top and cannot be removed
        while (Count - _inheritedCount > 0) Items.RemoveAt(removeIndex);
    }
    else
    {
        // do not clear any locked items
        // _items.Clear();
        int inheritedRemoved = 0;
        int removedRemoved = 0;
        int initialCount = Count;
        // check for locks before removing any items from the collection
        for (int checkIndex = 0; checkIndex < Items.Count; checkIndex++)
        {
            Entry entry = (Entry)Items[checkIndex];
            if ((entry.Value != null) && entry.Value.LockItem)
                throw new ConfigurationErrorsException(SR.Config_base_collection_item_locked_cannot_clear);
        }
        for (int removeIndex = Items.Count - 1; removeIndex >= 0; removeIndex--)
        {
            Entry entry = (Entry)Items[removeIndex];
            if (((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) &&
                (removeIndex < _inheritedCount)) ||
                ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate) &&
                (removeIndex >= initialCount - _inheritedCount)))
                inheritedRemoved++;
            if (entry.EntryType == EntryType.Removed) removedRemoved++;
            Items.RemoveAt(removeIndex);
        }
        _inheritedCount -= inheritedRemoved;
        _removedItemCount -= removedRemoved;
    }
}

/// <summary>
/// Removes the live entry at the given virtual index, with the same locking and
/// collection-type rules as BaseRemove.
/// </summary>
protected internal void BaseRemoveAt(int index)
{
    if (IsReadOnly()) throw new ConfigurationErrorsException(SR.Config_base_read_only);
    int virtualIndex = 0;
    Entry entry = null;
    foreach (Entry entryfound in Items)
    {
        if ((virtualIndex == index) && (entryfound.EntryType != EntryType.Removed))
        {
            entry = entryfound;
            break;
        }
        if (entryfound.EntryType != EntryType.Removed) virtualIndex++;
    }
    if (entry == null) throw new ConfigurationErrorsException(SR.Format(SR.IndexOutOfRange, index));
    if (entry.Value.LockItem)
    {
        throw new ConfigurationErrorsException(SR.Format(SR.Config_base_attribute_locked, entry.GetKey(this)));
    }
    if (entry.Value.ElementPresent == false)
        CheckLockedElement(_removeElement, null); // has remove been locked?
    switch (entry.EntryType)
    {
        case EntryType.Added:
            if ((CollectionType != ConfigurationElementCollectionType.AddRemoveClearMap) &&
                (CollectionType != ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
            {
                // Inherited items cannot be removed from BasicMap-style collections.
                if ((CollectionType == ConfigurationElementCollectionType.BasicMapAlternate) &&
                    (index >= Count - _inheritedCount))
                    throw new ConfigurationErrorsException(SR.Config_base_cannot_remove_inherited_items);
                if ((CollectionType == ConfigurationElementCollectionType.BasicMap) &&
                    (index < _inheritedCount))
                    throw new ConfigurationErrorsException(SR.Config_base_cannot_remove_inherited_items);
                Items.RemoveAt(index);
            }
            else
            {
                // don't really remove it from the collection, just mark it removed
                if (entry.Value.ElementPresent == false)
                    CheckLockedElement(_removeElement, null); // has remove been locked?
                entry.EntryType = EntryType.Removed;
                _removedItemCount++;
            }
            break;
        case EntryType.Removed:
            throw new ConfigurationErrorsException(SR.Config_base_collection_entry_already_removed);
        default:
            if ((CollectionType != ConfigurationElementCollectionType.AddRemoveClearMap) &&
                (CollectionType != ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
                throw new ConfigurationErrorsException(SR.Config_base_collection_elements_may_not_be_removed);
            entry.EntryType = EntryType.Removed;
            _removedItemCount++;
            break;
    }
    _modified = true;
}

/// <summary>
/// Serializes the collection: optional clear tag first (AddRemoveClearMap styles),
/// then each entry as an item/add element, with Removed/Replaced entries emitted
/// as remove elements. A null writer performs a dry run that only reports whether
/// there is data to write.
/// </summary>
protected internal override bool SerializeElement(XmlWriter writer, bool serializeCollectionKey)
{
    ConfigurationElementCollectionType type = CollectionType;
    bool dataToWrite = false;
    dataToWrite |= base.SerializeElement(writer, serializeCollectionKey);
    if ((type == ConfigurationElementCollectionType.AddRemoveClearMap) ||
        (type == ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
    {
        // it is possible that the collection only has to be cleared and contains
        // no real elements
        if (_emitClearTag && (_clearElement.Length != 0))
        {
            if (writer != null)
            {
                writer.WriteStartElement(_clearElement);
                writer.WriteEndElement();
            }
            dataToWrite = true;
        }
    }
    foreach (Entry entry in Items)
        switch (type)
        {
            case ConfigurationElementCollectionType.BasicMap:
            case ConfigurationElementCollectionType.BasicMapAlternate:
                if ((entry.EntryType == EntryType.Added) || (entry.EntryType == EntryType.Replaced))
                {
                    if (!string.IsNullOrEmpty(ElementName))
                    {
                        if (BaseConfigurationRecord.IsReservedAttributeName(ElementName))
                        {
                            throw new ArgumentException(SR.Format(SR.Basicmap_item_name_reserved, ElementName));
                        }
                        dataToWrite |= entry.Value.SerializeToXmlElement(writer, ElementName);
                    }
                    else dataToWrite |= entry.Value.SerializeElement(writer, false);
                }
                break;
            case ConfigurationElementCollectionType.AddRemoveClearMap:
            case ConfigurationElementCollectionType.AddRemoveClearMapAlternate:
                // A Replaced entry emits both a remove and an add.
                if (((entry.EntryType == EntryType.Removed) || (entry.EntryType == EntryType.Replaced)) &&
                    (entry.Value != null))
                {
                    writer?.WriteStartElement(_removeElement);
                    entry.Value.SerializeElement(writer, true);
                    writer?.WriteEndElement();
                    dataToWrite = true;
                }
                if ((entry.EntryType == EntryType.Added) || (entry.EntryType == EntryType.Replaced))
                    dataToWrite |= entry.Value.SerializeToXmlElement(writer, _addElement);
                break;
        }
    return dataToWrite;
}

/// <summary>
/// Handles the collection's child tags during deserialization: add/remove/clear for
/// AddRemoveClearMap styles, or the item tag(s) for BasicMap styles. Returns false
/// for an unrecognized tag so the caller can report the error.
/// </summary>
protected override bool OnDeserializeUnrecognizedElement(string elementName, XmlReader reader)
{
    if ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) ||
        (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
    {
        if (elementName == _addElement)
        {
            ConfigurationElement elem = CallCreateNewElement();
            elem.ResetLockLists(this);
            elem.DeserializeElement(reader, false);
            BaseAdd(elem);
        }
        else
        {
            if (elementName == _removeElement)
            {
                ConfigurationElement elem = CallCreateNewElement();
                elem.ResetLockLists(this);
                elem.DeserializeElement(reader, true);
                if (IsElementRemovable(elem)) BaseRemove(GetElementKeyInternal(elem), false);
            }
            else
            {
                if (elementName != _clearElement) return false;
                // The clear tag takes no attributes.
                if (reader.AttributeCount > 0)
                {
                    while (reader.MoveToNextAttribute())
                    {
                        string propertyName = reader.Name;
                        throw new ConfigurationErrorsException(
                            SR.Format(SR.Config_base_unrecognized_attribute, propertyName), reader);
                    }
                }
                CheckLockedElement(elementName, reader);
                reader.MoveToElement();
                BaseClear();
                // _emitClearTag = true;
            }
        }
    }
    else
    {
        if (elementName == ElementName)
        {
            if (BaseConfigurationRecord.IsReservedAttributeName(elementName))
                throw new ArgumentException(SR.Format(SR.Basicmap_item_name_reserved, elementName));
            ConfigurationElement elem = CallCreateNewElement();
            elem.ResetLockLists(this);
            elem.DeserializeElement(reader, false);
            BaseAdd(elem);
        }
        else
        {
            if (!IsElementName(elementName)) return false;
            // this section handles collections like the allow/deny scenario which
            if (BaseConfigurationRecord.IsReservedAttributeName(elementName))
                throw new ArgumentException(SR.Format(SR.Basicmap_item_name_reserved, elementName));
            // have multiple tags for the collection
            ConfigurationElement elem = CallCreateNewElement(elementName);
            elem.ResetLockLists(this);
            elem.DeserializeElement(reader, false);
            BaseAdd(-1, elem);
        }
    }
    return true;
}

// Factory helpers: create the element, then attach context and run CallInit
// so the element can use virtual methods after construction.
private ConfigurationElement CallCreateNewElement(string elementName)
{
    ConfigurationElement elem = CreateNewElement(elementName);
    elem.AssociateContext(_configRecord);
    elem.CallInit();
    return elem;
}

private ConfigurationElement CallCreateNewElement()
{
    ConfigurationElement elem = CreateNewElement();
    elem.AssociateContext(_configRecord);
    elem.CallInit();
    return elem;
}

protected virtual ConfigurationElement CreateNewElement(string elementName)
{
    return CreateNewElement();
}

protected abstract ConfigurationElement CreateNewElement();

protected abstract object GetElementKey(ConfigurationElement element);

/// <summary>Wraps GetElementKey and rejects a null key.</summary>
internal object GetElementKeyInternal(ConfigurationElement element)
{
    object key = GetElementKey(element);
    if (key == null) throw new ConfigurationErrorsException(SR.Config_base_invalid_element_key);
    return key;
}

protected virtual bool IsElementRemovable(ConfigurationElement element)
{
    return true;
}

/// <summary>Compares two element keys, using the configured comparer when present.</summary>
private bool CompareKeys(object key1, object key2)
{
    if (_comparer != null) return _comparer.Compare(key1, key2) == 0;
    return key1.Equals(key2);
}

protected virtual bool IsElementName(string elementName)
{
    return false;
}

/// <summary>True when the given tag name is one this collection would accept a lock on.</summary>
internal bool IsLockableElement(string elementName)
{
    if ((CollectionType == ConfigurationElementCollectionType.AddRemoveClearMap) ||
        (CollectionType == ConfigurationElementCollectionType.AddRemoveClearMapAlternate))
    {
        return (elementName == AddElementName) ||
            (elementName == RemoveElementName) ||
            (elementName == ClearElementName);
    }
    return (elementName == ElementName) || IsElementName(elementName);
}

// Classification of an entry relative to the parent collection, used by Unmerge.
private enum InheritedType
{
    InNeither = 0,
    InParent = 1,
    InSelf = 2,
    InBothSame = 3,
    InBothDiff = 4,
    InBothCopyNoRemove = 5,
}

// Lifecycle state of an entry within Items.
private enum EntryType
{
    Inherited,
    Replaced,
    Removed,
    Added,
}

// One slot in Items: the entry state, its key, and the element (null for a
// remove-only placeholder).
private class Entry
{
    private readonly object _key;
    internal EntryType EntryType;
    internal ConfigurationElement Value;

    internal Entry(EntryType type, object key, ConfigurationElement value)
    {
        EntryType = type;
        _key = key;
        Value = value;
    }

    internal object GetKey(ConfigurationElementCollection thisCollection)
    {
        // For items that have been really inserted...
        return Value != null ? thisCollection.GetElementKeyInternal(Value) : _key;
    }
}

// Dictionary-style enumerator over the live (non-removed) entries.
private class Enumerator : IDictionaryEnumerator
{
    private readonly IEnumerator _itemsEnumerator;
    private readonly ConfigurationElementCollection _thisCollection;
    private DictionaryEntry _current;

    internal Enumerator(ArrayList items, ConfigurationElementCollection collection)
    {
        _itemsEnumerator = items.GetEnumerator();
        _thisCollection = collection;
    }

    bool IEnumerator.MoveNext()
    {
        while (_itemsEnumerator.MoveNext())
        {
            Entry entry = (Entry)_itemsEnumerator.Current;
            if (entry.EntryType == EntryType.Removed) continue;
            _current.Key = entry.GetKey(_thisCollection) != null ? entry.GetKey(_thisCollection) : "key";
            _current.Value = entry.Value;
            return true;
        }
        return false;
    }

    void IEnumerator.Reset()
    {
        _itemsEnumerator.Reset();
    }

    object IEnumerator.Current => _current.Value;

    DictionaryEntry IDictionaryEnumerator.Entry => _current;

    object IDictionaryEnumerator.Key => _current.Key;

    object IDictionaryEnumerator.Value => _current.Value;
}
}
}
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace SouthwindRepository
{
    /// <summary>
    /// Strongly-typed collection for the Category class.
    /// </summary>
    [Serializable]
    public partial class CategoryCollection : RepositoryList<Category, CategoryCollection>
    {
        public CategoryCollection() {}

        /// <summary>
        /// Filters an existing collection based on the set criteria. This is an in-memory filter.
        /// Thanks to developingchris for this!
        /// </summary>
        /// <returns>CategoryCollection</returns>
        public CategoryCollection Filter()
        {
            // Iterate backwards so removal does not disturb the remaining indexes.
            for (int i = this.Count - 1; i > -1; i--)
            {
                Category o = this[i];
                foreach (SubSonic.Where w in this.wheres)
                {
                    bool remove = false;
                    System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
                    // FIX: 'pi' is null when the Where names a column with no matching
                    // property, and the property value can legitimately be null for
                    // nullable columns (e.g. Description); both previously caused a
                    // NullReferenceException here.
                    if (pi != null && pi.CanRead)
                    {
                        object val = pi.GetValue(o, null);
                        switch (w.Comparison)
                        {
                            case SubSonic.Comparison.Equals:
                                // object.Equals is null-safe for both operands.
                                if (!object.Equals(val, w.ParameterValue))
                                {
                                    remove = true;
                                }
                                break;
                        }
                    }
                    if (remove)
                    {
                        this.Remove(o);
                        break; // item is gone; stop evaluating further criteria for it
                    }
                }
            }
            return this;
        }
    }
    /// <summary>
    /// This is an ActiveRecord class which wraps the categories table.
    /// </summary>
    [Serializable]
    public partial class Category : RepositoryRecord<Category>, IRecordBase
    {
        #region .ctors and Default Settings

        // Default ctor: wires up the table schema, applies generated defaults and
        // marks the record as new (will INSERT on save).
        public Category()
        {
            SetSQLProps();
            InitSetDefaults();
            MarkNew();
        }

        private void InitSetDefaults()
        {
            SetDefaults();
        }

        // Alternate ctor: optionally forces database-side column defaults into the record.
        public Category(bool useDatabaseDefaults)
        {
            SetSQLProps();
            if(useDatabaseDefaults)
                ForceDefaults();
            MarkNew();
        }

        // Lazily initializes the shared, static table schema.
        protected static void SetSQLProps()
        {
            GetTableSchema();
        }

        #endregion

        #region Schema and Query Accessor

        public static Query CreateQuery()
        {
            return new Query(Schema);
        }

        public static TableSchema.Table Schema
        {
            get
            {
                if (BaseSchema == null)
                    SetSQLProps();
                return BaseSchema;
            }
        }

        // Builds the column metadata for the "categories" table exactly once and
        // registers it with the provider so later queries can resolve it.
        private static void GetTableSchema()
        {
            if(!IsSchemaInitialized)
            {
                //Schema declaration
                TableSchema.Table schema = new TableSchema.Table("categories", TableType.Table, DataService.GetInstance("SouthwindRepository"));
                schema.Columns = new TableSchema.TableColumnCollection();
                schema.SchemaName = @"";

                //columns
                // CategoryID: int identity primary key.
                TableSchema.TableColumn colvarCategoryID = new TableSchema.TableColumn(schema);
                colvarCategoryID.ColumnName = "CategoryID";
                colvarCategoryID.DataType = DbType.Int32;
                colvarCategoryID.MaxLength = 10;
                colvarCategoryID.AutoIncrement = true;
                colvarCategoryID.IsNullable = false;
                colvarCategoryID.IsPrimaryKey = true;
                colvarCategoryID.IsForeignKey = false;
                colvarCategoryID.IsReadOnly = false;
                colvarCategoryID.DefaultSetting = @"";
                colvarCategoryID.ForeignKeyTableName = "";
                schema.Columns.Add(colvarCategoryID);

                // CategoryName: nvarchar(15), required.
                // NOTE(review): IsForeignKey is true here while ForeignKeyTableName is
                // empty — looks like a code-generator quirk; verify before relying on it.
                TableSchema.TableColumn colvarCategoryName = new TableSchema.TableColumn(schema);
                colvarCategoryName.ColumnName = "CategoryName";
                colvarCategoryName.DataType = DbType.String;
                colvarCategoryName.MaxLength = 15;
                colvarCategoryName.AutoIncrement = false;
                colvarCategoryName.IsNullable = false;
                colvarCategoryName.IsPrimaryKey = false;
                colvarCategoryName.IsForeignKey = true;
                colvarCategoryName.IsReadOnly = false;
                colvarCategoryName.DefaultSetting = @"";
                colvarCategoryName.ForeignKeyTableName = "";
                schema.Columns.Add(colvarCategoryName);

                // Description: unbounded text, nullable.
                TableSchema.TableColumn colvarDescription = new TableSchema.TableColumn(schema);
                colvarDescription.ColumnName = "Description";
                colvarDescription.DataType = DbType.String;
                colvarDescription.MaxLength = 0;
                colvarDescription.AutoIncrement = false;
                colvarDescription.IsNullable = true;
                colvarDescription.IsPrimaryKey = false;
                colvarDescription.IsForeignKey = false;
                colvarDescription.IsReadOnly = false;
                colvarDescription.DefaultSetting = @"";
                colvarDescription.ForeignKeyTableName = "";
                schema.Columns.Add(colvarDescription);

                // Picture: binary blob, nullable.
                TableSchema.TableColumn colvarPicture = new TableSchema.TableColumn(schema);
                colvarPicture.ColumnName = "Picture";
                colvarPicture.DataType = DbType.Binary;
                colvarPicture.MaxLength = 0;
                colvarPicture.AutoIncrement = false;
                colvarPicture.IsNullable = true;
                colvarPicture.IsPrimaryKey = false;
                colvarPicture.IsForeignKey = false;
                colvarPicture.IsReadOnly = false;
                colvarPicture.DefaultSetting = @"";
                colvarPicture.ForeignKeyTableName = "";
                schema.Columns.Add(colvarPicture);

                BaseSchema = schema;
                //add this schema to the provider
                //so we can query it later
                DataService.Providers["SouthwindRepository"].AddSchema("categories",schema);
            }
        }

        #endregion

        #region Props

        [XmlAttribute("CategoryID")]
        [Bindable(true)]
        public int CategoryID
        {
            get { return GetColumnValue<int>(Columns.CategoryID); }
            set { SetColumnValue(Columns.CategoryID, value); }
        }

        [XmlAttribute("CategoryName")]
        [Bindable(true)]
        public string CategoryName
        {
            get { return GetColumnValue<string>(Columns.CategoryName); }
            set { SetColumnValue(Columns.CategoryName, value); }
        }

        [XmlAttribute("Description")]
        [Bindable(true)]
        public string Description
        {
            get { return GetColumnValue<string>(Columns.Description); }
            set { SetColumnValue(Columns.Description, value); }
        }

        [XmlAttribute("Picture")]
        [Bindable(true)]
        public byte[] Picture
        {
            get { return GetColumnValue<byte[]>(Columns.Picture); }
            set { SetColumnValue(Columns.Picture, value); }
        }

        #endregion

        //no foreign key tables defined (0)

        //no ManyToMany tables defined (0)

        #region Typed Columns

        // Positional accessors into the schema built by GetTableSchema above.
        public static TableSchema.TableColumn CategoryIDColumn
        {
            get { return Schema.Columns[0]; }
        }

        public static TableSchema.TableColumn CategoryNameColumn
        {
            get { return Schema.Columns[1]; }
        }

        public static TableSchema.TableColumn DescriptionColumn
        {
            get { return Schema.Columns[2]; }
        }

        public static TableSchema.TableColumn PictureColumn
        {
            get { return Schema.Columns[3]; }
        }

        #endregion

        #region Columns Struct

        // String constants for column names, used by the typed property accessors.
        public struct Columns
        {
            public static string CategoryID = @"CategoryID";
            public static string CategoryName = @"CategoryName";
            public static string Description = @"Description";
            public static string Picture = @"Picture";
        }

        #endregion

        #region Update PK Collections

        #endregion

        #region Deep Save

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.IO;
using System.Net.Security;
using System.Net.Sockets;
using System.Net.Test.Common;
using System.Runtime.InteropServices;
using System.Security.Authentication;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

namespace System.Net.Http.Functional.Tests
{
    // Functional tests for HttpClientHandler.SslProtocols: default value, get/set
    // round-trips, rejection of disabled protocols, and end-to-end handshakes
    // against a loopback server (plus a few outerloop remote-server checks).
    public class HttpClientHandler_SslProtocols_Test
    {
        // A fresh handler must report SslProtocols.None (i.e. "use system defaults").
        [Fact]
        public void DefaultProtocols_MatchesExpected()
        {
            using (var handler = new HttpClientHandler())
            {
                Assert.Equal(SslProtocols.None, handler.SslProtocols);
            }
        }

        // Any supported TLS combination set on the handler must read back unchanged.
        [Theory]
        [InlineData(SslProtocols.None)]
        [InlineData(SslProtocols.Tls)]
        [InlineData(SslProtocols.Tls11)]
        [InlineData(SslProtocols.Tls12)]
        [InlineData(SslProtocols.Tls | SslProtocols.Tls11)]
        [InlineData(SslProtocols.Tls11 | SslProtocols.Tls12)]
        [InlineData(SslProtocols.Tls | SslProtocols.Tls12)]
        [InlineData(SslProtocols.Tls | SslProtocols.Tls11 | SslProtocols.Tls12)]
        public void SetGetProtocols_Roundtrips(SslProtocols protocols)
        {
            using (var handler = new HttpClientHandler())
            {
                handler.SslProtocols = protocols;
                Assert.Equal(protocols, handler.SslProtocols);
            }
        }

        // Once the handler has issued a request it is "locked": mutating
        // SslProtocols afterwards must throw InvalidOperationException.
        [ConditionalFact(nameof(BackendSupportsSslConfiguration))]
        public async Task SetProtocols_AfterRequest_ThrowsException()
        {
            using (var handler = new HttpClientHandler() { ServerCertificateCustomValidationCallback = LoopbackServer.AllowAllCertificates })
            using (var client = new HttpClient(handler))
            {
                await LoopbackServer.CreateServerAsync(async (server, url) =>
                {
                    await TestHelper.WhenAllCompletedOrAnyFailed(
                        LoopbackServer.ReadRequestAndSendResponseAsync(server),
                        client.GetAsync(url));
                });
                Assert.Throws<InvalidOperationException>(() => handler.SslProtocols = SslProtocols.Tls12);
            }
        }

        // SSLv2/SSLv3 (and nonsensical bit patterns) are deliberately unsupported
        // and must be rejected at set time.
        [Theory]
        [InlineData(~SslProtocols.None)]
#pragma warning disable 0618 // obsolete warning
        [InlineData(SslProtocols.Ssl2)]
        [InlineData(SslProtocols.Ssl3)]
        [InlineData(SslProtocols.Ssl2 | SslProtocols.Ssl3)]
        [InlineData(SslProtocols.Ssl2 | SslProtocols.Ssl3 | SslProtocols.Tls | SslProtocols.Tls11 | SslProtocols.Tls12)]
#pragma warning restore 0618
        public void DisabledProtocols_SetSslProtocols_ThrowsException(SslProtocols disabledProtocols)
        {
            using (var handler = new HttpClientHandler())
            {
                Assert.Throws<NotSupportedException>(() => handler.SslProtocols = disabledProtocols);
            }
        }

        // A request succeeds when the server accepts the protocol, whether the
        // client pins exactly that protocol or leaves the default in place.
        [ConditionalTheory(nameof(BackendSupportsSslConfiguration))]
        [InlineData(SslProtocols.Tls, false)]
        [InlineData(SslProtocols.Tls, true)]
        [InlineData(SslProtocols.Tls11, false)]
        [InlineData(SslProtocols.Tls11, true)]
        [InlineData(SslProtocols.Tls12, false)]
        [InlineData(SslProtocols.Tls12, true)]
        public async Task GetAsync_AllowedSSLVersion_Succeeds(SslProtocols acceptedProtocol, bool requestOnlyThisProtocol)
        {
            using (var handler = new HttpClientHandler() { ServerCertificateCustomValidationCallback = LoopbackServer.AllowAllCertificates })
            using (var client = new HttpClient(handler))
            {
                if (requestOnlyThisProtocol)
                {
                    handler.SslProtocols = acceptedProtocol;
                }
                var options = new LoopbackServer.Options { UseSsl = true, SslProtocols = acceptedProtocol };
                await LoopbackServer.CreateServerAsync(async (server, url) =>
                {
                    await TestHelper.WhenAllCompletedOrAnyFailed(
                        LoopbackServer.ReadRequestAndSendResponseAsync(server, options: options),
                        client.GetAsync(url));
                }, options);
            }
        }

        // name/url pairs for remote servers speaking exactly one TLS version each.
        public readonly static object [][] SupportedSSLVersionServers =
        {
            new object[] {"TLSv1.0", Configuration.Http.TLSv10RemoteServer},
            new object[] {"TLSv1.1", Configuration.Http.TLSv11RemoteServer},
            new object[] {"TLSv1.2", Configuration.Http.TLSv12RemoteServer},
        };

        // This test is logically the same as the above test, albeit using remote servers
        // instead of local ones. We're keeping it for now (as outerloop) because it helps
        // to validate against another SSL implementation that what we mean by a particular
        // TLS version matches that other implementation.
        // (The "name" parameter exists only for readable test-case display names.)
        [OuterLoop] // avoid www.ssllabs.com dependency in innerloop
        [Theory]
        [MemberData(nameof(SupportedSSLVersionServers))]
        public async Task GetAsync_SupportedSSLVersion_Succeeds(string name, string url)
        {
            using (var client = new HttpClient())
            {
                (await client.GetAsync(url)).Dispose();
            }
        }

        public readonly static object[][] NotSupportedSSLVersionServers =
        {
            new object[] {"SSLv2", Configuration.Http.SSLv2RemoteServer},
            new object[] {"SSLv3", Configuration.Http.SSLv3RemoteServer},
        };

        // It would be easy to remove the dependency on these remote servers if we didn't
        // explicitly disallow creating SslStream with SSLv2/3. Since we explicitly throw
        // when trying to use such an SslStream, we can't stand up a localhost server that
        // only speaks those protocols.
        [OuterLoop] // avoid www.ssllabs.com dependency in innerloop
        [Theory]
        [MemberData(nameof(NotSupportedSSLVersionServers))]
        public async Task GetAsync_UnsupportedSSLVersion_Throws(string name, string url)
        {
            using (var client = new HttpClient())
            {
                await Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync(url));
            }
        }

        // With no explicit protocol set, the negotiated handshake should land on
        // TLS 1.2 (skipped on platforms where 1.2 may be disabled, e.g. Win7).
        [ConditionalFact(nameof(BackendSupportsSslConfiguration), nameof(SslDefaultsToTls12))]
        public async Task GetAsync_NoSpecifiedProtocol_DefaultsToTls12()
        {
            using (var handler = new HttpClientHandler() { ServerCertificateCustomValidationCallback = LoopbackServer.AllowAllCertificates })
            using (var client = new HttpClient(handler))
            {
                var options = new LoopbackServer.Options { UseSsl = true };
                await LoopbackServer.CreateServerAsync(async (server, url) =>
                {
                    await TestHelper.WhenAllCompletedOrAnyFailed(
                        client.GetAsync(url),
                        LoopbackServer.AcceptSocketAsync(server, async (s, stream, reader, writer) =>
                        {
                            // Inspect the server-side SslStream to see what was negotiated.
                            Assert.Equal(SslProtocols.Tls12, Assert.IsType<SslStream>(stream).SslProtocol);
                            await LoopbackServer.ReadWriteAcceptedAsync(s, reader, writer);
                            return null;
                        }, options));
                }, options);
            }
        }

        // Client and server pin disjoint protocols: both sides must fail, the
        // server with a transport/auth error and the client with HttpRequestException.
        [ConditionalTheory(nameof(BackendSupportsSslConfiguration))]
        [InlineData(SslProtocols.Tls11, SslProtocols.Tls, typeof(IOException))]
        [InlineData(SslProtocols.Tls12, SslProtocols.Tls11, typeof(IOException))]
        [InlineData(SslProtocols.Tls, SslProtocols.Tls12, typeof(AuthenticationException))]
        public async Task GetAsync_AllowedSSLVersionDiffersFromServer_ThrowsException(
            SslProtocols allowedProtocol, SslProtocols acceptedProtocol, Type exceptedServerException)
        {
            using (var handler = new HttpClientHandler() { SslProtocols = allowedProtocol, ServerCertificateCustomValidationCallback = LoopbackServer.AllowAllCertificates })
            using (var client = new HttpClient(handler))
            {
                var options = new LoopbackServer.Options { UseSsl = true, SslProtocols = acceptedProtocol };
                await LoopbackServer.CreateServerAsync(async (server, url) =>
                {
                    await TestHelper.WhenAllCompletedOrAnyFailed(
                        Assert.ThrowsAsync(exceptedServerException, () => LoopbackServer.ReadRequestAndSendResponseAsync(server, options: options)),
                        Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync(url)));
                }, options);
            }
        }

        // With TLS 1.1+1.2 allowed: a TLS 1.0-only server must be rejected, while
        // TLS 1.1 and 1.2 servers succeed.
        [ActiveIssue(8538, PlatformID.Windows)]
        [Fact]
        public async Task GetAsync_DisallowTls10_AllowTls11_AllowTls12()
        {
            using (var handler = new HttpClientHandler() { SslProtocols = SslProtocols.Tls11 | SslProtocols.Tls12, ServerCertificateCustomValidationCallback = LoopbackServer.AllowAllCertificates })
            using (var client = new HttpClient(handler))
            {
                if (BackendSupportsSslConfiguration)
                {
                    LoopbackServer.Options options = new LoopbackServer.Options { UseSsl = true };

                    options.SslProtocols = SslProtocols.Tls;
                    await LoopbackServer.CreateServerAsync(async (server, url) =>
                    {
                        await TestHelper.WhenAllCompletedOrAnyFailed(
                            Assert.ThrowsAsync<IOException>(() => LoopbackServer.ReadRequestAndSendResponseAsync(server, options: options)),
                            Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync(url)));
                    }, options);

                    foreach (var prot in new[] { SslProtocols.Tls11, SslProtocols.Tls12 })
                    {
                        options.SslProtocols = prot;
                        await LoopbackServer.CreateServerAsync(async (server, url) =>
                        {
                            await TestHelper.WhenAllCompletedOrAnyFailed(
                                LoopbackServer.ReadRequestAndSendResponseAsync(server, options: options),
                                client.GetAsync(url));
                        }, options);
                    }
                }
                else
                {
                    // Backend cannot configure protocols at all: the request fails fast.
                    await Assert.ThrowsAnyAsync<NotSupportedException>(() => client.GetAsync($"http://{Guid.NewGuid().ToString()}/"));
                }
            }
        }

        private static bool SslDefaultsToTls12 => !PlatformDetection.IsWindows7;
        // TLS 1.2 may not be enabled on Win7
        // https://technet.microsoft.com/en-us/library/dn786418.aspx#BKMK_SchannelTR_TLS12

        // Windows (SChannel) always supports configuration; on Unix only the
        // OpenSSL-backed curl build does.
        private static bool BackendSupportsSslConfiguration =>
            RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ||
            (CurlSslVersionDescription()?.StartsWith("OpenSSL") ?? false);

        [DllImport("System.Net.Http.Native", EntryPoint = "HttpNative_GetSslVersionDescription")]
        private static extern string CurlSslVersionDescription();
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using System.Threading;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.Imaging;
using CSJ2K;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;

namespace OpenSim.Region.CoreModules.Agent.TextureSender
{
    public delegate void J2KDecodeDelegate(UUID assetID);

    /// <summary>
    /// Shared region module that extracts JPEG2000 layer-boundary information from
    /// texture assets (via CSJ2K by default, or native OpenJPEG), caching results
    /// both in an in-memory expiring cache and, when available, the asset cache.
    /// </summary>
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "J2KDecoderModule")]
    public class J2KDecoderModule : ISharedRegionModule, IJ2KDecoder
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>Temporarily holds deserialized layer data information in memory</summary>
        private readonly ExpiringCache<UUID, OpenJPEG.J2KLayerInfo[]> m_decodedCache = new ExpiringCache<UUID,OpenJPEG.J2KLayerInfo[]>();

        /// <summary>List of client methods to notify of results of decode</summary>
        private readonly Dictionary<UUID, List<DecodedCallback>> m_notifyList = new Dictionary<UUID, List<DecodedCallback>>();

        /// <summary>Cache that will store decoded JPEG2000 layer boundary data</summary>
        private IImprovedAssetCache m_cache;

        // Lazily resolved from the scene; null until a scene with a cache module exists.
        private IImprovedAssetCache Cache
        {
            get
            {
                if (m_cache == null)
                    m_cache = m_scene.RequestModuleInterface<IImprovedAssetCache>();
                return m_cache;
            }
        }

        /// <summary>Reference to a scene (doesn't matter which one as long as it can load the cache module)</summary>
        private UUID m_CreatorID = UUID.Zero;
        private Scene m_scene;

        #region ISharedRegionModule

        private bool m_useCSJ2K = true;

        public string Name { get { return "J2KDecoderModule"; } }

        public J2KDecoderModule()
        {
        }

        /// <summary>
        /// Reads the optional [Startup] UseCSJ2K flag; defaults to the managed CSJ2K decoder.
        /// </summary>
        public void Initialise(IConfigSource source)
        {
            IConfig startupConfig = source.Configs["Startup"];
            if (startupConfig != null)
            {
                m_useCSJ2K = startupConfig.GetBoolean("UseCSJ2K", m_useCSJ2K);
            }
        }

        /// <summary>
        /// Remembers the first scene added (for cache lookup / creator id) and
        /// registers this module as the scene's IJ2KDecoder.
        /// </summary>
        public void AddRegion(Scene scene)
        {
            if (m_scene == null)
            {
                m_scene = scene;
                m_CreatorID = scene.RegionInfo.RegionID;
            }

            scene.RegisterModuleInterface<IJ2KDecoder>(this);
        }

        public void RemoveRegion(Scene scene)
        {
            if (m_scene == scene)
                m_scene = null;
        }

        public void PostInitialise()
        {
        }

        public void Close()
        {
        }

        public void RegionLoaded(Scene scene)
        {
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        #endregion ISharedRegionModule

        #region IJ2KDecoder

        /// <summary>
        /// Asynchronously decodes layer boundaries, invoking <paramref name="callback"/>
        /// with the result. Concurrent requests for the same asset share one decode:
        /// only the first caller triggers work, the rest just join the notify list.
        /// </summary>
        public void BeginDecode(UUID assetID, byte[] j2kData, DecodedCallback callback)
        {
            OpenJPEG.J2KLayerInfo[] result;

            // If it's cached, return the cached results
            if (m_decodedCache.TryGetValue(assetID, out result))
            {
//                m_log.DebugFormat(
//                    "[J2KDecoderModule]: Returning existing cached {0} layers j2k decode for {1}",
//                    result.Length, assetID);
                callback(assetID, result);
            }
            else
            {
                // Not cached, we need to decode it.
                // Add to notify list and start decoding.
                // Next request for this asset while it's decoding will only be added to the notify list
                // once this is decoded, requests will be served from the cache and all clients in the notifylist will be updated
                bool decode = false;
                lock (m_notifyList)
                {
                    if (m_notifyList.ContainsKey(assetID))
                    {
                        m_notifyList[assetID].Add(callback);
                    }
                    else
                    {
                        List<DecodedCallback> notifylist = new List<DecodedCallback>();
                        notifylist.Add(callback);
                        m_notifyList.Add(assetID, notifylist);
                        decode = true;
                    }
                }

                // Do Decode!
                if (decode)
                    Util.FireAndForget(delegate { Decode(assetID, j2kData); });
            }
        }

        /// <summary>Synchronous decode, discarding the layer/component outputs.</summary>
        public bool Decode(UUID assetID, byte[] j2kData)
        {
            OpenJPEG.J2KLayerInfo[] layers;
            int components;
            return Decode(assetID, j2kData, out layers, out components);
        }

        public bool Decode(UUID assetID, byte[] j2kData, out OpenJPEG.J2KLayerInfo[] layers, out int components)
        {
            return DoJ2KDecode(assetID, j2kData, out layers, out components);
        }

        #endregion IJ2KDecoder

        /// <summary>
        /// Decode Jpeg2000 Asset Data
        /// </summary>
        /// <param name="assetID">UUID of Asset</param>
        /// <param name="j2kData">JPEG2000 data</param>
        /// <param name="layers">layer data</param>
        /// <param name="components">number of components</param>
        /// <returns>true if decode was successful.  false otherwise.</returns>
        private bool DoJ2KDecode(UUID assetID, byte[] j2kData, out OpenJPEG.J2KLayerInfo[] layers, out int components)
        {
//            m_log.DebugFormat(
//                "[J2KDecoderModule]: Doing J2K decoding of {0} bytes for asset {1}", j2kData.Length, assetID);

            bool decodedSuccessfully = true;

            //int DecodeTime = 0;
            //DecodeTime = Environment.TickCount;

            // We don't get this from CSJ2K. Is it relevant?
            components = 0;

            if (!TryLoadCacheForAsset(assetID, out layers))
            {
                if (m_useCSJ2K)
                {
                    try
                    {
                        // CSJ2K gives us the byte offset of each quality layer; turn
                        // those into [Start, End] ranges over the codestream.
                        List<int> layerStarts = CSJ2K.J2kImage.GetLayerBoundaries(new MemoryStream(j2kData));

                        if (layerStarts != null && layerStarts.Count > 0)
                        {
                            layers = new OpenJPEG.J2KLayerInfo[layerStarts.Count];

                            for (int i = 0; i < layerStarts.Count; i++)
                            {
                                OpenJPEG.J2KLayerInfo layer = new OpenJPEG.J2KLayerInfo();

                                if (i == 0)
                                    layer.Start = 0;
                                else
                                    layer.Start = layerStarts[i];

                                if (i == layerStarts.Count - 1)
                                    layer.End = j2kData.Length;
                                else
                                    layer.End = layerStarts[i + 1] - 1;

                                layers[i] = layer;
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        m_log.Warn("[J2KDecoderModule]: CSJ2K threw an exception decoding texture " + assetID + ": " + ex.Message);
                        decodedSuccessfully = false;
                    }
                }
                else
                {
                    if (!OpenJPEG.DecodeLayerBoundaries(j2kData, out layers, out components))
                    {
                        m_log.Warn("[J2KDecoderModule]: OpenJPEG failed to decode texture " + assetID);
                        decodedSuccessfully = false;
                    }
                }

                if (layers == null || layers.Length == 0)
                {
                    m_log.Warn("[J2KDecoderModule]: Failed to decode layer data for texture " + assetID + ", guessing sane defaults");
                    // Layer decoding completely failed. Guess at sane defaults for the layer boundaries
                    layers = CreateDefaultLayers(j2kData.Length);
                    decodedSuccessfully = false;
                }

                // Cache Decoded layers
                SaveFileCacheForAsset(assetID, layers);
            }

            // Notify Interested Parties
            lock (m_notifyList)
            {
                if (m_notifyList.ContainsKey(assetID))
                {
                    foreach (DecodedCallback d in m_notifyList[assetID])
                    {
                        // FIX: invoke the strongly-typed delegate directly instead of
                        // the original reflection-based d.DynamicInvoke(assetID, layers),
                        // which is slow and wraps callback exceptions in
                        // TargetInvocationException.
                        if (d != null)
                            d(assetID, layers);
                    }
                    m_notifyList.Remove(assetID);
                }
            }

            return decodedSuccessfully;
        }

        /// <summary>
        /// Fallback layer boundaries when real decoding fails: fixed percentage
        /// splits of the codestream length.
        /// </summary>
        private OpenJPEG.J2KLayerInfo[] CreateDefaultLayers(int j2kLength)
        {
            OpenJPEG.J2KLayerInfo[] layers = new OpenJPEG.J2KLayerInfo[5];

            for (int i = 0; i < layers.Length; i++)
                layers[i] = new OpenJPEG.J2KLayerInfo();

            // These default layer sizes are based on a small sampling of real-world texture data
            // with extra padding thrown in for good measure. This is a worst case fallback plan
            // and may not gracefully handle all real world data
            layers[0].Start = 0;
            layers[1].Start = (int)((float)j2kLength * 0.02f);
            layers[2].Start = (int)((float)j2kLength * 0.05f);
            layers[3].Start = (int)((float)j2kLength * 0.20f);
            layers[4].Start = (int)((float)j2kLength * 0.50f);

            layers[0].End = layers[1].Start - 1;
            layers[1].End = layers[2].Start - 1;
            layers[2].End = layers[3].Start - 1;
            layers[3].End = layers[4].Start - 1;
            layers[4].End = j2kLength;

            return layers;
        }

        /// <summary>
        /// Stores layer data in the in-memory cache (10 min TTL) and, when an asset
        /// cache is available, as a temporary "j2kCache_&lt;id&gt;" asset encoded as
        /// "Start|End|Length" lines.
        /// </summary>
        private void SaveFileCacheForAsset(UUID AssetId, OpenJPEG.J2KLayerInfo[] Layers)
        {
            m_decodedCache.AddOrUpdate(AssetId, Layers, TimeSpan.FromMinutes(10));

            if (Cache != null)
            {
                string assetID = "j2kCache_" + AssetId.ToString();

                AssetBase layerDecodeAsset = new AssetBase(assetID, assetID, (sbyte)AssetType.Notecard, m_CreatorID.ToString());
                layerDecodeAsset.Local = true;
                layerDecodeAsset.Temporary = true;

                #region Serialize Layer Data

                StringBuilder stringResult = new StringBuilder();
                string strEnd = "\n";
                for (int i = 0; i < Layers.Length; i++)
                {
                    if (i == Layers.Length - 1)
                        strEnd = String.Empty;

                    stringResult.AppendFormat("{0}|{1}|{2}{3}", Layers[i].Start, Layers[i].End, Layers[i].End - Layers[i].Start, strEnd);
                }

                layerDecodeAsset.Data = Util.UTF8.GetBytes(stringResult.ToString());

                #endregion Serialize Layer Data

                Cache.Cache(layerDecodeAsset);
            }
        }

        /// <summary>
        /// Tries memory first, then the serialized asset-cache entry. Corrupt cache
        /// entries are expired and treated as a miss.
        /// </summary>
        bool TryLoadCacheForAsset(UUID AssetId, out OpenJPEG.J2KLayerInfo[] Layers)
        {
            if (m_decodedCache.TryGetValue(AssetId, out Layers))
            {
                return true;
            }
            else if (Cache != null)
            {
                string assetName = "j2kCache_" + AssetId.ToString();
                AssetBase layerDecodeAsset = Cache.Get(assetName);

                if (layerDecodeAsset != null)
                {
                    #region Deserialize Layer Data

                    string readResult = Util.UTF8.GetString(layerDecodeAsset.Data);
                    string[] lines = readResult.Split(new char[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);

                    if (lines.Length == 0)
                    {
                        m_log.Warn("[J2KDecodeCache]: Expiring corrupted layer data (empty) " + assetName);
                        Cache.Expire(assetName);
                        return false;
                    }

                    Layers = new OpenJPEG.J2KLayerInfo[lines.Length];

                    for (int i = 0; i < lines.Length; i++)
                    {
                        string[] elements = lines[i].Split('|');
                        if (elements.Length == 3)
                        {
                            int element1, element2;

                            try
                            {
                                element1 = Convert.ToInt32(elements[0]);
                                element2 = Convert.ToInt32(elements[1]);
                            }
                            catch (FormatException)
                            {
                                m_log.Warn("[J2KDecodeCache]: Expiring corrupted layer data (format) " + assetName);
                                Cache.Expire(assetName);
                                return false;
                            }

                            Layers[i] = new OpenJPEG.J2KLayerInfo();
                            Layers[i].Start = element1;
                            Layers[i].End = element2;
                        }
                        else
                        {
                            m_log.Warn("[J2KDecodeCache]: Expiring corrupted layer data (layout) " + assetName);
                            Cache.Expire(assetName);
                            return false;
                        }
                    }

                    #endregion Deserialize Layer Data

                    return true;
                }
            }

            return false;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Drawing;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using Microsoft.Protocols.TestTools;
using Microsoft.Protocols.TestTools.StackSdk.RemoteDesktop.Rdpbcgr;
using Microsoft.Protocols.TestTools.StackSdk.RemoteDesktop.Rdprfx;
using Microsoft.Protocols.TestSuites.Rdp;
using Microsoft.Protocols.TestSuites.Rdpbcgr;
using Microsoft.Protocols.TestTools.StackSdk;
using System.IO;

namespace Microsoft.Protocols.TestSuites.Rdprfx
{
    // NOTE(review): this class runs past the end of this chunk — the final method
    // below is truncated mid-body.
    public partial class RdprfxAdapter : ManagedAdapterBase, IRdprfxAdapter
    {
        #region Variables

        private ITestSite site;
        private RdpbcgrServer rdpbcgrServerStack;
        private IRdpbcgrAdapter rdpbcgrAdapter;
        private RdpbcgrServerSessionContext rdpbcgrSessionContext;
        // Capability sets parsed from the client's Confirm Active PDU.
        private Collection<ITsCapsSet> ConfirmCapabilitySets;
        private RdprfxServerDecoder rdprfxServerDecoder;
        private RdprfxServer rdprfxServer;

        //Abstract Data Model
        private OperationalMode admOperationMode = OperationalMode.ImageMode;
        private EntropyAlgorithm admEntropyAlgorithm = EntropyAlgorithm.CLW_ENTROPY_RLGR3;
        private uint admFrameIndex;

        //the last frame id used in TS_RFX_FRAME_BEGIN.
        private uint frameMakerFrameId;

        //the last frame id used in Frame Marker Command.
        private UInt16Class supportedColorDepths;
        private ByteClass networkConnectionType;

        private bool is_Client_Multifragment_Update_CapabilitySet_Received;
        private TS_MULTIFRAGMENTUPDATE_CAPABILITYSET client_Multifragment_Update_CapabilitySet;
        private uint s2cMaxRequestSize;

        //the MaxRequestSize field of the server-to-client Multifragment Update Capability Set.
        private bool is_Client_Large_Pointer_Capability_Set_Received;
        private TS_LARGE_POINTER_CAPABILITYSET client_Large_Pointer_Capability_Set;

        private bool is_Client_Revision2_Bitmap_Cache_Capability_Set_Received;
        private TS_BITMAPCACHE_CAPABILITYSET_REV2 client_Revision2_Bitmap_Cache_Capability_Set;

        private bool is_TS_FRAME_ACKNOWLEDGE_CAPABILITYSET_Received;
        private TS_FRAME_ACKNOWLEDGE_CAPABILITYSET clientTS_FRAME_ACKNOWLEDGE_CAPABILITYSET;

        private bool is_Client_Surface_Commands_Capability_Set_Received;
        private TS_SURFCMDS_CAPABILITYSET client_Surface_Commands_Capability_Set;
        // NOTE(review): name is missing an 'S' ("clientSupport...") — kept as-is since
        // the field may be referenced from the other half of this partial class.
        private bool clientupportStreamSurfaceBits;

        private bool is_Client_Bitmap_Codecs_Capability_Set_Received;
        private TS_BITMAPCODECS_CAPABILITYSET client_Bitmap_Codecs_Capability_Set;
        private bool is_TS_RFX_CLNT_CAPS_CONTAINER_Received;
        private TS_RFX_CLNT_CAPS_CONTAINER client_RFX_Caps_Container;
        private byte remoteFXCodecID;

        private List<StackPacket> pduCache;
        private List<byte> pendingBuffer;
        private object syncLocker;

        private RdprfxNegativeType currentTestType;

        #endregion

        #region IAdapter Methods

        // Adapter bootstrap: allocates decoder/server state and resets negative-test mode.
        public override void Initialize(ITestSite testSite)
        {
            this.site = testSite;

            rdprfxServerDecoder = new RdprfxServerDecoder();
            rdprfxServer = new RdprfxServer();
            pduCache = new List<StackPacket>();
            pendingBuffer = new List<byte>();
            syncLocker = new object();
            currentTestType = RdprfxNegativeType.None;
        }

        // Clears per-test state between test cases.
        public override void Reset()
        {
            base.Reset();
            pduCache.Clear();
            pendingBuffer.Clear();
            currentTestType = RdprfxNegativeType.None;
        }

        // Hides ManagedAdapterBase.Site to return the site captured in Initialize.
        public new TestTools.ITestSite Site
        {
            get
            {
                return site;
            }
        }

        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);
            if (this.rdpbcgrServerStack != null)
            {
                this.rdpbcgrServerStack.Dispose();
            }
        }

        #endregion

        #region IRdprfxAdapter Methods

        /// <summary>
        /// Wait for connection.
        /// </summary>
        public void Accept()
        {
            this.rdpbcgrAdapter = Site.GetAdapter<IRdpbcgrAdapter>();
            this.rdpbcgrServerStack = this.rdpbcgrAdapter.ServerStack;
            this.rdpbcgrSessionContext = this.rdpbcgrAdapter.SessionContext;
            // Verify each TS_FRAME_ACKNOWLEDGE PDU the client sends back.
            this.rdpbcgrAdapter.TS_FRAME_ACKNOWLEDGE_PDUReceived += new TS_FRAME_ACKNOWLEDGE_PDUHandler(this.VerifyTS_FRAME_ACKNOWLEDGE_PDU);
        }

        /// <summary>
        /// Accept an existing RDP session which established outside.
        /// </summary>
        /// <param name="rdpbcgrServer">RdpbcgrServer object.</param>
        /// <param name="serverContext">RdpbcgrServerSessionContext object.</param>
        public void Accept(RdpbcgrServer rdpbcgrServer, RdpbcgrServerSessionContext serverContext)
        {
            this.rdpbcgrAdapter = Site.GetAdapter<IRdpbcgrAdapter>();
            this.rdpbcgrServerStack = rdpbcgrServer;
            this.rdpbcgrSessionContext = serverContext;
            this.rdpbcgrAdapter.TS_FRAME_ACKNOWLEDGE_PDUReceived += new TS_FRAME_ACKNOWLEDGE_PDUHandler(this.VerifyTS_FRAME_ACKNOWLEDGE_PDU);
        }

        /// <summary>
        /// Method to receive, decode and check client connection type and color depth.
        /// </summary>
        public void ReceiveAndCheckClientCoreData()
        {
            this.networkConnectionType = rdpbcgrSessionContext.ClientNetworkConnectionType;
            this.supportedColorDepths = rdpbcgrSessionContext.supportedColorDepths;
            VerifyColorDepths();
        }

        /// <summary>
        /// Method to receive and decode client capabilities.
        /// </summary>
        /// <param name="serverMaxRequestSize">The MaxRequestSize field of the server-to-client Multifragment Update Capability Set.</param>
        /// <param name="supportedRfxCaps">Output the TS_RFX_ICAP array supported by the client.</param>
        public void ReceiveAndCheckClientCapabilities(uint serverMaxRequestSize, out TS_RFX_ICAP[] supportedRfxCaps)
        {
            supportedRfxCaps = null;
            s2cMaxRequestSize = serverMaxRequestSize;
            ConfirmCapabilitySets = this.rdpbcgrSessionContext.ConfirmCapabilitySets;
            // Record each capability set the client confirmed, by concrete type.
            foreach (ITsCapsSet capSet in ConfirmCapabilitySets)
            {
                if (capSet is TS_MULTIFRAGMENTUPDATE_CAPABILITYSET)
                {
                    this.is_Client_Multifragment_Update_CapabilitySet_Received = true;
                    this.client_Multifragment_Update_CapabilitySet = (TS_MULTIFRAGMENTUPDATE_CAPABILITYSET)capSet;
                }
                else if (capSet is TS_LARGE_POINTER_CAPABILITYSET)
                {
                    this.is_Client_Large_Pointer_Capability_Set_Received = true;
                    this.client_Large_Pointer_Capability_Set = (TS_LARGE_POINTER_CAPABILITYSET)capSet;
                }
                else if (capSet is TS_BITMAPCACHE_CAPABILITYSET_REV2)
                {
                    this.is_Client_Revision2_Bitmap_Cache_Capability_Set_Received = true;
                    this.client_Revision2_Bitmap_Cache_Capability_Set = (TS_BITMAPCACHE_CAPABILITYSET_REV2)capSet;
                }
                else if (capSet is TS_FRAME_ACKNOWLEDGE_CAPABILITYSET)
                {
                    this.is_TS_FRAME_ACKNOWLEDGE_CAPABILITYSET_Received = true;
                    this.clientTS_FRAME_ACKNOWLEDGE_CAPABILITYSET = (TS_FRAME_ACKNOWLEDGE_CAPABILITYSET)capSet;
                }
                else if (capSet is TS_SURFCMDS_CAPABILITYSET)
                {
                    this.is_Client_Surface_Commands_Capability_Set_Received = true;
                    this.client_Surface_Commands_Capability_Set = (TS_SURFCMDS_CAPABILITYSET)capSet;
                    // Flag whether the client supports Stream Surface Bits commands.
                    if ((this.client_Surface_Commands_Capability_Set.cmdFlags & CmdFlags_Values.SURFCMDS_STREAMSURFACEBITS) == CmdFlags_Values.SURFCMDS_STREAMSURFACEBITS)
                    {
                        this.clientupportStreamSurfaceBits = true;
                    }
                }
                else if (capSet is TS_BITMAPCODECS_CAPABILITYSET)
                {
                    this.is_Client_Bitmap_Codecs_Capability_Set_Received = true;
                    this.client_Bitmap_Codecs_Capability_Set = (TS_BITMAPCODECS_CAPABILITYSET)capSet;
                    // (continues beyond this chunk)
                    foreach (TS_BITMAPCODEC codec in this.client_Bitmap_Codecs_Capability_Set.supportedBitmapCodecs.bitmapCodecArray)
                    {
                        if
(is_REMOTEFX_CODEC_GUID(codec.codecGUID)) { is_TS_RFX_CLNT_CAPS_CONTAINER_Received = true; remoteFXCodecID = codec.codecID; this.client_RFX_Caps_Container = rdprfxServerDecoder.Decode_TS_RFX_CLNT_CAPS_CONTAINER(codec.codecProperties); supportedRfxCaps = this.client_RFX_Caps_Container.capsData.capsetsData[0].icapsData; break; } } } } //Verify Client Capabilities VerifyClientCapabilities(); } /// <summary> /// This method expect a TS_FRAME_ACKNOWLEDGE_PDU from client. /// </summary> /// <param name="expectedFrameId">The expected frame id.</param> /// <param name="ackTimeout">The time span to wait.</param> public void ExpectTsFrameAcknowledgePdu(uint expectedFrameId, TimeSpan ackTimeout) { this.frameMakerFrameId = expectedFrameId; if (this.rdpbcgrAdapter != null) { this.rdpbcgrAdapter.WaitForPacket<TS_FRAME_ACKNOWLEDGE_PDU>(ackTimeout); } else if (this.rdpbcgrServerStack != null && this.rdpbcgrSessionContext != null) { StackPacket receivedPdu = null; TS_FRAME_ACKNOWLEDGE_PDU ackPdu = null; bool isReceived = false; TimeSpan leftTime = ackTimeout; DateTime expiratedTime = DateTime.Now + ackTimeout; foreach (StackPacket pdu in pduCache) { ackPdu = pdu as TS_FRAME_ACKNOWLEDGE_PDU; if (ackPdu != null) { isReceived = true; pduCache.Remove(pdu); break; } } while (!isReceived && leftTime.CompareTo(new TimeSpan(0)) > 0) { try { receivedPdu = this.rdpbcgrServerStack.ExpectPdu(this.rdpbcgrSessionContext, leftTime); ackPdu = receivedPdu as TS_FRAME_ACKNOWLEDGE_PDU; if (ackPdu != null) { isReceived = true; break; } else { Site.Log.Add(LogEntryKind.TestInProgress, "Received and cached Pdu: {0}.", receivedPdu.GetType()); pduCache.Add(receivedPdu); } } catch (TimeoutException) { Site.Assert.Fail("Timeout when expecting {0}", typeof(TS_FRAME_ACKNOWLEDGE_PDU)); } catch (InvalidOperationException ex) { //break; Site.Log.Add(LogEntryKind.Warning, "Exception thrown out when receiving client PDUs {0}.", ex.Message); } finally { System.Threading.Thread.Sleep(100);//Wait some time for next 
packet. leftTime = expiratedTime - DateTime.Now; } } if (isReceived) { this.VerifyTS_FRAME_ACKNOWLEDGE_PDU(ackPdu); } else { site.Assert.Fail("Timeout when expecting {0}.", typeof(TS_FRAME_ACKNOWLEDGE_PDU)); } } } #region Encode Header Messages /// <summary> /// Method to send TS_RFX_SYNC to client. /// </summary> public void SendTsRfxSync() { TS_RFX_SYNC rfxSync = rdprfxServer.CreateTsRfxSync(); if (currentTestType == RdprfxNegativeType.UnspecifiedBlockType) { rfxSync.BlockT.blockType = blockType_Value.InvalidType; } else if (currentTestType == RdprfxNegativeType.TsRfxSync_InvalidBlockLen) { rfxSync.BlockT.blockLen = rfxSync.BlockT.blockLen - 1; //invalid block length } else if (currentTestType == RdprfxNegativeType.TsRfxSync_InvalidMagic) { rfxSync.magic = 0xBBBBBBBB; //invalid value other than 0xCACCACCA } else if (currentTestType == RdprfxNegativeType.TsRfxSync_InvalidVersion) { rfxSync.version = 0x0000; //invalid value other than 0x0100 } AddToPendingList(rfxSync); } /// <summary> /// Method to send TS_RFX_CODEC_VERSIONS to client. /// </summary> public void SendTsRfxCodecVersions() { TS_RFX_CODEC_VERSIONS rfxVersions = rdprfxServer.CreateTsRfxCodecVersions(); if (currentTestType == RdprfxNegativeType.TsRfxCodecVersions_InvalidCodecId) { rfxVersions.codecs.codecId = 0x00; //invalid value other than 0x01 } else if (currentTestType == RdprfxNegativeType.TsRfxCodecVersions_InvalidVersion) { rfxVersions.codecs.version = 0x0000; //invalid value other than 0x0100; } AddToPendingList(rfxVersions); } /// <summary> /// Method to send TS_RFX_CHANNELS to client. /// </summary> public void SendTsRfxChannels() { TS_RFX_CHANNELS rfxChannels = rdprfxServer.CreateTsRfxChannels(); if (this.currentTestType == RdprfxNegativeType.TsRfxChannelT_InvalidWidth_TooSmall) { rfxChannels.channels[0].width = 0; //set to an invalid value which less than 1. 
} else if (this.currentTestType == RdprfxNegativeType.TsRfxChannelT_InvalidWidth_TooBig) { rfxChannels.channels[0].width = 4097; //set to an invalid value which greater than 4096. } else if (this.currentTestType == RdprfxNegativeType.TsRfxChannelT_InvalidHeight_TooSmall) { rfxChannels.channels[0].height = 0; //set to an invalid value which less than 1. } else if (this.currentTestType == RdprfxNegativeType.TsRfxChannelT_InvalidHeight_TooBig) { rfxChannels.channels[0].height = 2049; //set to an invalid value which greater than 2048. } else if (this.currentTestType == RdprfxNegativeType.TsRfxChannels_InvalidChannelId) { rfxChannels.channels[0].channelId = 0x01; //set to an invalid value other than 0x00. } AddToPendingList(rfxChannels); } /// <summary> /// Method to send TS_RFX_CHANNELS to client. /// </summary> /// <param name="width">The width of the channel</param> /// <param name="height">The height of the channel</param> public void SendTsRfxChannels(short width, short height) { TS_RFX_CHANNELS rfxChannels = rdprfxServer.CreateTsRfxChannels(); rfxChannels.channels[0].width = width; rfxChannels.channels[0].height = height; AddToPendingList(rfxChannels); } /// <summary> /// Method to send TS_RFX_CONTEXT to client. /// </summary> /// <param name="isImageMode">Indicates the operational mode.</param> /// <param name="entropy">Indicates the entropy algorithm.</param> public void SendTsRfxContext(OperationalMode opMode, EntropyAlgorithm entropy) { this.admEntropyAlgorithm = entropy; this.admOperationMode = opMode; TS_RFX_CONTEXT rfxContext = rdprfxServer.CreateTsRfxContext(opMode, entropy); if (this.currentTestType == RdprfxNegativeType.TsRfxContext_InvalidCtxId) { rfxContext.ctxId = 0x01; //set to an invalid value other than 0x00. } else if (this.currentTestType == RdprfxNegativeType.TsRfxContext_InvalidTileSize) { rfxContext.tileSize = 0x0080; //set to an invalid value other than 0x0040. 
} else if (this.currentTestType == RdprfxNegativeType.TsRfxContext_InvalidCct) { rfxContext.properties &= 0xFFF7; //set "cct" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxContext_InvalidXft) { rfxContext.properties &= 0xFE1F; //set "xft" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxContext_InvalidQt) { rfxContext.properties &= 0x9FFF; //set "qt" to an invalid value: 0x0. } AddToPendingList(rfxContext); } #endregion #region Encode Data Messages /// <summary> /// Method to send TS_RFX_FRAME_BEGIN to client. /// </summary> /// <param name="frameIdx">The frame index.</param> public void SendTsRfxFrameBegin(uint frameIdx) { this.admFrameIndex = frameIdx; TS_RFX_FRAME_BEGIN rfxBegin = rdprfxServer.CreateTsRfxFrameBegin(frameIdx); if (this.currentTestType == RdprfxNegativeType.TsRfxFrameBegin_InvalidBlockLen) { rfxBegin.CodecChannelT.blockLen -= 1;//Set to invalid block len which less than the actual. } else if (this.currentTestType == RdprfxNegativeType.TsRfxCodecChannelT_InvalidCodecId) { rfxBegin.CodecChannelT.codecId = 0x00; //set to an invalid value other than 0x01. } else if (this.currentTestType == RdprfxNegativeType.TsRfxCodecChannelT_InvalidChannelId) { rfxBegin.CodecChannelT.channelId = 0x01; //set to an invalid value other than 0x00. } AddToPendingList(rfxBegin); } /// <summary> /// Method to send TS_RFX_REGION to client. /// </summary> /// <param name="rects">Array of rects, if this parameter is null, will send a 64*64 rect </param> /// <param name="numRectsZero">A boolean varialbe to indicate whether the numRectsZero field of TS_RFX_REGION is zero</param> public void SendTsRfxRegion(Rectangle[] rects = null, bool numRectsZero = false) { TS_RFX_REGION rfxRegion = rdprfxServer.CreateTsRfxRegion(rects, numRectsZero); if (this.currentTestType == RdprfxNegativeType.TsRfxRegion_InvalidRegionFlags) { rfxRegion.regionFlags = 0x00; //set to an invalid value other than 0x01. 
} else if(this.currentTestType == RdprfxNegativeType.TsRfxRegion_InvalidRegionType) { rfxRegion.regionType = 0xBBBB; //set to an invalid value other than 0xCAC1. } if (this.rdpbcgrAdapter.SimulatedScreen != null) { this.rdpbcgrAdapter.SimulatedScreen.SetRemoteFXRegion(rfxRegion); } AddToPendingList(rfxRegion); } /// <summary> /// Method to send TS_RFX_TILESET to client. /// </summary> /// <param name="isImageMode">Indicates the operational mode.</param> /// <param name="entropy">Indicates the entropy algorithm.</param> /// <param name="tileImage">The image for a tile. The width and height must be less than or equals with 64.</param> /// <param name="codecQuantVals">Quant values array</param> /// <param name="quantIdxY">Index of Y component in Quant value array</param> /// <param name="quantIdxCb">Index of Cb component in Quant value array</param> /// <param name="quantIdxCr">Index of Cr component in Quant value array</param> public void SendTsRfxTileSet(OperationalMode opMode, EntropyAlgorithm entropy, Image tileImage, TS_RFX_CODEC_QUANT[] codecQuantVals = null, byte quantIdxY = 0, byte quantIdxCb = 0, byte quantIdxCr = 0) { this.admEntropyAlgorithm = entropy; this.admOperationMode = opMode; TS_RFX_TILESET rfxTileSet = rdprfxServer.CreateTsRfxTileSet(opMode, entropy, tileImage, codecQuantVals, quantIdxY, quantIdxCb, quantIdxCr); if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidIdx) { rfxTileSet.idx = 0x0001; //set to an invalid value other than 0x0000. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidLt) { rfxTileSet.properties &= 0xFFFE; //set "lt" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidCct) { rfxTileSet.properties &= 0xFFCF; //set "cct" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidXft) { rfxTileSet.properties &= 0xFC3F; //set "xft" to an invalid value: 0x0. 
} else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidQt) { rfxTileSet.properties &= 0x3FFF; //set "xft" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidTileSize) { rfxTileSet.tileSize = 0x80; //set to an invalid value other than 0x40. } if (this.rdpbcgrAdapter.SimulatedScreen != null) { this.rdpbcgrAdapter.SimulatedScreen.SetRemoteFXTileSet(rfxTileSet, entropy); } AddToPendingList(rfxTileSet); if (!CheckIfClientSupports(opMode, entropy)) { Site.Log.Add(LogEntryKind.Debug, "The client Cap is not supported: OperationalMode = {0}, EntropyAlgorithm = {1}", opMode.ToString(), entropy.ToString()); } } /// <summary> /// Method to send TS_RFX_TILESET to client. /// </summary> /// <param name="opMode">Indicates the operational mode.</param> /// <param name="entropy">Indicates the entropy algorithm.</param> /// <param name="tileImages">The image array for tiles to be sent. The width and height must be less than or equals with 64.</param> /// <param name="positions">A TILE_POSITION array indicating the positions of each tile images</param> /// <param name="codecQuantVals">Quant values array</param> /// <param name="quantIdxYs">Index array of Y component in Quant value array</param> /// <param name="quantIdxCbs">Index array of Cb component in Quant value array</param> /// <param name="quantIdxCrs">Index array of Cr component in Quant value array</param> public void SendTsRfxTileSet(OperationalMode opMode, EntropyAlgorithm entropy, Image[] tileImages, TILE_POSITION[] positions, TS_RFX_CODEC_QUANT[] codecQuantVals = null, byte[] quantIdxYs = null, byte[] quantIdxCbs = null, byte[] quantIdxCrs = null) { this.admEntropyAlgorithm = entropy; this.admOperationMode = opMode; TS_RFX_TILESET rfxTileSet = rdprfxServer.CreateTsRfxTileSet(opMode, entropy, tileImages, positions, codecQuantVals, quantIdxYs, quantIdxCbs, quantIdxCrs); if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidIdx) { 
rfxTileSet.idx = 0x0001; //set to an invalid value other than 0x0000. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidLt) { rfxTileSet.properties &= 0xFFFE; //set "lt" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidCct) { rfxTileSet.properties &= 0xFFCF; //set "cct" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidXft) { rfxTileSet.properties &= 0xFC3F; //set "xft" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidQt) { rfxTileSet.properties &= 0x3FFF; //set "xft" to an invalid value: 0x0. } else if (this.currentTestType == RdprfxNegativeType.TsRfxTileSet_InvalidTileSize) { rfxTileSet.tileSize = 0x80; //set to an invalid value other than 0x40. } if (this.rdpbcgrAdapter.SimulatedScreen != null) { this.rdpbcgrAdapter.SimulatedScreen.SetRemoteFXTileSet(rfxTileSet, entropy); } AddToPendingList(rfxTileSet); if (!CheckIfClientSupports(opMode, entropy)) { Site.Log.Add(LogEntryKind.Debug, "The client Cap is not supported: OperationalMode = {0}, EntropyAlgorithm = {1}", opMode.ToString(), entropy.ToString()); } } /// <summary> /// Method to send TS_RFX_FRAME_END to client. /// </summary> public void SendTsRfxFrameEnd() { TS_RFX_FRAME_END rfxEnd = rdprfxServer.CreateTsRfxFrameEnd(); AddToPendingList(rfxEnd); } #endregion /// <summary> /// Method to send one frame of encoded data message to client. 
/// </summary> /// <param name="image">The image to be sent.</param> /// <param name="opMode">Indicates the operational mode.</param> /// <param name="entropy">Indicates the entropy algorithm.</param> /// <param name="destLeft">Left bound of the frame.</param> /// <param name="destTop">Left bound of the frame.</param> public void SendImageToClient(System.Drawing.Image image, OperationalMode opMode, EntropyAlgorithm entropy, ushort destLeft, ushort destTop) { if (image == null) { Site.Log.Add(LogEntryKind.Debug, "[In iRdprfxAdapter.SendImageToClient Method] The image to be send is null."); return; } TileImage[] tileImageArr = RdprfxTileUtils.SplitToTileImage(image, RdprfxServer.TileSize, RdprfxServer.TileSize); for (int idx = 0; idx < tileImageArr.Length; idx++) { if (idx == 0 || opMode == OperationalMode.ImageMode) { SendTsRfxSync(); SendTsRfxCodecVersions(); SendTsRfxChannels(); SendTsRfxContext(opMode, entropy); } SendTsRfxFrameBegin((uint)idx); SendTsRfxRegion(); SendTsRfxTileSet(opMode, entropy, tileImageArr[idx].image); SendTsRfxFrameEnd(); FlushEncodedData((ushort)(destLeft + tileImageArr[idx].x), (ushort)(destTop + tileImageArr[idx].y)); if (currentTestType != RdprfxNegativeType.None) { // Only send one message if it is in a negative test case. break; } } } /// <summary> /// Method to send one frame of unencoded data message to client. 
/// </summary> /// <param name="image">The image to be sent.</param> /// <param name="opMode">Indicates the operational mode.</param> /// <param name="entropy">Indicates the entropy algorithm.</param> /// <param name="destLeft">Left bound of the frame.</param> /// <param name="destTop">Left bound of the frame.</param> public void SendImageToClientWithoutEncoding(System.Drawing.Image image, ushort destLeft, ushort destTop) { if (image == null) { Site.Log.Add(LogEntryKind.Debug, "[In iRdprfxAdapter.SendImageToClient Method] The image to be send is null."); return; } TS_SURFCMD_STREAM_SURF_BITS surfStreamCmd = Create_TS_SURFCMD_STREAM_SURF_BITS(0); MemoryStream memoryStream = new MemoryStream(); image.Save(memoryStream, System.Drawing.Imaging.ImageFormat.Bmp); Byte[] byteImage = memoryStream.ToArray(); // remove the BITMAP header data at the beginning int rgbLength = RgbTile.TileSize * RgbTile.TileSize * 4; Byte[] rgbArray = new Byte[rgbLength]; int delta = byteImage.Length - rgbLength; for (int i = 0; i < rgbLength; i++) { rgbArray[i] = byteImage[delta + i]; } surfStreamCmd.bitmapData.bitmapDataLength = (uint)rgbArray.Length; surfStreamCmd.bitmapData.bitmapData = rgbArray; surfStreamCmd.destLeft = destLeft; surfStreamCmd.destTop = destTop; checked { surfStreamCmd.destBottom = (ushort)(destTop + image.Width); surfStreamCmd.destRight = (ushort)(destLeft + image.Height); } surfStreamCmd.bitmapData.width = (ushort)image.Width; surfStreamCmd.bitmapData.height = (ushort)image.Height; SendSurfaceCmd_StreamSurfBits(surfStreamCmd); if (this.rdpbcgrAdapter.SimulatedScreen != null) { this.rdpbcgrAdapter.SimulatedScreen.RenderUncompressedImage(image, destLeft, destTop); } } /// <summary> /// Set the type of current test. /// </summary> /// <param name="testType">The test type.</param> public void SetTestType(RdprfxNegativeType testType) { currentTestType = testType; } /// <summary> /// Method to send all pending encoded data of a frame to RDP client. 
/// </summary> /// <param name="destLeft">Left bound of the frame.</param> /// <param name="destTop">Left bound of the frame.</param> /// <param name="width">The width of the frame.</param> /// <param name="height">The height of the frame.</param> public void FlushEncodedData(ushort destLeft, ushort destTop, ushort width = RdprfxServer.TileSize, ushort height = RdprfxServer.TileSize) { lock (syncLocker) { if (pendingBuffer.Count > 0) { LogServerADMInfo(); TS_SURFCMD_STREAM_SURF_BITS surfStreamCmd = Create_TS_SURFCMD_STREAM_SURF_BITS(); surfStreamCmd.bitmapData.bitmapDataLength = (uint)(pendingBuffer.Count); surfStreamCmd.bitmapData.bitmapData = pendingBuffer.ToArray(); pendingBuffer.Clear(); surfStreamCmd.destLeft = destLeft; surfStreamCmd.destTop = destTop; checked { surfStreamCmd.destBottom = (ushort)(destTop + height); surfStreamCmd.destRight = (ushort)(destLeft + width); } surfStreamCmd.bitmapData.width = width; surfStreamCmd.bitmapData.height = height; SendSurfaceCmd_StreamSurfBits(surfStreamCmd); } } if (this.rdpbcgrAdapter.SimulatedScreen != null) { this.rdpbcgrAdapter.SimulatedScreen.RenderRemoteFXTile(destLeft, destTop); } } /// <summary> /// Method to convert all encoded data into byte array. /// </summary> /// <return> return RFX encoded data into byte array </return> public byte[] GetEncodedData() { lock (syncLocker) { return pendingBuffer.ToArray(); } } /// <summary> /// Method to check if the input pair of the operation mode and entropy algorithm is supported by the client. 
/// </summary> /// <param name="opMode">The operation mode.</param> /// <param name="entropy">The entropy algorithm.</param> /// <returns></returns> public bool CheckIfClientSupports(OperationalMode opMode, EntropyAlgorithm entropy) { TS_RFX_ICAP[] iCaps = this.client_RFX_Caps_Container.capsData.capsetsData[0].icapsData; foreach (TS_RFX_ICAP icap in iCaps) { if ((icap.flags & (byte)OperationalMode.ImageMode) == (byte)0 && ((byte)icap.entropyBits == (byte)entropy)) { //OperationalMode.ImageMode is not set, both the image mode and the video mode of the codec are supported return true; } else if ((byte)icap.entropyBits == (byte)entropy) { //OperationalMode.ImageMode is set, only image mode is supported if (opMode == OperationalMode.ImageMode) return true; } } return false; } #endregion #region private methods /// <summary> /// Method to send RDPRFX raw data. /// </summary> /// <param name="surfStreamCmd">TS_SURFCMD_STREAM_SURF_BITS to be send.</param> private void SendSurfaceCmd_StreamSurfBits(TS_SURFCMD_STREAM_SURF_BITS surfStreamCmd) { if (rdpbcgrAdapter != null) { rdpbcgrAdapter.SendStreamSurfaceBitsCommand(surfStreamCmd); } else if (this.rdpbcgrServerStack != null && this.rdpbcgrSessionContext != null) { //TS_FRAME_MARKER frameMakerCmdBegin = new TS_FRAME_MARKER(); //frameMakerCmdBegin.cmdType = cmdType_Values.CMDTYPE_FRAME_MARKER; //frameMakerCmdBegin.frameAction = frameAction_Values.SURFACECMD_FRAMEACTION_BEGIN; //frameMakerCmdBegin.frameId = frameIndex; //TS_FRAME_MARKER frameMakerCmdEnd = new TS_FRAME_MARKER(); //frameMakerCmdEnd.cmdType = cmdType_Values.CMDTYPE_FRAME_MARKER; //frameMakerCmdEnd.frameAction = frameAction_Values.SURFACECMD_FRAMEACTION_END; //frameMakerCmdEnd.frameId = frameIndex++; TS_FP_SURFCMDS surfCmds = new TS_FP_SURFCMDS(); surfCmds.updateHeader = (byte)(((int)updateCode_Values.FASTPATH_UPDATETYPE_SURFCMDS & 0x0f) | (((int)fragmentation_Value.FASTPATH_FRAGMENT_SINGLE) << 4) | ((int)compressedType_Values.None << 6)); 
surfCmds.compressionFlags = compressedType_Values.None; surfCmds.size = (ushort)(8 + 8 + 22 + surfStreamCmd.bitmapData.bitmapDataLength);//size of TS_SURFCMD_STREAM_SURF_BITS; surfCmds.surfaceCommands = new TS_SURFCMD[1]; surfCmds.surfaceCommands[0] = surfStreamCmd; TS_FP_UPDATE_PDU fpOutput; TS_FP_UPDATE[] updates = new TS_FP_UPDATE[1]; updates[0] = surfCmds; fpOutput = rdpbcgrServerStack.CreateFastPathUpdatePdu(rdpbcgrSessionContext, updates); rdpbcgrServerStack.SendPdu(rdpbcgrSessionContext, fpOutput); } } /// <summary> /// Send a Rdprfx message. /// </summary> /// <param name="rfxMessage">The Rdprfx message to be sent.</param> private void AddToPendingList(IMarshalable rfxMessage) { lock (syncLocker) { byte[] data = rfxMessage.ToBytes(); pendingBuffer.AddRange(data); } } private bool is_REMOTEFX_CODEC_GUID(TS_BITMAPCODEC_GUID guidObj) { //CODEC_GUID_REMOTEFX //0x76772F12 BD72 4463 AF B3 B7 3C 9C 6F 78 86 bool rtnValue; rtnValue = (guidObj.codecGUID1 == 0x76772F12) && (guidObj.codecGUID2 == 0xBD72) && (guidObj.codecGUID3 == 0x4463) && (guidObj.codecGUID4 == 0xAF) && (guidObj.codecGUID5 == 0xB3) && (guidObj.codecGUID6 == 0xB7) && (guidObj.codecGUID7 == 0x3C) && (guidObj.codecGUID8 == 0x9C) && (guidObj.codecGUID9 == 0x6F) && (guidObj.codecGUID10 == 0x78) && (guidObj.codecGUID11== 0x86); return rtnValue; } private TS_SURFCMD_STREAM_SURF_BITS Create_TS_SURFCMD_STREAM_SURF_BITS() { TS_SURFCMD_STREAM_SURF_BITS surfStreamCmd = new TS_SURFCMD_STREAM_SURF_BITS(); surfStreamCmd.cmdType = cmdType_Values.CMDTYPE_STREAM_SURFACE_BITS; surfStreamCmd.destLeft = 0; surfStreamCmd.destTop = 0; surfStreamCmd.destRight = 0; surfStreamCmd.destBottom = 0; surfStreamCmd.bitmapData = new TS_BITMAP_DATA_EX(); surfStreamCmd.bitmapData.bpp = 32; surfStreamCmd.bitmapData.reserved1 = 0; surfStreamCmd.bitmapData.reserved2 = 0; surfStreamCmd.bitmapData.codecID = remoteFXCodecID; surfStreamCmd.bitmapData.width = 0; surfStreamCmd.bitmapData.height = 0; return surfStreamCmd; } private 
TS_SURFCMD_STREAM_SURF_BITS Create_TS_SURFCMD_STREAM_SURF_BITS(byte codecId) { TS_SURFCMD_STREAM_SURF_BITS surfStreamCmd = new TS_SURFCMD_STREAM_SURF_BITS(); surfStreamCmd.cmdType = cmdType_Values.CMDTYPE_STREAM_SURFACE_BITS; surfStreamCmd.destLeft = 0; surfStreamCmd.destTop = 0; surfStreamCmd.destRight = 0; surfStreamCmd.destBottom = 0; surfStreamCmd.bitmapData = new TS_BITMAP_DATA_EX(); surfStreamCmd.bitmapData.bpp = 32; surfStreamCmd.bitmapData.reserved1 = 0; surfStreamCmd.bitmapData.reserved2 = 0; surfStreamCmd.bitmapData.codecID = codecId; surfStreamCmd.bitmapData.width = 0; surfStreamCmd.bitmapData.height = 0; return surfStreamCmd; } private void LogServerADMInfo() { Site.Log.Add(LogEntryKind.Debug, "Sending encoded bitmap to client with the settings: OperationalMode = {0}, EntropyAlgorithm = {1}, FrameId = {2}", this.admOperationMode, this.admEntropyAlgorithm, this.admFrameIndex); } #endregion } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;
using System.IO;
using System.Xml.Schema;
using System.Xml.XPath;

namespace System.Xml.Tests
{
    //[TestCase(Name = "TC_SchemaSet_Add_URL", Desc = "")]
    // Tests for XmlSchemaSet.Add(string targetNamespace, string schemaUri):
    // null/invalid arguments, duplicate adds, chameleon schemas, and namespace mismatches.
    public class TC_SchemaSet_Add_URL : TC_SchemaSetBase
    {
        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v1 - ns = null, URL = null", Priority = 0)]
        public void v1()
        {
            // Both arguments null must throw ArgumentNullException.
            try
            {
                XmlSchemaSet sc = new XmlSchemaSet();
                sc.Add((String)null, (String)null);
            }
            catch (ArgumentNullException)
            {
                // GLOBALIZATION
                return;
            }
            Assert.True(false);
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v2 - ns = null, URL = valid", Priority = 0)]
        public void v2()
        {
            // Null namespace with a valid URL: the schema's own targetNamespace is used.
            XmlSchemaSet sc = new XmlSchemaSet();
            XmlSchema Schema = sc.Add((String)null, TestData._FileXSD1);
            Assert.Equal(Schema != null, true);
            return;
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v3 - ns = valid, URL = valid")]
        public void v3()
        {
            // Matching namespace and URL: Add returns the loaded schema.
            XmlSchemaSet sc = new XmlSchemaSet();
            XmlSchema Schema = sc.Add("xsdauthor", TestData._XsdAuthor);
            Assert.Equal(Schema != null, true);
            return;
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v4 - ns = valid, URL = invalid")]
        public void v4()
        {
            // Unreachable URL must throw (exact exception type varies by resolver).
            try
            {
                XmlSchemaSet sc = new XmlSchemaSet();
                sc.Add("xsdauthor", "http://Bla");
            }
            catch (Exception)
            {
                // GLOBALIZATION
                return;
            }
            Assert.True(false);
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v5 - ns = unmatching, URL = valid")]
        public void v5()
        {
            // Supplied namespace does not match the schema's targetNamespace: XmlSchemaException.
            try
            {
                XmlSchemaSet sc = new XmlSchemaSet();
                sc.Add("", TestData._FileXSD1);
            }
            catch (XmlSchemaException)
            {
                // GLOBALIZATION
                return;
            }
            Assert.True(false);
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v6 - adding same chameleon for diff NS")]
        public void v6()
        {
            // A chameleon (no targetNamespace) schema added under two different namespaces
            // produces two distinct schema objects in the set.
            XmlSchemaSet sc = new XmlSchemaSet();
            XmlSchema Schema1 = sc.Add("xsdauthor1", TestData._XsdNoNs);
            XmlSchema Schema2 = sc.Add("xsdauthor2", TestData._XsdNoNs);

            Assert.Equal(sc.Count, 2);
            Assert.Equal(Schema1 != null, true);

            // Each Add under a different namespace yields a separate schema object.
            Assert.Equal((Schema2 == Schema1), false);

            return;
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v7 - adding same URL for null ns")]
        public void v7()
        {
            XmlSchemaSet sc = new XmlSchemaSet();
            XmlSchema Schema1 = sc.Add(null, TestData._XsdAuthor);
            XmlSchema Schema2 = sc.Add(null, TestData._XsdAuthor);

            Assert.Equal(sc.Count, 1);
            Assert.Equal(Schema1 != null, true);

            // the second call to add should be ignored with Add returning the first obj
            Assert.Equal(Schema2, Schema1);

            return;
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v8 - adding a schema with NS and one without, to a NS.")]
        public void v8()
        {
            // A schema with a targetNamespace and a chameleon schema added to the same
            // namespace are both kept as distinct entries.
            XmlSchemaSet sc = new XmlSchemaSet();
            XmlSchema Schema1 = sc.Add("xsdauthor", TestData._XsdAuthor);
            XmlSchema Schema2 = sc.Add("xsdauthor", TestData._XsdNoNs);

            Assert.Equal(sc.Count, 2);
            Assert.Equal(Schema1 != null, true);
            Assert.Equal((Schema2 == Schema1), false);

            return;
        }

        //-----------------------------------------------------------------------------------
        [Fact]
        //[Variation(Desc = "v9 - adding URL to XSD schema")]
        public void v9()
        {
            // Adding an XDR (non-XSD) document must fail with XmlSchemaException
            // and leave the set empty.
            XmlSchemaSet sc = new XmlSchemaSet();
            try
            {
                sc.Add(null, Path.Combine(TestData._Root, "schema1.xdr"));
            }
            catch (XmlSchemaException)
            {
                Assert.Equal(sc.Count, 0);
                // GLOBALIZATION
                return;
            }
            Assert.True(false);
        }
//----------------------------------------------------------------------------------- [Fact] //[Variation(Desc = "v10 - Adding schema with top level element collision")] public void v10() { XmlSchemaSet sc = new XmlSchemaSet(); XmlSchema Schema1 = sc.Add("xsdauthor", TestData._XsdAuthor); XmlSchema Schema2 = sc.Add("xsdauthor", TestData._XsdAuthorDup); // schemas should be successfully added Assert.Equal(sc.Count, 2); try { sc.Compile(); } catch (XmlSchemaException) { Assert.Equal(sc.Count, 2); // GLOBALIZATION return; } Assert.True(false); } //----------------------------------------------------------------------------------- [Fact] //[Variation(Desc = "v11 - Adding schema with top level element collision to Compiled Schemaset")] public void v11() { XmlSchemaSet sc = new XmlSchemaSet(); XmlSchema Schema1 = sc.Add("xsdauthor", TestData._XsdAuthor); sc.Compile(); XmlSchema Schema2 = sc.Add("xsdauthor", TestData._XsdAuthorDup); // schemas should be successfully added Assert.Equal(sc.Count, 2); try { sc.Compile(); } catch (XmlSchemaException) { Assert.Equal(sc.Count, 2); // GLOBALIZATION return; } Assert.True(false); } //----------------------------------------------------------------------------------- [Fact] //[Variation(Desc = "v12 - Adding schema with no tagetNS with element already existing in NS")] public void v12() { XmlSchemaSet sc = new XmlSchemaSet(); XmlSchema Schema1 = sc.Add("xsdauthor", TestData._XsdAuthor); XmlSchema Schema2 = sc.Add("xsdauthor", TestData._XsdAuthorNoNs); // schemas should be successfully added try { sc.Compile(); } catch (XmlSchemaException) { // GLOBALIZATION return; } Assert.True(false); } //----------------------------------------------------------------------------------- [Fact] //[Variation(Desc = "435368 - schema validation error")] public void v13() { string xsdPath = Path.Combine(TestData._Root, @"bug435368.xsd"); string xmlPath = Path.Combine(TestData._Root, @"bug435368.xml"); XmlSchemaSet xs = new XmlSchemaSet(); xs.Add(null, 
xsdPath); XmlDocument xd = new XmlDocument(); xd.Load(xmlPath); xd.Schemas = xs; // Modify a, partially validate XPathNavigator xpn = xd.CreateNavigator().SelectSingleNode("/root/a"); xpn.SetValue("b"); xd.Validate(null, ((IHasXmlNode)xpn).GetNode()); // Modify sg1, partially validate- validate will throw exception xpn = xd.CreateNavigator().SelectSingleNode("/root/sg1"); xpn.SetValue("a"); xd.Validate(null, ((IHasXmlNode)xpn).GetNode()); return; } //====================TFS_298991 XMLSchemaSet.Compile of an XSD containing with a large number of elements results in a System.StackOverflow error private static void GenerateSequenceXsdFile(int size, string xsdFileName) { // generate the xsd file, the file is some thing like this //------------------------------------------------------- //<?xml version='1.0'?> //<xsd:schema xmlns:xsd='http://www.w3.org/2001/XMLSchema' > //<xsd:element name='field0' /> //<xsd:element name='field1' /> //<xsd:element name='field2' /> //<xsd:element name='myFields'> // <xsd:complexType> // <xsd:sequence> // <xsd:element ref='field0' minOccurs='0' /> // <xsd:element ref='field1' minOccurs='0' /> // <xsd:element ref='field2' minOccurs='0' /> // </xsd:sequence> // </xsd:complexType> //</xsd:element> //</xsd:schema> //------------------------------------------------------ StreamWriter sw = new StreamWriter(new FileStream(xsdFileName, FileMode.Create, FileAccess.Write)); string head = @"<?xml version='1.0'?> <xsd:schema xmlns:xsd='http://www.w3.org/2001/XMLSchema' >"; string body = @" <xsd:element name='myFields'> <xsd:complexType> <xsd:sequence>"; string end = @" </xsd:sequence> </xsd:complexType> </xsd:element> </xsd:schema>"; sw.WriteLine(head); for (int ii = 0; ii < size; ++ii) sw.WriteLine(" <xsd:element name='field{0}' />", ii); sw.WriteLine(body); for (int ii = 0; ii < size; ++ii) sw.WriteLine(" <xsd:element ref='field{0}' minOccurs='0' />", ii); sw.WriteLine(end); sw.Dispose(); } private static void GenerateChoiceXsdFile(int size, string 
xsdFileName) { // generate the xsd file, the file is some thing like this //------------------------------------------------------- //<?xml version='1.0'?> //<xsd:schema xmlns:xsd='http://www.w3.org/2001/XMLSchema' > //<xsd:element name='field0' /> //<xsd:element name='field1' /> //<xsd:element name='field2' /> //<xsd:element name='myFields'> // <xsd:complexType> // <xsd:choice> // <xsd:element ref='field0' minOccurs='0' /> // <xsd:element ref='field1' minOccurs='0' /> // <xsd:element ref='field2' minOccurs='0' /> // </xsd:choice> // </xsd:complexType> //</xsd:element> //</xsd:schema> //------------------------------------------------------ StreamWriter sw = new StreamWriter(new FileStream(xsdFileName, FileMode.Create, FileAccess.Write)); string head = @"<?xml version='1.0'?> <xsd:schema xmlns:xsd='http://www.w3.org/2001/XMLSchema' >"; string body = @" <xsd:element name='myFields'> <xsd:complexType> <xsd:choice>"; string end = @" </xsd:choice> </xsd:complexType> </xsd:element> </xsd:schema>"; sw.WriteLine(head); for (int ii = 0; ii < size; ++ii) sw.WriteLine(" <xsd:element name='field{0}' />", ii); sw.WriteLine(body); for (int ii = 0; ii < size; ++ii) sw.WriteLine(" <xsd:element ref='field{0}' minOccurs='0' />", ii); sw.WriteLine(end); sw.Dispose(); } public void verifyXsd(string file) { try { XmlSchemaSet ss = new XmlSchemaSet(); ss.Add("", file); ss.Compile(); // if throws StackOfFlowException will cause test failure } catch (OutOfMemoryException) { // throw OutOfMemoryException is ok since it is catchable. 
} } [OuterLoop] [Theory] [InlineData(1000, "1000s.xsd")] //[Variation(Desc = "Bug 298991 XMLSchemaSet.Compile cause StackOverflow - Sequence, 5000", Params = new object[] { 5000, "5000s.xsd" })] //[Variation(Desc = "Bug 298991 XMLSchemaSet.Compile cause StackOverflow - Sequence, 10000", Params = new object[] { 10000, "10000s.xsd" })] public void bug298991Sequence(int size, string xsdFileName) { GenerateSequenceXsdFile(size, xsdFileName); verifyXsd(xsdFileName); return; } [OuterLoop] [Theory] [InlineData(5000, "5000c.xsd")] //[Variation(Desc = "Bug 298991 XMLSchemaSet.Compile cause StackOverflow - Choice, 5000", Params = new object[] { 5000, "5000c.xsd" })] public void bug298991Choice(int size, string xsdFileName) { GenerateChoiceXsdFile(size, xsdFileName); verifyXsd(xsdFileName); return; } } }
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.

using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.IO;
using System.Linq;
using System.Reflection;
using Microsoft.PythonTools.Interpreter;
using MSBuild = Microsoft.Build.Evaluation;

namespace Microsoft.PythonTools.Uwp.Interpreter {
    /// <summary>
    /// MEF-exported provider that discovers Python UWP/IoT interpreter factories
    /// from .pyproj project files (on disk or in-memory). Projects are supplied by
    /// the imported <see cref="IProjectContextProvider"/>s; a factory is created per
    /// project that declares a matching "InterpreterId" MSBuild property.
    /// </summary>
    [InterpreterFactoryId(InterpreterFactoryProviderId)]
    [Export(typeof(IPythonInterpreterFactoryProvider))]
    [PartCreationPolicy(CreationPolicy.Shared)]
    class PythonUwpInterpreterFactoryProvider : IPythonInterpreterFactoryProvider {
        // Lazily-resolved MEF imports; resolving .Value may throw CompositionException.
        private readonly Lazy<IProjectContextProvider>[] _contextProviders;
        private readonly Lazy<IInterpreterLog>[] _loggers;
        // Set once inside EnsureInitialized(); guarded by lock (this).
        private bool _initialized;
        // Map of project full path -> per-project discovery state; guarded by lock (_projects).
        private readonly Dictionary<string, ProjectInfo> _projects = new Dictionary<string, ProjectInfo>();

        // Factory provider id; also the first segment of the "InterpreterId" property.
        public const string InterpreterFactoryProviderId = "PythonUwpIoT";

        /// <summary>Raised whenever the set of discovered factories changes.</summary>
        public event EventHandler InterpreterFactoriesChanged;

        [ImportingConstructor]
        public PythonUwpInterpreterFactoryProvider(
            [ImportMany]Lazy<IProjectContextProvider>[] contextProviders,
            [ImportMany]Lazy<IInterpreterLog>[] loggers) {
            _contextProviders = contextProviders;
            _loggers = loggers;
        }

        private void OnInterpreterFactoriesChanged() {
            InterpreterFactoriesChanged?.Invoke(this, EventArgs.Empty);
        }

        // Formats and forwards to the message-based overload.
        private void Log(string format, params object[] args) {
            Log(String.Format(format, args));
        }

        // Writes the message to every composable logger, skipping any whose lazy
        // resolution fails with a CompositionException.
        private void Log(string msg) {
            foreach (var logger in _loggers) {
                IInterpreterLog loggerValue;
                try {
                    loggerValue = logger.Value;
                } catch (CompositionException) {
                    continue;
                }
                loggerValue.Log(msg);
            }
        }

        // One-time lazy initialization: hooks each context provider's change events
        // and performs an initial discovery pass.
        // NOTE(review): locks on 'this' rather than a private gate object — an
        // external caller locking this instance could deadlock; confirm before changing.
        private void EnsureInitialized() {
            lock (this) {
                if (!_initialized) {
                    foreach (var provider in _contextProviders) {
                        IProjectContextProvider providerValue;
                        try {
                            providerValue = provider.Value;
                        } catch (CompositionException ce) {
                            Log("Failed to get IProjectContextProvider {0}", ce);
                            continue;
                        }
                        // 'ProjectsChanaged' [sic] is the interface's event name.
                        providerValue.ProjectsChanaged += ProviderValue_ProjectsChanaged;
                        providerValue.ProjectChanged += ProviderValue_ProjectChanged;
                        // Initial discovery for this provider's current projects.
                        ProviderValue_ProjectsChanaged(providerValue, EventArgs.Empty);
                    }
                    _initialized = true;
                }
            }
        }

        // A single project changed: extract its full path (the event carries either a
        // filename string or an MSBuild.Project) and rediscover its interpreters.
        private void ProviderValue_ProjectChanged(object sender, ProjectChangedEventArgs e) {
            string filename = e.Project as string;
            if (filename == null) {
                var proj = e.Project as MSBuild.Project;
                if (proj != null) {
                    filename = proj.FullPath;
                }
            }
            RediscoverInterpreters(filename);
        }

        /// <summary>
        /// Re-runs interpreter discovery for the project at the given full path and
        /// raises <see cref="InterpreterFactoriesChanged"/> if anything changed.
        /// Unknown or null paths are ignored.
        /// </summary>
        public void RediscoverInterpreters(string projectFullPath) {
            ProjectInfo projInfo;
            if (projectFullPath != null && _projects.TryGetValue(projectFullPath, out projInfo)) {
                if (projInfo.DiscoverInterpreters()) {
                    OnInterpreterFactoriesChanged();
                }
            }
        }

        // The provider's project set changed: sync _projects with the provider's
        // current list, running discovery on each, then drop projects that no longer
        // exist for this provider.
        private void ProviderValue_ProjectsChanaged(object sender, EventArgs e) {
            var contextProvider = (IProjectContextProvider)sender;
            if (contextProvider != null) {
                var hasInterpreterFactoriesChanged = false;
                var seen = new HashSet<string>();
                // NOTE(review): 'removed' is never used below; appears to be dead.
                var removed = new HashSet<ProjectInfo>();
                lock (_projects) {
                    foreach (var context in contextProvider.Projects) {
                        var projectInfo = ProjectInfo.CreateFromProjectContext(context, contextProvider);
                        if (projectInfo != null && projectInfo.FullPath != null) {
                            seen.Add(projectInfo.FullPath);
                            if (!_projects.ContainsKey(projectInfo.FullPath)) {
                                _projects[projectInfo.FullPath] = projectInfo;
                            } else {
                                // reuse the old existing project info
                                projectInfo.Dispose();
                                projectInfo = _projects[projectInfo.FullPath];
                            }
                            hasInterpreterFactoriesChanged |= projectInfo.DiscoverInterpreters();
                        }
                    }

                    // Then remove any existing projects that are no longer there
                    var toRemove = _projects
                        .Where(x => x.Value.ContextProvider == contextProvider && !seen.Contains(x.Key))
                        .Select(x => x.Key)
                        .ToArray();

                    foreach (var proj in toRemove) {
                        var projInfo = _projects[proj];
                        _projects.Remove(proj);
                        // Removing a project with a live factory changes the factory set.
                        if (projInfo.Factory != null) {
                            hasInterpreterFactoriesChanged = true;
                        }
                        projInfo.Dispose();
                    }
                }

                if (hasInterpreterFactoriesChanged) {
                    OnInterpreterFactoriesChanged();
                }
            }
        }

        /// <summary>
        /// Represents an MSBuild project file. The file could have either been read from
        /// disk or it could be a project file running inside of the IDE which is being
        /// used for a Python project node.
        /// </summary>
        private class MSBuildProjectInfo : ProjectInfo {
            public readonly MSBuild.Project _project;

            public MSBuildProjectInfo(MSBuild.Project project, IProjectContextProvider contextProvider)
                : base(contextProvider) {
                _project = project;
            }

            public override object Context {
                get {
                    return _project;
                }
            }

            public override string FullPath {
                get {
                    return _project.FullPath;
                }
            }

            public override string GetPropertyValue(string name) {
                return _project.GetPropertyValue(name);
            }
        }

        /// <summary>
        /// Gets information about an "in-memory" project. Supports reading interpreters from
        /// a project when we're out of proc that haven't yet been committed to disk.
        /// </summary>
        private class InMemoryProjectInfo : ProjectInfo {
            private readonly InMemoryProject _project;

            public InMemoryProjectInfo(InMemoryProject project, IProjectContextProvider contextProvider)
                : base(contextProvider) {
                _project = project;
            }

            public override object Context {
                get {
                    return _project;
                }
            }

            public override string FullPath {
                get {
                    return _project.FullPath;
                }
            }

            public override string GetPropertyValue(string name) {
                object res;
                // Properties may hold non-string values; only strings are returned.
                if (_project.Properties.TryGetValue(name, out res) && res is string) {
                    return (string)res;
                }
                return String.Empty;
            }
        }

        /// <summary>
        /// Tracks data about a project. Specific subclasses deal with how the underlying project
        /// is being stored.
        /// </summary>
        private abstract class ProjectInfo : IDisposable {
            public readonly IProjectContextProvider ContextProvider;
            private IPythonInterpreterFactory _factory;
            // Once MSBuild assemblies fail to load, stop trying for all projects.
            private static bool _skipMSBuild;

            /// <summary>
            /// The factory discovered for this project, if any. Assigning a new value
            /// disposes the previous factory (if it is disposable).
            /// </summary>
            public IPythonInterpreterFactory Factory {
                get {
                    return _factory;
                }
                private set {
                    if (_factory != value) {
                        var disp = _factory as IDisposable;
                        if (disp != null) {
                            disp.Dispose();
                        }
                        _factory = value;
                    }
                }
            }

            // Wraps a .pyproj context (MSBuild.Project instance or a file path ending
            // in ".pyproj") in an MSBuildProjectInfo; returns null for other contexts.
            private static ProjectInfo CreateFromMSBuildProject(object context, IProjectContextProvider contextProvider) {
                var projContext = context as MSBuild.Project;
                if (projContext == null) {
                    var projectFile = context as string;
                    if (projectFile != null && projectFile.EndsWith(".pyproj", StringComparison.OrdinalIgnoreCase)) {
                        projContext = new MSBuild.Project(projectFile);
                    }
                }
                if (projContext != null) {
                    return new MSBuildProjectInfo(projContext, contextProvider);
                }
                return null;
            }

            // Factory method: tries MSBuild first (unless previously unavailable),
            // then falls back to an in-memory project; null if neither applies.
            static public ProjectInfo CreateFromProjectContext(object context, IProjectContextProvider contextProvider) {
                if (!_skipMSBuild) {
                    try {
                        var msBuild = CreateFromMSBuildProject(context, contextProvider);
                        if (msBuild != null) {
                            return msBuild;
                        }
                    } catch (FileNotFoundException) {
                        // MSBuild assemblies unavailable; don't retry on later projects.
                        _skipMSBuild = true;
                    }
                }

                var inMemory = context as InMemoryProject;
                if (inMemory != null) {
                    return new InMemoryProjectInfo(inMemory, contextProvider);
                }

                return null;
            }

            // Installs a NotFoundInterpreterFactory placeholder; returns true only if
            // this actually changed the current factory (id or version differs).
            private bool SetNotFoundInterpreterFactory(string interpreterId, Version ver) {
                var factory = Factory as NotFoundInterpreterFactory;
                if (factory != null &&
                    string.Compare(factory.Configuration.Id, interpreterId) == 0 &&
                    factory.Configuration.Version == ver) {
                    // No updates.
                    return false;
                } else {
                    Factory = new NotFoundInterpreterFactory(interpreterId, ver, InterpreterFactoryProviderId);
                    return true;
                }
            }

            // Installs a real UWP factory for the configuration; returns true only if
            // the configuration differs from the current factory's.
            private bool SetPythonUwpInterpreterFactory(InterpreterConfiguration config) {
                var factory = Factory as PythonUwpInterpreterFactory;
                if (factory != null && factory.Configuration.Equals(config)) {
                    // No updates.
                    return false;
                } else {
                    Factory = new PythonUwpInterpreterFactory(config);
                    return true;
                }
            }

            /// <summary>
            /// Call to find interpreters in the associated project.
            /// Parses the project's "InterpreterId" property, validates the SDK and the
            /// per-project interpreter files, and updates <see cref="Factory"/>.
            /// Returns true if the factory changed.
            /// </summary>
            public bool DiscoverInterpreters() {
                // NOTE(review): locks on 'this' — same caveat as EnsureInitialized.
                lock (this) {
                    // <InterpreterId>PythonUWP|3.5|$(MSBuildProjectFullPath)</InterpreterId>
                    var projectHome = Path.GetDirectoryName(FullPath);
                    var interpreterId = GetPropertyValue("InterpreterId");
                    if (string.IsNullOrEmpty(interpreterId)) {
                        return false;
                    }

                    var id = interpreterId.Split(new[] { '|' }, 3);
                    if (id.Length != 3) {
                        return false;
                    }

                    // Compare the tag name and the project full path
                    if (string.Compare(id[0], InterpreterFactoryProviderId) != 0) {
                        return false;
                    }

                    // Get the Python version
                    Version ver;
                    if (!Version.TryParse(id[1], out ver)) {
                        return false;
                    }

                    // Msbuild will sometimes return a wrong "InterpreterId". It will return the path from temp directory during project creation.
                    interpreterId = string.Join("|", InterpreterFactoryProviderId, ver.ToString(), FullPath);

                    if (InstalledPythonUwpInterpreter.GetDirectory(ver) == null) {
                        // We don't have that version of SDK installed. Return "Not found interpreter factory".
                        return SetNotFoundInterpreterFactory(interpreterId, ver);
                    }

                    var interpreterPath = Path.GetFullPath(Path.Combine(projectHome, PythonUwpConstants.InterpreterRelativePath));
                    var prefixPath = new DirectoryInfo(interpreterPath);
                    if (!prefixPath.Exists) {
                        // Per-project interpreter doesn't. Return "Not found interpreter factory".
                        return SetNotFoundInterpreterFactory(interpreterId, ver);
                    }

                    var targetsFile = prefixPath.GetFiles(PythonUwpConstants.InterpreterFile).FirstOrDefault();
                    var libPath = prefixPath.GetDirectories(PythonUwpConstants.InterpreterLibPath).FirstOrDefault();
                    if (targetsFile == null || libPath == null || !targetsFile.Exists || !libPath.Exists) {
                        return SetNotFoundInterpreterFactory(interpreterId, ver);
                    }

                    var projectName = Path.GetFileNameWithoutExtension(FullPath);
                    // NOTE(review): descriptionSuffix already includes parentheses and the
                    // format below adds another pair, yielding "Id ((name))" — confirm intended.
                    var descriptionSuffix = string.Format("({0})", projectName);

                    return SetPythonUwpInterpreterFactory(new InterpreterConfiguration(
                        interpreterId,
                        string.Format("{0} ({1})", InterpreterFactoryProviderId, descriptionSuffix),
                        prefixPath.FullName,
                        targetsFile.FullName,
                        "",
                        null,
                        InterpreterArchitecture.Unknown,
                        ver,
                        InterpreterUIMode.CannotBeDefault | InterpreterUIMode.SupportsDatabase
                    ));
                }
            }

            protected ProjectInfo(IProjectContextProvider context) {
                ContextProvider = context;
            }

            // Subclass-specific views of the underlying project.
            public abstract object Context { get; }
            public abstract string FullPath { get; }
            public abstract string GetPropertyValue(string name);

            public void Dispose() {
                IDisposable disp = Factory as IDisposable;
                if (disp != null) {
                    disp.Dispose();
                }
            }
        }

        /// <summary>
        /// Returns a snapshot of all factories currently discovered across projects.
        /// </summary>
        public IEnumerable<IPythonInterpreterFactory> GetInterpreterFactories() {
            EnsureInitialized();
            lock (_projects) {
                // ToList() so the snapshot is safe to enumerate outside the lock.
                return _projects.Where(x => x.Value.Factory != null).Select(x => x.Value.Factory).ToList();
            }
        }

        /// <summary>Returns the configurations of all discovered factories.</summary>
        public IEnumerable<InterpreterConfiguration> GetInterpreterConfigurations() {
            EnsureInitialized();
            return GetInterpreterFactories().Select(x => x.Configuration);
        }

        /// <summary>Finds the factory with the given configuration id, or null.</summary>
        public IPythonInterpreterFactory GetInterpreterFactory(string id) {
            EnsureInitialized();
            return GetInterpreterFactories()
                .Where(x => x.Configuration.Id == id)
                .FirstOrDefault();
        }

        /// <summary>
        /// Only supports the "ProjectMoniker" property: extracts the project path from
        /// the final '|'-separated segment of the factory id; null for anything else
        /// or for invalid paths.
        /// </summary>
        public object GetProperty(string id, string propName) {
            if (propName != "ProjectMoniker") {
                return null;
            }

            var moniker = id.Substring(id.LastIndexOf('|') + 1);
            if (string.IsNullOrEmpty(moniker) || moniker.IndexOfAny(Path.GetInvalidPathChars()) >= 0) {
                return null;
            }

            return moniker;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Text.RegularExpressions; using System.Threading; namespace Apache.Geode.Client.FwkLib { using Apache.Geode.DUnitFramework; using Apache.Geode.Client.Tests; using Apache.Geode.Client; using NEWAPI = Apache.Geode.Client.Tests; [Serializable] struct HostInfo { public bool Started; public string HostType; public string HostName; public string ExtraServerArgs; public HostInfo(bool started, string hostType, string hostName, string extraServerArgs) { Started = started; HostType = hostType; HostName = hostName; ExtraServerArgs = extraServerArgs; } } public class Utils<TKey, TVal> : FwkTest<TKey, TVal> { private const char PathSep = '/'; private const int MaxWaitMillis = 1800000; private const string SetupJSName = "setupJavaServers"; private const string StartJSName = "startJavaServers"; private const string StopJSName = "stopJavaServers"; private const string KillJSName = "killJavaServers"; private const string SleepTime = "sleepTime"; private const string MinServers = "minServers"; private const string JavaServerCountKey = "ServerCount"; private const string JavaServerMapKey = 
"ServerMap"; private const string JavaServerName = "cacheserver.bat"; private const string JavaServerOtherArgs = " statistic-sampling-enabled=true" + " statistic-archive-file=statArchive.gfs mcast-port=0"; private const string JavaServerJavaArgs = " -J-Xmx1280m -J-Xms512m -J-DCacheClientProxy.MESSAGE_QUEUE_SIZE=100000"; private const string JavaServerJavaArgsUnix = " -J-Xmx2048m -J-Xms1024m -J-XX:+HeapDumpOnOutOfMemoryError " + "-J-DCacheClientProxy.MESSAGE_QUEUE_SIZE=100000"; private static string TempDir = Util.GetEnvironmentVariable("TMP"); // Constants for status private const int GFNoError = 0; private const int GFError = 1; private const int GFTimeout = 2; private static Dictionary<string, HostInfo> m_javaServerHostMap = new Dictionary<string, HostInfo>(); private static int m_numServers = 1; private static int locCount = 0; private static volatile bool m_exiting = false; private static bool m_firstRun = true; private static string m_locatorHost = null; private static string m_locatorType = null; #region Private methods private string[] ParseJavaServerArgs(string argStr, ref int numServers, out int argIndx, out string extraServerArgs) { argIndx = 0; extraServerArgs = string.Empty; if (argStr != null && argStr.Length > 0) { string[] args = argStr.Split(' '); while (args.Length > argIndx && args[argIndx][0] == '-') { FwkAssert(args.Length > (argIndx + 1), "JavaServer() value not provided after option '{0}'.", args[argIndx]); string argName = args[argIndx]; string argValue = args[argIndx + 1]; switch (argName) { case "-t": // Ignore the tagname; we now use the hostGroup name as the tag break; case "-N": break; case "-M": break; case "-X": break; case "-c": numServers = int.Parse(argValue); break; case "-e": // @TODO: this is C++ specific environment variables -- ignored for now break; case "-p": extraServerArgs += (" " + argValue.Replace("\\", ""). 
Replace("\"", "").Replace("'", "")); break; default: FwkException("JavaServer() Unknown option '{0}'", argName); break; } argIndx += 2; } extraServerArgs += " "; return args; } return new string[0]; } private string GetHostGroup() { string hostGroup; try { hostGroup = Util.BBGet(Util.ClientId, FwkReadData.HostGroupKey) as string; } catch { hostGroup = null; } return (hostGroup == null ? string.Empty : hostGroup); } private static HostInfo GetHostInfo(string serverId) { HostInfo hostInfo = new HostInfo(); lock (((ICollection)m_javaServerHostMap).SyncRoot) { if (m_javaServerHostMap.Count == 0) { try { m_javaServerHostMap = Util.BBGet(JavaServerBB, JavaServerMapKey) as Dictionary<string, HostInfo>; } catch { } } if (m_javaServerHostMap.ContainsKey(serverId)) { hostInfo = m_javaServerHostMap[serverId]; } } return hostInfo; } private static string GetJavaStartDir(string serverId, string hostType) { return Util.GetLogBaseDir(hostType) + PathSep + "GFECS_" + serverId; } private static string GetLocatorStartDir(string locatorId, string hostType) { return Util.GetLogBaseDir(hostType) + PathSep + "GFELOC_" + locatorId; } private string GetJavaLocator() { try { return (string)Util.BBGet(string.Empty, "LOCATOR_ADDRESS"); } catch { return null; } } private string GetJavaLocatorForPoolAttri() { try { return (string)Util.BBGet(string.Empty, "LOCATOR_ADDRESS_POOL"); } catch { return null; } } private bool GetSetJavaLocator(string locatorHost, out int locatorPort) { locatorPort = 0; string locatorAddress = GetJavaLocator(); string locatorAddressForPool = GetJavaLocatorForPoolAttri(); if (locatorAddress == null || locatorAddress.Length == 0) { locatorPort = Util.Rand(31124, 54343); locatorAddress = locatorHost + '[' + locatorPort + ']'; locatorAddressForPool = locatorHost + ':' + locatorPort; Util.BBSet(string.Empty, "LOCATOR_ADDRESS", locatorAddress); Util.BBSet(string.Empty, "LOCATOR_ADDRESS_POOL", locatorAddressForPool); locCount++; return true; } if (locatorAddress != null && 
locatorAddress.Length > 0 && locCount > 0) { locatorPort = Util.Rand(31124, 54343); locatorAddress += ',' + locatorHost + '[' + locatorPort + ']'; locatorAddressForPool += ',' + locatorHost + ':' + locatorPort; Util.BBSet(string.Empty, "LOCATOR_ADDRESS", locatorAddress); Util.BBSet(string.Empty, "LOCATOR_ADDRESS_POOL", locatorAddressForPool); locCount++; return true; } return false; } private static int StartLocalGFExe(string exeName, string gfeDir, string exeArgs, out string outStr) { int status = GFNoError; Process javaProc; string javaExePath = gfeDir + PathSep + "bin" + PathSep + exeName; Util.ServerLog("Executing local Geode command {0} {1}", javaExePath, exeArgs); if (!Util.StartProcess(javaExePath, exeArgs, false, TempDir, true, false, false, out javaProc)) { outStr = null; return GFError; } StreamReader outSr = javaProc.StandardOutput; // Wait for process to start bool started = javaProc.WaitForExit(MaxWaitMillis); outStr = outSr.ReadLine(); outSr.Close(); if (!started) { try { javaProc.Kill(); } catch { } status = GFTimeout; } else if (javaProc.ExitCode != 0) { status = GFError; } return status; } private static string StartRemoteGFExe(string host, string hostType, string exeName, string exeArgs) { string gfJavaDir = Util.GetEnvironmentVariable("GFE_DIR", hostType); string gfClassPath = Util.GetEnvironmentVariable("CLASSPATH", hostType); string gfJava = Util.GetEnvironmentVariable("GF_JAVA", hostType); string javaCmd = gfJavaDir + "/bin/" + exeName + ' ' + exeArgs; Dictionary<string, string> envVars = new Dictionary<string, string>(); if (gfClassPath != null && gfClassPath.Length > 0) { envVars["CLASSPATH"] = gfClassPath; } if (gfJava != null && gfJava.Length > 0) { envVars["GF_JAVA"] = gfJava; } return Util.RunClientShellTask(Util.ClientId, host, javaCmd, envVars); } private string GetSlaveId(string serverNum) { return "slave." 
+ serverNum; } private string CreateSlaveTaskSpecification(string progName, string serverNum, string extraArgs) { extraArgs = (extraArgs == null ? string.Empty : ' ' + extraArgs); return string.Format("<task name=\"Deleg{1}\" action=\"doRunProcess\" " + "container=\"utils\" waitTime=\"25m\">{0}<data name=\"program\">" + "{1}</data>{0}" + "<data name=\"arguments\">{2}{3}</data>{0}" + "<client-set name=\"{4}\">{0}<client name=\"{5}\"/>{0}" + "</client-set>{0}</task>", Environment.NewLine, progName, serverNum, extraArgs, Util.ClientId, GetSlaveId(serverNum)); } private string GetSslProperty(string hostType, bool forServer,string startDir) { string sslCmdStr = null; //if (!File.Exists(startDir + PathSep + "geode.properties")) //{ TextWriter sw = new StreamWriter(startDir + PathSep + "geode.properties", false); String locatorAddress = GetJavaLocator(); sw.WriteLine("locators={0}", locatorAddress); ResetKey("sslEnable"); bool isSslEnable = GetBoolValue("sslEnable"); if (!isSslEnable) { sw.Close(); return string.Empty; } FwkInfo("ssl is enable"); if (!forServer) { //StreamWriter sw = new StreamWriter("geode.properties",false); sw.WriteLine("ssl-enabled=true"); sw.WriteLine("ssl-require-authentication=true"); sw.WriteLine("mcast-port=0"); sw.WriteLine("ssl-ciphers=SSL_RSA_WITH_NULL_SHA"); } sw.Close(); string keyStorePath = Util.GetFwkLogDir(hostType) + "/data/keystore/sslcommon.jks"; string trustStorePath = Util.GetFwkLogDir(hostType) + "/data/keystore/server_truststore.jks"; sslCmdStr = " -Djavax.net.ssl.keyStore=" + keyStorePath + " -Djavax.net.ssl.keyStorePassword=gemstone -Djavax.net.ssl.trustStore=" + trustStorePath + " -Djavax.net.ssl.trustStorePassword=gemstone"; //string securityParams = GetStringValue(SecurityParams); if (forServer) { sslCmdStr = " -J-Djavax.net.ssl.keyStore=" + keyStorePath + " -J-Djavax.net.ssl.keyStorePassword=gemstone -J-Djavax.net.ssl.trustStore=" + trustStorePath + " -J-Djavax.net.ssl.trustStorePassword=gemstone " + " ssl-enabled=true 
ssl-require-authentication=true ssl-ciphers=SSL_RSA_WITH_NULL_SHA"; } //} return sslCmdStr; } private string GetServerSecurityArgs(string hostType) { string securityParams = GetStringValue(SecurityParams); CredentialGenerator gen; // no security means security params is not applicable if (securityParams == null || securityParams.Length == 0 || (gen = GetCredentialGenerator()) == null) { FwkInfo("Security is DISABLED."); return string.Empty; } string logDir = Util.GetFwkLogDir(hostType); if (logDir == null || logDir.Length == 0) { logDir = Util.GetLogBaseDir(hostType); logDir = logDir.Substring(0, logDir.LastIndexOf("/")); logDir = logDir.Substring(0, logDir.LastIndexOf("/")); } string dataDir = logDir + "/data"; gen.Init(dataDir, dataDir); Properties<string,string> extraProps = new Properties<string,string>(); if (gen.SystemProperties != null) { extraProps.AddAll(gen.SystemProperties); } // For now only XML based authorization is supported AuthzCredentialGenerator authzGen = new XmlAuthzCredentialGenerator(); authzGen.Init(gen); if (authzGen.SystemProperties != null) { extraProps.AddAll(authzGen.SystemProperties); } return Utility.GetServerArgs(gen.Authenticator, authzGen.AccessControl, null, extraProps, gen.JavaProperties); } private void SetupJavaServers(string argStr) { int argIndx; string endpoints = string.Empty; m_numServers = 1; string gfeDir = Util.GetEnvironmentVariable("GFE_DIR"); FwkAssert(gfeDir != null && gfeDir.Length != 0, "SetupJavaServers() GFE_DIR is not set."); string hostGroup = GetHostGroup(); Match mt; if (argStr == "CPP") { // special string to denote that endpoints have to be obtained // from C++ framework string fwkBBPath = Util.AssemblyDir + "/../../bin/FwkBB"; string fwkBBArgs = "getInt GFE_BB EP_COUNT"; Process fwkBBProc; if (!Util.StartProcess(fwkBBPath, fwkBBArgs, false, null, true, false, false, out fwkBBProc)) { FwkException("SetupJavaServers() Cannot start C++ FwkBB [{0}].", fwkBBPath); } int numEndpoints = 
int.Parse(fwkBBProc.StandardOutput.ReadToEnd()); fwkBBProc.WaitForExit(); fwkBBProc.StandardOutput.Close(); for (int index = 1; index <= numEndpoints; index++) { fwkBBArgs = "get GFE_BB EndPoints_" + index; if (!Util.StartProcess(fwkBBPath, fwkBBArgs, false, null, true, false, false, out fwkBBProc)) { FwkException("SetupJavaServers() Cannot start C++ FwkBB [{0}].", fwkBBPath); } string endpoint = fwkBBProc.StandardOutput.ReadToEnd(); fwkBBProc.WaitForExit(); fwkBBProc.StandardOutput.Close(); if (endpoints.Length > 0) { endpoints += ',' + endpoint; } else { endpoints = endpoint; } } } else if ((mt = Regex.Match(gfeDir, "^[^:]+:[0-9]+(,[^:]+:[0-9]+)*$")) != null && mt.Length > 0) { // The GFE_DIR is for a remote server; contains an end-point list endpoints = gfeDir; } else { string extraServerArgs; string[] args = ParseJavaServerArgs(argStr, ref m_numServers, out argIndx, out extraServerArgs); Util.BBSet(JavaServerBB, FwkTest<TKey,TVal>.JavaServerEPCountKey, m_numServers); FwkAssert(args.Length == argIndx + 1, "SetupJavaServers() cache XML argument not correct"); string cacheXml = args[argIndx]; FwkAssert(cacheXml.Length > 0, "SetupJavaServers() cacheXml argument is empty."); string xmlDir = Util.GetEnvironmentVariable("GFBASE") + "/framework/xml"; if (xmlDir != null && xmlDir.Length > 0) { cacheXml = Util.NormalizePath(xmlDir + PathSep + cacheXml); } int javaServerPort = Util.RandPort(21321, 29789); List<string> targetHosts = Util.BBGet(FwkReadData.HostGroupKey, hostGroup) as List<string>; for (int serverNum = 1; serverNum <= m_numServers; serverNum++) { if (m_exiting) { return; } string serverId = hostGroup + '_' + serverNum; string startDir = GetJavaStartDir(serverId, Util.SystemType); if (!Directory.Exists(startDir)) { Directory.CreateDirectory(startDir); } string targetHost; int numHosts = (targetHosts == null ? 
0 : targetHosts.Count); int lruMemSizeMb = 700; if (numHosts == 0) { targetHost = Util.HostName; lock (((IDictionary)m_javaServerHostMap).SyncRoot) { m_javaServerHostMap[serverId] = new HostInfo(); } } else { int hostIndex; if (numHosts > 1) { hostIndex = ((serverNum - 1) % (numHosts - 1)) + 1; } else { hostIndex = 0; } targetHost = targetHosts[hostIndex]; FwkInfo("Checking the type of host {0}.", targetHost); string hostType = Util.RunClientShellTask(Util.ClientId, targetHost, "uname", null); hostType = Util.GetHostType(hostType); FwkInfo("The host {0} is: {1}", targetHost, hostType); if (hostType != "WIN") { lruMemSizeMb = 1400; } lock (((IDictionary)m_javaServerHostMap).SyncRoot) { m_javaServerHostMap[serverId] = new HostInfo(false, hostType, targetHost, extraServerArgs); } } // Create the cache.xml with correct port javaServerPort++; StreamReader cacheXmlReader = new StreamReader(cacheXml); string cacheXmlContent = cacheXmlReader.ReadToEnd(); cacheXmlReader.Close(); cacheXmlContent = cacheXmlContent.Replace("$PORT_NUM", javaServerPort.ToString()).Replace("$LRU_MEM", lruMemSizeMb.ToString()); StreamWriter cacheXmlWriter = new StreamWriter(startDir + PathSep + "cache.xml"); cacheXmlWriter.Write(cacheXmlContent); cacheXmlWriter.Close(); Util.ServerLog("SetupJavaServers() added '{0}' for host '{1}'", serverId, targetHost); FwkInfo("SetupJavaServers() added '{0}' for host '{1}'", serverId, targetHost); if (serverNum == 1) { endpoints = targetHost + ':' + javaServerPort; } else { endpoints += ',' + targetHost + ':' + javaServerPort; } Util.BBSet(JavaServerBB, FwkTest<TKey,TVal>.EndPoints + "_" + serverNum.ToString(), targetHost + ':' + javaServerPort); } int[] locatorPort = new int[2]; int locatorPort1; HostInfo locatorInfo = GetHostInfo(hostGroup + "_1"); string locatorHost = locatorInfo.HostName; string locatorType = locatorInfo.HostType; if (locatorHost == null) { locatorHost = Util.HostName; locatorType = Util.SystemType; } if (locatorType == Util.SystemType) { 
    locatorHost = Util.HostName;
  }
  //ResetKey("multiLocator");
  //bool isMultiLocator = GetBoolValue("multiLocator");
  //if (isMultiLocator)
  //{
  // Reserve two locator ports up front, then start both locators.
  for (int i = 0; i < 2; i++)
  {
    GetSetJavaLocator(locatorHost, out locatorPort1);
    locatorPort[i] = locatorPort1;
  }
  for (int i = 0; i < 2; i++)
  {
    //if (GetSetJavaLocator(locatorHost, out locatorPort))
    //{
    FwkInfo("SetupJavaServers() starting locator on host {0}", locatorHost);
    string locatorDir = GetLocatorStartDir((i + 1).ToString(), Util.SystemType);
    if (!Directory.Exists(locatorDir))
    {
      Directory.CreateDirectory(locatorDir);
    }
    ResetKey("sslEnable");
    bool isSslEnable = GetBoolValue("sslEnable");
    /* if (isSslEnable) { locatorDir = locatorDir + "/.."; } */
    string sslArg = GetSslProperty(locatorType, false, locatorDir);
    // Re-resolve the start directory for the locator host's platform, which may
    // differ from the driver's platform.
    locatorDir = GetLocatorStartDir((i + 1).ToString(), locatorType);
    FwkInfo("ssl arguments: {0} {1}", sslArg, locatorDir);
    string locatorArgs = "start-locator -port=" + locatorPort[i] + " -dir=" + locatorDir + sslArg;
    if (locatorType != Util.SystemType)
    {
      // The locator host runs a different OS than the driver: launch remotely.
      FwkInfo(StartRemoteGFExe(locatorHost, locatorType, "geode", locatorArgs));
    }
    else
    {
      string outStr;
      int status = StartLocalGFExe("geode.bat", gfeDir, locatorArgs, out outStr);
      if (status == GFTimeout)
      {
        FwkException("SetupJavaServers() Timed out while starting " +
          "locator. Please check the logs in {0}", locatorDir);
      }
      else if (status != GFNoError)
      {
        FwkException("SetupJavaServers() Error while starting " +
          "locator. Please check the logs in {0}", locatorDir);
      }
      FwkInfo(outStr);
    }
    // Register the matching end-of-test cleanup hook (SSL vs. non-SSL teardown).
    if (isSslEnable)
    {
      Util.RegisterTestCompleteDelegate(TestCompleteWithSSL);
    }
    else
    {
      Util.RegisterTestCompleteDelegate(TestCompleteWithoutSSL);
    }
    m_locatorHost = locatorHost;
    m_locatorType = locatorType;
    FwkInfo("SetupJavaServers() started locator on host {0}.", locatorHost);
    //}
  }
  //}
}
FwkInfo("SetupJavaServers() endpoints: {0}", endpoints);
// Write the endpoints for both the tag and the cumulative one
string globalEndPoints;
try
{
  globalEndPoints = Util.BBGet(string.Empty, FwkTest<TKey, TVal>.EndPointTag) as string;
}
catch (Apache.Geode.DUnitFramework.KeyNotFoundException)
{
  // No global endpoint list published yet; start a fresh one below.
  globalEndPoints = null;
}
if (globalEndPoints != null && globalEndPoints.Length > 0 && endpoints != null && endpoints.Length > 0)
{
  globalEndPoints += ',' + endpoints;
}
else
{
  globalEndPoints = endpoints;
}
Util.BBSet(JavaServerBB, FwkTest<TKey, TVal>.EndPointTag, globalEndPoints);
Util.BBSet(JavaServerBB, FwkTest<TKey, TVal>.EndPointTag + hostGroup, endpoints);
lock (((IDictionary)m_javaServerHostMap).SyncRoot)
{
  Util.BBSet(JavaServerBB, JavaServerMapKey, m_javaServerHostMap);
}
FwkInfo("SetupJavaServers() completed.");
}

/// <summary>
/// Starts the Java cacheservers prepared by SetupJavaServers().  The optional
/// task argument selects a starting server number; otherwise all servers of the
/// host group are started.  Depending on the target host a server is launched
/// locally, on a remote non-driver-OS host, or through a Windows slave task.
/// </summary>
private void StartJavaServers(string argStr)
{
  int numServers = -1;
  int argIndx;
  string extraServerArgs;
  string[] args = ParseJavaServerArgs(argStr, ref numServers, out argIndx, out extraServerArgs);
  string gfeDir = Util.GetEnvironmentVariable("GFE_DIR");
  string endpoints = string.Empty;
  string locatorAddress = GetJavaLocator();
  FwkAssert(gfeDir != null && gfeDir.Length != 0,
    "StartJavaServers() GFE_DIR is not set.");
  FwkAssert(locatorAddress != null && locatorAddress.Length > 0,
    "StartJavaServers() LOCATOR_ADDRESS is not set.");
  string hostGroup = GetHostGroup();
  // A GFE_DIR of the form "host:port[,host:port...]" denotes externally managed
  // remote servers -- nothing to start in that case.
  Match mt = Regex.Match(gfeDir, "^[^:]+:[0-9]+(,[^:]+:[0-9]+)*$");
  if (mt == null || mt.Length == 0)
  {
    int startServer = 1;
    int endServer;
    if (args.Length == (argIndx + 1))
    {
      startServer = int.Parse(args[argIndx]);
      endServer = (numServers == -1 ?
        startServer : (startServer + numServers - 1));
    }
    else
    {
      endServer = (numServers == -1 ? m_numServers : numServers);
    }
    for (int serverNum = startServer; serverNum <= endServer; serverNum++)
    {
      if (m_exiting)
      {
        break;
      }
      string serverId = hostGroup + '_' + serverNum;
      HostInfo hostInfo = GetHostInfo(serverId);
      string targetHost = hostInfo.HostName;
      string hostType = hostInfo.HostType;
      string startDir = GetJavaStartDir(serverId, hostType);
      // Per-host extra args recorded at setup time are prepended to task args.
      extraServerArgs = hostInfo.ExtraServerArgs + ' ' + extraServerArgs;
      string sslArg = GetSslProperty(hostType, true, startDir);
      FwkInfo("ssl arguments for server: {0} ", sslArg);
      string javaServerOtherArgs = GetServerSecurityArgs(hostType) + ' ' + sslArg;
      if (javaServerOtherArgs != null && javaServerOtherArgs.Length > 0)
      {
        FwkInfo("StartJavaServers() Using security server args: {0}",
          javaServerOtherArgs);
      }
      javaServerOtherArgs = JavaServerOtherArgs + ' ' + javaServerOtherArgs;
      if (targetHost == null || targetHost.Length == 0 ||
          Util.IsHostMyself(targetHost))
      {
        string cacheXml = startDir + PathSep + "cache.xml";
        string javaServerArgs = "start" + JavaServerJavaArgs +
          " -dir=" + startDir + " cache-xml-file=" + cacheXml +
          " locators=" + locatorAddress + extraServerArgs + javaServerOtherArgs;
        // Assume the GFE_DIR is for starting a local server
        FwkInfo("StartJavaServers() starting {0} with GFE_DIR {1} " +
          "and arguments: {2}", JavaServerName, gfeDir, javaServerArgs);
        string outStr;
        int status = StartLocalGFExe(JavaServerName, gfeDir, javaServerArgs,
          out outStr);
        if (status == GFTimeout)
        {
          FwkException("StartJavaServers() Timed out waiting for Java " +
            "cacheserver to start. Please check the server log in {0}.",
            startDir);
        }
        else if (status != GFNoError)
        {
          // Non-fatal (FwkSevere): log and give the server extra time before
          // continuing with the remaining servers.
          FwkSevere("StartJavaServers() Error in starting Java " +
            "cacheserver. Please check the server log in {0}.", startDir);
          Thread.Sleep(60000);
        }
        FwkInfo("StartJavaServers() output from start script: {0}", outStr);
      }
      else if (hostType != Util.SystemType)
      {
        // Remote host with a different OS: start via the remote GF launcher.
        FwkInfo("StartJavaServers() starting '{0}' on remote host " +
          "'{1}' of type {2}", serverId, targetHost, hostType);
        string javaCSArgs = "start" + JavaServerJavaArgsUnix +
          " -dir=" + startDir + " cache-xml-file=cache.xml locators=" +
          locatorAddress + extraServerArgs + javaServerOtherArgs;
        FwkInfo(StartRemoteGFExe(targetHost, hostType, "cacheserver",
          javaCSArgs));
      }
      else
      {
        // Remote Windows host: delegate to a slave client task.
        string taskSpec = CreateSlaveTaskSpecification(
          "startJavaServers", serverNum.ToString(), null);
        FwkInfo("StartJavaServers() starting '{0}' on host '{1}'",
          serverId, targetHost);
        Util.BBSet(Util.ClientId + '.' + GetSlaveId(
          serverNum.ToString()), FwkReadData.HostGroupKey, hostGroup);
        if (!Util.RunClientWinTask(Util.ClientId, targetHost, taskSpec))
        {
          FwkException("StartJavaServers() failed to start '{0}' on host '{1}'",
            serverId, targetHost);
        }
      }
      lock (((IDictionary)m_javaServerHostMap).SyncRoot)
      {
        hostInfo.Started = true;
        m_javaServerHostMap[serverId] = hostInfo;
      }
      Util.ServerLog("StartJavaServers() started '{0}' on host '{1}'",
        serverId, targetHost);
    }
  }
  // Register the matching end-of-test cleanup hook (SSL vs. non-SSL teardown).
  ResetKey("sslEnable");
  bool isSslEnabled = GetBoolValue("sslEnable");
  if (isSslEnabled)
  {
    Util.RegisterTestCompleteDelegate(TestCompleteWithSSL);
  }
  else
  {
    Util.RegisterTestCompleteDelegate(TestCompleteWithoutSSL);
  }
  FwkInfo("StartJavaServers() completed.");
}

/// <summary>
/// Stops Java cacheservers.  The optional task argument selects a starting
/// server number; otherwise all servers of the host group are stopped.  A
/// GFE_DIR that is an endpoint list ("host:port,...") means externally managed
/// servers, in which case nothing is stopped.
/// </summary>
private void StopJavaServers(string argStr)
{
  string gfeDir = Util.GetEnvironmentVariable("GFE_DIR");
  FwkAssert(gfeDir != null && gfeDir.Length != 0,
    "StopJavaServers() GFE_DIR is not set.");
  Match mt = Regex.Match(gfeDir, "^[^:]+:[0-9]+(,[^:]+:[0-9]+)*$");
  if (mt == null || mt.Length == 0)
  {
    int numServers = -1;
    int argIndx;
    string extraServerArgs;
    string[] args = ParseJavaServerArgs(argStr, ref numServers,
      out argIndx, out extraServerArgs);
    string hostGroup = GetHostGroup();
    string javaServerPath = gfeDir +
      PathSep + "bin" + PathSep + JavaServerName;
    // Assume the GFE_DIR is for stopping a local server
    int startServer = 1;
    int endServer;
    if (args.Length == (argIndx + 1))
    {
      startServer = int.Parse(args[argIndx]);
      endServer = (numServers == -1 ?
        startServer : (startServer + numServers - 1));
    }
    else
    {
      endServer = (numServers == -1 ? m_numServers : numServers);
    }
    for (int serverNum = startServer; serverNum <= endServer; serverNum++)
    {
      if (m_exiting)
      {
        break;
      }
      string serverId = hostGroup + '_' + serverNum;
      HostInfo hostInfo = GetHostInfo(serverId);
      string targetHost = hostInfo.HostName;
      string hostType = hostInfo.HostType;
      string startDir = GetJavaStartDir(serverId, hostType);
      string javaServerArgs = "stop -dir=" + startDir;
      if (targetHost == null || targetHost.Length == 0 ||
          Util.IsHostMyself(targetHost))
      {
        FwkInfo("StopJavaServers() stopping {0} with GFE_DIR {1} " +
          "and arguments: {2}", JavaServerName, gfeDir, javaServerArgs);
        string outStr;
        int status = StartLocalGFExe(JavaServerName, gfeDir, javaServerArgs,
          out outStr);
        if (status != GFNoError)
        {
          // Graceful stop failed: log (or wait a while on other errors), then
          // fall back to a hard SIGKILL of the local server process.
          if (status == GFTimeout)
          {
            FwkSevere("StopJavaServers() Timed out waiting for Java " +
              "cacheserver to stop. Please check the server log in {0}.",
              startDir);
          }
          else
          {
            Thread.Sleep(20000);
          }
          KillLocalJavaServer(serverId, "-9");
        }
      }
      else if (hostType != Util.SystemType)
      {
        // Remote host with a different OS: stop via the remote GF launcher.
        FwkInfo("StopJavaServers() stopping '{0}' on remote host " +
          "'{1}' of type {2}", serverId, targetHost, hostType);
        FwkInfo(StartRemoteGFExe(targetHost, hostType, "cacheserver",
          javaServerArgs));
      }
      else
      {
        // Remote Windows host: delegate to a slave client task.
        string taskSpec = CreateSlaveTaskSpecification(
          "stopJavaServers", serverNum.ToString(), null);
        FwkInfo("StopJavaServers() stopping '{0}' on host '{1}'",
          serverId, targetHost);
        if (!Util.RunClientWinTask(Util.ClientId, targetHost, taskSpec))
        {
          FwkSevere("StopJavaServers() failed to stop '{0}' on host '{1}'",
            serverId, targetHost);
        }
        FwkInfo("StopJavaServers() stopped '{0}' on host '{1}'",
          serverId, targetHost);
      }
      lock (((ICollection)m_javaServerHostMap).SyncRoot)
      {
        hostInfo.Started = false;
        m_javaServerHostMap[serverId] = hostInfo;
      }
      Util.ServerLog("StopJavaServers() stopped '{0}' on host '{1}'",
        serverId, targetHost);
    }
  }
  FwkInfo("StopJavaServers() completed.");
}

/// <summary>
/// Kills Java cacheservers with a Unix signal (default SIGTERM, "15").  The
/// optional task arguments are a starting server number and a signal number.
/// As with Stop/Start, an endpoint-list GFE_DIR means nothing local to kill.
/// </summary>
private void KillJavaServers(string argStr)
{
  string gfeDir = Util.GetEnvironmentVariable("GFE_DIR");
  FwkAssert(gfeDir != null && gfeDir.Length != 0,
    "KillJavaServers() GFE_DIR is not set.");
  Match mt = Regex.Match(gfeDir, "^[^:]+:[0-9]+(,[^:]+:[0-9]+)*$");
  if (mt == null || mt.Length == 0)
  {
    int numServers = -1;
    int argIndx;
    string extraServerArgs;
    string[] args = ParseJavaServerArgs(argStr, ref numServers,
      out argIndx, out extraServerArgs);
    string hostGroup = GetHostGroup();
    string javaServerPath = gfeDir + PathSep + "bin" + PathSep + JavaServerName;
    // Assume the GFE_DIR is for stopping a local server
    int startServer = 1;
    int endServer;
    string signal = "15";
    if (args.Length >= (argIndx + 1))
    {
      startServer = int.Parse(args[argIndx++]);
      endServer = (numServers == -1 ?
        startServer : (startServer + numServers - 1));
      if (args.Length >= (argIndx + 1))
      {
        signal = args[argIndx];
      }
    }
    else
    {
      endServer = (numServers == -1 ?
        m_numServers : numServers);
    }
    for (int serverNum = startServer; serverNum <= endServer; serverNum++)
    {
      if (m_exiting)
      {
        break;
      }
      string serverId = hostGroup + '_' + serverNum;
      HostInfo hostInfo = GetHostInfo(serverId);
      string targetHost = hostInfo.HostName;
      string hostType = hostInfo.HostType;
      if (targetHost == null || targetHost.Length == 0 ||
          Util.IsHostMyself(targetHost))
      {
        FwkInfo("KillJavaServers() killing '{0}' on localhost", serverId);
        KillLocalJavaServer(serverId, '-' + signal);
      }
      else if (hostType != Util.SystemType)
      {
        // Remote host with a different OS: run the kill helper script there.
        FwkInfo("KillJavaServers() killing '{0}' on remote host " +
          "'{1}' of type {2}", serverId, targetHost, hostType);
        string startDir = GetJavaStartDir(serverId, hostType);
        string killCmd = '"' + Util.GetFwkLogDir(hostType) +
          "/data/killJavaServer.sh\" " + signal + " \"" + startDir + '"';
        FwkInfo(Util.RunClientShellTask(Util.ClientId, targetHost,
          killCmd, null));
      }
      else
      {
        // Remote Windows host: delegate to a slave client task.
        string taskSpec = CreateSlaveTaskSpecification(
          "killJavaServers", serverNum.ToString(), signal);
        FwkInfo("KillJavaServers() killing '{0}' on host '{1}'",
          serverId, targetHost);
        if (!Util.RunClientWinTask(Util.ClientId, targetHost, taskSpec))
        {
          FwkSevere("KillJavaServers() failed to kill '{0}' on host '{1}'",
            serverId, targetHost);
        }
      }
      lock (((ICollection)m_javaServerHostMap).SyncRoot)
      {
        hostInfo.Started = false;
        m_javaServerHostMap[serverId] = hostInfo;
      }
      Util.ServerLog("KillJavaServers() killed '{0}' on host '{1}'",
        serverId, targetHost);
    }
  }
  FwkInfo("KillJavaServers() completed.");
}

/// <summary>
/// Scans a GemFire log file for a "Process ID: NNN" line and returns the PID
/// as a string, or null if none is found.  For locator logs every match is
/// consumed (the last one wins); for server logs the first match is returned.
/// </summary>
private static string GetGFJavaPID(string javaLog)
{
  string javaPID = null;
  bool islocator = javaLog.Contains("locator.log");
  // FileShare.ReadWrite lets us read the log while the process still writes it.
  using (FileStream fs = new FileStream(javaLog, FileMode.Open,
    FileAccess.Read, FileShare.ReadWrite))
  {
    StreamReader sr = new StreamReader(fs);
    Regex pidRE = new Regex(@"Process ID: [\s]*(?<PID>[^\s]*)",
      RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.Multiline);
    while (!sr.EndOfStream)
    {
      Match mt = pidRE.Match(sr.ReadLine());
      if (mt != null && mt.Length > 0)
      {
        javaPID = mt.Groups["PID"].Value;
        // Servers: first PID line is the one.  Locators: keep scanning so a
        // restarted locator's most recent PID is returned.
        if (!islocator)
          break;
      }
    }
    sr.Close();
    fs.Close();
  }
  return javaPID;
}

/// <summary>
/// Returns the PID of the given server's Java cacheserver as recorded in its
/// cacheserver.log, or null if not found.
/// </summary>
private static string GetJavaServerPID(string serverId)
{
  string startDir = GetJavaStartDir(serverId, Util.SystemType);
  string javaLog = startDir + "/cacheserver.log";
  return GetGFJavaPID(javaLog);
}

/// <summary>
/// Kills the process with the given PID via Process.Kill.  Returns false on
/// any failure (bad PID string, process already gone, access denied).
/// The signal argument is ignored by this implementation (see the disabled
/// /bin/kill variant below).
/// </summary>
private static bool KillLocalGFJava(string javaPID, string signal)
{
  try
  {
    Process.GetProcessById(int.Parse(javaPID)).Kill();
    return true;
  }
  catch (Exception excp)
  {
    LogException("KillLocalGFJava: {0}: {1}", excp.GetType().Name,
      excp.Message);
    return false;
  }
}

/// <summary>
/// Disabled alternative to KillLocalGFJava that shells out to /bin/kill with
/// an explicit signal, then polls "kill -0" until the process is gone.
/// Kept for reference; not called.
/// </summary>
private static bool KillLocalGFJava_disabled(string javaPID, string signal)
{
  bool success = false;
  if (javaPID != null)
  {
    Process javaProc;
    if (!Util.StartProcess("/bin/kill", "-f " + signal + ' ' + javaPID,
      false, TempDir, false, false, false, out javaProc))
    {
      LogException("KillLocalGFJava(): unable to run 'kill'");
    }
    // Wait for java process to stop
    bool stopped = javaProc.WaitForExit(MaxWaitMillis);
    if (!stopped)
    {
      try
      {
        javaProc.Kill();
      }
      catch
      {
      }
      LogSevere("KillLocalGFJava() Could not execute " +
        "kill successfully.");
    }
    // Poll with "kill -0" until the target PID no longer exists.
    int numTries = 30;
    while (numTries-- > 0)
    {
      if (!Util.StartProcess("/bin/kill", "-f -0 " + javaPID,
        false, TempDir, false, false, false, out javaProc))
      {
        LogException("KillLocalGFJava(): unable to run 'kill'");
      }
      stopped = javaProc.WaitForExit(MaxWaitMillis);
      if (stopped && javaProc.ExitCode == 0)
      {
        success = true;
        break;
      }
      Thread.Sleep(10000);
    }
  }
  return success;
}

/// <summary>
/// Kills the local Java cacheserver for the given server id and, on success,
/// removes its .cacheserver.ser status file so the launcher does not consider
/// it still running.
/// </summary>
private static void KillLocalJavaServer(string serverId, string signal)
{
  string javaPID = GetJavaServerPID(serverId);
  LogInfo("KillLocalJavaServer() killing '{0}' with PID '{1}' using " +
    "signal '{2}'", serverId, javaPID, signal);
  if (KillLocalGFJava(javaPID, signal))
  {
    string startDir = GetJavaStartDir(serverId, Util.SystemType);
    File.Delete(startDir + "/.cacheserver.ser");
  }
  else
  {
    LogException("KillLocalJavaServer() Timed out waiting for " +
      "Java cacheserver to stop.");
  }
}

#endregion

#region Public methods loaded by XMLs

/// <summary>
/// Will run a process using Cygwin bash
/// </summary>
public void DoRunProcess()
{
  string progName = GetStringValue("program");
  //bool driverUsingpsexec = false;
  bool hostIsWindows = false;
  try
  {
    //driverUsingpsexec = (bool)Util.BBGet(string.Empty, "UsePsexec");
    hostIsWindows = (bool)Util.BBGet(string.Empty,
      Util.HostName + ".IsWindows");
    // Best-effort: absence of the BB key simply leaves hostIsWindows false.
  }
  catch
  {
  }
  if (progName == null)
  {
    FwkException("DoRunProcess(): program not specified for task {0}.",
      TaskName);
  }
  string args = GetStringValue("arguments");
  if (progName == "cp") // for smpke perf xml
  {
    // Rewrite "cp <xml> <out>" into a copy of the shared framework XML into
    // the test's XML log directory (Windows uses "copy" instead of "cp").
    if (hostIsWindows) //&& driverUsingpsexec)
      progName = "copy";
    string[] argStr = args.Split(' ');
    int i = argStr[0].IndexOf("/framework/xml/");
    string gfBaseDir = Util.GetEnvironmentVariable("GFBASE");
    string sharePath = Util.NormalizePath(gfBaseDir);
    string specName = argStr[0].Substring(i);
    string perfStatictic = sharePath + specName;
    args = perfStatictic + ' ' +
      (string)Util.BBGet(string.Empty, "XMLLOGDIR") + '/' + argStr[1];
  }
  string fullProg = progName + ' ' + args;
  fullProg = fullProg.Trim();
  // Special treatment for java server scripts since they are C++ specific
  // (e.g. the environment variables they require, the FwkBBClient program,
  // the auto-ssh ...)
  string[] progs = fullProg.Split(';');
  for (int index = 0; index < progs.Length; index++)
  {
    if (m_exiting)
    {
      break;
    }
    string prog = progs[index].Trim();
    int javaIndx;
    // Dispatch the java-server pseudo-commands to the dedicated handlers;
    // everything else is run through a shell.
    if ((javaIndx = prog.IndexOf(SetupJSName)) >= 0)
    {
      args = prog.Substring(javaIndx + SetupJSName.Length).Trim();
      SetupJavaServers(args);
    }
    else if ((javaIndx = prog.IndexOf(StartJSName)) >= 0)
    {
      args = prog.Substring(javaIndx + StartJSName.Length).Trim();
      StartJavaServers(args);
    }
    else if ((javaIndx = prog.IndexOf(StopJSName)) >= 0)
    {
      args = prog.Substring(javaIndx + StopJSName.Length).Trim();
      StopJavaServers(args);
    }
    else if ((javaIndx = prog.IndexOf(KillJSName)) >= 0)
    {
      args = prog.Substring(javaIndx + KillJSName.Length).Trim();
      KillJavaServers(args);
    }
    else
    {
      FwkInfo("DoRunProcess() starting '{0}' using bash", prog);
      Process runProc = new Process();
      ProcessStartInfo startInfo;
      if (hostIsWindows) // && driverUsingpsexec)
      {
        prog = prog.Replace('/', '\\');
        startInfo = new ProcessStartInfo("cmd",
          string.Format("/C \"{0}\"", prog));
      }
      else
        startInfo = new ProcessStartInfo("bash",
          string.Format("-c \"{0}\"", prog));
      startInfo.CreateNoWindow = true;
      startInfo.UseShellExecute = false;
      startInfo.RedirectStandardOutput = true;
      startInfo.RedirectStandardError = true;
      runProc.StartInfo = startInfo;
      if (!runProc.Start())
      {
        FwkException("DoRunProcess() unable to run '{0}' using bash", prog);
      }
      // NOTE(review): reading stdout to the end before stderr can deadlock if
      // the child fills the stderr pipe buffer first -- confirm the child
      // processes used here produce little stderr output.
      StreamReader outSr = runProc.StandardOutput;
      StreamReader errSr = runProc.StandardError;
      string outStr = outSr.ReadToEnd();
      string errStr = errSr.ReadToEnd();
      runProc.WaitForExit();
      errSr.Close();
      outSr.Close();
      if (outStr != null && outStr.Length > 0)
      {
        FwkInfo("DoRunProcess() Output from executing '{0}':" +
          "{1}{2}{3}{4}", prog, Environment.NewLine, outStr,
          Environment.NewLine, Util.MarkerString);
      }
      if (errStr != null && errStr.Length > 0)
      {
        FwkSevere("DoRunProcess() Error output from executing '{0}':" +
          "{1}{2}{3}{4}", prog, Environment.NewLine, errStr,
          Environment.NewLine, Util.MarkerString);
      }
      FwkInfo("DoRunProcess() completed '{0}'.", prog);
    }
  }
}

/// <summary>
/// Sleeps for the configured SleepTime (default 30s when unset or below 1).
/// </summary>
public void DoSleep()
{
  int secs = GetUIntValue(SleepTime);
  if (secs < 1)
  {
    secs = 30;
  }
  FwkInfo("DoSleep() called for task: '{0}', sleeping for {1} seconds.",
    TaskName, secs);
  Thread.Sleep(secs * 1000);
}

/// <summary>
/// Runs DoRunProcess() and then sleeps for SleepTime seconds (if positive and
/// the test is not shutting down).
/// </summary>
public void DoRunProcessAndSleep()
{
  DoRunProcess();
  if (!m_exiting)
  {
    int secs = GetUIntValue(SleepTime);
    if (secs > 0)
    {
      Thread.Sleep(secs * 1000);
    }
  }
}

/// <summary>
/// HA task: stops (or kills) one server and restarts it, keeping at least
/// MinServers running across concurrent clients via a blackboard counter.
/// The "operation" key selects stop / term (SIGTERM) / kill (SIGKILL "9").
/// </summary>
public void DoStopStartServer()
{
  // First invocation per client registers itself in the shared server count.
  if (m_firstRun)
  {
    m_firstRun = false;
    Util.BBIncrement(JavaServerBB, JavaServerCountKey);
  }
  string op = GetStringValue("operation");
  if (op == null || op.Length == 0)
  {
    FwkException("DoStopStartServer(): operation not specified.");
  }
  string serverId = GetStringValue("ServerId");
  if (serverId == null || serverId.Length == 0)
  {
    FwkException("DoStopStartServer(): server id not specified.");
  }
  FwkInfo("DoStopStartServer(): stopping and starting server {0}.", serverId);
  UnitFnMethod<string> stopDeleg = null;
  string stopArg = null;
  if (op == "stop")
  {
    stopDeleg = StopJavaServers;
    stopArg = serverId;
  }
  else if (op == "term")
  {
    stopDeleg = KillJavaServers;
    stopArg = serverId;
  }
  else if (op == "kill")
  {
    stopDeleg = KillJavaServers;
    stopArg = serverId + " 9";
  }
  else
  {
    FwkException("DoStopStartServer(): unknown operation {0}.", op);
  }
  int secs = GetUIntValue(SleepTime);
  int minServers = GetUIntValue(MinServers);
  minServers = (minServers <= 0) ?
    1 : minServers;
  FwkInfo("DoStopStartServer(): using minServers: {0}", minServers);
  bool done = false;
  while (!done)
  {
    // Decrement the shared running-server count; only proceed with the
    // stop/start cycle if at least minServers would remain, otherwise put the
    // count back and retry.
    int numServers = Util.BBDecrement(JavaServerBB, JavaServerCountKey);
    if (numServers >= minServers)
    {
      stopDeleg(stopArg);
      Thread.Sleep(60000);
      StartJavaServers(serverId);
      Thread.Sleep(60000);
      Util.BBIncrement(JavaServerBB, JavaServerCountKey);
      done = true;
    }
    else
    {
      Util.BBIncrement(JavaServerBB, JavaServerCountKey);
      Thread.Sleep(1000);
    }
  }
  if (secs > 0)
  {
    Thread.Sleep(secs * 1000);
  }
}

#endregion

/// <summary>End-of-test cleanup hook registered when SSL is enabled.</summary>
public static void TestCompleteWithSSL()
{
  TestComplete(true);
}

/// <summary>End-of-test cleanup hook registered when SSL is disabled.</summary>
public static void TestCompleteWithoutSSL()
{
  TestComplete(false);
}

/// <summary>
/// Stops any still-running Java cacheservers and the locators at end of test,
/// then clears the server map and the locator blackboard entries.
/// </summary>
public static void TestComplete(bool ssl)
{
  m_exiting = true;
  lock (((ICollection)m_javaServerHostMap).SyncRoot)
  {
    // If this client never populated the map locally, fall back to the copy
    // published on the blackboard (best-effort).
    if (m_javaServerHostMap.Count == 0)
    {
      try
      {
        m_javaServerHostMap = Util.BBGet(JavaServerBB,
          JavaServerMapKey) as Dictionary<string, HostInfo>;
      }
      catch
      {
      }
    }
    if (m_javaServerHostMap.Count > 0)
    {
      // Stop all the remaining java servers here.
      string gfeDir = Util.GetEnvironmentVariable("GFE_DIR");
      LogAssert(gfeDir != null, "ClientExit() GFE_DIR is not set.");
      LogAssert(gfeDir.Length != 0, "ClientExit() GFE_DIR is not set.");
      // An endpoint-list GFE_DIR means externally managed servers: skip.
      Match mt = Regex.Match(gfeDir, "^[^:]+:[0-9]+(,[^:]+:[0-9]+)*$");
      if (mt == null || mt.Length == 0)
      {
        foreach (KeyValuePair<string, HostInfo> serverHostPair in
          m_javaServerHostMap)
        {
          string serverId = serverHostPair.Key;
          string targetHost = serverHostPair.Value.HostName;
          string hostType = serverHostPair.Value.HostType;
          string startDir = GetJavaStartDir(serverId, hostType);
          string javaServerArgs = "stop -dir=" + startDir;
          if (serverHostPair.Value.Started)
          {
            if (targetHost == null || targetHost.Length == 0 ||
                Util.IsHostMyself(targetHost))
            {
              LogInfo("ClientExit() stopping {0} with GFE_DIR {1} and " +
                "arguments: {2}", JavaServerName, gfeDir, javaServerArgs);
              string outStr;
              int status = StartLocalGFExe(JavaServerName, gfeDir,
                javaServerArgs, out outStr);
              if (status != GFNoError)
              {
                // Graceful stop failed: log on timeout, then hard-kill.
                if (status == GFTimeout)
                {
                  LogSevere("ClientExit() Timed out waiting for Java " +
                    "cacheserver to stop. Please check the server log " +
                    "in {0}.", startDir);
                }
                KillLocalJavaServer(serverId, "-9");
              }
            }
            else if (hostType != Util.SystemType)
            {
              // Stop the remote host
              LogInfo("ClientExit() stopping '{0}' on remote host " +
                "'{1}' of type {2}", serverId, targetHost, hostType);
              LogInfo(StartRemoteGFExe(targetHost, hostType, "cacheserver",
                javaServerArgs));
            }
          }
        }
      }
      m_javaServerHostMap.Clear();
    }
    // Stop the locator here.
    if (m_locatorHost != null && m_locatorType != null)
    {
      LogInfo("ClientExit() stopping locator on host {0}", m_locatorHost);
      for (int i = 0; i < locCount; i++)
      {
        string locatorDir = GetLocatorStartDir((i + 1).ToString(),
          Util.SystemType);
        /* if (ssl) { locatorDir = locatorDir + "/.."; } */
        string locatorPID = GetGFJavaPID(locatorDir + PathSep + "locator.log");
        if (locatorPID != null && locatorPID.Length > 0)
        {
          if (m_locatorType != Util.SystemType)
          {
            // Remote locator: SIGTERM first, then SIGKILL after a grace period.
            string killCmd = "kill -15 " + locatorPID;
            LogInfo(Util.RunClientShellTask(Util.ClientId, m_locatorHost,
              killCmd, null));
            Thread.Sleep(3000);
            killCmd = "kill -9 " + locatorPID;
            LogInfo(Util.RunClientShellTask(Util.ClientId, m_locatorHost,
              killCmd, null));
            LogInfo("ClientExit() successfully stopped locator PID {0} on host {1}",
              locatorPID, m_locatorHost);
          }
          else
          {
            if (!KillLocalGFJava(locatorPID, "-15") &&
                !KillLocalGFJava(locatorPID, "-9"))
            {
              LogSevere("ClientExit() Error while stopping " +
                "locator. Please check the logs in {0}", locatorDir);
            }
            else
            {
              LogInfo("ClientExit() successfully stopped locator on host {0}",
                m_locatorHost);
            }
          }
        }
      }
    }
    // Reset locator bookkeeping so a subsequent test starts clean.
    locCount = 0;
    m_locatorHost = null;
    m_locatorType = null;
    Util.BBRemove(string.Empty, "LOCATOR_ADDRESS");
    Util.BBRemove(string.Empty, "LOCATOR_ADDRESS_POOL");
    m_exiting = false;
  }
}
}
}
using System;
using System.Runtime.InteropServices;

namespace Godot
{
    /// <summary>
    /// A color represented by red, green, blue, and alpha (RGBA) components.
    /// The alpha component is often used for transparency.
    /// Values are in floating-point and usually range from 0 to 1.
    /// Some properties (such as CanvasItem.modulate) may accept values
    /// greater than 1 (overbright or HDR colors).
    ///
    /// If you want to supply values in a range of 0 to 255, you should use
    /// <see cref="Color8"/> and the `r8`/`g8`/`b8`/`a8` properties.
    /// </summary>
    [Serializable]
    [StructLayout(LayoutKind.Sequential)]
    public struct Color : IEquatable<Color>
    {
        /// <summary>
        /// The color's red component, typically on the range of 0 to 1.
        /// </summary>
        public float r;

        /// <summary>
        /// The color's green component, typically on the range of 0 to 1.
        /// </summary>
        public float g;

        /// <summary>
        /// The color's blue component, typically on the range of 0 to 1.
        /// </summary>
        public float b;

        /// <summary>
        /// The color's alpha (transparency) component, typically on the range of 0 to 1.
        /// </summary>
        public float a;

        /// <summary>
        /// Wrapper for <see cref="r"/> that uses the range 0 to 255 instead of 0 to 1.
        /// </summary>
        /// <value>Getting is equivalent to multiplying by 255 and rounding. Setting is equivalent to dividing by 255.</value>
        public int r8
        {
            get
            {
                return (int)Math.Round(r * 255.0f);
            }
            set
            {
                r = value / 255.0f;
            }
        }

        /// <summary>
        /// Wrapper for <see cref="g"/> that uses the range 0 to 255 instead of 0 to 1.
        /// </summary>
        /// <value>Getting is equivalent to multiplying by 255 and rounding. Setting is equivalent to dividing by 255.</value>
        public int g8
        {
            get
            {
                return (int)Math.Round(g * 255.0f);
            }
            set
            {
                g = value / 255.0f;
            }
        }

        /// <summary>
        /// Wrapper for <see cref="b"/> that uses the range 0 to 255 instead of 0 to 1.
        /// </summary>
        /// <value>Getting is equivalent to multiplying by 255 and rounding. Setting is equivalent to dividing by 255.</value>
        public int b8
        {
            get
            {
                return (int)Math.Round(b * 255.0f);
            }
            set
            {
                b = value / 255.0f;
            }
        }

        /// <summary>
        /// Wrapper for <see cref="a"/> that uses the range 0 to 255 instead of 0 to 1.
        /// </summary>
        /// <value>Getting is equivalent to multiplying by 255 and rounding. Setting is equivalent to dividing by 255.</value>
        public int a8
        {
            get
            {
                return (int)Math.Round(a * 255.0f);
            }
            set
            {
                a = value / 255.0f;
            }
        }

        /// <summary>
        /// The HSV hue of this color, on the range 0 to 1.
        /// </summary>
        /// <value>Getting is a long process, refer to the source code for details. Setting uses <see cref="FromHSV"/>.</value>
        public float h
        {
            get
            {
                float max = Math.Max(r, Math.Max(g, b));
                float min = Math.Min(r, Math.Min(g, b));

                float delta = max - min;

                // Achromatic (grey): hue is defined as 0.
                if (delta == 0)
                {
                    return 0;
                }

                float h;

                if (r == max)
                {
                    h = (g - b) / delta; // Between yellow & magenta
                }
                else if (g == max)
                {
                    h = 2 + (b - r) / delta; // Between cyan & yellow
                }
                else
                {
                    h = 4 + (r - g) / delta; // Between magenta & cyan
                }

                // Scale from sextants (0..6) to the 0..1 range and wrap negatives.
                h /= 6.0f;

                if (h < 0)
                {
                    h += 1.0f;
                }

                return h;
            }
            set
            {
                this = FromHSV(value, s, v, a);
            }
        }

        /// <summary>
        /// The HSV saturation of this color, on the range 0 to 1.
        /// </summary>
        /// <value>Getting is equivalent to the ratio between the min and max RGB value. Setting uses <see cref="FromHSV"/>.</value>
        public float s
        {
            get
            {
                float max = Math.Max(r, Math.Max(g, b));
                float min = Math.Min(r, Math.Min(g, b));

                float delta = max - min;

                return max == 0 ? 0 : delta / max;
            }
            set
            {
                this = FromHSV(h, value, v, a);
            }
        }

        /// <summary>
        /// The HSV value (brightness) of this color, on the range 0 to 1.
        /// </summary>
        /// <value>Getting is equivalent to using `Max()` on the RGB components. Setting uses <see cref="FromHSV"/>.</value>
        public float v
        {
            get
            {
                return Math.Max(r, Math.Max(g, b));
            }
            set
            {
                this = FromHSV(h, s, value, a);
            }
        }

        /// <summary>
        /// Access color components using their index.
        /// </summary>
        /// <value>`[0]` is equivalent to `.r`, `[1]` is equivalent to `.g`, `[2]` is equivalent to `.b`, `[3]` is equivalent to `.a`.</value>
        public float this[int index]
        {
            get
            {
                switch (index)
                {
                    case 0:
                        return r;
                    case 1:
                        return g;
                    case 2:
                        return b;
                    case 3:
                        return a;
                    default:
                        throw new IndexOutOfRangeException();
                }
            }
            set
            {
                switch (index)
                {
                    case 0:
                        r = value;
                        return;
                    case 1:
                        g = value;
                        return;
                    case 2:
                        b = value;
                        return;
                    case 3:
                        a = value;
                        return;
                    default:
                        throw new IndexOutOfRangeException();
                }
            }
        }

        /// <summary>
        /// Returns a new color resulting from blending this color over another.
        /// If the color is opaque, the result is also opaque.
        /// The second color may have a range of alpha values.
        /// </summary>
        /// <param name="over">The color to blend over.</param>
        /// <returns>This color blended over `over`.</returns>
        public Color Blend(Color over)
        {
            Color res;

            // Standard "over" alpha compositing; a fully transparent result
            // short-circuits to avoid dividing by zero below.
            float sa = 1.0f - over.a;
            res.a = a * sa + over.a;

            if (res.a == 0)
            {
                return new Color(0, 0, 0, 0);
            }

            res.r = (r * a * sa + over.r * over.a) / res.a;
            res.g = (g * a * sa + over.g * over.a) / res.a;
            res.b = (b * a * sa + over.b * over.a) / res.a;

            return res;
        }

        /// <summary>
        /// Returns a new color with all components clamped between the
        /// components of `min` and `max` using
        /// <see cref="Mathf.Clamp(float, float, float)"/>.
        /// Omitted bounds default to black `(0, 0, 0, 0)` and white `(1, 1, 1, 1)`.
        /// </summary>
        /// <param name="min">The color with minimum allowed values.</param>
        /// <param name="max">The color with maximum allowed values.</param>
        /// <returns>The color with all components clamped.</returns>
        public Color Clamp(Color? min = null, Color? max = null)
        {
            Color minimum = min ?? new Color(0, 0, 0, 0);
            Color maximum = max ??
                new Color(1, 1, 1, 1);
            return new Color
            (
                (float)Mathf.Clamp(r, minimum.r, maximum.r),
                (float)Mathf.Clamp(g, minimum.g, maximum.g),
                (float)Mathf.Clamp(b, minimum.b, maximum.b),
                (float)Mathf.Clamp(a, minimum.a, maximum.a)
            );
        }

        /// <summary>
        /// Returns a new color resulting from making this color darker
        /// by the specified ratio (on the range of 0 to 1).
        /// Alpha is left unchanged.
        /// </summary>
        /// <param name="amount">The ratio to darken by.</param>
        /// <returns>The darkened color.</returns>
        public Color Darkened(float amount)
        {
            Color res = this;
            res.r = res.r * (1.0f - amount);
            res.g = res.g * (1.0f - amount);
            res.b = res.b * (1.0f - amount);
            return res;
        }

        /// <summary>
        /// Returns the inverted color: `(1 - r, 1 - g, 1 - b, a)`.
        /// </summary>
        /// <returns>The inverted color.</returns>
        public Color Inverted()
        {
            return new Color(
                1.0f - r,
                1.0f - g,
                1.0f - b,
                a
            );
        }

        /// <summary>
        /// Returns a new color resulting from making this color lighter
        /// by the specified ratio (on the range of 0 to 1).
        /// Alpha is left unchanged.
        /// </summary>
        /// <param name="amount">The ratio to lighten by.</param>
        /// <returns>The lightened color.</returns>
        public Color Lightened(float amount)
        {
            Color res = this;
            res.r = res.r + (1.0f - res.r) * amount;
            res.g = res.g + (1.0f - res.g) * amount;
            res.b = res.b + (1.0f - res.b) * amount;
            return res;
        }

        /// <summary>
        /// Returns the result of the linear interpolation between
        /// this color and `to` by amount `weight`.
        /// </summary>
        /// <param name="to">The destination color for interpolation.</param>
        /// <param name="weight">A value on the range of 0.0 to 1.0, representing the amount of interpolation.</param>
        /// <returns>The resulting color of the interpolation.</returns>
        public Color Lerp(Color to, float weight)
        {
            return new Color
            (
                Mathf.Lerp(r, to.r, weight),
                Mathf.Lerp(g, to.g, weight),
                Mathf.Lerp(b, to.b, weight),
                Mathf.Lerp(a, to.a, weight)
            );
        }

        /// <summary>
        /// Returns the result of the linear interpolation between
        /// this color and `to` by color amount `weight`.
        /// </summary>
        /// <param name="to">The destination color for interpolation.</param>
        /// <param name="weight">A color with components on the range of 0.0 to 1.0, representing the amount of interpolation.</param>
        /// <returns>The resulting color of the interpolation.</returns>
        public Color Lerp(Color to, Color weight)
        {
            return new Color
            (
                Mathf.Lerp(r, to.r, weight.r),
                Mathf.Lerp(g, to.g, weight.g),
                Mathf.Lerp(b, to.b, weight.b),
                Mathf.Lerp(a, to.a, weight.a)
            );
        }

        /// <summary>
        /// Returns the color converted to an unsigned 32-bit integer in ABGR
        /// format (each byte represents a color channel).
        /// ABGR is the reversed version of the default format.
        /// </summary>
        /// <returns>A uint representing this color in ABGR32 format.</returns>
        public uint ToAbgr32()
        {
            // Pack channels high-to-low as A|B|G|R, one byte each.
            uint c = (byte)Math.Round(a * 255);
            c <<= 8;
            c |= (byte)Math.Round(b * 255);
            c <<= 8;
            c |= (byte)Math.Round(g * 255);
            c <<= 8;
            c |= (byte)Math.Round(r * 255);
            return c;
        }

        /// <summary>
        /// Returns the color converted to an unsigned 64-bit integer in ABGR
        /// format (each word represents a color channel).
        /// ABGR is the reversed version of the default format.
        /// </summary>
        /// <returns>A ulong representing this color in ABGR64 format.</returns>
        public ulong ToAbgr64()
        {
            // Pack channels high-to-low as A|B|G|R, 16 bits each.
            ulong c = (ushort)Math.Round(a * 65535);
            c <<= 16;
            c |= (ushort)Math.Round(b * 65535);
            c <<= 16;
            c |= (ushort)Math.Round(g * 65535);
            c <<= 16;
            c |= (ushort)Math.Round(r * 65535);
            return c;
        }

        /// <summary>
        /// Returns the color converted to an unsigned 32-bit integer in ARGB
        /// format (each byte represents a color channel).
        /// ARGB is more compatible with DirectX, but not used much in Godot.
/// </summary>
        /// <returns>A uint representing this color in ARGB32 format.</returns>
        public uint ToArgb32()
        {
            // Pack channels high-to-low as A, R, G, B (8 bits each).
            uint argb = (byte)Math.Round(a * 255);
            argb = (argb << 8) | (byte)Math.Round(r * 255);
            argb = (argb << 8) | (byte)Math.Round(g * 255);
            argb = (argb << 8) | (byte)Math.Round(b * 255);
            return argb;
        }

        /// <summary>
        /// Returns the color converted to an unsigned 64-bit integer in ARGB
        /// format (each word represents a color channel).
        /// ARGB is more compatible with DirectX, but not used much in Godot.
        /// </summary>
        /// <returns>A ulong representing this color in ARGB64 format.</returns>
        public ulong ToArgb64()
        {
            // Pack channels high-to-low as A, R, G, B (16 bits each).
            ulong argb = (ushort)Math.Round(a * 65535);
            argb = (argb << 16) | (ushort)Math.Round(r * 65535);
            argb = (argb << 16) | (ushort)Math.Round(g * 65535);
            argb = (argb << 16) | (ushort)Math.Round(b * 65535);
            return argb;
        }

        /// <summary>
        /// Returns the color converted to an unsigned 32-bit integer in RGBA
        /// format (each byte represents a color channel).
        /// RGBA is Godot's default and recommended format.
        /// </summary>
        /// <returns>A uint representing this color in RGBA32 format.</returns>
        public uint ToRgba32()
        {
            // Pack channels high-to-low as R, G, B, A (8 bits each).
            uint rgba = (byte)Math.Round(r * 255);
            rgba = (rgba << 8) | (byte)Math.Round(g * 255);
            rgba = (rgba << 8) | (byte)Math.Round(b * 255);
            rgba = (rgba << 8) | (byte)Math.Round(a * 255);
            return rgba;
        }

        /// <summary>
        /// Returns the color converted to an unsigned 64-bit integer in RGBA
        /// format (each word represents a color channel).
        /// RGBA is Godot's default and recommended format.
        /// </summary>
        /// <returns>A ulong representing this color in RGBA64 format.</returns>
        public ulong ToRgba64()
        {
            // Pack channels high-to-low as R, G, B, A (16 bits each).
            ulong rgba = (ushort)Math.Round(r * 65535);
            rgba = (rgba << 16) | (ushort)Math.Round(g * 65535);
            rgba = (rgba << 16) | (ushort)Math.Round(b * 65535);
            rgba = (rgba << 16) | (ushort)Math.Round(a * 65535);
            return rgba;
        }

        /// <summary>
        /// Returns the color's HTML hexadecimal color string in RGBA format.
        /// </summary>
        /// <param name="includeAlpha">Whether or not to include alpha. If false, the color is RGB instead of RGBA.</param>
        /// <returns>A string for the HTML hexadecimal representation of this color.</returns>
        public string ToHTML(bool includeAlpha = true)
        {
            string txt = ToHex32(r) + ToHex32(g) + ToHex32(b);
            if (includeAlpha)
            {
                txt += ToHex32(a);
            }
            return txt;
        }

        /// <summary>
        /// Constructs a color from RGBA values, typically on the range of 0 to 1.
        /// </summary>
        /// <param name="r">The color's red component, typically on the range of 0 to 1.</param>
        /// <param name="g">The color's green component, typically on the range of 0 to 1.</param>
        /// <param name="b">The color's blue component, typically on the range of 0 to 1.</param>
        /// <param name="a">The color's alpha (transparency) value, typically on the range of 0 to 1. Default: 1.</param>
        public Color(float r, float g, float b, float a = 1.0f)
        {
            this.r = r;
            this.g = g;
            this.b = b;
            this.a = a;
        }

        /// <summary>
        /// Constructs a color from an existing color and an alpha value.
        /// </summary>
        /// <param name="c">The color to construct from. Only its RGB values are used.</param>
        /// <param name="a">The color's alpha (transparency) value, typically on the range of 0 to 1. Default: 1.</param>
        public Color(Color c, float a = 1.0f)
        {
            this.r = c.r;
            this.g = c.g;
            this.b = c.b;
            this.a = a;
        }

        /// <summary>
        /// Constructs a color from an unsigned 32-bit integer in RGBA format
        /// (each byte represents a color channel).
        /// </summary>
        /// <param name="rgba">The uint representing the color.</param>
        public Color(uint rgba)
        {
            // Unpack low-to-high: A, B, G, R (8 bits each).
            a = (rgba & 0xFF) / 255.0f;
            b = ((rgba >> 8) & 0xFF) / 255.0f;
            g = ((rgba >> 16) & 0xFF) / 255.0f;
            r = ((rgba >> 24) & 0xFF) / 255.0f;
        }

        /// <summary>
        /// Constructs a color from an unsigned 64-bit integer in RGBA format
        /// (each word represents a color channel).
/// </summary>
        /// <param name="rgba">The ulong representing the color.</param>
        public Color(ulong rgba)
        {
            // Unpack low-to-high: A, B, G, R (16 bits each).
            a = (rgba & 0xFFFF) / 65535.0f;
            rgba >>= 16;
            b = (rgba & 0xFFFF) / 65535.0f;
            rgba >>= 16;
            g = (rgba & 0xFFFF) / 65535.0f;
            rgba >>= 16;
            r = (rgba & 0xFFFF) / 65535.0f;
        }

        /// <summary>
        /// Constructs a color either from an HTML color code or from a
        /// standardized color name. Supported
        /// color names are the same as the <see cref="Colors"/> constants.
        /// </summary>
        /// <param name="code">The HTML color code or color name to construct from.</param>
        public Color(string code)
        {
            if (HtmlIsValid(code))
            {
                this = FromHTML(code);
            }
            else
            {
                this = Named(code);
            }
        }

        /// <summary>
        /// Constructs a color either from an HTML color code or from a
        /// standardized color name, with `alpha` on the range of 0 to 1. Supported
        /// color names are the same as the <see cref="Colors"/> constants.
        /// </summary>
        /// <param name="code">The HTML color code or color name to construct from.</param>
        /// <param name="alpha">The alpha (transparency) value, typically on the range of 0 to 1.</param>
        public Color(string code, float alpha)
        {
            this = new Color(code);
            a = alpha;
        }

        /// <summary>
        /// Constructs a color from the HTML hexadecimal color string in RGBA format.
        /// Accepts 3, 4, 6 or 8 hex digits, with an optional leading '#'.
        /// An empty string yields opaque black.
        /// </summary>
        /// <param name="rgba">A string for the HTML hexadecimal representation of this color.</param>
        private static Color FromHTML(string rgba)
        {
            Color c;
            if (rgba.Length == 0)
            {
                c.r = 0f;
                c.g = 0f;
                c.b = 0f;
                c.a = 1.0f;
                return c;
            }

            if (rgba[0] == '#')
            {
                rgba = rgba.Substring(1);
            }

            // If enabled, use 1 hex digit per channel instead of 2.
            // Other sizes aren't in the HTML/CSS spec but we could add them if desired.
            bool isShorthand = rgba.Length < 5;
            bool alpha;

            if (rgba.Length == 8)
            {
                alpha = true;
            }
            else if (rgba.Length == 6)
            {
                alpha = false;
            }
            else if (rgba.Length == 4)
            {
                alpha = true;
            }
            else if (rgba.Length == 3)
            {
                alpha = false;
            }
            else
            {
                // BUG FIX: the old message claimed only lengths of 6 or 8 were
                // accepted, but 3- and 4-digit shorthand codes are parsed above too.
                throw new ArgumentOutOfRangeException("Invalid color code. Length is " + rgba.Length + " but a length of 3, 4, 6 or 8 is expected: " + rgba);
            }

            c.a = 1.0f;
            if (isShorthand)
            {
                c.r = ParseCol4(rgba, 0) / 15f;
                c.g = ParseCol4(rgba, 1) / 15f;
                c.b = ParseCol4(rgba, 2) / 15f;
                if (alpha)
                {
                    c.a = ParseCol4(rgba, 3) / 15f;
                }
            }
            else
            {
                c.r = ParseCol8(rgba, 0) / 255f;
                c.g = ParseCol8(rgba, 2) / 255f;
                c.b = ParseCol8(rgba, 4) / 255f;
                if (alpha)
                {
                    c.a = ParseCol8(rgba, 6) / 255f;
                }
            }

            // ParseCol4/ParseCol8 return a negative value for non-hex characters;
            // report which channel was malformed.
            if (c.r < 0)
            {
                throw new ArgumentOutOfRangeException("Invalid color code. Red part is not valid hexadecimal: " + rgba);
            }
            if (c.g < 0)
            {
                throw new ArgumentOutOfRangeException("Invalid color code. Green part is not valid hexadecimal: " + rgba);
            }
            if (c.b < 0)
            {
                throw new ArgumentOutOfRangeException("Invalid color code. Blue part is not valid hexadecimal: " + rgba);
            }
            if (c.a < 0)
            {
                throw new ArgumentOutOfRangeException("Invalid color code. Alpha part is not valid hexadecimal: " + rgba);
            }
            return c;
        }

        /// <summary>
        /// Returns a color constructed from integer red, green, blue, and alpha channels.
        /// Each channel should have 8 bits of information ranging from 0 to 255.
        /// </summary>
        /// <param name="r8">The red component represented on the range of 0 to 255.</param>
        /// <param name="g8">The green component represented on the range of 0 to 255.</param>
        /// <param name="b8">The blue component represented on the range of 0 to 255.</param>
        /// <param name="a8">The alpha (transparency) component represented on the range of 0 to 255.</param>
        /// <returns>The constructed color.</returns>
        public static Color Color8(byte r8, byte g8, byte b8, byte a8 = 255)
        {
            return new Color(r8 / 255f, g8 / 255f, b8 / 255f, a8 / 255f);
        }

        /// <summary>
        /// Returns a color according to the standardized name, with the
        /// specified alpha value. Supported color names are the same as
        /// the constants defined in <see cref="Colors"/>.
/// </summary>
        /// <param name="name">The name of the color.</param>
        /// <returns>The constructed color.</returns>
        private static Color Named(string name)
        {
            // Normalize the name: strip separators and punctuation, uppercase.
            name = name.Replace(" ", String.Empty)
                       .Replace("-", String.Empty)
                       .Replace("_", String.Empty)
                       .Replace("'", String.Empty)
                       .Replace(".", String.Empty)
                       .ToUpper();

            if (Colors.namedColors.ContainsKey(name))
            {
                return Colors.namedColors[name];
            }

            throw new ArgumentOutOfRangeException($"Invalid Color Name: {name}");
        }

        /// <summary>
        /// Constructs a color from an HSV profile, with values on the
        /// range of 0 to 1. This is equivalent to using each of
        /// the `h`/`s`/`v` properties, but much more efficient.
        /// </summary>
        /// <param name="hue">The HSV hue, typically on the range of 0 to 1.</param>
        /// <param name="saturation">The HSV saturation, typically on the range of 0 to 1.</param>
        /// <param name="value">The HSV value (brightness), typically on the range of 0 to 1.</param>
        /// <param name="alpha">The alpha (transparency) value, typically on the range of 0 to 1.</param>
        /// <returns>The constructed color.</returns>
        public static Color FromHSV(float hue, float saturation, float value, float alpha = 1.0f)
        {
            if (saturation == 0)
            {
                // Achromatic (grey)
                return new Color(value, value, value, alpha);
            }

            // Map the hue onto one of six color-wheel sectors.
            hue *= 6.0f;
            hue %= 6f;
            int sector = (int)hue;

            float f = hue - sector;
            float p = value * (1 - saturation);
            float q = value * (1 - saturation * f);
            float t = value * (1 - saturation * (1 - f));

            switch (sector)
            {
                case 0:
                    // Red is the dominant color
                    return new Color(value, t, p, alpha);
                case 1:
                    // Green is the dominant color
                    return new Color(q, value, p, alpha);
                case 2:
                    return new Color(p, value, t, alpha);
                case 3:
                    // Blue is the dominant color
                    return new Color(p, q, value, alpha);
                case 4:
                    return new Color(t, p, value, alpha);
                default:
                    // (5) Red is the dominant color
                    return new Color(value, p, q, alpha);
            }
        }

        /// <summary>
        /// Converts a color to HSV values. This is equivalent to using each of
        /// the `h`/`s`/`v` properties, but much more efficient.
        /// </summary>
        /// <param name="hue">Output parameter for the HSV hue.</param>
        /// <param name="saturation">Output parameter for the HSV saturation.</param>
        /// <param name="value">Output parameter for the HSV value.</param>
        public void ToHSV(out float hue, out float saturation, out float value)
        {
            float cmax = (float)Mathf.Max(r, Mathf.Max(g, b));
            float cmin = (float)Mathf.Min(r, Mathf.Min(g, b));
            float range = cmax - cmin;

            if (range == 0)
            {
                // Achromatic: hue is undefined, report 0.
                hue = 0;
            }
            else
            {
                if (r == cmax)
                {
                    hue = (g - b) / range; // Between yellow & magenta
                }
                else if (g == cmax)
                {
                    hue = 2 + (b - r) / range; // Between cyan & yellow
                }
                else
                {
                    hue = 4 + (r - g) / range; // Between magenta & cyan
                }

                hue /= 6.0f;
                if (hue < 0)
                {
                    hue += 1.0f;
                }
            }

            saturation = cmax == 0 ? 0 : 1f - 1f * cmin / cmax;
            value = cmax;
        }

        // Parses a single hex digit at `ofs`; returns -1 for non-hex characters.
        private static int ParseCol4(string str, int ofs)
        {
            char c = str[ofs];
            if (c >= '0' && c <= '9')
            {
                return c - '0';
            }
            if (c >= 'a' && c <= 'f')
            {
                return c - 'a' + 10;
            }
            if (c >= 'A' && c <= 'F')
            {
                return c - 'A' + 10;
            }
            return -1;
        }

        // Parses two hex digits at `ofs` as one byte value.
        private static int ParseCol8(string str, int ofs)
        {
            return ParseCol4(str, ofs) * 16 + ParseCol4(str, ofs + 1);
        }

        // Converts a 0..1 channel value to a two-character hex string.
        private string ToHex32(float val)
        {
            byte b = (byte)Mathf.RoundToInt(Mathf.Clamp(val * 255, 0, 255));
            return b.HexEncode();
        }

        internal static bool HtmlIsValid(string color)
        {
            if (color.Length == 0)
            {
                return false;
            }

            if (color[0] == '#')
            {
                color = color.Substring(1);
            }

            // Check if the amount of hex digits is valid.
            int len = color.Length;
            if (!(len == 3 || len == 4 || len == 6 || len == 8))
            {
                return false;
            }

            // Check if each hex digit is valid.
for (int i = 0; i < len; i++)
            {
                if (ParseCol4(color, i) == -1)
                {
                    return false;
                }
            }

            return true;
        }

        public static Color operator +(Color left, Color right)
        {
            return new Color(left.r + right.r, left.g + right.g, left.b + right.b, left.a + right.a);
        }

        public static Color operator -(Color left, Color right)
        {
            return new Color(left.r - right.r, left.g - right.g, left.b - right.b, left.a - right.a);
        }

        public static Color operator -(Color color)
        {
            // Negation is defined as the difference from opaque white.
            return Colors.White - color;
        }

        public static Color operator *(Color color, float scale)
        {
            return new Color(color.r * scale, color.g * scale, color.b * scale, color.a * scale);
        }

        public static Color operator *(float scale, Color color)
        {
            return new Color(color.r * scale, color.g * scale, color.b * scale, color.a * scale);
        }

        public static Color operator *(Color left, Color right)
        {
            return new Color(left.r * right.r, left.g * right.g, left.b * right.b, left.a * right.a);
        }

        public static Color operator /(Color color, float scale)
        {
            return new Color(color.r / scale, color.g / scale, color.b / scale, color.a / scale);
        }

        public static Color operator /(Color left, Color right)
        {
            return new Color(left.r / right.r, left.g / right.g, left.b / right.b, left.a / right.a);
        }

        public static bool operator ==(Color left, Color right)
        {
            return left.Equals(right);
        }

        public static bool operator !=(Color left, Color right)
        {
            return !left.Equals(right);
        }

        // Ordering compares channels lexicographically (r, then g, b, a),
        // treating approximately-equal channels as ties.
        public static bool operator <(Color left, Color right)
        {
            if (!Mathf.IsEqualApprox(left.r, right.r))
            {
                return left.r < right.r;
            }
            if (!Mathf.IsEqualApprox(left.g, right.g))
            {
                return left.g < right.g;
            }
            if (!Mathf.IsEqualApprox(left.b, right.b))
            {
                return left.b < right.b;
            }
            return left.a < right.a;
        }

        public static bool operator >(Color left, Color right)
        {
            if (!Mathf.IsEqualApprox(left.r, right.r))
            {
                return left.r > right.r;
            }
            if (!Mathf.IsEqualApprox(left.g, right.g))
            {
                return left.g > right.g;
            }
            if (!Mathf.IsEqualApprox(left.b, right.b))
            {
                return left.b > right.b;
            }
            return left.a > right.a;
        }

        public override bool Equals(object obj)
        {
            return obj is Color && Equals((Color)obj);
        }

        // Exact component-wise equality; use IsEqualApprox for tolerant comparison.
        public bool Equals(Color other)
        {
            return r == other.r
                && g == other.g
                && b == other.b
                && a == other.a;
        }

        /// <summary>
        /// Returns true if this color and `other` are approximately equal, by running
        /// <see cref="Godot.Mathf.IsEqualApprox(float, float)"/> on each component.
        /// </summary>
        /// <param name="other">The other color to compare.</param>
        /// <returns>Whether or not the colors are approximately equal.</returns>
        public bool IsEqualApprox(Color other)
        {
            return Mathf.IsEqualApprox(r, other.r)
                && Mathf.IsEqualApprox(g, other.g)
                && Mathf.IsEqualApprox(b, other.b)
                && Mathf.IsEqualApprox(a, other.a);
        }

        public override int GetHashCode()
        {
            return r.GetHashCode() ^ g.GetHashCode() ^ b.GetHashCode() ^ a.GetHashCode();
        }

        public override string ToString()
        {
            return $"({r}, {g}, {b}, {a})";
        }

        public string ToString(string format)
        {
            return $"({r.ToString(format)}, {g.ToString(format)}, {b.ToString(format)}, {a.ToString(format)})";
        }
    }
}
/* * Copyright (c) Contributors, http://aurora-sim.org/, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Reflection;
using Aurora.DataManager;
using Aurora.Simulation.Base;
using Nini.Config;
using OpenMetaverse;
using Aurora.Framework;
using OpenSim.Services.Interfaces;

namespace OpenSim.Services.AssetService
{
    /// <summary>
    ///     Local asset service backed by an IAssetDataPlugin, with optional
    ///     improved-cache support and remote (connector) pass-through.
    /// </summary>
    public class AssetService : ConnectorBase, IAssetService, IService
    {
        #region Declares

        protected IAssetDataPlugin m_database;
        protected bool doDatabaseCaching = false;

        #endregion

        #region IService Members

        public virtual string Name
        {
            get { return GetType().Name; }
        }

        public virtual void Initialize(IConfigSource config, IRegistryCore registry)
        {
            // Only come up if this service is the configured asset handler.
            IConfig handlerConfig = config.Configs["Handlers"];
            if (handlerConfig.GetString("AssetHandler", "") != Name)
                return;
            Configure(config, registry);
            Init(registry, Name);
        }

        /// <summary>
        ///     Resolves the storage plugin, registers this service and wires up
        ///     the console commands (when a console is attached).
        /// </summary>
        public virtual void Configure(IConfigSource config, IRegistryCore registry)
        {
            m_registry = registry;
            m_database = DataManager.RequestPlugin<IAssetDataPlugin>();
            if (m_database == null)
                throw new Exception("Could not find a storage interface in the given module");

            registry.RegisterModuleInterface<IAssetService>(this);

            IConfig handlers = config.Configs["Handlers"];
            if (handlers != null)
                doDatabaseCaching = handlers.GetBoolean("AssetHandlerUseCache", false);

            if (MainConsole.Instance != null)
            {
                MainConsole.Instance.Commands.AddCommand("show digest",
                                                         "show digest <ID>",
                                                         "Show asset digest", HandleShowDigest);
                MainConsole.Instance.Commands.AddCommand("delete asset",
                                                         "delete asset <ID>",
                                                         "Delete asset from database", HandleDeleteAsset);
                // BUG FIX: this Debug call used to sit outside the null check and
                // threw a NullReferenceException when no console was attached.
                MainConsole.Instance.Debug("[ASSET SERVICE]: Local asset service enabled");
            }
        }

        public virtual void Start(IConfigSource config, IRegistryCore registry)
        {
        }

        public virtual void FinishedStartup()
        {
        }

        #endregion

        #region IAssetService Members

        public IAssetService InnerService
        {
            get { return this; }
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual AssetBase Get(string id)
        {
            IImprovedAssetCache cache = m_registry.RequestModuleInterface<IImprovedAssetCache>();
            if (doDatabaseCaching && cache != null)
            {
                bool found;
                AssetBase cachedAsset = cache.Get(id, out found);
                // Only trust cache hits that are negative (null) or carry data.
                if (found && (cachedAsset == null || cachedAsset.Data.Length != 0))
                    return cachedAsset;
            }

            object remoteValue = DoRemote(id);
            if (remoteValue != null || m_doRemoteOnly)
            {
                if (doDatabaseCaching && cache != null)
                    cache.Cache(id, (AssetBase)remoteValue);
                return (AssetBase)remoteValue;
            }

            AssetBase asset = m_database.GetAsset(UUID.Parse(id));
            if (doDatabaseCaching && cache != null)
                cache.Cache(id, asset);
            return asset;
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual AssetBase GetCached(string id)
        {
            // Cache-only lookup; never touches the database or remote service.
            IImprovedAssetCache cache = m_registry.RequestModuleInterface<IImprovedAssetCache>();
            if (doDatabaseCaching && cache != null)
                return cache.Get(id);
            return null;
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual byte[] GetData(string id)
        {
            IImprovedAssetCache cache = m_registry.RequestModuleInterface<IImprovedAssetCache>();
            if (doDatabaseCaching && cache != null)
            {
                bool found;
                AssetBase cachedAsset = cache.Get(id, out found);
                if (found && (cachedAsset == null || cachedAsset.Data.Length != 0))
                    return cachedAsset.Data;
            }

            object remoteValue = DoRemote(id);
            if (remoteValue != null || m_doRemoteOnly)
                return (byte[])remoteValue;

            AssetBase asset = m_database.GetAsset(UUID.Parse(id));
            if (doDatabaseCaching && cache != null)
                cache.Cache(id, asset);
            if (asset != null)
                return asset.Data;
            // Missing assets yield an empty payload rather than null.
            return new byte[0];
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual bool GetExists(string id)
        {
            object remoteValue = DoRemote(id);
            if (remoteValue != null || m_doRemoteOnly)
                return remoteValue == null ? false : (bool)remoteValue;

            return m_database.ExistsAsset(UUID.Parse(id));
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual void Get(String id, Object sender, AssetRetrieved handler)
        {
            // Asynchronous fetch: the handler is invoked on a worker thread.
            Util.FireAndForget((o) => { handler(id, sender, Get(id)); });
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual UUID Store(AssetBase asset)
        {
            object remoteValue = DoRemote(asset);
            if (remoteValue != null || m_doRemoteOnly)
            {
                if (remoteValue == null)
                    return UUID.Zero;
                asset.ID = (UUID)remoteValue;
            }
            else
                asset.ID = m_database.Store(asset);

            IImprovedAssetCache cache = m_registry.RequestModuleInterface<IImprovedAssetCache>();
            if (doDatabaseCaching && cache != null && asset != null && asset.Data != null && asset.Data.Length != 0)
            {
                // Drop any stale cached copy before re-caching the new data.
                cache.Expire(asset.ID.ToString());
                cache.Cache(asset.ID.ToString(), asset);
            }

            return asset != null ? asset.ID : UUID.Zero;
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual UUID UpdateContent(UUID id, byte[] data)
        {
            object remoteValue = DoRemote(id, data);
            if (remoteValue != null || m_doRemoteOnly)
                return remoteValue == null ? UUID.Zero : (UUID)remoteValue;

            UUID newID;
            m_database.UpdateContent(id, data, out newID);
            IImprovedAssetCache cache = m_registry.RequestModuleInterface<IImprovedAssetCache>();
            if (doDatabaseCaching && cache != null)
                cache.Expire(id.ToString());

            return newID;
        }

        [CanBeReflected(ThreatLevel = OpenSim.Services.Interfaces.ThreatLevel.Low)]
        public virtual bool Delete(UUID id)
        {
            object remoteValue = DoRemote(id);
            if (remoteValue != null || m_doRemoteOnly)
                return remoteValue == null ? false : (bool)remoteValue;

            return m_database.Delete(id);
        }

        #endregion

        #region Console Commands

        /// <summary>
        ///     Console command: prints asset metadata and the first 80 bytes
        ///     of its data as a hex digest.
        /// </summary>
        private void HandleShowDigest(string[] args)
        {
            if (args.Length < 3)
            {
                MainConsole.Instance.Info("Syntax: show digest <ID>");
                return;
            }

            AssetBase asset = Get(args[2]);

            if (asset == null || asset.Data.Length == 0)
            {
                MainConsole.Instance.Info("Asset not found");
                return;
            }

            int i;

            MainConsole.Instance.Info(String.Format("Name: {0}", asset.Name));
            MainConsole.Instance.Info(String.Format("Description: {0}", asset.Description));
            MainConsole.Instance.Info(String.Format("Type: {0}", asset.TypeAsset));
            MainConsole.Instance.Info(String.Format("Content-type: {0}", asset.TypeAsset.ToString()));
            MainConsole.Instance.Info(String.Format("Flags: {0}", asset.Flags));

            // Dump up to 5 rows of 16 bytes each.
            for (i = 0; i < 5; i++)
            {
                int off = i*16;
                if (asset.Data.Length <= off)
                    break;
                int len = 16;
                if (asset.Data.Length < off + len)
                    len = asset.Data.Length - off;

                byte[] line = new byte[len];
                Array.Copy(asset.Data, off, line, 0, len);

                string text = BitConverter.ToString(line);
                MainConsole.Instance.Info(String.Format("{0:x4}: {1}", off, text));
            }
        }

        /// <summary>
        ///     Console command: deletes an asset by ID, if it exists.
        /// </summary>
        private void HandleDeleteAsset(string[] args)
        {
            if (args.Length < 3)
            {
                MainConsole.Instance.Info("Syntax: delete asset <ID>");
                return;
            }

            AssetBase asset = Get(args[2]);

            if (asset == null || asset.Data.Length == 0)
            {
                MainConsole.Instance.Info("Asset not found");
                return;
            }

            Delete(UUID.Parse(args[2]));

            MainConsole.Instance.Info("Asset deleted");
        }

        #endregion
    }
}
/*
  Copyright (c) Microsoft Corporation. All rights reserved.
  Licensed under the MIT License. See License.txt in the project root for license information.
*/

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Web.UI;
using System.Web.UI.WebControls;
using Adxstudio.Xrm.Resources;
using Adxstudio.Xrm.Web.UI.WebControls;
using Microsoft.Xrm.Client;
using Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView;
using Microsoft.Xrm.Sdk;

namespace Adxstudio.Xrm.Web.UI.CrmEntityFormView
{
	/// <summary>
	/// Factory pattern class to create a cell template.
	/// </summary>
	public class CellTemplateFactory : Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.CellTemplateFactory, ICellTemplateFactory
	{
		/// <summary>
		/// CellTemplateFactory Initialization.
		/// </summary>
		/// <param name="control"></param>
		/// <param name="fields"></param>
		/// <param name="metadataFactory"></param>
		/// <param name="cellBindings"></param>
		/// <param name="languageCode"></param>
		/// <param name="validationGroup"></param>
		/// <param name="enableUnsupportedFields"></param>
		public void Initialize(Control control, Collection<CrmEntityFormViewField> fields, ICellMetadataFactory metadataFactory,
			IDictionary<string, CellBinding> cellBindings, int languageCode, string validationGroup, bool enableUnsupportedFields)
		{
			// Capture the form view (if the control is one) and the field list,
			// then delegate the remaining setup to the base initializer.
			FormView = control as WebControls.CrmEntityFormView;
			Fields = fields;

			Initialize(control, metadataFactory, cellBindings, languageCode, validationGroup, enableUnsupportedFields);
		}

		/// <summary>
		/// CellTemplateFactory Initialization.
/// </summary>
		/// <param name="control"></param>
		/// <param name="fields"></param>
		/// <param name="metadataFactory"></param>
		/// <param name="cellBindings"></param>
		/// <param name="languageCode"></param>
		/// <param name="validationGroup"></param>
		/// <param name="enableUnsupportedFields"></param>
		/// <param name="toolTipEnabled"></param>
		/// <param name="recommendedFieldsRequired"></param>
		/// <param name="validationText"></param>
		/// <param name="contextName"></param>
		/// <param name="renderWebResourcesInline"></param>
		/// <param name="webFormMetadata"></param>
		/// <param name="forceAllFieldsRequired"></param>
		/// <param name="enableValidationSummaryLinks"></param>
		/// <param name="messages"> </param>
		/// <param name="showOwnerFields"></param>
		/// <param name="baseOrganizationLanguageCode"></param>
		public void Initialize(Control control, Collection<CrmEntityFormViewField> fields, ICellMetadataFactory metadataFactory,
			IDictionary<string, CellBinding> cellBindings, int languageCode, string validationGroup, bool enableUnsupportedFields,
			bool? toolTipEnabled, bool? recommendedFieldsRequired, string validationText, string contextName,
			bool? renderWebResourcesInline, IEnumerable<Entity> webFormMetadata, bool? forceAllFieldsRequired,
			bool? enableValidationSummaryLinks, Dictionary<string, string> messages, bool? showOwnerFields,
			int baseOrganizationLanguageCode = 0)
		{
			// Capture every option on the factory before handing off to the
			// base initializer.
			FormView = control as WebControls.CrmEntityFormView;
			Fields = fields;
			Messages = messages;
			ToolTipEnabled = toolTipEnabled;
			RecommendedFieldsRequired = recommendedFieldsRequired;
			ForceAllFieldsRequired = forceAllFieldsRequired;
			ShowOwnerFields = showOwnerFields;
			ValidationText = validationText;
			EnableValidationSummaryLinks = enableValidationSummaryLinks;
			RenderWebResourcesInline = renderWebResourcesInline;
			ContextName = contextName;
			WebFormMetadata = webFormMetadata;
			BaseOrganizationLanguageCode = baseOrganizationLanguageCode;

			Initialize(control, metadataFactory, cellBindings, languageCode, validationGroup, enableUnsupportedFields);
		}

		/// <summary>Localized message strings keyed by message name.</summary>
		protected Dictionary<string, string> Messages { get; private set; }

		/// <summary>Fields configured on the form view.</summary>
		protected Collection<CrmEntityFormViewField> Fields { get; private set; }

		/// <summary>Whether tooltips are rendered for cells.</summary>
		protected bool? ToolTipEnabled { get; set; }

		/// <summary>Whether business-recommended fields are treated as required.</summary>
		protected bool? RecommendedFieldsRequired { get; set; }

		/// <summary>Whether web resources are rendered inline rather than in an iframe.</summary>
		protected bool? RenderWebResourcesInline { get; set; }

		/// <summary>Whether every field is forced to be required.</summary>
		protected bool? ForceAllFieldsRequired { get; set; }

		/// <summary>Whether owner fields are displayed.</summary>
		protected bool? ShowOwnerFields { get; set; }

		/// <summary>Whether validation summary entries are rendered as links.</summary>
		protected bool? EnableValidationSummaryLinks { get; set; }

		/// <summary>Text used for validation summary links.</summary>
		protected string ValidationSummaryLinkText { get; set; }

		/// <summary>Text appended to validation messages.</summary>
		protected string ValidationText { get; set; }

		/// <summary>Name of the CRM connection context.</summary>
		protected string ContextName { get; set; }

		/// <summary>Web form metadata records applied to cells.</summary>
		protected IEnumerable<Entity> WebFormMetadata { get; private set; }

		/// <summary>The form view this factory builds templates for.</summary>
		public WebControls.CrmEntityFormView FormView { get; private set; }

		/// <summary>Language code of the organization's base language.</summary>
		public int BaseOrganizationLanguageCode { get; set; }

		/// <summary>
		/// Method to create the cell template.
/// </summary>
/// <param name="cellNode">Form XML node describing the cell.</param>
/// <param name="entityMetadata">Metadata of the entity the form is bound to.</param>
/// <returns>The <see cref="ICellTemplate"/> that renders the cell.</returns>
/// <exception cref="InvalidOperationException">Thrown when the factory has not been initialized.</exception>
/// <exception cref="ApplicationException">Thrown (via <see cref="CreateCellTemplate"/>) when more than one field customization targets the same attribute.</exception>
public override ICellTemplate CreateTemplate(System.Xml.Linq.XNode cellNode, Microsoft.Xrm.Sdk.Metadata.EntityMetadata entityMetadata)
{
	if (!IsInitialized) throw new InvalidOperationException("Factory is not initialized.");

	ICellMetadata cellMetadata;

	if (MetadataFactory is FormXmlCellMetadataFactory)
	{
		var formMetadataFactory = MetadataFactory as FormXmlCellMetadataFactory;

		// The form XML factory understands the extended portal settings captured in Initialize.
		cellMetadata = formMetadataFactory.GetMetadata(cellNode, entityMetadata, LanguageCode, ToolTipEnabled, RecommendedFieldsRequired, ValidationText, WebFormMetadata, ForceAllFieldsRequired, EnableValidationSummaryLinks, ValidationSummaryLinkText, Messages, BaseOrganizationLanguageCode);
	}
	else
	{
		cellMetadata = MetadataFactory.GetMetadata(cellNode, entityMetadata, LanguageCode);
	}

	return CreateCellTemplate(cellMetadata, entityMetadata);
}

/// <summary>
/// Method to create cell template. Dispatches on the cell's control kind and attribute
/// type to the matching control template. Order of the checks matters: special controls
/// (notes, web resources, subgrids, quick forms, lookups) are tested before plain
/// attribute types.
/// </summary>
/// <param name="cellMetadata">Metadata describing the cell; a <see cref="FormXmlCellMetadata"/> enables the extended portal controls.</param>
/// <param name="entityMetadata">Metadata of the entity the form is bound to.</param>
/// <returns>The template for the cell; an <see cref="EmptyCellTemplate"/> when nothing matches.</returns>
/// <exception cref="ApplicationException">Thrown when more than one <see cref="CrmEntityFormViewField"/> targets the same attribute.</exception>
public virtual ICellTemplate CreateCellTemplate(ICellMetadata cellMetadata, Microsoft.Xrm.Sdk.Metadata.EntityMetadata entityMetadata)
{
	// At most one field customization may target a given attribute.
	var fields = Fields.Where(f => f.AttributeName == cellMetadata.DataFieldName).ToList();

	if (fields.Count() > 1)
	{
		throw new ApplicationException("Only one CrmEntityFormViewField with an AttributeName {0} can be specified.".FormatWith(cellMetadata.DataFieldName));
	}

	var field = fields.FirstOrDefault();

	var formXmlCellMetadata = cellMetadata as FormXmlCellMetadata;

	if (formXmlCellMetadata != null)
	{
		formXmlCellMetadata.FormView = FormView;

		// Honor the CRM "valid for create/update" flags by forcing the cell read-only
		// when the form mode does not allow writing this attribute.
		if (formXmlCellMetadata.FormView != null && formXmlCellMetadata.FormView.Mode.HasValue)
		{
			if (formXmlCellMetadata.FormView.Mode.Value == FormViewMode.Insert && !formXmlCellMetadata.IsValidForCreate)
			{
				formXmlCellMetadata.ReadOnly = true;
			}

			if (formXmlCellMetadata.FormView.Mode.Value == FormViewMode.Edit && !formXmlCellMetadata.IsValidForUpdate)
			{
				formXmlCellMetadata.ReadOnly = true;
			}
		}

		// Notes/timeline only make sense once a record exists (Edit/ReadOnly modes).
		if ((formXmlCellMetadata.IsNotesControl || formXmlCellMetadata.IsActivityTimelineControl) && (FormView.Mode == FormViewMode.Edit || FormView.Mode == FormViewMode.ReadOnly))
		{
			return new NotesControlTemplate(formXmlCellMetadata, ContextName, CellBindings, formXmlCellMetadata.IsActivityTimelineControl);
		}

		if (formXmlCellMetadata.IsWebResource)
		{
			if (formXmlCellMetadata.WebResourceIsHtml) return new HtmlWebResourceControlTemplate(formXmlCellMetadata, ContextName, RenderWebResourcesInline);

			if (formXmlCellMetadata.WebResourceIsImage) return new ImageWebResourceControlTemplate(formXmlCellMetadata);
		}

		if (formXmlCellMetadata.IsSharePointDocuments)
		{
			return new SharePointDocumentsControlTemplate(formXmlCellMetadata, ContextName, CellBindings);
		}

		// Subgrids likewise require an existing record.
		if (formXmlCellMetadata.IsSubgrid && (FormView.Mode == FormViewMode.Edit || FormView.Mode == FormViewMode.ReadOnly))
		{
			return new SubgridControlTemplate(formXmlCellMetadata, ContextName, CellBindings);
		}

		if (formXmlCellMetadata.IsQuickForm)
		{
			return new CrmQuickFormControlTemplate(formXmlCellMetadata, ContextName, CellBindings);
		}

		if (formXmlCellMetadata.HasAttributeType("lookup"))
		{
			// "subject" lookups get a dedicated tree-style control.
			if (formXmlCellMetadata.LookupTargets.Length >= 1 && formXmlCellMetadata.LookupTargets[0] == "subject")
			{
				return new SubjectControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
			}

			if ((field != null && field.Type == FieldType.Dropdown) || formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.LookupDropdown)
			{
				return new LookupControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
			}

			return new ModalLookupControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
		}

		if (formXmlCellMetadata.HasAttributeType("customer"))
		{
			if ((field != null && field.Type == FieldType.Dropdown) || formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.LookupDropdown)
			{
				return new CustomerControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
			}

			return new ModalLookupControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
		}

		if (formXmlCellMetadata.IsFullNameControl) return new FullNameControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings, entityMetadata, FormView.Mode);

		if (formXmlCellMetadata.IsAddressCompositeControl)
		{
			return new AddressCompositeControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings, entityMetadata, FormView.Mode);
		}

		// Owner fields are only rendered when explicitly enabled via ShowOwnerFields.
		if (ShowOwnerFields.GetValueOrDefault(false) && formXmlCellMetadata.HasAttributeType("owner"))
		{
			if ((field != null && field.Type == FieldType.Dropdown) || formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.LookupDropdown)
			{
				return new LookupControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
			}

			return new ModalLookupControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
		}

		if (cellMetadata.HasAttributeType("string"))
		{
			switch (cellMetadata.Format)
			{
				case "Email":
					return new EmailStringControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
				case "Url":
					return new UrlStringControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
				case "TickerSymbol":
					return new TickerSymbolStringControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
				case "TextArea":
					return new MemoControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
				default:
					return new StringControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
			}
		}

		if (cellMetadata.HasAttributeType("picklist"))
		{
			// determine if the picklist should be a multi-select picklist: the convention is a
			// companion "<attribute>selectedvalues" attribute on the entity.
			var picklistvaluesfield = entityMetadata.Attributes.FirstOrDefault(a => a.LogicalName == string.Format("{0}selectedvalues", cellMetadata.DataFieldName));

			if (picklistvaluesfield != null)
			{
				return new MultiSelectPicklistControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
			}

			return new PicklistControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
		}

		if (cellMetadata.HasAttributeType("boolean"))
		{
			return formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.MultipleChoice
				? new MultipleChoiceControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings)
				: new BooleanControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
		}

		if (cellMetadata.HasAttributeType("memo")) return new MemoControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("decimal")) return new DecimalControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("datetime")) return new DateTimeControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("double")) return new DoubleControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("integer"))
		{
			switch (cellMetadata.Format)
			{
				case "Duration":
					return new DurationControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
				case "Language":
					return new LanguageControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
				case "TimeZone":
					return new TimeZoneControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
				default:
					// Survey-style control styles take precedence over the plain integer input.
					if (formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.RankOrderAllowTies || formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.RankOrderNoTies)
					{
						return new RankOrderControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
					}

					if (formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.ConstantSum)
					{
						return new ConstantSumControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
					}

					if (formXmlCellMetadata.ControlStyle == WebForms.WebFormMetadata.ControlStyle.StackRank)
					{
						return new StackRankControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
					}

					return new IntegerControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);
			}
		}

		if (cellMetadata.HasAttributeType("state")) return new StateControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("status")) return new StatusReasonControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("money")) return new MoneyControlTemplate(field, formXmlCellMetadata, ValidationGroup, CellBindings, ContextName);
	}
	else
	{
		// Fallback path for plain (non-form-XML) metadata: use the base portal templates.
		if (cellMetadata.HasAttributeType("string"))
			return string.Equals("email", cellMetadata.Format, StringComparison.InvariantCultureIgnoreCase)
				? new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.EmailStringControlTemplate(cellMetadata, ValidationGroup, CellBindings)
				: new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.StringControlTemplate(cellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("picklist")) return new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.PicklistControlTemplate(cellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("boolean")) return new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.BooleanControlTemplate(cellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("memo")) return new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.MemoControlTemplate(cellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("datetime")) return new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.DateTimeControlTemplate(cellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("integer")) return new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.IntegerControlTemplate(cellMetadata, ValidationGroup, CellBindings);

		if (cellMetadata.HasAttributeType("money")) return new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.MoneyControlTemplate(cellMetadata, ValidationGroup, CellBindings);

		// NOTE(review): this "datetime" check duplicates the one above and is unreachable dead code.
		if (cellMetadata.HasAttributeType("datetime")) return new Microsoft.Xrm.Portal.Web.UI.CrmEntityFormView.DateTimeControlTemplate(cellMetadata, ValidationGroup, CellBindings);
	}

	// Unknown attribute type: render a diagnostic placeholder when enabled, otherwise an empty cell.
	if (!string.IsNullOrEmpty(cellMetadata.AttributeType) && EnableUnsupportedFields) return new UnsupportedControlTemplate(cellMetadata, ValidationGroup, CellBindings, EnableUnsupportedFields);

	return new EmptyCellTemplate(cellMetadata);
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Drawing
{
    using System.Diagnostics;
    using System.Globalization;
    using System.IO;
    using DpiHelper = System.Windows.Forms.DpiHelper;

    /// <devdoc>
    ///     ToolboxBitmapAttribute defines the images associated with
    ///     a specified component. The component can offer a small
    ///     and large image (large is optional).
    /// </devdoc>
    [AttributeUsage(AttributeTargets.Class)]
    public class ToolboxBitmapAttribute : Attribute
    {
        /// <devdoc>
        ///     The small image for this component.
        /// </devdoc>
        private Image _smallImage;

        /// <devdoc>
        ///     The large image for this component.
        /// </devdoc>
        private Image _largeImage;

        /// <devdoc>
        ///     The original small image for this component, before scaling per DPI.
        /// </devdoc>
        private Bitmap _originalBitmap;

        /// <devdoc>
        ///     The path to the image file for this toolbox item, if any.
        /// </devdoc>
        private string _imageFile;

        /// <devdoc>
        ///     The Type used to retrieve the toolbox image for this component, if provided upon initialization of this class.
        /// </devdoc>
        private Type _imageType;

        /// <devdoc>
        ///     The resource name of the toolbox image for the component, if provided upon initialization of this class.
        /// </devdoc>
        private string _imageName;

        /// <devdoc>
        ///     The default size of the large image.
        /// </devdoc>
        private static readonly Size s_largeSize = new Size(32, 32);

        /// <devdoc>
        ///     The default size of the small image.
        /// </devdoc>
        private static readonly Size s_smallSize = new Size(16, 16);

        // Used to help cache the last result of BitmapSelector.GetFileName
        private static string s_lastOriginalFileName;
        private static string s_lastUpdatedFileName;

        /// <devdoc>
        ///     Constructs a new ToolboxBitmapAttribute from an image file path.
        /// </devdoc>
        public ToolboxBitmapAttribute(string imageFile) : this(GetImageFromFile(imageFile, false), GetImageFromFile(imageFile, true))
        {
            _imageFile = imageFile;
        }

        /// <devdoc>
        ///     Constructs a new ToolboxBitmapAttribute from a type; the image is looked up
        ///     in the type's manifest resources by the type's name.
        /// </devdoc>
        public ToolboxBitmapAttribute(Type t) : this(GetImageFromResource(t, null, false), GetImageFromResource(t, null, true))
        {
            _imageType = t;
        }

        /// <devdoc>
        ///     Constructs a new ToolboxBitmapAttribute from a type and an explicit resource name.
        /// </devdoc>
        public ToolboxBitmapAttribute(Type t, string name) : this(GetImageFromResource(t, name, false), GetImageFromResource(t, name, true))
        {
            _imageType = t;
            _imageName = name;
        }

        /// <devdoc>
        ///     Constructs a new ToolboxBitmapAttribute directly from the two images.
        /// </devdoc>
        private ToolboxBitmapAttribute(Image smallImage, Image largeImage)
        {
            _smallImage = smallImage;
            _largeImage = largeImage;
        }

        /// <devdoc>
        ///     Two attributes are equal when they wrap the same image references.
        /// </devdoc>
        public override bool Equals(object value)
        {
            if (value == this)
            {
                return true;
            }

            ToolboxBitmapAttribute attr = value as ToolboxBitmapAttribute;
            if (attr != null)
            {
                return attr._smallImage == _smallImage && attr._largeImage == _largeImage;
            }

            return false;
        }

        /// <devdoc>
        ///     Uses the default (reference-identity based) hash code.
        /// </devdoc>
        public override int GetHashCode()
        {
            return base.GetHashCode();
        }

        /// <devdoc>
        ///     Gets the large image for the component instance's type.
        /// </devdoc>
        public Image GetImage(object component)
        {
            return GetImage(component, true);
        }

        /// <devdoc>
        ///     Gets the image for the component instance's type; null when component is null.
        /// </devdoc>
        public Image GetImage(object component, bool large)
        {
            if (component != null)
            {
                return GetImage(component.GetType(), large);
            }
            return null;
        }

        /// <devdoc>
        ///     Gets the small image for the given type.
        /// </devdoc>
        public Image GetImage(Type type)
        {
            return GetImage(type, false);
        }

        /// <devdoc>
        ///     Gets the image for the given type.
        /// </devdoc>
        public Image GetImage(Type type, bool large)
        {
            return GetImage(type, null, large);
        }

        /// <devdoc>
        ///     Resolves (and caches) the requested image, falling back to the resource image,
        ///     an upscaled small image, and finally the default component image.
        /// </devdoc>
        public Image GetImage(Type type, string imgName, bool large)
        {
            if ((large && _largeImage == null) ||
                (!large && _smallImage == null))
            {
                Image img = null;
                if (large)
                {
                    img = _largeImage;
                }
                else
                {
                    img = _smallImage;
                }

                if (img == null)
                {
                    img = GetImageFromResource(type, imgName, large);
                }

                //last resort for large images.
                if (large && _largeImage == null && _smallImage != null)
                {
                    img = new Bitmap((Bitmap)_smallImage, s_largeSize.Width, s_largeSize.Height);
                }

                Bitmap b = img as Bitmap;
                if (b != null)
                {
                    MakeBackgroundAlphaZero(b);
                }

                if (img == null)
                {
                    img = s_defaultComponent.GetImage(type, large);
                }

                if (large)
                {
                    _largeImage = img;
                }
                else
                {
                    _smallImage = img;
                }
            }

            Image toReturn = (large) ? _largeImage : _smallImage;

            // The shared Default instance must never cache images, or every
            // defaulted component would return the first type's icon.
            if (Equals(Default))
            {
                _largeImage = null;
                _smallImage = null;
            }

            return toReturn;
        }

        // Returns the original, unscaled small icon (lazily resolved from the file or
        // resource recorded at construction). Null when there is no small image or no
        // DPI scaling is in effect.
        internal Bitmap GetOriginalBitmap()
        {
            if (_originalBitmap != null)
            {
                return _originalBitmap;
            }

            // If the control does not have a toolbox icon associated with it, then exit.
            if (_smallImage == null)
            {
                return null;
            }

            // If we are not scaling for DPI, then the small icon had not been modified
            if (!DpiHelper.IsScalingRequired)
            {
                return null;
            }

            // Get small unscaled icon (toolbox can handle only 16x16).
            if (!string.IsNullOrEmpty(_imageFile))
            {
                _originalBitmap = GetImageFromFile(_imageFile, false, false) as Bitmap;
            }
            else if (_imageType != null)
            {
                _originalBitmap = GetImageFromResource(_imageType, _imageName, false, false) as Bitmap;
            }

            return _originalBitmap;
        }

        //helper to get the right icon from the given stream that represents an icon
        // NOTE(review): the intermediate Icon instances (ico, sizedico) are never disposed,
        // leaking their native handles until finalized — confirm whether this is intentional.
        private static Image GetIconFromStream(Stream stream, bool large, bool scaled)
        {
            if (stream == null)
            {
                return null;
            }
            Icon ico = new Icon(stream);
            Icon sizedico = new Icon(ico, large ? s_largeSize : s_smallSize);
            Bitmap b = sizedico.ToBitmap();
            if (DpiHelper.IsScalingRequired && scaled)
            {
                DpiHelper.ScaleBitmapLogicalToDevice(ref b);
            }
            return b;
        }

        // Cache the last result of BitmapSelector.GetFileName because we commonly load images twice
        // in succession from the same file and we don't need to compute the name twice.
        private static string GetFileNameFromBitmapSelector(string originalName)
        {
            if (originalName != s_lastOriginalFileName)
            {
                s_lastOriginalFileName = originalName;
                s_lastUpdatedFileName = BitmapSelector.GetFileName(originalName);
            }

            return s_lastUpdatedFileName;
        }

        // Just forwards to Image.FromFile eating any non-critical exceptions that may result.
        private static Image GetImageFromFile(string imageFile, bool large, bool scaled = true)
        {
            Image image = null;
            try
            {
                if (imageFile != null)
                {
                    imageFile = GetFileNameFromBitmapSelector(imageFile);

                    string ext = Path.GetExtension(imageFile);
                    if (ext != null && string.Equals(ext, ".ico", StringComparison.OrdinalIgnoreCase))
                    {
                        //ico files support both large and small, so we respect the large flag here.
                        FileStream reader = System.IO.File.Open(imageFile, FileMode.Open);
                        if (reader != null)
                        {
                            try
                            {
                                image = GetIconFromStream(reader, large, scaled);
                            }
                            finally
                            {
                                reader.Close();
                            }
                        }
                    }
                    else if (!large)
                    {
                        //we only read small from non-ico files.
                        image = Image.FromFile(imageFile);
                        Bitmap b = image as Bitmap;
                        if (DpiHelper.IsScalingRequired && scaled)
                        {
                            DpiHelper.ScaleBitmapLogicalToDevice(ref b);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                if (ClientUtils.IsCriticalException(e))
                {
                    throw;
                }
                Debug.Fail("Failed to load toolbox image '" + imageFile + "':\r\n" + e.ToString());
            }

            return image;
        }

        // Loads a bitmap from the type's manifest resources, zeroing the background alpha,
        // upscaling for 'large', and DPI-scaling when requested.
        static private Image GetBitmapFromResource(Type t, string bitmapname, bool large, bool scaled)
        {
            if (bitmapname == null)
            {
                return null;
            }

            Image img = null;

            // load the image from the manifest resources.
            //
            Stream stream = BitmapSelector.GetResourceStream(t, bitmapname);
            if (stream != null)
            {
                Bitmap b = new Bitmap(stream);
                img = b;
                MakeBackgroundAlphaZero(b);
                if (large)
                {
                    img = new Bitmap(b, s_largeSize.Width, s_largeSize.Height);
                }
                if (DpiHelper.IsScalingRequired && scaled)
                {
                    b = (Bitmap)img;
                    DpiHelper.ScaleBitmapLogicalToDevice(ref b);
                    img = b;
                }
            }
            return img;
        }

        // Loads an icon from the type's manifest resources.
        static private Image GetIconFromResource(Type t, string bitmapname, bool large, bool scaled)
        {
            if (bitmapname == null)
            {
                return null;
            }

            return GetIconFromStream(BitmapSelector.GetResourceStream(t, bitmapname), large, scaled);
        }

        /// <devdoc>
        ///     Loads the (DPI-scaled) toolbox image for the type from its manifest resources.
        /// </devdoc>
        public static Image GetImageFromResource(Type t, string imageName, bool large)
        {
            return GetImageFromResource(t, imageName, large, true /*scaled*/);
        }

        /// <devdoc>
        ///     Loads the toolbox image for the type from its manifest resources, trying
        ///     (in order) the raw name as a bitmap, name + ".bmp", then name + ".ico".
        /// </devdoc>
        internal static Image GetImageFromResource(Type t, string imageName, bool large, bool scaled)
        {
            Image img = null;
            try
            {
                string name = imageName;
                string iconname = null;
                string bmpname = null;
                string rawbmpname = null;

                // if we didn't get a name, use the class name
                //
                if (name == null)
                {
                    name = t.FullName;
                    int indexDot = name.LastIndexOf('.');
                    if (indexDot != -1)
                    {
                        name = name.Substring(indexDot + 1);
                    }

                    iconname = name + ".ico";
                    bmpname = name + ".bmp";
                }
                else
                {
                    // NOTE(review): these extension comparisons use the current culture; an
                    // ordinal ignore-case comparison would be safer for file extensions.
                    if (String.Compare(Path.GetExtension(imageName), ".ico", true, CultureInfo.CurrentCulture) == 0)
                    {
                        iconname = name;
                    }
                    else if (String.Compare(Path.GetExtension(imageName), ".bmp", true, CultureInfo.CurrentCulture) == 0)
                    {
                        bmpname = name;
                    }
                    else
                    {
                        //we dont recognize the name as either bmp or ico. we need to try three things.
                        //1.  the name as a bitmap (back compat)
                        //2.  name+.bmp
                        //3.  name+.ico
                        rawbmpname = name;
                        bmpname = name + ".bmp";
                        iconname = name + ".ico";
                    }
                }

                if (rawbmpname != null)
                {
                    img = GetBitmapFromResource(t, rawbmpname, large, scaled);
                }
                if (img == null && bmpname != null)
                {
                    img = GetBitmapFromResource(t, bmpname, large, scaled);
                }
                if (img == null && iconname != null)
                {
                    img = GetIconFromResource(t, iconname, large, scaled);
                }
            }
            catch (Exception e)
            {
                if (t == null)
                {
                    Debug.Fail("Failed to load toolbox image for null type:\r\n" + e.ToString());
                }
                else
                {
                    Debug.Fail("Failed to load toolbox image for '" + t.FullName + "':\r\n" + e.ToString());
                }
            }
            return img;
        }

        // Makes the color of the bottom-left pixel transparent throughout the bitmap,
        // then stamps that pixel back with zero alpha so the key color is recoverable.
        private static void MakeBackgroundAlphaZero(Bitmap img)
        {
            Color bottomLeft = img.GetPixel(0, img.Height - 1);
            img.MakeTransparent();

            Color newBottomLeft = Color.FromArgb(0, bottomLeft);
            img.SetPixel(0, img.Height - 1, newBottomLeft);
        }

        /// <devdoc>
        ///     Default instance: no images; GetImage on it never caches (see GetImage).
        /// </devdoc>
        public static readonly ToolboxBitmapAttribute Default = new ToolboxBitmapAttribute((Image)null, (Image)null);

        private static readonly ToolboxBitmapAttribute s_defaultComponent;

        static ToolboxBitmapAttribute()
        {
            // Fix for Dev10 560430. When we call Gdip.DummyFunction, JIT will make sure
            // Gdip..cctor will be called before.
            SafeNativeMethods.Gdip.DummyFunction();

            Bitmap bitmap = null;
            Stream stream = BitmapSelector.GetResourceStream(typeof(ToolboxBitmapAttribute), "DefaultComponent.bmp");
            if (stream != null)
            {
                bitmap = new Bitmap(stream);
                MakeBackgroundAlphaZero(bitmap);
            }
            s_defaultComponent = new ToolboxBitmapAttribute(bitmap, null);
        }
    }
}
using System.Collections.Generic;
using UnityEngine;
using System.Diagnostics;
using System.ComponentModel;
using System;
using System.Reflection;

namespace UnuGames.MVVM
{
    /// <summary>
    /// ViewModel behavior (part of MVVM pattern).
    /// Publishes property changes to subscribed binders; subscriptions are keyed by the
    /// property setter name ("set_&lt;PropertyName&gt;").
    /// </summary>
    public class ViewModelBehaviour : MonoBehaviour, IObservable
    {
        // Subscribed change handlers, keyed by "set_<PropertyName>".
        Dictionary<string, Action<object>> actionDict = new Dictionary<string, Action<object>>();
        // Cached PropertyInfo lookups, keyed by "set_<PropertyName>".
        Dictionary<string, PropertyInfo> propertyCache = new Dictionary<string, PropertyInfo>();
        // Members of a subscribed model object whose values are re-published by NotifyModelChange.
        List<MemberInfo> notifyableMembers = new List<MemberInfo>();

        RectTransform mRecttransform;

        /// <summary>
        /// Lazily cached RectTransform of this behaviour.
        /// </summary>
        public RectTransform Recttransform
        {
            get
            {
                if (mRecttransform == null)
                    mRecttransform = GetComponent<RectTransform>();
                return mRecttransform;
            }
        }

        Transform mTransform;

        /// <summary>
        /// Lazily cached Transform of this behaviour.
        /// </summary>
        public Transform Transform
        {
            get
            {
                if (mTransform == null)
                    mTransform = GetComponent<Transform>();
                // BUGFIX: previously returned mRecttransform (copy-paste error), which was
                // null unless Recttransform had been accessed first.
                return mTransform;
            }
        }

        /// <summary>
        /// Notify the property which has change to all binder that has been subcribed with property name and value.
        /// </summary>
        /// <param name="propertyName">Subscription key, i.e. "set_&lt;PropertyName&gt;".</param>
        /// <param name="value">New value to push to the subscribed binders.</param>
        public virtual void NotifyPropertyChanged(string propertyName, object value)
        {
            Action<object> actions = null;

            if (actionDict.TryGetValue(propertyName, out actions))
            {
                try
                {
                    if (actions != null)
                        actions(value);
                }
                catch (Exception e)
                {
                    // A faulty binder must not break the notification pipeline.
                    UnuLogger.LogError(e.Message);
                }
            }
            // else: no binder registered for this property — nothing to do.
        }

        /// <summary>
        /// Raise the change event automatically without name and value,
        /// only use this function in property getter
        /// </summary>
        public void OnPropertyChanged()
        {
            // GetCaller returns the invoking setter's method name ("set_<PropertyName>"),
            // which doubles as the subscription key.
            string propertyName = GetCaller();
            PropertyInfo property = null;

            if (propertyCache.TryGetValue(propertyName, out property))
            {
                object newValue = property.GetValue(this, null);
                NotifyPropertyChanged(propertyName, newValue);
            }
        }

        /// <summary>
        /// Get the caller of current function
        /// </summary>
        /// <param name="level">Stack depth to inspect (2 = caller of the method that called GetCaller).</param>
        /// <returns>The method name at the requested stack level.</returns>
        static string GetCaller(int level = 2)
        {
            StackFrame sf = new StackFrame(level);
            return sf.GetMethod().Name;
        }

        /// <summary>
        /// Subcribe action to notify on property changed
        /// </summary>
        /// <param name="propertyName">Plain property name (without the "set_" prefix).</param>
        /// <param name="updateAction">Handler invoked with the new value on change.</param>
        public void SubscribeAction(string propertyName, Action<object> updateAction)
        {
            string propertyKey = "set_" + propertyName;

            if (actionDict.ContainsKey(propertyKey))
            {
                actionDict[propertyKey] += updateAction;
            }
            else
            {
                actionDict.Add(propertyKey, updateAction);
                // Cache the reflection lookup alongside the first subscription.
                propertyCache.Add(propertyKey, this.GetCachedType().GetProperty(propertyName));
            }
        }

        /// <summary>
        /// Unsubcribe action from notify on property changed
        /// </summary>
        /// <param name="propertyName">Plain property name (without the "set_" prefix).</param>
        /// <param name="updateAction">Handler to remove.</param>
        public void UnSubscribeAction(string propertyName, Action<object> updateAction)
        {
            string propertyKey = "set_" + propertyName;

            if (actionDict.ContainsKey(propertyKey))
            {
                actionDict[propertyKey] -= updateAction;
            }
        }

        /// <summary>
        /// Sets the value.
        /// </summary>
        /// <param name="propertyName">Property name.</param>
        /// <param name="value">Value.</param>
        public void SetValue(string propertyName, object value)
        {
            PropertyInfo property = null;

            if (propertyCache.TryGetValue("set_" + propertyName, out property))
            {
                property.SetValue(this, value, null);
            }
        }

        /// <summary>
        /// Notifies the model change.
        /// Re-publishes the current value of every subscribed model member.
        /// </summary>
        /// <param name="obj">Object.</param>
        public void NotifyModelChange(object obj)
        {
            if (notifyableMembers == null)
                return;

            for (int i = 0; i < notifyableMembers.Count; i++)
            {
                object value = null;

                if (notifyableMembers[i] is FieldInfo)
                {
                    FieldInfo field = notifyableMembers[i].ToField();
                    value = field.GetValue(obj);
                }
                else
                {
                    PropertyInfo property = notifyableMembers[i].ToProperty();
                    value = property.GetValue(obj, null);
                }

                NotifyPropertyChanged("set_" + notifyableMembers[i].Name, value);
            }
        }

        /// <summary>
        /// Subcripts the object action.
        /// Registers all fields and properties of the object for model-change notification.
        /// </summary>
        /// <param name="obj">Object.</param>
        public void SubcriptObjectAction(object obj)
        {
            MemberInfo[] members = obj.GetCachedType().GetMembers();

            for (int i = 0; i < members.Length; i++)
            {
                if (members[i] is FieldInfo || members[i] is PropertyInfo)
                {
                    notifyableMembers.Add(members[i]);
                }
            }
        }

        /// <summary>
        /// Subcripts the object action.
        /// Registers all fields and properties of the property's type for model-change notification.
        /// </summary>
        /// <param name="property">Property whose type's members are registered.</param>
        public void SubcriptObjectAction(PropertyInfo property)
        {
            MemberInfo[] members = property.GetType().GetMembers();

            for (int i = 0; i < members.Length; i++)
            {
                if (members[i] is FieldInfo || members[i] is PropertyInfo)
                {
                    notifyableMembers.Add(members[i]);
                }
            }
        }

        /// <summary>
        /// Determines whether this instance is binding to the specified modelInstance.
        /// </summary>
        /// <returns>The bound property, or null when no cached property holds the instance.</returns>
        /// <param name="modelInstance">Model instance.</param>
        public PropertyInfo IsBindingTo(object modelInstance)
        {
            foreach (KeyValuePair<string, PropertyInfo> property in propertyCache)
            {
                if (property.Value != null)
                {
                    object propertyVal = property.Value.GetValue(this, null);
                    if (propertyVal != null && propertyVal.Equals(modelInstance))
                    {
                        return property.Value;
                    }
                }
            }

            return null;
        }
    }

    /// <summary>
    /// Binding type.
    /// </summary>
    public enum ContextType
    {
        [DescriptionAttribute("Nothing")]
        NONE = 0,
        [DescriptionAttribute("MonoBehaviour")]
        MONO_BEHAVIOR,
        [DescriptionAttribute("Type Instance")]
        PROPERTY
    }
}
#region WatiN Copyright (C) 2006-2007 Jeroen van Menen
//Copyright 2006-2007 Jeroen van Menen
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion Copyright

using System.Collections;
using System.Text.RegularExpressions;
using mshtml;

namespace ItiN
{
    /// <summary>
    /// This delegate is mainly used by <see cref="BaseElementCollection"/> to
    /// delegate the creation of a specialized element type.
    /// </summary>
    public delegate Element CreateElementInstance(DomContainer domContainer, IHTMLElement element);

    /// <summary>
    /// This class is mainly used by Watin internally as the base class for all
    /// of the element collections.
    /// </summary>
    public abstract class BaseElementCollection : IEnumerable
    {
        // Owning DOM container; passed through to every Element created by the factory.
        protected DomContainer domContainer;

        // Backing list of raw IHTMLElement instances. Stays null until first
        // accessed through Elements when the collection was built with a finder
        // (lazy lookup); set eagerly when built from a pre-existing ArrayList.
        private ArrayList elements;
        // Factory delegate that wraps a raw IHTMLElement in its specialized Element type.
        private CreateElementInstance createElementInstance;
        // Optional finder used to locate elements on demand; null when the
        // collection was constructed from an explicit ArrayList.
        private ElementFinder finder;

        /// <summary>
        /// Initializes a new instance of the <see cref="BaseElementCollection"/> class
        /// with a finder for lazy element lookup.
        /// Mainly used by WatiN internally.
        /// </summary>
        /// <param name="domContainer">The DOM container.</param>
        /// <param name="finder">The finder.</param>
        /// <param name="createElementInstance">The create element instance.</param>
        public BaseElementCollection(DomContainer domContainer, ElementFinder finder, CreateElementInstance createElementInstance) :
            this(domContainer, (ArrayList)null, createElementInstance)
        {
            this.finder = finder;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="BaseElementCollection"/> class
        /// from a pre-built list of elements.
        /// Mainly used by WatiN internally.
        /// </summary>
        /// <param name="domContainer">The DOM container.</param>
        /// <param name="elements">The elements.</param>
        /// <param name="createElementInstance">The create element instance.</param>
        public BaseElementCollection(DomContainer domContainer, ArrayList elements, CreateElementInstance createElementInstance)
        {
            this.elements = elements;
            this.domContainer = domContainer;
            this.createElementInstance = createElementInstance;
        }

        /// <summary>
        /// Gets the length (number of elements in the collection).
        /// </summary>
        /// <value>The length.</value>
        public int Length
        {
            get { return Elements.Count; }
        }

        /// <summary>
        /// Lazily resolves and caches the raw element list. When constructed with
        /// a finder, the first access runs the finder; the result is cached and
        /// NOT refreshed on subsequent accesses.
        /// </summary>
        protected ArrayList Elements
        {
            get
            {
                if (elements == null)
                {
                    if (finder != null)
                    {
                        elements = finder.FindAll();
                    }
                    else
                    {
                        elements = new ArrayList();
                    }
                }

                return elements;
            }
        }

        /// <summary>
        /// Returns true when an element with the given id exists in the collection.
        /// </summary>
        public bool Exists(string elementId)
        {
            return Exists(new Id(elementId));
        }

        /// <summary>
        /// Returns true when an element whose id matches the regex exists in the collection.
        /// </summary>
        public bool Exists(Regex elementId)
        {
            return Exists(new Id(elementId));
        }

        /// <summary>
        /// Returns true when any element in the collection matches the given attribute constraint.
        /// </summary>
        public bool Exists(Attribute findBy)
        {
            // Reuse one attribute bag while scanning; only the wrapped element changes.
            ElementAttributeBag attributeBag = new ElementAttributeBag();

            foreach (IHTMLElement element in Elements)
            {
                attributeBag.IHTMLElement = element;
                if (findBy.Compare(attributeBag))
                {
                    return true;
                }
            }

            return false;
        }

        /// <summary>
        /// Returns the elements matching the constraint. When the lazy list has not
        /// been materialized yet, the filter is delegated to the finder (which may
        /// be more efficient); otherwise the cached list is filtered in memory.
        /// </summary>
        protected ArrayList DoFilter(Attribute findBy)
        {
            ArrayList returnElements;

            if (elements == null)
            {
                if (finder != null)
                {
                    returnElements = finder.FindAll(findBy);
                }
                else
                {
                    returnElements = new ArrayList();
                }
            }
            else
            {
                returnElements = new ArrayList();
                ElementAttributeBag attributeBag = new ElementAttributeBag();

                foreach (IHTMLElement element in Elements)
                {
                    attributeBag.IHTMLElement = element;

                    if (findBy.Compare(attributeBag))
                    {
                        returnElements.Add(element);
                    }
                }
            }

            return returnElements;
        }

        /// <exclude />
        public Enumerator GetEnumerator()
        {
            return new Enumerator(domContainer, Elements, createElementInstance);
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        /// <exclude />
        public class Enumerator : IEnumerator
        {
            // Snapshot of the raw element list being iterated.
            ArrayList children;
            DomContainer domContainer;
            CreateElementInstance createElementInstance;
            // Current position; -1 means "before the first element" per IEnumerator contract.
            int index;

            /// <exclude />
            public Enumerator(DomContainer domContainer, ArrayList children, CreateElementInstance createElementInstance)
            {
                this.children = children;
                this.domContainer = domContainer;
                this.createElementInstance = createElementInstance;

                Reset();
            }

            /// <exclude />
            public void Reset()
            {
                index = -1;
            }

            /// <exclude />
            public bool MoveNext()
            {
                ++index;
                return index < children.Count;
            }

            /// <exclude />
            public virtual object Current
            {
                get
                {
                    // Each access wraps the raw IHTMLElement in a fresh specialized Element.
                    return createElementInstance(domContainer, (IHTMLElement)children[index]);
                }
            }

            /// <exclude />
            object IEnumerator.Current
            {
                get { return Current; }
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.SignatureHelp;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Editor.Implementation.IntelliSense.SignatureHelp.Presentation
{
    internal partial class SignatureHelpPresenter
    {
        /// <summary>
        /// Bridges Roslyn's signature-help model to the VS editor's
        /// <see cref="ISignatureHelpBroker"/> session. Foreground-affinitized:
        /// all callbacks assert they run on the UI thread.
        /// </summary>
        private class SignatureHelpPresenterSession : ForegroundThreadAffinitizedObject, ISignatureHelpPresenterSession
        {
            private readonly ISignatureHelpBroker _sigHelpBroker;
            private readonly ITextView _textView;
            private readonly ITextBuffer _subjectBuffer;

            // Raised when the underlying editor session is dismissed.
            public event EventHandler<EventArgs> Dismissed;
            // Raised when the user selects a different signature in the editor UI.
            public event EventHandler<SignatureHelpItemEventArgs> ItemSelected;

            // Two-way mapping between Roslyn items and the editor Signature objects built for them.
            private IBidirectionalMap<SignatureHelpItem, Signature> _signatureMap;

            private IList<SignatureHelpItem> _signatureHelpItems;
            private SignatureHelpItem _selectedItem;
            // The live editor session, or null before the first PresentItems / after Dismiss.
            private ISignatureHelpSession _editorSessionOpt;
            // Re-entrancy guard: set while we programmatically change the selection
            // so OnSelectedSignatureChanged ignores the resulting callback.
            private bool _ignoreSelectionStatusChangedEvent;

            public bool EditorSessionIsActive => _editorSessionOpt?.IsDismissed == false;

            public SignatureHelpPresenterSession(
                ISignatureHelpBroker sigHelpBroker,
                ITextView textView,
                ITextBuffer subjectBuffer)
            {
                _sigHelpBroker = sigHelpBroker;
                _textView = textView;
                _subjectBuffer = subjectBuffer;
            }

            /// <summary>
            /// Shows (or refreshes) the signature-help UI for the given items,
            /// creating the editor session on first use and recalculating otherwise.
            /// </summary>
            public void PresentItems(
                ITrackingSpan triggerSpan, IList<SignatureHelpItem> signatureHelpItems,
                SignatureHelpItem selectedItem, int? selectedParameter)
            {
                _signatureHelpItems = signatureHelpItems;
                _selectedItem = selectedItem;

                // Create all the editor signatures for the sig help items we have.
                this.CreateSignatures(triggerSpan, selectedParameter);

                // It's a new list of items. Either create the editor session if this is the
                // first time, or ask the editor session that we already have to recalculate.
                if (_editorSessionOpt == null)
                {
                    // We're tracking the caret. Don't have the editor do it.
                    _editorSessionOpt = _sigHelpBroker.CreateSignatureHelpSession(
                        _textView,
                        triggerSpan.GetStartTrackingPoint(PointTrackingMode.Negative),
                        trackCaret: false);

                    // Special hookup needed for the debugger's watch/immediate-style views.
                    var debugTextView = _textView as IDebuggerTextView;
                    if (debugTextView != null && !debugTextView.IsImmediateWindow)
                    {
                        debugTextView.HACK_StartCompletionSession(_editorSessionOpt);
                    }

                    _editorSessionOpt.Dismissed += (s, e) => OnEditorSessionDismissed();
                    _editorSessionOpt.SelectedSignatureChanged += OnSelectedSignatureChanged;
                }

                // So here's the deal. We cannot create the editor session and give it the right
                // signatures (even though we know what they are). Instead, the session with
                // call back into the ISignatureHelpSourceProvider (which is us) to get those
                // values. It will pass itself along with the calls back into
                // ISignatureHelpSourceProvider. So, in order to make that connection work, we
                // add properties to the session so that we can call back into ourselves, get
                // the signatures and add it to the session.
                if (!_editorSessionOpt.Properties.ContainsProperty(s_augmentSessionKey))
                {
                    _editorSessionOpt.Properties.AddProperty(s_augmentSessionKey, this);
                }

                try
                {
                    // Don't want to get any callbacks while we do this.
                    _ignoreSelectionStatusChangedEvent = true;

                    // Recalculate will call back into AugmentSignatureHelpSession (below).
                    _editorSessionOpt.Recalculate();

                    // Now let the editor know what the currently selected item is.
                    // NOTE(review): the ThrowIfNull(_signatureMap) check runs AFTER
                    // _signatureMap.ContainsKey has already dereferenced it — the two
                    // lines look inverted; confirm intended ordering before changing.
                    Contract.Requires(_signatureMap.ContainsKey(selectedItem));
                    Contract.ThrowIfNull(_signatureMap);

                    var defaultValue = _signatureMap.GetValueOrDefault(_selectedItem);
                    // Recalculate may have dismissed the session re-entrantly; re-check.
                    if (_editorSessionOpt != null)
                    {
                        _editorSessionOpt.SelectedSignature = defaultValue;
                    }
                }
                finally
                {
                    _ignoreSelectionStatusChangedEvent = false;
                }
            }

            /// <summary>
            /// Rebuilds the item-to-Signature map for the current set of help items.
            /// </summary>
            private void CreateSignatures(
                ITrackingSpan triggerSpan, int? selectedParameter)
            {
                _signatureMap = BidirectionalMap<SignatureHelpItem, Signature>.Empty;

                foreach (var item in _signatureHelpItems)
                {
                    _signatureMap = _signatureMap.Add(item, new Signature(triggerSpan, item, GetParameterIndexForItem(item, selectedParameter)));
                }
            }

            /// <summary>
            /// Maps the caller-selected parameter index onto a given item, clamping
            /// to the variadic parameter or -1 (no current parameter) when out of range.
            /// </summary>
            private static int GetParameterIndexForItem(SignatureHelpItem item, int? selectedParameter)
            {
                if (selectedParameter.HasValue)
                {
                    if (selectedParameter.Value < item.Parameters.Length)
                    {
                        // If the selected parameter is within the range of parameters of this item then set
                        // that as the current parameter.
                        return selectedParameter.Value;
                    }
                    else if (item.IsVariadic)
                    {
                        // It wasn't in range, but the item takes an unlimited number of parameters. So
                        // just set current parameter to the last parameter (the variadic one).
                        return item.Parameters.Length - 1;
                    }
                }

                // It was out of bounds, there is no current parameter now.
                return -1;
            }

            private void OnEditorSessionDismissed()
            {
                AssertIsForeground();
                this.Dismissed?.Invoke(this, new EventArgs());
            }

            private void OnSelectedSignatureChanged(object sender, SelectedSignatureChangedEventArgs eventArgs)
            {
                AssertIsForeground();
                // Ignore the echo of our own programmatic selection change in PresentItems.
                if (_ignoreSelectionStatusChangedEvent)
                {
                    return;
                }

                // Map the editor's Signature back to the Roslyn item before notifying.
                Contract.ThrowIfFalse(_signatureMap.TryGetKey((Signature)eventArgs.NewSelectedSignature, out var helpItem));

                var helpItemSelected = this.ItemSelected;
                if (helpItemSelected != null && helpItem != null)
                {
                    helpItemSelected(this, new SignatureHelpItemEventArgs(helpItem));
                }
            }

            public void Dismiss()
            {
                AssertIsForeground();
                if (_editorSessionOpt == null)
                {
                    // No editor session, nothing to do here.
                    return;
                }

                _editorSessionOpt.Dismiss();
                _editorSessionOpt = null;
            }

            // Forwards a keyboard command to the editor session's presenter, when it
            // supports IIntellisenseCommandTarget; returns false otherwise.
            private bool ExecuteKeyboardCommand(IntellisenseKeyboardCommand command)
            {
                var target = _editorSessionOpt != null
                    ? _editorSessionOpt.Presenter as IIntellisenseCommandTarget
                    : null;

                return target != null && target.ExecuteKeyboardCommand(command);
            }

            public void SelectPreviousItem()
            {
                ExecuteKeyboardCommand(IntellisenseKeyboardCommand.Up);
            }

            public void SelectNextItem()
            {
                ExecuteKeyboardCommand(IntellisenseKeyboardCommand.Down);
            }

            // Call backs from our ISignatureHelpSourceProvider. Used to actually populate the vs
            // session.
            internal void AugmentSignatureHelpSession(IList<ISignature> signatures)
            {
                signatures.Clear();
                signatures.AddRange(_signatureHelpItems.Select(_signatureMap.GetValueOrDefault));
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using StructureMap.Configuration.DSL;
using StructureMap.Diagnostics;
using StructureMap.Graph;
using StructureMap.Pipeline;
using StructureMap.Query;
using StructureMap.TypeRules;

namespace StructureMap
{
    /// <summary>
    /// The main StructureMap IoC container: resolves configured instances,
    /// accepts post-construction configuration, and can spawn profile, child
    /// and nested containers. All resolution is routed through an IPipelineGraph.
    /// </summary>
    public class Container : IContainer
    {
        // The configured graph of plugin types and instances; nulled out on Dispose.
        private IPipelineGraph _pipelineGraph;
        // Guards Configure() — configuration changes are serialized.
        private readonly object _syncLock = new object();

        /// <summary>
        /// Convenience factory: builds a container from a single Registry type.
        /// </summary>
        public static IContainer For<T>() where T : Registry, new()
        {
            return new Container(new T());
        }

        // Build from an inline configuration expression.
        public Container(Action<ConfigurationExpression> action)
            : this(PipelineGraph.For(action))
        {
        }

        // Build from a single pre-constructed Registry.
        public Container(Registry registry)
            : this(new PluginGraphBuilder().Add(registry).Build())
        {
        }

        // Build an empty container with no explicit configuration.
        public Container() : this(new PluginGraphBuilder().Build())
        {
        }

        /// <summary>
        /// Constructor to create an Container
        /// </summary>
        /// <param name="pluginGraph">
        /// PluginGraph containing the instance and type definitions
        /// for the Container
        /// </param>
        public Container(PluginGraph pluginGraph) : this(PipelineGraph.BuildRoot(pluginGraph))
        {
        }

        internal Container(IPipelineGraph pipelineGraph)
        {
            // Name is purely a debugging aid; see the Name property below.
            Name = Guid.NewGuid().ToString();

            _pipelineGraph = pipelineGraph;
            _pipelineGraph.RegisterContainer(this);
        }

        /// <summary>
        /// Provides queryable access to the configured PluginType's and Instances of this Container
        /// </summary>
        public IModel Model
        {
            get { return _pipelineGraph.ToModel(); }
        }

        /// <summary>
        /// Creates or finds the named instance of T
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="instanceKey"></param>
        /// <returns></returns>
        public T GetInstance<T>(string instanceKey)
        {
            return (T) GetInstance(typeof (T), instanceKey);
        }

        /// <summary>
        /// Creates a new instance of the requested type T using the supplied Instance. Mostly used internally
        /// </summary>
        /// <param name="instance"></param>
        /// <returns></returns>
        public T GetInstance<T>(Instance instance)
        {
            return (T) GetInstance(typeof (T), instance);
        }

        /// <summary>
        /// Gets the default instance of the pluginType using the explicitly configured arguments from the "args"
        /// </summary>
        /// <param name="args"></param>
        /// <returns></returns>
        public TPluginType GetInstance<TPluginType>(ExplicitArguments args)
        {
            return (TPluginType) GetInstance(typeof (TPluginType), args);
        }

        /// <summary>
        /// Gets the default instance of T, but built with the overridden
        /// arguments from args
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="args"></param>
        /// <param name="name"></param>
        /// <returns></returns>
        public T GetInstance<T>(ExplicitArguments args, string name)
        {
            return (T) GetInstance(typeof (T), args, name);
        }

        /// <summary>
        /// Gets the default instance of the pluginType using the explicitly configured arguments from the "args"
        /// </summary>
        /// <param name="pluginType"></param>
        /// <param name="args"></param>
        /// <returns></returns>
        public object GetInstance(Type pluginType, ExplicitArguments args)
        {
            try
            {
                var defaultInstance = _pipelineGraph.Instances.GetDefault(pluginType);
                var requestedName = BuildSession.DEFAULT;

                return buildInstanceWithArgs(pluginType, defaultInstance, args, requestedName);
            }
            catch (StructureMapException e)
            {
                // StructureMap convention: push the failing call onto the
                // exception's context stack, then rethrow the same exception.
                e.Push("Container.GetInstance({0} ,{1})", pluginType.GetFullName(), args);
                throw;
            }
        }

        /// <summary>
        /// Gets the named instance of the pluginType using the explicitly configured arguments from the "args"
        /// </summary>
        /// <param name="pluginType"></param>
        /// <param name="args"></param>
        /// <param name="name"></param>
        /// <returns></returns>
        public object GetInstance(Type pluginType, ExplicitArguments args, string name)
        {
            try
            {
                var namedInstance = _pipelineGraph.Instances.FindInstance(pluginType, name);
                return buildInstanceWithArgs(pluginType, namedInstance, args, name);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetInstance<{0}>({1}, '{2}')", pluginType.GetFullName(), args, name);
                throw;
            }
        }

        /// <summary>
        /// Gets all configured instances of the type using explicitly configured arguments from the "args"
        /// </summary>
        /// <param name="type"></param>
        /// <param name="args"></param>
        /// <returns></returns>
        public IEnumerable GetAllInstances(Type type, ExplicitArguments args)
        {
            try
            {
                var session = new BuildSession(_pipelineGraph, BuildSession.DEFAULT, args);
                return session.GetAllInstances(type);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetAllInstances({0}, {1})", type.GetFullName(), args);
                throw;
            }
        }

        /// <summary>
        /// Gets all configured instances of type T using the explicitly configured arguments from the "args"
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="args"></param>
        /// <returns></returns>
        public IEnumerable<T> GetAllInstances<T>(ExplicitArguments args)
        {
            try
            {
                var session = new BuildSession(_pipelineGraph, BuildSession.DEFAULT, args);
                return session.GetAllInstances<T>();
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetAllInstances<{0}>({1})", typeof (T).GetFullName(), args);
                throw;
            }
        }

        /// <summary>
        /// Creates or finds the default instance of type T
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <returns></returns>
        public T GetInstance<T>()
        {
            return (T) GetInstance(typeof (T));
        }

        /// <summary>
        /// Creates or resolves all registered instances of type T
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <returns></returns>
        public IEnumerable<T> GetAllInstances<T>()
        {
            try
            {
                var session = new BuildSession(_pipelineGraph);
                return session.GetAllInstances<T>();
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetAllInstances<{0}>()", typeof (T).GetFullName());
                throw;
            }
        }

        /// <summary>
        /// Creates or finds the named instance of the pluginType
        /// </summary>
        /// <param name="pluginType"></param>
        /// <param name="instanceKey"></param>
        /// <returns></returns>
        public object GetInstance(Type pluginType, string instanceKey)
        {
            try
            {
                return new BuildSession(_pipelineGraph, instanceKey).CreateInstance(pluginType, instanceKey);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetInstance({0}, '{1}')", pluginType.GetFullName(), instanceKey);
                throw;
            }
        }

        /// <summary>
        /// Creates or finds the named instance of the pluginType. Returns null if the named instance is not known to the container.
        /// </summary>
        /// <param name="pluginType"></param>
        /// <param name="instanceKey"></param>
        /// <returns></returns>
        public object TryGetInstance(Type pluginType, string instanceKey)
        {
            try
            {
                return !_pipelineGraph.Instances.HasInstance(pluginType, instanceKey)
                    ? null
                    : GetInstance(pluginType, instanceKey);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.TryGetInstance({0}, '{1}')", pluginType.GetFullName(), instanceKey);
                throw;
            }
        }

        /// <summary>
        /// Creates or finds the default instance of the pluginType. Returns null if the pluginType is not known to the container.
        /// </summary>
        /// <param name="pluginType"></param>
        /// <returns></returns>
        public object TryGetInstance(Type pluginType)
        {
            try
            {
                return !_pipelineGraph.Instances.HasDefaultForPluginType(pluginType)
                    ? null
                    : GetInstance(pluginType);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.TryGetInstance({0})", pluginType.GetFullName());
                throw;
            }
        }

        /// <summary>
        /// Creates or finds the default instance of type T. Returns the default value of T if it is not known to the container.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <returns></returns>
        public T TryGetInstance<T>()
        {
            return (T) (TryGetInstance(typeof (T)) ?? default(T));
        }

        /// <summary>
        /// The "BuildUp" method takes in an already constructed object
        /// and uses Setter Injection to push in configured dependencies
        /// of that object
        /// </summary>
        /// <param name="target"></param>
        public void BuildUp(object target)
        {
            try
            {
                new BuildSession(_pipelineGraph).BuildUp(target);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.BuildUp({0})", target);
                throw;
            }
        }

        /// <summary>
        /// Creates or finds the named instance of type T. Returns the default value of T if the named instance is not known to the container.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <returns></returns>
        public T TryGetInstance<T>(string instanceKey)
        {
            return (T) (TryGetInstance(typeof (T), instanceKey) ?? default(T));
        }

        /// <summary>
        /// Creates or finds the default instance of the pluginType
        /// </summary>
        /// <param name="pluginType"></param>
        /// <returns></returns>
        public object GetInstance(Type pluginType)
        {
            try
            {
                return new BuildSession(_pipelineGraph).GetInstance(pluginType);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetInstance({0})", pluginType.GetFullName());
                throw;
            }
        }

        /// <summary>
        /// Creates a new instance of the requested type using the supplied Instance. Mostly used internally
        /// </summary>
        /// <param name="pluginType"></param>
        /// <param name="instance"></param>
        /// <returns></returns>
        public object GetInstance(Type pluginType, Instance instance)
        {
            try
            {
                var session = new BuildSession(_pipelineGraph, instance.Name)
                {
                    RootType = instance.ReturnedType
                };
                return session.FindObject(pluginType, instance);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetInstance({0}, Instance: {1})", pluginType.GetFullName(), instance.Description);
                throw;
            }
        }

        /// <summary>
        /// Creates or resolves all registered instances of the pluginType
        /// </summary>
        /// <param name="pluginType"></param>
        /// <returns></returns>
        public IEnumerable GetAllInstances(Type pluginType)
        {
            try
            {
                return new BuildSession(_pipelineGraph).GetAllInstances(pluginType);
            }
            catch (StructureMapException e)
            {
                e.Push("Container.GetAllInstances({0})", pluginType.GetFullName());
                throw;
            }
        }

        /// <summary>
        /// Used to add additional configuration to a Container *after* the initialization.
        /// </summary>
        /// <param name="configure"></param>
        public void Configure(Action<ConfigurationExpression> configure)
        {
            lock (_syncLock)
            {
                _pipelineGraph.Configure(configure);

                // Correct the Singleton lifecycle for child containers
                if (Role == ContainerRole.ProfileOrChild)
                {
                    var singletons = _pipelineGraph.Instances.ImmediateInstances()
                        .Where(x => x.Lifecycle is SingletonLifecycle);

                    singletons
                        .Each(x => x.SetLifecycleTo<ContainerLifecycle>());

                    _pipelineGraph.Instances.ImmediatePluginGraph.Families.Where(x => x.Lifecycle is SingletonLifecycle)
                        .Each(x => x.SetLifecycleTo<ContainerLifecycle>());
                }

                // Nested containers have extra scoping rules to enforce.
                if (Role == ContainerRole.Nested)
                {
                    _pipelineGraph.ValidateValidNestedScoping();
                }
            }
        }

        /// <summary>
        /// Get the child container for the named profile
        /// </summary>
        /// <param name="profileName"></param>
        /// <returns></returns>
        public IContainer GetProfile(string profileName)
        {
            var pipeline = _pipelineGraph.Profiles.For(profileName);
            return new Container(pipeline);
        }

        /// <summary>
        /// Creates a new, anonymous child container
        /// </summary>
        /// <returns></returns>
        public IContainer CreateChildContainer()
        {
            var pipeline = _pipelineGraph.Profiles.NewChild(_pipelineGraph.Instances.ImmediatePluginGraph);
            return new Container(pipeline);
        }

        /// <summary>
        /// The profile name of this container
        /// </summary>
        public string ProfileName
        {
            get { return _pipelineGraph.Profile; }
        }

        /// <summary>
        /// Returns a report detailing the complete configuration of all PluginTypes and Instances
        /// </summary>
        /// <param name="pluginType">Optional parameter to filter the results down to just this plugin type</param>
        /// <param name="assembly">Optional parameter to filter the results down to only plugin types from this Assembly</param>
        /// <param name="namespace">Optional parameter to filter the results down to only plugin types from this namespace</param>
        /// <param name="typeName">Optional parameter to filter the results down to any plugin type whose name contains this text</param>
        public string WhatDoIHave(Type pluginType = null, Assembly assembly = null, string @namespace = null,
            string typeName = null)
        {
            var writer = new WhatDoIHaveWriter(_pipelineGraph);
            return writer.GetText(new ModelQuery
            {
                Assembly = assembly,
                Namespace = @namespace,
                PluginType = pluginType,
                TypeName = typeName
            });
        }

        /// <summary>
        /// Starts a request for an instance or instances with explicitly configured arguments. Specifies that any dependency
        /// of type T should be "arg"
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="arg"></param>
        /// <returns></returns>
        public ExplicitArgsExpression With<T>(T arg)
        {
            return new ExplicitArgsExpression(this).With(arg);
        }

        /// <summary>
        /// Starts a request for an instance or instances with explicitly configured arguments. Specifies that any dependency
        /// of type T should be "arg"
        /// </summary>
        /// <param name="pluginType"></param>
        /// <param name="arg"></param>
        /// <returns></returns>
        public ExplicitArgsExpression With(Type pluginType, object arg)
        {
            return new ExplicitArgsExpression(this).With(pluginType, arg);
        }

        /// <summary>
        /// Starts a request for an instance or instances with explicitly configured arguments. Specifies that any dependency or primitive argument
        /// with the designated name should be the next value.
        /// </summary>
        /// <param name="argName"></param>
        /// <returns></returns>
        public IExplicitProperty With(string argName)
        {
            return new ExplicitArgsExpression(this).With(argName);
        }

        /// <summary>
        /// Use with caution! Does a full environment test of the configuration of this container. Will try to create every configured
        /// instance and afterward calls any methods marked with the [ValidationMethod] attribute
        /// </summary>
        public void AssertConfigurationIsValid()
        {
            PipelineGraphValidator.AssertNoErrors(_pipelineGraph);
        }

        /// <summary>
        /// Removes all configured instances of type T from the Container. Use with caution!
        /// </summary>
        /// <typeparam name="T"></typeparam>
        public void EjectAllInstancesOf<T>()
        {
            _pipelineGraph.Ejector.EjectAllInstancesOf<T>();
        }

        /// <summary>
        /// Convenience method to request an object using an Open Generic
        /// Type and its parameter Types
        /// </summary>
        /// <param name="templateType"></param>
        /// <returns></returns>
        /// <example>
        /// IFlattener flattener1 = container.ForGenericType(typeof (IFlattener&lt;&gt;))
        /// .WithParameters(typeof (Address)).GetInstanceAs&lt;IFlattener&gt;();
        /// </example>
        public OpenGenericTypeExpression ForGenericType(Type templateType)
        {
            return new OpenGenericTypeExpression(templateType, this);
        }

        /// <summary>
        /// Shortcut syntax for using an object to find a service that handles
        /// that type of object by using an open generic type
        /// </summary>
        /// <example>
        /// IHandler handler = container.ForObject(shipment)
        /// .GetClosedTypeOf(typeof (IHandler<>))
        /// .As<IHandler>();
        /// </example>
        /// <param name="subject"></param>
        /// <returns></returns>
        public CloseGenericTypeExpression ForObject(object subject)
        {
            return new CloseGenericTypeExpression(subject, this);
        }

        /// <summary>
        /// Starts a "Nested" Container for atomic, isolated access
        /// </summary>
        /// <returns></returns>
        public IContainer GetNestedContainer()
        {
            var pipeline = _pipelineGraph.ToNestedGraph();
            return GetNestedContainer(pipeline);
        }

        /// <summary>
        /// Starts a new "Nested" Container for atomic, isolated service location,
        /// based on the named profile.
        /// </summary>
        /// <param name="profileName"></param>
        /// <returns></returns>
        public IContainer GetNestedContainer(string profileName)
        {
            var pipeline = _pipelineGraph.Profiles.For(profileName).ToNestedGraph();
            return GetNestedContainer(pipeline);
        }

        // Shared construction for both nested-container overloads; the name
        // prefix is only a debugging aid.
        private IContainer GetNestedContainer(IPipelineGraph pipeline)
        {
            var container = new Container(pipeline)
            {
                Name = "Nested-" + Name
            };

            return container;
        }

        // Latch so Dispose is idempotent: a second call is a no-op.
        private bool _disposedLatch;

        public void Dispose()
        {
            if (_disposedLatch) return;
            _disposedLatch = true;

            _pipelineGraph.SafeDispose();
            _pipelineGraph = null;
        }

        /// <summary>
        /// The name of the container. By default this is set to
        /// a random Guid. This is a convience property to
        /// assist with debugging. Feel free to set to anything,
        /// as this is not used in any logic.
        /// </summary>
        public string Name { get; set; }

        /// <summary>
        /// Injects the given object into a Container as the default for the designated
        /// TPluginType. Mostly used for temporarily setting up return values of the Container
        /// to introduce mocks or stubs during automated testing scenarios
        /// </summary>
        /// <typeparam name="TPluginType"></typeparam>
        /// <param name="instance"></param>
        public void Inject<TPluginType>(TPluginType instance) where TPluginType : class
        {
            Configure(x => x.For<TPluginType>().Use(instance));
        }

        /// <summary>
        /// Injects the given object into a Container as the default for the designated
        /// pluginType. Mostly used for temporarily setting up return values of the Container
        /// to introduce mocks or stubs during automated testing scenarios
        /// </summary>
        public void Inject(Type pluginType, object @object)
        {
            Configure(x => x.For(pluginType).Use(@object));
        }

        // Shared resolution path for the ExplicitArguments overloads: falls back
        // to an on-the-fly ConfiguredInstance for unregistered concrete types,
        // applies the argument overrides, and builds through a fresh session.
        private object buildInstanceWithArgs(Type pluginType, Instance defaultInstance, ExplicitArguments args,
            string requestedName)
        {
            if (defaultInstance == null && pluginType.IsConcrete())
            {
                defaultInstance = new ConfiguredInstance(pluginType);
            }

            var basicInstance = defaultInstance as IConfiguredInstance;

            var instance = basicInstance == null
                ? defaultInstance
                : basicInstance.Override(args);

            if (instance == null)
            {
                throw new StructureMapConfigurationException("No default instance or named instance '{0}' for requested plugin type {1}", requestedName, pluginType.GetFullName());
            }

            var session = new BuildSession(_pipelineGraph, requestedName, args)
            {
                RootType = instance.ReturnedType
            };

            return session.FindObject(pluginType, instance);
        }

        /// <summary>
        /// Starts a request for an instance or instances with explicitly configured
        /// arguments
        /// </summary>
        /// <param name="action"></param>
        /// <returns></returns>
        public ExplicitArgsExpression With(Action<IExplicitArgsExpression> action)
        {
            var expression = new ExplicitArgsExpression(this);
            action(expression);

            return expression;
        }

        /// <summary>
        /// Sets the default instance for the PluginType
        /// </summary>
        /// <param name="pluginType"></param>
        /// <param name="instance"></param>
        public void Inject(Type pluginType, Instance instance)
        {
            Configure(x => x.For(pluginType).Use(instance));
        }

        /// <summary>
        /// Is this container the root, a profile or child, or a nested container?
        /// </summary>
        public ContainerRole Role
        {
            get { return _pipelineGraph.Role; }
        }

        #region Nested type: GetInstanceAsExpression

        public interface GetInstanceAsExpression
        {
            T GetInstanceAs<T>();
        }

        #endregion

        #region Nested type: OpenGenericTypeExpression

        /// <summary>
        /// Fluent helper that closes an open generic template type over supplied
        /// parameter types and resolves the closed type from the container.
        /// </summary>
        public class OpenGenericTypeExpression : GetInstanceAsExpression
        {
            private readonly Container _container;
            private readonly Type _templateType;
            // Set by WithParameters; the closed generic type to resolve.
            private Type _pluginType;

            public OpenGenericTypeExpression(Type templateType, Container container)
            {
                if (!templateType.IsOpenGeneric())
                {
                    throw new StructureMapConfigurationException(
                        "Type '{0}' is not an open generic type".ToFormat(templateType.GetFullName()));
                }

                _templateType = templateType;
                _container = container;
            }

            #region GetInstanceAsExpression Members

            public T GetInstanceAs<T>()
            {
                return (T) _container.GetInstance(_pluginType);
            }

            #endregion

            public GetInstanceAsExpression WithParameters(params Type[] parameterTypes)
            {
                _pluginType = _templateType.MakeGenericType(parameterTypes);
                return this;
            }
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;

namespace AsterNET.Manager.Response
{
    /// <summary>
    /// Represents a response received from the Asterisk server as the result of a
    /// previously sent ManagerAction.<br />
    /// The response can be linked with the action that caused it by looking the
    /// action id attribute that will match the action id of the corresponding
    /// action.
    /// </summary>
    public class ManagerResponse : IParseSupport
    {
        private string actionId;
        // Unknown (setter-less) keys collected by Parse; stays null until the
        // first unknown key arrives.
        protected Dictionary<string, string> attributes;
        private DateTime dateReceived;
        private string message;
        private string privilege;
        private string response;
        private string server;
        private string uniqueId;

        #region Constructor - ManagerEvent()

        public ManagerResponse()
        {
            this.dateReceived = DateTime.Now;
        }

        public ManagerResponse(Dictionary<string, string> attributes)
            : this()
        {
            Helper.SetAttributes(this, attributes);
        }

        #endregion

        #region Attributes

        /// <summary>
        /// Store all unknown (without setter) keys from manager event.<br />
        /// Use in default Parse method <see cref="Parse(string key, string value)" />.
        /// </summary>
        public Dictionary<string, string> Attributes
        {
            get { return attributes; }
        }

        #endregion

        #region Server

        /// <summary>
        /// Specify a server to which to send your commands (x.x.x.x or hostname).<br />
        /// This should match the server name specified in your config file's "host" entry.
        /// If you do not specify a server, the proxy will pick the first one it finds -- fine in single-server configurations.
        /// </summary>
        public string Server
        {
            get { return this.server; }
            set { this.server = value; }
        }

        #endregion

        #region DateReceived

        /// <summary>
        /// Get/Set the point in time this response was received from the asterisk server.
        /// </summary>
        public DateTime DateReceived
        {
            get { return dateReceived; }
            set { this.dateReceived = value; }
        }

        #endregion

        #region Privilege

        /// <summary>
        /// Get/Set the AMI authorization class of this event.<br />
        /// This is one or more of system, call, log, verbose, command, agent or user.
        /// Multiple privileges are separated by comma.<br />
        /// Note: This property is not available from Asterisk 1.0 servers.
        /// </summary>
        public string Privilege
        {
            get { return privilege; }
            set { this.privilege = value; }
        }

        #endregion

        #region ActionId

        /// <summary>
        /// Get/Set the action id received with this response referencing the action that generated this response.
        /// </summary>
        public string ActionId
        {
            get { return actionId; }
            set { this.actionId = value; }
        }

        #endregion

        #region Message

        /// <summary>
        /// Get/Set the message received with this response.<br />
        /// The content depends on the action that generated this response.
        /// </summary>
        public string Message
        {
            get { return message; }
            set { this.message = value; }
        }

        #endregion

        #region Response

        /// <summary>
        /// Get/Set the value of the "Response:" line.<br />
        /// This typically a String like "Success" or "Error" but depends on the action that generated this response.
        /// </summary>
        public string Response
        {
            get { return response; }
            set { this.response = value; }
        }

        #endregion

        #region UniqueId

        /// <summary>
        /// Get/Set the unique id received with this response.<br />
        /// The unique id is used to keep track of channels created by the action sent, for example an OriginateAction.
        /// </summary>
        public string UniqueId
        {
            get { return uniqueId; }
            set { this.uniqueId = value; }
        }

        #endregion

        #region IsSuccess()

        /// <summary>
        /// Return true if Response is success
        /// </summary>
        /// <returns></returns>
        public bool IsSuccess()
        {
            return response == "Success";
        }

        #endregion

        #region GetAttribute(string key)

        /// <summary>
        /// Returns the value of the attribute with the given key.<br />
        /// This is particulary important when a response contains special
        /// attributes that are dependent on the action that has been sent.<br />
        /// An example of this is the response to the GetVarAction.
        /// It contains the value of the channel variable as an attribute
        /// stored under the key of the variable name.<br />
        /// Example:
        /// <pre>
        /// GetVarAction action = new GetVarAction();
        /// action.setChannel("SIP/1310-22c3");
        /// action.setVariable("ALERT_INFO");
        /// ManagerResponse response = connection.SendAction(action);
        /// String alertInfo = response.getAttribute("ALERT_INFO");
        /// </pre>
        /// As all attributes are internally stored in lower case the key is
        /// automatically converted to lower case before lookup.
        /// </summary>
        /// <param name="key">the key to lookup.</param>
        /// <returns>
        /// the value of the attribute stored under this key or
        /// null if there is no such attribute.
        /// </returns>
        public string GetAttribute(string key)
        {
            // BUG FIX: the previous implementation used the Dictionary indexer
            // ("(string) attributes[...]"), a leftover from a non-generic Hashtable.
            // With Dictionary<string, string> that throws KeyNotFoundException for a
            // missing key (and NullReferenceException when no unknown attributes were
            // ever parsed), contradicting the documented contract of returning null.
            if (attributes == null)
                return null;

            string value;
            return attributes.TryGetValue(key.ToLower(Helper.CultureInfo), out value) ? value : null;
        }

        #endregion

        #region Parse(string key, string value)

        /// <summary>
        /// Unknown properties parser
        /// </summary>
        /// <param name="key">key name</param>
        /// <param name="value">key value</param>
        /// <returns>true - value parsed, false - can't parse value</returns>
        public virtual bool Parse(string key, string value)
        {
            if (attributes == null)
                attributes = new Dictionary<string, string>();

            if (attributes.ContainsKey(key))
                // Key already presents, add with delimiter
                attributes[key] += string.Concat(Common.LINE_SEPARATOR, value);
            else
                attributes.Add(key, value);

            return true;
        }

        #endregion

        #region ParseSpecial(Dictionary<string, string> attributes)

        /// <summary>
        /// Unknown properties parser
        /// </summary>
        /// <param name="attributes">dictionary</param>
        /// <returns>updated dictionary</returns>
        public virtual Dictionary<string, string> ParseSpecial(Dictionary<string, string> attributes)
        {
            return attributes;
        }

        #endregion

        #region ToString()

        public override string ToString()
        {
            return Helper.ToString(this);
        }

        #endregion
    }
}
/*
 * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the swf-2012-01-25.normal.json service model.
 */
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;

using Amazon.Runtime;
using Amazon.Runtime.Internal;

namespace Amazon.SimpleWorkflow.Model
{
    /// <summary>
    /// Container for the parameters to the RegisterWorkflowType operation.
    /// Registers a new <i>workflow type</i> and its configuration settings in the
    /// specified domain.
    ///
    /// <para>
    /// The retention period for the workflow history is set by the <a>RegisterDomain</a>
    /// action.
    /// </para>
    /// <important>If the type already exists, then a <code>TypeAlreadyExists</code> fault
    /// is returned. You cannot change the configuration settings of a workflow type once
    /// it is registered and it must be registered as a new version.</important>
    /// <para>
    /// <b>Access Control</b>: you can use IAM policies to control access to this action —
    /// use a <code>Resource</code> element with the domain name to limit the action to
    /// specified domains, an <code>Action</code> element to allow or deny permission to
    /// call it, and a <code>Condition</code> element to constrain the
    /// <code>swf:defaultTaskList.name</code>, <code>swf:name</code> and
    /// <code>swf:version</code> keys. If the caller lacks sufficient permissions, or a
    /// parameter value falls outside its constraints, the action fails and the associated
    /// event attribute's <b>cause</b> parameter is set to OPERATION_NOT_PERMITTED. For
    /// details and example IAM policies, see
    /// <a href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
    /// IAM to Manage Access to Amazon SWF Workflows</a>.
    /// </para>
    /// </summary>
    public partial class RegisterWorkflowTypeRequest : AmazonSimpleWorkflowRequest
    {
        private ChildPolicy _defaultChildPolicy;
        private string _defaultExecutionStartToCloseTimeout;
        private string _defaultLambdaRole;
        private TaskList _defaultTaskList;
        private string _defaultTaskPriority;
        private string _defaultTaskStartToCloseTimeout;
        private string _description;
        private string _domain;
        private string _name;
        private string _version;

        /// <summary>
        /// Gets and sets the property DefaultChildPolicy.
        /// <para>
        /// If set, the default policy applied to child workflow executions when a workflow
        /// execution of this type is terminated, either explicitly via
        /// <a>TerminateWorkflowExecution</a> or due to an expired timeout. Can be overridden
        /// when starting an execution via <a>StartWorkflowExecution</a> or the
        /// <code>StartChildWorkflowExecution</code> decision.
        /// </para>
        /// <para>
        /// Supported policies: <b>TERMINATE</b> (terminate the child executions),
        /// <b>REQUEST_CANCEL</b> (record a <code>WorkflowExecutionCancelRequested</code>
        /// event in each child's history; the decider takes appropriate action), and
        /// <b>ABANDON</b> (no action; children continue to run).
        /// </para>
        /// </summary>
        public ChildPolicy DefaultChildPolicy
        {
            get { return _defaultChildPolicy; }
            set { _defaultChildPolicy = value; }
        }

        // True when DefaultChildPolicy has been assigned.
        internal bool IsSetDefaultChildPolicy()
        {
            return _defaultChildPolicy != null;
        }

        /// <summary>
        /// Gets and sets the property DefaultExecutionStartToCloseTimeout.
        /// <para>
        /// If set, the default maximum duration for executions of this workflow type,
        /// overridable via <a>StartWorkflowExecution</a> or the
        /// <code>StartChildWorkflowExecution</code> decision.
        /// </para>
        /// <para>
        /// Specified in seconds; an integer greater than or equal to 0. Unlike other Amazon
        /// SWF timeouts, "NONE" cannot be used here — there is a one-year maximum on the time
        /// a workflow execution can run, and exceeding it always times the execution out.
        /// </para>
        /// </summary>
        public string DefaultExecutionStartToCloseTimeout
        {
            get { return _defaultExecutionStartToCloseTimeout; }
            set { _defaultExecutionStartToCloseTimeout = value; }
        }

        // True when DefaultExecutionStartToCloseTimeout has been assigned.
        internal bool IsSetDefaultExecutionStartToCloseTimeout()
        {
            return _defaultExecutionStartToCloseTimeout != null;
        }

        /// <summary>
        /// Gets and sets the property DefaultLambdaRole.
        /// <para>
        /// The ARN of the default IAM role used when a workflow execution of this type
        /// invokes AWS Lambda functions. Overridable via <a>StartWorkflowExecution</a> or
        /// the <code>StartChildWorkflowExecution</code> and
        /// <code>ContinueAsNewWorkflowExecution</code> decisions.
        /// </para>
        /// </summary>
        public string DefaultLambdaRole
        {
            get { return _defaultLambdaRole; }
            set { _defaultLambdaRole = value; }
        }

        // True when DefaultLambdaRole has been assigned.
        internal bool IsSetDefaultLambdaRole()
        {
            return _defaultLambdaRole != null;
        }

        /// <summary>
        /// Gets and sets the property DefaultTaskList.
        /// <para>
        /// If set, the default task list used for scheduling decision tasks for executions
        /// of this workflow type. Used only when no task list is provided when starting the
        /// execution through <a>StartWorkflowExecution</a> or the
        /// <code>StartChildWorkflowExecution</code> decision.
        /// </para>
        /// </summary>
        public TaskList DefaultTaskList
        {
            get { return _defaultTaskList; }
            set { _defaultTaskList = value; }
        }

        // True when DefaultTaskList has been assigned.
        internal bool IsSetDefaultTaskList()
        {
            return _defaultTaskList != null;
        }

        /// <summary>
        /// Gets and sets the property DefaultTaskPriority.
        /// <para>
        /// The default task priority for the workflow type; "0" when not assigned. Valid
        /// values are integers from Java's <code>Integer.MIN_VALUE</code> (-2147483648) to
        /// <code>Integer.MAX_VALUE</code> (2147483647); higher numbers indicate higher
        /// priority.
        /// </para>
        /// <para>
        /// For more information about setting task priority, see
        /// <a href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/programming-priority.html">Setting
        /// Task Priority</a> in the <i>Amazon Simple Workflow Developer Guide</i>.
        /// </para>
        /// </summary>
        public string DefaultTaskPriority
        {
            get { return _defaultTaskPriority; }
            set { _defaultTaskPriority = value; }
        }

        // True when DefaultTaskPriority has been assigned.
        internal bool IsSetDefaultTaskPriority()
        {
            return _defaultTaskPriority != null;
        }

        /// <summary>
        /// Gets and sets the property DefaultTaskStartToCloseTimeout.
        /// <para>
        /// If set, the default maximum duration of decision tasks for this workflow type,
        /// overridable via <a>StartWorkflowExecution</a> or the
        /// <code>StartChildWorkflowExecution</code> decision.
        /// </para>
        /// <para>
        /// Specified in seconds; an integer greater than or equal to 0. The value "NONE"
        /// can be used to specify unlimited duration.
        /// </para>
        /// </summary>
        public string DefaultTaskStartToCloseTimeout
        {
            get { return _defaultTaskStartToCloseTimeout; }
            set { _defaultTaskStartToCloseTimeout = value; }
        }

        // True when DefaultTaskStartToCloseTimeout has been assigned.
        internal bool IsSetDefaultTaskStartToCloseTimeout()
        {
            return _defaultTaskStartToCloseTimeout != null;
        }

        /// <summary>
        /// Gets and sets the property Description.
        /// <para>
        /// Textual description of the workflow type.
        /// </para>
        /// </summary>
        public string Description
        {
            get { return _description; }
            set { _description = value; }
        }

        // True when Description has been assigned.
        internal bool IsSetDescription()
        {
            return _description != null;
        }

        /// <summary>
        /// Gets and sets the property Domain.
        /// <para>
        /// The name of the domain in which to register the workflow type.
        /// </para>
        /// </summary>
        public string Domain
        {
            get { return _domain; }
            set { _domain = value; }
        }

        // True when Domain has been assigned.
        internal bool IsSetDomain()
        {
            return _domain != null;
        }

        /// <summary>
        /// Gets and sets the property Name.
        /// <para>
        /// The name of the workflow type.
        /// </para>
        /// <para>
        /// The specified string must not start or end with whitespace. It must not contain
        /// a <code>:</code> (colon), <code>/</code> (slash), <code>|</code> (vertical bar),
        /// or any control characters (\u0000-\u001f | \u007f - \u009f). Also, it must not
        /// contain the literal string quotarnquot.
        /// </para>
        /// </summary>
        public string Name
        {
            get { return _name; }
            set { _name = value; }
        }

        // True when Name has been assigned.
        internal bool IsSetName()
        {
            return _name != null;
        }

        /// <summary>
        /// Gets and sets the property Version.
        /// <para>
        /// The version of the workflow type.
        /// </para>
        /// <note>The workflow type consists of the name and version, the combination of
        /// which must be unique within the domain. To get a list of all currently registered
        /// workflow types, use the <a>ListWorkflowTypes</a> action.</note>
        /// <para>
        /// The specified string must not start or end with whitespace. It must not contain
        /// a <code>:</code> (colon), <code>/</code> (slash), <code>|</code> (vertical bar),
        /// or any control characters (\u0000-\u001f | \u007f - \u009f). Also, it must not
        /// contain the literal string quotarnquot.
        /// </para>
        /// </summary>
        public string Version
        {
            get { return _version; }
            set { _version = value; }
        }

        // True when Version has been assigned.
        internal bool IsSetVersion()
        {
            return _version != null;
        }
    }
}
using System;
using System.Data;
using System.Data.OleDb;
using System.Reflection;
using System.Collections;
using System.Collections.Specialized;
using System.Threading;
using PCSUtils;
using Utils = PCSComUtils.DataAccess.Utils;
using PCSUtils.Utils;
using C1.Win.C1Preview;

namespace InventoryStatusReport
{
	/// <summary>
	/// Dynamic report plugin that renders the "Inventory Status" report
	/// through the PCS report framework (<see cref="IDynamicReport"/>).
	/// </summary>
	[Serializable]
	public class InventoryStatusReport : MarshalByRefObject, IDynamicReport
	{
		#region IDynamicReport Implementation

		private string mConnectionString;
		private ReportBuilder mReportBuilder = new ReportBuilder();
		private C1PrintPreviewControl mReportViewer;
		private object mResult;
		private bool mUseReportViewerRenderEngine = false;
		private string mstrReportDefinitionFolder = string.Empty;

		/// <summary>
		/// ConnectionString, provided for the Dynamic Report.
		/// Allows the Dynamic Report to access the PCS database.
		/// </summary>
		public string PCSConnectionString
		{
			get { return mConnectionString; }
			set { mConnectionString = value; }
		}

		/// <summary>
		/// Report Builder utility object.
		/// The Dynamic Report can use this object to render, modify and lay out the report.
		/// </summary>
		public ReportBuilder PCSReportBuilder
		{
			get { return mReportBuilder; }
			set { mReportBuilder = value; }
		}

		/// <summary>
		/// ReportViewer object, provided for the Dynamic Report.
		/// Allows the Dynamic Report to manipulate the report viewer and
		/// modify the report after it has been rendered, if needed.
		/// </summary>
		public C1PrintPreviewControl PCSReportViewer
		{
			get { return mReportViewer; }
			set { mReportViewer = value; }
		}

		/// <summary>
		/// Stores any other result. Usually the returned DataTable is stored here
		/// to be displayed on the ReportViewer form's grid.
		/// </summary>
		public object Result
		{
			get { return mResult; }
			set { mResult = value; }
		}

		/// <summary>
		/// Notifies PCS whether the report-rendering process is run by this
		/// IDynamicReport or by the ReportViewer engine (in the ReportViewer form).
		/// </summary>
		public bool UseReportViewerRenderEngine
		{
			get { return mUseReportViewerRenderEngine; }
			set { mUseReportViewerRenderEngine = value; }
		}

		/// <summary>
		/// Informs the external process where to find the report layout
		/// (the PCS ReportDefinition folder path).
		/// </summary>
		public string ReportDefinitionFolder
		{
			get { return mstrReportDefinitionFolder; }
			set { mstrReportDefinitionFolder = value; }
		}

		private string mstrReportLayoutFile = string.Empty;

		/// <summary>
		/// Informs the external process which layout file PCS instructs it to use.
		/// (PCS assigns this property while the ReportViewer form executes, using the
		/// layout file from the report config entry.)
		/// </summary>
		public string ReportLayoutFile
		{
			get { return mstrReportLayoutFile; }
			set { mstrReportLayoutFile = value; }
		}

		/// <summary>
		/// Invokes the named public method of this class by reflection
		/// (generic entry point used by the PCS dynamic-report host).
		/// </summary>
		/// <param name="pstrMethod">name of the method to invoke</param>
		/// <param name="pobjParameters">parameters passed to the method</param>
		/// <returns>whatever the invoked method returns</returns>
		public object Invoke(string pstrMethod, object[] pobjParameters)
		{
			return this.GetType().InvokeMember(pstrMethod, BindingFlags.InvokeMethod, null, this, pobjParameters);
		}

		#endregion PCSComUtils.Common.BO.UtilsBO

		private PCSComUtils.Common.BO.UtilsBO objUtilBO = new PCSComUtils.Common.BO.UtilsBO();

		public InventoryStatusReport()
		{
		}

		/// <summary>
		/// Thachnn: 28/10/2005. Previews the "Inventory Status" report using the
		/// "InventoryStatusReport.xml" layout and shows it in a print-preview dialog.
		/// </summary>
		/// <history>Thachnn: 29/12/2005: Add parameter display to the report. Change USECASE.</history>
		/// <param name="pstrCCNID">CCN id (integer as string)</param>
		/// <param name="pstrMasterLocationID">master location id (integer as string)</param>
		/// <param name="pstrLocationID">location id; blank means all locations</param>
		/// <param name="pstrCategoryID">category id; blank means all categories</param>
		/// <param name="pstrParameterModel">model filter; blank means all models</param>
		/// <returns>the report's source data table (also stored in <see cref="Result"/>)</returns>
		public DataTable ExecuteReport(string pstrCCNID, string pstrMasterLocationID, string pstrLocationID, string pstrCategoryID, string pstrParameterModel)
		{
			#region Constants
			const string REPORT_LAYOUT_FILE = "InventoryStatusReport.xml";
			const string REPORT_NAME = "Inventory Status";
			const string PAGE = "Page";
			const string HEADER = "Header";
			const string REPORTFLD_TITLE = "fldTitle";
			const string REPORTFLD_DAY = "fldDay";
			const string REPORTFLD_MONTH = "fldMonth";
			const string REPORTFLD_YEAR = "fldYear";
			const string REPORTFLD_CATEGORY = "fldCategory";
			const string REPORTFLD_PARTNUMBER = "fldPartNumber";
			const string REPORTFLD_PARTNAME = "fldPartName";
			const string REPORTFLD_MODEL = "fldModel";
			const string REPORTFLD_STOCKUM = "fldStockUM";
			const string REPORTFLD_LOCATION = "fldLocation";
			const string REPORTFLD_OHQTY = "fldOHQty";
			const string REPORTFLD_COMMITQTY = "fldCommitQty";
			const string REPORTFLD_AVAILABLEQTY = "fldAvailableQty";
			const string REPORTFLD_TYPE = "fldType";
			const string REPORTFLD_SOURCE = "fldSource";
			const string REPORTFLD_SAFETYSTOCK = "fldSafetyStock";
			const string REPORTFLD_LOT = "fldLot";
			const string REPORTFLD_WARNING = "fldWarning";

			#region QUERY COLUMNS
			const string CATEGORY_COL = "[Category]";
			const string PARTNUMBER_COL = "[Part No.]";
			const string PARTNAME_COL = "[Part Name]";
			const string MODEL_COL = "[Model]";
			const string STOCKUM_COL = "[Stock UM]";
			const string LOCATION_COL = "[Location]";
			const string OHQTY_COL = "[OH Qty]";
			const string COMMITQTY_COL = "[Commit Qty]";
			const string AVAILABLEQTY_COL = "[Available Qty]";
			const string TYPE_COL = "[Type]";
			const string SOURCE_COL = "[Source]";
			const string SAFETYSTOCK_COL = "[Safety Stock]";
			const string LOT_COL = "[Lot]";
			const string WARNING_COL = "[Warning]";
			#endregion
			#endregion

			#region GETTING THE PARAMETER
			PCSComUtils.Common.BO.UtilsBO boUtil = new PCSComUtils.Common.BO.UtilsBO();
			PCSComUtils.Framework.ReportFrame.BO.C1PrintPreviewDialogBO objBO = new PCSComUtils.Framework.ReportFrame.BO.C1PrintPreviewDialogBO();
			string strCCN = boUtil.GetCCNCodeFromID(int.Parse(pstrCCNID));
			string strMasterLocation = objBO.GetMasterLocationCodeFromID(int.Parse(pstrMasterLocationID))
				+ ": " + objBO.GetMasterLocationNameFromID(int.Parse(pstrMasterLocationID));

			// Location and category are optional: a blank or unparsable id is
			// deliberately ignored (best-effort lookups, swallow all failures).
			string strLocation = string.Empty;
			try
			{
				strLocation = objBO.GetLocationCodeFromID(int.Parse(pstrLocationID));
			}
			catch {}
			string strCategory = string.Empty;
			try
			{
				strCategory = objBO.GetCategoryCodeFromID(pstrCategoryID);
			}
			catch {}
			#endregion

			#region Build dtbResult DataTable
			// Best-effort: fall back to an empty table if data retrieval fails.
			DataTable dtbResult;
			try
			{
				dtbResult = GetInventoryStatusData(pstrCCNID, pstrMasterLocationID, pstrLocationID, pstrCategoryID, pstrParameterModel);
			}
			catch
			{
				dtbResult = new DataTable();
			}
			#endregion

			ReportBuilder objRB = new ReportBuilder();
			objRB.ReportName = REPORT_NAME;
			objRB.SourceDataTable = dtbResult;

			#region INIT REPORT BUILDER OBJECT
			try
			{
				objRB.ReportDefinitionFolder = mstrReportDefinitionFolder;
				objRB.ReportLayoutFile = REPORT_LAYOUT_FILE;
				if (objRB.AnalyseLayoutFile() == false)
				{
					// Layout file not found: nothing to render.
					return new DataTable();
				}
				objRB.UseLayoutFile = true; // always use layout file
			}
			catch
			{
				objRB.UseLayoutFile = false;
			}
			#endregion

			objRB.MakeDataTableForRender();

			#region RENDER TO PRINT PREVIEW
			// Render the report and show it in the preview dialog.
			PCSUtils.Framework.ReportFrame.C1PrintPreviewDialog printPreview = new PCSUtils.Framework.ReportFrame.C1PrintPreviewDialog();
			printPreview.FormTitle = REPORT_NAME;
			objRB.ReportViewer = printPreview.ReportViewer;
			objRB.RenderReport();

			#region DRAW Parameters
			const string CCN = "CCN";
			const string MASTER_LOCATION = "Master Location";
			const string LOCATION = "Location";
			const string CATEGORY = "Category";
			const string MODEL = "Model";
			System.Collections.Specialized.NameValueCollection arrParamAndValue = new System.Collections.Specialized.NameValueCollection();
			arrParamAndValue.Add(CCN, strCCN);
			arrParamAndValue.Add(MASTER_LOCATION, strMasterLocation);
			// Optional parameters are only shown when supplied.
			if (pstrLocationID.Trim() != string.Empty)
			{
				arrParamAndValue.Add(LOCATION, strLocation);
			}
			if (pstrCategoryID.Trim() != string.Empty)
			{
				arrParamAndValue.Add(CATEGORY, strCategory);
			}
			if (pstrParameterModel.Trim() != string.Empty)
			{
				arrParamAndValue.Add(MODEL, pstrParameterModel);
			}

			// Anchor the parameter drawing coordinates to fldTitle.
			C1.C1Report.Field fldTitle = objRB.GetFieldByName(REPORTFLD_TITLE);
			double dblStartX = fldTitle.Left;
			double dblStartY = fldTitle.Top + 1.3 * fldTitle.RenderHeight;
			objRB.GetSectionByName(PAGE + HEADER).CanGrow = true;
			objRB.DrawParameters(
				objRB.GetSectionByName(PAGE + HEADER),
				dblStartX,
				dblStartY,
				arrParamAndValue,
				objRB.Report.Font.Size);
			#endregion

			// There are some hard-coded numbers here, but they are used only once
			// and only here, so no constants are defined for them.
			objRB.DrawBoxGroup_Madeby_Checkedby_Approvedby(objRB.GetSectionByName(PAGE + HEADER), 15945 -1400-1400-1400, 600, 1400, 1300, 200);

			#region DAY--MONTH--YEAR INFO
			// Prefer the database server's date; fall back to the local clock.
			DateTime dtm;
			try
			{
				dtm = objUtilBO.GetDBDate();
			}
			catch
			{
				dtm = DateTime.Now;
			}
			try
			{
				objRB.DrawPredefinedField(REPORTFLD_DAY, dtm.Day.ToString("00"));
			}
			catch {}
			try
			{
				objRB.DrawPredefinedField(REPORTFLD_MONTH, dtm.Month.ToString("00"));
			}
			catch {}
			try
			{
				objRB.DrawPredefinedField(REPORTFLD_YEAR, dtm.Year.ToString("0000"));
			}
			catch {}
			#endregion

			// Map report fields to table columns (best-effort).
			try
			{
				objRB.DrawPredefinedField(REPORTFLD_CATEGORY, CATEGORY_COL);
				objRB.DrawPredefinedField(REPORTFLD_PARTNUMBER, PARTNUMBER_COL);
				objRB.DrawPredefinedField(REPORTFLD_PARTNAME, PARTNAME_COL);
				objRB.DrawPredefinedField(REPORTFLD_MODEL, MODEL_COL);
				objRB.DrawPredefinedField(REPORTFLD_STOCKUM, STOCKUM_COL);
				objRB.DrawPredefinedField(REPORTFLD_LOCATION, LOCATION_COL);
				objRB.DrawPredefinedField(REPORTFLD_OHQTY, OHQTY_COL);
				objRB.DrawPredefinedField(REPORTFLD_COMMITQTY, COMMITQTY_COL);
				objRB.DrawPredefinedField(REPORTFLD_AVAILABLEQTY, AVAILABLEQTY_COL);
				objRB.DrawPredefinedField(REPORTFLD_TYPE, TYPE_COL);
				objRB.DrawPredefinedField(REPORTFLD_SOURCE, SOURCE_COL);
				objRB.DrawPredefinedField(REPORTFLD_SAFETYSTOCK, SAFETYSTOCK_COL);
				objRB.DrawPredefinedField(REPORTFLD_LOT, LOT_COL);
				objRB.DrawPredefinedField(REPORTFLD_WARNING, WARNING_COL);
			}
			catch {}

			objRB.RefreshReport();
			printPreview.Show();
			#endregion

			UseReportViewerRenderEngine = false;
			mResult = dtbResult;
			return dtbResult;
		}

		/// <summary>
		/// Main function, generate the result data Table for the REPORT VIEWER
		/// </summary>
		/// <param name="pstrCCNID"></param>
		/// <returns></returns>
		public
DataTable ExecuteReportOLD(string pstrCCNID, string pstrYear, string pstrMonth, string pstrMPSCycleID, string pstrProductionLineID)
{
	// Report/table identifiers and well-known column names used below.
	const string TABLE_NAME = "WorkingSchemeReport";
	const string SUB_TABLE_NAME = "ShiftTotalReport";
	string REPORT_NAME = "WorkingSchemeReport";
	const string SUB_REPORT_NAME = "ShiftTotalReport";
	string REPORT_LAYOUT_FILE = "InventoryStatusReport.xml";
	short COPIES = 1;
	const string ENDSTOCK = "EndStock";
	const string CHANGECATEGORY = "Change Category";
	const string LEADTIME = "Lead Time";
	const string REQUIRECAPACITY = "Require Capacity";
	const string STANDARDCAPACITY = "Standard Capacity";
	const string COMPARESECOND = "Compare Second";
	const string COMPAREPERCENT = "Compare Percent";
	string strFromDate = string.Format("{0}-{1}-01",pstrYear,pstrMonth); // begin date of the selected month
	// Predefined report field names for the layout file.
	const string REPORTFLD_WORKINGDAYS = "fldParameterWorkingdays";
	const string REPORTFLD_OFFDAYS = "fldParameterOffdays";
	const string REPORTFLD_PARAMETER_CCN = "fldParameterCCN";
	const string REPORTFLD_PARAMETER_YEAR = "fldParameterYear";
	const string REPORTFLD_PARAMETER_MONTH = "fldParameterMonth";
	const string REPORTFLD_PARAMETER_CYCLE = "fldParameterCycle";
	const string REPORTFLD_PARAMETER_PRODUCTIONLINE = "fldParameterProductionLine";
	// All parameters arrive as strings; parse once up front.
	int nCCNID = int.Parse(pstrCCNID);
	int nYear = int.Parse(pstrYear);
	int nMonth = int.Parse(pstrMonth);
	int nCycle = int.Parse(pstrMPSCycleID);
	int nProductionLineID = int.Parse(pstrProductionLineID);
	int nWorkingDays;
	int nOffDays;
	string strCCN = string.Empty;
	string strCycle = string.Empty;
	string strProductionLine = string.Empty;

	#region GETTING THE PARAMETER
	// Resolve display strings for the parameter header block.
	PCSComUtils.Common.BO.UtilsBO boUtil = new PCSComUtils.Common.BO.UtilsBO();
	PCSComUtils.Framework.ReportFrame.BO.C1PrintPreviewDialogBO objBO = new PCSComUtils.Framework.ReportFrame.BO.C1PrintPreviewDialogBO();
	strCCN = boUtil.GetCCNCodeFromID(nCCNID);
	strCycle = objBO.GetMPSCycleFromID(nCycle) + "-" + objBO.GetMPSCycleDescriptionFromID(nCycle);
	strProductionLine = objBO.GetProductLineCodeFromID(nProductionLineID) + "-" + objBO.GetProductLineNameFromID(nProductionLineID);
	// refer to mr.TuanTQ to get WOrkingDayInMonth
	nWorkingDays = GetWorkingDayInMonth(nMonth,nYear);
	nOffDays = DateTime.DaysInMonth(nYear,nMonth) - nWorkingDays;
	#endregion

	System.Data.DataTable dtbSourceData;
	System.Data.DataTable dtbSubReportData;
	OleDbConnection oconPCS = null;
	OleDbCommand ocmdPCS = null;
	string strSql = string.Empty;

	#region BUILD THE DATA TABLE
	try
	{
		DataSet dstPCS = new DataSet();
		oconPCS = null;
		ocmdPCS = null;

		#region SQL Query
		// Two result sets in one batch: (1) per-product plan/stock/capacity
		// detail, (2) standard capacity per shift for the sub-report.
		// NOTE(review): parameter values are concatenated into the SQL text —
		// a SQL-injection risk if callers ever pass unvalidated input.
		strSql = " Declare @strFromDate smalldatetime " +
			" Declare @pstrCCNID int " +
			" Declare @pstrProductionLineID int " +
			" Declare @pstrMPSCycleID int " +
			" Declare @pstrMonth char(2) " +
			" Declare @pstrYear char(4) " +
			" " +
			" Declare @pstrInArray varchar(40) " +
			" Declare @pstrOutArray varchar(40) " +
			" " +
			" " +
			" Set @strFromDate = '"+pstrYear+"-"+nMonth.ToString("00")+"-01' " +
			" Set @pstrCCNID = " + pstrCCNID + " " +
			" Set @pstrProductionLineID = " +pstrProductionLineID+ " " +
			" Set @pstrMPSCycleID = " +pstrMPSCycleID+ " " +
			" Set @pstrYear = '" +pstrYear+ "' " +
			" Set @pstrMonth = '" +nMonth.ToString("00")+ "' " +
			" " +
			" " +
			" select " +
			" P.ProductID as [ProductID], " +
			" ITM_Category.Code as [Category], " +
			" P.Code as [Part Number], " +
			" P.Description as [Part Name], " +
			" P.Revision as [Model], " +
			" IsNull(CPOTable.Quantity,0) as [Plan], " +
			" IsNull(BeginStockTable.[Begin Stock],0.00) as [Begin Stock], " +
			" (IsNull(BeginStockTable.[Begin Stock],0) - IsNull(CPOTable.Quantity,0)) As [EndStock], " +
			" " +
			" " +
			" CAST(IsNull((ChangeTimeTable.ChangeTime) , 0.00) as decimal(20,5) ) as [Change Category], " +
			" " +
			" [Lead Time] = " +
			" CASE " +
			" WHEN ITM_Routing.Pacer = 'L' THEN ITM_Routing.LaborSetupTime + ITM_Routing.LaborRunTime " +
			" WHEN ITM_Routing.Pacer = 'M' THEN ITM_Routing.MachineSetupTime + ITM_Routing.MachineRunTime " +
			" WHEN ITM_Routing.Pacer = 'B' THEN ITM_Routing.MachineRunTime + ITM_Routing.LaborRunTime " +
			" END, " +
			" " +
			" 0.00 as [Require Capacity] " +
			" " +
			" from " +
			" MST_WorkCenter join ITM_Routing " +
			" on MST_WorkCenter.WorkCenterID = ITM_Routing.WorkCenterID and ITM_Routing.Type = 0 " +
			" left join PRO_ProductionLine " +
			" on MST_WOrkCenter.ProductionLineID = PRO_ProductionLine.ProductionLineID " +
			" join ITM_Product as P " +
			" on P.ProductID = ITM_Routing.ProductID " +
			" join ITM_Category " +
			" on P.CategoryID = ITM_Category.CategoryID " +
			" join PRO_WCCapacity " +
			" on MST_WorkCenter.WorkCenterID = PRO_WCCapacity.WorkCenterID " +
			" join PRO_ShiftCapacity " +
			" on PRO_WCCapacity.WCCapacityID = PRO_ShiftCapacity.WCCapacityID " +
			" join PRO_Shift " +
			" on PRO_ShiftCapacity.ShiftID = PRO_Shift.ShiftID " +
			" " +
			" " +
			" /* --######-- BEGIN Get Begin Quantity of Product */ " +
			" left join " +
			" ( " +
			" SELECT DISTINCT " +
			" ITM_Product.ProductID, " +
			" " +
			" ( " +
			" ISNULL(IV_MasLocCache.OHQuantity, 0) " +
			" " +
			" - (SELECT ISNULL(SUM(TransQuantity), 0) " +
			" FROM v_TransactionHistory inFrom_Today_TransHis " +
			" WHERE ProductID = ITM_Product.ProductID " +
			" AND CCNID = @pstrCCNID " +
			" AND PostDate BETWEEN @strFromDate AND GetDate() " +
			" AND TranTypeID IN (8, 11, 13, 19, 20, 16, 17) " +
			" ) " +
			" " +
			" + (SELECT ISNULL(SUM(TransQuantity), 0) " +
			" FROM v_TransactionHistory " +
			" WHERE ProductID = ITM_Product.ProductID " +
			" AND CCNID = @pstrCCNID " +
			" AND PostDate BETWEEN @strFromDate AND GetDate() " +
			" AND TranTypeID IN (12, 14, 15) " +
			" ) " +
			" ) " +
			" as [Begin Stock] " +
			" " +
			" FROM ITM_Product " +
			" INNER JOIN IV_MasLocCache ON ITM_Product.ProductID = IV_MasLocCache.ProductID " +
			" WHERE ITM_Product.CCNID = @pstrCCNID " +
			" " +
			" GROUP BY " +
			" ITM_Product.ProductID, " +
			" IV_MasLocCache.OHQuantity " +
			" " +
			" ) " +
			" as BeginStockTable " +
			" on P.ProductID = BeginStockTable.ProductID " +
			" /* ######-- END Get Begin Quantity of Product */ " +
			" " +
			" " +
			" /* BEGIN: Getting the Change time of Product */ " +
			" left join " +
			" ( " +
			" select " +
			" CCMatrix.DestProductID as [ProductID], " +
			" Sum(CCMatrix.ChangeTime) as [ChangeTime] " +
			" " +
			" from PRO_ChangeCategoryMatrix as CCMatrix " +
			" " +
			" /*BEGIN: Join to get condition on parameter: ProductionLineID*/ " +
			" join PRO_ChangeCategoryMaster CCMaster " +
			" on CCMaster.ChangeCategoryMasterID = CCMatrix.ChangeCategoryMasterID " +
			" join MST_WorkCenter " +
			" on MST_WorkCenter.WorkCenterID = CCMaster.WorkCenterID " +
			" and MST_WorkCenter.ProductionLineID = @pstrProductionLineID " +
			" and MST_WorkCenter.CCNID = @pstrCCNID " +
			" /*END: Join to get condition on parameter: CCNID, ProductionLineID*/ " +
			" " +
			" group by " +
			" DestProductID " +
			" ) " +
			" as ChangeTimeTable " +
			" on P.ProductID = ChangeTimeTable.ProductID " +
			" /*-- END: Getting the Change time of Product */ " +
			" " +
			" " +
			" " +
			" /* BEGIN GETTING CPO PLAN QUANTITY */ " +
			" left join " +
			" ( " +
			" SELECT " +
			" MTR_CPO.MPSCycleOptionMasterID, " +
			" ITM_Product.ProductID, " +
			" ITM_Product.Revision AS [Model], " +
			" ITM_Product.Code AS [Part Number], " +
			" /* --DATEPART(yyyy, MTR_CPO.DueDate) as [Year], */ " +
			" /* --DATEPART(mm, MTR_CPO.DueDate)as [Month], */ " +
			" SUM(MTR_CPO.Quantity) AS [Quantity] " +
			" " +
			" FROM " +
			" MTR_CPO " +
			" INNER JOIN MST_CCN " +
			" ON MTR_CPO.CCNID = MST_CCN.CCNID " +
			" INNER JOIN ITM_Product " +
			" ON MTR_CPO.ProductID = ITM_Product.ProductID " +
			" left outer JOIN ITM_Category " +
			" ON ITM_Product.CategoryID = ITM_Category.CategoryID " +
			" " +
			" WHERE " +
			" MTR_CPO.CCNID = @pstrCCNID AND " +
			" MTR_CPO.MPSCycleOptionMasterID = @pstrMPSCycleID AND " +
			" DATEPART(yyyy, MTR_CPO.DueDate) = @pstrYear AND " +
			" DATEPART(mm, MTR_CPO.DueDate) = @pstrMonth " +
			" " +
			" GROUP BY " +
			" MTR_CPO.MPSCycleOptionMasterID, " +
			" MST_CCN.Code, " +
			" ITM_Category.Code, " +
			" ITM_Product.ProductID, " +
			" ITM_Product.Code, " +
			" ITM_Product.Description, " +
			" ITM_Product.Revision, " +
			" DatePart(yyyy,MTR_CPO.DueDate), " +
			" DatePart(mm,MTR_CPO.DueDate) " +
			" ) " +
			" as CPOTable " +
			" on P.ProductID = CPOTable.ProductID " +
			" /* END GETTING CPO PLAN QUANTITY */ " +
			" " +
			" " +
			" " +
			" " +
			" WHERE " +
			" PRO_ProductionLine.ProductionLineID = @pstrProductionLineID " +
			" and DATEPART(yyyy, PRO_WCCapacity.BeginDate ) <= @pstrYear " +
			" and DATEPART(yyyy, PRO_WCCapacity.EndDate ) >= @pstrYear " +
			" and DATEPART(mm, PRO_WCCapacity.BeginDate ) <= @pstrMonth " +
			" and DATEPART(mm, PRO_WCCapacity.EndDate ) >= @pstrMonth " +
			" and MST_WOrkCenter.IsMain = 1 " +
			" " +
			" " +
			" /* GROUP BY of outside query*/ " +
			" group by " +
			" P.ProductID, " +
			" ITM_Category.Code, " +
			" P.Code, " +
			" P.Description, " +
			" P.Revision, " +
			" CPOTable.Quantity, " +
			" BeginStockTable.[Begin Stock], " +
			" " +
			" (IsNull(CPOTable.Quantity,0) - IsNull(BeginStockTable.[Begin Stock],0)), " +
			" " +
			" ITM_Routing.Pacer, " +
			" ITM_Routing.LaborSetupTime, " +
			" ITM_Routing.LaborRunTime, " +
			" ITM_Routing.MachineSetupTime, " +
			" ITM_Routing.MachineRunTime, " +
			" CAST(IsNull((ChangeTimeTable.ChangeTime) , 0.00) as decimal(20,5) ) " +
			" " +
			" Order by [Category],[Part Number],[Model] " +
			" " +
			" " +
			" " +
			" /**********************************************************************************/ " +
			" /**********************************************************************************/ " +
			" /**********************************************************************************/ " +
			" " +
			" " +
			" /* BEGIN StandardCapacity caculate */ " +
			" select " +
			" PRO_Shift.ShiftDesc as [Shift], " +
			" sum(PRO_WCCapacity.Capacity) as [Standard Capacity] " +
			" " +
			" from " +
			" MST_WorkCenter " +
			" join PRO_WCCapacity " +
			" on MST_WorkCenter.WorkCenterID = PRO_WCCapacity.WorkCenterID " +
			" join PRO_ShiftCapacity " +
			" on PRO_WCCapacity.WCCapacityID = PRO_ShiftCapacity.WCCapacityID " +
			" join PRO_Shift " +
			" on PRO_ShiftCapacity.ShiftID = PRO_Shift.ShiftID " +
			" " +
			" /* left join MTR_CPO " +
			" on MTR_CPO.ProductID = ITM_Product.ProductID " +
			" and MTR_CPO.MPSCycleOptionMasterID = @pstrMPSCycleID " +
			" */ " +
			" " +
			" WHERE " +
			" MST_WorkCenter.ProductionLineID = @pstrProductionLineID " +
			" and PRO_WCCapacity.CCNID = @pstrCCNID " +
			" and MST_WOrkCenter.IsMain = 1 " +
			" " +
			" and DATEPART(yyyy, PRO_WCCapacity.BeginDate ) <= @pstrYear " +
			" and DATEPART(yyyy, PRO_WCCapacity.EndDate ) >= @pstrYear " +
			" and DATEPART(mm, PRO_WCCapacity.BeginDate ) <= @pstrMonth " +
			" and DATEPART(mm, PRO_WCCapacity.EndDate ) >= @pstrMonth " +
			" " +
			" " +
			" group by " +
			" PRO_Shift.ShiftDesc " +
			" " +
			" /* END StandardCapacity caculate */ " +
			" " +
			" " ;
		#endregion

		oconPCS = new OleDbConnection(mConnectionString);
		ocmdPCS = new OleDbCommand(strSql, oconPCS);
		ocmdPCS.Connection.Open();
		OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS);
		odadPCS.Fill(dstPCS);
		// The batch returns two tables: detail rows and the shift sub-report.
		if(dstPCS.Tables.Count > 0)
		{
			dtbSourceData = dstPCS.Tables[0].Copy();
			dtbSubReportData = dstPCS.Tables[1].Copy();
		}
		else
		{
			dtbSourceData = new DataTable();
			dtbSubReportData = new DataTable();
		}
	}
	catch(OleDbException ex)
	{
		// Surface the failing SQL text with the provider error for diagnosis.
		throw new Exception(strSql,ex);
	}
	finally
	{
		if (oconPCS!=null)
		{
			if (oconPCS.State != ConnectionState.Closed)
			{
				oconPCS.Close();
			}
		}
	}
	#endregion

	#region TRANSFORM DATATABLE FOR REPORT
	// only re-calculate Require Capacity, Compare Second, and Compare Percent column
	// Calculate the Require Capacity per row.
	foreach(DataRow drow in dtbSourceData.Rows)
	{
		drow[REQUIRECAPACITY] = (decimal)drow[ENDSTOCK] * (decimal)drow[LEADTIME] + (decimal)drow[CHANGECATEGORY] ;
	}
	decimal decSumOfRequireCapacity = 0;
	// Calculate the sum of Require Capacity over all detail rows.
	foreach(DataRow drow in dtbSourceData.Rows)
	{
		decSumOfRequireCapacity += (decimal)drow[REQUIRECAPACITY];
	}
	dtbSubReportData.Columns.Add(COMPARESECOND, typeof(System.Decimal));
	dtbSubReportData.Columns.Add(COMPAREPERCENT);
	// Calculate the two comparison columns on the sub-report rows.
	foreach(DataRow drow in dtbSubReportData.Rows)
	{
		drow[COMPARESECOND] = (decimal)drow[STANDARDCAPACITY] - decSumOfRequireCapacity;
if(decSumOfRequireCapacity != Decimal.Zero) { decimal decPercentValue = ((decimal)drow[STANDARDCAPACITY]*100) / decSumOfRequireCapacity; /// Percent drow[COMPAREPERCENT] = decPercentValue.ToString("#,##0.00") + "%"; } } #endregion #region RENDER REPORT ReportWithSubReportBuilder objRB = new ReportWithSubReportBuilder(); objRB.ReportName = REPORT_NAME; objRB.SourceDataTable = dtbSourceData; objRB.SubReportDataSources.Add(SUB_REPORT_NAME, dtbSubReportData); objRB.ReportDefinitionFolder = mstrReportDefinitionFolder; objRB.ReportLayoutFile = REPORT_LAYOUT_FILE; objRB.UseLayoutFile = true; objRB.MakeDataTableForRender(); // and show it in preview dialog PCSUtils.Framework.ReportFrame.C1PrintPreviewDialog printPreview = new PCSUtils.Framework.ReportFrame.C1PrintPreviewDialog(); printPreview.FormTitle = REPORT_NAME; //Attach report viewer objRB.ReportViewer = printPreview.ReportViewer; objRB.RenderReport(); #region MODIFY THE REPORT LAYOUT objRB.DrawBoxGroup_Madeby_Checkedby_Approvedby(objRB.GetSectionByName("Header"),100,100,1300,1500,200); objRB.DrawPredefinedField(REPORTFLD_WORKINGDAYS, nWorkingDays.ToString()); objRB.DrawPredefinedField(REPORTFLD_OFFDAYS, nOffDays.ToString()); #region PUSH PARAMETER VALUE objRB.DrawPredefinedField(REPORTFLD_PARAMETER_CCN,strCCN); objRB.DrawPredefinedField(REPORTFLD_PARAMETER_YEAR, nYear.ToString("0000")); objRB.DrawPredefinedField(REPORTFLD_PARAMETER_MONTH, nMonth.ToString("00")); objRB.DrawPredefinedField(REPORTFLD_PARAMETER_CYCLE,strCycle); objRB.DrawPredefinedField(REPORTFLD_PARAMETER_PRODUCTIONLINE, strProductionLine); #endregion #endregion objRB.RefreshReport(); printPreview.Show(); #endregion UseReportViewerRenderEngine = false; mResult = dtbSourceData; return dtbSourceData; } /// <summary> /// Thachnn: 28/10/2005 - my bd /// Return data for Inventory Status Report /// </summary> /// <param name="pnCCNID"></param> /// <param name="pnMasterLocationID"></param> /// <param name="pnLocationID"></param> /// <param 
name="pstrCategoryID">comma-separated CategoryID list; empty string = no category filter</param>
		/// <param name="pstrModel">comma-separated (quoted) Revision list; empty string = no model filter</param>
		/// <returns>Inventory status rows; an empty DataTable when the query produced no result table.</returns>
		public DataTable GetInventoryStatusData(string pstrCCNID, string pstrMasterLocationID, string pstrLocationID, string pstrCategoryID, string pstrModel)
		{
			const string TABLE_NAME = "InventoryStatusData";
			DataSet dstPCS = new DataSet();

			// NOTE(review): parameter values are concatenated directly into the SQL text, which is
			// vulnerable to SQL injection if any of them ever carries user-entered data. The text is
			// kept exactly as before to preserve behavior; consider OleDb '?' parameters instead.
			string strSql = " Declare @CCNID int "
				+ " Declare @MasterLocationID int "
				+ " Declare @LocationID int "
				+ " Declare @CategoryID int "
				+ " Declare @Model varchar(50) "
				+ " /*-----------------------------------*/ "
				+ " Set @CCNID = " + pstrCCNID + " "
				+ " Set @MasterLocationID = " + pstrMasterLocationID + " "
				// byte.MinValue (0) stands in for "no value" when the optional filters are blank
				+ " Set @LocationID = " + (pstrLocationID.Trim() == string.Empty ? byte.MinValue.ToString() : pstrLocationID) + " "
				+ " Set @CategoryID = " + (pstrCategoryID.Trim() == string.Empty ? byte.MinValue.ToString() : pstrCategoryID) + " "
				+ " Set @Model = '" + pstrModel + "' "
				+ " /*-----------------------------------*/ "
				+ " SELECT "
				+ " ITM_Category.Code AS Category, "
				+ " ITM_Product.Code as [Part No.], "
				+ " ITM_Product.Description as [Part Name], "
				+ " ITM_Product.Revision as [Model], "
				+ " MST_UnitOfMeasure.Code AS [Stock UM], "
				+ " MST_Location.Code AS Location, "
				+ " IV_LocationCache.OHQuantity AS [OH Qty], "
				+ " IV_LocationCache.CommitQuantity AS [Commit Qty], "
				+ " isnull(IV_LocationCache.OHQuantity,0) - isnull(IV_LocationCache.CommitQuantity,0) AS [Available Qty], "
				+ " ITM_ProductType.Code AS Type, "
				+ " ITM_Source.Code AS Source, "
				+ " ITM_Product.SafetyStock AS [Safety Stock], "
				+ " IV_LocationCache.Lot AS [Lot], "
				// 'X' marks rows whose available quantity fell below the safety stock
				+ " [Warning] = case "
				+ " when isnull(IV_LocationCache.OHQuantity,0) - isnull(IV_LocationCache.CommitQuantity,0) - isnull(ITM_Product.SafetyStock,0) < 0 then 'X' "
				+ " when isnull(IV_LocationCache.OHQuantity,0) - isnull(IV_LocationCache.CommitQuantity,0) - isnull(ITM_Product.SafetyStock,0) > 0 then '' "
				+ " end "
				+ " "
				+ " FROM ITM_Product "
				+ " INNER JOIN IV_LocationCache ON ITM_Product.ProductID = IV_LocationCache.ProductID "
				+ " INNER JOIN MST_Location ON IV_LocationCache.LocationID = MST_Location.LocationID "
				+ " INNER JOIN dbo.MST_MasterLocation ON dbo.MST_Location.MasterLocationID = dbo.MST_MasterLocation.MasterLocationID "
				+ " INNER JOIN dbo.MST_CCN ON dbo.MST_MasterLocation.CCNID = dbo.MST_CCN.CCNID "
				+ " INNER JOIN MST_UnitOfMeasure ON ITM_Product.StockUMID = MST_UnitOfMeasure.UnitOfMeasureID "
				+ " LEFT OUTER JOIN ITM_Source ON ITM_Product.SourceID = ITM_Source.SourceID "
				+ " LEFT OUTER JOIN ITM_ProductType ON ITM_Product.ProductTypeID = ITM_ProductType.ProductTypeID "
				+ " LEFT OUTER JOIN ITM_Category ON ITM_Product.CategoryID = ITM_Category.CategoryID "
				+ " WHERE "
				// optional filters are appended only when the corresponding argument is non-blank
				+ (pstrLocationID.Trim() == string.Empty ? (string.Empty) : (" MST_Location.LocationID = @LocationID and "))
				+ (pstrCategoryID.Trim() == string.Empty ? (string.Empty) : (" ITM_Category.CategoryID IN (" + pstrCategoryID + ") and "))
				+ (pstrModel.Trim() == string.Empty ? (string.Empty) : (" ITM_Product.Revision IN (" + pstrModel + ") and "))
				+ " MST_MasterLocation.CCNID = @CCNID and "
				+ " MST_MasterLocation.MasterLocationID = @MasterLocationID "
				+ " "
				+ " ORDER BY ITM_Product.Description "
				+ " ";

			// FIX: the original only closed the connection on the two success paths and leaked it
			// when Open()/Fill() threw. 'using' guarantees disposal on every path.
			using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
			using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
			using (OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS))
			{
				oconPCS.Open();
				odadPCS.Fill(dstPCS, TABLE_NAME);
			}

			if (dstPCS.Tables.Contains(TABLE_NAME))
			{
				return dstPCS.Tables[TABLE_NAME];
			}
			return new DataTable();
		}

		/// <summary>
		/// Off days in a month = DaysInMonth - working days (see <see cref="GetWorkingDayInMonth"/>).
		/// </summary>
		/// <param name="pintMonth">month number (1-12)</param>
		/// <param name="pintYear">four-digit year</param>
		/// <returns>Number of non-working days in the month.</returns>
		private int GetOffDayInMonth(int pintMonth, int pintYear)
		{
			return DateTime.DaysInMonth(pintYear, pintMonth) - GetWorkingDayInMonth(pintMonth, pintYear);
		}

		/// <summary>
		/// Actual working days in a month: all days minus holidays and weekly off days.
		/// NOTE(review): a holiday that falls on a weekly off day is subtracted twice —
		/// confirm with the business rules before changing.
		/// </summary>
		/// <param name="pintMonth">month number (1-12)</param>
		/// <param name="pintYear">four-digit year</param>
		/// <returns>Actual working day count of the month.</returns>
		/// <author> Tuan TQ, 23 Nov, 2005</author>
		private int GetWorkingDayInMonth(int pintMonth, int pintYear)
		{
			int intDaysInMonth = DateTime.DaysInMonth(pintYear, pintMonth);
			PCSComUtils.Common.BO.UtilsBO boUtils = new PCSComUtils.Common.BO.UtilsBO();
			ArrayList arrHoliday = boUtils.GetHolidaysInYear(pintYear);
			// Despite its name, GetWorkingDayByYear is consumed here as the set of weekly
			// off DayOfWeek values — TODO confirm against PCSComUtils.
			ArrayList arrOffDay = boUtils.GetWorkingDayByYear(pintYear);

			DateTime dtmTemp = new DateTime(pintYear, pintMonth, 1);
			int intWorkingDays = intDaysInMonth;
			for (int i = 1; i <= intDaysInMonth; i++)
			{
				if (arrHoliday.Contains(dtmTemp))
				{
					intWorkingDays--;
				}
				if (arrOffDay.Contains(dtmTemp.DayOfWeek))
				{
					intWorkingDays--;
				}
				dtmTemp = dtmTemp.AddDays(1);
			}
			return intWorkingDays;
		}
	}
}
using System;
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif

namespace FMODUnity
{
    // Thrown when an FMOD Studio event cannot be resolved by path or by GUID.
    public class EventNotFoundException : Exception
    {
        public Guid Guid;    // GUID lookup key (set by the Guid constructor)
        public string Path;  // path lookup key (set by the path constructor)

        public EventNotFoundException(string path)
            : base("FMOD Studio event not found '" + path + "'")
        {
            Path = path;
        }

        public EventNotFoundException(Guid guid)
            : base("FMOD Studio event not found " + guid.ToString("b") + "")
        {
            Guid = guid;
        }
    }

    // Thrown when an FMOD Studio bus path does not exist in the loaded banks.
    public class BusNotFoundException : Exception
    {
        public string Path;

        public BusNotFoundException(string path)
            : base("FMOD Studio bus not found '" + path + "'")
        {
            Path = path;
        }
    }

    // Thrown when an FMOD Studio VCA path does not exist in the loaded banks.
    public class VCANotFoundException : Exception
    {
        public string Path;

        public VCANotFoundException(string path)
            : base("FMOD Studio VCA not found '" + path + "'")
        {
            Path = path;
        }
    }

    // Thrown when a bank file fails to load; carries the failing path and the FMOD result code.
    public class BankLoadException : Exception
    {
        public string Path;
        public FMOD.RESULT Result;

        public BankLoadException(string path, FMOD.RESULT result)
            : base(String.Format("FMOD Studio could not load bank '{0}' : {1} : {2}",
                path, result.ToString(), FMOD.Error.String(result)))
        {
            Path = path;
            Result = result;
        }

        // Overload for failures reported as a plain error string; Result defaults to ERR_INTERNAL.
        public BankLoadException(string path, string error)
            : base(String.Format("FMOD Studio could not load bank '{0}' : {1}", path, error))
        {
            Path = path;
            Result = FMOD.RESULT.ERR_INTERNAL;
        }
    }

    // Thrown when the FMOD Studio system failed to initialize; Location names the failing step.
    public class SystemNotInitializedException : Exception
    {
        public FMOD.RESULT Result;
        public string Location;

        public SystemNotInitializedException(FMOD.RESULT result, string location)
            : base(String.Format("FMOD Studio initialization failed : {2} : {0} : {1}",
                result.ToString(), FMOD.Error.String(result), location))
        {
            Result = result;
            Location = location;
        }

        public SystemNotInitializedException(Exception inner)
            : base("FMOD Studio initialization failed", inner)
        {
        }
    }

    // Unity lifecycle/physics events an emitter can be triggered by.
    public enum EmitterGameEvent
    {
        None,
        ObjectStart,
        ObjectDestroy,
        TriggerEnter,
        TriggerExit,
        TriggerEnter2D,
        TriggerExit2D,
        CollisionEnter,
        CollisionExit,
        CollisionEnter2D,
        CollisionExit2D,
        ObjectEnable,
        ObjectDisable
    }

    // Unity lifecycle/physics events a bank loader can be triggered by (subset of the emitter events).
    public enum LoaderGameEvent
    {
        None,
        ObjectStart,
        ObjectDestroy,
        TriggerEnter,
        TriggerExit,
        TriggerEnter2D,
        TriggerExit2D,
    }

    // Conversion helpers between Unity and FMOD types, plus platform/path lookups
    // used by the FMOD Unity integration at runtime.
    public static class RuntimeUtils
    {
        public const string LogFileName = "fmod.log";

        // Copies a Unity Vector3 component-wise into an FMOD VECTOR.
        public static FMOD.VECTOR ToFMODVector(this Vector3 vec)
        {
            FMOD.VECTOR temp;
            temp.x = vec.x;
            temp.y = vec.y;
            temp.z = vec.z;
            return temp;
        }

        // 3D attributes for a bare position: world-axis forward/up, zero velocity.
        public static FMOD.ATTRIBUTES_3D To3DAttributes(this Vector3 pos)
        {
            FMOD.ATTRIBUTES_3D attributes = new FMOD.ATTRIBUTES_3D();
            attributes.forward = ToFMODVector(Vector3.forward);
            attributes.up = ToFMODVector(Vector3.up);
            attributes.position = ToFMODVector(pos);
            return attributes;
        }

        // 3D attributes from a Transform's orientation and position; velocity left at zero.
        public static FMOD.ATTRIBUTES_3D To3DAttributes(this Transform transform)
        {
            FMOD.ATTRIBUTES_3D attributes = new FMOD.ATTRIBUTES_3D();
            attributes.forward = transform.forward.ToFMODVector();
            attributes.up = transform.up.ToFMODVector();
            attributes.position = transform.position.ToFMODVector();
            return attributes;
        }

        // As above, but fills velocity from the Rigidbody when one is supplied.
        public static FMOD.ATTRIBUTES_3D To3DAttributes(Transform transform, Rigidbody rigidbody = null)
        {
            FMOD.ATTRIBUTES_3D attributes = transform.To3DAttributes();

            if (rigidbody)
            {
                attributes.velocity = rigidbody.velocity.ToFMODVector();
            }

            return attributes;
        }

        // GameObject convenience overload: delegates to the Transform version.
        public static FMOD.ATTRIBUTES_3D To3DAttributes(GameObject go, Rigidbody rigidbody = null)
        {
            FMOD.ATTRIBUTES_3D attributes = go.transform.To3DAttributes();

            if (rigidbody)
            {
                attributes.velocity = rigidbody.velocity.ToFMODVector();
            }

            return attributes;
        }

        // 2D physics variant: velocity z is fixed to 0 since Rigidbody2D has no depth axis.
        public static FMOD.ATTRIBUTES_3D To3DAttributes(Transform transform, Rigidbody2D rigidbody)
        {
            FMOD.ATTRIBUTES_3D attributes = transform.To3DAttributes();

            if (rigidbody)
            {
                FMOD.VECTOR vel;
                vel.x = rigidbody.velocity.x;
                vel.y = rigidbody.velocity.y;
                vel.z = 0;
                attributes.velocity = vel;
            }

            return attributes;
        }

        // GameObject convenience overload of the 2D variant.
        public static FMOD.ATTRIBUTES_3D To3DAttributes(GameObject go, Rigidbody2D rigidbody)
        {
            FMOD.ATTRIBUTES_3D attributes = go.transform.To3DAttributes();

            if (rigidbody)
            {
                FMOD.VECTOR vel;
                vel.x = rigidbody.velocity.x;
                vel.y = rigidbody.velocity.y;
                vel.z = 0;
                attributes.velocity = vel;
            }

            return attributes;
        }

        // Internal Helper Functions

        // Maps the compile-time/runtime environment to an FMODPlatform.
        // Mobile targets are additionally classed MobileLow/MobileHigh from device heuristics.
        // NOTE: there is no trailing #else, so an unlisted target is a compile error by design.
        internal static FMODPlatform GetCurrentPlatform()
        {
#if UNITY_EDITOR
            return FMODPlatform.PlayInEditor;
#elif UNITY_STANDALONE_WIN
            return FMODPlatform.Windows;
#elif UNITY_STANDALONE_OSX
            return FMODPlatform.Mac;
#elif UNITY_STANDALONE_LINUX
            return FMODPlatform.Linux;
#elif UNITY_TVOS
            return FMODPlatform.AppleTV;
#elif UNITY_IOS
            // Older iOS device generations are classed as low-end; everything newer as high-end.
            FMODPlatform result;
            switch (UnityEngine.iOS.Device.generation)
            {
                case UnityEngine.iOS.DeviceGeneration.iPad1Gen:
                case UnityEngine.iOS.DeviceGeneration.iPad2Gen:
                case UnityEngine.iOS.DeviceGeneration.iPad3Gen:
                case UnityEngine.iOS.DeviceGeneration.iPadMini1Gen:
                case UnityEngine.iOS.DeviceGeneration.iPhone:
                case UnityEngine.iOS.DeviceGeneration.iPhone3G:
                case UnityEngine.iOS.DeviceGeneration.iPhone3GS:
                case UnityEngine.iOS.DeviceGeneration.iPhone4:
                case UnityEngine.iOS.DeviceGeneration.iPhone4S:
                    result = FMODPlatform.MobileLow;
                    break;
                default:
                    result = FMODPlatform.MobileHigh;
                    break;
            }

            UnityEngine.Debug.Log(String.Format("FMOD Studio: Device {0} classed as {1}", SystemInfo.deviceModel, result.ToString()));
            return result;
#elif UNITY_ANDROID
            // Android: <=2 cores => low, >=8 cores => high; quad-core decided by max CPU clock.
            FMODPlatform result;
            if (SystemInfo.processorCount <= 2)
            {
                result = FMODPlatform.MobileLow;
            }
            else if (SystemInfo.processorCount >= 8)
            {
                result = FMODPlatform.MobileHigh;
            }
            else
            {
                // check the clock rate on quad core systems
                string freqinfo = "/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq";
                try
                {
                    using (global::System.IO.TextReader reader = new global::System.IO.StreamReader(freqinfo))
                    {
                        string line = reader.ReadLine();
                        int khz = Int32.Parse(line) / 1000;
                        if (khz >= 1600)
                        {
                            result = FMODPlatform.MobileHigh;
                        }
                        else
                        {
                            result = FMODPlatform.MobileLow;
                        }
                    }
                }
                catch
                {
                    // sysfs unreadable or unparsable: fall back to the conservative class.
                    result = FMODPlatform.MobileLow;
                }
            }

            UnityEngine.Debug.Log(String.Format("FMOD Studio: Device {0} classed as {1}", SystemInfo.deviceModel, result.ToString()));
            return result;
#elif UNITY_WINRT_8_1
            FMODPlatform result;
            if (SystemInfo.processorCount <= 2)
            {
                result = FMODPlatform.MobileLow;
            }
            else
            {
                result = FMODPlatform.MobileHigh;
            }

            UnityEngine.Debug.Log(String.Format("FMOD Studio: Device {0} classed as {1}", SystemInfo.deviceModel, result.ToString()));
            return result;
#elif UNITY_PS4
            return FMODPlatform.PS4;
#elif UNITY_XBOXONE
            return FMODPlatform.XboxOne;
#elif UNITY_PSP2
            return FMODPlatform.PSVita;
#elif (!UNITY_5_0 && !UNITY_5_1) && UNITY_WIIU
            return FMODPlatform.WiiU;
#elif UNITY_WSA_10_0
            return FMODPlatform.UWP;
#elif UNITY_SWITCH
            return FMODPlatform.Switch;
#elif UNITY_WEBGL
            return FMODPlatform.WebGL;
#endif
        }

        const string BankExtension = ".bank";

        // Resolves a bank name (with or without the ".bank" extension) to the
        // platform-specific location it should be loaded from.
        internal static string GetBankPath(string bankName)
        {
#if UNITY_EDITOR
            // For play in editor use original asset location because streaming asset folder will contain platform specific banks
            string bankFolder = Settings.Instance.SourceBankPath;
            if (Settings.Instance.HasPlatforms)
            {
                bankFolder = global::System.IO.Path.Combine(bankFolder, Settings.Instance.GetBankPlatform(FMODPlatform.PlayInEditor));
            }
#elif UNITY_ANDROID
            // Banks live inside the APK/JAR unless the data path is not an .apk.
            string bankFolder = null;
            if (System.IO.Path.GetExtension(Application.dataPath) == ".apk")
            {
                bankFolder = "file:///android_asset";
            }
            else
            {
                bankFolder = String.Format("jar:file://{0}!/assets", Application.dataPath);
            }
#elif UNITY_WINRT_8_1 || UNITY_WSA_10_0
            string bankFolder = "ms-appx:///Data/StreamingAssets";
#else
            string bankFolder = Application.streamingAssetsPath;
#endif

            // Special case for Switch, remove / at start if needed.
#if UNITY_SWITCH
            if (bankFolder[0] == '/')
                bankFolder = bankFolder.Substring(1);
#endif

            if (System.IO.Path.GetExtension(bankName) != BankExtension)
            {
                return String.Format("{0}/{1}.bank", bankFolder, bankName);
            }
            else
            {
                return String.Format("{0}/{1}", bankFolder, bankName);
            }
        }

        // Builds the absolute path of a native FMOD plugin: platform-specific file name
        // (dll/bundle/prx/so/bc) joined with the platform-specific plugin folder.
        internal static string GetPluginPath(string pluginName)
        {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_XBOXONE || UNITY_WINRT_8_1 || UNITY_WSA_10_0
            string pluginFileName = pluginName + ".dll";
#elif UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
            string pluginFileName = pluginName + ".bundle";
#elif UNITY_PS4
            string pluginFileName = pluginName + ".prx";
#elif UNITY_ANDROID || UNITY_STANDALONE_LINUX
            string pluginFileName = "lib" + pluginName + ".so";
#elif UNITY_WEBGL
            string pluginFileName = pluginName + ".bc";
#endif

#if UNITY_EDITOR_WIN && UNITY_EDITOR_64
            string pluginFolder = Application.dataPath + "/Plugins/X86_64/";
#elif UNITY_EDITOR_WIN
            string pluginFolder = Application.dataPath + "/Plugins/X86/";
#elif UNITY_STANDALONE_WIN || UNITY_PS4 || UNITY_XBOXONE || UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX || UNITY_WEBGL
            string pluginFolder = Application.dataPath + "/Plugins/";
#elif UNITY_STANDALONE_LINUX
            string pluginFolder = Application.dataPath + ((IntPtr.Size == 8) ? "/Plugins/x86_64/" : "/Plugins/x86/");
#elif UNITY_WSA
            string pluginFolder = "";
#elif UNITY_ANDROID
            // Derive the installed package name from the persistent data path's parent directory.
            var dirInfo = new global::System.IO.DirectoryInfo(Application.persistentDataPath);
            string packageName = dirInfo.Parent.Name;
            string pluginFolder = "/data/data/" + packageName + "/lib/";
#else
            string pluginFileName = "";
            string pluginFolder = "";
#endif

            return pluginFolder + pluginFileName;
        }

        // Forces the native FMOD libraries to load in the correct order
        // (core before studio) before any P/Invoke call needs them.
        public static void EnforceLibraryOrder()
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            AndroidJavaClass jSystem = new AndroidJavaClass("java.lang.System");
            jSystem.CallStatic("loadLibrary", FMOD.VERSION.dll);
            jSystem.CallStatic("loadLibrary", FMOD.Studio.STUDIO_VERSION.dll);
#endif

            // Call a function in fmod.dll to make sure it's loaded before fmodstudio.dll
            int temp1, temp2;
            FMOD.Memory.GetStats(out temp1, out temp2);
            Guid temp3;
            FMOD.Studio.Util.ParseID("", out temp3);
        }

#if UNITY_EDITOR
        // Maps the editor's active build target to an FMODPlatform;
        // returns FMODPlatform.None for targets FMOD does not support.
        public static FMODPlatform GetEditorFMODPlatform()
        {
            switch (EditorUserBuildSettings.activeBuildTarget)
            {
                case BuildTarget.Android:
                    return FMODPlatform.Android;
                case BuildTarget.iOS:
                    return FMODPlatform.iOS;
                case BuildTarget.PS4:
                    return FMODPlatform.PS4;
                case BuildTarget.PSP2:
                    return FMODPlatform.PSVita;
                case BuildTarget.StandaloneLinux:
                case BuildTarget.StandaloneLinux64:
                case BuildTarget.StandaloneLinuxUniversal:
                    return FMODPlatform.Linux;
#if UNITY_2017_3_OR_NEWER
                case BuildTarget.StandaloneOSX:
#else
                case BuildTarget.StandaloneOSXIntel:
                case BuildTarget.StandaloneOSXIntel64:
                case BuildTarget.StandaloneOSXUniversal:
#endif
                    return FMODPlatform.Mac;
                case BuildTarget.StandaloneWindows:
                case BuildTarget.StandaloneWindows64:
                    return FMODPlatform.Windows;
                case BuildTarget.XboxOne:
                    return FMODPlatform.XboxOne;
#if (UNITY_5_2 || UNITY_5_3_OR_NEWER) && !UNITY_2018_1_OR_NEWER
                case BuildTarget.WiiU:
                    return FMODPlatform.WiiU;
#endif
                case BuildTarget.WSAPlayer:
#if UNITY_2017_1_OR_NEWER
                    return FMODPlatform.UWP;
#elif UNITY_5_2 || UNITY_5_3_OR_NEWER
                    // Pre-2017.1: only the UWP SDK flavour of WSA is supported.
                    if (EditorUserBuildSettings.wsaSDK == WSASDK.UWP)
                    {
                        return FMODPlatform.UWP;
                    }
                    return FMODPlatform.None;
#else
                    if (EditorUserBuildSettings.wsaSDK == WSASDK.PhoneSDK81)
                    {
                        return FMODPlatform.WindowsPhone;
                    }
                    return FMODPlatform.None;
#endif
#if UNITY_5_3_OR_NEWER
                case BuildTarget.tvOS:
                    return FMODPlatform.AppleTV;
#endif
#if UNITY_SWITCH
                case BuildTarget.Switch:
                    return FMODPlatform.Switch;
#endif
#if UNITY_WEBGL
                case BuildTarget.WebGL:
                    return FMODPlatform.WebGL;
#endif
                default:
                    return FMODPlatform.None;
            }
        }
#endif
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: booking/incidental_reservations/incidental_reservation_upsert_notification.proto #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace HOLMS.Types.Booking.Reservations { /// <summary>Holder for reflection information generated from booking/incidental_reservations/incidental_reservation_upsert_notification.proto</summary> public static partial class IncidentalReservationUpsertNotificationReflection { #region Descriptor /// <summary>File descriptor for booking/incidental_reservations/incidental_reservation_upsert_notification.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static IncidentalReservationUpsertNotificationReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "ClBib29raW5nL2luY2lkZW50YWxfcmVzZXJ2YXRpb25zL2luY2lkZW50YWxf", "cmVzZXJ2YXRpb25fdXBzZXJ0X25vdGlmaWNhdGlvbi5wcm90bxIraG9sbXMu", "dHlwZXMuYm9va2luZy5pbmNpZGVudGFsX3Jlc2VydmF0aW9ucxouYm9va2lu", "Zy9yZXNlcnZhdGlvbnMvcmVzZXJ2YXRpb25fc3VtbWFyeS5wcm90bxpBYm9v", "a2luZy9pbmNpZGVudGFsX3Jlc2VydmF0aW9ucy9pbmNpZGVudGFsX2l0ZW1f", "cmVzZXJ2YXRpb24ucHJvdG8i9wEKJ0luY2lkZW50YWxSZXNlcnZhdGlvblVw", "c2VydE5vdGlmaWNhdGlvbhIRCglqX3dfdG9rZW4YASABKAkSUQoTYm9va2lu", "Z19yZXNlcnZhdGlvbhgCIAEoCzI0LmhvbG1zLnR5cGVzLmJvb2tpbmcucmVz", "ZXJ2YXRpb25zLlJlc2VydmF0aW9uU3VtbWFyeRJmChZpbmNpZGVudGFsX3Jl", "c2VydmF0aW9uGAMgASgLMkYuaG9sbXMudHlwZXMuYm9va2luZy5pbmNpZGVu", "dGFsX3Jlc2VydmF0aW9ucy5JbmNpZGVudGFsSXRlbVJlc2VydmF0aW9uQjla", "FGJvb2tpbmcvcmVzZXJ2YXRpb25zqgIgSE9MTVMuVHlwZXMuQm9va2luZy5S", "ZXNlcnZhdGlvbnNiBnByb3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { 
global::HOLMS.Types.Booking.Reservations.ReservationSummaryReflection.Descriptor, global::HOLMS.Types.Booking.IncidentalReservations.IncidentalItemReservationReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Booking.Reservations.IncidentalReservationUpsertNotification), global::HOLMS.Types.Booking.Reservations.IncidentalReservationUpsertNotification.Parser, new[]{ "JWToken", "BookingReservation", "IncidentalReservation" }, null, null, null) })); } #endregion } #region Messages public sealed partial class IncidentalReservationUpsertNotification : pb::IMessage<IncidentalReservationUpsertNotification> { private static readonly pb::MessageParser<IncidentalReservationUpsertNotification> _parser = new pb::MessageParser<IncidentalReservationUpsertNotification>(() => new IncidentalReservationUpsertNotification()); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<IncidentalReservationUpsertNotification> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::HOLMS.Types.Booking.Reservations.IncidentalReservationUpsertNotificationReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public IncidentalReservationUpsertNotification() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public IncidentalReservationUpsertNotification(IncidentalReservationUpsertNotification other) : this() { jWToken_ = other.jWToken_; BookingReservation = other.bookingReservation_ != null ? other.BookingReservation.Clone() : null; IncidentalReservation = other.incidentalReservation_ != null ? 
other.IncidentalReservation.Clone() : null; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public IncidentalReservationUpsertNotification Clone() { return new IncidentalReservationUpsertNotification(this); } /// <summary>Field number for the "j_w_token" field.</summary> public const int JWTokenFieldNumber = 1; private string jWToken_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string JWToken { get { return jWToken_; } set { jWToken_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "booking_reservation" field.</summary> public const int BookingReservationFieldNumber = 2; private global::HOLMS.Types.Booking.Reservations.ReservationSummary bookingReservation_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Booking.Reservations.ReservationSummary BookingReservation { get { return bookingReservation_; } set { bookingReservation_ = value; } } /// <summary>Field number for the "incidental_reservation" field.</summary> public const int IncidentalReservationFieldNumber = 3; private global::HOLMS.Types.Booking.IncidentalReservations.IncidentalItemReservation incidentalReservation_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Booking.IncidentalReservations.IncidentalItemReservation IncidentalReservation { get { return incidentalReservation_; } set { incidentalReservation_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as IncidentalReservationUpsertNotification); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(IncidentalReservationUpsertNotification other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (JWToken != other.JWToken) return false; if (!object.Equals(BookingReservation, other.BookingReservation)) return false; if 
(!object.Equals(IncidentalReservation, other.IncidentalReservation)) return false; return true; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (JWToken.Length != 0) hash ^= JWToken.GetHashCode(); if (bookingReservation_ != null) hash ^= BookingReservation.GetHashCode(); if (incidentalReservation_ != null) hash ^= IncidentalReservation.GetHashCode(); return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (JWToken.Length != 0) { output.WriteRawTag(10); output.WriteString(JWToken); } if (bookingReservation_ != null) { output.WriteRawTag(18); output.WriteMessage(BookingReservation); } if (incidentalReservation_ != null) { output.WriteRawTag(26); output.WriteMessage(IncidentalReservation); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (JWToken.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(JWToken); } if (bookingReservation_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(BookingReservation); } if (incidentalReservation_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(IncidentalReservation); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(IncidentalReservationUpsertNotification other) { if (other == null) { return; } if (other.JWToken.Length != 0) { JWToken = other.JWToken; } if (other.bookingReservation_ != null) { if (bookingReservation_ == null) { bookingReservation_ = new global::HOLMS.Types.Booking.Reservations.ReservationSummary(); } BookingReservation.MergeFrom(other.BookingReservation); } if (other.incidentalReservation_ != null) { if (incidentalReservation_ == null) { incidentalReservation_ = new 
global::HOLMS.Types.Booking.IncidentalReservations.IncidentalItemReservation(); } IncidentalReservation.MergeFrom(other.IncidentalReservation); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: input.SkipLastField(); break; case 10: { JWToken = input.ReadString(); break; } case 18: { if (bookingReservation_ == null) { bookingReservation_ = new global::HOLMS.Types.Booking.Reservations.ReservationSummary(); } input.ReadMessage(bookingReservation_); break; } case 26: { if (incidentalReservation_ == null) { incidentalReservation_ = new global::HOLMS.Types.Booking.IncidentalReservations.IncidentalItemReservation(); } input.ReadMessage(incidentalReservation_); break; } } } } } #endregion } #endregion Designer generated code
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Servers;
using OpenSim.Framework.Client;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;

namespace OpenSim.Region.CoreModules.Avatar.InstantMessage
{
    /// <summary>
    /// Reply returned by the remote offline-messaging service after a save attempt.
    /// </summary>
    public struct SendReply
    {
        public bool Success;
        public string Message;
        public int Disposition;
    }

    /// <summary>
    /// Region module that stores instant messages for offline users on an external
    /// REST service and replays them to the user at next login.
    /// </summary>
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "OfflineMessageModule")]
    public class OfflineMessageModule : ISharedRegionModule
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private bool enabled = true;
        private List<Scene> m_SceneList = new List<Scene>();
        private string m_RestURL = String.Empty;
        IMessageTransferModule m_TransferModule = null;
        private bool m_ForwardOfflineGroupMessages = true;

        // Per connected client: recipients that were already sent an
        // "offline" auto-reply (used by Disposition == 1, "only once per user").
        private Dictionary<IClientAPI, List<UUID>> m_repliesSent = new Dictionary<IClientAPI, List<UUID>>();

        /// <summary>
        /// Reads the [Messaging] configuration; the module stays enabled only when
        /// it is the selected offline-message handler and a service URL is given.
        /// </summary>
        public void Initialise(IConfigSource config)
        {
            IConfig cnf = config.Configs["Messaging"];
            if (cnf == null)
            {
                enabled = false;
                return;
            }

            // (The previous "cnf != null &&" re-check was redundant after the
            // null-return above.)
            if (cnf.GetString("OfflineMessageModule", "None") != "OfflineMessageModule")
            {
                enabled = false;
                return;
            }

            m_RestURL = cnf.GetString("OfflineMessageURL", "");
            if (m_RestURL == "")
            {
                m_log.Error("[OFFLINE MESSAGING] Module was enabled, but no URL is given, disabling");
                enabled = false;
                return;
            }

            m_ForwardOfflineGroupMessages = cnf.GetBoolean("ForwardOfflineGroupMessages", m_ForwardOfflineGroupMessages);
        }

        public void AddRegion(Scene scene)
        {
            if (!enabled)
                return;

            lock (m_SceneList)
            {
                m_SceneList.Add(scene);
                scene.EventManager.OnNewClient += OnNewClient;
            }
        }

        public void RegionLoaded(Scene scene)
        {
            if (!enabled)
                return;

            if (m_TransferModule == null)
            {
                m_TransferModule = scene.RequestModuleInterface<IMessageTransferModule>();
                if (m_TransferModule == null)
                {
                    scene.EventManager.OnNewClient -= OnNewClient;

                    enabled = false;
                    m_SceneList.Clear();

                    m_log.Error("[OFFLINE MESSAGING] No message transfer module is enabled. Disabling offline messages");
                    // BUG FIX: bail out here. Previously execution fell through and
                    // dereferenced the null m_TransferModule below, throwing a
                    // NullReferenceException during region load.
                    return;
                }

                m_TransferModule.OnUndeliveredMessage += UndeliveredMessage;
            }
        }

        public void RemoveRegion(Scene scene)
        {
            if (!enabled)
                return;

            lock (m_SceneList)
            {
                m_SceneList.Remove(scene);
            }
        }

        public void PostInitialise()
        {
            if (!enabled)
                return;

            m_log.Debug("[OFFLINE MESSAGING] Offline messages enabled");
        }

        public string Name
        {
            get { return "OfflineMessageModule"; }
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        public void Close()
        {
        }

        /// <summary>
        /// Returns the scene where the agent is present as a root agent, or null.
        /// </summary>
        private Scene FindScene(UUID agentID)
        {
            foreach (Scene s in m_SceneList)
            {
                ScenePresence presence = s.GetScenePresence(agentID);
                if (presence != null && !presence.IsChildAgent)
                    return s;
            }
            return null;
        }

        /// <summary>
        /// Returns the controlling client of the agent's root presence, or null.
        /// </summary>
        private IClientAPI FindClient(UUID agentID)
        {
            foreach (Scene s in m_SceneList)
            {
                ScenePresence presence = s.GetScenePresence(agentID);
                if (presence != null && !presence.IsChildAgent)
                    return presence.ControllingClient;
            }
            return null;
        }

        private void OnNewClient(IClientAPI client)
        {
            client.OnRetrieveInstantMessages += RetrieveInstantMessages;
            client.OnLogout += OnClientLoggedOut;
        }

        public void OnClientLoggedOut(IClientAPI client)
        {
            // Drop the per-client auto-reply bookkeeping so the dictionary
            // does not grow for the lifetime of the process.
            m_repliesSent.Remove(client);
        }

        /// <summary>
        /// Fetches all stored messages for a client from the REST service and
        /// delivers them, routing most through the scene event manager.
        /// </summary>
        private void RetrieveInstantMessages(IClientAPI client)
        {
            if (m_RestURL == String.Empty)
                return;

            m_log.DebugFormat("[OFFLINE MESSAGING]: Retrieving stored messages for {0}", client.AgentId);

            List<GridInstantMessage> msglist
                = SynchronousRestObjectRequester.MakeRequest<UUID, List<GridInstantMessage>>(
                    "POST", m_RestURL + "/RetrieveMessages/", client.AgentId);

            if (msglist == null)
                return;

            foreach (GridInstantMessage im in msglist)
            {
                if (im.dialog == (byte)InstantMessageDialog.InventoryOffered)
                {
                    // send it directly or else the item will be given twice
                    client.SendInstantMessage(im);
                }
                else
                {
                    // Send through scene event manager so all modules get a chance
                    // to look at this message before it gets delivered.
                    //
                    // Needed for proper state management for stored group
                    // invitations
                    //
                    im.offline = 1;

                    Scene s = FindScene(client.AgentId);
                    if (s != null)
                        s.EventManager.TriggerIncomingInstantMessage(im);
                }
            }
        }

        /// <summary>
        /// Invoked when a message could not be delivered; persists it on the REST
        /// service and, for person-to-person IMs, notifies the sender.
        /// </summary>
        private void UndeliveredMessage(GridInstantMessage im)
        {
            // Only persist message types that make sense to replay later.
            if (im.dialog != (byte)InstantMessageDialog.MessageFromObject &&
                im.dialog != (byte)InstantMessageDialog.MessageFromAgent &&
                im.dialog != (byte)InstantMessageDialog.GroupNotice &&
                im.dialog != (byte)InstantMessageDialog.GroupInvitation &&
                im.dialog != (byte)InstantMessageDialog.InventoryOffered &&
                im.dialog != (byte)InstantMessageDialog.TaskInventoryOffered)
            {
                return;
            }

            if (!m_ForwardOfflineGroupMessages)
            {
                if (im.dialog == (byte)InstantMessageDialog.GroupNotice ||
                    im.dialog == (byte)InstantMessageDialog.GroupInvitation)
                    return;
            }

            Scene scene = FindScene(new UUID(im.fromAgentID));
            if (scene == null)
                scene = m_SceneList[0];

            SendReply reply = SynchronousRestObjectRequester.MakeRequest<GridInstantMessage, SendReply>(
                "POST", m_RestURL + "/SaveMessage/?scope=" + scene.RegionInfo.ScopeID.ToString(), im);

            if (im.dialog == (byte)InstantMessageDialog.MessageFromAgent)
            {
                IClientAPI client = FindClient(new UUID(im.fromAgentID));
                if (client == null)
                    return;

                // ROBUSTNESS: a failed REST call yields a default SendReply whose
                // Message is null (not String.Empty), so test for both.
                if (string.IsNullOrEmpty(reply.Message))
                    reply.Message = "User is not logged in. " + (reply.Success ? "Message saved." : "Message not saved");

                bool sendReply = true;

                switch (reply.Disposition)
                {
                    case 0: // Normal
                        break;
                    case 1: // Only once per user
                        if (m_repliesSent.ContainsKey(client) && m_repliesSent[client].Contains(new UUID(im.toAgentID)))
                        {
                            sendReply = false;
                        }
                        else
                        {
                            if (!m_repliesSent.ContainsKey(client))
                                m_repliesSent[client] = new List<UUID>();
                            m_repliesSent[client].Add(new UUID(im.toAgentID));
                        }
                        break;
                }

                if (sendReply)
                {
                    // Note the swapped to/from fields: the auto-reply goes back to
                    // the sender and appears to come from the intended recipient.
                    client.SendInstantMessage(new GridInstantMessage(
                        null, new UUID(im.toAgentID),
                        "System", new UUID(im.fromAgentID),
                        (byte)InstantMessageDialog.MessageFromAgent,
                        reply.Message,
                        false, new Vector3()));
                }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using CoreGraphics;
using Toggl.Phoebe.Analytics;
using Toggl.Phoebe.Data;
using Toggl.Phoebe.Data.Reports;
using Toggl.Ross.Theme;
using Toggl.Ross.Views;
using UIKit;
using XPlatUtils;

namespace Toggl.Ross.ViewControllers
{
    /// <summary>
    /// Paged reports screen: an infinite horizontal scroll of ReportView pages,
    /// one per time interval, with a date selector and a sync status bar.
    /// </summary>
    public class ReportsViewController : UIViewController, InfiniteScrollView<ReportView>.IInfiniteScrollViewSource
    {
        private ZoomLevel _zoomLevel;

        /// <summary>
        /// Current reporting granularity; changing it refreshes the visible page,
        /// persists the choice and re-tracks the screen.
        /// </summary>
        public ZoomLevel ZoomLevel
        {
            get {
                return _zoomLevel;
            }
            set {
                if (_zoomLevel == value) {
                    return;
                }
                _zoomLevel = value;
                scrollView.RefreshVisibleView ();
                SummaryReportView.SaveReportsState (ZoomLevel);
                TrackScreenView ();
            }
        }

        private ReportsMenuController menuController;
        private DateSelectorView dateSelectorView;
        private TopBorder topBorder;
        private SummaryReportView dataSource;
        private InfiniteScrollView<ReportView> scrollView;
        private SyncStatusViewController.StatusView statusView;
        // Recycling pool for report pages (consumed by CreateView).
        private List<ReportView> cachedReports;
        // 0 = current period, negative values = periods in the past.
        private nint _timeSpaceIndex;
        private bool showStatus;

        static readonly nfloat padding = 24;
        static readonly nfloat navBarHeight = 64; // NOTE(review): not referenced in this file
        static readonly nfloat selectorHeight = 50;

        public ReportsViewController ()
        {
            EdgesForExtendedLayout = UIRectEdge.None;
            menuController = new ReportsMenuController ();
            dataSource = new SummaryReportView ();
            cachedReports = new List<ReportView> ();
            _zoomLevel = ZoomLevel.Week;
            _timeSpaceIndex = 0;
        }

        public override void ViewWillDisappear (bool animated)
        {
            // Restore the swipe-back gesture that ViewDidLoad disabled.
            NavigationController.InteractivePopGestureRecognizer.Enabled = true;
            base.ViewWillDisappear (animated);
        }

        public override void ViewDidDisappear (bool animated)
        {
            base.ViewDidDisappear (animated);

            if (menuController != null) {
                menuController.Detach ();
                menuController = null;
            }
        }

        public override void ViewDidLoad ()
        {
            base.ViewDidLoad ();

            _zoomLevel = SummaryReportView.GetLastZoomViewed ();

            View.BackgroundColor = UIColor.White;
            menuController.Attach (this);

            topBorder = new TopBorder ();
            dateSelectorView = new DateSelectorView ();
            dateSelectorView.LeftArrowPressed += (sender, e) => scrollView.SetPageIndex (-1, true);
            dateSelectorView.RightArrowPressed += (sender, e) => {
                // Never page into the future.
                if (_timeSpaceIndex >= 1) {
                    return;
                }
                scrollView.SetPageIndex (1, true);
            };

            scrollView = new InfiniteScrollView<ReportView> (this);
            scrollView.Delegate = new InfiniteScrollDelegate ();
            scrollView.OnChangePage += (sender, e) => LoadReportData ();

            statusView = new SyncStatusViewController.StatusView () {
                Retry = LoadReportData,
                Cancel = () => StatusBarShown = false,
                StatusFailText = "ReportsStatusFailText".Tr (),
                StatusSyncingText = "ReportsStatusSyncText".Tr ()
            };

            Add (scrollView);
            Add (dateSelectorView);
            Add (topBorder);
            Add (statusView);

            NavigationController.InteractivePopGestureRecognizer.Enabled = false;
        }

        public override void ViewDidLayoutSubviews ()
        {
            base.ViewDidLayoutSubviews ();

            topBorder.Frame = new CGRect (0.0f, 0.0f, View.Bounds.Width, 2.0f);
            dateSelectorView.Frame = new CGRect (0, View.Bounds.Height - selectorHeight, View.Bounds.Width, selectorHeight);
            scrollView.Frame = new CGRect (0.0f, 0.0f, View.Bounds.Width, View.Bounds.Height - selectorHeight);
            LayoutStatusBar ();
        }

        public override void LoadView ()
        {
            View = new UIView ().Apply (Style.Screen);
        }

        public override void ViewDidAppear (bool animated)
        {
            base.ViewDidAppear (animated);
            TrackScreenView ();
        }

        private void TrackScreenView ()
        {
            var screen = "Reports";
            switch (ZoomLevel) {
            case ZoomLevel.Week:
                screen = "Reports (Week)";
                break;
            case ZoomLevel.Month:
                screen = "Reports (Month)";
                break;
            case ZoomLevel.Year:
                screen = "Reports (Year)";
                break;
            }

            ServiceContainer.Resolve<ITracker> ().CurrentScreen = screen;
        }

        private void ChangeReportState ()
        {
            dataSource.Period = _zoomLevel;
            dateSelectorView.DateContent = FormattedIntervalDate (_timeSpaceIndex);
        }

        /// <summary>
        /// Syncs the visible page with the current zoom/interval and (re)loads it.
        /// </summary>
        private void LoadReportData ()
        {
            _timeSpaceIndex = scrollView.PageIndex;
            var reportView = scrollView.CurrentPage;
            reportView.ZoomLevel = ZoomLevel;
            reportView.TimeSpaceIndex = (int)_timeSpaceIndex;
            // Hide the status bar unless the page still needs (re)loading.
            StatusBarShown &= reportView.IsClean;
            reportView.LoadData ();
            ChangeReportState ();
        }

        /// <summary>
        /// Human-readable label for the interval <paramref name="backDate"/> pages
        /// back from now (0 = current, -1 = previous, etc.).
        /// </summary>
        private string FormattedIntervalDate (nint backDate)
        {
            string result = "";

            if (backDate == 0) {
                switch (ZoomLevel) {
                case ZoomLevel.Week:
                    result = "ReportsThisWeekSelector".Tr ();
                    break;
                case ZoomLevel.Month:
                    result = "ReportsThisMonthSelector".Tr ();
                    break;
                case ZoomLevel.Year:
                    result = "ReportsThisYearSelector".Tr ();
                    break;
                }
            } else if (backDate == -1) {
                switch (ZoomLevel) {
                case ZoomLevel.Week:
                    result = "ReportsLastWeekSelector".Tr ();
                    break;
                case ZoomLevel.Month:
                    result = "ReportsLastMonthSelector".Tr ();
                    break;
                case ZoomLevel.Year:
                    result = "ReportsLastYearSelector".Tr ();
                    break;
                }
            } else {
                var startDate = dataSource.ResolveStartDate ((int)_timeSpaceIndex);
                var endDate = dataSource.ResolveEndDate (startDate);

                switch (ZoomLevel) {
                case ZoomLevel.Week:
                    if (startDate.Month == endDate.Month) {
                        result = startDate.ToString ("ReportsStartWeekInterval".Tr ()) + " - " + endDate.ToString ("ReportsEndWeekInterval".Tr ());
                    } else {
                        // BUG FIX: the end of the interval previously used
                        // startDate's month ("28th Jan - 3th Jan"); use endDate's.
                        result = startDate.Day + "th " + startDate.ToString ("MMM") + " - " + endDate.Day + "th " + endDate.ToString ("MMM");
                    }
                    break;
                case ZoomLevel.Month:
                    result = startDate.ToString ("ReportsMonthInterval".Tr ());
                    break;
                case ZoomLevel.Year:
                    result = startDate.ToString ("ReportsYearInterval".Tr ());
                    break;
                }
            }
            return result;
        }

        #region StatusBar

        private void LayoutStatusBar ()
        {
            var size = View.Frame.Size;
            // Slide in above the date selector when shown; park just off-screen otherwise.
            var statusY = showStatus ? size.Height - selectorHeight : size.Height + 2f;
            statusView.Frame = new CGRect (0, statusY, size.Width, selectorHeight);
        }

        private bool StatusBarShown
        {
            get { return showStatus; }
            set {
                if (showStatus == value) {
                    return;
                }
                showStatus = value;
                UIView.Animate (0.5f, LayoutStatusBar);
            }
        }

        #endregion

        #region IInfiniteScrollViewSource implementation

        public ReportView CreateView ()
        {
            ReportView view;
            if (cachedReports.Count == 0) {
                view = new ReportView ();
            } else {
                view = cachedReports[0];
                cachedReports.RemoveAt (0);
            }

            if (scrollView.Pages.Count > 0) {
                view.Position = scrollView.CurrentPage.Position;
            }

            view.LoadStart += ReportLoadStart;
            view.LoadFinished += ReportLoadFinished;
            return view;
        }

        public void Dispose (ReportView view)
        {
            var reportView = view;
            if (reportView.IsClean) {
                reportView.StopReloadData ();
            }
            view.LoadStart -= ReportLoadStart;
            view.LoadFinished -= ReportLoadFinished;
        }

        public bool ShouldStartScroll ()
        {
            var currentReport = scrollView.CurrentPage;
            if (!currentReport.Dragging) {
                currentReport.ScrollEnabled = false;
                // Align every cached page's vertical position with the visible one.
                foreach (var item in scrollView.Pages) {
                    var report = item;
                    report.Position = currentReport.Position;
                }
            }
            return !currentReport.Dragging;
        }

        #endregion

        private void ReportLoadStart (object sender, EventArgs args)
        {
            statusView.IsSyncing |= StatusBarShown;
        }

        private void ReportLoadFinished (object sender, EventArgs args)
        {
            var report = (ReportView)sender;
            if (report.IsError) {
                // Make sure that error is shown
                statusView.IsSyncing = false;
                StatusBarShown = true;
            } else {
                // Successful sync, clear ignoring flag
                StatusBarShown = false;
            }
        }

        internal class InfiniteScrollDelegate : UIScrollViewDelegate
        {
            public override void DecelerationEnded (UIScrollView scrollView)
            {
                var infiniteScroll = (InfiniteScrollView<ReportView>)scrollView;
                infiniteScroll.CurrentPage.ScrollEnabled = true;
            }
        }

        internal class TopBorder : UIView
        {
            public TopBorder ()
            {
                BackgroundColor = UIColor.Clear;
            }

            public override void Draw (CGRect rect)
            {
                using (CGContext g = UIGraphics.GetCurrentContext ()) {
                    Color.TimeBarBoderColor.SetColor ();
                    // One physical pixel regardless of screen scale.
                    g.FillRect (new CGRect (0.0f, 0.0f, rect.Width, 1.0f / ContentScaleFactor));
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using Internal.IL;
using Internal.TypeSystem;

using Debug = System.Diagnostics.Debug;

namespace Internal.IL.Stubs
{
    /// <summary>
    /// A growable buffer of encoded IL instructions. Several streams created by the
    /// same <see cref="ILEmitter"/> are concatenated and label-patched by
    /// <see cref="ILEmitter.Link"/>.
    /// </summary>
    public class ILCodeStream
    {
        private struct LabelAndOffset
        {
            public readonly ILCodeLabel Label;
            public readonly int Offset;
            public LabelAndOffset(ILCodeLabel label, int offset)
            {
                Label = label;
                Offset = offset;
            }
        }

        internal byte[] _instructions;
        internal int _length;
        // Offset of this stream within the linked method body; -1 until linked.
        internal int _startOffsetForLinking;

        private ArrayBuilder<LabelAndOffset> _offsetsNeedingPatching;

        private ILEmitter _emitter;

        internal ILCodeStream(ILEmitter emitter)
        {
            _instructions = Array.Empty<byte>();
            _startOffsetForLinking = -1;
            _emitter = emitter;
        }

        private void EmitByte(byte b)
        {
            // Grow geometrically; "+ 10" covers the initial empty array.
            if (_instructions.Length == _length)
                Array.Resize<byte>(ref _instructions, 2 * _instructions.Length + 10);
            _instructions[_length++] = b;
        }

        private void EmitUInt16(ushort value)
        {
            EmitByte((byte)value);
            EmitByte((byte)(value >> 8));
        }

        private void EmitUInt32(int value)
        {
            EmitByte((byte)value);
            EmitByte((byte)(value >> 8));
            EmitByte((byte)(value >> 16));
            EmitByte((byte)(value >> 24));
        }

        public void Emit(ILOpcode opcode)
        {
            // Two-byte opcodes are encoded with the 0xFE prefix.
            if ((int)opcode > 0x100)
                EmitByte((byte)ILOpcode.prefix1);
            EmitByte((byte)opcode);
        }

        public void Emit(ILOpcode opcode, ILToken token)
        {
            Emit(opcode);
            EmitUInt32((int)token);
        }

        /// <summary>
        /// Emits the shortest encoding that loads the 32-bit constant <paramref name="value"/>.
        /// </summary>
        public void EmitLdc(int value)
        {
            if (-1 <= value && value <= 8)
            {
                // ldc.i4.m1 immediately precedes ldc.i4.0 in the opcode map,
                // so the arithmetic below also covers value == -1.
                Emit((ILOpcode)(ILOpcode.ldc_i4_0 + value));
            }
            else if (value == (sbyte)value)
            {
                Emit(ILOpcode.ldc_i4_s);
                EmitByte((byte)value);
            }
            else
            {
                Emit(ILOpcode.ldc_i4);
                EmitUInt32(value);
            }
        }

        /// <summary>
        /// Emits the shortest encoding that loads argument <paramref name="index"/>.
        /// </summary>
        public void EmitLdArg(int index)
        {
            if (index < 4)
            {
                Emit((ILOpcode)(ILOpcode.ldarg_0 + index));
            }
            else if (index < 0x100)
            {
                // CONSISTENCY/SIZE: use the ldarg.s short form for indices 4..255,
                // matching EmitLdArga/EmitLdLoc/EmitStLoc; previously the 4-byte
                // ldarg encoding was emitted here. Semantics are identical.
                Emit(ILOpcode.ldarg_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.ldarg);
                EmitUInt16((ushort)index);
            }
        }

        public void EmitLdArga(int index)
        {
            if (index < 0x100)
            {
                Emit(ILOpcode.ldarga_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.ldarga);
                EmitUInt16((ushort)index);
            }
        }

        public void EmitLdLoc(ILLocalVariable variable)
        {
            int index = (int)variable;
            if (index < 4)
            {
                Emit((ILOpcode)(ILOpcode.ldloc_0 + index));
            }
            else if (index < 0x100)
            {
                Emit(ILOpcode.ldloc_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.ldloc);
                EmitUInt16((ushort)index);
            }
        }

        public void EmitLdLoca(ILLocalVariable variable)
        {
            int index = (int)variable;
            if (index < 0x100)
            {
                Emit(ILOpcode.ldloca_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.ldloca);
                EmitUInt16((ushort)index);
            }
        }

        public void EmitStLoc(ILLocalVariable variable)
        {
            int index = (int)variable;
            if (index < 4)
            {
                Emit((ILOpcode)(ILOpcode.stloc_0 + index));
            }
            else if (index < 0x100)
            {
                Emit(ILOpcode.stloc_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.stloc);
                EmitUInt16((ushort)index);
            }
        }

        /// <summary>
        /// Emits a branch to <paramref name="label"/>. A 4-byte placeholder is
        /// written now and patched to the real relative offset by PatchLabels.
        /// </summary>
        public void Emit(ILOpcode opcode, ILCodeLabel label)
        {
            Debug.Assert(opcode == ILOpcode.br || opcode == ILOpcode.brfalse ||
                opcode == ILOpcode.brtrue || opcode == ILOpcode.beq ||
                opcode == ILOpcode.bge || opcode == ILOpcode.bgt ||
                opcode == ILOpcode.ble || opcode == ILOpcode.blt ||
                opcode == ILOpcode.bne_un || opcode == ILOpcode.bge_un ||
                opcode == ILOpcode.bgt_un || opcode == ILOpcode.ble_un ||
                opcode == ILOpcode.blt_un || opcode == ILOpcode.leave);

            Emit(opcode);
            _offsetsNeedingPatching.Add(new LabelAndOffset(label, _length));
            // Placeholder holds the distance from this operand to the end of the
            // instruction, which PatchLabels subtracts when computing the target.
            EmitUInt32(4);
        }

        public void EmitSwitch(ILCodeLabel[] labels)
        {
            Emit(ILOpcode.switch_);
            EmitUInt32(labels.Length);

            // Each placeholder is the byte distance to the end of the jump table.
            int remainingBytes = labels.Length * 4;
            foreach (var label in labels)
            {
                _offsetsNeedingPatching.Add(new LabelAndOffset(label, _length));
                EmitUInt32(remainingBytes);
                remainingBytes -= 4;
            }
        }

        /// <summary>
        /// Emits the ldind/ldobj instruction appropriate for loading a value of
        /// <paramref name="type"/> through a pointer.
        /// </summary>
        public void EmitLdInd(TypeDesc type)
        {
            switch (type.UnderlyingType.Category)
            {
                case TypeFlags.Byte:
                case TypeFlags.SByte:
                case TypeFlags.Boolean:
                    Emit(ILOpcode.ldind_i1);
                    break;
                case TypeFlags.Char:
                case TypeFlags.UInt16:
                case TypeFlags.Int16:
                    Emit(ILOpcode.ldind_i2);
                    break;
                case TypeFlags.UInt32:
                case TypeFlags.Int32:
                    Emit(ILOpcode.ldind_i4);
                    break;
                case TypeFlags.UInt64:
                case TypeFlags.Int64:
                    Emit(ILOpcode.ldind_i8);
                    break;
                case TypeFlags.Single:
                    Emit(ILOpcode.ldind_r4);
                    break;
                case TypeFlags.Double:
                    Emit(ILOpcode.ldind_r8);
                    break;
                case TypeFlags.IntPtr:
                case TypeFlags.UIntPtr:
                case TypeFlags.Pointer:
                case TypeFlags.FunctionPointer:
                    Emit(ILOpcode.ldind_i);
                    break;
                case TypeFlags.Array:
                case TypeFlags.SzArray:
                case TypeFlags.Class:
                case TypeFlags.Interface:
                    Emit(ILOpcode.ldind_ref);
                    break;
                case TypeFlags.ValueType:
                case TypeFlags.Nullable:
                    Emit(ILOpcode.ldobj, _emitter.NewToken(type));
                    break;
                default:
                    Debug.Assert(false, "Unexpected TypeDesc category");
                    break;
            }
        }

        /// <summary>
        /// Emits the stind/stobj instruction appropriate for storing a value of
        /// <paramref name="type"/> through a pointer.
        /// </summary>
        public void EmitStInd(TypeDesc type)
        {
            switch (type.UnderlyingType.Category)
            {
                case TypeFlags.Byte:
                case TypeFlags.SByte:
                case TypeFlags.Boolean:
                    Emit(ILOpcode.stind_i1);
                    break;
                case TypeFlags.Char:
                case TypeFlags.UInt16:
                case TypeFlags.Int16:
                    Emit(ILOpcode.stind_i2);
                    break;
                case TypeFlags.UInt32:
                case TypeFlags.Int32:
                    Emit(ILOpcode.stind_i4);
                    break;
                case TypeFlags.UInt64:
                case TypeFlags.Int64:
                    Emit(ILOpcode.stind_i8);
                    break;
                case TypeFlags.Single:
                    Emit(ILOpcode.stind_r4);
                    break;
                case TypeFlags.Double:
                    Emit(ILOpcode.stind_r8);
                    break;
                case TypeFlags.IntPtr:
                case TypeFlags.UIntPtr:
                case TypeFlags.Pointer:
                case TypeFlags.FunctionPointer:
                    Emit(ILOpcode.stind_i);
                    break;
                case TypeFlags.Array:
                case TypeFlags.SzArray:
                case TypeFlags.Class:
                case TypeFlags.Interface:
                    Emit(ILOpcode.stind_ref);
                    break;
                case TypeFlags.ValueType:
                case TypeFlags.Nullable:
                    Emit(ILOpcode.stobj, _emitter.NewToken(type));
                    break;
                default:
                    Debug.Assert(false, "Unexpected TypeDesc category");
                    break;
            }
        }

        /// <summary>
        /// Emits the stelem instruction appropriate for an array element of
        /// <paramref name="type"/>.
        /// </summary>
        public void EmitStElem(TypeDesc type)
        {
            switch (type.UnderlyingType.Category)
            {
                case TypeFlags.Byte:
                case TypeFlags.SByte:
                case TypeFlags.Boolean:
                    Emit(ILOpcode.stelem_i1);
                    break;
                case TypeFlags.Char:
                case TypeFlags.UInt16:
                case TypeFlags.Int16:
                    Emit(ILOpcode.stelem_i2);
                    break;
                case TypeFlags.UInt32:
                case TypeFlags.Int32:
                    Emit(ILOpcode.stelem_i4);
                    break;
                case TypeFlags.UInt64:
                case TypeFlags.Int64:
                    Emit(ILOpcode.stelem_i8);
                    break;
                case TypeFlags.Single:
                    Emit(ILOpcode.stelem_r4);
                    break;
                case TypeFlags.Double:
                    Emit(ILOpcode.stelem_r8);
                    break;
                case TypeFlags.IntPtr:
                case TypeFlags.UIntPtr:
                case TypeFlags.Pointer:
                case TypeFlags.FunctionPointer:
                    Emit(ILOpcode.stelem_i);
                    break;
                case TypeFlags.Array:
                case TypeFlags.SzArray:
                case TypeFlags.Class:
                case TypeFlags.Interface:
                    Emit(ILOpcode.stelem_ref);
                    break;
                case TypeFlags.ValueType:
                case TypeFlags.Nullable:
                    Emit(ILOpcode.stelem, _emitter.NewToken(type));
                    break;
                default:
                    Debug.Assert(false, "Unexpected TypeDesc category");
                    break;
            }
        }

        /// <summary>
        /// Emits the ldelem instruction appropriate for an array element of
        /// <paramref name="type"/>.
        /// </summary>
        public void EmitLdElem(TypeDesc type)
        {
            switch (type.UnderlyingType.Category)
            {
                case TypeFlags.Byte:
                case TypeFlags.SByte:
                case TypeFlags.Boolean:
                    Emit(ILOpcode.ldelem_i1);
                    break;
                case TypeFlags.Char:
                case TypeFlags.UInt16:
                case TypeFlags.Int16:
                    Emit(ILOpcode.ldelem_i2);
                    break;
                case TypeFlags.UInt32:
                case TypeFlags.Int32:
                    Emit(ILOpcode.ldelem_i4);
                    break;
                case TypeFlags.UInt64:
                case TypeFlags.Int64:
                    Emit(ILOpcode.ldelem_i8);
                    break;
                case TypeFlags.Single:
                    Emit(ILOpcode.ldelem_r4);
                    break;
                case TypeFlags.Double:
                    Emit(ILOpcode.ldelem_r8);
                    break;
                case TypeFlags.IntPtr:
                case TypeFlags.UIntPtr:
                case TypeFlags.Pointer:
                case TypeFlags.FunctionPointer:
                    Emit(ILOpcode.ldelem_i);
                    break;
                case TypeFlags.Array:
                case TypeFlags.SzArray:
                case TypeFlags.Class:
                case TypeFlags.Interface:
                    Emit(ILOpcode.ldelem_ref);
                    break;
                case TypeFlags.ValueType:
                case TypeFlags.Nullable:
                    Emit(ILOpcode.ldelem, _emitter.NewToken(type));
                    break;
                default:
                    Debug.Assert(false, "Unexpected TypeDesc category");
                    break;
            }
        }

        public void EmitLabel(ILCodeLabel label)
        {
            label.Place(this, _length);
        }

        /// <summary>
        /// Rewrites every branch placeholder with the real relative offset.
        /// Must run after linking has assigned <see cref="_startOffsetForLinking"/>.
        /// </summary>
        internal void PatchLabels()
        {
            for (int i = 0; i < _offsetsNeedingPatching.Count; i++)
            {
                LabelAndOffset patch = _offsetsNeedingPatching[i];

                Debug.Assert(patch.Label.IsPlaced);
                Debug.Assert(_startOffsetForLinking > -1);

                int offset = patch.Offset;

                // The placeholder (delta) holds the distance from the operand to
                // the end of the instruction; IL branch offsets are relative to
                // the instruction end.
                int delta = _instructions[offset + 3] << 24 |
                    _instructions[offset + 2] << 16 |
                    _instructions[offset + 1] << 8 |
                    _instructions[offset];

                int value = patch.Label.AbsoluteOffset - _startOffsetForLinking - patch.Offset - delta;

                _instructions[offset] = (byte)value;
                _instructions[offset + 1] = (byte)(value >> 8);
                _instructions[offset + 2] = (byte)(value >> 16);
                _instructions[offset + 3] = (byte)(value >> 24);
            }
        }
    }

    /// <summary>
    /// Represent a token. Use one of the overloads of <see cref="ILEmitter.NewToken"/>
    /// to create a new token.
    /// </summary>
    public enum ILToken { }

    /// <summary>
    /// Represents a local variable. Use <see cref="ILEmitter.NewLocal"/> to create a new local variable.
    /// </summary>
    public enum ILLocalVariable { }

    /// <summary>
    /// MethodIL implementation backed by an IL byte array produced by <see cref="ILEmitter"/>.
    /// </summary>
    public class ILStubMethodIL : MethodIL
    {
        private byte[] _ilBytes;
        private LocalVariableDefinition[] _locals;
        private Object[] _tokens;
        private MethodDesc _method;

        public ILStubMethodIL(MethodDesc owningMethod, byte[] ilBytes, LocalVariableDefinition[] locals, Object[] tokens)
        {
            _ilBytes = ilBytes;
            _locals = locals;
            _tokens = tokens;
            _method = owningMethod;
        }

        public ILStubMethodIL(ILStubMethodIL methodIL)
        {
            _ilBytes = methodIL._ilBytes;
            _locals = methodIL._locals;
            _tokens = methodIL._tokens;
            _method = methodIL._method;
        }

        public override MethodDesc OwningMethod
        {
            get { return _method; }
        }

        public override byte[] GetILBytes()
        {
            return _ilBytes;
        }

        public override int MaxStack
        {
            get
            {
                // Conservative estimate: every instruction is at least one byte
                // and pushes at most one slot, so the byte count is an upper bound.
                return _ilBytes.Length;
            }
        }

        public override ILExceptionRegion[] GetExceptionRegions()
        {
            return Array.Empty<ILExceptionRegion>();
        }

        public override bool IsInitLocals
        {
            get { return true; }
        }

        public override LocalVariableDefinition[] GetLocals()
        {
            return _locals;
        }

        public override Object GetObject(int token)
        {
            // Tokens are 1-based indices into _tokens with a kind in the top byte.
            return _tokens[(token & 0xFFFFFF) - 1];
        }
    }

    /// <summary>
    /// A branch target within an emitted method. Created unplaced; pinned to a
    /// stream offset by <see cref="ILCodeStream.EmitLabel"/>.
    /// </summary>
    public class ILCodeLabel
    {
        private ILCodeStream _codeStream;
        private int _offsetWithinCodeStream;

        internal bool IsPlaced
        {
            get { return _codeStream != null; }
        }

        internal int AbsoluteOffset
        {
            get
            {
                Debug.Assert(IsPlaced);
                Debug.Assert(_codeStream._startOffsetForLinking >= 0);
                return _codeStream._startOffsetForLinking + _offsetWithinCodeStream;
            }
        }

        internal ILCodeLabel()
        {
        }

        internal void Place(ILCodeStream codeStream, int offsetWithinCodeStream)
        {
            Debug.Assert(!IsPlaced);
            _codeStream = codeStream;
            _offsetWithinCodeStream = offsetWithinCodeStream;
        }
    }

    /// <summary>
    /// Factory for code streams, tokens, locals and labels; Link() concatenates
    /// the streams into a single <see cref="ILStubMethodIL"/>.
    /// </summary>
    public class ILEmitter
    {
        private ArrayBuilder<ILCodeStream> _codeStreams;
        private ArrayBuilder<LocalVariableDefinition> _locals;
        private ArrayBuilder<Object> _tokens;

        public ILEmitter()
        {
        }

        public ILCodeStream NewCodeStream()
        {
            ILCodeStream stream = new ILCodeStream(this);
            _codeStreams.Add(stream);
            return stream;
        }

        private ILToken NewToken(Object value, int tokenType)
        {
            Debug.Assert(value != null);
            _tokens.Add(value);
            // 1-based index combined with the metadata-table kind in the top byte.
            return (ILToken)(_tokens.Count | tokenType);
        }

        public ILToken NewToken(TypeDesc value)
        {
            return NewToken(value, 0x01000000);
        }

        public ILToken NewToken(MethodDesc value)
        {
            return NewToken(value, 0x0a000000);
        }

        public ILToken NewToken(FieldDesc value)
        {
            return NewToken(value, 0x0a000000);
        }

        public ILToken NewToken(string value)
        {
            return NewToken(value, 0x70000000);
        }

        public ILToken NewToken(MethodSignature value)
        {
            return NewToken(value, 0x11000000);
        }

        public ILLocalVariable NewLocal(TypeDesc localType, bool isPinned = false)
        {
            int index = _locals.Count;
            _locals.Add(new LocalVariableDefinition(localType, isPinned));
            return (ILLocalVariable)index;
        }

        public ILCodeLabel NewCodeLabel()
        {
            var newLabel = new ILCodeLabel();
            return newLabel;
        }

        /// <summary>
        /// Concatenates all code streams, patches branch targets and produces the
        /// final method body for <paramref name="owningMethod"/>.
        /// </summary>
        public MethodIL Link(MethodDesc owningMethod)
        {
            int totalLength = 0;
            for (int i = 0; i < _codeStreams.Count; i++)
            {
                ILCodeStream ilCodeStream = _codeStreams[i];
                ilCodeStream._startOffsetForLinking = totalLength;
                totalLength += ilCodeStream._length;
            }

            byte[] ilInstructions = new byte[totalLength];
            int copiedLength = 0;
            for (int i = 0; i < _codeStreams.Count; i++)
            {
                ILCodeStream ilCodeStream = _codeStreams[i];
                ilCodeStream.PatchLabels();
                Array.Copy(ilCodeStream._instructions, 0, ilInstructions, copiedLength, ilCodeStream._length);
                copiedLength += ilCodeStream._length;
            }

            return new ILStubMethodIL(owningMethod, ilInstructions, _locals.ToArray(), _tokens.ToArray());
        }
    }

    /// <summary>
    /// Base class for compiler-generated methods whose IL is produced on demand.
    /// </summary>
    public abstract partial class ILStubMethod : MethodDesc
    {
        public abstract MethodIL EmitIL();

        public override bool HasCustomAttribute(string attributeNamespace, string attributeName)
        {
            return false;
        }
    }
}
// // Copyright (c) 2004-2020 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // namespace NLog.Internal.NetworkSenders { using System; using System.IO; using System.Net; using System.Net.Sockets; using System.Threading; using NLog.Common; /// <summary> /// A base class for all network senders. Supports one-way sending of messages /// over various protocols. 
/// </summary> internal abstract class NetworkSender : IDisposable { private static int currentSendTime; /// <summary> /// Initializes a new instance of the <see cref="NetworkSender" /> class. /// </summary> /// <param name="url">The network URL.</param> protected NetworkSender(string url) { Address = url; LastSendTime = Interlocked.Increment(ref currentSendTime); } /// <summary> /// Gets the address of the network endpoint. /// </summary> public string Address { get; private set; } /// <summary> /// Gets the last send time. /// </summary> public int LastSendTime { get; private set; } /// <summary> /// Initializes this network sender. /// </summary> public void Initialize() { DoInitialize(); } /// <summary> /// Closes the sender and releases any unmanaged resources. /// </summary> /// <param name="continuation">The continuation.</param> public void Close(AsyncContinuation continuation) { DoClose(continuation); } /// <summary> /// Flushes any pending messages and invokes a continuation. /// </summary> /// <param name="continuation">The continuation.</param> public void FlushAsync(AsyncContinuation continuation) { DoFlush(continuation); } /// <summary> /// Send the given text over the specified protocol. /// </summary> /// <param name="bytes">Bytes to be sent.</param> /// <param name="offset">Offset in buffer.</param> /// <param name="length">Number of bytes to send.</param> /// <param name="asyncContinuation">The asynchronous continuation.</param> public void Send(byte[] bytes, int offset, int length, AsyncContinuation asyncContinuation) { LastSendTime = Interlocked.Increment(ref currentSendTime); DoSend(bytes, offset, length, asyncContinuation); } /// <summary> /// Closes the sender and releases any unmanaged resources. /// </summary> public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } /// <summary> /// Performs sender-specific initialization. 
/// </summary> protected virtual void DoInitialize() { } /// <summary> /// Performs sender-specific close operation. /// </summary> /// <param name="continuation">The continuation.</param> protected virtual void DoClose(AsyncContinuation continuation) { continuation(null); } /// <summary> /// Performs sender-specific flush. /// </summary> /// <param name="continuation">The continuation.</param> protected virtual void DoFlush(AsyncContinuation continuation) { continuation(null); } /// <summary> /// Actually sends the given text over the specified protocol. /// </summary> /// <param name="bytes">The bytes to be sent.</param> /// <param name="offset">Offset in buffer.</param> /// <param name="length">Number of bytes to send.</param> /// <param name="asyncContinuation">The async continuation to be invoked after the buffer has been sent.</param> /// <remarks>To be overridden in inheriting classes.</remarks> protected abstract void DoSend(byte[] bytes, int offset, int length, AsyncContinuation asyncContinuation); /// <summary> /// Parses the URI into an endpoint address. 
/// </summary> /// <param name="uri">The URI to parse.</param> /// <param name="addressFamily">The address family.</param> /// <returns>Parsed endpoint.</returns> protected virtual EndPoint ParseEndpointAddress(Uri uri, AddressFamily addressFamily) { #if SILVERLIGHT return new DnsEndPoint(uri.Host, uri.Port, addressFamily); #else switch (uri.HostNameType) { case UriHostNameType.IPv4: case UriHostNameType.IPv6: return new IPEndPoint(IPAddress.Parse(uri.Host), uri.Port); default: { #if NETSTANDARD1_0 var addresses = Dns.GetHostAddressesAsync(uri.Host).Result; #else var addresses = Dns.GetHostEntry(uri.Host).AddressList; #endif foreach (var addr in addresses) { if (addr.AddressFamily == addressFamily || addressFamily == AddressFamily.Unspecified) { return new IPEndPoint(addr, uri.Port); } } throw new IOException($"Cannot resolve '{uri.Host}' to an address in '{addressFamily}'"); } } #endif } public virtual void CheckSocket() { } private void Dispose(bool disposing) { if (disposing) { Close(ex => { }); } } } }
//
// Mono.Facebook.User.cs:
//
// Authors:
//	Thomas Van Machelen (thomas.vanmachelen@gmail.com)
//	George Talusan (george@convolve.ca)
//
// (C) Copyright 2007 Novell, Inc. (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Xml.Serialization;

namespace Mono.Facebook
{
    // A single network/group affiliation entry from the Facebook API response.
    public class Affiliation
    {
        [XmlElement("nid")]
        public long NId;

        [XmlElement("name")]
        public string Name;

        [XmlElement("type")]
        public string Type;

        [XmlElement("status")]
        public string Status;

        [XmlElement("year")]
        public string Year;
    }

    // Wrapper for the repeated <affiliation> elements.
    public class Affiliations
    {
        [XmlElement("affiliation")]
        public Affiliation[] affiliations_array;

        // Never null: an absent element is exposed as an empty array.
        [XmlIgnore]
        public Affiliation[] AffiliationCollection
        {
            get
            {
                if (affiliations_array != null)
                    return affiliations_array;
                return new Affiliation[0];
            }
        }
    }

    // Wrapper for the repeated <concentration> elements.
    public class Concentrations
    {
        [XmlElement("concentration")]
        public string[] concentration_array;

        // Never null: an absent element is exposed as an empty array.
        [XmlIgnore]
        public string[] ConcentrationCollection
        {
            get
            {
                if (concentration_array != null)
                    return concentration_array;
                return new string[0];
            }
        }
    }

    // Wrapper for the repeated <education_info> elements.
    public class EducationHistory
    {
        [XmlElement("education_info")]
        public EducationInfo[] educations_array;

        // Never null: an absent element is exposed as an empty array.
        [XmlIgnore]
        public EducationInfo[] EducationInfo
        {
            get
            {
                if (educations_array != null)
                    return educations_array;
                return new EducationInfo[0];
            }
        }
    }

    // One school entry in the education history.
    public class EducationInfo
    {
        [XmlElement("name")]
        public string Name;

        [XmlElement("year")]
        public int Year;

        [XmlElement("concentrations")]
        public Concentrations concentrations;

        // NOTE(review): assumes the serializer always populated 'concentrations';
        // a response without a <concentrations> element would throw here — confirm
        // against real API responses.
        [XmlIgnore]
        public string[] Concentrations
        {
            get { return concentrations.ConcentrationCollection; }
        }
    }

    // High-school details (up to two schools).
    public class HighSchoolInfo
    {
        [XmlElement("hs1_info")]
        public string HighSchoolOneName;

        [XmlElement("hs2_info")]
        public string HighSchoolTwoName;

        [XmlElement("grad_year")]
        public int GraduationYear;

        [XmlElement("hs1_id")]
        public int HighSchoolOneId;

        [XmlElement("hs2_id")]
        public int HighSchoolTwoId;
    }

    // What the user is looking for ("meeting_for" in the profile).
    public class MeetingFor
    {
        [XmlElement("seeking")]
        public string[] seeking;

        // Never null: an absent element is exposed as an empty array.
        [XmlIgnore]
        public string[] Seeking
        {
            get
            {
                if (seeking != null)
                    return seeking;
                return new string[0];
            }
        }
    }

    // Which sexes the user is interested in ("meeting_sex" in the profile).
    public class MeetingSex
    {
        [XmlElement("sex")]
        public string[] sex;

        // Never null: an absent element is exposed as an empty array.
        [XmlIgnore]
        public string[] Sex
        {
            get
            {
                if (sex != null)
                    return sex;
                return new string[0];
            }
        }
    }

    // Status message together with its (epoch) timestamp.
    public class Status
    {
        [XmlElement("message")]
        public string Message;

        [XmlElement("time")]
        public long Time;
    }

    // Wrapper for the repeated <work_info> elements.
    public class WorkHistory
    {
        [XmlElement("work_info")]
        public WorkInfo[] workinfo_array;

        // Never null: an absent element is exposed as an empty array.
        [XmlIgnore]
        public WorkInfo[] WorkInfo
        {
            get
            {
                if (workinfo_array != null)
                    return workinfo_array;
                return new WorkInfo[0];
            }
        }
    }

    // One employment entry in the work history.
    public class WorkInfo
    {
        [XmlElement("location")]
        public Location Location;

        [XmlElement("company_name")]
        public string CompanyName;

        [XmlElement("position")]
        public string Position;

        [XmlElement("description")]
        public string Description;

        [XmlElement("start_date")]
        public string StartDate;

        [XmlElement("end_date")]
        public string EndDate;
    }

    // Full user profile; extends Friend with all profile fields.
    public class User : Friend
    {
        // Field names accepted by the users.getInfo API call.
        public static readonly string[] FIELDS = { "about_me", "activities", "affiliations", "birthday", "books", "current_location", "education_history", "first_name", "hometown_location", "interests", "last_name", "movies", "music", "name", "notes_count", "pic", "pic_big", "pic_small", "political", "profile_update_time", "quotes", "relationship_status", "religion", "sex", "significant_other_id", "status", "timezone", "tv", "uid", "wall_count" };

        [XmlElement("about_me")]
        public string AboutMe;

        [XmlElement("activities")]
        public string Activities;

        [XmlElement("affiliations")]
        public Affiliations affiliations;

        // Never null: both a missing <affiliations> element and a missing
        // inner collection are exposed as an empty array.
        [XmlIgnore]
        public Affiliation[] Affiliations
        {
            get
            {
                if (affiliations == null)
                    return new Affiliation[0];

                Affiliation[] collection = affiliations.AffiliationCollection;
                if (collection != null)
                    return collection;
                return new Affiliation[0];
            }
        }

        [XmlElement("birthday")]
        public string Birthday;

        [XmlElement("books")]
        public string Books;

        [XmlElement("current_location")]
        public Location CurrentLocation;

        [XmlElement("education_history")]
        public EducationHistory EducationHistory;

        [XmlElement("first_name")]
        public string FirstName;

        [XmlElement("hometown_location")]
        public Location HomeTownLocation;

        [XmlElement("hs_info")]
        public HighSchoolInfo HighSchoolInfo;

        [XmlElement("interests")]
        public string Interests;

        // Raw string flag from the API; use IsAppUser for the parsed value.
        [XmlElement("is_app_user")]
        public string is_app_user;

        public bool IsAppUser
        {
            get { return Util.GetBoolFromString(is_app_user); }
        }

        [XmlElement("last_name")]
        public string LastName;

        [XmlElement("meeting_for")]
        public MeetingFor MeetingFor;

        [XmlElement("meeting_sex")]
        public MeetingSex MeetingSex;

        [XmlElement("movies")]
        public string Movies;

        [XmlElement("music")]
        public string Music;

        [XmlElement("name")]
        public string Name;

        // Raw string count from the API; use NotesCount for the parsed value.
        [XmlElement("notes_count")]
        public string notes_count;

        [XmlIgnore]
        public int NotesCount
        {
            get { return Util.GetIntFromString(notes_count); }
        }

        [XmlElement("pic")]
        public string Pic;

        [XmlIgnore]
        public Uri PicUri
        {
            get { return new Uri(Pic); }
        }

        [XmlElement("pic_big")]
        public string PicBig;

        [XmlIgnore]
        public Uri PicBigUri
        {
            get { return new Uri(PicBig); }
        }

        [XmlElement("pic_small")]
        public string PicSmall;

        [XmlIgnore]
        public Uri PicSmallUri
        {
            get { return new Uri(PicSmall); }
        }

        [XmlElement("political")]
        public string Political;

        [XmlElement("profile_update_time")]
        public long ProfileUpdateTime;

        [XmlElement("quotes")]
        public string Quotes;

        [XmlElement("relationship_status")]
        public string RelationshipStatus;

        [XmlElement("religion")]
        public string Religion;

        [XmlElement("sex")]
        public string Sex;

        // Raw string id from the API; use SignificantOtherId for the parsed value.
        [XmlElement("significant_other_id")]
        public string significant_other_id;

        [XmlIgnore]
        public long SignificantOtherId
        {
            get { return Util.GetIntFromString(significant_other_id); }
        }

        [XmlElement("status")]
        public Status Status;

        // Raw string offset from the API; use TimeZone for the parsed value.
        [XmlElement("timezone")]
        public string timezone;

        public int TimeZone
        {
            get { return Util.GetIntFromString(timezone); }
        }

        [XmlElement("tv")]
        public string Tv;

        // Raw string count from the API; use WallCount for the parsed value.
        [XmlElement("wall_count")]
        public string wall_count;

        [XmlIgnore]
        public int WallCount
        {
            get { return Util.GetIntFromString(wall_count); }
        }

        [XmlElement("work_history")]
        public WorkHistory WorkHistory;
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
using System.Threading;
#if !ES_BUILD_AGAINST_DOTNET_V35
using Contract = System.Diagnostics.Contracts.Contract;
#else
using Contract = Microsoft.Diagnostics.Contracts.Internal.Contract;
#endif

#if ES_BUILD_STANDALONE
namespace Microsoft.Diagnostics.Tracing
#else
using System.Threading.Tasks;
namespace System.Diagnostics.Tracing
#endif
{
    /// <summary>
    /// Tracks activities. This is meant to be a singleton (accessed by the ActivityTracer.Instance static property)
    ///
    /// Logically this simply holds the m_current variable that holds the async local that holds the current ActivityInfo.
    /// An ActivityInfo represents an activity (which knows its creator and thus knows its path).
    ///
    /// Most of the magic is in the async local (it gets copied to new tasks).
    ///
    /// On every start event call OnStart
    ///
    ///     Guid activityID;
    ///     Guid relatedActivityID;
    ///     if (OnStart(activityName, out activityID, out relatedActivityID, ForceStop, options))
    ///         // Log Start event with activityID and relatedActivityID
    ///
    /// On every stop event call OnStop
    ///
    ///     Guid activityID;
    ///     if (OnStop(activityName, ref activityID ForceStop))
    ///         // Stop event with activityID
    ///
    /// On any normal event log the event with activityTracker.CurrentActivityId
    /// </summary>
    internal class ActivityTracker
    {
        /// <summary>
        /// Called when a work item begins. The activity name = providerName + activityName without 'Start' suffix.
        /// It updates CurrentActivityId to track the new activity.
        ///
        /// The start event should use as its activity ID the CurrentActivityId AFTER calling this routine and as its
        /// RelatedActivityID the CurrentActivityId BEFORE calling this routine (the creator).
        ///
        /// If activity tracing is not on, then activityId and relatedActivityId are not set.
        /// </summary>
        public void OnStart(string providerName, string activityName, int task, ref Guid activityId, ref Guid relatedActivityId, EventActivityOptions options)
        {
            if (m_current == null)        // We are not enabled
            {
                // We used to rely on the TPL provider turning us on, but that has the disadvantage that you don't get Start-Stop tracking
                // until you use Tasks for the first time (which you may never do).  Thus we change it to pull rather than push for whether
                // we are enabled.
                if (m_checkedForEnable)
                    return;
                m_checkedForEnable = true;
                if (TplEtwProvider.Log.IsEnabled(EventLevel.Informational, TplEtwProvider.Keywords.TasksFlowActivityIds))
                    Enable();
                if (m_current == null)
                    return;
            }

            Debug.Assert((options & EventActivityOptions.Disable) == 0);

            var currentActivity = m_current.Value;
            var fullActivityName = NormalizeActivityName(providerName, activityName, task);

            var etwLog = TplEtwProvider.Log;
            if (etwLog.Debug)
            {
                etwLog.DebugFacilityMessage("OnStartEnter", fullActivityName);
                etwLog.DebugFacilityMessage("OnStartEnterActivityState", ActivityInfo.LiveActivities(currentActivity));
            }

            if (currentActivity != null)
            {
                // Stop activity tracking if we reached the maximum allowed depth
                if (currentActivity.m_level >= MAX_ACTIVITY_DEPTH)
                {
                    activityId = Guid.Empty;
                    relatedActivityId = Guid.Empty;
                    if (etwLog.Debug)
                        etwLog.DebugFacilityMessage("OnStartRET", "Fail");
                    return;
                }
                // Check for recursion, and force-stop any activities if the activity already started.
                if ((options & EventActivityOptions.Recursive) == 0)
                {
                    ActivityInfo existingActivity = FindActiveActivity(fullActivityName, currentActivity);
                    if (existingActivity != null)
                    {
                        OnStop(providerName, activityName, task, ref activityId);
                        // OnStop may have changed the current activity; refetch it.
                        currentActivity = m_current.Value;
                    }
                }
            }

            // Get a unique ID for this activity.
            long id;
            if (currentActivity == null)
                id = Interlocked.Increment(ref m_nextId);
            else
                id = Interlocked.Increment(ref currentActivity.m_lastChildID);

            // The previous ID is my 'causer' and becomes my related activity ID
            relatedActivityId = EventSource.CurrentThreadActivityId;

            // Add to the list of started but not stopped activities.
            ActivityInfo newActivity = new ActivityInfo(fullActivityName, id, currentActivity, relatedActivityId, options);
            m_current.Value = newActivity;

            // Remember the current ID so we can log it
            activityId = newActivity.ActivityId;

            if (etwLog.Debug)
            {
                etwLog.DebugFacilityMessage("OnStartRetActivityState", ActivityInfo.LiveActivities(newActivity));
                etwLog.DebugFacilityMessage1("OnStartRet", activityId.ToString(), relatedActivityId.ToString());
            }
        }

        /// <summary>
        /// Called when a work item stops. The activity name = providerName + activityName without 'Stop' suffix.
        /// It updates the m_current variable to track this fact. The Stop event associated with stop should log the ActivityID associated with the event.
        ///
        /// If activity tracing is not on, then activityId and relatedActivityId are not set.
        /// </summary>
        public void OnStop(string providerName, string activityName, int task, ref Guid activityId)
        {
            if (m_current == null)        // We are not enabled
                return;

            var fullActivityName = NormalizeActivityName(providerName, activityName, task);

            var etwLog = TplEtwProvider.Log;
            if (etwLog.Debug)
            {
                etwLog.DebugFacilityMessage("OnStopEnter", fullActivityName);
                etwLog.DebugFacilityMessage("OnStopEnterActivityState", ActivityInfo.LiveActivities(m_current.Value));
            }

            for (; ; ) // This is a retry loop.
            {
                ActivityInfo currentActivity = m_current.Value;
                ActivityInfo newCurrentActivity = null; // if we have seen any live activities (orphans), set to the first one we have seen.

                // Search to find the activity to stop in one pass.  This ensures that we don't let one mistake
                // (stopping something that was not started) cause all active starts to be stopped.
                // By first finding the target start to stop we are more robust.
                ActivityInfo activityToStop = FindActiveActivity(fullActivityName, currentActivity);

                // Ignore stops where we can't find a start because we may have popped them previously.
                if (activityToStop == null)
                {
                    activityId = Guid.Empty;
                    // TODO add some logging about this. Basically could not find matching start.
                    if (etwLog.Debug)
                        etwLog.DebugFacilityMessage("OnStopRET", "Fail");
                    return;
                }

                activityId = activityToStop.ActivityId;

                // See if there are any orphans that need to be stopped.
                ActivityInfo orphan = currentActivity;
                while (orphan != activityToStop && orphan != null)
                {
                    if (orphan.m_stopped != 0)      // Skip dead activities.
                    {
                        orphan = orphan.m_creator;
                        continue;
                    }
                    if (orphan.CanBeOrphan())
                    {
                        // We can't pop anything after we see a valid orphan, remember this for later when we update m_current.
                        if (newCurrentActivity == null)
                            newCurrentActivity = orphan;
                    }
                    else
                    {
                        orphan.m_stopped = 1;
                        Debug.Assert(orphan.m_stopped != 0);
                    }
                    orphan = orphan.m_creator;
                }

                // Try to Stop the activity atomically.  Other threads may be trying to do this as well.
                if (Interlocked.CompareExchange(ref activityToStop.m_stopped, 1, 0) == 0)
                {
                    // I succeeded stopping this activity.  Now we update our m_current pointer.
                    // If I haven't yet determined the new current activity, it is my creator.
                    if (newCurrentActivity == null)
                        newCurrentActivity = activityToStop.m_creator;
                    m_current.Value = newCurrentActivity;

                    if (etwLog.Debug)
                    {
                        etwLog.DebugFacilityMessage("OnStopRetActivityState", ActivityInfo.LiveActivities(newCurrentActivity));
                        etwLog.DebugFacilityMessage("OnStopRet", activityId.ToString());
                    }
                    return;
                }
                // We failed to stop it.  We must have hit a race to stop it.  Just start over and try again.
            }
        }

        /// <summary>
        /// Turns on activity tracking.  It is sticky: once on it stays on (race issues otherwise).
        /// </summary>
        public void Enable()
        {
            if (m_current == null)
            {
                // Catch the NotImplementedException thrown by runtimes without AsyncLocal support.
                try
                {
                    m_current = new AsyncLocal<ActivityInfo>(ActivityChanging);
                }
                catch (NotImplementedException)
                {
#if (!ES_BUILD_PCL && ! ES_BUILD_PN)
                    // send message to debugger without delay
                    System.Diagnostics.Debugger.Log(0, null, "Activity Enabled() called but AsyncLocals Not Supported (pre V4.6). Ignoring Enable");
#endif
                }
            }
        }

        /// <summary>
        /// An activity tracker is a singleton, this is how you get the one and only instance.
        /// </summary>
        public static ActivityTracker Instance { get { return s_activityTrackerInstance; } }

        #region private
        /// <summary>
        /// The current activity ID.  Use this to log normal events.
        /// </summary>
        private Guid CurrentActivityId { get { return m_current.Value.ActivityId; } }

        /// <summary>
        /// Searches for an active (non-stopped) activity with the given name.  Returns null if not found.
        /// </summary>
        private ActivityInfo FindActiveActivity(string name, ActivityInfo startLocation)
        {
            var activity = startLocation;
            while (activity != null)
            {
                if (name == activity.m_name && activity.m_stopped == 0)
                    return activity;
                activity = activity.m_creator;
            }
            return null;
        }

        /// <summary>
        /// Strip out "Start" or "End" suffix from activity name and add providerName prefix.
        /// If the name does not end in Start or Stop and task is non-zero, use the task number as the name of the activity.
        /// </summary>
        private string NormalizeActivityName(string providerName, string activityName, int task)
        {
            if (activityName.EndsWith(EventSource.s_ActivityStartSuffix, StringComparison.Ordinal))
                activityName = activityName.Substring(0, activityName.Length - EventSource.s_ActivityStartSuffix.Length);
            else if (activityName.EndsWith(EventSource.s_ActivityStopSuffix, StringComparison.Ordinal))
                activityName = activityName.Substring(0, activityName.Length - EventSource.s_ActivityStopSuffix.Length);
            else if (task != 0)
                activityName = "task" + task.ToString();

            // We use provider name to distinguish between activities from different providers.
            return providerName + activityName;
        }

        // *******************************************************************************
        /// <summary>
        /// An ActivityInfo represents a particular activity.   It is almost read-only.   The only
        /// fields that change after creation are
        ///    m_lastChildID - used to generate unique IDs for the children activities and for the most part can be ignored.
        ///    m_stopped - indicates that this activity is dead
        /// This read-only-ness is important because an activity's m_creator chain forms the
        /// 'Path of creation' for the activity (which is also its unique ID) but is also used as
        /// the 'list of live parents' which indicate of those ancestors, which are alive (if they
        /// are not marked dead they are alive).
        /// </summary>
        private class ActivityInfo
        {
            public ActivityInfo(string name, long uniqueId, ActivityInfo creator, Guid activityIDToRestore, EventActivityOptions options)
            {
                m_name = name;
                m_eventOptions = options;
                m_creator = creator;
                m_uniqueId = uniqueId;
                m_level = creator != null ? creator.m_level + 1 : 0;
                m_activityIdToRestore = activityIDToRestore;

                // Create a nice GUID that encodes the chain of activities that started this one.
                CreateActivityPathGuid(out m_guid, out m_activityPathGuidOffset);
            }

            public Guid ActivityId
            {
                get
                {
                    return m_guid;
                }
            }

            // Human-readable '/'-separated path of unique IDs from the root to this activity.
            public static string Path(ActivityInfo activityInfo)
            {
                if (activityInfo == null)
                    return ("");
                return Path(activityInfo.m_creator) + "/" + activityInfo.m_uniqueId.ToString();
            }

            public override string ToString()
            {
                return m_name + "(" + Path(this) + (m_stopped != 0 ? ",DEAD)" : ")");
            }

            // Debug helper: ';'-separated dump of this activity and all its ancestors.
            public static string LiveActivities(ActivityInfo list)
            {
                if (list == null)
                    return "";
                return list.ToString() + ";" + LiveActivities(list.m_creator);
            }

            public bool CanBeOrphan()
            {
                if ((m_eventOptions & EventActivityOptions.Detachable) != 0)
                    return true;
                return false;
            }

            #region private

            #region CreateActivityPathGuid
            /// <summary>
            /// Logically every activity has a Path (see Path()) that describes the activities that caused this one
            /// (rooted in an activity that predates activity tracking).
            ///
            /// We wish to encode this path in the Guid to the extent that we can.  Many of the paths have
            /// many small numbers in them and we take advantage of this in the encoding to output as long
            /// a path in the GUID as possible.
            ///
            /// Because of the possibility of GUID collision, we only use 96 of the 128 bits of the GUID
            /// for encoding the path.  The last 32 bits are a simple checksum (and random number) that
            /// identifies this as using the convention defined here.
            ///
            /// It returns both the GUID which has the path as well as the offset that points just beyond
            /// the end of the activity (so it can be appended to).  Note that if the end is in a nibble
            /// (it uses nibbles instead of bytes as the unit of encoding), then it will point at the unfinished
            /// byte (since the top nibble can't be zero you can determine if this is true by seeing if
            /// this byte is nonZero).  This offset is needed to efficiently create the ID for child activities.
            /// </summary>
            private unsafe void CreateActivityPathGuid(out Guid idRet, out int activityPathGuidOffset)
            {
                fixed (Guid* outPtr = &idRet)
                {
                    int activityPathGuidOffsetStart = 0;
                    if (m_creator != null)
                    {
                        // Children extend their creator's path in place.
                        activityPathGuidOffsetStart = m_creator.m_activityPathGuidOffset;
                        idRet = m_creator.m_guid;
                    }
                    else
                    {
                        // TODO FIXME - differentiate between AD inside PCL
                        int appDomainID = 0;
#if (!ES_BUILD_STANDALONE && !ES_BUILD_PN)
                        appDomainID = System.Threading.Thread.GetDomainID();
#endif
                        // We start with the appdomain number to make this unique among appdomains.
                        activityPathGuidOffsetStart = AddIdToGuid(outPtr, activityPathGuidOffsetStart, (uint)appDomainID);
                    }

                    activityPathGuidOffset = AddIdToGuid(outPtr, activityPathGuidOffsetStart, (uint)m_uniqueId);

                    // If the path does not fit, make a GUID by incrementing rather than as a path, keeping as much of the path as possible.
                    if (12 < activityPathGuidOffset)
                        CreateOverflowGuid(outPtr);
                }
            }

            /// <summary>
            /// If we can't fit the activity Path into the GUID we come here.  What we do is simply
            /// generate a 4 byte number (s_nextOverflowId).  Then look for an ancestor that has
            /// sufficient space for this ID.  By doing this, we preserve the fact that this activity
            /// is a child (of unknown depth) from that ancestor.
            /// </summary>
            private unsafe void CreateOverflowGuid(Guid* outPtr)
            {
                // Search backwards for an ancestor that has sufficient space to put the ID.
                for (ActivityInfo ancestor = m_creator; ancestor != null; ancestor = ancestor.m_creator)
                {
                    if (ancestor.m_activityPathGuidOffset <= 10)  // we need at least 2 bytes.
                    {
                        uint id = unchecked((uint)Interlocked.Increment(ref ancestor.m_lastChildID));        // Get a unique ID
                        // Try to put the ID into the GUID
                        *outPtr = ancestor.m_guid;
                        int endId = AddIdToGuid(outPtr, ancestor.m_activityPathGuidOffset, id, true);

                        // Does it fit?
                        if (endId <= 12)
                            break;
                    }
                }
            }

            /// <summary>
            /// The encoding for a list of numbers used to make Activity GUIDs.  Basically
            /// we operate on nibbles (which are nice because they show up as hex digits).  The
            /// list is ended with a end nibble (0) and depending on the nibble value (below)
            /// the value is either encoded into the nibble itself or it can spill over into the
            /// bytes that follow.
            /// </summary>
            enum NumberListCodes : byte
            {
                End = 0x0,             // ends the list.   No valid value has this prefix.
                LastImmediateValue = 0xA,

                PrefixCode = 0xB,      // all the 'long' encodings go here.  If the next nibble is MultiByte1-4
                                       // than this is a 'overflow' id.   Unlike the hierarchical IDs these are
                                       // allocated densely but don't tell you anything about nesting.  we use
                                       // these when we run out of space in the GUID to store the path.

                MultiByte1 = 0xC,      // 1 byte follows.  If this Nibble is in the high bits, it the high bits of the number are stored in the low nibble.

                // commented out because the code does not explicitly reference the names (but they are logically defined).
                // MultiByte2 = 0xD,   // 2 bytes follow (we don't bother with the nibble optimization)
                // MultiByte3 = 0xE,   // 3 bytes follow (we don't bother with the nibble optimization)
                // MultiByte4 = 0xF,   // 4 bytes follow (we don't bother with the nibble optimization)
            }

            /// Add the activity id 'id' to the output Guid 'outPtr' starting at the offset 'whereToAddId'.
            /// Thus if this number is 6 that is where 'id' will be added.  This will return 13 (12
            /// is the maximum number of bytes that fit in a GUID) if the path did not fit.
            /// If 'overflow' is true, then the number is encoded as an 'overflow' number (which has a
            /// special (longer) prefix that indicates that this ID is allocated differently.
            private static unsafe int AddIdToGuid(Guid* outPtr, int whereToAddId, uint id, bool overflow = false)
            {
                byte* ptr = (byte*)outPtr;
                byte* endPtr = ptr + 12;
                ptr += whereToAddId;
                if (endPtr <= ptr)
                    return 13;                // 12 means we might exactly fit, 13 means we definitely did not fit

                if (0 < id && id <= (uint)NumberListCodes.LastImmediateValue && !overflow)
                    WriteNibble(ref ptr, endPtr, id);
                else
                {
                    // How many bytes the value needs.
                    uint len = 4;
                    if (id <= 0xFF)
                        len = 1;
                    else if (id <= 0xFFFF)
                        len = 2;
                    else if (id <= 0xFFFFFF)
                        len = 3;

                    if (overflow)
                    {
                        if (endPtr <= ptr + 2)        // I need at least 2 bytes
                            return 13;

                        // Write out the prefix code nibble and the length nibble
                        WriteNibble(ref ptr, endPtr, (uint)NumberListCodes.PrefixCode);
                    }
                    // The rest is the same for overflow and non-overflow case
                    WriteNibble(ref ptr, endPtr, (uint)NumberListCodes.MultiByte1 + (len - 1));

                    // Do we have an odd nibble?   If so flush it or use it for the 12 byte case.
                    if (ptr < endPtr && *ptr != 0)
                    {
                        // If the value < 4096 we can use the nibble we are otherwise just outputting as padding.
                        if (id < 4096)
                        {
                            // Indicate this is a 1 byte multicode with 4 high order bits in the lower nibble.
                            *ptr = (byte)(((uint)NumberListCodes.MultiByte1 << 4) + (id >> 8));
                            id &= 0xFF;      // Now we only want the low order bits.
                        }
                        ptr++;
                    }

                    // Write out the bytes.
                    while (0 < len)
                    {
                        if (endPtr <= ptr)
                        {
                            ptr++;        // Indicate that we have overflowed
                            break;
                        }
                        *ptr++ = (byte)id;
                        id = (id >> 8);
                        --len;
                    }
                }

                // Compute the checksum
                uint* sumPtr = (uint*)outPtr;
                // We set the last DWORD to the sum of the first 3 DWORDS in the GUID.
                sumPtr[3] = sumPtr[0] + sumPtr[1] + sumPtr[2] + 0x599D99AD;    // This last number is a random number (it identifies us as us)

                return (int)(ptr - ((byte*)outPtr));
            }

            /// <summary>
            /// Write a single Nibble 'value' (must be 0-15) to the byte buffer represented by *ptr.
            /// Will not go past 'endPtr'.  Also it assumes that we never write 0 so we can detect
            /// whether a nibble has already been written to ptr because it will be nonzero.
            /// Thus if it is non-zero it adds to the current byte, otherwise it advances and writes
            /// the new byte (in the high bits) of the next byte.
            /// </summary>
            private static unsafe void WriteNibble(ref byte* ptr, byte* endPtr, uint value)
            {
                Debug.Assert(0 <= value && value < 16);
                Debug.Assert(ptr < endPtr);

                if (*ptr != 0)
                    *ptr++ |= (byte)value;
                else
                    *ptr = (byte)(value << 4);
            }

            #endregion // CreateGuidForActivityPath

            readonly internal string m_name;                  // The name used in the 'start' and 'stop' APIs to help match up
            readonly long m_uniqueId;                         // a small number that makes this activity unique among its siblings
            internal readonly Guid m_guid;                    // Activity Guid, it is basically an encoding of the Path() (see CreateActivityPathGuid)
            internal readonly int m_activityPathGuidOffset;   // Keeps track of where in m_guid the causality path stops (used to generated child GUIDs)
            internal readonly int m_level;                    // current depth of the Path() of the activity (used to keep recursion under control)
            readonly internal EventActivityOptions m_eventOptions;  // Options passed to start.
            internal long m_lastChildID;                      // used to create a unique ID for my children activities
            internal int m_stopped;                           // This work item has stopped
            readonly internal ActivityInfo m_creator;         // My parent (creator).  Forms the Path() for the activity.
            readonly internal Guid m_activityIdToRestore;     // The Guid to restore after a stop.
            #endregion
        }

        // This callback is used to initialize the m_current AsyncLocal Variable.
        // Its job is to keep the ETW Activity ID (part of thread local storage) in sync
        // with m_current.ActivityID
        void ActivityChanging(AsyncLocalValueChangedArgs<ActivityInfo> args)
        {
            ActivityInfo cur = args.CurrentValue;
            ActivityInfo prev = args.PreviousValue;

            // Are we popping off a value?   (we have a prev, and its creator is cur)
            // Then check if we should use the GUID at the time of the start event
            if (prev != null && prev.m_creator == cur)
            {
                // If the saved activity ID is not the same as the creator activity
                // that takes precedence (it means someone explicitly did a SetActivityID)
                // Set it to that and get out
                if (cur == null || prev.m_activityIdToRestore != cur.ActivityId)
                {
                    EventSource.SetCurrentThreadActivityId(prev.m_activityIdToRestore);
                    return;
                }
            }

            // OK we did not have an explicit SetActivityID set.   Then we should be
            // setting the activity to current ActivityInfo.  However that activity
            // might be dead, in which case we should skip it, so we never set
            // the ID to dead things.
            while (cur != null)
            {
                // We found a live activity (typically the first time), set it to that.
                if (cur.m_stopped == 0)
                {
                    EventSource.SetCurrentThreadActivityId(cur.ActivityId);
                    return;
                }
                cur = cur.m_creator;
            }
            // we can get here if there is no information on our activity stack (everything is dead)
            // currently we do nothing, as that seems better than setting to Guid.Empty.
        }

        /// <summary>
        /// Async local variables have the property that they are automatically copied whenever a task is created and used
        /// while that task is running.   Thus m_current 'flows' to any task that is caused by the current thread that
        /// last set it.
        ///
        /// This variable points at a linked list that represents all Activities that have started but have not stopped.
        /// </summary>
        AsyncLocal<ActivityInfo> m_current;
        bool m_checkedForEnable;

        // Singleton
        private static ActivityTracker s_activityTrackerInstance = new ActivityTracker();

        // Used to create unique IDs at the top level.  Not used for nested Ids (each activity has its own id generator)
        static long m_nextId = 0;
        private const ushort MAX_ACTIVITY_DEPTH = 100;  // Limit maximum depth of activities to be tracked at 100.
                                                        // This will avoid leaking memory in case of activities that are never stopped.

        #endregion
    }

#if ES_BUILD_STANDALONE || ES_BUILD_PN
    /******************************** SUPPORT *****************************/
    /// <summary>
    /// This is supplied by the framework.   It has the semantics that the value is copied to any new Tasks that are created
    /// by the current task.   Thus all causally related code gets this value.    Note that reads and writes to this VARIABLE
    /// (not what it points at) do not need to be protected by locks because it is inherently thread local (you always
    /// only get your thread local copy which means that you never have races).
    /// </summary>
    ///
#if ES_BUILD_STANDALONE
    [EventSource(Name = "Microsoft.Tasks.Nuget")]
#else
    [EventSource(Name = "System.Diagnostics.Tracing.TplEtwProvider")]
#endif
    internal class TplEtwProvider : EventSource
    {
        public class Keywords
        {
            public const EventKeywords TasksFlowActivityIds = (EventKeywords)0x80;
            public const EventKeywords Debug = (EventKeywords)0x20000;
        }

        public static TplEtwProvider Log = new TplEtwProvider();
        public bool Debug { get { return IsEnabled(EventLevel.Verbose, Keywords.Debug); } }

        public void DebugFacilityMessage(string Facility, string Message) { WriteEvent(1, Facility, Message); }
        public void DebugFacilityMessage1(string Facility, string Message, string Arg) { WriteEvent(2, Facility, Message, Arg); }
        public void SetActivityId(Guid Id) { WriteEvent(3, Id); }
    }
#endif

#if ES_BUILD_AGAINST_DOTNET_V35 || ES_BUILD_PCL || NO_ASYNC_LOCAL
    // In these cases we don't have any Async local support.   Do nothing.
    internal sealed class AsyncLocalValueChangedArgs<T>
    {
        public T PreviousValue { get { return default(T); } }
        public T CurrentValue { get { return default(T); } }
    }

    internal sealed class AsyncLocal<T>
    {
        public AsyncLocal(Action<AsyncLocalValueChangedArgs<T>> valueChangedHandler)
        {
            throw new NotImplementedException("AsyncLocal only available on V4.6 and above");
        }
        public T Value
        {
            get { return default(T); }
            set { }
        }
    }
#endif
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using Nini.Config;
using log4net;
using System;
using System.Reflection;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml;
using System.Xml.Serialization;
using System.Collections.Generic;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using OpenSim.Framework;
using OpenSim.Framework.Servers.HttpServer;
using OpenMetaverse;

namespace OpenSim.Server.Handlers.GridUser
{
    /// <summary>
    /// Stream handler for POST requests to /griduser.  Decodes the form-encoded body,
    /// dispatches on its METHOD key to the matching IGridUserService call, and returns
    /// an XML &lt;ServerResponse&gt; document.
    /// </summary>
    public class GridUserServerPostHandler : BaseStreamHandler
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private IGridUserService m_GridUserService;

        public GridUserServerPostHandler(IGridUserService service) :
                base("POST", "/griduser")
        {
            m_GridUserService = service;
        }

        /// <summary>
        /// Entry point: read and parse the request body, dispatch on METHOD, and
        /// return the XML response bytes.  Any exception yields a failure response.
        /// </summary>
        public override byte[] Handle(string path, Stream requestData,
                IOSHttpRequest httpRequest, IOSHttpResponse httpResponse)
        {
            string body;
            // FIX: dispose the reader (and underlying stream) instead of just Close().
            using (StreamReader sr = new StreamReader(requestData))
                body = sr.ReadToEnd();

            body = body.Trim();
            //m_log.DebugFormat("[XXX]: query String: {0}", body);

            string method = string.Empty;
            try
            {
                Dictionary<string, object> request = ServerUtils.ParseQueryString(body);

                if (!request.ContainsKey("METHOD"))
                    return FailureResult();

                method = request["METHOD"].ToString();

                switch (method)
                {
                    case "loggedin":
                        return LoggedIn(request);
                    case "loggedout":
                        return LoggedOut(request);
                    case "sethome":
                        return SetHome(request);
                    case "setposition":
                        return SetPosition(request);
                    case "getgriduserinfo":
                        return GetGridUserInfo(request);
                    case "getgriduserinfos":
                        return GetGridUserInfos(request);
                }
                m_log.DebugFormat("[GRID USER HANDLER]: unknown method request: {0}", method);
            }
            catch (Exception e)
            {
                m_log.DebugFormat("[GRID USER HANDLER]: Exception in method {0}: {1}", method, e);
            }

            return FailureResult();
        }

        /// <summary>Record a login for UserID and return the resulting grid-user record.</summary>
        byte[] LoggedIn(Dictionary<string, object> request)
        {
            if (!request.ContainsKey("UserID"))
                return FailureResult();

            string user = request["UserID"].ToString();

            GridUserInfo guinfo = m_GridUserService.LoggedIn(user);

            // FIX: a null record previously caused a NullReferenceException here that
            // was only converted to a failure by the outer catch; fail explicitly.
            if (guinfo == null)
                return FailureResult();

            Dictionary<string, object> result = new Dictionary<string, object>();
            result["result"] = guinfo.ToKeyValuePairs();

            string xmlString = ServerUtils.BuildXmlResponse(result);
            //m_log.DebugFormat("[GRID USER HANDLER]: resp string: {0}", xmlString);
            return Util.UTF8NoBomEncoding.GetBytes(xmlString);
        }

        /// <summary>Record a logout with the last known region/position/lookat.</summary>
        byte[] LoggedOut(Dictionary<string, object> request)
        {
            string userID;
            UUID regionID;
            Vector3 position;
            Vector3 lookat;

            if (!UnpackArgs(request, out userID, out regionID, out position, out lookat))
                return FailureResult();

            // Session ID is not transmitted over this protocol; UUID.Zero is deliberate.
            if (m_GridUserService.LoggedOut(userID, UUID.Zero, regionID, position, lookat))
                return SuccessResult();

            return FailureResult();
        }

        /// <summary>Set the user's home region/position/lookat.</summary>
        byte[] SetHome(Dictionary<string, object> request)
        {
            string user;
            UUID region;
            Vector3 position;
            Vector3 look;

            if (!UnpackArgs(request, out user, out region, out position, out look))
                return FailureResult();

            if (m_GridUserService.SetHome(user, region, position, look))
                return SuccessResult();

            return FailureResult();
        }

        /// <summary>Record the user's last position.</summary>
        byte[] SetPosition(Dictionary<string, object> request)
        {
            string user;
            UUID region;
            Vector3 position;
            Vector3 look;

            // UnpackArgs validates the presence of UserID and RegionID itself, so the
            // extra ContainsKey pre-check the original did here was redundant.
            if (!UnpackArgs(request, out user, out region, out position, out look))
                return FailureResult();

            if (m_GridUserService.SetLastPosition(user, UUID.Zero, region, position, look))
                return SuccessResult();

            return FailureResult();
        }

        /// <summary>Fetch a single grid-user record by UserID.</summary>
        byte[] GetGridUserInfo(Dictionary<string, object> request)
        {
            if (!request.ContainsKey("UserID"))
                return FailureResult();

            string user = request["UserID"].ToString();

            GridUserInfo guinfo = m_GridUserService.GetGridUserInfo(user);

            // FIX: guard against a missing record instead of NullReferenceException.
            if (guinfo == null)
                return FailureResult();

            Dictionary<string, object> result = new Dictionary<string, object>();
            result["result"] = guinfo.ToKeyValuePairs();

            string xmlString = ServerUtils.BuildXmlResponse(result);
            //m_log.DebugFormat("[GRID USER HANDLER]: resp string: {0}", xmlString);
            return Util.UTF8NoBomEncoding.GetBytes(xmlString);
        }

        /// <summary>Fetch multiple grid-user records; AgentIDs must be a List&lt;string&gt;.</summary>
        byte[] GetGridUserInfos(Dictionary<string, object> request)
        {
            if (!request.ContainsKey("AgentIDs"))
            {
                m_log.DebugFormat("[GRID USER HANDLER]: GetGridUserInfos called without required uuids argument");
                return FailureResult();
            }

            if (!(request["AgentIDs"] is List<string>))
            {
                // BUG FIX: the original logged request["uuids"], a key that never exists
                // here, so this error path threw KeyNotFoundException instead of logging.
                m_log.DebugFormat("[GRID USER HANDLER]: GetGridUserInfos input argument was of unexpected type {0}",
                    request["AgentIDs"].GetType().ToString());
                return FailureResult();
            }

            string[] userIDs = ((List<string>)request["AgentIDs"]).ToArray();

            GridUserInfo[] pinfos = m_GridUserService.GetGridUserInfo(userIDs);

            Dictionary<string, object> result = new Dictionary<string, object>();
            if (pinfos == null || pinfos.Length == 0)
            {
                result["result"] = "null";
            }
            else
            {
                int i = 0;
                foreach (GridUserInfo pinfo in pinfos)
                {
                    result["griduser" + i] = pinfo.ToKeyValuePairs();
                    i++;
                }
            }

            string xmlString = ServerUtils.BuildXmlResponse(result);
            return Util.UTF8NoBomEncoding.GetBytes(xmlString);
        }

        /// <summary>
        /// Extract the common UserID/RegionID/Position/LookAt arguments.
        /// Position defaults to (128,128,70) and LookAt to zero when absent.
        /// </summary>
        /// <returns>false when UserID or RegionID is missing, or RegionID is not a UUID</returns>
        private bool UnpackArgs(Dictionary<string, object> request, out string user, out UUID region, out Vector3 position, out Vector3 lookAt)
        {
            user = string.Empty;
            region = UUID.Zero;
            position = new Vector3(128, 128, 70);
            lookAt = Vector3.Zero;

            if (!request.ContainsKey("UserID") || !request.ContainsKey("RegionID"))
                return false;

            user = request["UserID"].ToString();

            if (!UUID.TryParse(request["RegionID"].ToString(), out region))
                return false;

            if (request.ContainsKey("Position"))
                Vector3.TryParse(request["Position"].ToString(), out position);

            if (request.ContainsKey("LookAt"))
                Vector3.TryParse(request["LookAt"].ToString(), out lookAt);

            return true;
        }

        private byte[] SuccessResult()
        {
            return BuildResult("Success");
        }

        private byte[] FailureResult()
        {
            return BuildResult("Failure");
        }

        /// <summary>
        /// Build the minimal &lt;ServerResponse&gt;&lt;result&gt;value&lt;/result&gt;&lt;/ServerResponse&gt;
        /// document shared by the success and failure responses (previously duplicated).
        /// </summary>
        private byte[] BuildResult(string value)
        {
            XmlDocument doc = new XmlDocument();
            doc.AppendChild(doc.CreateNode(XmlNodeType.XmlDeclaration, "", ""));

            XmlElement rootElement = doc.CreateElement("", "ServerResponse", "");
            doc.AppendChild(rootElement);

            XmlElement result = doc.CreateElement("", "result", "");
            result.AppendChild(doc.CreateTextNode(value));
            rootElement.AppendChild(result);

            return DocToBytes(doc);
        }

        /// <summary>Serialize an XmlDocument to indented UTF-8(-ish) bytes.</summary>
        private byte[] DocToBytes(XmlDocument doc)
        {
            // FIX: dispose the stream and writer (previously leaked); Flush before
            // ToArray so all buffered output is captured.
            using (MemoryStream ms = new MemoryStream())
            using (XmlTextWriter xw = new XmlTextWriter(ms, null))
            {
                xw.Formatting = Formatting.Indented;
                doc.WriteTo(xw);
                xw.Flush();
                return ms.ToArray();
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Test.Cryptography; namespace System.Security.Cryptography.X509Certificates.Tests { internal static class TestData { public static byte[] MsCertificate = ( "308204ec308203d4a003020102021333000000b011af0a8bd03b9fdd00010000" + "00b0300d06092a864886f70d01010505003079310b3009060355040613025553" + "311330110603550408130a57617368696e67746f6e3110300e06035504071307" + "5265646d6f6e64311e301c060355040a13154d6963726f736f667420436f7270" + "6f726174696f6e312330210603550403131a4d6963726f736f667420436f6465" + "205369676e696e6720504341301e170d3133303132343232333333395a170d31" + "34303432343232333333395a308183310b300906035504061302555331133011" + "0603550408130a57617368696e67746f6e3110300e060355040713075265646d" + "6f6e64311e301c060355040a13154d6963726f736f667420436f72706f726174" + "696f6e310d300b060355040b13044d4f5052311e301c060355040313154d6963" + "726f736f667420436f72706f726174696f6e30820122300d06092a864886f70d" + "01010105000382010f003082010a0282010100e8af5ca2200df8287cbc057b7f" + "adeeeb76ac28533f3adb407db38e33e6573fa551153454a5cfb48ba93fa837e1" + "2d50ed35164eef4d7adb137688b02cf0595ca9ebe1d72975e41b85279bf3f82d" + "9e41362b0b40fbbe3bbab95c759316524bca33c537b0f3eb7ea8f541155c0865" + "1d2137f02cba220b10b1109d772285847c4fb91b90b0f5a3fe8bf40c9a4ea0f5" + "c90a21e2aae3013647fd2f826a8103f5a935dc94579dfb4bd40e82db388f12fe" + "e3d67a748864e162c4252e2aae9d181f0e1eb6c2af24b40e50bcde1c935c49a6" + "79b5b6dbcef9707b280184b82a29cfbfa90505e1e00f714dfdad5c238329ebc7" + "c54ac8e82784d37ec6430b950005b14f6571c50203010001a38201603082015c" + "30130603551d25040c300a06082b06010505070303301d0603551d0e04160414" + "5971a65a334dda980780ff841ebe87f9723241f230510603551d11044a3048a4" + "463044310d300b060355040b13044d4f5052313330310603550405132a333135" + 
"39352b34666166306237312d616433372d346161332d613637312d3736626330" + "35323334346164301f0603551d23041830168014cb11e8cad2b4165801c9372e" + "331616b94c9a0a1f30560603551d1f044f304d304ba049a0478645687474703a" + "2f2f63726c2e6d6963726f736f66742e636f6d2f706b692f63726c2f70726f64" + "756374732f4d6963436f645369675043415f30382d33312d323031302e63726c" + "305a06082b06010505070101044e304c304a06082b06010505073002863e6874" + "74703a2f2f7777772e6d6963726f736f66742e636f6d2f706b692f6365727473" + "2f4d6963436f645369675043415f30382d33312d323031302e637274300d0609" + "2a864886f70d0101050500038201010031d76e2a12573381d59dc6ebf93ad444" + "4d089eee5edf6a5bb779cf029cbc76689e90a19c0bc37fa28cf14dba9539fb0d" + "e0e19bf45d240f1b8d88153a7cdbadceb3c96cba392c457d24115426300d0dff" + "47ea0307e5e4665d2c7b9d1da910fa1cb074f24f696b9ea92484daed96a0df73" + "a4ef6a1aac4b629ef17cc0147f48cd4db244f9f03c936d42d8e87ce617a09b68" + "680928f90297ef1103ba6752adc1e9b373a6d263cd4ae23ee4f34efdffa1e0bb" + "02133b5d20de553fa3ae9040313875285e04a9466de6f57a7940bd1fcde845d5" + "aee25d3ef575c7e6666360ccd59a84878d2430f7ef34d0631db142674a0e4bbf" + "3a0eefb6953aa738e4259208a6886682").HexToByteArray(); public static readonly byte[] MsCertificatePemBytes = ByteUtils.AsciiBytes( @"-----BEGIN CERTIFICATE----- MIIE7DCCA9SgAwIBAgITMwAAALARrwqL0Duf3QABAAAAsDANBgkqhkiG9w0BAQUF ADB5MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSMwIQYDVQQD ExpNaWNyb3NvZnQgQ29kZSBTaWduaW5nIFBDQTAeFw0xMzAxMjQyMjMzMzlaFw0x NDA0MjQyMjMzMzlaMIGDMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3Rv bjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0 aW9uMQ0wCwYDVQQLEwRNT1BSMR4wHAYDVQQDExVNaWNyb3NvZnQgQ29ycG9yYXRp b24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDor1yiIA34KHy8BXt/ re7rdqwoUz8620B9s44z5lc/pVEVNFSlz7SLqT+oN+EtUO01Fk7vTXrbE3aIsCzw WVyp6+HXKXXkG4Unm/P4LZ5BNisLQPu+O7q5XHWTFlJLyjPFN7Dz636o9UEVXAhl HSE38Cy6IgsQsRCddyKFhHxPuRuQsPWj/ov0DJpOoPXJCiHiquMBNkf9L4JqgQP1 
qTXclFed+0vUDoLbOI8S/uPWenSIZOFixCUuKq6dGB8OHrbCryS0DlC83hyTXEmm ebW22875cHsoAYS4KinPv6kFBeHgD3FN/a1cI4Mp68fFSsjoJ4TTfsZDC5UABbFP ZXHFAgMBAAGjggFgMIIBXDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQU WXGmWjNN2pgHgP+EHr6H+XIyQfIwUQYDVR0RBEowSKRGMEQxDTALBgNVBAsTBE1P UFIxMzAxBgNVBAUTKjMxNTk1KzRmYWYwYjcxLWFkMzctNGFhMy1hNjcxLTc2YmMw NTIzNDRhZDAfBgNVHSMEGDAWgBTLEejK0rQWWAHJNy4zFha5TJoKHzBWBgNVHR8E TzBNMEugSaBHhkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20vcGtpL2NybC9wcm9k dWN0cy9NaWNDb2RTaWdQQ0FfMDgtMzEtMjAxMC5jcmwwWgYIKwYBBQUHAQEETjBM MEoGCCsGAQUFBzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpL2NlcnRz L01pY0NvZFNpZ1BDQV8wOC0zMS0yMDEwLmNydDANBgkqhkiG9w0BAQUFAAOCAQEA MdduKhJXM4HVncbr+TrURE0Inu5e32pbt3nPApy8dmiekKGcC8N/oozxTbqVOfsN 4OGb9F0kDxuNiBU6fNutzrPJbLo5LEV9JBFUJjANDf9H6gMH5eRmXSx7nR2pEPoc sHTyT2lrnqkkhNrtlqDfc6TvahqsS2Ke8XzAFH9IzU2yRPnwPJNtQtjofOYXoJto aAko+QKX7xEDumdSrcHps3Om0mPNSuI+5PNO/f+h4LsCEztdIN5VP6OukEAxOHUo XgSpRm3m9Xp5QL0fzehF1a7iXT71dcfmZmNgzNWahIeNJDD37zTQYx2xQmdKDku/ Og7vtpU6pzjkJZIIpohmgg== -----END CERTIFICATE----- "); public const string PfxDataPassword = "12345"; public static SecureString CreatePfxDataPasswordSecureString() { var s = new SecureString(); // WARNING: // A key value of SecureString is in keeping string data off of the GC heap, such that it can // be reliably cleared when no longer needed. Creating a SecureString from a string or converting // a SecureString to a string diminishes that value. These conversion functions are for testing that // SecureString works, and does not represent a pattern to follow in any non-test situation. 
foreach (char c in PfxDataPassword.ToCharArray()) { s.AppendChar(c); } return s; } public static readonly byte[] PfxSha1Empty_ExpectedSig = ( "44b15120b8c7de19b4968d761600ffb8c54e5d0c1bcaba0880a20ab48912c8fd" + "fa81b28134eabf58f3211a0d1eefdaae115e7872d5a67045c3b62a5da4393940" + "e5a496413a6d55ea6309d0013e90657c83c6e40aa8fafeee66acbb6661c14190" + "11e1fde6f4fcc328bd7e537e4aa2dbe216d8f1f3aa7e5ec60eb9cfdca7a41d74").HexToByteArray(); public static readonly byte[] PfxData = ( "3082063A020103308205F606092A864886F70D010701A08205E7048205E33082" + "05DF3082035806092A864886F70D010701A08203490482034530820341308203" + "3D060B2A864886F70D010C0A0102A08202B6308202B2301C060A2A864886F70D" + "010C0103300E04085052002C7DA2C2A6020207D0048202907D485E3BFC6E6457" + "C811394C145D0E8A18325646854E4FF0097BC5A98547F5AD616C8EFDA8505AA8" + "7564ED4800A3139759497C60C6688B51F376ACAE906429C8771CB1428226B68A" + "6297207BCC9DD7F9563478DD83880AAB2304B545759B2275305DF4EFF9FAC24A" + "3CC9D3B2D672EFE45D8F48E24A16506C1D7566FC6D1B269FBF201B3AC3309D3E" + "BC6FD606257A7A707AA2F790EA3FE7A94A51138540C5319010CBA6DE9FB9D85F" + "CDC78DA60E33DF2F21C46FB9A8554B4F82E0A6EDBA4DB5585D77D331D35DAAED" + "51B6A5A3E000A299880FB799182C8CA3004B7837A9FEB8BFC76778089993F3D1" + "1D70233608AF7C50722D680623D2BF54BD4B1E7A604184D9F44E0AF8099FFA47" + "1E5536E7902793829DB9902DDB61264A62962950AD274EA516B2D44BE9036530" + "016E607B73F341AEEFED2211F6330364738B435B0D2ED6C57747F6C8230A053F" + "78C4DD65DB83B26C6A47836A6CBBAB92CBB262C6FB6D08632B4457F5FA8EABFA" + "65DB34157E1D301E9085CC443582CDD15404314872748545EB3FC3C574882655" + "8C9A85F966E315775BBE9DA34D1E8B6DADC3C9E120C6D6A2E1CFFE4EB014C3CE" + "FBC19356CE33DAC60F93D67A4DE247B0DAE13CD8B8C9F15604CC0EC9968E3AD7" + "F57C9F53C45E2ECB0A0945EC0BA04BAA15B48D8596EDC9F5FE9165A5D21949FB" + "5FE30A920AD2C0F78799F6443C300629B8CA4DCA19B9DBF1E27AAB7B12271228" + "119A95C9822BE6439414BEEAE24002B46EB97E030E18BD810ADE0BCF4213A355" + "038B56584B2FBCC3F5EA215D0CF667FFD823EA03AB62C3B193DFB4450AABB50B" + 
"AF306E8088EE7384FA2FDFF03E0DD7ACD61832223E806A94D46E196462522808" + "3163F1CAF333FDBBE2D54CA86968867CE0B6DD5E5B7F0633C6FAB4A19CC14F64" + "5EC14D0B1436F7623174301306092A864886F70D010915310604040100000030" + "5D06092B060104018237110131501E4E004D006900630072006F0073006F0066" + "00740020005300740072006F006E0067002000430072007900700074006F0067" + "007200610070006800690063002000500072006F007600690064006500723082" + "027F06092A864886F70D010706A08202703082026C0201003082026506092A86" + "4886F70D010701301C060A2A864886F70D010C0106300E0408E0C117E67A75D8" + "EB020207D080820238292882408B31826F0DC635F9BBE7C199A48A3B4FEFC729" + "DBF95508D6A7D04805A8DD612427F93124F522AC7D3C6F4DDB74D937F57823B5" + "B1E8CFAE4ECE4A1FFFD801558D77BA31985AA7F747D834CBE84464EF777718C9" + "865C819D6C9DAA0FA25E2A2A80B3F2AAA67D40E382EB084CCA85E314EA40C3EF" + "3ED1593904D7A16F37807C99AF06C917093F6C5AAEBB12A6C58C9956D4FBBDDE" + "1F1E389989C36E19DD38D4B978D6F47131E458AB68E237E40CB6A87F21C8773D" + "E845780B50995A51F041106F47C740B3BD946038984F1AC9E91230616480962F" + "11B0683F8802173C596C4BD554642F51A76F9DFFF9053DEF7B3C3F759FC7EEAC" + "3F2386106C4B8CB669589E004FB235F0357EA5CF0B5A6FC78A6D941A3AE44AF7" + "B601B59D15CD1EC61BCCC481FBB83EAE2F83153B41E71EF76A2814AB59347F11" + "6AB3E9C1621668A573013D34D13D3854E604286733C6BAD0F511D7F8FD6356F7" + "C3198D0CB771AF27F4B5A3C3B571FDD083FD68A9A1EEA783152C436F7513613A" + "7E399A1DA48D7E55DB7504DC47D1145DF8D7B6D32EAA4CCEE06F98BB3DDA2CC0" + "D0564A962F86DFB122E4F7E2ED6F1B509C58D4A3B2D0A68788F7E313AECFBDEF" + "456C31B96FC13586E02AEB65807ED83BB0CB7C28F157BC95C9C593C919469153" + "9AE3C620ED1D4D4AF0177F6B9483A5341D7B084BC5B425AFB658168EE2D8FB2B" + "FAB07A3BA061687A5ECD1F8DA9001DD3E7BE793923094ABB0F2CF4D24CB071B9" + "E568B18336BB4DC541352C9785C48D0F0E53066EB2009EFCB3E5644ED12252C1" + "BC303B301F300706052B0E03021A04144DEAB829B57A3156AEBC8239C0E7E884" + "EFD96E680414E147930B932899741C92D7652268938770254A2B020207D0").HexToByteArray(); public static byte[] StoreSavedAsPfxData = ( 
"3082070406092a864886f70d010702a08206f5308206f10201013100300b0609" + "2a864886f70d010701a08206d9308201e530820152a0030201020210d5b5bc1c" + "458a558845bff51cb4dff31c300906052b0e03021d05003011310f300d060355" + "040313064d794e616d65301e170d3130303430313038303030305a170d313130" + "3430313038303030305a3011310f300d060355040313064d794e616d6530819f" + "300d06092a864886f70d010101050003818d0030818902818100b11e30ea8742" + "4a371e30227e933ce6be0e65ff1c189d0d888ec8ff13aa7b42b68056128322b2" + "1f2b6976609b62b6bc4cf2e55ff5ae64e9b68c78a3c2dacc916a1bc7322dd353" + "b32898675cfb5b298b176d978b1f12313e3d865bc53465a11cca106870a4b5d5" + "0a2c410938240e92b64902baea23eb093d9599e9e372e48336730203010001a3" + "46304430420603551d01043b3039801024859ebf125e76af3f0d7979b4ac7a96" + "a1133011310f300d060355040313064d794e616d658210d5b5bc1c458a558845" + "bff51cb4dff31c300906052b0e03021d0500038181009bf6e2cf830ed485b86d" + "6b9e8dffdcd65efc7ec145cb9348923710666791fcfa3ab59d689ffd7234b787" + "2611c5c23e5e0714531abadb5de492d2c736e1c929e648a65cc9eb63cd84e57b" + "5909dd5ddf5dbbba4a6498b9ca225b6e368b94913bfc24de6b2bd9a26b192b95" + "7304b89531e902ffc91b54b237bb228be8afcda26476308204ec308203d4a003" + "020102021333000000b011af0a8bd03b9fdd0001000000b0300d06092a864886" + "f70d01010505003079310b300906035504061302555331133011060355040813" + "0a57617368696e67746f6e3110300e060355040713075265646d6f6e64311e30" + "1c060355040a13154d6963726f736f667420436f72706f726174696f6e312330" + "210603550403131a4d6963726f736f667420436f6465205369676e696e672050" + "4341301e170d3133303132343232333333395a170d3134303432343232333333" + "395a308183310b3009060355040613025553311330110603550408130a576173" + "68696e67746f6e3110300e060355040713075265646d6f6e64311e301c060355" + "040a13154d6963726f736f667420436f72706f726174696f6e310d300b060355" + "040b13044d4f5052311e301c060355040313154d6963726f736f667420436f72" + "706f726174696f6e30820122300d06092a864886f70d01010105000382010f00" + "3082010a0282010100e8af5ca2200df8287cbc057b7fadeeeb76ac28533f3adb" 
+ "407db38e33e6573fa551153454a5cfb48ba93fa837e12d50ed35164eef4d7adb" + "137688b02cf0595ca9ebe1d72975e41b85279bf3f82d9e41362b0b40fbbe3bba" + "b95c759316524bca33c537b0f3eb7ea8f541155c08651d2137f02cba220b10b1" + "109d772285847c4fb91b90b0f5a3fe8bf40c9a4ea0f5c90a21e2aae3013647fd" + "2f826a8103f5a935dc94579dfb4bd40e82db388f12fee3d67a748864e162c425" + "2e2aae9d181f0e1eb6c2af24b40e50bcde1c935c49a679b5b6dbcef9707b2801" + "84b82a29cfbfa90505e1e00f714dfdad5c238329ebc7c54ac8e82784d37ec643" + "0b950005b14f6571c50203010001a38201603082015c30130603551d25040c30" + "0a06082b06010505070303301d0603551d0e041604145971a65a334dda980780" + "ff841ebe87f9723241f230510603551d11044a3048a4463044310d300b060355" + "040b13044d4f5052313330310603550405132a33313539352b34666166306237" + "312d616433372d346161332d613637312d373662633035323334346164301f06" + "03551d23041830168014cb11e8cad2b4165801c9372e331616b94c9a0a1f3056" + "0603551d1f044f304d304ba049a0478645687474703a2f2f63726c2e6d696372" + "6f736f66742e636f6d2f706b692f63726c2f70726f64756374732f4d6963436f" + "645369675043415f30382d33312d323031302e63726c305a06082b0601050507" + "0101044e304c304a06082b06010505073002863e687474703a2f2f7777772e6d" + "6963726f736f66742e636f6d2f706b692f63657274732f4d6963436f64536967" + "5043415f30382d33312d323031302e637274300d06092a864886f70d01010505" + "00038201010031d76e2a12573381d59dc6ebf93ad4444d089eee5edf6a5bb779" + "cf029cbc76689e90a19c0bc37fa28cf14dba9539fb0de0e19bf45d240f1b8d88" + "153a7cdbadceb3c96cba392c457d24115426300d0dff47ea0307e5e4665d2c7b" + "9d1da910fa1cb074f24f696b9ea92484daed96a0df73a4ef6a1aac4b629ef17c" + "c0147f48cd4db244f9f03c936d42d8e87ce617a09b68680928f90297ef1103ba" + "6752adc1e9b373a6d263cd4ae23ee4f34efdffa1e0bb02133b5d20de553fa3ae" + "9040313875285e04a9466de6f57a7940bd1fcde845d5aee25d3ef575c7e66663" + "60ccd59a84878d2430f7ef34d0631db142674a0e4bbf3a0eefb6953aa738e425" + "9208a68866823100").HexToByteArray(); public static byte[] StoreSavedAsCerData = ( 
"308201e530820152a0030201020210d5b5bc1c458a558845bff51cb4dff31c30" + "0906052b0e03021d05003011310f300d060355040313064d794e616d65301e17" + "0d3130303430313038303030305a170d3131303430313038303030305a301131" + "0f300d060355040313064d794e616d6530819f300d06092a864886f70d010101" + "050003818d0030818902818100b11e30ea87424a371e30227e933ce6be0e65ff" + "1c189d0d888ec8ff13aa7b42b68056128322b21f2b6976609b62b6bc4cf2e55f" + "f5ae64e9b68c78a3c2dacc916a1bc7322dd353b32898675cfb5b298b176d978b" + "1f12313e3d865bc53465a11cca106870a4b5d50a2c410938240e92b64902baea" + "23eb093d9599e9e372e48336730203010001a346304430420603551d01043b30" + "39801024859ebf125e76af3f0d7979b4ac7a96a1133011310f300d0603550403" + "13064d794e616d658210d5b5bc1c458a558845bff51cb4dff31c300906052b0e" + "03021d0500038181009bf6e2cf830ed485b86d6b9e8dffdcd65efc7ec145cb93" + "48923710666791fcfa3ab59d689ffd7234b7872611c5c23e5e0714531abadb5d" + "e492d2c736e1c929e648a65cc9eb63cd84e57b5909dd5ddf5dbbba4a6498b9ca" + "225b6e368b94913bfc24de6b2bd9a26b192b957304b89531e902ffc91b54b237" + "bb228be8afcda26476").HexToByteArray(); public static byte[] StoreSavedAsSerializedCerData = ( "0200000001000000bc0000001c0000006c000000010000000000000000000000" + "00000000020000007b00370037004500420044003000320044002d0044003800" + "440045002d0034003700350041002d0038003800360037002d00440032003000" + "4200300030003600340045003400390046007d00000000004d00690063007200" + "6f0073006f006600740020005300740072006f006e0067002000430072007900" + "700074006f0067007200610070006800690063002000500072006f0076006900" + "64006500720000002000000001000000e9010000308201e530820152a0030201" + "020210d5b5bc1c458a558845bff51cb4dff31c300906052b0e03021d05003011" + "310f300d060355040313064d794e616d65301e170d3130303430313038303030" + "305a170d3131303430313038303030305a3011310f300d060355040313064d79" + "4e616d6530819f300d06092a864886f70d010101050003818d00308189028181" + "00b11e30ea87424a371e30227e933ce6be0e65ff1c189d0d888ec8ff13aa7b42" + 
"b68056128322b21f2b6976609b62b6bc4cf2e55ff5ae64e9b68c78a3c2dacc91" + "6a1bc7322dd353b32898675cfb5b298b176d978b1f12313e3d865bc53465a11c" + "ca106870a4b5d50a2c410938240e92b64902baea23eb093d9599e9e372e48336" + "730203010001a346304430420603551d01043b3039801024859ebf125e76af3f" + "0d7979b4ac7a96a1133011310f300d060355040313064d794e616d658210d5b5" + "bc1c458a558845bff51cb4dff31c300906052b0e03021d0500038181009bf6e2" + "cf830ed485b86d6b9e8dffdcd65efc7ec145cb9348923710666791fcfa3ab59d" + "689ffd7234b7872611c5c23e5e0714531abadb5de492d2c736e1c929e648a65c" + "c9eb63cd84e57b5909dd5ddf5dbbba4a6498b9ca225b6e368b94913bfc24de6b" + "2bd9a26b192b957304b89531e902ffc91b54b237bb228be8afcda26476").HexToByteArray(); public static byte[] StoreSavedAsSerializedStoreData = ( "00000000434552540200000001000000bc0000001c0000006c00000001000000" + "000000000000000000000000020000007b003700370045004200440030003200" + "44002d0044003800440045002d0034003700350041002d003800380036003700" + "2d004400320030004200300030003600340045003400390046007d0000000000" + "4d006900630072006f0073006f006600740020005300740072006f006e006700" + "2000430072007900700074006f00670072006100700068006900630020005000" + "72006f007600690064006500720000002000000001000000e9010000308201e5" + "30820152a0030201020210d5b5bc1c458a558845bff51cb4dff31c300906052b" + "0e03021d05003011310f300d060355040313064d794e616d65301e170d313030" + "3430313038303030305a170d3131303430313038303030305a3011310f300d06" + "0355040313064d794e616d6530819f300d06092a864886f70d01010105000381" + "8d0030818902818100b11e30ea87424a371e30227e933ce6be0e65ff1c189d0d" + "888ec8ff13aa7b42b68056128322b21f2b6976609b62b6bc4cf2e55ff5ae64e9" + "b68c78a3c2dacc916a1bc7322dd353b32898675cfb5b298b176d978b1f12313e" + "3d865bc53465a11cca106870a4b5d50a2c410938240e92b64902baea23eb093d" + "9599e9e372e48336730203010001a346304430420603551d01043b3039801024" + "859ebf125e76af3f0d7979b4ac7a96a1133011310f300d060355040313064d79" + "4e616d658210d5b5bc1c458a558845bff51cb4dff31c300906052b0e03021d05" + 
"00038181009bf6e2cf830ed485b86d6b9e8dffdcd65efc7ec145cb9348923710" + "666791fcfa3ab59d689ffd7234b7872611c5c23e5e0714531abadb5de492d2c7" + "36e1c929e648a65cc9eb63cd84e57b5909dd5ddf5dbbba4a6498b9ca225b6e36" + "8b94913bfc24de6b2bd9a26b192b957304b89531e902ffc91b54b237bb228be8" + "afcda264762000000001000000f0040000308204ec308203d4a0030201020213" + "33000000b011af0a8bd03b9fdd0001000000b0300d06092a864886f70d010105" + "05003079310b3009060355040613025553311330110603550408130a57617368" + "696e67746f6e3110300e060355040713075265646d6f6e64311e301c06035504" + "0a13154d6963726f736f667420436f72706f726174696f6e3123302106035504" + "03131a4d6963726f736f667420436f6465205369676e696e6720504341301e17" + "0d3133303132343232333333395a170d3134303432343232333333395a308183" + "310b3009060355040613025553311330110603550408130a57617368696e6774" + "6f6e3110300e060355040713075265646d6f6e64311e301c060355040a13154d" + "6963726f736f667420436f72706f726174696f6e310d300b060355040b13044d" + "4f5052311e301c060355040313154d6963726f736f667420436f72706f726174" + "696f6e30820122300d06092a864886f70d01010105000382010f003082010a02" + "82010100e8af5ca2200df8287cbc057b7fadeeeb76ac28533f3adb407db38e33" + "e6573fa551153454a5cfb48ba93fa837e12d50ed35164eef4d7adb137688b02c" + "f0595ca9ebe1d72975e41b85279bf3f82d9e41362b0b40fbbe3bbab95c759316" + "524bca33c537b0f3eb7ea8f541155c08651d2137f02cba220b10b1109d772285" + "847c4fb91b90b0f5a3fe8bf40c9a4ea0f5c90a21e2aae3013647fd2f826a8103" + "f5a935dc94579dfb4bd40e82db388f12fee3d67a748864e162c4252e2aae9d18" + "1f0e1eb6c2af24b40e50bcde1c935c49a679b5b6dbcef9707b280184b82a29cf" + "bfa90505e1e00f714dfdad5c238329ebc7c54ac8e82784d37ec6430b950005b1" + "4f6571c50203010001a38201603082015c30130603551d25040c300a06082b06" + "010505070303301d0603551d0e041604145971a65a334dda980780ff841ebe87" + "f9723241f230510603551d11044a3048a4463044310d300b060355040b13044d" + "4f5052313330310603550405132a33313539352b34666166306237312d616433" + "372d346161332d613637312d373662633035323334346164301f0603551d2304" 
+ "1830168014cb11e8cad2b4165801c9372e331616b94c9a0a1f30560603551d1f" + "044f304d304ba049a0478645687474703a2f2f63726c2e6d6963726f736f6674" + "2e636f6d2f706b692f63726c2f70726f64756374732f4d6963436f6453696750" + "43415f30382d33312d323031302e63726c305a06082b06010505070101044e30" + "4c304a06082b06010505073002863e687474703a2f2f7777772e6d6963726f73" + "6f66742e636f6d2f706b692f63657274732f4d6963436f645369675043415f30" + "382d33312d323031302e637274300d06092a864886f70d010105050003820101" + "0031d76e2a12573381d59dc6ebf93ad4444d089eee5edf6a5bb779cf029cbc76" + "689e90a19c0bc37fa28cf14dba9539fb0de0e19bf45d240f1b8d88153a7cdbad" + "ceb3c96cba392c457d24115426300d0dff47ea0307e5e4665d2c7b9d1da910fa" + "1cb074f24f696b9ea92484daed96a0df73a4ef6a1aac4b629ef17cc0147f48cd" + "4db244f9f03c936d42d8e87ce617a09b68680928f90297ef1103ba6752adc1e9" + "b373a6d263cd4ae23ee4f34efdffa1e0bb02133b5d20de553fa3ae9040313875" + "285e04a9466de6f57a7940bd1fcde845d5aee25d3ef575c7e6666360ccd59a84" + "878d2430f7ef34d0631db142674a0e4bbf3a0eefb6953aa738e4259208a68866" + "82000000000000000000000000").HexToByteArray(); public static byte[] DssCer = ( "3082025d3082021da00302010202101e9ae1e91e07de8640ac7af21ac22e8030" + "0906072a8648ce380403300e310c300a06035504031303466f6f301e170d3135" + "303232343232313734375a170d3136303232343232313734375a300e310c300a" + "06035504031303466f6f308201b73082012c06072a8648ce3804013082011f02" + "818100871018cc42552d14a5a9286af283f3cfba959b8835ec2180511d0dceb8" + "b979285708c800fc10cb15337a4ac1a48ed31394072015a7a6b525986b49e5e1" + "139737a794833c1aa1e0eaaa7e9d4efeb1e37a65dbc79f51269ba41e8f0763aa" + "613e29c81c3b977aeeb3d3c3f6feb25c270cdcb6aee8cd205928dfb33c44d2f2" + "dbe819021500e241edcf37c1c0e20aadb7b4e8ff7aa8fde4e75d02818100859b" + "5aeb351cf8ad3fabac22ae0350148fd1d55128472691709ec08481584413e9e5" + "e2f61345043b05d3519d88c021582ccef808af8f4b15bd901a310fefd518af90" + "aba6f85f6563db47ae214a84d0b7740c9394aa8e3c7bfef1beedd0dafda079bf" + 
"75b2ae4edb7480c18b9cdfa22e68a06c0685785f5cfb09c2b80b1d05431d0381" + "8400028180089a43f439b924bef3529d8d6206d1fca56a55caf52b41d6ce371e" + "bf07bda132c8eadc040007fcf4da06c1f30504ebd8a77d301f5a4702f01f0d2a" + "0707ac1da38dd3251883286e12456234da62eda0df5fe2fa07cd5b16f3638bec" + "ca7786312da7d3594a4bb14e353884da0e9aecb86e3c9bdb66fca78ea85e1cc3" + "f2f8bf0963300906072a8648ce380403032f00302c021461f6d143a47a4f7e0e" + "0ef9848b7f83eacbf83ffd021420e2ac47e656874633e01b0d207a99280c1127" + "01").HexToByteArray(); public static byte[] Dsa1024Pfx = ( "308206EE020103308206B406092A864886F70D010701A08206A5048206A13082" + "069D3082043706092A864886F70D010706A0820428308204240201003082041D" + "06092A864886F70D010701301C060A2A864886F70D010C0106300E04084AF212" + "89D5D7E2E702020800808203F0DECCF218AC91F26BAB026998AB77C7629D20DB" + "E2FB7022A3C4A1CECD743C0F932E944AE229DAFB61AD76C4DEB6995DF4F4BA01" + "2DBAD5C63A4C846E0807FCA0BC4A162CDFBAB4B3C4D304F473B3ACC1D268436E" + "F537DAE97ECC3C634C8DF2A294CC23E904A169F369021A0C024A03DE98A65B0F" + "3F14D6910525D76AD98B91E67BB7398E245CF48A4D2A5603CFCCF4E547D7EDAB" + "669D9A8597C6839119EB9FD932D1E4BA8B45D3317186CDA2EFF247BCFD64A5CA" + "ED604BF7033E423CC21CEC6454FE3B74E03A26C51A1C3519CE339FBE9F10B81D" + "DF6A0AAB4F8166D90B6F52B3439AB4B5273D0A506E3E01869F8FEBD1521EF8E5" + "BFB357FA630E3C988926EF3ACC0A0F4176FE8A93337C1A5C6DEAB5758EC2F07C" + "11E8B2495ECDE58D12312CCCA2E8B2EE8564B533D18C7A26A9290394C2A9942C" + "295EBB0317F5695103627519567960908323FFE6560AD054C97800218A52F37A" + "DDE4E7F18EF3BF3718A9D7BF57B700DBEB5AB86598C9604A4546995E34DBABBB" + "6A9FB483A3C2DFE6046DFD54F2D7AC61C062AF04B7FBAC395C5DD19408D6926A" + "93B896BFB92DA6F7F5A4E54EDBE2CFBB56576878150676ADB0D37E0177B91E0D" + "F09D7B37769E66842DD40C7B1422127F152A165BC9669168885BA0243C9641B4" + "48F68575AA6AB9247A49A61AC3C683EE057B7676B9610CF9100096FC46BDC8B9" + "BAA03535815D5E98BA3ABC1E18E39B50A8AF8D81E30F2DFD6AF5D0F9FC3636AB" + "69E128C793571723A79E42FC7C1BD7F39BD45FBE9C39EEB010005435BEC19844" + 
"2058033D2601B83124BD369DADB831317E0B2C28CE7535A2E89D8A0E5E34E252" + "3B0FCEC34FF26A2B80566F4D86F958F70106BF3322FA70A3312E48EAA130246A" + "07412E93FDE91F633F758BC49311F6CBBAEC5D2F22AFCD696F72BC22E7DE6C00" + "3275DFEC47E3848226FE9DBA184EA711E051B267C584749F897EFE7EAFD02F1D" + "BF3FD8E882474CA1F45509EF2E7B82F35B677CB88ED42AF729848EE2B424B0CE" + "2E9AAC945BABA550C20D5B25075A30FE70D8CAA5A527A35F1DF17BCCB91930C1" + "7120C625667120E0806C2B51EDFF540F928BD555FB48DBCB83CCCE0C385E78C8" + "65BE715AE6F8BE472E5FC187EBE3FEFD8D7FE62D4DB2EE61F42D24D81FAA9179" + "0FB17E8EBC8E219B6F9E039F5AB3BC4870821D474B36C8F8D0583D9DC06E4383" + "D03424420B8C8B26276877166A0F51E22F0D8FA60A070CFBD47EAFBC717C879C" + "B5A1EA69C4C2A38F26A1EEF96A0C32BFCECCE4EA97E90A425066B1DD0891353F" + "766EB9F2BFA2563A815DAF3639EBB147E1E8757A6BFAB902C4A8F037AD47E03F" + "AF2E019FCF6CA7430BDFEA4B45B28ED746BB90E09BEF7B370A75E7924BBA0920" + "25FE654A9A197A5B8BBBE43DC7C892FF14E75A37EB97FC489AB121A43E308202" + "5E06092A864886F70D010701A082024F0482024B3082024730820243060B2A86" + "4886F70D010C0A0102A082017630820172301C060A2A864886F70D010C010330" + "0E0408ECB4D1550DA52C6302020800048201509322DC0193DD9E79ADAFD38827" + "AD6DE9299327DDDF6E9DF4FB70D53A64951E4B814E90D2A19B3F4B8E39A2F851" + "A3E5E9B9EB947DD248A3E5F5EB458F3323D4656709E97C6BD59238C4D1F26AB6" + "7D73235FAE7780D98705957B6650AC0DE3E2D46E22455D0A105D138F16A84839" + "14EDDF5C518B748558704ED3AE4A8C4914F667BBDE07978E4A4FC66194F6B86B" + "AB9F558EDE890C25DFB97C59653906CC573B5DEB62165CFF8A5F4F8059A478EB" + "F6FED75F1DACDC612C2E271E25A7083E15D33697270FD442D79FFCB25DB135F9" + "8E580DC9CE14F73C3B847931AF821C77718455F595CA15B86386F3FCC5962262" + "5FC916DDB4A08479DCB49FF7444333FA99FBB22F1AEC1876CF1E099F7A4ECA85" + "A325A8623E071EEA9359194EEE712F73076C5EB72AA243D0C0978B934BC8596F" + "8353FD3CA859EEA457C6175E82AE5854CC7B6598A1E980332F56AB1EE1208277" + "4A91A63181B9302306092A864886F70D01091531160414E6335FA7097AB6DE4A" + "1CDB0C678D7A929883FB6430819106092B06010401823711013181831E818000" 
+ "4D006900630072006F0073006F0066007400200045006E00680061006E006300" + "650064002000440053005300200061006E006400200044006900660066006900" + "65002D00480065006C006C006D0061006E002000430072007900700074006F00" + "67007200610070006800690063002000500072006F0076006900640065007230" + "313021300906052B0E03021A0500041466FD3518CEBBD69877BA663C9E8D7092" + "8E8A98F30408DFB5AE610308BCF802020800").HexToByteArray(); public const string Dsa1024PfxPassword = "1234"; public static byte[] Dsa1024Cert = ( "3082038D3082034AA003020102020900AB740A714AA83C92300B060960864801" + "650304030230818D310B3009060355040613025553311330110603550408130A" + "57617368696E67746F6E3110300E060355040713075265646D6F6E64311E301C" + "060355040A13154D6963726F736F667420436F72706F726174696F6E3120301E" + "060355040B13172E4E4554204672616D65776F726B2028436F72654658293115" + "30130603550403130C313032342D62697420445341301E170D31353131323531" + "34343030335A170D3135313232353134343030335A30818D310B300906035504" + "0613025553311330110603550408130A57617368696E67746F6E3110300E0603" + "55040713075265646D6F6E64311E301C060355040A13154D6963726F736F6674" + "20436F72706F726174696F6E3120301E060355040B13172E4E4554204672616D" + "65776F726B2028436F7265465829311530130603550403130C313032342D6269" + "7420445341308201B73082012C06072A8648CE3804013082011F02818100AEE3" + "309FC7C9DB750D4C3797D333B3B9B234B462868DB6FFBDED790B7FC8DDD574C2" + "BD6F5E749622507AB2C09DF5EAAD84859FC0706A70BB8C9C8BE22B4890EF2325" + "280E3A7F9A3CE341DBABEF6058D063EA6783478FF8B3B7A45E0CA3F7BAC9995D" + "CFDDD56DF168E91349130F719A4E717351FAAD1A77EAC043611DC5CC5A7F0215" + "00D23428A76743EA3B49C62EF0AA17314A85415F0902818100853F830BDAA738" + "465300CFEE02418E6B07965658EAFDA7E338A2EB1531C0E0CA5EF1A12D9DDC7B" + "550A5A205D1FF87F69500A4E4AF5759F3F6E7F0C48C55396B738164D9E35FB50" + "6BD50E090F6A497C70E7E868C61BD4477C1D62922B3DBB40B688DE7C175447E2" + "E826901A109FAD624F1481B276BF63A665D99C87CEE9FD063303818400028180" + 
"25B8E7078E149BAC352667623620029F5E4A5D4126E336D56F1189F9FF71EA67" + "1B844EBD351514F27B69685DDF716B32F102D60EA520D56F544D19B2F08F5D9B" + "DDA3CBA3A73287E21E559E6A07586194AFAC4F6E721EDCE49DE0029627626D7B" + "D30EEB337311DB4FF62D7608997B6CC32E9C42859820CA7EF399590D5A388C48" + "A330302E302C0603551D110425302387047F0000018710000000000000000000" + "0000000000000182096C6F63616C686F7374300B060960864801650304030203" + "3000302D021500B9316CC7E05C9F79197E0B41F6FD4E3FCEB72A8A0214075505" + "CCAECB18B7EF4C00F9C069FA3BC78014DE").HexToByteArray(); public static byte[] CertWithPolicies = ( "308201f33082015ca0030201020210134fb7082cf69bbb4930bfc8e1ca446130" + "0d06092a864886f70d0101050500300e310c300a06035504031303466f6f301e" + "170d3135303330313232343735385a170d3136303330313034343735385a300e" + "310c300a06035504031303466f6f30819f300d06092a864886f70d0101010500" + "03818d0030818902818100c252d52fb96658ddbb7d19dd9caaf203ec0376f77c" + "3012bd93e14bb22a6ff2b5ce8060a197e3fd8289fbff826746baae0db8d68b47" + "a1cf13678717d7db9a16dab028927173a3e843b3a7df8c5a4ff675957ea20703" + "6389a60a83d643108bd1293e2135a672a1cff10b7d5b3c78ab44d35e20ca6a5c" + "5b6f714c5bfd66ed4307070203010001a3523050301b06092b06010401823714" + "02040e1e0c00480065006c006c006f0000301a06092b0601040182371507040d" + "300b060357080902010302010230150603551d20040e300c3004060262133004" + "06027021300d06092a864886f70d0101050500038181001be04e59fbea63acfb" + "c8b6fd3d02dd7442532344cfbc124e924c0bacf23865e4ce2f442ad60ae457d8" + "4f7a1f05d50fb867c20e778e412a25237054555669ced01c1ce1ba8e8e57510f" + "73e1167c920f78aa5415dc5281f0c761fb25bb1ebc707bc003dd90911e649915" + "918cfe4f3176972f8afdc1cccd9705e7fb307a0c17d273").HexToByteArray(); public static byte[] CertWithTemplateData = ( "308201dc30820145a00302010202105101b8242daf6cae4c53bac68a948b0130" + "0d06092a864886f70d0101050500300e310c300a06035504031303466f6f301e" + "170d3135303330313232333133395a170d3136303330313034333133395a300e" + 
"310c300a06035504031303466f6f30819f300d06092a864886f70d0101010500" + "03818d0030818902818100a6dcff50bd1fe420301fea5fa56be93a7a53f2599c" + "e453cf3422bec797bac0ed78a03090a3754569e6494bcd585ac16a5ea5086344" + "3f25521085ca09580579cf0b46bd6e50015319fba5d2bd3724c53b20cdddf604" + "74bd7ef426aead9ca5ffea275a4b2b1b6f87c203ab8783559b75e319722886fb" + "eb784f5f06823906b2a9950203010001a33b3039301b06092b06010401823714" + "02040e1e0c00480065006c006c006f0000301a06092b0601040182371507040d" + "300b0603570809020103020102300d06092a864886f70d010105050003818100" + "962594da079523c26e2d3fc573fd17189ca33bedbeb2c38c92508fc2a865973b" + "e85ba686f765101aea0a0391b22fcfa6c0760eece91a0eb75501bf6871553f8d" + "6b089cf2ea63c872e0b4a178795b71826c4569857b45994977895e506dfb8075" + "ed1b1096987f2c8f65f2d6bbc788b1847b6ba13bee17ef6cb9c6a3392e13003f").HexToByteArray(); public static byte[] ComplexNameInfoCert = ( "308204BE30820427A00302010202080123456789ABCDEF300D06092A864886F70" + "D01010505003081A43110300E06035504061307436F756E747279310E300C0603" + "550408130553746174653111300F060355040713084C6F63616C6974793111300" + "F060355040A13084578616D706C654F31123010060355040B13094578616D706C" + "654F55311E301C06035504031315636E2E6973737565722E6578616D706C652E6" + "F72673126302406092A864886F70D0109011617697373756572656D61696C4065" + "78616D706C652E6F7267301E170D3133313131323134313531365A170D3134313" + "231333135313631375A3081A63110300E06035504061307436F756E747279310E" + "300C0603550408130553746174653111300F060355040713084C6F63616C69747" + "93111300F060355040A13084578616D706C654F31123010060355040B13094578" + "616D706C654F55311F301D06035504031316636E2E7375626A6563742E6578616" + "D706C652E6F72673127302506092A864886F70D01090116187375626A65637465" + "6D61696C406578616D706C652E6F7267305C300D06092A864886F70D010101050" + "0034B003048024100DC6FBBDA0300520DFBC9F046CC865D8876AEAC353807EA84" + "F58F92FE45EE03C22E970CAF41031D47F97C8A5117C62718482911A8A31B58D92" + 
"328BA3CF9E605230203010001A382023730820233300B0603551D0F0404030200" + "B0301D0603551D250416301406082B0601050507030106082B060105050703023" + "081FD0603551D120481F53081F28217646E73312E6973737565722E6578616D70" + "6C652E6F72678217646E73322E6973737565722E6578616D706C652E6F7267811" + "569616E656D61696C31406578616D706C652E6F7267811569616E656D61696C32" + "406578616D706C652E6F7267A026060A2B060104018237140203A0180C1669737" + "375657275706E31406578616D706C652E6F7267A026060A2B0601040182371402" + "03A0180C1669737375657275706E32406578616D706C652E6F7267861F6874747" + "03A2F2F757269312E6973737565722E6578616D706C652E6F72672F861F687474" + "703A2F2F757269322E6973737565722E6578616D706C652E6F72672F308201030" + "603551D110481FB3081F88218646E73312E7375626A6563742E6578616D706C65" + "2E6F72678218646E73322E7375626A6563742E6578616D706C652E6F726781157" + "3616E656D61696C31406578616D706C652E6F7267811573616E656D61696C3240" + "6578616D706C652E6F7267A027060A2B060104018237140203A0190C177375626" + "A65637475706E31406578616D706C652E6F7267A027060A2B0601040182371402" + "03A0190C177375626A65637475706E32406578616D706C652E6F7267862068747" + "4703A2F2F757269312E7375626A6563742E6578616D706C652E6F72672F862068" + "7474703A2F2F757269322E7375626A6563742E6578616D706C652E6F72672F300" + "D06092A864886F70D0101050500038181005CD44A247FF4DFBF2246CC04D7D57C" + "EF2B6D3A4BC83FF685F6B5196B65AFC8F992BE19B688E53E353EEA8B63951EC40" + "29008DE8B851E2C30B6BF73F219BCE651E5972E62D651BA171D1DA9831A449D99" + "AF4E2F4B9EE3FD0991EF305ADDA633C44EB5E4979751280B3F54F9CCD561AC27D" + "3426BC6FF32E8E1AAF9F7C0150A726B").HexToByteArray(); internal static readonly byte[] MultiPrivateKeyPfx = ( "30820F1602010330820ED606092A864886F70D010701A0820EC704820EC33082" + "0EBF308206A806092A864886F70D010701A08206990482069530820691308203" + "4C060B2A864886F70D010C0A0102A08202B6308202B2301C060A2A864886F70D" + "010C0103300E0408ED42EEFCD77BB2EB020207D00482029048F341D409492D23" + "D89C0C01DEE7EFFB6715B15D2BB558E9045D635CADFFFEC85C10A4849AB0657D" + 
"A17FE7EC578F779BA2DC129FA959664DC7E85DFD13CAC673E487208FE457223A" + "75732915FFCF3FF70F557B0846D62AD507300EA1770EDED82F7D8E6E75075728" + "A29D3BF829E75F09EF283A9DDEDDFBABC2E25698DA8C24E4FE34CD43C87554BF" + "55B1D4B2B0979F399AEC95B781C62CBE9E412329F9A9BCABF20F716A95F1D795" + "7C379A27587F6BBFA44A0B75FAAC15CA3730629C55E87990EE521BC4657EE2A4" + "41AF099A226D31707685A89A28EB27CA65512B70DEC09231369AA1A265D4F5C3" + "C5D17CB11DB54C70AB83EA28F4740D1F79D490F46F926FB267D5F0E4B2FE096D" + "F161A4FF9E9AC068EFCA999B3ED0A3BD05D8D1E3B67CF51E6A478154B427D87D" + "C861D0FE2A7A42600483D7B979DC71E8A00D0E805E3BB86E8673234DC1D14987" + "99272754A5FD5FEC118CF1E2B2A539B604FED5486A4E4D73FAAFF69023263B84" + "6870D6B8DB01E31CB3A1E4BA3588C1FA81C786745A33B95573D5381AB307827A" + "549A36AF535FD05E1247BB92C6C6FCB0E76E87F2E4C8136F37C9C19BE3001F59" + "FC5CB459C620B8E73711BF102D78F665F40E4D1A341370BC1FB7A5567C29359C" + "FFB938237002904BE59F5605AF96E8A670E2248AB71D27FE63E327077144F095" + "4CA815E0284E2FF5E1A11B2946276A99B91BF138A79B057436798AF72FD86842" + "881C5A5ECDA8A961A21553CC930703047F1F45699CEFEF26AAB6B7DBC65C8C62" + "4CA3286094596F2AA48268B9F5411058613185507332833AFB312D5780CEFF96" + "6DD05A2CB6E1B252D9656D8E92E63E6C0360F119232E954E11DE777D2DE1C208" + "F704DDB16E1351F49B42A859E3B6B2D94E1E2B3CD97F06B1123E9CCA049201E6" + "DB7273C0BDE63CC9318182301306092A864886F70D0109153106040401000000" + "306B06092B0601040182371101315E1E5C004D006900630072006F0073006F00" + "66007400200045006E00680061006E0063006500640020004300720079007000" + "74006F0067007200610070006800690063002000500072006F00760069006400" + "650072002000760031002E00303082033D060B2A864886F70D010C0A0102A082" + "02B6308202B2301C060A2A864886F70D010C0103300E04081F85B7ED57F6F934" + "020207D00482029051A5ADA683AAE06A699761CCF05CB081A4398A7B1256A250" + "84DBE1115BFAB07A5A9146BC22F2E4223FF25BCA1836AE218691815F20A27A1B" + "98D1FC78F84AFA7E90A55954EE5BEA47FFA35928A990CB47346767F6F4212DBC" + "D03FFF1E4D137979006B46B19A9FC3BC9B5036ED6F8582E2007D08DB94B2B576" 
+ "E154719CAC90DFB6F238CA875FCBEBCF9E9F933E4451E6A2B60C2A0A8A35B5FD" + "20E5DDA000008DCCE95BBDF604A8F93001F594E402FF8649A6582DE5901EDF9D" + "ED7D6F9657C5A184D82690EFCFB2F25BFCE02BC56F0FF00595996EBF1BA25475" + "AB613461280DD641186237D8A3AB257BD6FB1BDC3768B00719D233E0D5FD26D0" + "8BA6EAB29D732B990FB9423E643E4663ABBA0D8885DD2A276EE02C92778261C7" + "853F708E2B9AF8D2E96416F676D0191BD24D0C8430BD419049F43C8E2A0C32F8" + "62207B3DA661577CE5933460D0EF69FAD7323098B55FEF3A9955FE632FBCE845" + "2BB5F3430AE2A9021EBF756CC7FDFC3E63581C8B0D7AB77760F447F868B59236" + "14DAA9C36AEBC67DC854B93C38E8A6D3AC11B1EE1D02855CE96ADEB840B626BF" + "C4B3BFD6487C9073F8A15F55BA945D58AD1636A7AED476EBDB5227A71144BF87" + "45192EF5CD177818F61836717ED9EB0A83BEEE582ADEDD407035E453083B17E7" + "C237009D9F04F355CEAB0C0E9AD6F13A3B54459FA05B19E02275FE2588258B63" + "A125F549D1B44C827CDC94260A02F4A1B42A30E675B9760D876685D6CA05C258" + "03BDE1F33D325CF6020A662B0F5DCCC8D77B941B273AC462F0D3E050CEB5AEF7" + "107C45372F7063EF1AB420CA555A6C9BE6E1067966755584346CDDE7C05B6132" + "E553B11C374DB90B54E5C096062349A1F6CB78A1A2D995C483541750CFA956DE" + "A0EB3667DE7AD78931C65B6E039B5DE461810B68C344D2723174301306092A86" + "4886F70D0109153106040402000000305D06092B060104018237110131501E4E" + "004D006900630072006F0073006F006600740020005300740072006F006E0067" + "002000430072007900700074006F006700720061007000680069006300200050" + "0072006F007600690064006500723082080F06092A864886F70D010706A08208" + "00308207FC020100308207F506092A864886F70D010701301C060A2A864886F7" + "0D010C0106300E04089ADEE71816BCD023020207D0808207C851AA1EA533FECA" + "BB26D3846FAEE8DEDB919C29F8B98BBBF785BC306C12A8ACB1437786C4689161" + "683718BB7E40EB60D9BE0C87056B5ECF20ACCB8BF7F36033B8FCB84ED1474E97" + "DE0A8709B563B6CF8E69DF4B3F970C92324946723C32D08B7C3A76C871C6B6C8" + "C56F2D3C4C00B8A809E65A4EB5EFECC011E2B10F0E44ECDA07B325417B249240" + "80844F6D7F1F6E420346EA85825EB830C7E05A5383412A9502A51F1AC07F315A" + 
"DE357F1F9FB2E6427976E78B8FF9CD6C2F9841F2D84658AC8747694EFD0C451B" + "7AC5B83D5F0780808417501666BB452B53CEB0698162D94541DE181A7968DB13" + "9F17A1076EDEB70B38B8881DBC6DE2B694070A5A1AA71E4CDFBF7F4D5DBCF166" + "46768364D3C74FA212E40CBE3BE7C51A74D271164D00E89F997FD418C51A7C2D" + "73130D7C6FCAA2CA65082CE38BFB753BB30CC71656529E8DBA4C4D0B7E1A79CF" + "2A052FFEFA2DEE3373115472AFD1F40A80B23AA6141D5CDE0A378FE6210D4EE6" + "9B8771D3E192FD989AEC14C26EA4845D261B8A45ABC1C8FA305449DCDEDA9882" + "DD4DDC69B2DE315645FBC3EE52090907E7687A22A63F538E030AB5A5413CA415" + "F1D70E70CB567261FB892A8B3BAFC72D632CD2FDCC0559E01D5C246CC27C9348" + "63CCFA52490E1F01D8D2D0AF2587E4D04011140A494FFA3CA42C5F645B94EE30" + "100DE019B27F66FFC035E49A65B2A3F6CB14EB1E2FFF1F25B5C87481BD8506F3" + "07E0B042A2C85B99ECA520B4AAC7DFF2B11C1213E4128A01765DDB27B867336B" + "8CCF148CE738465D46E7A0BEA466CD8BBCCE2E11B16E0F9D24FF2F2D7C9F8527" + "79ADBB818F87E4AFF7C21A9C2BC20D38209322A34B0B393B187C96583D3D73D9" + "440F994B2F320D3274848AB7167942179CFF725C2C7556CCC289A5E788C5B863" + "E6FCDD5E4B87E41458BEB3F43D14C7E5196C38CA36322F8B83064862178D5892" + "5AEF34F444A31A4FB18431D7D37C65ED519643BC7BD025F801390430022253AA" + "FCEA670726512C3532EA9F410DB8AA6628CC455E4AB3F478A6981DB9180B7A2A" + "24B365F37554CE04B08F22B3539D98BF9A1AC623BBF9A08DBEC951E9730C1318" + "02B2C40750AAE6A791B3219A96A5BAC7AE17A2F7EA02FF66D6FB36C2E6B6AB90" + "D821A6322BF3E8D82969756A474551DB9EAA8C587FC878F996F5FA1E1C39E983" + "F164B0A67897EB3755C378807FFDFE964C5C0F290784A08E8C925E85775A9B89" + "2E278F68C3C1DE72622AC10EA56D88C909EF4AC9F47ED61376737C1E43DBF0F8" + "9337F0684FA0B96E7A993EC328A6A5FBCDCB809ACBFDAE4ECE192A45480104ED" + "12820238AB6AC9C88CC9A82585FD29A81A7BC5BC591738A4D49A86D06B4E18BD" + "C83DFFAA60D8A0D4F70CC63D4E83812CB6753F3744545592D04223793E5B3051" + "25AAD8807A753D235769BD0280E2DE808B0CEE2B98B0F5562FF9EF68161A6B7E" + "08C8B105766EBCFC44AC858B1A89E34C099B194A8B24D1DBABC13909EFAF5B9A" + "9E77AEAF7DD9BE772FA01AB9518EB8864AE6D07D7DD7451797541D2F723BC71A" 
+ "9C14ED1D811594E2C4A57017D4CB90FD82C195FA9B823DF1E2FFD965E3139F9A" + "6E8AAC36FA39CFA4C52E85D2A661F9D0D466720C5AB7ECDE968FF51B535B019A" + "3E9C76058E6F673A49CDD89EA7EC998BDADE71186EA084020A897A328753B72E" + "213A9D82443F7E34D94508199A2A63E71A12BD441C132201E9A3829B2727F23E" + "65C519F4DA2C40162A3A501B1BD57568ED75447FEAF8B42988CE25407644BFA0" + "B76059D275EC994BB336055E271751B32233D79A6E5E3AA700F3803CCA50586D" + "28934E3D4135FA043AF7DFAB977477283602B1739C4AF40E3856E75C34EB98C6" + "9A928ADE05B67A679630EFA14E64B2957EDD1AB4EC0B0E7BC38D4851EBF67928" + "33EACB62FB6C862B089E3066AE5EAAFD2A8B7FC712DE9BD2F488222EEB1FB91B" + "4E57C2D24092818965621C123280453EDCFA2EC9D9B50AFA437D1ED09EC36FD2" + "32B169ED301E0DB0BABE562B67130F90EBC85D325A90931A5B5A94736A4B3AAD" + "B8CA295F59AF7FF08CCFADE5AFBBC2346BC6D78D9E5F470E9BDFF547F2574B10" + "A48DD9D56B5B03E9E24D65C367B6E342A26A344111A66B1908EDAECD0834930D" + "A74E1CFE2E4B0636A7C18E51A27AD21992A2DCF466BAACAC227B90B5E61BED79" + "9C97DEE7EDB33CCAF5DAD7AAD3CACCDE59478CF69AE64B9065FCB436E1993514" + "C42872DD486ABB75A07A4ED46CDF0E12C0D73FAB83564CF1A814791971EC9C7C" + "6A08A13CE0453C2C3236C8B2E146D242E3D37A3ECF6C350D0B2AB956CB21057F" + "DC630750A71C61C66DE3D4A6DB187BEE2F86DEB93E723C5943EA17E699E93555" + "756920416BD6B267A4CFAC4EE90E96A6419302B4C0A3B9705509CA09EE92F184" + "FD2817BA09BE29E465909DB6C93E3C1CAF6DC29E1A5838F3C32CCB220235EF82" + "9CD21D1B3E960518A80D08AE7FF08D3AFB7451C823E9B8D49DAF66F503E4AE53" + "99FECFC958429D758C06EFF8338BC02457F6FE5053AA3C2F27D360058FD93566" + "3B55F026B504E39D86E7CE15F04B1C62BBFA0B1CA5E64FF0BD088D94FB1518E0" + "5B2F40BF9D71C61FC43E3AF8440570C44030F59D14B8858B7B8506B136E7E39B" + "B04F9AFEAF2FA292D28A8822046CEFDE381F2399370BDE9B97BC700418585C31" + "E9C353635ADAA6A00A833899D0EDA8F5FFC558D822AEB99C7E35526F5297F333" + "F9E758D4CD53277316608B1F7DB6AC71309A8542A356D407531BA1D3071BA9DC" + "02AE91C7DF2561AEBC3845A118B00D21913B4A401DDDC40CE983178EF26C4A41" + 
"343037301F300706052B0E03021A041454F0864331D9415EBA750C62FA93C97D" + "3402E1A40414B610EC75D16EA23BF253AAD061FAC376E1EAF684").HexToByteArray(); internal static readonly byte[] EmptyPfx = ( "304F020103301106092A864886F70D010701A004040230003037301F30070605" + "2B0E03021A0414822078BC83E955E314BDA908D76D4C5177CC94EB0414711018" + "F2897A44A90E92779CB655EA11814EC598").HexToByteArray(); internal const string ChainPfxPassword = "test"; internal static readonly byte[] ChainPfxBytes = ( "308213790201033082133506092A864886F70D010701A0821326048213223082" + "131E3082036706092A864886F70D010701A08203580482035430820350308203" + "4C060B2A864886F70D010C0A0102A08202B6308202B2301C060A2A864886F70D" + "010C0103300E040811E8B9808BA6E96C020207D004820290D11DA8713602105C" + "95792D65BCDFC1B7E3708483BF6CD83008082F89DAE4D003F86081B153BD4D4A" + "C122E802752DEA29F07D0B7E8F0FB8A762B4CAA63360F9F72CA5846771980A6F" + "AE2643CD412E6E4A101625371BBD48CC6E2D25191D256B531B06DB7CDAC04DF3" + "E10C6DC556D5FE907ABF32F2966A561C988A544C19B46DF1BE531906F2CC2263" + "A301302A857075C7A9C48A395241925C6A369B60D176419D75E320008D5EFD91" + "5257B160F6CD643953E85F19EBE4E4F72B9B787CF93E95F819D1E43EF01CCFA7" + "48F0E7260734EA9BC6039BA7557BE6328C0149718A1D9ECF3355082DE697B6CD" + "630A9C224D831B7786C7E904F1EF2D9D004E0E825DD74AC4A576CDFCA7CECD14" + "D8E2E6CCAA3A302871AE0BA979BB25559215D771FAE647905878E797BBA9FC62" + "50F30F518A8008F5A12B35CE526E31032B56EFE5A4121E1E39DC7339A0CE8023" + "24CDDB7E9497BA37D8B9F8D826F901C52708935B4CA5B0D4D760A9FB33B0442D" + "008444D5AEB16E5C32187C7038F29160DD1A2D4DB1F9E9A6C035CF5BCED45287" + "C5DEBAB18743AAF90E77201FEA67485BA3BBCE90CEA4180C447EE588AC19C855" + "638B9552D47933D2760351174D9C3493DCCE9708B3EFE4BE398BA64051BF52B7" + "C1DCA44D2D0ED5A6CFB116DDA41995FA99373C254F3F3EBF0F0049F1159A8A76" + "4CFE9F9CC56C5489DD0F4E924158C9B1B626030CB492489F6AD0A9DCAF3E141D" + "B4D4821B2D8A384110B6B0B522F62A9DC0C1315A2A73A7F25F96C530E2F700F9" + "86829A839B944AE6758B8DD1A1E9257F91C160878A255E299C18424EB9983EDE" + 
"6DD1C5F4D5453DD5A56AC87DB1EFA0806E3DBFF10A9623FBAA0BAF352F50AB5D" + "B16AB1171145860D21E2AB20B45C8865B48390A66057DE3A1ABE45EA65376EF6" + "A96FE36285C2328C318182301306092A864886F70D0109153106040401000000" + "306B06092B0601040182371101315E1E5C004D006900630072006F0073006F00" + "66007400200045006E00680061006E0063006500640020004300720079007000" + "74006F0067007200610070006800690063002000500072006F00760069006400" + "650072002000760031002E003030820FAF06092A864886F70D010706A0820FA0" + "30820F9C02010030820F9506092A864886F70D010701301C060A2A864886F70D" + "010C0106300E0408FFCC41FD8C8414F6020207D080820F68092C6010873CF9EC" + "54D4676BCFB5FA5F523D03C981CB4A3DC096074E7D04365DDD1E80BF366B8F9E" + "C4BC056E8CE0CAB516B9C28D17B55E1EB744C43829D0E06217852FA99CCF5496" + "176DEF9A48967C1EEB4A384DB7783E643E35B5B9A50533B76B8D53581F02086B" + "782895097860D6CA512514E10D004165C85E561DF5F9AEFD2D89B64F178A7385" + "C7FA40ECCA899B4B09AE40EE60DAE65B31FF2D1EE204669EFF309A1C7C8D7B07" + "51AE57276D1D0FB3E8344A801AC5226EA4ED97FCD9399A4EB2E778918B81B17F" + "E4F65B502595195C79E6B0E37EB8BA36DB12435587E10037D31173285D45304F" + "6B0056512B3E147D7B5C397709A64E1D74F505D2BD72ED99055161BC57B6200F" + "2F48CF128229EFBEBFC2707678C0A8C51E3C373271CB4FD8EF34A1345696BF39" + "50E8CE9831F667D68184F67FE4D30332E24E5C429957694AF23620EA7742F08A" + "38C9A517A7491083A367B31C60748D697DFA29635548C605F898B64551A48311" + "CB2A05B1ACA8033128D48E4A5AA263D970FE59FBA49017F29049CF80FFDBD192" + "95B421FEFF6036B37D2F8DC8A6E36C4F5D707FB05274CC0D8D94AFCC8C6AF546" + "A0CF49FBD3A67FB6D20B9FE6FDA6321E8ABF5F7CC794CFCC46005DC57A7BAFA8" + "9954E43230402C8100789F11277D9F05C78DF0509ECFBF3A85114FD35F4F17E7" + "98D60C0008064E2557BA7BF0B6F8663A6C014E0220693AE29E2AB4BDE5418B61" + "0889EC02FF5480BD1B344C87D73E6E4DB98C73F881B22C7D298059FE9D7ADA21" + "92BB6C87F8D25F323A70D234E382F6C332FEF31BB11C37E41903B9A59ADEA5E0" + "CBAB06DFB835257ABC179A897DEAD9F19B7DF861BE94C655DC73F628E065F921" + "E5DE98FFCBDF2A54AC01E677E365DD8B932B5BDA761A0032CE2127AB2A2B9DCB" 
+ "63F1EA8A51FC360AB5BC0AD435F21F9B6842980D795A6734FDB27A4FA8209F73" + "62DD632FC5FB1F6DE762473D6EA68BFC4BCF983865E66E6D93159EFACC40AB31" + "AA178806CF893A76CAAA3279C988824A33AF734FAF8E21020D988640FAB6DB10" + "DF21D93D01776EEA5DAECF695E0C690ED27AD386E6F2D9C9482EA38946008CCB" + "8F0BD08F9D5058CF8057CA3AD50BB537116A110F3B3ACD9360322DB4D242CC1A" + "6E15FA2A95192FC65886BE2672031D04A4FB0B1F43AE8476CF82638B61B416AA" + "97925A0110B736B4D83D7977456F35D947B3D6C9571D8E2DA0E9DEE1E665A844" + "259C17E01E044FAB898AA170F99157F7B525D524B01BD0710D23A7689A615703" + "8A0697BD48FFE0253ABD6F862093574B2FC9BA38E1A6EC60AF187F10D79FF71F" + "7C50E87A07CC0A51099899F7336FE742ADEF25E720B8E0F8781EC7957D414CF5" + "D44D6998E7E35D2433AFD86442CCA637A1513BE3020B5334614277B3101ED7AD" + "22AFE50DE99A2AD0E690596C93B881E2962D7E52EE0A770FAF6917106A8FF029" + "8DF38D6DE926C30834C5D96854FFD053BDB020F7827FB81AD04C8BC2C773B2A5" + "9FDD6DDF7298A052B3486E03FECA5AA909479DDC7FED972192792888F49C40F3" + "910140C5BE264D3D07BEBF3275117AF51A80C9F66C7028A2C3155414CF939997" + "268A1F0AA9059CC3AA7C8BBEF880187E3D1BA8978CBB046E43289A020CAE11B2" + "5140E2247C15A32CF70C7AA186CBB68B258CF2397D2971F1632F6EBC4846444D" + "E445673B942F1F110C7D586B6728ECA5B0A62D77696BF25E21ED9196226E5BDA" + "5A80ECCC785BEEDE917EBC6FFDC2F7124FE8F719B0A937E35E9A720BB9ED72D2" + "1213E68F058D80E9F8D7162625B35CEC4863BD47BC2D8D80E9B9048811BDD8CB" + "B70AB215962CD9C40D56AE50B7003630AE26341C6E243B3D12D5933F73F78F15" + "B014C5B1C36B6C9F410A77CA997931C8BD5CCB94C332F6723D53A4CCC630BFC9" + "DE96EFA7FDB66FA519F967D6A2DB1B4898BB188DEB98A41FFA7907AE7601DDE2" + "30E241779A0FDF551FB84D80AAEE3D979F0510CD026D4AE2ED2EFB7468418CCD" + "B3BD2A29CD7C7DC6419B4637412304D5DA2DC178C0B4669CA8330B9713A812E6" + "52E812135D807E361167F2A6814CEF2A8A9591EFE2C18216A517473B9C3BF2B7" + "51E47844893DA30F7DCD4222D1A55D570C1B6F6A99AD1F9213BA8F84C0B14A6D" + "ED6A26EAFF8F89DF733EEB44117DF0FD357186BA4A15BD5C669F60D6D4C34028" + 
"322D4DDF035302131AB6FD08683804CC90C1791182F1AE3281EE69DDBBCC12B8" + "1E60942FD082286B16BE27DC11E3BB0F18C281E02F3BA66E48C5FD8E8EA3B731" + "BDB12A4A3F2D9E1F833DD204372003532E1BB11298BDF5092F2959FC439E6BD2" + "DC6C37E3E775DCBE821B9CBB02E95D84C15E736CEA2FDDAD63F5CD47115B4AD5" + "5227C2A02886CD2700540EBFD5BF18DC5F94C5874972FD5424FE62B30500B1A8" + "7521EA3798D11970220B2BE7EFC915FCB7A6B8962F09ABA005861E839813EDA3" + "E59F70D1F9C277B73928DFFC84A1B7B0F78A8B001164EB0824F2510885CA269F" + "DCBB2C3AE91BDE91A8BBC648299A3EB626E6F4236CCE79E14C803498562BAD60" + "28F5B619125F80925A2D3B1A56790795D04F417003A8E9E53320B89D3A3109B1" + "9BB17B34CC9700DA138FABB5997EC34D0A44A26553153DBCFF8F6A1B5432B150" + "58F7AD87C6B37537796C95369DAD53BE5543D86D940892F93983153B4031D4FA" + "B25DAB02C1091ACC1DAE2118ABD26D19435CD4F1A02BDE1896236C174743BCA6" + "A33FB5429E627EB3FD9F513E81F7BD205B81AAE627C69CF227B043722FA05141" + "39347D202C9B7B4E55612FC27164F3B5F287F29C443793E22F6ED6D2F353ED82" + "A9F33EDBA8F5F1B2958F1D6A3943A9614E7411FDBCA597965CD08A8042307081" + "BAC5A070B467E52D5B91CA58F986C5A33502236B5BAE6DB613B1A408D16B29D3" + "560F1E94AD840CFA93E83412937A115ABF68322538DA8082F0192D19EAAA41C9" + "299729D487A9404ECDB6396DDA1534841EAE1E7884FA43574E213AE656116D9E" + "F7591AA7BDE2B44733DFE27AA59949E5DC0EE00FDF42130A748DDD0FB0053C1A" + "55986983C8B9CEAC023CAD7EDFFA1C20D3C437C0EF0FC9868D845484D8BE6538" + "EAADA6365D48BA776EE239ED045667B101E3798FE53E1D4B9A2ACBBE6AF1E5C8" + "8A3FB03AD616404013E249EC34458F3A7C9363E7772151119FE058BD0939BAB7" + "64A2E545B0B2FDAA650B7E849C8DD4033922B2CAE46D0461C04A2C87657CB4C0" + "FFBA23DED69D097109EC8BFDC25BB64417FEEB32842DE3EFEF2BF4A47F08B9FC" + "D1907BC899CA9DA604F5132FB420C8D142D132E7E7B5A4BD0EF4A56D9E9B0ACD" + "88F0E862D3F8F0440954879FFE3AA7AA90573C6BFDC6D6474C606ACA1CD94C1C" + "3404349DD83A639B786AFCDEA1779860C05400E0479708F4A9A0DD51429A3F35" + "FBD5FB9B68CECC1D585F3E35B7BBFC469F3EAEEB8020A6F0C8E4D1804A3EB32E" + "B3909E80B0A41571B23931E164E0E1D0D05379F9FD3BF51AF04D2BE78BDB84BD" 
+ "787D419E85626297CB35FCFB6ED64042EAD2EBC17BB65677A1A33A5C48ADD280" + "237FB2451D0EFB3A3C32354222C7AB77A3C92F7A45B5FB10092698D88725864A" + "3685FBDD0DC741424FCCD8A00B928F3638150892CAAB535CC2813D13026615B9" + "9977F7B8240E914ACA0FF2DCB1A9274BA1F55DF0D24CCD2BAB7741C9EA8B1ECD" + "E97477C45F88F034FDF73023502944AEE1FF370260C576992826C4B2E5CE9924" + "84E3B85170FCCAC3413DC0FF6F093593219E637F699A98BD29E8EE4550C128CA" + "182680FDA3B10BC07625734EE8A8274B43B170FC3AEC9AA58CD92709D388E166" + "AB4ADFD5A4876DC47C17DE51FDD42A32AF672515B6A81E7ABECFE748912B321A" + "FD0CBF4880298DD79403900A4002B5B436230EB6E49192DF49FAE0F6B60EBA75" + "A54592587C141AD3B319129006367E9532861C2893E7A2D0D2832DF4377C3184" + "5CB02A1D020282C3D2B7F77221F71FEA7FF0A988FEF15C4B2F6637159EEC5752" + "D8A7F4AB971117666A977370E754A4EB0DC52D6E8901DC60FCD87B5B6EF9A91A" + "F8D9A4E11E2FFDAB55FC11AF6EEB5B36557FC8945A1E291B7FF8931BE4A57B8E" + "68F04B9D4A9A02FC61AE913F2E2DDBEE42C065F4D30F568834D5BB15FDAF691F" + "197EF6C25AE87D8E968C6D15351093AAC4813A8E7B191F77E6B19146F839A43E" + "2F40DE8BE28EB22C0272545BADF3BD396D383B8DA8388147100B347999DDC412" + "5AB0AA1159BC6776BD2BF51534C1B40522D41466F414BDE333226973BAD1E6D5" + "7639D30AD94BEA1F6A98C047F1CE1294F0067B771778D59E7C722C73C2FF100E" + "13603206A694BF0ED07303BE0655DC984CA29893FD0A088B122B67AABDC803E7" + "3E5729E868B1CA26F5D05C818D9832C70F5992E7D15E14F9775C6AD24907CF2F" + "211CF87167861F94DCF9E3D365CB600B336D93AD44B8B89CA24E59C1F7812C84" + "DBE3EE57A536ED0D4BF948F7662E5BCBBB388C72243CFCEB720852D5A4A52F01" + "8C2C087E4DB43410FE9ABA3A8EF737B6E8FFDB1AB9832EBF606ED5E4BD62A86B" + "BCAE115C67682EDEA93E7845D0D6962C146B411F7784545851D2F327BEC7E434" + "4D68F137CDA217A3F0FF3B752A34C3B5339C79CB8E1AC690C038E85D6FC13379" + "090198D3555394D7A2159A23BD5EEF06EB0BCC729BB29B5BE911D02DA78FDA56" + "F035E508C722139AD6F25A6C84BED0E98893370164B033A2B52BC40D9BF5163A" + "F9650AB55EABB23370492A7D3A87E17C11B4D07A7296273F33069C835FD208BA" + 
"8F989A3CF8659054E2CCCFB0C983531DC6590F27C4A1D2C3A780FE945F7E52BB" + "9FFD2E324640E3E348541A620CD62605BBDB284AF97C621A00D5D1D2C31D6BD6" + "1149137B8A0250BC426417A92445A52574E999FB9102C16671914A1542E92DDE" + "541B2A0457112AF936DA84707CADFEA43BFEDAE5F58859908640420948086E57" + "FFD1B867C241D40197CB0D4AD58BB69B3724772E0079406A1272858AAA620668" + "F696955102639F3E95CFFC637EAF8AB54F0B5B2131AB292438D06E15F3826352" + "DEDC653DA5A4AACE2BB97061A498F3B6789A2310471B32F91A6B7A9944DDBB70" + "31525B3AE387214DC85A1C7749E9168F41272680D0B3C331D61175F23B623EEC" + "40F984C35C831268036680DE0821E5DEE5BB250C6984775D49B7AF94057371DB" + "72F81D2B0295FC6A51BCD00A697649D4346FDD59AC0DFAF21BFCC942C23C6134" + "FFBA2ABABC141FF700B52C5B26496BF3F42665A5B71BAC7F0C19870BD9873890" + "239C578CDDD8E08A1B0A429312FB24F151A11E4D180359A7FA043E8155453F67" + "265CB2812B1C98C144E7675CFC86413B40E35445AE7710227D13DC0B5550C870" + "10B363C492DA316FB40D3928570BF71BF47638F1401549369B1255DB080E5DFA" + "18EA666B9ECBE5C9768C06B3FF125D0E94B98BB24B4FD44E770B78D7B336E021" + "4FD72E77C1D0BE9F313EDCD147957E3463C62E753C10BB98584C85871AAEA9D1" + "F397FE9F1A639ADE31D40EAB391B03B588B8B031BCAC6C837C61B06E4B745052" + "474D33531086519C39EDD6310F3079EB5AC83289A6EDCBA3DC97E36E837134F7" + "303B301F300706052B0E03021A0414725663844329F8BF6DECA5873DDD8C96AA" + "8CA5D40414DF1D90CD18B3FBC72226B3C66EC2CB1AB351D4D2020207D0").HexToByteArray(); internal static readonly byte[] Pkcs7ChainDerBytes = ( "30820E1606092A864886F70D010702A0820E0730820E030201013100300B0609" + "2A864886F70D010701A0820DEB3082050B30820474A003020102020A15EAA83A" + "000100009291300D06092A864886F70D010105050030818131133011060A0992" + "268993F22C6401191603636F6D31193017060A0992268993F22C64011916096D" + "6963726F736F667431143012060A0992268993F22C6401191604636F72703117" + "3015060A0992268993F22C64011916077265646D6F6E643120301E0603550403" + "13174D532050617373706F7274205465737420537562204341301E170D313330" + "3131303231333931325A170D3331313231333232323630375A308185310B3009" + 
"060355040613025553310B30090603550408130257413110300E060355040713" + "075265646D6F6E64310D300B060355040A130454455354310D300B060355040B" + "130454455354311330110603550403130A746573742E6C6F63616C3124302206" + "092A864886F70D010901161563726973706F70406D6963726F736F66742E636F" + "6D30819F300D06092A864886F70D010101050003818D0030818902818100B406" + "851089E9CF7CDB438DD77BEBD819197BEEFF579C35EF9C4652DF9E6330AA7E2E" + "24B181C59DA4AF10E97220C1DF99F66CE6E97247E9126A016AC647BD2EFD136C" + "31470C7BE01A20E381243BEEC8530B7F6466C50A051DCE37274ED7FF2AFFF4E5" + "8AABA61D5A448F4A8A9B3765D1D769F627ED2F2DE9EE67B1A7ECA3D288C90203" + "010001A38202823082027E300E0603551D0F0101FF0404030204F0301D060355" + "1D250416301406082B0601050507030106082B06010505070302301D0603551D" + "0E04160414FB3485708CBF6188F720EF948489405C8D0413A7301F0603551D23" + "0418301680146A6678620A4FF49CA8B75FD566348F3371E42B133081D0060355" + "1D1F0481C83081C53081C2A081BFA081BC865F687474703A2F2F707074657374" + "73756263612E7265646D6F6E642E636F72702E6D6963726F736F66742E636F6D" + "2F43657274456E726F6C6C2F4D5325323050617373706F727425323054657374" + "25323053756225323043412831292E63726C865966696C653A2F2F5C5C707074" + "65737473756263612E7265646D6F6E642E636F72702E6D6963726F736F66742E" + "636F6D5C43657274456E726F6C6C5C4D532050617373706F7274205465737420" + "5375622043412831292E63726C3082013806082B060105050701010482012A30" + "82012630819306082B06010505073002868186687474703A2F2F707074657374" + "73756263612E7265646D6F6E642E636F72702E6D6963726F736F66742E636F6D" + "2F43657274456E726F6C6C2F70707465737473756263612E7265646D6F6E642E" + "636F72702E6D6963726F736F66742E636F6D5F4D5325323050617373706F7274" + "2532305465737425323053756225323043412831292E63727430818D06082B06" + "01050507300286818066696C653A2F2F5C5C70707465737473756263612E7265" + "646D6F6E642E636F72702E6D6963726F736F66742E636F6D5C43657274456E72" + "6F6C6C5C70707465737473756263612E7265646D6F6E642E636F72702E6D6963" + "726F736F66742E636F6D5F4D532050617373706F727420546573742053756220" 
+ "43412831292E637274300D06092A864886F70D0101050500038181009DEBB8B5" + "A41ED54859795F68EF767A98A61EF7B07AAC190FCC0275228E4CAD360C9BA98B" + "0AE153C75522EEF42D400E813B4E49E7ACEB963EEE7B61D3C8DA05C183471544" + "725B2EBD1889877F62134827FB5993B8FDF618BD421ABA18D70D1C5B41ECDD11" + "695A48CB42EB501F96DA905471830C612B609126559120F6E18EA44830820358" + "308202C1A00302010202101B9671A4BC128B8341B0E314EAD9A191300D06092A" + "864886F70D01010505003081A13124302206092A864886F70D01090116156173" + "6D656D6F6E406D6963726F736F66742E636F6D310B3009060355040613025553" + "310B30090603550408130257413110300E060355040713075265646D6F6E6431" + "123010060355040A13094D6963726F736F667431163014060355040B130D5061" + "7373706F727420546573743121301F060355040313184D532050617373706F72" + "74205465737420526F6F74204341301E170D3035303132363031333933325A17" + "0D3331313231333232323630375A3081A13124302206092A864886F70D010901" + "161561736D656D6F6E406D6963726F736F66742E636F6D310B30090603550406" + "13025553310B30090603550408130257413110300E060355040713075265646D" + "6F6E6431123010060355040A13094D6963726F736F667431163014060355040B" + "130D50617373706F727420546573743121301F060355040313184D5320506173" + "73706F7274205465737420526F6F7420434130819F300D06092A864886F70D01" + "0101050003818D0030818902818100C4673C1226254F6BBD01B01D21BB05264A" + "9AA5B77AC51748EAC52048706DA6B890DCE043C6426FC44E76D70F9FE3A4AC85" + "5F533E3D08E140853DB769EE24DBDB7269FABEC0FDFF6ADE0AA85F0085B78864" + "58E7585E433B0924E81600433CB1177CE6AD5F2477B2A0E2D1A34B41F6C6F5AD" + "E4A9DD7D565C65F02C2AAA01C8E0C10203010001A3818E30818B301306092B06" + "0104018237140204061E0400430041300B0603551D0F040403020186300F0603" + "551D130101FF040530030101FF301D0603551D0E04160414F509C1D6267FC39F" + "CA1DE648C969C74FB111FE10301206092B060104018237150104050203010002" + "302306092B0601040182371502041604147F7A5208411D4607C0057C98F0C473" + "07010CB3DE300D06092A864886F70D0101050500038181004A8EAC73D8EA6D7E" + 
"893D5880945E0E3ABFC79C40BFA60A680CF8A8BF63EDC3AD9C11C081F1F44408" + "9581F5C8DCB23C0AEFA27571D971DBEB2AA9A1B3F7B9B0877E9311D36098A65B" + "7D03FC69A835F6C3096DEE135A864065F9779C82DEB0C777B9C4DB49F0DD11A0" + "EAB287B6E352F7ECA467D0D3CA2A8081119388BAFCDD25573082057C308204E5" + "A003020102020A6187C7F200020000001B300D06092A864886F70D0101050500" + "3081A13124302206092A864886F70D010901161561736D656D6F6E406D696372" + "6F736F66742E636F6D310B3009060355040613025553310B3009060355040813" + "0257413110300E060355040713075265646D6F6E6431123010060355040A1309" + "4D6963726F736F667431163014060355040B130D50617373706F727420546573" + "743121301F060355040313184D532050617373706F7274205465737420526F6F" + "74204341301E170D3039313032373231333133395A170D333131323133323232" + "3630375A30818131133011060A0992268993F22C6401191603636F6D31193017" + "060A0992268993F22C64011916096D6963726F736F667431143012060A099226" + "8993F22C6401191604636F727031173015060A0992268993F22C640119160772" + "65646D6F6E643120301E060355040313174D532050617373706F727420546573" + "742053756220434130819F300D06092A864886F70D010101050003818D003081" + "8902818100A6A4918F93C5D23B3C3A325AD8EC77043D207A0DDC294AD3F5BDE0" + "4033FADD4097BB1DB042B1D3B2F26A42CC3CB88FA9357710147AB4E1020A0DFB" + "2597AB8031DB62ABDC48398067EB79E4E2BBE5762F6B4C5EA7629BAC23F70269" + "06D46EC106CC6FBB4D143F7D5ADADEDE19B021EEF4A6BCB9D01DAEBB9A947703" + "40B748A3490203010001A38202D7308202D3300F0603551D130101FF04053003" + "0101FF301D0603551D0E041604146A6678620A4FF49CA8B75FD566348F3371E4" + "2B13300B0603551D0F040403020186301206092B060104018237150104050203" + "010001302306092B060104018237150204160414A0A485AE8296EA4944C6F6F3" + "886A8603FD07472C301906092B0601040182371402040C1E0A00530075006200" + "430041301F0603551D23041830168014F509C1D6267FC39FCA1DE648C969C74F" + "B111FE103081D60603551D1F0481CE3081CB3081C8A081C5A081C28663687474" + "703A2F2F70617373706F72747465737463612E7265646D6F6E642E636F72702E" + "6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F4D532532305061" 
+ "7373706F727425323054657374253230526F6F7425323043412831292E63726C" + "865B66696C653A2F2F50415353504F52545445535443412E7265646D6F6E642E" + "636F72702E6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F4D53" + "2050617373706F7274205465737420526F6F742043412831292E63726C308201" + "4406082B06010505070101048201363082013230819A06082B06010505073002" + "86818D687474703A2F2F70617373706F72747465737463612E7265646D6F6E64" + "2E636F72702E6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F50" + "415353504F52545445535443412E7265646D6F6E642E636F72702E6D6963726F" + "736F66742E636F6D5F4D5325323050617373706F727425323054657374253230" + "526F6F7425323043412832292E63727430819206082B06010505073002868185" + "66696C653A2F2F50415353504F52545445535443412E7265646D6F6E642E636F" + "72702E6D6963726F736F66742E636F6D2F43657274456E726F6C6C2F50415353" + "504F52545445535443412E7265646D6F6E642E636F72702E6D6963726F736F66" + "742E636F6D5F4D532050617373706F7274205465737420526F6F742043412832" + "292E637274300D06092A864886F70D010105050003818100C44788F8C4F5C2DC" + "84976F66417CBAE19FBFA82C257DA4C7FED6267BC711D113C78B1C097154A62A" + "B462ADC84A434AEBAE38DEB9605FAB534A3CAF7B72C199448E58640388911296" + "115ED6B3478D0E741D990F2D59D66F12E58669D8983489AB0406E37462164B56" + "6AA1D9B273C406FA694A2556D1D3ACE723382C19871B8C143100").HexToByteArray(); internal static readonly byte[] Pkcs7ChainPemBytes = ByteUtils.AsciiBytes( @"-----BEGIN PKCS7----- MIIOFgYJKoZIhvcNAQcCoIIOBzCCDgMCAQExADALBgkqhkiG9w0BBwGggg3rMIIF CzCCBHSgAwIBAgIKFeqoOgABAACSkTANBgkqhkiG9w0BAQUFADCBgTETMBEGCgmS JomT8ixkARkWA2NvbTEZMBcGCgmSJomT8ixkARkWCW1pY3Jvc29mdDEUMBIGCgmS JomT8ixkARkWBGNvcnAxFzAVBgoJkiaJk/IsZAEZFgdyZWRtb25kMSAwHgYDVQQD ExdNUyBQYXNzcG9ydCBUZXN0IFN1YiBDQTAeFw0xMzAxMTAyMTM5MTJaFw0zMTEy MTMyMjI2MDdaMIGFMQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcT B1JlZG1vbmQxDTALBgNVBAoTBFRFU1QxDTALBgNVBAsTBFRFU1QxEzARBgNVBAMT CnRlc3QubG9jYWwxJDAiBgkqhkiG9w0BCQEWFWNyaXNwb3BAbWljcm9zb2Z0LmNv 
bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAtAaFEInpz3zbQ43Xe+vYGRl7 7v9XnDXvnEZS355jMKp+LiSxgcWdpK8Q6XIgwd+Z9mzm6XJH6RJqAWrGR70u/RNs MUcMe+AaIOOBJDvuyFMLf2RmxQoFHc43J07X/yr/9OWKq6YdWkSPSoqbN2XR12n2 J+0vLenuZ7Gn7KPSiMkCAwEAAaOCAoIwggJ+MA4GA1UdDwEB/wQEAwIE8DAdBgNV HSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwHQYDVR0OBBYEFPs0hXCMv2GI9yDv lISJQFyNBBOnMB8GA1UdIwQYMBaAFGpmeGIKT/ScqLdf1WY0jzNx5CsTMIHQBgNV HR8EgcgwgcUwgcKggb+ggbyGX2h0dHA6Ly9wcHRlc3RzdWJjYS5yZWRtb25kLmNv cnAubWljcm9zb2Z0LmNvbS9DZXJ0RW5yb2xsL01TJTIwUGFzc3BvcnQlMjBUZXN0 JTIwU3ViJTIwQ0EoMSkuY3JshllmaWxlOi8vXFxwcHRlc3RzdWJjYS5yZWRtb25k LmNvcnAubWljcm9zb2Z0LmNvbVxDZXJ0RW5yb2xsXE1TIFBhc3Nwb3J0IFRlc3Qg U3ViIENBKDEpLmNybDCCATgGCCsGAQUFBwEBBIIBKjCCASYwgZMGCCsGAQUFBzAC hoGGaHR0cDovL3BwdGVzdHN1YmNhLnJlZG1vbmQuY29ycC5taWNyb3NvZnQuY29t L0NlcnRFbnJvbGwvcHB0ZXN0c3ViY2EucmVkbW9uZC5jb3JwLm1pY3Jvc29mdC5j b21fTVMlMjBQYXNzcG9ydCUyMFRlc3QlMjBTdWIlMjBDQSgxKS5jcnQwgY0GCCsG AQUFBzAChoGAZmlsZTovL1xccHB0ZXN0c3ViY2EucmVkbW9uZC5jb3JwLm1pY3Jv c29mdC5jb21cQ2VydEVucm9sbFxwcHRlc3RzdWJjYS5yZWRtb25kLmNvcnAubWlj cm9zb2Z0LmNvbV9NUyBQYXNzcG9ydCBUZXN0IFN1YiBDQSgxKS5jcnQwDQYJKoZI hvcNAQEFBQADgYEAneu4taQe1UhZeV9o73Z6mKYe97B6rBkPzAJ1Io5MrTYMm6mL CuFTx1Ui7vQtQA6BO05J56zrlj7ue2HTyNoFwYNHFURyWy69GImHf2ITSCf7WZO4 /fYYvUIauhjXDRxbQezdEWlaSMtC61AfltqQVHGDDGErYJEmVZEg9uGOpEgwggNY MIICwaADAgECAhAblnGkvBKLg0Gw4xTq2aGRMA0GCSqGSIb3DQEBBQUAMIGhMSQw IgYJKoZIhvcNAQkBFhVhc21lbW9uQG1pY3Jvc29mdC5jb20xCzAJBgNVBAYTAlVT MQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDESMBAGA1UEChMJTWljcm9z b2Z0MRYwFAYDVQQLEw1QYXNzcG9ydCBUZXN0MSEwHwYDVQQDExhNUyBQYXNzcG9y dCBUZXN0IFJvb3QgQ0EwHhcNMDUwMTI2MDEzOTMyWhcNMzExMjEzMjIyNjA3WjCB oTEkMCIGCSqGSIb3DQEJARYVYXNtZW1vbkBtaWNyb3NvZnQuY29tMQswCQYDVQQG EwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1JlZG1vbmQxEjAQBgNVBAoTCU1p Y3Jvc29mdDEWMBQGA1UECxMNUGFzc3BvcnQgVGVzdDEhMB8GA1UEAxMYTVMgUGFz c3BvcnQgVGVzdCBSb290IENBMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDE ZzwSJiVPa70BsB0huwUmSpqlt3rFF0jqxSBIcG2muJDc4EPGQm/ETnbXD5/jpKyF 
X1M+PQjhQIU9t2nuJNvbcmn6vsD9/2reCqhfAIW3iGRY51heQzsJJOgWAEM8sRd8 5q1fJHeyoOLRo0tB9sb1reSp3X1WXGXwLCqqAcjgwQIDAQABo4GOMIGLMBMGCSsG AQQBgjcUAgQGHgQAQwBBMAsGA1UdDwQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0G A1UdDgQWBBT1CcHWJn/Dn8od5kjJacdPsRH+EDASBgkrBgEEAYI3FQEEBQIDAQAC MCMGCSsGAQQBgjcVAgQWBBR/elIIQR1GB8AFfJjwxHMHAQyz3jANBgkqhkiG9w0B AQUFAAOBgQBKjqxz2Optfok9WICUXg46v8ecQL+mCmgM+Ki/Y+3DrZwRwIHx9EQI lYH1yNyyPArvonVx2XHb6yqpobP3ubCHfpMR02CYplt9A/xpqDX2wwlt7hNahkBl +Xecgt6wx3e5xNtJ8N0RoOqyh7bjUvfspGfQ08oqgIERk4i6/N0lVzCCBXwwggTl oAMCAQICCmGHx/IAAgAAABswDQYJKoZIhvcNAQEFBQAwgaExJDAiBgkqhkiG9w0B CQEWFWFzbWVtb25AbWljcm9zb2Z0LmNvbTELMAkGA1UEBhMCVVMxCzAJBgNVBAgT AldBMRAwDgYDVQQHEwdSZWRtb25kMRIwEAYDVQQKEwlNaWNyb3NvZnQxFjAUBgNV BAsTDVBhc3Nwb3J0IFRlc3QxITAfBgNVBAMTGE1TIFBhc3Nwb3J0IFRlc3QgUm9v dCBDQTAeFw0wOTEwMjcyMTMxMzlaFw0zMTEyMTMyMjI2MDdaMIGBMRMwEQYKCZIm iZPyLGQBGRYDY29tMRkwFwYKCZImiZPyLGQBGRYJbWljcm9zb2Z0MRQwEgYKCZIm iZPyLGQBGRYEY29ycDEXMBUGCgmSJomT8ixkARkWB3JlZG1vbmQxIDAeBgNVBAMT F01TIFBhc3Nwb3J0IFRlc3QgU3ViIENBMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB iQKBgQCmpJGPk8XSOzw6MlrY7HcEPSB6DdwpStP1veBAM/rdQJe7HbBCsdOy8mpC zDy4j6k1dxAUerThAgoN+yWXq4Ax22Kr3Eg5gGfreeTiu+V2L2tMXqdim6wj9wJp BtRuwQbMb7tNFD99Wtre3hmwIe70pry50B2uu5qUdwNAt0ijSQIDAQABo4IC1zCC AtMwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUamZ4YgpP9Jyot1/VZjSPM3Hk KxMwCwYDVR0PBAQDAgGGMBIGCSsGAQQBgjcVAQQFAgMBAAEwIwYJKwYBBAGCNxUC BBYEFKCkha6ClupJRMb284hqhgP9B0csMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIA QwBBMB8GA1UdIwQYMBaAFPUJwdYmf8Ofyh3mSMlpx0+xEf4QMIHWBgNVHR8Egc4w gcswgciggcWggcKGY2h0dHA6Ly9wYXNzcG9ydHRlc3RjYS5yZWRtb25kLmNvcnAu bWljcm9zb2Z0LmNvbS9DZXJ0RW5yb2xsL01TJTIwUGFzc3BvcnQlMjBUZXN0JTIw Um9vdCUyMENBKDEpLmNybIZbZmlsZTovL1BBU1NQT1JUVEVTVENBLnJlZG1vbmQu Y29ycC5taWNyb3NvZnQuY29tL0NlcnRFbnJvbGwvTVMgUGFzc3BvcnQgVGVzdCBS b290IENBKDEpLmNybDCCAUQGCCsGAQUFBwEBBIIBNjCCATIwgZoGCCsGAQUFBzAC hoGNaHR0cDovL3Bhc3Nwb3J0dGVzdGNhLnJlZG1vbmQuY29ycC5taWNyb3NvZnQu Y29tL0NlcnRFbnJvbGwvUEFTU1BPUlRURVNUQ0EucmVkbW9uZC5jb3JwLm1pY3Jv 
c29mdC5jb21fTVMlMjBQYXNzcG9ydCUyMFRlc3QlMjBSb290JTIwQ0EoMikuY3J0 MIGSBggrBgEFBQcwAoaBhWZpbGU6Ly9QQVNTUE9SVFRFU1RDQS5yZWRtb25kLmNv cnAubWljcm9zb2Z0LmNvbS9DZXJ0RW5yb2xsL1BBU1NQT1JUVEVTVENBLnJlZG1v bmQuY29ycC5taWNyb3NvZnQuY29tX01TIFBhc3Nwb3J0IFRlc3QgUm9vdCBDQSgy KS5jcnQwDQYJKoZIhvcNAQEFBQADgYEAxEeI+MT1wtyEl29mQXy64Z+/qCwlfaTH /tYme8cR0RPHixwJcVSmKrRirchKQ0rrrjjeuWBfq1NKPK97csGZRI5YZAOIkRKW EV7Ws0eNDnQdmQ8tWdZvEuWGadiYNImrBAbjdGIWS1Zqodmyc8QG+mlKJVbR06zn IzgsGYcbjBQxAA== -----END PKCS7-----"); internal static readonly byte[] Pkcs7EmptyPemBytes = ByteUtils.AsciiBytes( @"-----BEGIN PKCS7----- MCcGCSqGSIb3DQEHAqAaMBgCAQExADALBgkqhkiG9w0BBwGgAKEAMQA= -----END PKCS7-----"); internal static readonly byte[] Pkcs7EmptyDerBytes = ( "302706092A864886F70D010702A01A30180201013100300B06092A864886F70D" + "010701A000A1003100").HexToByteArray(); internal static readonly byte[] Pkcs7SingleDerBytes = ( "3082021406092A864886F70D010702A0820205308202010201013100300B0609" + "2A864886F70D010701A08201E9308201E530820152A0030201020210D5B5BC1C" + "458A558845BFF51CB4DFF31C300906052B0E03021D05003011310F300D060355" + "040313064D794E616D65301E170D3130303430313038303030305A170D313130" + "3430313038303030305A3011310F300D060355040313064D794E616D6530819F" + "300D06092A864886F70D010101050003818D0030818902818100B11E30EA8742" + "4A371E30227E933CE6BE0E65FF1C189D0D888EC8FF13AA7B42B68056128322B2" + "1F2B6976609B62B6BC4CF2E55FF5AE64E9B68C78A3C2DACC916A1BC7322DD353" + "B32898675CFB5B298B176D978B1F12313E3D865BC53465A11CCA106870A4B5D5" + "0A2C410938240E92B64902BAEA23EB093D9599E9E372E48336730203010001A3" + "46304430420603551D01043B3039801024859EBF125E76AF3F0D7979B4AC7A96" + "A1133011310F300D060355040313064D794E616D658210D5B5BC1C458A558845" + "BFF51CB4DFF31C300906052B0E03021D0500038181009BF6E2CF830ED485B86D" + "6B9E8DFFDCD65EFC7EC145CB9348923710666791FCFA3AB59D689FFD7234B787" + "2611C5C23E5E0714531ABADB5DE492D2C736E1C929E648A65CC9EB63CD84E57B" + "5909DD5DDF5DBBBA4A6498B9CA225B6E368B94913BFC24DE6B2BD9A26B192B95" + 
"7304B89531E902FFC91B54B237BB228BE8AFCDA264763100").HexToByteArray(); internal static readonly byte[] Pkcs7SinglePemBytes = ByteUtils.AsciiBytes( @"-----BEGIN PKCS7----- MIICFAYJKoZIhvcNAQcCoIICBTCCAgECAQExADALBgkqhkiG9w0BBwGgggHpMIIB 5TCCAVKgAwIBAgIQ1bW8HEWKVYhFv/UctN/zHDAJBgUrDgMCHQUAMBExDzANBgNV BAMTBk15TmFtZTAeFw0xMDA0MDEwODAwMDBaFw0xMTA0MDEwODAwMDBaMBExDzAN BgNVBAMTBk15TmFtZTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAsR4w6odC SjceMCJ+kzzmvg5l/xwYnQ2Ijsj/E6p7QraAVhKDIrIfK2l2YJtitrxM8uVf9a5k 6baMeKPC2syRahvHMi3TU7MomGdc+1spixdtl4sfEjE+PYZbxTRloRzKEGhwpLXV CixBCTgkDpK2SQK66iPrCT2VmenjcuSDNnMCAwEAAaNGMEQwQgYDVR0BBDswOYAQ JIWevxJedq8/DXl5tKx6lqETMBExDzANBgNVBAMTBk15TmFtZYIQ1bW8HEWKVYhF v/UctN/zHDAJBgUrDgMCHQUAA4GBAJv24s+DDtSFuG1rno3/3NZe/H7BRcuTSJI3 EGZnkfz6OrWdaJ/9cjS3hyYRxcI+XgcUUxq6213kktLHNuHJKeZIplzJ62PNhOV7 WQndXd9du7pKZJi5yiJbbjaLlJE7/CTeayvZomsZK5VzBLiVMekC/8kbVLI3uyKL 6K/NomR2MQA= -----END PKCS7-----"); internal static readonly byte[] MicrosoftDotComSslCertBytes = ( "308205943082047CA00302010202103DF70C5D9903F8D8868B9B8CCF20DF6930" + "0D06092A864886F70D01010B05003077310B3009060355040613025553311D30" + "1B060355040A131453796D616E74656320436F72706F726174696F6E311F301D" + "060355040B131653796D616E746563205472757374204E6574776F726B312830" + "260603550403131F53796D616E74656320436C61737320332045562053534C20" + "4341202D204733301E170D3134313031353030303030305A170D313631303135" + "3233353935395A3082010F31133011060B2B0601040182373C02010313025553" + "311B3019060B2B0601040182373C0201020C0A57617368696E67746F6E311D30" + "1B060355040F131450726976617465204F7267616E697A6174696F6E31123010" + "06035504051309363030343133343835310B3009060355040613025553310E30" + "0C06035504110C0539383035323113301106035504080C0A57617368696E6774" + "6F6E3110300E06035504070C075265646D6F6E643118301606035504090C0F31" + "204D6963726F736F667420576179311E301C060355040A0C154D6963726F736F" + "667420436F72706F726174696F6E310E300C060355040B0C054D53434F4D311A" + 
"301806035504030C117777772E6D6963726F736F66742E636F6D30820122300D" + "06092A864886F70D01010105000382010F003082010A0282010100A46861FA9D" + "5DB763633BF5A64EF6E7C2C2367F48D2D46643A22DFCFCCB24E58A14D0F06BDC" + "956437F2A56BA4BEF70BA361BF12964A0D665AFD84B0F7494C8FA4ABC5FCA2E0" + "17C06178AEF2CDAD1B5F18E997A14B965C074E8F564970607276B00583932240" + "FE6E2DD013026F9AE13D7C91CC07C4E1E8E87737DC06EF2B575B89D62EFE4685" + "9F8255A123692A706C68122D4DAFE11CB205A7B3DE06E553F7B95F978EF8601A" + "8DF819BF32040BDF92A0DE0DF269B4514282E17AC69934E8440A48AB9D1F5DF8" + "9A502CEF6DFDBE790045BD45E0C94E5CA8ADD76A013E9C978440FC8A9E2A9A49" + "40B2460819C3E302AA9C9F355AD754C86D3ED77DDAA3DA13810B4D0203010001" + "A38201803082017C30310603551D11042A302882117777772E6D6963726F736F" + "66742E636F6D821377777771612E6D6963726F736F66742E636F6D3009060355" + "1D1304023000300E0603551D0F0101FF0404030205A0301D0603551D25041630" + "1406082B0601050507030106082B0601050507030230660603551D20045F305D" + "305B060B6086480186F84501071706304C302306082B06010505070201161768" + "747470733A2F2F642E73796D63622E636F6D2F637073302506082B0601050507" + "020230191A1768747470733A2F2F642E73796D63622E636F6D2F727061301F06" + "03551D230418301680140159ABE7DD3A0B59A66463D6CF200757D591E76A302B" + "0603551D1F042430223020A01EA01C861A687474703A2F2F73722E73796D6362" + "2E636F6D2F73722E63726C305706082B06010505070101044B3049301F06082B" + "060105050730018613687474703A2F2F73722E73796D63642E636F6D30260608" + "2B06010505073002861A687474703A2F2F73722E73796D63622E636F6D2F7372" + "2E637274300D06092A864886F70D01010B0500038201010015F8505B627ED7F9" + "F96707097E93A51E7A7E05A3D420A5C258EC7A1CFE1843EC20ACF728AAFA7A1A" + "1BC222A7CDBF4AF90AA26DEEB3909C0B3FB5C78070DAE3D645BFCF840A4A3FDD" + "988C7B3308BFE4EB3FD66C45641E96CA3352DBE2AEB4488A64A9C5FB96932BA7" + "0059CE92BD278B41299FD213471BD8165F924285AE3ECD666C703885DCA65D24" + "DA66D3AFAE39968521995A4C398C7DF38DFA82A20372F13D4A56ADB21B582254" + "9918015647B5F8AC131CC5EB24534D172BC60218A88B65BCF71C7F388CE3E0EF" 
+ "697B4203720483BB5794455B597D80D48CD3A1D73CBBC609C058767D1FF060A6" + "09D7E3D4317079AF0CD0A8A49251AB129157F9894A036487").HexToByteArray(); internal static readonly byte[] MicrosoftDotComIssuerBytes = ( "3082052B30820413A00302010202107EE14A6F6FEFF2D37F3FAD654D3ADAB430" + "0D06092A864886F70D01010B05003081CA310B30090603550406130255533117" + "3015060355040A130E566572695369676E2C20496E632E311F301D060355040B" + "1316566572695369676E205472757374204E6574776F726B313A303806035504" + "0B1331286329203230303620566572695369676E2C20496E632E202D20466F72" + "20617574686F72697A656420757365206F6E6C79314530430603550403133C56" + "6572695369676E20436C6173732033205075626C6963205072696D6172792043" + "657274696669636174696F6E20417574686F72697479202D204735301E170D31" + "33313033313030303030305A170D3233313033303233353935395A3077310B30" + "09060355040613025553311D301B060355040A131453796D616E74656320436F" + "72706F726174696F6E311F301D060355040B131653796D616E74656320547275" + "7374204E6574776F726B312830260603550403131F53796D616E74656320436C" + "61737320332045562053534C204341202D20473330820122300D06092A864886" + "F70D01010105000382010F003082010A0282010100D8A1657423E82B64E232D7" + "33373D8EF5341648DD4F7F871CF84423138EFB11D8445A18718E601626929BFD" + "170BE1717042FEBFFA1CC0AAA3A7B571E8FF1883F6DF100A1362C83D9CA7DE2E" + "3F0CD91DE72EFB2ACEC89A7F87BFD84C041532C9D1CC9571A04E284F84D935FB" + "E3866F9453E6728A63672EBE69F6F76E8E9C6004EB29FAC44742D27898E3EC0B" + "A592DCB79ABD80642B387C38095B66F62D957A86B2342E859E900E5FB75DA451" + "72467013BF67F2B6A74D141E6CB953EE231A4E8D48554341B189756A4028C57D" + "DDD26ED202192F7B24944BEBF11AA99BE3239AEAFA33AB0A2CB7F46008DD9F1C" + "CDDD2D016680AFB32F291D23B88AE1A170070C340F0203010001A382015D3082" + "0159302F06082B0601050507010104233021301F06082B060105050730018613" + "687474703A2F2F73322E73796D63622E636F6D30120603551D130101FF040830" + "060101FF02010030650603551D20045E305C305A0604551D2000305230260608" + "2B06010505070201161A687474703A2F2F7777772E73796D617574682E636F6D" + 
"2F637073302806082B06010505070202301C1A1A687474703A2F2F7777772E73" + "796D617574682E636F6D2F72706130300603551D1F042930273025A023A02186" + "1F687474703A2F2F73312E73796D63622E636F6D2F706361332D67352E63726C" + "300E0603551D0F0101FF04040302010630290603551D1104223020A41E301C31" + "1A30180603550403131153796D616E746563504B492D312D353333301D060355" + "1D0E041604140159ABE7DD3A0B59A66463D6CF200757D591E76A301F0603551D" + "230418301680147FD365A7C2DDECBBF03009F34339FA02AF333133300D06092A" + "864886F70D01010B050003820101004201557BD0161A5D58E8BB9BA84DD7F3D7" + "EB139486D67F210B47BC579B925D4F059F38A4107CCF83BE0643468D08BC6AD7" + "10A6FAABAF2F61A863F265DF7F4C8812884FB369D9FF27C00A97918F56FB89C4" + "A8BB922D1B73B0C6AB36F4966C2008EF0A1E6624454F670040C8075474333BA6" + "ADBB239F66EDA2447034FB0EEA01FDCF7874DFA7AD55B75F4DF6D63FE086CE24" + "C742A9131444354BB6DFC960AC0C7FD993214BEE9CE4490298D3607B5CBCD530" + "2F07CE4442C40B99FEE69FFCB07886516DD12C9DC696FB8582BB042FF76280EF" + "62DA7FF60EAC90B856BD793FF2806EA3D9B90F5D3A071D9193864B294CE1DCB5" + "E1E0339DB3CB36914BFEA1B4EEF0F9").HexToByteArray(); internal static readonly byte[] MicrosoftDotComRootBytes = ( "308204D3308203BBA003020102021018DAD19E267DE8BB4A2158CDCC6B3B4A30" + "0D06092A864886F70D01010505003081CA310B30090603550406130255533117" + "3015060355040A130E566572695369676E2C20496E632E311F301D060355040B" + "1316566572695369676E205472757374204E6574776F726B313A303806035504" + "0B1331286329203230303620566572695369676E2C20496E632E202D20466F72" + "20617574686F72697A656420757365206F6E6C79314530430603550403133C56" + "6572695369676E20436C6173732033205075626C6963205072696D6172792043" + "657274696669636174696F6E20417574686F72697479202D204735301E170D30" + "36313130383030303030305A170D3336303731363233353935395A3081CA310B" + "300906035504061302555331173015060355040A130E566572695369676E2C20" + "496E632E311F301D060355040B1316566572695369676E205472757374204E65" + "74776F726B313A3038060355040B133128632920323030362056657269536967" + 
"6E2C20496E632E202D20466F7220617574686F72697A656420757365206F6E6C" + "79314530430603550403133C566572695369676E20436C617373203320507562" + "6C6963205072696D6172792043657274696669636174696F6E20417574686F72" + "697479202D20473530820122300D06092A864886F70D01010105000382010F00" + "3082010A0282010100AF240808297A359E600CAAE74B3B4EDC7CBC3C451CBB2B" + "E0FE2902F95708A364851527F5F1ADC831895D22E82AAAA642B38FF8B955B7B1" + "B74BB3FE8F7E0757ECEF43DB66621561CF600DA4D8DEF8E0C362083D5413EB49" + "CA59548526E52B8F1B9FEBF5A191C23349D843636A524BD28FE870514DD18969" + "7BC770F6B3DC1274DB7B5D4B56D396BF1577A1B0F4A225F2AF1C926718E5F406" + "04EF90B9E400E4DD3AB519FF02BAF43CEEE08BEB378BECF4D7ACF2F6F03DAFDD" + "759133191D1C40CB7424192193D914FEAC2A52C78FD50449E48D6347883C6983" + "CBFE47BD2B7E4FC595AE0E9DD4D143C06773E314087EE53F9F73B8330ACF5D3F" + "3487968AEE53E825150203010001A381B23081AF300F0603551D130101FF0405" + "30030101FF300E0603551D0F0101FF040403020106306D06082B060105050701" + "0C0461305FA15DA05B3059305730551609696D6167652F6769663021301F3007" + "06052B0E03021A04148FE5D31A86AC8D8E6BC3CF806AD448182C7B192E302516" + "23687474703A2F2F6C6F676F2E766572697369676E2E636F6D2F76736C6F676F" + "2E676966301D0603551D0E041604147FD365A7C2DDECBBF03009F34339FA02AF" + "333133300D06092A864886F70D0101050500038201010093244A305F62CFD81A" + "982F3DEADC992DBD77F6A5792238ECC4A7A07812AD620E457064C5E797662D98" + "097E5FAFD6CC2865F201AA081A47DEF9F97C925A0869200DD93E6D6E3C0D6ED8" + "E606914018B9F8C1EDDFDB41AAE09620C9CD64153881C994EEA284290B136F8E" + "DB0CDD2502DBA48B1944D2417A05694A584F60CA7E826A0B02AA251739B5DB7F" + "E784652A958ABD86DE5E8116832D10CCDEFDA8822A6D281F0D0BC4E5E71A2619" + "E1F4116F10B595FCE7420532DBCE9D515E28B69E85D35BEFA57D4540728EB70E" + "6B0E06FB33354871B89D278BC4655F0D86769C447AF6955CF65D320833A454B6" + "183F685CF2424A853854835FD1E82CF2AC11D6A8ED636A").HexToByteArray(); internal static readonly byte[] Rsa384CertificatePemBytes = ByteUtils.AsciiBytes( @"-----BEGIN CERTIFICATE----- 
MIICTzCCAgmgAwIBAgIJAMQtYhFJ0+5jMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYD VQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHUmVkbW9uZDEY MBYGA1UECgwPTWljcm9zb2Z0IENvcnAuMSAwHgYDVQQLDBcuTkVUIEZyYW1ld29y ayAoQ29yZUZ4KTEgMB4GA1UEAwwXUlNBIDM4NC1iaXQgQ2VydGlmaWNhdGUwHhcN MTYwMzAyMTY1OTA0WhcNMTYwNDAxMTY1OTA0WjCBkjELMAkGA1UEBhMCVVMxEzAR BgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1JlZG1vbmQxGDAWBgNVBAoMD01p Y3Jvc29mdCBDb3JwLjEgMB4GA1UECwwXLk5FVCBGcmFtZXdvcmsgKENvcmVGeCkx IDAeBgNVBAMMF1JTQSAzODQtYml0IENlcnRpZmljYXRlMEwwDQYJKoZIhvcNAQEB BQADOwAwOAIxANrMIthuZxV1Ay4x8gbc/BksZeLVEInlES0JbyiCr9tbeM22Vy/S 9h2zkEciMuPZ9QIDAQABo1AwTjAdBgNVHQ4EFgQU5FG2Fmi86hJOCf4KnjaxOGWV dRUwHwYDVR0jBBgwFoAU5FG2Fmi86hJOCf4KnjaxOGWVdRUwDAYDVR0TBAUwAwEB /zANBgkqhkiG9w0BAQUFAAMxAEzDg/u8TlApCnE8qxhcbTXk2MbX+2n5PCn+MVrW wggvPj3b2WMXsVWiPr4S1Y/nBA== -----END CERTIFICATE-----"); internal static readonly byte[] ECDsabrainpoolP160r1_Pfx = ( "308203D00201033082039606092A864886F70D010701A0820387048203833082" + "037F3082029F06092A864886F70D010706A08202903082028C02010030820285" + "06092A864886F70D010701301C060A2A864886F70D010C0106300E0408E4D7F5" + "F71CA4D5380202080080820258F19503250C93322C81CCC92C57AD76AD4DDF79" + "10DBAB6A63D6AAF3F470AC5283BDEB532086D3379B7A3D68D17FAC483EBEA029" + "1D2B5F862885E048A3034580A41F238AA836B9E94C12B0656B51C72355AED1DD" + "19FE771E3768095D6467FC8742BE0BC5D65360CD875D35C23D272842F64791A1" + "53F96AFBD3D7660EC016BF9D59B2B68C2A34D93B133697D6B77DB27649BEEABC" + "0B68D35DB3779DD4C871C9C26799E6ABB5E9048DDC44C6E6310F3A023AD09E97" + "1AB1DF38FDF3091FB35125EA3A14F5D72A00EC4C637951F026DE79C0E30E0244" + "808FB46EFD4EA9C67411DC2B13842B273F405F6D58D45D1D2D47BC1362ED9884" + "C2C44EA334A3B02C7E661F489DED798B63D64F90916596BADC87C68C868FCECB" + "6F246410186BBB2F2DC2ED24BF215AA54570072E21970CF856398BB1FD8C2F61" + "0788231C51D45CE471A235464147A405799F8CBE39AA30A8BFD2534C738AE330" + "8771394D871429EF2D6AB6381F793D7CBC0374D4E529B8B6AA37BE04FBE71A9A" + 
"A7954814C0C8841740539ED97DB56C3CBE5851C9D875E4B6023AE095B62C9DC5" + "36E06DA40C4B874776CBABDDAB50BDD5ECF9D997EEB1483D3AC23E6F37DD4CBD" + "64163E7A115BCE44554C53DD860B83CBE3B35F1E26B87185C602E4FFB3A99458" + "0D6A9334F74AA29B3609926FE86F197C955CBAEC2A41EE1572A4295F62D4D9CA" + "050CD933BC0BA43D7744EAFA9D6B8253241EB36C605DC334A6470BC709F13985" + "8AC238FD7F3C14EDDAB6E29996FE966A96EAC23CF17063C89315734D76CCB21F" + "94A7E4A44A5F6DCB93CEEEFB539664296F8F609CFE293200FE4B5EE57AB3A1E7" + "A3483DC6243081D906092A864886F70D010701A081CB0481C83081C53081C206" + "0B2A864886F70D010C0A0102A0818B308188301C060A2A864886F70D010C0103" + "300E0408CD30F3C5A9918832020208000468BF29F33058622BA159CDD3DE2B49" + "CBDD736BF1483FF4D43BACCC93B544A513D5E47DB4FECADBB4E3277A6B90345D" + "7E73F507924A615D968F807834D3796EFB0A3EF214A75883E3AB90086DA2418B" + "0B2D657DEA39A8600172B6975FFB39E88863DB11283A5CEA1FCB312530230609" + "2A864886F70D01091531160414472193B362B056F6D6928EFF4C43FF1EFEB173" + "4E30313021300906052B0E03021A05000414B703685D5039D8EEF1A46F772F31" + "F177FDE874EC0408B4EF89F18902CE9502020800").HexToByteArray(); internal static readonly byte[] ECDsabrainpoolP160r1_Explicit_Pfx = ( "30820501020103308204C706092A864886F70D010701A08204B8048204B43082" + "04B03082032F06092A864886F70D010706A08203203082031C02010030820315" + "06092A864886F70D010701301C060A2A864886F70D010C0106300E0408C55E7D" + "72B26355D202020800808202E8B8BCB9180C8751F860C5F77C04CC921A89D836" + "E8BC06DA99238EF8D65FB3473AF97D930B93471B28F08F483062BCEDB44287FA" + "E813B6CA812393475F50BB2D0AD233A8CE0832DECE54EF78A1F289A7802A9900" + "BAA2B7392BCF6C963D54D8DD63898D5D2FA472E77194763822157664417D93D8" + "BF740A4A5683FFFDF8B4CC7E074C6DCFF73E21368D743F0A1CE33484B02A4502" + "F7340A853F82F606B10DEA7239EF56C8DBDAED8DD3C956DD4D3E42CA517318E6" + "0DF70721469C512E120C500956D960A59EEB4A3A6541891319ACA68AB99462EA" + "CB59B52F6C6BFCF24EEA4E19BDDC5DDE1B6F36549DD589E1B10CAA02D4A53C36" + "1F1B9F101F9375394CF9BC8D941F94CC4F128EC861AA6B4182E1F81D2C0BADEA" + 
"2E6596BAC8819DE293ECEEC7C54BE97CD3917E0497F2559D44EF3484618DCB9B" + "85820762F392925BB36BD482DF544B614CDF9C6728BD92E858C1DF61435E7A03" + "DED27DA460603A61BE7DB93870E9530EB51384663E42A49C342891B8E757ED49" + "18A590D59734EA1C95E992CD93B80EBD3C246999305C204A813E0DCF26E64446" + "15A79E74042C7EAD4FEF0E68AA9DF627B5F4C037BF728015B6BBFA046CAA805C" + "BE7B1867262B36172E8759FAE7B965FF9B98D3539A48525E10A57A84C55AEFAC" + "3ED9025AB8B0680E0896DDD776C0AFC1A95BDD5DBE0ECCEB860B3CD3D6A2A493" + "2BC7774246A6936AFABA9BA8292841F9D6417AFFB00872E9B4ADF11889AEF20A" + "FCB8EAEBADAF38A2A240D36940B1585B37F7CA6A887EE1FBA199990FC104CD1F" + "A983CC2CE91156559EFCFC0F25B7C24B161DF4B4436F14428C4AE06F49FCC328" + "D00891A44AFAE5412FD330E23CFAE6107B4C965DFDB6335E8EFDF2C40767846B" + "C95ABF784DE155EED92DAB7A265DC17BC3ADA68D04E378D2E4E8924B05937092" + "E779EB04899E4FB19AAE7AA5FCF8D7A33BA464E4BB1FFB4E4D4CD71152F42B68" + "F5AB298B10DEB653C7F77F66B761DFD61E4E2DDD13B0B15639473BF5C3B8A31D" + "3D2334287F61E1A06F44CD3F2E74F59F43876F0D923082017906092A864886F7" + "0D010701A082016A04820166308201623082015E060B2A864886F70D010C0A01" + "02A082012630820122301C060A2A864886F70D010C0103300E0408A92CDE5D9A" + "F5278D0202080004820100A302E03B1BDF59D4ECD6F0FE7745F7C0DCE2CCEF0E" + "B26A7B92839B60288B05445BA1C91109D7E632E0C7B742E2D7D0947573AFEF1F" + "FAFCF8135DA3B5EE26A8E3AB7F415A8A19112724F850F024D3527F1FE2A303B1" + "34A644307AC6816E59E08540FA782351B27E37775AF3CD904E50A1A76C7C4F34" + "7EE78A1ED51FF71D00954130369012750746A280983E883E59AFDBBCCC7D1AA0" + "ECDCF2079ECFA4645E156ACC5FD6913763FC93C2E0C572042D00FE4EEB5E75DE" + "28C21FA1A7356C4071572DF23CC23833EA26705C0AA080636E27512B5F5755FE" + "A0514A31833D52C48A743BCDC0B58257FEDD23EE4EDBC06B574019E792B44BD6" + "3F3770875A25075183AF2C3125302306092A864886F70D01091531160414141C" + "1C8591A700DDE70FAC750C1539B2DFECAA3C30313021300906052B0E03021A05" + "0004143706E218219A899C700BD7AE3E8650FD1B2885AB0408E77FDD798BCADE" + "3C02020800").HexToByteArray(); internal static readonly byte[] 
ECDsabrainpoolP160r1_CertificatePemBytes = ByteUtils.AsciiBytes( @"-----BEGIN CERTIFICATE----- MIIB+TCCAbagAwIBAgIJAIJ8gBL6U36GMAoGCCqGSM49BAMCMHAxCzAJBgNVBAYT AlVTMRUwEwYDVQQIDAxOb3J0aCBEYWtvdGExDjAMBgNVBAcMBUZhcmdvMRgwFgYD VQQKDA9NaWNyb3NvZnQgQ29ycC4xIDAeBgNVBAsMFy5ORVQgRnJhbWV3b3JrIChD b3JlRngpMB4XDTE2MDYxMDE0NTYzOFoXDTE2MDcxMDE0NTYzOFowcDELMAkGA1UE BhMCVVMxFTATBgNVBAgMDE5vcnRoIERha290YTEOMAwGA1UEBwwFRmFyZ28xGDAW BgNVBAoMD01pY3Jvc29mdCBDb3JwLjEgMB4GA1UECwwXLk5FVCBGcmFtZXdvcmsg KENvcmVGeCkwQjAUBgcqhkjOPQIBBgkrJAMDAggBAQEDKgAEQk2dep8HoNJcbCal ie5QIMYsNnphtOo9WUCgrrzEG3wfrxz39HcAXaNQME4wHQYDVR0OBBYEFPprBFD9 qDQynQJmJUpVKv9WR8z5MB8GA1UdIwQYMBaAFPprBFD9qDQynQJmJUpVKv9WR8z5 MAwGA1UdEwQFMAMBAf8wCgYIKoZIzj0EAwIDMQAwLgIVAN3U12PFcEe4HHi+Rio+ xk3lf6EbAhUAqdeGDOXgpHEoWEmzOQ6nWWQik1k= -----END CERTIFICATE-----"); internal static readonly byte[] ECDsabrainpoolP160r1_ExplicitCertificatePemBytes = ByteUtils.AsciiBytes( @"-----BEGIN CERTIFICATE----- MIICijCCAkigAwIBAgIJAJVtMTsUqcjsMAoGCCqGSM49BAMCMHAxCzAJBgNVBAYT AlVTMRUwEwYDVQQIDAxOb3J0aCBEYWtvdGExDjAMBgNVBAcMBUZhcmdvMRgwFgYD VQQKDA9NaWNyb3NvZnQgQ29ycC4xIDAeBgNVBAsMFy5ORVQgRnJhbWV3b3JrIChD b3JlRngpMB4XDTE2MDYxMDE1MDg1NVoXDTE2MDcxMDE1MDg1NVowcDELMAkGA1UE BhMCVVMxFTATBgNVBAgMDE5vcnRoIERha290YTEOMAwGA1UEBwwFRmFyZ28xGDAW BgNVBAoMD01pY3Jvc29mdCBDb3JwLjEgMB4GA1UECwwXLk5FVCBGcmFtZXdvcmsg KENvcmVGeCkwgdMwgaQGByqGSM49AgEwgZgCAQEwIAYHKoZIzj0BAQIVAOleSl9z cFncYN/HrZWz2BOVFWIPMCwEFDQOe+KigOt04r5hutp0XZfo98MABBQeWJqFlUI0 EhNPqi297JXI2GdeWAQpBL7VrxbqP2pPYpOMRjHrWve9vNvDFmfLR3oajsM4+UdB ZpyXYxbaYyECFQDpXkpfc3BZ3GDfWZHUUClAnmD8CQIBAQMqAARz9ueEHonciPIW lTsd673ZaNpP9GMoFfHns3DnUC0pC1Grh+6sZcPIo1AwTjAdBgNVHQ4EFgQU65OI c9u4x/ZfIRyjcSaZTUKSsuIwHwYDVR0jBBgwFoAU65OIc9u4x/ZfIRyjcSaZTUKS suIwDAYDVR0TBAUwAwEB/zAKBggqhkjOPQQDAgMwADAtAhUAxMT7z8lLv7hgWmGh 5siYmHkAExoCFFaaS2r7/kdkXsauyr37q6ewD6s+ -----END CERTIFICATE-----"); internal static readonly ECDsaCngKeyValues ECDsaCng256PublicKey = new ECDsaCngKeyValues() { QX = 
"448d98ee08aeba0d8b40f3c6dbd500e8b69f07c70c661771655228ea5a178a91".HexToByteArray(), QY = "0ef5cb1759f6f2e062021d4f973f5bb62031be87ae915cff121586809e3219af".HexToByteArray(), D = "692837e9cf613c0e290462a6f08faadcc7002398f75598d5554698a0cb51cf47".HexToByteArray(), }; internal static readonly byte[] ECDsa256Certificate = ("308201223081c9a00302010202106a3c9e85ba6af1ac4f08111d8bdda340300906072a8648ce3d0401301431123010060355" + "04031309456332353655736572301e170d3135303931303231333533305a170d3136303931303033333533305a3014311230" + "10060355040313094563323536557365723059301306072a8648ce3d020106082a8648ce3d03010703420004448d98ee08ae" + "ba0d8b40f3c6dbd500e8b69f07c70c661771655228ea5a178a910ef5cb1759f6f2e062021d4f973f5bb62031be87ae915cff" + "121586809e3219af300906072a8648ce3d04010349003046022100f221063dca71955d17c8f0e0f63a144c4065578fd9f68e" + "1ae6a7683e209ea742022100ed1db6a8be27cfb20ab43e0ca061622ceff26f7249a0f791e4d6be1a4e52adfa").HexToByteArray(); internal static readonly ECDsaCngKeyValues ECDsaCng384PublicKey = new ECDsaCngKeyValues() { QX = "c59eca607aa5559e6b2f8ac2eeb12d9ab47f420feabeb444c3f71520d7f2280439979323ab5a67344811d296fef6d1bd".HexToByteArray(), QY = "d15f307cc6cc6c8baeeeb168bfb02c34d6eb0621efb3d06ad31c06b29eaf6ec2ec67bf288455e729d82e5a6439f70901".HexToByteArray(), D = "f55ba33e28cea32a014e2fe1213bb4d41cef361f1fee022116b15be50feb96bc946b10a46a9a7a94176787e0928a3e1d".HexToByteArray(), }; internal static readonly byte[] ECDsa384Certificate = ("3082015f3081e6a00302010202101e78eb573e70a2a64744672296988ad7300906072a8648ce3d0401301431123010060355" + "04031309456333383455736572301e170d3135303931303231333634365a170d3136303931303033333634365a3014311230" + "10060355040313094563333834557365723076301006072a8648ce3d020106052b8104002203620004c59eca607aa5559e6b" + "2f8ac2eeb12d9ab47f420feabeb444c3f71520d7f2280439979323ab5a67344811d296fef6d1bdd15f307cc6cc6c8baeeeb1" + "68bfb02c34d6eb0621efb3d06ad31c06b29eaf6ec2ec67bf288455e729d82e5a6439f70901300906072a8648ce3d04010369" + 
"003066023100a8fbaeeae61953897eae5f0beeeffaca48e89bc0cb782145f39f4ba5b03390ce6a28e432e664adf5ebc6a802" + "040b238b023100dcc19109383b9482fdda68f40a63ee41797dbb8f25c0284155cc4238d682fbb3fb6e86ea0933297e850a26" + "16f6c39bbf").HexToByteArray(); internal static readonly ECDsaCngKeyValues ECDsaCng521PublicKey = new ECDsaCngKeyValues() { QX = "0134af29d1fe5e581fd2ff6194263abcb6f8cb4d9c08bdb384ede9b8663ae2f4e1af6c85eacc69dc768fbfcd856630792e05484cefb1fefb693081dc6490dac579c0".HexToByteArray(), QY = "00bfe103f53cbcb039873b1a3e81a9da9abd71995e722318367281d30b35a338bf356662342b653eff38e85881863b7128ddbb856d8ae158365550bb6330b93d4ef0".HexToByteArray(), D = "0153603164bcef5c9f62388d06dcbf5681479be4397c07ff6f44bb848465e3397537d5f61abc7bc9266d4df6bae1df4847fcfd3dabdda37a2fe549b821ea858d088d".HexToByteArray(), }; internal static readonly ECDsaCngKeyValues ECDsabrainpoolP160r1_PublicKey = new ECDsaCngKeyValues() { QX = "424D9D7A9F07A0D25C6C26A589EE5020C62C367A".HexToByteArray(), QY = "61B4EA3D5940A0AEBCC41B7C1FAF1CF7F477005D".HexToByteArray(), }; internal static readonly byte[] ECDsa521Certificate = ("308201a93082010ca00302010202102c3134fe79bb9daa48df6431f4c1e4f3300906072a8648ce3d04013014311230100603" + "5504031309456335323155736572301e170d3135303931303231333832305a170d3136303931303033333832305a30143112" + "30100603550403130945633532315573657230819b301006072a8648ce3d020106052b8104002303818600040134af29d1fe" + "5e581fd2ff6194263abcb6f8cb4d9c08bdb384ede9b8663ae2f4e1af6c85eacc69dc768fbfcd856630792e05484cefb1fefb" + "693081dc6490dac579c000bfe103f53cbcb039873b1a3e81a9da9abd71995e722318367281d30b35a338bf356662342b653e" + "ff38e85881863b7128ddbb856d8ae158365550bb6330b93d4ef0300906072a8648ce3d040103818b0030818702420090bdf5" + "dfb328501910da4b02ba3ccd41f2bb073608c55f0f2b2e1198496c59b44db9e516a6a63ba7841d22cf590e39d3f09636d0eb" + "cd59a92c105f499e1329615602414285111634719b9bbd10eb7d08655b2fa7d7eb5e225bfdafef15562ae2f9f0c6a943a7bd" + 
"f0e39223d807b5e2e617a8e424294d90869567326531bcad0f893a0f3a").HexToByteArray(); internal static readonly byte[] EccCert_KeyAgreement = ( "308201553081FDA00302010202105A1C956450FFED894E85DC61E11CD968300A" + "06082A8648CE3D04030230143112301006035504030C09454344482054657374" + "301E170D3135303433303138303131325A170D3136303433303138323131325A" + "30143112301006035504030C094543444820546573743059301306072A8648CE" + "3D020106082A8648CE3D0301070342000477DE73EA00A82250B69E3F24A14CDD" + "C4C47C83993056DD0A2C6C17D5C8E7A054216B9253533D12C082E0C8B91B3B10" + "CDAB564820D417E6D056E4E34BCCA87301A331302F300E0603551D0F0101FF04" + "0403020009301D0603551D0E0416041472DE05F588BF2741C8A28FF99EA399F7" + "AAB2C1B3300A06082A8648CE3D040302034700304402203CDF0CC71C63747BDA" + "2D2D563115AE68D34867E74BCA02738086C316B846CDF2022079F3990E5DCCEE" + "627B2E6E42317D4D279181EE695EE239D0C8516DD53A896EC3").HexToByteArray(); internal static readonly byte[] ECDsa224Certificate = ( "3082026630820214A003020102020900B94BCCE3179BAA21300A06082A8648CE" + "3D040302308198310B30090603550406130255533113301106035504080C0A57" + "617368696E67746F6E3110300E06035504070C075265646D6F6E64311E301C06" + "0355040A0C154D6963726F736F667420436F72706F726174696F6E3120301E06" + "0355040B0C172E4E4554204672616D65776F726B2028436F7265465829312030" + "1E06035504030C174E4953542F53454320502D3232342054657374204B657930" + "1E170D3135313233313232353532345A170D3136303133303232353532345A30" + "8198310B30090603550406130255533113301106035504080C0A57617368696E" + "67746F6E3110300E06035504070C075265646D6F6E64311E301C060355040A0C" + "154D6963726F736F667420436F72706F726174696F6E3120301E060355040B0C" + "172E4E4554204672616D65776F726B2028436F72654658293120301E06035504" + "030C174E4953542F53454320502D3232342054657374204B6579304E30100607" + "2A8648CE3D020106052B81040021033A000452FF02B55AE35AA7FFF1B0A82DC2" + "260083DD7D5893E85FBAD1D663B718176F7D5D9A04B8AEA968E9FECFEE348CDB" + "49A938401783BADAC484A350304E301D0603551D0E041604140EA9C5C4681A6E" + 
"48CE64E47EE8BBB0BA5FF8AB3E301F0603551D230418301680140EA9C5C4681A" + "6E48CE64E47EE8BBB0BA5FF8AB3E300C0603551D13040530030101FF300A0608" + "2A8648CE3D040302034000303D021D00AC10B79B6FD6BEE113573A1B68A3B771" + "3B9DA2719A9588376E334811021C1AAC3CA829DA79CE223FA83283E6F0A5A59D" + "2399E140D957C1C9DDAF").HexToByteArray(); internal static readonly byte[] ECDsaP256_DigitalSignature_Pfx_Windows = ( "308204470201033082040306092A864886F70D010701A08203F4048203F03082" + "03EC3082016D06092A864886F70D010701A082015E0482015A30820156308201" + "52060B2A864886F70D010C0A0102A081CC3081C9301C060A2A864886F70D010C" + "0103300E0408EC154269C5878209020207D00481A80BAA4AF8660E6FAB7B050B" + "8EF604CFC378652B54FE005DC3C7E2F12E5EFC7FE2BB0E1B3828CAFE752FD64C" + "7CA04AF9FBC5A1F36E30D7D299C52BF6AE65B54B9240CC37C04E7E06330C24E9" + "6D19A67B7015A6BF52C172FFEA719B930DBE310EEBC756BDFF2DF2846EE973A6" + "6C63F4E9130083D64487B35C1941E98B02B6D5A92972293742383C62CCAFB996" + "EAD71A1DF5D0380EFFF25BA60B233A39210FD7D55A9B95CD8A440DF666317430" + "1306092A864886F70D0109153106040401000000305D06092B06010401823711" + "0131501E4E004D006900630072006F0073006F0066007400200053006F006600" + "7400770061007200650020004B00650079002000530074006F00720061006700" + "65002000500072006F007600690064006500723082027706092A864886F70D01" + "0706A0820268308202640201003082025D06092A864886F70D010701301C060A" + "2A864886F70D010C0106300E0408175CCB1790C48584020207D080820230E956" + "E38768A035D8EA911283A63F2E5B6E5B73231CFC4FFD386481DE24B7BB1B0995" + "D614A0D1BD086215CE0054E01EF9CF91B7D80A4ACB6B596F1DFD6CBCA71476F6" + "10C0D6DD24A301E4B79BA6993F15D34A8ADB7115A8605E797A2C6826A4379B65" + "90B56CA29F7C36997119257A827C3CA0EC7F8F819536208C650E324C8F884794" + "78705F833155463A4EFC02B5D5E2608B83F3CAF6C9BB97C1BBBFC6C5584BDCD3" + "9C46A3944915B3845C41429C7792EB4FA3A7EDECCD801F31A4B6EF57D808AEEA" + "AF3D1F55F378EF8EF9632CED16EDA3EFBE4A9D5C5F608CA90A9AC8D3F86462AC" + "219BFFD0B8A87DDD22CF029230369B33FC2B488B5F82702EFC3F270F912EAD2E" + 
"2402D99F8324164C5CD5959F22DEC0D1D212345B4B3F62848E7D9CFCE2224B61" + "976C107E1B218B4B7614FF65BCCA388F85D6920270D4C588DEED323C416D014F" + "5F648CC2EE941855EB3C889DCB9A345ED11CAE94041A86ED23E5789137A3DE22" + "5F4023D260BB686901F2149B5D7E37102FFF5282995892BDC2EAB48BD5DA155F" + "72B1BD05EE3EDD32160AC852E5B47CA9AEACE24946062E9D7DCDA642F945C9E7" + "C98640DFAC7A2B88E76A560A0B4156611F9BE8B3613C71870F035062BD4E3D9F" + "D896CF373CBFBFD31410972CDE50739FFB8EC9180A52D7F5415EBC997E5A4221" + "349B4BB7D53614630EEEA729A74E0C0D20726FDE5814321D6C265A7DC6BA24CA" + "F2FCE8C8C162733D58E02E08921E70EF838B95C96A5818489782563AE8A2A85F" + "64A95EB350FF8EF6D625AD031BCD303B301F300706052B0E03021A0414C8D96C" + "ED140F5CA3CB92BEFCA32C690804576ABF0414B59D4FECA9944D40EEFDE7FB96" + "196D167B0FA511020207D0").HexToByteArray(); // The PFX in ECDsaP256_DigitalSignature_Pfx_Windows washed through OpenSSL internal static readonly byte[] ECDsaP256_DigitalSignature_Pfx_OpenSsl = ( "308203BE0201033082038406092A864886F70D010701A0820375048203713082" + "036D308201FF06092A864886F70D010706A08201F0308201EC020100308201E5" + "06092A864886F70D010701301C060A2A864886F70D010C0106300E040888F579" + "00302DB63A02020800808201B8F5EDB44F8B2572E85E52946B233A47F03DF776" + "BC3A05CB74B4145A9D3AE3C7FD61B330194E1E154B89929F3FA3908FEE95512A" + "052FDDE8E1913E2CCFD803EE6D868696D86934DCF5300DC951F36BE93E3F4AA2" + "096B926CF8410AF77FFA087213F84F17EB1D36B61AF4AAD87288301569239B9A" + "B66392ADA3D468DC33F42FCEC3BEE78148CA72686BB733DB89FC951AE92FD0F7" + "D5937DE78B1AF984BD13E5127F73A91D40097976AEF00157DCC34B16C1724E5B" + "88090A1A2DA7337C72720A7ED8F1A89C09AB4143C3F6D80B1925AB8F744069F6" + "399D997827F7D0931DCB5E3B09783D1D8555910906B33AD03759D292021C21A2" + "9EA2F29CF9BA4D66E4E69AA9FDCCCB4D49A806DBB804EBEBAED7AE0DD4AD2133" + "1482A3CC5DB246CE59998824B7E46F337F8887D990FA1756D6A039D293B243BB" + "DCFB19AD613A42C5778E7094EA43C3136EF359209790462A36CF87D89B6D76CF" + "BD8C34B8C41D96C83683751B8B067F42017A37D05B599B82B70830B5A93499A0" + 
"A4791F5DAB2143C8DF35EC7E88B71A0990E7F6FEA304CE594C9280D7B9120816" + "45C87112B1ED85124533792ABEF8B4946F811FB9FE922F6F786E5BFD7D7C43F6" + "48AB43C43F3082016606092A864886F70D010701A0820157048201533082014F" + "3082014B060B2A864886F70D010C0A0102A081B43081B1301C060A2A864886F7" + "0D010C0103300E0408F58B95D6E307213C02020800048190E0FB35890FFB6F30" + "7DD0BD8B10EB10488EAB18702E5AC9F67C557409DF8E3F382D06060FB3B5A08D" + "1EA31313E80A0488B4034C8906BD873A5308E412783684A35DBD9EEACF5D090D" + "AE7390E3309D016C41133946A6CF70E32BE8002CD4F06A90F5BBCE6BF932EC71" + "F634312D315310CE2015B30C51FCC54B60FB3D6E7B734C1ADEBE37056A46AB3C" + "23276B16603FC50C318184302306092A864886F70D01091531160414F20D17B7" + "9B898999F0AA1D5EA333FAEF2BDB2A29305D06092B060104018237110131501E" + "4E004D006900630072006F0073006F0066007400200053006F00660074007700" + "61007200650020004B00650079002000530074006F0072006100670065002000" + "500072006F0076006900640065007230313021300906052B0E03021A05000414" + "96C2244022AB2B809E0F97270F7F4EA7769DD26F04084C0E2946D65F8F220202" + "0800").HexToByteArray(); internal struct ECDsaCngKeyValues { public byte[] QX; public byte[] QY; public byte[] D; } internal static readonly RSAParameters RsaBigExponentParams = new RSAParameters { Modulus = ( "AF81C1CBD8203F624A539ED6608175372393A2837D4890E48A19DED369731156" + "20968D6BE0D3DAA38AA777BE02EE0B6B93B724E8DCC12B632B4FA80BBC925BCE" + "624F4CA7CC606306B39403E28C932D24DD546FFE4EF6A37F10770B2215EA8CBB" + "5BF427E8C4D89B79EB338375100C5F83E55DE9B4466DDFBEEE42539AEF33EF18" + "7B7760C3B1A1B2103C2D8144564A0C1039A09C85CF6B5974EB516FC8D6623C94" + "AE3A5A0BB3B4C792957D432391566CF3E2A52AFB0C142B9E0681B8972671AF2B" + "82DD390A39B939CF719568687E4990A63050CA7768DCD6B378842F18FDB1F6D9" + "FF096BAF7BEB98DCF930D66FCFD503F58D41BFF46212E24E3AFC45EA42BD8847").HexToByteArray(), Exponent = new byte[] { 0x02, 0x00, 0x00, 0x04, 0x41 }, D = ( "64AF9BA5262483DA92B53F13439FD0EF13012F879ABC03CB7C06F1209904F352" + 
"C1F223519DC48BFAEEBB511B0D955F6167B50E034FEA2ABC590B4EA9FBF0C51F" + "9FFEA16F7927AE681CBF7358452BCA29D58705E0CAA106013B09A6F5F5911498" + "D2C4FD6915585488E5F3AD89836C93C8775AFAB4D13C2014266BE8EE6B8AA66C" + "9E942D493466C8E3A370F8E6378CE95D637E03673670BE4BCACE5FCDADD238D9" + "F32CA35DE845776AC4BF36118812328C493F91C25A9BD42672D0AFAFDE0AF7E6" + "19078D48B485EF91933DDCFFB54587B8F512D223C81894E91784982F3C5C6587" + "1351F4655AB023C4AD99B6B03A96F9046CE124A471E828F05F8DB3BC7CCCF2D1").HexToByteArray(), P = ( "E43A3826A97204AE3CD8649A84DB4BBF0725C4B08F8C43840557A0CD04E313AF" + "6D0460DDE69CDC508AD043D72514DA7A66BC918CD9624F485644B9DEEAB2BE0E" + "112956D472CF0FD51F80FD33872D2DCC562A0588B012E8C90CE7D254B94792C6" + "E7A02B3CCAA150E67A64377ACC49479AD5EB555493B2100CB0410956F7D73BF5").HexToByteArray(), Q = ( "C4DD2D7ADD6CA50740D3973F40C4DEBDBAB51F7F5181ABAE726C32596A3EDD0A" + "EE44DAADDD8A9B7A864C4FFDAE00C4CB1F10177BA01C0466F812D522610F8C45" + "43F1C3EF579FA9E13AE8DA1A4A8DAE307861D2CEAC03560279B61B6514989883" + "FE86C5C7420D312838FC2F70BED59B5229654201882664CEFA38B48A3723E9CB").HexToByteArray(), DP = ( "09ECF151F5CDD2C9E6E52682364FA5B4ED094F622E4031BF46B851358A584DCC" + "B5328B0BD9B63589183F491593D2A3ACAD14E0AACDA1F181B5C7D93C57ED26E6" + "2C9FC26AF37E4A0644ECE82A7BA8AED88FF1D8E9C56CC66385CDB244EB3D57D1" + "7E6AD420B19C9E2BEE18192B816265B74DA55FA3825F922D9D8E835B76BF3071").HexToByteArray(), DQ = ( "89B33B695789174B88368C494639D4D3267224572A40B2FE61910384228E3DBD" + "11EED9040CD03977E9E0D7FC8BFC4BF4A93283529FF1D96590B18F4EABEF0303" + "794F293E88DC761B3E23AFECB19F29F8A4D2A9058B714CF3F4D10733F13EA72B" + "BF1FBEC8D71E106D0CE2115F3AD2DE020325C3879A091C413CD6397F83B3CB89").HexToByteArray(), InverseQ = ( "7C57ED74C9176FBA76C23183202515062C664D4D49FF3E037047A309DA10F159" + "0CE01B7A1CD1A4326DC75883DFF93110AB065AAED140C9B98176A8810809ADEC" + "75E86764A0951597EF467FA8FD509181CD2E491E43BE41084E5BE1B562EE76E9" + 
"F92C9AB1E5AEAD9D291A6337E4DE85BDE67A0D72B4E55ADCF207F7A5A5225E15").HexToByteArray() }; internal static readonly byte[] BigExponentPkcs10Bytes = ( "30820311308201F902010030818A310B30090603550406130255533113301106" + "03550408130A57617368696E67746F6E3110300E060355040713075265646D6F" + "6E64311E301C060355040A13154D6963726F736F667420436F72706F72617469" + "6F6E3120301E060355040B13172E4E4554204672616D65776F726B2028436F72" + "6546582931123010060355040313096C6F63616C686F737430820124300D0609" + "2A864886F70D010101050003820111003082010C0282010100AF81C1CBD8203F" + "624A539ED6608175372393A2837D4890E48A19DED36973115620968D6BE0D3DA" + "A38AA777BE02EE0B6B93B724E8DCC12B632B4FA80BBC925BCE624F4CA7CC6063" + "06B39403E28C932D24DD546FFE4EF6A37F10770B2215EA8CBB5BF427E8C4D89B" + "79EB338375100C5F83E55DE9B4466DDFBEEE42539AEF33EF187B7760C3B1A1B2" + "103C2D8144564A0C1039A09C85CF6B5974EB516FC8D6623C94AE3A5A0BB3B4C7" + "92957D432391566CF3E2A52AFB0C142B9E0681B8972671AF2B82DD390A39B939" + "CF719568687E4990A63050CA7768DCD6B378842F18FDB1F6D9FF096BAF7BEB98" + "DCF930D66FCFD503F58D41BFF46212E24E3AFC45EA42BD884702050200000441" + "A03F303D06092A864886F70D01090E3130302E302C0603551D11042530238704" + "7F00000187100000000000000000000000000000000182096C6F63616C686F73" + "74300D06092A864886F70D01010B050003820101003BCAE7E02D3A828435110C" + "8A65197FF1A027EC5ACA37EBE29B6E7093A4BDCA9BDA8E244DC05846AA9F186D" + "2EBBDF6474BB09ECF5A3C11F3A7E56D9D489C3D4AE2DCF5D52ABFCDFED6D4623" + "AF7C7D2E52A189BC4A0BFC5EB96EC158A96B292DF6E4ADCAE5233A7E1598444E" + "23F732526B71172266E45706F90EFAB0945A75D446F0A6547C788DD81AD6F4D1" + "E7FD0E8884083AF52003D9CD38B3A140F2E552CF3FBF0B4C771E5745C6DA6F26" + "DCFD0FEB87B9FDD2F4724A09DE1FB4C55E439F43C6E37A866BA19494B210D294" + "699B3C957C6DD22E9B63DBAE3B5AE62919F0EA3DF304C7DD9E0BBA0E7053605F" + "D066A788426159BB937C58E5A110461DC9364CA7CA").HexToByteArray(); internal static DSAParameters GetDSA1024Params() { DSAParameters p = new DSAParameters(); p.G = ( 
"6BC366B66355545E098F1FE90E5469B567E09FA79D817F2B367B45DECD4301A59C81D6911F7691D370E15AC692C04BC11872" + "C171A7FE654E963D7DDA575A9E98CE026FB7D3934A258608134A8EC5ED69A2AEDC89401B67ADDE427F17EDAEB72D7AF45D9A" + "B1D59E1B13D4EFBD17C764330267DDE352C20E05B80DB3C109FE8B9C").HexToByteArray(); p.P = ( "C16D26C74D6C1627799C0918548E553FE58C7881DA484629CAF64311F4B27CFEF6BDB0F21206B0FFC4999A2FED53B43B9EE2" + "910C68DA2C436A8018F4938F6472369F5647D005BCC96E22590CC15E3CD4EA0D132F5DA5AF6AAA0807B0CC4EF3404AF542F4" + "546B37BDD6A47E641130837DB99397C845635D7DC36D0537E4A84B31").HexToByteArray(); p.Q = "D83C0ECB73551E2FE30D51FCF4236C651883ADD7".HexToByteArray(); p.X = "C02678007779E52E360682214BD47F8FAF42BC2D".HexToByteArray(); p.Y = ( "690BB37A9145E05D6E7B47C457898AAEDD72501C9D16E79B1AD75A872CF017AA90BBFB90F1B3B7F5C03C87E46E8725665526" + "FD34157B26F596A1F0997F59F3E65EFC615A552D5E7569C5FFC4593D5A0299110E71C97E1221A5A03FE9A6935AEDD88EF0B3" + "B2F79D3A99ED75F7B871E6EAF2680D96D574A5F4C13BACE3B4B44DE1").HexToByteArray(); return p; } } }
// // DebugTests.cs // // Author: // Lluis Sanchez Gual <lluis@novell.com> // // Copyright (c) 2009 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.IO;
using System.Threading;
using System.Reflection;
using System.Collections.Generic;

using Mono.Debugging.Soft;
using Mono.Debugging.Client;
using NUnit.Framework;

namespace Mono.Debugging.Tests
{
	/// <summary>
	/// Base fixture for debugger integration tests. Starts the test app under a
	/// debugger session, drives it via breakpoints/stepping, and checks that the
	/// debugger stops at source positions identified by /*marker*/ comments in the
	/// test app sources. Engine-specific pieces (session/start-info creation, file
	/// access) live in another part of this partial class.
	/// </summary>
	[TestFixture]
	public abstract partial class DebugTests
	{
		const string TestAppExeName = "MonoDevelop.Debugger.Tests.TestApp.exe";
		const string TestAppProjectDirName = "MonoDevelop.Debugger.Tests.TestApp";

		// Signalled every time the target stops (breakpoint, exception, step).
		protected readonly ManualResetEvent targetStoppedEvent = new ManualResetEvent (false);
		readonly string EngineId;
		string TestName = "";
		ITextFile SourceFile;

		// Source location recorded by the last stop event; consulted by CheckPosition.
		SourceLocation lastStoppedPosition;

		// Whether expression evaluation may invoke methods on the target process.
		public bool AllowTargetInvokes { get; protected set; }
		public DebuggerSession Session { get; private set; }
		// Current frame; updated on every breakpoint/exception/stop event.
		public StackFrame Frame { get; private set; }

		protected DebugTests (string engineId)
		{
			EngineId = engineId;
		}

		/// <summary>Skips the current test unless it runs under the soft debugger.</summary>
		public virtual void IgnoreCorDebugger (string message = "")
		{
			if (!(Session is SoftDebuggerSession)) {
				Assert.Ignore (message);
			}
		}

		/// <summary>Skips the current test when it runs under the soft debugger.</summary>
		public virtual void IgnoreSoftDebugger (string message = "")
		{
			if (Session is SoftDebuggerSession) {
				Assert.Ignore (message);
			}
		}

		// TODO: implement in another part of the class
		#region Partial Definitions

		/// <summary>
		/// Returns parent directory of target executable
		/// </summary>
		//protected string TargetExeDirectory { get { throw new NotImplementedException (); } }

		/// <summary>
		/// Returns parent directory of target project sources
		/// </summary>
		//protected string TargetProjectSourceDir { get { throw new NotImplementedException (); } }

		/// <summary>
		/// Creates debugger session. The type of session is dependent on <paramref name="engineId"/>
		/// </summary>
		/// <param name="test">test name, usually used as entry point method in target exe</param>
		/// <param name="engineId">the ID of debugger engine</param>
		//protected DebuggerSession CreateSession (string test, string engineId);

		/// <summary>
		/// Creates start info to run the app
		/// </summary>
		/// <param name="test">test name</param>
		/// <param name="engineId">the ID of debugger engine</param>
		//protected DebuggerStartInfo CreateStartInfo (string test, string engineId);

		/// <summary>
		/// Reads file from given path
		/// </summary>
		/// <param name="sourcePath"></param>
		/// <returns></returns>
		//protected ITextFile ReadFile (string sourcePath)

		#endregion

		[TestFixtureSetUp]
		public virtual void SetUp ()
		{
			SetUpPartial ();
		}

		partial void SetUpPartial();

		[TestFixtureTearDown]
		public virtual void TearDown ()
		{
			TearDownPartial ();
			if (Session != null) {
				Session.Exit ();
				Session.Dispose ();
				Session = null;
			}
		}

		partial void TearDownPartial ();

		protected virtual string TargetExePath {
			get { return Path.Combine (TargetExeDirectory, TestAppExeName); }
		}

		/// <summary>
		/// Creates the session, wires up stop/exception/exit handlers, launches the
		/// test app and blocks until the initial "break" breakpoint is hit (or fails
		/// the test on premature exit / 30s timeout).
		/// </summary>
		/// <param name="test">Test name; also names the source file ("test.cs") in the test app.</param>
		protected virtual void Start (string test)
		{
			TestName = test;
			Session = CreateSession (test, EngineId);

			var dsi = CreateStartInfo (test, EngineId);
			var soft = dsi as SoftDebuggerStartInfo;
			if (soft != null) {
				// Restrict "user code" to the test app assembly so stepping and
				// IsExternalCode behave predictably.
				var assemblyName = AssemblyName.GetAssemblyName (TargetExePath);
				soft.UserAssemblyNames = new List<AssemblyName> {assemblyName};
			}

			var ops = new DebuggerSessionOptions {
				ProjectAssembliesOnly = true,
				EvaluationOptions = EvaluationOptions.DefaultOptions
			};
			ops.EvaluationOptions.AllowTargetInvoke = AllowTargetInvokes;
			ops.EvaluationOptions.EvaluationTimeout = 100000;

			var sourcePath = Path.Combine (TargetProjectSourceDir, test + ".cs");
			SourceFile = ReadFile(sourcePath);
			AddBreakpoint ("break");

			var done = new ManualResetEvent (false);

			Session.TargetHitBreakpoint += (sender, e) => {
				Frame = e.Backtrace.GetFrame (0);
				lastStoppedPosition = Frame.SourceLocation;
				targetStoppedEvent.Set ();
				done.Set ();
			};

			Session.TargetExceptionThrown += (sender, e) => {
				Frame = e.Backtrace.GetFrame (0);
				// Prefer the innermost frame that is user code.
				for (int i = 0; i < e.Backtrace.FrameCount; i++) {
					if (!e.Backtrace.GetFrame (i).IsExternalCode) {
						Frame = e.Backtrace.GetFrame (i);
						break;
					}
				}
				lastStoppedPosition = Frame.SourceLocation;
				targetStoppedEvent.Set ();
			};

			Session.TargetStopped += (sender, e) => {
				//This can be null in case of ForcedStop
				//which is called when exception is thrown
				//when Continue & Stepping is executed
				if (e.Backtrace != null) {
					Frame = e.Backtrace.GetFrame (0);
					lastStoppedPosition = Frame.SourceLocation;
					targetStoppedEvent.Set ();
				} else {
					Console.WriteLine ("e.Backtrace is null");
				}
			};

			var targetExited = new ManualResetEvent (false);
			Session.TargetExited += delegate {
				targetExited.Set ();
			};

			Session.Run (dsi, ops);
			Session.ExceptionHandler = (ex) => {
				Console.WriteLine ("Session.ExceptionHandler:" + Environment.NewLine + ex.ToString ());
				HandleAnyException(ex);
				return true;
			};

			switch (WaitHandle.WaitAny (new WaitHandle[]{ done, targetExited }, 30000)) {
			case 0:
				//Breakpoint is hit good... run tests now
				break;
			case 1:
				throw new Exception ("Test application exited before hitting breakpoint");
			default:
				throw new Exception ("Timeout while waiting for initial breakpoint");
			}

			if (Session is SoftDebuggerSession) {
				Console.WriteLine ("SDB protocol version:" + ((SoftDebuggerSession)Session).ProtocolVersion);
			}
		}

		/// <summary>
		/// Resolves a /*marker*/ comment in <paramref name="file"/> to a 1-based
		/// line/column. <paramref name="offset"/> shifts the line; when
		/// <paramref name="statement"/> is given, the column is where that statement
		/// text starts on the resolved line, otherwise column 1.
		/// Fails the test when the marker (or statement) cannot be found.
		/// </summary>
		void GetLineAndColumn (string breakpointMarker, int offset, string statement, out int line, out int col, ITextFile file)
		{
			int i = file.Text.IndexOf ("/*" + breakpointMarker + "*/", StringComparison.Ordinal);
			if (i == -1)
				Assert.Fail ("Break marker not found: " + breakpointMarker + " in " + file.Name);
			file.GetLineColumnFromPosition (i, out line, out col);
			line += offset;
			if (statement != null) {
				int lineStartPosition = file.GetPositionFromLineColumn (line, 1);
				string lineText = file.GetText (lineStartPosition, lineStartPosition + file.GetLineLength (line));
				col = lineText.IndexOf (statement, StringComparison.Ordinal) + 1;
				if (col == 0)
					Assert.Fail ("Failed to find statement:" + statement + " at " + file.Name + "(" + line + ")");
			} else {
				col = 1;
			}
		}

		/// <summary>
		/// Adds a breakpoint at the position of a /*marker*/ comment
		/// (defaults to the test's SourceFile).
		/// </summary>
		public Breakpoint AddBreakpoint (string breakpointMarker, int offset = 0, string statement = null, ITextFile file = null)
		{
			file = file ?? SourceFile;
			int col, line;
			GetLineAndColumn (breakpointMarker, offset, statement, out line, out col, file);
			var bp = new Breakpoint (file.Name, line, col);
			Session.Breakpoints.Add (bp);
			return bp;
		}

		/// <summary>
		/// Runs to the marker position using a one-shot run-to-cursor breakpoint,
		/// then verifies the stop position.
		/// </summary>
		public void RunToCursor (string breakpointMarker, int offset = 0, string statement = null, ITextFile file = null)
		{
			file = file ?? SourceFile;
			int col, line;
			GetLineAndColumn (breakpointMarker, offset, statement, out line, out col, file);
			targetStoppedEvent.Reset ();
			Session.Breakpoints.RemoveRunToCursorBreakpoints ();
			var bp = new RunToCursorBreakpoint (file.Name, line, col);
			Session.Breakpoints.Add (bp);
			Session.Continue ();
			CheckPosition (breakpointMarker, offset, statement);
		}

		/// <summary>
		/// Resets session state between tests and continues until the target is
		/// parked at the "break" marker again.
		/// </summary>
		public void InitializeTest ()
		{
			Session.Breakpoints.Clear ();
			Session.Options.EvaluationOptions = EvaluationOptions.DefaultOptions;
			Session.Options.ProjectAssembliesOnly = true;
			Session.Options.StepOverPropertiesAndOperators = false;
			AddBreakpoint ("break");
			// Loop: the target may stop elsewhere first; keep continuing until the
			// "break" marker is the stop position (silent=true avoids failing).
			while (!CheckPosition ("break", 0, silent: true)) {
				targetStoppedEvent.Reset ();
				Session.Continue ();
			}
		}

		/// <summary>Evaluates an expression in the current frame, waiting for the result.</summary>
		public ObjectValue Eval (string exp)
		{
			return Frame.GetExpressionValue (exp, true).Sync ();
		}

		/// <summary>Waits for the next target stop; fails the test on timeout.</summary>
		public void WaitStop (int miliseconds)
		{
			if (!targetStoppedEvent.WaitOne (miliseconds)) {
				Assert.Fail ("WaitStop failure: Target stop timeout");
			}
		}

		/// <summary>
		/// Verifies the last stop position matches the /*guid*/ marker (plus
		/// <paramref name="offset"/> lines) and, optionally, that the text at the
		/// stop column equals <paramref name="statement"/>. With silent=true it
		/// returns false instead of failing the test.
		/// </summary>
		public bool CheckPosition (string guid, int offset = 0, string statement = null, bool silent = false, ITextFile file = null)
		{
			file = file ?? SourceFile;
			if (!targetStoppedEvent.WaitOne (6000)) {
				if (!silent)
					Assert.Fail ("CheckPosition failure: Target stop timeout");
				return false;
			}
			if (lastStoppedPosition.FileName == file.Name) {
				int i = file.Text.IndexOf ("/*" + guid + "*/", StringComparison.Ordinal);
				if (i == -1) {
					if (!silent)
						Assert.Fail ("CheckPosition failure: Guid marker not found:" + guid + " in file:" + file.Name);
					return false;
				}
				int line, col;
				file.GetLineColumnFromPosition (i, out line, out col);
				if ((line + offset) != lastStoppedPosition.Line) {
					if (!silent)
						Assert.Fail ("CheckPosition failure: Wrong line Expected:" + (line + offset) + " Actual:" + lastStoppedPosition.Line + " in file:" + file.Name);
					return false;
				}
				if (!string.IsNullOrEmpty (statement)) {
					int position = file.GetPositionFromLineColumn (lastStoppedPosition.Line, lastStoppedPosition.Column);
					string actualStatement = file.GetText (position, position + statement.Length);
					if (statement != actualStatement) {
						if (!silent)
							Assert.AreEqual (statement, actualStatement);
						return false;
					}
				}
			} else {
				if (!silent)
					Assert.Fail ("CheckPosition failure: Wrong file Excpected:" + file.Name + " Actual:" + lastStoppedPosition.FileName);
				return false;
			}
			return true;
		}

		public void StepIn (string guid, string statement)
		{
			StepIn (guid, 0, statement);
		}

		/// <summary>Single-steps into the next instruction and verifies the stop position.</summary>
		public void StepIn (string guid, int offset = 0, string statement = null)
		{
			targetStoppedEvent.Reset ();
			Session.StepInstruction ();
			CheckPosition (guid, offset, statement);
		}

		public void StepOver (string guid, string statement)
		{
			StepOver (guid, 0, statement);
		}

		/// <summary>Steps over the next instruction and verifies the stop position.</summary>
		public void StepOver (string guid, int offset = 0, string statement = null)
		{
			targetStoppedEvent.Reset ();
			Session.NextInstruction ();
			CheckPosition (guid, offset, statement);
		}

		public void StepOut (string guid, string statement)
		{
			StepOut (guid, 0, statement);
		}

		/// <summary>Steps out of the current method and verifies the stop position.</summary>
		public void StepOut (string guid, int offset = 0, string statement = null)
		{
			targetStoppedEvent.Reset ();
			Session.Finish ();
			CheckPosition (guid, offset, statement);
		}

		public void Continue (string guid, string statement)
		{
			Continue (guid, 0, statement);
		}

		/// <summary>Continues execution and verifies the next stop position.</summary>
		public void Continue (string guid, int offset = 0, string statement = null, ITextFile file = null)
		{
			targetStoppedEvent.Reset ();
			Session.Continue ();
			CheckPosition(guid, offset, statement, file: file);
		}

		/// <summary>
		/// Selects the next test-app scenario by assigning its NextMethodToCall
		/// field via expression evaluation, then continues execution.
		/// </summary>
		public void StartTest (string methodName)
		{
			if (!targetStoppedEvent.WaitOne (3000)) {
				Assert.Fail ("StartTest failure: Target stop timeout");
			}
			Assert.AreEqual ('"' + methodName + '"', Eval ("NextMethodToCall = \"" + methodName + "\";").Value);
			targetStoppedEvent.Reset ();
			Session.Continue ();
		}

		/// <summary>Moves the instruction pointer to the marker position (Set Next Statement).</summary>
		public void SetNextStatement (string guid, int offset = 0, string statement = null, ITextFile file = null)
		{
			file = file ?? SourceFile;
			int line, column;
			GetLineAndColumn (guid, offset, statement, out line, out column, file);
			Session.SetNextStatement (file.Name, line, column);
		}

		/// <summary>Adds a catchpoint for the named exception type.</summary>
		public void AddCatchpoint (string exceptionName, bool includeSubclasses)
		{
			Session.Breakpoints.Add (new Catchpoint (exceptionName, includeSubclasses));
		}

		// Implemented (optionally) in another part of the partial class; called from
		// Session.ExceptionHandler in Start().
		partial void HandleAnyException(Exception exception);
	}

	/// <summary>
	/// Extension helpers for synchronously waiting on asynchronous
	/// ObjectValue evaluations.
	/// </summary>
	static class EvalHelper
	{
		/// <summary>True when <paramref name="ver"/> is at least major.minor.</summary>
		public static bool AtLeast (this Version ver, int major, int minor)
		{
			if ((ver.Major > major) || ((ver.Major == major && ver.Minor >= minor)))
				return true;
			else
				return false;
		}

		/// <summary>
		/// Blocks until the value has finished evaluating (throws after 8s without
		/// progress) and returns it.
		/// </summary>
		public static ObjectValue Sync (this ObjectValue val)
		{
			if (!val.IsEvaluating)
				return val;

			object locker = new object ();
			EventHandler h = delegate {
				lock (locker) {
					Monitor.PulseAll (locker);
				}
			};
			val.ValueChanged += h;

			lock (locker) {
				while (val.IsEvaluating) {
					if (!Monitor.Wait (locker, 8000))
						throw new Exception ("Timeout while waiting for value evaluation");
				}
			}

			val.ValueChanged -= h;
			return val;
		}

		/// <summary>Gets a named child value and waits for its evaluation; null when absent.</summary>
		public static ObjectValue GetChildSync (this ObjectValue val, string name, EvaluationOptions ops)
		{
			var result = val.GetChild (name, ops);
			return result != null ? result.Sync () : null;
		}

		/// <summary>Gets all child values, waiting for each one's evaluation.</summary>
		public static ObjectValue[] GetAllChildrenSync (this ObjectValue val)
		{
			var children = val.GetAllChildren ();
			foreach (var child in children) {
				child.Sync ();
			}
			return children;
		}
	}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using Microsoft.VisualStudio.Services.Agent.Util;
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization;
using System.Text;
using System.Threading;

namespace Microsoft.VisualStudio.Services.Agent
{
    /// <summary>How task signature verification failures are treated.</summary>
    public enum SignatureVerificationMode
    {
        Error,
        Warning,
        None
    }

    /// <summary>
    /// Signature verification configuration (mode + trusted fingerprints).
    /// </summary>
    // FIX: [DataContract] was missing. Every sibling settings type in this file
    // ([DataMember]-annotated AgentSettings, AutoLogonSettings, etc.) carries it,
    // and without [DataContract] the [DataMember(EmitDefaultValue = false)]
    // annotations below are ignored by DataContract-based serialization.
    [DataContract]
    public sealed class SignatureVerificationSettings
    {
        [DataMember(EmitDefaultValue = false)]
        public SignatureVerificationMode Mode { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public List<string> Fingerprints { get; set; }
    }

    //
    // Settings are persisted in this structure
    //
    [DataContract]
    public sealed class AgentSettings
    {
        [DataMember(EmitDefaultValue = false)]
        public bool AcceptTeeEula { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public int AgentId { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string AgentName { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public bool AlwaysExtractTask { get; set; }

        // Derived: an agent is considered hosted when a notification pipe or
        // socket has been configured. Not persisted.
        [IgnoreDataMember]
        public bool IsHosted => !string.IsNullOrEmpty(NotificationPipeName) || !string.IsNullOrEmpty(NotificationSocketAddress);

        [DataMember(EmitDefaultValue = false)]
        public string Fingerprint
        {
            // This setter is for backwards compatibility with the top level fingerprint setting
            set
            {
                // prefer the new config format to the old
                if (SignatureVerification == null && value != null)
                {
                    SignatureVerification = new SignatureVerificationSettings()
                    {
                        Mode = SignatureVerificationMode.Error,
                        Fingerprints = new List<string>() { value }
                    };
                }
            }
        }

        [DataMember(EmitDefaultValue = false)]
        public string NotificationPipeName { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string NotificationSocketAddress { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public bool SkipCapabilitiesScan { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public bool SkipSessionRecover { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public SignatureVerificationSettings SignatureVerification { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public bool DisableLogUploads { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public int PoolId { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string PoolName { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string ServerUrl { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string WorkFolder { get; set; }

        // Do not use Project Name any more to save in agent settings file. Ensure to use ProjectId.
        // Deployment Group scenario will not work for project rename scenario if we work with projectName
        [DataMember(EmitDefaultValue = false)]
        public string ProjectName { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public int MachineGroupId { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public int DeploymentGroupId { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string ProjectId { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string CollectionName { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string MonitorSocketAddress { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public int EnvironmentId { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public int EnvironmentVMResourceId { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string EnvironmentName { get; set; }
    }

    /// <summary>Credentials identity used for auto-logon configuration.</summary>
    [DataContract]
    public sealed class AutoLogonSettings
    {
        [DataMember(EmitDefaultValue = false)]
        public string UserDomainName { get; set; }

        [DataMember(EmitDefaultValue = false)]
        public string UserName { get; set; }
    }

    /// <summary>Optional runtime behavior switches persisted alongside settings.</summary>
    [DataContract]
    public sealed class AgentRuntimeOptions
    {
        /// <summary>Use SecureChannel (only valid on Windows)</summary>
        [DataMember(EmitDefaultValue = false)]
        public bool GitUseSecureChannel { get; set; }
    }

    /// <summary>One machine-setup info entry (grouped free-form detail text).</summary>
    [DataContract]
    public class SetupInfo
    {
        [DataMember]
        public string Group { get; set; }

        [DataMember]
        public string Detail { get; set; }
    }

    /// <summary>
    /// Abstraction over the on-disk agent configuration: settings, credentials,
    /// auto-logon, runtime options and setup info files.
    /// </summary>
    [ServiceLocator(Default = typeof(ConfigurationStore))]
    public interface IConfigurationStore : IAgentService
    {
        string RootFolder { get; }
        bool IsConfigured();
        bool IsServiceConfigured();
        bool IsAutoLogonConfigured();
        bool HasCredentials();
        CredentialData GetCredentials();
        AgentSettings GetSettings();
        void SaveCredential(CredentialData credential);
        void SaveSettings(AgentSettings settings);
        void DeleteCredential();
        void DeleteSettings();
        void DeleteAutoLogonSettings();
        void SaveAutoLogonSettings(AutoLogonSettings settings);
        AutoLogonSettings GetAutoLogonSettings();
        AgentRuntimeOptions GetAgentRuntimeOptions();
        IEnumerable<SetupInfo> GetSetupInfo();
        void SaveAgentRuntimeOptions(AgentRuntimeOptions options);
        void DeleteAgentRuntimeOptions();
    }

    /// <summary>
    /// File-backed implementation of <see cref="IConfigurationStore"/>. Loaded
    /// objects are cached in fields after first read; saved files are marked
    /// hidden (and must be deleted before being rewritten, since hidden files
    /// cannot be overwritten in place).
    /// </summary>
    public sealed class ConfigurationStore : AgentService, IConfigurationStore
    {
        private string _binPath;
        private string _configFilePath;
        private string _credFilePath;
        private string _serviceConfigFilePath;
        private string _autoLogonSettingsFilePath;
        private string _runtimeOptionsFilePath;
        private string _setupInfoFilePath;
        private CredentialData _creds;
        private AgentSettings _settings;
        private AutoLogonSettings _autoLogonSettings;
        private AgentRuntimeOptions _runtimeOptions;
        private IEnumerable<SetupInfo> _setupInfo;

        /// <summary>Resolves and logs all well-known config file paths.</summary>
        public override void Initialize(IHostContext hostContext)
        {
            base.Initialize(hostContext);

            var currentAssemblyLocation = System.Reflection.Assembly.GetEntryAssembly().Location;
            Trace.Info("currentAssemblyLocation: {0}", currentAssemblyLocation);

            _binPath = HostContext.GetDirectory(WellKnownDirectory.Bin);
            Trace.Info("binPath: {0}", _binPath);

            RootFolder = HostContext.GetDirectory(WellKnownDirectory.Root);
            Trace.Info("RootFolder: {0}", RootFolder);

            _configFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Agent);
            Trace.Info("ConfigFilePath: {0}", _configFilePath);

            _credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
            Trace.Info("CredFilePath: {0}", _credFilePath);

            _serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service);
            Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath);

            _autoLogonSettingsFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Autologon);
            Trace.Info("AutoLogonSettingsFilePath: {0}", _autoLogonSettingsFilePath);

            _runtimeOptionsFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Options);
            Trace.Info("RuntimeOptionsFilePath: {0}", _runtimeOptionsFilePath);

            _setupInfoFilePath = hostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
            Trace.Info("SetupInfoFilePath: {0}", _setupInfoFilePath);
        }

        public string RootFolder { get; private set; }

        /// <summary>True when the credentials file exists on disk.</summary>
        public bool HasCredentials()
        {
            Trace.Info("HasCredentials()");
            bool credsStored = (new FileInfo(_credFilePath)).Exists;
            Trace.Info("stored {0}", credsStored);
            return credsStored;
        }

        /// <summary>True when the agent settings file exists on disk.</summary>
        public bool IsConfigured()
        {
            Trace.Info("IsConfigured()");
            bool configured = (new FileInfo(_configFilePath)).Exists;
            Trace.Info("IsConfigured: {0}", configured);
            return configured;
        }

        /// <summary>True when the service config file exists on disk.</summary>
        public bool IsServiceConfigured()
        {
            Trace.Info("IsServiceConfigured()");
            bool serviceConfigured = (new FileInfo(_serviceConfigFilePath)).Exists;
            Trace.Info($"IsServiceConfigured: {serviceConfigured}");
            return serviceConfigured;
        }

        /// <summary>True when the auto-logon settings file exists on disk.</summary>
        public bool IsAutoLogonConfigured()
        {
            Trace.Entering();
            bool autoLogonConfigured = (new FileInfo(_autoLogonSettingsFilePath)).Exists;
            Trace.Info($"IsAutoLogonConfigured: {autoLogonConfigured}");
            return autoLogonConfigured;
        }

        /// <summary>Loads (and caches) the stored credentials.</summary>
        public CredentialData GetCredentials()
        {
            if (_creds == null)
            {
                _creds = IOUtil.LoadObject<CredentialData>(_credFilePath);
            }

            return _creds;
        }

        /// <summary>
        /// Loads (and caches) the agent settings. Throws when the settings file
        /// is missing or empty, since a configured agent must have settings.
        /// </summary>
        public AgentSettings GetSettings()
        {
            if (_settings == null)
            {
                AgentSettings configuredSettings = null;
                if (File.Exists(_configFilePath))
                {
                    string json = File.ReadAllText(_configFilePath, Encoding.UTF8);
                    Trace.Info($"Read setting file: {json.Length} chars");
                    configuredSettings = StringUtil.ConvertFromJson<AgentSettings>(json);
                }

                ArgUtil.NotNull(configuredSettings, nameof(configuredSettings));
                _settings = configuredSettings;
            }

            return _settings;
        }

        /// <summary>Loads (and caches) the auto-logon settings.</summary>
        public AutoLogonSettings GetAutoLogonSettings()
        {
            if (_autoLogonSettings == null)
            {
                _autoLogonSettings = IOUtil.LoadObject<AutoLogonSettings>(_autoLogonSettingsFilePath);
            }

            return _autoLogonSettings;
        }

        /// <summary>Loads (and caches) setup info; an absent file yields an empty list.</summary>
        public IEnumerable<SetupInfo> GetSetupInfo()
        {
            if (_setupInfo == null)
            {
                if (File.Exists(_setupInfoFilePath))
                {
                    Trace.Info($"Load machine setup info from {_setupInfoFilePath}");
                    _setupInfo = IOUtil.LoadObject<List<SetupInfo>>(_setupInfoFilePath);
                }
                else
                {
                    _setupInfo = new List<SetupInfo>();
                }
            }

            return _setupInfo;
        }

        /// <summary>Persists credentials to disk and hides the file.</summary>
        public void SaveCredential(CredentialData credential)
        {
            ArgUtil.NotNull(credential, nameof(credential));
            Trace.Info("Saving {0} credential @ {1}", credential.Scheme, _credFilePath);
            if (File.Exists(_credFilePath))
            {
                // Delete existing credential file first, since the file is hidden and not able to overwrite.
                Trace.Info("Delete exist agent credential file.");
                IOUtil.DeleteFile(_credFilePath);
            }

            IOUtil.SaveObject(credential, _credFilePath);
            Trace.Info("Credentials Saved.");
            File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
        }

        /// <summary>Persists agent settings to disk and hides the file.</summary>
        public void SaveSettings(AgentSettings settings)
        {
            Trace.Info("Saving agent settings.");
            if (File.Exists(_configFilePath))
            {
                // Delete existing agent settings file first, since the file is hidden and not able to overwrite.
                Trace.Info("Delete exist agent settings file.");
                IOUtil.DeleteFile(_configFilePath);
            }

            IOUtil.SaveObject(settings, _configFilePath);
            Trace.Info("Settings Saved.");
            File.SetAttributes(_configFilePath, File.GetAttributes(_configFilePath) | FileAttributes.Hidden);
        }

        /// <summary>Persists auto-logon settings to disk and hides the file.</summary>
        public void SaveAutoLogonSettings(AutoLogonSettings autoLogonSettings)
        {
            Trace.Info("Saving autologon settings.");
            if (File.Exists(_autoLogonSettingsFilePath))
            {
                // Delete existing autologon settings file first, since the file is hidden and not able to overwrite.
                Trace.Info("Delete existing autologon settings file.");
                IOUtil.DeleteFile(_autoLogonSettingsFilePath);
            }

            IOUtil.SaveObject(autoLogonSettings, _autoLogonSettingsFilePath);
            Trace.Info("AutoLogon settings Saved.");
            File.SetAttributes(_autoLogonSettingsFilePath, File.GetAttributes(_autoLogonSettingsFilePath) | FileAttributes.Hidden);
        }

        public void DeleteCredential()
        {
            IOUtil.Delete(_credFilePath, default(CancellationToken));
        }

        public void DeleteSettings()
        {
            IOUtil.Delete(_configFilePath, default(CancellationToken));
        }

        public void DeleteAutoLogonSettings()
        {
            IOUtil.Delete(_autoLogonSettingsFilePath, default(CancellationToken));
        }

        /// <summary>Loads (and caches) runtime options; null when never saved.</summary>
        public AgentRuntimeOptions GetAgentRuntimeOptions()
        {
            if (_runtimeOptions == null && File.Exists(_runtimeOptionsFilePath))
            {
                _runtimeOptions = IOUtil.LoadObject<AgentRuntimeOptions>(_runtimeOptionsFilePath);
            }

            return _runtimeOptions;
        }

        /// <summary>Persists runtime options to disk and hides the file.</summary>
        public void SaveAgentRuntimeOptions(AgentRuntimeOptions options)
        {
            Trace.Info("Saving runtime options.");
            if (File.Exists(_runtimeOptionsFilePath))
            {
                // Delete existing runtime options file first, since the file is hidden and not able to overwrite.
                Trace.Info("Delete exist runtime options file.");
                IOUtil.DeleteFile(_runtimeOptionsFilePath);
            }

            IOUtil.SaveObject(options, _runtimeOptionsFilePath);
            Trace.Info("Options Saved.");
            File.SetAttributes(_runtimeOptionsFilePath, File.GetAttributes(_runtimeOptionsFilePath) | FileAttributes.Hidden);
        }

        public void DeleteAgentRuntimeOptions()
        {
            IOUtil.Delete(_runtimeOptionsFilePath, default(CancellationToken));
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.VFX;

namespace UnityEditor.VFX
{
    // How an attribute is accessed by an expression/block: read and/or write on the
    // current particle, or read from the source (spawning) particle.
    [Flags]
    enum VFXAttributeMode
    {
        None = 0,
        Read = 1 << 0,
        Write = 1 << 1,
        ReadWrite = Read | Write,
        ReadSource = 1 << 2,
    }

    // Descriptor for a per-particle attribute: a name, a default VFXValue (which also
    // carries the value type), and a variadic flag (whether the attribute is a scalar
    // component of a variadic vector attribute such as angleX/angleY/angleZ).
    struct VFXAttribute
    {
        public static readonly float kDefaultSize = 0.1f;

        public static readonly VFXAttribute Seed = new VFXAttribute("seed", VFXValueType.Uint32);
        public static readonly VFXAttribute OldPosition = new VFXAttribute("oldPosition", VFXValueType.Float3);
        public static readonly VFXAttribute Position = new VFXAttribute("position", VFXValueType.Float3);
        public static readonly VFXAttribute Velocity = new VFXAttribute("velocity", VFXValueType.Float3);
        public static readonly VFXAttribute Direction = new VFXAttribute("direction", VFXValue.Constant(new Vector3(0.0f, 0.0f, 1.0f)));
        public static readonly VFXAttribute Color = new VFXAttribute("color", VFXValue.Constant(Vector3.one));
        public static readonly VFXAttribute Alpha = new VFXAttribute("alpha", VFXValue.Constant(1.0f));
        public static readonly VFXAttribute Size = new VFXAttribute("size", VFXValue.Constant(kDefaultSize));
        public static readonly VFXAttribute ScaleX = new VFXAttribute("scaleX", VFXValue.Constant(1.0f), VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute ScaleY = new VFXAttribute("scaleY", VFXValue.Constant(1.0f), VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute ScaleZ = new VFXAttribute("scaleZ", VFXValue.Constant(1.0f), VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute Lifetime = new VFXAttribute("lifetime", VFXValueType.Float);
        public static readonly VFXAttribute Age = new VFXAttribute("age", VFXValueType.Float);
        public static readonly VFXAttribute AngleX = new VFXAttribute("angleX", VFXValueType.Float, VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute AngleY = new VFXAttribute("angleY", VFXValueType.Float, VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute AngleZ = new VFXAttribute("angleZ", VFXValueType.Float, VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute AngularVelocityX = new VFXAttribute("angularVelocityX", VFXValueType.Float, VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute AngularVelocityY = new VFXAttribute("angularVelocityY", VFXValueType.Float, VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute AngularVelocityZ = new VFXAttribute("angularVelocityZ", VFXValueType.Float, VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute TexIndex = new VFXAttribute("texIndex", VFXValueType.Float);
        public static readonly VFXAttribute PivotX = new VFXAttribute("pivotX", VFXValue.Constant(0.0f), VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute PivotY = new VFXAttribute("pivotY", VFXValue.Constant(0.0f), VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute PivotZ = new VFXAttribute("pivotZ", VFXValue.Constant(0.0f), VFXVariadic.BelongsToVariadic);
        public static readonly VFXAttribute ParticleId = new VFXAttribute("particleId", VFXValueType.Uint32);
        public static readonly VFXAttribute AxisX = new VFXAttribute("axisX", VFXValue.Constant(Vector3.right));
        public static readonly VFXAttribute AxisY = new VFXAttribute("axisY", VFXValue.Constant(Vector3.up));
        public static readonly VFXAttribute AxisZ = new VFXAttribute("axisZ", VFXValue.Constant(Vector3.forward));
        public static readonly VFXAttribute Alive = new VFXAttribute("alive", VFXValue.Constant(true));
        public static readonly VFXAttribute Mass = new VFXAttribute("mass", VFXValue.Constant(1.0f));
        public static readonly VFXAttribute TargetPosition = new VFXAttribute("targetPosition", VFXValueType.Float3);
        public static readonly VFXAttribute EventCount = new VFXAttribute("eventCount", VFXValueType.Uint32);
        public static readonly VFXAttribute SpawnTime = new VFXAttribute("spawnTime", VFXValueType.Float);

        // All static readonly VFXAttribute fields above, gathered by reflection.
        // NOTE: the derived arrays below depend on this initializer running first;
        // field initialization order (declaration order) is load-bearing here.
        public static readonly VFXAttribute[] AllAttribute = VFXReflectionHelper.CollectStaticReadOnlyExpression<VFXAttribute>(typeof(VFXAttribute));
        public static readonly VFXAttribute[] AllAttributeReadOnly = new VFXAttribute[] { Seed, ParticleId, SpawnTime };
        public static readonly VFXAttribute[] AllAttributeWriteOnly = new VFXAttribute[] { EventCount };
        public static readonly VFXAttribute[] AllAttributeLocalOnly = new VFXAttribute[] { EventCount };

        // Name-only views of the attribute sets, plus set-algebra combinations.
        public static readonly string[] All = AllAttribute.Select(e => e.name).ToArray();
        public static readonly string[] AllReadOnly = AllAttributeReadOnly.Select(e => e.name).ToArray();
        public static readonly string[] AllLocalOnly = AllAttributeLocalOnly.Select(e => e.name).ToArray();
        public static readonly string[] AllWriteOnly = AllAttributeWriteOnly.Select(e => e.name).ToArray();
        public static readonly string[] AllExceptLocalOnly = All.Except(AllLocalOnly).ToArray();
        public static readonly string[] AllWritable = All.Except(AllReadOnly).ToArray();
        public static readonly string[] AllReadWritable = All.Except(AllReadOnly).Except(AllWriteOnly).ToArray();

        // The vector attributes whose per-component scalars are marked
        // BelongsToVariadic above (angle, angularVelocity, pivot, scale).
        public static readonly VFXAttribute[] AllVariadicAttribute = new VFXAttribute[]
        {
            new VFXAttribute("angle", VFXValueType.Float3, VFXVariadic.True),
            new VFXAttribute("angularVelocity", VFXValueType.Float3, VFXVariadic.True),
            new VFXAttribute("pivot", VFXValueType.Float3, VFXVariadic.True),
            new VFXAttribute("scale", VFXValueType.Float3, VFXVariadic.True)
        };

        public static readonly string[] AllVariadic = AllVariadicAttribute.Select(e => e.name).ToArray();
        // All names with per-component scalars replaced by their variadic parents.
        public static readonly string[] AllIncludingVariadic = AllAttribute.Where(e => e.variadic != VFXVariadic.BelongsToVariadic).Select(e => e.name).ToArray().Concat(AllVariadic).ToArray();
        public static readonly string[] AllIncludingVariadicExceptLocalOnly = AllIncludingVariadic.Except(AllLocalOnly).ToArray();
        public static readonly string[] AllIncludingVariadicWritable = AllIncludingVariadic.Except(AllReadOnly).ToArray();
        public static readonly string[] AllIncludingVariadicReadWritable = AllIncludingVariadic.Except(AllReadOnly).Except(AllWriteOnly).ToArray();

        // Maps a value type to a default-constructed constant VFXValue of that type.
        // Throws InvalidOperationException for types with no attribute mapping.
        static private VFXValue GetValueFromType(VFXValueType type)
        {
            switch (type)
            {
                case VFXValueType.Boolean: return VFXValue.Constant<bool>();
                case VFXValueType.Uint32: return VFXValue.Constant<uint>();
                case VFXValueType.Int32: return VFXValue.Constant<int>();
                case VFXValueType.Float: return VFXValue.Constant<float>();
                case VFXValueType.Float2: return VFXValue.Constant<Vector2>();
                case VFXValueType.Float3: return VFXValue.Constant<Vector3>();
                case VFXValueType.Float4: return VFXValue.Constant<Vector4>();
                default: throw new InvalidOperationException(string.Format("Unexpected attribute type: {0}", type));
            }
        }

        // Construct with a type; the default value is the type's default constant.
        public VFXAttribute(string name, VFXValueType type, VFXVariadic variadic = VFXVariadic.False)
        {
            this.name = name;
            this.value = GetValueFromType(type);
            this.variadic = variadic;
        }

        // Construct with an explicit default value (also determines the type).
        public VFXAttribute(string name, VFXValue value, VFXVariadic variadic = VFXVariadic.False)
        {
            this.name = name;
            this.value = value;
            this.variadic = variadic;
        }

        // Looks up an attribute by name among both regular and variadic attributes;
        // throws ArgumentException when the name is unknown.
        public static VFXAttribute Find(string attributeName)
        {
            int index = Array.FindIndex(AllAttribute, e => e.name == attributeName);
            if (index != -1)
                return AllAttribute[index];

            index = Array.FindIndex(AllVariadicAttribute, e => e.name == attributeName);
            if (index != -1)
                return AllVariadicAttribute[index];

            throw new ArgumentException(string.Format("Unable to find attribute expression : {0}", attributeName));
        }

        // Non-throwing membership test over the same two sets as Find.
        public static bool Exist(string attributeName)
        {
            bool exist = Array.Exists(AllAttribute, e => e.name == attributeName);
            if (!exist)
                exist = Array.Exists(AllVariadicAttribute, e => e.name == attributeName);
            return exist;
        }

        public string name;
        public VFXValue value;
        public VFXVariadic variadic;

        // The attribute's value type, derived from its default value.
        public VFXValueType type
        {
            get
            {
                return value.valueType;
            }
        }
    }

    // An attribute paired with the access mode an expression needs for it.
    struct VFXAttributeInfo
    {
        public VFXAttributeInfo(VFXAttribute attrib, VFXAttributeMode mode)
        {
            this.attrib = attrib;
            this.mode = mode;
        }

        public VFXAttribute attrib;
        public VFXAttributeMode mode;
    }

    // Whether an attribute is read from the current particle or its spawn source.
    enum VFXAttributeLocation
    {
        Current = 0,
        Source = 1,
    }

    // False: plain attribute. True: a variadic vector attribute.
    // BelongsToVariadic: a scalar component of a variadic attribute.
    enum VFXVariadic
    {
        False = 0,
        True = 1,
        BelongsToVariadic = 2
    }

    // Channel-mask choices exposed for variadic attributes in the UI.
    enum VariadicChannelOptions
    {
        X = 0,
        Y = 1,
        Z = 2,
        XY = 3,
        XZ = 4,
        YZ = 5,
        XYZ = 6
    };

    // 0659: Equals overridden without GetHashCode — GetInnerHashCode below is the
    // hash hook used by the expression system instead.
#pragma warning disable 0659
    // A per-element expression that reads an attribute value in generated HLSL.
    sealed class VFXAttributeExpression : VFXExpression
    {
        public VFXAttributeExpression(VFXAttribute attribute, VFXAttributeLocation location = VFXAttributeLocation.Current) : base(Flags.PerElement)
        {
            m_attribute = attribute;
            m_attributeLocation = location;
        }

        public override VFXExpressionOperation operation
        {
            get
            {
                return VFXExpressionOperation.None;
            }
        }

        public override VFXValueType valueType
        {
            get
            {
                return m_attribute.type;
            }
        }

        public string attributeName
        {
            get
            {
                return m_attribute.name;
            }
        }

        public VFXAttributeLocation attributeLocation
        {
            get
            {
                return m_attributeLocation;
            }
        }

        public VFXAttribute attribute { get { return m_attribute; } }
        private VFXAttribute m_attribute;
        private VFXAttributeLocation m_attributeLocation;

        // Equality is structural: same value type, location and attribute name.
        public override bool Equals(object obj)
        {
            if (!(obj is VFXAttributeExpression))
                return false;

            var other = (VFXAttributeExpression)obj;
            return valueType == other.valueType && attributeLocation == other.attributeLocation && attributeName == other.attributeName;
        }

        protected override int GetInnerHashCode()
        {
            return (attributeName.GetHashCode() * 397) ^ attributeLocation.GetHashCode();
        }

        // Attribute reads cannot be constant-folded: evaluation returns the node itself.
        sealed protected override VFXExpression Evaluate(VFXExpression[] constParents)
        {
            return this;
        }

        // Source-location reads are emitted with a "_source" suffix in generated code.
        public override string GetCodeString(string[] parents)
        {
            return attributeLocation == VFXAttributeLocation.Current ? attributeName : attributeName + "_source";
        }

        public override IEnumerable<VFXAttributeInfo> GetNeededAttributes()
        {
            yield return new VFXAttributeInfo(attribute, m_attributeLocation == VFXAttributeLocation.Source ? VFXAttributeMode.ReadSource : VFXAttributeMode.Read);
        }
    }
#pragma warning restore 0659
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using Internal.IL;
using Internal.TypeSystem;

using Debug = System.Diagnostics.Debug;

namespace Internal.IL.Stubs
{
    /// <summary>
    /// A growable byte buffer of raw IL instructions with label/branch patching support.
    /// Multiple streams are concatenated by <see cref="ILEmitter.Link"/>.
    /// </summary>
    public class ILCodeStream
    {
        // Records a branch site (its 4-byte operand offset) to be patched once
        // the target label's absolute offset is known.
        private struct LabelAndOffset
        {
            public readonly ILCodeLabel Label;
            public readonly int Offset;
            public LabelAndOffset(ILCodeLabel label, int offset)
            {
                Label = label;
                Offset = offset;
            }
        }

        internal byte[] _instructions;
        internal int _length;
        // Absolute offset of this stream within the linked method body; -1 until linked.
        internal int _startOffsetForLinking;

        private ArrayBuilder<LabelAndOffset> _offsetsNeedingPatching;

        internal ILCodeStream()
        {
            _instructions = Array.Empty<byte>();
            _startOffsetForLinking = -1;
        }

        private void EmitByte(byte b)
        {
            // Grow geometrically; "+ 10" handles the initial empty array.
            if (_instructions.Length == _length)
                Array.Resize<byte>(ref _instructions, 2 * _instructions.Length + 10);
            _instructions[_length++] = b;
        }

        // Little-endian 16-bit operand.
        private void EmitUInt16(ushort value)
        {
            EmitByte((byte)value);
            EmitByte((byte)(value >> 8));
        }

        // Little-endian 32-bit operand.
        private void EmitUInt32(int value)
        {
            EmitByte((byte)value);
            EmitByte((byte)(value >> 8));
            EmitByte((byte)(value >> 16));
            EmitByte((byte)(value >> 24));
        }

        public void Emit(ILOpcode opcode)
        {
            // Two-byte opcodes (0xFE xx) are represented with values above 0x100
            // in the ILOpcode enum and need the prefix byte emitted first.
            if ((int)opcode > 0x100)
                EmitByte((byte)ILOpcode.prefix1);
            EmitByte((byte)opcode);
        }

        public void Emit(ILOpcode opcode, ILToken token)
        {
            Emit(opcode);
            EmitUInt32((int)token);
        }

        /// <summary>
        /// Emits the smallest encoding of ldc.i4: short forms for -1..8
        /// (ldc.i4.m1 immediately precedes ldc.i4.0 in the opcode map),
        /// ldc.i4.s for sbyte range, ldc.i4 otherwise.
        /// </summary>
        public void EmitLdc(int value)
        {
            if (-1 <= value && value <= 8)
            {
                Emit((ILOpcode)(ILOpcode.ldc_i4_0 + value));
            }
            else if (value == (sbyte)value)
            {
                Emit(ILOpcode.ldc_i4_s);
                EmitByte((byte)value);
            }
            else
            {
                Emit(ILOpcode.ldc_i4);
                EmitUInt32(value);
            }
        }

        // Short forms ldarg.0..3, else ldarg with a 16-bit index.
        // NOTE(review): there is no ldarg.s middle tier here — presumably intentional
        // simplicity, confirm if size matters.
        public void EmitLdArg(int index)
        {
            if (index < 4)
            {
                Emit((ILOpcode)(ILOpcode.ldarg_0 + index));
            }
            else
            {
                Emit(ILOpcode.ldarg);
                EmitUInt16((ushort)index);
            }
        }

        // Smallest encoding: ldloc.0..3, ldloc.s, or ldloc.
        public void EmitLdLoc(ILLocalVariable variable)
        {
            int index = (int)variable;
            if (index < 4)
            {
                Emit((ILOpcode)(ILOpcode.ldloc_0 + index));
            }
            else if (index < 0x100)
            {
                Emit(ILOpcode.ldloc_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.ldloc);
                EmitUInt16((ushort)index);
            }
        }

        // Smallest encoding: ldloca.s or ldloca (no 0..3 short forms exist).
        public void EmitLdLoca(ILLocalVariable variable)
        {
            int index = (int)variable;
            if (index < 0x100)
            {
                Emit(ILOpcode.ldloca_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.ldloca);
                EmitUInt16((ushort)index);
            }
        }

        // Smallest encoding: stloc.0..3, stloc.s, or stloc.
        public void EmitStLoc(ILLocalVariable variable)
        {
            int index = (int)variable;
            if (index < 4)
            {
                Emit((ILOpcode)(ILOpcode.stloc_0 + index));
            }
            else if (index < 0x100)
            {
                Emit(ILOpcode.stloc_s);
                EmitByte((byte)index);
            }
            else
            {
                Emit(ILOpcode.stloc);
                EmitUInt16((ushort)index);
            }
        }

        /// <summary>
        /// Emits a branch to a (possibly not-yet-placed) label, always using the
        /// long (4-byte offset) form; the operand is patched in PatchLabels.
        /// </summary>
        public void Emit(ILOpcode opcode, ILCodeLabel label)
        {
            Debug.Assert(opcode == ILOpcode.br || opcode == ILOpcode.brfalse ||
                opcode == ILOpcode.brtrue || opcode == ILOpcode.beq ||
                opcode == ILOpcode.bge || opcode == ILOpcode.bgt ||
                opcode == ILOpcode.ble || opcode == ILOpcode.blt ||
                opcode == ILOpcode.bne_un || opcode == ILOpcode.bge_un ||
                opcode == ILOpcode.bgt_un || opcode == ILOpcode.ble_un ||
                opcode == ILOpcode.blt_un || opcode == ILOpcode.leave);

            Emit(opcode);
            _offsetsNeedingPatching.Add(new LabelAndOffset(label, _length));
            EmitUInt32(0); // placeholder operand, patched later
        }

        // Binds a label to the current position in this stream.
        public void EmitLabel(ILCodeLabel label)
        {
            label.Place(this, _length);
        }

        // Resolves all pending branch operands. Must run after linking has assigned
        // _startOffsetForLinking; offsets are relative to the end of the 4-byte operand.
        internal void PatchLabels()
        {
            for (int i = 0; i < _offsetsNeedingPatching.Count; i++)
            {
                LabelAndOffset patch = _offsetsNeedingPatching[i];

                Debug.Assert(patch.Label.IsPlaced);
                Debug.Assert(_startOffsetForLinking > -1);

                int value = patch.Label.AbsoluteOffset - _startOffsetForLinking - patch.Offset - 4;

                int offset = patch.Offset;

                _instructions[offset] = (byte)value;
                _instructions[offset + 1] = (byte)(value >> 8);
                _instructions[offset + 2] = (byte)(value >> 16);
                _instructions[offset + 3] = (byte)(value >> 24);
            }
        }
    }

    /// <summary>
    /// Represent a token. Use one of the overloads of <see cref="ILEmitter.NewToken"/>
    /// to create a new token.
    /// </summary>
    public enum ILToken { }

    /// <summary>
    /// Represents a local variable. Use <see cref="ILEmitter.NewLocal"/> to create a new local variable.
    /// </summary>
    public enum ILLocalVariable { }

    // MethodIL implementation backed by the raw bytes produced by ILEmitter.Link.
    internal class ILStubMethodIL : MethodIL
    {
        private byte[] _ilBytes;
        private LocalVariableDefinition[] _locals;
        private Object[] _tokens;

        public ILStubMethodIL(byte[] ilBytes, LocalVariableDefinition[] locals, Object[] tokens)
        {
            _ilBytes = ilBytes;
            _locals = locals;
            _tokens = tokens;
        }

        public override byte[] GetILBytes()
        {
            return _ilBytes;
        }

        public override int GetMaxStack()
        {
            // Conservative estimate...
            return _ilBytes.Length;
        }

        public override ILExceptionRegion[] GetExceptionRegions()
        {
            return Array.Empty<ILExceptionRegion>();
        }

        public override bool GetInitLocals()
        {
            return true;
        }

        public override LocalVariableDefinition[] GetLocals()
        {
            return _locals;
        }

        // Tokens are 1-based within the low 24 bits (see ILEmitter.NewToken).
        public override Object GetObject(int token)
        {
            return _tokens[(token & 0xFFFFFF) - 1];
        }
    }

    // A branch target: bound to a position in a code stream by EmitLabel, with an
    // absolute offset available only after linking.
    public class ILCodeLabel
    {
        private ILCodeStream _codeStream;
        private int _offsetWithinCodeStream;

        internal bool IsPlaced
        {
            get
            {
                return _codeStream != null;
            }
        }

        internal int AbsoluteOffset
        {
            get
            {
                Debug.Assert(IsPlaced);
                Debug.Assert(_codeStream._startOffsetForLinking >= 0);
                return _codeStream._startOffsetForLinking + _offsetWithinCodeStream;
            }
        }

        internal ILCodeLabel()
        {
        }

        internal void Place(ILCodeStream codeStream, int offsetWithinCodeStream)
        {
            Debug.Assert(!IsPlaced);
            _codeStream = codeStream;
            _offsetWithinCodeStream = offsetWithinCodeStream;
        }
    }

    /// <summary>
    /// Builder for stub method bodies: allocates code streams, locals, labels and
    /// metadata tokens, then links everything into a single MethodIL.
    /// </summary>
    public class ILEmitter
    {
        private ArrayBuilder<ILCodeStream> _codeStreams;
        private ArrayBuilder<LocalVariableDefinition> _locals;
        private ArrayBuilder<Object> _tokens;

        public ILEmitter()
        {
        }

        public ILCodeStream NewCodeStream()
        {
            ILCodeStream stream = new ILCodeStream();
            _codeStreams.Add(stream);
            return stream;
        }

        // Token value = 1-based index in _tokens OR'ed with the metadata table tag
        // (0x01 TypeRef-style, 0x0a MemberRef-style, 0x70 UserString, 0x11 StandaloneSig).
        private ILToken NewToken(Object value, int tokenType)
        {
            _tokens.Add(value);
            return (ILToken)(_tokens.Count | tokenType);
        }

        public ILToken NewToken(TypeDesc value)
        {
            return NewToken(value, 0x01000000);
        }

        public ILToken NewToken(MethodDesc value)
        {
            return NewToken(value, 0x0a000000);
        }

        // Fields also use the MemberRef-style tag (0x0a), same as methods.
        public ILToken NewToken(FieldDesc value)
        {
            return NewToken(value, 0x0a000000);
        }

        public ILToken NewToken(string value)
        {
            return NewToken(value, 0x70000000);
        }

        public ILToken NewToken(MethodSignature value)
        {
            return NewToken(value, 0x11000000);
        }

        public ILLocalVariable NewLocal(TypeDesc localType, bool isPinned = false)
        {
            int index = _locals.Count;
            _locals.Add(new LocalVariableDefinition(localType, isPinned));
            return (ILLocalVariable)index;
        }

        public ILCodeLabel NewCodeLabel()
        {
            var newLabel = new ILCodeLabel();
            return newLabel;
        }

        /// <summary>
        /// Concatenates all code streams in creation order, patches branch operands,
        /// and produces the final MethodIL.
        /// </summary>
        public MethodIL Link()
        {
            // First pass: assign each stream its absolute start offset.
            int totalLength = 0;
            for (int i = 0; i < _codeStreams.Count; i++)
            {
                ILCodeStream ilCodeStream = _codeStreams[i];
                ilCodeStream._startOffsetForLinking = totalLength;
                totalLength += ilCodeStream._length;
            }

            // Second pass: patch labels (now resolvable) and copy bytes.
            byte[] ilInstructions = new byte[totalLength];
            int copiedLength = 0;
            for (int i = 0; i < _codeStreams.Count; i++)
            {
                ILCodeStream ilCodeStream = _codeStreams[i];
                ilCodeStream.PatchLabels();
                Array.Copy(ilCodeStream._instructions, 0, ilInstructions, copiedLength, ilCodeStream._length);
                copiedLength += ilCodeStream._length;
            }

            return new ILStubMethodIL(ilInstructions, _locals.ToArray(), _tokens.ToArray());
        }
    }

    // Base for methods whose IL is generated on demand rather than read from metadata.
    public abstract class ILStubMethod : MethodDesc
    {
        public abstract MethodIL EmitIL();

        public override bool HasCustomAttribute(string attributeNamespace, string attributeName)
        {
            return false;
        }
    }
}
using System;
using System.Collections.Generic;
using Should;
using Xunit;
using System.Linq;

namespace AutoMapper.UnitTests
{
    namespace General
    {
        // A destination property with no matching source member maps to its default.
        public class When_mapping_dto_with_a_missing_match : NonValidatingSpecBase
        {
            public class ModelObject
            {
            }

            public class ModelDto
            {
                public string SomePropertyThatDoesNotExistOnModel { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            [Fact]
            public void Should_map_successfully()
            {
                ModelDto dto = Mapper.Map<ModelObject, ModelDto>(new ModelObject());
                dto.ShouldNotBeNull();
            }
        }

        // With AllowNullDestinationValues = false, mapping a null source still
        // yields a non-null destination instance.
        public class When_mapping_a_null_model : AutoMapperSpecBase
        {
            private ModelDto _result;

            public class ModelDto
            {
            }

            public class ModelObject
            {
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.AllowNullDestinationValues = false;
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            [Fact]
            public void Should_always_provide_a_dto()
            {
                _result = Mapper.Map<ModelObject, ModelDto>(null);
                _result.ShouldNotBeNull();
            }
        }

        // A destination type with only a private parameterless constructor can
        // still be instantiated by the mapper.
        public class When_mapping_a_dto_with_a_private_parameterless_constructor : AutoMapperSpecBase
        {
            private ModelDto _result;

            public class ModelObject
            {
                public string SomeValue { get; set; }
            }

            public class ModelDto
            {
                public string SomeValue { get; set; }

                private ModelDto()
                {
                }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            protected override void Because_of()
            {
                var model = new ModelObject
                {
                    SomeValue = "Some value"
                };
                _result = Mapper.Map<ModelObject, ModelDto>(model);
            }

            [Fact]
            public void Should_map_the_dto_value()
            {
                _result.SomeValue.ShouldEqual("Some value");
            }
        }

        // Non-string source members mapped to string destinations use ToString().
        public class When_mapping_to_a_dto_string_property_and_the_dto_type_is_not_a_string : AutoMapperSpecBase
        {
            private ModelDto _result;

            public class ModelObject
            {
                public int NotAString { get; set; }
            }

            public class ModelDto
            {
                public string NotAString { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            protected override void Because_of()
            {
                var model = new ModelObject
                {
                    NotAString = 5
                };
                _result = Mapper.Map<ModelObject, ModelDto>(model);
            }

            [Fact]
            public void Should_use_the_ToString_value_of_the_unmatched_type()
            {
                _result.NotAString.ShouldEqual("5");
            }
        }

        // A GetXxx() source method maps to an Xxx destination array property,
        // converting each element.
        public class When_mapping_dto_with_an_array_property : AutoMapperSpecBase
        {
            private ModelDto _result;

            public class ModelObject
            {
                public IEnumerable<int> GetSomeCoolValues()
                {
                    return new[] { 4, 5, 6 };
                }
            }

            public class ModelDto
            {
                public string[] SomeCoolValues { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            protected override void Because_of()
            {
                var model = new ModelObject();
                _result = Mapper.Map<ModelObject, ModelDto>(model);
            }

            [Fact]
            public void Should_map_the_collection_of_items_in_the_input_to_the_array()
            {
                _result.SomeCoolValues[0].ShouldEqual("4");
                _result.SomeCoolValues[1].ShouldEqual("5");
                _result.SomeCoolValues[2].ShouldEqual("6");
            }
        }

        // An unparseable string -> DateTime member mapping surfaces as
        // AutoMapperMappingException.
        public class When_mapping_a_dto_with_mismatched_property_types : NonValidatingSpecBase
        {
            public class ModelObject
            {
                public string NullableDate { get; set; }
            }

            public class ModelDto
            {
                public DateTime NullableDate { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            [Fact]
            public void Should_throw_a_mapping_exception()
            {
                var model = new ModelObject();
                model.NullableDate = "Lorem Ipsum";

                typeof(AutoMapperMappingException).ShouldBeThrownBy(() => Mapper.Map<ModelObject, ModelDto>(model));
            }
        }

        // Array source -> array destination via the non-generic Map overload.
        public class When_mapping_an_array_of_model_objects : AutoMapperSpecBase
        {
            private ModelObject[] _model;
            private ModelDto[] _dto;

            public class ModelObject
            {
                public string SomeValue { get; set; }
            }

            public class ModelDto
            {
                public string SomeValue { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            protected override void Because_of()
            {
                _model = new[] {new ModelObject {SomeValue = "First"}, new ModelObject {SomeValue = "Second"}};
                _dto = (ModelDto[]) Mapper.Map(_model, typeof (ModelObject[]), typeof (ModelDto[]));
            }

            [Fact]
            public void Should_create_an_array_of_ModelDto_objects()
            {
                _dto.Length.ShouldEqual(2);
            }

            [Fact]
            public void Should_map_properties()
            {
                _dto.Any(d => d.SomeValue.Contains("First")).ShouldBeTrue();
                _dto.Any(d => d.SomeValue.Contains("Second")).ShouldBeTrue();
            }
        }

        // List source -> array destination: note the declared source type passed to
        // Map is ModelObject[], not List<ModelObject>.
        public class When_mapping_a_List_of_model_objects : AutoMapperSpecBase
        {
            private List<ModelObject> _model;
            private ModelDto[] _dto;

            public class ModelObject
            {
                public string SomeValue { get; set; }
            }

            public class ModelDto
            {
                public string SomeValue { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            protected override void Because_of()
            {
                _model = new List<ModelObject> {new ModelObject {SomeValue = "First"}, new ModelObject {SomeValue = "Second"}};
                _dto = (ModelDto[]) Mapper.Map(_model, typeof (ModelObject[]), typeof (ModelDto[]));
            }

            [Fact]
            public void Should_create_an_array_of_ModelDto_objects()
            {
                _dto.Length.ShouldEqual(2);
            }

            [Fact]
            public void Should_map_properties()
            {
                _dto.Any(d => d.SomeValue.Contains("First")).ShouldBeTrue();
                _dto.Any(d => d.SomeValue.Contains("Second")).ShouldBeTrue();
            }
        }

        // int? -> int: value copied when present; null leaves destination at default(int).
        public class When_mapping_a_nullable_type_to_non_nullable_type : AutoMapperSpecBase
        {
            private ModelObject _model;
            private ModelDto _dto;

            public class ModelObject
            {
                public int? SomeValue { get; set; }
                public int? SomeNullableValue { get; set; }
            }

            public class ModelDto
            {
                public int SomeValue { get; set; }
                public int SomeNullableValue { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            protected override void Because_of()
            {
                _model = new ModelObject
                {
                    SomeValue = 2
                };
                _dto = Mapper.Map<ModelObject, ModelDto>(_model);
            }

            [Fact]
            public void Should_map_value_if_has_value()
            {
                _dto.SomeValue.ShouldEqual(2);
            }

            [Fact]
            public void Should_not_set_value_if_null()
            {
                _dto.SomeNullableValue.ShouldEqual(0);
            }
        }

        // int -> int?: non-nullable values are lifted into the nullable destination.
        public class When_mapping_a_non_nullable_type_to_a_nullable_type : AutoMapperSpecBase
        {
            private ModelObject _model;
            private ModelDto _dto;

            public class ModelObject
            {
                public int SomeValue { get; set; }
                public int SomeOtherValue { get; set; }
            }

            public class ModelDto
            {
                public int? SomeValue { get; set; }
                public int? SomeOtherValue { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>();
            });

            protected override void Because_of()
            {
                _model = new ModelObject {SomeValue = 2};
                _dto = Mapper.Map<ModelObject, ModelDto>(_model);
            }

            [Fact]
            public void Should_map_value_if_has_value()
            {
                _dto.SomeValue.ShouldEqual(2);
            }

            [Fact]
            public void Should_not_set_value_if_null()
            {
                _dto.SomeOtherValue.ShouldEqual(0);
            }
        }

        // int? -> int?: nulls flow through, including via an explicit MapFrom.
        public class When_mapping_a_nullable_type_to_a_nullable_type : AutoMapperSpecBase
        {
            private ModelObject _model;
            private ModelDto _dto;

            public class ModelObject
            {
                public int? SomeValue { get; set; }
                public int? SomeOtherValue { get; set; }
            }

            public class ModelDto
            {
                public int? SomeValue { get; set; }
                public int? SomeOtherValue2 { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<ModelObject, ModelDto>()
                    .ForMember(dest => dest.SomeOtherValue2, opt => opt.MapFrom(src => src.SomeOtherValue));
            });

            protected override void Because_of()
            {
                _model = new ModelObject();
                _dto = Mapper.Map<ModelObject, ModelDto>(_model);
            }

            [Fact]
            public void Should_map_value_if_has_value()
            {
                _dto.SomeValue.ShouldBeNull();
            }

            [Fact]
            public void Should_not_set_value_if_null()
            {
                _dto.SomeOtherValue2.ShouldBeNull();
            }
        }

        // Tuple members map across with their items intact.
        public class When_mapping_tuples : AutoMapperSpecBase
        {
            private Dest _dest;

            public class Source
            {
                public Tuple<int, int> Value { get; set; }
            }

            public class Dest
            {
                public Tuple<int, int> Value { get; set; }
            }

            protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
            {
                cfg.CreateMap<Source, Dest>();
            });

            protected override void Because_of()
            {
                var source = new Source
                {
                    Value = new Tuple<int, int>(10, 11)
                };
                _dest = Mapper.Map<Source, Dest>(source);
            }

            [Fact]
            public void Should_map_tuple()
            {
                _dest.Value.ShouldNotBeNull();
                _dest.Value.Item1.ShouldEqual(10);
                _dest.Value.Item2.ShouldEqual(11);
            }
        }
    }
}
// Copyright (c) Charlie Poole, Rob Prouse and Contributors. MIT License - see LICENSE.txt

using System;
using System.Threading;
using NUnit.Framework.Interfaces;
using NUnit.TestData;
using NUnit.TestUtilities;
using F = NUnit.TestData.AwaitableReturnTypeFixture;

namespace NUnit.Framework
{
    // Each TestFixture attribute reruns the whole suite against a different
    // awaitable-returning test method on the fixture (Task, custom Task,
    // custom awaitables with/without ICriticalNotifyCompletion).
    [TestFixture(nameof(F.ReturnsTask))]
    [TestFixture(nameof(F.ReturnsCustomTask))]
    [TestFixture(nameof(F.ReturnsCustomAwaitable))]
    [TestFixture(nameof(F.ReturnsCustomAwaitableWithImplicitOnCompleted))]
    [TestFixture(nameof(F.ReturnsCustomAwaitableWithImplicitUnsafeOnCompleted))]
    public class AwaitableReturnTypeTests
    {
        private readonly string _methodName;

        public AwaitableReturnTypeTests(string methodName)
        {
            _methodName = methodName;
        }

        // Builds and runs the fixture's test method with the supplied scripted
        // awaiter behavior, returning the NUnit result for inspection.
        protected ITestResult RunCurrentTestMethod(AsyncWorkload workload)
        {
            var test = TestBuilder.MakeTestFromMethod(typeof(F), _methodName);
            return TestBuilder.RunTest(test, new F(workload));
        }

        // NOTE(review): the name says "IfIsCompleteIsFalse" but the workload is
        // built with isCompleted: true (and the failure message agrees with true).
        // The name looks stale — confirm and consider renaming to
        // GetResultIsCalledSynchronouslyIfIsCompletedIsTrue.
        [Test]
        public void GetResultIsCalledSynchronouslyIfIsCompleteIsFalse()
        {
            var wasCalled = false;

            RunCurrentTestMethod(new AsyncWorkload(
                isCompleted: true,
                onCompleted: continuation => Assert.Fail("OnCompleted should not be called when IsCompleted is true."),
                getResult: () =>
                {
                    wasCalled = true;
                    return 42;
                })
            ).AssertPassed();

            Assert.That(wasCalled);
        }

        // When IsCompleted is false but the continuation runs inline, GetResult
        // must still be invoked before the test completes.
        [Test]
        public void GetResultIsCalledSynchronouslyWhenContinued()
        {
            var wasCalled = false;

            RunCurrentTestMethod(new AsyncWorkload(
                isCompleted: false,
                onCompleted: continuation => continuation.Invoke(),
                getResult: () =>
                {
                    wasCalled = true;
                    return 42;
                })
            ).AssertPassed();

            Assert.That(wasCalled);
        }

        // Runs the test method on a worker thread, captures the continuation
        // handed to OnCompleted, and checks GetResult happens only after it is invoked.
        [Test]
        public void GetResultIsNotCalledUntilContinued()
        {
            using (var continuationIsAvailable = new ManualResetEventSlim())
            using (var getResultWasCalled = new ManualResetEventSlim())
            {
                var continuation = (Action)null;

                ThreadPool.QueueUserWorkItem(state =>
                {
                    RunCurrentTestMethod(new AsyncWorkload(
                        isCompleted: false,
                        onCompleted: action =>
                        {
                            continuation = action;
                            continuationIsAvailable.Set();
                        },
                        getResult: () =>
                        {
                            getResultWasCalled.Set();
                            return 42;
                        })
                    );
                });

                continuationIsAvailable.Wait();

                if (getResultWasCalled.IsSet)
                    Assert.Fail("GetResult was called before the continuation passed to OnCompleted was invoked.");

                continuation.Invoke();

                if (!getResultWasCalled.Wait(10_000))
                    Assert.Fail("GetResult was not called after the continuation passed to OnCompleted was invoked.");
            }
        }

        // A throw before the awaitable is even returned must fail the test and
        // short-circuit the entire await protocol.
        [Test]
        public void ExceptionThrownBeforeReturningAwaitableShouldBeHandled()
        {
            var getAwaiterWasCalled = false;
            var isCompletedWasCalled = false;
            var onCompletedWasCalled = false;
            var getResultWasCalled = false;

            var result = RunCurrentTestMethod(new AsyncWorkload(
                beforeReturningAwaitable: () => { throw new OddlyNamedException("Failure message"); },
                beforeReturningAwaiter: () => getAwaiterWasCalled = true,
                isCompleted: () => isCompletedWasCalled = true,
                onCompleted: continuation => onCompletedWasCalled = true,
                getResult: () => getResultWasCalled = true));

            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Failed));
            Assert.That(result.Message, Contains.Substring("OddlyNamedException"));
            Assert.That(result.Message, Contains.Substring("Failure message"));

            Assert.That(getAwaiterWasCalled, Is.False);
            Assert.That(isCompletedWasCalled, Is.False);
            Assert.That(onCompletedWasCalled, Is.False);
            Assert.That(getResultWasCalled, Is.False);
        }

        // A throw from GetAwaiter() fails the test before the awaiter is consulted.
        [Test]
        public void ExceptionThrownInGetAwaiterShouldBeHandled()
        {
            var isCompletedWasCalled = false;
            var onCompletedWasCalled = false;
            var getResultWasCalled = false;

            var result = RunCurrentTestMethod(new AsyncWorkload(
                beforeReturningAwaitable: null,
                beforeReturningAwaiter: () => { throw new OddlyNamedException("Failure message"); },
                isCompleted: () => isCompletedWasCalled = true,
                onCompleted: continuation => onCompletedWasCalled = true,
                getResult: () => getResultWasCalled = true));

            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Failed));
            Assert.That(result.Message, Contains.Substring("OddlyNamedException"));
            Assert.That(result.Message, Contains.Substring("Failure message"));

            Assert.That(isCompletedWasCalled, Is.False);
            Assert.That(onCompletedWasCalled, Is.False);
            Assert.That(getResultWasCalled, Is.False);
        }

        // A throw from IsCompleted fails the test before OnCompleted/GetResult.
        [Test]
        public void ExceptionThrownInIsCompletedShouldBeHandled()
        {
            var onCompletedWasCalled = false;
            var getResultWasCalled = false;

            var result = RunCurrentTestMethod(new AsyncWorkload(
                isCompleted: () => { throw new OddlyNamedException("Failure message"); },
                onCompleted: continuation => onCompletedWasCalled = true,
                getResult: () => getResultWasCalled = true));

            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Failed));
            Assert.That(result.Message, Contains.Substring("OddlyNamedException"));
            Assert.That(result.Message, Contains.Substring("Failure message"));

            Assert.That(onCompletedWasCalled, Is.False);
            Assert.That(getResultWasCalled, Is.False);
        }

        // A throw from OnCompleted fails the test before GetResult.
        [Test]
        public void ExceptionThrownInOnCompletedShouldBeHandled()
        {
            var getResultWasCalled = false;

            var result = RunCurrentTestMethod(new AsyncWorkload(
                isCompleted: false,
                onCompleted: continuation => { throw new OddlyNamedException("Failure message"); },
                getResult: () => getResultWasCalled = true));

            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Failed));
            Assert.That(result.Message, Contains.Substring("OddlyNamedException"));
            Assert.That(result.Message, Contains.Substring("Failure message"));

            Assert.That(getResultWasCalled, Is.False);
        }

        // A throw from GetResult (after continuation) still fails the test with
        // the original exception details.
        [Test]
        public void ExceptionThrownInGetResultShouldBeHandled()
        {
            var result = RunCurrentTestMethod(new AsyncWorkload(
                isCompleted: false,
                onCompleted: continuation => continuation.Invoke(),
                getResult: () => { throw new OddlyNamedException("Failure message"); }));

            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Failed));
            Assert.That(result.Message, Contains.Substring("OddlyNamedException"));
            Assert.That(result.Message, Contains.Substring("Failure message"));
        }

        // Distinctive name so the assertions above can match it unambiguously
        // in the failure message.
        private sealed class OddlyNamedException : Exception
        {
            public OddlyNamedException(string message) : base(message)
            {
            }
        }

        // Cancellation exceptions must be reported with their type name in the message.
        [Test]
        public void OperationCanceledExceptionThrownInGetResultShouldBeReportedAsSuch()
        {
            var result = RunCurrentTestMethod(new AsyncWorkload(
                isCompleted: false,
                onCompleted: continuation => continuation.Invoke(),
                getResult: () => { throw new OperationCanceledException(); }));

            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Failed));
            Assert.That(result.Message, Contains.Substring("OperationCanceledException"));
        }

        [Test]
        public void TaskCanceledExceptionThrownInGetResultShouldBeReportedAsSuch()
        {
            var result = RunCurrentTestMethod(new AsyncWorkload(
                isCompleted: false,
                onCompleted: continuation => continuation.Invoke(),
                getResult: () => { throw new System.Threading.Tasks.TaskCanceledException(); }));

            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Failed));
            Assert.That(result.Message, Contains.Substring("TaskCanceledException"));
        }
    }
}
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;

namespace LordJZ.Presentation.Controls
{
    /// <summary>
    /// Templated indeterminate progress indicator with Large/Small and
    /// Active/Inactive visual states.
    /// </summary>
    /// <remarks>
    /// Mostly taken from MahApps.Metro.
    /// </remarks>
    [TemplateVisualState(Name = "Large", GroupName = "SizeStates")]
    [TemplateVisualState(Name = "Small", GroupName = "SizeStates")]
    [TemplateVisualState(Name = "Inactive", GroupName = "ActiveStates")]
    [TemplateVisualState(Name = "Active", GroupName = "ActiveStates")]
    public class ProgressRing : Control
    {
        public static readonly DependencyProperty BindableWidthProperty =
            DependencyProperty.Register("BindableWidth", typeof(double), typeof(ProgressRing),
                                        new PropertyMetadata(default(double), BindableWidthCallback));

        public static readonly DependencyProperty IsActiveProperty =
            DependencyProperty.Register("IsActive", typeof(bool), typeof(ProgressRing),
                                        new FrameworkPropertyMetadata(default(bool),
                                                                      FrameworkPropertyMetadataOptions.BindsTwoWayByDefault,
                                                                      IsActiveChanged));

        public static readonly DependencyProperty IsLargeProperty =
            DependencyProperty.Register("IsLarge", typeof(bool), typeof(ProgressRing),
                                        new PropertyMetadata(true, IsLargeChangedCallback));

        public static readonly DependencyProperty MaxSideLengthProperty =
            DependencyProperty.Register("MaxSideLength", typeof(double), typeof(ProgressRing),
                                        new PropertyMetadata(default(double)));

        public static readonly DependencyProperty EllipseDiameterProperty =
            DependencyProperty.Register("EllipseDiameter", typeof(double), typeof(ProgressRing),
                                        new PropertyMetadata(default(double)));

        public static readonly DependencyProperty EllipseOffsetProperty =
            DependencyProperty.Register("EllipseOffset", typeof(Thickness), typeof(ProgressRing),
                                        new PropertyMetadata(default(Thickness)));

        // State/metric updates requested before the template is applied are
        // queued here and replayed in OnApplyTemplate; null once replayed.
        private List<Action> _deferredActions = new List<Action>();

        static ProgressRing()
        {
            DefaultStyleKeyProperty.OverrideMetadata(typeof(ProgressRing),
                                                     new FrameworkPropertyMetadata(typeof(ProgressRing)));
        }

        public ProgressRing()
        {
            SizeChanged += this.OnSizeChanged;
        }

        /// <summary>Side length used by the template; never less than 60.</summary>
        public double MaxSideLength
        {
            get { return (double)GetValue(MaxSideLengthProperty); }
            private set { SetValue(MaxSideLengthProperty, value); }
        }

        /// <summary>Diameter of each animated dot, derived from the ring width.</summary>
        public double EllipseDiameter
        {
            get { return (double)GetValue(EllipseDiameterProperty); }
            private set { SetValue(EllipseDiameterProperty, value); }
        }

        /// <summary>Margin that positions each dot on the ring.</summary>
        public Thickness EllipseOffset
        {
            get { return (Thickness)GetValue(EllipseOffsetProperty); }
            private set { SetValue(EllipseOffsetProperty, value); }
        }

        /// <summary>Mirror of ActualWidth, updated on every size change.</summary>
        public double BindableWidth
        {
            get { return (double)GetValue(BindableWidthProperty); }
            private set { SetValue(BindableWidthProperty, value); }
        }

        /// <summary>Whether the ring animation is running (two-way bindable).</summary>
        public bool IsActive
        {
            get { return (bool)GetValue(IsActiveProperty); }
            set { SetValue(IsActiveProperty, value); }
        }

        /// <summary>Selects the "Large" visual state (default) over "Small".</summary>
        public bool IsLarge
        {
            get { return (bool)GetValue(IsLargeProperty); }
            set { SetValue(IsLargeProperty, value); }
        }

        /// <summary>
        /// Runs <paramref name="action"/> immediately once the template has been
        /// applied; before that, queues it for replay in OnApplyTemplate.
        /// </summary>
        private void InvokeOrDefer(Action action)
        {
            var deferred = this._deferredActions;
            if (deferred != null)
                deferred.Add(action);
            else
                action();
        }

        private static void BindableWidthCallback(DependencyObject dependencyObject,
                                                  DependencyPropertyChangedEventArgs e)
        {
            var progressRing = dependencyObject as ProgressRing;
            if (progressRing == null)
                return;

            var newWidth = (double)e.NewValue;
            progressRing.InvokeOrDefer(() =>
            {
                progressRing.SetEllipseDiameter(newWidth);
                progressRing.SetEllipseOffset(newWidth);
                progressRing.SetMaxSideLength(newWidth);
            });
        }

        private void SetMaxSideLength(double width)
        {
            // Clamp to a 60px minimum so the template never collapses.
            if (width <= 60)
                this.MaxSideLength = 60.0;
            else
                this.MaxSideLength = width;
        }

        private void SetEllipseDiameter(double width)
        {
            this.EllipseDiameter = width <= 60 ? 6.0 : width * 0.1 + 6;
        }

        private void SetEllipseOffset(double width)
        {
            var top = width <= 60 ? 24.0 : width * 0.4 + 24;
            this.EllipseOffset = new Thickness(0, top, 0, 0);
        }

        private static void IsLargeChangedCallback(DependencyObject dependencyObject,
                                                   DependencyPropertyChangedEventArgs e)
        {
            var progressRing = dependencyObject as ProgressRing;
            if (progressRing == null)
                return;

            progressRing.UpdateLargeState();
        }

        private void UpdateLargeState()
        {
            // Capture the target state now; the transition itself may be deferred.
            var stateName = this.IsLarge ? "Large" : "Small";
            InvokeOrDefer(() => VisualStateManager.GoToState(this, stateName, true));
        }

        private void OnSizeChanged(object sender, SizeChangedEventArgs sizeChangedEventArgs)
        {
            this.BindableWidth = ActualWidth;
        }

        private static void IsActiveChanged(DependencyObject dependencyObject,
                                            DependencyPropertyChangedEventArgs e)
        {
            var progressRing = dependencyObject as ProgressRing;
            if (progressRing == null)
                return;

            progressRing.UpdateActiveState();
        }

        private void UpdateActiveState()
        {
            var stateName = this.IsActive ? "Active" : "Inactive";
            InvokeOrDefer(() => VisualStateManager.GoToState(this, stateName, true));
        }

        public override void OnApplyTemplate()
        {
            // Make sure the states get updated.
            this.UpdateLargeState();
            this.UpdateActiveState();

            base.OnApplyTemplate();

            // Replay everything queued before the template existed, then switch
            // InvokeOrDefer to immediate mode by nulling the queue.
            var deferred = this._deferredActions;
            if (deferred != null)
            {
                foreach (var deferredAction in deferred)
                    deferredAction();
            }
            this._deferredActions = null;
        }
    }

    /// <summary>
    /// Clamps a width to the 60px minimum side length; non-double input yields null.
    /// </summary>
    internal class WidthToMaxSideLengthConverter : IValueConverter
    {
        public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
        {
            if (!(value is double))
                return null;

            var width = (double)value;
            return width <= 60 ? 60.0 : width;
        }

        public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
        {
            throw new NotImplementedException();
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using Newtonsoft.Json;
using Core.Application.Broadcasters;
using Core.Domain.Models;
using Core.Application.Models;
using Core.Application.Injection;

namespace Core.Application.Services
{
    /// <summary>
    /// JSON-over-HTTP helper built on HttpWebRequest. Every call brackets the
    /// request with network-usage start/end events on the broadcaster, and
    /// routes failures through it according to the suppress* flags.
    /// </summary>
    public class WebRequestService : ServiceBase, IWebRequestService
    {
        #region PROPERTIES

        private readonly INetworkEventBroadcaster _networkEventBroadcaster;
        private readonly string _requestBaseUrl;

        #endregion

        #region CTOR

        public WebRequestService(string requestBaseUri)
        {
            _networkEventBroadcaster = Injector.Resolve<INetworkEventBroadcaster>();
            _requestBaseUrl = requestBaseUri;
        }

        #endregion

        #region INTERFACE IMPLEMENTATION

        /// <summary>
        /// Attaches an HTTP Basic Authorization header built from the credentials.
        /// </summary>
        public void SetBasicAuthHeader(WebRequest req, String userName, String userPassword)
        {
            // NOTE(review): Encoding.Default is platform-dependent; kept as-is
            // to preserve existing behavior.
            string authInfo = userName + ":" + userPassword;
            authInfo = Convert.ToBase64String(Encoding.Default.GetBytes(authInfo));
            req.Headers["Authorization"] = "Basic " + authInfo;
        }

        public T GetDataSingle<T>(HttpWebRequest request) where T : new()
        {
            return GetDataSingle<T>(request, false, true);
        }

        /// <summary>
        /// Executes a GET and deserializes the JSON body into a single model.
        /// Returns default(T) on a non-OK status or an empty/failed response.
        /// </summary>
        public T GetDataSingle<T>(HttpWebRequest request, bool suppressNetworkErrors, bool suppressExceptions, bool notifyUser = false) where T : new()
        {
            var result = default(T);
            try
            {
                _networkEventBroadcaster.RaiseNetworkUsageStart();
                using (var response = (HttpWebResponse)request.GetResponse())
                {
                    if (response.StatusCode != HttpStatusCode.OK)
                    {
                        // Handle error response
                        return default(T);
                    }

                    using (var reader = new StreamReader(response.GetResponseStream()))
                    {
                        var body = reader.ReadToEnd();
                        if (string.IsNullOrWhiteSpace(body))
                        {
                            //TODO: Handle empty response
                        }
                        else
                        {
                            // Convert response to model
                            result = DeserializeSingle<T>(body);
                        }
                    }
                }
            }
            catch (WebException ex)
            {
                if (!suppressNetworkErrors)
                    _networkEventBroadcaster.RaiseNetworkErrorEvent(ex, notifyUser);
                if (!suppressExceptions)
                    throw;
            }
            catch (Exception ex)
            {
                if (!suppressExceptions)
                {
                    _networkEventBroadcaster.RaiseErrorEvent(ex, notifyUser);
                    throw;
                }
            }
            finally
            {
                _networkEventBroadcaster.RaiseNetworkUsageEnd();
            }
            return result;
        }

        public List<T> GetData<T>(HttpWebRequest request)
        {
            return GetData<T>(request, false, true);
        }

        /// <summary>
        /// Executes a GET and deserializes the JSON body into a list of models.
        /// Returns null on a non-OK status; an empty list on empty/failed reads.
        /// </summary>
        public List<T> GetData<T>(HttpWebRequest request, bool suppressNetworkErrors, bool suppressExceptions, bool notifyUser = false)
        {
            var result = new List<T>();
            try
            {
                _networkEventBroadcaster.RaiseNetworkUsageStart();
                using (var response = (HttpWebResponse)request.GetResponse())
                {
                    if (response.StatusCode != HttpStatusCode.OK)
                    {
                        // Handle error response
                        return null;
                    }

                    using (var reader = new StreamReader(response.GetResponseStream()))
                    {
                        var body = reader.ReadToEnd();
                        if (string.IsNullOrWhiteSpace(body))
                        {
                            //TODO: Handle empty response
                        }
                        else
                        {
                            // Convert response to models
                            result = DeserializeList<T>(body);
                        }
                    }
                }
            }
            catch (WebException ex)
            {
                if (!suppressNetworkErrors)
                    _networkEventBroadcaster.RaiseNetworkErrorEvent(ex, notifyUser);
                if (!suppressExceptions)
                    throw;
            }
            catch (Exception ex)
            {
                if (!suppressExceptions)
                {
                    _networkEventBroadcaster.RaiseErrorEvent(ex, notifyUser);
                    throw;
                }
            }
            finally
            {
                _networkEventBroadcaster.RaiseNetworkUsageEnd();
            }
            return result;
        }

        public WebRequestServiceResponse PostData(HttpWebRequest request, object objectToPost)
        {
            return PostData(request, objectToPost, false, true);
        }

        /// <summary>
        /// Serializes <paramref name="objectToPost"/> to JSON, POSTs it, and
        /// reports the response status; the body is not read.
        /// </summary>
        public WebRequestServiceResponse PostData(HttpWebRequest request, object objectToPost, bool suppressNetworkErrors, bool suppressExceptions, bool notifyUser = false)
        {
            var serviceResponse = new WebRequestServiceResponse();
            try
            {
                _networkEventBroadcaster.RaiseNetworkUsageStart();
                var payload = SerializeObject(objectToPost);
                using (var writer = new StreamWriter(request.GetRequestStream()))
                {
                    writer.Write(payload);
                    writer.Flush();
                    // Close the request stream before asking for the response.
                    writer.Close();

                    using (var webResponse = (HttpWebResponse)request.GetResponse())
                    {
                        serviceResponse = new WebRequestServiceResponse(null,
                                                                        webResponse.StatusCode,
                                                                        webResponse.StatusDescription);
                        // Nothing else to do here
                    }
                }
            }
            catch (WebException ex)
            {
                if (!suppressNetworkErrors)
                    _networkEventBroadcaster.RaiseNetworkErrorEvent(ex, notifyUser);
                if (!suppressExceptions)
                    throw;
            }
            catch (Exception ex)
            {
                if (!suppressExceptions)
                {
                    _networkEventBroadcaster.RaiseErrorEvent(ex, notifyUser);
                    throw;
                }
            }
            finally
            {
                _networkEventBroadcaster.RaiseNetworkUsageEnd();
            }
            return serviceResponse;
        }

        public T PostData<T>(HttpWebRequest request, object objectToPost)
        {
            return PostData<T>(request, objectToPost, false, true);
        }

        /// <summary>
        /// Serializes <paramref name="objectToPost"/> to JSON, POSTs it, and
        /// deserializes an OK response body into T; default(T) otherwise.
        /// </summary>
        public T PostData<T>(HttpWebRequest request, object objectToPost, bool suppressNetworkErrors, bool suppressExceptions, bool notifyUser = false)
        {
            var result = default(T);
            try
            {
                _networkEventBroadcaster.RaiseNetworkUsageStart();
                using (var writer = new StreamWriter(request.GetRequestStream()))
                {
                    writer.Write(SerializeObject(objectToPost));
                    writer.Flush();
                    writer.Close();

                    using (var webResponse = (HttpWebResponse)request.GetResponse())
                    {
                        if (webResponse.StatusCode == HttpStatusCode.OK)
                        {
                            using (var reader = new StreamReader(webResponse.GetResponseStream()))
                            {
                                var body = reader.ReadToEnd();
                                if (!string.IsNullOrWhiteSpace(body))
                                    result = DeserializeSingle<T>(body);
                            }
                        }
                    }
                }
            }
            catch (WebException ex)
            {
                if (!suppressNetworkErrors)
                    _networkEventBroadcaster.RaiseNetworkErrorEvent(ex, notifyUser);
                if (!suppressExceptions)
                    throw;
            }
            catch (Exception ex)
            {
                if (!suppressExceptions)
                {
                    _networkEventBroadcaster.RaiseErrorEvent(ex, notifyUser);
                    throw;
                }
            }
            finally
            {
                _networkEventBroadcaster.RaiseNetworkUsageEnd();
            }
            return result;
        }

        public WebRequestServiceResponse PostData(HttpWebRequest request, string json)
        {
            return PostData(request, json, false, true);
        }

        /// <summary>
        /// POSTs a pre-serialized JSON string; on an OK response the raw body
        /// (when non-empty) is stored in the result's Data.
        /// </summary>
        public WebRequestServiceResponse PostData(HttpWebRequest request, string json, bool suppressNetworkErrors, bool suppressExceptions, bool notifyUser = false)
        {
            var serviceResponse = new WebRequestServiceResponse();
            try
            {
                _networkEventBroadcaster.RaiseNetworkUsageStart();
                using (var writer = new StreamWriter(request.GetRequestStream()))
                {
                    writer.Write(json);
                    writer.Flush();
                    writer.Close();

                    using (var webResponse = (HttpWebResponse)request.GetResponse())
                    {
                        serviceResponse = new WebRequestServiceResponse(null,
                                                                        webResponse.StatusCode,
                                                                        webResponse.StatusDescription);
                        if (webResponse.StatusCode == HttpStatusCode.OK)
                        {
                            using (var reader = new StreamReader(webResponse.GetResponseStream()))
                            {
                                var body = reader.ReadToEnd();
                                if (!string.IsNullOrWhiteSpace(body))
                                    serviceResponse.Data = body;
                            }
                        }
                    }
                }
            }
            catch (WebException ex)
            {
                if (!suppressNetworkErrors)
                    _networkEventBroadcaster.RaiseNetworkErrorEvent(ex, notifyUser);
                if (!suppressExceptions)
                    throw;
            }
            catch (Exception ex)
            {
                if (!suppressExceptions)
                {
                    _networkEventBroadcaster.RaiseErrorEvent(ex, notifyUser);
                    throw;
                }
            }
            finally
            {
                _networkEventBroadcaster.RaiseNetworkUsageEnd();
            }
            return serviceResponse;
        }

        #endregion

        #region PRIVATE METHODS

        private HttpWebRequest GetRequest(string controller, string action, User user = null)
        {
            return GetRequest(controller, action, null, user);
        }

        // Builds a JSON GET request, optionally with a query string and Basic auth.
        private HttpWebRequest GetRequest(string controller, string action, string qsParams, User user = null)
        {
            var uri = (qsParams == null)
                ? string.Format("{0}/{1}/{2}", _requestBaseUrl, controller, action)
                : string.Format("{0}/{1}/{2}?{3}", _requestBaseUrl, controller, action, qsParams);

            var request = (HttpWebRequest)HttpWebRequest.Create(uri);
            request.ContentType = "application/json";
            request.Method = "GET";

            if (user != null)
                SetBasicAuthHeader(request, user.UserName, user.Password);

            return request;
        }

        // Builds a JSON POST request, optionally with Basic auth.
        private HttpWebRequest GetPostRequest(string controller, string action, User user = null)
        {
            var uri = string.Format("{0}/{1}/{2}", _requestBaseUrl, controller, action);

            var request = (HttpWebRequest)HttpWebRequest.Create(uri);
            request.ContentType = "application/json; charset=utf-8";
            request.Method = "POST";

            if (user != null)
                SetBasicAuthHeader(request, user.UserName, user.Password);

            return request;
        }

        private T DeserializeSingle<T>(string json)
        {
            return JsonConvert.DeserializeObject<T>(json);
        }

        private List<T> DeserializeList<T>(string json)
        {
            return JsonConvert.DeserializeObject<List<T>>(json);
        }

        private string SerializeObject(object items)
        {
            return JsonConvert.SerializeObject(items);
        }

        #endregion
    }
}
#region Licence

/****************************************************************************
          Copyright 1999-2010 Vincent J. Jacquet.  All rights reserved.

    Permission is granted to anyone to use this software for any purpose on
    any computer system, and to alter it and redistribute it, subject
    to the following restrictions:

    1. The author is not responsible for the consequences of use of this
       software, no matter how awful, even if they arise from flaws in it.

    2. The origin of this software must not be misrepresented, either by
       explicit claim or by omission.  Since few users ever read sources,
       credits must appear in the documentation.

    3. Altered versions must be plainly marked as such, and must not be
       misrepresented as being the original software.  Since few users
       ever read sources, credits must appear in the documentation.

    4. This notice may not be removed or altered.

 ****************************************************************************/

#endregion

using System;
using System.ComponentModel.Design;
using System.Threading;
using WmcSoft.Properties;

namespace WmcSoft.Threading
{
    /// <summary>
    /// Base class for dispatchers that schedule <see cref="IJob"/> instances for
    /// execution. The dispatcher doubles as the <see cref="IServiceProvider"/>
    /// handed to the jobs it runs.
    /// </summary>
    public abstract class JobDispatcher : IServiceProvider, IDisposable
    {
        #region NullJobDispatcher Class

        // Null-object implementation: silently discards every job.
        sealed class NullJobDispatcher : JobDispatcher
        {
            public override void Dispatch(IJob job)
            {
            }
        }

        #endregion

        #region Internal job Classes

        // Adapts a ThreadStart delegate to the IJob interface.
        struct ThreadStartJob : IJob
        {
            readonly ThreadStart start;

            internal ThreadStartJob(ThreadStart start)
            {
                if (start == null) throw new ArgumentNullException(nameof(start));
                this.start = start;
            }

            #region IJob Membres

            void IJob.Execute(IServiceProvider serviceProvider)
            {
                start();
            }

            #endregion
        }

        // Adapts an Action<T> delegate to IJob; the T argument is resolved from
        // the job's service provider (default(T) when no service is registered).
        struct ActionJob<T> : IJob
        {
            readonly Action<T> action;

            internal ActionJob(Action<T> action)
            {
                if (action == null) throw new ArgumentNullException(nameof(action));
                this.action = action;
            }

            #region IJob Membres

            void IJob.Execute(IServiceProvider serviceProvider)
            {
                var service = serviceProvider.GetService(typeof(T));
                if (service == null)
                    action(default);
                else
                    action((T)service);
            }

            #endregion
        }

        #endregion

        #region Lifecycle

        protected JobDispatcher()
        {
            serviceContainer = new ServiceContainer();
        }

        protected JobDispatcher(IServiceProvider parentProvider)
        {
            serviceContainer = new ServiceContainer(parentProvider);
        }

        #endregion

        #region Static Properties

        /// <summary>A shared dispatcher that discards every job.</summary>
        public static readonly JobDispatcher Null = new NullJobDispatcher();

        #endregion

        #region Fields

        readonly IServiceProvider serviceContainer;

        #endregion

        #region Abstracts & overridables

        /// <summary>
        /// Dispatches the specified job.
        /// </summary>
        /// <param name="job">An <see cref="System.Object"/> that implements <see cref="IJob"/>.</param>
        public abstract void Dispatch(IJob job);

        /// <summary>
        /// Dispatches the specified delegate as a job.
        /// </summary>
        /// <param name="job">The delegate to execute.</param>
        public void Dispatch(ThreadStart job)
        {
            Dispatch(new ThreadStartJob(job));
        }

        /// <summary>
        /// Dispatches the specified action as a job; its argument is resolved
        /// from the service provider at execution time.
        /// </summary>
        /// <typeparam name="T">The service type passed to the action.</typeparam>
        /// <param name="job">The action to execute.</param>
        public void Dispatch<T>(Action<T> job)
        {
            Dispatch(new ActionJob<T>(job));
        }

        /// <summary>
        /// Disposes the specified job if it implements the interface.
        /// </summary>
        /// <param name="job">An <see cref="System.Object"/> that implements <see cref="IJob"/>.</param>
        protected virtual void Dispose(IJob job)
        {
            if (job is IDisposable disposable)
                disposable.Dispose();
        }

        ///// <summary>
        ///// Blocks the current thread while the dispatcher is busy.
        ///// </summary>
        ///// <returns><c>true</c> if the current instance receives a signal; otherwise <see cref="WaitWhileBusy()"/> never returns.</returns>
        //public bool WaitWhileBusy() {
        //    return WaitWhileBusy(-1);
        //}

        ///// <summary>
        ///// Blocks the current thread while the dispatcher is busy, using a 32-bit signed
        ///// integer to measure the time interval.
        ///// </summary>
        ///// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see cref="System.Threading.Timeout.Infinite"/> (-1) to wait indefinitely.</param>
        ///// <returns><c>true</c> if the current instance receives a signal; otherwise, <c>false</c>.</returns>
        //public abstract bool WaitWhileBusy(int millisecondsTimeout);

        ///// <summary>
        ///// Returns <c>true</c> when the <see cref="JobDispatcher"/> is busy.
        ///// </summary>
        //public abstract bool IsBusy { get; }

        /// <summary>
        /// Trait property to indicate if the JobDispatcher supports cancellation.
        /// </summary>
        public virtual bool SupportsCancellation => false;

        /// <summary>
        /// Returns <c>true</c> if the job dispatcher has been cancelled.
        /// </summary>
        public virtual bool CancellationPending => false;

        /// <summary>
        /// Requests to cancel the job dispatcher.
        /// </summary>
        /// <exception cref="NotSupportedException">Always thrown by this base implementation.</exception>
        public virtual void CancelAsync()
        {
            throw new NotSupportedException(Resources.DispatcherDoesntSuppportCancellation);
        }

        #endregion

        #region IServiceProvider Membres

        /// <summary>
        /// Resolves a service, answering with this instance for
        /// <see cref="IServiceProvider"/> and <see cref="JobDispatcher"/> requests
        /// and delegating everything else to the internal container.
        /// </summary>
        /// <param name="serviceType">The type of service requested.</param>
        /// <returns>The service instance, or null when unavailable.</returns>
        public virtual object GetService(Type serviceType)
        {
            if (serviceType == typeof(IServiceProvider) || serviceType == typeof(JobDispatcher))
                return this;
            return serviceContainer.GetService(serviceType);
        }

        #endregion

        #region IDisposable Membres

        /// <summary>
        /// Releases all resources used by the <see cref="JobDispatcher"/>.
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// When overridden in a derived class, releases the unmanaged resources used by
        /// the <see cref="JobDispatcher"/>, and optionally releases the managed resources.
        /// </summary>
        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
        protected virtual void Dispose(bool disposing)
        {
        }

        /// <summary>
        /// Releases the resources held by the current instance.
        /// </summary>
        ~JobDispatcher()
        {
            Dispose(false);
        }

        #endregion
    }
}