context
stringlengths
2.52k
185k
gt
stringclasses
1 value
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

namespace Google.Apis.PolicyAnalyzer.v1
{
    /// <summary>The PolicyAnalyzer Service.</summary>
    public class PolicyAnalyzerService : Google.Apis.Services.BaseClientService
    {
        /// <summary>The API version.</summary>
        public const string Version = "v1";

        /// <summary>The discovery version used to generate this service.</summary>
        public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0;

        /// <summary>Constructs a new service.</summary>
        public PolicyAnalyzerService() : this(new Google.Apis.Services.BaseClientService.Initializer())
        {
        }

        /// <summary>Constructs a new service.</summary>
        /// <param name="initializer">The service initializer.</param>
        public PolicyAnalyzerService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer)
        {
            Projects = new ProjectsResource(this);
        }

        /// <summary>Gets the service supported features.</summary>
        public override System.Collections.Generic.IList<string> Features => new string[0];

        /// <summary>Gets the service name.</summary>
        public override string Name => "policyanalyzer";

        /// <summary>Gets the service base URI.</summary>
        public override string BaseUri =>
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NET45
            BaseUriOverride ?? "https://policyanalyzer.googleapis.com/";
#else
            "https://policyanalyzer.googleapis.com/";
#endif

        /// <summary>Gets the service base path.</summary>
        public override string BasePath => "";

#if !NET40
        /// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary>
        public override string BatchUri => "https://policyanalyzer.googleapis.com/batch";

        /// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary>
        public override string BatchPath => "batch";
#endif

        /// <summary>Available OAuth 2.0 scopes for use with the Policy Analyzer API.</summary>
        public class Scope
        {
            /// <summary>
            /// See, edit, configure, and delete your Google Cloud data and see the email address for your Google
            /// Account.
            /// </summary>
            public static string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform";
        }

        /// <summary>Available OAuth 2.0 scope constants for use with the Policy Analyzer API.</summary>
        public static class ScopeConstants
        {
            /// <summary>
            /// See, edit, configure, and delete your Google Cloud data and see the email address for your Google
            /// Account.
            /// </summary>
            public const string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform";
        }

        /// <summary>Gets the Projects resource.</summary>
        public virtual ProjectsResource Projects { get; }
    }

    /// <summary>A base abstract class for PolicyAnalyzer requests.</summary>
    public abstract class PolicyAnalyzerBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse>
    {
        /// <summary>Constructs a new PolicyAnalyzerBaseServiceRequest instance.</summary>
        protected PolicyAnalyzerBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service)
        {
        }

        /// <summary>V1 error format.</summary>
        [Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)]
        public virtual System.Nullable<XgafvEnum> Xgafv { get; set; }

        /// <summary>V1 error format.</summary>
        public enum XgafvEnum
        {
            /// <summary>v1 error format</summary>
            [Google.Apis.Util.StringValueAttribute("1")]
            Value1 = 0,

            /// <summary>v2 error format</summary>
            [Google.Apis.Util.StringValueAttribute("2")]
            Value2 = 1,
        }

        /// <summary>OAuth access token.</summary>
        [Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string AccessToken { get; set; }

        /// <summary>Data format for response.</summary>
        [Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)]
        public virtual System.Nullable<AltEnum> Alt { get; set; }

        /// <summary>Data format for response.</summary>
        public enum AltEnum
        {
            /// <summary>Responses with Content-Type of application/json</summary>
            [Google.Apis.Util.StringValueAttribute("json")]
            Json = 0,

            /// <summary>Media download with context-dependent Content-Type</summary>
            [Google.Apis.Util.StringValueAttribute("media")]
            Media = 1,

            /// <summary>Responses with Content-Type of application/x-protobuf</summary>
            [Google.Apis.Util.StringValueAttribute("proto")]
            Proto = 2,
        }

        /// <summary>JSONP</summary>
        [Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string Callback { get; set; }

        /// <summary>Selector specifying which fields to include in a partial response.</summary>
        [Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string Fields { get; set; }

        /// <summary>
        /// API key. Your API key identifies your project and provides you with API access, quota, and reports. Required
        /// unless you provide an OAuth 2.0 token.
        /// </summary>
        [Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string Key { get; set; }

        /// <summary>OAuth 2.0 token for the current user.</summary>
        [Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string OauthToken { get; set; }

        /// <summary>Returns response with indentations and line breaks.</summary>
        [Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)]
        public virtual System.Nullable<bool> PrettyPrint { get; set; }

        /// <summary>
        /// Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a
        /// user, but should not exceed 40 characters.
        /// </summary>
        [Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string QuotaUser { get; set; }

        /// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary>
        [Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string UploadType { get; set; }

        /// <summary>Upload protocol for media (e.g. "raw", "multipart").</summary>
        [Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)]
        public virtual string UploadProtocol { get; set; }

        /// <summary>Initializes PolicyAnalyzer parameter list.</summary>
        protected override void InitParameters()
        {
            base.InitParameters();
            RequestParameters.Add("$.xgafv", new Google.Apis.Discovery.Parameter
            {
                Name = "$.xgafv",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("access_token", new Google.Apis.Discovery.Parameter
            {
                Name = "access_token",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("alt", new Google.Apis.Discovery.Parameter
            {
                Name = "alt",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = "json",
                Pattern = null,
            });
            RequestParameters.Add("callback", new Google.Apis.Discovery.Parameter
            {
                Name = "callback",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("fields", new Google.Apis.Discovery.Parameter
            {
                Name = "fields",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("key", new Google.Apis.Discovery.Parameter
            {
                Name = "key",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("oauth_token", new Google.Apis.Discovery.Parameter
            {
                Name = "oauth_token",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("prettyPrint", new Google.Apis.Discovery.Parameter
            {
                Name = "prettyPrint",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = "true",
                Pattern = null,
            });
            RequestParameters.Add("quotaUser", new Google.Apis.Discovery.Parameter
            {
                Name = "quotaUser",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("uploadType", new Google.Apis.Discovery.Parameter
            {
                Name = "uploadType",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
            RequestParameters.Add("upload_protocol", new Google.Apis.Discovery.Parameter
            {
                Name = "upload_protocol",
                IsRequired = false,
                ParameterType = "query",
                DefaultValue = null,
                Pattern = null,
            });
        }
    }

    /// <summary>The "projects" collection of methods.</summary>
    public class ProjectsResource
    {
        private const string Resource = "projects";

        /// <summary>The service which this resource belongs to.</summary>
        private readonly Google.Apis.Services.IClientService service;

        /// <summary>Constructs a new resource.</summary>
        public ProjectsResource(Google.Apis.Services.IClientService service)
        {
            this.service = service;
            Locations = new LocationsResource(service);
        }

        /// <summary>Gets the Locations resource.</summary>
        public virtual LocationsResource Locations { get; }

        /// <summary>The "locations" collection of methods.</summary>
        public class LocationsResource
        {
            private const string Resource = "locations";

            /// <summary>The service which this resource belongs to.</summary>
            private readonly Google.Apis.Services.IClientService service;

            /// <summary>Constructs a new resource.</summary>
            public LocationsResource(Google.Apis.Services.IClientService service)
            {
                this.service = service;
                ActivityTypes = new ActivityTypesResource(service);
            }

            /// <summary>Gets the ActivityTypes resource.</summary>
            public virtual ActivityTypesResource ActivityTypes { get; }

            /// <summary>The "activityTypes" collection of methods.</summary>
            public class ActivityTypesResource
            {
                private const string Resource = "activityTypes";

                /// <summary>The service which this resource belongs to.</summary>
                private readonly Google.Apis.Services.IClientService service;

                /// <summary>Constructs a new resource.</summary>
                public ActivityTypesResource(Google.Apis.Services.IClientService service)
                {
                    this.service = service;
                    Activities = new ActivitiesResource(service);
                }

                /// <summary>Gets the Activities resource.</summary>
                public virtual ActivitiesResource Activities { get; }

                /// <summary>The "activities" collection of methods.</summary>
                public class ActivitiesResource
                {
                    private const string Resource = "activities";

                    /// <summary>The service which this resource belongs to.</summary>
                    private readonly Google.Apis.Services.IClientService service;

                    /// <summary>Constructs a new resource.</summary>
                    public ActivitiesResource(Google.Apis.Services.IClientService service)
                    {
                        this.service = service;
                    }

                    /// <summary>Queries policy activities on Google Cloud resources.</summary>
                    /// <param name="parent">
                    /// Required. The container resource on which to execute the request. Acceptable formats:
                    /// `projects/[PROJECT_ID|PROJECT_NUMBER]/locations/[LOCATION]/activityTypes/[ACTIVITY_TYPE]`
                    /// LOCATION here refers to Google Cloud Locations: https://cloud.google.com/about/locations/
                    /// </param>
                    public virtual QueryRequest Query(string parent)
                    {
                        return new QueryRequest(service, parent);
                    }

                    /// <summary>Queries policy activities on Google Cloud resources.</summary>
                    public class QueryRequest : PolicyAnalyzerBaseServiceRequest<Google.Apis.PolicyAnalyzer.v1.Data.GoogleCloudPolicyanalyzerV1QueryActivityResponse>
                    {
                        /// <summary>Constructs a new Query request.</summary>
                        public QueryRequest(Google.Apis.Services.IClientService service, string parent) : base(service)
                        {
                            Parent = parent;
                            InitParameters();
                        }

                        /// <summary>
                        /// Required. The container resource on which to execute the request. Acceptable formats:
                        /// `projects/[PROJECT_ID|PROJECT_NUMBER]/locations/[LOCATION]/activityTypes/[ACTIVITY_TYPE]`
                        /// LOCATION here refers to Google Cloud Locations: https://cloud.google.com/about/locations/
                        /// </summary>
                        [Google.Apis.Util.RequestParameterAttribute("parent", Google.Apis.Util.RequestParameterType.Path)]
                        public virtual string Parent { get; private set; }

                        /// <summary>
                        /// Optional. Filter expression to restrict the activities returned. For
                        /// serviceAccountLastAuthentication activities, supported filters are: -
                        /// `activities.full_resource_name {=} [STRING]` - `activities.fullResourceName {=} [STRING]`
                        /// where `[STRING]` is the full resource name of the service account. For
                        /// serviceAccountKeyLastAuthentication activities, supported filters are: -
                        /// `activities.full_resource_name {=} [STRING]` - `activities.fullResourceName {=} [STRING]`
                        /// where `[STRING]` is the full resource name of the service account key.
                        /// </summary>
                        [Google.Apis.Util.RequestParameterAttribute("filter", Google.Apis.Util.RequestParameterType.Query)]
                        public virtual string Filter { get; set; }

                        /// <summary>
                        /// Optional. The maximum number of results to return from this request. Max limit is 1000.
                        /// Non-positive values are ignored. The presence of `nextPageToken` in the response indicates
                        /// that more results might be available.
                        /// </summary>
                        [Google.Apis.Util.RequestParameterAttribute("pageSize", Google.Apis.Util.RequestParameterType.Query)]
                        public virtual System.Nullable<int> PageSize { get; set; }

                        /// <summary>
                        /// Optional. If present, then retrieve the next batch of results from the preceding call to
                        /// this method. `pageToken` must be the value of `nextPageToken` from the previous response.
                        /// The values of other method parameters should be identical to those in the previous call.
                        /// </summary>
                        [Google.Apis.Util.RequestParameterAttribute("pageToken", Google.Apis.Util.RequestParameterType.Query)]
                        public virtual string PageToken { get; set; }

                        /// <summary>Gets the method name.</summary>
                        public override string MethodName => "query";

                        /// <summary>Gets the HTTP method.</summary>
                        public override string HttpMethod => "GET";

                        /// <summary>Gets the REST path.</summary>
                        public override string RestPath => "v1/{+parent}/activities:query";

                        /// <summary>Initializes Query parameter list.</summary>
                        protected override void InitParameters()
                        {
                            base.InitParameters();
                            RequestParameters.Add("parent", new Google.Apis.Discovery.Parameter
                            {
                                Name = "parent",
                                IsRequired = true,
                                ParameterType = "path",
                                DefaultValue = null,
                                Pattern = @"^projects/[^/]+/locations/[^/]+/activityTypes/[^/]+$",
                            });
                            RequestParameters.Add("filter", new Google.Apis.Discovery.Parameter
                            {
                                Name = "filter",
                                IsRequired = false,
                                ParameterType = "query",
                                DefaultValue = null,
                                Pattern = null,
                            });
                            RequestParameters.Add("pageSize", new Google.Apis.Discovery.Parameter
                            {
                                Name = "pageSize",
                                IsRequired = false,
                                ParameterType = "query",
                                DefaultValue = null,
                                Pattern = null,
                            });
                            RequestParameters.Add("pageToken", new Google.Apis.Discovery.Parameter
                            {
                                Name = "pageToken",
                                IsRequired = false,
                                ParameterType = "query",
                                DefaultValue = null,
                                Pattern = null,
                            });
                        }
                    }
                }
            }
        }
    }
}
namespace Google.Apis.PolicyAnalyzer.v1.Data
{
    public class GoogleCloudPolicyanalyzerV1Activity : Google.Apis.Requests.IDirectResponseSchema
    {
        /// <summary>A struct of custom fields to explain the activity.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("activity")]
        public virtual System.Collections.Generic.IDictionary<string, object> Activity { get; set; }

        /// <summary>The type of the activity.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("activityType")]
        public virtual string ActivityType { get; set; }

        /// <summary>
        /// The full resource name that identifies the resource. For examples of full resource names for Google Cloud
        /// services, see https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
        /// </summary>
        [Newtonsoft.Json.JsonPropertyAttribute("fullResourceName")]
        public virtual string FullResourceName { get; set; }

        /// <summary>The data observation period to build the activity.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("observationPeriod")]
        public virtual GoogleCloudPolicyanalyzerV1ObservationPeriod ObservationPeriod { get; set; }

        /// <summary>The ETag of the item.</summary>
        public virtual string ETag { get; set; }
    }

    /// <summary>Represents data observation period.</summary>
    public class GoogleCloudPolicyanalyzerV1ObservationPeriod : Google.Apis.Requests.IDirectResponseSchema
    {
        /// <summary>The observation end time. The time in this timestamp is always `07:00:00Z`.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("endTime")]
        public virtual object EndTime { get; set; }

        /// <summary>The observation start time. The time in this timestamp is always `07:00:00Z`.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("startTime")]
        public virtual object StartTime { get; set; }

        /// <summary>The ETag of the item.</summary>
        public virtual string ETag { get; set; }
    }

    /// <summary>Response to the `QueryActivity` method.</summary>
    public class GoogleCloudPolicyanalyzerV1QueryActivityResponse : Google.Apis.Requests.IDirectResponseSchema
    {
        /// <summary>The set of activities that match the filter included in the request.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("activities")]
        public virtual System.Collections.Generic.IList<GoogleCloudPolicyanalyzerV1Activity> Activities { get; set; }

        /// <summary>
        /// If there might be more results than those appearing in this response, then `nextPageToken` is included. To
        /// get the next set of results, call this method again using the value of `nextPageToken` as `pageToken`.
        /// </summary>
        [Newtonsoft.Json.JsonPropertyAttribute("nextPageToken")]
        public virtual string NextPageToken { get; set; }

        /// <summary>The ETag of the item.</summary>
        public virtual string ETag { get; set; }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Globalization;
using System.Runtime.Serialization;
using System.Configuration.Assemblies;
using System.Reflection.Runtime.Assemblies;

using Internal.Reflection.Augments;

namespace System.Reflection
{
    /// <summary>
    /// Describes an assembly's unique identity: simple name, version, culture,
    /// public key (token), and assorted flags packed into <see cref="AssemblyNameFlags"/>.
    /// </summary>
    public sealed class AssemblyName : ICloneable, IDeserializationCallback, ISerializable
    {
        public AssemblyName()
        {
            HashAlgorithm = AssemblyHashAlgorithm.None;
            VersionCompatibility = AssemblyVersionCompatibility.SameMachine;
            _flags = AssemblyNameFlags.None;
        }

        public AssemblyName(string assemblyName)
        {
            if (assemblyName == null)
                throw new ArgumentNullException(nameof(assemblyName));

            // Parse the display name and copy each parsed component onto this instance.
            RuntimeAssemblyName parsed = AssemblyNameParser.Parse(assemblyName);
            parsed.CopyToAssemblyName(this);
        }

        // Constructs a new AssemblyName during deserialization. (Needs to public so we can whitelist in Reflection).
        public AssemblyName(SerializationInfo info, StreamingContext context)
        {
            // The graph is not valid until OnDeserialization() has been called.
            _siInfo = info;
        }

        public object Clone()
        {
            AssemblyName copy = new AssemblyName();
            copy.Name = Name;
            // Mutable array/Version state is deep-copied so the clone is independent.
            copy._publicKey = (byte[])_publicKey?.Clone();
            copy._publicKeyToken = (byte[])_publicKeyToken?.Clone();
            copy.CultureInfo = CultureInfo;
            copy.Version = (Version)Version?.Clone();
            copy._flags = _flags;
            copy.CodeBase = CodeBase;
            copy.HashAlgorithm = HashAlgorithm;
            copy.VersionCompatibility = VersionCompatibility;
            return copy;
        }

        public ProcessorArchitecture ProcessorArchitecture
        {
            get
            {
                // Architecture lives in bits 4-6 of _flags; out-of-range values map to None (0).
                int arch = (((int)_flags) & 0x70) >> 4;
                if (arch > 5)
                    arch = 0;
                return (ProcessorArchitecture)arch;
            }
            set
            {
                int arch = ((int)value) & 0x07;
                // Silently ignore values outside the known range, matching desktop behavior.
                if (arch <= 5)
                {
                    _flags = (AssemblyNameFlags)((int)_flags & 0xFFFFFF0F);
                    _flags |= (AssemblyNameFlags)(arch << 4);
                }
            }
        }

        public AssemblyContentType ContentType
        {
            get
            {
                // Content type lives in bits 9-11 of _flags; out-of-range values map to Default (0).
                int contentType = (((int)_flags) & 0x00000E00) >> 9;
                if (contentType > 1)
                    contentType = 0;
                return (AssemblyContentType)contentType;
            }
            set
            {
                int contentType = ((int)value) & 0x07;
                if (contentType <= 1)
                {
                    _flags = (AssemblyNameFlags)((int)_flags & 0xFFFFF1FF);
                    _flags |= (AssemblyNameFlags)(contentType << 9);
                }
            }
        }

        public string CultureName
        {
            get => CultureInfo?.Name;
            set => CultureInfo = (value == null) ? null : new CultureInfo(value);
        }

        public CultureInfo CultureInfo { get; set; }

        public AssemblyNameFlags Flags
        {
            // Mask off the architecture/content-type bits, which are surfaced via their own properties.
            get => (AssemblyNameFlags)((uint)_flags & 0xFFFFF10F);
            set
            {
                _flags &= unchecked((AssemblyNameFlags)0x00000EF0);
                _flags |= (value & unchecked((AssemblyNameFlags)0xFFFFF10F));
            }
        }

        public string FullName
        {
            get
            {
                if (Name == null)
                    return string.Empty;
                return AssemblyNameHelpers.ComputeDisplayName(this.ToRuntimeAssemblyName());
            }
        }

        public string Name { get; set; }

        public Version Version { get; set; }

        public string CodeBase { get; set; }

        public AssemblyHashAlgorithm HashAlgorithm { get; set; }

        public AssemblyVersionCompatibility VersionCompatibility { get; set; }

        public StrongNameKeyPair KeyPair { get; set; }

        public string EscapedCodeBase => CodeBase == null ? null : EscapeCodeBase(CodeBase);

        public byte[] GetPublicKey() => _publicKey;

        public byte[] GetPublicKeyToken()
        {
            // Lazily derive the token from the full public key on first request.
            if (_publicKeyToken == null)
                _publicKeyToken = AssemblyNameHelpers.ComputePublicKeyToken(_publicKey);
            return _publicKeyToken;
        }

        public void SetPublicKey(byte[] publicKey)
        {
            _publicKey = publicKey;
            // Keep the PublicKey flag in sync with whether a key is present.
            if (publicKey == null)
                _flags &= ~AssemblyNameFlags.PublicKey;
            else
                _flags |= AssemblyNameFlags.PublicKey;
        }

        public void SetPublicKeyToken(byte[] publicKeyToken)
        {
            _publicKeyToken = publicKeyToken;
        }

        public override string ToString()
        {
            string displayName = FullName;
            return displayName ?? base.ToString();
        }

        public void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
                throw new ArgumentNullException(nameof(info));

            //Allocate the serialization info and serialize our static data.
            info.AddValue("_Name", Name);
            info.AddValue("_PublicKey", _publicKey, typeof(byte[]));
            info.AddValue("_PublicKeyToken", _publicKeyToken, typeof(byte[]));
            info.AddValue("_CultureInfo", (CultureInfo == null) ? -1 : CultureInfo.LCID);
            info.AddValue("_CodeBase", CodeBase);
            info.AddValue("_Version", Version);
            info.AddValue("_HashAlgorithm", HashAlgorithm, typeof(AssemblyHashAlgorithm));
            info.AddValue("_StrongNameKeyPair", KeyPair, typeof(StrongNameKeyPair));
            info.AddValue("_VersionCompatibility", VersionCompatibility, typeof(AssemblyVersionCompatibility));
            info.AddValue("_Flags", _flags, typeof(AssemblyNameFlags));

            // These are fields used (and set) internally by the full framework only. The fields are optional but the full framework
            // will catch an exception internally if they aren't there so to avoid that annoyance, we'll emit them using their default values.
            info.AddValue("_HashAlgorithmForControl", AssemblyHashAlgorithm.None, typeof(AssemblyHashAlgorithm));
            info.AddValue("_HashForControl", null, typeof(byte[]));
        }

        public void OnDeserialization(object sender)
        {
            // Deserialization has already been performed
            if (_siInfo == null)
                return;

            Name = _siInfo.GetString("_Name");
            _publicKey = (byte[])_siInfo.GetValue("_PublicKey", typeof(byte[]));
            _publicKeyToken = (byte[])_siInfo.GetValue("_PublicKeyToken", typeof(byte[]));
            int lcid = _siInfo.GetInt32("_CultureInfo");
            if (lcid != -1)
                CultureInfo = new CultureInfo(lcid);
            CodeBase = _siInfo.GetString("_CodeBase");
            Version = (Version)_siInfo.GetValue("_Version", typeof(Version));
            HashAlgorithm = (AssemblyHashAlgorithm)_siInfo.GetValue("_HashAlgorithm", typeof(AssemblyHashAlgorithm));
            KeyPair = (StrongNameKeyPair)_siInfo.GetValue("_StrongNameKeyPair", typeof(StrongNameKeyPair));
            VersionCompatibility = (AssemblyVersionCompatibility)_siInfo.GetValue("_VersionCompatibility", typeof(AssemblyVersionCompatibility));
            _flags = (AssemblyNameFlags)_siInfo.GetValue("_Flags", typeof(AssemblyNameFlags));
            _siInfo = null;
        }

        public static AssemblyName GetAssemblyName(string assemblyFile)
        {
            throw new NotImplementedException(); // TODO: https://github.com/dotnet/corert/issues/3253
        }

        /// <summary>
        /// Compares the simple names disregarding Version, Culture and PKT. While this clearly does not
        /// match the intent of this api, this api has been broken this way since its debut and we cannot
        /// change its behavior now.
        /// </summary>
        public static bool ReferenceMatchesDefinition(AssemblyName reference, AssemblyName definition)
        {
            if (object.ReferenceEquals(reference, definition))
                return true;

            if (reference == null)
                throw new ArgumentNullException(nameof(reference));
            if (definition == null)
                throw new ArgumentNullException(nameof(definition));

            string referenceName = reference.Name ?? string.Empty;
            string definitionName = definition.Name ?? string.Empty;
            return referenceName.Equals(definitionName, StringComparison.OrdinalIgnoreCase);
        }

        internal static string EscapeCodeBase(string codebase)
        {
            throw new PlatformNotSupportedException();
        }

        private AssemblyNameFlags _flags;
        private byte[] _publicKey;
        private byte[] _publicKeyToken;
        private SerializationInfo _siInfo;
    }
}
using System; using System.Drawing; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Windows.Forms; using System.Text; using System.Xml; using fyiReporting.RDL; namespace fyiReporting.RdlDesign { /// <summary> /// Summary description for DialogDataSourceRef. /// </summary> internal class DialogNewTable : System.Windows.Forms.Form { private DesignXmlDraw _Draw; private System.Windows.Forms.Button bOK; private System.Windows.Forms.Button bCancel; private System.Windows.Forms.Label label1; private System.Windows.Forms.ComboBox cbDataSets; private System.Windows.Forms.Label label2; private System.Windows.Forms.Label label3; private System.Windows.Forms.ListBox lbFields; private System.Windows.Forms.CheckedListBox lbTableColumns; private System.Windows.Forms.Button bUp; private System.Windows.Forms.Button bDown; private System.Windows.Forms.Button bRight; private System.Windows.Forms.Button bAllRight; private System.Windows.Forms.Button bLeft; private System.Windows.Forms.Button bAllLeft; private System.Windows.Forms.Label label4; private System.Windows.Forms.ComboBox cbGroupColumn; private System.Windows.Forms.CheckBox chkGrandTotals; private System.Windows.Forms.GroupBox groupBox1; private System.Windows.Forms.RadioButton rbHorz; private System.Windows.Forms.RadioButton rbVert; private System.Windows.Forms.RadioButton rbVertComp; /// <summary> /// Required designer variable. 
/// </summary> private System.ComponentModel.Container components = null; internal DialogNewTable(DesignXmlDraw dxDraw, XmlNode container) { _Draw = dxDraw; // // Required for Windows Form Designer support // InitializeComponent(); InitValues(container); } private void InitValues(XmlNode container) { this.bOK.Enabled = false; rbHorz.Checked = true; // // Obtain the existing DataSets info // object[] datasets = _Draw.DataSetNames; if (datasets == null) return; // not much to do if no DataSets if (_Draw.IsDataRegion(container)) { string s = _Draw.GetDataSetNameValue(container); if (s == null) return; this.cbDataSets.Items.Add(s); this.cbDataSets.Enabled = false; } else this.cbDataSets.Items.AddRange(datasets); cbDataSets.SelectedIndex = 0; } internal string TableXml { get { return rbHorz.Checked? TableXmlHorz: TableXmlVert; } } private string TableXmlHorz { get { StringBuilder table = new StringBuilder("<Table>"); table.AppendFormat("<DataSetName>{0}</DataSetName>", this.cbDataSets.Text); table.Append("<NoRows>Query returned no rows!</NoRows><Style>"+ "<BorderStyle><Default>Solid</Default></BorderStyle></Style>"); StringBuilder tablecolumns = new StringBuilder("<TableColumns>"); StringBuilder headercolumns = new StringBuilder("<Header><TableRows><TableRow><Height>12 pt</Height><TableCells>"); StringBuilder detailcolumns = new StringBuilder("<Details><TableRows><TableRow><Height>12 pt</Height><TableCells>"); StringBuilder tablegroups= null; StringBuilder footergroup=null; string gname = this.cbGroupColumn.Text; if (gname != null && gname.Trim() != "") { gname = gname.Trim(); tablegroups = new StringBuilder("<TableGroups><TableGroup><Grouping><GroupExpressions><GroupExpression>"); tablegroups.AppendFormat("=Fields!{0}.Value</GroupExpression></GroupExpressions></Grouping>", gname); tablegroups.Append("<Header><TableRows><TableRow><Height>12 pt</Height><TableCells>"); footergroup = new StringBuilder("<Footer><TableRows><TableRow><Height>12 pt</Height><TableCells>"); } 
else gname = null; StringBuilder footercolumns = null; if (this.chkGrandTotals.Checked) footercolumns = new StringBuilder("<Footer><TableRows><TableRow><Height>12 pt</Height><TableCells>"); bool bHaveFooter=false; // indicates one or more columns have been checked for subtotaling foreach (string colname in this.lbTableColumns.Items) { tablecolumns.Append("<TableColumn><Width>1in</Width></TableColumn>"); headercolumns.AppendFormat("<TableCell><ReportItems><Textbox><Value>{0}</Value>"+ "<Style><TextAlign>Center</TextAlign><BorderStyle><Default>Solid</Default></BorderStyle>"+ "<FontWeight>Bold</FontWeight></Style>"+ "</Textbox></ReportItems></TableCell>", colname); string dcol; string gcol; if (gname == colname) { dcol = ""; gcol = string.Format("=Fields!{0}.Value", colname); } else { gcol = ""; dcol = string.Format("=Fields!{0}.Value", colname); } int iChecked = this.lbTableColumns.CheckedItems.IndexOf(colname); string fcol=""; if (iChecked >= 0) { bHaveFooter = true; fcol = string.Format("=Sum(Fields!{0}.Value)", colname); } if (tablegroups != null) { tablegroups.AppendFormat("<TableCell><ReportItems><Textbox>"+ "<Value>{0}</Value><CanGrow>true</CanGrow>"+ "<Style><BorderStyle><Default>Solid</Default></BorderStyle>"+ "</Style></Textbox></ReportItems></TableCell>", gcol); footergroup.AppendFormat("<TableCell><ReportItems><Textbox>"+ "<Value>{0}</Value><CanGrow>true</CanGrow>"+ "<Style><BorderStyle><Default>Solid</Default></BorderStyle>"+ "</Style></Textbox></ReportItems></TableCell>", fcol); } detailcolumns.AppendFormat("<TableCell><ReportItems><Textbox>"+ "<Value>{0}</Value><CanGrow>true</CanGrow>"+ "<Style><BorderStyle><Default>Solid</Default></BorderStyle>"+ "</Style></Textbox></ReportItems></TableCell>", dcol); if (footercolumns != null) footercolumns.AppendFormat("<TableCell><ReportItems><Textbox>"+ "<Value>{0}</Value><CanGrow>true</CanGrow>"+ "<Style><BorderStyle><Default>Solid</Default></BorderStyle>"+ "</Style></Textbox></ReportItems></TableCell>", fcol); } 
tablecolumns.Append("</TableColumns>"); table.Append(tablecolumns.ToString()); headercolumns.Append("</TableCells></TableRow></TableRows>"+ "<RepeatOnNewPage>true</RepeatOnNewPage></Header>"); table.Append(headercolumns.ToString()); detailcolumns.Append("</TableCells></TableRow></TableRows>"+ "</Details>"); table.Append(detailcolumns.ToString()); if (footercolumns != null) { footercolumns.Append("</TableCells></TableRow></TableRows>"+ "</Footer>"); table.Append(footercolumns.ToString()); } if (tablegroups != null) { tablegroups.Append("</TableCells></TableRow></TableRows>"+ "</Header>"); if (bHaveFooter) { footergroup.Append("</TableCells></TableRow></TableRows>"+ "</Footer>"); tablegroups.Append(footergroup.ToString()); } tablegroups.Append("</TableGroup></TableGroups>"); table.Append(tablegroups); } table.Append("</Table>"); return table.ToString(); } } private string TableXmlVert { get { StringBuilder table = new StringBuilder("<Table>"); table.AppendFormat("<DataSetName>{0}</DataSetName>", this.cbDataSets.Text); table.Append("<NoRows>Query returned no rows!</NoRows><Style>"+ "<BorderStyle><Default>Solid</Default></BorderStyle></Style>"); table.Append("<TableColumns><TableColumn><Width>5in</Width></TableColumn></TableColumns>"); table.Append("<Details><TableRows>"+ Environment.NewLine); foreach (string colname in this.lbTableColumns.Items) { string dcol = string.Format("Fields!{0}.Value", colname); if (this.rbVertComp.Checked) { string val = String.Format("<Value>=\"&lt;span style='color:Crimson;'&gt;{0}:&amp;nbsp;&amp;nbsp;&lt;/span&gt;\" &amp; {1}</Value>", colname, dcol); table.AppendFormat( "<TableRow><Height>12 pt</Height>"+ "<Visibility><Hidden>=Iif({1} = Nothing, true, false)</Hidden></Visibility>"+ "<TableCells><TableCell><ReportItems><Textbox>"+ "{0}"+ "<CanGrow>true</CanGrow>"+ "<Style><BorderStyle><Default>None</Default></BorderStyle>"+ "<Format>html</Format>"+ "</Style></Textbox></ReportItems></TableCell>"+ "</TableCells></TableRow>"+ 
Environment.NewLine, val, dcol); } else { table.AppendFormat( "<TableRow><Height>12 pt</Height><TableCells>"+ "<TableCell><ReportItems><Textbox>"+ "<Value>{0}</Value>"+ "<Style><BorderStyle><Default>None</Default></BorderStyle>"+ "<FontWeight>Bold</FontWeight>"+ "<Color>Crimson</Color>"+ "</Style></Textbox></ReportItems></TableCell>"+ "</TableCells></TableRow>", colname); table.AppendFormat( "<TableRow><Height>12 pt</Height><TableCells>"+ "<TableCell><ReportItems><Textbox>"+ "<Value>={0}</Value><CanGrow>true</CanGrow>"+ "<Style><BorderStyle><Default>None</Default></BorderStyle>"+ "</Style></Textbox></ReportItems></TableCell>"+ "</TableCells></TableRow>", dcol); } } table.Append("</TableRows></Details></Table>"); return table.ToString(); } } /// <summary> /// Clean up any resources being used. /// </summary> protected override void Dispose( bool disposing ) { if( disposing ) { if(components != null) { components.Dispose(); } } base.Dispose( disposing ); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. 
/// </summary>
private void InitializeComponent()
{
    this.bOK = new System.Windows.Forms.Button();
    this.bCancel = new System.Windows.Forms.Button();
    this.label1 = new System.Windows.Forms.Label();
    this.cbDataSets = new System.Windows.Forms.ComboBox();
    this.label2 = new System.Windows.Forms.Label();
    this.label3 = new System.Windows.Forms.Label();
    this.lbFields = new System.Windows.Forms.ListBox();
    this.lbTableColumns = new System.Windows.Forms.CheckedListBox();
    this.bUp = new System.Windows.Forms.Button();
    this.bDown = new System.Windows.Forms.Button();
    this.bRight = new System.Windows.Forms.Button();
    this.bAllRight = new System.Windows.Forms.Button();
    this.bLeft = new System.Windows.Forms.Button();
    this.bAllLeft = new System.Windows.Forms.Button();
    this.label4 = new System.Windows.Forms.Label();
    this.cbGroupColumn = new System.Windows.Forms.ComboBox();
    this.chkGrandTotals = new System.Windows.Forms.CheckBox();
    this.groupBox1 = new System.Windows.Forms.GroupBox();
    this.rbVert = new System.Windows.Forms.RadioButton();
    this.rbHorz = new System.Windows.Forms.RadioButton();
    this.rbVertComp = new System.Windows.Forms.RadioButton();
    this.groupBox1.SuspendLayout();
    this.SuspendLayout();
    //
    // bOK
    //
    this.bOK.Location = new System.Drawing.Point(272, 312);
    this.bOK.Name = "bOK";
    this.bOK.TabIndex = 12;
    this.bOK.Text = "OK";
    this.bOK.Click += new System.EventHandler(this.bOK_Click);
    //
    // bCancel
    //
    this.bCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
    this.bCancel.Location = new System.Drawing.Point(368, 312);
    this.bCancel.Name = "bCancel";
    this.bCancel.TabIndex = 13;
    this.bCancel.Text = "Cancel";
    //
    // label1
    //
    this.label1.Location = new System.Drawing.Point(16, 16);
    this.label1.Name = "label1";
    this.label1.Size = new System.Drawing.Size(48, 23);
    this.label1.TabIndex = 3;
    this.label1.Text = "DataSet";
    //
    // cbDataSets
    //
    this.cbDataSets.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
    this.cbDataSets.Location = new System.Drawing.Point(80, 16);
    this.cbDataSets.Name = "cbDataSets";
    this.cbDataSets.Size = new System.Drawing.Size(360, 21);
    this.cbDataSets.TabIndex = 0;
    this.cbDataSets.SelectedIndexChanged += new System.EventHandler(this.cbDataSets_SelectedIndexChanged);
    //
    // label2
    //
    this.label2.Location = new System.Drawing.Point(16, 88);
    this.label2.Name = "label2";
    this.label2.TabIndex = 9;
    this.label2.Text = "DataSet Fields";
    //
    // label3
    //
    this.label3.Location = new System.Drawing.Point(232, 88);
    this.label3.Name = "label3";
    this.label3.Size = new System.Drawing.Size(240, 23);
    this.label3.TabIndex = 10;
    this.label3.Text = "Table Columns (check totals when not Down)";
    //
    // lbFields
    //
    this.lbFields.Location = new System.Drawing.Point(16, 112);
    this.lbFields.Name = "lbFields";
    this.lbFields.SelectionMode = System.Windows.Forms.SelectionMode.MultiExtended;
    this.lbFields.Size = new System.Drawing.Size(152, 134);
    this.lbFields.TabIndex = 2;
    //
    // lbTableColumns
    //
    this.lbTableColumns.Location = new System.Drawing.Point(232, 112);
    this.lbTableColumns.Name = "lbTableColumns";
    this.lbTableColumns.Size = new System.Drawing.Size(152, 139);
    this.lbTableColumns.TabIndex = 7;
    //
    // bUp
    //
    this.bUp.Location = new System.Drawing.Point(392, 120);
    this.bUp.Name = "bUp";
    this.bUp.Size = new System.Drawing.Size(48, 24);
    this.bUp.TabIndex = 8;
    this.bUp.Text = "Up";
    this.bUp.Click += new System.EventHandler(this.bUp_Click);
    //
    // bDown
    //
    this.bDown.Location = new System.Drawing.Point(392, 152);
    this.bDown.Name = "bDown";
    this.bDown.Size = new System.Drawing.Size(48, 24);
    this.bDown.TabIndex = 9;
    this.bDown.Text = "Down";
    this.bDown.Click += new System.EventHandler(this.bDown_Click);
    //
    // bRight
    //
    this.bRight.Location = new System.Drawing.Point(184, 120);
    this.bRight.Name = "bRight";
    this.bRight.Size = new System.Drawing.Size(32, 24);
    this.bRight.TabIndex = 3;
    this.bRight.Text = ">";
    this.bRight.Click += new System.EventHandler(this.bRight_Click);
    //
    // bAllRight
    //
    this.bAllRight.Location = new System.Drawing.Point(184, 152);
    this.bAllRight.Name = "bAllRight";
    this.bAllRight.Size = new System.Drawing.Size(32, 24);
    this.bAllRight.TabIndex = 4;
    this.bAllRight.Text = ">>";
    this.bAllRight.Click += new System.EventHandler(this.bAllRight_Click);
    //
    // bLeft
    //
    this.bLeft.Location = new System.Drawing.Point(184, 184);
    this.bLeft.Name = "bLeft";
    this.bLeft.Size = new System.Drawing.Size(32, 24);
    this.bLeft.TabIndex = 5;
    this.bLeft.Text = "<";
    this.bLeft.Click += new System.EventHandler(this.bLeft_Click);
    //
    // bAllLeft
    //
    this.bAllLeft.Location = new System.Drawing.Point(184, 216);
    this.bAllLeft.Name = "bAllLeft";
    this.bAllLeft.Size = new System.Drawing.Size(32, 24);
    this.bAllLeft.TabIndex = 6;
    this.bAllLeft.Text = "<<";
    this.bAllLeft.Click += new System.EventHandler(this.bAllLeft_Click);
    //
    // label4
    //
    this.label4.Location = new System.Drawing.Point(16, 264);
    this.label4.Name = "label4";
    this.label4.Size = new System.Drawing.Size(216, 23);
    this.label4.TabIndex = 1;
    this.label4.Text = "Pick a column to group (create hierarchy)";
    //
    // cbGroupColumn
    //
    this.cbGroupColumn.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
    this.cbGroupColumn.Location = new System.Drawing.Point(16, 280);
    this.cbGroupColumn.Name = "cbGroupColumn";
    this.cbGroupColumn.Size = new System.Drawing.Size(168, 21);
    this.cbGroupColumn.TabIndex = 10;
    this.cbGroupColumn.Enter += new System.EventHandler(this.cbGroupColumn_Enter);
    //
    // chkGrandTotals
    //
    this.chkGrandTotals.Location = new System.Drawing.Point(232, 280);
    this.chkGrandTotals.Name = "chkGrandTotals";
    this.chkGrandTotals.Size = new System.Drawing.Size(168, 16);
    this.chkGrandTotals.TabIndex = 11;
    this.chkGrandTotals.Text = "Calculate Grand Totals";
    //
    // groupBox1
    //
    this.groupBox1.Controls.Add(this.rbVertComp);
    this.groupBox1.Controls.Add(this.rbVert);
    this.groupBox1.Controls.Add(this.rbHorz);
    this.groupBox1.Location = new System.Drawing.Point(16, 40);
    this.groupBox1.Name = "groupBox1";
    this.groupBox1.Size = new System.Drawing.Size(424, 40);
    this.groupBox1.TabIndex = 1;
    this.groupBox1.TabStop = false;
    this.groupBox1.Text = "Arrange Fields";
    //
    // rbVert
    //
    this.rbVert.Location = new System.Drawing.Point(160, 16);
    this.rbVert.Name = "rbVert";
    this.rbVert.Size = new System.Drawing.Size(120, 16);
    this.rbVert.TabIndex = 1;
    this.rbVert.Text = "Down (row per field)";
    //
    // rbHorz
    //
    this.rbHorz.Location = new System.Drawing.Point(8, 16);
    this.rbHorz.Name = "rbHorz";
    this.rbHorz.Size = new System.Drawing.Size(160, 16);
    this.rbHorz.TabIndex = 0;
    this.rbHorz.Text = "Across (standard columns)";
    this.rbHorz.CheckedChanged += new System.EventHandler(this.rbHorz_CheckedChanged);
    //
    // rbVertComp
    //
    this.rbVertComp.Location = new System.Drawing.Point(296, 16);
    this.rbVertComp.Name = "rbVertComp";
    this.rbVertComp.Size = new System.Drawing.Size(112, 16);
    this.rbVertComp.TabIndex = 2;
    this.rbVertComp.Text = "Down (compress)";
    //
    // DialogNewTable
    //
    this.AcceptButton = this.bOK;
    this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
    this.CancelButton = this.bCancel;
    this.ClientSize = new System.Drawing.Size(456, 336);
    this.Controls.Add(this.groupBox1);
    this.Controls.Add(this.chkGrandTotals);
    this.Controls.Add(this.cbGroupColumn);
    this.Controls.Add(this.label4);
    this.Controls.Add(this.bAllLeft);
    this.Controls.Add(this.bLeft);
    this.Controls.Add(this.bAllRight);
    this.Controls.Add(this.bRight);
    this.Controls.Add(this.bDown);
    this.Controls.Add(this.bUp);
    this.Controls.Add(this.lbTableColumns);
    this.Controls.Add(this.lbFields);
    this.Controls.Add(this.label3);
    this.Controls.Add(this.label2);
    this.Controls.Add(this.cbDataSets);
    this.Controls.Add(this.label1);
    this.Controls.Add(this.bCancel);
    this.Controls.Add(this.bOK);
    this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
    this.MaximizeBox = false;
    this.MinimizeBox = false;
    this.Name = "DialogNewTable";
    this.ShowInTaskbar = false;
    this.SizeGripStyle = System.Windows.Forms.SizeGripStyle.Hide;
    this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
    this.Text = "New Table";
    this.groupBox1.ResumeLayout(false);
    this.ResumeLayout(false);
}
#endregion

/// <summary>Applies the dialog state. Currently empty — OK handling happens in bOK_Click.</summary>
public void Apply()
{
    //
}

/// <summary>OK button: apply and close the dialog with DialogResult.OK.</summary>
private void bOK_Click(object sender, System.EventArgs e)
{
    // apply the result
    Apply();
    DialogResult = DialogResult.OK;
}

/// <summary>
/// DataSet selection changed: reset both field lists, disable OK (no columns chosen yet),
/// and repopulate the available fields for the newly selected dataset.
/// </summary>
private void cbDataSets_SelectedIndexChanged(object sender, System.EventArgs e)
{
    this.lbTableColumns.Items.Clear();
    bOK.Enabled = false;
    this.lbFields.Items.Clear();
    string [] fields = _Draw.GetFields(cbDataSets.Text, false);
    if (fields != null)
        lbFields.Items.AddRange(fields);
}

/// <summary>Move the selected fields from the available list into the table-columns list.</summary>
private void bRight_Click(object sender, System.EventArgs e)
{
    ListBox.SelectedIndexCollection sic = lbFields.SelectedIndices;
    int count=sic.Count;
    foreach (int i in sic)
    {
        string fname = (string) lbFields.Items[i];
        lbTableColumns.Items.Add(fname);
    }
    // Need to remove backwards
    // (removing from the front first would shift the remaining selected indices)
    ArrayList ar = new ArrayList(sic);
    ar.Reverse();
    foreach (int i in ar)
    {
        lbFields.Items.RemoveAt(i);
    }
    bOK.Enabled = lbTableColumns.Items.Count > 0;
    if (count > 0 && lbFields.Items.Count > 0)
        lbFields.SelectedIndex = 0;
}

/// <summary>
/// Move the selected table columns back into the available-fields list;
/// clears the group column if it was one of the moved fields.
/// </summary>
private void bLeft_Click(object sender, System.EventArgs e)
{
    ICollection sic = lbTableColumns.SelectedIndices;
    int count = sic.Count;
    foreach (int i in sic)
    {
        string fname = (string) lbTableColumns.Items[i];
        lbFields.Items.Add(fname);
        if (fname == this.cbGroupColumn.Text)
            this.cbGroupColumn.Text = "";
    }
    // Need to remove backwards
    // (same index-shift concern as in bRight_Click)
    ArrayList ar = new ArrayList(sic);
    ar.Reverse();
    foreach (int i in ar)
    {
        lbTableColumns.Items.RemoveAt(i);
    }
    bOK.Enabled = lbTableColumns.Items.Count > 0;
    if (count > 0 && lbTableColumns.Items.Count > 0)
        lbTableColumns.SelectedIndex = 0;
}

/// <summary>Move every available field into the table-columns list.</summary>
private void bAllRight_Click(object sender, System.EventArgs e)
{
    foreach (object fname in lbFields.Items)
    {
        lbTableColumns.Items.Add(fname);
    }
    lbFields.Items.Clear();
    bOK.Enabled = lbTableColumns.Items.Count > 0;
}

/// <summary>Move every table column back to the available-fields list and reset grouping.</summary>
private void bAllLeft_Click(object sender, System.EventArgs e)
{
    foreach (object fname in lbTableColumns.Items)
    {
lbFields.Items.Add(fname); } lbTableColumns.Items.Clear(); this.cbGroupColumn.Text = ""; bOK.Enabled = false; } private void bUp_Click(object sender, System.EventArgs e) { int index = lbTableColumns.SelectedIndex; if (index <= 0) return; string prename = (string) lbTableColumns.Items[index-1]; lbTableColumns.Items.RemoveAt(index-1); lbTableColumns.Items.Insert(index, prename); } private void bDown_Click(object sender, System.EventArgs e) { int index = lbTableColumns.SelectedIndex; if (index < 0 || index + 1 == lbTableColumns.Items.Count) return; string postname = (string) lbTableColumns.Items[index+1]; lbTableColumns.Items.RemoveAt(index+1); lbTableColumns.Items.Insert(index, postname); } private void cbGroupColumn_Enter(object sender, System.EventArgs e) { cbGroupColumn.Items.Clear(); cbGroupColumn.Items.Add(""); if (lbTableColumns.Items.Count > 0) { object[] names = new object[lbTableColumns.Items.Count]; lbTableColumns.Items.CopyTo(names, 0); cbGroupColumn.Items.AddRange(names); } } private void rbHorz_CheckedChanged(object sender, System.EventArgs e) { // only standard column report supports grouping and totals this.cbGroupColumn.Enabled = this.chkGrandTotals.Enabled = rbHorz.Checked; } } }
using System;
using System.Collections.Generic;
using System.Compiler;
using System.Diagnostics.Contracts;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Contracts.Foxtrot.Utils;

namespace Microsoft.Contracts.Foxtrot
{
    /// <summary>
    /// Visitor that creates special closure type for async postconditions.
    /// </summary>
    /// <remarks>
    ///
    /// Current class generates an AsyncClosure with CheckMethod and CheckException methods.
    ///
    /// The following transformation is applied to the original async method:
    ///
    /// // This is the original task, generated by the AsyncTaskMethodBuilder
    /// var originalTask = t_builder.Task;
    ///
    /// var closure = new AsyncClosure();
    /// var task2 = originalTask.ContinueWith(closure.CheckPost).Unwrap();
    /// return task2;
    ///
    /// There are 2 cases:
    /// 1) Task has no return value.
    ///    In this case only EnsuresOnThrow could be used, and we emit:
    ///    Task CheckMethod(Task t)
    ///    {
    ///        if (t.Status == TaskStatus.Faulted)
    ///        {
    ///            // CheckException will throw if EnsuresOnThrow is not held
    ///            CheckException(t.Exception);
    ///        }
    ///
    ///        return t;
    ///    }
    ///
    /// 2) Task(T) returns a T value.
    ///    In this case both EnsuresOnThrow and Contract.Ensures(Contract.Result) could be used.
    ///    We emit:
    ///
    ///    Task&lt;int> CheckMethod(Task&lt;int> t)
    ///    {
    ///        if (t.Status == TaskStatus.Faulted)
    ///        {
    ///            // CheckException will throw if EnsuresOnThrow is not held
    ///            CheckException(t.Exception);
    ///        }
    ///
    ///        if (t.Status == TaskStatus.RanToCompletion)
    ///        {
    ///            // Check ensures
    ///        }
    ///    }
    /// </remarks>
    internal class EmitAsyncClosure : StandardVisitor
    {
        /// <summary>
        /// Class that maps generic arguments of the enclosed class/method to the generic arguments of the closure.
        /// </summary>
        /// <remarks>
        /// The problem.
        /// The original implementation of Code Contracts didn't support async postconditions in generics.
        /// Here is why:
        /// Suppose we have the following function (in class <c>Foo</c>):
        /// <code><![CDATA[
        /// public static Task<T> FooAsync() where T: class
        /// {
        ///     Contract.Ensures(Contract.Result<T>() != null);
        /// }
        /// ]]></code>
        /// In this case, ccrewrite will generate an async closure class called <c>Foo.AsyncContractClosure_0&lt;T&gt;</c>
        /// with the following structure:
        /// <code><![CDATA[
        /// [CompilerGenerated]
        /// private class <Foo>AsyncContractClosure_0<T> where T : class
        /// {
        ///     public Task<T> CheckPost(Task<T> task)
        ///     {
        ///         TaskStatus status = task.Status;
        ///         if (status == TaskStatus.RanToCompletion)
        ///         {
        ///             RewriterMethods.Ensures(task.Result != null, null, "Contract.Result<T>() != null");
        ///         }
        ///         return task;
        ///     }
        /// }
        /// ]]>
        /// </code>
        /// The code looks good, but the IL could be invalid (without the trick that this class provides).
        /// The postcondition of the method in our case is declared in the generic method (in <code>FooAsync</code>)
        /// but ccrewrite moves it into a non-generic method (<code>CheckPost</code>) of the generic class (closure).
        ///
        /// But at the IL level there are different instructions for referencing method generic arguments and type generic arguments.
        ///
        /// After changing <code>Contract.Result</code> to <code>task.Result</code> and moving the postcondition to the
        /// <code>CheckPost</code> method, the following IL would be generated:
        ///
        /// <code> <![CDATA[
        /// IL_0011: call instance !0 class [mscorlib]System.Threading.Tasks.Task`1<!T>::get_Result()
        /// IL_0016: box !!0 // <-- here is our problem!
        /// ]]>
        /// </code>
        ///
        /// This means that the method <code>CheckPost</code> would contain a reference to a generic method argument of the
        /// original method.
        ///
        /// The goal of this class is to store a mapping between enclosing generic types and closure generic types.
        /// </remarks>
        private class GenericTypeMapper
        {
            // Simple immutable pair: one enclosing-scope type parameter and its closure counterpart.
            class TypeNodePair
            {
                public TypeNodePair(TypeNode enclosingGenericType, TypeNode closureGenericType)
                {
                    EnclosingGenericType = enclosingGenericType;
                    ClosureGenericType = closureGenericType;
                }

                public TypeNode EnclosingGenericType { get; private set; }
                public TypeNode ClosureGenericType { get; private set; }
            }

            // Mapping between enclosing generic type and closure generic type.
            // This is a simple list not a dictionary, because number of generic arguments is very small.
            // So linear complexity will not harm performance.
            readonly List<TypeNodePair> typeParametersMapping = new List<TypeNodePair>();

            // True when no mappings have been registered (non-generic enclosing scope).
            public bool IsEmpty
            {
                get { return typeParametersMapping.Count == 0; }
            }

            // Registers one enclosing-type-parameter -> closure-type-parameter pair.
            public void AddMapping(TypeNode enclosingGenericType, TypeNode closureGenericType)
            {
                typeParametersMapping.Add(new TypeNodePair(enclosingGenericType, closureGenericType));
            }

            /// <summary>
            /// Returns associated generic type of the closure class by enclosing generic type (for instance, by
            /// generic type of the enclosing generic method that uses current closure).
            /// </summary>
            /// <remarks>
            /// Function returns the same argument if the matching argument does not exist.
            /// </remarks>
            public TypeNode GetClosureTypeParameterByEnclosingTypeParameter(TypeNode enclosingType)
            {
                if (enclosingType == null)
                {
                    return null;
                }

                // NOTE(review): 'gen' is computed below but never used by the lookup that follows —
                // looks like leftover code; verify intent before removing.
                var gen = enclosingType;
                if (gen.ConsolidatedTemplateParameters != null && gen.ConsolidatedTemplateParameters.Count != 0)
                {
                    gen = gen.ConsolidatedTemplateParameters[0];
                }

                // Linear scan (see field comment); fall back to the input when unmapped.
                var candidate = typeParametersMapping.FirstOrDefault(t => t.EnclosingGenericType == enclosingType);
                return candidate != null ? candidate.ClosureGenericType : enclosingType;
            }

            /// <summary>
            /// Returns associated generic type of the closure class by enclosing generic type (for instance, by
            /// generic type of the enclosing generic method that uses current closure).
            /// </summary>
            /// <remarks>
            /// Function returns the same argument if the matching argument does not exist.
            /// </remarks>
            public TypeNode GetEnclosingTypeParameterByClosureTypeParameter(TypeNode closureType)
            {
                if (closureType == null)
                {
                    return null;
                }

                // Reverse lookup of AddMapping; fall back to the input when unmapped.
                var candidate = typeParametersMapping.FirstOrDefault(t => t.ClosureGenericType == closureType);
                return candidate != null ? candidate.EnclosingGenericType : closureType;
            }
        }

        // This assembly should be in this class but not in the SystemTypes from System.CompilerCC.
        // Moving this type there will lead to test failures and assembly resolution errors.
        private static readonly AssemblyNode/*!*/ SystemCoreAssembly = SystemTypes.GetSystemCoreAssembly(false, true);

        // System.Threading.Tasks.TaskExtensions (provides Unwrap); may be null if System.Core is unavailable.
        private static TypeNode TaskExtensionsTypeNode = HelperMethods.FindType(
            SystemCoreAssembly, Identifier.For("System.Threading.Tasks"), Identifier.For("TaskExtensions"));

        // Names of the two methods emitted into the generated closure class.
        private static readonly Identifier CheckExceptionMethodId = Identifier.For("CheckException");
        private static readonly Identifier CheckMethodId = Identifier.For("CheckPost");

        // Lazily resolved framework types used while emitting the closure body.
        private readonly Cache<TypeNode> aggregateExceptionType;
        private readonly Cache<TypeNode> func2Type;

        private readonly Dictionary<Local, MemberBinding> closureLocals = new Dictionary<Local, MemberBinding>();
        private readonly List<SourceContext> contractResultCapturedInStaticContext = new List<SourceContext>();
        private readonly Rewriter rewriter;
        private readonly TypeNode declaringType;
        private readonly Class closureClass;
        private readonly Class closureClassInstance;
        private readonly Specializer /*?*/ forwarder;
        private readonly Local closureLocal;

        // Fields for the CheckMethod generation
        private Method checkPostMethod;
        private StatementList checkPostBody;

        // Holds a copy of CheckMethod argument
        private Local originalResultLocal;

        private Parameter checkMethodTaskParameter;
        private readonly TypeNode checkMethodTaskType;

        private readonly GenericTypeMapper genericTypeMapper = new GenericTypeMapper();

        /// <summary>
        /// Creates the async closure class for <paramref name="from"/> (a task-returning method),
        /// adds it to the method's declaring type, and prepares the CheckPost method, constructor
        /// and the closure-initializer block used at the call site.
        /// </summary>
        public EmitAsyncClosure(Method from, Rewriter rewriter)
        {
            Contract.Requires(from != null);
            Contract.Requires(from.DeclaringType != null);
            Contract.Requires(rewriter != null);

            if (TaskExtensionsTypeNode == null)
            {
                // NOTE(review): the message says "TaskExceptions" while the type looked up above is
                // "TaskExtensions" — likely a typo in the message text (left unchanged here).
                throw new InvalidOperationException(
                    "Can't generate async closure because System.Threading.Tasks.TaskExceptions class is unavailable.");
            }

            this.rewriter = rewriter;
            this.declaringType = from.DeclaringType;

            // Closure is named "<MethodName>AsyncContractClosure", uniquified within the declaring type.
            var closureName = HelperMethods.NextUnusedMemberName(declaringType, "<" + from.Name.Name + ">AsyncContractClosure");

            this.closureClass = new Class(
                declaringModule: declaringType.DeclaringModule,
                declaringType: declaringType,
                attributes: null,
                flags: TypeFlags.NestedPrivate,
                Namespace: null,
                name: Identifier.For(closureName),
                baseClass: SystemTypes.Object,
                interfaces: null,
                members: null);

            declaringType.Members.Add(this.closureClass);
            RewriteHelper.TryAddCompilerGeneratedAttribute(this.closureClass);

            var taskType = from.ReturnType;

            // Lazy lookups: only resolved if the emitted code actually needs them.
            this.aggregateExceptionType = new Cache<TypeNode>(() =>
                HelperMethods.FindType(rewriter.AssemblyBeingRewritten, StandardIds.System, Identifier.For("AggregateException")));

            this.func2Type = new Cache<TypeNode>(() =>
                HelperMethods.FindType(SystemTypes.SystemAssembly, StandardIds.System, Identifier.For("Func`2")));

            // Should distinguish between generic enclosing method and non-generic method in enclosing type.
            // In both cases generated closure should be generic.
            var enclosingTemplateParameters = GetGenericTypesFrom(from);

            if (!enclosingTemplateParameters.IsNullOrEmpty())
            {
                // Mirror the enclosing generic parameters onto the closure class and
                // set up a Specializer that rewrites references accordingly.
                this.closureClass.TemplateParameters = CreateTemplateParameters(closureClass, enclosingTemplateParameters, declaringType);
                this.closureClass.IsGeneric = true;
                this.closureClass.EnsureMangledName();

                this.forwarder = new Specializer(
                    targetModule: this.declaringType.DeclaringModule,
                    pars: enclosingTemplateParameters,
                    args: this.closureClass.TemplateParameters);

                this.forwarder.VisitTypeParameterList(this.closureClass.TemplateParameters);

                taskType = this.forwarder.VisitTypeReference(taskType);

                // Record enclosing-parameter -> closure-parameter pairs for later IL fix-ups
                // (see GenericTypeMapper remarks).
                for (int i = 0; i < enclosingTemplateParameters.Count; i++)
                {
                    this.genericTypeMapper.AddMapping(enclosingTemplateParameters[i], closureClass.TemplateParameters[i]);
                }
            }
            else
            {
                this.closureClassInstance = this.closureClass;
            }

            this.checkMethodTaskType = taskType;

            // Emitting CheckPost method declaration
            EmitCheckPostMethodCore(checkMethodTaskType);

            // Generate closure constructor.
            // Constructor should be generated AFTER visiting type parameters in
            // the previous block of code. Otherwise current class would not have
            // appropriate number of generic arguments!
            var ctor = CreateConstructor(closureClass);
            closureClass.Members.Add(ctor);

            // Now that we added the ctor and the check method, let's instantiate the closure class if necessary
            if (this.closureClassInstance == null)
            {
                var consArgs = new TypeNodeList();
                var args = new TypeNodeList();

                // Consolidated arguments start with the declaring type's own generic parameters.
                var parentCount = this.closureClass.DeclaringType.ConsolidatedTemplateParameters == null
                    ? 0
                    : this.closureClass.DeclaringType.ConsolidatedTemplateParameters.Count;

                for (int i = 0; i < parentCount; i++)
                {
                    consArgs.Add(this.closureClass.DeclaringType.ConsolidatedTemplateParameters[i]);
                }

                if (!enclosingTemplateParameters.IsNullOrEmpty())
                {
                    for (int i = 0; i < enclosingTemplateParameters.Count; i++)
                    {
                        consArgs.Add(enclosingTemplateParameters[i]);
                        args.Add(enclosingTemplateParameters[i]);
                    }
                }

                this.closureClassInstance = (Class)
                    this.closureClass.GetConsolidatedTemplateInstance(this.rewriter.AssemblyBeingRewritten,
                        closureClass.DeclaringType, closureClass.DeclaringType, args, consArgs);
            }

            // create closure initializer for context method
            this.closureLocal = new Local(this.ClosureClass);
            this.ClosureInitializer = new Block(new StatementList());

            // Generate constructor call that initializes closure instance
            this.ClosureInitializer.Statements.Add(
                new AssignmentStatement(
                    this.closureLocal,
                    new Construct(new MemberBinding(null, this.Ctor), new ExpressionList())));
        }

        /// <summary>
        /// Add postconditions for the task-based methods.
        /// </summary>
        /// <remarks>
        /// Method inserts all required logic to the <paramref name="returnBlock"/> calling
        /// ContinueWith method on the <paramref name="taskBasedResult"/>.
        /// </remarks>
        public void AddAsyncPostconditions(List<Ensures> asyncPostconditions, Block returnBlock, Local taskBasedResult)
        {
            Contract.Requires(asyncPostconditions != null);
            Contract.Requires(returnBlock != null);
            Contract.Requires(taskBasedResult != null);
            Contract.Requires(asyncPostconditions.Count > 0);

            // Async postconditions are implemented using a custom closure class
            // with CheckPost method that checks postconditions when the task
            // is finished.
            // Add Async postconditions to the AsyncClosure
            AddAsyncPost(asyncPostconditions);

            // Add task.ContinueWith().Unwrap(); method call to returnBlock
            AddContinueWithMethodToReturnBlock(returnBlock, taskBasedResult);

            ChangeThisReferencesToClosureLocals();
        }

        // Rewrites 'this'-field references inside the generated CheckPost body so they
        // resolve against the closure instance instead of the original method's 'this'.
        private void ChangeThisReferencesToClosureLocals()
        {
            var fieldRewriter = new FieldRewriter(this);
            fieldRewriter.Visit(this.checkPostMethod);
        }

        /// <summary>
        /// Returns a list of source spans where non-capturing lambdas were used.
        /// </summary>
        public IList<SourceContext> ContractResultCapturedInStaticContext
        {
            get { return contractResultCapturedInStaticContext; }
        }

        /// <summary>
        /// Instance used in calling method context
        /// </summary>
        public Class ClosureClass
        {
            get { return this.closureClassInstance; }
        }

        /// <summary>
        /// Local instance of the async closure class
        /// </summary>
        public Local ClosureLocal
        {
            get { return this.closureLocal; }
        }

        /// <summary>
        /// Block of code, responsible for closure instance initialization
        /// </summary>
        public Block ClosureInitializer { get; private set; }

        // The closure's instance constructor (first .ctor member of the instantiated closure).
        private InstanceInitializer Ctor
        {
            get { return (InstanceInitializer)this.closureClassInstance.GetMembersNamed(StandardIds.Ctor)[0]; }
        }

        // Generic parameters of the method itself, or — for a non-generic method in a
        // generic type — the nearest enclosing type's parameters; null if neither is generic.
        private static TypeNodeList GetGenericTypesFrom(Method method)
        {
            if (method.IsGeneric)
            {
                return method.TemplateParameters;
            }

            if (method.DeclaringType.IsGeneric)
            {
                return GetFirstNonEmptyGenericListWalkingUpDeclaringTypes(method.DeclaringType);
            }

            return null;
        }

        // Walks outward through declaring types until a non-empty template-parameter list is found.
        private static TypeNodeList GetFirstNonEmptyGenericListWalkingUpDeclaringTypes(TypeNode node)
        {
            if (node == null)
            {
                return null;
            }

            if (node.TemplateParameters != null && node.TemplateParameters.Count != 0)
            {
                return node.TemplateParameters;
            }

            return GetFirstNonEmptyGenericListWalkingUpDeclaringTypes(node.DeclaringType);
        }

        // Clones each input template parameter onto the closure class, offsetting indexes by
        // the declaring type's consolidated parameter count.
        [Pure]
        private static TypeNodeList CreateTemplateParameters(Class closureClass, TypeNodeList inputTemplateParameters, TypeNode declaringType)
        {
            Contract.Requires(closureClass != null);
            Contract.Requires(inputTemplateParameters != null);
            Contract.Requires(declaringType != null);

            var dup = new Duplicator(declaringType.DeclaringModule, declaringType);
            var templateParameters = new TypeNodeList();
            var parentCount = declaringType.ConsolidatedTemplateParameters.CountOrDefault();

            for (int i = 0; i < inputTemplateParameters.Count; i++)
            {
                var tp = HelperMethods.NewEqualTypeParameter(
                    dup, (ITypeParameter)inputTemplateParameters[i], closureClass, parentCount + i);
                templateParameters.Add(tp);
            }

            return templateParameters;
        }

        // Appends "result = result.ContinueWith(closure.CheckPost, ExecuteSynchronously).Unwrap()"
        // to the method's return block.
        private void AddContinueWithMethodToReturnBlock(Block returnBlock, Local taskBasedResult)
        {
            Contract.Requires(returnBlock != null);
            Contract.Requires(taskBasedResult != null);

            var taskType = taskBasedResult.Type;

            // To find appropriate ContinueWith method task type should be unwrapped
            var taskTemplate = HelperMethods.Unspecialize(taskType);
            var continueWithMethodLocal = GetContinueWithMethod(closureClass, taskTemplate, taskType);

            // TODO: not sure that this is possible situation when continueWith method is null.
            // Maybe Contract.Assert(continueWithMethod != null) should be used instead!
            if (continueWithMethodLocal != null)
            {
                // We need to create delegate instance that should be passed to ContinueWith method
                var funcType = continueWithMethodLocal.Parameters[0].Type;
                var funcCtor = funcType.GetConstructor(SystemTypes.Object, SystemTypes.IntPtr);
                Contract.Assume(funcCtor != null);

                var funcLocal = new Local(funcCtor.DeclaringType);

                // Creating a method pointer to the AsyncClosure.CheckMethod
                // In this case we can't use checkMethod field.
                // Getting CheckMethod from closureClassInstance will provide correct (potentially updated)
                // generic arguments for enclosing type.
                var checkMethodFromClosureInstance = (Method) closureClassInstance.GetMembersNamed(CheckMethodId)[0];
                Contract.Assume(checkMethodFromClosureInstance != null);

                var ldftn = new UnaryExpression(
                    new MemberBinding(null, checkMethodFromClosureInstance),
                    NodeType.Ldftn, CoreSystemTypes.IntPtr);

                // Creating delegate that would be used as a continuation for original task
                returnBlock.Statements.Add(
                    new AssignmentStatement(funcLocal,
                        new Construct(new MemberBinding(null, funcCtor), new ExpressionList(closureLocal, ldftn))));

                // Wrapping continuation into TaskExtensions.Unwrap method
                // (this helps to preserve original exception and original result of the task,
                // but allows to throw postconditions violations).

                // Generating: result.ContinueWith(closure.CheckPost);
                var taskContinuationOption = new Literal(TaskContinuationOptions.ExecuteSynchronously);
                var continueWithCall = new MethodCall(
                    new MemberBinding(taskBasedResult, continueWithMethodLocal),
                    new ExpressionList(funcLocal, taskContinuationOption));

                // Generating: TaskExtensions.Unwrap(result.ContinueWith(...))
                var unwrapMethod = GetUnwrapMethod(checkMethodTaskType);
                var unwrapCall = new MethodCall(
                    new MemberBinding(null, unwrapMethod),
                    new ExpressionList(continueWithCall));

                // Generating: result = Unwrap(...);
                var resultAssignment = new AssignmentStatement(taskBasedResult, unwrapCall);
                returnBlock.Statements.Add(resultAssignment);
            }
        }

        /// <summary>
        /// Method generates core part of the CheckMethod
        /// </summary>
        private void EmitCheckPostMethodCore(TypeNode taskType)
        {
            Contract.Requires(taskType != null);

            this.checkMethodTaskParameter = new Parameter(Identifier.For("task"), taskType);

            // TODO ST: can I switch to new Local(taskType.Type)?!? In this case this initialization
            // could be moved outside this method
            this.originalResultLocal = new Local(new Identifier("taskLocal"), checkMethodTaskParameter.Type);

            // Generate: public Task<T> CheckPost(Task<T> task) where T is taskType or
            // public Task CheckPost(Task task) for non-generic task.
            checkPostMethod = new Method(
                declaringType: this.closureClass,
                attributes: null,
                name: CheckMethodId,
                parameters: new ParameterList(checkMethodTaskParameter),
                // was: taskType.TemplateArguments[0] when hasResult was true and SystemTypes.Void otherwise
                returnType: taskType,
                body: null);

            checkPostMethod.CallingConvention = CallingConventionFlags.HasThis;
            checkPostMethod.Flags |= MethodFlags.Public;

            this.checkPostBody = new StatementList();

            this.closureClass.Members.Add(this.checkPostMethod);

            if (taskType.IsGeneric)
            {
                // Assign taskParameter to originalResultLocal because
                // this field is used in a postcondition
                checkPostBody.Add(new AssignmentStatement(this.originalResultLocal, checkMethodTaskParameter));
            }
        }

        // Builds the body of CheckPost: rewrites each Ensures to read the task's result,
        // guards normal postconditions on RanToCompletion and exceptional ones on Faulted.
        [ContractVerification(false)]
        private void AddAsyncPost(List<Ensures> asyncPostconditions)
        {
            var origBody = new Block(this.checkPostBody);
            origBody.HasLocals = true;

            var newBodyBlock = new Block(new StatementList());
            newBodyBlock.HasLocals = true;

            var methodBody = new StatementList();
            var methodBodyBlock = new Block(methodBody);
            methodBodyBlock.HasLocals = true;

            checkPostMethod.Body = methodBodyBlock;
            methodBody.Add(newBodyBlock);

            Block newExitBlock = new Block();
            methodBody.Add(newExitBlock);

            // Map closure locals to fields and initialize closure fields
            foreach (Ensures e in asyncPostconditions)
            {
                if (e == null) continue;

                this.Visit(e);

                if (this.forwarder != null)
                {
                    this.forwarder.Visit(e);
                }

                ReplaceResult repResult = new ReplaceResult(
                    this.checkPostMethod, this.originalResultLocal,
                    this.rewriter.AssemblyBeingRewritten);

                repResult.Visit(e);

                if (repResult.ContractResultWasCapturedInStaticContext)
                {
                    this.contractResultCapturedInStaticContext.Add(e.Assertion.SourceContext);
                }

                // now need to initialize closure result fields
                foreach (var target in repResult.NecessaryResultInitializationAsync(this.closureLocals))
                {
                    // note: target here
                    methodBody.Add(new AssignmentStatement(target, this.originalResultLocal));
                }
            }

            // Emit normal postconditions
            SourceContext? lastEnsuresSourceContext = null;
            var ensuresChecks = new StatementList();

            Method contractEnsuresMethod = this.rewriter.RuntimeContracts.EnsuresMethod;

            // For generic types need to 'fix' generic type parameters that are used in the closure method.
            // See comment to the GenericTypeMapper for more details.
            TypeParameterFixerVisitor fixer = null;
            if (!this.genericTypeMapper.IsEmpty)
            {
                fixer = new TypeParameterFixerVisitor(genericTypeMapper);
            }

            foreach (Ensures e in GetTaskResultBasedEnsures(asyncPostconditions))
            {
                // TODO: Not sure that 'break' is enough! It seems that this is possible
                // only when something is broken, because normal postconditions
                // are using Contract.Result<T>() and this is possible only for
                // generic tasks.
                if (IsVoidTask()) break; // something is wrong in the original contract

                lastEnsuresSourceContext = e.SourceContext;

                //
                // Call Contract.RewriterEnsures
                //
                ExpressionList args = new ExpressionList();

                if (fixer != null)
                {
                    fixer.Visit(e.PostCondition);
                }

                args.Add(e.PostCondition);
                args.Add(e.UserMessage ?? Literal.Null);
                args.Add(e.SourceConditionText ?? Literal.Null);

                ensuresChecks.Add(
                    new ExpressionStatement(
                        new MethodCall(
                            new MemberBinding(null, contractEnsuresMethod),
                            args, NodeType.Call, SystemTypes.Void),
                        e.SourceContext));
            }

            this.rewriter.CleanUpCodeCoverage.VisitStatementList(ensuresChecks);

            //
            // Normal postconditions
            //

            // Wrapping normal ensures into following if statement
            // if (task.Status == TaskStatus.RanToCompletion)
            // { postcondition check }
            //
            // Implementation of this stuff is a bit tricky because if-statements
            // are inverse in the IL.
            // Basically, we need to generate following code:
            // if (!(task.Status == Task.Status.RanToCompletion))
            //    goto EndOfNormalPostcondition;
            // {postcondition check}
            // EndOfNormalPostcondition:
            // {other Code}

            // Marker for EndOfNormalPostcondition
            Block endOfNormalPostcondition = new Block();

            // Generate: if (task.Status != RanToCompletion) goto endOfNormalPostcondition;
            StatementList checkStatusStatements = CreateIfTaskResultIsEqualsTo(
                checkMethodTaskParameter, TaskStatus.RanToCompletion, endOfNormalPostcondition);

            methodBodyBlock.Statements.Add(new Block(checkStatusStatements));

            // Emit a check for __ContractsRuntime.insideContractEvaluation around Ensures
            // TODO ST: there is no sense to add recursion check in async postcondition that can be checked in different thread!
            methodBodyBlock.Statements.Add(new Block(ensuresChecks));

            // Emit a check for __ContractsRuntime.insideContractEvaluation around Ensures
            //this.rewriter.EmitRecursionGuardAroundChecks(this.checkPostMethod, methodBodyBlock, ensuresChecks);

            // Now, normal postconditions are written to the method body.
            // We need to add endOfNormalPostcondition block as a marker.
            methodBodyBlock.Statements.Add(endOfNormalPostcondition);

            //
            // Exceptional postconditions
            //
            var exceptionalPostconditions = GetExceptionalEnsures(asyncPostconditions).ToList();

            if (exceptionalPostconditions.Count > 0)
            {
                // For exceptional postconditions we need to generate CheckException method first
                Method checkExceptionMethod = CreateCheckExceptionMethod();
                EmitCheckExceptionBody(checkExceptionMethod, exceptionalPostconditions);
                this.closureClass.Members.Add(checkExceptionMethod);

                // Then, we're using the same trick as for normal postconditions:
                // wrapping exceptional postconditions only when task.Status is TaskStatus.Faulted
                Block checkExceptionBlock = new Block(new StatementList());

                // Marker for endOfExceptionPostcondition
                Block endOfExceptionPostcondition = new Block();

                StatementList checkStatusIsException = CreateIfTaskResultIsEqualsTo(
                    checkMethodTaskParameter, TaskStatus.Faulted, endOfExceptionPostcondition);

                checkExceptionBlock.Statements.Add(new Block(checkStatusIsException));

                // Now we need to emit actual check for exceptional postconditions

                // Emit: var ae = task.Exception;
                var aeLocal = new Local(aggregateExceptionType.Value);

                checkExceptionBlock.Statements.Add(
                    new AssignmentStatement(aeLocal,
                        new MethodCall(
                            new MemberBinding(checkMethodTaskParameter,
                                GetTaskProperty(checkMethodTaskParameter, "get_Exception")),
                            new ExpressionList())));

                // Emit: CheckException(ae);
                // Need to store method result somewhere, otherwise stack would be corrupted
                var checkResultLocal = new Local(SystemTypes.Boolean);
                checkExceptionBlock.Statements.Add(
                    new AssignmentStatement(checkResultLocal,
                        new MethodCall(new MemberBinding(null, checkExceptionMethod),
                            new ExpressionList(checkExceptionMethod.ThisParameter, aeLocal))));

                checkExceptionBlock.Statements.Add(endOfExceptionPostcondition);
                methodBody.Add(checkExceptionBlock);
            }

            // Copy original block to body statement for both: normal and exceptional postconditions.
newBodyBlock.Statements.Add(origBody); Block returnBlock = CreateReturnBlock(checkMethodTaskParameter, lastEnsuresSourceContext); methodBody.Add(returnBlock); } /// <summary> /// Helper visitor class that changes all references to type parameters to appropriate once. /// </summary> private class TypeParameterFixerVisitor : StandardVisitor { private readonly GenericTypeMapper genericParametersMapping; public TypeParameterFixerVisitor(GenericTypeMapper genericParametersMapping) { Contract.Requires(genericParametersMapping != null); this.genericParametersMapping = genericParametersMapping; } public override Expression VisitAddressDereference(AddressDereference addr) { // Replacing initobj !!0 to initobj !0 var newType = genericParametersMapping.GetClosureTypeParameterByEnclosingTypeParameter(addr.Type); if (newType != addr.Type) { return new AddressDereference(addr.Address, newType, addr.Volatile, addr.Alignment, addr.SourceContext); } return base.VisitAddressDereference(addr); } // Literal is used when contract result compares to null: Contract.Result<T>() != null public override Expression VisitLiteral(Literal literal) { var origin = literal.Value as TypeNode; if (origin == null) { return base.VisitLiteral(literal); } var newLiteralType = this.genericParametersMapping.GetClosureTypeParameterByEnclosingTypeParameter(origin); if (newLiteralType != origin) { return new Literal(newLiteralType); } return base.VisitLiteral(literal); } public override TypeNode VisitTypeParameter(TypeNode typeParameter) { var fixedVersion = this.genericParametersMapping.GetClosureTypeParameterByEnclosingTypeParameter(typeParameter); if (fixedVersion != typeParameter) { return fixedVersion; } return base.VisitTypeParameter(typeParameter); } public override TypeNode VisitTypeReference(TypeNode type) { var fixedVersion = this.genericParametersMapping.GetClosureTypeParameterByEnclosingTypeParameter(type); if (fixedVersion != type) { return fixedVersion; } return base.VisitTypeReference(type); } 
public override TypeNode VisitTypeNode(TypeNode typeNode) { var fixedVersion = this.genericParametersMapping.GetClosureTypeParameterByEnclosingTypeParameter(typeNode); if (fixedVersion != typeNode) { return fixedVersion; } return base.VisitTypeNode(typeNode); } } private static IEnumerable<Ensures> GetTaskResultBasedEnsures(List<Ensures> asyncPostconditions) { return asyncPostconditions.Where(post => !(post is EnsuresExceptional)); } private static IEnumerable<EnsuresExceptional> GetExceptionalEnsures(List<Ensures> asyncPostconditions) { return asyncPostconditions.OfType<EnsuresExceptional>(); } /// <summary> /// Returns TaskExtensions.Unwrap method. /// </summary> [Pure] private Member GetUnwrapMethod(TypeNode checkMethodTaskType) { Contract.Requires(checkMethodTaskType != null); Contract.Ensures(Contract.Result<Member>() != null); Contract.Assert(TaskExtensionsTypeNode != null, "Can't find System.Threading.Tasks.TaskExtensions type"); var unwrapCandidates = TaskExtensionsTypeNode.GetMembersNamed(Identifier.For("Unwrap")); Contract.Assert(unwrapCandidates != null, "Can't find Unwrap method in the TaskExtensions type"); // Should be only two methods. If that is not true, we need to change this code to reflect this! Contract.Assume(unwrapCandidates.Count == 2, "Should be exactly two candidate Unwrap methods."); // We need to find appropriate Unwrap method based on CheckMethod argument type. var firstMethod = (Method)unwrapCandidates[0]; var secondMethod = (Method)unwrapCandidates[1]; Contract.Assume(firstMethod != null && secondMethod != null); var genericUnwrapCandidate = firstMethod.IsGeneric ? firstMethod : secondMethod; var nonGenericUnwrapCandidate = firstMethod.IsGeneric ? secondMethod : firstMethod; if (checkMethodTaskType.IsGeneric) { // We need to "instantiate" generic first. // I.e. for Task<int> we need to have Unwrap(Task<Task<int>>): Task<int> // In this case we need to map back generic types. 
// CheckPost method is a non-generic method from (potentially) generic closure class. // In this case, if enclosing method is generic we need to map generic types back // and use !!0 (reference to method template arg) instead of using !0 (which is reference // to closure class template arg). var enclosingGeneritType = this.genericTypeMapper.GetEnclosingTypeParameterByClosureTypeParameter( checkMethodTaskType.TemplateArguments[0]); return genericUnwrapCandidate.GetTemplateInstance(null, enclosingGeneritType); } return nonGenericUnwrapCandidate; } /// <summary> /// Factory method that creates bool CheckException(Exception e) /// </summary> [Pure] private Method CreateCheckExceptionMethod() { Contract.Ensures(Contract.Result<Method>() != null); var exnParameter = new Parameter(Identifier.For("e"), SystemTypes.Exception); var checkExceptionMethod = new Method( declaringType: this.closureClass, attributes: null, name: CheckExceptionMethodId, parameters: new ParameterList(exnParameter), returnType: SystemTypes.Boolean, body: new Block(new StatementList())); checkExceptionMethod.Body.HasLocals = true; checkExceptionMethod.CallingConvention = CallingConventionFlags.HasThis; checkExceptionMethod.Flags |= MethodFlags.Public; if (checkExceptionMethod.ExceptionHandlers == null) checkExceptionMethod.ExceptionHandlers = new ExceptionHandlerList(); return checkExceptionMethod; } private void EmitCheckExceptionBody(Method checkExceptionMethod, List<EnsuresExceptional> exceptionalPostconditions) { Contract.Requires(checkExceptionMethod != null); Contract.Requires(exceptionalPostconditions != null); Contract.Requires(exceptionalPostconditions.Count > 0); // We emit the following method: // bool CheckException(Exception e) { // var ex = e as C1; // if (ex != null) { // EnsuresOnThrow(predicate) // } // else { // var ex2 = e as AggregateException; // if (ex2 != null) { // ex2.Handle(CheckException); // } // } // // // Method always returns true. This is by design! 
// // We need to check all exceptions in the AggregateException // // and fail in EnsuresOnThrow if the postcondition is not met. // return true; // handled var body = checkExceptionMethod.Body.Statements; var returnBlock = new Block(new StatementList()); foreach (var e in exceptionalPostconditions) { // The catchBlock contains the catchBody, and then // an empty block that is used in the EH. // TODO ST: name is confusing because there is no catch blocks in this method! Block catchBlock = new Block(new StatementList()); // local is: var ex1 = e as C1; Local localEx = new Local(e.Type); body.Add( new AssignmentStatement(localEx, new BinaryExpression(checkExceptionMethod.Parameters[0], new MemberBinding(null, e.Type), NodeType.Isinst))); Block skipBlock = new Block(); body.Add(new Branch(new UnaryExpression(localEx, NodeType.LogicalNot), skipBlock)); body.Add(catchBlock); body.Add(skipBlock); // call Contract.EnsuresOnThrow ExpressionList args = new ExpressionList(); args.Add(e.PostCondition); args.Add(e.UserMessage ?? Literal.Null); args.Add(e.SourceConditionText ?? Literal.Null); args.Add(localEx); var checks = new StatementList(); checks.Add( new ExpressionStatement( new MethodCall( new MemberBinding(null, this.rewriter.RuntimeContracts.EnsuresOnThrowMethod), args, NodeType.Call, SystemTypes.Void), e.SourceContext)); this.rewriter.CleanUpCodeCoverage.VisitStatementList(checks); // TODO ST: actually I can't see this recursion guard check in the resulting IL!! 
rewriter.EmitRecursionGuardAroundChecks(checkExceptionMethod, catchBlock, checks); catchBlock.Statements.Add(new Branch(null, returnBlock)); } // recurse on AggregateException itself { // var ae = e as AggregateException; // if (ae != null) { // ae.Handle(this.CheckException); // } Block catchBlock = new Block(new StatementList()); var aggregateType = aggregateExceptionType.Value; // var ex2 = e as AggregateException; Local localEx2 = new Local(aggregateType); body.Add( new AssignmentStatement(localEx2, new BinaryExpression( checkExceptionMethod.Parameters[0], new MemberBinding(null, aggregateType), NodeType.Isinst))); Block skipBlock = new Block(); body.Add(new Branch(new UnaryExpression(localEx2, NodeType.LogicalNot), skipBlock)); body.Add(catchBlock); body.Add(skipBlock); var funcType = func2Type.Value; funcType = funcType.GetTemplateInstance(this.rewriter.AssemblyBeingRewritten, SystemTypes.Exception, SystemTypes.Boolean); var handleMethod = aggregateType.GetMethod(Identifier.For("Handle"), funcType); var funcLocal = new Local(funcType); var ldftn = new UnaryExpression( new MemberBinding(null, checkExceptionMethod), NodeType.Ldftn, CoreSystemTypes.IntPtr); catchBlock.Statements.Add( new AssignmentStatement(funcLocal, new Construct( new MemberBinding(null, funcType.GetConstructor(SystemTypes.Object, SystemTypes.IntPtr)), new ExpressionList(checkExceptionMethod.ThisParameter, ldftn)))); catchBlock.Statements.Add( new ExpressionStatement(new MethodCall(new MemberBinding(localEx2, handleMethod), new ExpressionList(funcLocal)))); } // add return true to CheckException method body.Add(returnBlock); body.Add(new Return(Literal.True)); } /// <summary> /// Returns property for the task object. /// </summary> private static Method GetTaskProperty(Parameter taskParameter, string propertyName) { Contract.Requires(taskParameter != null); Contract.Ensures(Contract.Result<Method>() != null); // For generic task Status property defined in the base class. 
// That's why we need to check what the taskParameter type is - is it generic or not. // If the taskParameter is generic we need to use base type (because Task<T> : Task). var taskTypeWithStatusProperty = taskParameter.Type.IsGeneric ? taskParameter.Type.BaseType : taskParameter.Type; return taskTypeWithStatusProperty.GetMethod(Identifier.For(propertyName)); } /// <summary> /// Method returns a list of statements that checks task status. /// </summary> private static StatementList CreateIfTaskResultIsEqualsTo( Parameter taskParameterToCheck, TaskStatus expectedStatus, Block endBlock) { Contract.Ensures(Contract.Result<StatementList>() != null); var result = new StatementList(); // If-statement is slightly different in IL. // To get `if (condition) {statements}` // we need to generate: // if (!condition) goto endBLock; statements; endBlock: // This method emits a check that simplifies CheckMethod implementation. var statusProperty = GetTaskProperty(taskParameterToCheck, "get_Status"); Contract.Assert(statusProperty != null, "Can't find Task.Status property"); // Emitting: var tmpStatus = task.Status; var tmpStatus = new Local(statusProperty.ReturnType); result.Add( new AssignmentStatement(tmpStatus, new MethodCall(new MemberBinding(taskParameterToCheck, statusProperty), new ExpressionList()))); // if (tmpStatus != expectedStatus) // goto endOfMethod; // This is an inverted form of the check: if (tmpStatus == expectedStatus) {check} result.Add( new Branch( new BinaryExpression( tmpStatus, new Literal(expectedStatus), NodeType.Ne), endBlock)); return result; } private static Block CreateReturnBlock(Parameter checkPostTaskParameter, SourceContext? 
lastEnsuresSourceContext) { Statement returnStatement = new Return(checkPostTaskParameter); if (lastEnsuresSourceContext != null) { returnStatement.SourceContext = lastEnsuresSourceContext.Value; } Block returnBlock = new Block(new StatementList(1)); returnBlock.Statements.Add(returnStatement); return returnBlock; } /// <summary> /// Returns correct version of the ContinueWith method. /// </summary> /// <remarks> /// This function returns ContinueWith overload that takes TaskContinuationOptions. /// </remarks> private static Method GetContinueWithMethod(Class closureClass, TypeNode taskTemplate, TypeNode taskType) { var continueWithCandidates = taskTemplate.GetMembersNamed(Identifier.For("ContinueWith")); // Looking for an overload with TaskContinuationOptions const int expectedNumberOfArguments = 2; for (int i = 0; i < continueWithCandidates.Count; i++) { var cand = continueWithCandidates[i] as Method; if (cand == null) continue; // For non-generic version we're looking for ContinueWith(Action<Task>, TaskContinuationOptions) if (!taskType.IsGeneric) { if (cand.IsGeneric) continue; if (cand.ParameterCount != expectedNumberOfArguments) continue; if (cand.Parameters[0].Type.GetMetadataName() != "Action`1") continue; if (cand.Parameters[1].Type.GetMetadataName() != "TaskContinuationOptions") continue; return cand; } // For generic version we're looking for ContinueWith(Func<Task, T>, TaskContinuationOptions) if (!cand.IsGeneric) continue; if (cand.TemplateParameters.Count != 1) continue; if (cand.ParameterCount != expectedNumberOfArguments) continue; if (cand.Parameters[0].Type.GetMetadataName() != "Func`2") continue; if (cand.Parameters[1].Type.GetMetadataName() != "TaskContinuationOptions") continue; // now create instance, first of task var taskInstance = taskTemplate.GetTemplateInstance( closureClass.DeclaringModule, taskType.TemplateArguments[0]); // ST: some black magic is happening, but it seems it is required to get ContinueWith // from generic instantiated 
version of the task var candMethod = (Method)taskInstance.GetMembersNamed(Identifier.For("ContinueWith"))[i]; // Candidate method would have following signature: // Task<T> ContinueWith(Task<T> t) for generic version return candMethod.GetTemplateInstance(null, taskType); } return null; } private static InstanceInitializer CreateConstructor(Class closureClass) { var ctor = new InstanceInitializer(closureClass, null, null, null); ctor.CallingConvention = CallingConventionFlags.HasThis; ctor.Flags |= MethodFlags.Public | MethodFlags.HideBySig; // Regular block that calls base class constructor ctor.Body = new Block( new StatementList( new ExpressionStatement( new MethodCall(new MemberBinding(ctor.ThisParameter, SystemTypes.Object.GetConstructor()), new ExpressionList())), new Return())); return ctor; } private bool IsVoidTask() { return this.checkPostMethod.ReturnType == SystemTypes.Void; } class FieldRewriter : StandardVisitor { private readonly EmitAsyncClosure closure; private Field enclosingInstance; public FieldRewriter(EmitAsyncClosure closure) { this.closure = closure; } public override Expression VisitMemberBinding(MemberBinding memberBinding) { // Original postcondition could have an access to the instance state via Contract.Ensures(_state == "foo"); // Now postcondition body resides in the different class and all references to 'this' pointer should be changed. // If member binding references 'this', then we need to initialize '_this' field that points to the enclosing class // and redirect the binding to this field. var thisNode = memberBinding != null ? 
memberBinding.TargetObject as This : null; if (thisNode != null && thisNode.DeclaringMethod != null && thisNode.DeclaringMethod.DeclaringType != null && // Need to change only when 'this' belongs to enclosing class instance thisNode.DeclaringMethod.DeclaringType.Equals(this.closure.declaringType)) { var thisField = EnsureClosureInitialization(memberBinding.TargetObject); return new MemberBinding( new MemberBinding(this.closure.checkPostMethod.ThisParameter, thisField), memberBinding.BoundMember); } return base.VisitMemberBinding(memberBinding); } /// <summary> /// Ensures that async closure has a field with enclosing class field, like public EnclosingType _this. /// </summary> private Field EnsureClosureInitialization(Expression targetObject) { if (this.enclosingInstance == null) { var localType = this.closure.forwarder != null ? this.closure.forwarder.VisitTypeReference(targetObject.Type) : targetObject.Type; var enclosedTypeField = new Field( this.closure.closureClass, null, FieldFlags.Public, new Identifier("_this"), localType, null); this.closure.closureClass.Members.Add(enclosedTypeField); // initialize the closure field var instantiatedField = Rewriter.GetMemberInstanceReference(enclosedTypeField, this.closure.closureClassInstance); this.closure.ClosureInitializer.Statements.Add( new AssignmentStatement( new MemberBinding(this.closure.closureLocal, instantiatedField), targetObject)); this.enclosingInstance = enclosedTypeField; } return this.enclosingInstance; } } // Visitor for changing closure locals to fields public override Expression VisitLocal(Local local) { if (HelperMethods.IsClosureType(this.declaringType, local.Type)) { MemberBinding mb; if (!closureLocals.TryGetValue(local, out mb)) { // TODO ST: not clear what's going on here! // Clarification: this method changes access to local variables to apropriate fields. 
// Consider following example: // public async Task<int> FooAsync(int[] arguments, int length) // { // Contract.Ensures(Contract.ForAll(arguments, i => i == Contract.Result<int>() && i == length)); // } // In this case, CheckPost method should reference length that will become a field of the generated // closure class. // So this code will change all locals (like length) to appropriate fields of the async closure instance. // Forwarder would be null, if enclosing method with async closure is not generic var localType = forwarder != null ? forwarder.VisitTypeReference(local.Type) : local.Type; var closureField = new Field(this.closureClass, null, FieldFlags.Public, local.Name, localType, null); this.closureClass.Members.Add(closureField); mb = new MemberBinding(this.checkPostMethod.ThisParameter, closureField); closureLocals.Add(local, mb); // initialize the closure field var instantiatedField = Rewriter.GetMemberInstanceReference(closureField, this.closureClassInstance); this.ClosureInitializer.Statements.Add( new AssignmentStatement( new MemberBinding(this.closureLocal, instantiatedField), local)); } return mb; } return local; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Text; namespace Apache.Geode.Client.FwkLib { using Apache.Geode.DUnitFramework; using System.Threading; using System.Xml.Serialization; using System.IO; using System.Reflection; public class EventTest : FwkTest { public const string EventCountersBB = "EventCountersBB"; #region Private members private List<CacheableKey> m_keysVec = new List<CacheableKey>(); #endregion #region Private utility methods private int percentDifferent() { string testRegionName = GetStringValue("regionName"); if (testRegionName.Length == 0) { throw new FwkException("Data not provided for regionname failing"); } int bbExpected = 0; int expected = GetUIntValue("expectedKeyCount"); if (expected < 0) { bbExpected = (int)Util.BBGet(EventCountersBB, "CREATE_COUNT"); if (bbExpected <= 0) { throw new FwkException("Data not provided for expectedKeyCount failing"); } expected = bbExpected; } Region testReg = CacheHelper.GetRegion(testRegionName); ISerializable[] keys = testReg.GetKeys(); int keyCount = keys.Length; double diff = 0; if (keyCount > expected) { diff = keyCount - expected; } else { diff = expected - keyCount; } int retval = (int)((diff / ((double)expected + 
1.0)) * 100.0); FwkInfo("Expected to have " + expected + " keys, found " + keyCount + " keys, percent Difference: " + retval); return retval; } private void doEntryTest(string opcode) { FwkInfo("Calling doEntryTest inside getRandomregion"); Region reg = GetRandomRegion(true); FwkInfo("After getrandomregion inside doentrytest"); if (reg == null) { FwkInfo("Check in side doEntryTest ... No region to operate on"); throw new FwkException("No reion to operate on"); } lock (this) { FwkInfo("After getrandomregion inside doentrytest Check 1"); if (reg.Attributes.CachingEnabled == false) { return; } ISerializable[] keys = reg.GetKeys(); int maxkeys = GetUIntValue("distinctKeys"); int balanceEntries = GetUIntValue("balanceEntries"); FwkInfo("After getrandomregion inside doentrytest Check 2 balance entries = {0}", balanceEntries); if (balanceEntries > 0) { if (keys.Length >= maxkeys) { FwkInfo("Balancing cache content by destroying excess entries."); int dcnt = 0; for (uint i = 100; i > 0; i--) { try { destroyObject(reg, true); } catch (Exception e) { // ignore the exception ... TODO print the message FwkSevere(e.Message); } } FwkInfo("Cache balancing complete, did " + dcnt + " destroys."); } else if (keys.Length == 0) { FwkInfo("Balancing cache content by creating entries."); int ccnt = 0; for (int i = 100; i > 0; i--) { try { addObject(reg, true, null, null); ccnt++; } catch (TimeoutException e) { FwkSevere("Caught unexpected timeout exception during entry " + opcode + " operation: " + e.Message + " continuing with test."); } catch (Exception e) { // Ignore other exception ... 
@TODO FwkInfo("Ignoring exception " + e.Message); } } FwkInfo("Cache balancing complete, did " + ccnt + " creates."); } } else { FwkInfo("Balance entries less than zero"); } } FwkInfo("After getrandomregion inside doentrytest Check 4 before switch opcode = {0}", opcode); //opcode = "read"; FwkInfo("opcode = " + opcode.ToString() + " " + opcode); switch (opcode) { case "add": addObject(reg, true, null, null); break; case "update": updateObject(reg); break; case "invalidate": invalidateObject(reg, false); break; case "destroy": destroyObject(reg, false); break; case "read": readObject(reg); break; case "localInvalidate": invalidateObject(reg, true); break; case "localDestroy": destroyObject(reg, true); break; default: FwkSevere("Invalid operation specified: " + opcode); break; } } private void doRegionTest(string opcode, int iMaxRegions) { Region randomRegion; lock (this) { int iRegionCount = getAllRegionCount(); if (iRegionCount >= iMaxRegions) { while (iRegionCount > iMaxRegions / 2) { try { randomRegion = GetRandomRegion(true); if (randomRegion == null) { FwkException("expected to get a valid random region, get a null region instead"); } else { destroyRegion(randomRegion, false); } iRegionCount = getAllRegionCount(); FwkInfo("Inside doregiontest ... 
iregioncount = {0}", iRegionCount); } catch (Exception ignore) { FwkInfo(ignore.Message); } } } else if (iRegionCount <= 0) { for (Int32 i = iMaxRegions / 2; i > 0; i--) { try { addRegion(); } catch (Exception ignore) { FwkInfo(ignore.Message); } } } FwkInfo("Inside doregiontest after else"); } FwkInfo("Again GetRandomRegion"); randomRegion = GetRandomRegion(true); if (randomRegion == null) { //need to create a region opcode = "addRegion"; } FwkInfo("Do region test: " + opcode); switch (opcode) { case "addRegion": addRegion(); break; case "clearRegion": clearRegion(randomRegion, false); break; case "invalidateRegion": invalidateRegion(randomRegion, false); break; case "destroyRegion": destroyRegion(randomRegion, false); break; case "localClearRegion": clearRegion(randomRegion, true); break; case "localInvalidateRegion": invalidateRegion(randomRegion, true); break; case "localDestroyRegion": destroyRegion(randomRegion, true); break; default: FwkSevere("Invalid operation specified: " + opcode); break; } } private string getNextRegionName(Region region) { string regionName = null; int count = 0; string path; do { path = GetStringValue("regionPaths"); if (path.Length == 0) { FwkException("No regionPaths defined in the xml file. 
Needed for region event test"); } do { int length = path.Length; try { region = CacheHelper.GetRegion(path); } catch (Exception e) { FwkSevere(e.Message); } if (region == null) { int pos = path.LastIndexOf('/'); regionName = path.Substring(pos + 1, path.Length - pos); path = path.Substring(0, pos); } } while ((region == null) && path.Length == 0); } while ((++count < 5) && regionName.Length != 0); return regionName; } public void measureMemory(string s, double vs, double rs) { } public CacheableKey findKeyNotInCache(Region region) { CacheableKey key; if (m_keysVec.Count == 0) { lock (this) { int numKeys = GetUIntValue("distinctKeys"); for (int i = 0; i < numKeys; i++) { string skey = i.ToString(); key = new CacheableString(skey); //int pos = m_keysVec.Length; m_keysVec.Add(key); } } } key = null; int start = Util.Rand(m_keysVec.Count); bool wrapped = false; int cur = start; while ((cur != start) || !wrapped) { if (cur >= m_keysVec.Count) { cur = 0; wrapped = true; } else { if (!region.ContainsKey(m_keysVec[cur])) { key = m_keysVec[cur]; cur = start; wrapped = true; } else { cur++; } } } return key; } #endregion #region Public methods public void DoEntryOperations() { Util.Log("Calling doeventoperations"); doEventOperations(); } public void doEventOperations() { UInt32 counter = 0; string taskID = "begin"; // Clear up everything from previous test. // Make sure we have one root region. { // TODO: Lock and task id business // ACE_Guard<ACE_Thread_Mutex> guard( *testLock); //if ((taskID != null) && (this.getn != taskID)) { // TODO CacheHelper.DestroyAllRegions(true); //destroyAllRegions(); CreateRootRegion(); if (taskID != null) { //(taskID); } // FWKINFO( "DBG doEventOperations set id" ); //taskID = strdup(getTaskId().c_str()); } // FWKINFO( "DBG doEventOperations release lock" ); } int workTime = GetUIntValue("workTime"); FwkInfo("doEventOperations will work for " + workTime + " seconds. 
"); int skipCounter = GetUIntValue("skipCount"); skipCounter = (skipCounter > 0) ? skipCounter : 100; int iMaxRegions = GetUIntValue("maxRegions"); // TODO: DEFAULT_MAX_REGION int DEFAULT_MAX_REGION = 10; iMaxRegions = (iMaxRegions > 0) ? iMaxRegions : DEFAULT_MAX_REGION; // TODO: check the correctness. DateTime endTime = DateTime.Now + TimeSpan.FromMilliseconds((double)workTime); int opsSecond = GetUIntValue("opsSecond"); opsSecond = (opsSecond > 0) ? opsSecond : 0; PaceMeter pm = new PaceMeter(opsSecond); int logSize = GetUIntValue("logSize"); int opCount = 0; DateTime now = new DateTime(); string opcode = string.Empty; bool isDone = false; FwkInfo("Entering event loop."); do { FwkInfo("Before getRegionCount"); if (logSize == 1) { int cnt = getRegionCount(); FwkInfo(cnt + ((cnt == 1) ? " region " : " regions ") + opCount); } FwkInfo("After getRegionCount"); int randomOP = GetUIntValue("randomOP"); if (randomOP == 5) { opcode = GetStringValue("regionOps"); } else { opcode = GetStringValue("entryOps"); } FwkInfo("Check 1"); if (opcode.Length != 0) { bool skipTest = false; if (opcode == "abort") { skipTest = true; if (--skipCounter == 0) { // TODO: definitely wrong. what is intended is unclear. //char * segv = NULL; //strcpy( segv, "Forcing segv" ); } } else if (opcode == "exit") { skipTest = true; if (--skipCounter == 0) { Environment.Exit(0); } } else if (opcode == "done") { skipTest = true; if (--skipCounter == 0) { isDone = true; } } if (!skipTest) { FwkInfo("Check 2 doRegionTest"); if (randomOP == 5) { doRegionTest(opcode, iMaxRegions); } else { FwkInfo("Check 3 doEntryTest"); doEntryTest(opcode); FwkInfo("Check 4 doentrytest over"); } opCount++; pm.CheckPace(); } counter++; if ((counter % 1000) == 0) { FwkInfo("Performed " + counter + " operations."); } Util.BBIncrement(EventCountersBB, "CURRENT_OPS_COUNT"); } else { FwkSevere("NULL operation specified." 
+ "randomOP: " + randomOP);
        }
        now = DateTime.Now;
        FwkInfo("do while end in doeventoperations");
      } while ((now < endTime) && !isDone);
      FwkInfo("Event loop complete.");
      FwkInfo("doEventOperations() performed " + counter + " operations.");
    }

    /// <summary>
    /// Walks every root region and iterates its entries via iterateRegion().
    /// Regions whose entry expiration action is Invalidate are allowed to hold
    /// all-null values without that being treated as an error.
    /// </summary>
    public void doIterate()
    {
      FwkInfo("doIterate()");
      uint ulKeysInRegion = 0;
      uint ulNoneNullValuesInRegion = 0;
      string sError = null;
      Region[] rootRegionArray;
      Region rootRegion;
      RegionAttributes attr;
      rootRegionArray = CacheHelper<TKey, TVal>.DCache.RootRegions();
      int ulRegionCount = rootRegionArray.Length;
      for (int ulIndex = 0; ulIndex < ulRegionCount; ulIndex++)
      {
        rootRegion = rootRegionArray[ulIndex];
        attr = rootRegion.Attributes;
        bool bHasInvalidateAction = attr.EntryIdleTimeoutAction == ExpirationAction.Invalidate || (attr.EntryTimeToLiveAction == ExpirationAction.Invalidate);
        iterateRegion(rootRegion, true, bHasInvalidateAction, ulKeysInRegion, ulNoneNullValuesInRegion, sError);
        // BUGFIX: sError starts as null and, because C# strings are immutable
        // and passed by value, iterateRegion() cannot assign into it. The
        // original unconditional sError.Length dereference therefore always
        // threw NullReferenceException here. Guard with IsNullOrEmpty.
        // NOTE(review): for error reporting to actually propagate,
        // iterateRegion would need a ref/out string parameter - flagged there,
        // not changed here to keep this edit self-contained.
        if (!string.IsNullOrEmpty(sError))
        {
          FwkException(sError);
        }
      }
    }

    public void doMemoryMeasurement()
    {
      // TODO Later
    }

    /// <summary>
    /// Reports FwkSevere when the actual key count deviates from the expected
    /// count by more than 10% (see percentDifferent()).
    /// </summary>
    public void verifyKeyCount()
    {
      int percentDiff = percentDifferent();
      if (percentDiff > 10)
      {
        FwkSevere("Actual number of keys does not match expected number.");
      }
    }

    /// <summary>
    /// Adds "EntryCount" (default 100) random byte-array entries to the region
    /// named by the "regionName" setting. Keys are optionally prefixed with the
    /// process id ("usePID" == 1); throughput is throttled to "opsSecond"
    /// operations per second via PaceMeter (0 = unthrottled).
    /// </summary>
    public void addEntry()
    {
      string testRegionName = GetStringValue("regionName");
      if (testRegionName.Length == 0)
      {
        FwkException("Data not provided for 'regionName', failing.");
      }
      Region region = CacheHelper<TKey, TVal>.DCache.GetRegion(testRegionName);
      int usePid = GetUIntValue("usePID");
      int pid = Util.PID;
      int opsSecond = GetUIntValue("opsSecond");
      if (opsSecond < 0)
      {
        opsSecond = 0; // No throttle
      }
      PaceMeter pm = new PaceMeter(opsSecond);
      int entryCount = GetUIntValue("EntryCount");
      if (entryCount <= 0)
      {
        entryCount = 100;
      }
      FwkInfo("addEntry: Adding " + entryCount + " entries to the cache.");
      for (Int32 count = 0; count < entryCount; count++)
      {
        string sKey;
        Serializable sValue;
        if (usePid == 1)
        {
          sKey = pid.ToString();
        }
        else
        {
          sKey = string.Empty;
        }
        sKey += count.ToString();
        // get value size
        int vsize = GetUIntValue("valueSizes");
        if (vsize < 0)
        {
          vsize = 1000;
        }
        byte[] buffer = new byte[vsize];
        Util.RandBytes(buffer);
        sValue = CacheableBytes.Create(buffer);
        // TODO: check
        CacheableKey key = new CacheableString(sKey);
        Serializable value = sValue;
        // NOTE(review): 'new' can never yield null, so this check is dead code;
        // kept for parity with the C++ original it was ported from.
        if (key == null)
        {
          FwkSevere("EventTest::addObject null keyPtr generated.");
        }
        FwkInfo("created entry with key: " + key.ToString());
        region.Put(key as CacheableKey, value);
        Util.BBIncrement(EventCountersBB, "CREATE_COUNT");
        pm.CheckPace();
      }
      FwkInfo("addEntry: Complete.");
    }

    /// <summary>
    /// For "EntryCount" (default 100) generated keys, either puts a random
    /// value or destroys the key, depending on the "popOp" setting
    /// ("put" puts; anything else destroys).
    /// </summary>
    public void addOrDestroyEntry()
    {
      string testRegionName = GetStringValue("regionName");
      if (testRegionName.Length == 0)
      {
        FwkException("Data not provided for 'regionName', failing.");
      }
      Region region = CacheHelper<TKey, TVal>.DCache.GetRegion(testRegionName);
      int usePid = GetUIntValue("usePID");
      int pid = Util.PID;
      int entryCount = GetUIntValue("EntryCount");
      if (entryCount <= 0)
      {
        entryCount = 100;
      }
      FwkInfo("addOrDestroyEntry: Adding or Destroying ( if present )" + entryCount + " entries to the cache.");
      for (int count = 0; count < entryCount; count++)
      {
        string sKey;
        if (usePid == 1)
        {
          sKey = pid.ToString();
        }
        else
        {
          sKey = string.Empty;
        }
        sKey += count.ToString();
        // get value size
        int vsize = GetUIntValue("valueSizes");
        if (vsize < 0)
        {
          vsize = 1000;
        }
        byte[] buffer = new byte[vsize];
        Util.RandBytes(buffer);
        CacheableKey key = new CacheableString(sKey);
        CacheableBytes value = CacheableBytes.Create(buffer);
        // NOTE(review): dead check - 'new' cannot return null.
        if (key == null)
        {
          FwkSevere("EventTest::addObject null keyPtr generated.");
        }
        string op = GetStringValue("popOp");
        if (op == "put")
        {
          region.Put(key as CacheableKey, value);
        }
        else
        {
          region.Destroy(key as CacheableKey);
        }
        Util.BBIncrement(EventCountersBB, "CREATE_COUNT");
      }
      FwkInfo("addOrDestroyEntry: Complete.");
    }

    /// <summary>
    /// Verifies that every entry of the "testRegion" propagated into the
    /// "validateRegion" (see TestEntryPropagation) and logs pass/fail counts.
    /// </summary>
    public void validateCacheContent()
    {
      FwkInfo("validateCacheContent()");
      string testRegionName = GetStringValue("testRegion");
      string validateRegionName = GetStringValue("validateRegion");
      Region testRegion =
CacheHelper<TKey, TVal>.DCache.GetRegion(testRegionName);
      Region validateRegion = CacheHelper<TKey, TVal>.DCache.GetRegion(validateRegionName);
      ICacheableKey[] keyVector;
      keyVector = testRegion.GetKeys();
      int ulKeysInRegion = keyVector.Length;
      if (ulKeysInRegion == 0)
      {
        FwkSevere("zero keys in testRegion " + testRegion.Name);
      }
      ICacheableKey key;
      ISerializable value;
      int entryPassCnt = 0;
      int entryFailCnt = 0;
      // TestEntryPropagation returns true on error, so a 'true' counts as a failure.
      for (int ulIndex = 0; ulIndex < ulKeysInRegion; ulIndex++)
      {
        key = keyVector[ulIndex];
        value = testRegion.Get(key);
        if (TestEntryPropagation(validateRegion, key as CacheableString, value as CacheableBytes))
        {
          entryFailCnt++;
        }
        else
        {
          entryPassCnt++;
        }
      }
      FwkInfo("entryFailCnt is " + entryFailCnt + " entryPassCnt is " + entryPassCnt);
      if (entryFailCnt == 0)
      {
        FwkInfo("validateCacheContent() - TEST ENDED, RESULT = SUCCESSFUL ");
      }
      else
      {
        FwkSevere("validateCacheContent() - TEST ENDED, RESULT = FAILED ");
      }
    }

    /// <summary>
    /// Locally destroys "testRegion", recreates the root region, then checks
    /// that the recreated region ("regionName") and "validateRegion" hold the
    /// same key count and that every entry propagated correctly.
    /// </summary>
    public void validateRegionContent()
    {
      FwkInfo("validateRegionContent()");
      string testRegionName = GetStringValue("testRegion");
      string validateRegionName = GetStringValue("validateRegion");
      string regionName = GetStringValue("regionName");
      Region testRegion = CacheHelper<TKey, TVal>.DCache.GetRegion(testRegionName);
      Region validateRegion = CacheHelper<TKey, TVal>.DCache.GetRegion(validateRegionName);
      FwkInfo("localDestroyRegion region name is " + testRegion.Name);
      // destroy the region
      // NOTE(review): iBeforeCounter is read from the blackboard but never used
      // afterwards - presumably a leftover from the C++ port; verify intent.
      int iBeforeCounter = (int)Util.BBGet(EventCountersBB, "numAfterRegionDestroyEvents_isNotExp");
      testRegion.LocalDestroyRegion();
      CreateRootRegion();
      Region region = CacheHelper<TKey, TVal>.DCache.GetRegion(regionName);
      FwkInfo(" Recreated Region name is " + region.Name);
      ICacheableKey[] keyVector;
      ICacheableKey[] keyVectorValidateRegion;
      keyVector = region.GetKeys();
      keyVectorValidateRegion = validateRegion.GetKeys();
      int ulKeysInRegion = keyVector.Length;
      int ulKeysInValidateRegion = keyVectorValidateRegion.Length;
      if (ulKeysInRegion != ulKeysInValidateRegion)
      {
        FwkSevere("Region Key count is not equal, Region " + region.Name + " key count is " + ulKeysInRegion + " and Region " + validateRegion.Name + " key count is " + ulKeysInValidateRegion);
      }
      ICacheableKey key;
      ISerializable value;
      int entryPassCnt = 0;
      int entryFailCnt = 0;
      for (int ulIndex = 0; ulIndex < ulKeysInRegion; ulIndex++)
      {
        key = keyVector[ulIndex];
        value = region.Get(key);
        if (TestEntryPropagation(validateRegion, key as CacheableString, value as CacheableBytes))
        {
          entryFailCnt++;
        }
        else
        {
          entryPassCnt++;
        }
      }
      FwkInfo("entryFailCnt is " + entryFailCnt + " entryPassCnt is " + entryPassCnt);
      if (entryFailCnt == 0)
      {
        FwkInfo("validateRegionContent() - TEST ENDED, RESULT = SUCCESSFUL ");
      }
      else
      {
        FwkSevere("validateRegionContent() - TEST ENDED, RESULT = FAILED ");
      }
    }

    public void doCreateObject()
    {
      // Not implemented.
    }

    /// <summary>
    /// Waits (up to 30 tries, 10s apart) for the key count of "testRegion" to
    /// stabilize, then copies every entry into "validateRegion".
    /// </summary>
    public void doIterateOnEntry()
    {
      FwkInfo("doIterateOnEntry()");
      string testRegionName = GetStringValue("testRegion");
      string validateRegionName = GetStringValue("validateRegion");
      Region testRegion = CacheHelper<TKey, TVal>.DCache.GetRegion(testRegionName);
      Region validateRegion = CacheHelper<TKey, TVal>.DCache.GetRegion(validateRegionName);
      ICacheableKey[] keyVector = null;
      int keysInRegion = 1;
      int lastCount = 0;
      int tryCount = 30;
      int tries = 0;
      // Poll until two consecutive key counts match (region stopped growing).
      while ((keysInRegion != lastCount) && (tries++ < tryCount))
      {
        Thread.Sleep(10000); // sleep for 10 seconds.
lastCount = keysInRegion;
        keyVector = testRegion.GetKeys();
        keysInRegion = keyVector.Length;
      }
      if ((keysInRegion == 0) || (tries >= tryCount))
      {
        FwkException("After " + tries + " tries, counted " + keysInRegion + " keys in the region.");
      }
      FwkInfo("After " + tries + " tries, counted " + keysInRegion + " keys in the region.");
      CacheableKey key;
      Serializable value;
      for (int index = 0; index < keysInRegion; index++)
      {
        key = keyVector[index] as CacheableKey;
        value = testRegion.Get(key) as Serializable;
        validateRegion.Create(key, value);
      }
    }

    /// <summary>
    /// Continuously puts random byte-array entries into the "regionName" region
    /// for "workTime" seconds (default 100), throttled to "opsSecond" ops/sec.
    /// </summary>
    public void feedEntries()
    {
      string testRegionName = GetStringValue("regionName");
      if (testRegionName.Length == 0)
      {
        FwkException("Data not provided for 'regionName', failing.");
      }
      Region region = CacheHelper<TKey, TVal>.DCache.GetRegion(testRegionName);
      int opsSecond = GetUIntValue("opsSecond");
      if (opsSecond < 0)
      {
        opsSecond = 0; // No throttle
      }
      PaceMeter pm = new PaceMeter(opsSecond);
      int secondsToRun = GetUIntValue("workTime");
      secondsToRun = (secondsToRun < 1) ? 100 : secondsToRun;
      FwkInfo("feedEntries: Will add entries for " + secondsToRun + " seconds.");
      DateTime end = DateTime.Now + TimeSpan.FromSeconds((double)secondsToRun);
      DateTime now = DateTime.Now;
      int count = 0;
      while (now < end)
      {
        string key = (++count).ToString();
        // get value size
        int vsize = GetUIntValue("valueSizes");
        if (vsize < 0)
        {
          vsize = 1000;
        }
        byte[] buffer = new byte[vsize];
        Util.RandBytes(buffer);
        CacheableKey skey = new CacheableString(key);
        Serializable value = CacheableBytes.Create(buffer);
        // NOTE(review): dead check - 'key' is built from ToString() and can
        // never be null, so the early-exit branch ('now = end') never runs.
        if (key == null)
        {
          FwkSevere("EventTest::feedEntries null keyPtr generated.");
          now = end;
        }
        region.Put(skey, value);
        Util.BBIncrement(EventCountersBB, "CREATE_COUNT");
        pm.CheckPace();
        now = DateTime.Now;
      }
    }

    /// <summary>
    /// Exercises the full entry lifecycle (create, localInvalidate, get,
    /// localDestroy, invalidate, put, destroy, re-create) on a random region
    /// and verifies the key count after every phase.
    /// </summary>
    public void doBasicTest()
    {
      Region region = GetRandomRegion(true);
      int numKeys = GetUIntValue("distinctKeys");
      numKeys = numKeys > 0 ? numKeys : 1000;
      CacheableKey[] keys = new CacheableKey[numKeys];
      Serializable[] values = new Serializable[numKeys];
      for (int i = 0; i < numKeys; ++i)
      {
        // NOTE(review): ksize is computed (from "valueSizes", not a key-size
        // setting) but never used, and kStr is the constant "key_" for every
        // iteration - so all numKeys Create calls use the SAME key, which
        // should throw EntryExistsException after the first. Looks like the
        // index (and a random suffix of length ksize) was meant to be appended;
        // verify against the C++ original before fixing.
        int ksize = GetUIntValue("valueSizes");
        ksize = ksize > 0 ? ksize : 12;
        int vsize = GetUIntValue("valueSizes");
        vsize = vsize > 0 ? vsize : 100;
        string kStr = "key_";
        byte[] buffer = new byte[vsize];
        Util.RandBytes(buffer);
        CacheableKey key = new CacheableString(kStr);
        Serializable value = CacheableBytes.Create(buffer);
        keys[i] = key;
        values[i] = value;
        region.Create(key, value);
      }
      ICacheableKey[] expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after create, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.LocalInvalidate(keys[i]);
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after localInvalidate, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        ISerializable val = region.Get(keys[i]);
        if (val.ToString() != values[i].ToString())
        {
          FwkSevere("Expect " + values[i].ToString() + ", got " + val.ToString());
        }
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after first get, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.LocalDestroy(keys[i]);
      }
      expectKeys = region.GetKeys();
      if ((expectKeys.Length) != 0)
      {
        FwkSevere("Expect 0 keys after localDestroy, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        ISerializable val = region.Get(keys[i]); // get
        if (val.ToString() != values[i].ToString())
        {
          FwkSevere("Expect " + values[i].ToString() + ", got " + val.ToString());
        }
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after second get, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.Invalidate(keys[i]);
      }
      expectKeys = region.GetKeys();
if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after invalidate, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.Get(keys[i]);
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after invalidate all entries in server, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.Put(keys[i], values[i]);
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after put, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.Destroy(keys[i]);
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != 0)
      {
        FwkSevere("Expect 0 keys after destroy, got " + expectKeys.Length + " keys");
      }
      // Every Get on a destroyed entry should raise EntryNotFoundException.
      int excepCount = 0;
      for (int i = 0; i < numKeys; ++i)
      {
        try
        {
          region.Get(keys[i]);
        }
        catch (EntryNotFoundException e)
        {
          FwkInfo(e.Message);
          ++excepCount;
        }
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != 0)
      {
        FwkSevere("Expect 0 keys because all entries are destoyed in server, got " + expectKeys.Length + " keys");
      }
      if (excepCount != numKeys)
      {
        FwkSevere("Expect " + numKeys + " exceptions because all entries are destoyed in server, got " + excepCount + " exceptions");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.Create(keys[i], values[i]);
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after second create, got " + expectKeys.Length + " keys");
      }
      for (int i = 0; i < numKeys; ++i)
      {
        region.Get(keys[i]);
      }
      expectKeys = region.GetKeys();
      if (expectKeys.Length != numKeys)
      {
        FwkSevere("Expect " + numKeys + " keys after invalidate all entries in server, got " + expectKeys.Length + " keys");
      }
    }

    /// <summary>
    /// Alternately destroys and recreates a random region for "workTime"
    /// seconds (default 10), sleeping "sleepTime" seconds between operations,
    /// and verifies the key count after each recreate.
    /// </summary>
    public void doTwinkleRegion()
    {
      int secondsToRun = GetUIntValue("workTime");
      secondsToRun = (secondsToRun < 1) ? 10 : secondsToRun;
      FwkInfo("Seconds to run: " + secondsToRun);
      // NOTE(review): DateTime.Now.Second wraps 0-59, so 'end' and the loop
      // test below misbehave whenever the run crosses a minute boundary.
      // The in-code TODO acknowledges this; needs a proper deadline
      // (DateTime + TimeSpan) like feedEntries uses.
      int end = DateTime.Now.Second + secondsToRun;
      bool done = false;
      bool regionDestroyed = false;
      int errCnt = 0;
      while (!done)
      {
        int sleepTime = GetUIntValue("sleepTime");
        sleepTime = ((sleepTime < 1) || regionDestroyed) ? 10 : sleepTime;
        FwkInfo("sleepTime is " + sleepTime + " seconds.");
        DateTime now = DateTime.Now;
        // TODO: 10 magic number to avoid compilation
        if ((now.Second > end) || ((now.Second + 10) > end))
        {
          // TODO : Check DateTime usage in the entire file.
          // FWKINFO( "Exiting loop, time is up." );
          done = true;
          continue;
        }
        FwkInfo("EventTest::doTwinkleRegion() sleeping for " + sleepTime + " seconds.");
        Thread.Sleep(sleepTime * 1000);
        if (regionDestroyed)
        {
          FwkInfo("EventTest::doTwinkleRegion() will create a region.");
          CreateRootRegion();
          regionDestroyed = false;
          FwkInfo("EventTest::doTwinkleRegion() region created.");
          int percentDiff = percentDifferent();
          if (percentDiff > 10)
          {
            errCnt++;
            FwkSevere("Actual number of keys is not within 10% of expected.");
          }
        }
        else
        {
          FwkInfo("EventTest::doTwinkleRegion() will destroy a region.");
          Region region = GetRandomRegion(true);
          if (region != null)
          {
            region.LocalDestroyRegion();
            region = null;
          }
          regionDestroyed = true;
          FwkInfo("EventTest::doTwinkleRegion() local region destroy is complete.");
        }
      } // while
      // Leave the system with a live region if we exited mid-cycle.
      if (regionDestroyed)
      {
        CreateRootRegion();
        FwkInfo("EventTest::doTwinkleRegion() region created.");
      }
      FwkInfo("EventTest::doTwinkleRegion() completed.");
      if (errCnt > 0)
      {
        FwkException("Region key count was out of bounds on " + errCnt + " region creates.");
      }
    }

    // TODO Entire method check.
    /// <summary>
    /// Lazily initializes the cache for the given task if no cache exists yet.
    /// </summary>
    public void checkTest(string taskId)
    {
      // TODO: For lock
      // SpinLockGuard guard( m_lck );
      // TODO: setTask(taskId)
      if (CacheHelper<TKey, TVal>.DCache == null)
      {
        Properties pp = new Properties();
        //TODO: Initialize?
cacheInitialize( pp );
        //string val = getStringValue( "EventBB" );
        //if ( !val.empty() )
        //{
        //  m_sEventBB = val;
        //}
      }
    }

    /// <summary>
    /// Creates the named root region (or the configured default when
    /// regionName is null) and bumps the per-region and global root-region
    /// counters on the blackboard.
    /// </summary>
    public void createRootRegion(string regionName)
    {
      FwkInfo("In createRootRegion region");
      Region rootRegion;
      if (regionName == null)
      {
        rootRegion = CreateRootRegion();
      }
      else
      {
        rootRegion = CacheHelper.CreateRegion(regionName, null);
      }
      Util.BBIncrement(EventCountersBB, rootRegion.FullPath);
      Util.BBIncrement(EventCountersBB, "ROOT_REGION_COUNT");
      FwkInfo("In createRootRegion, Created root region: " + rootRegion.FullPath);
    }

    /// <summary>
    /// Returns true (an entry ERROR) when the key is missing or has no value
    /// in the given mirror region; false when the entry propagated correctly.
    /// </summary>
    public bool TestEntryPropagation(Region region, CacheableString szKey, CacheableBytes szValue)
    {
      bool bEntryError = false;
      bool bContainsKey = false;
      bool bContainsValue = false;
      bContainsKey = region.ContainsKey(szKey);
      bContainsValue = region.ContainsValueForKey(szKey);
      if (!bContainsKey || !bContainsValue)
      {
        FwkSevere("Key: " + szKey.Value + " not found in region " + region.FullPath + ", mirroring is enabled");
        bEntryError = true;
      }
      return bEntryError;
    }

    /// <summary>
    /// Creates one entry in the region. A null pszKey picks a key not yet in
    /// the cache; a null pszValue generates a random byte array of
    /// "valueSizes" bytes (default 1000).
    /// </summary>
    public void addObject(Region region, bool bLogAddition, string pszKey, string pszValue)
    {
      CacheableKey key;
      if (pszKey == null)
      {
        key = findKeyNotInCache(region);
      }
      else
      {
        key = new CacheableString(pszKey);
      }
      if (key == null)
      {
        FwkInfo("EventTest::addObject null key generated for " + pszKey);
        return;
      }
      Serializable value;
      if (pszValue == null)
      {
        int vsize = GetUIntValue("valueSizes");
        if (vsize < 0)
        {
          vsize = 1000;
        }
        byte[] buffer = new byte[vsize];
        Util.RandBytes(buffer);
        value = CacheableBytes.Create(buffer);
      }
      else
      {
        value = new CacheableString(pszValue);
      }
      if (value == null)
      {
        FwkInfo("EventTest::addObject null valuePtr generated.");
        return;
      }
      region.Create(key, value);
      Util.BBIncrement(EventCountersBB, "CREATE_COUNT");
    }

    /// <summary>
    /// Invalidates a random key (locally or distributed) and bumps the
    /// matching blackboard counter; skips when no key is available.
    /// </summary>
    public void invalidateObject(Region randomRegion, bool bIsLocalInvalidate)
    {
      CacheableKey keyP = getKey(randomRegion, false);
      if (keyP == null)
      {
        Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT");
        return;
      }
      if (bIsLocalInvalidate)
      {
        randomRegion.LocalInvalidate(keyP);
        Util.BBIncrement(EventCountersBB, "LOCAL_INVALIDATE_COUNT");
      }
      else
      {
        randomRegion.Invalidate(keyP);
        Util.BBIncrement(EventCountersBB, "INVALIDATE_COUNT");
      }
    }

    /// <summary>
    /// Destroys a random key (locally or distributed) and bumps the matching
    /// blackboard counter; skips when no key is available.
    /// </summary>
    public void destroyObject(Region randomRegion, bool bIsLocalDestroy)
    {
      FwkInfo("EventTest::destroyObject");
      CacheableKey keyP = getKey(randomRegion, true);
      if (keyP == null)
      {
        Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT");
        return;
      }
      if (bIsLocalDestroy)
      {
        randomRegion.LocalDestroy(keyP);
        Util.BBIncrement(EventCountersBB, "LOCAL_DESTROY_COUNT");
      }
      else
      {
        randomRegion.Destroy(keyP);
        Util.BBIncrement(EventCountersBB, "DESTROY_COUNT");
      }
    }

    /// <summary>
    /// Replaces the value of a random key with a fresh random byte array.
    /// </summary>
    public void updateObject(Region randomRegion)
    {
      CacheableKey keyP = getKey(randomRegion, true);
      if (keyP == null)
      {
        FwkInfo("EventTest::updateObject key is null");
        Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT");
        return;
      }
      // NOTE(review): anObj is fetched but never used - possibly kept to force
      // a load before the put; confirm before removing.
      ISerializable anObj = randomRegion.Get(keyP);
      int vsize = GetUIntValue("valueSizes");
      if (vsize < 0)
      {
        vsize = 1000;
      }
      byte[] buffer = new byte[vsize];
      Util.RandBytes(buffer);
      CacheableBytes newObj = CacheableBytes.Create(buffer);
      randomRegion.Put(keyP, newObj);
    }

    /// <summary>Reads the value of a random key (result discarded).</summary>
    public void readObject(Region randomRegion)
    {
      FwkInfo("Inside readObject randomregion = {0}", randomRegion.FullPath);
      CacheableKey keyP = getKey(randomRegion, true);
      FwkInfo("After getkey");
      if (keyP == null)
      {
        Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT");
        FwkInfo("skipped and returned");
        return;
      }
      FwkInfo("skipped and returned before Get");
      CacheableBytes anObj = randomRegion.Get(keyP) as CacheableBytes;
      FwkInfo("got anobj");
      //byte[] b = anObj.Value;
      //FwkInfo("byte array = " + b.ToString());
    }

    /// <summary>
    /// Creates a new (sub)region named by getNextRegionName() and seeds it
    /// with "initRegionNumObjects" entries.
    /// </summary>
    public void addRegion()
    {
      Region parentRegion = null;
      string sRegionName = getNextRegionName(parentRegion);
      if (sRegionName.Length == 0)
      {
        // nothing to do
        return;
      }
      Region region;
      FwkInfo("In addRegion, enter create region " + sRegionName);
      if (parentRegion == null)
      {
        // TODO Is this right.
region = CacheHelper.CreateRegion(sRegionName, null);
      }
      else
      {
        string fullName = parentRegion.FullPath;
        var regionAttributes = parentRegion.Attributes;
        var regionAttributesFactory = new AttributesFactory(regionAttributes);
        regionAttributes = regionAttributesFactory.Create();
        region = parentRegion.CreateSubRegion(sRegionName, regionAttributes);
        // Remember the parent path so the subregion can be found later.
        Util.BBSet(EventCountersBB, sRegionName, fullName);
      }
      int iInitRegionNumObjects = GetUIntValue("initRegionNumObjects");
      // Create objects in the new region
      for (int iIndex = 0; iIndex < iInitRegionNumObjects; iIndex++)
      {
        string skey = iIndex.ToString();
        addObject(region, true, skey, null);
      }
      FwkInfo("In addRegion, exit create region " + sRegionName);
    }

    /// <summary>
    /// Clears the region (locally or distributed) and adds the affected
    /// region count (self + subregions) to the matching blackboard counter.
    /// </summary>
    public void clearRegion(Region randomRegion, bool bIsLocalClear)
    {
      int iSubRegionCount = 0;
      // invalidate the region
      iSubRegionCount = getSubRegionCount(randomRegion) + 1;
      //bbGet("EventCountersBB", // TODO
      // "numAfterRegionInvalidateEvents_isNotExp", &iBeforeCounter);
      FwkInfo("In clearRegion, enter clear region " + randomRegion.Name);
      string pszCounterName = "LOCAL_REGION_CLEAR_COUNT";
      if (bIsLocalClear)
      {
        randomRegion.LocalClear();
      }
      else
      {
        pszCounterName = "REGION_CLEAR_COUNT";
        randomRegion.Clear();
      }
      Util.BBAdd(EventCountersBB, pszCounterName, iSubRegionCount);
      FwkInfo("In clearRegion, exit invalidate region " + randomRegion.Name);
    }

    /// <summary>
    /// Invalidates the region (locally or distributed) and adds the affected
    /// region count (self + subregions) to the matching blackboard counter.
    /// </summary>
    public void invalidateRegion(Region randomRegion, bool bIsLocalInvalidate)
    {
      int iSubRegionCount = 0;
      // invalidate the region
      iSubRegionCount = getSubRegionCount(randomRegion) + 1;
      //bbGet("EventCountersBB", // TODO
      // "numAfterRegionInvalidateEvents_isNotExp", &iBeforeCounter);
      FwkInfo("In invalidateRegion, enter invalidate region " + randomRegion.Name);
      string pszCounterName = "LOCAL_REGION_INVALIDATE_COUNT";
      if (bIsLocalInvalidate)
      {
        randomRegion.LocalInvalidateRegion();
      }
      else
      {
        pszCounterName = "REGION_INVALIDATE_COUNT";
        randomRegion.InvalidateRegion();
      }
      Util.BBAdd(EventCountersBB, pszCounterName, iSubRegionCount);
      FwkInfo("In invalidateRegion, exit invalidate region " + randomRegion.Name);
    }

    /// <summary>
    /// Destroys the region (locally or distributed) and bumps the matching
    /// blackboard counter.
    /// </summary>
    public void destroyRegion(Region randomRegion, bool bIsLocalDestroy)
    {
      int iSubRegionCount = 0;
      // destroy the region
      //int iBeforeCounter = -1;
      //bbGet( "EventCountersBB", "numAfterRegionDestroyEvents_isNotExp", &iBeforeCounter );//TODO
      iSubRegionCount = getSubRegionCount(randomRegion) + 1;
      string pszCounterName = "LOCAL_REGION_DESTROY_COUNT";
      FwkInfo("In destroyRegion, enter destroy region " + randomRegion.Name);
      if (bIsLocalDestroy)
      {
        randomRegion.LocalDestroyRegion();
      }
      else
      {
        pszCounterName = "REGION_DESTROY_COUNT";
        randomRegion.DestroyRegion();
      }
      Util.BBIncrement(EventCountersBB, pszCounterName);
      FwkInfo("In destroyRegion, exit destroy region " + randomRegion.Name);
    }

    /// <summary>
    /// Picks a region uniformly at random from all roots (when
    /// bAllowRootRegion) plus every subregion; returns null when there is no
    /// candidate.
    /// </summary>
    public Region GetRandomRegion(bool bAllowRootRegion)
    {
      FwkInfo("Inside GetRandomRegion ... Check 1");
      Region[] rootRegionVector = CacheHelper<TKey, TVal>.DCache.RootRegions();
      int iRootSize = rootRegionVector.Length;
      if (iRootSize == 0)
      {
        return null;
      }
      // BUGFIX: the original allocated 'new Region[1]' and then wrote
      // choseRegionVector[iRootIndex] (overflows with >1 root) and
      // choseRegionVector[choseRegionVector.Length] (ALWAYS throws
      // IndexOutOfRangeException when any subregion exists). Count the
      // candidates first, then allocate the exact size.
      Region[][] subRegionVectors = new Region[iRootSize][];
      int iCandidateCount = bAllowRootRegion ? iRootSize : 0;
      for (int iRootIndex = 0; iRootIndex < iRootSize; iRootIndex++)
      {
        subRegionVectors[iRootIndex] = rootRegionVector[iRootIndex].SubRegions(true);
        iCandidateCount += subRegionVectors[iRootIndex].Length;
      }
      if (iCandidateCount == 0)
      {
        // Roots exist but may not be chosen, and there are no subregions.
        return null;
      }
      Region[] choseRegionVector = new Region[iCandidateCount];
      int iFill = 0;
      // if roots can be chosen, add them to candidates
      if (bAllowRootRegion)
      {
        for (int iRootIndex = 0; iRootIndex < iRootSize; iRootIndex++)
        {
          choseRegionVector[iFill++] = rootRegionVector[iRootIndex];
        }
      }
      // add all subregions
      for (int iRootIndex = 0; iRootIndex < iRootSize; iRootIndex++)
      {
        for (int iSubIndex = 0; iSubIndex < subRegionVectors[iRootIndex].Length; iSubIndex++)
        {
          choseRegionVector[iFill++] = subRegionVectors[iRootIndex][iSubIndex];
        }
      }
      int idx = Util.Rand(iCandidateCount);
      FwkInfo("Inside GetRandomRegion ... Check 7");
      return choseRegionVector[idx];
    }

    /// <summary>Logs and swallows an exception the test expected to see.</summary>
    public void handleExpectedException(Exception e)
    {
      FwkInfo("Caught and ignored: " + e.Message);
    }

    /// <summary>
    /// Asserts that the entry for 'key' is present but invalidated: the key
    /// exists, has no value, and its RegionEntry carries a null value.
    /// </summary>
    public void verifyObjectInvalidated(Region region, CacheableKey key)
    {
      if ((region == null) && (key == null))
      {
        return;
      }
      // BUGFIX: was 'string error = null;' - on the success path the final
      // error.Length check then always threw NullReferenceException.
      string error = string.Empty;
      if (!region.ContainsKey(key))
      {
        error = "unexpected contains key";
      }
      if (region.ContainsValueForKey(key))
      {
        error = "Unexpected containsValueForKey ";
      }
      RegionEntry entry = region.GetEntry(key);
      if (entry == null)
      {
        error = "getEntry returned null";
      }
      else
      {
        // NOTE(review): '!=' compares references here unless CacheableKey
        // overloads operator== - confirm key identity vs. equality is intended.
        if (entry.Key != key)
        {
          error = "Keys are different";
        }
        if (entry.Value != null)
        {
          error = "Expected value to be null";
        }
      }
      if (error.Length != 0)
      {
        FwkException(error);
      }
    }

    /// <summary>
    /// Asserts that the entry for 'key' was destroyed: neither the key nor a
    /// value for it may remain in the region.
    /// </summary>
    public void verifyObjectDestroyed(Region region, CacheableKey key)
    {
      if ((region == null) && (key == null))
      {
        return;
      }
      string osError;
      bool bContainsKey = region.ContainsKey(key);
      if (bContainsKey)
      {
        // TODO key.ToString()
        osError = "Unexpected containsKey " + bContainsKey + " for key " + key.ToString() + " in region " + region.FullPath + Environment.NewLine;
      }
      bool bContainsValueForKey = region.ContainsValueForKey(key);
      if (bContainsValueForKey)
      {
        osError = "Unexpected containsValueForKey " + bContainsValueForKey +
" for key " + key.ToString() + " in region " + region.FullPath + Environment.NewLine;
      }
      RegionEntry entry = region.GetEntry(key);
      // NOTE(review): osError is built but never reported - the reporting code
      // below is still the commented-out C++ original, so this method currently
      // detects violations without failing the test. Flagged for follow-up.
      // TODO ... see this section
      //if (entry != null)
      //{
      // CacheableString entryKey = key.;
      // CacheableBytes entryValuePtr = entryPtr->getValue();
      // osError << "getEntry for key " << CacheableStringPtr( keyPtr )->asChar() <<
      //" in region " << regionPtr->getFullPath() <<
      //" returned was non-null; getKey is " << entryKeyPtr->asChar() <<
      //", value is " << entryValuePtr->bytes() << "\n";
      //}
      //if (sError.size() > 0)
      //{
      // FWKEXCEPTION(sError);
      //}
    }

    /// <summary>
    /// Iterates all entries of aRegion, counting keys and non-null values.
    /// NOTE(review): ulKeysInRegion, ulNoneNullValuesInRegion and sError are
    /// value parameters (uint/string), so every result computed here is lost
    /// when the method returns - callers always see their original values.
    /// They would need to be ref/out parameters to work as the C++ original
    /// (pointer arguments) did. Flagged, not changed.
    /// </summary>
    public void iterateRegion(Region aRegion, bool bAllowZeroKeys, bool bAllowZeroNonNullValues, uint ulKeysInRegion, uint ulNoneNullValuesInRegion, string sError)
    {
      if (aRegion == null)
      {
        return;
      }
      ulKeysInRegion = 0;
      ulNoneNullValuesInRegion = 0;
      ICacheableKey[] keyVector = aRegion.GetKeys();
      ulKeysInRegion = (uint)keyVector.Length;
      if (ulKeysInRegion == 0)
      {
        if (!bAllowZeroKeys)
        {
          sError = "Region " + aRegion.FullPath + " has " + ulKeysInRegion + " keys" + Environment.NewLine;
        }
      }
      CacheableKey key = null;
      ISerializable value = null;
      for (uint ulIndex = 0; ulIndex < ulKeysInRegion; ulIndex++)
      {
        key = keyVector[ulIndex] as CacheableKey;
        try
        {
          value = aRegion.Get(key);
        }
        catch (CacheLoaderException e)
        {
          FwkException("CacheLoaderException " + e.Message);
        }
        catch (TimeoutException e)
        {
          FwkException("TimeoutException " + e.Message);
        }
        if (value != null)
        {
          ulNoneNullValuesInRegion++;
        }
      }
      if (ulNoneNullValuesInRegion == 0)
      {
        if (!bAllowZeroNonNullValues)
        {
          sError += "Region " + aRegion.FullPath + " has " + ulNoneNullValuesInRegion + " non-null values" + Environment.NewLine;
        }
      }
    }

    /// <summary>Number of subregions (recursive) under the given region.</summary>
    public int getSubRegionCount(Region region)
    {
      Region[] subregions = region.SubRegions(true);
      return subregions.Length;
    }

    /// <summary>
    /// Total region count: all roots plus all their (recursive) subregions.
    /// Returns 0 (with FwkSevere) when no cache connection exists.
    /// </summary>
    public int getAllRegionCount()
    {
      if (CacheHelper<TKey, TVal>.DCache == null)
      {
        FwkSevere("Null cache pointer, no connection established.");
        return 0;
      }
      Region[] rootRegions = CacheHelper<TKey, TVal>.DCache.RootRegions();
      int iRootSize = rootRegions.Length;
      int iTotalRegions = iRootSize;
      for (int iIndex = 0; iIndex < iRootSize; iIndex++)
      {
        // TODO getSubRegionCount implementation
        iTotalRegions += getSubRegionCount(rootRegions[iIndex]);
      }
      return iTotalRegions;
    }

    /// <summary>
    /// Picks a key: the "randomKey" setting when positive, otherwise a random
    /// existing key of the region. When bInvalidOK is false, scans (wrapping
    /// around once) for a key that still has a value; returns null when the
    /// region is empty or every value is invalid.
    /// </summary>
    public CacheableKey getKey(Region region, bool bInvalidOK)
    {
      FwkInfo("random key check 1");
      //int randomKey = int.Parse((string)Util.ReadObject("randomKey"));
      int randomKey = GetUIntValue("randomKey");
      CacheableKey keyP = null;
      FwkInfo("random key check 2 ... randomkey = {0}", randomKey);
      if (randomKey > 0)
      {
        string sKey = randomKey.ToString();
        keyP = new CacheableString(sKey);
        FwkInfo("random key check 2.1 .. keyP.tostring = {0}", keyP.ToString());
        return keyP;
      }
      FwkInfo("random key check 3");
      ICacheableKey[] keys = region.GetKeys();
      int iKeySize = keys.Length;
      if (iKeySize == 0)
      {
        return keyP;
      }
      FwkInfo("random key check 4");
      int iStartAt = Util.Rand(iKeySize);
      if (bInvalidOK)
      {
        return keys[iStartAt] as CacheableKey;
      }
      int iKeyIndex = iStartAt;
      do
      {
        FwkInfo("random key check 5");
        bool hasValue = region.ContainsValueForKey(keys[iKeyIndex]);
        if (hasValue)
        {
          return keys[iKeyIndex] as CacheableKey;
        }
        iKeyIndex++;
        if (iKeyIndex >= iKeySize)
        {
          iKeyIndex = 0;
        }
      } while (iKeyIndex != iStartAt);
      FwkInfo("getKey: All values invalid in region");
      return keyP;
    }

    /// <summary>Publishes an error message on the event blackboard.</summary>
    public void setEventError(string pszMsg)
    {
      Util.BBSet(EventCountersBB, "EventErrorMessage", pszMsg);
    }

    /// <summary>Locally destroys the region and decrements its blackboard counter.</summary>
    public void removeRegion(Region region)
    {
      string name = region.FullPath;
      FwkInfo("In removeRegion, local destroy on " + name);
      region.LocalDestroyRegion();
      Util.BBDecrement(EventCountersBB, name);
    }

    /// <summary>Number of root regions in the cache.</summary>
    public Int32 getRegionCount()
    {
      FwkInfo("Check 1.1 Inside getRegionCount");
      Region[] roots = CacheHelper<TKey, TVal>.DCache.RootRegions();
      FwkInfo("Check 1.1 root region count = {0}", roots.Length);
      return roots.Length;
    }

    #endregion

    #region Callback create methods

    // Factory methods referenced by name from test XML configuration.
    public static ICacheWriter CreateETCacheWriter()
    {
      return new ETCacheWriter();
    }

    public static ICacheLoader CreateETCacheLoader()
    {
      return new ETCacheLoader();
    }

    public static ICacheListener CreateETCacheListener()
    {
      return new ETCacheListener();
    }

    #endregion
  }
}
//
// RenderCairo.cs
//
// Author:
//   Krzysztof Marecki
//
// Copyright (c) 2010 Krzysztof Marecki
//
// This file is part of the NReports project
// This file is part of the My-FyiReporting project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections;
using System.Globalization;
using System.Threading;
using fyiReporting.RDL;
using Pango;

namespace fyiReporting.RdlGtkViewer
{
    /// <summary>
    /// Renders RDL page items (text, lines, images, rectangles) onto a Cairo
    /// drawing context, using Pango for text layout.
    /// </summary>
    public class RenderCairo : IDisposable
    {
        Cairo.Context g;            // target drawing context (not owned)
        Pango.Layout layout;        // Pango layout used for text; disposed by us
        float scale = 1.0f;         // uniform zoom factor applied to the context
        float dpiX = 96;            // horizontal DPI used by PixelsX
        float dpiY = 96;            // vertical DPI used by PixelsY

        public RenderCairo(Cairo.Context g) : this(g, 1.0f)
        {
        }

        public RenderCairo(Cairo.Context g, float scale)
        {
            this.g = g;
            this.layout = Pango.CairoHelper.CreateLayout(g);
            this.scale = scale;
            g.Scale(scale, scale);
        }

        /// <summary>Releases the Pango layout; the Cairo context is owned by the caller.</summary>
        public void Dispose()
        {
            if (layout != null)
            {
                layout.Dispose();
            }
        }

        // Converts report units (1/96 inch) to device pixels on the X axis.
        internal float PixelsX(float x)
        {
            return (x * dpiX / 96.0f);
        }

        // Converts report units (1/96 inch) to device pixels on the Y axis.
        internal float PixelsY(float y)
        {
            return (y * dpiY / 96.0f);
        }

        /// <summary>
        /// Draws every PageItem of a page: html composites recurse, then lines,
        /// background images, text, images and rectangles are dispatched to the
        /// matching Draw* helper.
        /// </summary>
        private void ProcessPage(Cairo.Context g, IEnumerable p)
        {
            foreach (PageItem pi in p)
            {
                if (pi is PageTextHtml)
                {
                    // PageTextHtml is actually a composite object (just like a page)
                    ProcessHtml(pi as PageTextHtml, g);
                    continue;
                }

                if (pi is PageLine)
                {
                    PageLine pl = pi as PageLine;
                    DrawLine(
                        pl.SI.BColorLeft.ToCairoColor(), pl.SI.BStyleLeft, pl.SI.BWidthLeft,
                        g, PixelsX(pl.X), PixelsY(pl.Y), PixelsX(pl.X2), PixelsY(pl.Y2)
                    );
                    continue;
                }

                // RectangleF rect = new RectangleF(PixelsX(pi.X), PixelsY(pi.Y), PixelsX(pi.W), PixelsY(pi.H));
                Cairo.Rectangle rect = new Cairo.Rectangle(PixelsX(pi.X), PixelsY(pi.Y), PixelsX(pi.W), PixelsY(pi.H));

                if (pi.SI.BackgroundImage != null)
                {
                    // put out any background image
                    PageImage i = pi.SI.BackgroundImage;
                    DrawImage(i, g, rect);
                    continue;
                }

                if (pi is PageText)
                {
                    PageText pt = pi as PageText;
                    DrawString(pt, g, rect);
                }

                if (pi is PageImage)
                {
                    PageImage i = pi as PageImage;
                    DrawImage(i, g, rect);
                }

                if (pi is PageRectangle)
                {
                    //DrawBackground(g, rect, pi.SI);
                }
                // else if (pi is PageEllipse)
                // {
                //  PageEllipse pe = pi as PageEllipse;
                //  DrawEllipse(pe, g, rect);
                // }
                // else if (pi is PagePie)
                // {
                //  PagePie pp = pi as PagePie;
                //  DrawPie(pp, g, rect);
                // }
                // else if (pi is PagePolygon)
                // {
                //  PagePolygon ppo = pi as PagePolygon;
                //  FillPolygon(ppo, g, rect);
                // }
                // else if (pi is PageCurve)
                // {
                //  PageCurve pc = pi as PageCurve;
                //  DrawCurve(pc.SI.BColorLeft, pc.SI.BStyleLeft, pc.SI.BWidthLeft,
                //   g, pc.Points, pc.Offset, pc.Tension);
                // }
                // DrawBorder(pi, g, rect);
            }
        }

        // Html items are composites: render their sub-items like a page.
        private void ProcessHtml(PageTextHtml pth, Cairo.Context g)
        {
            // pth.Build(g);  // Builds the subobjects that make up the html
            this.ProcessPage(g, pth);
        }

        /// <summary>
        /// Strokes a single line from (x,y) to (x2,y2) with the given color,
        /// border style (mapped to a Cairo dash pattern) and width.
        /// </summary>
        private void DrawLine(Cairo.Color c, BorderStyleEnum bs, float w, Cairo.Context g, double x, double y, double x2, double y2)
        {
            if (bs == BorderStyleEnum.None//|| c.IsEmpty
                || w <= 0) // nothing to draw
                return;

            g.Save();
            // Pen p = null;
            // p = new Pen(c, w);
            g.Color = c;
            g.LineWidth = w;
            switch (bs)
            {
                case BorderStyleEnum.Dashed:
                    // p.DashStyle = DashStyle.Dash;
                    g.SetDash(new double[] { 2, 1 }, 0.0);
                    break;
                case BorderStyleEnum.Dotted:
                    // p.DashStyle = DashStyle.Dot;
                    g.SetDash(new double[] { 1 }, 0.0);
                    break;
                case BorderStyleEnum.Double:
                case BorderStyleEnum.Groove:
                case BorderStyleEnum.Inset:
                case BorderStyleEnum.Solid:
                case BorderStyleEnum.Outset:
                case BorderStyleEnum.Ridge:
                case BorderStyleEnum.WindowInset:
                default:
                    g.SetDash(new double[] { }, 0.0);
                    break;
            }
            // g.DrawLine(p, x, y, x2, y2);
            g.MoveTo(x, y);
            g.LineTo(x2, y2);
            g.Stroke();
            g.Restore();
        }

        /// <summary>
        /// Decodes the image bytes into a Pixbuf and delegates to
        /// DrawImageSized; the Pixbuf is always disposed.
        /// </summary>
        private void DrawImage(PageImage pi, Cairo.Context g, Cairo.Rectangle r)
        {
            // Stream strm = null;
            // System.Drawing.Image im = null;
            Gdk.Pixbuf im = null;
            try
            {
                // strm = new MemoryStream (pi.ImageData);
                // im = System.Drawing.Image.FromStream (strm);
                im = new Gdk.Pixbuf(pi.ImageData);
                DrawImageSized(pi, im, g, r);
            }
            finally
            {
                // if (strm != null)
                //  strm.Close();
                if (im != null)
                    im.Dispose();
            }
        }

        /// <summary>
        /// Draws the pixbuf into the padded rectangle honoring the image's
        /// Sizing mode (AutoSize, Clip, FitProportional, Fit).
        /// </summary>
        private void DrawImageSized(PageImage pi, Gdk.Pixbuf im, Cairo.Context g, Cairo.Rectangle r)
        {
            double height, width; // some work variables
            StyleInfo si = pi.SI;

            // adjust drawing rectangle based on padding
            // System.Drawing.RectangleF r2 = new System.Drawing.RectangleF(r.Left + PixelsX(si.PaddingLeft),
            //  r.Top + PixelsY(si.PaddingTop),
            //  r.Width - PixelsX(si.PaddingLeft + si.PaddingRight),
            //  r.Height - PixelsY(si.PaddingTop + si.PaddingBottom));
            Cairo.Rectangle r2 = new Cairo.Rectangle(r.X + PixelsX(si.PaddingLeft),
                r.Y + PixelsY(si.PaddingTop),
                r.Width - PixelsX(si.PaddingLeft + si.PaddingRight),
                r.Height - PixelsY(si.PaddingTop + si.PaddingBottom));

            Cairo.Rectangle ir; // int work rectangle
            switch (pi.Sizing)
            {
                case ImageSizingEnum.AutoSize:
                    // // Note: GDI+ will stretch an image when you only provide
                    // // the left/top coordinates. This seems pretty stupid since
                    // // it results in the image being out of focus even though
                    // // you don't want the image resized.
                    // if (g.DpiX == im.HorizontalResolution &&
                    //  g.DpiY == im.VerticalResolution)
                    // NOTE(review): imheight uses PixelsX, not PixelsY - harmless
                    // while dpiX == dpiY (both 96) but looks like a copy-paste
                    // slip; confirm before relying on differing DPIs.
                    float imwidth = PixelsX(im.Width);
                    float imheight = PixelsX(im.Height);
                    ir = new Cairo.Rectangle(Convert.ToInt32(r2.X), Convert.ToInt32(r2.Y),
                        imwidth, imheight);
                    // else
                    //  ir = new Cairo.Rectangle(Convert.ToInt32(r2.X), Convert.ToInt32(r2.Y),
                    //   Convert.ToInt32(r2.Width), Convert.ToInt32(r2.Height));
                    //g.DrawImage(im, ir);
                    im = im.ScaleSimple((int)r2.Width, (int)r2.Height, Gdk.InterpType.Hyper);
                    g.DrawPixbufRect(im, ir, scale);
                    break;
                case ImageSizingEnum.Clip:
                    // Region saveRegion = g.Clip;
                    g.Save();
                    // Region clipRegion = new Region(g.Clip.GetRegionData());
                    // clipRegion.Intersect(r2);
                    // g.Clip = clipRegion;
                    g.Rectangle(r2);
                    g.Clip();
                    // if (dpiX == im.HorizontalResolution &&
                    //  dpiY == im.VerticalResolution)
                    ir = new Cairo.Rectangle(Convert.ToInt32(r2.X), Convert.ToInt32(r2.Y),
                        im.Width, im.Height);
                    // else
                    //  ir = new Cairo.Rectangle(Convert.ToInt32(r2.X), Convert.ToInt32(r2.Y),
                    //   Convert.ToInt32(r2.Width), Convert.ToInt32(r2.Height));
                    // g.DrawImage(im, ir);
                    g.DrawPixbufRect(im, r2, scale);
                    // g.Clip = saveRegion;
                    g.Restore();
                    break;
                case ImageSizingEnum.FitProportional:
                    // Shrink one dimension so the image keeps its aspect ratio.
                    double ratioIm = (float)im.Height / (float)im.Width;
                    double ratioR = r2.Height / r2.Width;
                    height = r2.Height;
                    width = r2.Width;
                    if (ratioIm > ratioR)
                    {
                        // this means the rectangle width must be corrected
                        width = height * (1 / ratioIm);
                    }
                    else if (ratioIm < ratioR)
                    {
                        // this means the ractangle height must be corrected
                        height = width * ratioIm;
                    }
                    r2 = new Cairo.Rectangle(r2.X, r2.Y, width, height);
                    g.DrawPixbufRect(im, r2, scale);
                    break;
                case ImageSizingEnum.Fit:
                default:
                    g.DrawPixbufRect(im, r2, scale);
                    break;
            }
        }

        // Dispatches to the writing-mode specific text renderer.
        private void DrawString(PageText pt, Cairo.Context g, Cairo.Rectangle r)
        {
            switch (pt.SI.WritingMode)
            {
                case WritingModeEnum.lr_tb:
                    DrawStringHorizontal(pt, g, r);
                    break;
                case WritingModeEnum.tb_rl:
                    DrawStringTBRL(pt, g, r);
                    break;
                case WritingModeEnum.tb_lr:
                    DrawStringTBLR(pt, g, r);
                    break;
                default:
throw new NotSupportedException($"Writing mode {pt.SI.WritingMode} is not supported"); } } private void DrawStringHorizontal(PageText pt, Cairo.Context g, Cairo.Rectangle r) { StyleInfo si = pt.SI; string s = pt.Text; g.Save(); layout = CairoHelper.CreateLayout(g); float fontsize = si.FontSize * 72f / 96f; var font = FontDescription.FromString(string.Format("{0} {1}", si.GetFontFamily().Name, fontsize * PixelsX(1))); if (si.FontStyle == FontStyleEnum.Italic) font.Style = Style.Italic; switch (si.FontWeight) { case FontWeightEnum.Bold: case FontWeightEnum.Bolder: case FontWeightEnum.W500: case FontWeightEnum.W600: case FontWeightEnum.W700: case FontWeightEnum.W800: case FontWeightEnum.W900: font.Weight = Weight.Bold; break; } FontDescription oldfont = layout.FontDescription; layout.FontDescription = font; switch (si.TextAlign) { case TextAlignEnum.Right: layout.Alignment = Alignment.Right; break; case TextAlignEnum.Center: layout.Alignment = Alignment.Center; break; case TextAlignEnum.Left: default: layout.Alignment = Alignment.Left; break; } layout.Width = Units.FromPixels((int)(r.Width - si.PaddingLeft - si.PaddingRight - 2)); layout.Wrap = WrapMode.WordChar; layout.SetText(s); Rectangle logical; Rectangle ink; layout.GetExtents(out ink, out logical); double height = logical.Height / Scale.PangoScale; double y = 0; switch (si.VerticalAlign) { case VerticalAlignEnum.Top: y = r.Y + si.PaddingTop; break; case VerticalAlignEnum.Middle: y = r.Y + (r.Height - height) / 2; break; case VerticalAlignEnum.Bottom: y = r.Y + (r.Height - height) - si.PaddingBottom; break; } // draw the background DrawBackground(g, r, si); Cairo.Rectangle box = new Cairo.Rectangle( r.X + si.PaddingLeft + 1, y, r.Width, r.Height); g.Color = si.Color.ToCairoColor(); g.MoveTo(box.X, box.Y); CairoHelper.ShowLayout(g, layout); layout.FontDescription = oldfont; g.Restore(); } private void DrawStringTBRL(PageText pt, Cairo.Context g, Cairo.Rectangle r) { StyleInfo si = pt.SI; string s = pt.Text; 
g.Save(); layout = CairoHelper.CreateLayout(g); //Pango fonts are scaled to 72dpi, Windows fonts uses 96dpi float fontsize = si.FontSize * 72f / 96f; var font = FontDescription.FromString($"{si.GetFontFamily().Name} {fontsize * PixelsX(1)}"); if (si.FontStyle == FontStyleEnum.Italic) font.Style = Style.Italic; switch (si.FontWeight) { case FontWeightEnum.Bold: case FontWeightEnum.Bolder: case FontWeightEnum.W500: case FontWeightEnum.W600: case FontWeightEnum.W700: case FontWeightEnum.W800: case FontWeightEnum.W900: font.Weight = Weight.Bold; break; } FontDescription oldfont = layout.FontDescription; layout.FontDescription = font; switch (si.TextAlign) { case TextAlignEnum.Right: layout.Alignment = Alignment.Right; break; case TextAlignEnum.Center: layout.Alignment = Alignment.Center; break; case TextAlignEnum.Left: default: layout.Alignment = Alignment.Left; break; } layout.Width = Units.FromPixels((int)(r.Height - si.PaddingTop - si.PaddingBottom - 2)); layout.Wrap = WrapMode.WordChar; layout.SetText(s); Rectangle logical; Rectangle ink; layout.GetExtents(out ink, out logical); double height = logical.Height / Scale.PangoScale; double y = 0; double x = 0; switch (si.VerticalAlign) { case VerticalAlignEnum.Top: x = r.X + r.Width - si.PaddingRight; break; case VerticalAlignEnum.Middle: x = r.X + (r.Width + height) / 2; break; case VerticalAlignEnum.Bottom: x = r.X + height + si.PaddingLeft; break; } // draw the background DrawBackground(g, r, si); Cairo.Rectangle box = new Cairo.Rectangle( x, r.Y + si.PaddingTop + 1, r.Width, r.Height); g.Color = si.Color.ToCairoColor(); g.Rotate(90 * Math.PI / 180.0); CairoHelper.UpdateLayout(g, layout); g.MoveTo(box.Y, -box.X); CairoHelper.ShowLayout(g, layout); layout.FontDescription = oldfont; g.Restore(); } private void DrawStringTBLR(PageText pt, Cairo.Context g, Cairo.Rectangle r) { StyleInfo si = pt.SI; string s = pt.Text; g.Save(); layout = CairoHelper.CreateLayout(g); //Pango fonts are scaled to 72dpi, Windows fonts uses 
96dpi float fontsize = (si.FontSize * 72 / 96); var font = FontDescription.FromString($"{si.GetFontFamily().Name} {fontsize * PixelsX(1)}"); if (si.FontStyle == FontStyleEnum.Italic) font.Style = Style.Italic; switch (si.FontWeight) { case FontWeightEnum.Bold: case FontWeightEnum.Bolder: case FontWeightEnum.W500: case FontWeightEnum.W600: case FontWeightEnum.W700: case FontWeightEnum.W800: case FontWeightEnum.W900: font.Weight = Weight.Bold; break; } FontDescription oldfont = layout.FontDescription; layout.FontDescription = font; switch (si.TextAlign) { case TextAlignEnum.Right: layout.Alignment = Alignment.Right; break; case TextAlignEnum.Center: layout.Alignment = Alignment.Center; break; case TextAlignEnum.Left: default: layout.Alignment = Alignment.Left; break; } layout.Width = Units.FromPixels((int)(r.Height - si.PaddingTop - si.PaddingBottom - 2)); layout.Wrap = WrapMode.WordChar; layout.SetText(s); Rectangle logical; Rectangle ink; layout.GetExtents(out ink, out logical); double height = logical.Height / Scale.PangoScale; double y = 0; double x = 0; switch (si.VerticalAlign) { case VerticalAlignEnum.Top: x = r.X + si.PaddingLeft; break; case VerticalAlignEnum.Middle: x = r.X + (r.Width - height) / 2; break; case VerticalAlignEnum.Bottom: x = r.X + (r.Width - height) + si.PaddingLeft; break; } // draw the background DrawBackground(g, r, si); Cairo.Rectangle box = new Cairo.Rectangle( x, r.Y + r.Height - si.PaddingBottom - 1, r.Height - si.PaddingBottom - si.PaddingTop, r.Width - si.PaddingLeft + si.PaddingRight); g.Color = si.Color.ToCairoColor(); g.Rotate(270 * Math.PI / 180.0); CairoHelper.UpdateLayout(g, layout); g.MoveTo(-box.Y, box.X); CairoHelper.ShowLayout(g, layout); layout.FontDescription = oldfont; g.Restore(); } private void DrawBackground(Cairo.Context g, Cairo.Rectangle rect, StyleInfo si) { // LinearGradientBrush linGrBrush = null; // SolidBrush sb = null; if (si.BackgroundColor.IsEmpty) return; g.Save(); Cairo.Color c = 
si.BackgroundColor.ToCairoColor(); Cairo.Gradient gradient = null; if (si.BackgroundGradientType != BackgroundGradientTypeEnum.None && !si.BackgroundGradientEndColor.IsEmpty) { Cairo.Color ec = si.BackgroundGradientEndColor.ToCairoColor(); switch (si.BackgroundGradientType) { case BackgroundGradientTypeEnum.LeftRight: // linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Horizontal); gradient = new Cairo.LinearGradient(rect.X, rect.Y, rect.X + rect.Width, rect.Y); break; case BackgroundGradientTypeEnum.TopBottom: // linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Vertical); gradient = new Cairo.LinearGradient(rect.X, rect.Y, rect.X, rect.Y + rect.Height); break; case BackgroundGradientTypeEnum.Center: // linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Horizontal); throw new NotSupportedException(); // break; case BackgroundGradientTypeEnum.DiagonalLeft: // linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.ForwardDiagonal); gradient = new Cairo.LinearGradient(rect.X, rect.Y, rect.X + rect.Width, rect.Y + rect.Height); break; case BackgroundGradientTypeEnum.DiagonalRight: // linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.BackwardDiagonal); gradient = new Cairo.LinearGradient(rect.X + rect.Width, rect.Y + rect.Height, rect.X, rect.Y); break; case BackgroundGradientTypeEnum.HorizontalCenter: // linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Horizontal); throw new NotSupportedException(); // break; case BackgroundGradientTypeEnum.VerticalCenter: // linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Vertical); throw new NotSupportedException(); // break; default: break; } gradient.AddColorStop(0, c); gradient.AddColorStop(1, ec); } if (gradient != null) { //// g.FillRectangle(linGrBrush, rect); g.FillRectangle(rect, gradient); gradient.Destroy(); } else if (!si.BackgroundColor.IsEmpty) { g.FillRectangle(rect, c); // 
g.DrawRoundedRectangle (rect, 2, c, 1); // g.FillRoundedRectangle (rect, 8, c); } g.Restore(); } private void DrawBorder(PageItem pi, Cairo.Context g, Cairo.Rectangle r) { if (r.Height <= 0 || r.Width <= 0) // no bounding box to use return; double right = r.X + r.Width; double bottom = r.Y + r.Height; StyleInfo si = pi.SI; DrawLine(si.BColorTop.ToCairoColor(), si.BStyleTop, si.BWidthTop, g, r.X, r.Y, right, r.Y); DrawLine(si.BColorRight.ToCairoColor(), si.BStyleRight, si.BWidthRight, g, right, r.Y, right, bottom); DrawLine(si.BColorLeft.ToCairoColor(), si.BStyleLeft, si.BWidthLeft, g, r.X, r.Y, r.X, bottom); DrawLine(si.BColorBottom.ToCairoColor(), si.BStyleBottom, si.BWidthBottom, g, r.X, bottom, right, bottom); //if (si.) { // g.DrawRoundedRectangle (r, 8, si.BColorTop.ToCairoColor (), 1); //} } #region IRender implementation public void RunPages(Pages pgs) { //TODO : Why Cairo is broken when CurrentThread.CurrentCulture is set to local ? //At Linux when CurrentCulture is set to local culture, Cairo rendering is serious broken CultureInfo oldci = Thread.CurrentThread.CurrentCulture; Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture; try { foreach (Page p in pgs) { ProcessPage(g, p); break; } } finally { Thread.CurrentThread.CurrentCulture = oldci; } } public void RunPage(Page pgs) { //TODO : Why Cairo is broken when CurrentThread.CurrentCulture is set to local ? //At Linux when CurrentCulture is set to local culture, Cairo rendering is serious broken CultureInfo oldci = Thread.CurrentThread.CurrentCulture; Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture; try { ProcessPage(g, pgs); } finally { Thread.CurrentThread.CurrentCulture = oldci; } } #endregion } }
using Lucene.Net.Index; using System; using System.Collections.Generic; namespace Lucene.Net.Codecs.Lucene40 { using Lucene.Net.Randomized.Generators; using Lucene.Net.Support; using NUnit.Framework; using AtomicReader = Lucene.Net.Index.AtomicReader; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using Directory = Lucene.Net.Store.Directory; using DirectoryReader = Lucene.Net.Index.DirectoryReader; using DocsEnum = Lucene.Net.Index.DocsEnum; using IndexWriter = Lucene.Net.Index.IndexWriter; using IOUtils = Lucene.Net.Util.IOUtils; using LineFileDocs = Lucene.Net.Util.LineFileDocs; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using MatchNoBits = Lucene.Net.Util.Bits.MatchNoBits; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; using Terms = Lucene.Net.Index.Terms; using TermsEnum = Lucene.Net.Index.TermsEnum; using TestUtil = Lucene.Net.Util.TestUtil; // TODO: really this should be in BaseTestPF or somewhere else? useful test! 
    /// <summary>
    /// Verifies the DocsEnum reuse contract of the Lucene 4.0 postings format:
    /// an enum may only be reused when the liveness Bits are the same instance
    /// (or both null) and the enum came from the same reader.
    /// </summary>
    [TestFixture]
    public class TestReuseDocsEnum : LuceneTestCase
    {
        [OneTimeSetUp]
        public override void BeforeClass()
        {
            base.BeforeClass();
            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
        }

        // Passing a different Bits instance (or freshly created ones) for each term
        // must prevent reuse: every call should hand back a distinct DocsEnum.
        [Test]
        public virtual void TestReuseDocsEnumNoReuse()
        {
            Directory dir = NewDirectory();
            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
            int numdocs = AtLeast(20);
            CreateRandomIndex(numdocs, writer, Random());
            writer.Commit();

            DirectoryReader open = DirectoryReader.Open(dir);
            foreach (AtomicReaderContext ctx in open.Leaves)
            {
                AtomicReader indexReader = (AtomicReader)ctx.Reader;
                Terms terms = indexReader.GetTerms("body");
                TermsEnum iterator = terms.GetIterator(null);
                // identity map: counts DISTINCT DocsEnum instances, not equal ones
                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
                MatchNoBits bits = new MatchNoBits(indexReader.MaxDoc);
                while ((iterator.Next()) != null)
                {
                    // reuse argument is null, so each term must produce a new enum
                    DocsEnum docs = iterator.Docs(Random().NextBoolean() ? bits : new MatchNoBits(indexReader.MaxDoc), null, Random().NextBoolean() ? DocsFlags.FREQS : DocsFlags.NONE);
                    enums[docs] = true;
                }

                Assert.AreEqual(terms.Count, enums.Count);
            }
            IOUtils.Dispose(writer, open, dir);
        }

        // tests for reuse only if bits are the same either null or the same instance
        [Test]
        public virtual void TestReuseDocsEnumSameBitsOrNull()
        {
            Directory dir = NewDirectory();
            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
            int numdocs = AtLeast(20);
            CreateRandomIndex(numdocs, writer, Random());
            writer.Commit();

            DirectoryReader open = DirectoryReader.Open(dir);
            foreach (AtomicReaderContext ctx in open.Leaves)
            {
                Terms terms = ((AtomicReader)ctx.Reader).GetTerms("body");
                TermsEnum iterator = terms.GetIterator(null);
                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
                MatchNoBits bits = new MatchNoBits(open.MaxDoc);
                DocsEnum docs = null;
                // same Bits instance every time -> the single enum must be reused
                while ((iterator.Next()) != null)
                {
                    docs = iterator.Docs(bits, docs, Random().NextBoolean() ? DocsFlags.FREQS : DocsFlags.NONE);
                    enums[docs] = true;
                }

                Assert.AreEqual(1, enums.Count);
                enums.Clear();
                iterator = terms.GetIterator(null);
                docs = null;
                // a brand-new Bits instance per call -> no reuse allowed
                while ((iterator.Next()) != null)
                {
                    docs = iterator.Docs(new MatchNoBits(open.MaxDoc), docs, Random().NextBoolean() ? DocsFlags.FREQS : DocsFlags.NONE);
                    enums[docs] = true;
                }
                Assert.AreEqual(terms.Count, enums.Count);

                enums.Clear();
                iterator = terms.GetIterator(null);
                docs = null;
                // null Bits every time -> the single enum must be reused
                while ((iterator.Next()) != null)
                {
                    docs = iterator.Docs(null, docs, Random().NextBoolean() ? DocsFlags.FREQS : DocsFlags.NONE);
                    enums[docs] = true;
                }
                Assert.AreEqual(1, enums.Count);
            }
            IOUtils.Dispose(writer, open, dir);
        }

        // make sure we never reuse from another reader even if it is the same field & codec etc
        [Test]
        public virtual void TestReuseDocsEnumDifferentReader()
        {
            Directory dir = NewDirectory();
            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
            MockAnalyzer analyzer = new MockAnalyzer(Random());
            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);

            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetCodec(cp));
            int numdocs = AtLeast(20);
            CreateRandomIndex(numdocs, writer, Random());
            writer.Commit();

            DirectoryReader firstReader = DirectoryReader.Open(dir);
            DirectoryReader secondReader = DirectoryReader.Open(dir);
            IList<AtomicReaderContext> leaves = firstReader.Leaves;
            IList<AtomicReaderContext> leaves2 = secondReader.Leaves;

            foreach (AtomicReaderContext ctx in leaves)
            {
                Terms terms = ((AtomicReader)ctx.Reader).GetTerms("body");
                TermsEnum iterator = terms.GetIterator(null);
                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
                MatchNoBits bits = new MatchNoBits(firstReader.MaxDoc);
                iterator = terms.GetIterator(null);
                DocsEnum docs = null;
                BytesRef term = null;
                // reuse candidates come from a DIFFERENT reader (leaves2), so each
                // call must still return a fresh enum
                while ((term = iterator.Next()) != null)
                {
                    docs = iterator.Docs(null, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsFlags.FREQS : DocsFlags.NONE);
                    enums[docs] = true;
                }
                Assert.AreEqual(terms.Count, enums.Count);

                iterator = terms.GetIterator(null);
                enums.Clear();
                docs = null;
                while ((term = iterator.Next()) != null)
                {
                    docs = iterator.Docs(bits, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsFlags.FREQS : DocsFlags.NONE);
                    enums[docs] = true;
                }
                Assert.AreEqual(terms.Count, enums.Count);
            }
            IOUtils.Dispose(writer, firstReader, secondReader, dir);
        }

        /// <summary>
        /// Picks a random leaf from <paramref name="readers"/> and returns a DocsEnum
        /// for <paramref name="term"/> in <paramref name="field"/>; returns null
        /// ~10% of the time, or when the field/term is absent.
        /// </summary>
        public virtual DocsEnum RandomDocsEnum(string field, BytesRef term, IList<AtomicReaderContext> readers, IBits bits)
        {
            if (Random().Next(10) == 0)
            {
                return null;
            }
            AtomicReader indexReader = (AtomicReader)readers[Random().Next(readers.Count)].Reader;
            Terms terms = indexReader.GetTerms(field);
            if (terms == null)
            {
                return null;
            }
            TermsEnum iterator = terms.GetIterator(null);
            if (iterator.SeekExact(term))
            {
                return iterator.Docs(bits, null, Random().NextBoolean() ? DocsFlags.FREQS : DocsFlags.NONE);
            }
            return null;
        }

        /// <summary>
        /// populates a writer with random stuff. this must be fully reproducible with
        /// the seed!
        /// </summary>
        public static void CreateRandomIndex(int numdocs, RandomIndexWriter writer, Random random)
        {
            LineFileDocs lineFileDocs = new LineFileDocs(random);

            for (int i = 0; i < numdocs; i++)
            {
                writer.AddDocument(lineFileDocs.NextDoc());
            }

            lineFileDocs.Dispose();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading.Tasks;

using Windows.Foundation;
using Windows.Storage.Streams;

namespace System.IO
{
    /// <summary>
    /// Contains extension methods for conversion between WinRT streams and managed streams.
    /// This class is the public facade for the stream adapters library.
    /// </summary>
    public static class WindowsRuntimeStreamExtensions
    {
        #region Constants and static Fields

        private const Int32 DefaultBufferSize = 16384;  // = 0x4000 = 16 KBytes.

        // Caches one managed adapter per WinRT stream so repeated conversions
        // return the same Stream instance (keys are weakly held).
        private static ConditionalWeakTable<Object, Stream> s_winRtToNetFxAdapterMap
                 = new ConditionalWeakTable<Object, Stream>();

        // Caches one WinRT adapter per managed stream (reverse direction).
        private static ConditionalWeakTable<Stream, NetFxToWinRtStreamAdapter> s_netFxToWinRtAdapterMap
                 = new ConditionalWeakTable<Stream, NetFxToWinRtStreamAdapter>();

        #endregion Constants and static Fields

        #region Helpers

#if DEBUG
        // Debug-only: asserts that (key -> value) is present in the given adapter map,
        // optionally allowing the mapped value to be the BufferedStream wrapping 'value'.
        private static void AssertMapContains<TKey, TValue>(ConditionalWeakTable<TKey, TValue> map, TKey key, TValue value,
                                                            bool valueMayBeWrappedInBufferedStream)
                                                        where TKey : class
                                                        where TValue : class
        {
            TValue valueInMap;

            Debug.Assert(key != null);

            bool hasValueForKey = map.TryGetValue(key, out valueInMap);

            Debug.Assert(hasValueForKey);

            if (valueMayBeWrappedInBufferedStream)
            {
                BufferedStream bufferedValueInMap = valueInMap as BufferedStream;
                Debug.Assert(Object.ReferenceEquals(value, valueInMap)
                                || (bufferedValueInMap != null && Object.ReferenceEquals(value, bufferedValueInMap.UnderlyingStream)));
            }
            else
            {
                Debug.Assert(Object.ReferenceEquals(value, valueInMap));
            }
        }
#endif  // DEBUG

        // Throws if a cached adapter's buffer size differs from the size the caller
        // is now requesting (the buffer size of an existing adapter cannot change).
        private static void EnsureAdapterBufferSize(Stream adapter, Int32 requiredBufferSize, String methodName)
        {
            Debug.Assert(adapter != null);
            Debug.Assert(!String.IsNullOrWhiteSpace(methodName));

            Int32 currentBufferSize = 0;
            BufferedStream bufferedAdapter = adapter as BufferedStream;
            if (bufferedAdapter != null)
                currentBufferSize = bufferedAdapter.BufferSize;

            if (requiredBufferSize != currentBufferSize)
            {
                if (requiredBufferSize == 0)
                    throw new InvalidOperationException(SR.Format(SR.InvalidOperation_CannotChangeBufferSizeOfWinRtStreamAdapterToZero, methodName));

                throw new InvalidOperationException(SR.Format(SR.InvalidOperation_CannotChangeBufferSizeOfWinRtStreamAdapter, methodName));
            }
        }

        #endregion Helpers

        #region WinRt-to-NetFx conversion

        /// <summary>Wraps a WinRT input stream as a read-only managed Stream (default buffer).</summary>
        [CLSCompliant(false)]
        public static Stream AsStreamForRead(this IInputStream windowsRuntimeStream)
        {
            return AsStreamInternal(windowsRuntimeStream, DefaultBufferSize, "AsStreamForRead", forceBufferSize: false);
        }

        /// <summary>Wraps a WinRT input stream as a read-only managed Stream with the given buffer size.</summary>
        [CLSCompliant(false)]
        public static Stream AsStreamForRead(this IInputStream windowsRuntimeStream, Int32 bufferSize)
        {
            return AsStreamInternal(windowsRuntimeStream, bufferSize, "AsStreamForRead", forceBufferSize: true);
        }

        /// <summary>Wraps a WinRT output stream as a write-only managed Stream (default buffer).</summary>
        [CLSCompliant(false)]
        public static Stream AsStreamForWrite(this IOutputStream windowsRuntimeStream)
        {
            return AsStreamInternal(windowsRuntimeStream, DefaultBufferSize, "AsStreamForWrite", forceBufferSize: false);
        }

        /// <summary>Wraps a WinRT output stream as a write-only managed Stream with the given buffer size.</summary>
        [CLSCompliant(false)]
        public static Stream AsStreamForWrite(this IOutputStream windowsRuntimeStream, Int32 bufferSize)
        {
            return AsStreamInternal(windowsRuntimeStream, bufferSize, "AsStreamForWrite", forceBufferSize: true);
        }

        /// <summary>Wraps a WinRT random-access stream as a seekable managed Stream (default buffer).</summary>
        [CLSCompliant(false)]
        public static Stream AsStream(this IRandomAccessStream windowsRuntimeStream)
        {
            return AsStreamInternal(windowsRuntimeStream, DefaultBufferSize, "AsStream", forceBufferSize: false);
        }

        /// <summary>Wraps a WinRT random-access stream as a seekable managed Stream with the given buffer size.</summary>
        [CLSCompliant(false)]
        public static Stream AsStream(this IRandomAccessStream windowsRuntimeStream, Int32 bufferSize)
        {
            return AsStreamInternal(windowsRuntimeStream, bufferSize, "AsStream", forceBufferSize: true);
        }

        // Shared implementation: unwraps round-tripped adapters, returns a cached
        // adapter when one exists, otherwise creates one (thread-safely).
        private static Stream AsStreamInternal(Object windowsRuntimeStream, Int32 bufferSize, String invokedMethodName, bool forceBufferSize)
        {
            if (windowsRuntimeStream == null)
                throw new ArgumentNullException(nameof(windowsRuntimeStream));

            if (bufferSize < 0)
                throw new ArgumentOutOfRangeException(nameof(bufferSize), SR.ArgumentOutOfRange_WinRtAdapterBufferSizeMayNotBeNegative);

            Debug.Assert(!String.IsNullOrWhiteSpace(invokedMethodName));
            Contract.Ensures(Contract.Result<Stream>() != null);
            Contract.EndContractBlock();

            // If the WinRT stream is actually a wrapped managed stream, we will unwrap it and return the original.
            // In that case we do not need to put the wrapper into the map.

            // We currently do capability-based adapter selection for WinRt->NetFx, but not vice versa (time constraints).
            // Once we added the reverse direction, we will be able to replace this entire section with just a few lines.
            NetFxToWinRtStreamAdapter sAdptr = windowsRuntimeStream as NetFxToWinRtStreamAdapter;
            if (sAdptr != null)
            {
                Stream wrappedNetFxStream = sAdptr.GetManagedStream();
                if (wrappedNetFxStream == null)
                    throw new ObjectDisposedException(nameof(windowsRuntimeStream), SR.ObjectDisposed_CannotPerformOperation);

#if DEBUG
                // In Chk builds, verify that the original managed stream is correctly entered into the NetFx->WinRT map:
                AssertMapContains(s_netFxToWinRtAdapterMap, wrappedNetFxStream, sAdptr, valueMayBeWrappedInBufferedStream: false);
#endif  // DEBUG

                return wrappedNetFxStream;
            }

            // We have a real WinRT stream.

            Stream adapter;
            bool adapterExists = s_winRtToNetFxAdapterMap.TryGetValue(windowsRuntimeStream, out adapter);

            // There is already an adapter:
            if (adapterExists)
            {
                Debug.Assert((adapter is BufferedStream && ((BufferedStream)adapter).UnderlyingStream is WinRtToNetFxStreamAdapter)
                                || (adapter is WinRtToNetFxStreamAdapter));

                if (forceBufferSize)
                    EnsureAdapterBufferSize(adapter, bufferSize, invokedMethodName);

                return adapter;
            }

            // We do not have an adapter for this WinRT stream yet and we need to create one.
            // Do that in a thread-safe manner in a separate method such that we only have to pay for the compiler allocating
            // the required closure if this code path is hit:

            return AsStreamInternalFactoryHelper(windowsRuntimeStream, bufferSize, invokedMethodName, forceBufferSize);
        }

        // Separate method so we only pay for closure allocation if this code is executed:
        private static Stream WinRtToNetFxAdapterMap_GetValue(Object winRtStream)
        {
            return s_winRtToNetFxAdapterMap.GetValue(winRtStream, (wrtStr) => WinRtToNetFxStreamAdapter.Create(wrtStr));
        }

        // Separate method so we only pay for closure allocation if this code is executed:
        private static Stream WinRtToNetFxAdapterMap_GetValue(Object winRtStream, Int32 bufferSize)
        {
            return s_winRtToNetFxAdapterMap.GetValue(winRtStream, (wrtStr) => new BufferedStream(WinRtToNetFxStreamAdapter.Create(wrtStr), bufferSize));
        }

        // Creates (or races to create) the adapter for a WinRT stream; a bufferSize
        // of 0 produces a raw adapter, otherwise the adapter is wrapped in a
        // BufferedStream of that size.
        private static Stream AsStreamInternalFactoryHelper(Object windowsRuntimeStream, Int32 bufferSize, String invokedMethodName, bool forceBufferSize)
        {
            Debug.Assert(windowsRuntimeStream != null);
            Debug.Assert(bufferSize >= 0);
            Debug.Assert(!String.IsNullOrWhiteSpace(invokedMethodName));
            Contract.Ensures(Contract.Result<Stream>() != null);
            Contract.EndContractBlock();

            // Get the adapter for this windowsRuntimeStream again (it may have been created concurrently).
            // If none exists yet, create a new one:
            Stream adapter = (bufferSize == 0)
                                ? WinRtToNetFxAdapterMap_GetValue(windowsRuntimeStream)
                                : WinRtToNetFxAdapterMap_GetValue(windowsRuntimeStream, bufferSize);

            Debug.Assert(adapter != null);
            Debug.Assert((adapter is BufferedStream && ((BufferedStream)adapter).UnderlyingStream is WinRtToNetFxStreamAdapter)
                            || (adapter is WinRtToNetFxStreamAdapter));

            if (forceBufferSize)
                EnsureAdapterBufferSize(adapter, bufferSize, invokedMethodName);

            WinRtToNetFxStreamAdapter actualAdapter = adapter as WinRtToNetFxStreamAdapter;
            if (actualAdapter == null)
                actualAdapter = ((BufferedStream)adapter).UnderlyingStream as WinRtToNetFxStreamAdapter;

            actualAdapter.SetWonInitializationRace();

            return adapter;
        }

        #endregion WinRt-to-NetFx conversion

        #region NetFx-to-WinRt conversion

        /// <summary>Exposes a readable managed Stream as a WinRT IInputStream.</summary>
        [CLSCompliant(false)]
        public static IInputStream AsInputStream(this Stream stream)
        {
            if (stream == null)
                throw new ArgumentNullException(nameof(stream));

            if (!stream.CanRead)
                throw new NotSupportedException(SR.NotSupported_CannotConvertNotReadableToInputStream);

            Contract.Ensures(Contract.Result<IInputStream>() != null);
            Contract.EndContractBlock();

            Object adapter = AsWindowsRuntimeStreamInternal(stream);

            IInputStream winRtStream = adapter as IInputStream;
            Debug.Assert(winRtStream != null);

            return winRtStream;
        }

        /// <summary>Exposes a writable managed Stream as a WinRT IOutputStream.</summary>
        [CLSCompliant(false)]
        public static IOutputStream AsOutputStream(this Stream stream)
        {
            if (stream == null)
                throw new ArgumentNullException(nameof(stream));

            if (!stream.CanWrite)
                throw new NotSupportedException(SR.NotSupported_CannotConvertNotWritableToOutputStream);

            Contract.Ensures(Contract.Result<IOutputStream>() != null);
            Contract.EndContractBlock();

            Object adapter = AsWindowsRuntimeStreamInternal(stream);

            IOutputStream winRtStream = adapter as IOutputStream;
            Debug.Assert(winRtStream != null);

            return winRtStream;
        }

        /// <summary>Exposes a seekable managed Stream as a WinRT IRandomAccessStream.</summary>
        [CLSCompliant(false)]
        public static IRandomAccessStream AsRandomAccessStream(this Stream stream)
        {
            if (stream == null)
                throw new ArgumentNullException(nameof(stream));

            if (!stream.CanSeek)
                throw new NotSupportedException(SR.NotSupported_CannotConvertNotSeekableToRandomAccessStream);

            Contract.Ensures(Contract.Result<IRandomAccessStream>() != null);
            Contract.EndContractBlock();

            Object adapter = AsWindowsRuntimeStreamInternal(stream);

            IRandomAccessStream winRtStream = adapter as IRandomAccessStream;
            Debug.Assert(winRtStream != null);

            return winRtStream;
        }

        // Shared implementation for the NetFx->WinRT direction: unwraps
        // round-tripped adapters, returns a cached adapter, or creates one.
        private static Object AsWindowsRuntimeStreamInternal(Stream stream)
        {
            Contract.Ensures(Contract.Result<Object>() != null);
            Contract.EndContractBlock();

            // Check to see if the managed stream is actually a wrapper of a WinRT stream:
            // (This can be either an adapter directly, or an adapter wrapped in a BufferedStream.)
            WinRtToNetFxStreamAdapter sAdptr = stream as WinRtToNetFxStreamAdapter;
            if (sAdptr == null)
            {
                BufferedStream buffAdptr = stream as BufferedStream;
                if (buffAdptr != null)
                    sAdptr = buffAdptr.UnderlyingStream as WinRtToNetFxStreamAdapter;
            }

            // If the managed stream is actually a WinRT stream, we will unwrap it and return the original.
            // In that case we do not need to put the wrapper into the map.
            if (sAdptr != null)
            {
                Object wrappedWinRtStream = sAdptr.GetWindowsRuntimeStream<Object>();
                if (wrappedWinRtStream == null)
                    throw new ObjectDisposedException(nameof(stream), SR.ObjectDisposed_CannotPerformOperation);

#if DEBUG
                // In Chk builds, verify that the original WinRT stream is correctly entered into the WinRT->NetFx map:
                AssertMapContains(s_winRtToNetFxAdapterMap, wrappedWinRtStream, sAdptr, valueMayBeWrappedInBufferedStream: true);
#endif  // DEBUG

                return wrappedWinRtStream;
            }

            // We have a real managed Stream.

            // See if the managed stream already has an adapter:
            NetFxToWinRtStreamAdapter adapter;
            bool adapterExists = s_netFxToWinRtAdapterMap.TryGetValue(stream, out adapter);

            // There is already an adapter:
            if (adapterExists)
                return adapter;

            // We do not have an adapter for this managed stream yet and we need to create one.
            // Do that in a thread-safe manner in a separate method such that we only have to pay for the compiler allocating
            // the required closure if this code path is hit:

            return AsWindowsRuntimeStreamInternalFactoryHelper(stream);
        }

        // Creates (or races to create) the WinRT adapter for a managed stream.
        private static NetFxToWinRtStreamAdapter AsWindowsRuntimeStreamInternalFactoryHelper(Stream stream)
        {
            Debug.Assert(stream != null);
            Contract.Ensures(Contract.Result<NetFxToWinRtStreamAdapter>() != null);
            Contract.EndContractBlock();

            // Get the adapter for managed stream again (it may have been created concurrently).
            // If none exists yet, create a new one:
            NetFxToWinRtStreamAdapter adapter = s_netFxToWinRtAdapterMap.GetValue(stream, (str) => NetFxToWinRtStreamAdapter.Create(str));

            Debug.Assert(adapter != null);
            adapter.SetWonInitializationRace();

            return adapter;
        }

        #endregion NetFx-to-WinRt conversion
    }  // class WindowsRuntimeStreamExtensions
}  // namespace

// WindowsRuntimeStreamExtensions.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace MilkBot.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            // The factory list starts with just the default reflection-based generator;
            // callers may Insert(0, ...) an override or Add(...) a fallback (see remarks
            // on SampleObjectFactories below).
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
            Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            // (these are inserted first so they take precedence over formatter-generated ones below).
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Skip media types already covered by a directly-provided action sample.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                // Factories are processed in list order; the first non-null result wins.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters",
            Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            // When an actual message type has been registered for this action (exact parameter
            // match first, then the "*" wildcard), use it instead of the declared type.
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }

            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns></returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    // Pretty-print the serialized payload when the media type looks like XML or JSON.
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Returns the innermost exception of a flattened AggregateException (the interesting
        // failure when an async formatter call was awaited via .Wait()); otherwise the
        // exception itself.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter is relevant to a request if it can deserialize (read) the type,
        // and to a response if it can serialize (write) it.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Yields every directly-registered action sample whose key matches the given
        // controller/action/direction; a key with parameter names { "*" } acts as a
        // wildcard that matches any parameter list. Name comparisons are case-insensitive.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw string samples are wrapped in TextSample so the help page renders them as text;
        // every other sample object is passed through unchanged.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;

/// <summary>
/// String.GetEnumerator()
/// Retrieves an object that can iterate through the individual characters in this string.
/// </summary>
class StringGetEnumerator
{
    // Bounds for the randomly generated test string used by PosTest1.
    private const int c_MIN_STRING_LEN = 8;
    private const int c_MAX_STRING_LEN = 256;

    /// <summary>
    /// Test entry point. Returns 100 (the harness pass code) when all scenarios pass, 0 otherwise.
    /// </summary>
    public static int Main()
    {
        StringGetEnumerator iege = new StringGetEnumerator();

        TestLibrary.TestFramework.BeginTestCase("for method: String.GetEnumerator()");
        if (iege.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    /// <summary>
    /// Runs every positive scenario; returns true only if all of them pass.
    /// </summary>
    public bool RunTests()
    {
        bool retVal = true;

        TestLibrary.TestFramework.LogInformation("[Positive]");
        // Keep "&& retVal" on the right so an early failure cannot short-circuit later tests.
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;

        return retVal;
    }

    #region Positive test scenarioes
    /// <summary>
    /// PosTest1: enumerate a random non-empty string.
    /// </summary>
    public bool PosTest1()
    {
        string strSrc = TestLibrary.Generator.GetString(-55, false, c_MIN_STRING_LEN, c_MAX_STRING_LEN);
        return VerifyEnumeration("PosTest1: Random string", "P001", "001", "002", strSrc);
    }

    /// <summary>
    /// PosTest2: enumerate string.Empty (MoveNext must immediately return false).
    /// </summary>
    public bool PosTest2()
    {
        return VerifyEnumeration("PosTest2: string.Empty ", "P002", "003", "004", string.Empty);
    }

    /// <summary>
    /// PosTest3: enumerate a string consisting of a single NUL ('\0') character.
    /// </summary>
    public bool PosTest3()
    {
        return VerifyEnumeration(@"PosTest3: string is \0 ", "P003", "005", "006", "\0");
    }
    #endregion // end for positive test scenarioes

    /// <summary>
    /// Shared scenario body (extracted from the three formerly-duplicated PosTest methods):
    /// enumerates <paramref name="strSrc"/> via IEnumerable&lt;char&gt;.GetEnumerator() and
    /// verifies each yielded character equals the character obtained by direct indexing,
    /// then calls Reset() to confirm it does not throw.
    /// </summary>
    /// <param name="testDesc">Scenario description passed to BeginScenario.</param>
    /// <param name="testId">Scenario id included in error log messages.</param>
    /// <param name="mismatchErrorNum">Error number logged when a character mismatch is detected.</param>
    /// <param name="exceptionErrorNum">Error number logged when an unexpected exception occurs.</param>
    /// <param name="strSrc">The source string to enumerate.</param>
    /// <returns>true when the scenario passes; otherwise false.</returns>
    private bool VerifyEnumeration(string testDesc, string testId, string mismatchErrorNum, string exceptionErrorNum, string strSrc)
    {
        bool retVal = true;

        bool condition = false;
        bool expectedValue = true;
        bool actualValue = false;

        TestLibrary.TestFramework.BeginScenario(testDesc);
        try
        {
            IEnumerator<Char> iterator = ((IEnumerable<Char>)strSrc).GetEnumerator();
            condition = true;
            int index = 0;
            while (iterator.MoveNext())
            {
                // Every enumerated character must match the character at the same index.
                condition = object.Equals(iterator.Current, strSrc[index]) && condition;
                index++;
            }
            // Reset is supported by string enumerators and must not throw here.
            iterator.Reset();
            actualValue = condition && (null != iterator);
            if (actualValue != expectedValue)
            {
                string errorDesc = "Value is not " + expectedValue + " as expected: Actual(" + actualValue + ")";
                errorDesc += GetDataString(strSrc);
                TestLibrary.TestFramework.LogError(mismatchErrorNum + " TestId-" + testId, errorDesc);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError(exceptionErrorNum + " TestId-" + testId, "Unexpected exception: " + e + GetDataString(strSrc));
            retVal = false;
        }

        return retVal;
    }

    /// <summary>
    /// Formats the source string and its length for inclusion in failure messages.
    /// A null source is reported as "null" with length 0.
    /// </summary>
    private string GetDataString(string strSrc)
    {
        string str1, str;
        int len1;

        if (null == strSrc)
        {
            str1 = "null";
            len1 = 0;
        }
        else
        {
            str1 = strSrc;
            len1 = strSrc.Length;
        }

        str = string.Format("\n[Source string value]\n \"{0}\"", str1);
        str += string.Format("\n[Length of source string]\n {0}", len1);

        return str;
    }
}
#region Copyright (c) 2007 by Dan Shechter
////////////////////////////////////////////////////////////////////////////////////////
////
// IBNet, an Interactive Brokers TWS .NET Client & Server implementation
// by Dan Shechter
////////////////////////////////////////////////////////////////////////////////////////
// License: MPL 1.1/GPL 2.0/LGPL 2.1
//
// The contents of this file are subject to the Mozilla Public License Version
// 1.1 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
// http://www.mozilla.org/MPL/
//
// Software distributed under the License is distributed on an "AS IS" basis,
// WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
// for the specific language governing rights and limitations under the
// License.
//
// The Original Code is any part of this file that is not marked as a contribution.
//
// The Initial Developer of the Original Code is Dan Shecter.
// Portions created by the Initial Developer are Copyright (C) 2007
// the Initial Developer. All Rights Reserved.
//
// Contributor(s): None.
//
// Alternatively, the contents of this file may be used under the terms of
// either the GNU General Public License Version 2 or later (the "GPL"), or
// the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
// in which case the provisions of the GPL or the LGPL are applicable instead
// of those above. If you wish to allow use of your version of this file only
// under the terms of either the GPL or the LGPL, and not to allow others to
// use your version of this file under the terms of the MPL, indicate your
// decision by deleting the provisions above and replace them with the notice
// and other provisions required by the GPL or the LGPL. If you do not delete
// the provisions above, a recipient may use your version of this file under
// the terms of any one of the MPL, the GPL or the LGPL.
//////////////////////////////////////////////////////////////////////////////////////// #endregion using System; using System.Diagnostics; using System.IO; namespace Daemaged.IBNet { /// <summary> /// A subclass of <see cref="Stream"/> that echos values read from or /// written to a given primary stream to a given slave stream. In the /// write-only case, this functionality is similar to what is often called /// a "tee" stream. /// </summary> internal class EchoStream : Stream { #region StreamOwnership Enumeration /// <summary> /// Flags that specify the degree of ownership that the echo stream /// exerts over its primary and slave streams. Ownership determines /// whether the echo stream closes its consituent streams whenever /// it itself is closed. /// </summary> [Flags] public enum StreamOwnership { /// <summary> /// The <see cref="EchoStream"/> does not own either of its /// constituent streams, and will close neither of them whenever /// the <see cref="EchoStream"/> is closed. /// </summary> OwnNone = 0x0, /// <summary> /// The <see cref="EchoStream"/> owns its primary stream, and will /// close it whenever the <see cref="EchoStream"/> is closed. /// </summary> OwnPrimaryStream = 0x1, /// <summary> /// The <see cref="EchoStream"/> owns its slave stream, and will /// close it whenever the <see cref="EchoStream"/> is closed. /// </summary> OwnSlaveStream = 0x2, /// <summary> /// The <see cref="EchoStream"/> owns both of its constituent /// streams, and will close both whenever the /// <see cref="EchoStream"/> is closed. This is equivalent to /// <c><see cref="OwnPrimaryStream"/> | <see cref="OwnSlaveStream"/> /// </c>. /// </summary> OwnBoth = OwnPrimaryStream | OwnSlaveStream } #endregion // StreamOwnership Enumeration #region SlaveFailAction Enumeration /// <summary> /// Specifies the action that will be taken whenever a failure occurs in /// reading or writing the slave stream. 
Failures that occur when reading /// or writing the primary stream are always propogated back to the caller /// as the original exception that was thrown. /// </summary> public enum SlaveFailAction { /// <summary> /// The failure will be propogated to the client as the original /// exception that was thrown by the underlying stream. This is /// the default action for all failures, and is the most efficient, /// as <see cref="Read"/> and <see cref="Write"/> operations never /// have to enter an expensive try block to implement this behavior. /// </summary> Propogate, /// <summary> /// The failure will be ignored. Any exceptions that are thrown will /// be silently swallowed. This is a less efficient option than /// <see cref="Propogate"/>, but it allows for simple robustness in /// the face of slave stream failures. It does not affect behavior /// for primary stream failures. /// </summary> Ignore, /// <summary> /// The failure will be passed on to an error handler delegate that /// determines how to handle it. The performance hit for this option /// is equivalent to that of <see cref="Ignore"/>, which is to /// say that it is slower than <see cref="Propogate"/>. This cannot /// be set directly; it is set automatically whenever you use the /// <see cref="SlaveReadFailFilter"/> or /// <see cref="SlaveWriteFailFilter"/> delegates. /// </summary> Filter } #endregion // SlaveFailAction Enumeration #region SlaveFailHandler Delegate #region Delegates /// <summary> /// Delegate for slave-failure filters used by <see cref="EchoStream"/>. /// The oSender parameter is the <see cref="EchoStream"/> in which the /// error occurred. The method parameter identifies whether this is a /// read, write or seek failure. The exc parameter contains the /// exception that occurred while attempting to read or write the /// slave stream. The delegate should return a /// <see cref="SlaveFailAction"/> that instructs the caller on how to /// proceed with the exception. 
Note that /// <see cref="SlaveFailAction.Filter"/> is not a valid return value /// from a fail handler, and will cause an /// <see cref="InvalidOperationException"/> to occur. /// </summary> public delegate SlaveFailAction SlaveFailHandler( object oSender, SlaveFailMethod method, Exception exc ); #endregion /// <summary> /// Identifies the method in which a slave failure occurred. /// </summary> public enum SlaveFailMethod { /// <summary> /// Failure occurred in the <see cref="Read"/> method. /// </summary> Read, /// <summary> /// Failure occurred in the <see cref="Write"/> method. /// </summary> Write, /// <summary> /// Failure occurred in the <see cref="Seek"/> method. /// </summary> Seek } #endregion // SlaveFailHandler Delegate #region Private Implementation Variables readonly Stream _primaryStream; readonly Stream _slaveStream; int m_lastReadResult; SlaveFailAction m_readFailAction = SlaveFailAction.Propogate; SlaveFailAction m_seekFailAction = SlaveFailAction.Propogate; SlaveFailHandler m_slaveReadFailFilter; SlaveFailHandler m_slaveSeekFailFilter; SlaveFailHandler m_slaveWriteFailFilter; StreamOwnership m_streamsOwned; SlaveFailAction m_writeFailAction = SlaveFailAction.Propogate; #endregion // Private Implementation Variables #region Construction / Destruction /// <summary> /// Constructs a new <see cref="EchoStream"/> object. /// </summary> /// <param name="primaryStream"> /// The primary stream. See <see cref="PrimaryStream"/>. /// </param> /// <param name="slaveStream"> /// The slave stream. See <see cref="SlaveStream"/>. /// </param> /// <param name="streamsOwned"> /// Controls which streams are "owned" by the <see cref="EchoStream"/>. /// See <see cref="StreamsOwned"/>. 
/// </param> public EchoStream( Stream primaryStream, Stream slaveStream, StreamOwnership streamsOwned) { Debug.Assert(primaryStream != null, "primaryStream"); Debug.Assert(slaveStream != null, "slaveStream"); _primaryStream = primaryStream; _slaveStream = slaveStream; m_streamsOwned = streamsOwned; } /// <summary> /// Closes the slave stream and any of its constituent streams that /// it owns. See <see cref="StreamsOwned"/> and /// <see cref="Stream.Close"/>. /// </summary> public override void Close() { // Flush all data through both streams. Flush(); // Close the streams that we own. if ((m_streamsOwned & StreamOwnership.OwnPrimaryStream) > 0) _primaryStream.Close(); if ((m_streamsOwned & StreamOwnership.OwnSlaveStream) > 0) _slaveStream.Close(); base.Close(); } #endregion // Construction / Destruction #region Non-Error Properties /// <summary> /// Controls which of the <see cref="EchoStream"/> object's constituent /// streams are closed whenever the <see cref="EchoStream"/> itself is /// closed. See <see cref="StreamOwnership"/>. /// </summary> public StreamOwnership StreamsOwned { get => m_streamsOwned; set => m_streamsOwned = value; } /// <summary> /// Gets the primary stream for the <see cref="EchoStream"/>. When /// data is written to the <see cref="EchoStream"/>, it is written /// to the primary stream. When data is read from the /// <see cref="EchoStream"/>, it is read from the primary stream. /// </summary> public Stream PrimaryStream => _primaryStream; /// <summary> /// Gets the slave stream for the <see cref="EchoStream"/>. When /// data is written to the <see cref="EchoStream"/>, it is written /// to the slave stream. When data is read from the /// <see cref="EchoStream"/>, however, it is not read from the slave /// stream; it is read from the primary stream instead. Whatever /// data is read from the primary stream is then written (echoed) into /// the slave stream. 
/// </summary> public Stream SlaveStream => _slaveStream; #endregion // Non-Error Properties #region Error Handling Properties /// <summary> /// At all times, this property reflects the return value of the last /// call to <see cref="Stream.Read"/> that was made on the primary /// stream. /// </summary> /// <remarks> /// The primary stream is read whenever <see cref="Read"/> /// is called on this stream. This property is useful because it /// provides a way to tell how much data was successfully read from /// the primary stream in the case where a subsequent echo to the /// slave stream caused an exception and the /// <see cref="SlaveWriteFailAction"/> setting for the object allowed /// the exception to propogate out of <see cref="Read"/>, effectively /// losing the return value. A caller can use the more efficient /// <see cref="SlaveFailAction.Propogate"/> setting and still have /// robust behavior at the cost of more complex client code and the /// use of this property to recover from reads that failed because of /// a downed slave stream. /// </remarks> public int LastReadResult => m_lastReadResult; /// <summary> /// Sets the action to use for all possible failures. It is more /// maintainable to use this property whenever you want to handle /// all possible slave exceptions in the same way for a stream, /// because you do not need to modify your code later if new /// exception-related properties are added to <see cref="EchoStream"/>. /// See <see cref="SlaveReadFailAction"/> and the other properties /// like it. /// </summary> public SlaveFailAction SlaveFailActions { set { SlaveReadFailAction = value; SlaveWriteFailAction = value; SlaveSeekFailAction = value; } } /// <summary> /// Sets the filter to use for all possible failures. 
It is more /// maintainable to use this property whenever you want to handle /// all possible slave exceptions in the same way for a stream, /// because you do not need to modify your code later if new /// exception-related properties are added to <see cref="EchoStream"/>. /// See <see cref="SlaveReadFailAction"/> and the other properties /// like it. /// </summary> public SlaveFailHandler SlaveFailFilters { set { SlaveReadFailFilter = value; SlaveWriteFailFilter = value; SlaveSeekFailFilter = value; } } /// <summary> /// Controls what action is taken whenever a failure occurs while /// trying to echo data read from the primary stream back to the /// slave stream. See <see cref="SlaveFailAction"/> and /// <see cref="Read"/>. /// </summary> public SlaveFailAction SlaveReadFailAction { get => m_readFailAction; set { if (value == SlaveFailAction.Filter) { throw new InvalidOperationException( "You cannot set this property to " + "SlaveFailAction.Filter manually. Use the " + "SlaveReadFailFilter property instead." ); } else { // Unset any read filter that may have been set and set // the new read fail behavior. m_slaveReadFailFilter = null; m_readFailAction = value; } } } /// <summary> /// Controls what action is taken whenever a failure occurs while /// trying to write data to the slave stream. See /// <see cref="SlaveFailAction"/> and <see cref="Write"/>. /// </summary> public SlaveFailAction SlaveWriteFailAction { get => m_writeFailAction; set { if (value == SlaveFailAction.Filter) { throw new InvalidOperationException( "You cannot set this property to " + "SlaveFailAction.Filter manually. Use the " + "SlaveWriteFailFilter property instead." ); } else { // Unset any write filter that may have been set and set // the new write fail behavior. m_slaveWriteFailFilter = null; m_writeFailAction = value; } } } /// <summary> /// Controls what action is taken whenever a failure occurs while /// trying to seek in the slave stream. 
See /// <see cref="SlaveFailAction"/> and <see cref="Seek"/>. /// </summary> public SlaveFailAction SlaveSeekFailAction { get => m_seekFailAction; set { if (value == SlaveFailAction.Filter) { throw new InvalidOperationException( "You cannot set this property to " + "SlaveFailAction.Filter manually. Use the " + "SlaveSeekFailFilter property instead." ); } else { // Unset any write filter that may have been set and set // the new write fail behavior. m_slaveSeekFailFilter = null; m_seekFailAction = value; } } } /// <summary> /// Gets and sets the filter delegate that will be called whenever a /// failure occurs while trying to while trying to to write data to /// the slave stream. See <see cref="SlaveFailAction"/> and /// <see cref="Write"/>. /// </summary> public SlaveFailHandler SlaveWriteFailFilter { get => m_slaveWriteFailFilter; set { // The somewhat roundabout way in which this is written helps // to preserve the existing fail action in the case where there // was no previous fail handler and the user calls this with a // null handler (which should just be a no-op from the user's // point of view). // Reset the fail action to a potentially-temporary default // if there was a previous slave (so that the fail action // was Filter). if (m_slaveWriteFailFilter != null) m_writeFailAction = SlaveFailAction.Propogate; m_slaveWriteFailFilter = value; // Automatically set the fail action to Filter if we now have // a slave filter. if (value != null) m_writeFailAction = SlaveFailAction.Filter; } } /// <summary> /// Gets and sets the filter delegate that will be called whenever a /// failure occurs while trying to echo data read from the primary /// stream back to the slave stream. See <see cref="SlaveFailAction"/> /// and <see cref="Read"/>. 
/// </summary>
public SlaveFailHandler SlaveReadFailFilter
{
    get => m_slaveReadFailFilter;
    set
    {
        // Remember whether a filter was installed before this change so
        // that clearing the last filter restores the default action,
        // while assigning null when no filter existed stays a no-op.
        bool hadFilter = m_slaveReadFailFilter != null;
        m_slaveReadFailFilter = value;
        if (value != null)
        {
            m_readFailAction = SlaveFailAction.Filter;
        }
        else if (hadFilter)
        {
            m_readFailAction = SlaveFailAction.Propogate;
        }
    }
}

/// <summary>
/// Gets and sets the filter delegate that will be called whenever a
/// failure occurs while trying to seek within the slave stream.
/// See <see cref="SlaveFailAction"/> and <see cref="Seek"/>.
/// </summary>
public SlaveFailHandler SlaveSeekFailFilter
{
    get => m_slaveSeekFailFilter;
    set
    {
        // Same null-transition bookkeeping as the read/write filters.
        bool hadFilter = m_slaveSeekFailFilter != null;
        m_slaveSeekFailFilter = value;
        if (value != null)
        {
            m_seekFailAction = SlaveFailAction.Filter;
        }
        else if (hadFilter)
        {
            m_seekFailAction = SlaveFailAction.Propogate;
        }
    }
}

#endregion // Error Handling Properties

#region Stream Implementation

/// <summary>
/// Returns the value of CanRead provided by the primary stream.
/// See <see cref="Stream.CanRead"/>.
/// </summary>
public override bool CanRead => _primaryStream.CanRead;

/// <summary>
/// Returns true if CanSeek on both the primary stream and the slave
/// stream returns true.
/// </summary>
public override bool CanSeek => _primaryStream.CanSeek && _slaveStream.CanSeek;

/// <summary>
/// Returns true if the primary stream can timeout. The slave's ability
/// to timeout is insignificant.
/// </summary>
public override bool CanTimeout => _primaryStream.CanTimeout;

/// <summary>
/// Returns true if CanWrite on both the primary stream and the slave
/// stream returns true. Note that an <see cref="EchoStream"/> whose
/// <see cref="SlaveStream"/>'s CanWrite returns false is not
/// very useful.
/// </summary>
public override bool CanWrite => _primaryStream.CanWrite && _slaveStream.CanWrite;

/// <summary>
/// Gets the length of the stream. For <see cref="EchoStream"/>, this
/// is the length of the primary stream, since the slave stream is never
/// read by the <see cref="EchoStream"/>.
/// </summary>
public override long Length => _primaryStream.Length;

/// <summary>
/// Gets and sets the current position of the stream.
/// </summary>
/// <remarks>
/// <para>
/// Getting returns the primary stream's position. Setting computes the
/// relative change against the primary stream's current position and
/// applies that same delta to both streams, so future writes land at
/// the right place in each constituent stream even when the two had
/// different amounts of data written to them before they were unioned
/// by the echo stream.
/// </para>
/// <para>
/// Failures to set the position of the slave stream are handled using
/// the <see cref="SlaveSeekFailAction"/> and
/// <see cref="SlaveSeekFailFilter"/> properties.
/// </para>
/// </remarks>
public override long Position
{
    get => _primaryStream.Position;
    set
    {
        // Move the primary stream to the absolute position, then move
        // the slave by the same relative amount: the slave tracks
        // changes rather than absolute values because the two streams
        // may have held different data before they were joined.
        long delta = value - _primaryStream.Position;
        _primaryStream.Position = value;
        if (m_seekFailAction == SlaveFailAction.Propogate)
        {
            // Fast path: no catch frame, slave failures bubble up.
            _slaveStream.Position += delta;
            return;
        }

        try
        {
            _slaveStream.Position += delta;
        }
        catch (Exception exc)
        {
            HandleSlaveException(
                exc, SlaveFailMethod.Seek, m_seekFailAction
            );
        }
    }
}

/// <summary>
/// Gets or Sets the ReadTimeout of the primary stream.
/// The slave's timeout is unchanged;
/// </summary>
public override int ReadTimeout
{
    get => _primaryStream.ReadTimeout;
    set => _primaryStream.ReadTimeout = value;
}

/// <summary>
/// Gets or Sets the WriteTimeout of the primary stream.
/// The slave's timeout is unchanged;
/// </summary>
public override int WriteTimeout
{
    get => _primaryStream.WriteTimeout;
    set => _primaryStream.WriteTimeout = value;
}

/// <summary>
/// Flushes both constituent streams. See <see cref="Stream.Flush"/>.
/// Because Flush is just a special case of delayed <see cref="Write"/>,
/// this method uses the exception handling framework put in place for
/// <see cref="Write"/>. See that method for more information.
/// </summary>
public override void Flush()
{
    _primaryStream.Flush();
    if (m_writeFailAction == SlaveFailAction.Propogate)
    {
        // Fast path: no catch frame, slave failures bubble up.
        _slaveStream.Flush();
        return;
    }

    try
    {
        _slaveStream.Flush();
    }
    catch (Exception exc)
    {
        HandleSlaveException(
            exc, SlaveFailMethod.Write, m_writeFailAction
        );
    }
}

/// <summary>
/// Sets the length of the stream. See <see cref="Stream.SetLength"/>.
/// The slave's length is adjusted by the same relative amount that is
/// applied to the primary stream, in the same spirit as the behavior of
/// the <see cref="Seek"/> and <see cref="Position"/> members, and using
/// the same error handling mechanism.
/// </summary>
public override void SetLength(long len)
{
    long delta = len - _primaryStream.Length;
    _primaryStream.SetLength(len);
    if (m_seekFailAction == SlaveFailAction.Propogate)
    {
        // Fast path: no catch frame, slave failures bubble up.
        _slaveStream.SetLength(_slaveStream.Length + delta);
        return;
    }

    try
    {
        _slaveStream.SetLength(_slaveStream.Length + delta);
    }
    catch (Exception exc)
    {
        HandleSlaveException(
            exc, SlaveFailMethod.Seek, m_seekFailAction
        );
    }
}

/// <summary>
/// Reads from the primary stream, and echos anything that was read into
/// the slave stream. See <see cref="Stream.Read"/>.
/// </summary>
/// <remarks>
/// This method is not exception-safe in the sense that it is possible
/// that data read from the primary stream might not be echoed to the
/// slave stream if the slave stream throws an exception. This reflects
/// the reality that there is no way to "unread" data from a stream, and
/// there is no way to know for certain that the slave stream will not
/// throw an exception before reading from the primary stream. Because
/// of this, you may find that your buffer contains good data that has
/// been read even though an exception is thrown by the slave stream.
/// To facilitate situations in which it is important to continue
/// reading even if the slave stream goes down, and still retain good
/// performance in the common case where both streams must work for
/// all reads, <see cref="EchoStream"/> supports the
/// <see cref="SlaveReadFailAction"/>, <see cref="SlaveReadFailFilter"/>
/// and <see cref="LastReadResult"/>
/// properties. See those properties for more information.
/// </remarks>
public override int Read(byte[] buffer, int offset, int count)
{
    // Pull data from the primary stream first; the count is cached so
    // that LastReadResult can report it even if the echo below fails.
    m_lastReadResult = _primaryStream.Read(buffer, offset, count);
    if (m_lastReadResult == 0)
    {
        // Nothing was read, so there is nothing to echo.
        return m_lastReadResult;
    }

    // Mirror whatever was read into the slave stream.
    if (m_readFailAction == SlaveFailAction.Propogate)
    {
        // Fast path: no catch frame, slave failures bubble up.
        _slaveStream.Write(buffer, offset, m_lastReadResult);
    }
    else
    {
        try
        {
            _slaveStream.Write(buffer, offset, m_lastReadResult);
        }
        catch (Exception exc)
        {
            HandleSlaveException(
                exc, SlaveFailMethod.Read, m_readFailAction
            );
        }
    }
    return m_lastReadResult;
}

/// <summary>
/// Writes to both the main stream and the slave stream. See
/// <see cref="Stream.Write"/>.
/// </summary>
/// <remarks>
/// This method is not exception-safe in the sense that it is possible
/// that data written to the primary stream might not be also written to
/// the slave stream if the slave stream throws an exception. This
/// reflects the reality that there is no way to "unwrite" data from a
/// stream, and there is no way to know for certain that the slave
/// stream will not throw an exception before writing to the primary
/// stream. To facilitate situations in which it is important to
/// continue writing even if the slave stream goes down, and still
/// retain good performance in the common case where both streams must
/// work for all writes, <see cref="EchoStream"/> supports the
/// <see cref="SlaveWriteFailAction"/> and
/// <see cref="SlaveWriteFailFilter"/>
/// properties.
See those properties for more information.
/// </remarks>
public override void Write(byte[] buffer, int offset, int count)
{
    _primaryStream.Write(buffer, offset, count);
    if (m_writeFailAction == SlaveFailAction.Propogate)
    {
        // This is the simple and most efficient case: no catch frame,
        // slave failures bubble up unchanged.
        _slaveStream.Write(buffer, offset, count);
    }
    else
    {
        // This is the case that involves more expensive error handling.
        try
        {
            _slaveStream.Write(buffer, offset, count);
        }
        catch (Exception exc)
        {
            HandleSlaveException(
                exc, SlaveFailMethod.Write, m_writeFailAction
            );
        }
    }
}

/// <summary>
/// Performs a seek on both streams. The seek is handled in the same
/// way as a change in the <see cref="Position"/> property in regards
/// to the relationship between the two constituent streams. See
/// <see cref="Position"/> and <see cref="Stream.Seek"/>.
/// </summary>
/// <remarks>
/// <para>
/// Failures to set the position of the slave stream are handled using
/// the <see cref="SlaveSeekFailAction"/> and
/// <see cref="SlaveSeekFailFilter"/> properties, just as with the
/// <see cref="Position"/> property.
/// </para>
/// </remarks>
public override long Seek(long offset, SeekOrigin origin)
{
    // This may be a little backwards from usual, but we implement
    // this in terms of our Position property, rather than the other
    // way around, because Position properly calculates changes in
    // both streams for us.
    if (origin == SeekOrigin.Begin)
        Position = offset;
    else if (origin == SeekOrigin.Current)
        Position += offset;
    else if (origin == SeekOrigin.End)
        Position = Length + offset;
    return Position;
}

#endregion // Stream Implementation

#region Private Implementation Methods

/// <summary>
/// Invokes the user-provided filter delegate for the failing operation
/// and then applies whatever <see cref="SlaveFailAction"/> the filter
/// returned. Called only from <see cref="HandleSlaveException"/>.
/// </summary>
/// <param name="exc">The exception thrown by the slave stream.</param>
/// <param name="method">The operation that was being attempted.</param>
void FilterException(Exception exc, SlaveFailMethod method)
{
    // Allow a user-provided filter function to handle the error.
    var action = SlaveFailAction.Filter;
    if (method == SlaveFailMethod.Read)
        action = m_slaveReadFailFilter(this, method, exc);
    else if (method == SlaveFailMethod.Write)
        action = m_slaveWriteFailFilter(this, method, exc);
    else if (method == SlaveFailMethod.Seek)
        action = m_slaveSeekFailFilter(this, method, exc);
    else
        Debug.Assert(false, "Unhandled SlaveFailMethod.");

    if (action == SlaveFailAction.Filter)
    {
        // BUG FIX: the original message always blamed the read filter,
        // even when a write or seek filter returned the bad value.
        throw new InvalidOperationException(
            "SlaveFailAction.Filter is not a valid return " +
            "value for a slave fail filter delegate.",
            exc
        );
    }

    // Handle the exception in the manner specified by the filter.
    // This is an indirect recursive call into HandleSlaveException
    // (which is this method's only caller), but the action cannot be
    // 'Filter' here (checked above), so the recursion terminates.
    HandleSlaveException(exc, method, action);
}

/// <summary>
/// Applies a <see cref="SlaveFailAction"/> to an exception thrown by
/// the slave stream: rethrow it, swallow it, or hand it to the
/// installed filter delegate.
/// </summary>
void HandleSlaveException(
    Exception exc, SlaveFailMethod method, SlaveFailAction action)
{
    if (action == SlaveFailAction.Propogate)
    {
        // BUG FIX: 'throw exc;' would reset the exception's stack
        // trace; rethrowing through ExceptionDispatchInfo preserves
        // the original trace for callers.
        System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(exc).Throw();
    }
    else if (action == SlaveFailAction.Ignore)
    {
        // Intentionally Empty: the slave failure is swallowed.
    }
    else if (action == SlaveFailAction.Filter)
    {
        FilterException(exc, method);
    }
    else
    {
        Debug.Assert(false, "Unhandled SlaveFailAction");
    }
}

#endregion // Private Implementation Methods
}
}
using Amazon.DynamoDBv2;
using Amazon.DynamoDBv2.Model;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Internal;
using Orleans.Runtime;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace Orleans.Reminders.DynamoDB
{
    /// <summary>
    /// Implementation for IReminderTable using DynamoDB as underlying storage.
    /// </summary>
    internal class DynamoDBReminderTable : IReminderTable
    {
        // Attribute names used for reminder items in the DynamoDB table.
        private const string GRAIN_REFERENCE_PROPERTY_NAME = "GrainReference";
        private const string REMINDER_NAME_PROPERTY_NAME = "ReminderName";
        private const string SERVICE_ID_PROPERTY_NAME = "ServiceId";
        private const string START_TIME_PROPERTY_NAME = "StartTime";
        private const string PERIOD_PROPERTY_NAME = "Period";
        private const string GRAIN_HASH_PROPERTY_NAME = "GrainHash";
        private const string REMINDER_ID_PROPERTY_NAME = "ReminderId";
        private const string ETAG_PROPERTY_NAME = "ETag";
        private const string CURRENT_ETAG_ALIAS = ":currentETag";
        private const string SERVICE_ID_INDEX = "ServiceIdIndex";

        // Source of fresh ETag values for upserted rows.
        private SafeRandom _random = new SafeRandom();

        private readonly ILogger logger;
        private readonly GrainReferenceKeyStringConverter grainReferenceConverter;
        private readonly DynamoDBReminderStorageOptions options;
        private readonly string serviceId;

        // Created lazily by Init(); all table operations go through it.
        private DynamoDBStorage storage;

        /// <summary>Initializes a new instance of the <see cref="DynamoDBReminderTable"/> class.</summary>
        /// <param name="grainReferenceConverter">The grain factory.</param>
        /// <param name="loggerFactory">logger factory to use</param>
        /// <param name="clusterOptions">Cluster options supplying the service id.</param>
        /// <param name="storageOptions">DynamoDB reminder storage options.</param>
        public DynamoDBReminderTable(
            GrainReferenceKeyStringConverter grainReferenceConverter,
            ILoggerFactory loggerFactory,
            IOptions<ClusterOptions> clusterOptions,
            IOptions<DynamoDBReminderStorageOptions> storageOptions)
        {
            this.grainReferenceConverter = grainReferenceConverter;
            this.logger = loggerFactory.CreateLogger<DynamoDBReminderTable>();
            this.serviceId = clusterOptions.Value.ServiceId;
            this.options = storageOptions.Value;
        }

        /// <summary>Initialize current instance with specific global configuration and logger</summary>
        public Task Init()
        {
            this.storage = new DynamoDBStorage(
                this.logger,
                this.options.Service,
                this.options.AccessKey,
                this.options.SecretKey,
                this.options.ReadCapacityUnits,
                this.options.WriteCapacityUnits);

            this.logger.Info(ErrorCode.ReminderServiceBase, "Initializing AWS DynamoDB Reminders Table");

            // Primary key: reminder id (hash) + grain hash (range). A global
            // secondary index keyed by service id supports the hash-range
            // scans performed by ReadRows.
            var keySchema = new List<KeySchemaElement>
            {
                new KeySchemaElement { AttributeName = REMINDER_ID_PROPERTY_NAME, KeyType = KeyType.HASH },
                new KeySchemaElement { AttributeName = GRAIN_HASH_PROPERTY_NAME, KeyType = KeyType.RANGE }
            };

            var attributeDefinitions = new List<AttributeDefinition>
            {
                new AttributeDefinition { AttributeName = REMINDER_ID_PROPERTY_NAME, AttributeType = ScalarAttributeType.S },
                new AttributeDefinition { AttributeName = GRAIN_HASH_PROPERTY_NAME, AttributeType = ScalarAttributeType.N },
                new AttributeDefinition { AttributeName = SERVICE_ID_PROPERTY_NAME, AttributeType = ScalarAttributeType.S }
            };

            var serviceIdIndex = new GlobalSecondaryIndex
            {
                IndexName = SERVICE_ID_INDEX,
                Projection = new Projection { ProjectionType = ProjectionType.ALL },
                KeySchema = new List<KeySchemaElement>
                {
                    new KeySchemaElement { AttributeName = SERVICE_ID_PROPERTY_NAME, KeyType = KeyType.HASH },
                    new KeySchemaElement { AttributeName = GRAIN_HASH_PROPERTY_NAME, KeyType = KeyType.RANGE }
                }
            };

            return this.storage.InitializeTable(
                this.options.TableName,
                keySchema,
                attributeDefinitions,
                new List<GlobalSecondaryIndex> { serviceIdIndex });
        }

        /// <summary>
        /// Reads a reminder for a grain reference by reminder name.
/// Read a row from the reminder table
/// </summary>
/// <param name="grainRef"> grain ref to locate the row </param>
/// <param name="reminderName"> reminder name to locate the row </param>
/// <returns> Return the ReminderTableData if the rows were read successfully </returns>
public async Task<ReminderEntry> ReadRow(GrainReference grainRef, string reminderName)
{
    var reminderId = ConstructReminderId(this.serviceId, grainRef, reminderName);

    // Full primary key: reminder id (hash) + grain hash (range).
    var keys = new Dictionary<string, AttributeValue>
    {
        { REMINDER_ID_PROPERTY_NAME, new AttributeValue(reminderId) },
        { GRAIN_HASH_PROPERTY_NAME, new AttributeValue { N = grainRef.GetUniformHashCode().ToString() } }
    };

    try
    {
        return await this.storage.ReadSingleEntryAsync(this.options.TableName, keys, this.Resolve).ConfigureAwait(false);
    }
    catch (Exception exc)
    {
        this.logger.Warn(ErrorCode.ReminderServiceBase, $"Intermediate error reading reminder entry {Utils.DictionaryToString(keys)} from table {this.options.TableName}.", exc);
        throw;
    }
}

/// <summary>
/// Read one row from the reminder table
/// </summary>
/// <param name="grainRef">grain ref to locate the row </param>
/// <returns> Return the ReminderTableData if the rows were read successfully </returns>
public async Task<ReminderTableData> ReadRows(GrainReference grainRef)
{
    var expressionValues = new Dictionary<string, AttributeValue>
    {
        { $":{SERVICE_ID_PROPERTY_NAME}", new AttributeValue(this.serviceId) },
        { $":{GRAIN_REFERENCE_PROPERTY_NAME}", new AttributeValue(grainRef.ToKeyString()) }
    };

    try
    {
        // Match every reminder belonging to this service and grain.
        var expression =
            $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME} AND " +
            $"{GRAIN_REFERENCE_PROPERTY_NAME} = :{GRAIN_REFERENCE_PROPERTY_NAME}";
        var records = await this.storage.ScanAsync(this.options.TableName, expressionValues, expression, this.Resolve).ConfigureAwait(false);

        return new ReminderTableData(records);
    }
    catch (Exception exc)
    {
        this.logger.Warn(ErrorCode.ReminderServiceBase, $"Intermediate error reading reminder entry {Utils.DictionaryToString(expressionValues)} from table {this.options.TableName}.", exc);
        throw;
    }
}

/// <summary>
/// Reads reminder table data for a given hash range.
/// </summary>
/// <param name="beginHash">Exclusive lower bound of the hash range.</param>
/// <param name="endHash">Inclusive upper bound of the hash range.</param>
/// <returns> Return the RemiderTableData if the rows were read successfully </returns>
public async Task<ReminderTableData> ReadRows(uint beginHash, uint endHash)
{
    var expressionValues = new Dictionary<string, AttributeValue>
    {
        { $":{SERVICE_ID_PROPERTY_NAME}", new AttributeValue(this.serviceId) },
        { $":Begin{GRAIN_HASH_PROPERTY_NAME}", new AttributeValue { N = beginHash.ToString() } },
        { $":End{GRAIN_HASH_PROPERTY_NAME}", new AttributeValue { N = endHash.ToString() } }
    };

    try
    {
        // A normal range selects (begin, end]; a wrapped range
        // (begin >= end) selects everything outside (end, begin].
        var expression = beginHash < endHash
            ? $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME} AND {GRAIN_HASH_PROPERTY_NAME} > :Begin{GRAIN_HASH_PROPERTY_NAME} AND {GRAIN_HASH_PROPERTY_NAME} <= :End{GRAIN_HASH_PROPERTY_NAME}"
            : $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME} AND ({GRAIN_HASH_PROPERTY_NAME} > :Begin{GRAIN_HASH_PROPERTY_NAME} OR {GRAIN_HASH_PROPERTY_NAME} <= :End{GRAIN_HASH_PROPERTY_NAME})";

        var records = await this.storage.ScanAsync(this.options.TableName, expressionValues, expression, this.Resolve).ConfigureAwait(false);

        return new ReminderTableData(records);
    }
    catch (Exception exc)
    {
        this.logger.Warn(ErrorCode.ReminderServiceBase, $"Intermediate error reading reminder entry {Utils.DictionaryToString(expressionValues)} from table {this.options.TableName}.", exc);
        throw;
    }
}

// Materializes a ReminderEntry from a raw DynamoDB item.
// NOTE(review): TimeSpan.Parse and DateTime.Parse below use the current
// culture, while UpsertRow serializes with culture-sensitive ToString();
// assumes all silos run under compatible cultures -- verify before
// changing either side.
private ReminderEntry Resolve(Dictionary<string, AttributeValue> item)
{
    var entry = new ReminderEntry();
    entry.ETag = item[ETAG_PROPERTY_NAME].N;
    entry.GrainRef = this.grainReferenceConverter.FromKeyString(item[GRAIN_REFERENCE_PROPERTY_NAME].S);
    entry.Period = TimeSpan.Parse(item[PERIOD_PROPERTY_NAME].S);
    entry.ReminderName = item[REMINDER_NAME_PROPERTY_NAME].S;
    entry.StartAt = DateTime.Parse(item[START_TIME_PROPERTY_NAME].S);
    return entry;
}

/// <summary>
/// Remove one row from the reminder table
/// </summary>
/// <param name="grainRef"> specific grain ref to locate the row </param>
/// <param name="reminderName"> reminder name to locate the row </param>
/// <param name="eTag"> e tag </param>
/// <returns> Return true if the row was removed </returns>
public async Task<bool> RemoveRow(GrainReference grainRef, string reminderName, string eTag)
{
    var reminderId = ConstructReminderId(this.serviceId, grainRef, reminderName);

    var keys = new Dictionary<string, AttributeValue>
    {
        { REMINDER_ID_PROPERTY_NAME, new AttributeValue(reminderId) },
        { GRAIN_HASH_PROPERTY_NAME, new AttributeValue { N = grainRef.GetUniformHashCode().ToString() } }
    };

    try
    {
        // Conditional delete: only succeeds while the stored ETag still
        // matches the ETag the caller read.
        var conditionalValues = new Dictionary<string, AttributeValue>
        {
            { CURRENT_ETAG_ALIAS, new AttributeValue { N = eTag } }
        };
        var expression = $"{ETAG_PROPERTY_NAME} = {CURRENT_ETAG_ALIAS}";
        await this.storage.DeleteEntryAsync(this.options.TableName, keys, expression, conditionalValues).ConfigureAwait(false);
        return true;
    }
    catch (ConditionalCheckFailedException)
    {
        // The row changed (or vanished) since the caller read it.
        return false;
    }
}

/// <summary>
/// Test hook to clear reminder table data.
/// </summary>
/// <returns></returns>
public async Task TestOnlyClearTable()
{
    var expressionValues = new Dictionary<string, AttributeValue>
    {
        { $":{SERVICE_ID_PROPERTY_NAME}", new AttributeValue(this.serviceId) }
    };

    try
    {
        // Project only the table key attributes; that is all the
        // delete calls below need.
        var expression = $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME}";
        var records = await this.storage.ScanAsync(
            this.options.TableName,
            expressionValues,
            expression,
            item => new Dictionary<string, AttributeValue>
            {
                { REMINDER_ID_PROPERTY_NAME, item[REMINDER_ID_PROPERTY_NAME] },
                { GRAIN_HASH_PROPERTY_NAME, item[GRAIN_HASH_PROPERTY_NAME] }
            }).ConfigureAwait(false);

        // Batches are capped at 25 items by this storage layer; issue
        // multiple deletions in parallel when there are more rows.
        const int batchLimit = 25;
        if (records.Count <= batchLimit)
        {
            await this.storage.DeleteEntriesAsync(this.options.TableName, records);
        }
        else
        {
            var deletions = new List<Task>();
            foreach (var batch in records.BatchIEnumerable(batchLimit))
            {
                deletions.Add(this.storage.DeleteEntriesAsync(this.options.TableName, batch));
            }
            await Task.WhenAll(deletions);
        }
    }
    catch (Exception exc)
    {
        this.logger.Warn(ErrorCode.ReminderServiceBase, $"Intermediate error removing reminder entries {Utils.DictionaryToString(expressionValues)} from table {this.options.TableName}.", exc);
        throw;
    }
}

/// <summary>
/// Async method to put an entry into the reminder table
/// </summary>
/// <param name="entry"> The entry to put </param>
/// <returns> Return the entry ETag if entry was upsert successfully </returns>
public async Task<string> UpsertRow(ReminderEntry entry)
{
    var reminderId = ConstructReminderId(this.serviceId, entry.GrainRef, entry.ReminderName);

    // NOTE(review): StartAt is serialized with culture-sensitive
    // DateTime.ToString() and read back by DateTime.Parse in Resolve;
    // assumes all silos run under compatible cultures -- verify.
    var fields = new Dictionary<string, AttributeValue>
    {
        { REMINDER_ID_PROPERTY_NAME, new AttributeValue(reminderId) },
        { GRAIN_HASH_PROPERTY_NAME, new AttributeValue { N = entry.GrainRef.GetUniformHashCode().ToString() } },
        { SERVICE_ID_PROPERTY_NAME, new AttributeValue(this.serviceId) },
        { GRAIN_REFERENCE_PROPERTY_NAME, new AttributeValue(entry.GrainRef.ToKeyString()) },
        { PERIOD_PROPERTY_NAME, new AttributeValue(entry.Period.ToString()) },
        { START_TIME_PROPERTY_NAME, new AttributeValue(entry.StartAt.ToString()) },
        { REMINDER_NAME_PROPERTY_NAME, new AttributeValue(entry.ReminderName) },
        // A fresh random ETag stamps this write for optimistic
        // concurrency (see RemoveRow's conditional delete).
        { ETAG_PROPERTY_NAME, new AttributeValue { N = this._random.Next(int.MaxValue).ToString() } }
    };

    try
    {
        if (this.logger.IsEnabled(LogLevel.Debug)) this.logger.Debug("UpsertRow entry = {0}, etag = {1}", entry.ToString(), entry.ETag);

        await this.storage.PutEntryAsync(this.options.TableName, fields);

        // Report the ETag that was just written back to the caller.
        entry.ETag = fields[ETAG_PROPERTY_NAME].N;
        return entry.ETag;
    }
    catch (Exception exc)
    {
        this.logger.Warn(ErrorCode.ReminderServiceBase, $"Intermediate error updating entry {entry.ToString()} to the table {this.options.TableName}.", exc);
        throw;
    }
}

// Builds the composite hash key: serviceId_grainKey_reminderName.
private static string ConstructReminderId(string serviceId, GrainReference grainRef, string reminderName)
{
    return string.Join("_", serviceId, grainRef.ToKeyString(), reminderName);
}
}
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // // This file was autogenerated by a tool. // Do not modify it. // namespace Microsoft.Azure.Batch { using Models = Microsoft.Azure.Batch.Protocol.Models; using System; using System.Collections.Generic; using System.Linq; /// <summary> /// A pool in the Azure Batch service. /// </summary> public partial class CloudPool : ITransportObjectProvider<Models.PoolAddParameter>, IInheritedBehaviors, IPropertyMetadata { private class PropertyContainer : PropertyCollection { public readonly PropertyAccessor<Common.AllocationState?> AllocationStateProperty; public readonly PropertyAccessor<DateTime?> AllocationStateTransitionTimeProperty; public readonly PropertyAccessor<IList<string>> ApplicationLicensesProperty; public readonly PropertyAccessor<IList<ApplicationPackageReference>> ApplicationPackageReferencesProperty; public readonly PropertyAccessor<bool?> AutoScaleEnabledProperty; public readonly PropertyAccessor<TimeSpan?> AutoScaleEvaluationIntervalProperty; public readonly PropertyAccessor<string> AutoScaleFormulaProperty; public readonly PropertyAccessor<AutoScaleRun> AutoScaleRunProperty; public readonly PropertyAccessor<IList<CertificateReference>> CertificateReferencesProperty; public readonly PropertyAccessor<CloudServiceConfiguration> CloudServiceConfigurationProperty; public readonly PropertyAccessor<DateTime?> CreationTimeProperty; public readonly PropertyAccessor<int?> CurrentDedicatedComputeNodesProperty; public readonly PropertyAccessor<int?> CurrentLowPriorityComputeNodesProperty; public readonly PropertyAccessor<string> DisplayNameProperty; public readonly PropertyAccessor<string> ETagProperty; public readonly PropertyAccessor<string> IdProperty; public 
readonly PropertyAccessor<bool?> InterComputeNodeCommunicationEnabledProperty; public readonly PropertyAccessor<DateTime?> LastModifiedProperty; public readonly PropertyAccessor<int?> MaxTasksPerComputeNodeProperty; public readonly PropertyAccessor<IList<MetadataItem>> MetadataProperty; public readonly PropertyAccessor<IList<MountConfiguration>> MountConfigurationProperty; public readonly PropertyAccessor<NetworkConfiguration> NetworkConfigurationProperty; public readonly PropertyAccessor<IReadOnlyList<ResizeError>> ResizeErrorsProperty; public readonly PropertyAccessor<TimeSpan?> ResizeTimeoutProperty; public readonly PropertyAccessor<StartTask> StartTaskProperty; public readonly PropertyAccessor<Common.PoolState?> StateProperty; public readonly PropertyAccessor<DateTime?> StateTransitionTimeProperty; public readonly PropertyAccessor<PoolStatistics> StatisticsProperty; public readonly PropertyAccessor<int?> TargetDedicatedComputeNodesProperty; public readonly PropertyAccessor<int?> TargetLowPriorityComputeNodesProperty; public readonly PropertyAccessor<TaskSchedulingPolicy> TaskSchedulingPolicyProperty; public readonly PropertyAccessor<string> UrlProperty; public readonly PropertyAccessor<IList<UserAccount>> UserAccountsProperty; public readonly PropertyAccessor<VirtualMachineConfiguration> VirtualMachineConfigurationProperty; public readonly PropertyAccessor<string> VirtualMachineSizeProperty; public PropertyContainer() : base(BindingState.Unbound) { this.AllocationStateProperty = this.CreatePropertyAccessor<Common.AllocationState?>(nameof(AllocationState), BindingAccess.None); this.AllocationStateTransitionTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(AllocationStateTransitionTime), BindingAccess.None); this.ApplicationLicensesProperty = this.CreatePropertyAccessor<IList<string>>(nameof(ApplicationLicenses), BindingAccess.Read | BindingAccess.Write); this.ApplicationPackageReferencesProperty = 
this.CreatePropertyAccessor<IList<ApplicationPackageReference>>(nameof(ApplicationPackageReferences), BindingAccess.Read | BindingAccess.Write); this.AutoScaleEnabledProperty = this.CreatePropertyAccessor<bool?>(nameof(AutoScaleEnabled), BindingAccess.Read | BindingAccess.Write); this.AutoScaleEvaluationIntervalProperty = this.CreatePropertyAccessor<TimeSpan?>(nameof(AutoScaleEvaluationInterval), BindingAccess.Read | BindingAccess.Write); this.AutoScaleFormulaProperty = this.CreatePropertyAccessor<string>(nameof(AutoScaleFormula), BindingAccess.Read | BindingAccess.Write); this.AutoScaleRunProperty = this.CreatePropertyAccessor<AutoScaleRun>(nameof(AutoScaleRun), BindingAccess.None); this.CertificateReferencesProperty = this.CreatePropertyAccessor<IList<CertificateReference>>(nameof(CertificateReferences), BindingAccess.Read | BindingAccess.Write); this.CloudServiceConfigurationProperty = this.CreatePropertyAccessor<CloudServiceConfiguration>(nameof(CloudServiceConfiguration), BindingAccess.Read | BindingAccess.Write); this.CreationTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(CreationTime), BindingAccess.None); this.CurrentDedicatedComputeNodesProperty = this.CreatePropertyAccessor<int?>(nameof(CurrentDedicatedComputeNodes), BindingAccess.None); this.CurrentLowPriorityComputeNodesProperty = this.CreatePropertyAccessor<int?>(nameof(CurrentLowPriorityComputeNodes), BindingAccess.None); this.DisplayNameProperty = this.CreatePropertyAccessor<string>(nameof(DisplayName), BindingAccess.Read | BindingAccess.Write); this.ETagProperty = this.CreatePropertyAccessor<string>(nameof(ETag), BindingAccess.None); this.IdProperty = this.CreatePropertyAccessor<string>(nameof(Id), BindingAccess.Read | BindingAccess.Write); this.InterComputeNodeCommunicationEnabledProperty = this.CreatePropertyAccessor<bool?>(nameof(InterComputeNodeCommunicationEnabled), BindingAccess.Read | BindingAccess.Write); this.LastModifiedProperty = 
// (continued from the preceding line: right-hand side of the LastModifiedProperty
// assignment inside the default, unbound constructor — accessors for the remaining
// CloudPool properties; BindingAccess.None marks server-populated, read-only values)
this.CreatePropertyAccessor<DateTime?>(nameof(LastModified), BindingAccess.None);
this.MaxTasksPerComputeNodeProperty = this.CreatePropertyAccessor<int?>(nameof(MaxTasksPerComputeNode), BindingAccess.Read | BindingAccess.Write);
this.MetadataProperty = this.CreatePropertyAccessor<IList<MetadataItem>>(nameof(Metadata), BindingAccess.Read | BindingAccess.Write);
this.MountConfigurationProperty = this.CreatePropertyAccessor<IList<MountConfiguration>>(nameof(MountConfiguration), BindingAccess.Read | BindingAccess.Write);
this.NetworkConfigurationProperty = this.CreatePropertyAccessor<NetworkConfiguration>(nameof(NetworkConfiguration), BindingAccess.Read | BindingAccess.Write);
this.ResizeErrorsProperty = this.CreatePropertyAccessor<IReadOnlyList<ResizeError>>(nameof(ResizeErrors), BindingAccess.None);
this.ResizeTimeoutProperty = this.CreatePropertyAccessor<TimeSpan?>(nameof(ResizeTimeout), BindingAccess.Read | BindingAccess.Write);
this.StartTaskProperty = this.CreatePropertyAccessor<StartTask>(nameof(StartTask), BindingAccess.Read | BindingAccess.Write);
this.StateProperty = this.CreatePropertyAccessor<Common.PoolState?>(nameof(State), BindingAccess.None);
this.StateTransitionTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(StateTransitionTime), BindingAccess.None);
this.StatisticsProperty = this.CreatePropertyAccessor<PoolStatistics>(nameof(Statistics), BindingAccess.None);
this.TargetDedicatedComputeNodesProperty = this.CreatePropertyAccessor<int?>(nameof(TargetDedicatedComputeNodes), BindingAccess.Read | BindingAccess.Write);
this.TargetLowPriorityComputeNodesProperty = this.CreatePropertyAccessor<int?>(nameof(TargetLowPriorityComputeNodes), BindingAccess.Read | BindingAccess.Write);
this.TaskSchedulingPolicyProperty = this.CreatePropertyAccessor<TaskSchedulingPolicy>(nameof(TaskSchedulingPolicy), BindingAccess.Read | BindingAccess.Write);
this.UrlProperty = this.CreatePropertyAccessor<string>(nameof(Url), BindingAccess.None);
this.UserAccountsProperty = this.CreatePropertyAccessor<IList<UserAccount>>(nameof(UserAccounts), BindingAccess.Read | BindingAccess.Write);
this.VirtualMachineConfigurationProperty = this.CreatePropertyAccessor<VirtualMachineConfiguration>(nameof(VirtualMachineConfiguration), BindingAccess.Read | BindingAccess.Write);
this.VirtualMachineSizeProperty = this.CreatePropertyAccessor<string>(nameof(VirtualMachineSize), BindingAccess.Read | BindingAccess.Write);
}

/// <summary>
/// Bound constructor: populates every property accessor from the protocol (REST) object.
/// Server-owned values get <c>BindingAccess.Read</c>; user-modifiable ones also get <c>Write</c>.
/// Nested objects are wrapped in the corresponding object-model types and, where the
/// property is read-only, frozen via <c>.Freeze()</c>.
/// </summary>
public PropertyContainer(Models.CloudPool protocolObject) : base(BindingState.Bound)
{
    this.AllocationStateProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.MapNullableEnum<Models.AllocationState, Common.AllocationState>(protocolObject.AllocationState),
        nameof(AllocationState),
        BindingAccess.Read);
    this.AllocationStateTransitionTimeProperty = this.CreatePropertyAccessor(
        protocolObject.AllocationStateTransitionTime,
        nameof(AllocationStateTransitionTime),
        BindingAccess.Read);
    this.ApplicationLicensesProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CollectionToThreadSafeCollection(protocolObject.ApplicationLicenses, o => o),
        nameof(ApplicationLicenses),
        BindingAccess.Read);
    this.ApplicationPackageReferencesProperty = this.CreatePropertyAccessor(
        ApplicationPackageReference.ConvertFromProtocolCollection(protocolObject.ApplicationPackageReferences),
        nameof(ApplicationPackageReferences),
        BindingAccess.Read | BindingAccess.Write);
    this.AutoScaleEnabledProperty = this.CreatePropertyAccessor(
        protocolObject.EnableAutoScale,
        nameof(AutoScaleEnabled),
        BindingAccess.Read);
    this.AutoScaleEvaluationIntervalProperty = this.CreatePropertyAccessor(
        protocolObject.AutoScaleEvaluationInterval,
        nameof(AutoScaleEvaluationInterval),
        BindingAccess.Read);
    this.AutoScaleFormulaProperty = this.CreatePropertyAccessor(
        protocolObject.AutoScaleFormula,
        nameof(AutoScaleFormula),
        BindingAccess.Read);
    this.AutoScaleRunProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.AutoScaleRun, o => new AutoScaleRun(o).Freeze()),
        nameof(AutoScaleRun),
        BindingAccess.Read);
    this.CertificateReferencesProperty = this.CreatePropertyAccessor(
        CertificateReference.ConvertFromProtocolCollection(protocolObject.CertificateReferences),
        nameof(CertificateReferences),
        BindingAccess.Read | BindingAccess.Write);
    this.CloudServiceConfigurationProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.CloudServiceConfiguration, o => new CloudServiceConfiguration(o).Freeze()),
        nameof(CloudServiceConfiguration),
        BindingAccess.Read);
    this.CreationTimeProperty = this.CreatePropertyAccessor(
        protocolObject.CreationTime,
        nameof(CreationTime),
        BindingAccess.Read);
    // Note: protocol names differ from object-model names here (CurrentDedicatedNodes
    // vs CurrentDedicatedComputeNodes, etc.) — the mapping below is intentional.
    this.CurrentDedicatedComputeNodesProperty = this.CreatePropertyAccessor(
        protocolObject.CurrentDedicatedNodes,
        nameof(CurrentDedicatedComputeNodes),
        BindingAccess.Read);
    this.CurrentLowPriorityComputeNodesProperty = this.CreatePropertyAccessor(
        protocolObject.CurrentLowPriorityNodes,
        nameof(CurrentLowPriorityComputeNodes),
        BindingAccess.Read);
    this.DisplayNameProperty = this.CreatePropertyAccessor(
        protocolObject.DisplayName,
        nameof(DisplayName),
        BindingAccess.Read);
    this.ETagProperty = this.CreatePropertyAccessor(
        protocolObject.ETag,
        nameof(ETag),
        BindingAccess.Read);
    this.IdProperty = this.CreatePropertyAccessor(
        protocolObject.Id,
        nameof(Id),
        BindingAccess.Read);
    this.InterComputeNodeCommunicationEnabledProperty = this.CreatePropertyAccessor(
        protocolObject.EnableInterNodeCommunication,
        nameof(InterComputeNodeCommunicationEnabled),
        BindingAccess.Read);
    this.LastModifiedProperty = this.CreatePropertyAccessor(
        protocolObject.LastModified,
        nameof(LastModified),
        BindingAccess.Read);
    this.MaxTasksPerComputeNodeProperty = this.CreatePropertyAccessor(
        protocolObject.MaxTasksPerNode,
        nameof(MaxTasksPerComputeNode),
        BindingAccess.Read);
    this.MetadataProperty = this.CreatePropertyAccessor(
        MetadataItem.ConvertFromProtocolCollection(protocolObject.Metadata),
        nameof(Metadata),
        BindingAccess.Read | BindingAccess.Write);
    this.MountConfigurationProperty = this.CreatePropertyAccessor(
        Batch.MountConfiguration.ConvertFromProtocolCollectionAndFreeze(protocolObject.MountConfiguration),
        nameof(MountConfiguration),
        BindingAccess.Read);
    this.NetworkConfigurationProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.NetworkConfiguration, o => new NetworkConfiguration(o).Freeze()),
        nameof(NetworkConfiguration),
        BindingAccess.Read);
    this.ResizeErrorsProperty = this.CreatePropertyAccessor(
        ResizeError.ConvertFromProtocolCollectionReadOnly(protocolObject.ResizeErrors),
        nameof(ResizeErrors),
        BindingAccess.Read);
    this.ResizeTimeoutProperty = this.CreatePropertyAccessor(
        protocolObject.ResizeTimeout,
        nameof(ResizeTimeout),
        BindingAccess.Read);
    this.StartTaskProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.StartTask, o => new StartTask(o)),
        nameof(StartTask),
        BindingAccess.Read | BindingAccess.Write);
    this.StateProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.MapNullableEnum<Models.PoolState, Common.PoolState>(protocolObject.State),
        nameof(State),
        BindingAccess.Read);
    this.StateTransitionTimeProperty = this.CreatePropertyAccessor(
        protocolObject.StateTransitionTime,
        nameof(StateTransitionTime),
        BindingAccess.Read);
    this.StatisticsProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.Stats, o => new PoolStatistics(o).Freeze()),
        nameof(Statistics),
        BindingAccess.Read);
    this.TargetDedicatedComputeNodesProperty = this.CreatePropertyAccessor(
        protocolObject.TargetDedicatedNodes,
        nameof(TargetDedicatedComputeNodes),
        BindingAccess.Read);
    this.TargetLowPriorityComputeNodesProperty = this.CreatePropertyAccessor(
        protocolObject.TargetLowPriorityNodes,
        nameof(TargetLowPriorityComputeNodes),
        BindingAccess.Read);
    this.TaskSchedulingPolicyProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.TaskSchedulingPolicy, o => new TaskSchedulingPolicy(o).Freeze()),
        nameof(TaskSchedulingPolicy),
        BindingAccess.Read);
    this.UrlProperty = this.CreatePropertyAccessor(
        protocolObject.Url,
        nameof(Url),
        BindingAccess.Read);
    this.UserAccountsProperty = this.CreatePropertyAccessor(
        UserAccount.ConvertFromProtocolCollectionAndFreeze(protocolObject.UserAccounts),
        nameof(UserAccounts),
        BindingAccess.Read);
    this.VirtualMachineConfigurationProperty = this.CreatePropertyAccessor(
        UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.VirtualMachineConfiguration, o => new VirtualMachineConfiguration(o).Freeze()),
        nameof(VirtualMachineConfiguration),
        BindingAccess.Read);
    this.VirtualMachineSizeProperty = this.CreatePropertyAccessor(
        protocolObject.VmSize,
        nameof(VirtualMachineSize),
        BindingAccess.Read);
}
}

// Holds all property state for this CloudPool (unbound or bound to a protocol object).
private PropertyContainer propertyContainer;

private readonly BatchClient parentBatchClient;

#region Constructors

/// <summary>
/// Initializes a new instance of the <see cref="CloudPool"/> class.
/// </summary>
/// <param name='parentBatchClient'>The parent <see cref="BatchClient"/> to use.</param>
/// <param name='baseBehaviors'>The base behaviors to use.</param>
internal CloudPool(
    BatchClient parentBatchClient,
    IEnumerable<BatchClientBehavior> baseBehaviors)
{
    // Unbound pool: all user-settable properties start writable.
    this.propertyContainer = new PropertyContainer();
    this.parentBatchClient = parentBatchClient;

    InheritUtil.InheritClientBehaviorsAndSetPublicProperty(this, baseBehaviors);
}

// Bound constructor: wraps a protocol object returned by the Batch service.
internal CloudPool(
    BatchClient parentBatchClient,
    Models.CloudPool protocolObject,
    IEnumerable<BatchClientBehavior> baseBehaviors)
{
    this.parentBatchClient = parentBatchClient;

    InheritUtil.InheritClientBehaviorsAndSetPublicProperty(this, baseBehaviors);

    this.propertyContainer = new PropertyContainer(protocolObject);
}

#endregion Constructors

#region IInheritedBehaviors

/// <summary>
/// Gets or sets a list of behaviors that modify or customize requests to the Batch service
/// made via this <see cref="CloudPool"/>.
/// </summary>
/// <remarks>
/// <para>These behaviors are inherited by child objects.</para>
/// <para>Modifications are applied in the order of the collection. The last write wins.</para>
/// </remarks>
public IList<BatchClientBehavior> CustomBehaviors { get; set; }

#endregion IInheritedBehaviors

#region CloudPool

/// <summary>
/// Gets an <see cref="Common.AllocationState"/> which indicates what node allocation activity is occurring on the
/// pool.
/// </summary>
public Common.AllocationState? AllocationState
{
    get { return this.propertyContainer.AllocationStateProperty.Value; }
}

/// <summary>
/// Gets the time at which the pool entered its current <see cref="AllocationState"/>.
/// </summary>
public DateTime? AllocationStateTransitionTime
{
    get { return this.propertyContainer.AllocationStateTransitionTimeProperty.Value; }
}

/// <summary>
/// Gets or sets the list of application licenses the Batch service will make available on each compute node in the
/// pool.
/// </summary>
/// <remarks>
/// <para>The list of application licenses must be a subset of available Batch service application licenses.</para><para>The
/// permitted licenses available on the pool are 'maya', 'vray', '3dsmax', 'arnold'. An additional charge applies
/// for each application license added to the pool.</para>
/// </remarks>
public IList<string> ApplicationLicenses
{
    get { return this.propertyContainer.ApplicationLicensesProperty.Value; }
    // Setter wraps the incoming enumerable in a thread-safe tracked list.
    set { this.propertyContainer.ApplicationLicensesProperty.Value = ConcurrentChangeTrackedList<string>.TransformEnumerableToConcurrentList(value); }
}

/// <summary>
/// Gets or sets a list of application packages to be installed on each compute node in the pool.
/// </summary>
/// <remarks>
/// Changes to application package references affect all new compute nodes joining the pool, but do not affect compute
/// nodes that are already in the pool until they are rebooted or reimaged. There is a maximum of 10 application
/// package references on any given pool.
/// </remarks>
public IList<ApplicationPackageReference> ApplicationPackageReferences
{
    get { return this.propertyContainer.ApplicationPackageReferencesProperty.Value; }
    set { this.propertyContainer.ApplicationPackageReferencesProperty.Value = ConcurrentChangeTrackedModifiableList<ApplicationPackageReference>.TransformEnumerableToConcurrentModifiableList(value); }
}

/// <summary>
/// Gets or sets whether the pool size should automatically adjust according to the <see cref="AutoScaleFormula"/>.
/// </summary>
/// <remarks>
/// <para>If true, the <see cref="AutoScaleFormula"/> property is required, the pool automatically resizes according
/// to the formula, and <see cref="TargetDedicatedComputeNodes"/> and <see cref="TargetLowPriorityComputeNodes"/>
/// must be null.</para> <para>If false, one of the <see cref="TargetDedicatedComputeNodes"/> or <see cref="TargetLowPriorityComputeNodes"/>
/// properties is required.</para><para>The default value is false.</para>
/// </remarks>
public bool? AutoScaleEnabled
{
    get { return this.propertyContainer.AutoScaleEnabledProperty.Value; }
    set { this.propertyContainer.AutoScaleEnabledProperty.Value = value; }
}

/// <summary>
/// Gets or sets a time interval at which to automatically adjust the pool size according to the <see cref="AutoScaleFormula"/>.
/// </summary>
/// <remarks>
/// The default value is 15 minutes. The minimum allowed value is 5 minutes.
/// </remarks>
public TimeSpan? AutoScaleEvaluationInterval
{
    get { return this.propertyContainer.AutoScaleEvaluationIntervalProperty.Value; }
    set { this.propertyContainer.AutoScaleEvaluationIntervalProperty.Value = value; }
}

/// <summary>
/// Gets or sets a formula for the desired number of compute nodes in the pool.
/// </summary>
/// <remarks>
/// <para>For how to write autoscale formulas, see https://azure.microsoft.com/documentation/articles/batch-automatic-scaling/.
/// This property is required if <see cref="AutoScaleEnabled"/> is set to true. It must be null if AutoScaleEnabled
/// is false.</para><para>The formula is checked for validity before the pool is created. If the formula is not valid,
/// an exception is thrown when you try to commit the <see cref="CloudPool"/>.</para>
/// </remarks>
public string AutoScaleFormula
{
    get { return this.propertyContainer.AutoScaleFormulaProperty.Value; }
    set { this.propertyContainer.AutoScaleFormulaProperty.Value = value; }
}

/// <summary>
/// Gets the results and errors from the last execution of the <see cref="AutoScaleFormula"/>.
/// </summary>
public AutoScaleRun AutoScaleRun
{
    get { return this.propertyContainer.AutoScaleRunProperty.Value; }
}

/// <summary>
/// Gets or sets a list of certificates to be installed on each compute node in the pool.
/// </summary>
public IList<CertificateReference> CertificateReferences
{
    get { return this.propertyContainer.CertificateReferencesProperty.Value; }
    set { this.propertyContainer.CertificateReferencesProperty.Value = ConcurrentChangeTrackedModifiableList<CertificateReference>.TransformEnumerableToConcurrentModifiableList(value); }
}

/// <summary>
/// Gets or sets the <see cref="CloudServiceConfiguration"/> for the pool.
/// </summary>
public CloudServiceConfiguration CloudServiceConfiguration
{
    get { return this.propertyContainer.CloudServiceConfigurationProperty.Value; }
    set { this.propertyContainer.CloudServiceConfigurationProperty.Value = value; }
}

/// <summary>
/// Gets the creation time for the pool.
/// </summary>
public DateTime? CreationTime
{
    get { return this.propertyContainer.CreationTimeProperty.Value; }
}

/// <summary>
/// Gets the number of dedicated compute nodes currently in the pool.
/// </summary>
public int? CurrentDedicatedComputeNodes
{
    get { return this.propertyContainer.CurrentDedicatedComputeNodesProperty.Value; }
}

/// <summary>
/// Gets the number of low-priority compute nodes currently in the pool.
/// </summary>
/// <remarks>
/// Low-priority compute nodes which have been preempted are included in this count.
/// </remarks>
public int? CurrentLowPriorityComputeNodes
{
    get { return this.propertyContainer.CurrentLowPriorityComputeNodesProperty.Value; }
}

/// <summary>
/// Gets or sets the display name of the pool.
/// </summary>
public string DisplayName
{
    get { return this.propertyContainer.DisplayNameProperty.Value; }
    set { this.propertyContainer.DisplayNameProperty.Value = value; }
}

/// <summary>
/// Gets the ETag for the pool.
/// </summary>
public string ETag
{
    get { return this.propertyContainer.ETagProperty.Value; }
}

/// <summary>
/// Gets or sets the id of the pool.
/// </summary>
public string Id
{
    get { return this.propertyContainer.IdProperty.Value; }
    set { this.propertyContainer.IdProperty.Value = value; }
}

/// <summary>
/// Gets or sets whether the pool permits direct communication between its compute nodes.
/// </summary>
/// <remarks>
/// Enabling inter-node communication limits the maximum size of the pool due to deployment restrictions on the nodes
/// of the pool. This may result in the pool not reaching its desired size.
/// </remarks>
public bool? InterComputeNodeCommunicationEnabled
{
    get { return this.propertyContainer.InterComputeNodeCommunicationEnabledProperty.Value; }
    set { this.propertyContainer.InterComputeNodeCommunicationEnabledProperty.Value = value; }
}

/// <summary>
/// Gets the last modified time of the pool.
/// </summary>
public DateTime? LastModified
{
    get { return this.propertyContainer.LastModifiedProperty.Value; }
}

/// <summary>
/// Gets or sets the maximum number of tasks that can run concurrently on a single compute node in the pool.
/// </summary>
/// <remarks>
/// The default value is 1. The maximum value is the smaller of 4 times the number of cores of the <see cref="VirtualMachineSize"/>
/// of the pool or 256.
/// </remarks>
public int? MaxTasksPerComputeNode
{
    get { return this.propertyContainer.MaxTasksPerComputeNodeProperty.Value; }
    set { this.propertyContainer.MaxTasksPerComputeNodeProperty.Value = value; }
}

/// <summary>
/// Gets or sets a list of name-value pairs associated with the pool as metadata.
/// </summary>
public IList<MetadataItem> Metadata
{
    get { return this.propertyContainer.MetadataProperty.Value; }
    set { this.propertyContainer.MetadataProperty.Value = ConcurrentChangeTrackedModifiableList<MetadataItem>.TransformEnumerableToConcurrentModifiableList(value); }
}

/// <summary>
/// Gets or sets a list of file systems to mount on each node in the pool.
/// </summary>
/// <remarks>
/// This supports Azure Files, NFS, CIFS/SMB, and Blobfuse.
/// </remarks>
public IList<MountConfiguration> MountConfiguration
{
    get { return this.propertyContainer.MountConfigurationProperty.Value; }
    set { this.propertyContainer.MountConfigurationProperty.Value = ConcurrentChangeTrackedModifiableList<MountConfiguration>.TransformEnumerableToConcurrentModifiableList(value); }
}

/// <summary>
/// Gets or sets the network configuration of the pool.
/// </summary>
public NetworkConfiguration NetworkConfiguration
{
    get { return this.propertyContainer.NetworkConfigurationProperty.Value; }
    set { this.propertyContainer.NetworkConfigurationProperty.Value = value; }
}

/// <summary>
/// Gets a list of errors encountered while performing the last resize on the <see cref="CloudPool"/>. Errors are
/// returned only when the Batch service encountered an error while resizing the pool, and when the pool's <see cref="CloudPool.AllocationState"/>
/// is <see cref="Common.AllocationState.Steady">Steady</see>.
/// </summary>
public IReadOnlyList<ResizeError> ResizeErrors
{
    get { return this.propertyContainer.ResizeErrorsProperty.Value; }
}

/// <summary>
/// Gets or sets the timeout for allocation of compute nodes to the pool.
/// </summary>
public TimeSpan? ResizeTimeout
{
    get { return this.propertyContainer.ResizeTimeoutProperty.Value; }
    set { this.propertyContainer.ResizeTimeoutProperty.Value = value; }
}

/// <summary>
/// Gets or sets a task to run on each compute node as it joins the pool. The task runs when the node is added to
/// the pool or when the node is restarted.
/// </summary>
public StartTask StartTask
{
    get { return this.propertyContainer.StartTaskProperty.Value; }
    set { this.propertyContainer.StartTaskProperty.Value = value; }
}

/// <summary>
/// Gets the current state of the pool.
/// </summary>
public Common.PoolState? State
{
    get { return this.propertyContainer.StateProperty.Value; }
}

/// <summary>
/// Gets the time at which the pool entered its current state.
/// </summary>
public DateTime? StateTransitionTime
{
    get { return this.propertyContainer.StateTransitionTimeProperty.Value; }
}

/// <summary>
/// Gets the resource usage statistics for the pool.
/// </summary>
/// <remarks>
/// This property is populated only if the <see cref="CloudPool"/> was retrieved with an <see cref="ODATADetailLevel.ExpandClause"/>
/// including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch
/// service performs periodic roll-up of statistics. The typical delay is about 30 minutes.
/// </remarks>
public PoolStatistics Statistics
{
    get { return this.propertyContainer.StatisticsProperty.Value; }
}

/// <summary>
/// Gets or sets the desired number of dedicated compute nodes in the pool.
/// </summary>
/// <remarks>
/// This setting cannot be specified if <see cref="AutoScaleEnabled"/> is set to true. At least one of this property
/// and <see cref="TargetLowPriorityComputeNodes"/> must be specified if <see cref="AutoScaleEnabled"/> is false.
/// If not specified, the default is 0.
/// </remarks>
public int? TargetDedicatedComputeNodes
{
    get { return this.propertyContainer.TargetDedicatedComputeNodesProperty.Value; }
    set { this.propertyContainer.TargetDedicatedComputeNodesProperty.Value = value; }
}

/// <summary>
/// Gets or sets the desired number of low-priority compute nodes in the pool.
/// </summary>
/// <remarks>
/// This setting cannot be specified if <see cref="AutoScaleEnabled"/> is set to true. At least one of <see cref="TargetDedicatedComputeNodes"/>
/// and this property must be specified if <see cref="AutoScaleEnabled"/> is false. If not specified, the default
/// is 0.
/// </remarks>
public int? TargetLowPriorityComputeNodes
{
    get { return this.propertyContainer.TargetLowPriorityComputeNodesProperty.Value; }
    set { this.propertyContainer.TargetLowPriorityComputeNodesProperty.Value = value; }
}

/// <summary>
/// Gets or sets how tasks are distributed among compute nodes in the pool.
/// </summary>
public TaskSchedulingPolicy TaskSchedulingPolicy
{
    get { return this.propertyContainer.TaskSchedulingPolicyProperty.Value; }
    set { this.propertyContainer.TaskSchedulingPolicyProperty.Value = value; }
}

/// <summary>
/// Gets the URL of the pool.
/// </summary>
public string Url
{
    get { return this.propertyContainer.UrlProperty.Value; }
}

/// <summary>
/// Gets or sets the list of user accounts to be created on each node in the pool.
/// </summary>
public IList<UserAccount> UserAccounts
{
    get { return this.propertyContainer.UserAccountsProperty.Value; }
    set { this.propertyContainer.UserAccountsProperty.Value = ConcurrentChangeTrackedModifiableList<UserAccount>.TransformEnumerableToConcurrentModifiableList(value); }
}

/// <summary>
/// Gets or sets the <see cref="VirtualMachineConfiguration"/> of the pool.
/// </summary>
public VirtualMachineConfiguration VirtualMachineConfiguration
{
    get { return this.propertyContainer.VirtualMachineConfigurationProperty.Value; }
    set { this.propertyContainer.VirtualMachineConfigurationProperty.Value = value; }
}

/// <summary>
/// Gets or sets the size of the virtual machines in the pool. All virtual machines in a pool are the same size.
/// </summary>
/// <remarks>
/// <para>For information about available sizes of virtual machines in pools, see Choose a VM size for compute nodes
/// in an Azure Batch pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes).</para>
/// </remarks>
public string VirtualMachineSize
{
    get { return this.propertyContainer.VirtualMachineSizeProperty.Value; }
    set { this.propertyContainer.VirtualMachineSizeProperty.Value = value; }
}

#endregion // CloudPool

#region IPropertyMetadata

// Delegates modification/read-only tracking to the property container.
bool IModifiable.HasBeenModified
{
    get { return this.propertyContainer.HasBeenModified; }
}

bool IReadOnly.IsReadOnly
{
    get { return this.propertyContainer.IsReadOnly; }
    set { this.propertyContainer.IsReadOnly = value; }
}

#endregion //IPropertyMetadata

#region Internal/private methods

/// <summary>
/// Return a protocol object of the requested type.
/// </summary> /// <returns>The protocol object of the requested type.</returns> Models.PoolAddParameter ITransportObjectProvider<Models.PoolAddParameter>.GetTransportObject() { Models.PoolAddParameter result = new Models.PoolAddParameter() { ApplicationLicenses = this.ApplicationLicenses, ApplicationPackageReferences = UtilitiesInternal.ConvertToProtocolCollection(this.ApplicationPackageReferences), EnableAutoScale = this.AutoScaleEnabled, AutoScaleEvaluationInterval = this.AutoScaleEvaluationInterval, AutoScaleFormula = this.AutoScaleFormula, CertificateReferences = UtilitiesInternal.ConvertToProtocolCollection(this.CertificateReferences), CloudServiceConfiguration = UtilitiesInternal.CreateObjectWithNullCheck(this.CloudServiceConfiguration, (o) => o.GetTransportObject()), DisplayName = this.DisplayName, Id = this.Id, EnableInterNodeCommunication = this.InterComputeNodeCommunicationEnabled, MaxTasksPerNode = this.MaxTasksPerComputeNode, Metadata = UtilitiesInternal.ConvertToProtocolCollection(this.Metadata), MountConfiguration = UtilitiesInternal.ConvertToProtocolCollection(this.MountConfiguration), NetworkConfiguration = UtilitiesInternal.CreateObjectWithNullCheck(this.NetworkConfiguration, (o) => o.GetTransportObject()), ResizeTimeout = this.ResizeTimeout, StartTask = UtilitiesInternal.CreateObjectWithNullCheck(this.StartTask, (o) => o.GetTransportObject()), TargetDedicatedNodes = this.TargetDedicatedComputeNodes, TargetLowPriorityNodes = this.TargetLowPriorityComputeNodes, TaskSchedulingPolicy = UtilitiesInternal.CreateObjectWithNullCheck(this.TaskSchedulingPolicy, (o) => o.GetTransportObject()), UserAccounts = UtilitiesInternal.ConvertToProtocolCollection(this.UserAccounts), VirtualMachineConfiguration = UtilitiesInternal.CreateObjectWithNullCheck(this.VirtualMachineConfiguration, (o) => o.GetTransportObject()), VmSize = this.VirtualMachineSize, }; return result; } #endregion // Internal/private methods } }
using System;

namespace CSharpVitamins
{
    /// <summary>
    /// Represents a globally unique identifier (GUID) with a shorter,
    /// URL-safe base64 string value (22 characters; '/' and '+' are
    /// replaced with '_' and '-', and the trailing "==" padding is dropped).
    /// </summary>
    public struct ShortGuid
    {
        #region Static

        /// <summary>
        /// A read-only instance of the ShortGuid class whose value
        /// is guaranteed to be all zeroes.
        /// </summary>
        public static readonly ShortGuid Empty = new ShortGuid(Guid.Empty);

        #endregion

        #region Fields

        Guid _guid;     // the underlying Guid
        string _value;  // the 22-character encoded form, kept in sync with _guid

        #endregion

        #region Constructors

        /// <summary>
        /// Creates a ShortGuid from a base64 encoded string.
        /// </summary>
        /// <param name="value">The encoded guid as a base64 string</param>
        public ShortGuid(string value)
        {
            _value = value;
            _guid = Decode(value);
        }

        /// <summary>
        /// Creates a ShortGuid from a Guid.
        /// </summary>
        /// <param name="guid">The Guid to encode</param>
        public ShortGuid(Guid guid)
        {
            _value = Encode(guid);
            _guid = guid;
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets/sets the underlying Guid. Setting re-encodes the string form.
        /// </summary>
        public Guid Guid
        {
            get { return _guid; }
            set
            {
                if (value != _guid)
                {
                    _guid = value;
                    _value = Encode(value);
                }
            }
        }

        /// <summary>
        /// Gets/sets the underlying base64 encoded string. Setting re-decodes the Guid form.
        /// </summary>
        public string Value
        {
            get { return _value; }
            set
            {
                if (value != _value)
                {
                    _value = value;
                    _guid = Decode(value);
                }
            }
        }

        #endregion

        #region ToString

        /// <summary>
        /// Returns the base64 encoded guid as a string.
        /// </summary>
        public override string ToString()
        {
            return _value;
        }

        #endregion

        #region Equals

        /// <summary>
        /// Returns a value indicating whether this instance and a
        /// specified Object represent the same type and value. Accepts
        /// a ShortGuid, a Guid, or an encoded ShortGuid string.
        /// </summary>
        /// <param name="obj">The object to compare</param>
        public override bool Equals(object obj)
        {
            if (obj is ShortGuid)
                return _guid.Equals(((ShortGuid)obj)._guid);

            if (obj is Guid)
                return _guid.Equals((Guid)obj);

            // BUGFIX: the original cast the boxed string directly to ShortGuid,
            // which is an invalid unboxing conversion and always threw
            // InvalidCastException. Convert the string through the implicit
            // string -> ShortGuid operator (i.e. decode it) and compare Guids.
            if (obj is string)
                return _guid.Equals(((ShortGuid)(string)obj)._guid);

            return false;
        }

        #endregion

        #region GetHashCode

        /// <summary>
        /// Returns the HashCode for the underlying Guid.
        /// </summary>
        public override int GetHashCode()
        {
            return _guid.GetHashCode();
        }

        #endregion

        #region NewGuid

        /// <summary>
        /// Initialises a new instance of the ShortGuid class with a random Guid.
        /// </summary>
        public static ShortGuid NewGuid()
        {
            return new ShortGuid(Guid.NewGuid());
        }

        #endregion

        #region Encode

        /// <summary>
        /// Creates a new instance of a Guid using the string value,
        /// then returns the base64 encoded version of the Guid.
        /// </summary>
        /// <param name="value">An actual Guid string (i.e. not a ShortGuid)</param>
        public static string Encode(string value)
        {
            Guid guid = new Guid(value);
            return Encode(guid);
        }

        /// <summary>
        /// Encodes the given Guid as a URL-safe base64 string that is
        /// 22 characters long.
        /// </summary>
        /// <param name="guid">The Guid to encode</param>
        public static string Encode(Guid guid)
        {
            string encoded = Convert.ToBase64String(guid.ToByteArray());

            // Make the encoding URL-safe, then drop the fixed "==" padding.
            encoded = encoded
                .Replace("/", "_")
                .Replace("+", "-");

            return encoded.Substring(0, 22);
        }

        #endregion

        #region Decode

        /// <summary>
        /// Decodes the given base64 string (the inverse of <see cref="Encode(Guid)"/>).
        /// </summary>
        /// <param name="value">The base64 encoded string of a Guid</param>
        /// <returns>A new Guid</returns>
        public static Guid Decode(string value)
        {
            // Undo the URL-safe substitutions and restore the "==" padding.
            value = value
                .Replace("_", "/")
                .Replace("-", "+");

            byte[] buffer = Convert.FromBase64String(value + "==");
            return new Guid(buffer);
        }

        #endregion

        #region Operators

        /// <summary>
        /// Determines if both ShortGuids have the same underlying Guid value.
        /// </summary>
        public static bool operator ==(ShortGuid x, ShortGuid y)
        {
            // ShortGuid is a value type, so neither operand can be null;
            // the original's boxed null check was dead code and is removed.
            return x._guid == y._guid;
        }

        /// <summary>
        /// Determines if both ShortGuids do not have the same underlying Guid value.
        /// </summary>
        public static bool operator !=(ShortGuid x, ShortGuid y)
        {
            return !(x == y);
        }

        /// <summary>
        /// Implicitly converts the ShortGuid to its string equivalent.
        /// </summary>
        public static implicit operator string(ShortGuid shortGuid)
        {
            return shortGuid._value;
        }

        /// <summary>
        /// Implicitly converts the ShortGuid to its Guid equivalent.
        /// </summary>
        public static implicit operator Guid(ShortGuid shortGuid)
        {
            return shortGuid._guid;
        }

        /// <summary>
        /// Implicitly converts the string to a ShortGuid.
        /// </summary>
        public static implicit operator ShortGuid(string shortGuid)
        {
            return new ShortGuid(shortGuid);
        }

        /// <summary>
        /// Implicitly converts the Guid to a ShortGuid.
        /// </summary>
        public static implicit operator ShortGuid(Guid guid)
        {
            return new ShortGuid(guid);
        }

        #endregion
    }
}
#region Licence... /* The MIT License (MIT) Copyright (c) 2014 Oleg Shilo Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #endregion Licence... using System; using System.Collections.Generic; using System.Linq; using System.Xml.Linq; using IO = System.IO; using Path = System.IO.Path; namespace WixSharp { /// <summary> /// Automatically insert elements required for satisfy odd MSI restrictions. /// <para>- You must set KeyPath you install in the user profile.</para> /// <para>- You must use a registry key under HKCU as component's KeyPath, not a file. </para> /// <para>- The Component element cannot have multiple key path set. </para> /// <para>- The project must have at least one directory element. </para> /// <para>- All directories installed in the user profile must have corresponding RemoveDirectory /// elements. </para> /// <para>...</para> /// <para> /// The MSI always wants registry keys as the key paths for per-user components. 
/// It has to do with the way profiles work with advertised content in enterprise deployments.
/// The fact that you do not want to install any registry doesn't matter. MSI is the boss.
/// </para>
/// <para>The following link is a good example of the technique:
/// http://stackoverflow.com/questions/16119708/component-testcomp-installs-to-user-profile-it-must-use-a-registry-key-under-hk</para>
/// </summary>
public static class AutoElements
{
    /// <summary>
    /// The disable automatic insertion of <c>CreateFolder</c> element.
    /// Required for: NativeBootstrapper, EmbeddedMultipleActions, EmptyDirectories, InstallDir, Properties,
    /// ReleaseFolder, Shortcuts and WildCardFiles samples.
    /// <para>Can also be managed by disabling ICE validation via Light.exe command line arguments.</para>
    /// <para>
    /// This flag is a heavier alternative of DisableAutoKeyPath.
    /// See: http://stackoverflow.com/questions/10358989/wix-using-keypath-on-components-directories-files-registry-etc-etc
    /// for some background info.
    /// </para>
    /// </summary>
    public static bool DisableAutoCreateFolder = true;

    /// <summary>
    /// The disable automatic insertion of <c>KeyPath=yes</c> attribute for the Component element.
    /// Required for: NativeBootstrapper, EmbeddedMultipleActions, EmptyDirectories, InstallDir, Properties,
    /// ReleaseFolder, Shortcuts and WildCardFiles samples.
    /// <para>Can also be managed by disabling ICE validation via Light.exe command line arguments.</para>
    /// <para>
    /// This flag is a lighter alternative of DisableAutoCreateFolder.
    /// See: http://stackoverflow.com/questions/10358989/wix-using-keypath-on-components-directories-files-registry-etc-etc
    /// for some background info.
    /// </para>
    /// </summary>
    public static bool DisableAutoKeyPath = false;

    /// <summary>
    /// Disables automatic insertion of user profile registry elements.
    /// Required for: AllInOne, ConditionalInstallation, CustomAttributes, ReleaseFolder, Shortcuts,
    /// Shortcuts (advertised), Shortcuts-2, WildCardFiles samples.
    /// <para>Can also be managed by disabling ICE validation via Light.exe command line arguments.</para>
    /// </summary>
    public static bool DisableAutoUserProfileRegistry = false;

    // Adds a RemoveFolder element mirroring the directory, unless the directory is
    // the user-profile root itself.
    static void InsertRemoveFolder(XElement xDir, XElement xComponent, string when = "uninstall")
    {
        if (!xDir.IsUserProfileRoot())
            xComponent.Add(new XElement("RemoveFolder",
                               new XAttribute("Id", xDir.Attribute("Id").Value),
                               new XAttribute("On", when)));
    }

    // Same as above, but only when the parent Directory has no RemoveFolder yet.
    internal static XElement InsertUserProfileRemoveFolder(this XElement xComponent)
    {
        var xDir = xComponent.Parent("Directory");
        if (!xDir.Descendants("RemoveFolder").Any() && !xDir.IsUserProfileRoot())
            xComponent.Add(new XElement("RemoveFolder",
                               new XAttribute("Id", xDir.Attribute("Id").Value),
                               new XAttribute("On", "uninstall")));
        return xComponent;
    }

    static void InsertCreateFolder(XElement xComponent)
    {
        //"Empty Directories" sample demonstrates the need for CreateFolder
        if (!DisableAutoCreateFolder)
        {
            //prevent adding more than 1 CreateFolder elements - elements that don't specify @Directory
            if (xComponent.Elements("CreateFolder")
                          .All(element => element.HasAttribute("Directory")))
                xComponent.Add(new XElement("CreateFolder"));
        }

        if (!DisableAutoKeyPath)
        {
            //a component must have KeyPath set on itself or on a single (just one) nested element
            if (!xComponent.HasKeyPathElements())
                xComponent.SetAttribute("KeyPath=yes");
        }
    }

    // True if any descendant element already carries KeyPath="yes".
    internal static bool HasKeyPathElements(this XElement xComponent)
    {
        return xComponent.Descendants()
                         .Where(e => e.HasKeyPathSet())
                         .Any();
    }

    internal static XElement ClearKeyPath(this XElement element)
    {
        return element.SetAttribute("KeyPath", null);
    }

    internal static bool HasKeyPathSet(this XElement element)
    {
        var attr = element.Attribute("KeyPath");
        if (attr != null && attr.Value == "yes")
            return true;
        return false;
    }

    internal static XElement InsertUserProfileRegValue(this XElement xComponent)
    {
        //UserProfileRegValue has to be a KeyPath, so we need to remove any KeyPath on other elements
        // NOTE(review): 'keyPathes' is never read afterwards — ForEach is called purely
        // for its side effect of clearing KeyPath on every descendant.
        var keyPathes = xComponent.Descendants()
                                  .ForEach(e => e.ClearKeyPath());
        xComponent.ClearKeyPath();

        xComponent.Add(
            new XElement("RegistryKey",
                new XAttribute("Root", "HKCU"),
                new XAttribute("Key", @"Software\WixSharp\Used"),
                new XElement("RegistryValue",
                    new XAttribute("Value", "0"),
                    new XAttribute("Type", "string"),
                    new XAttribute("KeyPath", "yes"))));
        return xComponent;
    }

    static void InsertDummyUserProfileRegistry(XElement xComponent)
    {
        if (!DisableAutoUserProfileRegistry)
        {
            InsertUserProfileRegValue(xComponent);
        }
    }

    // Marks the element as (non-)KeyPath only when the attribute is not already present.
    static void SetFileKeyPath(XElement element, bool isKeyPath = true)
    {
        if (element.Attribute("KeyPath") == null)
            element.Add(new XAttribute("KeyPath", isKeyPath ? "yes" : "no"));
    }

    static bool ContainsDummyUserProfileRegistry(this XElement xComponent)
    {
        return (from e in xComponent.Elements("RegistryKey")
                where e.Attribute("Key") != null && e.Attribute("Key").Value == @"Software\WixSharp\Used"
                select e).Count() != 0;
    }

    static bool ContainsAnyRemoveFolder(this XElement xDir)
    {
        return (xDir.Descendants("RemoveFolder").Count() != 0);
    }

    static bool ContainsFiles(this XElement xComp)
    {
        return xComp.Elements("File").Count() != 0;
    }

    static bool ContainsComponents(this XElement xDir)
    {
        return xDir.Elements("Component").Any();
    }

    // Shortcuts with Advertise="yes".
    static bool ContainsAdvertisedShortcuts(this XElement xComp)
    {
        var advertisedShortcuts = from e in xComp.Descendants("Shortcut")
                                  where e.Attribute("Advertise") != null && e.Attribute("Advertise").Value == "yes"
                                  select e;

        return (advertisedShortcuts.Count() != 0);
    }

    // Shortcuts with Advertise absent or "no".
    static bool ContainsNonAdvertisedShortcuts(this XElement xComp)
    {
        var nonAdvertisedShortcuts = from e in xComp.Descendants("Shortcut")
                                     where e.Attribute("Advertise") == null || e.Attribute("Advertise").Value == "no"
                                     select e;

        return (nonAdvertisedShortcuts.Count() != 0);
    }

    // NOTE(review): method name is misspelled ("Crteate"); its definition continues
    // beyond this chunk of the file.
    static XElement CrteateComponentFor(this XDocument doc,
XElement xDir) { string compId = xDir.Attribute("Id").Value; XElement xComponent = xDir.AddElement( new XElement("Component", new XAttribute("Id", compId), new XAttribute("Guid", WixGuid.NewGuid(compId)))); foreach (XElement xFeature in doc.Root.Descendants("Feature")) xFeature.Add(new XElement("ComponentRef", new XAttribute("Id", xComponent.Attribute("Id").Value))); return xComponent; } private static string[] GetUserProfileFolders() { return new[] { "ProgramMenuFolder", "AppDataFolder", "LocalAppDataFolder", "TempFolder", "PersonalFolder", "DesktopFolder" }; } static bool InUserProfile(this XElement xDir) { string[] userProfileFolders = GetUserProfileFolders(); XElement xParentDir = xDir; do { if (xParentDir.Name == "Directory") { var attrName = xParentDir.Attribute("Name").Value; if (userProfileFolders.Contains(attrName)) return true; } xParentDir = xParentDir.Parent; } while (xParentDir != null); return false; } static bool IsUserProfileRoot(this XElement xDir) { string[] userProfileFolders = GetUserProfileFolders(); return userProfileFolders.Contains(xDir.Attribute("Name").Value); } internal static void InjectShortcutIcons(XDocument doc) { var shortcuts = from s in doc.Root.Descendants("Shortcut") where s.HasAttribute("Icon") select s; int iconIndex = 1; var icons = new Dictionary<string, string>(); foreach (var iconFile in (from s in shortcuts select s.Attribute("Icon").Value).Distinct()) { icons.Add(iconFile, "IconFile" + (iconIndex++) + "_" + IO.Path.GetFileName(iconFile).Expand()); } foreach (XElement shortcut in shortcuts) { string iconFile = shortcut.Attribute("Icon").Value; string iconId = icons[iconFile]; shortcut.Attribute("Icon").Value = iconId; } XElement product = doc.Root.Select("Product"); foreach (string file in icons.Keys) product.AddElement( new XElement("Icon", new XAttribute("Id", icons[file]), new XAttribute("SourceFile", file))); } static void InjectPlatformAttributes(XDocument doc) { var is64BitPlatform = 
doc.Root.Select("Product/Package").HasAttribute("Platform", val => val == "x64"); if (is64BitPlatform) doc.Descendants("Component") .ForEach(comp => comp.SetAttributeValue("Win64", "yes")); } static void ExpandCustomAttributes(XDocument doc) { foreach (XAttribute instructionAttr in doc.Root.Descendants().Select(x => x.Attribute("WixSharpCustomAttributes")).Where(x => x != null)) { XElement sourceElement = instructionAttr.Parent; foreach (string item in instructionAttr.Value.Split(';')) if (item.IsNotEmpty()) { if (!ExpandCustomAttribute(sourceElement, item)) throw new ApplicationException("Cannot resolve custom attribute definition:" + item); } instructionAttr.Remove(); } } static Func<XElement, string, bool> ExpandCustomAttribute = DefaultExpandCustomAttribute; static bool DefaultExpandCustomAttribute(XElement source, string item) { var attrParts = item.Split('='); var keyParts = attrParts.First().Split(':'); string element = keyParts.First(); string key = keyParts.Last(); string value = attrParts.Last(); if (element == "Component") { XElement destElement = source.Parent("Component"); if (destElement != null) { destElement.SetAttributeValue(key, value); return true; } } if (element == "Icon" && source.Name.LocalName == "Property") { source.Parent("Product") .SelectOrCreate("Icon") .SetAttributeValue(key, value); return true; } if (element == "Custom" && source.Name.LocalName == "CustomAction") { string id = source.Attribute("Id").Value; var elements = source.Document.Descendants("Custom").Where(e => e.Attribute("Action").Value == id); if (elements.Any()) { elements.ForEach(e => e.SetAttributeValue(key, value)); return true; } } if (key.StartsWith("xml_include")) { var parts = value.Split('|'); string parentName = parts[0]; string xmlFile = parts[1]; var placement = source; if (!parentName.IsEmpty()) placement = source.Parent(parentName); if (placement != null) { placement.Add(new XProcessingInstruction("include", xmlFile)); return true; } } return false; } 
        /// <summary>
        /// Main post-processing entry point: applies platform attributes, custom attribute expansion and
        /// shortcut icon injection, converts a rooted INSTALLDIR into an ABSOLUTEPATH custom action, and
        /// then patches every Directory/Component to satisfy the MSI per-user rules (RemoveFolder,
        /// CreateFolder/KeyPath and the dummy HKCU registry value).
        /// </summary>
        internal static void InjectAutoElementsHandler(XDocument doc)
        {
            InjectPlatformAttributes(doc);
            ExpandCustomAttributes(doc);
            InjectShortcutIcons(doc);

            XElement installDir = doc.Root.Select("Product").Element("Directory").Element("Directory");

            XAttribute installDirName = installDir.Attribute("Name");
            if (IO.Path.IsPathRooted(installDirName.Value))
            {
                // A rooted install dir cannot be expressed as a Directory Name; move the absolute path
                // into a custom action that assigns it to the directory property at run time.
                var product = installDir.Parent("Product");

                string absolutePath = installDirName.Value;

                installDirName.Value = "ABSOLUTEPATH";

                //<SetProperty> is an attractive approach but it doesn't allow conditional setting of 'ui' and 'execute' as required depending on UI level
                // it is either hard coded 'both' or hard coded 'ui' or 'execute'
                // <SetProperty Id="INSTALLDIR" Value="C:\My Company\MyProduct" Sequence="both" Before="AppSearch">

                product.Add(new XElement("CustomAction",
                                new XAttribute("Id", "Set_INSTALLDIR_AbsolutePath"),
                                new XAttribute("Property", installDir.Attribute("Id").Value),
                                new XAttribute("Value", absolutePath)));

                // Schedule the action in both sequences, guarded by UILevel so it runs exactly once.
                product.SelectOrCreate("InstallExecuteSequence").Add(
                    new XElement("Custom", "(NOT Installed) AND (UILevel < 5)",
                        new XAttribute("Action", "Set_INSTALLDIR_AbsolutePath"),
                        new XAttribute("Before", "CostFinalize")));

                product.SelectOrCreate("InstallUISequence").Add(
                    new XElement("Custom", "(NOT Installed) AND (UILevel = 5)",
                        new XAttribute("Action", "Set_INSTALLDIR_AbsolutePath"),
                        new XAttribute("Before", "CostFinalize")));
            }

            foreach (XElement xDir in doc.Root.Descendants("Directory").ToArray())
            {
                var dirComponents = xDir.Elements("Component");

                if (dirComponents.Any())
                {
                    var componentsWithNoFiles = dirComponents.Where(x => !x.ContainsFiles()).ToArray();

                    foreach (XElement item in componentsWithNoFiles)
                    {
                        // File-less components need CreateFolder/KeyPath help; the synthetic
                        // "*.EmptyDirectory" components instead need RemoveFolder so the dummy dir is removable.
                        if (!item.Attribute("Id").Value.EndsWith(".EmptyDirectory"))
                            InsertCreateFolder(item);
                        else if (!xDir.ContainsAnyRemoveFolder())
                            InsertRemoveFolder(xDir, item, "both"); //to keep WiX/compiler happy and allow removal of the dummy directory
                    }

                    foreach (XElement xComp in dirComponents)
                    {
                        if (xDir.InUserProfile())
                        {
                            if (!xDir.ContainsAnyRemoveFolder())
                                InsertRemoveFolder(xDir, xComp);

                            if (!xComp.ContainsDummyUserProfileRegistry())
                                InsertDummyUserProfileRegistry(xComp);
                        }
                        else
                        {
                            // Non-advertised shortcuts outside the profile still need the HKCU KeyPath trick.
                            if (xComp.ContainsNonAdvertisedShortcuts())
                                if (!xComp.ContainsDummyUserProfileRegistry())
                                    InsertDummyUserProfileRegistry(xComp);
                        }

                        foreach (XElement xFile in xComp.Elements("File"))
                            if (xFile.ContainsAdvertisedShortcuts() && !xComp.ContainsDummyUserProfileRegistry())
                                SetFileKeyPath(xFile);
                    }
                }

                // Empty per-user directories still need a component so MSI will create/remove them.
                if (!xDir.ContainsComponents() && xDir.InUserProfile())
                {
                    if (!xDir.IsUserProfileRoot())
                    {
                        XElement xComp1 = doc.CrteateComponentFor(xDir);
                        if (!xDir.ContainsAnyRemoveFolder())
                            InsertRemoveFolder(xDir, xComp1);

                        if (!xComp1.ContainsDummyUserProfileRegistry())
                            InsertDummyUserProfileRegistry(xComp1);
                    }
                }
            }
        }

        /// <summary>
        /// Rewrites every source-file attribute in the document either as a path relative to
        /// <paramref name="sourceBaseDir"/> (current directory when empty) or as an absolute path,
        /// depending on <paramref name="emitRelativePaths"/>.
        /// </summary>
        internal static void NormalizeFilePaths(XDocument doc, string sourceBaseDir, bool emitRelativePaths)
        {
            string rootDir = sourceBaseDir;
            if (rootDir.IsEmpty())
                rootDir = Environment.CurrentDirectory;

            rootDir = IO.Path.GetFullPath(rootDir);

            // Shared worker: normalizes the named attribute on every element that carries it.
            Action<IEnumerable<XElement>, string> normalize = (elements, attributeName) =>
                {
                    elements.Where(e => e.HasAttribute(attributeName))
                            .ForEach(e =>
                            {
                                var attr = e.Attribute(attributeName);

                                if (emitRelativePaths)
                                    attr.Value = Utils.MakeRelative(attr.Value, rootDir);
                                else
                                    attr.Value = Path.GetFullPath(attr.Value);
                            });
                };

            normalize(doc.Root.FindAll("Icon"), "SourceFile");
            normalize(doc.Root.FindAll("File"), "Source");
            normalize(doc.Root.FindAll("Merge"), "SourceFile");
            normalize(doc.Root.FindAll("Binary"), "SourceFile");
            normalize(doc.Root.FindAll("EmbeddedUI"), "SourceFile");
            normalize(doc.Root.FindAll("Payload"), "SourceFile");
            normalize(doc.Root.FindAll("MsiPackage"), "SourceFile");
            normalize(doc.Root.FindAll("ExePackage"), "SourceFile");
        }
    }
}
// ============================================================================
// FileName: RegistrarCore.cs
//
// Description:
// SIP Registrar that strives to be RFC 3261 compliant.
// (NOTE(review): the original header said "RFC3822" - presumed a typo for RFC 3261,
// the SIP specification; confirm against project history.)
//
// Author(s):
// Aaron Clauson
//
// History:
// 21 Jan 2006	Aaron Clauson	Created.
// 22 Nov 2007  Aaron Clauson   Fixed bug where binding refresh was generating a duplicate exception if the uac endpoint changed but the contact did not.
//
// License:
// This software is licensed under the BSD License http://www.opensource.org/licenses/bsd-license.php
//
// Copyright (c) 2006-2007 Aaron Clauson (aaronc@blueface.ie), Blue Face Ltd, Dublin, Ireland (www.blueface.ie)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of Blue Face Ltd.
// nor the names of its contributors may be used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
// BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
// OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
// ============================================================================

using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.Net;
using System.Net.Sockets;
using System.Runtime.Serialization;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Xml;
using System.Xml.Serialization;
using SIPSorcery.CRM;
using SIPSorcery.Persistence;
using SIPSorcery.SIP;
using SIPSorcery.SIP.App;
using SIPSorcery.Sys;
using log4net;

#if UNITTEST
using NUnit.Framework;
#endif

namespace SIPSorcery.Servers
{
    /// <summary>
    /// Outcome of processing a single REGISTER request (used for logging/metrics).
    /// </summary>
    public enum RegisterResultEnum
    {
        Unknown = 0,
        Trying = 1,
        Forbidden = 2,
        Authenticated = 3,
        AuthenticationRequired = 4,
        Failed = 5,
        Error = 6,
        RequestWithNoUser = 7,
        // NOTE(review): value 8 is unassigned in the original source; numbering preserved.
        RemoveAllRegistrations = 9,
        DuplicateRequest = 10,
        AuthenticatedFromCache = 11,
        RequestWithNoContact = 12,
        NonRegisterMethod = 13,
        DomainNotServiced = 14,
        IntervalTooBrief = 15,
        SwitchboardPaymentRequired = 16,
    }

    /// <summary>
    /// The registrar core is the class that actually does the work of receiving registration requests and populating and
    /// maintaining the SIP registrations list.
    ///
    /// From RFC 3261 Chapter "10.2 Constructing the REGISTER Request"
    /// - Request-URI: The Request-URI names the domain of the location service for which the registration is meant.
    /// - The To header field contains the address of record whose registration is to be created, queried, or modified.
    ///   The To header field and the Request-URI field typically differ, as the former contains a user name.
    ///
    /// [ed Therefore:
    /// - The Request-URI indicates the domain for the registration and should match the domain in the To address of record.
    /// - The To address of record contains the username of the user that is attempting to authenticate the request.]
    ///
    /// Method of operation:
    /// - New SIP messages received by the SIP Transport layer are queued before being sent to RegistrarCore for processing. For requests
    ///   or responses that match an existing REGISTER transaction the SIP Transport layer will handle the retransmit or drop the request if
    ///   it's already being processed.
    /// - Any non-REGISTER requests received by the RegistrarCore are responded to with not supported.
    /// - If a persistence layer is being used to store registered contacts there will generally be a number of threads running for the
    ///   persistence class. Of those threads there will be one that runs calling the SIPRegistrations.IdentifyDirtyContacts. This call identifies
    ///   expired contacts and initiates the sending of any keep alive and OPTIONs requests.
    /// </summary>
    public class RegistrarCore
    {
        // Requests beyond this queue depth are rejected with 480 Temporarily Unavailable.
        private const int MAX_REGISTER_QUEUE_SIZE = 1000;
        // Max idle wait (ms) of a worker thread before re-checking the queue.
        private const int MAX_PROCESS_REGISTER_SLEEP = 10000;
        private const string REGISTRAR_THREAD_NAME_PREFIX = "sipregistrar-core";

        private static ILog logger = AppState.GetLogger("sipregistrar");

        // Minimum allowed binding expiry (seconds); shorter requests get 423 Interval Too Brief.
        private int m_minimumBindingExpiry = SIPRegistrarBindingsManager.MINIMUM_EXPIRY_SECONDS;

        private SIPTransport m_sipTransport;
        private SIPRegistrarBindingsManager m_registrarBindingsManager;
        private SIPAssetGetDelegate<SIPAccount> GetSIPAccount_External;
        private GetCanonicalDomainDelegate GetCanonicalDomain_External;
        private SIPAuthenticateRequestDelegate SIPRequestAuthenticator_External;
        private SIPAssetPersistor<Customer> CustomerPersistor_External;

        private string m_serverAgent = SIPConstants.SIP_SERVER_STRING;
        private bool m_mangleUACContact = false;            // Whether or not to adjust contact URIs that contain private hosts to the value of the bottom via received socket.
        private bool m_strictRealmHandling = false;         // If true the registrar will only accept registration requests for domains it is configured for, otherwise any realm is accepted.
        private event SIPMonitorLogDelegate m_registrarLogEvent;
        private SIPUserAgentConfigurationManager m_userAgentConfigs;
        private Queue<SIPNonInviteTransaction> m_registerQueue = new Queue<SIPNonInviteTransaction>();
        // Signalled when a request is queued so worker threads can wake immediately.
        private AutoResetEvent m_registerARE = new AutoResetEvent(false);
        //private RSACryptoServiceProvider m_switchbboardRSAProvider; // If available this certificate can be used to sign switchboard tokens.
        private string m_switchboarduserAgentPrefix;

        // Event to allow a hook to get notifications about the processing time for registrations.
        // The boolean parameter is true if the request contained an authentication header.
        public event Action<double, bool> RegisterComplete;
        /// <summary>Number of REGISTER transactions currently waiting to be processed.</summary>
        public int BacklogLength
        {
            get { return m_registerQueue.Count; }
        }

        // When set true the worker threads exit their processing loops.
        public bool Stop;

        /// <summary>
        /// Wires up the registrar with its transport, bindings manager and the external delegates used for
        /// account lookup, domain canonicalisation and request authentication.
        /// </summary>
        public RegistrarCore(
            SIPTransport sipTransport,
            SIPRegistrarBindingsManager registrarBindingsManager,
            SIPAssetGetDelegate<SIPAccount> getSIPAccount,
            GetCanonicalDomainDelegate getCanonicalDomain,
            bool mangleUACContact,
            bool strictRealmHandling,
            SIPMonitorLogDelegate proxyLogDelegate,
            SIPUserAgentConfigurationManager userAgentConfigs,
            SIPAuthenticateRequestDelegate sipRequestAuthenticator,
            string switchboarduserAgentPrefix,
            SIPAssetPersistor<Customer> customerPersistor)
        {
            m_sipTransport = sipTransport;
            m_registrarBindingsManager = registrarBindingsManager;
            GetSIPAccount_External = getSIPAccount;
            GetCanonicalDomain_External = getCanonicalDomain;
            m_mangleUACContact = mangleUACContact;
            m_strictRealmHandling = strictRealmHandling;
            m_registrarLogEvent = proxyLogDelegate;
            m_userAgentConfigs = userAgentConfigs;
            SIPRequestAuthenticator_External = sipRequestAuthenticator;
            m_switchboarduserAgentPrefix = switchboarduserAgentPrefix;
            CustomerPersistor_External = customerPersistor;

            //try
            //{
            //    if (!switchboardCertificateName.IsNullOrBlank())
            //    {
            //        X509Certificate2 switchboardCertificate = AppState.LoadCertificate(StoreLocation.LocalMachine, switchboardCertificateName, false);
            //        m_switchbboardRSAProvider = (RSACryptoServiceProvider)switchboardCertificate.PrivateKey;
            //        logger.Debug("Switchboard RSA provider successfully loaded from " + switchboardCertificateName + " certificate.");
            //    }
            //}
            //catch (Exception excp)
            //{
            //    logger.Error("Exception loading switchboard certificate using " + switchboardCertificateName + ". " + excp.Message);
            //}
        }

        /// <summary>
        /// Starts the requested number of worker threads, each consuming REGISTER transactions from the queue.
        /// </summary>
        public void Start(int threadCount)
        {
            logger.Debug("SIPRegistrarCore thread started with " + threadCount + " threads.");

            for (int index = 1; index <= threadCount; index++)
            {
                // Captured in a local so each closure gets its own suffix value.
                string threadSuffix = index.ToString();
                ThreadPool.QueueUserWorkItem(delegate { ProcessRegisterRequest(REGISTRAR_THREAD_NAME_PREFIX + threadSuffix); });
            }
        }

        /// <summary>
        /// Entry point from the transport layer. Performs cheap syntactic validation (method, To header,
        /// To user, Contact header, minimum expiry) and either rejects immediately or queues the request
        /// as a non-INVITE transaction for the worker threads.
        /// </summary>
        public void AddRegisterRequest(SIPEndPoint localSIPEndPoint, SIPEndPoint remoteEndPoint, SIPRequest registerRequest)
        {
            try
            {
                if (registerRequest.Method != SIPMethodsEnum.REGISTER)
                {
                    SIPResponse notSupportedResponse = GetErrorResponse(registerRequest, SIPResponseStatusCodesEnum.MethodNotAllowed, "Registration requests only");
                    m_sipTransport.SendResponse(notSupportedResponse);
                }
                else
                {
                    SIPSorceryPerformanceMonitor.IncrementCounter(SIPSorceryPerformanceMonitor.REGISTRAR_REGISTRATION_REQUESTS_PER_SECOND);

                    int requestedExpiry = GetRequestedExpiry(registerRequest);

                    if (registerRequest.Header.To == null)
                    {
                        logger.Debug("Bad register request, no To header from " + remoteEndPoint + ".");
                        SIPResponse badReqResponse = SIPTransport.GetResponse(registerRequest, SIPResponseStatusCodesEnum.BadRequest, "Missing To header");
                        m_sipTransport.SendResponse(badReqResponse);
                    }
                    else if (registerRequest.Header.To.ToURI.User.IsNullOrBlank())
                    {
                        logger.Debug("Bad register request, no To user from " + remoteEndPoint + ".");
                        SIPResponse badReqResponse = SIPTransport.GetResponse(registerRequest, SIPResponseStatusCodesEnum.BadRequest, "Missing username on To header");
                        m_sipTransport.SendResponse(badReqResponse);
                    }
                    else if (registerRequest.Header.Contact == null || registerRequest.Header.Contact.Count == 0)
                    {
                        logger.Debug("Bad register request, no Contact header from " + remoteEndPoint + ".");
                        SIPResponse badReqResponse = SIPTransport.GetResponse(registerRequest, SIPResponseStatusCodesEnum.BadRequest, "Missing Contact header");
                        m_sipTransport.SendResponse(badReqResponse);
                    }
                    else if (requestedExpiry > 0 && requestedExpiry < m_minimumBindingExpiry)
                    {
                        // Expiry below the configured minimum: reject with 423 and advertise Min-Expires.
                        logger.Debug("Bad register request, no expiry of " + requestedExpiry + " to small from " + remoteEndPoint + ".");
                        SIPResponse tooFrequentResponse = GetErrorResponse(registerRequest, SIPResponseStatusCodesEnum.IntervalTooBrief, null);
                        tooFrequentResponse.Header.MinExpires = m_minimumBindingExpiry;
                        m_sipTransport.SendResponse(tooFrequentResponse);
                    }
                    else
                    {
                        if (m_registerQueue.Count < MAX_REGISTER_QUEUE_SIZE)
                        {
                            SIPNonInviteTransaction registrarTransaction = m_sipTransport.CreateNonInviteTransaction(registerRequest, remoteEndPoint, localSIPEndPoint, null);
                            lock (m_registerQueue)
                            {
                                m_registerQueue.Enqueue(registrarTransaction);
                            }
                            FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.BindingInProgress, "Register queued for " + registerRequest.Header.To.ToURI.ToString() + ".", null));
                        }
                        else
                        {
                            logger.Error("Register queue exceeded max queue size " + MAX_REGISTER_QUEUE_SIZE + ", overloaded response sent.");
                            SIPResponse overloadedResponse = SIPTransport.GetResponse(registerRequest, SIPResponseStatusCodesEnum.TemporarilyUnavailable, "Registrar overloaded, please try again shortly");
                            m_sipTransport.SendResponse(overloadedResponse);
                        }

                        // Wake a worker thread to pick up the queued (or rejected-by-overload) state.
                        m_registerARE.Set();
                    }
                }
            }
            catch (Exception excp)
            {
                logger.Error("Exception AddRegisterRequest (" + remoteEndPoint.ToString() + "). " + excp.Message);
            }
        }

        /// <summary>
        /// Worker-thread loop: dequeues REGISTER transactions, processes them via <see cref="Register"/>,
        /// reports timings and fires RegisterComplete. Blocks on the AutoResetEvent when the queue is empty.
        /// </summary>
        private void ProcessRegisterRequest(string threadName)
        {
            try
            {
                Thread.CurrentThread.Name = threadName;

                while (!Stop)
                {
                    if (m_registerQueue.Count > 0)
                    {
                        try
                        {
                            SIPNonInviteTransaction registrarTransaction = null;
                            lock (m_registerQueue)
                            {
                                registrarTransaction = m_registerQueue.Dequeue();
                            }

                            if (registrarTransaction != null)
                            {
                                DateTime startTime = DateTime.Now;
                                RegisterResultEnum result = Register(registrarTransaction);
                                TimeSpan duration = DateTime.Now.Subtract(startTime);
                                FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.RegistrarTiming, "register result=" + result.ToString() + ", time=" + duration.TotalMilliseconds + "ms, user=" + registrarTransaction.TransactionRequest.Header.To.ToURI.User + ".", null));

                                if (RegisterComplete != null)
                                {
                                    RegisterComplete(duration.TotalMilliseconds, registrarTransaction.TransactionRequest.Header.AuthenticationHeader != null);
                                }
                            }
                        }
                        catch (InvalidOperationException invalidOpExcp)
                        {
                            // This occurs when the queue is empty (another thread won the dequeue race).
                            logger.Warn("InvalidOperationException ProcessRegisterRequest Register Job. " + invalidOpExcp.Message);
                        }
                        catch (Exception regExcp)
                        {
                            logger.Error("Exception ProcessRegisterRequest Register Job. " + regExcp.Message);
                        }
                    }
                    else
                    {
                        m_registerARE.WaitOne(MAX_PROCESS_REGISTER_SLEEP);
                    }
                }

                logger.Warn("ProcessRegisterRequest thread " + Thread.CurrentThread.Name + " stopping.");
            }
            catch (Exception excp)
            {
                logger.Error("Exception ProcessRegisterRequest (" + Thread.CurrentThread.Name + "). " + excp.Message);
            }
        }

        /// <summary>
        /// Resolves the requested binding expiry: the first Contact header's expires when present,
        /// otherwise the request-level Expires header.
        /// </summary>
        private int GetRequestedExpiry(SIPRequest registerRequest)
        {
            int contactHeaderExpiry = (registerRequest.Header.Contact != null && registerRequest.Header.Contact.Count > 0) ? registerRequest.Header.Contact[0].Expires : -1;
            return (contactHeaderExpiry == -1) ? registerRequest.Header.Expires : contactHeaderExpiry;
        }

        /// <summary>
        /// Processes a single REGISTER transaction end-to-end: domain check, account lookup,
        /// authentication, binding update (or binding query for Contact-less requests) and the
        /// final response. Returns the outcome for logging/metrics.
        /// </summary>
        private RegisterResultEnum Register(SIPTransaction registerTransaction)
        {
            try
            {
                SIPRequest sipRequest = registerTransaction.TransactionRequest;
                SIPURI registerURI = sipRequest.URI;
                SIPToHeader toHeader = sipRequest.Header.To;
                string toUser = toHeader.ToURI.User;
                // With strict realm handling the To host must map to a serviced domain; otherwise any realm is accepted as-is.
                string canonicalDomain = (m_strictRealmHandling) ? GetCanonicalDomain_External(toHeader.ToURI.Host, true) : toHeader.ToURI.Host;
                int requestedExpiry = GetRequestedExpiry(sipRequest);

                if (canonicalDomain == null)
                {
                    FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.Warn, "Register request for " + toHeader.ToURI.Host + " rejected as no matching domain found.", null));
                    SIPResponse noDomainResponse = GetErrorResponse(sipRequest, SIPResponseStatusCodesEnum.Forbidden, "Domain not serviced");
                    registerTransaction.SendFinalResponse(noDomainResponse);
                    return RegisterResultEnum.DomainNotServiced;
                }

                SIPAccount sipAccount = GetSIPAccount_External(s => s.SIPUsername == toUser && s.SIPDomain == canonicalDomain);
                SIPRequestAuthenticationResult authenticationResult = SIPRequestAuthenticator_External(registerTransaction.LocalSIPEndPoint, registerTransaction.RemoteEndPoint, sipRequest, sipAccount, FireProxyLogEvent);

                if (!authenticationResult.Authenticated)
                {
                    // 401 Response with a fresh nonce needs to be sent.
                    SIPResponse authReqdResponse = SIPTransport.GetResponse(sipRequest, authenticationResult.ErrorResponse, null);
                    authReqdResponse.Header.AuthenticationHeader = authenticationResult.AuthenticationRequiredHeader;
                    registerTransaction.SendFinalResponse(authReqdResponse);

                    if (authenticationResult.ErrorResponse == SIPResponseStatusCodesEnum.Forbidden)
                    {
                        FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.Warn, "Forbidden " + toUser + "@" + canonicalDomain + " does not exist, " + sipRequest.Header.ProxyReceivedFrom + ", " + sipRequest.Header.UserAgent + ".", null));
                        return RegisterResultEnum.Forbidden;
                    }
                    else
                    {
                        FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.Registrar, "Authentication required for " + toUser + "@" + canonicalDomain + " from " + sipRequest.Header.ProxyReceivedFrom + ".", toUser));
                        return RegisterResultEnum.AuthenticationRequired;
                    }
                }
                else
                {
                    // Authenticated.
                    //if (!sipRequest.Header.UserAgent.IsNullOrBlank() && !m_switchboarduserAgentPrefix.IsNullOrBlank() && sipRequest.Header.UserAgent.StartsWith(m_switchboarduserAgentPrefix))
                    //{
                    //    // Check that the switchboard user is authorised.
                    //    var customer = CustomerPersistor_External.Get(x => x.CustomerUsername == sipAccount.Owner);
                    //    if (!(customer.ServiceLevel == CustomerServiceLevels.Switchboard.ToString() || customer.ServiceLevel == CustomerServiceLevels.Gold.ToString()))
                    //    {
                    //        FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.Warn, "Register request for switchboard from " + toHeader.ToURI.Host + " rejected as not correct service level.", sipAccount.Owner));
                    //        SIPResponse payReqdResponse = GetErrorResponse(sipRequest, SIPResponseStatusCodesEnum.PaymentRequired, "You need to purchase a Switchboard service");
                    //        registerTransaction.SendFinalResponse(payReqdResponse);
                    //        return RegisterResultEnum.SwitchboardPaymentRequired;
                    //    }
                    //}

                    if (sipRequest.Header.Contact == null || sipRequest.Header.Contact.Count == 0)
                    {
                        // No contacts header to update bindings with, return a list of the current bindings.
                        List<SIPRegistrarBinding> bindings = m_registrarBindingsManager.GetBindings(sipAccount.Id);
                        //List<SIPContactHeader> contactsList = m_registrarBindingsManager.GetContactHeader(); // registration.GetContactHeader(true, null);
                        if (bindings != null)
                        {
                            sipRequest.Header.Contact = GetContactHeader(bindings);
                        }

                        SIPResponse okResponse = GetOkResponse(sipRequest);
                        registerTransaction.SendFinalResponse(okResponse);
                        FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.RegisterSuccess, "Empty registration request successful for " + toUser + "@" + canonicalDomain + " from " + sipRequest.Header.ProxyReceivedFrom + ".", toUser));
                    }
                    else
                    {
                        // Prefer the proxy-reported source socket over the transport-level remote endpoint.
                        SIPEndPoint uacRemoteEndPoint = SIPEndPoint.TryParse(sipRequest.Header.ProxyReceivedFrom) ?? registerTransaction.RemoteEndPoint;
                        SIPEndPoint proxySIPEndPoint = SIPEndPoint.TryParse(sipRequest.Header.ProxyReceivedOn);
                        SIPEndPoint registrarEndPoint = registerTransaction.LocalSIPEndPoint;

                        SIPResponseStatusCodesEnum updateResult = SIPResponseStatusCodesEnum.Ok;
                        string updateMessage = null;

                        DateTime startTime = DateTime.Now;

                        List<SIPRegistrarBinding> bindingsList = m_registrarBindingsManager.UpdateBindings(
                            sipAccount,
                            proxySIPEndPoint,
                            uacRemoteEndPoint,
                            registrarEndPoint,
                            //sipRequest.Header.Contact[0].ContactURI.CopyOf(),
                            sipRequest.Header.Contact,
                            sipRequest.Header.CallId,
                            sipRequest.Header.CSeq,
                            //sipRequest.Header.Contact[0].Expires,
                            sipRequest.Header.Expires,
                            sipRequest.Header.UserAgent,
                            out updateResult,
                            out updateMessage);

                        //int bindingExpiry = GetBindingExpiry(bindingsList, sipRequest.Header.Contact[0].ContactURI.ToString());
                        TimeSpan duration = DateTime.Now.Subtract(startTime);
                        FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.RegistrarTiming, "Binding update time for " + toUser + "@" + canonicalDomain + " took " + duration.TotalMilliseconds + "ms.", null));

                        if (updateResult == SIPResponseStatusCodesEnum.Ok)
                        {
                            string proxySocketStr = (proxySIPEndPoint != null) ? " (proxy=" + proxySIPEndPoint.ToString() + ")" : null;

                            int bindingCount = 1;
                            foreach (SIPRegistrarBinding binding in bindingsList)
                            {
                                string bindingIndex = (bindingsList.Count == 1) ? String.Empty : " (" + bindingCount + ")";
                                //FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.RegisterSuccess, "Registration successful for " + toUser + "@" + canonicalDomain + " from " + uacRemoteEndPoint + proxySocketStr + ", binding " + binding.ContactSIPURI.ToParameterlessString() + ";expiry=" + binding.Expiry + bindingIndex + ".", toUser));
                                FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.RegisterSuccess, "Registration successful for " + toUser + "@" + canonicalDomain + " from " + uacRemoteEndPoint + ", binding " + binding.ContactSIPURI.ToParameterlessString() + ";expiry=" + binding.Expiry + bindingIndex + ".", toUser));
                                //FireProxyLogEvent(new SIPMonitorMachineEvent(SIPMonitorMachineEventTypesEnum.SIPRegistrarBindingUpdate, toUser, uacRemoteEndPoint, sipAccount.Id.ToString()));
                                bindingCount++;
                            }

                            // The standard states that the Ok response should contain the list of current bindings but that breaks some UAs. As a
                            // compromise the list is returned with the Contact that UAC sent as the first one in the list.
                            bool contactListSupported = m_userAgentConfigs.GetUserAgentContactListSupport(sipRequest.Header.UserAgent);
                            if (contactListSupported)
                            {
                                sipRequest.Header.Contact = GetContactHeader(bindingsList);
                            }
                            else
                            {
                                // Some user agents can't match the contact header if the expiry is added to it.
                                sipRequest.Header.Contact[0].Expires = GetBindingExpiry(bindingsList, sipRequest.Header.Contact[0].ContactURI.ToString()); ;
                            }

                            SIPResponse okResponse = GetOkResponse(sipRequest);

                            // If a request was made for a switchboard token and a certificate is available to sign the tokens then generate it.
                            //if (sipRequest.Header.SwitchboardTokenRequest > 0 && m_switchbboardRSAProvider != null)
                            //{
                            //    SwitchboardToken token = new SwitchboardToken(sipRequest.Header.SwitchboardTokenRequest, sipAccount.Owner, uacRemoteEndPoint.Address.ToString());
                            //    lock (m_switchbboardRSAProvider)
                            //    {
                            //        token.SignedHash = Convert.ToBase64String(m_switchbboardRSAProvider.SignHash(Crypto.GetSHAHash(token.GetHashString()), null));
                            //    }
                            //    string tokenXML = token.ToXML(true);
                            //    logger.Debug("Switchboard token set for " + sipAccount.Owner + " with expiry of " + token.Expiry + "s.");
                            //    okResponse.Header.SwitchboardToken = Crypto.SymmetricEncrypt(sipAccount.SIPPassword, sipRequest.Header.AuthenticationHeader.SIPDigest.Nonce, tokenXML);
                            //}

                            registerTransaction.SendFinalResponse(okResponse);
                        }
                        else
                        {
                            // The binding update failed even though the REGISTER request was authorised. This is probably due to a
                            // temporary problem connecting to the bindings data store. Send Ok but set the binding expiry to the minimum so
                            // that the UA will try again as soon as possible.
                            FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.Error, "Registration request successful but binding update failed for " + toUser + "@" + canonicalDomain + " from " + registerTransaction.RemoteEndPoint + ".", toUser));
                            sipRequest.Header.Contact[0].Expires = m_minimumBindingExpiry;
                            SIPResponse okResponse = GetOkResponse(sipRequest);
                            registerTransaction.SendFinalResponse(okResponse);
                        }
                    }

                    return RegisterResultEnum.Authenticated;
                }
            }
            catch (Exception excp)
            {
                string regErrorMessage = "Exception registrarcore registering. " + excp.Message + "\r\n" + registerTransaction.TransactionRequest.ToString();
                logger.Error(regErrorMessage);
                FireProxyLogEvent(new SIPMonitorConsoleEvent(SIPMonitorServerTypesEnum.Registrar, SIPMonitorEventTypesEnum.Error, regErrorMessage, null));

                try
                {
                    SIPResponse errorResponse = GetErrorResponse(registerTransaction.TransactionRequest, SIPResponseStatusCodesEnum.InternalServerError, null);
                    registerTransaction.SendFinalResponse(errorResponse);
                }
                catch { }

                return RegisterResultEnum.Error;
            }
        }

        /// <summary>
        /// Looks up the expiry of the binding whose contact URI matches <paramref name="bindingURI"/>;
        /// returns -1 when not found or the list is empty.
        /// </summary>
        private int GetBindingExpiry(List<SIPRegistrarBinding> bindings, string bindingURI)
        {
            if (bindings == null || bindings.Count == 0)
            {
                return -1;
            }
            else
            {
                foreach (SIPRegistrarBinding binding in bindings)
                {
                    if (binding.ContactURI == bindingURI)
                    {
                        return binding.Expiry;
                    }
                }
                return -1;
            }
        }

        /// <summary>
        /// Gets a SIP contact header for this address-of-record based on the bindings list.
        /// Each contact's Expires is recomputed from the binding's remaining lifetime (UTC);
        /// returns null for an empty/absent bindings list.
        /// </summary>
        /// <returns></returns>
        private List<SIPContactHeader> GetContactHeader(List<SIPRegistrarBinding> bindings)
        {
            if (bindings != null && bindings.Count > 0)
            {
                List<SIPContactHeader> contactHeaderList = new List<SIPContactHeader>();

                foreach (SIPRegistrarBinding binding in bindings)
                {
                    SIPContactHeader bindingContact = new SIPContactHeader(null, binding.ContactSIPURI);
                    bindingContact.Expires = Convert.ToInt32(binding.ExpiryTime.Subtract(DateTime.UtcNow).TotalSeconds % Int32.MaxValue);
                    contactHeaderList.Add(bindingContact);
                }
                return contactHeaderList;
            }
            else
            {
                return null;
            }
        }

        private SIPResponse GetOkResponse(SIPRequest sipRequest)
        {
            try
            {
                SIPResponse okResponse = SIPTransport.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Ok, null);
                SIPHeader requestHeader = sipRequest.Header;
                okResponse.Header = new SIPHeader(requestHeader.Contact, requestHeader.From, requestHeader.To, requestHeader.CSeq, requestHeader.CallId);

                // RFC3261 has a To Tag on the example in section "24.1 Registration".
if (okResponse.Header.To.ToTag == null || okResponse.Header.To.ToTag.Trim().Length == 0) { okResponse.Header.To.ToTag = CallProperties.CreateNewTag(); } okResponse.Header.CSeqMethod = requestHeader.CSeqMethod; okResponse.Header.Vias = requestHeader.Vias; okResponse.Header.Server = m_serverAgent; okResponse.Header.MaxForwards = Int32.MinValue; okResponse.Header.SetDateHeader(); return okResponse; } catch (Exception excp) { logger.Error("Exception GetOkResponse. " + excp.Message); throw excp; } } private SIPResponse GetAuthReqdResponse(SIPRequest sipRequest, string nonce, string realm) { try { SIPResponse authReqdResponse = SIPTransport.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Unauthorised, null); SIPAuthenticationHeader authHeader = new SIPAuthenticationHeader(SIPAuthorisationHeadersEnum.WWWAuthenticate, realm, nonce); SIPHeader requestHeader = sipRequest.Header; SIPHeader unauthHeader = new SIPHeader(requestHeader.Contact, requestHeader.From, requestHeader.To, requestHeader.CSeq, requestHeader.CallId); if (unauthHeader.To.ToTag == null || unauthHeader.To.ToTag.Trim().Length == 0) { unauthHeader.To.ToTag = CallProperties.CreateNewTag(); } unauthHeader.CSeqMethod = requestHeader.CSeqMethod; unauthHeader.Vias = requestHeader.Vias; unauthHeader.AuthenticationHeader = authHeader; unauthHeader.Server = m_serverAgent; unauthHeader.MaxForwards = Int32.MinValue; authReqdResponse.Header = unauthHeader; return authReqdResponse; } catch (Exception excp) { logger.Error("Exception GetAuthReqdResponse. 
" + excp.Message); throw excp; } } private SIPResponse GetErrorResponse(SIPRequest sipRequest, SIPResponseStatusCodesEnum errorResponseCode, string errorMessage) { try { SIPResponse errorResponse = SIPTransport.GetResponse(sipRequest, errorResponseCode, null); if (errorMessage != null) { errorResponse.ReasonPhrase = errorMessage; } SIPHeader requestHeader = sipRequest.Header; SIPHeader errorHeader = new SIPHeader(requestHeader.Contact, requestHeader.From, requestHeader.To, requestHeader.CSeq, requestHeader.CallId); if (errorHeader.To.ToTag == null || errorHeader.To.ToTag.Trim().Length == 0) { errorHeader.To.ToTag = CallProperties.CreateNewTag(); } errorHeader.CSeqMethod = requestHeader.CSeqMethod; errorHeader.Vias = requestHeader.Vias; errorHeader.Server = m_serverAgent; errorHeader.MaxForwards = Int32.MinValue; errorResponse.Header = errorHeader; return errorResponse; } catch (Exception excp) { logger.Error("Exception GetErrorResponse. " + excp.Message); throw excp; } } private void FireProxyLogEvent(SIPMonitorEvent monitorEvent) { if (m_registrarLogEvent != null) { try { m_registrarLogEvent(monitorEvent); } catch (Exception excp) { logger.Error("Exception FireProxyLogEvent RegistrarCore. " + excp.Message); } } } } }
using System;
using System.Collections.Generic;
using System.Windows.Forms;
using Bloom.Book;
using Bloom.TeamCollection;
using Bloom.Utils;

namespace Bloom
{
	// Marker interface for Autofac registration, because we don't know how to select
	// by the open generic Event<> directly.
	public interface IEvent {}

	/// <summary>
	/// A minimal typed publish/subscribe event. Subscribers register an <see cref="Action{TPayload}"/>
	/// and <see cref="Raise"/> invokes each of them in subscription order. Every raise is written to
	/// the minor-event log; raises of Major-level events are additionally wrapped in a global
	/// performance measurement (when one is active).
	/// </summary>
	public class Event<TPayload> : IEvent
	{
		// Name used for log entries and performance-measurement labels.
		private readonly string _nameForLogging;

		protected enum LoggingLevel { Minor, Major };

		private LoggingLevel _loggingLevel;

		protected Event(string nameForLogging, LoggingLevel loggingLevel)
		{
			_nameForLogging = nameForLogging;
			_loggingLevel = loggingLevel;
		}

		private readonly List<Action<TPayload>> _subscribers = new List<Action<TPayload>>();

		/// <summary>
		/// Registers <paramref name="action"/> to be called on each <see cref="Raise"/>.
		/// Duplicate registrations of the same delegate are ignored.
		/// </summary>
		public void Subscribe(Action<TPayload> action)
		{
			if (!_subscribers.Contains(action))
			{
				_subscribers.Add(action);
			}
		}

		/// <summary>
		/// Invokes every subscriber with <paramref name="descriptor"/>, logging the event name first.
		/// </summary>
		public void Raise(TPayload descriptor)
		{
			SIL.Reporting.Logger.WriteMinorEvent("Event: " + _nameForLogging);
			// MeasureMaybe only measures when this event's level is Major; Global may be null.
			using (PerformanceMeasurement.Global?.MeasureMaybe(_loggingLevel == LoggingLevel.Major, _nameForLogging))
			{
				foreach (Action<TPayload> subscriber in _subscribers)
				{
					// The loop variable is already an Action<TPayload>; the old redundant cast was removed.
					subscriber(descriptor);
				}
			}
		}

		/// <summary>True if at least one subscriber is registered.</summary>
		public bool HasSubscribers
		{
			get { return _subscribers.Count > 0; }
		}
	}

	/// <summary>Payload describing a tab change: which control we are leaving and which we are entering.</summary>
	public class TabChangedDetails
	{
		public Control From;
		public Control To;
	}

	/// <summary>
	/// Raised before the tab actually changes.
	/// </summary>
	public class SelectedTabAboutToChangeEvent : Event<TabChangedDetails>
	{
		public SelectedTabAboutToChangeEvent()
			: base("SelectedTabAboutToChangeEvent", LoggingLevel.Minor)
		{
		}
	}

	/// <summary>
	/// Gives the first control in the tab.
	/// </summary>
	public class SelectedTabChangedEvent : Event<TabChangedDetails>
	{
		public SelectedTabChangedEvent()
			: base("SelectedTabChangedEvent", LoggingLevel.Major)
		{
		}
	}

	public class CreateFromSourceBookCommand : Event<Book.Book>
	{
		public CreateFromSourceBookCommand()
			: base("CreateFromSourceBookCommand", LoggingLevel.Major)
		{
		}
	}

	/// <summary>
	/// Raised when the user is quitting or changing to another library.
	/// </summary>
	public class LibraryClosing : Event<object>
	{
		public LibraryClosing()
			: base("LibraryClosing", LoggingLevel.Major)
		{
		}
	}

	public class EditBookCommand : Event<Book.Book>
	{
		public EditBookCommand()
			: base("EditBookCommand", LoggingLevel.Major)
		{
		}
	}

	public class SendReceiveCommand : Event<object>
	{
		public SendReceiveCommand()
			: base("SendReceiveCommand", LoggingLevel.Major)
		{
		}
	}

	// public class BookCollectionChangedEvent : Event<BookCollection>
	// { }

	public class PageListChangedEvent : Event<object>
	{
		public PageListChangedEvent()
			: base("PageListChangedEvent", LoggingLevel.Minor)
		{
		}
	}

	/// <summary>
	/// This is used to purge the BloomServer cache, to solve the problem of "My Book/image3" (for example)
	/// leading to a picture from the previous book we worked on, back when *it* was named simply "My Book".
	/// The pair here is the from and to paths.
	/// </summary>
	public class BookRenamedEvent : Event<KeyValuePair<string, string>>
	{
		public BookRenamedEvent()
			: base("BookRenamedEvent", LoggingLevel.Major)
		{
		}
	}

	public class BookDownloadStartingEvent : Event<object>
	{
		public BookDownloadStartingEvent()
			: base("BookDownloadStartingEvent", LoggingLevel.Major)
		{
		}
	}

	/// <summary>
	/// Anything displaying the book should re-load it.
	/// </summary>
	public class BookRefreshEvent : Event<Book.Book>
	{
		public BookRefreshEvent()
			: base("BookRefreshEvent", LoggingLevel.Minor)
		{
		}
	}

	/// <summary>
	/// The Accessibility Checker uses this... not exactly semantic, but it does give us the hook at the right time.
	/// </summary>
	public class BookSavedEvent : Event<Book.Book>
	{
		public BookSavedEvent()
			: base("BookSavedEvent", LoggingLevel.Minor)
		{
		}
	}

	/// <summary>
	/// Anything displaying a book should re-load the current page.
	/// </summary>
	public class PageRefreshEvent : Event<PageRefreshEvent.SaveBehavior>
	{
		// The payload tells the handler whether to save pending edits before redisplaying.
		public enum SaveBehavior
		{
			SaveBeforeRefresh,
			JustRedisplay
		}

		public PageRefreshEvent()
			: base("PageRefreshEvent", LoggingLevel.Minor)
		{
		}
	}

	/// <summary>Payload for a page-relocation request; handlers may set Cancel to veto the move.</summary>
	public class RelocatePageInfo
	{
		public IPage Page;
		public int IndexOfPageAfterMove;
		public bool Cancel;

		public RelocatePageInfo(IPage page, int indexOfPageAfterMove)
		{
			Page = page;
			IndexOfPageAfterMove = indexOfPageAfterMove;
		}
	}

	public class RelocatePageEvent : Event<RelocatePageInfo>
	{
		public RelocatePageEvent()
			: base("RelocatePageEvent", LoggingLevel.Minor)
		{
		}
	}

	/// <summary>
	/// It's tricky to change the collection folder while a book is open,
	/// so we just queue it and have the project do the rename when we close/reopen.
	/// </summary>
	public class QueueRenameOfCollection : Event<string>
	{
		public QueueRenameOfCollection()
			: base("QueueRenameOfCollection", LoggingLevel.Major)
		{
		}
	}

	/// <summary>
	/// Fired when it's possible that strings should update from the localization manager.
	/// </summary>
	public class LocalizationChangedEvent : Event<object>
	{
		public LocalizationChangedEvent()
			: base("LocalizationChangedEvent", LoggingLevel.Major)
		{
		}
	}

	public class ControlKeyEvent : Event<object>
	{
		// NOTE(review): this readonly field is never assigned anywhere in this file, so it is
		// always the default (0). Confirm whether a caller elsewhere was meant to populate it.
		public readonly Keys Keys;

		public ControlKeyEvent()
			: base("ControlKeyEvent", LoggingLevel.Minor)
		{
		}
	}

	// An event that signals that the status of a book in a Team Collection has changed.
	// This could be that it has been checked in or out (here or elsewhere), or some
	// other remote change like a modification to the book itself (checksum changed).
	public class BookStatusChangeEvent : Event<BookStatusChangeEventArgs>
	{
		public BookStatusChangeEvent()
			: base("TeamCollectionBookStatusChange", LoggingLevel.Minor)
		{
		}
	}
}
/*
Copyright (c) Microsoft Corporation

All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.

See the Apache Version 2.0 License for specific language governing permissions and limitations under the License.
*/

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Reflection;
using System.Diagnostics;
using Microsoft.Win32.SafeHandles;
using System.Runtime.InteropServices;
using Microsoft.Research.DryadLinq;

namespace Microsoft.Research.DryadLinq.Internal
{
    /// <summary>
    /// Adapts an arbitrary managed <see cref="Stream"/> to the NativeBlockStream
    /// interface, which deals in unmanaged (HGlobal) data blocks. Each read/write
    /// copies between a managed byte[] and the unmanaged block.
    /// </summary>
    internal unsafe class DryadLinqBlockStream : NativeBlockStream, IDisposable
    {
        // 1 MB block size used for both reads and writes.
        private const int DefaultBuffSize = 8192 * 128;

        private Stream m_stream;

        internal DryadLinqBlockStream(Stream stream)
        {
            this.m_stream = stream;
        }

        // Length of the underlying stream, in bytes.
        internal override Int64 GetTotalLength()
        {
            return this.m_stream.Length;
        }

        /// <summary>
        /// Reads up to DefaultBuffSize bytes from the stream into a freshly allocated
        /// unmanaged block. BlockSize is whatever Stream.Read returned (0 at EOF).
        /// The caller owns the block and must release it via ReleaseDataBlock.
        /// </summary>
        internal override unsafe DataBlockInfo ReadDataBlock()
        {
            DataBlockInfo blockInfo;
            blockInfo.DataBlock = (byte*)Marshal.AllocHGlobal(DefaultBuffSize);
            // ItemHandle doubles as the handle later passed to ReleaseDataBlock.
            blockInfo.ItemHandle = (IntPtr)blockInfo.DataBlock;
            // Read into a managed buffer first, then copy into the unmanaged block.
            byte[] buffer = new byte[DefaultBuffSize];
            blockInfo.BlockSize = this.m_stream.Read(buffer, 0, DefaultBuffSize);
            fixed (byte* pBuffer = buffer)
            {
                DryadLinqUtil.memcpy(pBuffer, blockInfo.DataBlock, blockInfo.BlockSize);
            }
            return blockInfo;
        }

        /// <summary>
        /// Writes numBytesToWrite bytes from the unmanaged block at itemHandle to the
        /// stream (via an intermediate managed copy). Always returns true.
        /// </summary>
        internal override unsafe bool WriteDataBlock(IntPtr itemHandle, Int32 numBytesToWrite)
        {
            byte* dataBlock = (byte*)itemHandle;
            byte[] buffer = new byte[numBytesToWrite];
            fixed (byte* pBuffer = buffer)
            {
                DryadLinqUtil.memcpy(dataBlock, pBuffer, numBytesToWrite);
            }
            this.m_stream.Write(buffer, 0, numBytesToWrite);
            return true;
        }

        // Allocates an unmanaged block of the requested size; caller releases it.
        internal override unsafe DataBlockInfo AllocateDataBlock(Int32 size)
        {
            DataBlockInfo blockInfo;
            blockInfo.ItemHandle = Marshal.AllocHGlobal((IntPtr)size);
            blockInfo.DataBlock = (byte*)blockInfo.ItemHandle;
            blockInfo.BlockSize = size;
            return blockInfo;
        }

        // Frees a block previously produced by ReadDataBlock/AllocateDataBlock.
        internal override unsafe void ReleaseDataBlock(IntPtr itemHandle)
        {
            if (itemHandle != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(itemHandle);
            }
        }

        internal override void Flush()
        {
            this.m_stream.Flush();
        }

        internal override void Close()
        {
            this.m_stream.Close();
        }

        public void Dispose()
        {
            this.m_stream.Dispose();
        }
    }

    // This class directly talks to NTFS files.
    /// <summary>
    /// NativeBlockStream over a local file. Uncompressed data is read/written
    /// through the Win32 ReadFile/WriteFile APIs on the file's SafeFileHandle;
    /// Gzip-compressed data goes through a GZipStream wrapped over the FileStream.
    /// </summary>
    internal unsafe class DryadLinqFileBlockStream : NativeBlockStream
    {
        private const int DefaultBuffSize = 8192*128;
        // Kept for the (currently commented-out) unbuffered-I/O experiment in Initialize.
        private const int FILE_FLAG_NO_BUFFERING = 0x20000000;

        private FileStream m_fstream;
        private SafeFileHandle m_fhandle;
        private CompressionScheme m_compressionScheme;
        private bool m_isClosed;
        // Lazily created on first read/write when the scheme is not None.
        private Stream m_compressStream;

        internal DryadLinqFileBlockStream(FileStream fstream, CompressionScheme scheme)
        {
            this.m_fstream = fstream;
            this.m_fhandle = fstream.SafeFileHandle;
            this.m_compressionScheme = scheme;
            this.m_isClosed = false;
            this.m_compressStream = null;
        }

        // Opens the file and captures its handle; wraps any failure in a DryadLinqException.
        private void Initialize(string filePath, FileMode mode, FileAccess access, CompressionScheme scheme)
        {
            try
            {
                FileOptions options = FileOptions.None;
                if (access == FileAccess.Read)
                {
                    options |= FileOptions.SequentialScan;
                    // options |= (FileOptions)FILE_FLAG_NO_BUFFERING;
                }
                else
                {
                    // options |= FileOptions.WriteThrough;
                }
                // options |= FileOptions.Asynchronous;

                this.m_fstream = new FileStream(filePath, mode, access, FileShare.Read, DefaultBuffSize, options);
            }
            catch (Exception e)
            {
                throw new DryadLinqException(DryadLinqErrorCode.CannotAccesFilePath,
                                             String.Format(SR.CannotAccesFilePath , filePath), e);
            }
            this.m_fhandle = m_fstream.SafeFileHandle;
            this.m_isClosed = false;
            this.m_compressionScheme = scheme;
            this.m_compressStream = null;
        }

        internal DryadLinqFileBlockStream(string filePath, FileAccess access, CompressionScheme scheme)
        {
            // Readers require an existing file; writers may create one.
            FileMode mode = (access == FileAccess.Read) ? FileMode.Open : FileMode.OpenOrCreate;
            this.Initialize(filePath, mode, access, scheme);
        }

        internal DryadLinqFileBlockStream(string filePath, FileAccess access)
            : this(filePath, access, CompressionScheme.None)
        {
        }

        internal DryadLinqFileBlockStream(string filePath, FileMode mode, FileAccess access, CompressionScheme scheme)
        {
            this.Initialize(filePath, mode, access, scheme);
        }

        internal DryadLinqFileBlockStream(string filePath, FileMode mode, FileAccess access)
            : this(filePath, mode, access, CompressionScheme.None)
        {
        }

        // File size in bytes via the native GetFileSizeEx call.
        internal override unsafe Int64 GetTotalLength()
        {
            Int64 totalLen;
            bool success = DryadLinqNative.GetFileSizeEx(this.m_fhandle, out totalLen);
            if (!success)
            {
                throw new DryadLinqException(DryadLinqErrorCode.GetFileSizeError,
                                             String.Format(SR.GetFileSizeError, Marshal.GetLastWin32Error()));
            }
            return totalLen;
        }

        /// <summary>
        /// Reads the next block. Uncompressed: native ReadFile writes straight into the
        /// unmanaged block. Compressed: decompress into a managed buffer, then copy.
        /// </summary>
        internal override unsafe DataBlockInfo ReadDataBlock()
        {
            DataBlockInfo blockInfo;
            blockInfo.DataBlock = (byte*)Marshal.AllocHGlobal(DefaultBuffSize);
            blockInfo.ItemHandle = (IntPtr)blockInfo.DataBlock;
            if (this.m_compressionScheme == CompressionScheme.None)
            {
                // ReadFile reports the byte count directly into blockInfo.BlockSize.
                Int32* pBlockSize = &blockInfo.BlockSize;
                bool success = DryadLinqNative.ReadFile(this.m_fhandle,
                                                        blockInfo.DataBlock,
                                                        DefaultBuffSize,
                                                        (IntPtr)pBlockSize,
                                                        null);
                if (!success)
                {
                    throw new DryadLinqException(DryadLinqErrorCode.ReadFileError,
                                                 String.Format(SR.ReadFileError, Marshal.GetLastWin32Error()));
                }
            }
            else
            {
                if (this.m_compressStream == null)
                {
                    if (this.m_compressionScheme == CompressionScheme.Gzip)
                    {
                        this.m_compressStream = new GZipStream(this.m_fstream,
                                                               CompressionMode.Decompress);
                    }
                    else
                    {
                        throw new DryadLinqException(DryadLinqErrorCode.UnknownCompressionScheme,
                                                     SR.UnknownCompressionScheme);
                    }
                }
                // YY: Made an extra copy here. Could do better.
                byte[] buffer = new byte[DefaultBuffSize];
                blockInfo.BlockSize = this.m_compressStream.Read(buffer, 0, DefaultBuffSize);
                fixed (byte* pBuffer = buffer)
                {
                    DryadLinqUtil.memcpy(pBuffer, blockInfo.DataBlock, blockInfo.BlockSize);
                }
            }
            return blockInfo;
        }

        /// <summary>
        /// Writes numBytesToWrite bytes from the unmanaged block. Uncompressed: loops
        /// native WriteFile until all bytes are written (partial writes advance the
        /// pointer). Compressed: copy to a managed buffer and write via the GZipStream.
        /// </summary>
        internal override unsafe bool WriteDataBlock(IntPtr itemHandle, Int32 numBytesToWrite)
        {
            byte* dataBlock = (byte*)itemHandle;
            if (this.m_compressionScheme == CompressionScheme.None)
            {
                Int32 numBytesWritten = 0;
                Int32 remainingBytes = numBytesToWrite;
                while (remainingBytes > 0)
                {
                    Int32* pNumBytesWritten = &numBytesWritten;
                    bool success = DryadLinqNative.WriteFile(this.m_fhandle,
                                                             dataBlock,
                                                             (UInt32)remainingBytes,
                                                             (IntPtr)pNumBytesWritten,
                                                             null);
                    if (!success)
                    {
                        throw new DryadLinqException(DryadLinqErrorCode.WriteFileError,
                                                     String.Format(SR.WriteFileError, Marshal.GetLastWin32Error()));
                    }
                    dataBlock += numBytesWritten;
                    remainingBytes -= numBytesWritten;
                }
            }
            else
            {
                if (this.m_compressStream == null)
                {
                    if (this.m_compressionScheme == CompressionScheme.Gzip)
                    {
                        this.m_compressStream = new GZipStream(this.m_fstream,
                                                               CompressionMode.Compress);
                    }
                    else
                    {
                        throw new DryadLinqException(DryadLinqErrorCode.UnknownCompressionScheme,
                                                     SR.UnknownCompressionScheme);
                    }
                }
                // YY: Made an extra copy here. Could do better.
                byte[] buffer = new byte[numBytesToWrite];
                fixed (byte* pBuffer = buffer)
                {
                    DryadLinqUtil.memcpy(dataBlock, pBuffer, numBytesToWrite);
                }
                this.m_compressStream.Write(buffer, 0, numBytesToWrite);
            }
            return true;
        }

        internal override void Flush()
        {
            if (this.m_compressStream != null)
            {
                this.m_compressStream.Flush();
            }
            this.m_fstream.Flush();
        }

        // Idempotent: closing twice is a no-op. Closing the GZipStream also
        // finishes the compressed trailer before the file is closed.
        internal override void Close()
        {
            if (!this.m_isClosed)
            {
                this.m_isClosed = true;
                if (this.m_compressStream != null)
                {
                    this.m_compressStream.Close();
                }
                this.m_fstream.Close();
            }
        }

        internal override unsafe DataBlockInfo AllocateDataBlock(Int32 size)
        {
            DataBlockInfo blockInfo;
            blockInfo.ItemHandle = Marshal.AllocHGlobal((IntPtr)size);
            blockInfo.DataBlock = (byte*)blockInfo.ItemHandle;
            blockInfo.BlockSize = size;
            return blockInfo;
        }

        internal override unsafe void ReleaseDataBlock(IntPtr itemHandle)
        {
            if (itemHandle != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(itemHandle);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Reflection;
using System.Reflection.Metadata;
using System.Reflection.Metadata.Ecma335;
using System.Reflection.PortableExecutable;

using Internal.TypeSystem;

namespace Internal.TypeSystem.Ecma
{
    /// <summary>
    /// Type-system module backed by ECMA-335 metadata read through
    /// System.Reflection.Metadata. Resolves metadata tokens (type/method/field
    /// definitions, references, specifications, etc.) to type-system entities,
    /// caching the results in a lock-free hashtable.
    /// </summary>
    public partial class EcmaModule : ModuleDesc
    {
        private PEReader _peReader;
        protected MetadataReader _metadataReader;

        // Implemented by everything stored in the resolved-token cache so a cached
        // entry can report which handle it corresponds to.
        internal interface IEntityHandleObject
        {
            EntityHandle Handle { get; }
        }

        // Pairs a handle with its resolved object, for resolved items that cannot
        // report their own handle (references, specs, etc.).
        private sealed class EcmaObjectLookupWrapper : IEntityHandleObject
        {
            private EntityHandle _handle;
            private object _obj;

            public EcmaObjectLookupWrapper(EntityHandle handle, object obj)
            {
                _obj = obj;
                _handle = handle;
            }

            public EntityHandle Handle
            {
                get
                {
                    return _handle;
                }
            }

            public object Object
            {
                get
                {
                    return _obj;
                }
            }
        }

        // Cache mapping EntityHandle -> resolved entity. CreateValueFromKey does the
        // actual token resolution on a cache miss.
        internal class EcmaObjectLookupHashtable : LockFreeReaderHashtable<EntityHandle, IEntityHandleObject>
        {
            private EcmaModule _module;

            public EcmaObjectLookupHashtable(EcmaModule module)
            {
                _module = module;
            }

            protected override int GetKeyHashCode(EntityHandle key)
            {
                return key.GetHashCode();
            }

            protected override int GetValueHashCode(IEntityHandleObject value)
            {
                return value.Handle.GetHashCode();
            }

            protected override bool CompareKeyToValue(EntityHandle key, IEntityHandleObject value)
            {
                return key.Equals(value.Handle);
            }

            protected override bool CompareValueToValue(IEntityHandleObject value1, IEntityHandleObject value2)
            {
                if (Object.ReferenceEquals(value1, value2))
                    return true;
                else
                    return value1.Handle.Equals(value2.Handle);
            }

            // Resolves a handle to its entity on first lookup. Dispatches on the
            // handle kind; unknown kinds indicate corrupt metadata.
            protected override IEntityHandleObject CreateValueFromKey(EntityHandle handle)
            {
                object item;
                switch (handle.Kind)
                {
                    case HandleKind.TypeDefinition:
                        item = new EcmaType(_module, (TypeDefinitionHandle)handle);
                        break;

                    case HandleKind.MethodDefinition:
                        {
                            // Resolve the declaring type first so the method can be attached to it.
                            MethodDefinitionHandle methodDefinitionHandle = (MethodDefinitionHandle)handle;
                            TypeDefinitionHandle typeDefinitionHandle = _module._metadataReader.GetMethodDefinition(methodDefinitionHandle).GetDeclaringType();
                            EcmaType type = (EcmaType)_module.GetObject(typeDefinitionHandle);
                            item = new EcmaMethod(type, methodDefinitionHandle);
                        }
                        break;

                    case HandleKind.FieldDefinition:
                        {
                            // Same pattern as methods: declaring type, then the field.
                            FieldDefinitionHandle fieldDefinitionHandle = (FieldDefinitionHandle)handle;
                            TypeDefinitionHandle typeDefinitionHandle = _module._metadataReader.GetFieldDefinition(fieldDefinitionHandle).GetDeclaringType();
                            EcmaType type = (EcmaType)_module.GetObject(typeDefinitionHandle);
                            item = new EcmaField(type, fieldDefinitionHandle);
                        }
                        break;

                    case HandleKind.TypeReference:
                        item = _module.ResolveTypeReference((TypeReferenceHandle)handle);
                        break;

                    case HandleKind.MemberReference:
                        item = _module.ResolveMemberReference((MemberReferenceHandle)handle);
                        break;

                    case HandleKind.AssemblyReference:
                        item = _module.ResolveAssemblyReference((AssemblyReferenceHandle)handle);
                        break;

                    case HandleKind.TypeSpecification:
                        item = _module.ResolveTypeSpecification((TypeSpecificationHandle)handle);
                        break;

                    case HandleKind.MethodSpecification:
                        item = _module.ResolveMethodSpecification((MethodSpecificationHandle)handle);
                        break;

                    case HandleKind.ExportedType:
                        item = _module.ResolveExportedType((ExportedTypeHandle)handle);
                        break;

                    case HandleKind.StandaloneSignature:
                        item = _module.ResolveStandaloneSignature((StandaloneSignatureHandle)handle);
                        break;

                    case HandleKind.ModuleDefinition:
                        // ECMA-335 Partition 2 II.22.38 1d: This should not occur in a CLI ("compressed metadata") module,
                        // but resolves to "current module".
                        item = _module;
                        break;

                    default:
                        throw new BadImageFormatException("Unknown metadata token type: " + handle.Kind);
                }

                switch (handle.Kind)
                {
                    case HandleKind.TypeDefinition:
                    case HandleKind.MethodDefinition:
                    case HandleKind.FieldDefinition:
                        // type/method/field definitions directly correspond to their target item.
                        return (IEntityHandleObject)item;

                    default:
                        // Everything else is some form of reference which cannot be self-describing
                        return new EcmaObjectLookupWrapper(handle, item);
                }
            }
        }

        private LockFreeReaderHashtable<EntityHandle, IEntityHandleObject> _resolvedTokens;

        internal EcmaModule(TypeSystemContext context, PEReader peReader, MetadataReader metadataReader)
            : base(context)
        {
            _peReader = peReader;
            _metadataReader = metadataReader;
            _resolvedTokens = new EcmaObjectLookupHashtable(this);
        }

        /// <summary>
        /// Factory: returns an EcmaAssembly when the metadata describes an assembly,
        /// otherwise a plain EcmaModule.
        /// </summary>
        public static EcmaModule Create(TypeSystemContext context, PEReader peReader)
        {
            MetadataReader metadataReader = CreateMetadataReader(context, peReader);

            if (metadataReader.IsAssembly)
                return new EcmaAssembly(context, peReader, metadataReader);
            else
                return new EcmaModule(context, peReader, metadataReader);
        }

        // Creates the MetadataReader, using the context's string decoder when the
        // context provides one (enables string interning across modules).
        private static MetadataReader CreateMetadataReader(TypeSystemContext context, PEReader peReader)
        {
            if (!peReader.HasMetadata)
            {
                ThrowHelper.ThrowBadImageFormatException();
            }

            var stringDecoderProvider = context as IMetadataStringDecoderProvider;

            MetadataReader metadataReader = peReader.GetMetadataReader(MetadataReaderOptions.None /* MetadataReaderOptions.ApplyWindowsRuntimeProjections */,
                (stringDecoderProvider != null) ? stringDecoderProvider.GetMetadataStringDecoder() : null);

            return metadataReader;
        }

        public PEReader PEReader
        {
            get
            {
                return _peReader;
            }
        }

        public MetadataReader MetadataReader
        {
            get
            {
                return _metadataReader;
            }
        }

        /// <summary>
        /// Gets the managed entrypoint method of this module or null if the module has no managed entrypoint.
        /// </summary>
        public MethodDesc EntryPoint
        {
            get
            {
                CorHeader corHeader = _peReader.PEHeaders.CorHeader;
                if ((corHeader.Flags & CorFlags.NativeEntryPoint) != 0)
                {
                    // Entrypoint is an RVA to an unmanaged method
                    return null;
                }

                int entryPointToken = corHeader.EntryPointTokenOrRelativeVirtualAddress;
                if (entryPointToken == 0)
                {
                    // No entrypoint
                    return null;
                }

                EntityHandle handle = MetadataTokens.EntityHandle(entryPointToken);

                if (handle.Kind == HandleKind.MethodDefinition)
                {
                    return GetMethod(handle);
                }
                else if (handle.Kind == HandleKind.AssemblyFile)
                {
                    // Entrypoint not in the manifest assembly
                    throw new NotImplementedException();
                }

                // Bad metadata
                throw new BadImageFormatException();
            }
        }

        /// <summary>
        /// Looks up a type by namespace and name, searching type definitions first
        /// and then exported types (following type forwarders). Throws (via
        /// ThrowHelper) or returns null on failure, per throwIfNotFound.
        /// </summary>
        public sealed override MetadataType GetType(string nameSpace, string name, bool throwIfNotFound = true)
        {
            var stringComparer = _metadataReader.StringComparer;

            // TODO: More efficient implementation?
            foreach (var typeDefinitionHandle in _metadataReader.TypeDefinitions)
            {
                var typeDefinition = _metadataReader.GetTypeDefinition(typeDefinitionHandle);
                if (stringComparer.Equals(typeDefinition.Name, name) &&
                    stringComparer.Equals(typeDefinition.Namespace, nameSpace))
                {
                    return (MetadataType)GetType((EntityHandle)typeDefinitionHandle);
                }
            }

            foreach (var exportedTypeHandle in _metadataReader.ExportedTypes)
            {
                var exportedType = _metadataReader.GetExportedType(exportedTypeHandle);
                if (stringComparer.Equals(exportedType.Name, name) &&
                    stringComparer.Equals(exportedType.Namespace, nameSpace))
                {
                    if (exportedType.IsForwarder)
                    {
                        // Forwarded type: recurse into the target module.
                        Object implementation = GetObject(exportedType.Implementation);

                        if (implementation is ModuleDesc)
                        {
                            return ((ModuleDesc)(implementation)).GetType(nameSpace, name);
                        }

                        // TODO
                        throw new NotImplementedException();
                    }
                    // TODO:
                    throw new NotImplementedException();
                }
            }

            if (throwIfNotFound)
                ThrowHelper.ThrowTypeLoadException(nameSpace, name, this);

            return null;
        }

        // Resolves a handle and asserts the result is a type.
        public TypeDesc GetType(EntityHandle handle)
        {
            TypeDesc type = GetObject(handle) as TypeDesc;
            if (type == null)
                throw new BadImageFormatException("Type expected");

            return type;
        }

        // Resolves a handle and asserts the result is a method.
        public MethodDesc GetMethod(EntityHandle handle)
        {
            MethodDesc method = GetObject(handle) as MethodDesc;
            if (method == null)
                throw new BadImageFormatException("Method expected");

            return method;
        }

        // Resolves a handle and asserts the result is a field.
        public FieldDesc GetField(EntityHandle handle)
        {
            FieldDesc field = GetObject(handle) as FieldDesc;
            if (field == null)
                throw new BadImageFormatException("Field expected");

            return field;
        }

        /// <summary>
        /// Resolves any entity handle through the cache, unwrapping the lookup
        /// wrapper used for non-self-describing entries.
        /// </summary>
        public Object GetObject(EntityHandle handle)
        {
            IEntityHandleObject obj = _resolvedTokens.GetOrCreateValue(handle);
            if (obj is EcmaObjectLookupWrapper)
            {
                return ((EcmaObjectLookupWrapper)obj).Object;
            }
            else
            {
                return obj;
            }
        }

        // MethodSpec = generic method instantiation: resolve the generic method
        // definition and instantiate it over the parsed type arguments.
        private Object ResolveMethodSpecification(MethodSpecificationHandle handle)
        {
            MethodSpecification methodSpecification = _metadataReader.GetMethodSpecification(handle);

            MethodDesc methodDef = GetMethod(methodSpecification.Method);

            BlobReader signatureReader = _metadataReader.GetBlobReader(methodSpecification.Signature);
            EcmaSignatureParser parser = new EcmaSignatureParser(this, signatureReader);

            TypeDesc[] instantiation = parser.ParseMethodSpecSignature();
            return Context.GetInstantiatedMethod(methodDef, new Instantiation(instantiation));
        }

        // Standalone signature: parsed as a method signature (e.g. for calli).
        private Object ResolveStandaloneSignature(StandaloneSignatureHandle handle)
        {
            StandaloneSignature signature = _metadataReader.GetStandaloneSignature(handle);
            BlobReader signatureReader = _metadataReader.GetBlobReader(signature.Signature);
            EcmaSignatureParser parser = new EcmaSignatureParser(this, signatureReader);

            MethodSignature methodSig = parser.ParseMethodSignature();

            return methodSig;
        }

        // TypeSpec: decode the signature blob into a TypeDesc (arrays, generics, etc.).
        private Object ResolveTypeSpecification(TypeSpecificationHandle handle)
        {
            TypeSpecification typeSpecification = _metadataReader.GetTypeSpecification(handle);

            BlobReader signatureReader = _metadataReader.GetBlobReader(typeSpecification.Signature);
            EcmaSignatureParser parser = new EcmaSignatureParser(this, signatureReader);

            return parser.ParseType();
        }

        /// <summary>
        /// MemberRef: resolves a field or method reference against its parent type,
        /// walking base types for methods. Varargs through a method parent and
        /// global members through a module parent are not supported here.
        /// </summary>
        private Object ResolveMemberReference(MemberReferenceHandle handle)
        {
            MemberReference memberReference = _metadataReader.GetMemberReference(handle);

            Object parent = GetObject(memberReference.Parent);

            TypeDesc parentTypeDesc = parent as TypeDesc;
            if (parentTypeDesc != null)
            {
                BlobReader signatureReader = _metadataReader.GetBlobReader(memberReference.Signature);

                EcmaSignatureParser parser = new EcmaSignatureParser(this, signatureReader);

                string name = _metadataReader.GetString(memberReference.Name);

                if (parser.IsFieldSignature)
                {
                    FieldDesc field = parentTypeDesc.GetField(name);
                    if (field != null)
                        return field;

                    ThrowHelper.ThrowMissingFieldException(parentTypeDesc, name);
                }
                else
                {
                    MethodSignature sig = parser.ParseMethodSignature();
                    TypeDesc typeDescToInspect = parentTypeDesc;

                    // Try to resolve the name and signature in the current type, or any of the base types.
                    do
                    {
                        // TODO: handle substitutions
                        MethodDesc method = typeDescToInspect.GetMethod(name, sig);
                        if (method != null)
                        {
                            // If this resolved to one of the base types, make sure it's not a constructor.
                            // Instance constructors are not inherited.
                            if (typeDescToInspect != parentTypeDesc && method.IsConstructor)
                                break;

                            return method;
                        }
                        typeDescToInspect = typeDescToInspect.BaseType;
                    } while (typeDescToInspect != null);

                    ThrowHelper.ThrowMissingMethodException(parentTypeDesc, name, sig);
                }
            }
            else if (parent is MethodDesc)
            {
                ThrowHelper.ThrowInvalidProgramException(ExceptionStringID.InvalidProgramVararg, (MethodDesc)parent);
            }
            else if (parent is ModuleDesc)
            {
                throw new NotImplementedException("MemberRef to a global function or variable.");
            }

            throw new BadImageFormatException();
        }

        // TypeRef: resolution scope is either another module (top-level type) or a
        // type (nested type).
        private Object ResolveTypeReference(TypeReferenceHandle handle)
        {
            TypeReference typeReference = _metadataReader.GetTypeReference(handle);

            Object resolutionScope = GetObject(typeReference.ResolutionScope);

            if (resolutionScope is ModuleDesc)
            {
                return ((ModuleDesc)(resolutionScope)).GetType(_metadataReader.GetString(typeReference.Namespace), _metadataReader.GetString(typeReference.Name));
            }
            else if (resolutionScope is MetadataType)
            {
                string typeName = _metadataReader.GetString(typeReference.Name);
                MetadataType result = ((MetadataType)(resolutionScope)).GetNestedType(typeName);
                if (result != null)
                    return result;

                ThrowHelper.ThrowTypeLoadException(typeName, ((MetadataType)resolutionScope).Module);
            }

            // TODO
            throw new NotImplementedException();
        }

        // AssemblyRef: rebuild an AssemblyName (version, public key or token,
        // culture, content type) and ask the context to resolve it.
        private Object ResolveAssemblyReference(AssemblyReferenceHandle handle)
        {
            AssemblyReference assemblyReference = _metadataReader.GetAssemblyReference(handle);

            AssemblyName an = new AssemblyName();
            an.Name = _metadataReader.GetString(assemblyReference.Name);
            an.Version = assemblyReference.Version;

            var publicKeyOrToken = _metadataReader.GetBlobBytes(assemblyReference.PublicKeyOrToken);
            if ((assemblyReference.Flags & AssemblyFlags.PublicKey) != 0)
            {
                an.SetPublicKey(publicKeyOrToken);
            }
            else
            {
                an.SetPublicKeyToken(publicKeyOrToken);
            }

            an.CultureName = _metadataReader.GetString(assemblyReference.Culture);
            an.ContentType = GetContentTypeFromAssemblyFlags(assemblyReference.Flags);

            return Context.ResolveAssembly(an);
        }

        // ExportedType: implementation is either a module (look the type up there)
        // or another exported type (nested type).
        private Object ResolveExportedType(ExportedTypeHandle handle)
        {
            ExportedType exportedType = _metadataReader.GetExportedType(handle);

            var implementation = GetObject(exportedType.Implementation);
            if (implementation is ModuleDesc)
            {
                var module = (ModuleDesc)implementation;
                string nameSpace = _metadataReader.GetString(exportedType.Namespace);
                string name = _metadataReader.GetString(exportedType.Name);
                return module.GetType(nameSpace, name);
            }
            else if (implementation is MetadataType)
            {
                var type = (MetadataType)implementation;
                string name = _metadataReader.GetString(exportedType.Name);
                var nestedType = type.GetNestedType(name);
                if (nestedType == null)
                    ThrowHelper.ThrowTypeLoadException(name, this);
                return nestedType;
            }
            else
            {
                throw new BadImageFormatException("Unknown metadata token type for exported type");
            }
        }

        // Lazily enumerates every type definition in the module as a MetadataType.
        public sealed override IEnumerable<MetadataType> GetAllTypes()
        {
            foreach (var typeDefinitionHandle in _metadataReader.TypeDefinitions)
            {
                yield return (MetadataType)GetType(typeDefinitionHandle);
            }
        }

        // The <Module> type (token 0x02000001); null if the module defines no types.
        public sealed override MetadataType GetGlobalModuleType()
        {
            int typeDefinitionsCount = _metadataReader.TypeDefinitions.Count;
            if (typeDefinitionsCount == 0)
                return null;

            return (MetadataType)GetType(MetadataTokens.EntityHandle(0x02000001 /* COR_GLOBAL_PARENT_TOKEN */));
        }

        // Content type lives in bits 9-11 of the assembly flags.
        protected static AssemblyContentType GetContentTypeFromAssemblyFlags(AssemblyFlags flags)
        {
            return (AssemblyContentType)(((int)flags & 0x0E00) >> 9);
        }

        public string GetUserString(UserStringHandle userStringHandle)
        {
            // String literals are not cached
            return _metadataReader.GetUserString(userStringHandle);
        }

        public override string ToString()
        {
            ModuleDefinition moduleDefinition = _metadataReader.GetModuleDefinition();
            return _metadataReader.GetString(moduleDefinition.Name);
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

extern alias PDB;

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Reflection.Metadata;
using System.Reflection.Metadata.Ecma335;
using System.Reflection.PortableExecutable;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading;
using Microsoft.CodeAnalysis.CodeGen;
using Microsoft.CodeAnalysis.Collections;
using Microsoft.CodeAnalysis.Emit;
using Microsoft.CodeAnalysis.Test.Utilities;
using Microsoft.DiaSymReader;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Roslyn.Test.Utilities;
using Xunit;
using PDB::Roslyn.Test.MetadataUtilities;
using PDB::Roslyn.Test.PdbUtilities;

namespace Microsoft.CodeAnalysis.ExpressionEvaluator.UnitTests
{
    /// <summary>
    /// An IL offset range plus the locals visible in it, built from PDB scope records.
    /// </summary>
    internal sealed class Scope
    {
        internal readonly int StartOffset;
        internal readonly int EndOffset;
        internal readonly ImmutableArray<string> Locals;

        internal Scope(int startOffset, int endOffset, ImmutableArray<string> locals, bool isEndInclusive)
        {
            this.StartOffset = startOffset;
            // When the source scope's end offset is inclusive, widen by one so EndOffset
            // is uniformly exclusive for Contains below.
            this.EndOffset = endOffset + (isEndInclusive ? 1 : 0);
            this.Locals = locals;
        }

        internal int Length
        {
            // NOTE(review): Length counts EndOffset as inclusive (+1) while Contains treats
            // it as exclusive — confirm this asymmetry is intended.
            get { return this.EndOffset - this.StartOffset + 1; }
        }

        internal bool Contains(int offset)
        {
            return (offset >= this.StartOffset) && (offset < this.EndOffset);
        }
    }

    /// <summary>
    /// Shared helpers for expression-evaluator unit tests: compiling assignments/expressions
    /// through an <see cref="EvaluationContextBase"/>, reading emitted metadata, and driving
    /// SymReader-based PDB inspection.
    /// </summary>
    internal static class ExpressionCompilerTestHelpers
    {
        /// <summary>
        /// Compiles an assignment and asserts the expected result-property flags.
        /// Flags differ by language: C# reports only PotentialSideEffect, VB additionally ReadOnlyResult.
        /// </summary>
        internal static CompileResult CompileAssignment(
            this EvaluationContextBase context,
            string target,
            string expr,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ResultProperties resultProperties;
            ImmutableArray<AssemblyIdentity> missingAssemblyIdentities;
            var result = context.CompileAssignment(
                target,
                expr,
                ImmutableArray<Alias>.Empty,
                formatter ?? DebuggerDiagnosticFormatter.Instance,
                out resultProperties,
                out error,
                out missingAssemblyIdentities,
                EnsureEnglishUICulture.PreferredOrNull,
                testData);
            Assert.Empty(missingAssemblyIdentities);
            // This is a crude way to test the language, but it's convenient to share this test helper.
            var isCSharp = context.GetType().Namespace.IndexOf("csharp", StringComparison.OrdinalIgnoreCase) >= 0;
            var expectedFlags = error != null
                ? DkmClrCompilationResultFlags.None
                : isCSharp
                    ? DkmClrCompilationResultFlags.PotentialSideEffect
                    : DkmClrCompilationResultFlags.PotentialSideEffect | DkmClrCompilationResultFlags.ReadOnlyResult;
            Assert.Equal(expectedFlags, resultProperties.Flags);
            Assert.Equal(default(DkmEvaluationResultCategory), resultProperties.Category);
            Assert.Equal(default(DkmEvaluationResultAccessType), resultProperties.AccessType);
            Assert.Equal(default(DkmEvaluationResultStorageType), resultProperties.StorageType);
            Assert.Equal(default(DkmEvaluationResultTypeModifierFlags), resultProperties.ModifierFlags);
            return result;
        }

        /// <summary>
        /// Core assignment-compilation overload: collects diagnostics and turns any errors
        /// into an error message plus the set of missing assembly identities.
        /// </summary>
        internal static CompileResult CompileAssignment(
            this EvaluationContextBase context,
            string target,
            string expr,
            ImmutableArray<Alias> aliases,
            DiagnosticFormatter formatter,
            out ResultProperties resultProperties,
            out string error,
            out ImmutableArray<AssemblyIdentity> missingAssemblyIdentities,
            CultureInfo preferredUICulture,
            CompilationTestData testData)
        {
            var diagnostics = DiagnosticBag.GetInstance();
            var result = context.CompileAssignment(target, expr, aliases, diagnostics, out resultProperties, testData);
            if (diagnostics.HasAnyErrors())
            {
                bool useReferencedModulesOnly;
                error = context.GetErrorMessageAndMissingAssemblyIdentities(diagnostics, formatter, preferredUICulture, EvaluationContextBase.SystemCoreIdentity, out useReferencedModulesOnly, out missingAssemblyIdentities);
            }
            else
            {
                error = null;
                missingAssemblyIdentities = ImmutableArray<AssemblyIdentity>.Empty;
            }
            diagnostics.Free();
            return result;
        }

        /// <summary>
        /// Compiles the "get locals" request and verifies diagnostics against the expected set
        /// (none by default).
        /// </summary>
        internal static ReadOnlyCollection<byte> CompileGetLocals(
            this EvaluationContextBase context,
            ArrayBuilder<LocalAndMethod> locals,
            bool argumentsOnly,
            out string typeName,
            CompilationTestData testData,
            DiagnosticDescription[] expectedDiagnostics = null)
        {
            var diagnostics = DiagnosticBag.GetInstance();
            var result = context.CompileGetLocals(
                locals,
                argumentsOnly,
                ImmutableArray<Alias>.Empty,
                diagnostics,
                out typeName,
                testData);
            diagnostics.Verify(expectedDiagnostics ?? DiagnosticDescription.None);
            diagnostics.Free();
            return result;
        }

        /// <summary>Convenience overload: discards result properties.</summary>
        internal static CompileResult CompileExpression(
            this EvaluationContextBase context,
            string expr,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ResultProperties resultProperties;
            return CompileExpression(context, expr, out resultProperties, out error, testData, formatter);
        }

        /// <summary>
        /// Compiles an expression with TreatAsExpression and no aliases; asserts no
        /// assembly identities were reported missing.
        /// </summary>
        internal static CompileResult CompileExpression(
            this EvaluationContextBase context,
            string expr,
            out ResultProperties resultProperties,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ImmutableArray<AssemblyIdentity> missingAssemblyIdentities;
            var result = context.CompileExpression(
                expr,
                DkmEvaluationFlags.TreatAsExpression,
                ImmutableArray<Alias>.Empty,
                formatter ?? DebuggerDiagnosticFormatter.Instance,
                out resultProperties,
                out error,
                out missingAssemblyIdentities,
                EnsureEnglishUICulture.PreferredOrNull,
                testData);
            Assert.Empty(missingAssemblyIdentities);
            return result;
        }

        /// <summary>
        /// Overload taking explicit compilation flags and aliases; asserts no missing
        /// assembly identities.
        /// </summary>
        static internal CompileResult CompileExpression(
            this EvaluationContextBase evaluationContext,
            string expr,
            DkmEvaluationFlags compilationFlags,
            ImmutableArray<Alias> aliases,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ResultProperties resultProperties;
            ImmutableArray<AssemblyIdentity> missingAssemblyIdentities;
            var result = evaluationContext.CompileExpression(
                expr,
                compilationFlags,
                aliases,
                formatter ?? DebuggerDiagnosticFormatter.Instance,
                out resultProperties,
                out error,
                out missingAssemblyIdentities,
                EnsureEnglishUICulture.PreferredOrNull,
                testData);
            Assert.Empty(missingAssemblyIdentities);
            return result;
        }

        /// <summary>
        /// Compile C# expression and emit assembly with evaluation method.
        /// </summary>
        /// <returns>
        /// Result containing generated assembly, type and method names, and any format specifiers.
        /// </returns>
        static internal CompileResult CompileExpression(
            this EvaluationContextBase evaluationContext,
            string expr,
            DkmEvaluationFlags compilationFlags,
            ImmutableArray<Alias> aliases,
            DiagnosticFormatter formatter,
            out ResultProperties resultProperties,
            out string error,
            out ImmutableArray<AssemblyIdentity> missingAssemblyIdentities,
            CultureInfo preferredUICulture,
            CompilationTestData testData)
        {
            var diagnostics = DiagnosticBag.GetInstance();
            var result = evaluationContext.CompileExpression(expr, compilationFlags, aliases, diagnostics, out resultProperties, testData);
            if (diagnostics.HasAnyErrors())
            {
                bool useReferencedModulesOnly;
                error = evaluationContext.GetErrorMessageAndMissingAssemblyIdentities(diagnostics, formatter, preferredUICulture, EvaluationContextBase.SystemCoreIdentity, out useReferencedModulesOnly, out missingAssemblyIdentities);
            }
            else
            {
                error = null;
                missingAssemblyIdentities = ImmutableArray<AssemblyIdentity>.Empty;
            }
            diagnostics.Free();
            return result;
        }

        /// <summary>
        /// Runs the compiler's retry loop with a fixed context (the same context is returned
        /// for every retry attempt).
        /// </summary>
        internal static CompileResult CompileExpressionWithRetry(
            ImmutableArray<MetadataBlock> metadataBlocks,
            EvaluationContextBase context,
            ExpressionCompiler.CompileDelegate<CompileResult> compile,
            DkmUtilities.GetMetadataBytesPtrFunction getMetaDataBytesPtr,
            out string errorMessage)
        {
            return ExpressionCompiler.CompileWithRetry(
                metadataBlocks,
                DebuggerDiagnosticFormatter.Instance,
                (blocks, useReferencedModulesOnly) => context,
                compile,
                getMetaDataBytesPtr,
                out errorMessage);
        }

        /// <summary>
        /// Retry-loop variant that compiles <paramref name="expr"/> on each attempt and also
        /// surfaces the per-attempt <see cref="CompilationTestData"/> to the caller.
        /// </summary>
        internal static CompileResult CompileExpressionWithRetry(
            ImmutableArray<MetadataBlock> metadataBlocks,
            string expr,
            ImmutableArray<Alias> aliases,
            ExpressionCompiler.CreateContextDelegate createContext,
            DkmUtilities.GetMetadataBytesPtrFunction getMetaDataBytesPtr,
            out string errorMessage,
            out CompilationTestData testData)
        {
            var r = ExpressionCompiler.CompileWithRetry(
                metadataBlocks,
                DebuggerDiagnosticFormatter.Instance,
                createContext,
                (context, diagnostics) =>
                {
                    var td = new CompilationTestData();
                    ResultProperties resultProperties;
                    var compileResult = context.CompileExpression(
                        expr,
                        DkmEvaluationFlags.TreatAsExpression,
                        aliases,
                        diagnostics,
                        out resultProperties,
                        td);
                    return new CompileExpressionResult(compileResult, td);
                },
                getMetaDataBytesPtr,
                out errorMessage);
            testData = r.TestData;
            return r.CompileResult;
        }

        // Pairs the compile result with its test data so both survive the retry delegate.
        private struct CompileExpressionResult
        {
            internal readonly CompileResult CompileResult;
            internal readonly CompilationTestData TestData;

            internal CompileExpressionResult(CompileResult compileResult, CompilationTestData testData)
            {
                this.CompileResult = compileResult;
                this.TestData = testData;
            }
        }

        /// <summary>Finds the first TypeDef row whose name matches exactly.</summary>
        internal static TypeDefinition GetTypeDef(this MetadataReader reader, string typeName)
        {
            return reader.TypeDefinitions.Select(reader.GetTypeDefinition).First(t => reader.StringComparer.Equals(t.Name, typeName));
        }

        /// <summary>Finds the first method on the given TypeDef whose name matches exactly.</summary>
        internal static MethodDefinition GetMethodDef(this MetadataReader reader, TypeDefinition typeDef, string methodName)
        {
            return typeDef.GetMethods().Select(reader.GetMethodDefinition).First(m => reader.StringComparer.Equals(m.Name, methodName));
        }

        /// <summary>Like GetMethodDef, but returns the handle rather than the row.</summary>
        internal static MethodDefinitionHandle GetMethodDefHandle(this MetadataReader reader, TypeDefinition typeDef, string methodName)
        {
            return typeDef.GetMethods().First(h => reader.StringComparer.Equals(reader.GetMethodDefinition(h).Name, methodName));
        }

        /// <summary>Asserts the generic-parameter names match, in order.</summary>
        internal static void CheckTypeParameters(this MetadataReader reader, GenericParameterHandleCollection genericParameters, params string[] expectedNames)
        {
            var actualNames = genericParameters.Select(reader.GetGenericParameter).Select(tp => reader.GetString(tp.Name)).ToArray();
            Assert.True(expectedNames.SequenceEqual(actualNames));
        }

        /// <summary>Reads name and version out of a PE image's assembly definition.</summary>
        internal static AssemblyName GetAssemblyName(this byte[] exeBytes)
        {
            using (var reader = new PEReader(ImmutableArray.CreateRange(exeBytes)))
            {
                var metadataReader = reader.GetMetadataReader();
                var def = metadataReader.GetAssemblyDefinition();
                var name = metadataReader.GetString(def.Name);
                return new AssemblyName() { Name = name, Version = def.Version };
            }
        }

        /// <summary>Reads the MVID out of a PE image.</summary>
        internal static Guid GetModuleVersionId(this byte[] exeBytes)
        {
            using (var reader = new PEReader(ImmutableArray.CreateRange(exeBytes)))
            {
                return reader.GetMetadataReader().GetModuleVersionId();
            }
        }

        /// <summary>
        /// Returns local names indexed by slot for the given method; slots with no named
        /// local stay null. Empty when the method has no PDB entry.
        /// </summary>
        internal static ImmutableArray<string> GetLocalNames(this ISymUnmanagedReader symReader, int methodToken, int methodVersion = 1)
        {
            var method = symReader.GetMethodByVersion(methodToken, methodVersion);
            if (method == null)
            {
                return ImmutableArray<string>.Empty;
            }
            var scopes = ArrayBuilder<ISymUnmanagedScope>.GetInstance();
            method.GetAllScopes(scopes);
            var names = ArrayBuilder<string>.GetInstance();
            foreach (var scope in scopes)
            {
                var locals = scope.GetLocals();
                foreach (var local in locals)
                {
                    var name = local.GetName();
                    // AddressField1 holds the local's slot number; grow the list to fit.
                    int slot;
                    local.GetAddressField1(out slot);
                    while (names.Count <= slot)
                    {
                        names.Add(null);
                    }
                    names[slot] = name;
                }
            }
            scopes.Free();
            return names.ToImmutableAndFree();
        }

        /// <summary>
        /// Visualizes the IL of "Type.Method" from the emitted assembly and compares it
        /// (whitespace-tolerant) to the expected IL.
        /// </summary>
        internal static void VerifyIL(
            this ImmutableArray<byte> assembly,
            string qualifiedName,
            string expectedIL,
            [CallerLineNumber] int expectedValueSourceLine = 0,
            [CallerFilePath] string expectedValueSourcePath = null)
        {
            var parts = qualifiedName.Split('.');
            if (parts.Length != 2)
            {
                // Only simple "Type.Method" names are supported here.
                throw new NotImplementedException();
            }

            using (var metadata = ModuleMetadata.CreateFromImage(assembly))
            {
                var module = metadata.Module;
                var reader = module.MetadataReader;
                var typeDef = reader.GetTypeDef(parts[0]);
                var methodName = parts[1];
                var methodHandle = reader.GetMethodDefHandle(typeDef, methodName);
                var methodBody = module.GetMethodBodyOrThrow(methodHandle);

                var pooled = PooledStringBuilder.GetInstance();
                var builder = pooled.Builder;  // NOTE(review): unused local — candidate for removal.

                var writer = new StringWriter(pooled.Builder);
                var visualizer = new MetadataVisualizer(reader, writer);
                visualizer.VisualizeMethodBody(methodBody, methodHandle, emitHeader: false);
                var actualIL = pooled.ToStringAndFree();

                AssertEx.AssertEqualToleratingWhitespaceDifferences(expectedIL, actualIL, escapeQuotes: true, expectedValueSourcePath: expectedValueSourcePath, expectedValueSourceLine: expectedValueSourceLine);
            }
        }

        /// <summary>
        /// Returns the compilation references that actually appear as AssemblyRefs in the
        /// emitted metadata (references dropped during emit are excluded).
        /// </summary>
        internal static ImmutableArray<MetadataReference> GetEmittedReferences(Compilation compilation, MetadataReader mdReader)
        {
            // Determine the set of references that were actually used
            // and ignore any references that were dropped in emit.
            var referenceNames = new HashSet<string>(mdReader.AssemblyReferences.Select(h => GetAssemblyReferenceName(mdReader, h)));
            return ImmutableArray.CreateRange(compilation.References.Where(r => IsReferenced(r, referenceNames)));
        }

        /// <summary>Reads all PDB scopes for a method into <see cref="Scope"/> records.</summary>
        internal static ImmutableArray<Scope> GetScopes(this ISymUnmanagedReader symReader, int methodToken, int methodVersion, bool isEndInclusive)
        {
            var method = symReader.GetMethodByVersion(methodToken, methodVersion);
            if (method == null)
            {
                return ImmutableArray<Scope>.Empty;
            }
            var scopes = ArrayBuilder<ISymUnmanagedScope>.GetInstance();
            method.GetAllScopes(scopes);
            var result = scopes.SelectAsArray(s => new Scope(s.GetStartOffset(), s.GetEndOffset(), s.GetLocals().SelectAsArray(l => l.GetName()), isEndInclusive));
            scopes.Free();
            return result;
        }

        /// <summary>Returns the smallest scope containing the offset, or null if none does.</summary>
        internal static Scope GetInnermostScope(this ImmutableArray<Scope> scopes, int offset)
        {
            Scope result = null;
            foreach (var scope in scopes)
            {
                if (scope.Contains(offset))
                {
                    if ((result == null) || (result.Length > scope.Length))
                    {
                        result = scope;
                    }
                }
            }
            return result;
        }

        private static string GetAssemblyReferenceName(MetadataReader reader, AssemblyReferenceHandle handle)
        {
            var reference = reader.GetAssemblyReference(handle);
            return reader.GetString(reference.Name);
        }

        // True when the reference's assembly name is in the emitted AssemblyRef set.
        // Netmodules have no assembly identity and are assumed referenced.
        private static bool IsReferenced(MetadataReference reference, HashSet<string> referenceNames)
        {
            var assemblyMetadata = ((PortableExecutableReference)reference).GetMetadataNoCopy() as AssemblyMetadata;
            if (assemblyMetadata == null)
            {
                // Netmodule. Assume it is referenced.
                return true;
            }
            var name = assemblyMetadata.GetAssembly().Identity.Name;
            return referenceNames.Contains(name);
        }

        internal static ModuleInstance ToModuleInstance(this MetadataReference reference)
        {
            return ModuleInstance.Create((PortableExecutableReference)reference);
        }

        /// <summary>
        /// Emits the compilation (optionally with a PDB in the requested format) and wraps
        /// the result in a ModuleInstance with an attached sym reader.
        /// </summary>
        internal static ModuleInstance ToModuleInstance(
            this Compilation compilation,
            DebugInformationFormat debugFormat = DebugInformationFormat.Pdb,
            bool includeLocalSignatures = true)
        {
            var pdbStream = (debugFormat != 0) ? new MemoryStream() : null;
            var peImage = compilation.EmitToArray(new EmitOptions(debugInformationFormat: debugFormat), pdbStream: pdbStream);
            var symReader = (debugFormat != 0) ? SymReaderFactory.CreateReader(pdbStream, new PEReader(peImage)) : null;
            return ModuleInstance.Create(peImage, symReader, includeLocalSignatures);
        }

        /// <summary>Assembles IL source (with PDB) into a ModuleInstance.</summary>
        internal static ModuleInstance GetModuleInstanceForIL(string ilSource)
        {
            ImmutableArray<byte> peBytes;
            ImmutableArray<byte> pdbBytes;
            CommonTestBase.EmitILToArray(ilSource, appendDefaultHeader: true, includePdb: true, assemblyBytes: out peBytes, pdbBytes: out pdbBytes);
            return ModuleInstance.Create(peBytes, SymReaderFactory.CreateReader(pdbBytes), includeLocalSignatures: true);
        }

        internal static AssemblyIdentity GetAssemblyIdentity(this MetadataReference reference)
        {
            using (var moduleMetadata = GetManifestModuleMetadata(reference))
            {
                return moduleMetadata.MetadataReader.ReadAssemblyIdentityOrThrow();
            }
        }

        internal static Guid GetModuleVersionId(this MetadataReference reference)
        {
            using (var moduleMetadata = GetManifestModuleMetadata(reference))
            {
                return moduleMetadata.MetadataReader.GetModuleVersionIdOrThrow();
            }
        }

        // Returns the manifest module for an assembly reference, or the module itself for
        // a standalone-module reference.
        private static ModuleMetadata GetManifestModuleMetadata(MetadataReference reference)
        {
            // make a copy to avoid disposing shared reference metadata:
            var metadata = ((MetadataImageReference)reference).GetMetadata();
            return (metadata as AssemblyMetadata)?.GetModules()[0] ?? (ModuleMetadata)metadata;
        }

        /// <summary>
        /// Verifies one entry of a "get locals" result: names, flags, the synthesized
        /// method's type parameters, IL (optional), and calling convention.
        /// </summary>
        internal static void VerifyLocal<TMethodSymbol>(
            this CompilationTestData testData,
            string typeName,
            LocalAndMethod localAndMethod,
            string expectedMethodName,
            string expectedLocalName,
            string expectedLocalDisplayName,
            DkmClrCompilationResultFlags expectedFlags,
            Action<TMethodSymbol> verifyTypeParameters,
            string expectedILOpt,
            bool expectedGeneric,
            string expectedValueSourcePath,
            int expectedValueSourceLine)
            where TMethodSymbol : IMethodSymbol
        {
            Assert.Equal(expectedLocalName, localAndMethod.LocalName);
            Assert.Equal(expectedLocalDisplayName, localAndMethod.LocalDisplayName);
            Assert.True(expectedMethodName.StartsWith(localAndMethod.MethodName, StringComparison.Ordinal), expectedMethodName + " does not start with " + localAndMethod.MethodName); // Expected name may include type arguments and parameters.
            Assert.Equal(expectedFlags, localAndMethod.Flags);
            var methodData = testData.GetMethodData(typeName + "." + expectedMethodName);
            verifyTypeParameters((TMethodSymbol)methodData.Method);
            if (expectedILOpt != null)
            {
                string actualIL = methodData.GetMethodIL();
                AssertEx.AssertEqualToleratingWhitespaceDifferences(
                    expectedILOpt,
                    actualIL,
                    escapeQuotes: true,
                    expectedValueSourcePath: expectedValueSourcePath,
                    expectedValueSourceLine: expectedValueSourceLine);
            }

            Assert.Equal(((Cci.IMethodDefinition)methodData.Method).CallingConvention, expectedGeneric ? Cci.CallingConvention.Generic : Cci.CallingConvention.Default);
        }

        /// <summary>
        /// Builds a mock sym reader that serves the given import strings for the one named
        /// method in the PE image.
        /// </summary>
        internal static ISymUnmanagedReader ConstructSymReaderWithImports(ImmutableArray<byte> peImage, string methodName, params string[] importStrings)
        {
            using (var peReader = new PEReader(peImage))
            {
                var metadataReader = peReader.GetMetadataReader();
                var methodHandle = metadataReader.MethodDefinitions.Single(h => metadataReader.StringComparer.Equals(metadataReader.GetMethodDefinition(h).Name, methodName));
                var methodToken = metadataReader.GetToken(methodHandle);

                return new MockSymUnmanagedReader(new Dictionary<int, MethodDebugInfoBytes>
                {
                    { methodToken, new MethodDebugInfoBytes.Builder(new [] { importStrings }).Build() },
                }.ToImmutableDictionary());
            }
        }

        internal const uint NoILOffset = 0xffffffff;

        internal static readonly MetadataReference IntrinsicAssemblyReference = GetIntrinsicAssemblyReference();

        internal static ImmutableArray<MetadataReference> AddIntrinsicAssembly(this ImmutableArray<MetadataReference> references)
        {
            var builder = ArrayBuilder<MetadataReference>.GetInstance();
            builder.AddRange(references);
            builder.Add(IntrinsicAssemblyReference);
            return builder.ToImmutableAndFree();
        }

        // Compiles a stub IntrinsicMethods assembly whose members all "ldnull throw";
        // only the signatures matter to the expression compiler.
        private static MetadataReference GetIntrinsicAssemblyReference()
        {
            var source = @".assembly extern mscorlib { } .class public Microsoft.VisualStudio.Debugger.Clr.IntrinsicMethods { .method public static object GetObjectAtAddress(uint64 address) { ldnull throw } .method public static class [mscorlib]System.Exception GetException() { ldnull throw } .method public static class [mscorlib]System.Exception GetStowedException() { ldnull throw } .method public static object GetReturnValue(int32 index) { ldnull throw } .method public static void CreateVariable(class [mscorlib]System.Type 'type', string name, valuetype [mscorlib]System.Guid customTypeInfoPayloadTypeId, uint8[] customTypeInfoPayload) { ldnull throw } .method public static object GetObjectByAlias(string name) { ldnull throw } .method public static !!T& GetVariableAddress<T>(string name) { ldnull throw } }";
            return CommonTestBase.CompileIL(source);
        }

        /// <summary>
        /// Return MetadataReferences to the .winmd assemblies
        /// for the given namespaces.
        /// </summary>
        internal static ImmutableArray<MetadataReference> GetRuntimeWinMds(params string[] namespaces)
        {
            var paths = new HashSet<string>();
            foreach (var @namespace in namespaces)
            {
                foreach (var path in WindowsRuntimeMetadata.ResolveNamespace(@namespace, null))
                {
                    paths.Add(path);
                }
            }
            return ImmutableArray.CreateRange(paths.Select(GetAssembly));
        }

        private const string Version1_3CLRString = "WindowsRuntime 1.3;CLR v4.0.30319";
        private const string Version1_3String = "WindowsRuntime 1.3";
        private const string Version1_4String = "WindowsRuntime 1.4";
        private static readonly int s_versionStringLength = Version1_3CLRString.Length;

        private static readonly byte[] s_version1_3CLRBytes = ToByteArray(Version1_3CLRString, s_versionStringLength);
        private static readonly byte[] s_version1_3Bytes = ToByteArray(Version1_3String, s_versionStringLength);
        private static readonly byte[] s_version1_4Bytes = ToByteArray(Version1_4String, s_versionStringLength);

        // Copies the string into a fixed-length (zero-padded) ASCII-ish byte array so all
        // version markers compare at equal width.
        private static byte[] ToByteArray(string str, int length)
        {
            var bytes = new byte[length];
            for (int i = 0; i < str.Length; i++)
            {
                bytes[i] = (byte)str[i];
            }
            return bytes;
        }

        internal static byte[] ToVersion1_3(byte[] bytes)
        {
            return ToVersion(bytes, s_version1_3CLRBytes, s_version1_3Bytes);
        }

        internal static byte[] ToVersion1_4(byte[] bytes)
        {
            return ToVersion(bytes, s_version1_3CLRBytes, s_version1_4Bytes);
        }

        // Rewrites the first occurrence of the 'from' marker to 'to' in a copy of the image.
        // NOTE(review): IndexOf returns -1 on no match, which would throw from Array.Copy —
        // callers are expected to pass images containing the marker.
        private static byte[] ToVersion(byte[] bytes, byte[] from, byte[] to)
        {
            int n = bytes.Length;
            var copy = new byte[n];
            Array.Copy(bytes, copy, n);
            int index = IndexOf(copy, from);
            Array.Copy(to, 0, copy, index, to.Length);
            return copy;
        }

        // Naive byte-subsequence search; returns the first start index or -1.
        private static int IndexOf(byte[] a, byte[] b)
        {
            int m = b.Length;
            int n = a.Length - m;
            for (int x = 0; x < n; x++)
            {
                var matches = true;
                for (int y = 0; y < m; y++)
                {
                    if (a[x + y] != b[y])
                    {
                        matches = false;
                        break;
                    }
                }
                if (matches)
                {
                    return x;
                }
            }
            return -1;
        }

        private static MetadataReference GetAssembly(string path)
        {
            var bytes = File.ReadAllBytes(path);
            var metadata = ModuleMetadata.CreateFromImage(bytes);
            return metadata.GetReference(filePath: path);
        }

        /// <summary>
        /// Maps a method token to an IL offset via sequence points: the first non-hidden
        /// sequence point by default, or the first one on <paramref name="atLineNumber"/>.
        /// Returns 0 when no sym reader / method info is available.
        /// </summary>
        internal static uint GetOffset(int methodToken, ISymUnmanagedReader symReader, int atLineNumber = -1)
        {
            int ilOffset;
            if (symReader == null)
            {
                ilOffset = 0;
            }
            else
            {
                var symMethod = symReader.GetMethod(methodToken);
                if (symMethod == null)
                {
                    ilOffset = 0;
                }
                else
                {
                    var sequencePoints = symMethod.GetSequencePoints();
                    ilOffset = atLineNumber < 0
                        ? sequencePoints.Where(sp => sp.StartLine != SequencePointList.HiddenSequencePointLine).Select(sp => sp.Offset).FirstOrDefault()
                        : sequencePoints.First(sp => sp.StartLine == atLineNumber).Offset;
                }
            }
            Assert.InRange(ilOffset, 0, int.MaxValue);
            return (uint)ilOffset;
        }

        /// <summary>
        /// Splits "Name(p1,p2)" into the name and the parameter type names; returns the
        /// input unchanged (with null parameter names) when there is no parameter list.
        /// </summary>
        internal static string GetMethodOrTypeSignatureParts(string signature, out string[] parameterTypeNames)
        {
            var parameterListStart = signature.IndexOf('(');
            if (parameterListStart < 0)
            {
                parameterTypeNames = null;
                return signature;
            }

            var parameters = signature.Substring(parameterListStart + 1, signature.Length - parameterListStart - 2);
            var methodName = signature.Substring(0, parameterListStart);
            parameterTypeNames = (parameters.Length == 0)
                ? new string[0]
                : parameters.Split(',');
            return methodName;
        }
    }
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Reactive.Linq;
using System.Threading;
using System.Threading.Tasks;
using Avalonia.Data;
using Avalonia.Markup.Data;
using Avalonia.UnitTests;
using Moq;
using Xunit;

namespace Avalonia.Markup.UnitTests.Data
{
    /// <summary>
    /// Tests for <c>BindingExpression</c>: reading/writing values through an
    /// <c>ExpressionObserver</c>, type conversion in both directions, fallback values,
    /// converter parameters, and data validation.
    /// The <c>InvariantCultureFixture</c> presumably pins the culture so the
    /// interpolated-number strings ($"{5.6}") are stable — TODO confirm its behavior.
    /// </summary>
    public class BindingExpressionTests : IClassFixture<InvariantCultureFixture>
    {
        [Fact]
        public async Task Should_Get_Simple_Property_Value()
        {
            var data = new Class1 { StringValue = "foo" };
            var target = new BindingExpression(new ExpressionObserver(data, "StringValue"), typeof(string));
            var result = await target.Take(1);

            Assert.Equal("foo", result);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Set_Simple_Property_Value()
        {
            var data = new Class1 { StringValue = "foo" };
            var target = new BindingExpression(new ExpressionObserver(data, "StringValue"), typeof(string));

            target.OnNext("bar");

            Assert.Equal("bar", data.StringValue);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Set_Indexed_Value()
        {
            var data = new { Foo = new[] { "foo" } };
            var target = new BindingExpression(new ExpressionObserver(data, "Foo[0]"), typeof(string));

            target.OnNext("bar");

            Assert.Equal("bar", data.Foo[0]);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Should_Convert_Get_String_To_Double()
        {
            var data = new Class1 { StringValue = $"{5.6}" };
            var target = new BindingExpression(new ExpressionObserver(data, "StringValue"), typeof(double));
            var result = await target.Take(1);

            Assert.Equal(5.6, result);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Getting_Invalid_Double_String_Should_Return_BindingError()
        {
            var data = new Class1 { StringValue = "foo" };
            var target = new BindingExpression(new ExpressionObserver(data, "StringValue"), typeof(double));
            var result = await target.Take(1);

            Assert.IsType<BindingNotification>(result);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Should_Coerce_Get_Null_Double_String_To_UnsetValue()
        {
            var data = new Class1 { StringValue = null };
            var target = new BindingExpression(new ExpressionObserver(data, "StringValue"), typeof(double));
            var result = await target.Take(1);

            Assert.Equal(AvaloniaProperty.UnsetValue, result);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Convert_Set_String_To_Double()
        {
            var data = new Class1 { StringValue = $"{5.6}" };
            var target = new BindingExpression(new ExpressionObserver(data, "StringValue"), typeof(double));

            target.OnNext(6.7);

            Assert.Equal($"{6.7}", data.StringValue);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Should_Convert_Get_Double_To_String()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var target = new BindingExpression(new ExpressionObserver(data, "DoubleValue"), typeof(string));
            var result = await target.Take(1);

            Assert.Equal($"{5.6}", result);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Convert_Set_Double_To_String()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var target = new BindingExpression(new ExpressionObserver(data, "DoubleValue"), typeof(string));

            target.OnNext($"{6.7}");

            Assert.Equal(6.7, data.DoubleValue);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Should_Return_BindingNotification_With_FallbackValue_For_NonConvertibe_Target_Value()
        {
            var data = new Class1 { StringValue = "foo" };
            var target = new BindingExpression(
                new ExpressionObserver(data, "StringValue"),
                typeof(int),
                42,
                DefaultValueConverter.Instance);
            var result = await target.Take(1);

            // The error is surfaced alongside the fallback value, not instead of it.
            Assert.Equal(
                new BindingNotification(
                    new InvalidCastException("'foo' is not a valid number."),
                    BindingErrorType.Error,
                    42),
                result);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Should_Return_BindingNotification_With_FallbackValue_For_NonConvertibe_Target_Value_With_Data_Validation()
        {
            var data = new Class1 { StringValue = "foo" };
            var target = new BindingExpression(
                new ExpressionObserver(data, "StringValue", true),
                typeof(int),
                42,
                DefaultValueConverter.Instance);
            var result = await target.Take(1);

            Assert.Equal(
                new BindingNotification(
                    new InvalidCastException("'foo' is not a valid number."),
                    BindingErrorType.Error,
                    42),
                result);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Should_Return_BindingNotification_For_Invalid_FallbackValue()
        {
            var data = new Class1 { StringValue = "foo" };
            var target = new BindingExpression(
                new ExpressionObserver(data, "StringValue"),
                typeof(int),
                "bar",
                DefaultValueConverter.Instance);
            var result = await target.Take(1);

            // Both the conversion failure and the fallback-conversion failure are reported.
            Assert.Equal(
                new BindingNotification(
                    new AggregateException(
                        new InvalidCastException("'foo' is not a valid number."),
                        new InvalidCastException("Could not convert FallbackValue 'bar' to 'System.Int32'")),
                    BindingErrorType.Error),
                result);

            GC.KeepAlive(data);
        }

        [Fact]
        public async Task Should_Return_BindingNotification_For_Invalid_FallbackValue_With_Data_Validation()
        {
            var data = new Class1 { StringValue = "foo" };
            var target = new BindingExpression(
                new ExpressionObserver(data, "StringValue", true),
                typeof(int),
                "bar",
                DefaultValueConverter.Instance);
            var result = await target.Take(1);

            Assert.Equal(
                new BindingNotification(
                    new AggregateException(
                        new InvalidCastException("'foo' is not a valid number."),
                        new InvalidCastException("Could not convert FallbackValue 'bar' to 'System.Int32'")),
                    BindingErrorType.Error),
                result);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Setting_Invalid_Double_String_Should_Not_Change_Target()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var target = new BindingExpression(new ExpressionObserver(data, "DoubleValue"), typeof(string));

            target.OnNext("foo");

            Assert.Equal(5.6, data.DoubleValue);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Setting_Invalid_Double_String_Should_Use_FallbackValue()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var target = new BindingExpression(
                new ExpressionObserver(data, "DoubleValue"),
                typeof(string),
                "9.8",
                DefaultValueConverter.Instance);

            target.OnNext("foo");

            // The unconvertible write falls back to "9.8", converted to double on the source.
            Assert.Equal(9.8, data.DoubleValue);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Coerce_Setting_Null_Double_To_Default_Value()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var target = new BindingExpression(new ExpressionObserver(data, "DoubleValue"), typeof(string));

            target.OnNext(null);

            Assert.Equal(0, data.DoubleValue);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Coerce_Setting_UnsetValue_Double_To_Default_Value()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var target = new BindingExpression(new ExpressionObserver(data, "DoubleValue"), typeof(string));

            target.OnNext(AvaloniaProperty.UnsetValue);

            Assert.Equal(0, data.DoubleValue);

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Pass_ConverterParameter_To_Convert()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var converter = new Mock<IValueConverter>();
            var target = new BindingExpression(
                new ExpressionObserver(data, "DoubleValue"),
                typeof(string),
                converter.Object,
                converterParameter: "foo");

            // Subscribing triggers the initial Convert call.
            target.Subscribe(_ => { });

            converter.Verify(x => x.Convert(5.6, typeof(string), "foo", CultureInfo.CurrentCulture));

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Pass_ConverterParameter_To_ConvertBack()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var converter = new Mock<IValueConverter>();
            var target = new BindingExpression(
                new ExpressionObserver(data, "DoubleValue"),
                typeof(string),
                converter.Object,
                converterParameter: "foo");

            target.OnNext("bar");

            converter.Verify(x => x.ConvertBack("bar", typeof(double), "foo", CultureInfo.CurrentCulture));

            GC.KeepAlive(data);
        }

        [Fact]
        public void Should_Handle_DataValidation()
        {
            var data = new Class1 { DoubleValue = 5.6 };
            var converter = new Mock<IValueConverter>();
            var target = new BindingExpression(new ExpressionObserver(data, "DoubleValue", true), typeof(string));
            var result = new List<object>();

            target.Subscribe(x => result.Add(x));
            target.OnNext(1.2);
            target.OnNext($"{3.4}");
            target.OnNext("bar");

            // Expect: initial value, the two successful writes echoed back, then the error.
            Assert.Equal(
                new[]
                {
                    new BindingNotification($"{5.6}"),
                    new BindingNotification($"{1.2}"),
                    new BindingNotification($"{3.4}"),
                    new BindingNotification(
                        new InvalidCastException("'bar' is not a valid number."),
                        BindingErrorType.Error)
                },
                result);

            GC.KeepAlive(data);
        }

        // Simple INPC-backed view model used as a binding source in these tests.
        private class Class1 : NotifyingBase
        {
            private string _stringValue;
            private double _doubleValue;

            public string StringValue
            {
                get { return _stringValue; }
                set { _stringValue = value; RaisePropertyChanged(); }
            }

            public double DoubleValue
            {
                get { return _doubleValue; }
                set { _doubleValue = value; RaisePropertyChanged(); }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections;
using System.Globalization;

namespace System.Drawing
{
    /// <summary>
    /// Translates colors to and from GDI+ <see cref='Color'/> objects.
    /// </summary>
    public sealed class ColorTranslator
    {
        // Channel bit positions inside a Win32 COLORREF (layout 0x00BBGGRR).
        private const int Win32RedShift = 0;
        private const int Win32GreenShift = 8;
        private const int Win32BlueShift = 16;

        // Maps lower-cased HTML system color names to Color values. Built lazily;
        // InitializeHtmlSysColorTable publishes a fully populated table in a single
        // reference assignment so concurrent readers never see a partial table.
        private static Hashtable s_htmlSysColorTable;

        /// <summary>
        /// Translates the specified <see cref='Color'/> to a Win32 color (COLORREF).
        /// </summary>
        public static int ToWin32(Color c)
        {
            return c.R << Win32RedShift | c.G << Win32GreenShift | c.B << Win32BlueShift;
        }

        /// <summary>
        /// Translates the specified <see cref='Color'/> to an Ole color. System
        /// colors map to their 0x80000000-based system indices; everything else
        /// maps to a plain COLORREF via <see cref="ToWin32"/>.
        /// </summary>
        public static int ToOle(Color c)
        {
            // WARNING!!! WARNING!!! WARNING!!! WARNING!!!
            // WARNING!!! WARNING!!! WARNING!!! WARNING!!!
            // We must never have another method called ToOle() with a different signature.
            // This is so that we can push into the runtime a custom marshaller for OLE_COLOR to Color.

            if (ColorUtil.GetIsKnownColor(c))
            {
                switch (c.ToKnownColor())
                {
                    case KnownColor.ActiveBorder: return unchecked((int)0x8000000A);
                    case KnownColor.ActiveCaption: return unchecked((int)0x80000002);
                    case KnownColor.ActiveCaptionText: return unchecked((int)0x80000009);
                    case KnownColor.AppWorkspace: return unchecked((int)0x8000000C);
                    case KnownColor.ButtonFace: return unchecked((int)0x8000000F);
                    case KnownColor.ButtonHighlight: return unchecked((int)0x80000014);
                    case KnownColor.ButtonShadow: return unchecked((int)0x80000010);
                    case KnownColor.Control: return unchecked((int)0x8000000F);
                    case KnownColor.ControlDark: return unchecked((int)0x80000010);
                    case KnownColor.ControlDarkDark: return unchecked((int)0x80000015);
                    case KnownColor.ControlLight: return unchecked((int)0x80000016);
                    case KnownColor.ControlLightLight: return unchecked((int)0x80000014);
                    case KnownColor.ControlText: return unchecked((int)0x80000012);
                    case KnownColor.Desktop: return unchecked((int)0x80000001);
                    case KnownColor.GradientActiveCaption: return unchecked((int)0x8000001B);
                    case KnownColor.GradientInactiveCaption: return unchecked((int)0x8000001C);
                    case KnownColor.GrayText: return unchecked((int)0x80000011);
                    case KnownColor.Highlight: return unchecked((int)0x8000000D);
                    case KnownColor.HighlightText: return unchecked((int)0x8000000E);
                    case KnownColor.HotTrack: return unchecked((int)0x8000001A);
                    case KnownColor.InactiveBorder: return unchecked((int)0x8000000B);
                    case KnownColor.InactiveCaption: return unchecked((int)0x80000003);
                    case KnownColor.InactiveCaptionText: return unchecked((int)0x80000013);
                    case KnownColor.Info: return unchecked((int)0x80000018);
                    case KnownColor.InfoText: return unchecked((int)0x80000017);
                    case KnownColor.Menu: return unchecked((int)0x80000004);
                    case KnownColor.MenuBar: return unchecked((int)0x8000001E);
                    case KnownColor.MenuHighlight: return unchecked((int)0x8000001D);
                    case KnownColor.MenuText: return unchecked((int)0x80000007);
                    case KnownColor.ScrollBar: return unchecked((int)0x80000000);
                    case KnownColor.Window: return unchecked((int)0x80000005);
                    case KnownColor.WindowFrame: return unchecked((int)0x80000006);
                    case KnownColor.WindowText: return unchecked((int)0x80000008);
                }
            }

            return ToWin32(c);
        }

        /// <summary>
        /// Translates an Ole color value to a GDI+ <see cref='Color'/>.
        /// </summary>
        public static Color FromOle(int oleColor)
        {
            // WARNING!!! WARNING!!! WARNING!!! WARNING!!!
            // WARNING!!! WARNING!!! WARNING!!! WARNING!!!
            // We must never have another method called ToOle() with a different signature.
            // This is so that we can push into the runtime a custom marshaller for OLE_COLOR to Color.

            switch (oleColor)
            {
                case unchecked((int)0x8000000A): return ColorUtil.FromKnownColor(KnownColor.ActiveBorder);
                case unchecked((int)0x80000002): return ColorUtil.FromKnownColor(KnownColor.ActiveCaption);
                case unchecked((int)0x80000009): return ColorUtil.FromKnownColor(KnownColor.ActiveCaptionText);
                case unchecked((int)0x8000000C): return ColorUtil.FromKnownColor(KnownColor.AppWorkspace);
                case unchecked((int)0x8000000F): return ColorUtil.FromKnownColor(KnownColor.Control);
                case unchecked((int)0x80000010): return ColorUtil.FromKnownColor(KnownColor.ControlDark);
                case unchecked((int)0x80000015): return ColorUtil.FromKnownColor(KnownColor.ControlDarkDark);
                case unchecked((int)0x80000016): return ColorUtil.FromKnownColor(KnownColor.ControlLight);
                case unchecked((int)0x80000014): return ColorUtil.FromKnownColor(KnownColor.ControlLightLight);
                case unchecked((int)0x80000012): return ColorUtil.FromKnownColor(KnownColor.ControlText);
                case unchecked((int)0x80000001): return ColorUtil.FromKnownColor(KnownColor.Desktop);
                case unchecked((int)0x8000001B): return ColorUtil.FromKnownColor(KnownColor.GradientActiveCaption);
                case unchecked((int)0x8000001C): return ColorUtil.FromKnownColor(KnownColor.GradientInactiveCaption);
                case unchecked((int)0x80000011): return ColorUtil.FromKnownColor(KnownColor.GrayText);
                case unchecked((int)0x8000000D): return ColorUtil.FromKnownColor(KnownColor.Highlight);
                case unchecked((int)0x8000000E): return ColorUtil.FromKnownColor(KnownColor.HighlightText);
                case unchecked((int)0x8000001A): return ColorUtil.FromKnownColor(KnownColor.HotTrack);
                case unchecked((int)0x8000000B): return ColorUtil.FromKnownColor(KnownColor.InactiveBorder);
                case unchecked((int)0x80000003): return ColorUtil.FromKnownColor(KnownColor.InactiveCaption);
                case unchecked((int)0x80000013): return ColorUtil.FromKnownColor(KnownColor.InactiveCaptionText);
                case unchecked((int)0x80000018): return ColorUtil.FromKnownColor(KnownColor.Info);
                case unchecked((int)0x80000017): return ColorUtil.FromKnownColor(KnownColor.InfoText);
                case unchecked((int)0x80000004): return ColorUtil.FromKnownColor(KnownColor.Menu);
                case unchecked((int)0x8000001E): return ColorUtil.FromKnownColor(KnownColor.MenuBar);
                case unchecked((int)0x8000001D): return ColorUtil.FromKnownColor(KnownColor.MenuHighlight);
                case unchecked((int)0x80000007): return ColorUtil.FromKnownColor(KnownColor.MenuText);
                case unchecked((int)0x80000000): return ColorUtil.FromKnownColor(KnownColor.ScrollBar);
                case unchecked((int)0x80000005): return ColorUtil.FromKnownColor(KnownColor.Window);
                case unchecked((int)0x80000006): return ColorUtil.FromKnownColor(KnownColor.WindowFrame);
                case unchecked((int)0x80000008): return ColorUtil.FromKnownColor(KnownColor.WindowText);
            }

            // Not a system index: decode the COLORREF channels and canonicalize
            // to a known color where one matches.
            Color color = Color.FromArgb((byte)((oleColor >> Win32RedShift) & 0xFF),
                                         (byte)((oleColor >> Win32GreenShift) & 0xFF),
                                         (byte)((oleColor >> Win32BlueShift) & 0xFF));

            return KnownColorTable.ArgbToKnownColor(color.ToArgb());
        }

        /// <summary>
        /// Translates an Win32 color value to a GDI+ <see cref='Color'/>.
        /// </summary>
        public static Color FromWin32(int win32Color)
        {
            return FromOle(win32Color);
        }

        /// <summary>
        /// Translates an Html color representation to a GDI+ <see cref='Color'/>.
        /// Accepts #RRGGBB / #RGB hex forms, HTML system color names, and any
        /// named color understood by <see cref="ColorConverterCommon"/>.
        /// </summary>
        /// <exception cref="ArgumentException">
        /// Thrown (wrapping the original error) when the string cannot be parsed.
        /// </exception>
        public static Color FromHtml(string htmlColor)
        {
            Color c = Color.Empty;

            // empty color
            if ((htmlColor == null) || (htmlColor.Length == 0))
                return c;

            // #RRGGBB or #RGB
            if ((htmlColor[0] == '#') &&
                ((htmlColor.Length == 7) || (htmlColor.Length == 4)))
            {
                if (htmlColor.Length == 7)
                {
                    c = Color.FromArgb(Convert.ToInt32(htmlColor.Substring(1, 2), 16),
                                       Convert.ToInt32(htmlColor.Substring(3, 2), 16),
                                       Convert.ToInt32(htmlColor.Substring(5, 2), 16));
                }
                else
                {
                    // #RGB shorthand: each digit is doubled (e.g. #f0c -> #ff00cc).
                    string r = Char.ToString(htmlColor[1]);
                    string g = Char.ToString(htmlColor[2]);
                    string b = Char.ToString(htmlColor[3]);

                    c = Color.FromArgb(Convert.ToInt32(r + r, 16),
                                       Convert.ToInt32(g + g, 16),
                                       Convert.ToInt32(b + b, 16));
                }
            }

            // special case. Html requires LightGrey, but .NET uses LightGray
            if (c.IsEmpty && String.Equals(htmlColor, "LightGrey", StringComparison.OrdinalIgnoreCase))
            {
                c = Color.LightGray;
            }

            // System color
            if (c.IsEmpty)
            {
                if (s_htmlSysColorTable == null)
                {
                    InitializeHtmlSysColorTable();
                }

                object o = s_htmlSysColorTable[htmlColor.ToLower(CultureInfo.InvariantCulture)];
                if (o != null)
                {
                    c = (Color)o;
                }
            }

            // resort to type converter which will handle named colors
            if (c.IsEmpty)
            {
                try
                {
                    c = ColorConverterCommon.ConvertFromString(htmlColor, CultureInfo.CurrentCulture);
                }
                catch (Exception ex)
                {
                    throw new ArgumentException(ex.Message, nameof(htmlColor), ex);
                }
            }

            return c;
        }

        /// <summary>
        /// Translates the specified <see cref='Color'/> to an Html string color
        /// representation: a system color name, a named color, or #RRGGBB.
        /// </summary>
        public static string ToHtml(Color c)
        {
            string colorString = String.Empty;

            if (c.IsEmpty)
                return colorString;

            if (ColorUtil.IsSystemColor(c))
            {
                switch (c.ToKnownColor())
                {
                    case KnownColor.ActiveBorder: colorString = "activeborder"; break;
                    case KnownColor.GradientActiveCaption:
                    case KnownColor.ActiveCaption: colorString = "activecaption"; break;
                    case KnownColor.AppWorkspace: colorString = "appworkspace"; break;
                    case KnownColor.Desktop: colorString = "background"; break;
                    case KnownColor.Control: colorString = "buttonface"; break;
                    case KnownColor.ControlLight: colorString = "buttonface"; break;
                    case KnownColor.ControlDark: colorString = "buttonshadow"; break;
                    case KnownColor.ControlText: colorString = "buttontext"; break;
                    case KnownColor.ActiveCaptionText: colorString = "captiontext"; break;
                    case KnownColor.GrayText: colorString = "graytext"; break;
                    case KnownColor.HotTrack:
                    case KnownColor.Highlight: colorString = "highlight"; break;
                    case KnownColor.MenuHighlight:
                    case KnownColor.HighlightText: colorString = "highlighttext"; break;
                    case KnownColor.InactiveBorder: colorString = "inactiveborder"; break;
                    case KnownColor.GradientInactiveCaption:
                    case KnownColor.InactiveCaption: colorString = "inactivecaption"; break;
                    case KnownColor.InactiveCaptionText: colorString = "inactivecaptiontext"; break;
                    case KnownColor.Info: colorString = "infobackground"; break;
                    case KnownColor.InfoText: colorString = "infotext"; break;
                    case KnownColor.MenuBar:
                    case KnownColor.Menu: colorString = "menu"; break;
                    case KnownColor.MenuText: colorString = "menutext"; break;
                    case KnownColor.ScrollBar: colorString = "scrollbar"; break;
                    case KnownColor.ControlDarkDark: colorString = "threeddarkshadow"; break;
                    case KnownColor.ControlLightLight: colorString = "buttonhighlight"; break;
                    case KnownColor.Window: colorString = "window"; break;
                    case KnownColor.WindowFrame: colorString = "windowframe"; break;
                    case KnownColor.WindowText: colorString = "windowtext"; break;
                }
            }
            else if (c.IsNamedColor)
            {
                if (c == Color.LightGray)
                {
                    // special case due to mismatch between Html and enum spelling
                    colorString = "LightGrey";
                }
                else
                {
                    colorString = c.Name;
                }
            }
            else
            {
                colorString = "#" + c.R.ToString("X2", null) +
                                    c.G.ToString("X2", null) +
                                    c.B.ToString("X2", null);
            }

            return colorString;
        }

        /// <summary>
        /// Builds the HTML-system-color-name lookup table.
        /// </summary>
        private static void InitializeHtmlSysColorTable()
        {
            // Populate a local table first and publish it with one reference
            // assignment. The previous version assigned s_htmlSysColorTable
            // before filling it, so a concurrent FromHtml call could read a
            // partially populated table.
            Hashtable table = new Hashtable(26);
            table["activeborder"] = ColorUtil.FromKnownColor(KnownColor.ActiveBorder);
            table["activecaption"] = ColorUtil.FromKnownColor(KnownColor.ActiveCaption);
            table["appworkspace"] = ColorUtil.FromKnownColor(KnownColor.AppWorkspace);
            table["background"] = ColorUtil.FromKnownColor(KnownColor.Desktop);
            table["buttonface"] = ColorUtil.FromKnownColor(KnownColor.Control);
            table["buttonhighlight"] = ColorUtil.FromKnownColor(KnownColor.ControlLightLight);
            table["buttonshadow"] = ColorUtil.FromKnownColor(KnownColor.ControlDark);
            table["buttontext"] = ColorUtil.FromKnownColor(KnownColor.ControlText);
            table["captiontext"] = ColorUtil.FromKnownColor(KnownColor.ActiveCaptionText);
            table["graytext"] = ColorUtil.FromKnownColor(KnownColor.GrayText);
            table["highlight"] = ColorUtil.FromKnownColor(KnownColor.Highlight);
            table["highlighttext"] = ColorUtil.FromKnownColor(KnownColor.HighlightText);
            table["inactiveborder"] = ColorUtil.FromKnownColor(KnownColor.InactiveBorder);
            table["inactivecaption"] = ColorUtil.FromKnownColor(KnownColor.InactiveCaption);
            table["inactivecaptiontext"] = ColorUtil.FromKnownColor(KnownColor.InactiveCaptionText);
            table["infobackground"] = ColorUtil.FromKnownColor(KnownColor.Info);
            table["infotext"] = ColorUtil.FromKnownColor(KnownColor.InfoText);
            table["menu"] = ColorUtil.FromKnownColor(KnownColor.Menu);
            table["menutext"] = ColorUtil.FromKnownColor(KnownColor.MenuText);
            table["scrollbar"] = ColorUtil.FromKnownColor(KnownColor.ScrollBar);
            table["threeddarkshadow"] = ColorUtil.FromKnownColor(KnownColor.ControlDarkDark);
            table["threedface"] = ColorUtil.FromKnownColor(KnownColor.Control);
            table["threedhighlight"] = ColorUtil.FromKnownColor(KnownColor.ControlLight);
            table["threedlightshadow"] = ColorUtil.FromKnownColor(KnownColor.ControlLightLight);
            table["window"] = ColorUtil.FromKnownColor(KnownColor.Window);
            table["windowframe"] = ColorUtil.FromKnownColor(KnownColor.WindowFrame);
            table["windowtext"] = ColorUtil.FromKnownColor(KnownColor.WindowText);
            s_htmlSysColorTable = table;
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel;
using System.Linq;
using CoBuilder.Service.Interfaces;

namespace CoBuilder.Service.Infrastructure.Config
{
    /// <summary>
    /// A generic dictionary that raises <see cref="INotifyCollectionChanged"/>
    /// and <see cref="INotifyPropertyChanged"/> notifications whenever its
    /// contents change, making it suitable as a data-binding source.
    /// </summary>
    public class ObservableDictionary<TKey, TValue> : IObservableDictionary<TKey, TValue>
    {
        #region Internals

        // Data-binding convention for "the indexer changed".
        private const string IndexerName = "Item[]";

        // Backing store. readonly: only assigned in the constructors, so any
        // custom IEqualityComparer supplied there is never silently discarded.
        private readonly IDictionary<TKey, TValue> _dictionary;

        protected IDictionary<TKey, TValue> Dictionary
        {
            get { return _dictionary; }
        }

        #endregion

        #region Constructors

        public ObservableDictionary()
        {
            _dictionary = new Dictionary<TKey, TValue>();
        }

        public ObservableDictionary(IDictionary<TKey, TValue> dictionary)
        {
            _dictionary = new Dictionary<TKey, TValue>(dictionary);
        }

        public ObservableDictionary(IEqualityComparer<TKey> comparer)
        {
            _dictionary = new Dictionary<TKey, TValue>(comparer);
        }

        public ObservableDictionary(int capacity)
        {
            _dictionary = new Dictionary<TKey, TValue>(capacity);
        }

        public ObservableDictionary(IDictionary<TKey, TValue> dictionary, IEqualityComparer<TKey> comparer)
        {
            _dictionary = new Dictionary<TKey, TValue>(dictionary, comparer);
        }

        public ObservableDictionary(int capacity, IEqualityComparer<TKey> comparer)
        {
            _dictionary = new Dictionary<TKey, TValue>(capacity, comparer);
        }

        #endregion

        #region IDictionary<Tkey,TValue> Members

        /// <summary>Adds a key/value pair; throws if the key already exists.</summary>
        public void Add(TKey key, TValue value)
        {
            Insert(key, value, true);
        }

        public bool ContainsKey(TKey key)
        {
            return Dictionary.ContainsKey(key);
        }

        public ICollection<TKey> Keys
        {
            get { return Dictionary.Keys; }
        }

        /// <summary>
        /// Removes the entry with the given key, raising a Remove notification
        /// (carrying the removed pair) when something was actually removed.
        /// </summary>
        public bool Remove(TKey key)
        {
            if (key == null) throw new ArgumentNullException(nameof(key));

            TValue value;
            Dictionary.TryGetValue(key, out value);
            var removed = Dictionary.Remove(key);
            if (removed)
            {
                OnCollectionChanged(NotifyCollectionChangedAction.Remove, new KeyValuePair<TKey, TValue>(key, value));
            }

            return removed;
        }

        public bool TryGetValue(TKey key, out TValue value)
        {
            return Dictionary.TryGetValue(key, out value);
        }

        public ICollection<TValue> Values
        {
            get { return Dictionary.Values; }
        }

        /// <summary>Gets or sets a value; setting raises Add or Replace notifications.</summary>
        public TValue this[TKey key]
        {
            get { return Dictionary[key]; }
            set { Insert(key, value, false); }
        }

        #endregion

        #region ICollection<KeyValuePair<TKey,TValue>> Members

        public void Add(KeyValuePair<TKey, TValue> item)
        {
            Insert(item.Key, item.Value, true);
        }

        public void Clear()
        {
            if (Dictionary.Count > 0)
            {
                Dictionary.Clear();
                OnCollectionChanged();
            }
        }

        public bool Contains(KeyValuePair<TKey, TValue> item)
        {
            return Dictionary.Contains(item);
        }

        public void CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
        {
            Dictionary.CopyTo(array, arrayIndex);
        }

        public int Count
        {
            get { return Dictionary.Count; }
        }

        public bool IsReadOnly
        {
            get { return Dictionary.IsReadOnly; }
        }

        /// <summary>
        /// Removes the pair only when both the key AND the value match, per the
        /// ICollection&lt;T&gt;.Remove contract. (Previously this removed by key
        /// alone, which could delete an entry whose value differed.)
        /// </summary>
        public bool Remove(KeyValuePair<TKey, TValue> item)
        {
            if (!Dictionary.Contains(item)) return false;
            return Remove(item.Key);
        }

        #endregion

        #region IEnumerable<KeyValuePair<Tkey,TValue> Members

        public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
        {
            return Dictionary.GetEnumerator();
        }

        #endregion

        #region IEnumerable Members

        IEnumerator IEnumerable.GetEnumerator()
        {
            return ((IEnumerable)Dictionary).GetEnumerator();
        }

        #endregion

        #region INotifyCollectionChanged Members

        public event NotifyCollectionChangedEventHandler CollectionChanged;

        #endregion

        #region INotifyPropertyChanged Members

        public event PropertyChangedEventHandler PropertyChanged;

        #endregion

        #region Dictionary Members

        /// <summary>
        /// Adds all pairs from <paramref name="items"/>, then raises a single
        /// Add notification carrying the whole batch. Throws before mutating
        /// anything if any incoming key already exists.
        /// </summary>
        public void AddRange(IDictionary<TKey, TValue> items)
        {
            if (items == null) throw new ArgumentNullException(nameof(items));
            if (items.Count <= 0) return;

            if (items.Keys.Any(k => Dictionary.ContainsKey(k)))
            {
                throw new ArgumentException("An item with the same key has already been added.");
            }

            // Always add into the existing backing store. (Previously an empty
            // dictionary was replaced wholesale, silently discarding any custom
            // comparer supplied at construction time.)
            foreach (var item in items) Dictionary.Add(item);

            OnCollectionChanged(NotifyCollectionChangedAction.Add, items.ToArray());
        }

        #endregion

        #region Private/Protected Methods

        // Shared implementation for Add and the indexer setter. 'add' selects
        // strict Add semantics (duplicate key -> throw) vs upsert semantics.
        private void Insert(TKey key, TValue value, bool add)
        {
            if (key == null) throw new ArgumentNullException(nameof(key));

            TValue item;
            if (Dictionary.TryGetValue(key, out item))
            {
                // Fixed: report the duplicate-key error against 'key', not 'value'.
                if (add) throw new ArgumentException("An item with the same key has already been added.", nameof(key));
                if (Equals(item, value)) return;
                Dictionary[key] = value;

                OnCollectionChanged(NotifyCollectionChangedAction.Replace,
                    new KeyValuePair<TKey, TValue>(key, value),
                    new KeyValuePair<TKey, TValue>(key, item));
            }
            else
            {
                Dictionary[key] = value;

                OnCollectionChanged(NotifyCollectionChangedAction.Add, new KeyValuePair<TKey, TValue>(key, value));
            }
        }

        // Raises PropertyChanged for every property a mutation can affect.
        private void OnPropertyChanged()
        {
            OnPropertyChanged(nameof(Count));
            OnPropertyChanged(IndexerName);
            OnPropertyChanged(nameof(Keys));
            OnPropertyChanged(nameof(Values));
        }

        protected virtual void OnPropertyChanged(string propertyName)
        {
            PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
        }

        private void OnCollectionChanged()
        {
            OnPropertyChanged();
            CollectionChanged?.Invoke(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Reset));
        }

        private void OnCollectionChanged(NotifyCollectionChangedAction action, KeyValuePair<TKey, TValue> changedItem)
        {
            OnPropertyChanged();
            CollectionChanged?.Invoke(this, new NotifyCollectionChangedEventArgs(action, changedItem));
        }

        private void OnCollectionChanged(NotifyCollectionChangedAction action, KeyValuePair<TKey, TValue> newItem, KeyValuePair<TKey, TValue> oldItem)
        {
            OnPropertyChanged();
            CollectionChanged?.Invoke(this, new NotifyCollectionChangedEventArgs(action, newItem, oldItem));
        }

        private void OnCollectionChanged(NotifyCollectionChangedAction action, IList newItems)
        {
            OnPropertyChanged();
            CollectionChanged?.Invoke(this, new NotifyCollectionChangedEventArgs(action, newItems));
        }

        #endregion
    }
}
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *********************************************************************/ using System; using System.Collections.Generic; using System.Text; using Axiom.MathLib; using System.Diagnostics; namespace Axiom.SceneManagers.Multiverse { /// <summary> /// This class holds all of the SubPageHeightMaps for a page. 
/// </summary> public class PageHeightMap { private SubPageHeightMap[] subPages; private int subPagesPerPage; private int subPageSize; private int pageSize; private Vector3 location; private bool locationSet = false; public PageHeightMap(int subPagesPerPage, int pageSize, int maxMetersPerSample, int minMetersPerSample) { this.subPagesPerPage = subPagesPerPage; this.pageSize = pageSize; subPageSize = pageSize / subPagesPerPage; // allocate the array to hold the subpages subPages = new SubPageHeightMap[subPagesPerPage * subPagesPerPage]; // allocate all the subpages for ( int z = 0; z < subPagesPerPage; z++) { for (int x = 0; x < subPagesPerPage; x++) { subPages[x + z * subPagesPerPage] = new SubPageHeightMap(pageSize / subPagesPerPage, maxMetersPerSample, minMetersPerSample, x, z); } } } public SubPageHeightMap LookupSubPage(int x, int z) { return subPages[x + z * subPagesPerPage]; } public SubPageHeightMap LookupSubPage(Vector3 loc) { // make loc relative to page loc = loc - location; int xoff = (int)Math.Floor(loc.x / (((int)TerrainManager.oneMeter) * subPageSize)); int zoff = (int)Math.Floor(loc.z / (((int)TerrainManager.oneMeter) * subPageSize)); return subPages[xoff + zoff * subPagesPerPage]; } /// <summary> /// Get the height of the sample at the given coordinates within the page. /// Coordinates are in meters, and are relative to the page. 
/// </summary> /// <param name="x"></param> /// <param name="z"></param> /// <returns></returns> public float GetHeight(int x, int z) { int subPageMask = subPageSize - 1; // figure X and Z offsets of the subpage that contains the given point int subPageX = x / subPageSize; int subPageZ = z / subPageSize; SubPageHeightMap subPage = subPages[subPageX + subPageZ * subPagesPerPage]; return subPage.GetHeight(x & subPageMask, z & subPageMask); } public float GenHeight(int x, int z) { return TerrainManager.Instance.TerrainGenerator.GenerateHeightPointMM( new Vector3(location.x + x * TerrainManager.oneMeter, 0, location.z + z * TerrainManager.oneMeter)); } public Vector3 GetNormal(int x, int z) { int metersPerSample = TerrainManager.Instance.MetersPerSample(new Vector3(location.x + x * TerrainManager.oneMeter, 0, location.z + z * TerrainManager.oneMeter)); if (metersPerSample == 0) { metersPerSample = TerrainManager.Instance.MaxMetersPerSample; } float x1 = 0, x2 = 0, z1 = 0, z2 = 0; int x1off = x - metersPerSample; int x2off = x + metersPerSample; if (x1off < 0 || z >= pageSize) { x1 = GenHeight(x1off, z); } else { x1 = GetHeight(x1off, z); } if (x2off >= pageSize || z >= pageSize) { x2 = GenHeight(x2off, z); } else { x2 = GetHeight(x2off, z); } int z1off = z - metersPerSample; int z2off = z + metersPerSample; if (z1off < 0 || x >= pageSize) { z1 = GenHeight(x, z1off); } else { z1 = GetHeight(x, z1off); } if (z2off >= pageSize || x >= pageSize) { z2 = GenHeight(x, z2off); } else { z2 = GetHeight(x, z2off); } float unitsPerSample = metersPerSample * TerrainManager.oneMeter; // computer the normal Vector3 v = new Vector3(x1 - x2, 2.0f * unitsPerSample, z1 - z2); v.Normalize(); return v; } public float GetAreaHeight(float fx1, float fx2, float fz1, float fz2) { float height = float.MinValue; // make page relative and convert to samples int x1 = (int)Math.Floor((fx1 - location.x)/TerrainManager.oneMeter); int x2 = (int)Math.Floor((fx2 - location.x)/TerrainManager.oneMeter); 
int z1 = (int)Math.Floor((fz1 - location.z)/TerrainManager.oneMeter); int z2 = (int)Math.Floor((fz2 - location.z)/TerrainManager.oneMeter); // clip to the page if (x1 < 0) { x1 = 0; } if (x2 > ( pageSize - 1 )) { x2 = pageSize - 1; } if (z1 < 0) { z1 = 0; } if (z2 > ( pageSize - 1 )) { z2 = pageSize - 1; } // compute which sub pages we need to check int startSubPageX = x1 / subPageSize; int endSubPageX = x2 / subPageSize; int startSubPageZ = z1 / subPageSize; int endSubPageZ = z2 / subPageSize; for (int subPageZ = startSubPageZ; subPageZ <= endSubPageZ; subPageZ++) { int subPageLocZ = subPageZ * subPageSize; for (int subPageX = startSubPageX; subPageX <= endSubPageX; subPageX++) { int subPageLocX = subPageX * subPageSize; // compute area bounds relative to sub page origin int subPageX1 = x1 - subPageLocX; int subPageX2 = x2 - subPageLocX; int subPageZ1 = z1 - subPageLocZ; int subPageZ2 = z2 - subPageLocZ; // now clip to sub page if (subPageX1 < 0) { subPageX1 = 0; } if (subPageX2 > ( subPageSize - 1 )) { subPageX2 = subPageSize - 1; } if (subPageZ1 < 0) { subPageZ1 = 0; } if (subPageZ2 > (subPageSize - 1)) { subPageZ2 = subPageSize - 1; } // call the subpage with the clipped bounds to get the area height float subPageHeight = subPages[subPageX + subPageZ * subPagesPerPage].GetAreaHeight(subPageX1, subPageX2, subPageZ1, subPageZ2); if (subPageHeight > height) { height = subPageHeight; } } } return height; } public void SetPatchLOD(int startX, int startZ, int size, int metersPerSample) { int xoff = startX / subPageSize; int zoff = startZ / subPageSize; int numSubPages = size / subPageSize; for (int z = 0; z < numSubPages; z++) { for (int x = 0; x < numSubPages; x++) { subPages[x + xoff + (z + zoff) * subPagesPerPage].MetersPerSample = metersPerSample; } } } public void GetSubPageHeightBounds(int xOff, int zOff, int xSize, int zSize, out float minHeight, out float maxHeight) { // convert coords/sizes to number of subpages xOff = xOff / subPageSize; zOff = zOff / 
subPageSize; xSize = xSize / subPageSize; zSize = zSize / subPageSize; minHeight = float.MaxValue; maxHeight = float.MinValue; for (int z = zOff; z < (zOff + zSize); z++) { for (int x = xOff; x < (xOff + xSize); x++) { SubPageHeightMap subPage = subPages[x + z * subPagesPerPage]; if (subPage.MaxHeight > maxHeight) { maxHeight = subPage.MaxHeight; } if (subPage.MinHeight < minHeight) { minHeight = subPage.MinHeight; } } } } /// <summary> /// Set the location of all the sub pages in this page /// /// Location is in world coordinate space (1000 samples per meter) /// </summary> public Vector3 Location { get { return location; } set { if (!locationSet || (location != value)) { locationSet = true; location = value; int offset = 0; float subPageWorldSize = subPageSize * TerrainManager.oneMeter; float locz = value.z; for (int z = 0; z < subPagesPerPage; z++) { float locx = value.x; for (int x = 0; x < subPagesPerPage; x++) { subPages[offset].Location = new Vector3(locx, 0, locz); locx += subPageWorldSize; offset++; } locz += subPageWorldSize; } } } } public void ResetHeightMaps() { foreach (SubPageHeightMap subPage in subPages) { subPage.ResetHeightMaps(); } } } }
using System; using System.Collections.Generic; using Platform; namespace Platform.VirtualFileSystem.Providers { /// <summary> /// Wraps an <i>INode</i> and emulated delegation to redirect operations /// to the parent (wrapped) node (also known as the wrappee or target). /// </summary> /// <remarks> /// <para> /// Implementation redirection is performed using <c>delegation</c>. /// Full delegation is not possible but is emulated in the following ways: /// </para> /// <para> /// All <c>Resolve</c> calls to the <see cref="NodeDelegationWrapper.Resolver"/>. /// The default <see cref="NodeDelegationWrapper.Resolver"/> delegates all /// <c>Resolve</c> calls to /// <see cref="NodeDelegationWrapper.Resolve(string, NodeType, AddressScope)"/>. /// This allows subclasses of this class to simply override /// <see cref="NodeDelegationWrapper.Resolve(string, NodeType, AddressScope)"/> /// in order to delegate the behaviour of all the <c>Resolve</c> methods. /// </para> /// <para> /// The <see cref="NodeDelegationWrapper.ToString()"/> method is override to /// return <c>NodeDelegationWrapper.Address.Tostring()</c>. The default /// implementaion of <see cref="NodeDelegationWrapper.Address"/> returns the /// wrappee's <c>Address</c> so the default delegated behaviour for /// <see cref="NodeDelegationWrapper.ToString()"/> is the same as /// undelegated behaviour. Subclasses that override /// <see cref="NodeDelegationWrapper.Address"/> will automatically get the correct /// <see cref="NodeDelegationWrapper.ToString()"/> implementation based on /// the override <see cref="NodeDelegationWrapper.Address"/>. /// </para> /// <para> /// All methods intended to return the current node will not return /// the wrappee but will return the <see cref="NodeDelegationWrapper"/>. /// </para> /// <para> /// All methods (i.e. 
children or parent) returning new nodes (known as
/// adapter-candidate nodes) will return a node, optionally converted
/// (adapted) by the supplied
/// <see cref="NodeDelegationWrapper.NodeAdapter"/>. The default
/// <c>NodeAdapter</c> returns the node unadapted.
/// </para>
/// <para>
/// References:
/// http://javalab.cs.uni-bonn.de/research/darwin/delegation.html
/// </para>
/// </remarks>
public abstract class NodeDelegationWrapper
	: NodeConsultationWrapper
{
	/// <summary>
	/// The resolver used when no explicit <see cref="INodeResolver"/> is
	/// supplied to the constructor.  Names are resolved relative to the
	/// wrapped node's address; for file nodes the parent directory's
	/// address is used as the base instead.
	/// </summary>
	internal class DefaultResolver
		: AbstractResolver
	{
		private readonly INode node;
		private INodeAddress parentAddress;

		public DefaultResolver(NodeDelegationWrapper node)
		{
			this.node = node;
		}

		public override INode Resolve(string name, NodeType nodeType, AddressScope scope)
		{
			// Lazily capture the base address on first use.
			// NOTE(review): this lazy initialisation is not thread-safe,
			// although recomputing the same value twice is harmless.
			if (this.parentAddress == null)
			{
				if (this.node.NodeType == NodeType.File)
				{
					this.parentAddress = this.node.Address.Parent;
				}
				else
				{
					this.parentAddress = this.node.Address;
				}
			}

			return this.node.FileSystem.Resolve(this.parentAddress.ResolveAddress(name).AbsolutePath, nodeType, scope);
		}
	}

	/// <summary>
	/// The adapter applied to adapter-candidate nodes returned by the
	/// <c>Resolve</c> overloads, <see cref="ParentDirectory"/> and
	/// <see cref="OperationTargetDirectory"/>.
	/// </summary>
	protected virtual Converter<INode, INode> NodeAdapter { get; set; }

	/// <summary>
	/// The resolver to which all <c>Resolve</c> calls are delegated.
	/// </summary>
	protected virtual INodeResolver NodeResolver { get; set; }

	/// <summary>
	/// Construct a new <see cref="NodeDelegationWrapper"/> that uses a
	/// <see cref="DefaultResolver"/> and performs no node adaptation.
	/// </summary>
	/// <param name="innerNode">The <see cref="INode"/> to delegate to.</param>
	protected NodeDelegationWrapper(INode innerNode)
		: this(innerNode, null, ConverterUtils<INode, INode>.NoConvert)
	{
	}

	/// <summary>
	/// Construct a new <see cref="NodeDelegationWrapper"/>.
	/// </summary>
	/// <param name="innerNode">
	/// The <see cref="INode"/> to delegate to.
	/// </param>
	/// <param name="resolver">
	/// The resolver used to delegate <c>Resolve</c> calls; when <c>null</c>
	/// a <see cref="DefaultResolver"/> is used.
	/// </param>
	/// <param name="nodeAdapter">
	/// The adapter that will adapt adapter-candidate nodes returned by the
	/// <c>Resolve</c> and <c>ParentDirectory</c> members.
	/// </param>
	protected NodeDelegationWrapper(INode innerNode, INodeResolver resolver, Converter<INode, INode> nodeAdapter)
		: base(innerNode)
	{
		// BUGFIX: the original branched on NodeType here, but both branches
		// constructed an identical DefaultResolver; the redundant
		// conditional has been removed.
		if (resolver == null)
		{
			resolver = new DefaultResolver(this);
		}

		this.NodeResolver = resolver;
		this.NodeAdapter = nodeAdapter;
	}

	/// <summary>
	/// Overrides and returns the current node delegater's name.
	/// </summary>
	/// <remarks>
	/// This property delegates to <c>this.Address.Name</c>.
	/// </remarks>
	public override string Name
	{
		get
		{
			return this.Address.Name;
		}
	}

	/// <summary>
	/// Overrides and returns the current node's parent directory,
	/// optionally adapting the return result using the current
	/// object's <see cref="NodeAdapter"/>.
	/// </summary>
	public override IDirectory ParentDirectory
	{
		get
		{
			return (IDirectory)NodeAdapter(this.Wrappee.ParentDirectory);
		}
	}

	/// <summary>
	/// Resolves a file using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override IFile ResolveFile(string name)
	{
		return this.NodeResolver.ResolveFile(name);
	}

	/// <summary>
	/// Resolves a directory using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override IDirectory ResolveDirectory(string name)
	{
		return this.NodeResolver.ResolveDirectory(name);
	}

	/// <summary>
	/// Resolves a file using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override IFile ResolveFile(string name, AddressScope scope)
	{
		return this.NodeResolver.ResolveFile(name, scope);
	}

	/// <summary>
	/// Resolves a directory using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override IDirectory ResolveDirectory(string name, AddressScope scope)
	{
		return this.NodeResolver.ResolveDirectory(name, scope);
	}

	/// <summary>
	/// Resolves a node using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override INode Resolve(string name)
	{
		return this.NodeResolver.Resolve(name);
	}

	/// <summary>
	/// Resolves a node using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override INode Resolve(string name, AddressScope scope)
	{
		return this.NodeResolver.Resolve(name, scope);
	}

	/// <summary>
	/// Resolves a node using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override INode Resolve(string name, NodeType nodeType)
	{
		return this.NodeResolver.Resolve(name, nodeType);
	}

	/// <summary>
	/// Resolves a node using the current object's <see cref="NodeResolver"/>.
	/// </summary>
	public override INode Resolve(string name, NodeType nodeType, AddressScope scope)
	{
		return this.NodeResolver.Resolve(name, nodeType, scope);
	}

	/// <summary>
	/// Overrides and returns the current node's
	/// <see cref="INode.OperationTargetDirectory"/>, adapting the return
	/// result using the current object's <see cref="NodeAdapter"/>.
	/// </summary>
	public override IDirectory OperationTargetDirectory
	{
		get
		{
			return (IDirectory)NodeAdapter(this.Wrappee.OperationTargetDirectory);
		}
	}

	/// <summary>
	/// Simulates delegation by comparing the delegator's (this)
	/// <c>INodeAddress</c> and <c>NodeType</c>.
	/// </summary>
	public override bool Equals(object obj)
	{
		if (obj == this)
		{
			return true;
		}

		var node = obj as INode;

		// BUGFIX: the original null-checked obj rather than the result of
		// the "as" cast, so comparing against a non-null object that is not
		// an INode threw a NullReferenceException instead of returning false.
		if (node == null)
		{
			return false;
		}

		return this.NodeType.Equals(node.NodeType) && this.Address.Equals(node.Address);
	}

	/// <summary>
	/// Gets the hashcode based on the delegator's (this) <c>INodeAddress</c>.
	/// </summary>
	public override int GetHashCode()
	{
		return this.Address.GetHashCode();
	}

	/// <summary>
	/// Returns a string representation of the object using the delegator's
	/// (this) <c>INodeAddress</c>.
	/// </summary>
	public override string ToString()
	{
		return this.Address.ToString();
	}

	/// <summary>
	/// Orders nodes by <see cref="Name"/>, ignoring case.
	/// </summary>
	public override int CompareTo(INode other)
	{
		return System.String.Compare(this.Name, other.Name, StringComparison.OrdinalIgnoreCase);
	}

	/// <summary>
	/// Verifies that the current security context may perform the given
	/// operation on this node.
	/// </summary>
	/// <exception cref="FileSystemSecurityException">
	/// Thrown when the current security context denies access.
	/// </exception>
	public override void CheckAccess(FileSystemSecuredOperation operation)
	{
		if (!this.FileSystem.SecurityManager.CurrentContext.HasAccess(new AccessVerificationContext(this, operation)))
		{
			throw new FileSystemSecurityException(this.Address);
		}
	}

	/// <summary>
	/// Returns the inner node's operation target for the given directory,
	/// adapted by the current object's <see cref="NodeAdapter"/>.
	/// </summary>
	public override INode GetDirectoryOperationTargetNode(IDirectory directory)
	{
		return NodeAdapter(this.Wrappee.GetDirectoryOperationTargetNode(directory));
	}
}
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Runtime.Remoting.Lifetime;
using System.Threading;
using System.Reflection;
using System.Collections;
using System.Collections.Generic;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.ScriptEngine.Interfaces;
using OpenSim.Region.ScriptEngine.Shared.Api.Interfaces;

using integer = OpenSim.Region.ScriptEngine.Shared.LSL_Types.LSLInteger;
using vector = OpenSim.Region.ScriptEngine.Shared.LSL_Types.Vector3;
using rotation = OpenSim.Region.ScriptEngine.Shared.LSL_Types.Quaternion;
using key = OpenSim.Region.ScriptEngine.Shared.LSL_Types.LSLString;
using LSL_List = OpenSim.Region.ScriptEngine.Shared.LSL_Types.list;
using LSL_String = OpenSim.Region.ScriptEngine.Shared.LSL_Types.LSLString;
using LSL_Key = OpenSim.Region.ScriptEngine.Shared.LSL_Types.LSLString;
using LSL_Float = OpenSim.Region.ScriptEngine.Shared.LSL_Types.LSLFloat;
using LSL_Integer = OpenSim.Region.ScriptEngine.Shared.LSL_Types.LSLInteger;

namespace OpenSim.Region.ScriptEngine.Shared.ScriptBase
{
    /// <summary>
    /// Script-facing OSSL surface: every member is a thin wrapper that
    /// forwards to the engine-supplied <see cref="IOSSL_Api"/> implementation
    /// held in <see cref="m_OSSL_Functions"/>.
    /// </summary>
    public partial class ScriptBaseClass : MarshalByRefObject
    {
        // Engine-provided OSSL implementation; assigned by ApiTypeOSSL().
        public IOSSL_Api m_OSSL_Functions;

        public void ApiTypeOSSL(IScriptApi api)
        {
            // Only accept an OSSL implementation; anything else is ignored.
            if (api is IOSSL_Api osslApi)
            {
                m_OSSL_Functions = osslApi;
                Prim = new OSSLPrim(this);
            }
        }

        // ---- Region / sun / wind ----

        public void osSetRegionWaterHeight(double height)
            => m_OSSL_Functions.osSetRegionWaterHeight(height);

        public void osSetRegionSunSettings(bool useEstateSun, bool sunFixed, double sunHour)
            => m_OSSL_Functions.osSetRegionSunSettings(useEstateSun, sunFixed, sunHour);

        public void osSetEstateSunSettings(bool sunFixed, double sunHour)
            => m_OSSL_Functions.osSetEstateSunSettings(sunFixed, sunHour);

        public double osGetCurrentSunHour()
            => m_OSSL_Functions.osGetCurrentSunHour();

        public double osGetSunParam(string param)
            => m_OSSL_Functions.osGetSunParam(param);

        // Deprecated
        public double osSunGetParam(string param)
            => m_OSSL_Functions.osSunGetParam(param);

        public void osSetSunParam(string param, double value)
            => m_OSSL_Functions.osSetSunParam(param, value);

        // Deprecated
        public void osSunSetParam(string param, double value)
            => m_OSSL_Functions.osSunSetParam(param, value);

        public string osWindActiveModelPluginName()
            => m_OSSL_Functions.osWindActiveModelPluginName();

        public void osSetWindParam(string plugin, string param, LSL_Float value)
            => m_OSSL_Functions.osSetWindParam(plugin, param, value);

        public LSL_Float osGetWindParam(string plugin, string param)
            => m_OSSL_Functions.osGetWindParam(plugin, param);

        // ---- Parcels ----

        public void osParcelJoin(vector pos1, vector pos2)
            => m_OSSL_Functions.osParcelJoin(pos1, pos2);

        public void osParcelSubdivide(vector pos1, vector pos2)
            => m_OSSL_Functions.osParcelSubdivide(pos1, pos2);

        public void osSetParcelDetails(vector pos, LSL_List rules)
            => m_OSSL_Functions.osSetParcelDetails(pos, rules);

        // Deprecated
        public void osParcelSetDetails(vector pos, LSL_List rules)
            => m_OSSL_Functions.osParcelSetDetails(pos, rules);

        public double osList2Double(LSL_Types.list src, int index)
            => m_OSSL_Functions.osList2Double(src, index);

        // ---- Dynamic textures ----

        public string osSetDynamicTextureURL(string dynamicID, string contentType, string url, string extraParams, int timer)
            => m_OSSL_Functions.osSetDynamicTextureURL(dynamicID, contentType, url, extraParams, timer);

        public string osSetDynamicTextureData(string dynamicID, string contentType, string data, string extraParams, int timer)
            => m_OSSL_Functions.osSetDynamicTextureData(dynamicID, contentType, data, extraParams, timer);

        public string osSetDynamicTextureURLBlend(string dynamicID, string contentType, string url, string extraParams, int timer, int alpha)
            => m_OSSL_Functions.osSetDynamicTextureURLBlend(dynamicID, contentType, url, extraParams, timer, alpha);

        public string osSetDynamicTextureDataBlend(string dynamicID, string contentType, string data, string extraParams, int timer, int alpha)
            => m_OSSL_Functions.osSetDynamicTextureDataBlend(dynamicID, contentType, data, extraParams, timer, alpha);

        public string osSetDynamicTextureURLBlendFace(string dynamicID, string contentType, string url, string extraParams, bool blend, int disp, int timer, int alpha, int face)
            => m_OSSL_Functions.osSetDynamicTextureURLBlendFace(dynamicID, contentType, url, extraParams, blend, disp, timer, alpha, face);

        public string osSetDynamicTextureDataBlendFace(string dynamicID, string contentType, string data, string extraParams, bool blend, int disp, int timer, int alpha, int face)
            => m_OSSL_Functions.osSetDynamicTextureDataBlendFace(dynamicID, contentType, data, extraParams, blend, disp, timer, alpha, face);

        // ---- Terrain ----

        public LSL_Float osGetTerrainHeight(int x, int y)
            => m_OSSL_Functions.osGetTerrainHeight(x, y);

        // Deprecated
        public LSL_Float osTerrainGetHeight(int x, int y)
            => m_OSSL_Functions.osTerrainGetHeight(x, y);

        public LSL_Integer osSetTerrainHeight(int x, int y, double val)
            => m_OSSL_Functions.osSetTerrainHeight(x, y, val);

        // Deprecated
        public LSL_Integer osTerrainSetHeight(int x, int y, double val)
            => m_OSSL_Functions.osTerrainSetHeight(x, y, val);

        public void osTerrainFlush()
            => m_OSSL_Functions.osTerrainFlush();

        // ---- Region management ----

        public int osRegionRestart(double seconds)
            => m_OSSL_Functions.osRegionRestart(seconds);

        public void osRegionNotice(string msg)
            => m_OSSL_Functions.osRegionNotice(msg);

        public bool osConsoleCommand(string Command)
            => m_OSSL_Functions.osConsoleCommand(Command);

        public void osSetParcelMediaURL(string url)
            => m_OSSL_Functions.osSetParcelMediaURL(url);

        public void osSetParcelSIPAddress(string SIPAddress)
            => m_OSSL_Functions.osSetParcelSIPAddress(SIPAddress);

        public void osSetPrimFloatOnWater(int floatYN)
            => m_OSSL_Functions.osSetPrimFloatOnWater(floatYN);

        // ---- Teleport functions ----

        public void osTeleportAgent(string agent, string regionName, vector position, vector lookat)
            => m_OSSL_Functions.osTeleportAgent(agent, regionName, position, lookat);

        public void osTeleportAgent(string agent, int regionX, int regionY, vector position, vector lookat)
            => m_OSSL_Functions.osTeleportAgent(agent, regionX, regionY, position, lookat);

        public void osTeleportAgent(string agent, vector position, vector lookat)
            => m_OSSL_Functions.osTeleportAgent(agent, position, lookat);

        public void osTeleportOwner(string regionName, vector position, vector lookat)
            => m_OSSL_Functions.osTeleportOwner(regionName, position, lookat);

        public void osTeleportOwner(int regionX, int regionY, vector position, vector lookat)
            => m_OSSL_Functions.osTeleportOwner(regionX, regionY, position, lookat);

        public void osTeleportOwner(vector position, vector lookat)
            => m_OSSL_Functions.osTeleportOwner(position, lookat);

        // ---- Avatar info functions ----

        public string osGetAgentIP(string agent)
            => m_OSSL_Functions.osGetAgentIP(agent);

        public LSL_List osGetAgents()
            => m_OSSL_Functions.osGetAgents();

        // ---- Animation functions ----

        public void osAvatarPlayAnimation(string avatar, string animation)
            => m_OSSL_Functions.osAvatarPlayAnimation(avatar, animation);

        public void osAvatarStopAnimation(string avatar, string animation)
            => m_OSSL_Functions.osAvatarStopAnimation(avatar, animation);

        #region Attachment commands

        public void osForceAttachToAvatar(int attachmentPoint)
            => m_OSSL_Functions.osForceAttachToAvatar(attachmentPoint);

        public void osForceAttachToAvatarFromInventory(string itemName, int attachmentPoint)
            => m_OSSL_Functions.osForceAttachToAvatarFromInventory(itemName, attachmentPoint);

        public void osForceAttachToOtherAvatarFromInventory(string rawAvatarId, string itemName, int attachmentPoint)
            => m_OSSL_Functions.osForceAttachToOtherAvatarFromInventory(rawAvatarId, itemName, attachmentPoint);

        public void osForceDetachFromAvatar()
            => m_OSSL_Functions.osForceDetachFromAvatar();

        public LSL_List osGetNumberOfAttachments(LSL_Key avatar, LSL_List attachmentPoints)
            => m_OSSL_Functions.osGetNumberOfAttachments(avatar, attachmentPoints);

        public void osMessageAttachments(LSL_Key avatar, string message, LSL_List attachmentPoints, int flags)
            => m_OSSL_Functions.osMessageAttachments(avatar, message, attachmentPoints, flags);

        #endregion

        // ---- Texture draw functions ----

        public string osMovePen(string drawList, int x, int y)
            => m_OSSL_Functions.osMovePen(drawList, x, y);

        public string osDrawLine(string drawList, int startX, int startY, int endX, int endY)
            => m_OSSL_Functions.osDrawLine(drawList, startX, startY, endX, endY);

        public string osDrawLine(string drawList, int endX, int endY)
            => m_OSSL_Functions.osDrawLine(drawList, endX, endY);

        public string osDrawText(string drawList, string text)
            => m_OSSL_Functions.osDrawText(drawList, text);

        public string osDrawEllipse(string drawList, int width, int height)
            => m_OSSL_Functions.osDrawEllipse(drawList, width, height);

        public string osDrawRectangle(string drawList, int width, int height)
            => m_OSSL_Functions.osDrawRectangle(drawList, width, height);

        public string osDrawFilledRectangle(string drawList, int width, int height)
            => m_OSSL_Functions.osDrawFilledRectangle(drawList, width, height);

        public string osDrawPolygon(string drawList, LSL_List x, LSL_List y)
            => m_OSSL_Functions.osDrawPolygon(drawList, x, y);

        public string osDrawFilledPolygon(string drawList, LSL_List x, LSL_List y)
            => m_OSSL_Functions.osDrawFilledPolygon(drawList, x, y);

        public string osSetFontSize(string drawList, int fontSize)
            => m_OSSL_Functions.osSetFontSize(drawList, fontSize);

        public string osSetFontName(string drawList, string fontName)
            => m_OSSL_Functions.osSetFontName(drawList, fontName);

        public string osSetPenSize(string drawList, int penSize)
            => m_OSSL_Functions.osSetPenSize(drawList, penSize);

        public string osSetPenCap(string drawList, string direction, string type)
            => m_OSSL_Functions.osSetPenCap(drawList, direction, type);

        public string osSetPenColor(string drawList, string color)
            => m_OSSL_Functions.osSetPenColor(drawList, color);

        // Deprecated
        public string osSetPenColour(string drawList, string colour)
            => m_OSSL_Functions.osSetPenColour(drawList, colour);

        public string osDrawImage(string drawList, int width, int height, string imageUrl)
            => m_OSSL_Functions.osDrawImage(drawList, width, height, imageUrl);

        public vector osGetDrawStringSize(string contentType, string text, string fontName, int fontSize)
            => m_OSSL_Functions.osGetDrawStringSize(contentType, text, fontName, fontSize);

        // ---- Engine / misc ----

        public void osSetStateEvents(int events)
            => m_OSSL_Functions.osSetStateEvents(events);

        public string osGetScriptEngineName()
            => m_OSSL_Functions.osGetScriptEngineName();

        public LSL_Integer osCheckODE()
            => m_OSSL_Functions.osCheckODE();

        public string osGetPhysicsEngineType()
            => m_OSSL_Functions.osGetPhysicsEngineType();

        public string osGetSimulatorVersion()
            => m_OSSL_Functions.osGetSimulatorVersion();

        public Hashtable osParseJSON(string JSON)
            => m_OSSL_Functions.osParseJSON(JSON);

        public Object osParseJSONNew(string JSON)
            => m_OSSL_Functions.osParseJSONNew(JSON);

        public void osMessageObject(key objectUUID, string message)
            => m_OSSL_Functions.osMessageObject(objectUUID, message);

        // ---- Notecards ----

        public void osMakeNotecard(string notecardName, LSL_Types.list contents)
            => m_OSSL_Functions.osMakeNotecard(notecardName, contents);

        public string osGetNotecardLine(string name, int line)
            => m_OSSL_Functions.osGetNotecardLine(name, line);

        public string osGetNotecard(string name)
            => m_OSSL_Functions.osGetNotecard(name);

        public int osGetNumberOfNotecardLines(string name)
            => m_OSSL_Functions.osGetNumberOfNotecardLines(name);

        public string osAvatarName2Key(string firstname, string lastname)
            => m_OSSL_Functions.osAvatarName2Key(firstname, lastname);

        public string osKey2Name(string id)
            => m_OSSL_Functions.osKey2Name(id);

        // ---- Grid info ----

        public string osGetGridNick()
            => m_OSSL_Functions.osGetGridNick();

        public string osGetGridName()
            => m_OSSL_Functions.osGetGridName();

        public string osGetGridLoginURI()
            => m_OSSL_Functions.osGetGridLoginURI();

        public string osGetGridHomeURI()
            => m_OSSL_Functions.osGetGridHomeURI();

        public string osGetGridGatekeeperURI()
            => m_OSSL_Functions.osGetGridGatekeeperURI();

        public string osGetGridCustom(string key)
            => m_OSSL_Functions.osGetGridCustom(key);

        // ---- String utilities ----

        public LSL_String osFormatString(string str, LSL_List strings)
            => m_OSSL_Functions.osFormatString(str, strings);

        public LSL_List osMatchString(string src, string pattern, int start)
            => m_OSSL_Functions.osMatchString(src, pattern, start);

        public LSL_String osReplaceString(string src, string pattern, string replace, int count, int start)
            => m_OSSL_Functions.osReplaceString(src, pattern, replace, count, start);

        // ---- Information about data loaded into the region ----

        public string osLoadedCreationDate()
            => m_OSSL_Functions.osLoadedCreationDate();

        public string osLoadedCreationTime()
            => m_OSSL_Functions.osLoadedCreationTime();

        public string osLoadedCreationID()
            => m_OSSL_Functions.osLoadedCreationID();

        // ---- Links ----

        public LSL_List osGetLinkPrimitiveParams(int linknumber, LSL_List rules)
            => m_OSSL_Functions.osGetLinkPrimitiveParams(linknumber, rules);

        public void osForceCreateLink(string target, int parent)
            => m_OSSL_Functions.osForceCreateLink(target, parent);

        public void osForceBreakLink(int linknum)
            => m_OSSL_Functions.osForceBreakLink(linknum);

        public void osForceBreakAllLinks()
            => m_OSSL_Functions.osForceBreakAllLinks();

        // ---- NPC functions ----

        public LSL_Integer osIsNpc(LSL_Key npc)
            => m_OSSL_Functions.osIsNpc(npc);

        public key osNpcCreate(string user, string name, vector position, key cloneFrom)
            => m_OSSL_Functions.osNpcCreate(user, name, position, cloneFrom);

        public key osNpcCreate(string user, string name, vector position, key cloneFrom, int options)
            => m_OSSL_Functions.osNpcCreate(user, name, position, cloneFrom, options);

        public key osNpcSaveAppearance(key npc, string notecard)
            => m_OSSL_Functions.osNpcSaveAppearance(npc, notecard);

        public void osNpcLoadAppearance(key npc, string notecard)
            => m_OSSL_Functions.osNpcLoadAppearance(npc, notecard);

        public LSL_Key osNpcGetOwner(LSL_Key npc)
            => m_OSSL_Functions.osNpcGetOwner(npc);

        public vector osNpcGetPos(LSL_Key npc)
            => m_OSSL_Functions.osNpcGetPos(npc);

        public void osNpcMoveTo(key npc, vector position)
            => m_OSSL_Functions.osNpcMoveTo(npc, position);

        public void osNpcMoveToTarget(key npc, vector target, int options)
            => m_OSSL_Functions.osNpcMoveToTarget(npc, target, options);

        public rotation osNpcGetRot(key npc)
            => m_OSSL_Functions.osNpcGetRot(npc);

        public void osNpcSetRot(key npc, rotation rot)
            => m_OSSL_Functions.osNpcSetRot(npc, rot);

        public void osNpcStopMoveToTarget(LSL_Key npc)
            => m_OSSL_Functions.osNpcStopMoveToTarget(npc);

        public void osNpcSay(key npc, string message)
            => m_OSSL_Functions.osNpcSay(npc, message);

        public void osNpcSay(key npc, int channel, string message)
            => m_OSSL_Functions.osNpcSay(npc, channel, message);

        public void osNpcShout(key npc, int channel, string message)
            => m_OSSL_Functions.osNpcShout(npc, channel, message);

        public void osNpcSit(LSL_Key npc, LSL_Key target, int options)
            => m_OSSL_Functions.osNpcSit(npc, target, options);

        public void osNpcStand(LSL_Key npc)
            => m_OSSL_Functions.osNpcStand(npc);

        public void osNpcRemove(key npc)
            => m_OSSL_Functions.osNpcRemove(npc);

        public void osNpcPlayAnimation(LSL_Key npc, string animation)
            => m_OSSL_Functions.osNpcPlayAnimation(npc, animation);

        public void osNpcStopAnimation(LSL_Key npc, string animation)
            => m_OSSL_Functions.osNpcStopAnimation(npc, animation);

        public void osNpcWhisper(key npc, int channel, string message)
            => m_OSSL_Functions.osNpcWhisper(npc, channel, message);

        public void osNpcTouch(LSL_Key npcLSL_Key, LSL_Key object_key, LSL_Integer link_num)
            => m_OSSL_Functions.osNpcTouch(npcLSL_Key, object_key, link_num);

        public LSL_Key osOwnerSaveAppearance(string notecard)
            => m_OSSL_Functions.osOwnerSaveAppearance(notecard);

        public LSL_Key osAgentSaveAppearance(LSL_Key agentId, string notecard)
            => m_OSSL_Functions.osAgentSaveAppearance(agentId, notecard);

        // Convenience accessor for the containing prim; created by ApiTypeOSSL().
        public OSSLPrim Prim;

        /// <summary>
        /// Script-side proxy for the containing prim: reads and writes route
        /// through the LSL position/rotation/text functions.
        /// </summary>
        [Serializable]
        public class OSSLPrim
        {
            internal ScriptBaseClass OSSL;

            public OSSLPrim_Position Position;
            public OSSLPrim_Rotation Rotation;

            private TextStruct _text;

            public OSSLPrim(ScriptBaseClass bc)
            {
                OSSL = bc;
                Position = new OSSLPrim_Position(this);
                Rotation = new OSSLPrim_Rotation(this);
            }

            // Assigning Text immediately pushes the new value via llSetText.
            public TextStruct Text
            {
                get => _text;
                set
                {
                    _text = value;
                    OSSL.llSetText(_text.Text, _text.color, _text.alpha);
                }
            }

            [Serializable]
            public struct TextStruct
            {
                public string Text;
                public LSL_Types.Vector3 color;
                public double alpha;
            }
        }

        /// <summary>
        /// Mutable position proxy: each component access reloads the prim's
        /// position via llGetPos and each write saves it back via llSetPos.
        /// </summary>
        [Serializable]
        public class OSSLPrim_Position
        {
            private OSSLPrim prim;
            private LSL_Types.Vector3 Position;

            public OSSLPrim_Position(OSSLPrim _prim)
            {
                prim = _prim;
            }

            private void Load()
                => Position = prim.OSSL.llGetPos();

            private void Save()
            {
                /* X/Y clamping to the region extent is disabled until a
                   handle to the region size is available:
                if (Position.x > ((int)Constants.RegionSize - 1))
                    Position.x = ((int)Constants.RegionSize - 1);
                if (Position.y > ((int)Constants.RegionSize - 1))
                    Position.y = ((int)Constants.RegionSize - 1);
                */
                if (Position.z > Constants.RegionHeight)
                    Position.z = Constants.RegionHeight;
                if (Position.x < 0)
                    Position.x = 0;
                if (Position.y < 0)
                    Position.y = 0;
                if (Position.z < 0)
                    Position.z = 0;
                prim.OSSL.llSetPos(Position);
            }

            public double x
            {
                get { Load(); return Position.x; }
                set { Load(); Position.x = value; Save(); }
            }

            public double y
            {
                get { Load(); return Position.y; }
                set { Load(); Position.y = value; Save(); }
            }

            public double z
            {
                get { Load(); return Position.z; }
                set { Load(); Position.z = value; Save(); }
            }
        }

        /// <summary>
        /// Mutable rotation proxy: each component access reloads the prim's
        /// rotation via llGetRot and each write saves it back via llSetRot.
        /// </summary>
        [Serializable]
        public class OSSLPrim_Rotation
        {
            private OSSLPrim prim;
            private LSL_Types.Quaternion Rotation;

            public OSSLPrim_Rotation(OSSLPrim _prim)
            {
                prim = _prim;
            }

            private void Load()
                => Rotation = prim.OSSL.llGetRot();

            private void Save()
                => prim.OSSL.llSetRot(Rotation);

            public double x
            {
                get { Load(); return Rotation.x; }
                set { Load(); Rotation.x = value; Save(); }
            }

            public double y
            {
                get { Load(); return Rotation.y; }
                set { Load(); Rotation.y = value; Save(); }
            }

            public double z
            {
                get { Load(); return Rotation.z; }
                set { Load(); Rotation.z = value; Save(); }
            }

            public double s
            {
                get { Load(); return Rotation.s; }
                set { Load(); Rotation.s = value; Save(); }
            }
        }

        // ---- Avatar / region queries ----

        public string osGetGender(LSL_Key rawAvatarId)
            => m_OSSL_Functions.osGetGender(rawAvatarId);

        public key osGetMapTexture()
            => m_OSSL_Functions.osGetMapTexture();

        public key osGetRegionMapTexture(string regionName)
            => m_OSSL_Functions.osGetRegionMapTexture(regionName);

        public LSL_List osGetRegionStats()
            => m_OSSL_Functions.osGetRegionStats();

        public vector osGetRegionSize()
            => m_OSSL_Functions.osGetRegionSize();

        /// <summary>
        /// Returns the amount of memory in use by the Simulator Daemon.
        /// Amount in bytes - if &gt;= 4GB, returns 4GB. (LSL is not 64-bit aware)
        /// </summary>
        /// <returns></returns>
        public LSL_Integer osGetSimulatorMemory()
            => m_OSSL_Functions.osGetSimulatorMemory();

        public void osKickAvatar(string FirstName, string SurName, string alert)
            => m_OSSL_Functions.osKickAvatar(FirstName, SurName, alert);

        public void osSetSpeed(string UUID, LSL_Float SpeedModifier)
            => m_OSSL_Functions.osSetSpeed(UUID, SpeedModifier);

        public LSL_Float osGetHealth(string avatar)
            => m_OSSL_Functions.osGetHealth(avatar);

        public void osCauseDamage(string avatar, double damage)
            => m_OSSL_Functions.osCauseDamage(avatar, damage);

        public void osCauseHealing(string avatar, double healing)
            => m_OSSL_Functions.osCauseHealing(avatar, healing);

        public void osForceOtherSit(string avatar)
            => m_OSSL_Functions.osForceOtherSit(avatar);

        public void osForceOtherSit(string avatar, string target)
            => m_OSSL_Functions.osForceOtherSit(avatar, target);

        public LSL_List osGetPrimitiveParams(LSL_Key prim, LSL_List rules)
            => m_OSSL_Functions.osGetPrimitiveParams(prim, rules);

        public void osSetPrimitiveParams(LSL_Key prim, LSL_List rules)
            => m_OSSL_Functions.osSetPrimitiveParams(prim, rules);

        public void osSetProjectionParams(bool projection, LSL_Key texture, double fov, double focus, double amb)
            => m_OSSL_Functions.osSetProjectionParams(projection, texture, fov, focus, amb);

        public void osSetProjectionParams(LSL_Key prim, bool projection, LSL_Key texture, double fov, double focus, double amb)
            => m_OSSL_Functions.osSetProjectionParams(prim, projection, texture, fov, focus, amb);

        public LSL_List osGetAvatarList()
            => m_OSSL_Functions.osGetAvatarList();

        public LSL_String osUnixTimeToTimestamp(long time)
            => m_OSSL_Functions.osUnixTimeToTimestamp(time);

        public LSL_String osGetInventoryDesc(string item)
            => m_OSSL_Functions.osGetInventoryDesc(item);

        // ---- Groups ----

        public LSL_Integer osInviteToGroup(LSL_Key agentId)
            => m_OSSL_Functions.osInviteToGroup(agentId);

        public LSL_Integer osEjectFromGroup(LSL_Key agentId)
            => m_OSSL_Functions.osEjectFromGroup(agentId);

        // ---- Terrain textures ----

        public void osSetTerrainTexture(int level, LSL_Key texture)
            => m_OSSL_Functions.osSetTerrainTexture(level, texture);

        public void osSetTerrainTextureHeight(int corner, double low, double high)
            => m_OSSL_Functions.osSetTerrainTextureHeight(corner, low, high);

        // ---- Misc ----

        public LSL_Integer osIsUUID(string thing)
            => m_OSSL_Functions.osIsUUID(thing);

        public LSL_Float osMin(double a, double b)
            => m_OSSL_Functions.osMin(a, b);

        public LSL_Float osMax(double a, double b)
            => m_OSSL_Functions.osMax(a, b);

        public LSL_Key osGetRezzingObject()
            => m_OSSL_Functions.osGetRezzingObject();

        public void osSetContentType(LSL_Key id, string type)
            => m_OSSL_Functions.osSetContentType(id, type);

        public void osDropAttachment()
            => m_OSSL_Functions.osDropAttachment();

        public void osForceDropAttachment()
            => m_OSSL_Functions.osForceDropAttachment();

        public void osDropAttachmentAt(vector pos, rotation rot)
            => m_OSSL_Functions.osDropAttachmentAt(pos, rot);

        public void osForceDropAttachmentAt(vector pos, rotation rot)
            => m_OSSL_Functions.osForceDropAttachmentAt(pos, rot);

        public LSL_Integer osListenRegex(int channelID, string name, string ID, string msg, int regexBitfield)
            => m_OSSL_Functions.osListenRegex(channelID, name, ID, msg, regexBitfield);

        public LSL_Integer osRegexIsMatch(string input, string pattern)
            => m_OSSL_Functions.osRegexIsMatch(input, pattern);
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.ComponentModel;
using System.Data.Common;
using System.Diagnostics;
using System.Globalization;

namespace System.Data.OleDb
{
    // Represents a parameter for an OleDbCommand.  Tracks the declared OLE DB type
    // (_metaType), precision/scale, and a change counter (_changeID) used to detect
    // when an existing native binding must be rebuilt.
    [TypeConverter(typeof(OleDbParameter.OleDbParameterConverter))]
    public sealed partial class OleDbParameter : DbParameter, ICloneable, IDbDataParameter
    {
        private NativeDBType _metaType;      // explicitly declared type; null => inferred from Value
        private int _changeID;               // bumped on every property change (see PropertyChanging)
        private string _parameterName;
        private byte _precision;             // 0 => not explicitly set
        private byte _scale;
        private bool _hasScale;              // distinguishes "scale set to 0" from "scale never set"
        private NativeDBType _coerceMetaType; // type actually used when Value was last coerced

        public OleDbParameter() : base()
        {
            // V1.0 nothing
        }

        public OleDbParameter(string name, object value) : this()
        {
            Debug.Assert(!(value is OleDbType), "use OleDbParameter(string, OleDbType)");
            Debug.Assert(!(value is SqlDbType), "use OleDbParameter(string, OleDbType)");
            ParameterName = name;
            Value = value;
        }

        public OleDbParameter(string name, OleDbType dataType) : this()
        {
            ParameterName = name;
            OleDbType = dataType;
        }

        public OleDbParameter(string name, OleDbType dataType, int size) : this()
        {
            ParameterName = name;
            OleDbType = dataType;
            Size = size;
        }

        public OleDbParameter(string name, OleDbType dataType, int size, string srcColumn) : this()
        {
            ParameterName = name;
            OleDbType = dataType;
            Size = size;
            SourceColumn = srcColumn;
        }

        [EditorBrowsable(EditorBrowsableState.Advanced)]
        public OleDbParameter(string parameterName, OleDbType dbType, int size,
                              ParameterDirection direction, bool isNullable,
                              byte precision, byte scale,
                              string srcColumn, DataRowVersion srcVersion,
                              object value) : this()
        {
            // V1.0 everything
            ParameterName = parameterName;
            OleDbType = dbType;
            Size = size;
            Direction = direction;
            IsNullable = isNullable;
            PrecisionInternal = precision;
            ScaleInternal = scale;
            SourceColumn = srcColumn;
            SourceVersion = srcVersion;
            Value = value;
        }

        [EditorBrowsable(EditorBrowsableState.Advanced)]
        public OleDbParameter(string parameterName, OleDbType dbType, int size,
                              ParameterDirection direction,
                              byte precision, byte scale,
                              string sourceColumn, DataRowVersion sourceVersion,
                              bool sourceColumnNullMapping,
                              object value) : this()
        {
            // V2.0 everything - round trip all browsable properties + precision/scale
            ParameterName = parameterName;
            OleDbType = dbType;
            Size = size;
            Direction = direction;
            PrecisionInternal = precision;
            ScaleInternal = scale;
            SourceColumn = sourceColumn;
            SourceVersion = sourceVersion;
            SourceColumnNullMapping = sourceColumnNullMapping;
            Value = value;
        }

        // Snapshot of the change counter; BindParameter asserts against it to detect
        // property mutation while a binding is being built.
        internal int ChangeID
        {
            get { return _changeID; }
        }

        // DbType is derived from the effective binding type (declared or inferred from
        // Value).  Setting it replaces _metaType and invalidates any coerced value.
        public override DbType DbType
        {
            get { return GetBindType(Value).enumDbType; }
            set
            {
                NativeDBType dbtype = _metaType;
                if ((null == dbtype) || (dbtype.enumDbType != value))
                {
                    PropertyTypeChanging();
                    _metaType = NativeDBType.FromDbType(value);
                }
            }
        }

        public override void ResetDbType()
        {
            ResetOleDbType();
        }

        [
        RefreshProperties(RefreshProperties.All),
        DbProviderSpecificTypeProperty(true),
        ]
        public OleDbType OleDbType
        {
            get { return GetBindType(Value).enumOleDbType; }
            set
            {
                NativeDBType dbtype = _metaType;
                if ((null == dbtype) || (dbtype.enumOleDbType != value))
                {
                    PropertyTypeChanging();
                    _metaType = NativeDBType.FromDataType(value);
                }
            }
        }

        private bool ShouldSerializeOleDbType()
        {
            return (null != _metaType);
        }

        public void ResetOleDbType()
        {
            if (null != _metaType)
            {
                PropertyTypeChanging();
                _metaType = null;
            }
        }

        public override string ParameterName
        {
            // V1.2.3300, XXXParameter V1.0.3300
            get
            {
                string parameterName = _parameterName;
                return ((null != parameterName) ? parameterName : string.Empty);
            }
            set
            {
                if (_parameterName != value)
                {
                    PropertyChanging();
                    _parameterName = value;
                }
            }
        }

        [DefaultValue((byte)0)]
        public new byte Precision
        {
            get { return PrecisionInternal; }
            set { PrecisionInternal = value; }
        }

        // Falls back to the precision derived from Value when not explicitly set.
        internal byte PrecisionInternal
        {
            get
            {
                byte precision = _precision;
                if (0 == precision)
                {
                    precision = ValuePrecision(Value);
                }
                return precision;
            }
            set
            {
                if (_precision != value)
                {
                    PropertyChanging();
                    _precision = value;
                }
            }
        }

        private bool ShouldSerializePrecision()
        {
            return (0 != _precision);
        }

        [DefaultValue((byte)0)]
        public new byte Scale
        {
            get { return ScaleInternal; }
            set { ScaleInternal = value; }
        }

        // Falls back to the scale derived from Value when never explicitly set
        // (tracked via _hasScale so an explicit 0 is honored).
        internal byte ScaleInternal
        {
            get
            {
                byte scale = _scale;
                if (!ShouldSerializeScale(scale))
                {
                    scale = ValueScale(Value);
                }
                return scale;
            }
            set
            {
                if (_scale != value || !_hasScale)
                {
                    PropertyChanging();
                    _scale = value;
                    _hasScale = true;
                }
            }
        }

        private bool ShouldSerializeScale()
        {
            return ShouldSerializeScale(_scale);
        }

        private bool ShouldSerializeScale(byte scale)
        {
            return _hasScale && ((0 != scale) || ShouldSerializePrecision());
        }

        object ICloneable.Clone()
        {
            // relies on the copy constructor defined in the other half of this partial class
            return new OleDbParameter(this);
        }

        private void CloneHelper(OleDbParameter destination)
        {
            CloneHelperCore(destination);
            destination._metaType = _metaType;
            destination._parameterName = _parameterName;
            destination._precision = _precision;
            destination._scale = _scale;
            destination._hasScale = _hasScale;
        }

        private void PropertyChanging()
        {
            unchecked
            { _changeID++; }
        }

        // Type-affecting changes additionally invalidate the cached coercion state.
        private void PropertyTypeChanging()
        {
            PropertyChanging();
            _coerceMetaType = null;
            CoercedValue = null;
        }

        // Fills in the native OLE DB binding (tagDBPARAMBINDINFO / tagDBBINDING) for
        // this parameter at the given ordinal.  Computes the effective precision,
        // scale, wire type and byte count; large/variable-length data is bound BYREF.
        // Returns IsParameterComputed() so the caller knows whether the binding must
        // be recomputed on each execution.
        // goal: call virtual property getters only once per parameter
        internal bool BindParameter(int index, Bindings bindings)
        {
            int changeID = _changeID;
            object value = Value;
            NativeDBType dbtype = GetBindType(value);
            if (OleDbType.Empty == dbtype.enumOleDbType)
            {
                throw ODB.UninitializedParameters(index, dbtype.enumOleDbType);
            }
            _coerceMetaType = dbtype;
            value = CoerceValue(value, dbtype);
            CoercedValue = value;
            ParameterDirection direction = Direction;

            byte precision;
            if (ShouldSerializePrecision())
            {
                precision = PrecisionInternal;
            }
            else
            {
                precision = ValuePrecision(value);
            }
            if (0 == precision)
            {
                precision = dbtype.maxpre;
            }

            byte scale;
            if (ShouldSerializeScale())
            {
                scale = ScaleInternal;
            }
            else
            {
                scale = ValueScale(value);
            }

            int wtype = dbtype.wType;
            int bytecount, size;
            if (dbtype.islong)
            {
                // long data (image, text, ntext)
                bytecount = ADP.PtrSize;
                if (ShouldSerializeSize())
                {
                    size = Size;
                }
                else
                {
                    if (NativeDBType.STR == dbtype.dbType)
                    {
                        size = int.MaxValue;
                    }
                    else if (NativeDBType.WSTR == dbtype.dbType)
                    {
                        size = int.MaxValue / 2;
                    }
                    else
                    {
                        size = int.MaxValue;
                    }
                }
                wtype |= NativeDBType.BYREF;
            }
            else if (dbtype.IsVariableLength)
            {
                // variable length data (varbinary, varchar, nvarchar)
                if (!ShouldSerializeSize() && ADP.IsDirection(this, ParameterDirection.Output))
                {
                    throw ADP.UninitializedParameterSize(index, _coerceMetaType.dataType);
                }

                bool computedSize;
                if (ShouldSerializeSize())
                {
                    size = Size;
                    computedSize = false;
                }
                else
                {
                    size = ValueSize(value);
                    computedSize = true;
                }

                if (0 < size)
                {
                    if (NativeDBType.WSTR == dbtype.wType)
                    {
                        // maximum 0x3FFFFFFE characters, computed this way to avoid overflow exception
                        bytecount = Math.Min(size, 0x3FFFFFFE) * 2 + 2;
                    }
                    else
                    {
                        Debug.Assert(NativeDBType.STR != dbtype.wType, "should have ANSI binding, describing is okay");
                        bytecount = size;
                    }

                    if (computedSize)
                    {
                        if (NativeDBType.STR == dbtype.dbType)
                        {
                            // maximum 0x7ffffffe characters, computed this way to avoid overflow exception
                            size = Math.Min(size, 0x3FFFFFFE) * 2;
                        }
                    }

                    if (ODB.LargeDataSize < bytecount)
                    {
                        bytecount = ADP.PtrSize;
                        wtype |= NativeDBType.BYREF;
                    }
                }
                else if (0 == size)
                {
                    if (NativeDBType.WSTR == wtype)
                    {
                        // allow space for null termination character
                        bytecount = 2;
                        // 0 == size, okay for (STR == dbType)
                    }
                    else
                    {
                        Debug.Assert(NativeDBType.STR != dbtype.wType, "should have ANSI binding, describing is okay");
                        bytecount = 0;
                    }
                }
                else if (-1 == size)
                {
                    bytecount = ADP.PtrSize;
                    wtype |= NativeDBType.BYREF;
                }
                else
                {
                    throw ADP.InvalidSizeValue(size);
                }
            }
            else
            {
                // fixed length data
                bytecount = dbtype.fixlen;
                size = bytecount;
            }

            bindings.CurrentIndex = index;

            // tagDBPARAMBINDINFO info for SetParameterInfo
            bindings.DataSourceType = dbtype.dbString.DangerousGetHandle(); // NOTE: This is a constant and isn't exposed publicly, so there really isn't a potential for Handle Recycling.
            bindings.Name = ADP.PtrZero;
            bindings.ParamSize = new IntPtr(size);
            bindings.Flags = GetBindFlags(direction);
            //bindings.Precision    = precision;
            //bindings.Scale        = scale;

            // tagDBBINDING info for CreateAccessor
            bindings.Ordinal = (IntPtr)(index + 1);
            bindings.Part = dbtype.dbPart;
            bindings.ParamIO = GetBindDirection(direction);
            bindings.Precision = precision;
            bindings.Scale = scale;
            bindings.DbType = wtype;
            bindings.MaxLen = bytecount; // also increments databuffer size (uses DbType)
            //bindings.ValueOffset  = bindings.DataBufferSize; // set via MaxLen
            //bindings.LengthOffset = i * sizeof_int64;
            //bindings.StatusOffset = i * sizeof_int64 + sizeof_int32;
            //bindings.TypeInfoPtr  = 0;
            //bindings.ObjectPtr    = 0;
            //bindings.BindExtPtr   = 0;
            //bindings.MemOwner     = /*DBMEMOWNER_CLIENTOWNED*/0;
            //bindings.Flags        = 0;

            //bindings.ParameterChangeID = changeID; // bind until something changes
            Debug.Assert(_changeID == changeID, "parameter has unexpectedly changed");

            return IsParameterComputed();
        }

        // Converts value to destinationType.dataType when necessary.  char[] -> string
        // is left to the marshaler; currency strings are parsed with NumberStyles.Currency;
        // everything else goes through Convert.ChangeType.
        private static object CoerceValue(object value, NativeDBType destinationType)
        {
            Debug.Assert(null != destinationType, "null destinationType");
            if ((null != value) && (DBNull.Value != value) && (typeof(object) != destinationType.dataType))
            {
                Type currentType = value.GetType();
                if (currentType != destinationType.dataType)
                {
                    try
                    {
                        if ((typeof(string) == destinationType.dataType) && (typeof(char[]) == currentType))
                        {
                        }
                        else if ((NativeDBType.CY == destinationType.dbType) && (typeof(string) == currentType))
                        {
                            value = decimal.Parse((string)value, NumberStyles.Currency, (IFormatProvider)null);
                        }
                        else
                        {
                            value = Convert.ChangeType(value, destinationType.dataType, (IFormatProvider)null);
                        }
                    }
                    catch (Exception e)
                    {
                        // UNDONE - should not be catching all exceptions!!!
                        if (!ADP.IsCatchableExceptionType(e))
                        {
                            throw;
                        }
                        throw ADP.ParameterConversionFailed(value, destinationType.dataType, e);
                    }
                }
            }
            return value;
        }

        // Effective binding type: the declared _metaType when present, otherwise a
        // type inferred from the runtime value (Default for null/DBNull).
        private NativeDBType GetBindType(object value)
        {
            NativeDBType dbtype = _metaType;
            if (null == dbtype)
            {
                if (ADP.IsNull(value))
                {
                    dbtype = OleDb.NativeDBType.Default;
                }
                else
                {
                    dbtype = NativeDBType.FromSystemType(value);
                }
            }
            return dbtype;
        }

        internal object GetCoercedValue()
        {
            object value = CoercedValue; // will also be set during binding, will rebind everytime if _metaType not set
            if (null == value)
            {
                value = CoerceValue(Value, _coerceMetaType);
                CoercedValue = value;
            }
            return value;
        }

        // True when the binding depends on runtime state (inferred type, computed size,
        // or unspecified precision/scale for decimal/numeric) and must be recomputed.
        // NOTE(review): the mixed ||/&& precedence makes the DECIMAL comparison alone
        // sufficient to return true, regardless of the precision/scale clause -- this
        // matches the upstream source but looks like it may not be the intent; confirm
        // before changing.
        internal bool IsParameterComputed()
        {
            NativeDBType metaType = _metaType;
            return ((null == metaType)
                    || (!ShouldSerializeSize() && metaType.IsVariableLength)
                    || ((NativeDBType.DECIMAL == metaType.dbType) || (NativeDBType.NUMERIC == metaType.dbType)
                        && (!ShouldSerializeScale() || !ShouldSerializePrecision())
                        )
                    );
        }

        // @devnote: use IsParameterComputed which is called in the normal case
        // only to call Prepare to throw the specialized error message
        // reducing the overall number of methods to actually jit
        internal void Prepare(OleDbCommand cmd)
        {
            Debug.Assert(IsParameterComputed(), "Prepare computed parameter");
            if (null == _metaType)
            {
                throw ADP.PrepareParameterType(cmd);
            }
            else if (!ShouldSerializeSize() && _metaType.IsVariableLength)
            {
                throw ADP.PrepareParameterSize(cmd);
            }
            else if (!ShouldSerializePrecision() && !ShouldSerializeScale() && ((NativeDBType.DECIMAL == _metaType.wType) || (NativeDBType.NUMERIC == _metaType.wType)))
            {
                throw ADP.PrepareParameterScale(cmd, _metaType.wType.ToString("G", CultureInfo.InvariantCulture));
            }
        }

        [
        RefreshProperties(RefreshProperties.All),
        TypeConverter(typeof(StringConverter)),
        ]
        public override object Value
        {
            // V1.2.3300, XXXParameter V1.0.3300
            get
            {
                return _value;
            }
            set
            {
                // any cached coercion becomes stale when the raw value changes
                _coercedValue = null;
                _value = value;
            }
        }

        private byte ValuePrecision(object value)
        {
            return ValuePrecisionCore(value);
        }

        private byte ValueScale(object value)
        {
            return ValueScaleCore(value);
        }

        private int ValueSize(object value)
        {
            return ValueSizeCore(value);
        }

        // Maps ParameterDirection to the DBPARAMIO_* flags (same bit layout, masked).
        private static int GetBindDirection(ParameterDirection direction)
        {
            return (ODB.ParameterDirectionFlag & (int)direction);
            /*switch(Direction) {
            default:
            case ParameterDirection.Input:
                return ODB.DBPARAMIO_INPUT;
            case ParameterDirection.Output:
            case ParameterDirection.ReturnValue:
                return ODB.DBPARAMIO_OUTPUT;
            case ParameterDirection.InputOutput:
                return (ODB.DBPARAMIO_INPUT | ODB.DBPARAMIO_OUTPUT);
            }*/
        }

        // Maps ParameterDirection to the DBPARAMFLAGS_* flags (same bit layout, masked).
        private static int GetBindFlags(ParameterDirection direction)
        {
            return (ODB.ParameterDirectionFlag & (int)direction);
            /*switch(Direction) {
            default:
            case ParameterDirection.Input:
                return ODB.DBPARAMFLAGS_ISINPUT;
            case ParameterDirection.Output:
            case ParameterDirection.ReturnValue:
                return ODB.DBPARAMFLAGS_ISOUTPUT;
            case ParameterDirection.InputOutput:
                return (ODB.DBPARAMFLAGS_ISINPUT | ODB.DBPARAMFLAGS_ISOUTPUT);
            }*/
        }

        // Designer support: serializes a parameter as a constructor call, choosing the
        // smallest constructor overload that round-trips all non-default properties.
        // implemented as nested class to take advantage of the private/protected ShouldSerializeXXX methods
        internal sealed class OleDbParameterConverter : System.ComponentModel.ExpandableObjectConverter
        {
            // converter classes should have public ctor
            public OleDbParameterConverter()
            {
            }

            public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
            {
                if (typeof(System.ComponentModel.Design.Serialization.InstanceDescriptor) == destinationType)
                {
                    return true;
                }
                return base.CanConvertTo(context, destinationType);
            }

            public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
            {
                if (null == destinationType)
                {
                    throw ADP.ArgumentNull("destinationType");
                }
                if ((typeof(System.ComponentModel.Design.Serialization.InstanceDescriptor) == destinationType) && (value is OleDbParameter))
                {
                    return ConvertToInstanceDescriptor(value as OleDbParameter);
                }
                return base.ConvertTo(context, culture, value, destinationType);
            }

            private System.ComponentModel.Design.Serialization.InstanceDescriptor ConvertToInstanceDescriptor(OleDbParameter p)
            {
                // flags is a bitmask of which property groups deviate from defaults;
                // it selects the constructor overload below.
                int flags = 0;
                if (p.ShouldSerializeOleDbType())
                {
                    flags |= 1;
                }
                if (p.ShouldSerializeSize())
                {
                    flags |= 2;
                }
                if (!ADP.IsEmpty(p.SourceColumn))
                {
                    flags |= 4;
                }
                if (null != p.Value)
                {
                    flags |= 8;
                }
                if ((ParameterDirection.Input != p.Direction) || p.IsNullable
                    || p.ShouldSerializePrecision() || p.ShouldSerializeScale()
                    || (DataRowVersion.Current != p.SourceVersion))
                {
                    flags |= 16; // V1.0 everything
                }
                if (p.SourceColumnNullMapping)
                {
                    flags |= 32; // v2.0 everything
                }

                Type[] ctorParams;
                object[] ctorValues;
                switch (flags)
                {
                    case 0: // ParameterName
                    case 1: // OleDbType
                        ctorParams = new Type[] { typeof(string), typeof(OleDbType) };
                        ctorValues = new object[] { p.ParameterName, p.OleDbType };
                        break;
                    case 2: // Size
                    case 3: // Size, OleDbType
                        ctorParams = new Type[] { typeof(string), typeof(OleDbType), typeof(int) };
                        ctorValues = new object[] { p.ParameterName, p.OleDbType, p.Size };
                        break;
                    case 4: // SourceColumn
                    case 5: // SourceColumn, OleDbType
                    case 6: // SourceColumn, Size
                    case 7: // SourceColumn, Size, OleDbType
                        ctorParams = new Type[] { typeof(string), typeof(OleDbType), typeof(int), typeof(string) };
                        ctorValues = new object[] { p.ParameterName, p.OleDbType, p.Size, p.SourceColumn };
                        break;
                    case 8: // Value
                        ctorParams = new Type[] { typeof(string), typeof(object) };
                        ctorValues = new object[] { p.ParameterName, p.Value };
                        break;
                    default: // everything else
                        if (0 == (32 & flags))
                        {
                            // V1.0 everything
                            ctorParams = new Type[] {
                                typeof(string), typeof(OleDbType), typeof(int), typeof(ParameterDirection),
                                typeof(bool), typeof(byte), typeof(byte),
                                typeof(string), typeof(DataRowVersion), typeof(object) };
                            ctorValues = new object[] {
                                p.ParameterName, p.OleDbType, p.Size, p.Direction,
                                p.IsNullable, p.PrecisionInternal, p.ScaleInternal,
                                p.SourceColumn, p.SourceVersion, p.Value };
                        }
                        else
                        {
                            // v2.0 everything - round trip all browsable properties + precision/scale
                            ctorParams = new Type[] {
                                typeof(string), typeof(OleDbType), typeof(int), typeof(ParameterDirection),
                                typeof(byte), typeof(byte),
                                typeof(string), typeof(DataRowVersion), typeof(bool), typeof(object) };
                            ctorValues = new object[] {
                                p.ParameterName, p.OleDbType, p.Size, p.Direction,
                                p.PrecisionInternal, p.ScaleInternal,
                                p.SourceColumn, p.SourceVersion, p.SourceColumnNullMapping, p.Value };
                        }
                        break;
                }
                System.Reflection.ConstructorInfo ctor = typeof(OleDbParameter).GetConstructor(ctorParams);
                return new System.ComponentModel.Design.Serialization.InstanceDescriptor(ctor, ctorValues);
            }
        }
    }
}
/*
	FluorineFx open source library
	Copyright (C) 2007 Zoltan Csibi, zoltan@TheSilentGroup.com, FluorineFx.com

	This library is free software; you can redistribute it and/or
	modify it under the terms of the GNU Lesser General Public
	License as published by the Free Software Foundation; either
	version 2.1 of the License, or (at your option) any later version.

	This library is distributed in the hope that it will be useful,
	but WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
	Lesser General Public License for more details.

	You should have received a copy of the GNU Lesser General Public
	License along with this library; if not, write to the Free Software
	Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
using System;
using System.Web;
using System.Web.Caching;
using System.Collections;
using System.Threading;
using System.Diagnostics;
using log4net;
using FluorineFx.Messaging;
using FluorineFx.Messaging.Api;
using FluorineFx.Util;
using FluorineFx.Messaging.Messages;
using FluorineFx.Exceptions;
using FluorineFx.Context;
using FluorineFx.Messaging.Endpoints;

namespace FluorineFx.Messaging
{
    /// <summary>
    /// ClientManager manages clients connected to the FluorineFx server.
    /// Client lifetime is tracked through the ASP.NET cache: each client with a
    /// non-zero lease is inserted with a sliding expiration, and cache expiry
    /// drives client timeout (see RemovedCallback).
    /// </summary>
    /// <example>
    /// <code lang="CS">
    /// class ChatAdapter : MessagingAdapter, ISessionListener
    /// {
    ///     private Hashtable _clients;
    ///
    ///     public ChatAdapter()
    ///     {
    ///         _clients = new Hashtable();
    ///         ClientManager.AddSessionCreatedListener(this);
    ///     }
    ///
    ///     public void SessionCreated(IClient client)
    ///     {
    ///         lock (_clients.SyncRoot)
    ///         {
    ///             _clients.Add(client.Id, client);
    ///         }
    ///         client.AddSessionDestroyedListener(this);
    ///     }
    ///
    ///     public void SessionDestroyed(IClient client)
    ///     {
    ///         lock (_clients.SyncRoot)
    ///         {
    ///             _clients.Remove(client.Id);
    ///         }
    ///     }
    /// }
    /// </code>
    /// </example>
    [CLSCompliant(false)]
    public class ClientManager : IClientRegistry
    {
        private static readonly ILog log = LogManager.GetLogger(typeof(ClientManager));

        object _objLock = new object();      // guards _clients and cache mutations
        MessageBroker _messageBroker;
        Hashtable _clients;                  // client id -> Client

        private ClientManager()
        {
        }

        internal ClientManager(MessageBroker messageBroker)
        {
            _messageBroker = messageBroker;
            _clients = new Hashtable();
        }

        // Generates a new globally unique client id ("D" format, dashes, no braces).
        internal string GetNextId()
        {
            return Guid.NewGuid().ToString("D");
        }

        #region IClientRegistry Members

        /// <summary>
        /// Gets an object that can be used to synchronize access.
        /// </summary>
        public object SyncRoot { get { return _objLock; } }

        /// <summary>
        /// Returns an existing client from the message header transporting the global FlexClient Id value or creates a new one if not found.
        /// </summary>
        /// <param name="message">Message sent from client.</param>
        /// <returns>The client object.</returns>
        public IClient GetClient(IMessage message)
        {
            lock (this.SyncRoot)
            {
                IClient client = GetClient(message.GetFlexClientId());
                // Write the (possibly newly generated) id back into the message so
                // the response carries the resolved FlexClient id.
                if (message is MessageBase)
                    (message as MessageBase).SetFlexClientId(client.Id);
                else
                    Debug.Assert(false);
                return client;
            }
        }

        /// <summary>
        /// Returns an existing client from a client id or creates a new one if not found.
        /// </summary>
        /// <param name="id">The identity of the client to return.</param>
        /// <returns>The client object.</returns>
        public IClient GetClient(string id)
        {
            lock (this.SyncRoot)
            {
                // NOTE(review): ContainsKey is called before the null/"nil" check below;
                // Hashtable.ContainsKey(null) throws ArgumentNullException, so a null id
                // from GetFlexClientId would fail here rather than being regenerated --
                // verify whether callers ever pass null.
                if (_clients.ContainsKey(id))
                {
                    // Touch the cache entry so its sliding expiration is reset.
                    HttpRuntime.Cache.Get(id);
                    return _clients[id] as Client;
                }
                // "nil" is the wire value Flex sends when it has no id yet.
                if (id == null || id == "nil" || id == string.Empty)
                    id = Guid.NewGuid().ToString("D");

                Client client = new Client(this, id);
                _clients[id] = client;
                // NOTE(review): lease time is hard-coded to 1 minute here rather than
                // taken from configuration -- confirm this is intentional.
                int clientLeaseTime = 1;
                log.Debug(__Res.GetString(__Res.Client_Create, id));
                Renew(client, clientLeaseTime);
                //client.NotifyCreated();
                return client;
            }
        }

        /// <summary>
        /// Check if a client with a given id exists.
        /// </summary>
        /// <param name="id">The identity of the client to check for.</param>
        /// <returns><c>true</c> if the client exists, <c>false</c> otherwise.</returns>
        public bool HasClient(string id)
        {
            if (id == null)
                return false;
            lock (this.SyncRoot)
            {
                return _clients.ContainsKey(id);
            }
        }

        /// <summary>
        /// Returns an existing client from a client id.
        /// </summary>
        /// <param name="clientId">The identity of the client to return.</param>
        /// <returns>The client object if exists, null otherwise.</returns>
        public IClient LookupClient(string clientId)
        {
            if (clientId == null)
                return null;
            lock (this.SyncRoot)
            {
                Client client = null;
                if (_clients.Contains(clientId))
                {
                    client = _clients[clientId] as Client;
                    // Touch the cache entry so its sliding expiration is reset.
                    HttpRuntime.Cache.Get(clientId);
                }
                return client;
            }
        }

        #endregion

        // (Re)establishes the cache-based lease for the client.  When the lease time
        // is unchanged the cache entry is merely touched; otherwise the entry is
        // re-inserted with a sliding expiration of ClientLeaseTime minutes, or removed
        // entirely when the lease is zero.
        internal void Renew(Client client, int clientLeaseTime)
        {
            if (client.ClientLeaseTime == clientLeaseTime)
            {
                //Keep the client in the cache.
                HttpRuntime.Cache.Get(client.Id);
                return;
            }
            lock (this.SyncRoot)
            {
                if (client.ClientLeaseTime < clientLeaseTime)
                {
                    log.Debug(__Res.GetString(__Res.Client_Lease, client.Id, client.ClientLeaseTime, clientLeaseTime));
                    client.SetClientLeaseTime(clientLeaseTime);
                }
                if (clientLeaseTime == 0)
                {
                    log.Debug(__Res.GetString(__Res.Client_Lease, client.Id, client.ClientLeaseTime, clientLeaseTime));
                    client.SetClientLeaseTime(0);
                }
                if (client.ClientLeaseTime != 0)
                {
                    HttpRuntime.Cache.Remove(client.Id);
                    // Add the FlexClient to the Cache with the expiration item
                    HttpRuntime.Cache.Insert(client.Id, client, null,
                        Cache.NoAbsoluteExpiration, new TimeSpan(0, client.ClientLeaseTime, 0),
                        CacheItemPriority.NotRemovable, new CacheItemRemovedCallback(this.RemovedCallback));
                }
                else
                    HttpRuntime.Cache.Remove(client.Id);
            }
        }

        // Removes the client from both the local table and the cache; returns the
        // client unchanged so callers can continue to use it.
        internal IClient RemoveSubscriber(IClient client)
        {
            lock (this.SyncRoot)
            {
                if (_clients.ContainsKey(client.Id))
                {
                    if (log.IsDebugEnabled)
                        log.Debug(__Res.GetString(__Res.ClientManager_Remove, client.Id));
                    HttpRuntime.Cache.Remove(client.Id);
                    _clients.Remove(client.Id);
                }
                return client;
            }
        }

        /// <summary>
        /// Cancels the timeout.
        /// </summary>
        /// <param name="client">The client.</param>
        public void CancelTimeout(IClient client)
        {
            HttpRuntime.Cache.Remove(client.Id);
        }

        // Cache eviction callback: when a lease entry expires, run the client's
        // timeout handling under a temporary Fluorine call context and drop it from
        // the registry.  Other removal reasons (explicit Remove, re-insert) are ignored.
        internal void RemovedCallback(string key, object value, CacheItemRemovedReason callbackReason)
        {
            if (callbackReason == CacheItemRemovedReason.Expired)
            {
                lock (this.SyncRoot)
                {
                    if (_clients.Contains(key))
                    {
                        try
                        {
                            IClient client = LookupClient(key);
                            if (client != null)
                            {
                                if (log.IsDebugEnabled)
                                    log.Debug(__Res.GetString(__Res.ClientManager_CacheExpired, client.Id));
                                _TimeoutContext context = new _TimeoutContext(client);
                                FluorineWebSafeCallContext.SetData(FluorineContext.FluorineContextKey, context);
                                client.Timeout();
                                RemoveSubscriber(client);
                                FluorineWebSafeCallContext.FreeNamedDataSlot(FluorineContext.FluorineContextKey);
                            }
                        }
                        catch (Exception ex)
                        {
                            if (log.IsErrorEnabled)
                                log.Error(__Res.GetString(__Res.ClientManager_CacheExpired, key), ex);
                        }
                    }
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// ------------------------------------------------------------------------------
// Changes to this file must follow the http://aka.ms/api-review process.
// ------------------------------------------------------------------------------
// NOTE(review): this is a reference-assembly surface file -- every body is a stub
// (`throw null` / empty) and only the public API shape matters.  Do not add logic
// here; implementation lives in the corresponding src assembly.

namespace System.Diagnostics
{
    [System.Diagnostics.SwitchLevelAttribute(typeof(bool))]
    public partial class BooleanSwitch : System.Diagnostics.Switch
    {
        public BooleanSwitch(string displayName, string description) : base (default(string), default(string)) { }
        public BooleanSwitch(string displayName, string description, string defaultSwitchValue) : base (default(string), default(string)) { }
        public bool Enabled { get { throw null; } set { } }
        protected override void OnValueChanged() { }
    }
    public partial class CorrelationManager
    {
        internal CorrelationManager() { }
        public System.Guid ActivityId { get { throw null; } set { } }
        public System.Collections.Stack LogicalOperationStack { get { throw null; } }
        public void StartLogicalOperation() { }
        public void StartLogicalOperation(object operationId) { }
        public void StopLogicalOperation() { }
    }
    public partial class DefaultTraceListener : System.Diagnostics.TraceListener
    {
        public DefaultTraceListener() { }
        public bool AssertUiEnabled { get { throw null; } set { } }
        public string LogFileName { get { throw null; } set { } }
        public override void Fail(string message) { }
        public override void Fail(string message, string detailMessage) { }
        public override void Write(string message) { }
        public override void WriteLine(string message) { }
    }
    public partial class EventTypeFilter : System.Diagnostics.TraceFilter
    {
        public EventTypeFilter(System.Diagnostics.SourceLevels level) { }
        public System.Diagnostics.SourceLevels EventType { get { throw null; } set { } }
        public override bool ShouldTrace(System.Diagnostics.TraceEventCache cache, string source, System.Diagnostics.TraceEventType eventType, int id, string formatOrMessage, object[] args, object data1, object[] data) { throw null; }
    }
    public partial class SourceFilter : System.Diagnostics.TraceFilter
    {
        public SourceFilter(string source) { }
        public string Source { get { throw null; } set { } }
        public override bool ShouldTrace(System.Diagnostics.TraceEventCache cache, string source, System.Diagnostics.TraceEventType eventType, int id, string formatOrMessage, object[] args, object data1, object[] data) { throw null; }
    }
    [System.FlagsAttribute]
    public enum SourceLevels
    {
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Advanced)]
        ActivityTracing = 65280,
        All = -1,
        Critical = 1,
        Error = 3,
        Information = 15,
        Off = 0,
        Verbose = 31,
        Warning = 7,
    }
    public partial class SourceSwitch : System.Diagnostics.Switch
    {
        public SourceSwitch(string name) : base (default(string), default(string)) { }
        public SourceSwitch(string displayName, string defaultSwitchValue) : base (default(string), default(string)) { }
        public System.Diagnostics.SourceLevels Level { get { throw null; } set { } }
        protected override void OnValueChanged() { }
        public bool ShouldTrace(System.Diagnostics.TraceEventType eventType) { throw null; }
    }
    public abstract partial class Switch
    {
        protected Switch(string displayName, string description) { }
        protected Switch(string displayName, string description, string defaultSwitchValue) { }
        public System.Collections.Specialized.StringDictionary Attributes { get { throw null; } }
        public string Description { get { throw null; } }
        public string DisplayName { get { throw null; } }
        protected int SwitchSetting { get { throw null; } set { } }
        protected string Value { get { throw null; } set { } }
        protected virtual string[] GetSupportedAttributes() { throw null; }
        protected virtual void OnSwitchSettingChanged() { }
        protected virtual void OnValueChanged() { }
    }
    [System.AttributeUsageAttribute(System.AttributeTargets.Assembly | System.AttributeTargets.Class | System.AttributeTargets.Constructor | System.AttributeTargets.Event | System.AttributeTargets.Method | System.AttributeTargets.Property)]
    public sealed partial class SwitchAttribute : System.Attribute
    {
        public SwitchAttribute(string switchName, System.Type switchType) { }
        public string SwitchDescription { get { throw null; } set { } }
        public string SwitchName { get { throw null; } set { } }
        public System.Type SwitchType { get { throw null; } set { } }
        public static System.Diagnostics.SwitchAttribute[] GetAll(System.Reflection.Assembly assembly) { throw null; }
    }
    [System.AttributeUsageAttribute(System.AttributeTargets.Class)]
    public sealed partial class SwitchLevelAttribute : System.Attribute
    {
        public SwitchLevelAttribute(System.Type switchLevelType) { }
        public System.Type SwitchLevelType { get { throw null; } set { } }
    }
    public sealed partial class Trace
    {
        internal Trace() { }
        public static bool AutoFlush { get { throw null; } set { } }
        public static System.Diagnostics.CorrelationManager CorrelationManager { get { throw null; } }
        public static int IndentLevel { get { throw null; } set { } }
        public static int IndentSize { get { throw null; } set { } }
        public static System.Diagnostics.TraceListenerCollection Listeners { get { throw null; } }
        public static bool UseGlobalLock { get { throw null; } set { } }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Assert(bool condition) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Assert(bool condition, string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Assert(bool condition, string message, string detailMessage) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Close() { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Fail(string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Fail(string message, string detailMessage) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Flush() { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Indent() { }
        public static void Refresh() { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void TraceError(string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void TraceError(string format, params object[] args) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void TraceInformation(string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void TraceInformation(string format, params object[] args) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void TraceWarning(string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void TraceWarning(string format, params object[] args) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Unindent() { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Write(object value) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Write(object value, string category) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Write(string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void Write(string message, string category) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteIf(bool condition, object value) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteIf(bool condition, object value, string category) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteIf(bool condition, string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteIf(bool condition, string message, string category) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLine(object value) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLine(object value, string category) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLine(string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLine(string message, string category) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLineIf(bool condition, object value) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLineIf(bool condition, object value, string category) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLineIf(bool condition, string message) { }
        [System.Diagnostics.ConditionalAttribute("TRACE")]
        public static void WriteLineIf(bool condition, string message, string category) { }
    }
    public partial class TraceEventCache
    {
        public TraceEventCache() { }
        public string Callstack { get { throw null; } }
        public System.DateTime DateTime { get { throw null; } }
        public System.Collections.Stack LogicalOperationStack { get { throw null; } }
        public int ProcessId { get { throw null; } }
        public string ThreadId { get { throw null; } }
        public long Timestamp { get { throw null; } }
    }
    public enum TraceEventType
    {
        Critical = 1,
        Error = 2,
        Information = 8,
        Resume = 2048,
        Start = 256,
        Stop = 512,
        Suspend = 1024,
        Transfer = 4096,
        Verbose = 16,
        Warning = 4,
    }
    public abstract partial class TraceFilter
    {
        protected TraceFilter() { }
        public abstract bool ShouldTrace(System.Diagnostics.TraceEventCache cache, string source, System.Diagnostics.TraceEventType eventType, int id, string formatOrMessage, object[] args, object data1, object[] data);
    }
    public enum TraceLevel
    {
        Error = 1,
        Info = 3,
        Off = 0,
        Verbose = 4,
        Warning = 2,
    }
    public abstract partial class TraceListener : System.MarshalByRefObject, System.IDisposable
    {
        protected TraceListener() { }
        protected TraceListener(string name) { }
        public System.Collections.Specialized.StringDictionary Attributes { get { throw null; } }
        public System.Diagnostics.TraceFilter Filter { get { throw null; } set { } }
        public int IndentLevel { get { throw null; } set { } }
        public int IndentSize { get { throw null; } set { } }
        public virtual bool IsThreadSafe { get { throw null; } }
        public virtual string Name { get { throw null; } set { } }
        protected bool NeedIndent { get { throw null; } set { } }
        public System.Diagnostics.TraceOptions TraceOutputOptions { get { throw null; } set { } }
        public virtual void Close() { }
        public void Dispose() { }
        protected virtual void Dispose(bool disposing) { }
        public virtual void Fail(string message) { }
        public virtual void Fail(string message, string detailMessage) { }
        public virtual void Flush() { }
        protected virtual string[] GetSupportedAttributes() { throw null; }
        public virtual void TraceData(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType eventType, int id, object data) { }
        public virtual void TraceData(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType eventType, int id, params object[] data) { }
        public virtual void TraceEvent(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType eventType, int id) { }
        public virtual void TraceEvent(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType eventType, int id, string message) { }
        public virtual void TraceEvent(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType eventType, int id, string format, params object[] args) { }
        public virtual void TraceTransfer(System.Diagnostics.TraceEventCache eventCache, string source, int id, string message, System.Guid relatedActivityId) { }
        public virtual void Write(object o) { }
        public virtual void Write(object o, string category) { }
        public abstract void Write(string message);
        public
virtual void Write(string message, string category) { } protected virtual void WriteIndent() { } public virtual void WriteLine(object o) { } public virtual void WriteLine(object o, string category) { } public abstract void WriteLine(string message); public virtual void WriteLine(string message, string category) { } } public partial class TraceListenerCollection : System.Collections.ICollection, System.Collections.IEnumerable, System.Collections.IList { internal TraceListenerCollection() { } public int Count { get { throw null; } } public System.Diagnostics.TraceListener this[int i] { get { throw null; } set { } } public System.Diagnostics.TraceListener this[string name] { get { throw null; } } bool System.Collections.ICollection.IsSynchronized { get { throw null; } } object System.Collections.ICollection.SyncRoot { get { throw null; } } bool System.Collections.IList.IsFixedSize { get { throw null; } } bool System.Collections.IList.IsReadOnly { get { throw null; } } object System.Collections.IList.this[int index] { get { throw null; } set { } } public int Add(System.Diagnostics.TraceListener listener) { throw null; } public void AddRange(System.Diagnostics.TraceListenerCollection value) { } public void AddRange(System.Diagnostics.TraceListener[] value) { } public void Clear() { } public bool Contains(System.Diagnostics.TraceListener listener) { throw null; } public void CopyTo(System.Diagnostics.TraceListener[] listeners, int index) { } public System.Collections.IEnumerator GetEnumerator() { throw null; } public int IndexOf(System.Diagnostics.TraceListener listener) { throw null; } public void Insert(int index, System.Diagnostics.TraceListener listener) { } public void Remove(System.Diagnostics.TraceListener listener) { } public void Remove(string name) { } public void RemoveAt(int index) { } void System.Collections.ICollection.CopyTo(System.Array array, int index) { } int System.Collections.IList.Add(object value) { throw null; } bool 
System.Collections.IList.Contains(object value) { throw null; } int System.Collections.IList.IndexOf(object value) { throw null; } void System.Collections.IList.Insert(int index, object value) { } void System.Collections.IList.Remove(object value) { } } [System.FlagsAttribute] public enum TraceOptions { Callstack = 32, DateTime = 2, LogicalOperationStack = 1, None = 0, ProcessId = 8, ThreadId = 16, Timestamp = 4, } public partial class TraceSource { public TraceSource(string name) { } public TraceSource(string name, System.Diagnostics.SourceLevels defaultLevel) { } public System.Collections.Specialized.StringDictionary Attributes { get { throw null; } } public System.Diagnostics.TraceListenerCollection Listeners { get { throw null; } } public string Name { get { throw null; } } public System.Diagnostics.SourceSwitch Switch { get { throw null; } set { } } public void Close() { } public void Flush() { } protected virtual string[] GetSupportedAttributes() { throw null; } [System.Diagnostics.ConditionalAttribute("TRACE")] public void TraceData(System.Diagnostics.TraceEventType eventType, int id, object data) { } [System.Diagnostics.ConditionalAttribute("TRACE")] public void TraceData(System.Diagnostics.TraceEventType eventType, int id, params object[] data) { } [System.Diagnostics.ConditionalAttribute("TRACE")] public void TraceEvent(System.Diagnostics.TraceEventType eventType, int id) { } [System.Diagnostics.ConditionalAttribute("TRACE")] public void TraceEvent(System.Diagnostics.TraceEventType eventType, int id, string message) { } [System.Diagnostics.ConditionalAttribute("TRACE")] public void TraceEvent(System.Diagnostics.TraceEventType eventType, int id, string format, params object[] args) { } [System.Diagnostics.ConditionalAttribute("TRACE")] public void TraceInformation(string message) { } [System.Diagnostics.ConditionalAttribute("TRACE")] public void TraceInformation(string format, params object[] args) { } public void TraceTransfer(int id, string message, 
System.Guid relatedActivityId) { } } [System.Diagnostics.SwitchLevelAttribute(typeof(System.Diagnostics.TraceLevel))] public partial class TraceSwitch : System.Diagnostics.Switch { public TraceSwitch(string displayName, string description) : base (default(string), default(string)) { } public TraceSwitch(string displayName, string description, string defaultSwitchValue) : base (default(string), default(string)) { } public System.Diagnostics.TraceLevel Level { get { throw null; } set { } } public bool TraceError { get { throw null; } } public bool TraceInfo { get { throw null; } } public bool TraceVerbose { get { throw null; } } public bool TraceWarning { get { throw null; } } protected override void OnSwitchSettingChanged() { } protected override void OnValueChanged() { } } }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Concurrency;
using System.Diagnostics.Contracts;
using System.Reactive;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Reactive.Threading.Tasks;
using ReactiveUIMicro;
using System.Threading.Tasks;

namespace ReactiveUIMicro.Xaml
{
    /// <summary>
    /// ReactiveAsyncCommand represents commands that run an asynchronous
    /// operation in the background when invoked. The main benefit of this
    /// command is that it will keep track of in-flight operations and
    /// disable/enable CanExecute when there are too many of them (i.e. a
    /// "Search" button shouldn't have many concurrent requests running if the
    /// user clicks the button many times quickly)
    /// </summary>
    public class ReactiveAsyncCommand : IReactiveAsyncCommand, IDisposable, IEnableLogger
    {
        /// <summary>
        /// Constructs a new ReactiveAsyncCommand.
        /// </summary>
        /// <param name="canExecute">An Observable representing when the command
        /// can execute. If null, the Command can always execute.</param>
        /// <param name="maximumConcurrent">The maximum number of in-flight
        /// operations at a time - defaults to one.</param>
        /// <param name="scheduler">The scheduler to run the asynchronous
        /// operations on - defaults to the Taskpool scheduler.</param>
        /// <param name="initialCondition">Initial CanExecute state</param>
        public ReactiveAsyncCommand(
            IObservable<bool> canExecute = null,
            int maximumConcurrent = 1,
            IScheduler scheduler = null,
            bool initialCondition = true)
        {
            commonCtor(maximumConcurrent, scheduler, canExecute, initialCondition);
        }

        /// <summary>
        /// Constructs a new ReactiveAsyncCommand whose CanExecute state is
        /// driven by a plain delegate instead of an Observable; the delegate
        /// is re-evaluated on every call to <see cref="CanExecute"/>.
        /// </summary>
        /// <param name="canExecute">Delegate invoked with the command parameter
        /// to decide whether the command may execute.</param>
        /// <param name="maximumConcurrent">The maximum number of in-flight
        /// operations at a time - defaults to one.</param>
        /// <param name="scheduler">The scheduler to run the asynchronous
        /// operations on - defaults to the Taskpool scheduler.</param>
        protected ReactiveAsyncCommand(
            Func<object, bool> canExecute,
            int maximumConcurrent = 1,
            IScheduler scheduler = null)
        {
            Contract.Requires(maximumConcurrent > 0);

            // Must be assigned before commonCtor runs, because commonCtor
            // reads it to compute the initial CanExecute value.
            _canExecuteExplicitFunc = canExecute;
            commonCtor(maximumConcurrent, scheduler);
        }

        /// <summary>
        /// Create is a helper method to create a basic ReactiveAsyncCommand
        /// in a non-Rx way, closer to how BackgroundWorker works.
        /// </summary>
        /// <param name="calculationFunc">The function that will calculate
        /// results in the background</param>
        /// <param name="callbackFunc">The method to be called once the
        /// calculation function completes. This method is guaranteed to be
        /// called on the UI thread.</param>
        /// <param name="canExecute">Optional delegate deciding whether the
        /// command may execute.</param>
        /// <param name="maximumConcurrent">The maximum number of in-flight
        /// operations at a time - defaults to one.</param>
        /// <param name="scheduler">The scheduler to run the asynchronous
        /// operations on - defaults to the Taskpool scheduler.</param>
        public static ReactiveAsyncCommand Create<TRet>(
            Func<object, TRet> calculationFunc,
            Action<TRet> callbackFunc,
            Func<object, bool> canExecute = null,
            int maximumConcurrent = 1,
            IScheduler scheduler = null)
        {
            var ret = new ReactiveAsyncCommand(canExecute, maximumConcurrent, scheduler);
            ret.RegisterAsyncFunction(calculationFunc).Subscribe(callbackFunc);
            return ret;
        }

        // Shared constructor logic: wires up the internal subjects, the
        // in-flight operation counter, and the combined CanExecute pipeline.
        void commonCtor(int maximumConcurrent, IScheduler scheduler, IObservable<bool> canExecute = null, bool initialCondition = true)
        {
            _normalSched = scheduler ?? RxApp.DeferredScheduler;

            _canExecuteSubject = new ScheduledSubject<bool>(_normalSched);
            _executeSubject = new ScheduledSubject<object>(Scheduler.Immediate);
            _exSubject = new ScheduledSubject<Exception>(_normalSched, RxApp.DefaultExceptionHandler);

            AsyncStartedNotification = new ScheduledSubject<Unit>(RxApp.DeferredScheduler);
            AsyncCompletedNotification = new ScheduledSubject<Unit>(RxApp.DeferredScheduler);

            // +1 for every started operation, -1 for every completed one; the
            // running sum is the number of in-flight operations. A negative
            // count indicates a bookkeeping bug and is logged as fatal.
            ItemsInflight = Observable.Merge(
                AsyncStartedNotification.Select(_ => 1),
                AsyncCompletedNotification.Select(_ => -1)
            ).Scan(0, (acc, x) => {
                var ret = acc + x;
                if (ret < 0) {
                    this.Log().Fatal("Reference count dropped below zero");
                }
                return ret;
            }).Multicast(new BehaviorSubject<int>(0)).PermaRef().ObserveOn(RxApp.DeferredScheduler);

            bool startCE = (_canExecuteExplicitFunc != null ? _canExecuteExplicitFunc(null) : initialCondition);

            // The command can execute only when the caller-supplied condition
            // holds AND a concurrency slot is free.
            CanExecuteObservable = Observable.CombineLatest(
                _canExecuteSubject.StartWith(startCE),
                ItemsInflight.Select(x => x < maximumConcurrent).StartWith(true),
                (canEx, slotsAvail) => canEx && slotsAvail)
                .DistinctUntilChanged();

            // Cache the latest CanExecute value so the synchronous
            // ICommand.CanExecute call can return without blocking.
            CanExecuteObservable.Subscribe(x => {
                this.Log().Debug("Setting canExecuteLatest to {0}", x);
                _canExecuteLatest = x;
                raiseCanExecuteChanged(EventArgs.Empty);
            });

            if (canExecute != null) {
                _inner = canExecute.Subscribe(_canExecuteSubject.OnNext, _exSubject.OnNext);
            }

            MaximumConcurrent = maximumConcurrent;
            ThrownExceptions = _exSubject;
        }

        IScheduler _normalSched;
        Func<object, bool> _canExecuteExplicitFunc = null;   // non-null only for the delegate-based ctor
        ISubject<bool> _canExecuteSubject;                   // pushes caller-supplied CanExecute values
        bool _canExecuteLatest;                              // cached result of the combined pipeline
        ISubject<object> _executeSubject;                    // fires once per Execute() with the parameter
        IDisposable _inner = null;                           // subscription to the caller's canExecute observable
        ScheduledSubject<Exception> _exSubject;              // funnels all caught exceptions

        public int MaximumConcurrent { get; protected set; }

        public IObservable<int> ItemsInflight { get; protected set; }

        public ISubject<Unit> AsyncStartedNotification { get; protected set; }

        public ISubject<Unit> AsyncCompletedNotification { get; protected set; }

        public IObservable<bool> CanExecuteObservable { get; protected set; }

        public IObservable<Exception> ThrownExceptions { get; protected set; }

        public event EventHandler CanExecuteChanged;

        /// <summary>
        /// Returns the cached CanExecute state. When the command was built with
        /// an explicit delegate, the delegate is re-evaluated first and its
        /// result is pushed into the CanExecute pipeline.
        /// </summary>
        public bool CanExecute(object parameter)
        {
            if (_canExecuteExplicitFunc != null) {
                _canExecuteSubject.OnNext(_canExecuteExplicitFunc(parameter));
            }

            this.Log().Debug("CanExecute: returning {0}", _canExecuteLatest);
            return _canExecuteLatest;
        }

        /// <summary>
        /// Publishes the command parameter to all registered async handlers;
        /// a no-op (logged as an error) when CanExecute is false.
        /// </summary>
        public void Execute(object parameter)
        {
            if (!CanExecute(parameter)) {
                this.Log().Error("Attempted to call Execute when CanExecute is False!");
                return;
            }

            _executeSubject.OnNext(parameter);
        }

        /// <summary>
        /// Subscribes to the stream of Execute invocations; any exception
        /// thrown by the observer's callbacks is routed to ThrownExceptions
        /// instead of tearing down the stream.
        /// </summary>
        public IDisposable Subscribe(IObserver<object> observer)
        {
            return _executeSubject.Subscribe(
                Observer.Create<object>(
                    x => marshalFailures(observer.OnNext, x),
                    ex => marshalFailures(observer.OnError, ex),
                    () => marshalFailures(observer.OnCompleted)));
        }

        /// <summary>
        /// Releases the subscription to the caller-supplied canExecute
        /// observable, if one was provided.
        /// </summary>
        public void Dispose()
        {
            if (_inner != null) {
                _inner.Dispose();
            }
        }

        /// <summary>
        /// RegisterAsyncObservable registers an Rx-based async method whose
        /// results are returned on the UI thread. Each Execute invocation
        /// starts the returned observable; start/completion notifications
        /// drive the in-flight counter, and any error is redirected to
        /// ThrownExceptions (the result stream itself never errors).
        /// </summary>
        /// <param name="calculationFunc">A calculation method that returns a
        /// future result, such as a method returned via
        /// Observable.FromAsyncPattern.</param>
        /// <returns>An Observable representing the items returned by the
        /// calculation result. Note that this Observable will fire whenever a
        /// command is executed.</returns>
        public IObservable<TResult> RegisterAsyncObservable<TResult>(Func<object, IObservable<TResult>> calculationFunc)
        {
            Contract.Requires(calculationFunc != null);

            var ret = _executeSubject
                .Select(x => {
                    AsyncStartedNotification.OnNext(Unit.Default);

                    return calculationFunc(x)
                        .Catch<TResult, Exception>(ex => {
                            _exSubject.OnNext(ex);
                            return Observable.Empty<TResult>();
                        })
                        .Finally(() => AsyncCompletedNotification.OnNext(Unit.Default));
                });

            return ret.Merge().Multicast(new ScheduledSubject<TResult>(RxApp.DeferredScheduler)).PermaRef();
        }

        // Invokes the block and reroutes any thrown exception into the
        // ThrownExceptions subject rather than letting it propagate.
        void marshalFailures<T>(Action<T> block, T param)
        {
            try {
                block(param);
            } catch (Exception ex) {
                _exSubject.OnNext(ex);
            }
        }

        // Parameterless overload of the exception-marshalling helper.
        void marshalFailures(Action block)
        {
            marshalFailures(_ => block(), Unit.Default);
        }

        /// <summary>
        /// Raises the CanExecuteChanged event.
        /// </summary>
        protected virtual void raiseCanExecuteChanged(EventArgs e)
        {
            EventHandler handler = this.CanExecuteChanged;

            if (handler != null) {
                handler(this, e);
            }
        }
    }

    public static class ReactiveAsyncCommandMixins
    {
        /// <summary>
        /// This method returns the current number of items in flight.
        /// </summary>
        public static int CurrentItemsInFlight(this IReactiveAsyncCommand This)
        {
            return This.ItemsInflight.First();
        }

        /// <summary>
        /// RegisterAsyncFunction registers an asynchronous method that returns a result
        /// to be called whenever the Command's Execute method is called.
        /// </summary>
        /// <param name="calculationFunc">The function to be run in the
        /// background.</param>
        /// <param name="scheduler">Scheduler to run the function on - defaults
        /// to the Taskpool scheduler.</param>
        /// <returns>An Observable that will fire on the UI thread once per
        /// invocation of Execute, once the async method completes. Subscribe to
        /// this to retrieve the result of the calculationFunc.</returns>
        public static IObservable<TResult> RegisterAsyncFunction<TResult>(this IReactiveAsyncCommand This,
            Func<object, TResult> calculationFunc,
            IScheduler scheduler = null)
        {
            Contract.Requires(calculationFunc != null);

            var asyncFunc = calculationFunc.ToAsync(scheduler ?? RxApp.TaskpoolScheduler);
            return This.RegisterAsyncObservable(asyncFunc);
        }

        /// <summary>
        /// RegisterAsyncAction registers an asynchronous method that runs
        /// whenever the Command's Execute method is called and doesn't return a
        /// result.
        /// </summary>
        /// <param name="calculationFunc">The function to be run in the
        /// background.</param>
        public static IObservable<Unit> RegisterAsyncAction(this IReactiveAsyncCommand This,
            Action<object> calculationFunc,
            IScheduler scheduler = null)
        {
            Contract.Requires(calculationFunc != null);

            return This.RegisterAsyncFunction(x => { calculationFunc(x); return new Unit(); }, scheduler);
        }

        /// <summary>
        /// RegisterAsyncTask registers an TPL/Async method that runs when a
        /// Command gets executed and returns the result
        /// </summary>
        /// <returns>An Observable that will fire on the UI thread once per
        /// invocation of Execute, once the async method completes. Subscribe to
        /// this to retrieve the result of the calculationFunc.</returns>
        public static IObservable<TResult> RegisterAsyncTask<TResult>(this IReactiveAsyncCommand This,
            Func<object, Task<TResult>> calculationFunc)
        {
            Contract.Requires(calculationFunc != null);

            return This.RegisterAsyncObservable(x => calculationFunc(x).ToObservable());
        }

        /// <summary>
        /// RegisterAsyncTask registers an TPL/Async method that runs when a
        /// Command gets executed and returns no result.
        /// </summary>
        /// <param name="calculationFunc">The function to be run in the
        /// background.</param>
        /// <returns>An Observable that signals when the Task completes, on
        /// the UI thread.</returns>
        public static IObservable<Unit> RegisterAsyncTask<TResult>(this IReactiveAsyncCommand This,
            Func<object, Task> calculationFunc)
        {
            Contract.Requires(calculationFunc != null);

            return This.RegisterAsyncObservable(x => calculationFunc(x).ToObservable());
        }
    }
}

// vim: tw=120 ts=4 sw=4 et :
//------------------------------------------------------------------------------
// <copyright file="Transfer.cs" company="Microsoft">
//      Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace Microsoft.Azure.Storage.DataMovement
{
    using System;
    using System.Globalization;
    using System.IO;
    using System.Runtime.Serialization;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Storage.Blob;

    /// <summary>
    /// Base class for transfer operation.
    /// </summary>
#if !BINARY_SERIALIZATION
    [DataContract]
    [KnownType(typeof(AzureBlobDirectoryLocation))]
    [KnownType(typeof(AzureBlobLocation))]
    [KnownType(typeof(AzureFileDirectoryLocation))]
    [KnownType(typeof(AzureFileLocation))]
    [KnownType(typeof(DirectoryLocation))]
    [KnownType(typeof(FileLocation))]
    // StreamLocation intentionally omitted because it is not serializable
    [KnownType(typeof(UriLocation))]
    [KnownType(typeof(SingleObjectTransfer))]
    [KnownType(typeof(DirectoryTransfer))]
    [KnownType(typeof(SubDirectoryTransfer))]
#endif
    internal abstract class Transfer : JournalItem, IDisposable
#if BINARY_SERIALIZATION
        , ISerializable
#endif // BINARY_SERIALIZATION
    {
        // Names of the values written to / read from the serialization payload.
        private const string FormatVersionName = "Version";
        private const string SourceName = "Source";
        private const string DestName = "Dest";
        private const string TransferMethodName = "TransferMethod";
        private const string TransferProgressName = "Progress";

        // Currently, we have two ways to persist the transfer instance:
        // 1. User can persist a TransferCheckpoint instance with all transfer instances in it.
        // 2. User can input a stream to TransferCheckpoint that DMLib will persist ongoing transfer instances to the stream.
        // 2# solution is used to transfer large amount of files without occupying too much memory.
        // With this solution,
        // a. when persisting a DirectoryTransfer, we don't save its subtransfers with it, instead we'll allocate a new
        //    transfer chunk for each subtransfer.
        // b. We don't persist its TransferProgressTracker with Transfer instance, instead we save the TransferProgressTracker to a separate place.
        // Please refer to the explanation in StreamJournal for details.
#if !BINARY_SERIALIZATION
        // Staging field used only by the DataContract serialization callbacks
        // below; mirrors ProgressTracker when not using a stream journal.
        [DataMember]
        private TransferProgressTracker progressTracker;
#endif

        /// <summary>
        /// Initializes a new instance of the <see cref="Transfer"/> class.
        /// </summary>
        /// <param name="source">Transfer source.</param>
        /// <param name="dest">Transfer destination.</param>
        /// <param name="transferMethod">Transfer method, see <see cref="TransferMethod"/> for detail available methods.</param>
        public Transfer(TransferLocation source, TransferLocation dest, TransferMethod transferMethod)
        {
            this.Source = source;
            this.Destination = dest;
            this.TransferMethod = transferMethod;
            this.ProgressTracker = new TransferProgressTracker();

            // Stamp the version so a deserialized instance can be rejected if
            // the format has changed since it was written.
            this.OriginalFormatVersion = Constants.FormatVersion;
        }

#if BINARY_SERIALIZATION
        /// <summary>
        /// Initializes a new instance of the <see cref="Transfer"/> class.
        /// </summary>
        /// <param name="info">Serialization information.</param>
        /// <param name="context">Streaming context.</param>
        protected Transfer(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
            {
                throw new System.ArgumentNullException("info");
            }

            // Refuse payloads written with a different format version.
            string version = info.GetString(FormatVersionName);
            if (!string.Equals(Constants.FormatVersion, version, StringComparison.Ordinal))
            {
                throw new System.InvalidOperationException(
                    string.Format(
                        CultureInfo.CurrentCulture,
                        Resources.DeserializationVersionNotMatchException,
                        "TransferJob",
                        version,
                        Constants.FormatVersion));
            }

            var serializableSourceLocation = (SerializableTransferLocation)info.GetValue(SourceName, typeof(SerializableTransferLocation));
            var serializableDestLocation = (SerializableTransferLocation)info.GetValue(DestName, typeof(SerializableTransferLocation));
            this.Source = serializableSourceLocation.Location;
            this.Destination = serializableDestLocation.Location;
            this.TransferMethod = (TransferMethod)info.GetValue(TransferMethodName, typeof(TransferMethod));

            // When deserializing through a StreamJournal the progress tracker
            // is persisted separately (see class comment above), so start with
            // a fresh tracker; otherwise read it from the payload.
            if (null == context.Context
                || !(context.Context is StreamJournal))
            {
                this.ProgressTracker = (TransferProgressTracker)info.GetValue(TransferProgressName, typeof(TransferProgressTracker));
            }
            else
            {
                this.ProgressTracker = new TransferProgressTracker();
            }
        }
#endif // BINARY_SERIALIZATION

#if !BINARY_SERIALIZATION
        // Copies ProgressTracker into the serialized staging field, except in
        // the stream-journal case where progress is persisted separately.
        [OnSerializing]
        private void OnSerializingCallback(StreamingContext context)
        {
            if (!IsStreamJournal)
            {
                this.progressTracker = this.ProgressTracker;
            }
        }

        // Validates the format version and restores ProgressTracker after
        // DataContract deserialization.
        [OnDeserialized]
        private void OnDeserializedCallback(StreamingContext context)
        {
            if (!string.Equals(Constants.FormatVersion, OriginalFormatVersion, StringComparison.Ordinal))
            {
                throw new System.InvalidOperationException(
                    string.Format(
                        CultureInfo.CurrentCulture,
                        Resources.DeserializationVersionNotMatchException,
                        "TransferJob",
                        OriginalFormatVersion,
                        Constants.FormatVersion));
            }

            if (!IsStreamJournal)
            {
                this.ProgressTracker = this.progressTracker;
            }
            else
            {
                this.ProgressTracker = new TransferProgressTracker();
            }

            // Reset cached instance-info state on both endpoints — presumably
            // to force a re-fetch after rehydration; confirm against
            // TransferLocation.IsInstanceInfoFetched usage.
            if (this.Source != null)
            {
                this.Source.IsInstanceInfoFetched = null;
            }

            if (this.Destination != null)
            {
                this.Destination.IsInstanceInfoFetched = null;
            }
        }
#endif

        /// <summary>
        /// Initializes a new instance of the <see cref="Transfer"/> class.
        /// </summary>
        // NOTE(review): this copy constructor does not copy BlobType, Context,
        // PreserveSMBPermissions or ProgressTracker — verify that callers set
        // these separately or that the omission is intentional.
        protected Transfer(Transfer other)
        {
            this.Source = other.Source;
            this.Destination = other.Destination;
            this.TransferMethod = other.TransferMethod;
            this.OriginalFormatVersion = other.OriginalFormatVersion;
            this.PreserveSMBAttributes = other.PreserveSMBAttributes;
        }

        /// <summary>
        /// Used to ensure that deserialized transfers are only used
        /// in scenarios with the same format version they were serialized with.
        /// </summary>
#if !BINARY_SERIALIZATION
        [DataMember]
#endif
        private string OriginalFormatVersion { get; set; }

        /// <summary>
        /// Gets source location for this transfer.
        /// </summary>
#if !BINARY_SERIALIZATION
        [DataMember]
#endif
        public TransferLocation Source { get; private set; }

        /// <summary>
        /// Gets destination location for this transfer.
        /// </summary>
#if !BINARY_SERIALIZATION
        [DataMember]
#endif
        public TransferLocation Destination { get; private set; }

        /// <summary>
        /// Gets the transfer method used in this transfer.
        /// </summary>
#if !BINARY_SERIALIZATION
        [DataMember]
#endif
        public TransferMethod TransferMethod { get; private set; }

#if !BINARY_SERIALIZATION
        /// <summary>
        /// Gets or sets a variable to indicate whether the transfer will be saved to a streamed journal.
        /// </summary>
        [DataMember]
        public bool IsStreamJournal { get; set; }
#endif

        /// <summary>
        /// Gets or sets the transfer context of this transfer.
        /// </summary>
        public virtual TransferContext Context { get; set; }

        /// <summary>
        /// Gets or sets blob type of destination blob.
        /// </summary>
        public BlobType BlobType { get; set; }

        /// <summary>
        /// Gets or sets a flag that indicates whether to preserve SMB attributes
        /// during transferring between local file to Azure File Service.
        /// </summary>
        public bool PreserveSMBAttributes { get; set; }

        /// <summary>
        /// Gets or sets a value that indicates whether to preserve SMB permissions during transferring.
        /// Preserving SMB permissions is only supported on Windows.
        /// </summary>
        public PreserveSMBPermissions PreserveSMBPermissions { get; set; }

        /// <summary>
        /// Gets the progress tracker for this transfer.
        /// </summary>
        public TransferProgressTracker ProgressTracker { get; protected set; }

#if BINARY_SERIALIZATION
        /// <summary>
        /// Serializes the object.
        /// </summary>
        /// <param name="info">Serialization info object.</param>
        /// <param name="context">Streaming context.</param>
        public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
            {
                throw new ArgumentNullException("info");
            }

            info.AddValue(FormatVersionName, Constants.FormatVersion, typeof(string));

            SerializableTransferLocation serializableSourceLocation = new SerializableTransferLocation(this.Source);
            SerializableTransferLocation serializableDestLocation = new SerializableTransferLocation(this.Destination);
            info.AddValue(SourceName, serializableSourceLocation, typeof(SerializableTransferLocation));
            info.AddValue(DestName, serializableDestLocation, typeof(SerializableTransferLocation));
            info.AddValue(TransferMethodName, this.TransferMethod);

            // Mirror of the constructor logic: progress is persisted here only
            // when NOT writing through a StreamJournal.
            if (null == context.Context
                || !(context.Context is StreamJournal))
            {
                info.AddValue(TransferProgressName, this.ProgressTracker);
            }
        }
#endif // BINARY_SERIALIZATION

        /// <summary>
        /// Execute the transfer asynchronously.
        /// </summary>
        /// <param name="scheduler">Transfer scheduler</param>
        /// <param name="cancellationToken">Token that can be used to cancel the transfer.</param>
        /// <returns>A task representing the transfer operation.</returns>
        public abstract Task ExecuteAsync(TransferScheduler scheduler, CancellationToken cancellationToken);

        /// <summary>
        /// Moves a job to the given status, adjusting the aggregate
        /// failed/skipped/transferred counters accordingly, then persists the
        /// change to the journal. The counter updates are serialized via a
        /// lock on the progress tracker.
        /// </summary>
        public void UpdateTransferJobStatus(TransferJob transferJob, TransferJobStatus targetStatus)
        {
            lock (this.ProgressTracker)
            {
                switch (targetStatus)
                {
                    case TransferJobStatus.Transfer:
                    case TransferJobStatus.Monitor:
                        // Re-activating a previously failed job undoes its
                        // contribution to the failure count.
                        if (transferJob.Status == TransferJobStatus.Failed)
                        {
                            UpdateProgress(transferJob, () => this.ProgressTracker.AddNumberOfFilesFailed(-1));
                        }

                        break;
                    case TransferJobStatus.Skipped:
                        UpdateProgress(transferJob, () => this.ProgressTracker.AddNumberOfFilesSkipped(1));
                        break;
                    case TransferJobStatus.Finished:
                        UpdateProgress(transferJob, () => this.ProgressTracker.AddNumberOfFilesTransferred(1));
                        break;
                    case TransferJobStatus.Failed:
                        UpdateProgress(transferJob, () => this.ProgressTracker.AddNumberOfFilesFailed(1));
                        break;
                    case TransferJobStatus.NotStarted:
                    default:
                        break;
                }

                transferJob.Status = targetStatus;
            }

            transferJob.Transfer.UpdateJournal();
        }

        /// <summary>
        /// Creates a copy of this transfer; implemented by derived classes.
        /// </summary>
        public abstract Transfer Copy();

        /// <summary>
        /// Persists this transfer to its journal, if one is attached.
        /// </summary>
        public void UpdateJournal()
        {
            this.Journal?.UpdateJournalItem(this);
        }

        // Runs the counter update while holding the job's progress-update read
        // lock (when present).
        // NOTE(review): EnterReadLock is inside the try block — if it threw,
        // ExitReadLock would run without the lock being held; confirm
        // ProgressUpdateLock's semantics make that impossible here.
        private static void UpdateProgress(TransferJob job, Action updateAction)
        {
            try
            {
                job.ProgressUpdateLock?.EnterReadLock();
                updateAction();
            }
            finally
            {
                job.ProgressUpdateLock?.ExitReadLock();
            }
        }

        /// <summary>
        /// Public dispose method to release all resources owned.
        /// </summary>
        public void Dispose()
        {
            this.Dispose(true);
            GC.SuppressFinalize(this);
        }

        protected virtual void Dispose(bool disposing)
        {
            // Nothing to dispose
        }
    }
}
#region WatiN Copyright (C) 2006-2007 Jeroen van Menen
//Copyright 2006-2007 Jeroen van Menen
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion Copyright

using System;
using System.Collections;
using System.Threading;
using System.Runtime.InteropServices;
using mshtml;
using SHDocVw;
using ItiN.Interfaces;
using ItiN.Exceptions;
using ItiN.UtilityClasses;

namespace ItiN
{
    /// <summary>
    /// This class hosts functionality for classes which are an entry point
    /// to a document and its elements and/or frames. Currently implemented
    /// by IE and HTMLDialog.
    /// </summary>
    public abstract class DomContainer : Document
    {
        // Lazily initialised through OnGetHtmlDocument; access via the
        // HtmlDocument property, never via this field directly.
        private IHTMLDocument2 htmlDocument;
        private SimpleTimer waitForCompleteTimeout;
        private bool disposed = false;

        public DomContainer()
        {
            DomContainer = this;
        }

        /// <summary>
        /// Gets the native window handle of the hosting Internet Explorer
        /// window or dialog.
        /// </summary>
        public abstract IntPtr hWnd { get; }

        /// <summary>
        /// This method must be overriden by all sub classes to supply the
        /// raw MSHTML document for this container.
        /// </summary>
        internal abstract IHTMLDocument2 OnGetHtmlDocument();

        /// <summary>
        /// Returns the 'raw' html document for the internet explorer DOM.
        /// The document is fetched on first access and cached afterwards.
        /// </summary>
        public override IHTMLDocument2 HtmlDocument
        {
            get
            {
                if (htmlDocument == null)
                {
                    htmlDocument = OnGetHtmlDocument();
                }
                return htmlDocument;
            }
        }

        /// <summary>
        /// Fires the given event on the given element.
        /// </summary>
        /// <param name="element">Element to fire the event on</param>
        /// <param name="eventName">Name of the event to fire</param>
        public virtual void FireEvent(DispHTMLBaseElement element, string eventName)
        {
            // TODO: Passing the eventarguments in a new param of type array. This array
            // holds 0 or more name/value pairs where the name is a property of the event object
            // and the value is the value that's assigned to the property.

            // Execute the JScript to fire the event inside the Browser.
            string scriptCode = "var newEvt = document.createEventObject();";
            scriptCode += "newEvt.button = 1;";
            scriptCode += "document.getElementById('" + element.uniqueID + "').fireEvent('" + eventName + "', newEvt);";

            try
            {
                RunScript(scriptCode);
            }
            catch (RunScriptException)
            {
                // In a cross domain automation scenario a System.UnauthorizedAccessException
                // is thrown. The following code works, but setting the event properties
                // has no effect so that is left out.
                object dummyEvt = null;
                // IHTMLEventObj2 mouseDownEvent = (IHTMLEventObj2)parentEvt;
                // mouseDownEvent.button = 1;
                object parentEvt = ((IHTMLDocument4)element.document).CreateEventObject(ref dummyEvt);
                element.FireEvent(eventName, ref parentEvt);
            }
        }

        /// <summary>
        /// Runs the javascript code in IE.
        /// </summary>
        /// <param name="scriptCode">The javascript code.</param>
        public void RunScript(string scriptCode)
        {
            RunScript(scriptCode, "javascript");
        }

        /// <summary>
        /// Runs the script code in IE.
        /// </summary>
        /// <param name="scriptCode">The script code.</param>
        /// <param name="language">The language.</param>
        /// <exception cref="ItiN.Exceptions.RunScriptException">
        /// Thrown when script execution fails (wraps the original exception).
        /// </exception>
        public void RunScript(string scriptCode, string language)
        {
            try
            {
                // BUGFIX: use the HtmlDocument property rather than the backing
                // field so the document is lazily fetched when RunScript is the
                // first member called on this container (the field would be null).
                IHTMLWindow2 window = HtmlDocument.parentWindow;
                window.execScript(scriptCode, language);
            }
            catch (Exception ex)
            {
                throw new ItiN.Exceptions.RunScriptException(ex);
            }
        }

        /// <summary>
        /// This method must be called by its inheritor to dispose references
        /// to internal resources.
        /// </summary>
        /// <param name="disposing">True when called from Dispose(), false from a finalizer.</param>
        protected override void Dispose(bool disposing)
        {
            if (!disposed)
            {
                htmlDocument = null;
                disposed = true;
                // BUGFIX: forward the actual disposing flag instead of the
                // literal 'true' so finalizer-driven disposal is reported
                // correctly to the base class.
                base.Dispose(disposing);
            }
        }

        /// <summary>
        /// This method calls InitTimeOut and waits till IE is ready
        /// processing or the timeout period has expired.
        /// To change the default time out, set <see cref="InfopathTester.Settings.WaitForCompleteTimeOut"/>
        /// </summary>
        public virtual void WaitForComplete()
        {
            InitTimeout();
            WaitForCompleteOrTimeout();
        }

        /// <summary>
        /// This method waits till IE is ready processing
        /// or the timeout period has expired. You should
        /// call InitTimeout prior to calling this method.
        /// </summary>
        protected internal void WaitForCompleteOrTimeout()
        {
            WaitWhileMainDocumentNotAvailable(this);
            WaitWhileDocumentStateNotComplete(HtmlDocument);
            //WaitForFramesToComplete(HtmlDocument);
        }

        //private void WaitForFramesToComplete(IHTMLDocument2 maindocument)
        //{
        //    HTMLDocument mainHtmlDocument = (HTMLDocument)maindocument;
        //    int framesCount = WatiN.Core.Frame.GetFrameCountFromHTMLDocument(mainHtmlDocument);
        //    for (int i = 0; i != framesCount; ++i)
        //    {
        //        IWebBrowser2 frame = WatiN.Core.Frame.GetFrameFromHTMLDocument(i, mainHtmlDocument);
        //        if (frame != null)
        //        {
        //            IHTMLDocument2 document;
        //            try
        //            {
        //                waitWhileIEBusy(frame);
        //                waitWhileIEStateNotComplete(frame);
        //                document = WaitWhileFrameDocumentNotAvailable(frame);
        //            }
        //            finally
        //            {
        //                // free frame
        //                Marshal.ReleaseComObject(frame);
        //            }
        //            WaitWhileDocumentStateNotComplete(document);
        //            WaitForFramesToComplete(document);
        //        }
        //    }
        //}

        /// <summary>
        /// This method is called to initialise the start time for
        /// determining a time out. It's set to the current time.
        /// </summary>
        /// <returns>The timer used to track the timeout period.</returns>
        protected internal SimpleTimer InitTimeout()
        {
            waitForCompleteTimeout = new SimpleTimer(InfopathTester.Settings.WaitForCompleteTimeOut);
            return waitForCompleteTimeout;
        }

        /// <summary>
        /// This method checks the return value of IsTimedOut. When true, it will
        /// throw a TimeoutException with the timeoutMessage param as message.
        /// </summary>
        /// <param name="timeoutMessage">The message to present when the TimeoutException is thrown</param>
        protected internal void ThrowExceptionWhenTimeout(string timeoutMessage)
        {
            if (IsTimedOut())
            {
                throw new ItiN.Exceptions.TimeoutException(timeoutMessage);
            }
        }

        /// <summary>
        /// This method evaluates the time between the last call to InitTimeOut
        /// and the current time. If the timespan is more than the configured
        /// timeout, the return value will be true.
        /// </summary>
        /// <returns>True when the timeout period has elapsed.</returns>
        protected internal bool IsTimedOut()
        {
            return waitForCompleteTimeout.Elapsed;
        }

        // Polls the document's readyState every 100ms until it reports
        // "complete" or the timeout elapses.
        private void WaitWhileDocumentStateNotComplete(IHTMLDocument2 htmlDocument)
        {
            HTMLDocument document = (HTMLDocument)htmlDocument;
            while (document.readyState != "complete")
            {
                ThrowExceptionWhenTimeout("waiting for document state complete. Last state was '" + document.readyState + "'");
                Thread.Sleep(100);
            }
        }

        // Blocks until the container's main document can be obtained; COM
        // exceptions while IE is still starting up are swallowed and retried.
        private void WaitWhileMainDocumentNotAvailable(DomContainer domContainer)
        {
            IHTMLDocument2 document = null;
            while (document == null)
            {
                try
                {
                    document = domContainer.HtmlDocument;
                }
                catch { }
                document = IsDocumentAvailable(document, "maindocument");
            }
        }

        // Blocks until the given frame exposes its document; retried on COM errors.
        private IHTMLDocument2 WaitWhileFrameDocumentNotAvailable(IWebBrowser2 frame)
        {
            IHTMLDocument2 document = null;
            while (document == null)
            {
                try
                {
                    document = frame.Document as IHTMLDocument2;
                }
                catch { }
                document = IsDocumentAvailable(document, "frame");
            }
            return document;
        }

        private static bool IsDocumentReadyStateAvailable(IHTMLDocument2 document)
        {
            if (document != null)
            {
                // Sometimes an OutOfMemoryException or ComException occurs while accessing
                // the readystate property of IHTMLDocument2. Giving MSHTML some time
                // to do further processing seems to solve this problem.
                try
                {
                    string readyState = ((HTMLDocument)document).readyState;
                    return true;
                }
                catch { }
            }
            return false;
        }

        // Returns the document when it is usable, or null (after a short sleep)
        // so the caller's polling loop continues; throws on timeout.
        private IHTMLDocument2 IsDocumentAvailable(IHTMLDocument2 document, string documentType)
        {
            if (document == null)
            {
                ThrowExceptionWhenTimeout(String.Format("waiting for {0} document becoming available", documentType));
                Thread.Sleep(100);
            }
            else if (!IsDocumentReadyStateAvailable(document))
            {
                // Document exists but MSHTML isn't ready to report its state yet.
                document = null;
                Thread.Sleep(500);
            }
            return document;
        }

        /// <summary>
        /// Gets the process ID the Internet Explorer or HTMLDialog is running in.
        /// </summary>
        /// <value>The process ID.</value>
        public int ProcessID
        {
            get
            {
                int iePid;
                NativeMethods.GetWindowThreadProcessId(hWnd, out iePid);
                return iePid;
            }
        }

        protected void waitWhileIEStateNotComplete(IWebBrowser2 ie)
        {
            while (IsIEReadyStateNotComplete(ie))
            {
                ThrowExceptionWhenTimeout("Internet Explorer state not complete");
                Thread.Sleep(100);
            }
        }

        // Renamed from IsIEReadyStateComplete: the method returns true while the
        // browser is NOT yet complete, which the old name stated backwards.
        private static bool IsIEReadyStateNotComplete(IWebBrowser2 ie)
        {
            try
            {
                return ie.ReadyState != tagREADYSTATE.READYSTATE_COMPLETE;
            }
            catch
            {
                // COM access can fail transiently during navigation; treat as complete.
                return false;
            }
        }

        protected void waitWhileIEBusy(IWebBrowser2 ie)
        {
            while (IsIEBusy(ie))
            {
                ThrowExceptionWhenTimeout("Internet Explorer busy");
                Thread.Sleep(100);
            }
        }

        private static bool IsIEBusy(IWebBrowser2 ie)
        {
            try
            {
                return ie.Busy;
            }
            catch
            {
                // COM access can fail transiently during navigation; treat as not busy.
                return false;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Linq.Expressions;
using Xunit;

namespace System.Linq.Tests
{
    public class GroupJoinTests : EnumerableBasedTests
    {
        public struct CustomerRec
        {
            public string name;
            public int? custID;
        }

        public struct OrderRec
        {
            public int? orderID;
            public int? custID;
            public int? total;
        }

        public struct AnagramRec
        {
            public string name;
            public int? orderID;
            public int? total;
        }

        public struct JoinRec : IEquatable<JoinRec>
        {
            public string name;
            public int?[] orderID;
            public int?[] total;

            public override int GetHashCode()
            {
                // Not great, but it'll serve. BUGFIX: must not throw for null
                // members — Equals() below deliberately treats null members as
                // comparable, so GetHashCode has to tolerate them too (a
                // default-constructed JoinRec previously threw NRE here).
                // Non-null values hash exactly as before.
                int hash = name == null ? 0 : name.GetHashCode();
                if (orderID != null) hash ^= orderID.Length;
                if (total != null) hash ^= total.Length * 31;
                return hash;
            }

            public bool Equals(JoinRec other)
            {
                if (!string.Equals(name, other.name)) return false;
                if (orderID == null)
                {
                    if (other.orderID != null) return false;
                }
                else
                {
                    if (other.orderID == null) return false;
                    if (orderID.Length != other.orderID.Length) return false;
                    for (int i = 0; i != other.orderID.Length; ++i)
                        if (orderID[i] != other.orderID[i]) return false;
                }
                if (total == null)
                {
                    if (other.total != null) return false;
                }
                else
                {
                    if (other.total == null) return false;
                    if (total.Length != other.total.Length) return false;
                    for (int i = 0; i != other.total.Length; ++i)
                        if (total[i] != other.total[i]) return false;
                }
                return true;
            }

            public override bool Equals(object obj)
            {
                return obj is JoinRec && Equals((JoinRec)obj);
            }
        }

        // An empty outer sequence must produce an empty result regardless of inner.
        [Fact]
        public void OuterEmptyInnerNonEmpty()
        {
            CustomerRec[] outer = { };
            OrderRec[] inner = new []
            {
                new OrderRec{ orderID = 45321, custID = 98022, total = 50 },
                new OrderRec{ orderID = 97865, custID = 32103, total = 25 }
            };
            Assert.Empty(outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.custID, e => e.custID, (cr, orIE) => new JoinRec { name = cr.name, orderID = orIE.Select(o => o.orderID).ToArray(), total = orIE.Select(o => o.total).ToArray() }));
        }

        // Keys are matched through the caller-supplied comparer (anagram equality).
        [Fact]
        public void CustomComparer()
        {
            CustomerRec[] outer = new []
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new []
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };
            JoinRec[] expected = new []
            {
                new JoinRec{ name = "Tim", orderID = new int?[]{ 93489 }, total = new int?[]{ 45 } },
                new JoinRec{ name = "Bob", orderID = new int?[]{ }, total = new int?[]{ } },
                new JoinRec{ name = "Robert", orderID = new int?[]{ 93483 }, total = new int?[]{ 19 } }
            };

            Assert.Equal(expected, outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }, new AnagramEqualityComparer()));
        }

        // Null argument validation (comparer overload).
        [Fact]
        public void OuterNull()
        {
            IQueryable<CustomerRec> outer = null;
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("outer", () => outer.GroupJoin(inner.AsQueryable(), e => e.name, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }, new AnagramEqualityComparer()));
        }

        [Fact]
        public void InnerNull()
        {
            CustomerRec[] outer = new []
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            IQueryable<AnagramRec> inner = null;

            Assert.Throws<ArgumentNullException>("inner", () => outer.AsQueryable().GroupJoin(inner, e => e.name, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }, new AnagramEqualityComparer()));
        }

        [Fact]
        public void OuterKeySelectorNull()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("outerKeySelector", () => outer.AsQueryable().GroupJoin(inner.AsQueryable(), null, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }, new AnagramEqualityComparer()));
        }

        [Fact]
        public void InnerKeySelectorNull()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("innerKeySelector", () => outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, null, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }, new AnagramEqualityComparer()));
        }

        [Fact]
        public void ResultSelectorNull()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("resultSelector", () => outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name, (Expression<Func<CustomerRec, IEnumerable<AnagramRec>, JoinRec>>)null, new AnagramEqualityComparer()));
        }

        // Null argument validation (overload without a comparer).
        [Fact]
        public void OuterNullNoComparer()
        {
            IQueryable<CustomerRec> outer = null;
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("outer", () => outer.GroupJoin(inner.AsQueryable(), e => e.name, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void InnerNullNoComparer()
        {
            CustomerRec[] outer = new[]
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            IQueryable<AnagramRec> inner = null;

            Assert.Throws<ArgumentNullException>("inner", () => outer.AsQueryable().GroupJoin(inner, e => e.name, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void OuterKeySelectorNullNoComparer()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("outerKeySelector", () => outer.AsQueryable().GroupJoin(inner.AsQueryable(), null, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void InnerKeySelectorNullNoComparer()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("innerKeySelector", () => outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, null, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void ResultSelectorNullNoComparer()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };

            Assert.Throws<ArgumentNullException>("resultSelector", () => outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name, (Expression<Func<CustomerRec, IEnumerable<AnagramRec>, JoinRec>>)null));
        }

        // A null comparer must behave like the default comparer.
        [Fact]
        public void NullComparer()
        {
            CustomerRec[] outer = new []
            {
                new CustomerRec{ name = "Tim", custID = 1234 },
                new CustomerRec{ name = "Bob", custID = 9865 },
                new CustomerRec{ name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new []
            {
                new AnagramRec{ name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec{ name = "miT", orderID = 93489, total = 45 }
            };
            JoinRec[] expected = new []
            {
                new JoinRec{ name = "Tim", orderID = new int?[]{ }, total = new int?[]{ } },
                new JoinRec{ name = "Bob", orderID = new int?[]{ }, total = new int?[]{ } },
                new JoinRec{ name = "Robert", orderID = new int?[]{ 93483 }, total = new int?[]{ 19 } }
            };

            Assert.Equal(expected, outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name, (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }, null));
        }

        // Smoke tests: result count equals the outer sequence length.
        [Fact]
        public void GroupJoin1()
        {
            var count = (new int[] { 0, 1, 2 }).AsQueryable().GroupJoin(new int[] { 1, 2, 3 }, n1 => n1, n2 => n2, (n1, n2) => n1).Count();
            Assert.Equal(3, count);
        }

        [Fact]
        public void GroupJoin2()
        {
            var count = (new int[] { 0, 1, 2 }).AsQueryable().GroupJoin(new int[] { 1, 2, 3 }, n1 => n1, n2 => n2, (n1, n2) => n1, EqualityComparer<int>.Default).Count();
            Assert.Equal(3, count);
        }
    }
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using System;
using System.Collections.Specialized;
using System.Linq;
using System.Reactive.Linq;
using Avalonia.Animation;
using Avalonia.Collections;
using Avalonia.Data;
using Avalonia.Logging;
using Avalonia.Media;
using Avalonia.Rendering;
using Avalonia.VisualTree;

namespace Avalonia
{
    /// <summary>
    /// Base class for controls that provides rendering and related visual properties.
    /// </summary>
    /// <remarks>
    /// The <see cref="Visual"/> class represents elements that have a visual on-screen
    /// representation and stores all the information needed for an
    /// <see cref="IRenderer"/> to render the control. To traverse the visual tree, use the
    /// extension methods defined in <see cref="VisualExtensions"/>.
    /// </remarks>
    public class Visual : Animatable, IVisual
    {
        /// <summary>
        /// Defines the <see cref="Bounds"/> property.
        /// </summary>
        public static readonly DirectProperty<Visual, Rect> BoundsProperty =
            AvaloniaProperty.RegisterDirect<Visual, Rect>(nameof(Bounds), o => o.Bounds);

        public static readonly DirectProperty<Visual, TransformedBounds?> TransformedBoundsProperty =
            AvaloniaProperty.RegisterDirect<Visual, TransformedBounds?>(
                nameof(TransformedBounds),
                o => o.TransformedBounds);

        /// <summary>
        /// Defines the <see cref="ClipToBounds"/> property.
        /// </summary>
        public static readonly StyledProperty<bool> ClipToBoundsProperty =
            AvaloniaProperty.Register<Visual, bool>(nameof(ClipToBounds));

        /// <summary>
        /// Defines the <see cref="Clip"/> property.
        /// </summary>
        public static readonly StyledProperty<Geometry> ClipProperty =
            AvaloniaProperty.Register<Visual, Geometry>(nameof(Clip));

        /// <summary>
        /// Defines the <see cref="IsVisibleProperty"/> property.
        /// </summary>
        public static readonly StyledProperty<bool> IsVisibleProperty =
            AvaloniaProperty.Register<Visual, bool>(nameof(IsVisible), true);

        /// <summary>
        /// Defines the <see cref="Opacity"/> property.
        /// </summary>
        public static readonly StyledProperty<double> OpacityProperty =
            AvaloniaProperty.Register<Visual, double>(nameof(Opacity), 1);

        /// <summary>
        /// Defines the <see cref="OpacityMask"/> property.
        /// </summary>
        public static readonly StyledProperty<IBrush> OpacityMaskProperty =
            AvaloniaProperty.Register<Visual, IBrush>(nameof(OpacityMask));

        /// <summary>
        /// Defines the <see cref="RenderTransform"/> property.
        /// </summary>
        public static readonly StyledProperty<Transform> RenderTransformProperty =
            AvaloniaProperty.Register<Visual, Transform>(nameof(RenderTransform));

        /// <summary>
        /// Defines the <see cref="RenderTransformOrigin"/> property.
        /// </summary>
        public static readonly StyledProperty<RelativePoint> RenderTransformOriginProperty =
            AvaloniaProperty.Register<Visual, RelativePoint>(nameof(RenderTransformOrigin), defaultValue: RelativePoint.Center);

        /// <summary>
        /// Defines the <see cref="IVisual.VisualParent"/> property.
        /// </summary>
        public static readonly DirectProperty<Visual, IVisual> VisualParentProperty =
            AvaloniaProperty.RegisterDirect<Visual, IVisual>("VisualParent", o => o._visualParent);

        /// <summary>
        /// Defines the <see cref="ZIndex"/> property.
        /// </summary>
        public static readonly StyledProperty<int> ZIndexProperty =
            AvaloniaProperty.Register<Visual, int>(nameof(ZIndex));

        private Rect _bounds;
        private TransformedBounds? _transformedBounds;
        private IRenderRoot _visualRoot;
        private IVisual _visualParent;

        /// <summary>
        /// Initializes static members of the <see cref="Visual"/> class.
        /// </summary>
        static Visual()
        {
            AffectsRender(
                BoundsProperty,
                ClipProperty,
                ClipToBoundsProperty,
                IsVisibleProperty,
                OpacityProperty);
            RenderTransformProperty.Changed.Subscribe(RenderTransformChanged);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Visual"/> class.
        /// </summary>
        public Visual()
        {
            var visualChildren = new AvaloniaList<IVisual>();
            visualChildren.ResetBehavior = ResetBehavior.Remove;
            visualChildren.Validate = ValidateVisualChild;
            visualChildren.CollectionChanged += VisualChildrenChanged;
            VisualChildren = visualChildren;
        }

        /// <summary>
        /// Raised when the control is attached to a rooted visual tree.
        /// </summary>
        public event EventHandler<VisualTreeAttachmentEventArgs> AttachedToVisualTree;

        /// <summary>
        /// Raised when the control is detached from a rooted visual tree.
        /// </summary>
        public event EventHandler<VisualTreeAttachmentEventArgs> DetachedFromVisualTree;

        /// <summary>
        /// Gets the bounds of the control relative to its parent.
        /// </summary>
        public Rect Bounds
        {
            get { return _bounds; }
            protected set { SetAndRaise(BoundsProperty, ref _bounds, value); }
        }

        /// <summary>
        /// Gets the bounds of the control relative to the window, accounting for rendering transforms.
        /// </summary>
        public TransformedBounds? TransformedBounds => _transformedBounds;

        /// <summary>
        /// Gets a value indicating whether the control should be clipped to its bounds.
        /// </summary>
        public bool ClipToBounds
        {
            get { return GetValue(ClipToBoundsProperty); }
            set { SetValue(ClipToBoundsProperty, value); }
        }

        /// <summary>
        /// Gets or sets the geometry clip for this visual.
        /// </summary>
        public Geometry Clip
        {
            get { return GetValue(ClipProperty); }
            set { SetValue(ClipProperty, value); }
        }

        /// <summary>
        /// Gets a value indicating whether this control and all its parents are visible.
        /// </summary>
        public bool IsEffectivelyVisible
        {
            get { return this.GetSelfAndVisualAncestors().All(x => x.IsVisible); }
        }

        /// <summary>
        /// Gets a value indicating whether this control is visible.
        /// </summary>
        public bool IsVisible
        {
            get { return GetValue(IsVisibleProperty); }
            set { SetValue(IsVisibleProperty, value); }
        }

        /// <summary>
        /// Gets the opacity of the control.
        /// </summary>
        public double Opacity
        {
            get { return GetValue(OpacityProperty); }
            set { SetValue(OpacityProperty, value); }
        }

        /// <summary>
        /// Gets the opacity mask of the control.
        /// </summary>
        public IBrush OpacityMask
        {
            get { return GetValue(OpacityMaskProperty); }
            set { SetValue(OpacityMaskProperty, value); }
        }

        /// <summary>
        /// Gets the render transform of the control.
        /// </summary>
        public Transform RenderTransform
        {
            get { return GetValue(RenderTransformProperty); }
            set { SetValue(RenderTransformProperty, value); }
        }

        /// <summary>
        /// Gets the transform origin of the control.
        /// </summary>
        public RelativePoint RenderTransformOrigin
        {
            get { return GetValue(RenderTransformOriginProperty); }
            set { SetValue(RenderTransformOriginProperty, value); }
        }

        /// <summary>
        /// Gets the Z index of the control.
        /// </summary>
        /// <remarks>
        /// Controls with a higher <see cref="ZIndex"/> will appear in front of controls with
        /// a lower ZIndex. If two controls have the same ZIndex then the control that appears
        /// later in the containing element's children collection will appear on top.
        /// </remarks>
        public int ZIndex
        {
            get { return GetValue(ZIndexProperty); }
            set { SetValue(ZIndexProperty, value); }
        }

        /// <summary>
        /// Gets the control's child visuals.
        /// </summary>
        protected IAvaloniaList<IVisual> VisualChildren
        {
            get;
            private set;
        }

        /// <summary>
        /// Gets the root of the visual tree, if the control is attached to a visual tree.
        /// </summary>
        protected IRenderRoot VisualRoot => _visualRoot ?? (this as IRenderRoot);

        /// <summary>
        /// Gets a value indicating whether this control is attached to a visual root.
        /// </summary>
        bool IVisual.IsAttachedToVisualTree => VisualRoot != null;

        /// <summary>
        /// Gets the control's child controls.
        /// </summary>
        IAvaloniaReadOnlyList<IVisual> IVisual.VisualChildren => VisualChildren;

        /// <summary>
        /// Gets the control's parent visual.
        /// </summary>
        IVisual IVisual.VisualParent => _visualParent;

        /// <summary>
        /// Gets the root of the visual tree, if the control is attached to a visual tree.
        /// </summary>
        IRenderRoot IVisual.VisualRoot => VisualRoot;

        TransformedBounds? IVisual.TransformedBounds
        {
            get { return _transformedBounds; }
            set { SetAndRaise(TransformedBoundsProperty, ref _transformedBounds, value); }
        }

        /// <summary>
        /// Invalidates the visual and queues a repaint.
        /// </summary>
        public void InvalidateVisual()
        {
            VisualRoot?.Renderer?.AddDirty(this);
        }

        /// <summary>
        /// Renders the visual to a <see cref="DrawingContext"/>.
        /// </summary>
        /// <param name="context">The drawing context.</param>
        public virtual void Render(DrawingContext context)
        {
            Contract.Requires<ArgumentNullException>(context != null);
        }

        /// <summary>
        /// Returns a transform that transforms the visual's coordinates into the coordinates
        /// of the specified <paramref name="visual"/>.
        /// </summary>
        /// <param name="visual">The visual to translate the coordinates to.</param>
        /// <returns>
        /// A <see cref="Matrix"/> containing the transform or null if the visuals don't share a
        /// common ancestor.
        /// </returns>
        public Matrix? TransformToVisual(IVisual visual)
        {
            var common = this.FindCommonVisualAncestor(visual);

            if (common != null)
            {
                // Express both visuals in the common ancestor's space, then
                // compose "into ancestor" with "out of ancestor".
                var thisOffset = GetOffsetFrom(common, this);
                var thatOffset = GetOffsetFrom(common, visual);
                return Matrix.CreateTranslation(-thatOffset) * Matrix.CreateTranslation(thisOffset);
            }

            return null;
        }

        /// <summary>
        /// Indicates that a property change should cause <see cref="InvalidateVisual"/> to be
        /// called.
        /// </summary>
        /// <param name="properties">The properties.</param>
        /// <remarks>
        /// This method should be called in a control's static constructor with each property
        /// on the control which when changed should cause a redraw. This is similar to WPF's
        /// FrameworkPropertyMetadata.AffectsRender flag.
        /// </remarks>
        protected static void AffectsRender(params AvaloniaProperty[] properties)
        {
            foreach (var property in properties)
            {
                property.Changed.Subscribe(AffectsRenderInvalidate);
            }
        }

        /// <summary>
        /// Calls the <see cref="OnAttachedToVisualTree(VisualTreeAttachmentEventArgs)"/> method
        /// for this control and all of its visual descendants.
        /// </summary>
        /// <param name="e">The event args.</param>
        protected virtual void OnAttachedToVisualTreeCore(VisualTreeAttachmentEventArgs e)
        {
            Logger.Verbose(LogArea.Visual, this, "Attached to visual tree");

            _visualRoot = e.Root;

            if (RenderTransform != null)
            {
                RenderTransform.Changed += RenderTransformChanged;
            }

            OnAttachedToVisualTree(e);
            AttachedToVisualTree?.Invoke(this, e);
            InvalidateVisual();

            if (VisualChildren != null)
            {
                foreach (Visual child in VisualChildren.OfType<Visual>())
                {
                    child.OnAttachedToVisualTreeCore(e);
                }
            }
        }

        /// <summary>
        /// Calls the <see cref="OnDetachedFromVisualTree(VisualTreeAttachmentEventArgs)"/> method
        /// for this control and all of its visual descendants.
        /// </summary>
        /// <param name="e">The event args.</param>
        protected virtual void OnDetachedFromVisualTreeCore(VisualTreeAttachmentEventArgs e)
        {
            Logger.Verbose(LogArea.Visual, this, "Detached from visual tree");

            _visualRoot = null;

            if (RenderTransform != null)
            {
                RenderTransform.Changed -= RenderTransformChanged;
            }

            OnDetachedFromVisualTree(e);
            DetachedFromVisualTree?.Invoke(this, e);
            e.Root?.Renderer?.AddDirty(this);

            if (VisualChildren != null)
            {
                foreach (Visual child in VisualChildren.OfType<Visual>())
                {
                    child.OnDetachedFromVisualTreeCore(e);
                }
            }
        }

        /// <summary>
        /// Called when the control is added to a visual tree.
        /// </summary>
        /// <param name="e">The event args.</param>
        protected virtual void OnAttachedToVisualTree(VisualTreeAttachmentEventArgs e)
        {
        }

        /// <summary>
        /// Called when the control is removed from a visual tree.
        /// </summary>
        /// <param name="e">The event args.</param>
        protected virtual void OnDetachedFromVisualTree(VisualTreeAttachmentEventArgs e)
        {
        }

        /// <summary>
        /// Called when the control's visual parent changes.
        /// </summary>
        /// <param name="oldParent">The old visual parent.</param>
        /// <param name="newParent">The new visual parent.</param>
        protected virtual void OnVisualParentChanged(IVisual oldParent, IVisual newParent)
        {
            RaisePropertyChanged(VisualParentProperty, oldParent, newParent, BindingPriority.LocalValue);
        }

        /// <summary>
        /// Called when a property changes that should invalidate the visual.
        /// </summary>
        /// <param name="e">The event args.</param>
        private static void AffectsRenderInvalidate(AvaloniaPropertyChangedEventArgs e)
        {
            (e.Sender as Visual)?.InvalidateVisual();
        }

        /// <summary>
        /// Gets the visual offset from the specified ancestor.
        /// </summary>
        /// <param name="ancestor">The ancestor visual.</param>
        /// <param name="visual">The visual.</param>
        /// <returns>The visual offset.</returns>
        /// <exception cref="ArgumentException">
        /// Thrown when <paramref name="visual"/> is not a descendant of <paramref name="ancestor"/>.
        /// </exception>
        private static Vector GetOffsetFrom(IVisual ancestor, IVisual visual)
        {
            var result = new Vector();

            while (visual != ancestor)
            {
                result = new Vector(result.X + visual.Bounds.X, result.Y + visual.Bounds.Y);
                visual = visual.VisualParent;

                if (visual == null)
                {
                    throw new ArgumentException("'visual' is not a descendant of 'ancestor'.");
                }
            }

            return result;
        }

        /// <summary>
        /// Called when a visual's <see cref="RenderTransform"/> changes.
        /// </summary>
        /// <param name="e">The event args.</param>
        private static void RenderTransformChanged(AvaloniaPropertyChangedEventArgs e)
        {
            var sender = e.Sender as Visual;

            if (sender?.VisualRoot != null)
            {
                var oldValue = e.OldValue as Transform;
                var newValue = e.NewValue as Transform;

                // Re-point the Changed subscription from the old transform to the new
                // one so mutations of the transform itself trigger a repaint.
                if (oldValue != null)
                {
                    oldValue.Changed -= sender.RenderTransformChanged;
                }

                if (newValue != null)
                {
                    newValue.Changed += sender.RenderTransformChanged;
                }

                sender.InvalidateVisual();
            }
        }

        /// <summary>
        /// Ensures a visual child is not null and not already parented.
        /// </summary>
        /// <param name="c">The visual child.</param>
        private static void ValidateVisualChild(IVisual c)
        {
            if (c == null)
            {
                // BUGFIX: the original passed the message string as the
                // ArgumentNullException(string paramName) argument, so the
                // message surfaced as a bogus parameter name. Use the
                // (paramName, message) overload instead.
                throw new ArgumentNullException(nameof(c), "Cannot add null to VisualChildren.");
            }

            if (c.VisualParent != null)
            {
                throw new InvalidOperationException("The control already has a visual parent.");
            }
        }

        /// <summary>
        /// Called when the <see cref="RenderTransform"/>'s <see cref="Transform.Changed"/> event
        /// is fired.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The event args.</param>
        private void RenderTransformChanged(object sender, EventArgs e)
        {
            InvalidateVisual();
        }

        /// <summary>
        /// Sets the visual parent of the Visual.
        /// </summary>
        /// <param name="value">The visual parent.</param>
        private void SetVisualParent(Visual value)
        {
            if (_visualParent == value)
            {
                return;
            }

            var old = _visualParent;
            _visualParent = value;

            // Detach from the old tree first (if we were rooted), then attach to
            // the new one when the new parent is itself rooted.
            if (_visualRoot != null)
            {
                var e = new VisualTreeAttachmentEventArgs(old, VisualRoot);
                OnDetachedFromVisualTreeCore(e);
            }

            if (_visualParent is IRenderRoot || _visualParent?.IsAttachedToVisualTree == true)
            {
                var root = this.GetVisualAncestors().OfType<IRenderRoot>().FirstOrDefault();
                var e = new VisualTreeAttachmentEventArgs(_visualParent, root);
                OnAttachedToVisualTreeCore(e);
            }

            OnVisualParentChanged(old, value);
        }

        /// <summary>
        /// Called when the <see cref="VisualChildren"/> collection changes.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The event args.</param>
        private void VisualChildrenChanged(object sender, NotifyCollectionChangedEventArgs e)
        {
            switch (e.Action)
            {
                case NotifyCollectionChangedAction.Add:
                    foreach (Visual v in e.NewItems)
                    {
                        v.SetVisualParent(this);
                    }

                    break;

                case NotifyCollectionChangedAction.Remove:
                    foreach (Visual v in e.OldItems)
                    {
                        v.SetVisualParent(null);
                    }

                    break;

                case NotifyCollectionChangedAction.Replace:
                    foreach (Visual v in e.OldItems)
                    {
                        v.SetVisualParent(null);
                    }

                    foreach (Visual v in e.NewItems)
                    {
                        v.SetVisualParent(this);
                    }

                    break;
            }
        }
    }
}
using Abp.Authorization;
using Abp.AutoMapper;
using Abp.Collections.Extensions;
using Abp.Domain.Repositories;
using Abp.Localization;
using Abp.UI;
using Castle.Core.Internal;
using Microsoft.AspNet.Identity;
using NorthLion.Zero.Authorization;
using NorthLion.Zero.Authorization.Roles;
using NorthLion.Zero.PaginatedModel;
using NorthLion.Zero.Users.Dto;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace NorthLion.Zero.Users
{
    /// <summary>
    /// Application service for user administration: CRUD, roles, permissions,
    /// passwords and lockout. Requires the Pages_Users permission.
    /// </summary>
    [AbpAuthorize(PermissionNames.Pages_Users)]
    public class UserAppService : ZeroAppServiceBase, IUserAppService
    {
        private readonly IRepository<User, long> _userRepository;
        private readonly IPermissionManager _permissionManager;
        private readonly RoleManager _roleManager;

        public UserAppService(IRepository<User, long> userRepository, IPermissionManager permissionManager, RoleManager roleManager)
        {
            _userRepository = userRepository;
            _permissionManager = permissionManager;
            _roleManager = roleManager;
        }

        /// <summary>Explicitly prohibits a single permission for the given user.</summary>
        public async Task ProhibitPermission(ProhibitPermissionInput input)
        {
            var user = await UserManager.GetUserByIdAsync(input.UserId);
            var permission = _permissionManager.GetPermission(input.PermissionName);
            await UserManager.ProhibitPermissionAsync(user, permission);
        }

        //Example for primitive method parameters.
        /// <summary>Removes the user from the named role, surfacing identity errors via CheckErrors.</summary>
        public async Task RemoveUserFromRole(long userId, string roleName)
        {
            CheckErrors(await UserManager.RemoveFromRoleAsync(userId, roleName));
        }

        /// <summary>Returns a filtered, sorted, paginated page of users (or everything when GetAll is set).</summary>
        public async Task<UsersOutput> GetUsers(PaginatedInputDto input)
        {
            //This is only for demonstration purposes need perf tweaks
            if (input.GetAll) return AllResults;
            //--------------------------------------------------------
            await Task.FromResult(0); //Fake Async

            var pagesToSkip = PaginationHelpers.GetSkipTotal(input.Page, input.RowsPerPage);

            //Possible specification pattern required
            // NOTE(review): filtering and sorting run over IEnumerable here, so the
            // whole user table is pulled into memory before paging — see comment above.
            var users = GetUsersByStringFilter(_userRepository.GetAll(), input.SearchString, input.Filter);
            users = GetSortedUsers(input.Sort, input.SortDir, users);

            var usersListEnum = users as IList<User> ?? users.ToList();
            var totalPages = PaginationHelpers.GetRemainingPages(usersListEnum.Count(), input.RowsPerPage);
            var usersList = usersListEnum.Skip(pagesToSkip)
                .Take(input.RowsPerPage).ToList();

            return new UsersOutput() //Implements IPaginableResult
            {
                RemainingPages = totalPages,
                Page = input.Page,
                Rows = input.RowsPerPage,
                SearchString = input.SearchString,
                Users = usersList.Select(a => a.MapTo<UserListDto>()).ToList()
            };
        }

        /// <summary>Creates a user in the current tenant, saves, then assigns the default roles.</summary>
        public async Task CreateUser(CreateUserInput input)
        {
            var user = input.MapTo<User>();
            user.TenantId = AbpSession.TenantId;
            user.Password = new PasswordHasher().HashPassword(input.Password);
            user.IsEmailConfirmed = true;
            CheckErrors(await UserManager.CreateAsync(user));
            // Save before AssignDefaultRoles so the generated user.Id is available.
            await CurrentUnitOfWork.SaveChangesAsync();
            await AssignDefaultRoles(user.Id);
        }

        /// <summary>Applies profile edits to the currently logged-in user.</summary>
        public async Task UpdateUserProfile(EditProfileInput input)
        {
            var userFound = await GetCurrentUserAsync();
            var modified = input.MapTo(userFound);
            await UserManager.UpdateAsync(modified);
        }

        /// <summary>Applies admin edits to the user identified by input.Id.</summary>
        public async Task EditUser(UpdateUserInput input)
        {
            var userFound = _userRepository.Get(input.Id);
            var modified = input.MapTo(userFound);
            await UserManager.UpdateAsync(modified);
            //Notify user by email or something
        }

        /// <summary>Loads a user for editing; returns an empty input when no id is given (create scenario).</summary>
        public async Task<UpdateUserInput> GetUserForEdit(long? userId)
        {
            if (!userId.HasValue) return new UpdateUserInput();
            var user = await UserManager.GetUserByIdAsync(userId.Value);
            var input = user.MapTo<UpdateUserInput>();
            return input;
        }

        /// <summary>Returns all roles flagged with whether the given user holds each one.</summary>
        public async Task<UserRoleSelectorOutput> GetRolesForUser(long userId)
        {
            var userRoles = await UserManager.GetRolesAsync(userId);
            var allRoles = _roleManager.Roles.ToList();
            var checkRoles = GetActiveAndInactiveRoles(userRoles, allRoles);
            var user = await UserManager.GetUserByIdAsync(userId);
            return new UserRoleSelectorOutput()
            {
                UserId = user.Id,
                Roles = checkRoles,
            };
        }

        /// <summary>Replaces the user's role set with the given list.</summary>
        public async Task SetUserRoles(SetUserRolesInput input)
        {
            var user = await UserManager.GetUserByIdAsync(input.UserId);
            await UserManager.SetRoles(user, input.Roles.ToArray());
            //Notify user by email or something
        }

        /// <summary>Builds the profile edit model for the current user, including role names.</summary>
        public async Task<EditProfileInput> GetUserProfileForEdit()
        {
            var user = await GetCurrentUserAsync();
            var userProfileInfo = user.MapTo<EditProfileInput>();
            userProfileInfo.MyRoles = (await UserManager.GetRolesAsync(userProfileInfo.Id)).ToList();
            return userProfileInfo;
        }

        /// <summary>Placeholder for storing a profile picture; the property is intentionally not implemented.</summary>
        public async Task UpdateUserProfilePicture(long userId, string profilePicture)
        {
            var user = await _userRepository.FirstOrDefaultAsync(a => a.Id == userId);
            //Property not implemented for simplicity
            //user.ProfilePicture = profilePicture;
        }

        /// <summary>Changes the current user's password after verifying the old one.</summary>
        public async Task ChangeUserPassword(ChangePasswordInput input)
        {
            var user = await GetCurrentUserAsync();
            var hasher = new PasswordHasher();
            // NOTE(review): when CurrentPassword is empty this method silently does
            // nothing — confirm whether that is intended for this flow.
            if (!string.IsNullOrEmpty(input.CurrentPassword))
            {
                var checkedPassword = hasher.VerifyHashedPassword(user.Password, input.CurrentPassword);
                switch (checkedPassword)
                {
                    case PasswordVerificationResult.Failed:
                        //Is new password
                        throw new UserFriendlyException(L("InvalidPassword"));
                    case PasswordVerificationResult.Success:
                        //Is old password
                        user.Password = hasher.HashPassword(input.NewPassword);
                        await UserManager.UpdateAsync(user);
                        //Notify user by email or something
                        break;
                    case PasswordVerificationResult.SuccessRehashNeeded:
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
            }
        }

        /// <summary>Returns the full permission tree for a user with granted flags set.</summary>
        public async Task<CurrentUserPermissionsOutput> GetUserPermissions(long userId)
        {
            // Top-level permissions only; children are walked recursively by AddPermission.
            var allPermissions = _permissionManager.GetAllPermissions().Where(a => a.Parent == null).ToList();
            var user = await UserManager.GetUserByIdAsync(userId);
            var userPermissions = (await UserManager.GetGrantedPermissionsAsync(user)).ToList();
            var assignedPermissions = CheckPermissions(allPermissions, userPermissions).ToList();
            return new CurrentUserPermissionsOutput()
            {
                UserId = userId,
                AssignedPermissions = assignedPermissions
            };
        }

        /// <summary>Clears all user-specific permission grants/prohibitions.</summary>
        public async Task ResetPermissions(long userId)
        {
            var user = await UserManager.GetUserByIdAsync(userId);
            await UserManager.ResetAllPermissionsAsync(user);
            //Notify user by email or something
        }

        /// <summary>Disables lockout for the user.</summary>
        public async Task UnlockUser(long userId)
        {
            var user = await UserManager.GetUserByIdAsync(userId);
            user.IsLockoutEnabled = false;
            // NOTE(review): no explicit UpdateAsync/SaveChanges here — presumably the
            // ABP unit of work persists the tracked change; verify.
            //Notify user by email or something
        }

        /// <summary>Locks the user out for five days.</summary>
        public async Task LockUser(long userId)
        {
            var user = await UserManager.GetUserByIdAsync(userId);
            user.IsLockoutEnabled = true;
            //Five days
            //You can create a const for this
            // NOTE(review): DateTime.Now is assigned to a *Utc property — consider
            // DateTime.UtcNow to avoid a timezone-dependent lockout window.
            user.LockoutEndDateUtc = DateTime.Now.AddDays(5);
            //Notify user by email or something
        }

        /// <summary>Deletes the user via the repository (soft-delete behavior depends on the entity).</summary>
        public async Task DeleteUser(long userId)
        {
            var userToDelete = await UserManager.GetUserByIdAsync(userId);
            await _userRepository.DeleteAsync(userToDelete);
            //Notify admin by email or something
        }

        /// <summary>Grants or prohibits each permission in the input for the user.</summary>
        public async Task SetUserSpecialPermissions(SetUserSpecialPermissionsInput input)
        {
            var user = await UserManager.GetUserByIdAsync(input.UserId);
            foreach (var inputAssignedPermission in input.AssignedPermissions)
            {
                var permission = _permissionManager.GetPermission(inputAssignedPermission.Name);
                if (inputAssignedPermission.Granted)
                {
                    await UserManager.GrantPermissionAsync(user, permission);
                }
                else
                {
                    await UserManager.ProhibitPermissionAsync(user, permission);
                }
            }
            //Notify user by email or something
        }

        /// <summary>Sets a new password for any user without requiring the old one (admin flow).</summary>
        public async Task ChangePasswordFromAdmin(ChangePasswordInput input)
        {
            if (input.NewPassword != input.NewPasswordConfirmation)
                throw new UserFriendlyException(L("PasswordsNotMatch"));
            var user = await UserManager.GetUserByIdAsync(input.UserId);
            var hasher = new PasswordHasher();
            user.Password = hasher.HashPassword(input.NewPassword);
            await UserManager.UpdateAsync(user);
        }

        #region Helpers

        // Unpaginated result set used when input.GetAll is set.
        private UsersOutput AllResults
        {
            get
            {
                return new UsersOutput()
                {
                    RemainingPages = 0,
                    Page = 0,
                    Rows = 0,
                    SearchString = "",
                    //This is only a sample
                    Users = _userRepository.GetAll().ToList().Select(a => a.MapTo<UserListDto>()).ToList()
                };
            }
        }

        // Filters users by a case-insensitive UserName substring match.
        // NOTE(review): filterProperty is currently unused; ToUpper comparison is
        // culture-sensitive — consider IndexOf with OrdinalIgnoreCase.
        private IEnumerable<User> GetUsersByStringFilter(IQueryable<User> users, string searchString, string filterProperty)
        {
            Func<User, bool> exp = a => a.UserName.ToUpper().Contains(searchString.ToUpper());
            var usersResult = users
                .WhereIf(!searchString.IsNullOrEmpty(), exp);
            return usersResult;
        }

        // Adds every role flagged IsDefault to the newly created user.
        private async Task AssignDefaultRoles(long userId)
        {
            var user = await UserManager.GetUserByIdAsync(userId);
            var roles = _roleManager.Roles.Where(a => a.IsDefault);
            await UserManager.AddToRolesAsync(user.Id, roles.Select(a => a.Name).ToArray());
        }

        // Sorts by UserName or FullName; any other sort key falls back to Name.
        private IEnumerable<User> GetSortedUsers(string sort, string sortDir, IEnumerable<User> users)
        {
            switch (sort)
            {
                case "UserName":
                    users = sortDir == "desc"
                        ? users.OrderByDescending(a => a.UserName)
                        : users.OrderBy(a => a.UserName);
                    break;
                case "FullName":
                    users = sortDir == "desc"
                        ? users.OrderByDescending(a => a.FullName)
                        : users.OrderBy(a => a.FullName);
                    break;
                default:
                    users = sortDir == "desc"
                        ? users.OrderByDescending(a => a.Name)
                        : users.OrderBy(a => a.Name);
                    break;
            }
            return users;
        }

        // Maps each top-level permission (and, recursively, its children) to a DTO
        // with its granted flag.
        private IEnumerable<UserAssignedPermission> CheckPermissions(IEnumerable<Permission> allPermissions, ICollection<Permission> userPermissions)
        {
            var permissionsFound = new List<UserAssignedPermission>();
            foreach (var permission in allPermissions)
            {
                AddPermission(permissionsFound, userPermissions, permission, userPermissions.Any(a => a.Name == permission.Name));
            }
            return permissionsFound;
        }

        // Recursively builds a UserAssignedPermission node, localizing the display
        // name and descending into child permissions.
        private void AddPermission(ICollection<UserAssignedPermission> permissionsFound, ICollection<Permission> userPermissions, Permission allPermission, bool granted)
        {
            var childPermissions = new List<UserAssignedPermission>();
            var permission = new UserAssignedPermission()
            {
                DisplayName = allPermission.DisplayName.Localize(new LocalizationContext(LocalizationManager)),
                Granted = granted,
                Name = allPermission.Name,
                ParentPermission = allPermission.Parent?.Name
            };
            if (allPermission.Children.Any())
            {
                foreach (var childPermission in allPermission.Children)
                {
                    AddPermission(childPermissions, userPermissions, childPermission, userPermissions.Any(a => a.Name == childPermission.Name));
                }
                permission.ChildPermissions.AddRange(childPermissions);
            }
            permissionsFound.Add(permission);
        }

        // Projects every role to a selector DTO, marking the ones the user holds.
        private List<UserSelectRoleDto> GetActiveAndInactiveRoles(IList<string> userRoles, IEnumerable<Role> allRoles)
        {
            var roleDtos = new List<UserSelectRoleDto>();
            foreach (var allRole in allRoles)
            {
                roleDtos.Add(new UserSelectRoleDto()
                {
                    DisplayName = allRole.DisplayName,
                    Name = allRole.Name,
                    IsSelected = userRoles.Any(a => a == allRole.Name),
                    IsStatic = allRole.IsStatic
                });
            }
            return roleDtos;
        }

        #endregion
    }
}
/*
 * Location Intelligence APIs
 *
 * Incorporate our extensive geodata into everyday applications, business processes and workflows.
 *
 * OpenAPI spec version: 8.5.0
 *
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;

namespace pb.locationIntelligence.Model
{
    /// <summary>
    /// Address details attached to a tax-rate response.
    /// </summary>
    [DataContract]
    public partial class TaxRateAddress : IEquatable<TaxRateAddress>
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="TaxRateAddress" /> class.
        /// All components are optional and default to null.
        /// </summary>
        /// <param name="ObjectId">ObjectId.</param>
        /// <param name="MainAddressLine">MainAddressLine.</param>
        /// <param name="PlaceName">PlaceName.</param>
        /// <param name="AreaName1">AreaName1.</param>
        /// <param name="AreaName3">AreaName3.</param>
        /// <param name="PostCode1">PostCode1.</param>
        /// <param name="Country">Country.</param>
        public TaxRateAddress(string ObjectId = null, string MainAddressLine = null, string PlaceName = null, string AreaName1 = null, string AreaName3 = null, string PostCode1 = null, string Country = null)
        {
            this.ObjectId = ObjectId;
            this.MainAddressLine = MainAddressLine;
            this.PlaceName = PlaceName;
            this.AreaName1 = AreaName1;
            this.AreaName3 = AreaName3;
            this.PostCode1 = PostCode1;
            this.Country = Country;
        }

        /// <summary>
        /// Gets or Sets ObjectId
        /// </summary>
        [DataMember(Name="objectId", EmitDefaultValue=false)]
        public string ObjectId { get; set; }

        /// <summary>
        /// Gets or Sets MainAddressLine
        /// </summary>
        [DataMember(Name="mainAddressLine", EmitDefaultValue=false)]
        public string MainAddressLine { get; set; }

        /// <summary>
        /// Gets or Sets PlaceName
        /// </summary>
        [DataMember(Name="placeName", EmitDefaultValue=false)]
        public string PlaceName { get; set; }

        /// <summary>
        /// Gets or Sets AreaName1
        /// </summary>
        [DataMember(Name="areaName1", EmitDefaultValue=false)]
        public string AreaName1 { get; set; }

        /// <summary>
        /// Gets or Sets AreaName3
        /// </summary>
        [DataMember(Name="areaName3", EmitDefaultValue=false)]
        public string AreaName3 { get; set; }

        /// <summary>
        /// Gets or Sets PostCode1
        /// </summary>
        [DataMember(Name="postCode1", EmitDefaultValue=false)]
        public string PostCode1 { get; set; }

        /// <summary>
        /// Gets or Sets Country
        /// </summary>
        [DataMember(Name="country", EmitDefaultValue=false)]
        public string Country { get; set; }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            // Null components render as empty strings, matching StringBuilder.Append(null).
            return "class TaxRateAddress {\n"
                + $" ObjectId: {ObjectId}\n"
                + $" MainAddressLine: {MainAddressLine}\n"
                + $" PlaceName: {PlaceName}\n"
                + $" AreaName1: {AreaName1}\n"
                + $" AreaName3: {AreaName3}\n"
                + $" PostCode1: {PostCode1}\n"
                + $" Country: {Country}\n"
                + "}\n";
        }

        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="obj">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object obj)
        {
            return Equals(obj as TaxRateAddress);
        }

        /// <summary>
        /// Returns true if TaxRateAddress instances are equal
        /// </summary>
        /// <param name="other">Instance of TaxRateAddress to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(TaxRateAddress other)
        {
            if (other == null)
                return false;

            // string.Equals is null-safe ordinal comparison, equivalent to the
            // generated (a == b || a != null && a.Equals(b)) pattern.
            return string.Equals(ObjectId, other.ObjectId)
                && string.Equals(MainAddressLine, other.MainAddressLine)
                && string.Equals(PlaceName, other.PlaceName)
                && string.Equals(AreaName1, other.AreaName1)
                && string.Equals(AreaName3, other.AreaName3)
                && string.Equals(PostCode1, other.PostCode1)
                && string.Equals(Country, other.Country);
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            unchecked // Overflow is fine, just wrap
            {
                // Fold each non-null component in declaration order; identical to the
                // generated per-property 41/59 accumulation.
                int hash = 41;
                foreach (string component in new[] { ObjectId, MainAddressLine, PlaceName, AreaName1, AreaName3, PostCode1, Country })
                {
                    if (component != null)
                        hash = hash * 59 + component.GetHashCode();
                }
                return hash;
            }
        }
    }
}
using System; using System.Collections.Generic; using System.Linq; using Microsoft.Xna.Framework; using Microsoft.Xna.Framework.Graphics; using Pathoschild.Stardew.Common; using Pathoschild.Stardew.LookupAnything.Framework.Constants; using Pathoschild.Stardew.LookupAnything.Framework.Data; using Pathoschild.Stardew.LookupAnything.Framework.DebugFields; using Pathoschild.Stardew.LookupAnything.Framework.Fields; using Pathoschild.Stardew.LookupAnything.Framework.Models; using StardewModdingAPI; using StardewModdingAPI.Utilities; using StardewValley; using StardewValley.Characters; using StardewValley.Locations; using StardewValley.Monsters; using StardewValley.Network; using StardewValley.Objects; using SObject = StardewValley.Object; namespace Pathoschild.Stardew.LookupAnything.Framework.Lookups.Characters { /// <summary>Describes an NPC (including villagers, monsters, and pets).</summary> internal class CharacterSubject : BaseSubject { /********* ** Fields *********/ /// <summary>The NPC type.</summary> private readonly SubjectType TargetType; /// <summary>The lookup target.</summary> private readonly NPC Target; /// <summary>Provides subject entries.</summary> private readonly ISubjectRegistry Codex; /// <summary>Simplifies access to private game code.</summary> private readonly IReflectionHelper Reflection; /// <summary>Whether to only show content once the player discovers it.</summary> private readonly bool ProgressionMode; /// <summary>Whether to highlight item gift tastes which haven't been revealed in the NPC profile.</summary> private readonly bool HighlightUnrevealedGiftTastes; /// <summary>Whether to show all NPC gift tastes.</summary> private readonly bool ShowAllGiftTastes; /// <summary>Whether to look up the original entity when the game spawns a temporary copy.</summary> private readonly bool EnableTargetRedirection; /// <summary>Whether the NPC is Gourmand in the Fern Islands farm cave.</summary> private readonly bool IsGourmand; /// <summary>Whether 
the NPC is a haunted skull monster.</summary> private readonly bool IsHauntedSkull; /// <summary>Whether the NPC is a magma sprite monster.</summary> private readonly bool IsMagmaSprite; /********* ** Public methods *********/ /// <summary>Construct an instance.</summary> /// <param name="codex">Provides subject entries.</param> /// <param name="gameHelper">Provides utility methods for interacting with the game code.</param> /// <param name="npc">The lookup target.</param> /// <param name="type">The NPC type.</param> /// <param name="metadata">Provides metadata that's not available from the game data directly.</param> /// <param name="reflectionHelper">Simplifies access to private game code.</param> /// <param name="progressionMode">Whether to only show content once the player discovers it.</param> /// <param name="highlightUnrevealedGiftTastes">Whether to highlight item gift tastes which haven't been revealed in the NPC profile.</param> /// <param name="showAllGiftTastes">Whether to show all NPC gift tastes.</param> /// <param name="enableTargetRedirection">Whether to look up the original entity when the game spawns a temporary copy.</param> /// <remarks>Reverse engineered from <see cref="NPC"/>.</remarks> public CharacterSubject(ISubjectRegistry codex, GameHelper gameHelper, NPC npc, SubjectType type, Metadata metadata, IReflectionHelper reflectionHelper, bool progressionMode, bool highlightUnrevealedGiftTastes, bool showAllGiftTastes, bool enableTargetRedirection) : base(gameHelper) { this.Codex = codex; this.Reflection = reflectionHelper; this.ProgressionMode = progressionMode; this.HighlightUnrevealedGiftTastes = highlightUnrevealedGiftTastes; this.ShowAllGiftTastes = showAllGiftTastes; this.EnableTargetRedirection = enableTargetRedirection; // initialize this.Target = npc; this.TargetType = type; CharacterData overrides = metadata.GetCharacter(npc, type); this.Initialize( name: npc.getName(), description: overrides?.DescriptionKey != null ? 
I18n.GetByKey(overrides.DescriptionKey) : null, type: CharacterSubject.GetTypeName(npc, type) ); // detect special cases if (npc is Bat bat) { this.IsHauntedSkull = bat.hauntedSkull.Value; this.IsMagmaSprite = bat.magmaSprite.Value; } else this.IsGourmand = type == SubjectType.Villager && npc.Name == "Gourmand" && npc.currentLocation.Name == nameof(IslandFarmCave); } /// <summary>Get the data to display for this subject.</summary> public override IEnumerable<ICustomField> GetData() { NPC npc = this.Target; return this.TargetType switch { SubjectType.Monster => this.GetDataForMonster((Monster)npc), SubjectType.Pet => this.GetDataForPet((Pet)npc), SubjectType.Villager => npc switch { Child child => this.GetDataForChild(child), TrashBear trashBear => this.GetDataForTrashBear(trashBear), _ when this.IsGourmand => this.GetDataForGourmand(), _ => this.GetDataForVillager(npc) }, _ => Enumerable.Empty<ICustomField>() }; } /// <summary>Get raw debug data to display for this subject.</summary> public override IEnumerable<IDebugField> GetDebugFields() { NPC target = this.Target; Pet pet = target as Pet; // pinned fields yield return new GenericDebugField("facing direction", this.Stringify((FacingDirection)target.FacingDirection), pinned: true); yield return new GenericDebugField("walking towards player", this.Stringify(target.IsWalkingTowardPlayer), pinned: true); if (Game1.player.friendshipData.ContainsKey(target.Name)) { FriendshipModel friendship = this.GameHelper.GetFriendshipForVillager(Game1.player, target, Game1.player.friendshipData[target.Name]); yield return new GenericDebugField("friendship", $"{friendship.Points} (max {friendship.MaxPoints})", pinned: true); } if (pet != null) yield return new GenericDebugField("friendship", $"{pet.friendshipTowardFarmer} of {Pet.maxFriendship})", pinned: true); // raw fields foreach (IDebugField field in this.GetDebugFieldsFrom(target)) yield return field; } /// <summary>Draw the subject portrait (if available).</summary> /// 
<param name="spriteBatch">The sprite batch being drawn.</param> /// <param name="position">The position at which to draw.</param> /// <param name="size">The size of the portrait to draw.</param> /// <returns>Returns <c>true</c> if a portrait was drawn, else <c>false</c>.</returns> public override bool DrawPortrait(SpriteBatch spriteBatch, Vector2 position, Vector2 size) { NPC npc = this.Target; // special cases if (this.IsHauntedSkull || this.IsMagmaSprite) { var sourceRect = Game1.getSourceRectForStandardTileSheet(npc.Sprite.Texture, 4, 16, 16); spriteBatch.Draw(npc.Sprite.Texture, position: position, sourceRectangle: sourceRect, color: Color.White, rotation: 0, origin: Vector2.Zero, scale: new Vector2(size.X / 16), effects: SpriteEffects.None, layerDepth: 1); return true; } // use character portrait (most villager NPCs) if (npc.isVillager() && npc.Portrait != null && !this.IsGourmand) // Gourmand uses Professor Snail's portraits { spriteBatch.DrawSprite(npc.Portrait, new Rectangle(0, 0, NPC.portrait_width, NPC.portrait_height), position.X, position.Y, Color.White, size.X / NPC.portrait_width); return true; } // else draw sprite (e.g. for pets) npc.Sprite.draw(spriteBatch, position, 1, 0, 0, Color.White, scale: size.X / npc.Sprite.getWidth()); return true; } /********* ** Private methods *********/ /***** ** Data fields ****/ /// <summary>Get the fields to display for a child.</summary> /// <param name="child">The child for which to show info.</param> /// <remarks>Derived from <see cref="Child.dayUpdate"/>.</remarks> private IEnumerable<ICustomField> GetDataForChild(Child child) { // birthday SDate birthday = SDate.Now().AddDays(-child.daysOld.Value); yield return new GenericField(I18n.Npc_Birthday(), birthday.ToLocaleString(withYear: true)); // age { ChildAge stage = (ChildAge)child.Age; int daysOld = child.daysOld.Value; int daysToNext = this.GetDaysToNextChildGrowth(stage, daysOld); bool isGrown = daysToNext == -1; int daysAtNext = daysOld + (isGrown ? 
0 : daysToNext); string ageDesc = isGrown ? I18n.Npc_Child_Age_DescriptionGrown(label: I18n.For(stage)) : I18n.Npc_Child_Age_DescriptionPartial(label: I18n.For(stage), count: daysToNext, nextLabel: I18n.For(stage + 1)); yield return new PercentageBarField(I18n.Npc_Child_Age(), child.daysOld.Value, daysAtNext, Color.Green, Color.Gray, ageDesc); } // friendship if (Game1.player.friendshipData.ContainsKey(child.Name)) { FriendshipModel friendship = this.GameHelper.GetFriendshipForVillager(Game1.player, child, Game1.player.friendshipData[child.Name]); yield return new CharacterFriendshipField(I18n.Npc_Friendship(), friendship); yield return new GenericField(I18n.Npc_TalkedToday(), this.Stringify(Game1.player.friendshipData[child.Name].TalkedToToday)); } } /// <summary>Get the fields to display for the gourmand frog.</summary> /// <remarks>Derived from <see cref="IslandFarmCave.IndexForRequest"/>.</remarks> private IEnumerable<ICustomField> GetDataForGourmand() { // get cave IslandFarmCave cave = (IslandFarmCave)Game1.getLocationFromName("IslandFarmCave"); if (cave == null) yield break; int questsDone = cave.gourmandRequestsFulfilled.Value; int maxQuests = IslandFarmCave.TOTAL_GOURMAND_REQUESTS; // show items wanted if (questsDone <= maxQuests) { var checkboxes = new List<KeyValuePair<IFormattedText[], bool>>(); for (int i = 0; i < maxQuests; i++) { int index = cave.IndexForRequest(i); if (index == -1) continue; checkboxes.Add( CheckboxListField.Checkbox( text: this.GameHelper.GetObjectBySpriteIndex(index).DisplayName, value: questsDone > i ) ); } if (checkboxes.Any()) yield return new CheckboxListField(I18n.TrashBearOrGourmand_ItemWanted(), checkboxes); } // show progress yield return new GenericField(I18n.TrashBearOrGourmand_QuestProgress(), I18n.Generic_Ratio(questsDone, maxQuests)); } /// <summary>Get the fields to display for a monster.</summary> /// <param name="monster">The monster for which to show info.</param> /// <remarks>Derived from <see 
cref="Monster.parseMonsterInfo"/>.</remarks> private IEnumerable<ICustomField> GetDataForMonster(Monster monster) { // basic info bool canRerollDrops = Game1.player.isWearingRing(Ring.burglarsRing); yield return new GenericField(I18n.Monster_Invincible(), I18n.Generic_Seconds(count: this.Reflection.GetField<int>(monster, "invincibleCountdown").GetValue()), hasValue: monster.isInvincible()); yield return new PercentageBarField(I18n.Monster_Health(), monster.Health, monster.MaxHealth, Color.Green, Color.Gray, I18n.Generic_PercentRatio(percent: (int)Math.Round((monster.Health / (monster.MaxHealth * 1f) * 100)), value: monster.Health, max: monster.MaxHealth)); yield return new ItemDropListField(this.GameHelper, I18n.Monster_Drops(), this.GetMonsterDrops(monster), fadeNonGuaranteed: true, crossOutNonGuaranteed: !canRerollDrops, defaultText: I18n.Monster_Drops_Nothing()); yield return new GenericField(I18n.Monster_Experience(), this.Stringify(monster.ExperienceGained)); yield return new GenericField(I18n.Monster_Defense(), this.Stringify(monster.resilience.Value)); yield return new GenericField(I18n.Monster_Attack(), this.Stringify(monster.DamageToFarmer)); // Adventure Guild quest AdventureGuildQuestData adventureGuildQuest = this.Metadata.GetAdventurerGuildQuest(monster.Name); if (adventureGuildQuest != null) { int kills = adventureGuildQuest.Targets.Select(p => Game1.stats.getMonstersKilled(p)).Sum(); string goalName = GameI18n.GetString($@"Strings\Locations:AdventureGuild_KillList_{adventureGuildQuest.KillListKey}"); var checkbox = CheckboxListField.Checkbox( text: I18n.Monster_AdventureGuild_EradicationGoal(name: goalName, count: kills, requiredCount: adventureGuildQuest.RequiredKills), value: kills >= adventureGuildQuest.RequiredKills ); yield return new CheckboxListField(I18n.Monster_AdventureGuild(), checkbox); } } /// <summary>Get the fields to display for a pet.</summary> /// <param name="pet">The pet for which to show info.</param> /// <remarks>Derived from 
<see cref="Pet.checkAction"/> and <see cref="Pet.dayUpdate"/>.</remarks> private IEnumerable<ICustomField> GetDataForPet(Pet pet) { Farm farm = Game1.getFarm(); // friendship yield return new CharacterFriendshipField(I18n.Pet_Love(), this.GameHelper.GetFriendshipForPet(Game1.player, pet)); // petted today / last petted int? lastDayPetted = this.GetLastDayPetted(pet, Game1.player.UniqueMultiplayerID); yield return new GenericField(I18n.Pet_PettedToday(), lastDayPetted == Game1.Date.TotalDays ? I18n.Pet_LastPetted_Yes() : this.Stringify(false)); if (!lastDayPetted.HasValue) yield return new GenericField(I18n.Pet_LastPetted(), I18n.Pet_LastPetted_Never()); else if (lastDayPetted != Game1.Date.TotalDays) { int daysSincePetted = Game1.Date.TotalDays - lastDayPetted.Value; yield return new GenericField(I18n.Pet_LastPetted(), daysSincePetted == 1 ? I18n.Generic_Yesterday() : I18n.Pet_LastPetted_DaysAgo(daysSincePetted)); } // water bowl yield return new GenericField(I18n.Pet_WaterBowl(), farm.petBowlWatered.Value ? 
I18n.Pet_WaterBowl_Filled() : I18n.Pet_WaterBowl_Empty()); } /// <summary>Get the fields to display for the trash bear.</summary> /// <param name="trashBear">The trash bear for which to show info.</param> /// <remarks>Derived from <see cref="TrashBear.checkAction"/>.</remarks> private IEnumerable<ICustomField> GetDataForTrashBear(TrashBear trashBear) { // get number of quests completed const int maxQuests = 4; int questsDone = 0; if (NetWorldState.checkAnywhereForWorldStateID("trashBear1")) questsDone = 1; if (NetWorldState.checkAnywhereForWorldStateID("trashBear2")) questsDone = 2; if (NetWorldState.checkAnywhereForWorldStateID("trashBear3")) questsDone = 3; if (NetWorldState.checkAnywhereForWorldStateID("trashBearDone")) questsDone = 4; // show item wanted if (questsDone < maxQuests) { this.Reflection.GetMethod(trashBear, "updateItemWanted").Invoke(); int itemWantedIndex = this.Reflection.GetField<int>(trashBear, "itemWantedIndex").GetValue(); yield return new ItemIconField(this.GameHelper, I18n.TrashBearOrGourmand_ItemWanted(), this.GameHelper.GetObjectBySpriteIndex(itemWantedIndex), this.Codex); } // show progress yield return new GenericField(I18n.TrashBearOrGourmand_QuestProgress(), I18n.Generic_Ratio(questsDone, maxQuests)); } /// <summary>Get the fields to display for a villager NPC.</summary> /// <param name="npc">The NPC for which to show info.</param> private IEnumerable<ICustomField> GetDataForVillager(NPC npc) { // special case: Abigail in the mines is a temporary instance with the name // 'AbigailMine', so the info shown will be incorrect. if (this.EnableTargetRedirection && npc.Name == "AbigailMine" && npc.currentLocation?.Name == "UndergroundMine20") npc = Game1.getCharacterFromName("Abigail") ?? 
npc; // social fields (birthday, friendship, gifting, etc)
// NOTE(review): this is the tail of an iterator method whose start is above this excerpt;
// it yields lookup fields for a villager NPC. Only shown for social villagers.
if (this.GameHelper.IsSocialVillager(npc))
{
    // birthday (only yielded when the NPC has a valid birthday date)
    if (this.GameHelper.TryGetDate(npc.Birthday_Day, npc.Birthday_Season, out SDate birthday))
        yield return new GenericField(I18n.Npc_Birthday(), I18n.Stringify(birthday));

    // friendship fields are only available once the player has met the NPC
    if (Game1.player.friendshipData.ContainsKey(npc.Name))
    {
        // friendship/romance
        FriendshipModel friendship = this.GameHelper.GetFriendshipForVillager(Game1.player, npc, Game1.player.friendshipData[npc.Name]);
        // married > housemate > datable; falls through to whether the NPC can be dated at all
        yield return new GenericField(I18n.Npc_CanRomance(), friendship.IsSpouse ? I18n.Npc_CanRomance_Married() : friendship.IsHousemate ? I18n.Npc_CanRomance_Housemate() : this.Stringify(friendship.CanDate));
        yield return new CharacterFriendshipField(I18n.Npc_Friendship(), friendship);

        // talked/gifted today
        yield return new GenericField(I18n.Npc_TalkedToday(), this.Stringify(friendship.TalkedToday));
        yield return new GenericField(I18n.Npc_GiftedToday(), this.Stringify(friendship.GiftsToday > 0));

        // kissed/hugged today (spouses kiss, housemates hug; both tracked by the same game flag)
        if (friendship.IsSpouse || friendship.IsHousemate)
            yield return new GenericField(friendship.IsSpouse ? I18n.Npc_KissedToday() : I18n.Npc_HuggedToday(), this.Stringify(npc.hasBeenKissedToday.Value));

        // gifted this week (the weekly gift cap doesn't apply to spouses/housemates)
        if (!friendship.IsSpouse && !friendship.IsHousemate)
            yield return new GenericField(I18n.Npc_GiftedThisWeek(), I18n.Generic_Ratio(value: friendship.GiftsThisWeek, max: NPC.maxGiftsPerWeek));
    }
    else
        yield return new GenericField(I18n.Npc_Friendship(), I18n.Npc_Friendship_NotMet());

    // gift tastes (dislike/hate rows are hidden unless a relevant display option is enabled)
    {
        IDictionary<GiftTaste, GiftTasteModel[]> giftTastes = this.GetGiftTastes(npc);
        IDictionary<string, bool> ownedItems = CharacterGiftTastesField.GetOwnedItemsCache(this.GameHelper);
        yield return this.GetGiftTasteField(I18n.Npc_LovesGifts(), giftTastes, ownedItems, GiftTaste.Love);
        yield return this.GetGiftTasteField(I18n.Npc_LikesGifts(), giftTastes, ownedItems, GiftTaste.Like);
        yield return this.GetGiftTasteField(I18n.Npc_NeutralGifts(), giftTastes, ownedItems, GiftTaste.Neutral);
        if (this.ProgressionMode || this.HighlightUnrevealedGiftTastes || this.ShowAllGiftTastes)
        {
            yield return this.GetGiftTasteField(I18n.Npc_DislikesGifts(), giftTastes, ownedItems, GiftTaste.Dislike);
            yield return this.GetGiftTasteField(I18n.Npc_HatesGifts(), giftTastes, ownedItems, GiftTaste.Hate);
        }
    }
}
}

/// <summary>Get a list of gift tastes for an NPC.</summary>
/// <param name="label">The field label.</param>
/// <param name="giftTastes">The gift taste data.</param>
/// <param name="ownedItemsCache">A lookup cache for owned items, as created by <see cref="CharacterGiftTastesField.GetOwnedItemsCache"/>.</param>
/// <param name="taste">The gift taste to display.</param>
private ICustomField GetGiftTasteField(string label, IDictionary<GiftTaste, GiftTasteModel[]> giftTastes, IDictionary<string, bool> ownedItemsCache, GiftTaste taste)
{
    // in progression mode only gift tastes the player has discovered are shown
    return new CharacterGiftTastesField(label, giftTastes, taste, onlyRevealed: this.ProgressionMode, highlightUnrevealed: this.HighlightUnrevealedGiftTastes, ownedItemsCache);
}

/*****
** Other
****/
/// <summary>Get the display type for a character.</summary>
/// <param name="npc">The lookup target.</param>
/// <param name="type">The NPC type.</param>
private static string GetTypeName(Character npc, SubjectType type)
{
    switch (type)
    {
        case SubjectType.Villager:
            return I18n.Type_Villager();

        case SubjectType.Horse:
            // reuse the game's own localized "horse" string
            return GameI18n.GetString("Strings\\StringsFromCSFiles:StrengthGame.cs.11665");

        case SubjectType.Monster:
            return I18n.Type_Monster();

        case SubjectType.Pet:
            {
                // game string 1242 is "cat", 1243 is "dog"; capitalize the first letter for display
                string typeName = GameI18n.GetString($"Strings\\StringsFromCSFiles:Event.cs.{(npc is Cat ? "1242" : "1243")}");
                if (typeName?.Length > 1)
                    typeName = char.ToUpperInvariant(typeName[0]) + typeName.Substring(1);
                return typeName;
            }

        default:
            // fallback: the raw CLR type name of the character
            return npc.GetType().Name;
    }
}

/// <summary>Get how much an NPC likes receiving each item as a gift.</summary>
/// <param name="npc">The NPC.</param>
/// <returns>A dictionary grouping the NPC's gift-taste entries by taste level.</returns>
private IDictionary<GiftTaste, GiftTasteModel[]> GetGiftTastes(NPC npc)
{
    return this.GameHelper.GetGiftTastes(npc)
        .GroupBy(entry => entry.Taste)
        .ToDictionary(
            tasteGroup => tasteGroup.Key,
            tasteGroup => tasteGroup.ToArray()
        );
}

/// <summary>Get the number of days until a child grows to the next stage.</summary>
/// <param name="stage">The child's current growth stage.</param>
/// <param name="daysOld">The child's current age in days.</param>
/// <returns>Returns a number of days, or <c>-1</c> if the child won't grow any further.</returns>
/// <remarks>Derived from <see cref="Child.dayUpdate"/>. Growth thresholds: newborn→13, baby→27, crawler→55 days.</remarks>
private int GetDaysToNextChildGrowth(ChildAge stage, int daysOld)
{
    return stage switch
    {
        ChildAge.Newborn => 13 - daysOld,
        ChildAge.Baby => 27 - daysOld,
        ChildAge.Crawler => 55 - daysOld,
        _ => -1
    };
}

/// <summary>Get the last day when the given player petted the pet.</summary>
/// <param name="pet">The pet to check.</param>
/// <param name="playerID">The unique multiplayer ID for the player to check.</param>
/// <returns>The day number, or <c>null</c> if this player never petted the pet.</returns>
private int? GetLastDayPetted(Pet pet, long playerID)
{
    return pet.lastPetDay.TryGetValue(playerID, out int lastDay) ?
lastDay : null;
}

/// <summary>Get a monster's possible drops.</summary>
/// <param name="monster">The monster whose drops to get.</param>
/// <returns>
/// The monster's possible drops ordered by probability, with drops the monster is actually
/// carrying marked as guaranteed (probability 1), followed by any carried drops that didn't
/// match a known possible drop.
/// </returns>
private IEnumerable<ItemDropData> GetMonsterDrops(Monster monster)
{
    // get possible drops from the monster data, falling back where needed
    ItemDropData[] possibleDrops = this.GameHelper.GetMonsterData().FirstOrDefault(p => p.Name == monster.Name)?.Drops;
    if (this.IsHauntedSkull)
        possibleDrops ??= this.GameHelper.GetMonsterData().FirstOrDefault(p => p.Name == "Lava Bat")?.Drops; // haunted skulls use lava bat data
    possibleDrops ??= Array.Empty<ItemDropData>();

    // get actual drops this monster instance is carrying, grouped by item ID
    IDictionary<int, List<ItemDropData>> dropsLeft = monster
        .objectsToDrop
        .Select(this.GetActualDrop)
        .GroupBy(p => p.ItemID)
        .ToDictionary(group => group.Key, group => group.ToList());

    // return possible drops; each carried drop is consumed by (at most) one matching possible drop
    foreach (var drop in possibleDrops.OrderByDescending(p => p.Probability))
    {
        bool isGuaranteed = dropsLeft.TryGetValue(drop.ItemID, out List<ItemDropData> actualDrops) && actualDrops.Any();
        if (isGuaranteed)
        {
            // prefer an exact min/max match, else any carried drop within the possible range
            ItemDropData[] matches = actualDrops.Where(p => p.MinDrop >= drop.MinDrop && p.MaxDrop <= drop.MaxDrop).ToArray();
            ItemDropData bestMatch = matches.FirstOrDefault(p => p.MinDrop == drop.MinDrop && p.MaxDrop == drop.MaxDrop) ?? matches.FirstOrDefault();
            actualDrops.Remove(bestMatch);
        }

        // NOTE(review): minDrop is hard-coded to 1 rather than drop.MinDrop — confirm intentional.
        yield return new ItemDropData(
            itemID: drop.ItemID,
            minDrop: 1,
            maxDrop: drop.MaxDrop,
            probability: isGuaranteed ? 1 : drop.Probability
        );
    }

    // special case: return guaranteed drops that weren't matched
    foreach (var pair in dropsLeft.Where(p => p.Value.Any()))
    {
        foreach (var drop in pair.Value)
            yield return drop;
    }
}

/// <summary>Get the drop info for a <see cref="Monster.objectsToDrop"/> ID, if it's valid.</summary>
/// <param name="id">The ID to parse.</param>
/// <remarks>Derived from <see cref="GameLocation.monsterDrop"/> and the <see cref="Debris"/> constructor.</remarks>
private ItemDropData GetActualDrop(int id)
{
    // basic info
    int minDrop = 1;
    int maxDrop = 1;

    // negative ID means the monster will drop 1-3 of the item
    if (id < 0)
    {
        id = -id;
        maxDrop = 3;
    }

    // handle hardcoded ID mappings in Debris constructor (debris codes → object IDs)
    id = id switch
    {
        0 => SObject.copper,
        2 => SObject.iron,
        4 => SObject.coal,
        6 => SObject.gold,
        10 => SObject.iridium,
        12 => SObject.wood,
        14 => SObject.stone,
        _ => id
    };

    // build model (a carried drop is certain, hence probability 1)
    return new ItemDropData(itemID: id, minDrop: minDrop, maxDrop: maxDrop, probability: 1);
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.IO;
using System.Text;
using Xunit;

[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Issue https://github.com/dotnet/corefx/issues/18220")]
public class SyncTextReader
{
    // NOTE: These tests test the underlying SyncTextReader by
    // accessing the Console.In TextReader (which in fact is a SyncTextReader).

    // Fixed lines used as console input fixtures; several tests join them with
    // Environment.NewLine and read them back via Console.In.
    static readonly string[] s_testLines = new string[] {
        "3232 Hello32 Hello 5032 Hello 50 532 Hello 50 5 aTrueaabcdbc1.23123.4561.23439505050System.ObjectHello World",
        "32",
        "",
        "32 Hello",
        "",
        "32 Hello 50",
        "",
        "32 Hello 50 5",
        "",
        "32 Hello 50 5 a",
        "",
        "True",
        "a",
        "abcd",
        "bc",
        "1.23",
        "123.456",
        "1.234",
        "39",
        "50",
        "50",
        "50",
        "System.Object",
        "Hello World",
    };

    // Redirects Console.In to a memory stream containing <paramref name="content"/>,
    // runs <paramref name="action"/>, then restores the saved standard streams.
    private static void Test(string content, Action action)
    {
        TextWriter savedStandardOutput = Console.Out;
        TextReader savedStandardInput = Console.In;
        try
        {
            using (MemoryStream memStream = new MemoryStream())
            {
                // Write the content, but leave the stream open.
                using (StreamWriter sw = new StreamWriter(memStream, Encoding.Unicode, 1024, true))
                {
                    sw.Write(content);
                    sw.Flush();
                }
                memStream.Seek(0, SeekOrigin.Begin);
                using (StreamReader sr = new StreamReader(memStream))
                {
                    Console.SetIn(sr);
                    action();
                }
            }
        }
        finally
        {
            // NOTE(review): this disposes whatever writer/reader is currently installed on
            // Console.Out/Console.In before restoring the saved ones — confirm this doesn't
            // dispose the process-wide standard streams when no redirection happened.
            TextWriter oldWriter = Console.Out;
            Console.SetOut(savedStandardOutput);
            oldWriter.Dispose();
            TextReader oldReader = Console.In;
            Console.SetIn(savedStandardInput);
            oldReader.Dispose();
        }
    }

    [Fact]
    public void ReadToEnd()
    {
        var expected = string.Join(Environment.NewLine, s_testLines);
        Test(expected, () =>
        {
            // Given, When
            var result = Console.In.ReadToEnd();
            // Then
            Assert.Equal(expected, result);
            Assert.Equal(-1, Console.Read()); // We should be at EOF now.
        });
    }

    [Fact]
    public void ReadBlock()
    {
        // ReadBlock should fill the buffer and report the count read.
        var expected = new[] { 'H', 'e', 'l', 'l', 'o' };
        Test(new string(expected), () =>
        {
            // Given
            var buffer = new char[expected.Length];
            // When
            var result = Console.In.ReadBlock(buffer, 0, 5);
            // Then
            Assert.Equal(5, result);
            Assert.Equal(expected, buffer);
            Assert.Equal(-1, Console.Read()); // We should be at EOF now.
        });
    }

    [Fact]
    public void Read()
    {
        // Read(char[], int, int) should behave like ReadBlock for a fully-buffered source.
        var expected = new[] { 'H', 'e', 'l', 'l', 'o' };
        Test(new string(expected), () =>
        {
            // Given
            var buffer = new char[expected.Length];
            // When
            var result = Console.In.Read(buffer, 0, 5);
            // Then
            Assert.Equal(5, result);
            Assert.Equal(expected, buffer);
            Assert.Equal(-1, Console.Read()); // We should be at EOF now.
        });
    }

    [Fact]
    public void Peek()
    {
        // Peek must not consume the character; Read after Peek returns the same char.
        const string expected = "ABC";
        Test(expected, () =>
        {
            foreach (char expectedChar in expected)
            {
                Assert.Equal(expectedChar, Console.In.Peek());
                Assert.Equal(expectedChar, Console.In.Read());
            }
        });
    }

    [Fact]
    public void ReadToEndAsync()
    {
        var expected = string.Join(Environment.NewLine, s_testLines);
        Test(expected, () =>
        {
            // Given, When
            // NOTE(review): blocking on .Result is deliberate here — the source is an
            // in-memory stream, so the task completes synchronously.
            var result = Console.In.ReadToEndAsync().Result;
            // Then
            Assert.Equal(expected, result);
            Assert.Equal(-1, Console.Read()); // We should be at EOF now.
        });
    }

    [Fact]
    public void ReadBlockAsync()
    {
        var expected = new[] { 'H', 'e', 'l', 'l', 'o' };
        Test(new string(expected), () =>
        {
            // Given
            var buffer = new char[expected.Length];
            // When
            var result = Console.In.ReadBlockAsync(buffer, 0, 5).Result;
            // Then
            Assert.Equal(5, result);
            Assert.Equal(expected, buffer);
            Assert.Equal(-1, Console.Read()); // We should be at EOF now.
            // Invalid args
            // NOTE(review): Assert.Throws (not ThrowsAsync) works here because the
            // SyncTextReader wrappers validate arguments synchronously, before any task
            // is created; the returned (never-started) tasks are intentionally discarded.
            Assert.Throws<ArgumentNullException>(() => { Console.In.ReadBlockAsync(null, 0, 0); });
            Assert.Throws<ArgumentOutOfRangeException>(() => { Console.In.ReadBlockAsync(new char[1], -1, 0); });
            Assert.Throws<ArgumentOutOfRangeException>(() => { Console.In.ReadBlockAsync(new char[1], 0, -1); });
            Assert.Throws<ArgumentException>(() => { Console.In.ReadBlockAsync(new char[1], 1, 1); });
        });
    }

    [Fact]
    public void ReadAsync()
    {
        var expected = new[] { 'H', 'e', 'l', 'l', 'o' };
        Test(new string(expected), () =>
        {
            // Given
            var buffer = new char[expected.Length];
            // When
            var result = Console.In.ReadAsync(buffer, 0, 5).Result;
            // Then
            Assert.Equal(5, result);
            Assert.Equal(expected, buffer);
            Assert.Equal(-1, Console.Read()); // We should be at EOF now.
            // Invalid args (same synchronous-validation pattern as ReadBlockAsync above)
            Assert.Throws<ArgumentNullException>(() => { Console.In.ReadAsync(null, 0, 0); });
            Assert.Throws<ArgumentOutOfRangeException>(() => { Console.In.ReadAsync(new char[1], -1, 0); });
            Assert.Throws<ArgumentOutOfRangeException>(() => { Console.In.ReadAsync(new char[1], 0, -1); });
            Assert.Throws<ArgumentException>(() => { Console.In.ReadAsync(new char[1], 1, 1); });
        });
    }

    [Fact]
    public void ReadLineAsync()
    {
        // Each line of the fixture should come back verbatim, in order, then EOF.
        var expected = string.Join(Environment.NewLine, s_testLines);
        Test(expected, () =>
        {
            for (int i = 0; i < s_testLines.Length; i++)
            {
                // Given, When
                var result = Console.In.ReadLineAsync().Result;
                // Then
                Assert.Equal(s_testLines[i], result);
            }
            Assert.Equal(-1, Console.Read());
        });
    }
}
using System;
using System.Collections.Generic;
using System.Threading;
using ExtensionLoader;
using OpenMetaverse;
using OpenMetaverse.Packets;

namespace Simian.Extensions
{
    /// <summary>
    /// Avatar movement extension: applies control flags, gravity, water and jump physics to
    /// each connected agent on a fixed timer, and broadcasts the resulting object updates.
    /// </summary>
    public class Movement : IExtension<Simian>
    {
        const int UPDATE_ITERATION = 100; //rate in milliseconds to send ObjectUpdate
        const bool ENVIRONMENT_SOUNDS = true; //collision sounds, splashing, etc (not referenced in this class yet)
        const float GRAVITY = 9.8f; //meters/sec
        const float WALK_SPEED = 3f; //meters/sec
        const float RUN_SPEED = 5f; //meters/sec
        const float FLY_SPEED = 10f; //meters/sec
        const float FALL_DELAY = 0.33f; //seconds before starting animation
        const float FALL_FORGIVENESS = 0.25f; //fall buffer in meters
        const float JUMP_IMPULSE_VERTICAL = 8.5f; //boost amount in meters/sec
        const float JUMP_IMPULSE_HORIZONTAL = 10f; //boost amount in meters/sec (no clue why this is so high)
        const float INITIAL_HOVER_IMPULSE = 2f; //boost amount in meters/sec
        const float PREJUMP_DELAY = 0.25f; //seconds before actually jumping
        const float AVATAR_TERMINAL_VELOCITY = 54f; //~120mph
        static readonly UUID BIG_SPLASH_SOUND = new UUID("486475b9-1460-4969-871e-fad973b38015"); // not referenced in this class yet
        const float SQRT_TWO = 1.41421356f;

        Simian server;            // host server instance, set in Start()
        Timer updateTimer;        // drives UpdateTimer_Elapsed every UPDATE_ITERATION ms
        long lastTick;            // last tick timestamp, accessed via Interlocked through LastTick

        // Thread-safe accessor for the last update timestamp (Environment.TickCount units).
        public int LastTick
        {
            get { return (int) Interlocked.Read(ref lastTick); }
            set { Interlocked.Exchange(ref lastTick, value); }
        }

        public Movement()
        {
        }

        /// <summary>Registers packet handlers and starts the fixed-rate physics timer.</summary>
        public void Start(Simian server)
        {
            this.server = server;

            server.UDP.RegisterPacketCallback(PacketType.AgentUpdate, new PacketCallback(AgentUpdateHandler));
            server.UDP.RegisterPacketCallback(PacketType.AgentHeightWidth, new PacketCallback(AgentHeightWidthHandler));
            server.UDP.RegisterPacketCallback(PacketType.SetAlwaysRun, new PacketCallback(SetAlwaysRunHandler));

            updateTimer = new Timer(new TimerCallback(UpdateTimer_Elapsed));
            LastTick = Environment.TickCount;
            updateTimer.Change(UPDATE_ITERATION, UPDATE_ITERATION);
        }

        /// <summary>Stops the physics timer.</summary>
        public void Stop()
        {
            updateTimer.Dispose();
        }

        // Timer callback: advances movement physics for every agent since the last tick.
        void UpdateTimer_Elapsed(object sender)
        {
            int tick =
Environment.TickCount;
float seconds = (float)((tick - LastTick) / 1000f); // elapsed wall time since last tick
LastTick = tick;

lock (server.Agents)
{
    foreach (Agent agent in server.Agents.Values)
    {
        bool animsChanged = false;

        // Create forward and left vectors from the current avatar rotation
        Matrix4 rotMatrix = Matrix4.CreateFromQuaternion(agent.Avatar.Rotation);
        Vector3 fwd = Vector3.Transform(Vector3.UnitX, rotMatrix);
        Vector3 left = Vector3.Transform(Vector3.UnitY, rotMatrix);

        // Check control flags
        bool heldForward = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_AT_POS) == AgentManager.ControlFlags.AGENT_CONTROL_AT_POS;
        bool heldBack = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_AT_NEG) == AgentManager.ControlFlags.AGENT_CONTROL_AT_NEG;
        bool heldLeft = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_LEFT_POS) == AgentManager.ControlFlags.AGENT_CONTROL_LEFT_POS;
        bool heldRight = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_LEFT_NEG) == AgentManager.ControlFlags.AGENT_CONTROL_LEFT_NEG;
        // NOTE(review): heldTurnLeft/heldTurnRight/mouselook are computed but never used below.
        bool heldTurnLeft = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_TURN_LEFT) == AgentManager.ControlFlags.AGENT_CONTROL_TURN_LEFT;
        bool heldTurnRight = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_TURN_RIGHT) == AgentManager.ControlFlags.AGENT_CONTROL_TURN_RIGHT;
        bool heldUp = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_UP_POS) == AgentManager.ControlFlags.AGENT_CONTROL_UP_POS;
        bool heldDown = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_UP_NEG) == AgentManager.ControlFlags.AGENT_CONTROL_UP_NEG;
        bool flying = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_FLY) == AgentManager.ControlFlags.AGENT_CONTROL_FLY;
        bool mouselook = (agent.ControlFlags & AgentManager.ControlFlags.AGENT_CONTROL_MOUSELOOK) == AgentManager.ControlFlags.AGENT_CONTROL_MOUSELOOK;

        // direction in which the avatar is trying to move
        Vector3 move = Vector3.Zero;
        if (heldForward) { move.X += fwd.X; move.Y += fwd.Y; }
        if (heldBack) { move.X -= fwd.X; move.Y -= fwd.Y; }
        if (heldLeft) { move.X += left.X; move.Y += left.Y; }
        if (heldRight) { move.X -= left.X; move.Y -= left.Y; }
        if (heldUp) { move.Z += 1; }
        if (heldDown) { move.Z -= 1; }

        // is the avatar trying to move?
        bool moving = move != Vector3.Zero;
        bool jumping = agent.TickJump != 0;

        // 2-dimensional speed multiplier (per-tick distance, already scaled by elapsed time)
        float speed = seconds * (flying ? FLY_SPEED : agent.Running && !jumping ? RUN_SPEED : WALK_SPEED);
        // diagonal movement: normalize so diagonals aren't faster
        if ((heldForward || heldBack) && (heldLeft || heldRight))
            speed /= SQRT_TWO;

        // adjust multiplier for Z dimension (slow down on slopes when walking)
        float oldFloor = GetLandHeightAt(agent.Avatar.Position);
        float newFloor = GetLandHeightAt(agent.Avatar.Position + (move * speed));
        if (!flying && newFloor != oldFloor)
            speed /= (1 + (SQRT_TWO * Math.Abs(newFloor - oldFloor)));

        // least possible distance from avatar to the ground
        // TODO: calculate to get rid of "bot squat"
        float lowerLimit = newFloor + agent.Avatar.Scale.Z / 2;

        // Z acceleration resulting from gravity
        float gravity = 0f;

        // water line measured at roughly chest height on the avatar
        float waterChestHeight = server.Scene.WaterHeight - (agent.Avatar.Scale.Z * .33f);

        if (flying)
        {
            agent.TickFall = 0;
            agent.TickJump = 0;

            //velocity falloff while flying
            agent.Avatar.Velocity.X *= 0.66f;
            agent.Avatar.Velocity.Y *= 0.66f;
            agent.Avatar.Velocity.Z *= 0.33f;

            // small upward boost when taking off from the ground
            if (agent.Avatar.Position.Z == lowerLimit)
                agent.Avatar.Velocity.Z += INITIAL_HOVER_IMPULSE;

            if (move.X != 0 || move.Y != 0)
            {
                //flying horizontally
                if (server.Avatars.SetDefaultAnimation(agent, Animations.FLY))
                    animsChanged = true;
            }
            else if (move.Z > 0)
            {
                //flying straight up
                if (server.Avatars.SetDefaultAnimation(agent, Animations.HOVER_UP))
                    animsChanged = true;
            }
            else if (move.Z < 0)
            {
                //flying straight down
                if (server.Avatars.SetDefaultAnimation(agent, Animations.HOVER_DOWN))
                    animsChanged = true;
            }
            else
            {
                //hovering in the air
                if (server.Avatars.SetDefaultAnimation(agent, Animations.HOVER))
                    animsChanged = true;
            }
        }
        else if (agent.Avatar.Position.Z > lowerLimit + FALL_FORGIVENESS || agent.Avatar.Position.Z <= waterChestHeight)
        {
            //falling, floating, or landing from a jump
            if (agent.Avatar.Position.Z > server.Scene.WaterHeight)
            {
                //above water
                move = Vector3.Zero; //override controls while drifting
                agent.Avatar.Velocity *= 0.95f; //keep most of our inertia

                float fallElapsed = (float)(Environment.TickCount - agent.TickFall) / 1000f;
                if (agent.TickFall == 0 || (fallElapsed > FALL_DELAY && agent.Avatar.Velocity.Z >= 0f))
                {
                    //just started falling
                    agent.TickFall = Environment.TickCount;
                }
                else
                {
                    gravity = GRAVITY * fallElapsed * seconds; //normal gravity

                    if (!jumping)
                    {
                        //falling
                        if (fallElapsed > FALL_DELAY)
                        {
                            //falling long enough to trigger the animation
                            if (server.Avatars.SetDefaultAnimation(agent, Animations.FALLDOWN))
                                animsChanged = true;
                        }
                    }
                }
            }
            else if (agent.Avatar.Position.Z >= waterChestHeight)
            {
                //at the water line
                gravity = 0f;
                agent.Avatar.Velocity *= 0.5f;
                agent.Avatar.Velocity.Z = 0f;
                // snap to the surface unless actively swimming upward
                if (move.Z < 1)
                    agent.Avatar.Position.Z = waterChestHeight;

                if (move.Z > 0)
                {
                    if (server.Avatars.SetDefaultAnimation(agent, Animations.HOVER_UP))
                        animsChanged = true;
                }
                else if (move.X != 0 || move.Y != 0)
                {
                    if (server.Avatars.SetDefaultAnimation(agent, Animations.FLYSLOW))
                        animsChanged = true;
                }
                else
                {
                    if (server.Avatars.SetDefaultAnimation(agent, Animations.HOVER))
                        animsChanged = true;
                }
            }
            else
            {
                //underwater
                gravity = 0f; //buoyant
                agent.Avatar.Velocity *= 0.5f * seconds;
                agent.Avatar.Velocity.Z += 0.75f * seconds; // slowly float toward the surface

                if (server.Avatars.SetDefaultAnimation(agent, Animations.FALLDOWN))
                    animsChanged = true;
            }
        }
        else
        {
            //on the ground
            agent.TickFall = 0;

            //friction
            agent.Avatar.Acceleration *= 0.2f;
            agent.Avatar.Velocity *= 0.2f;

            agent.Avatar.Position.Z = lowerLimit;

            if (move.Z > 0)
            {
                //jumping
                if (!jumping)
                {
                    //begin prejump
                    move.Z = 0; //override Z control
                    if (server.Avatars.SetDefaultAnimation(agent, Animations.PRE_JUMP))
                        animsChanged = true;
                    agent.TickJump = Environment.TickCount;
                }
                else if (Environment.TickCount - agent.TickJump > PREJUMP_DELAY * 1000)
                {
                    //start actual jump
                    if (agent.TickJump == -1)
                    {
                        //already jumping! end current jump
                        agent.TickJump = 0;
                        // NOTE(review): this return exits the timer callback entirely, skipping
                        // physics for all remaining agents this tick — confirm `continue` wasn't intended.
                        return;
                    }

                    if (server.Avatars.SetDefaultAnimation(agent, Animations.JUMP))
                        animsChanged = true;

                    agent.Avatar.Velocity.X += agent.Avatar.Acceleration.X * JUMP_IMPULSE_HORIZONTAL;
                    agent.Avatar.Velocity.Y += agent.Avatar.Acceleration.Y * JUMP_IMPULSE_HORIZONTAL;
                    agent.Avatar.Velocity.Z = JUMP_IMPULSE_VERTICAL * seconds;

                    agent.TickJump = -1; //flag that we are currently jumping
                }
                else move.Z = 0; //override Z control
            }
            else
            {
                //not jumping
                agent.TickJump = 0;

                if (move.X != 0 || move.Y != 0)
                {
                    //moving horizontally (walk/run/crouch-walk)
                    if (move.Z < 0)
                    {
                        //crouchwalking
                        if (server.Avatars.SetDefaultAnimation(agent, Animations.CROUCHWALK))
                            animsChanged = true;
                    }
                    else if (agent.Running)
                    {
                        //running
                        if (server.Avatars.SetDefaultAnimation(agent, Animations.RUN))
                            animsChanged = true;
                    }
                    else
                    {
                        //walking
                        if (server.Avatars.SetDefaultAnimation(agent, Animations.WALK))
                            animsChanged = true;
                    }
                }
                else
                {
                    //stationary (stand or crouch)
                    if (move.Z < 0)
                    {
                        //crouching
                        if (server.Avatars.SetDefaultAnimation(agent, Animations.CROUCH))
                            animsChanged = true;
                    }
                    else
                    {
                        //standing
                        if (server.Avatars.SetDefaultAnimation(agent, Animations.STAND))
                            animsChanged = true;
                    }
                }
            }
        }

        if (animsChanged)
            server.Avatars.SendAnimations(agent);

        // clamp per-tick velocity/acceleration to terminal velocity on the Z axis
        float maxVel = AVATAR_TERMINAL_VELOCITY * seconds;

        // static acceleration when any control is held, otherwise none
        if (moving)
        {
            agent.Avatar.Acceleration = move * speed;
            if (agent.Avatar.Acceleration.Z < -maxVel)
                agent.Avatar.Acceleration.Z = -maxVel;
            else if (agent.Avatar.Acceleration.Z > maxVel)
                agent.Avatar.Acceleration.Z = maxVel;
        }
        else agent.Avatar.Acceleration = Vector3.Zero;

        // integrate: acceleration and gravity into velocity, velocity into position
        agent.Avatar.Velocity += agent.Avatar.Acceleration - new Vector3(0f, 0f, gravity);
        if (agent.Avatar.Velocity.Z < -maxVel)
            agent.Avatar.Velocity.Z = -maxVel;
        else if (agent.Avatar.Velocity.Z > maxVel)
            agent.Avatar.Velocity.Z = maxVel;

        agent.Avatar.Position += agent.Avatar.Velocity;

        // keep the avatar inside the 256x256 region and above the ground
        if (agent.Avatar.Position.X < 0)
agent.Avatar.Position.X = 0f;
                    else if (agent.Avatar.Position.X > 255)
                        agent.Avatar.Position.X = 255f;

                    if (agent.Avatar.Position.Y < 0)
                        agent.Avatar.Position.Y = 0f;
                    else if (agent.Avatar.Position.Y > 255)
                        agent.Avatar.Position.Y = 255f;

                    if (agent.Avatar.Position.Z < lowerLimit)
                        agent.Avatar.Position.Z = lowerLimit;
                }
            }
        }

        // Applies an AgentUpdate packet (rotation/controls/state) and rebroadcasts the
        // avatar's full object update to all connected clients.
        void AgentUpdateHandler(Packet packet, Agent agent)
        {
            AgentUpdatePacket update = (AgentUpdatePacket)packet;

            agent.Avatar.Rotation = update.AgentData.BodyRotation;
            agent.ControlFlags = (AgentManager.ControlFlags)update.AgentData.ControlFlags;
            agent.State = update.AgentData.State;
            agent.Flags = (PrimFlags)update.AgentData.Flags;

            ObjectUpdatePacket fullUpdate = SimulationObject.BuildFullUpdate(agent.Avatar, server.RegionHandle, agent.State, agent.Flags);
            server.UDP.BroadcastPacket(fullUpdate, PacketCategory.State);
        }

        // Toggles the agent's always-run preference from a SetAlwaysRun packet.
        void SetAlwaysRunHandler(Packet packet, Agent agent)
        {
            SetAlwaysRunPacket run = (SetAlwaysRunPacket)packet;
            agent.Running = run.AgentData.AlwaysRun;
        }

        // Returns the interpolated terrain height at a world position, by averaging two
        // lerps from the containing cell toward the nearest X and Y neighbor cells.
        float GetLandHeightAt(Vector3 position)
        {
            // clamp to the 256x256 heightmap bounds
            int x = (int)position.X;
            int y = (int)position.Y;
            if (x > 255) x = 255;
            else if (x < 0) x = 0;
            if (y > 255) y = 255;
            else if (y < 0) y = 0;

            float center = server.Scene.Heightmap[y * 256 + x];

            // fractional offsets within the cell (always >= 0 since position was clamped to >= 0)
            float distX = position.X - (int)position.X;
            float distY = position.Y - (int)position.Y;

            float nearestX;
            float nearestY;

            // NOTE(review): when distX/distY is exactly 0 this samples the *previous* cell
            // (x-1 / y-1) instead of the center — confirm that edge behavior is intended.
            if (distX > 0) nearestX = server.Scene.Heightmap[y * 256 + x + (x < 255 ? 1 : 0)];
            else nearestX = server.Scene.Heightmap[y * 256 + x - (x > 0 ? 1 : 0)];

            if (distY > 0) nearestY = server.Scene.Heightmap[(y + (y < 255 ? 1 : 0)) * 256 + x];
            else nearestY = server.Scene.Heightmap[(y - (y > 0 ? 1 : 0)) * 256 + x];

            float lerpX = Utils.Lerp(center, nearestX, Math.Abs(distX));
            float lerpY = Utils.Lerp(center, nearestY, Math.Abs(distY));

            return ((lerpX + lerpY) / 2);
        }

        // Currently a no-op: records nothing from AgentHeightWidth packets.
        void AgentHeightWidthHandler(Packet packet, Agent agent)
        {
            AgentHeightWidthPacket heightWidth = (AgentHeightWidthPacket)packet;

            // TODO: These are the screen size dimensions. Useful when we start doing frustum culling
            //Logger.Log(String.Format("Agent wants to set height={0}, width={1}",
            //    heightWidth.HeightWidthBlock.Height, heightWidth.HeightWidthBlock.Width), Helpers.LogLevel.Info);
        }
    }
}
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gax = Google.Api.Gax; using gaxgrpc = Google.Api.Gax.Grpc; using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore; using proto = Google.Protobuf; using grpccore = Grpc.Core; using grpcinter = Grpc.Core.Interceptors; using sys = System; using scg = System.Collections.Generic; using sco = System.Collections.ObjectModel; using st = System.Threading; using stt = System.Threading.Tasks; namespace Google.Ads.GoogleAds.V10.Services { /// <summary>Settings for <see cref="CustomerUserAccessInvitationServiceClient"/> instances.</summary> public sealed partial class CustomerUserAccessInvitationServiceSettings : gaxgrpc::ServiceSettingsBase { /// <summary> /// Get a new instance of the default <see cref="CustomerUserAccessInvitationServiceSettings"/>. /// </summary> /// <returns>A new instance of the default <see cref="CustomerUserAccessInvitationServiceSettings"/>.</returns> public static CustomerUserAccessInvitationServiceSettings GetDefault() => new CustomerUserAccessInvitationServiceSettings(); /// <summary> /// Constructs a new <see cref="CustomerUserAccessInvitationServiceSettings"/> object with default settings. 
/// </summary> public CustomerUserAccessInvitationServiceSettings() { } private CustomerUserAccessInvitationServiceSettings(CustomerUserAccessInvitationServiceSettings existing) : base(existing) { gax::GaxPreconditions.CheckNotNull(existing, nameof(existing)); MutateCustomerUserAccessInvitationSettings = existing.MutateCustomerUserAccessInvitationSettings; OnCopy(existing); } partial void OnCopy(CustomerUserAccessInvitationServiceSettings existing); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>CustomerUserAccessInvitationServiceClient.MutateCustomerUserAccessInvitation</c> and /// <c>CustomerUserAccessInvitationServiceClient.MutateCustomerUserAccessInvitationAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 5000 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>, /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>. 
/// </description> /// </item> /// <item><description>Timeout: 3600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings MutateCustomerUserAccessInvitationSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded))); /// <summary>Creates a deep clone of this object, with all the same property values.</summary> /// <returns>A deep clone of this <see cref="CustomerUserAccessInvitationServiceSettings"/> object.</returns> public CustomerUserAccessInvitationServiceSettings Clone() => new CustomerUserAccessInvitationServiceSettings(this); } /// <summary> /// Builder class for <see cref="CustomerUserAccessInvitationServiceClient"/> to provide simple configuration of /// credentials, endpoint etc. 
/// </summary> internal sealed partial class CustomerUserAccessInvitationServiceClientBuilder : gaxgrpc::ClientBuilderBase<CustomerUserAccessInvitationServiceClient> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public CustomerUserAccessInvitationServiceSettings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public CustomerUserAccessInvitationServiceClientBuilder() { UseJwtAccessWithScopes = CustomerUserAccessInvitationServiceClient.UseJwtAccessWithScopes; } partial void InterceptBuild(ref CustomerUserAccessInvitationServiceClient client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<CustomerUserAccessInvitationServiceClient> task); /// <summary>Builds the resulting client.</summary> public override CustomerUserAccessInvitationServiceClient Build() { CustomerUserAccessInvitationServiceClient client = null; InterceptBuild(ref client); return client ?? BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<CustomerUserAccessInvitationServiceClient> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<CustomerUserAccessInvitationServiceClient> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? 
BuildAsyncImpl(cancellationToken); } private CustomerUserAccessInvitationServiceClient BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return CustomerUserAccessInvitationServiceClient.Create(callInvoker, Settings); } private async stt::Task<CustomerUserAccessInvitationServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return CustomerUserAccessInvitationServiceClient.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => CustomerUserAccessInvitationServiceClient.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. /// </summary> protected override scg::IReadOnlyList<string> GetDefaultScopes() => CustomerUserAccessInvitationServiceClient.DefaultScopes; /// <summary>Returns the channel pool to use when no other options are specified.</summary> protected override gaxgrpc::ChannelPool GetChannelPool() => CustomerUserAccessInvitationServiceClient.ChannelPool; /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary> protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance; } /// <summary>CustomerUserAccessInvitationService client wrapper, for convenient use.</summary> /// <remarks> /// This service manages the access invitation extended to users for a given /// customer. /// </remarks> public abstract partial class CustomerUserAccessInvitationServiceClient { /// <summary> /// The default endpoint for the CustomerUserAccessInvitationService service, which is a host of /// "googleads.googleapis.com" and a port of 443. 
/// </summary> public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443"; /// <summary>The default CustomerUserAccessInvitationService scopes.</summary> /// <remarks> /// The default CustomerUserAccessInvitationService scopes are: /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list> /// </remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { "https://www.googleapis.com/auth/adwords", }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="CustomerUserAccessInvitationServiceClient"/> using the default /// credentials, endpoint and settings. To specify custom credentials or other settings, use /// <see cref="CustomerUserAccessInvitationServiceClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. /// </param> /// <returns> /// The task representing the created <see cref="CustomerUserAccessInvitationServiceClient"/>. /// </returns> public static stt::Task<CustomerUserAccessInvitationServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) => new CustomerUserAccessInvitationServiceClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="CustomerUserAccessInvitationServiceClient"/> using the default /// credentials, endpoint and settings. To specify custom credentials or other settings, use /// <see cref="CustomerUserAccessInvitationServiceClientBuilder"/>. 
/// </summary> /// <returns>The created <see cref="CustomerUserAccessInvitationServiceClient"/>.</returns> public static CustomerUserAccessInvitationServiceClient Create() => new CustomerUserAccessInvitationServiceClientBuilder().Build(); /// <summary> /// Creates a <see cref="CustomerUserAccessInvitationServiceClient"/> which uses the specified call invoker for /// remote operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. /// </param> /// <param name="settings">Optional <see cref="CustomerUserAccessInvitationServiceSettings"/>.</param> /// <returns>The created <see cref="CustomerUserAccessInvitationServiceClient"/>.</returns> internal static CustomerUserAccessInvitationServiceClient Create(grpccore::CallInvoker callInvoker, CustomerUserAccessInvitationServiceSettings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient grpcClient = new CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient(callInvoker); return new CustomerUserAccessInvitationServiceClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. 
/// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC CustomerUserAccessInvitationService client</summary> public virtual CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Creates or removes an access invitation. /// /// List of thrown errors: /// [AccessInvitationError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateCustomerUserAccessInvitationResponse MutateCustomerUserAccessInvitation(MutateCustomerUserAccessInvitationRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates or removes an access invitation. /// /// List of thrown errors: /// [AccessInvitationError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCustomerUserAccessInvitationResponse> MutateCustomerUserAccessInvitationAsync(MutateCustomerUserAccessInvitationRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates or removes an access invitation. 
/// /// List of thrown errors: /// [AccessInvitationError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCustomerUserAccessInvitationResponse> MutateCustomerUserAccessInvitationAsync(MutateCustomerUserAccessInvitationRequest request, st::CancellationToken cancellationToken) => MutateCustomerUserAccessInvitationAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates or removes an access invitation. /// /// List of thrown errors: /// [AccessInvitationError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="customerId"> /// Required. The ID of the customer whose access invitation is being modified. /// </param> /// <param name="operation"> /// Required. The operation to perform on the access invitation /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateCustomerUserAccessInvitationResponse MutateCustomerUserAccessInvitation(string customerId, CustomerUserAccessInvitationOperation operation, gaxgrpc::CallSettings callSettings = null) => MutateCustomerUserAccessInvitation(new MutateCustomerUserAccessInvitationRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operation = gax::GaxPreconditions.CheckNotNull(operation, nameof(operation)), }, callSettings); /// <summary> /// Creates or removes an access invitation. 
/// /// List of thrown errors: /// [AccessInvitationError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="customerId"> /// Required. The ID of the customer whose access invitation is being modified. /// </param> /// <param name="operation"> /// Required. The operation to perform on the access invitation /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCustomerUserAccessInvitationResponse> MutateCustomerUserAccessInvitationAsync(string customerId, CustomerUserAccessInvitationOperation operation, gaxgrpc::CallSettings callSettings = null) => MutateCustomerUserAccessInvitationAsync(new MutateCustomerUserAccessInvitationRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operation = gax::GaxPreconditions.CheckNotNull(operation, nameof(operation)), }, callSettings); /// <summary> /// Creates or removes an access invitation. /// /// List of thrown errors: /// [AccessInvitationError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="customerId"> /// Required. The ID of the customer whose access invitation is being modified. /// </param> /// <param name="operation"> /// Required. 
/// The operation to perform on the access invitation
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<MutateCustomerUserAccessInvitationResponse> MutateCustomerUserAccessInvitationAsync(string customerId, CustomerUserAccessInvitationOperation operation, st::CancellationToken cancellationToken) =>
    // Convenience overload: wraps the CancellationToken into CallSettings and delegates
    // to the request-object overload above.
    MutateCustomerUserAccessInvitationAsync(customerId, operation, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}

/// <summary>CustomerUserAccessInvitationService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// This service manages the access invitation extended to users for a given
/// customer.
/// </remarks>
public sealed partial class CustomerUserAccessInvitationServiceClientImpl : CustomerUserAccessInvitationServiceClient
{
    // The single RPC of this service, wrapped with the effective call settings.
    private readonly gaxgrpc::ApiCall<MutateCustomerUserAccessInvitationRequest, MutateCustomerUserAccessInvitationResponse> _callMutateCustomerUserAccessInvitation;

    /// <summary>
    /// Constructs a client wrapper for the CustomerUserAccessInvitationService service, with the specified gRPC
    /// client and settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">
    /// The base <see cref="CustomerUserAccessInvitationServiceSettings"/> used within this client.
    /// </param>
    public CustomerUserAccessInvitationServiceClientImpl(CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient grpcClient, CustomerUserAccessInvitationServiceSettings settings)
    {
        GrpcClient = grpcClient;
        // Null settings fall back to the service defaults.
        CustomerUserAccessInvitationServiceSettings effectiveSettings = settings ?? CustomerUserAccessInvitationServiceSettings.GetDefault();
        gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
        // Build the sync/async call pair from the gRPC client methods, apply the per-method
        // settings, and attach the request's CustomerId as the "customer_id" request parameter.
        _callMutateCustomerUserAccessInvitation = clientHelper.BuildApiCall<MutateCustomerUserAccessInvitationRequest, MutateCustomerUserAccessInvitationResponse>(grpcClient.MutateCustomerUserAccessInvitationAsync, grpcClient.MutateCustomerUserAccessInvitation, effectiveSettings.MutateCustomerUserAccessInvitationSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId);
        // Partial-method hooks so hand-written partial classes can customize the call.
        Modify_ApiCall(ref _callMutateCustomerUserAccessInvitation);
        Modify_MutateCustomerUserAccessInvitationApiCall(ref _callMutateCustomerUserAccessInvitation);
        OnConstruction(grpcClient, effectiveSettings, clientHelper);
    }

    // Extension points implemented (optionally) elsewhere in this partial class.
    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

    partial void Modify_MutateCustomerUserAccessInvitationApiCall(ref gaxgrpc::ApiCall<MutateCustomerUserAccessInvitationRequest, MutateCustomerUserAccessInvitationResponse> call);

    partial void OnConstruction(CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient grpcClient, CustomerUserAccessInvitationServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

    /// <summary>The underlying gRPC CustomerUserAccessInvitationService client</summary>
    public override CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient GrpcClient { get; }

    // Per-request extension point, invoked before each RPC below.
    partial void Modify_MutateCustomerUserAccessInvitationRequest(ref MutateCustomerUserAccessInvitationRequest request, ref gaxgrpc::CallSettings settings);

    /// <summary>
    /// Creates or removes an access invitation.
    ///
    /// List of thrown errors:
    /// [AccessInvitationError]()
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override MutateCustomerUserAccessInvitationResponse MutateCustomerUserAccessInvitation(MutateCustomerUserAccessInvitationRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_MutateCustomerUserAccessInvitationRequest(ref request, ref callSettings);
        return _callMutateCustomerUserAccessInvitation.Sync(request, callSettings);
    }

    /// <summary>
    /// Creates or removes an access invitation.
    ///
    /// List of thrown errors:
    /// [AccessInvitationError]()
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<MutateCustomerUserAccessInvitationResponse> MutateCustomerUserAccessInvitationAsync(MutateCustomerUserAccessInvitationRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_MutateCustomerUserAccessInvitationRequest(ref request, ref callSettings);
        return _callMutateCustomerUserAccessInvitation.Async(request, callSettings);
    }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; // Used only for WRITE_LOCK_NAME in deprecated create=true case: using System.Linq; using System.Threading; using Lucene.Net.Support; namespace Lucene.Net.Store { using System.IO; using Constants = Lucene.Net.Util.Constants; using IOUtils = Lucene.Net.Util.IOUtils; /// <summary> /// Base class for Directory implementations that store index /// files in the file system. /// <a name="subclasses"/> /// There are currently three core /// subclasses: /// /// <ul> /// /// <li> <seealso cref="SimpleFSDirectory"/> is a straightforward /// implementation using java.io.RandomAccessFile. /// However, it has poor concurrent performance /// (multiple threads will bottleneck) as it /// synchronizes when multiple threads read from the /// same file. /// /// <li> <seealso cref="NIOFSDirectory"/> uses java.nio's /// FileChannel's positional io when reading to avoid /// synchronization when reading from the same file. /// Unfortunately, due to a Windows-only <a /// href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6265734">Sun /// JRE bug</a> this is a poor choice for Windows, but /// on all other platforms this is the preferred /// choice. 
Applications using <seealso cref="Thread#interrupt()"/> or /// <seealso cref="Future#cancel(boolean)"/> should use /// <seealso cref="SimpleFSDirectory"/> instead. See <seealso cref="NIOFSDirectory"/> java doc /// for details. /// /// /// /// <li> <seealso cref="MMapDirectory"/> uses memory-mapped IO when /// reading. this is a good choice if you have plenty /// of virtual memory relative to your index size, eg /// if you are running on a 64 bit JRE, or you are /// running on a 32 bit JRE but your index sizes are /// small enough to fit into the virtual memory space. /// Java has currently the limitation of not being able to /// unmap files from user code. The files are unmapped, when GC /// releases the byte buffers. Due to /// <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4724038"> /// this bug</a> in Sun's JRE, MMapDirectory's <seealso cref="IndexInput#close"/> /// is unable to close the underlying OS file handle. Only when /// GC finally collects the underlying objects, which could be /// quite some time later, will the file handle be closed. /// this will consume additional transient disk usage: on Windows, /// attempts to delete or overwrite the files will result in an /// exception; on other platforms, which typically have a &quot;delete on /// last close&quot; semantics, while such operations will succeed, the bytes /// are still consuming space on disk. For many applications this /// limitation is not a problem (e.g. if you have plenty of disk space, /// and you don't rely on overwriting files on Windows) but it's still /// an important limitation to be aware of. this class supplies a /// (possibly dangerous) workaround mentioned in the bug report, /// which may fail on non-Sun JVMs. /// /// Applications using <seealso cref="Thread#interrupt()"/> or /// <seealso cref="Future#cancel(boolean)"/> should use /// <seealso cref="SimpleFSDirectory"/> instead. See <seealso cref="MMapDirectory"/> /// java doc for details. 
/// </ul>
///
/// Unfortunately, because of system peculiarities, there is
/// no single overall best implementation. Therefore, we've
/// added the <seealso cref="Open(DirectoryInfo)"/> method, to allow Lucene to choose
/// the best FSDirectory implementation given your
/// environment, and the known limitations of each
/// implementation. For users who have no reason to prefer a
/// specific implementation, it's best to simply use
/// <seealso cref="Open(DirectoryInfo)"/>. For all others, you should instantiate the
/// desired implementation directly.
///
/// <p>The locking implementation is by default
/// <seealso cref="NativeFSLockFactory"/>, but can be changed by
/// passing in a custom <seealso cref="LockFactory"/> instance.
/// </summary>
/// <seealso cref= Directory </seealso>
public abstract class FSDirectory : BaseDirectory
{
    /// <summary>
    /// Default read chunk size: 8192 bytes (this is the size up to which the JDK
    /// does not allocate additional arrays while reading/writing) </summary>
    /// @deprecated this constant is no longer used since Lucene 4.5.
    [Obsolete("this constant is no longer used since Lucene 4.5.")]
    public const int DEFAULT_READ_CHUNK_SIZE = 8192;

    /// <summary>The underlying filesystem directory.</summary>
    protected internal readonly DirectoryInfo directory;

    /// <summary>Files written, but not yet sync'ed.</summary>
    protected internal readonly ISet<string> StaleFiles = new HashSet<string>();

    // Backing store for the obsolete ReadChunkSize property; no longer consulted by I/O code.
    private int ChunkSize = DEFAULT_READ_CHUNK_SIZE;

    /// <summary>
    /// Create a new FSDirectory for the named location, using the default lock factory
    /// (<seealso cref="NativeFSLockFactory"/>).
    /// </summary>
    protected FSDirectory(DirectoryInfo dir)
        : this(dir, null)
    {
    }

    /// <summary>
    /// Create a new FSDirectory for the named location (ctor for subclasses).
    /// </summary>
    /// <param name="path"> the path of the directory </param>
    /// <param name="lockFactory"> the lock factory to use, or null for the default
    /// (<seealso cref="NativeFSLockFactory"/>) </param>
    /// <exception cref="System.IO.IOException"> if there is a low-level I/O error </exception>
    /// <exception cref="NoSuchDirectoryException"> if a regular file (not a directory)
    /// already exists at <paramref name="path"/> </exception>
    protected internal FSDirectory(DirectoryInfo path, LockFactory lockFactory)
    {
        // New ctors always use NativeFSLockFactory as the default:
        if (lockFactory == null)
        {
            lockFactory = new NativeFSLockFactory();
        }
        directory = path;

        // Lucene.NET doesn't need to call GetCanonicalPath since we already have DirectoryInfo handy.
        // A *file* at this path means the caller gave us something that can never be an index dir.
        if (File.Exists(path.FullName))
        {
            throw new NoSuchDirectoryException("file '" + path.FullName + "' exists but is not a directory");
        }

        // Goes through the LockFactory setter below, which wires up lock dir/prefix.
        LockFactory = lockFactory;
    }

    /// <summary>
    /// Creates an FSDirectory instance, trying to pick the
    /// best implementation given the current environment.
    /// The directory returned uses the <seealso cref="NativeFSLockFactory"/>.
    ///
    /// <p>Currently this returns <seealso cref="MMapDirectory"/> for most Solaris
    /// and Windows 64-bit JREs, and <seealso cref="SimpleFSDirectory"/> otherwise
    /// (NIOFSDirectory is not implemented in Lucene.Net). It is highly recommended
    /// that you consult the implementation's documentation for your platform before
    /// using this method.
    ///
    /// <p><b>NOTE</b>: this method may suddenly change which
    /// implementation is returned from release to release, in
    /// the event that higher performance defaults become
    /// possible; if the precise implementation is important to
    /// your application, please instantiate it directly,
    /// instead. For optimal performance you should consider using
    /// <seealso cref="MMapDirectory"/> on 64 bit JVMs.
    ///
    /// <p>See <a href="#subclasses">above</a>
    /// </summary>
    public static FSDirectory Open(DirectoryInfo path)
    {
        return Open(path, null);
    }

    /// <summary>
    /// Just like <seealso cref="Open(DirectoryInfo)"/>, but allows you to
    /// also specify a custom <seealso cref="LockFactory"/>.
    /// </summary>
    public static FSDirectory Open(DirectoryInfo path, LockFactory lockFactory)
    {
        // Prefer memory-mapped I/O on 64-bit platforms where unmapping is supported.
        if ((Constants.WINDOWS || Constants.SUN_OS || Constants.LINUX) && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED)
        {
            return new MMapDirectory(path, lockFactory);
        }
        else if (Constants.WINDOWS)
        {
            return new SimpleFSDirectory(path, lockFactory);
        }
        else
        {
            // NIOFSDirectory is not implemented in Lucene.Net:
            //return new NIOFSDirectory(path, lockFactory);
            return new SimpleFSDirectory(path, lockFactory);
        }
    }

    public override LockFactory LockFactory
    {
        set
        {
            base.LockFactory = value;

            // For a filesystem-based LockFactory, delete the lockPrefix if the locks are placed
            // in the index dir. If no lock dir is given, use this directory as the lock dir.
            if (value is FSLockFactory)
            {
                FSLockFactory lf = (FSLockFactory)value;
                DirectoryInfo dir = lf.LockDir;
                if (dir == null)
                {
                    lf.LockDir = directory;
                    lf.LockPrefix = null;
                }
                else if (dir.FullName.Equals(directory.FullName))
                {
                    lf.LockPrefix = null;
                }
            }
        }
    }

    /// <summary>
    /// Lists all files (not subdirectories) in the
    /// directory. This method never returns null (throws
    /// <seealso cref="System.IO.IOException"/> instead).
    /// </summary>
    /// <exception cref="NoSuchDirectoryException"> if the directory
    /// does not exist, or does exist but is not a
    /// directory. </exception>
    /// <exception cref="System.IO.IOException"> if there is a low-level I/O error </exception>
    public static string[] ListAll(DirectoryInfo dir)
    {
        if (!System.IO.Directory.Exists(dir.FullName))
        {
            throw new NoSuchDirectoryException("directory '" + dir + "' does not exist");
        }
        else if (File.Exists(dir.FullName))
        {
            throw new NoSuchDirectoryException("file '" + dir + "' exists but is not a directory");
        }

        // GetFiles() returns only files, so subdirectories are excluded by construction.
        // (The previous "result == null" check was dead code: a freshly allocated array
        // can never be null.)
        FileInfo[] files = dir.GetFiles();
        string[] result = new string[files.Length];
        for (int i = 0; i < files.Length; i++)
        {
            result[i] = files[i].Name;
        }
        return result;
    }

    /// <summary>
    /// Lists all files (not subdirectories) in the
    /// directory. </summary>
    /// <seealso cref="ListAll(DirectoryInfo)"/>
    public override string[] ListAll()
    {
        EnsureOpen();
        return ListAll(directory);
    }

    /// <summary>
    /// Returns true iff a file with the given name exists. </summary>
    public override bool FileExists(string name)
    {
        EnsureOpen();
        return File.Exists(Path.Combine(directory.FullName, name));
    }

    /// <summary>
    /// Returns the length in bytes of a file in the directory. </summary>
    /// <exception cref="FileNotFoundException"> if the file does not exist </exception>
    public override long FileLength(string name)
    {
        EnsureOpen();
        FileInfo file = new FileInfo(Path.Combine(directory.FullName, name));
        long len = file.Length;
        // Defensive: report a zero-length result for a missing file as not-found.
        if (len == 0 && !file.Exists)
        {
            throw new FileNotFoundException(name);
        }
        else
        {
            return len;
        }
    }

    /// <summary>
    /// Removes an existing file in the directory. </summary>
    /// <exception cref="System.IO.IOException"> if the file could not be deleted </exception>
    public override void DeleteFile(string name)
    {
        EnsureOpen();
        FileInfo file = new FileInfo(Path.Combine(directory.FullName, name));
        try
        {
            file.Delete();
        }
        catch (Exception e)
        {
            // Preserve the underlying failure as the inner exception instead of discarding it.
            throw new System.IO.IOException("Cannot delete " + file, e);
        }
        StaleFiles.Remove(name);
    }

    /// <summary>
    /// Creates an IndexOutput for the file with the given name. </summary>
    public override IndexOutput CreateOutput(string name, IOContext context)
    {
        EnsureOpen();
        EnsureCanWrite(name);
        return new FSIndexOutput(this, name);
    }

    /// <summary>
    /// Ensures the directory exists and any existing file with this name is removed,
    /// so a fresh output can be created. </summary>
    /// <exception cref="System.IO.IOException"> if the directory cannot be created or
    /// the existing file cannot be deleted </exception>
    protected internal virtual void EnsureCanWrite(string name)
    {
        if (!directory.Exists)
        {
            try
            {
                directory.Create();
            }
            catch (Exception e)
            {
                // Wrap with the original failure as the inner exception.
                throw new System.IO.IOException("Cannot create directory: " + directory, e);
            }
        }

        FileInfo file = new FileInfo(Path.Combine(directory.FullName, name));
        if (file.Exists) // delete existing, if any
        {
            try
            {
                file.Delete();
            }
            catch (Exception e)
            {
                throw new System.IO.IOException("Cannot overwrite: " + file, e);
            }
        }
    }

    /// <summary>
    /// Called by <seealso cref="FSIndexOutput.Dispose"/> so the file is tracked as
    /// written-but-not-yet-synced. </summary>
    protected internal virtual void OnIndexOutputClosed(FSIndexOutput io)
    {
        StaleFiles.Add(io.Name);
    }

    public override void Sync(ICollection<string> names)
    {
        EnsureOpen();

        // Only fsync files we actually wrote and haven't synced yet.
        ISet<string> toSync = new HashSet<string>(names);
        toSync.IntersectWith(StaleFiles);

        foreach (var name in toSync)
        {
            Fsync(name);
        }

        // fsync the directory itself, but only if there was any file fsynced before
        // (otherwise it can happen that the directory does not yet exist)!
        if (toSync.Count > 0)
        {
            IOUtils.Fsync(directory.FullName, true);
        }

        StaleFiles.ExceptWith(toSync);
    }

    public override string LockID
    {
        get
        {
            EnsureOpen();
            string dirName; // name to be hashed
            try
            {
                dirName = directory.FullName;
            }
            catch (System.IO.IOException e)
            {
                throw new Exception(e.ToString(), e);
            }

            // Simple Java-style 31*h + c string hash of the directory path.
            int digest = 0;
            for (int charIDX = 0; charIDX < dirName.Length; charIDX++)
            {
                char ch = dirName[charIDX];
                digest = 31 * digest + ch;
            }
            return "lucene-" + digest.ToString("x");
        }
    }

    /// <summary>
    /// Closes the store to future operations. </summary>
    public override void Dispose()
    {
        isOpen = false;
    }

    /// <returns> the underlying filesystem directory </returns>
    public virtual DirectoryInfo Directory
    {
        get
        {
            EnsureOpen();
            return directory;
        }
    }

    /// <summary>
    /// For debug output. </summary>
    public override string ToString()
    {
        return this.GetType().Name + "@" + directory + " lockFactory=" + LockFactory;
    }

    /// <summary>
    /// This setting has no effect anymore. </summary>
    /// @deprecated this is no longer used since Lucene 4.5.
    [Obsolete("this is no longer used since Lucene 4.5.")]
    public int ReadChunkSize
    {
        set
        {
            if (value <= 0)
            {
                throw new System.ArgumentException("chunkSize must be positive");
            }
            this.ChunkSize = value;
        }
        get
        {
            return ChunkSize;
        }
    }

    /// <summary>
    /// Writes output through a <seealso cref="FileStream"/>, flushing in fixed-size chunks.
    /// </summary>
    protected internal class FSIndexOutput : BufferedIndexOutput
    {
        /// <summary>
        /// The maximum chunk size is 8192 bytes, because writes are issued to the
        /// underlying stream in pieces of at most this size.
        /// </summary>
        internal const int CHUNK_SIZE = 8192;

        internal readonly FSDirectory Parent;
        internal readonly string Name;
        internal readonly FileStream File;

        // Remember if the file is open, so that we don't try to close it more than once.
        internal volatile bool IsOpen;

        public FSIndexOutput(FSDirectory parent, string name)
            : base(CHUNK_SIZE)
        {
            this.Parent = parent;
            this.Name = name;
            File = new FileStream(Path.Combine(parent.directory.FullName, name), FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite);
            IsOpen = true;
        }

        protected internal override void FlushBuffer(byte[] b, int offset, int size)
        {
            //Debug.Assert(IsOpen);
            // Write in CHUNK_SIZE pieces until the whole buffer is flushed.
            while (size > 0)
            {
                int toWrite = Math.Min(CHUNK_SIZE, size);
                File.Write(b, offset, toWrite);
                offset += toWrite;
                size -= toWrite;
            }
            //Debug.Assert(size == 0);
        }

        public override void Dispose()
        {
            // Let the parent track this file as needing an fsync.
            Parent.OnIndexOutputClosed(this);
            // Only close the file if it has not been closed yet.
            if (IsOpen)
            {
                System.IO.IOException priorE = null;
                try
                {
                    base.Dispose();
                }
                catch (System.IO.IOException ioe)
                {
                    priorE = ioe;
                }
                finally
                {
                    IsOpen = false;
                    // Close the stream, but don't let a close failure mask an earlier flush failure.
                    IOUtils.CloseWhileHandlingException(priorE, File);
                }
            }
        }

        /// <summary>
        /// Random-access methods </summary>
        public override void Seek(long pos)
        {
            base.Seek(pos);
            File.Seek(pos, SeekOrigin.Begin);
        }

        public override long Length
        {
            get
            {
                return File.Length;
            }
        }
    }

    /// <summary>
    /// Fsyncs the named file (or directory, when <paramref name="isDir"/> is true)
    /// inside this directory. </summary>
    /// <param name="name"> file name relative to this directory </param>
    /// <param name="isDir"> whether <paramref name="name"/> denotes a directory </param>
    protected void Fsync(String name, bool isDir = false)
    {
        IOUtils.Fsync(Path.Combine(directory.FullName, name), isDir);
    }
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;

#nullable enable

namespace Ignitor
{
    /// <summary>
    /// Client-side mirror of a Blazor render tree: applies <see cref="RenderBatch"/> diffs
    /// to an in-memory node tree so components/elements can be inspected by id.
    /// </summary>
    public class ElementHive
    {
        // Property name used for <select> value bookkeeping elsewhere in this class.
        private const string SelectValuePropname = "_blazorSelectValue";

        // Root component nodes, keyed by component id.
        public Dictionary<int, ComponentNode> Components { get; } = new Dictionary<int, ComponentNode>();

        public string SerializedValue => NodeSerializer.Serialize(this);

        /// <summary>
        /// Applies one render batch: component diffs first, then component and
        /// event-handler disposals.
        /// </summary>
        public void Update(RenderBatch batch)
        {
            for (var i = 0; i < batch.UpdatedComponents.Count; i++)
            {
                var diff = batch.UpdatedComponents.Array[i];
                var componentId = diff.ComponentId;
                var edits = diff.Edits;
                UpdateComponent(batch, componentId, edits);
            }

            for (var i = 0; i < batch.DisposedComponentIDs.Count; i++)
            {
                DisposeComponent(batch.DisposedComponentIDs.Array[i]);
            }

            for (var i = 0; i < batch.DisposedEventHandlerIDs.Count; i++)
            {
                DisposeEventHandler(batch.DisposedEventHandlerIDs.Array[i]);
            }
        }

        /// <summary>
        /// Searches every component tree for an element whose "id" attribute equals
        /// <paramref name="id"/>; returns false (and null) when none matches.
        /// </summary>
        public bool TryFindElementById(string id, [NotNullWhen(true)] out ElementNode? element)
        {
            foreach (var kvp in Components)
            {
                var component = kvp.Value;
                if (TryGetElementFromChildren(component, id, out element))
                {
                    return true;
                }
            }

            element = null;
            return false;
        }

        // Depth-first search of a node subtree for an element with the given "id" attribute.
        bool TryGetElementFromChildren(Node node, string id, [NotNullWhen(true)] out ElementNode?
foundNode)
        {
            // Match: an element whose "id" attribute equals the id we're looking for.
            if (node is ElementNode elementNode &&
                elementNode.Attributes.TryGetValue("id", out var elementId) &&
                elementId.ToString() == id)
            {
                foundNode = elementNode;
                return true;
            }

            // Otherwise recurse into container children, if any.
            if (node is ContainerNode containerNode)
            {
                for (var i = 0; i < containerNode.Children.Count; i++)
                {
                    if (TryGetElementFromChildren(containerNode.Children[i], id, out foundNode))
                    {
                        return true;
                    }
                }
            }

            foundNode = null;
            return false;
        }

        // Applies a diff to one component, creating its node lazily on first sight.
        private void UpdateComponent(RenderBatch batch, int componentId, ArrayBuilderSegment<RenderTreeEdit> edits)
        {
            if (!Components.TryGetValue(componentId, out var component))
            {
                component = new ComponentNode(componentId);
                Components.Add(componentId, component);
            }

            ApplyEdits(batch, component, 0, edits);
        }

        // Intentionally a no-op: this mirror does not track component disposal.
        private void DisposeComponent(int componentId)
        {
        }

        // Intentionally a no-op: disposed event handler ids are not tracked here.
        private void DisposeEventHandler(ulong eventHandlerId)
        {
        }

        /// <summary>
        /// Interprets a sequence of <see cref="RenderTreeEdit"/>s against the node tree,
        /// maintaining a cursor (parent + depth + child offset) as StepIn/StepOut move it.
        /// </summary>
        private void ApplyEdits(RenderBatch batch, ContainerNode parent, int childIndex, ArrayBuilderSegment<RenderTreeEdit> edits)
        {
            var currentDepth = 0;
            var childIndexAtCurrentDepth = childIndex;
            var permutations = new List<PermutationListEntry>();
            for (var editIndex = edits.Offset; editIndex < edits.Offset + edits.Count; editIndex++)
            {
                var edit = edits.Array[editIndex];
                switch (edit.Type)
                {
                    case RenderTreeEditType.PrependFrame:
                        {
                            var frame = batch.ReferenceFrames.Array[edit.ReferenceFrameIndex];
                            var siblingIndex = edit.SiblingIndex;
                            InsertFrame(batch, parent, childIndexAtCurrentDepth + siblingIndex, batch.ReferenceFrames.Array, frame, edit.ReferenceFrameIndex);
                            break;
                        }

                    case RenderTreeEditType.RemoveFrame:
                        {
                            var siblingIndex = edit.SiblingIndex;
                            parent.RemoveLogicalChild(childIndexAtCurrentDepth + siblingIndex);
                            break;
                        }

                    case RenderTreeEditType.SetAttribute:
                        {
                            var frame = batch.ReferenceFrames.Array[edit.ReferenceFrameIndex];
                            var siblingIndex = edit.SiblingIndex;
                            var node = parent.Children[childIndexAtCurrentDepth + siblingIndex];
                            if (node is ElementNode element)
                            {
                                ApplyAttribute(batch, element, frame);
                            }
                            else
                            {
                                throw new Exception("Cannot set attribute on non-element child");
                            }
                            break;
                        }

                    case RenderTreeEditType.RemoveAttribute:
                        {
                            // Note that we don't have to dispose the info we track about event handlers here, because the
                            // disposed event handler IDs are delivered separately (in the 'disposedEventHandlerIds' array)
                            var siblingIndex = edit.SiblingIndex;
                            var node = parent.Children[childIndexAtCurrentDepth + siblingIndex];
                            if (node is ElementNode element)
                            {
                                var attributeName = edit.RemovedAttributeName!;

                                // First try to remove any special property we use for this attribute
                                if (!TryApplySpecialProperty(batch, element, attributeName, default))
                                {
                                    // If that's not applicable, it's a regular DOM attribute so remove that
                                    element.RemoveAttribute(attributeName);
                                }
                            }
                            else
                            {
                                throw new Exception("Cannot remove attribute from non-element child");
                            }
                            break;
                        }

                    case RenderTreeEditType.UpdateText:
                        {
                            var frame = batch.ReferenceFrames.Array[edit.ReferenceFrameIndex];
                            var siblingIndex = edit.SiblingIndex;
                            var node = parent.Children[childIndexAtCurrentDepth + siblingIndex];
                            if (node is TextNode textNode)
                            {
                                textNode.TextContent = frame.TextContent;
                            }
                            else
                            {
                                throw new Exception("Cannot set text content on non-text child");
                            }
                            break;
                        }

                    case RenderTreeEditType.UpdateMarkup:
                        {
                            // Markup is replaced wholesale: remove the old child, insert fresh markup.
                            var frame = batch.ReferenceFrames.Array[edit.ReferenceFrameIndex];
                            var siblingIndex = edit.SiblingIndex;
                            parent.RemoveLogicalChild(childIndexAtCurrentDepth + siblingIndex);
                            InsertMarkup(parent, childIndexAtCurrentDepth + siblingIndex, frame);
                            break;
                        }

                    case RenderTreeEditType.StepIn:
                        {
                            // Descend into a child container; child offsets reset at deeper levels.
                            var siblingIndex = edit.SiblingIndex;
                            parent = (ContainerNode)parent.Children[childIndexAtCurrentDepth + siblingIndex];
                            currentDepth++;
                            childIndexAtCurrentDepth = 0;
                            break;
                        }

                    case RenderTreeEditType.StepOut:
                        {
                            parent = parent.Parent ?? throw new InvalidOperationException($"Cannot step out of {parent}");
                            currentDepth--;
                            childIndexAtCurrentDepth = currentDepth == 0 ?
childIndex : 0; // The childIndex is only ever nonzero at zero depth
                            break;
                        }

                    case RenderTreeEditType.PermutationListEntry:
                        {
                            permutations.Add(new PermutationListEntry(childIndexAtCurrentDepth + edit.SiblingIndex, childIndexAtCurrentDepth + edit.MoveToSiblingIndex));
                            break;
                        }

                    case RenderTreeEditType.PermutationListEnd:
                        {
                            // Applying the accumulated permutation list is not implemented here.
                            throw new NotSupportedException();
                            //permuteLogicalChildren(parent, permutations!);
                            //permutations.Clear();
                            //break;
                        }

                    default:
                        {
                            throw new Exception($"Unknown edit type: '{edit.Type}'");
                        }
                }
            }
        }

        /// <summary>
        /// Inserts one frame (and, for elements/regions, its subtree) at the given child
        /// index. Returns how many direct children were inserted into <paramref name="parent"/>.
        /// </summary>
        private int InsertFrame(RenderBatch batch, ContainerNode parent, int childIndex, ArraySegment<RenderTreeFrame> frames, RenderTreeFrame frame, int frameIndex)
        {
            switch (frame.FrameType)
            {
                case RenderTreeFrameType.Element:
                    {
                        InsertElement(batch, parent, childIndex, frames, frame, frameIndex);
                        return 1;
                    }

                case RenderTreeFrameType.Text:
                    {
                        InsertText(parent, childIndex, frame);
                        return 1;
                    }

                case RenderTreeFrameType.Attribute:
                    {
                        throw new Exception("Attribute frames should only be present as leading children of element frames.");
                    }

                case RenderTreeFrameType.Component:
                    {
                        InsertComponent(parent, childIndex, frame);
                        return 1;
                    }

                case RenderTreeFrameType.Region:
                    {
                        // A region contributes the children of its subtree, not a node of its own.
                        return InsertFrameRange(batch, parent, childIndex, frames, frameIndex + 1, frameIndex + frame.RegionSubtreeLength);
                    }

                case RenderTreeFrameType.ElementReferenceCapture:
                    {
                        if (parent is ElementNode)
                        {
                            return 0; // A "capture" is a child in the diff, but has no node in the DOM
                        }
                        else
                        {
                            throw new Exception("Reference capture frames can only be children of element frames.");
                        }
                    }

                case RenderTreeFrameType.Markup:
                    {
                        InsertMarkup(parent, childIndex, frame);
                        return 1;
                    }
            }

            throw new Exception($"Unknown frame type: {frame.FrameType}");
        }

        // Creates a text node from the frame's text content and inserts it.
        private void InsertText(ContainerNode parent, int childIndex, RenderTreeFrame frame)
        {
            var textContent = frame.TextContent;
            var newTextNode = new TextNode(textContent);
            parent.InsertLogicalChild(newTextNode, childIndex);
        }

        private void InsertComponent(ContainerNode parent, int
childIndex, RenderTreeFrame frame) { // All we have to do is associate the child component ID with its location. We don't actually // do any rendering here, because the diff for the child will appear later in the render batch. var childComponentId = frame.ComponentId; var containerElement = parent.CreateAndInsertComponent(childComponentId, childIndex); Components[childComponentId] = containerElement; } private int InsertFrameRange(RenderBatch batch, ContainerNode parent, int childIndex, ArraySegment<RenderTreeFrame> frames, int startIndex, int endIndexExcl) { var origChildIndex = childIndex; for (var index = startIndex; index < endIndexExcl; index++) { var frame = batch.ReferenceFrames.Array[index]; var numChildrenInserted = InsertFrame(batch, parent, childIndex, frames, frame, index); childIndex += numChildrenInserted; // Skip over any descendants, since they are already dealt with recursively index += CountDescendantFrames(frame); } return (childIndex - origChildIndex); // Total number of children inserted } private void InsertElement(RenderBatch batch, ContainerNode parent, int childIndex, ArraySegment<RenderTreeFrame> frames, RenderTreeFrame frame, int frameIndex) { // Note: we don't handle SVG here var newElement = new ElementNode(frame.ElementName); var inserted = false; // Apply attributes for (var i = frameIndex + 1; i < frameIndex + frame.ElementSubtreeLength; i++) { var descendantFrame = batch.ReferenceFrames.Array[i]; if (descendantFrame.FrameType == RenderTreeFrameType.Attribute) { ApplyAttribute(batch, newElement, descendantFrame); } else { parent.InsertLogicalChild(newElement, childIndex); inserted = true; // As soon as we see a non-attribute child, all the subsequent child frames are // not attributes, so bail out and insert the remnants recursively InsertFrameRange(batch, newElement, 0, frames, i, frameIndex + frame.ElementSubtreeLength); break; } } // this element did not have any children, so it's not inserted yet. 
if (!inserted) { parent.InsertLogicalChild(newElement, childIndex); } } private void ApplyAttribute(RenderBatch batch, ElementNode elementNode, RenderTreeFrame attributeFrame) { var attributeName = attributeFrame.AttributeName; var eventHandlerId = attributeFrame.AttributeEventHandlerId; if (eventHandlerId != 0) { var firstTwoChars = attributeName.Substring(0, 2); var eventName = attributeName.Substring(2); if (firstTwoChars != "on" || string.IsNullOrEmpty(eventName)) { throw new InvalidOperationException($"Attribute has nonzero event handler ID, but attribute name '${attributeName}' does not start with 'on'."); } var descriptor = new ElementNode.ElementEventDescriptor(eventName, eventHandlerId); elementNode.SetEvent(eventName, descriptor); return; } // First see if we have special handling for this attribute if (!TryApplySpecialProperty(batch, elementNode, attributeName, attributeFrame)) { // If not, treat it as a regular string-valued attribute elementNode.SetAttribute( attributeName, attributeFrame.AttributeValue); } } private bool TryApplySpecialProperty(RenderBatch batch, ElementNode element, string attributeName, RenderTreeFrame attributeFrame) { switch (attributeName) { case "value": return TryApplyValueProperty(element, attributeFrame); case "checked": return TryApplyCheckedProperty(element, attributeFrame); default: return false; } } private bool TryApplyValueProperty(ElementNode element, RenderTreeFrame attributeFrame) { // Certain elements have built-in behaviour for their 'value' property switch (element.TagName) { case "INPUT": case "SELECT": case "TEXTAREA": { var value = attributeFrame.AttributeValue; element.SetProperty("value", value); if (element.TagName == "SELECT") { // <select> is special, in that anything we write to .value will be lost if there // isn't yet a matching <option>. 
To maintain the expected behavior no matter the // element insertion/update order, preserve the desired value separately so // we can recover it when inserting any matching <option>. element.SetProperty(SelectValuePropname, value); } return true; } case "OPTION": { var value = attributeFrame.AttributeValue; if (value != null) { element.SetAttribute("value", value); } else { element.RemoveAttribute("value"); } return true; } default: return false; } } private bool TryApplyCheckedProperty(ElementNode element, RenderTreeFrame attributeFrame) { // Certain elements have built-in behaviour for their 'checked' property if (element.TagName == "INPUT") { var value = attributeFrame.AttributeValue; element.SetProperty("checked", value); return true; } return false; } private void InsertMarkup(ContainerNode parent, int childIndex, RenderTreeFrame markupFrame) { var markupContainer = parent.CreateAndInsertContainer(childIndex); var markupContent = markupFrame.MarkupContent; var markupNode = new MarkupNode(markupContent); markupContainer.InsertLogicalChild(markupNode, childIndex); } private int CountDescendantFrames(RenderTreeFrame frame) { switch (frame.FrameType) { // The following frame types have a subtree length. Other frames may use that memory slot // to mean something else, so we must not read it. We should consider having nominal subtypes // of RenderTreeFramePointer that prevent access to non-applicable fields. case RenderTreeFrameType.Component: return frame.ComponentSubtreeLength - 1; case RenderTreeFrameType.Element: return frame.ElementSubtreeLength - 1; case RenderTreeFrameType.Region: return frame.RegionSubtreeLength - 1; default: return 0; } } private readonly struct PermutationListEntry { public readonly int From; public readonly int To; public PermutationListEntry(int from, int to) { From = from; To = to; } } } } #nullable restore
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Threading.Tasks;
using Bmz.Framework.Domain.Entities;
using Bmz.Framework.MultiTenancy;
using Bmz.Framework.Reflection.Extensions;

namespace Bmz.Framework.Domain.Repositories
{
    /// <summary>
    /// Base class to implement <see cref="IRepository{TEntity,TPrimaryKey}"/>.
    /// It implements some methods in most simple way: queries run synchronously against
    /// <see cref="GetAll"/>, and the *Async overloads wrap the synchronous results in
    /// completed tasks. Persistence-specific subclasses override the abstract members
    /// (and typically the async ones too).
    /// </summary>
    /// <typeparam name="TEntity">Type of the Entity for this repository</typeparam>
    /// <typeparam name="TPrimaryKey">Primary key of the entity</typeparam>
    public abstract class BmzRepositoryBase<TEntity, TPrimaryKey> : IRepository<TEntity, TPrimaryKey>
        where TEntity : class, IEntity<TPrimaryKey>
    {
        /// <summary>
        /// The multi tenancy side of <typeparamref name="TEntity"/>, taken from its
        /// <see cref="MultiTenancySideAttribute"/> (or a base type's); null when the
        /// entity declares no side.
        /// </summary>
        public static MultiTenancySides? MultiTenancySide { get; private set; }

        static BmzRepositoryBase()
        {
            // Resolved once per closed generic type; the attribute is inherited from base types.
            var attr = typeof(TEntity).GetSingleAttributeOfTypeOrBaseTypesOrNull<MultiTenancySideAttribute>();
            if (attr != null)
            {
                MultiTenancySide = attr.Side;
            }
        }

        /// <summary>Gets a queryable over all entities; the root of every query in this class.</summary>
        public abstract IQueryable<TEntity> GetAll();

        /// <summary>Gets all entities as a list.</summary>
        public virtual List<TEntity> GetAllList()
        {
            return GetAll().ToList();
        }

        /// <summary>Async counterpart of <see cref="GetAllList()"/> (runs synchronously here).</summary>
        public virtual Task<List<TEntity>> GetAllListAsync()
        {
            return Task.FromResult(GetAllList());
        }

        /// <summary>Gets all entities satisfying <paramref name="predicate"/> as a list.</summary>
        public virtual List<TEntity> GetAllList(Expression<Func<TEntity, bool>> predicate)
        {
            return GetAll().Where(predicate).ToList();
        }

        /// <summary>Async counterpart of <see cref="GetAllList(Expression{Func{TEntity, bool}})"/>.</summary>
        public virtual Task<List<TEntity>> GetAllListAsync(Expression<Func<TEntity, bool>> predicate)
        {
            return Task.FromResult(GetAllList(predicate));
        }

        /// <summary>Runs an arbitrary query over <see cref="GetAll"/> and returns its result.</summary>
        public virtual T Query<T>(Func<IQueryable<TEntity>, T> queryMethod)
        {
            return queryMethod(GetAll());
        }

        /// <summary>Gets the entity with the given primary key.</summary>
        /// <exception cref="BmzException">Thrown when no entity has the given key.</exception>
        public virtual TEntity Get(TPrimaryKey id)
        {
            var entity = FirstOrDefault(id);
            if (entity == null)
            {
                throw new BmzException("There is no such an entity with given primary key. Entity type: " + typeof(TEntity).FullName + ", primary key: " + id);
            }

            return entity;
        }

        /// <summary>Async counterpart of <see cref="Get"/>.</summary>
        /// <exception cref="BmzException">Thrown when no entity has the given key.</exception>
        public virtual async Task<TEntity> GetAsync(TPrimaryKey id)
        {
            var entity = await FirstOrDefaultAsync(id);
            if (entity == null)
            {
                throw new BmzException("There is no such an entity with given primary key. Entity type: " + typeof(TEntity).FullName + ", primary key: " + id);
            }

            return entity;
        }

        /// <summary>Gets exactly one entity satisfying <paramref name="predicate"/>; throws otherwise.</summary>
        public virtual TEntity Single(Expression<Func<TEntity, bool>> predicate)
        {
            return GetAll().Single(predicate);
        }

        /// <summary>Async counterpart of <see cref="Single"/>.</summary>
        public virtual Task<TEntity> SingleAsync(Expression<Func<TEntity, bool>> predicate)
        {
            return Task.FromResult(Single(predicate));
        }

        /// <summary>Gets the entity with the given primary key, or null if it does not exist.</summary>
        public virtual TEntity FirstOrDefault(TPrimaryKey id)
        {
            return GetAll().FirstOrDefault(CreateEqualityExpressionForId(id));
        }

        /// <summary>Async counterpart of <see cref="FirstOrDefault(TPrimaryKey)"/>.</summary>
        public virtual Task<TEntity> FirstOrDefaultAsync(TPrimaryKey id)
        {
            return Task.FromResult(FirstOrDefault(id));
        }

        /// <summary>Gets the first entity satisfying <paramref name="predicate"/>, or null.</summary>
        public virtual TEntity FirstOrDefault(Expression<Func<TEntity, bool>> predicate)
        {
            return GetAll().FirstOrDefault(predicate);
        }

        /// <summary>Async counterpart of <see cref="FirstOrDefault(Expression{Func{TEntity, bool}})"/>.</summary>
        public virtual Task<TEntity> FirstOrDefaultAsync(Expression<Func<TEntity, bool>> predicate)
        {
            return Task.FromResult(FirstOrDefault(predicate));
        }

        /// <summary>
        /// Loads the entity with the given primary key. In this base implementation it is
        /// identical to <see cref="Get"/>; ORM subclasses may return a lazy proxy instead.
        /// </summary>
        public virtual TEntity Load(TPrimaryKey id)
        {
            return Get(id);
        }

        /// <summary>Inserts a new entity.</summary>
        public abstract TEntity Insert(TEntity entity);

        /// <summary>Async counterpart of <see cref="Insert"/>.</summary>
        public virtual Task<TEntity> InsertAsync(TEntity entity)
        {
            return Task.FromResult(Insert(entity));
        }

        /// <summary>Inserts a new entity and returns its (possibly store-generated) id.</summary>
        public virtual TPrimaryKey InsertAndGetId(TEntity entity)
        {
            return Insert(entity).Id;
        }

        /// <summary>Async counterpart of <see cref="InsertAndGetId"/>.</summary>
        public virtual Task<TPrimaryKey> InsertAndGetIdAsync(TEntity entity)
        {
            return Task.FromResult(InsertAndGetId(entity));
        }

        /// <summary>Inserts a transient entity, or updates an already-persisted one.</summary>
        public virtual TEntity InsertOrUpdate(TEntity entity)
        {
            return entity.IsTransient()
                ? Insert(entity)
                : Update(entity);
        }

        /// <summary>Async counterpart of <see cref="InsertOrUpdate"/>.</summary>
        public virtual async Task<TEntity> InsertOrUpdateAsync(TEntity entity)
        {
            return entity.IsTransient()
                ? await InsertAsync(entity)
                : await UpdateAsync(entity);
        }

        /// <summary>Inserts or updates the entity and returns its id.</summary>
        public virtual TPrimaryKey InsertOrUpdateAndGetId(TEntity entity)
        {
            return InsertOrUpdate(entity).Id;
        }

        /// <summary>Async counterpart of <see cref="InsertOrUpdateAndGetId"/>.</summary>
        public virtual Task<TPrimaryKey> InsertOrUpdateAndGetIdAsync(TEntity entity)
        {
            return Task.FromResult(InsertOrUpdateAndGetId(entity));
        }

        /// <summary>Updates an existing entity.</summary>
        public abstract TEntity Update(TEntity entity);

        /// <summary>Async counterpart of <see cref="Update(TEntity)"/>.</summary>
        public virtual Task<TEntity> UpdateAsync(TEntity entity)
        {
            return Task.FromResult(Update(entity));
        }

        /// <summary>
        /// Loads the entity with the given id, applies <paramref name="updateAction"/> to it and
        /// returns it. Changes are expected to be tracked/flushed by the unit of work.
        /// </summary>
        public virtual TEntity Update(TPrimaryKey id, Action<TEntity> updateAction)
        {
            var entity = Get(id);
            updateAction(entity);
            return entity;
        }

        /// <summary>Async counterpart of <see cref="Update(TPrimaryKey, Action{TEntity})"/>.</summary>
        public virtual async Task<TEntity> UpdateAsync(TPrimaryKey id, Func<TEntity, Task> updateAction)
        {
            var entity = await GetAsync(id);
            await updateAction(entity);
            return entity;
        }

        /// <summary>Deletes the given entity.</summary>
        public abstract void Delete(TEntity entity);

        /// <summary>Async counterpart of <see cref="Delete(TEntity)"/>.</summary>
        public virtual Task DeleteAsync(TEntity entity)
        {
            Delete(entity);
            return Task.FromResult(0);
        }

        /// <summary>Deletes the entity with the given primary key.</summary>
        public abstract void Delete(TPrimaryKey id);

        /// <summary>Async counterpart of <see cref="Delete(TPrimaryKey)"/>.</summary>
        public virtual Task DeleteAsync(TPrimaryKey id)
        {
            Delete(id);
            return Task.FromResult(0);
        }

        /// <summary>
        /// Deletes every entity satisfying <paramref name="predicate"/>. The matches are
        /// materialized first so deleting does not modify the sequence being enumerated.
        /// </summary>
        public virtual void Delete(Expression<Func<TEntity, bool>> predicate)
        {
            foreach (var entity in GetAll().Where(predicate).ToList())
            {
                Delete(entity);
            }
        }

        /// <summary>
        /// Async counterpart of <see cref="Delete(Expression{Func{TEntity, bool}})"/>.
        /// Fix: was declared 'async' without any 'await' (CS1998); now follows the same
        /// synchronous-wrapper pattern as the other DeleteAsync overloads.
        /// </summary>
        public virtual Task DeleteAsync(Expression<Func<TEntity, bool>> predicate)
        {
            Delete(predicate);
            return Task.FromResult(0);
        }

        /// <summary>Counts all entities.</summary>
        public virtual int Count()
        {
            return GetAll().Count();
        }

        /// <summary>Async counterpart of <see cref="Count()"/>.</summary>
        public virtual Task<int> CountAsync()
        {
            return Task.FromResult(Count());
        }

        /// <summary>Counts entities satisfying <paramref name="predicate"/>.</summary>
        public virtual int Count(Expression<Func<TEntity, bool>> predicate)
        {
            // Predicate overload instead of Where(...).Count(); same query, one call.
            return GetAll().Count(predicate);
        }

        /// <summary>Async counterpart of <see cref="Count(Expression{Func{TEntity, bool}})"/>.</summary>
        public virtual Task<int> CountAsync(Expression<Func<TEntity, bool>> predicate)
        {
            return Task.FromResult(Count(predicate));
        }

        /// <summary>Counts all entities; use when the count may exceed <see cref="int.MaxValue"/>.</summary>
        public virtual long LongCount()
        {
            return GetAll().LongCount();
        }

        /// <summary>Async counterpart of <see cref="LongCount()"/>.</summary>
        public virtual Task<long> LongCountAsync()
        {
            return Task.FromResult(LongCount());
        }

        /// <summary>Counts entities satisfying <paramref name="predicate"/> as a long.</summary>
        public virtual long LongCount(Expression<Func<TEntity, bool>> predicate)
        {
            // Predicate overload instead of Where(...).LongCount(); same query, one call.
            return GetAll().LongCount(predicate);
        }

        /// <summary>Async counterpart of <see cref="LongCount(Expression{Func{TEntity, bool}})"/>.</summary>
        public virtual Task<long> LongCountAsync(Expression<Func<TEntity, bool>> predicate)
        {
            return Task.FromResult(LongCount(predicate));
        }

        /// <summary>
        /// Builds the expression <c>e => e.Id == id</c> used by the id-based lookups.
        /// </summary>
        protected static Expression<Func<TEntity, bool>> CreateEqualityExpressionForId(TPrimaryKey id)
        {
            var lambdaParam = Expression.Parameter(typeof(TEntity));

            var lambdaBody = Expression.Equal(
                Expression.PropertyOrField(lambdaParam, "Id"),
                Expression.Constant(id, typeof(TPrimaryKey))
                );

            return Expression.Lambda<Func<TEntity, bool>>(lambdaBody, lambdaParam);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // See the LICENSE file in the project root for more information. // (C) Copyright 2002 Franklin Wise // (C) Copyright 2003 Daniel Morgan // (C) Copyright 2003 Martin Willemoes Hansen // // // Copyright (C) 2004 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using Xunit;

namespace System.Data.Tests
{
    /// <summary>
    /// Tests for <see cref="DataRow"/>: column errors, deletion, parent/child relations,
    /// and how ItemArray assignment interacts with null, DBNull, DefaultValue and
    /// AutoIncrement columns.
    /// </summary>
    public class DataRowTest
    {
        private DataTable _table;
        private DataRow _row;

        public DataRowTest()
        {
            // Shared fixture: a "Names" table with one pre-added row.
            _table = MakeTable();
            _row = _table.NewRow();
            _row["FName"] = "Hello";
            _row["LName"] = "World";
            _table.Rows.Add(_row);
        }

        // Builds the "Names" table: auto-increment Id (primary key) plus Fname/LName
        // string columns that carry default values.
        private DataTable MakeTable()
        {
            DataTable namesTable = new DataTable("Names");

            DataColumn idColumn = new DataColumn();
            idColumn.DataType = Type.GetType("System.Int32");
            idColumn.ColumnName = "Id";
            idColumn.AutoIncrement = true;
            namesTable.Columns.Add(idColumn);

            DataColumn fNameColumn = new DataColumn();
            fNameColumn.DataType = Type.GetType("System.String");
            fNameColumn.ColumnName = "Fname";
            fNameColumn.DefaultValue = "Fname";
            namesTable.Columns.Add(fNameColumn);

            DataColumn lNameColumn = new DataColumn();
            lNameColumn.DataType = Type.GetType("System.String");
            lNameColumn.ColumnName = "LName";
            lNameColumn.DefaultValue = "LName";
            namesTable.Columns.Add(lNameColumn);

            // Set the primary key for the table.
            DataColumn[] keys = new DataColumn[1];
            keys[0] = idColumn;
            namesTable.PrimaryKey = keys;

            // Return the new DataTable.
            return namesTable;
        }

        [Fact]
        public void SetColumnErrorTest()
        {
            string errorString = "Some error!";

            // Set the error for the specified column of the row, then verify it is
            // readable both individually and via GetColumnsInError.
            _row.SetColumnError(1, errorString);
            GetColumnErrorTest();
            GetAllErrorsTest();
        }

        private void GetColumnErrorTest()
        {
            Assert.Equal("Some error!", _row.GetColumnError(1));
        }

        private void GetAllErrorsTest()
        {
            DataColumn[] colArr;

            if (_row.HasErrors)
            {
                colArr = _row.GetColumnsInError();
                for (int i = 0; i < colArr.Length; i++)
                {
                    Assert.Equal(_table.Columns[1], colArr[i]);
                }
                _row.ClearErrors();
            }
        }

        [Fact]
        public void DeleteRowTest()
        {
            DataRow newRow;

            for (int i = 1; i <= 2; i++)
            {
                newRow = _table.NewRow();
                newRow["FName"] = "Name " + i;
                newRow["LName"] = " Last Name" + i;
                _table.Rows.Add(newRow);
            }
            _table.AcceptChanges();

            int cnt = 1;
            for (int i = 1; i < _table.Rows.Count; i++)
            {
                DataRow r = _table.Rows[i];
                Assert.Equal("Name " + cnt, r["fName"]);
                cnt++;
            }

            // Delete two rows; they stay in the collection as Deleted until AcceptChanges.
            DataRowCollection rc = _table.Rows;
            rc[0].Delete();
            rc[2].Delete();

            Assert.Equal("Deleted", rc[0].RowState.ToString());
            Assert.Equal("Deleted", rc[2].RowState.ToString());

            // Accept changes: the deleted rows are removed for good.
            _table.AcceptChanges();
            Assert.Equal("Name 1", (_table.Rows[0])[1]);
            try
            {
                object o = rc[2];
                Assert.False(true);
            }
            catch (Exception)
            {
                // Do not assert on the exception message; it may be localized.
                //Assert.Equal ("#A08", "There is no row at position 2.");
            }
        }

        [Fact]
        public void ParentRowTest()
        {
            // Clear all existing values from table.
            for (int i = 0; i < _table.Rows.Count; i++)
            {
                _table.Rows[i].Delete();
            }
            _table.AcceptChanges();
            _row = _table.NewRow();
            _row["FName"] = "My FName";
            _row["Id"] = 0;
            _table.Rows.Add(_row);

            DataTable tableC = new DataTable("Child");
            DataColumn colC;
            DataRow rowC;

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.Int32");
            colC.ColumnName = "Id";
            colC.AutoIncrement = true;
            tableC.Columns.Add(colC);

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.String");
            colC.ColumnName = "Name";
            tableC.Columns.Add(colC);

            rowC = tableC.NewRow();
            rowC["Name"] = "My FName";
            tableC.Rows.Add(rowC);

            var ds = new DataSet();
            ds.Tables.Add(_table);
            ds.Tables.Add(tableC);

            // Relation that also creates constraints.
            DataRelation dr = new DataRelation("PO", _table.Columns["Id"], tableC.Columns["Id"]);
            ds.Relations.Add(dr);

            rowC.SetParentRow(_table.Rows[0], dr);
            Assert.Equal(_table.Rows[0], (tableC.Rows[0]).GetParentRow(dr));
            Assert.Equal(tableC.Rows[0], (_table.Rows[0]).GetChildRows(dr)[0]);

            // Same behavior when the relation does not create constraints.
            ds.Relations.Clear();
            dr = new DataRelation("PO", _table.Columns["Id"], tableC.Columns["Id"], false);
            ds.Relations.Add(dr);
            rowC.SetParentRow(_table.Rows[0], dr);
            Assert.Equal(_table.Rows[0], (tableC.Rows[0]).GetParentRow(dr));
            Assert.Equal(tableC.Rows[0], (_table.Rows[0]).GetChildRows(dr)[0]);

            // SetParentRow without an explicit relation uses the table's ParentRelations.
            ds.Relations.Clear();
            dr = new DataRelation("PO", _table.Columns["Id"], tableC.Columns["Id"], false);
            tableC.ParentRelations.Add(dr);
            rowC.SetParentRow(_table.Rows[0]);
            Assert.Equal(_table.Rows[0], (tableC.Rows[0]).GetParentRow(dr));
            Assert.Equal(tableC.Rows[0], (_table.Rows[0]).GetChildRows(dr)[0]);
        }

        [Fact]
        public void ParentRowTest2()
        {
            var ds = new DataSet();
            DataTable tableP = ds.Tables.Add("Parent");
            DataTable tableC = ds.Tables.Add("Child");
            DataColumn colC;
            DataRow rowC;

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.Int32");
            colC.ColumnName = "Id";
            colC.AutoIncrement = true;
            tableP.Columns.Add(colC);

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.Int32");
            colC.ColumnName = "Id";
            tableC.Columns.Add(colC);

            _row = tableP.Rows.Add(new object[0]);
            rowC = tableC.NewRow();

            ds.EnforceConstraints = false;
            DataRelation dr = new DataRelation("PO", tableP.Columns["Id"], tableC.Columns["Id"]);
            ds.Relations.Add(dr);

            rowC.SetParentRow(_row, dr);
            DataRow[] rows = rowC.GetParentRows(dr);
            Assert.Equal(1, rows.Length);
            Assert.Equal(tableP.Rows[0], rows[0]);

            // Asking a parent row for its parents through a child relation must fail.
            try
            {
                rows = _row.GetParentRows(dr);
            }
            catch (InvalidConstraintException)
            {
                // Test done.
                return;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
        }

        [Fact]
        public void ChildRowTest()
        {
            // Clear all existing values from table.
            for (int i = 0; i < _table.Rows.Count; i++)
            {
                _table.Rows[i].Delete();
            }
            _table.AcceptChanges();
            _row = _table.NewRow();
            _row["FName"] = "My FName";
            _row["Id"] = 0;
            _table.Rows.Add(_row);

            DataTable tableC = new DataTable("Child");
            DataColumn colC;
            DataRow rowC;

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.Int32");
            colC.ColumnName = "Id";
            colC.AutoIncrement = true;
            tableC.Columns.Add(colC);

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.String");
            colC.ColumnName = "Name";
            tableC.Columns.Add(colC);

            rowC = tableC.NewRow();
            rowC["Name"] = "My FName";
            tableC.Rows.Add(rowC);

            var ds = new DataSet();
            ds.Tables.Add(_table);
            ds.Tables.Add(tableC);

            DataRelation dr = new DataRelation("PO", _table.Columns["Id"], tableC.Columns["Id"]);
            ds.Relations.Add(dr);

            rowC.SetParentRow(_table.Rows[0], dr);

            DataRow[] rows = (_table.Rows[0]).GetChildRows(dr);
            Assert.Equal(1, rows.Length);
            Assert.Equal(tableC.Rows[0], rows[0]);
        }

        [Fact]
        public void ChildRowTest2()
        {
            var ds = new DataSet();
            DataTable tableP = ds.Tables.Add("Parent");
            DataTable tableC = ds.Tables.Add("Child");
            DataColumn colC;
            DataRow rowC;

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.Int32");
            colC.ColumnName = "Id";
            colC.AutoIncrement = true;
            tableP.Columns.Add(colC);

            colC = new DataColumn();
            colC.DataType = Type.GetType("System.Int32");
            colC.ColumnName = "Id";
            tableC.Columns.Add(colC);

            _row = tableP.NewRow();
            rowC = tableC.Rows.Add(new object[0]);

            ds.EnforceConstraints = false;
            DataRelation dr = new DataRelation("PO", tableP.Columns["Id"], tableC.Columns["Id"]);
            ds.Relations.Add(dr);

            rowC.SetParentRow(_row, dr);
            DataRow[] rows = _row.GetChildRows(dr);
            Assert.Equal(1, rows.Length);
            Assert.Equal(tableC.Rows[0], rows[0]);

            // Asking a child row for its children through a parent relation must fail.
            try
            {
                rows = rowC.GetChildRows(dr);
            }
            catch (InvalidConstraintException)
            {
                // Test done.
                return;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
        }

        // Asserts that the item at (row, column) is DBNull.Value.
        private void DBNullTest(string message, DataTable dt, int row, int column)
        {
            object val = dt.Rows[row].ItemArray[column];
            Assert.Equal(DBNull.Value, val);
        }

        // Asserts that the item at (row, column) is null.
        private void NullTest(string message, DataTable dt, int row, int column)
        {
            object val = dt.Rows[row].ItemArray[column];
            Assert.Equal(null, val);
        }

        // Asserts that the item at (row, column) equals the given value.
        private void ValueTest(string message, DataTable dt, int row, int column, object value)
        {
            object val = dt.Rows[row].ItemArray[column];
            Assert.Equal(value, val);
        }

        // Tests setting ItemArray with null, DBNull.Value, and a short array.
        [Fact]
        public void NullInItemArray()
        {
            string zero = "zero";
            string one = "one";
            string two = "two";

            DataTable table = new DataTable();
            table.Columns.Add(new DataColumn(zero, typeof(string)));
            table.Columns.Add(new DataColumn(one, typeof(string)));
            table.Columns.Add(new DataColumn(two, typeof(string)));

            object[] obj = new object[3];

            // -- normal: row 0 = "zero", "one", "two"
            obj[0] = zero;
            obj[1] = one;
            obj[2] = two;
            DataRow row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- null: row 1 = "zero", DBNull.Value, "two"
            obj[1] = null;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- DBNull.Value: row 2 = "zero", DBNull.Value, "two"
            obj[1] = DBNull.Value;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- array smaller than column count: row 3 = "abc", "def", DBNull.Value
            string abc = "abc";
            string def = "def";
            obj = new object[2];
            obj[0] = abc;
            obj[1] = def;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            ValueTest("DR5: normal value test", table, 0, 0, zero);
            ValueTest("DR6: normal value test", table, 0, 1, one);
            ValueTest("DR7: normal value test", table, 0, 2, two);

            ValueTest("DR8: null value test", table, 1, 0, zero);
            ValueTest("DR9: null value test", table, 1, 1, DBNull.Value);
            ValueTest("DR10: null value test", table, 1, 2, two);

            ValueTest("DR11: DBNull.Value value test", table, 2, 0, zero);
            ValueTest("DR12: DBNull.Value value test", table, 2, 1, DBNull.Value);
            ValueTest("DR13: DBNull.Value value test", table, 2, 2, two);

            ValueTest("DR14: array smaller value test", table, 3, 0, abc);
            ValueTest("DR15: array smaller value test", table, 3, 1, def);
            ValueTest("DR16: array smaller value test", table, 3, 2, DBNull.Value);
        }

        // Tests that DefaultValue is applied when ItemArray supplies null or omits a column.
        [Fact]
        public void DefaultValueInItemArray()
        {
            string zero = "zero";

            DataTable table = new DataTable();
            table.Columns.Add(new DataColumn("zero", typeof(string)));

            DataColumn column = new DataColumn("num", typeof(int));
            column.DefaultValue = 15;
            table.Columns.Add(column);

            object[] obj = new object[2];

            // -- normal: row 0 = "zero", 8
            obj[0] = "zero";
            obj[1] = 8;
            DataRow row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- null: row 1 = "zero", 15 (DefaultValue fills the null slot)
            obj[1] = null;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- DBNull.Value: row 2 = "zero", DBNull.Value (explicit DBNull wins over the default)
            obj[1] = DBNull.Value;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- array with an unset trailing slot: row 3 = "abc", 15
            string abc = "abc";
            obj = new object[2];
            obj[0] = abc;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            ValueTest("DR20: normal value test", table, 0, 0, zero);
            ValueTest("DR21: normal value test", table, 0, 1, 8);

            ValueTest("DR22: null value test", table, 1, 0, zero);
            ValueTest("DR23: null value test", table, 1, 1, 15);

            ValueTest("DR24: DBNull.Value value test", table, 2, 0, zero);
            DBNullTest("DR25: DBNull.Value value test", table, 2, 1);

            ValueTest("DR26: array smaller value test", table, 3, 0, abc);
            ValueTest("DR27: array smaller value test", table, 3, 1, 15);
        }

        // Tests AutoIncrement behavior when setting ItemArray: explicit values seed the
        // counter, null slots take the next value, DBNull stays DBNull but still advances it.
        [Fact]
        public void AutoIncrementInItemArray()
        {
            string zero = "zero";

            DataTable table = new DataTable();
            table.Columns.Add(new DataColumn(zero, typeof(string)));

            DataColumn column = new DataColumn("num", typeof(int));
            column.AutoIncrement = true;
            table.Columns.Add(column);

            object[] obj = new object[2];

            // -- normal: row 0 = "zero", 8 (seeds the auto-increment counter)
            obj[0] = "zero";
            obj[1] = 8;
            DataRow row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- null 1: row 1 = "zero", 9
            obj[1] = null;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- null 2: row 2 = "zero", 10
            obj[1] = null;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- null 3: row 3 = "zero", 11
            obj[1] = null;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- DBNull.Value: row 4 = "zero", DBNull.Value
            // (the AutoIncrement value is still consumed internally)
            obj[1] = DBNull.Value;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- null 4: row 5 = "zero", 13
            obj[1] = null;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            // -- array with an unset trailing slot: row 6 = "abc", 14
            string abc = "abc";
            obj = new object[2];
            obj[0] = abc;
            row = table.NewRow();
            try
            {
                row.ItemArray = obj;
            }
            catch (Exception)
            {
                Assert.False(true);
            }
            table.Rows.Add(row);

            ValueTest("DR34: normal value test", table, 0, 0, zero);
            ValueTest("DR35: normal value test", table, 0, 1, 8);

            ValueTest("DR36: null value test", table, 1, 0, zero);
            ValueTest("DR37: null value test", table, 1, 1, 9);

            ValueTest("DR38: null value test", table, 2, 0, zero);
            ValueTest("DR39: null value test", table, 2, 1, 10);

            ValueTest("DR40: null value test", table, 3, 0, zero);
            ValueTest("DR41: null value test", table, 3, 1, 11);

            ValueTest("DR42: DBNull.Value value test", table, 4, 0, zero);
            ValueTest("DR43: DBNull.Value value test", table, 4, 1, DBNull.Value);

            ValueTest("DR44: null value test", table, 5, 0, zero);
            ValueTest("DR45: null value test", table, 5, 1, 13);

            ValueTest("DR46: array smaller value test", table, 6, 0, abc);
            ValueTest("DR47: array smaller value test", table, 6, 1, 14);
        }

        [Fact]
        public void AutoIncrementColumnIntegrity()
        {
            // Adding an AutoIncrement column after rows already exist shouldn't raise an
            // index-out-of-range exception because of internal item-array size mismatch.
            DataTable dt = new DataTable();
            dt.Columns.Add("foo");
            dt.Rows.Add(new object[] { "value" });
            DataColumn col = new DataColumn("bar");
            col.AutoIncrement = true;
            dt.Columns.Add(col);
            dt.Rows[0][0] = "test";
        }

        [Fact]
        public void EnforceConstraint()
        {
            int id = 100;

            // Setup: parent/child tables with a unique key, FK constraint and relation.
            var ds = new DataSet();
            DataTable parent = ds.Tables.Add("parent");
            parent.Columns.Add("id", typeof(int));

            DataTable child = ds.Tables.Add("child");
            child.Columns.Add("idref", typeof(int));

            parent.Constraints.Add(new UniqueConstraint("uniqueId", new DataColumn[] { parent.Columns["id"] }, true));

            ForeignKeyConstraint fkc = new ForeignKeyConstraint("ParentChildConstraint", new DataColumn[] { parent.Columns["id"] }, new DataColumn[] { child.Columns["idref"] });
            child.Constraints.Add(fkc);

            DataRelation relateParentChild = new DataRelation("relateParentChild", new DataColumn[] { parent.Columns["id"] }, new DataColumn[] { child.Columns["idref"] }, false);
            ds.Relations.Add(relateParentChild);

            // With constraints disabled, parent lookup must still work.
            ds.EnforceConstraints = false;
            DataRow parentRow = parent.Rows.Add(new object[] { id });
            DataRow childRow = child.Rows.Add(new object[] { id });
            if (parentRow == childRow.GetParentRow(relateParentChild))
            {
                foreach (DataColumn dc in parent.Columns)
                    Assert.Equal(100, parentRow[dc]);
            }
        }

        [Fact]
        public void DetachedRowItemException()
        {
            // Indexing into a detached row must throw RowNotInTableException.
            Assert.Throws<RowNotInTableException>(() =>
            {
                DataTable dt = new DataTable("table");
                dt.Columns.Add("col");
                dt.Rows.Add((new object[] { "val" }));

                DataRow dr = dt.NewRow();
                Assert.Equal(DataRowState.Detached, dr.RowState);
                dr.CancelEdit();
                Assert.Equal(DataRowState.Detached, dr.RowState);

                object o = dr["col"];
            });
        }

        [Fact]
        public void SetParentRow_Null()
        {
            // Passing null must be accepted (clears any parent).
            var ds = new DataSet();

            DataTable child = ds.Tables.Add("child");
            child.Columns.Add("column1");

            DataRow r1 = child.NewRow();

            r1.SetParentRow(null);
        }

        [Fact]
        public void SetParentRow_DataInheritance()
        {
            var ds = new DataSet();

            var child = ds.Tables.Add("child");
            var childColumn1 = child.Columns.Add("column1");
            var childColumn2 = child.Columns.Add("column2");

            var parent1 = ds.Tables.Add("parent1");
            var parent1Column1 = parent1.Columns.Add("column1");
            var parent1Column2 = parent1.Columns.Add("column2");

            var parent2 = ds.Tables.Add("parent2");
            var parent2Column1 = parent2.Columns.Add("column1");
            var parent2Column2 = parent2.Columns.Add("column2");

            ds.Relations.Add("parent1-child", parent1Column1, childColumn1);
            ds.Relations.Add("parent2-child", parent2Column2, childColumn2);

            var childRow1 = child.NewRow();
            var parent1Row = parent1.NewRow();
            var parent2Row = parent2.NewRow();

            parent1Row[parent1Column1] = "p1c1";
            parent1Row[parent1Column2] = "p1c2";
            parent2Row[parent2Column1] = "p2c1";
            parent2Row[parent2Column2] = "p2c2";

            child.Rows.Add(childRow1);
            parent1.Rows.Add(parent1Row);
            parent2.Rows.Add(parent2Row);

            // SetParentRow copies only the key column of the matching relation.
            childRow1.SetParentRow(parent1Row);
            Assert.Equal("p1c1", childRow1[childColumn1]);
            Assert.Equal(DBNull.Value, childRow1[childColumn2]);

            // A second parent leaves the first relation's key intact.
            childRow1.SetParentRow(parent2Row);
            Assert.Equal("p1c1", childRow1[childColumn1]);
            Assert.Equal("p2c2", childRow1[childColumn2]);

            // null clears every parent key.
            childRow1.SetParentRow(null);
            Assert.Equal(DBNull.Value, childRow1[childColumn1]);
            Assert.Equal(DBNull.Value, childRow1[childColumn2]);

            childRow1.SetParentRow(parent2Row);
            Assert.Equal(DBNull.Value, childRow1[childColumn1]);
            Assert.Equal("p2c2", childRow1[childColumn2]);
        }

        [Fact]
        public void SetParentRow_with_Relation()
        {
            var ds = new DataSet();

            var child = ds.Tables.Add("child");
            var childColumn1 = child.Columns.Add("column1");
            var childColumn2 = child.Columns.Add("column2");

            var parent1 = ds.Tables.Add("parent1");
            var parent1Column1 = parent1.Columns.Add("column1");
            var parent1Column2 = parent1.Columns.Add("column2");

            var parent2 = ds.Tables.Add("parent2");
            var parent2Column1 = parent2.Columns.Add("column1");
            var parent2Column2 = parent2.Columns.Add("column2");

            var relation1 = ds.Relations.Add("parent1-child", parent1Column1, childColumn1);
            var relation2 = ds.Relations.Add("parent2-child", parent2Column2, childColumn2);

            var childRow1 = child.NewRow();
            var parent1Row = parent1.NewRow();
            var parent2Row = parent2.NewRow();

            parent1Row[parent1Column1] = "p1c1";
            parent1Row[parent1Column2] = "p1c2";
            parent2Row[parent2Column1] = "p2c1";
            parent2Row[parent2Column2] = "p2c2";

            child.Rows.Add(childRow1);
            parent1.Rows.Add(parent1Row);
            parent2.Rows.Add(parent2Row);

            childRow1.SetParentRow(null, relation2);
            Assert.Equal(DBNull.Value, childRow1[childColumn1]);
            Assert.Equal(DBNull.Value, childRow1[childColumn2]);

            // A parent row from a table not participating in the relation must be rejected.
            try
            {
                childRow1.SetParentRow(parent1Row, relation2);
                Assert.False(true);
            }
            catch (InvalidConstraintException)
            {
            }
            Assert.Equal(DBNull.Value, childRow1[childColumn1]);
            Assert.Equal(DBNull.Value, childRow1[childColumn2]);

            childRow1.SetParentRow(parent1Row, relation1);
            Assert.Equal("p1c1", childRow1[childColumn1]);
            Assert.Equal(DBNull.Value, childRow1[childColumn2]);

            // Clearing one relation does not disturb the other.
            childRow1.SetParentRow(null, relation2);
            Assert.Equal("p1c1", childRow1[childColumn1]);
            Assert.Equal(DBNull.Value, childRow1[childColumn2]);

            childRow1.SetParentRow(null, relation1);
            Assert.Equal(DBNull.Value, childRow1[childColumn1]);
            Assert.Equal(DBNull.Value, childRow1[childColumn2]);
        }

        [Fact]
        public void SetParent_missing_ParentRow()
        {
            var ds = new DataSet();

            var child = ds.Tables.Add("child");
            var childColumn1 = child.Columns.Add("column1");
            var childColumn2 = child.Columns.Add("column2");

            var parent1 = ds.Tables.Add("parent1");
            var parentColumn1 = parent1.Columns.Add("column1");

            var parent2 = ds.Tables.Add("parent2");
            var parentColumn2 = parent2.Columns.Add("column2");

            ds.Relations.Add("parent1-child", parentColumn1, childColumn1);
            ds.Relations.Add("parent2-child", parentColumn2, childColumn2);

            var childRow = child.NewRow();
            var parentRow = parent2.NewRow();

            parentRow[parentColumn2] = "value";

            child.Rows.Add(childRow);
            parent2.Rows.Add(parentRow);

            // Only the relation whose parent table matches gets its key copied.
            childRow.SetParentRow(parentRow);
            Assert.Equal(DBNull.Value, childRow[childColumn1]);
            Assert.Equal("value", childRow[childColumn2]);
        }
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using NodaTime;
using QuantConnect.Util;

namespace QuantConnect.Data.UniverseSelection
{
    /// <summary>
    /// ETF constituent data
    /// </summary>
    public class ETFConstituentData : BaseData
    {
        /// <summary>
        /// Time of the previous ETF constituent data update
        /// </summary>
        public DateTime? LastUpdate { get; set; }

        /// <summary>
        /// The percentage of the ETF allocated to this constituent
        /// </summary>
        public decimal? Weight { get; set; }

        /// <summary>
        /// Number of shares held in the ETF
        /// </summary>
        public decimal? SharesHeld { get; set; }

        /// <summary>
        /// Market value of the current asset held in U.S. dollars
        /// </summary>
        public decimal? MarketValue { get; set; }

        /// <summary>
        /// Period of the data
        /// </summary>
        public TimeSpan Period { get; set; } = TimeSpan.FromDays(1);

        /// <summary>
        /// Time that the data became available to use
        /// </summary>
        public override DateTime EndTime
        {
            get { return Time + Period; }
            set { Time = value - Period; }
        }

        /// <summary>
        /// Return the URL string source of the file. This will be converted to a stream
        /// </summary>
        /// <param name="config">Configuration object</param>
        /// <param name="date">Date of this source file</param>
        /// <param name="isLiveMode">true if we're in live mode, false for backtesting mode</param>
        /// <returns>String URL of source file.</returns>
        public override SubscriptionDataSource GetSource(SubscriptionDataConfig config, DateTime date, bool isLiveMode)
        {
            return new SubscriptionDataSource(
                Path.Combine(
                    Globals.DataFolder,
                    config.SecurityType.SecurityTypeToLower(),
                    config.Market,
                    "universes",
                    "etf",
                    config.Symbol.Underlying.Value.ToLowerInvariant(),
                    $"{date:yyyyMMdd}.csv"),
                SubscriptionTransportMedium.LocalFile,
                FileFormat.Csv);
        }

        /// <summary>
        /// Reader converts each line of the data source into BaseData objects. Each data type creates its own factory method, and returns a new instance of the object
        /// each time it is called.
        /// </summary>
        /// <param name="config">Subscription data config setup object</param>
        /// <param name="line">Line of the source document</param>
        /// <param name="date">Date of the requested data</param>
        /// <param name="isLiveMode">true if we're in live mode, false for backtesting mode</param>
        /// <returns>Instance of the T:BaseData object generated by this line of the CSV</returns>
        public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, bool isLiveMode)
        {
            if (string.IsNullOrEmpty(line))
            {
                return null;
            }

            // CSV layout: ticker, security identifier, last-update date (yyyyMMdd),
            // weight, shares held, market value. Numeric fields may be empty -> null.
            var split = line.Split(',');

            var symbol = new Symbol(SecurityIdentifier.Parse(split[1]), split[0]);
            var lastUpdateDate = Parse.TryParseExact(split[2], "yyyyMMdd", DateTimeStyles.None, out var lastUpdateDateParsed)
                ? lastUpdateDateParsed
                : (DateTime?)null;
            var weighting = split[3].IsNullOrEmpty()
                ? (decimal?)null
                : Parse.Decimal(split[3], NumberStyles.Any);
            var sharesHeld = split[4].IsNullOrEmpty()
                ? (decimal?)null
                : Parse.Decimal(split[4], NumberStyles.Any);
            var marketValue = split[5].IsNullOrEmpty()
                ? (decimal?)null
                : Parse.Decimal(split[5], NumberStyles.Any);

            return new ETFConstituentData
            {
                LastUpdate = lastUpdateDate,
                Weight = weighting,
                SharesHeld = sharesHeld,
                MarketValue = marketValue,
                Symbol = symbol,
                Time = date
            };
        }

        /// <summary>
        /// Indicates if there is support for mapping
        /// </summary>
        /// <returns>True indicates mapping should be used</returns>
        public override bool RequiresMapping()
        {
            return true;
        }

        /// <summary>
        /// Creates a copy of the instance
        /// </summary>
        /// <returns>Clone of the instance</returns>
        public override BaseData Clone()
        {
            return new ETFConstituentData
            {
                LastUpdate = LastUpdate,
                Weight = Weight,
                SharesHeld = SharesHeld,
                MarketValue = MarketValue,
                Symbol = Symbol,
                Time = Time,
                // FIX: Period was previously not copied, so a clone reverted to the
                // default 1-day period and reported a different EndTime than its source.
                Period = Period
            };
        }

        /// <summary>
        /// Indicates that the data set is expected to be sparse
        /// </summary>
        /// <remarks>Relies on the <see cref="Symbol"/> property value</remarks>
        /// <remarks>This is a method and not a property so that python
        /// custom data types can override it</remarks>
        /// <returns>True if the data set represented by this type is expected to be sparse</returns>
        public override bool IsSparseData()
        {
            return true;
        }

        /// <summary>
        /// Gets the default resolution for this data and security type
        /// </summary>
        /// <remarks>
        /// This is a method and not a property so that python
        /// custom data types can override it.
        /// </remarks>
        public override Resolution DefaultResolution()
        {
            return Resolution.Daily;
        }

        /// <summary>
        /// Gets the supported resolution for this data and security type
        /// </summary>
        /// <remarks>Relies on the <see cref="Symbol"/> property value</remarks>
        /// <remarks>This is a method and not a property so that python
        /// custom data types can override it</remarks>
        public override List<Resolution> SupportedResolutions()
        {
            return DailyResolution;
        }

        /// <summary>
        /// Specifies the data time zone for this data type. This is useful for custom data types
        /// </summary>
        /// <remarks>Will throw <see cref="InvalidOperationException"/> for security types
        /// other than <see cref="SecurityType.Base"/></remarks>
        /// <returns>The <see cref="DateTimeZone"/> of this data type</returns>
        public override DateTimeZone DataTimeZone()
        {
            return TimeZones.Utc;
        }
    }
}
using System;

namespace Obscur.Core.Cryptography.Support.Math.EllipticCurve.Multiplier
{
    /// <summary>
    /// Helper routines for computing (windowed) non-adjacent-form (NAF/WNAF) and joint
    /// sparse form (JSF) digit representations of scalars, and for building and caching
    /// the per-point precomputation tables that WNAF scalar multiplication uses.
    /// </summary>
    public abstract class WNafUtilities
    {
        // Key under which WNAF precomputation data is stored/retrieved on a curve.
        public static readonly string PRECOMP_NAME = "bc_wnaf";

        // Monotonically increasing bit-length thresholds used by GetWindowSize to pick
        // the window width for a scalar of a given size.
        private static readonly int[] DEFAULT_WINDOW_SIZE_CUTOFFS = new int[] { 13, 41, 121, 337, 897, 2305 };

        private static readonly byte[] EMPTY_BYTES = new byte[0];
        private static readonly int[] EMPTY_INTS = new int[0];

        /// <summary>
        /// Computes the NAF of <paramref name="k"/> in compact form: each entry packs a
        /// signed digit in the high 16 bits and the length of the preceding zero run in
        /// the low 16 bits. Requires k to be positive with bit length below 2^16.
        /// </summary>
        public static int[] GenerateCompactNaf(BigInteger k)
        {
            if ((k.BitLength >> 16) != 0)
                throw new ArgumentException("must have bitlength < 2^16", "k");
            if (k.SignValue == 0)
                return EMPTY_INTS;

            // NAF digits of k are the bit differences between 3k and k.
            BigInteger _3k = k.ShiftLeft(1).Add(k);

            int bits = _3k.BitLength;
            int[] naf = new int[bits >> 1];

            BigInteger diff = _3k.Xor(k);

            int highBit = bits - 1, length = 0, zeroes = 0;
            for (int i = 1; i < highBit; ++i)
            {
                if (!diff.TestBit(i))
                {
                    ++zeroes;
                    continue;
                }

                int digit = k.TestBit(i) ? -1 : 1;
                naf[length++] = (digit << 16) | zeroes;
                // A nonzero NAF digit is always followed by a zero, so skip the next bit.
                zeroes = 1;
                ++i;
            }

            // The most significant digit is always +1.
            naf[length++] = (1 << 16) | zeroes;

            if (naf.Length > length)
            {
                naf = Trim(naf, length);
            }

            return naf;
        }

        /// <summary>
        /// Computes the width-<paramref name="width"/> WNAF of <paramref name="k"/> in the
        /// same packed (digit, zero-run) compact form as <see cref="GenerateCompactNaf"/>.
        /// </summary>
        public static int[] GenerateCompactWindowNaf(int width, BigInteger k)
        {
            if (width == 2)
            {
                return GenerateCompactNaf(k);
            }

            if (width < 2 || width > 16)
                throw new ArgumentException("must be in the range [2, 16]", "width");
            if ((k.BitLength >> 16) != 0)
                throw new ArgumentException("must have bitlength < 2^16", "k");
            if (k.SignValue == 0)
                return EMPTY_INTS;

            int[] wnaf = new int[k.BitLength / width + 1];

            // 2^width and a mask and sign bit set accordingly
            int pow2 = 1 << width;
            int mask = pow2 - 1;
            int sign = pow2 >> 1;

            bool carry = false;
            int length = 0, pos = 0;

            while (pos <= k.BitLength)
            {
                if (k.TestBit(pos) == carry)
                {
                    ++pos;
                    continue;
                }

                k = k.ShiftRight(pos);

                int digit = k.IntValue & mask;
                if (carry)
                {
                    ++digit;
                }

                // Digits above 2^(width-1) are mapped to negatives, carrying into the next window.
                carry = (digit & sign) != 0;
                if (carry)
                {
                    digit -= pow2;
                }

                int zeroes = length > 0 ? pos - 1 : pos;
                wnaf[length++] = (digit << 16) | zeroes;
                pos = width;
            }

            // Reduce the WNAF array to its actual length
            if (wnaf.Length > length)
            {
                wnaf = Trim(wnaf, length);
            }

            return wnaf;
        }

        /// <summary>
        /// Computes the joint sparse form of the pair (<paramref name="g"/>, <paramref name="h"/>);
        /// each output byte packs g's digit in the high nibble and h's digit in the low nibble.
        /// </summary>
        public static byte[] GenerateJsf(BigInteger g, BigInteger h)
        {
            int digits = System.Math.Max(g.BitLength, h.BitLength) + 1;
            byte[] jsf = new byte[digits];

            BigInteger k0 = g, k1 = h;
            int j = 0, d0 = 0, d1 = 0;

            // Process 30 bits at a time through IntValue, tracking carries d0/d1.
            int offset = 0;
            while ((d0 | d1) != 0 || k0.BitLength > offset || k1.BitLength > offset)
            {
                int n0 = ((int)((uint)k0.IntValue >> offset) + d0) & 7;
                int n1 = ((int)((uint)k1.IntValue >> offset) + d1) & 7;

                int u0 = n0 & 1;
                if (u0 != 0)
                {
                    u0 -= (n0 & 2);
                    if ((n0 + u0) == 4 && (n1 & 3) == 2)
                    {
                        u0 = -u0;
                    }
                }

                int u1 = n1 & 1;
                if (u1 != 0)
                {
                    u1 -= (n1 & 2);
                    if ((n1 + u1) == 4 && (n0 & 3) == 2)
                    {
                        u1 = -u1;
                    }
                }

                if ((d0 << 1) == 1 + u0)
                {
                    d0 ^= 1;
                }
                if ((d1 << 1) == 1 + u1)
                {
                    d1 ^= 1;
                }

                if (++offset == 30)
                {
                    offset = 0;
                    k0 = k0.ShiftRight(30);
                    k1 = k1.ShiftRight(30);
                }

                jsf[j++] = (byte)((u0 << 4) | (u1 & 0xF));
            }

            // Reduce the JSF array to its actual length
            if (jsf.Length > j)
            {
                jsf = Trim(jsf, j);
            }

            return jsf;
        }

        /// <summary>
        /// Computes the NAF of <paramref name="k"/> as one signed digit (-1, 0, 1) per byte,
        /// least significant digit first.
        /// </summary>
        public static byte[] GenerateNaf(BigInteger k)
        {
            if (k.SignValue == 0)
                return EMPTY_BYTES;

            BigInteger _3k = k.ShiftLeft(1).Add(k);

            int digits = _3k.BitLength - 1;
            byte[] naf = new byte[digits];

            BigInteger diff = _3k.Xor(k);

            for (int i = 1; i < digits; ++i)
            {
                if (diff.TestBit(i))
                {
                    naf[i - 1] = (byte)(k.TestBit(i) ? -1 : 1);
                    // A nonzero digit is always followed by a zero digit.
                    ++i;
                }
            }

            naf[digits - 1] = 1;

            return naf;
        }

        /**
         * Computes the Window NAF (non-adjacent Form) of an integer.
         * @param width The width <code>w</code> of the Window NAF. The width is
         * defined as the minimal number <code>w</code>, such that for any
         * <code>w</code> consecutive digits in the resulting representation, at
         * most one is non-zero.
         * @param k The integer of which the Window NAF is computed.
         * @return The Window NAF of the given width, such that the following holds:
         * <code>k = &amp;sum;<sub>i=0</sub><sup>l-1</sup> k<sub>i</sub>2<sup>i</sup>
         * </code>, where the <code>k<sub>i</sub></code> denote the elements of the
         * returned <code>byte[]</code>.
         */
        public static byte[] GenerateWindowNaf(int width, BigInteger k)
        {
            if (width == 2)
            {
                return GenerateNaf(k);
            }

            if (width < 2 || width > 8)
                throw new ArgumentException("must be in the range [2, 8]", "width");
            if (k.SignValue == 0)
                return EMPTY_BYTES;

            byte[] wnaf = new byte[k.BitLength + 1];

            // 2^width and a mask and sign bit set accordingly
            int pow2 = 1 << width;
            int mask = pow2 - 1;
            int sign = pow2 >> 1;

            bool carry = false;
            int length = 0, pos = 0;

            while (pos <= k.BitLength)
            {
                if (k.TestBit(pos) == carry)
                {
                    ++pos;
                    continue;
                }

                k = k.ShiftRight(pos);

                int digit = k.IntValue & mask;
                if (carry)
                {
                    ++digit;
                }

                carry = (digit & sign) != 0;
                if (carry)
                {
                    digit -= pow2;
                }

                // Skip over the run of zero digits before writing the nonzero digit.
                length += (length > 0) ? pos - 1 : pos;
                wnaf[length++] = (byte)digit;
                pos = width;
            }

            // Reduce the WNAF array to its actual length
            if (wnaf.Length > length)
            {
                wnaf = Trim(wnaf, length);
            }

            return wnaf;
        }

        /// <summary>
        /// Returns the number of nonzero digits in the NAF of <paramref name="k"/>
        /// (the Hamming weight of 3k XOR k).
        /// </summary>
        public static int GetNafWeight(BigInteger k)
        {
            if (k.SignValue == 0)
                return 0;

            BigInteger _3k = k.ShiftLeft(1).Add(k);
            BigInteger diff = _3k.Xor(k);

            return diff.BitCount;
        }

        /// <summary>Fetches the WNAF precomputation info cached on the point's curve.</summary>
        public static WNafPreCompInfo GetWNafPreCompInfo(ECPoint p)
        {
            return GetWNafPreCompInfo(p.Curve.GetPreCompInfo(p, PRECOMP_NAME));
        }

        /// <summary>
        /// Returns the given info as WNafPreCompInfo, or a fresh empty instance when it is
        /// null or of a different precomputation type.
        /// </summary>
        public static WNafPreCompInfo GetWNafPreCompInfo(PreCompInfo preCompInfo)
        {
            if ((preCompInfo != null) && (preCompInfo is WNafPreCompInfo))
            {
                return (WNafPreCompInfo)preCompInfo;
            }

            return new WNafPreCompInfo();
        }

        /**
         * Determine window width to use for a scalar multiplication of the given size.
         *
         * @param bits the bit-length of the scalar to multiply by
         * @return the window size to use
         */
        public static int GetWindowSize(int bits)
        {
            return GetWindowSize(bits, DEFAULT_WINDOW_SIZE_CUTOFFS);
        }

        /**
         * Determine window width to use for a scalar multiplication of the given size.
         *
         * @param bits the bit-length of the scalar to multiply by
         * @param windowSizeCutoffs a monotonically increasing list of bit sizes at which to increment the window width
         * @return the window size to use
         */
        public static int GetWindowSize(int bits, int[] windowSizeCutoffs)
        {
            int w = 0;
            for (; w < windowSizeCutoffs.Length; ++w)
            {
                if (bits < windowSizeCutoffs[w])
                {
                    break;
                }
            }
            return w + 2;
        }

        /// <summary>
        /// Maps point <paramref name="p"/> through <paramref name="pointMap"/> and transfers
        /// p's WNAF precomputation table to the mapped point by mapping each table entry.
        /// Returns the mapped point with its precomputation cached on the curve.
        /// </summary>
        public static ECPoint MapPointWithPrecomp(ECPoint p, int width, bool includeNegated,
            ECPointMap pointMap)
        {
            ECCurve c = p.Curve;
            WNafPreCompInfo wnafPreCompP = Precompute(p, width, includeNegated);

            ECPoint q = pointMap.Map(p);
            WNafPreCompInfo wnafPreCompQ = GetWNafPreCompInfo(c.GetPreCompInfo(q, PRECOMP_NAME));

            ECPoint twiceP = wnafPreCompP.Twice;
            if (twiceP != null)
            {
                ECPoint twiceQ = pointMap.Map(twiceP);
                wnafPreCompQ.Twice = twiceQ;
            }

            ECPoint[] preCompP = wnafPreCompP.PreComp;
            ECPoint[] preCompQ = new ECPoint[preCompP.Length];
            for (int i = 0; i < preCompP.Length; ++i)
            {
                preCompQ[i] = pointMap.Map(preCompP[i]);
            }
            wnafPreCompQ.PreComp = preCompQ;

            if (includeNegated)
            {
                ECPoint[] preCompNegQ = new ECPoint[preCompQ.Length];
                for (int i = 0; i < preCompNegQ.Length; ++i)
                {
                    preCompNegQ[i] = preCompQ[i].Negate();
                }
                wnafPreCompQ.PreCompNeg = preCompNegQ;
            }

            c.SetPreCompInfo(q, PRECOMP_NAME, wnafPreCompQ);

            return q;
        }

        /// <summary>
        /// Ensures the WNAF precomputation table for <paramref name="p"/> contains the odd
        /// multiples 1p, 3p, ..., (2^(width-1)-1)p (and optionally their negations),
        /// extending any previously cached table, and stores the result on the curve.
        /// </summary>
        public static WNafPreCompInfo Precompute(ECPoint p, int width, bool includeNegated)
        {
            ECCurve c = p.Curve;
            WNafPreCompInfo wnafPreCompInfo = GetWNafPreCompInfo(c.GetPreCompInfo(p, PRECOMP_NAME));

            ECPoint[] preComp = wnafPreCompInfo.PreComp;
            if (preComp == null)
            {
                preComp = new ECPoint[] { p };
            }

            int preCompLen = preComp.Length;
            int reqPreCompLen = 1 << System.Math.Max(0, width - 2);

            if (preCompLen < reqPreCompLen)
            {
                preComp = ResizeTable(preComp, reqPreCompLen);
                if (reqPreCompLen == 2)
                {
                    preComp[1] = preComp[0].ThreeTimes();
                }
                else
                {
                    ECPoint twiceP = wnafPreCompInfo.Twice;
                    if (twiceP == null)
                    {
                        twiceP = preComp[0].Twice();
                        wnafPreCompInfo.Twice = twiceP;
                    }

                    for (int i = preCompLen; i < reqPreCompLen; i++)
                    {
                        /*
                         * Compute the new ECPoints for the precomputation array. The values 1, 3, 5, ...,
                         * 2^(width-1)-1 times p are computed
                         */
                        preComp[i] = twiceP.Add(preComp[i - 1]);
                    }
                }

                /*
                 * Having oft-used operands in affine form makes operations faster.
                 */
                c.NormalizeAll(preComp);
            }

            wnafPreCompInfo.PreComp = preComp;

            if (includeNegated)
            {
                ECPoint[] preCompNeg = wnafPreCompInfo.PreCompNeg;

                int pos;
                if (preCompNeg == null)
                {
                    pos = 0;
                    preCompNeg = new ECPoint[reqPreCompLen];
                }
                else
                {
                    pos = preCompNeg.Length;
                    if (pos < reqPreCompLen)
                    {
                        preCompNeg = ResizeTable(preCompNeg, reqPreCompLen);
                    }
                }

                // Only negate the entries that were not already present.
                while (pos < reqPreCompLen)
                {
                    preCompNeg[pos] = preComp[pos].Negate();
                    ++pos;
                }

                wnafPreCompInfo.PreCompNeg = preCompNeg;
            }

            c.SetPreCompInfo(p, PRECOMP_NAME, wnafPreCompInfo);

            return wnafPreCompInfo;
        }

        // Returns a copy of the first "length" elements of the array.
        private static byte[] Trim(byte[] a, int length)
        {
            byte[] result = new byte[length];
            Array.Copy(a, 0, result, 0, result.Length);
            return result;
        }

        // Returns a copy of the first "length" elements of the array.
        private static int[] Trim(int[] a, int length)
        {
            int[] result = new int[length];
            Array.Copy(a, 0, result, 0, result.Length);
            return result;
        }

        // Returns the array grown to "length", preserving existing entries.
        private static ECPoint[] ResizeTable(ECPoint[] a, int length)
        {
            ECPoint[] result = new ECPoint[length];
            Array.Copy(a, 0, result, 0, a.Length);
            return result;
        }
    }
}
//
// Copyright 2012 Christopher Deutsch
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using MonoTouch.Dialog;
using System.Drawing;
using MonoTouch.UIKit;
using MonoTouch.Foundation;
using ClanceysLib;

namespace MonoTouch.Dialog.PickerElement
{
    /// <summary>
    /// A MonoTouch.Dialog entry element backed by a UIComboBox picker: tapping the row
    /// shows the picker, the selected value is mirrored into the cell's detail label,
    /// and the table view is shrunk/restored while the picker is visible.
    /// </summary>
    public class PickerElement : EntryElement
    {
        protected UIComboBox ComboBox;
        // Reuse identifiers for cells without / with a value.
        static NSString skey = new NSString ("PickerElement");
        static NSString skeyvalue = new NSString ("PickerElementValue");
        public UITextAlignment Alignment = UITextAlignment.Center;
        // Detail label that displays the current value inside the cell.
        public UILabel entry;
        public UIColor SelectedBackgroundColor = UIColor.FromRGBA(0.02f, 0.55f, 0.96f, 1f);
        public UIColor SelectedTextColor = UIColor.White;
        public bool ShowDoneButton = false;
        public float? ValueWidth = null;
        public UIColor TextColor = UIColor.Black;
        public UIColor ValueTextColor = UIColor.Black;
        private UITableViewCell cell = null;
        static UIFont font = UIFont.BoldSystemFontOfSize (17);
        private bool doneButtonVisible = false;
        private bool pickerVisible = false;
        // NOTE(review): declared as an auto-property holding a delegate, not a C# event.
        public EventHandler ValueChanged {get;set;}
        public event NSAction Tapped;
        // Height subtracted from the table view while the picker is up (0 = not adjusted).
        float modifiedHeightOffset = 0;
        // Saved colors so the selected-state highlight can be undone; null = not highlighted.
        UIColor originalCellBackgroundColor = null;
        UIColor originalEntryBackgroundColor = null;
        UIColor originalCellTextColor = null;
        UIColor originalEntryTextColor = null;

        /// <summary>
        /// Creates the element and wires the combo box callbacks: value changes update
        /// Value/label, picker close restores the nav bar and table view, and picker
        /// fade-in shrinks the table and scrolls this row into view.
        /// </summary>
        public PickerElement (string caption, object[] Items , string DisplayMember, DialogViewController dvc) : base (caption, null, null)
        {
            this.Dvc = dvc;
            this.ComboBox = new UIComboBox(RectangleF.Empty);
            this.ComboBox.Items = Items;
            this.ComboBox.DisplayMember = DisplayMember;
            this.ComboBox.TextAlignment = UITextAlignment.Right;
            this.ComboBox.BorderStyle = UITextBorderStyle.None;

            this.ComboBox.PickerClosed += delegate {
                if (Dvc != null && doneButtonVisible) {
                    Dvc.NavigationItem.RightBarButtonItem = oldRightBtn;
                    doneButtonVisible = false;
                }
                RestoreTableView();
            };

            this.ComboBox.ValueChanged += delegate {
                Value = ComboBox.Text;
                RefreshValue();
                if (ValueChanged != null) {
                    ValueChanged(this, null);
                }
            };

            this.ComboBox.PickerFadeInDidFinish += delegate {
                if (modifiedHeightOffset == 0f && !ComboBox.IsHiding) {
                    // adjust size.
                    var ff = Dvc.TableView.Frame;
                    modifiedHeightOffset = 200f;
                    Dvc.TableView.Frame = new RectangleF(ff.X, ff.Y, ff.Width, ff.Height - modifiedHeightOffset);
                    Dvc.TableView.ScrollToRow (IndexPath, UITableViewScrollPosition.Middle, true);
                }
            };

            Value = ComboBox.Text;
        }

        /// <summary>Selects the item whose value matches and shows it as the text.</summary>
        public void SetSelectedValue(string Value)
        {
            ComboBox.Text = Value;
            ComboBox.SetSelectedValue(Value);
        }

        /// <summary>Selects the item at the given index in the combo box.</summary>
        public void SetSelectedIndex(int index)
        {
            ComboBox.SetSelectedIndex(index);
        }

        /// <summary>The currently selected item, as held by the combo box.</summary>
        public Object SelectedItem {
            get {
                return ComboBox.SelectedItem;
            }
        }

        /// <summary>
        /// Can be a collection of anyting. If you don't set the ValueMember or DisplayMember, it will use ToString() for the value and Title.
        /// </summary>
        public object [] Items {
            get{return ComboBox.Items;}
            set{ComboBox.Items = value;}
        }

        /// <summary>Member name used for the displayed text of each item.</summary>
        public string DisplayMember {
            get{return ComboBox.DisplayMember;}
            set {ComboBox.DisplayMember = value;}
        }

        /// <summary>Member name used for the value of each item.</summary>
        public string ValueMember {
            get{return ComboBox.ValueMember;}
            set {ComboBox.ValueMember = value;}
        }

        /// <summary>Width of the combo box control.</summary>
        public float Width {
            get{return ComboBox.Width;}
            set {ComboBox.Width = value;}
        }

        private DialogViewController Dvc;
        // Navigation bar button saved while the temporary Done button is shown.
        private UIBarButtonItem oldRightBtn;
        private UIBarButtonItem doneButton;
        private bool wiredStarted = false;
        public bool ShouldDeselect = true;

        /// <summary>Row tapped: fire Tapped, optionally deselect, then show the picker.</summary>
        public override void Selected (DialogViewController dvc, UITableView tableView, NSIndexPath path)
        {
            if (Tapped != null)
                Tapped ();
            if(ShouldDeselect)
                tableView.DeselectRow (path, true);
            ShowPicker();
        }

        /// <summary>Row deselected: hide the picker.</summary>
        public override void Deselected (DialogViewController dvc, UITableView tableView, NSIndexPath path)
        {
            base.Deselected (dvc, tableView, path);
            HidePicker();
        }

        /// <summary>
        /// Shows the picker: resigns other entry elements' first-responder status, shows
        /// an optional Done button in the nav bar, and highlights this cell.
        /// </summary>
        public void ShowPicker()
        {
            // get rid of keyboard if another element triggered it.
            Element root = Parent;
            while (root.Parent != null) {
                root = root.Parent;
            }
            ResignFirstResponders((RootElement)root);

            // MonoTouch.Dialog CUSTOM: Download custom MonoTouch.Dialog from here to enable hiding picker when other element is selected:
            // https://github.com/crdeutsch/MonoTouch.Dialog
            //if (EntryStarted != null) {
            //    EntryStarted(this, null);
            //}

            // wire up ability to hide picker when other elements are selected.
            if (!wiredStarted) {
                foreach(var sect in (root as RootElement)) {
                    foreach(var e in sect.Elements) {
                        var ee = e as EntryElement;
                        if (ee != null && ee != this) {
                            // MonoTouch.Dialog CUSTOM: Download custom MonoTouch.Dialog from here to enable hiding picker when other element is selected:
                            // https://github.com/crdeutsch/MonoTouch.Dialog
                            //((EntryElement)e).EntryStarted += delegate {
                            //    ComboBox.HidePicker();
                            //};
                        }
                    }
                }
                wiredStarted = true;
            }

            ComboBox.ShowPicker();

            if (Dvc != null && ShowDoneButton) {
                // Save the existing right bar button so it can be restored on close.
                if(Dvc.NavigationItem.RightBarButtonItem != doneButton)
                    oldRightBtn = Dvc.NavigationItem.RightBarButtonItem;
                if(doneButton == null)
                    doneButton = new UIBarButtonItem("Done",UIBarButtonItemStyle.Bordered, delegate{
                        ComboBox.HidePicker();
                        Dvc.NavigationItem.RightBarButtonItem = oldRightBtn;
                    });
                Dvc.NavigationItem.RightBarButtonItem = doneButton;
                doneButtonVisible = true;
            }

            // Highlight the cell; null saved color doubles as the "not highlighted" flag.
            if (originalCellBackgroundColor == null) {
                originalCellBackgroundColor = cell.BackgroundColor;
                cell.BackgroundColor = SelectedBackgroundColor;
                originalEntryBackgroundColor = entry.BackgroundColor;
                entry.BackgroundColor = SelectedBackgroundColor;
                originalCellTextColor = cell.TextLabel.TextColor;
                cell.TextLabel.TextColor = SelectedTextColor;
                originalEntryTextColor = entry.TextColor;
                entry.TextColor = SelectedTextColor;
            }
            pickerVisible = true;
        }

        /// <summary>Hides the picker and restores the table view/cell appearance.</summary>
        public void HidePicker(bool Animated = true)
        {
            ComboBox.HidePicker(Animated);
            RestoreTableView();
        }

        /// <summary>
        /// Undoes the ShowPicker side effects: restores cell colors and gives the table
        /// view its original height back.
        /// </summary>
        private void RestoreTableView()
        {
            // remove bg color
            if (originalCellBackgroundColor != null) {
                cell.BackgroundColor = originalCellBackgroundColor;
                originalCellBackgroundColor = null;
                entry.BackgroundColor = originalEntryBackgroundColor;
                originalEntryBackgroundColor = null;
                cell.TextLabel.TextColor = originalCellTextColor;
                originalCellTextColor = null;
                entry.TextColor = originalEntryTextColor;
                originalEntryTextColor = null;
            }
            if (modifiedHeightOffset > 0) {
                // adjust size.
                var ff = Dvc.TableView.Frame;
                Dvc.TableView.Frame = new RectangleF(ff.X, ff.Y, ff.Width, ff.Height + modifiedHeightOffset);
                modifiedHeightOffset = 0f;
            }
            // MonoTouch.Dialog CUSTOM: Download custom MonoTouch.Dialog from here to enable hiding picker when other element is selected:
            // https://github.com/crdeutsch/MonoTouch.Dialog
            //if (pickerVisible) {
            //    if (EntryEnded != null) {
            //        EntryEnded(this, null);
            //    }
            //}
            pickerVisible = false;
        }

        //
        // Computes the X position for the entry by aligning all the entries in the Section
        //
        SizeF ComputeEntryPosition (UITableView tv, UITableViewCell cell)
        {
            Section s = Parent as Section;
            // Cached alignment for the section; computed once from the widest caption.
            if (s.EntryAlignment.Width != 0)
                return s.EntryAlignment;

            // If all EntryElements have a null Caption, align UITextField with the Caption
            // offset of normal cells (at 10px).
            SizeF max = new SizeF (-15, tv.StringSize ("M", font).Height);
            foreach (var e in s.Elements){
                var ee = e as EntryElement;
                if (ee == null)
                    continue;

                if (ee.Caption != null) {
                    var size = tv.StringSize (ee.Caption, font);
                    if (size.Width > max.Width)
                        max = size;
                }
            }
            s.EntryAlignment = new SizeF (25 + Math.Min (max.Width, 160), max.Height);
            return s.EntryAlignment;
        }

        /// <summary>
        /// Builds/reuses the cell, positions the detail label, and re-applies the
        /// selected-state colors if the picker is currently showing.
        /// </summary>
        public override UITableViewCell GetCell (UITableView tv)
        {
            ComboBox.ViewForPicker = tv.Superview;
            cell = tv.DequeueReusableCell (Value == null ? skey : skeyvalue);
            if (cell == null){
                cell = new NoOverlapTableViewCell (UITableViewCellStyle.Value1, skey);
                cell.SelectionStyle = (Tapped != null) ? UITableViewCellSelectionStyle.Blue : UITableViewCellSelectionStyle.None;
            } else
                RemoveTag (cell, 1);

            cell.TextLabel.TextColor = TextColor;
            entry = cell.DetailTextLabel;
            entry.Text = Value ?? "";
            entry.Tag = 1;
            entry.TextAlignment = Alignment;
            entry.BackgroundColor = UIColor.Clear;
            entry.TextColor = ValueTextColor;
            entry.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleLeftMargin;

            SizeF size = ComputeEntryPosition (tv, cell);
            ((NoOverlapTableViewCell)cell).MaxEntryPosition = size;
            ((NoOverlapTableViewCell)cell).ContentViewBounds = cell.ContentView.Bounds;
            ((NoOverlapTableViewCell)cell).DetailTextLabelWidth = ValueWidth;

            if (originalEntryBackgroundColor != null) {
                // modify background color to stay consistant.
                cell.BackgroundColor = SelectedBackgroundColor;
                cell.TextLabel.TextColor = SelectedTextColor;
                entry.BackgroundColor = SelectedBackgroundColor;
                entry.TextColor = SelectedTextColor;
            }

            cell.TextLabel.Text = Caption;
            cell.ContentView.AddSubview (entry);
            return cell;
        }

        /// <summary>Pushes the current Value into the detail label, if the cell exists.</summary>
        public void RefreshValue()
        {
            if (entry != null) {
                entry.Text = Value;
            }
        }

        public override string Summary ()
        {
            return Caption;
        }

        public override bool Matches (string text)
        {
            return (Value != null ? Value.IndexOf (text, StringComparison.CurrentCultureIgnoreCase) != -1: false) || base.Matches (text);
        }

        // Dismisses the keyboard of every other entry element under the given root.
        private void ResignFirstResponders(RootElement root)
        {
            foreach(var sect in root) {
                foreach(var e in sect.Elements) {
                    var ee = e as EntryElement;
                    if (ee != null && ee != this) {
                        ee.ResignFirstResponder(false);
                    }
                }
            }
        }

        // MonoTouch.Dialog CUSTOM: Download custom MonoTouch.Dialog from here to enable support for "next" button being clicked.
        // https://github.com/crdeutsch/MonoTouch.Dialog
        /*
        bool becomeResponder;

        /// <summary>
        /// Makes this cell the first responder (get the focus)
        /// </summary>
        /// <param name="animated">
        /// Whether scrolling to the location of this cell should be animated
        /// </param>
        public override void BecomeFirstResponder (bool animated)
        {
            becomeResponder = true;
            var tv = GetContainerTableView ();
            if (tv == null)
                return;
            tv.ScrollToRow (IndexPath, UITableViewScrollPosition.Middle, animated);
            if (entry != null){
                ShowPicker();
                becomeResponder = false;
            }
        }

        public override void ResignFirstResponder (bool animated)
        {
            becomeResponder = false;
            var tv = GetContainerTableView ();
            if (tv == null)
                return;
            tv.ScrollToRow (IndexPath, UITableViewScrollPosition.Middle, animated);
            if (entry != null)
                HidePicker();
        }
        */
    }
}
//-----------------------------------------------------------------------------
// <copyright file="Enlistment.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation.  All rights reserved.
// </copyright>
//-----------------------------------------------------------------------------

namespace System.Transactions
{
    using System;
    using System.Diagnostics;
    using System.Globalization;
    using System.Threading;
    using System.Transactions;
    using System.Transactions.Diagnostics;

    // Callback surface a promoted (distributed) transaction uses to receive
    // outcome notifications from an enlistment that was originally created
    // against the lightweight transaction manager.
    internal interface IPromotedEnlistment
    {
        void EnlistmentDone();

        void Prepared();

        void ForceRollback();
        void ForceRollback(Exception e);

        void Committed();
        void Aborted();
        void Aborted(Exception e);
        void InDoubt();
        void InDoubt(Exception e);

        byte[] GetRecoveryInformation();

        InternalEnlistment InternalEnlistment
        {
            get;
            set;
        }
    }

    //
    // InternalEnlistment by itself can support a Phase0 volatile enlistment.
    // There are derived classes to support durable, phase1 volatile & PSPE
    // enlistments.
    //
    class InternalEnlistment : ISinglePhaseNotificationInternal
    {
        // Storage for the state of the enlistment (state-machine pattern; the
        // current EnlistmentState object decides what each callback does).
        internal EnlistmentState twoPhaseState;

        // Interface implemented by the enlistment owner for notifications
        protected IEnlistmentNotification twoPhaseNotifications;

        // Store a reference to the single phase notification interface in case
        // the enlisment supports it.
        protected ISinglePhaseNotification singlePhaseNotifications;

        // Reference to the containing transaction.
        protected InternalTransaction transaction;

        // Reference to the lightweight transaction.
        Transaction atomicTransaction;

        // The EnlistmentTraceIdentifier for this enlistment; built lazily by
        // the EnlistmentTraceId property.
        private EnlistmentTraceIdentifier traceIdentifier;

        // Unique value amongst all enlistments for a given internal transaction.
        int enlistmentId;

        // Distributed transaction id of the owning transaction, or Guid.Empty
        // when there is no transaction (e.g. a recovering enlistment).
        internal Guid DistributedTxId
        {
            get
            {
                Guid returnValue = Guid.Empty;

                if (this.Transaction != null)
                {
                    returnValue = this.Transaction.DistributedTxId;
                }
                return returnValue;
            }
        }

        // Parent Enlistment Object (the public wrapper handed to the RM).
        Enlistment enlistment;
        PreparingEnlistment preparingEnlistment;
        SinglePhaseEnlistment singlePhaseEnlistment;

        // If this enlistment is promoted store the object it delegates to.
        IPromotedEnlistment promotedEnlistment;

        // For Recovering Enlistments: no transaction exists yet, so only the
        // notification interface and wrapper are captured.
        protected InternalEnlistment(
            Enlistment enlistment,
            IEnlistmentNotification twoPhaseNotifications
            )
        {
            Debug.Assert(this is RecoveringInternalEnlistment, "this is RecoveringInternalEnlistment");
            this.enlistment = enlistment;
            this.twoPhaseNotifications = twoPhaseNotifications;
            this.enlistmentId = 1;
            this.traceIdentifier = EnlistmentTraceIdentifier.Empty;
        }

        // For Promotable Enlistments (PSPE): no IEnlistmentNotification; the
        // promotable notification is held by the derived class.
        protected InternalEnlistment(
            Enlistment enlistment,
            InternalTransaction transaction,
            Transaction atomicTransaction
            )
        {
            Debug.Assert(this is PromotableInternalEnlistment, "this is PromotableInternalEnlistment");
            this.enlistment = enlistment;
            this.transaction = transaction;
            this.atomicTransaction = atomicTransaction;
            this.enlistmentId = transaction.enlistmentCount++;
            this.traceIdentifier = EnlistmentTraceIdentifier.Empty;
        }

        // Standard constructor for a volatile/durable enlistment with both
        // two-phase and (optionally) single-phase notification interfaces.
        internal InternalEnlistment(
            Enlistment enlistment,
            InternalTransaction transaction,
            IEnlistmentNotification twoPhaseNotifications,
            ISinglePhaseNotification singlePhaseNotifications,
            Transaction atomicTransaction
            )
        {
            this.enlistment = enlistment;
            this.transaction = transaction;
            this.twoPhaseNotifications = twoPhaseNotifications;
            this.singlePhaseNotifications = singlePhaseNotifications;
            this.atomicTransaction = atomicTransaction;
            this.enlistmentId = transaction.enlistmentCount++;
            this.traceIdentifier = EnlistmentTraceIdentifier.Empty;
        }

        // Minimal constructor; note it intentionally does not assign
        // enlistmentId or traceIdentifier (trace id falls back to the
        // lazily-built path in EnlistmentTraceId).
        internal InternalEnlistment(
            Enlistment enlistment,
            IEnlistmentNotification twoPhaseNotifications,
            InternalTransaction transaction,
            Transaction atomicTransaction
            )
        {
            this.enlistment = enlistment;
            this.twoPhaseNotifications = twoPhaseNotifications;
            this.transaction = transaction;
            this.atomicTransaction = atomicTransaction;
        }

        // Current state-machine state for this enlistment.
        internal EnlistmentState State
        {
            get
            {
                return this.twoPhaseState;
            }

            set
            {
                this.twoPhaseState = value;
            }
        }

        // Public wrapper object for this enlistment.
        internal Enlistment Enlistment
        {
            get
            {
                return this.enlistment;
            }
        }

        // Lazily-created PreparingEnlistment facade handed to the RM during
        // phase 1 prepare.
        internal PreparingEnlistment PreparingEnlistment
        {
            get
            {
                if (this.preparingEnlistment == null)
                {
                    // If there is a race here one of the objects would simply be garbage collected.
                    this.preparingEnlistment = new PreparingEnlistment(this);
                }
                return this.preparingEnlistment;
            }
        }

        // Lazily-created SinglePhaseEnlistment facade for SPC optimization.
        internal SinglePhaseEnlistment SinglePhaseEnlistment
        {
            get
            {
                if (this.singlePhaseEnlistment == null)
                {
                    // If there is a race here one of the objects would simply be garbage collected.
                    this.singlePhaseEnlistment = new SinglePhaseEnlistment(this);
                }
                return this.singlePhaseEnlistment;
            }
        }

        // Owning internal transaction (null only for recovering enlistments,
        // which override SyncRoot instead).
        internal InternalTransaction Transaction
        {
            get
            {
                return this.transaction;
            }
        }

        // Lock object guarding state transitions; the internal transaction
        // itself doubles as the lock.
        internal virtual object SyncRoot
        {
            get
            {
                Debug.Assert(this.transaction != null, "this.transaction != null");
                return this.transaction;
            }
        }

        internal IEnlistmentNotification EnlistmentNotification
        {
            get
            {
                return this.twoPhaseNotifications;
            }
        }

        internal ISinglePhaseNotification SinglePhaseNotification
        {
            get
            {
                return this.singlePhaseNotifications;
            }
        }

        // Only meaningful on PromotableInternalEnlistment; the base class
        // asserts and throws.
        internal virtual IPromotableSinglePhaseNotification PromotableSinglePhaseNotification
        {
            get
            {
                Debug.Assert(false, "PromotableSinglePhaseNotification called for a non promotable enlistment.");
                throw new NotImplementedException();
            }
        }

        // Distributed-side delegate once the transaction has been promoted.
        internal IPromotedEnlistment PromotedEnlistment
        {
            get
            {
                return this.promotedEnlistment;
            }

            set
            {
                this.promotedEnlistment = value;
            }
        }

        // Lazily builds the trace identifier using double-checked locking.
        // The Thread.MemoryBarrier before publishing 'temp' ensures other
        // threads never observe a partially-constructed identifier.
        internal EnlistmentTraceIdentifier EnlistmentTraceId
        {
            get
            {
                if (this.traceIdentifier == EnlistmentTraceIdentifier.Empty)
                {
                    lock (this.SyncRoot)
                    {
                        if (this.traceIdentifier == EnlistmentTraceIdentifier.Empty)
                        {
                            EnlistmentTraceIdentifier temp;
                            if (null != this.atomicTransaction)
                            {
                                temp = new EnlistmentTraceIdentifier(
                                    Guid.Empty,
                                    this.atomicTransaction.TransactionTraceId,
                                    this.enlistmentId
                                    );
                            }
                            else
                            {
                                // No transaction available (e.g. recovery), so
                                // synthesize a transaction trace id from the
                                // process-wide instance id and a counter.
                                temp = new EnlistmentTraceIdentifier(
                                    Guid.Empty,
                                    new TransactionTraceIdentifier(
                                        InternalTransaction.InstanceIdentifier +
                                            Convert.ToString(Interlocked.Increment(ref InternalTransaction.nextHash), CultureInfo.InvariantCulture),
                                        0),
                                    this.enlistmentId
                                    );
                            }
                            Thread.MemoryBarrier();
                            this.traceIdentifier = temp;
                        }
                    }
                }
                return this.traceIdentifier;
            }
        }

        internal virtual void FinishEnlistment()
        {
            // Note another enlistment finished.
            this.Transaction.phase0Volatiles.preparedVolatileEnlistments++;
            CheckComplete();
        }

        internal virtual void CheckComplete()
        {
            // Make certain we increment the right list.
            Debug.Assert(this.Transaction.phase0Volatiles.preparedVolatileEnlistments <=
                this.Transaction.phase0Volatiles.volatileEnlistmentCount +
                this.Transaction.phase0Volatiles.dependentClones);

            // Check to see if all of the volatile enlistments are done.
            // NOTE(review): the comparison uses phase0VolatileWaveCount rather
            // than volatileEnlistmentCount — presumably because phase-0
            // enlistments are processed in waves (new enlistments may be added
            // during prepare); confirm against the EnlistmentState machinery.
            if (this.Transaction.phase0Volatiles.preparedVolatileEnlistments ==
                this.Transaction.phase0VolatileWaveCount +
                this.Transaction.phase0Volatiles.dependentClones)
            {
                this.Transaction.State.Phase0VolatilePrepareDone(this.Transaction);
            }
        }

        // Only meaningful on durable enlistments; the base class asserts and
        // throws.
        internal virtual Guid ResourceManagerIdentifier
        {
            get
            {
                Debug.Assert(false, "ResourceManagerIdentifier called for non durable enlistment");
                throw new NotImplementedException();
            }
        }

        void ISinglePhaseNotificationInternal.SinglePhaseCommit(
            IPromotedEnlistment singlePhaseEnlistment
            )
        {
            bool spcCommitted = false;
            this.promotedEnlistment = singlePhaseEnlistment;
            try
            {
                this.singlePhaseNotifications.SinglePhaseCommit(this.SinglePhaseEnlistment);
                spcCommitted = true;
            }
            finally
            {
                // If the RM's SinglePhaseCommit threw without voting, the
                // outcome is unknown — report InDoubt rather than leaking a
                // hung enlistment.
                if (!spcCommitted)
                {
                    this.SinglePhaseEnlistment.InDoubt();
                }
            }
        }

        void IEnlistmentNotificationInternal.Prepare(
            IPromotedEnlistment preparingEnlistment
            )
        {
            this.promotedEnlistment = preparingEnlistment;
            this.twoPhaseNotifications.Prepare(this.PreparingEnlistment);
        }

        void IEnlistmentNotificationInternal.Commit(
            IPromotedEnlistment enlistment
            )
        {
            this.promotedEnlistment = enlistment;
            this.twoPhaseNotifications.Commit(this.Enlistment);
        }

        void IEnlistmentNotificationInternal.Rollback(
            IPromotedEnlistment enlistment
            )
        {
            this.promotedEnlistment = enlistment;
            this.twoPhaseNotifications.Rollback(this.Enlistment);
        }

        void IEnlistmentNotificationInternal.InDoubt(
            IPromotedEnlistment enlistment
            )
        {
            this.promotedEnlistment = enlistment;
            this.twoPhaseNotifications.InDoubt(this.Enlistment);
        }
    }

    // Durable enlistment: carries the resource manager's recovery GUID.
    class DurableInternalEnlistment : InternalEnlistment
    {
        // Resource Manager Identifier for this enlistment if it is durable
        internal Guid resourceManagerIdentifier;

        internal DurableInternalEnlistment(
            Enlistment enlistment,
            Guid resourceManagerIdentifier,
            InternalTransaction transaction,
            IEnlistmentNotification twoPhaseNotifications,
            ISinglePhaseNotification singlePhaseNotifications,
            Transaction atomicTransaction
            ) :
            base(enlistment, transaction, twoPhaseNotifications, singlePhaseNotifications, atomicTransaction)
        {
            this.resourceManagerIdentifier = resourceManagerIdentifier;
        }

        protected DurableInternalEnlistment(
            Enlistment enlistment,
            IEnlistmentNotification twoPhaseNotifications
            ) :
            base(enlistment, twoPhaseNotifications)
        {
        }

        internal override Guid ResourceManagerIdentifier
        {
            get
            {
                return resourceManagerIdentifier;
            }
        }
    }

    //
    // Since RecoveringInternalEnlistment does not have a transaction it must take
    // a separate object as its sync root.
    //
    class RecoveringInternalEnlistment : DurableInternalEnlistment
    {
        object syncRoot;

        internal RecoveringInternalEnlistment(
            Enlistment enlistment,
            IEnlistmentNotification twoPhaseNotifications,
            object syncRoot
            ) :
            base(enlistment, twoPhaseNotifications)
        {
            this.syncRoot = syncRoot;
        }

        internal override object SyncRoot
        {
            get
            {
                return this.syncRoot;
            }
        }
    }

    class PromotableInternalEnlistment : InternalEnlistment
    {
        // This class acts as the durable single phase enlistment for a
        // promotable single phase enlistment.
        IPromotableSinglePhaseNotification promotableNotificationInterface;

        internal PromotableInternalEnlistment(
            Enlistment enlistment,
            InternalTransaction transaction,
            IPromotableSinglePhaseNotification promotableSinglePhaseNotification,
            Transaction atomicTransaction
            ) :
            base(enlistment, transaction, atomicTransaction)
        {
            this.promotableNotificationInterface = promotableSinglePhaseNotification;
        }

        internal override IPromotableSinglePhaseNotification PromotableSinglePhaseNotification
        {
            get
            {
                return this.promotableNotificationInterface;
            }
        }
    }

    // This class supports volatile enlistments
    // (phase-1 variant: bookkeeping goes to phase1Volatiles instead of
    // phase0Volatiles, and completion compares against the full enlistment
    // count rather than a wave count).
    internal class Phase1VolatileEnlistment : InternalEnlistment
    {
        public Phase1VolatileEnlistment(
            Enlistment enlistment,
            InternalTransaction transaction,
            IEnlistmentNotification twoPhaseNotifications,
            ISinglePhaseNotification singlePhaseNotifications,
            Transaction atomicTransaction
            ) :
            base(enlistment, transaction, twoPhaseNotifications, singlePhaseNotifications, atomicTransaction)
        {
        }

        internal override void FinishEnlistment()
        {
            // Note another enlistment finished.
            this.transaction.phase1Volatiles.preparedVolatileEnlistments++;
            CheckComplete();
        }

        internal override void CheckComplete()
        {
            // Make certain we increment the right list.
            Debug.Assert(this.transaction.phase1Volatiles.preparedVolatileEnlistments <=
                this.transaction.phase1Volatiles.volatileEnlistmentCount +
                this.transaction.phase1Volatiles.dependentClones);

            // Check to see if all of the volatile enlistments are done.
            if (this.transaction.phase1Volatiles.preparedVolatileEnlistments ==
                this.transaction.phase1Volatiles.volatileEnlistmentCount +
                this.transaction.phase1Volatiles.dependentClones)
            {
                this.transaction.State.Phase1VolatilePrepareDone(this.transaction);
            }
        }
    }

    // Public handle a resource manager holds on its participation in a
    // transaction; Done() reports completion to the state machine.
    public class Enlistment
    {
        // Interface for communicating with the state machine.
        internal InternalEnlistment internalEnlistment;

        internal Enlistment(
            InternalEnlistment internalEnlistment
            )
        {
            this.internalEnlistment = internalEnlistment;
        }

        // Durable enlistment.
        internal Enlistment(
            Guid resourceManagerIdentifier,
            InternalTransaction transaction,
            IEnlistmentNotification twoPhaseNotifications,
            ISinglePhaseNotification singlePhaseNotifications,
            Transaction atomicTransaction
            )
        {
            this.internalEnlistment = new DurableInternalEnlistment(
                this,
                resourceManagerIdentifier,
                transaction,
                twoPhaseNotifications,
                singlePhaseNotifications,
                atomicTransaction
                );
        }

        // Volatile enlistment: EnlistDuringPrepareRequired selects the
        // phase-0 (base InternalEnlistment) implementation, otherwise a
        // phase-1 volatile enlistment is created.
        internal Enlistment(
            InternalTransaction transaction,
            IEnlistmentNotification twoPhaseNotifications,
            ISinglePhaseNotification singlePhaseNotifications,
            Transaction atomicTransaction,
            EnlistmentOptions enlistmentOptions
            )
        {
            if ((enlistmentOptions & EnlistmentOptions.EnlistDuringPrepareRequired) != 0)
            {
                this.internalEnlistment = new InternalEnlistment(
                    this,
                    transaction,
                    twoPhaseNotifications,
                    singlePhaseNotifications,
                    atomicTransaction
                    );
            }
            else
            {
                this.internalEnlistment = new Phase1VolatileEnlistment(
                    this,
                    transaction,
                    twoPhaseNotifications,
                    singlePhaseNotifications,
                    atomicTransaction
                    );
            }
        }

        // This constructor is for a promotable single phase enlistment.
        internal Enlistment(
            InternalTransaction transaction,
            IPromotableSinglePhaseNotification promotableSinglePhaseNotification,
            Transaction atomicTransaction
            )
        {
            this.internalEnlistment = new PromotableInternalEnlistment(
                this,
                transaction,
                promotableSinglePhaseNotification,
                atomicTransaction
                );
        }

        internal Enlistment(
            IEnlistmentNotification twoPhaseNotifications,
            InternalTransaction transaction,
            Transaction atomicTransaction
            )
        {
            this.internalEnlistment = new InternalEnlistment(
                this,
                twoPhaseNotifications,
                transaction,
                atomicTransaction
                );
        }

        // Recovery enlistment: no transaction yet, caller supplies the lock.
        internal Enlistment(
            IEnlistmentNotification twoPhaseNotifications,
            object syncRoot
            )
        {
            this.internalEnlistment = new RecoveringInternalEnlistment(
                this,
                twoPhaseNotifications,
                syncRoot
                );
        }

        // Called by the resource manager when it has finished processing a
        // notification; forwards to the current state under the state lock.
        public void Done()
        {
            if (DiagnosticTrace.Verbose)
            {
                MethodEnteredTraceRecord.Trace(SR.GetString(SR.TraceSourceLtm),
                    "Enlistment.Done"
                    );
                EnlistmentCallbackPositiveTraceRecord.Trace(SR.GetString(SR.TraceSourceLtm),
                    this.internalEnlistment.EnlistmentTraceId,
                    EnlistmentCallback.Done
                    );
            }

            lock (this.internalEnlistment.SyncRoot)
            {
                this.internalEnlistment.State.EnlistmentDone(this.internalEnlistment);
            }

            if (DiagnosticTrace.Verbose)
            {
                MethodExitedTraceRecord.Trace(SR.GetString(SR.TraceSourceLtm),
                    "Enlistment.Done"
                    );
            }
        }

        internal InternalEnlistment InternalEnlistment
        {
            get
            {
                return this.internalEnlistment;
            }
        }
    }
}
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *********************************************************************/ using System; using System.Collections.Generic; using System.Diagnostics; using System.Text; using System.Drawing; using System.Drawing.Text; using System.Xml; using Axiom.MathLib; using Axiom.Core; using Axiom.Input; using FontFamily = System.Drawing.FontFamily; namespace Multiverse.Gui { /// <summary> /// Variant of EditBox that allows me to control which widget /// is in front. I would like to redo a great deal of this design, /// so that there are two components. One for text display, and /// one for text input. 
/// </summary>
public class LayeredEditBox : LayeredStaticText {
    // When true, draw mask characters instead of the real text (password mode).
    protected bool textMasked = false;
    // Character drawn in place of each real character when textMasked is set.
    protected char maskChar = '*';
    protected float lastTextOffset = 0;
    // True while the left mouse button is held and a drag-select is in progress.
    bool dragging = false;
    // Text index where the current drag/shift selection was anchored.
    int dragAnchorIndex = 0;
    int maxTextLength = int.MaxValue;
    bool readOnly = false;
    // Selection bounds as indices into the text; equal indices mean no selection.
    int selectionStartIndex = 0;
    int selectionEndIndex = 0;
    // Style applied to the selected range (set up in the constructor).
    TextStyle selectedTextStyle;
    // Keys for which OnKeyDown reported Handled; cleared when capture is lost.
    Dictionary<KeyCodes, bool> keysPressed = new Dictionary<KeyCodes, bool>();

    // The authoritative (unmasked) text being edited.
    protected StringBuilder editBuffer = new StringBuilder();
    protected string editText = string.Empty;

    protected int caretIndex = 0;
    // caret (cursor) texture
    protected TextureInfo caret;

    // What they have just typed, but not entered
    protected string historyTmp;
    // History lines
    protected List<string> historyLines = null;
    // index from the end of the history list.
    // a value of 0 corresponds to the most recent entry in the list
    // a value of -1 corresponds to an entry that is not yet in the list
    protected int historyIndex = -1;

    #region Event Declarations

    #region Unused events
#if UNUSED_EVENTS
    /// <summary>
    ///		The read-only mode for the edit box has been changed.
    /// </summary>
    public event EventHandler ReadOnlyChanged;
    /// <summary>
    ///		The masked rendering mode (password mode) has been changed.
    /// </summary>
    public event EventHandler MaskedRenderingModeChanged;
    /// <summary>
    ///		The code point (character) to use for masked text has been changed.
    /// </summary>
    public event EventHandler MaskCodePointChanged;
    /// <summary>
    ///		The validation string has been changed.
    /// </summary>
    public event EventHandler ValidationStringChanged;
    /// <summary>
    ///		The maximum allowable string length has been changed.
    /// </summary>
    public event EventHandler MaximumTextLengthChanged;
    /// <summary>
    ///		Some operation has made the current text invalid with regards to the validation string.
    /// </summary>
    public event EventHandler TextInvalidated;
    /// <summary>
    ///		The user attempted to modify the text in a way that would have made it invalid.
    /// </summary>
    public event EventHandler InvalidEntryAttempted;
    /// <summary>
    ///		The text carat (insert point) has changed.
    /// </summary>
    public event EventHandler CaratMoved;
    /// <summary>
    ///		The text widget has been enabled.
    /// </summary>
    public event EventHandler Enabled;
    /// <summary>
    ///		The text widget has been disabled.
    /// </summary>
    public event EventHandler Disabled;
#endif // UNUSED_EVENTS
    #endregion Unused events

    public event EventHandler PostCharacter;

    /// <summary>
    ///		The current text selection has changed.
    /// </summary>
    public event EventHandler TextSelectionChanged;
    /// <summary>
    ///		The number of characters in the edit box has reached the current maximum.
    /// </summary>
    public event EventHandler EditboxFull;
    /// <summary>
    ///		The user has accepted the current text by pressing Return, Enter, or Tab.
    /// </summary>
    public event EventHandler TextAccepted;

    /// <summary>
    ///   New events for the Interface system
    /// </summary>
    public event EventHandler EnterPressed;
    public event EventHandler EscapePressed;
    public event EventHandler SpacePressed;
    public event EventHandler TabPressed;

    // Use the BlahEvent version instead of the Blah version, since
    // Char is a class name.
    public event KeyboardEventHandler CharEvent;

    #endregion Event Declarations

    /// <summary>
    ///   Builds the edit box and derives the selection-highlight style from
    ///   the normal text style (half-alpha mid-grey background), then hooks
    ///   selection changes up to chunk regeneration.
    /// </summary>
    public LayeredEditBox(string name, Window clipWindow)
        : base(name, clipWindow) {
        selectedTextStyle = new TextStyle(this.NormalTextStyle);
        selectedTextStyle.bgColor.a = 0.5f;
        selectedTextStyle.bgColor.r = 0.5f;
        selectedTextStyle.bgColor.g = 0.5f;
        selectedTextStyle.bgColor.b = 0.5f;
        // Rebuild the styled text chunks whenever the selection moves.
        this.TextSelectionChanged += this.GenerateTextChunks;
        this.ScrollFromBottom = true;
    }

    /// <summary>
    ///   Setting this will reset the selection to be at the caret.
    ///   This is modifying the edit version of the text rather than
    ///   what is drawn (which may be mask characters).
/// </summary>
public void SetText(string str, bool moveCaretToEnd, bool resetSelection) {
    editBuffer.Length = 0;
    editBuffer.Append(str);
    SetText(editBuffer, moveCaretToEnd, resetSelection);
}

/// <summary>
///   Core text-update: clamps the caret, optionally collapses the
///   selection to the caret, and pushes either the real text or a
///   mask string of equal length to the underlying static-text widget.
/// </summary>
protected void SetText(StringBuilder buffer, bool moveCaretToEnd, bool resetSelection) {
    if (moveCaretToEnd || caretIndex > buffer.Length)
        CaretIndex = buffer.Length;
    else if (caretIndex < 0)
        CaretIndex = 0;
    if (resetSelection) {
        SelectionStartIndex = caretIndex;
        SelectionEndIndex = caretIndex;
    }
    // In masked (password) mode the displayed text is maskChar repeated.
    if (textMasked)
        base.SetText(new string(maskChar, buffer.Length));
    else
        base.SetText(buffer.ToString());
}

public override void SetText(string str) {
    SetText(str, true, true);
}

// variant that is called from TextSelectionChanged
protected void GenerateTextChunks(object sender, EventArgs e) {
    GenerateTextChunks(GetAllText());
}

/// <summary>
///   Rebuilds the styled text chunks as up to three ranges:
///   [0, selStart) normal, [selStart, selEnd) selected,
///   [selEnd, length) normal.
/// </summary>
protected override void GenerateTextChunks(string str) {
    List<TextChunk> chunks = this.TextChunks;
    chunks.Clear();
    if (str == null)
        str = string.Empty;
    // The portion before the highlight section
    if (selectionStartIndex > 0) {
        TextChunk chunk = new TextChunk(new TextRange(0, selectionStartIndex), new TextStyle(this.NormalTextStyle));
        chunks.Add(chunk);
    }
    // The highlighted (selected) portion, if any.
    if (selectionEndIndex > selectionStartIndex) {
        TextChunk chunk = new TextChunk(new TextRange(selectionStartIndex, selectionEndIndex), new TextStyle(this.SelectedTextStyle));
        chunks.Add(chunk);
    }
    // The portion after the highlight (may be an empty trailing range when
    // selectionEndIndex == str.Length).
    if (selectionEndIndex <= str.Length) {
        TextChunk chunk = new TextChunk(new TextRange(selectionEndIndex, str.Length), new TextStyle(this.NormalTextStyle));
        chunks.Add(chunk);
    }
}

/// <summary>
///   This computes how much vertical space would be required to draw
///   all the text, wrapping based on window width.
/// </summary>
/// <returns>number of pixels of vertical space required to draw the text</returns>
public override float GetTextHeight(bool includeEmpty) {
    float rv = base.GetTextHeight(includeEmpty);
    // Exact-zero check is intentional: base returns 0.0 only when there is
    // no text at all.
    if (rv == 0.0)
        // if there is no text, but we have our cursor, make sure
        // we leave room for this empty line.
        return this.Font.LineSpacing;
    return rv;
}

/// <summary>
///   Draws the text (via the base class) and then the caret texture at the
///   caret's pixel position, clamped into the clip rect, at a z just in
///   front of this widget's text sub-level.
/// </summary>
protected override void DrawSelf(float z) {
    if (lines.Count == 0) {
        // add an empty line so the caret has a line to sit on
        TextRange range = new TextRange();
        range.start = 0;
        range.end = 0;
        lines.Add(range);
    }
    base.DrawSelf(z);

    Rect clipRect = this.PixelRect;

    // Draw the caret
    PointF pt = GetOffset(caretIndex);
    Vector3 drawPos = new Vector3(pt.X, pt.Y, z);
    // Compose the z offset from strata/layer/frame levels, then bias by the
    // caret sub-level so it renders in front of the text.
    float zOffset = (int)frameStrata * GuiZFrameStrataStep +
                    (int)layerLevel * GuiZLayerLevelStep +
                    (int)frameLevel * GuiZFrameLevelStep;
    float maxOffset = (int)FrameStrata.Maximum * GuiZFrameStrataStep;
    float curOffset = maxOffset - zOffset;
    drawPos.z = drawPos.z + curOffset - (int)SubLevel.Caret * GuiZSubLevelStep;
    // Keep the caret horizontally inside the clip rect.
    if (drawPos.x < clipRect.Left)
        drawPos.x = clipRect.Left;
    else if (drawPos.x + caret.Width > clipRect.Right)
        drawPos.x = clipRect.Right - caret.Width;
    SizeF caretSize = new SizeF(caret.Width, this.Font.LineSpacing);
    ColorRect caretColorRect = new ColorRect(ColorEx.White);
    caret.Draw(drawPos, caretSize, clipRect, caretColorRect);
}

/// <summary>
///   Set the widget's history list to be the one of the frame
/// </summary>
/// <param name="hist"></param>
public void SetHistory(List<string> hist) {
    historyLines = hist;
}

#region Key Handlers

/// <summary>
///   Processing for the backspace key.
/// </summary>
protected void HandleBackspace() {
    if (!ReadOnly) {
        // Build the candidate text first so we can validate it before
        // committing: remove the selection if there is one, otherwise the
        // single character before the caret.
        StringBuilder tmpBuffer = new StringBuilder(editBuffer.ToString());
        int tmpIndex = caretIndex;
        if (SelectionLength != 0) {
            tmpBuffer = tmpBuffer.Remove(SelectionStartIndex, SelectionLength);
            tmpIndex = SelectionStartIndex;
        } else if (CaretIndex > 0) {
            tmpBuffer = tmpBuffer.Remove(CaretIndex - 1, 1);
            tmpIndex = CaretIndex - 1;
        }
        if (IsStringValid(tmpBuffer.ToString())) {
            // erase selection using mode that does not modify 'text' (we just want to update state)
            EraseSelectedText(false);
            // set text to the newly modified string
            editBuffer = tmpBuffer;
            // set the displayed text, update the caret and selection
            SetText(editBuffer, false, true);
            CaretIndex = tmpIndex;
            HandleTextChanged();
        } else {
            // trigger invalid modification attempted event
            OnInvalidEntryAttempted(new EventArgs());
        }
    }
}

/// <summary>
///   Processing for the delete key.
/// </summary>
protected void HandleDelete() {
    if (!ReadOnly) {
        // Same validate-then-commit pattern as HandleBackspace, but removes
        // the selection or the character AT the caret (forward delete).
        StringBuilder tmpBuffer = new StringBuilder(editBuffer.ToString());
        int tmpIndex = caretIndex;
        if (SelectionLength != 0) {
            tmpBuffer = tmpBuffer.Remove(SelectionStartIndex, SelectionLength);
            tmpIndex = SelectionStartIndex;
        } else if (CaretIndex < tmpBuffer.Length) {
            tmpBuffer = tmpBuffer.Remove(CaretIndex, 1);
            tmpIndex = CaretIndex;
        }
        if (IsStringValid(tmpBuffer.ToString())) {
            // erase selection using mode that does not modify 'text' (we just want to update state)
            EraseSelectedText(false);
            // set text to the newly modified string
            editBuffer = tmpBuffer;
            // set the displayed text, update the caret and selection
            SetText(editBuffer, false, true);
            CaretIndex = tmpIndex;
            HandleTextChanged();
        } else {
            // trigger invalid modification attempted event
            OnInvalidEntryAttempted(new EventArgs());
        }
    }
}

/// <summary>
///   Processing to move the caret one character left.
/// </summary>
/// <param name="sysKeys">Current state of the system keys.</param>
protected void HandleCharLeft(ModifierKeys sysKeys) {
    if (caretIndex > 0)
        this.CaretIndex = caretIndex - 1;
    UpdateSelectionAfterCaretMove(sysKeys);
}

/// <summary>
///   Processing to move the caret one word left.
/// </summary>
/// <param name="sysKeys">Current state of the system keys.</param>
protected void HandleWordLeft(ModifierKeys sysKeys) {
    if (caretIndex > 0)
        this.CaretIndex = TextUtil.GetWordStartIndex(text, caretIndex - 1);
    UpdateSelectionAfterCaretMove(sysKeys);
}

/// <summary>
///   Processing to move the caret one character right.
/// </summary>
/// <param name="sysKeys">Current state of the system keys.</param>
protected void HandleCharRight(ModifierKeys sysKeys) {
    if (caretIndex < text.Length)
        CaretIndex = caretIndex + 1;
    UpdateSelectionAfterCaretMove(sysKeys);
}

/// <summary>
///   Processing to move the caret one word right.
/// </summary>
/// <param name="sysKeys">Current state of the system keys.</param>
protected void HandleWordRight(ModifierKeys sysKeys) {
    if (caretIndex < text.Length)
        CaretIndex = TextUtil.GetNextWordStartIndex(text, caretIndex + 1);
    UpdateSelectionAfterCaretMove(sysKeys);
}

/// <summary>
///   Processing to move the caret to the start of the text.
/// </summary>
/// <param name="sysKeys">Current state of the system keys.</param>
protected void HandleHome(ModifierKeys sysKeys) {
    if (caretIndex > 0)
        CaretIndex = 0;
    UpdateSelectionAfterCaretMove(sysKeys);
}

/// <summary>
///   Shared tail for the caret-movement handlers: with Shift held the
///   selection is extended from the drag anchor to the new caret position;
///   otherwise any selection is collapsed to the caret.
/// </summary>
/// <param name="sysKeys">Current state of the system keys.</param>
void UpdateSelectionAfterCaretMove(ModifierKeys sysKeys) {
    if ((sysKeys & ModifierKeys.Shift) > 0)
        SetSelection(caretIndex, dragAnchorIndex);
    else
        ClearSelection();
}

/// <summary>
///   Processing to move the caret to the end of the text.
/// </summary>
/// <param name="sysKeys">Current state of the system keys.</param>
protected void HandleEnd(ModifierKeys sysKeys) {
    if (caretIndex < text.Length) {
        CaretIndex = text.Length;
    }
    // Shift extends the selection from the drag anchor; otherwise collapse it.
    if ((sysKeys & ModifierKeys.Shift) > 0) {
        SetSelection(caretIndex, dragAnchorIndex);
    } else {
        ClearSelection();
    }
}

#endregion Key Handlers

/// <summary>
///   Move forward (toward newer entries) in the command history.
///   historyIndex counts back from the newest entry; -1 means "not in
///   history" (editing a fresh line), 0 is the newest saved entry.
/// </summary>
protected void HandleHistoryNext(ModifierKeys sysKeys) {
    // do we have command history?
    if (historyLines == null)
        return;
    // are we on the last line already?
    if (historyIndex == -1)
        return;
    int firstEntry = historyLines.Count - 1;
    if (historyIndex == 0) {
        // Stepping past the newest entry restores the in-progress line.
        historyIndex--;
        SetText(historyTmp, true, true);
    } else {
        historyIndex--;
        int offset = firstEntry - historyIndex;
        SetText(historyLines[offset], true, true);
    }
}

/// <summary>
///   Move backward (toward older entries) in the command history,
///   stashing the in-progress line in historyTmp on first entry.
/// </summary>
protected void HandleHistoryPrev(ModifierKeys sysKeys) {
    // do we have command history?
    if (historyLines == null)
        return;
    int firstEntry = historyLines.Count - 1;
    // are we already on the first line?
    if (historyIndex == firstEntry)
        return;
    // are we on the last line
    if (historyIndex == -1) {
        historyIndex++;
        // Save what was typed so HandleHistoryNext can restore it.
        historyTmp = this.Text;
        int offset = firstEntry - historyIndex;
        SetText(historyLines[offset], true, true);
    } else {
        historyIndex++;
        int offset = firstEntry - historyIndex;
        SetText(historyLines[offset], true, true);
    }
}

#region Miscellaneous methods from EditBox

/// <summary>
///   return true if the Editbox has input focus.
/// </summary>
/// <value>
///   true if the Editbox has keyboard input focus.
///   false if the Editbox does not have keyboard input focus.
/// </value>
public bool HasInputFocus {
    get {
        return this.IsActive;
    }
}

/// <summary>
///   Using the current regex, the supplied text is validated.
///   NOTE(review): validation is currently a stub that accepts everything;
///   the parameter intentionally shadows the 'text' field.
/// </summary>
/// <param name="text">Text to validate.</param>
/// <returns>True if the text is valid according to the validation string, false otherwise.</returns>
protected bool IsStringValid(string text) {
    return true;
}

/// <summary>
///   Erase the currently selected text.
/// </summary>
/// <param name="modifyText">
///   When true, the actual text will be modified.
///   When false, everything is done except erasing the characters.
/// </param>
protected void EraseSelectedText(bool modifyText) {
    if (SelectionLength != 0) {
        // setup new carat position and remove selection highlight
        CaretIndex = SelectionStartIndex;

        // erase the selected characters (if required)
        if (modifyText) {
            // remove the text
            editBuffer.Remove(SelectionStartIndex, SelectionLength);
            SetText(editBuffer, false, true);

            // trigger notifications that the text has changed
            OnTextChanged(new EventArgs());
        }
        ClearSelection();
    }
}

/// <summary>
///   Clear the current selection setting (collapse it to the caret).
/// </summary>
protected void ClearSelection() {
    SetSelection(caretIndex, caretIndex);
}

/// <summary>
///   Sets the selection range, clamping both ends to the text length and
///   normalizing so start &lt;= end; fires TextSelectionChanged only when
///   the range actually changes.
/// </summary>
public void SetSelection(int startPos, int endPos) {
    // ensure selection start point is within the valid range
    if (startPos > textBuffer.Length) {
        startPos = textBuffer.Length;
    }
    // ensure selection end point is within the valid range
    if (endPos > textBuffer.Length) {
        endPos = textBuffer.Length;
    }
    // swap values if start is after end
    if (startPos > endPos) {
        int tmp = endPos;
        endPos = startPos;
        startPos = tmp;
    }
    // only change state if values are different
    if ((startPos != selectionStartIndex) || endPos != selectionEndIndex) {
        // setup selection
        SelectionStartIndex = startPos;
        SelectionEndIndex = endPos;
        log.DebugFormat("Set selection: {0} {1}", startPos, endPos);
        // event trigger
        // NOTE(review): invoked without a null check — presumably the
        // constructor's own subscription guarantees at least one handler.
        TextSelectionChanged(this, new EventArgs());
    }
}

#endregion

#region Overridden Event Trigger Methods

/// <summary>
///   Left-button press: capture input, clear the selection, and start a
///   drag-select anchored at the clicked character.
/// </summary>
protected internal override void OnMouseDown(MouseEventArgs e) {
    // base class handling
    base.OnMouseDown(e);

    if (GuiSystem.IsMouseButtonSet(e.Button, MouseButtons.Left)) {
        // grab inputs
        CaptureInput();

        // handle mouse down
        ClearSelection();
        dragging = true;
        Rect absRect = GetVisibleTextArea();
        // Convert screen coordinates to text-area-relative coordinates.
        PointF pt = new PointF(e.X - absRect.Left, e.Y - absRect.Top);
        dragAnchorIndex = GetTextIndexFromPosition(pt);
        this.CaretIndex = dragAnchorIndex;
        e.Handled = true;
    }
}

protected internal override void OnMouseUp(MouseEventArgs e) {
    // base class processing
    base.OnMouseUp(e);

    if (GuiSystem.IsMouseButtonSet(e.Button, MouseButtons.Left)) {
        //ReleaseInput();
        e.Handled = true;
    }
}

/// <summary>
///   Double-click: select the clicked word, or everything when the text
///   is masked (so word boundaries in the real text are not revealed).
/// </summary>
protected internal override void OnMouseDoubleClicked(MouseEventArgs e) {
    // base class processing
    base.OnMouseDoubleClicked(e);

    if (GuiSystem.IsMouseButtonSet(e.Button, MouseButtons.Left)) {
        // if masked, set up to select all
        if (TextMasked) {
            dragAnchorIndex = 0;
            this.CaretIndex = text.Length;
        } else {
            // not masked, so select the word that was double clicked
            dragAnchorIndex = TextUtil.GetWordStartIndex(text, (caretIndex == text.Length) ? caretIndex : caretIndex + 1);
            CaretIndex = TextUtil.GetNextWordStartIndex(text, (caretIndex == text.Length) ? caretIndex : caretIndex + 1);
        }

        // perform actual selection operation
        SetSelection(dragAnchorIndex, caretIndex);

        e.Handled = true;
    }
}

//protected internal override void OnMouseTripleClicked(MouseEventArgs e) {
//    // base class processing
//    base.OnMouseTripleClicked(e);

//    if (e.Button == MouseButton.Left) {
//        dragAnchorIndex = 0;
//        CaretIndex = text.Length;
//        SetSelection(dragAnchorIndex, caretIndex);
//        e.Handled = true;
//    }
//}

/// <summary>
///   While dragging, move the caret to the character under the mouse and
///   extend the selection from the drag anchor.
/// </summary>
protected internal override void OnMouseMoved(MouseEventArgs e) {
    // base class processing
    base.OnMouseMoved(e);

    if (dragging) {
        Rect absRect = GetVisibleTextArea();
        PointF pt = new PointF(e.X - absRect.Left, e.Y - absRect.Top);
        CaretIndex = GetTextIndexFromPosition(pt);
        SetSelection(caretIndex, dragAnchorIndex);
    }
    // NOTE(review): marked handled even when not dragging — confirm this is
    // intentional (it swallows all mouse-move events over the widget).
    e.Handled = true;
}

/// <summary>
///   Losing input capture cancels any drag-select and forgets pressed keys.
/// </summary>
protected internal override void OnCaptureLost(EventArgs e) {
    dragging = false;
    keysPressed.Clear();

    // base class processing
    base.OnCaptureLost(e);

    // e.Handled = true;
}

/// <summary>
///   Mark key down events as handled if we handle the associated
///   key press event.
/// </summary> /// <param name="e"></param> protected internal override void OnKeyDown(KeyEventArgs e) { // base class processing base.OnKeyDown(e); // only need to take notice if we have focus if (HasInputFocus && !ReadOnly) { // First see if it is a special character switch (e.Key) { case KeyCodes.LeftShift: case KeyCodes.RightShift: case KeyCodes.Backspace: case KeyCodes.Delete: case KeyCodes.Return: case KeyCodes.Escape: case KeyCodes.Enter: case KeyCodes.Left: case KeyCodes.Right: case KeyCodes.Home: case KeyCodes.End: e.Handled = true; break; case KeyCodes.Up: case KeyCodes.Down: case KeyCodes.A: case KeyCodes.B: case KeyCodes.C: case KeyCodes.D: case KeyCodes.F: case KeyCodes.K: case KeyCodes.U: case KeyCodes.V: case KeyCodes.W: case KeyCodes.X: if ((e.Modifiers & ModifierKeys.Control) > 0) e.Handled = true; break; default: break; } // switch // If we didn't have special handling for that character, see if // we can handle it as a displayed character if (!e.Handled && !e.IsAltDown && !e.IsControlDown && this.Font.IsCharacterAvailable(e.KeyChar)) e.Handled = true; // Space and Tab may or may not be displayed, but create a keydown, // so that we will generate a keyPress event for them. 
if (!e.Handled) { switch (e.Key) { case KeyCodes.Space: case KeyCodes.Tab: e.Handled = true; break; default: break; } } if (e.Handled) keysPressed[e.Key] = true; } } protected internal override void OnKeyPress(KeyEventArgs e) { // base class processing base.OnKeyPress(e); // only need to take notice if we have focus if (HasInputFocus && !ReadOnly) { // First see if it is a special character switch (e.Key) { case KeyCodes.LeftShift: case KeyCodes.RightShift: if (SelectionLength == 0) { dragAnchorIndex = CaretIndex; } e.Handled = true; break; case KeyCodes.Backspace: HandleBackspace(); e.Handled = true; break; case KeyCodes.Delete: HandleDelete(); e.Handled = true; break; case KeyCodes.Escape: // Pass the event that is used for the ui scripting OnEscapePressed(new EventArgs()); e.Handled = true; break; case KeyCodes.Space: // Pass the event that is used for the ui scripting OnSpacePressed(new EventArgs()); // We don't mark this handled here, since we may also want to display it break; case KeyCodes.Tab: // Pass the event that is used for the ui scripting OnTabPressed(new EventArgs()); // We don't mark this handled here, since we may also want to display it break; case KeyCodes.Return: case KeyCodes.Enter: // Pass the event that is used for the ui scripting OnEnterPressed(new EventArgs()); // fire input accepted event OnTextAccepted(new EventArgs()); e.Handled = true; break; case KeyCodes.Left: if ((e.Modifiers & ModifierKeys.Control) > 0) { HandleWordLeft(e.Modifiers); e.Handled = true; } else { HandleCharLeft(e.Modifiers); e.Handled = true; } break; case KeyCodes.Right: if ((e.Modifiers & ModifierKeys.Control) > 0) { HandleWordRight(e.Modifiers); e.Handled = true; } else { HandleCharRight(e.Modifiers); e.Handled = true; } break; case KeyCodes.Up: if ((e.Modifiers & ModifierKeys.Control) > 0) { HandleHistoryPrev(e.Modifiers); e.Handled = true; } break; case KeyCodes.Down: if ((e.Modifiers & ModifierKeys.Control) > 0) { HandleHistoryNext(e.Modifiers); e.Handled = 
true; } break; case KeyCodes.Home: HandleHome(e.Modifiers); e.Handled = true; break; case KeyCodes.End: HandleEnd(e.Modifiers); e.Handled = true; break; case KeyCodes.A: if ((e.Modifiers & ModifierKeys.Control) > 0) { SetSelection(0, editBuffer.Length); e.Handled = true; } break; case KeyCodes.B: if ((e.Modifiers & ModifierKeys.Control) > 0) { HandleCharLeft(e.Modifiers); e.Handled = true; } break; case KeyCodes.C: if ((e.Modifiers & ModifierKeys.Control) > 0) { // TODO: Copy selection } break; case KeyCodes.D: if ((e.Modifiers & ModifierKeys.Control) > 0) { HandleDelete(); e.Handled = true; } break; case KeyCodes.F: if ((e.Modifiers & ModifierKeys.Control) > 0) { HandleCharRight(e.Modifiers); e.Handled = true; } break; case KeyCodes.K: if ((e.Modifiers & ModifierKeys.Control) > 0) { // TODO: Kill after } break; case KeyCodes.U: if ((e.Modifiers & ModifierKeys.Control) > 0) { // TODO: Kill before } break; case KeyCodes.V: if ((e.Modifiers & ModifierKeys.Control) > 0) { // TODO: Paste selection } break; case KeyCodes.W: if ((e.Modifiers & ModifierKeys.Control) > 0) { // TODO: Kill word before } break; case KeyCodes.X: if ((e.Modifiers & ModifierKeys.Control) > 0) { // TODO: Cut selection } break; } // switch // If we didn't have special handling for that character, see if // we can handle it as a displayed character if (!e.Handled && !e.IsAltDown && !e.IsControlDown && this.Font.IsCharacterAvailable(e.KeyChar)) { // backup current text StringBuilder tmpBuffer = new StringBuilder(editBuffer.ToString()); tmpBuffer = tmpBuffer.Remove(SelectionStartIndex, SelectionLength); // if there is room if (tmpBuffer.Length < maxTextLength) { tmpBuffer = tmpBuffer.Insert(SelectionStartIndex, e.KeyChar.ToString()); if (IsStringValid(tmpBuffer.ToString())) { // erase selection using mode that does not modify 'text' (we just want to update state) EraseSelectedText(false); // set text to the newly modified string editBuffer = tmpBuffer; // advance carat CaretIndex++; // set the 
displayed text, update the caret and selection SetText(editBuffer, false, true); HandleTextChanged(); OnChar(e); } else { // trigger invalid modification attempted event OnInvalidEntryAttempted(new EventArgs()); } } else { // trigger text box full event OnEditboxFull(new EventArgs()); } e.Handled = true; } // Space and Tab may or may not be displayed, but mark them handled if we ran // the OnSpacePressed and OnTabPressed events. if (!e.Handled) { switch (e.Key) { // Ideally, I would check to see if we actually have a script registered // but unfortunately, I don't have any good way of getting that information. // Instead, just mark space and tab as handled. case KeyCodes.Space: case KeyCodes.Tab: e.Handled = true; break; default: break; } } } if (PostCharacter != null) PostCharacter(this, e); } /// <summary> /// Mark key up events as handled if we handle the associated /// key press event. /// </summary> /// <param name="e"></param> protected internal override void OnKeyUp(KeyEventArgs e) { // base class processing base.OnKeyUp(e); // only need to take notice if we have focus if (HasInputFocus) { // Basically, if we got the key down event (and handled it), // handle the key up event. if (keysPressed.ContainsKey(e.Key)) { keysPressed.Remove(e.Key); e.Handled = true; } } } protected internal override void OnTextChanged(EventArgs e) { // base class processing base.OnTextChanged(e); // clear selection ClearSelection(); // make sure carat is within the text if (this.CaretIndex > text.Length) { this.CaretIndex = text.Length; } // e.Handled = true; } #endregion Overridden Event Trigger Methods #region Other event trigger methods protected internal virtual void OnTextSelectionChanged(EventArgs e) { TextSelectionChanged(this, e); } /// <summary> /// Event fired internally when the user attempted to make a change to the edit box that would /// have caused it to fail validation. 
/// </summary> /// <param name="e">Event arguments.</param> protected internal virtual void OnInvalidEntryAttempted(EventArgs e) { return; } /// <summary> /// Event fired internally when the edit box text has reached the set maximum length. /// </summary> /// <param name="e">Event arguments.</param> protected internal virtual void OnEditboxFull(EventArgs e) { if (EditboxFull != null) { EditboxFull(this, e); } } /// <summary> /// Event fired internally when the user accepts the edit box text by pressing Return, Enter, or Tab. /// </summary> /// <param name="e">Event arguments.</param> protected internal virtual void OnTextAccepted(EventArgs e) { if (TextAccepted != null) TextAccepted(this, e); } protected internal virtual void OnEnterPressed(EventArgs e) { if (EnterPressed != null) EnterPressed(this, e); } protected internal virtual void OnEscapePressed(EventArgs e) { if (EscapePressed != null) EscapePressed(this, e); } protected internal virtual void OnSpacePressed(EventArgs e) { if (SpacePressed != null) SpacePressed(this, e); } protected internal virtual void OnTabPressed(EventArgs e) { if (TabPressed != null) TabPressed(this, e); } protected internal virtual void OnChar(KeyEventArgs e) { if (CharEvent != null) CharEvent(this, e); } #endregion public bool TextMasked { get { return textMasked; } set { if (textMasked != value) { textMasked = value; // regenerate the text buffer contents based on the edit buffer SetText(editBuffer, false, false); this.Dirty = true; } } } public char MaskChar { get { return maskChar; } set { if (maskChar != value) { maskChar = value; // regenerate the text buffer contents based on the edit buffer SetText(editBuffer, false, false); this.Dirty = true; } } } public bool ReadOnly { get { return readOnly; } set { readOnly = value; } } public int SelectionStartIndex { get { return selectionStartIndex; } set { if (selectionStartIndex != value) { selectionStartIndex = value; OnTextSelectionChanged(null); this.Dirty = true; } } } public int 
SelectionEndIndex { get { return selectionEndIndex; } set { if (selectionEndIndex != value) { selectionEndIndex = value; OnTextSelectionChanged(null); this.Dirty = true; } } } public int SelectionLength { get { return selectionEndIndex - selectionStartIndex; } } public TextStyle SelectedTextStyle { get { return selectedTextStyle; } set { ReplaceStyle(selectedTextStyle, value); selectedTextStyle = value; } } public int CaretIndex { get { return caretIndex; } set { if (value != caretIndex) { caretIndex = value; this.Dirty = true; } } } public TextureInfo Caret { get { return caret; } set { if (value != caret) { caret = value; this.Dirty = true; } } } public string EditText { get { return editBuffer.ToString(); } } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.PetstoreV2NoSync
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Models;

    /// <summary>
    /// Extension methods for SwaggerPetstoreV2.
    /// </summary>
    public static partial class SwaggerPetstoreV2Extensions
    {
        /// <summary>
        /// Add a new pet to the store
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='body'>
        /// Pet object that needs to be added to the store
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The pet created by the service.</returns>
        public static async Task<Pet> AddPetAsync(this ISwaggerPetstoreV2 operations, Pet body, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.AddPetWithHttpMessagesAsync(body, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Update an existing pet
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='body'>
        /// Pet object that needs to be added to the store
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task UpdatePetAsync(this ISwaggerPetstoreV2 operations, Pet body, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.UpdatePetWithHttpMessagesAsync(body, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Finds Pets by status
        /// </summary>
        /// <remarks>
        /// Multiple status values can be provided with comma separated strings
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='status'>
        /// Status values that need to be considered for filter
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The list of matching pets.</returns>
        public static async Task<IList<Pet>> FindPetsByStatusAsync(this ISwaggerPetstoreV2 operations, IList<string> status, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.FindPetsByStatusWithHttpMessagesAsync(status, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Finds Pets by tags
        /// </summary>
        /// <remarks>
        /// Multiple tags can be provided with comma separated strings. Use tag1, tag2,
        /// tag3 for testing.
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='tags'>
        /// Tags to filter by
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The list of matching pets.</returns>
        public static async Task<IList<Pet>> FindPetsByTagsAsync(this ISwaggerPetstoreV2 operations, IList<string> tags, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.FindPetsByTagsWithHttpMessagesAsync(tags, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Find pet by Id
        /// </summary>
        /// <remarks>
        /// Returns a single pet
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='petId'>
        /// Id of pet to return
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The requested pet.</returns>
        public static async Task<Pet> GetPetByIdAsync(this ISwaggerPetstoreV2 operations, long petId, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.GetPetByIdWithHttpMessagesAsync(petId, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Updates a pet in the store with form data
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='petId'>
        /// Id of pet that needs to be updated
        /// </param>
        /// <param name='fileContent'>
        /// File to upload.
        /// </param>
        /// <param name='fileName'>
        /// Updated name of the pet
        /// </param>
        /// <param name='status'>
        /// Updated status of the pet
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task UpdatePetWithFormAsync(this ISwaggerPetstoreV2 operations, long petId, System.IO.Stream fileContent, string fileName = default(string), string status = default(string), CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.UpdatePetWithFormWithHttpMessagesAsync(petId, fileContent, fileName, status, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Deletes a pet
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='petId'>
        /// Pet id to delete
        /// </param>
        /// <param name='apiKey'>
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task DeletePetAsync(this ISwaggerPetstoreV2 operations, long petId, string apiKey = "", CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.DeletePetWithHttpMessagesAsync(petId, apiKey, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Returns pet inventories by status
        /// </summary>
        /// <remarks>
        /// Returns a map of status codes to quantities
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>Map of status code to quantity.</returns>
        public static async Task<IDictionary<string, int?>> GetInventoryAsync(this ISwaggerPetstoreV2 operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.GetInventoryWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Place an order for a pet
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='body'>
        /// order placed for purchasing the pet
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The created order.</returns>
        public static async Task<Order> PlaceOrderAsync(this ISwaggerPetstoreV2 operations, Order body, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.PlaceOrderWithHttpMessagesAsync(body, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Find purchase order by Id
        /// </summary>
        /// <remarks>
        /// For valid response try integer IDs with value &lt;= 5 or &gt; 10. Other
        /// values will generate exceptions
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='orderId'>
        /// Id of pet that needs to be fetched
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The requested order.</returns>
        public static async Task<Order> GetOrderByIdAsync(this ISwaggerPetstoreV2 operations, string orderId, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.GetOrderByIdWithHttpMessagesAsync(orderId, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Delete purchase order by Id
        /// </summary>
        /// <remarks>
        /// For valid response try integer IDs with value &lt; 1000. Anything above
        /// 1000 or nonintegers will generate API errors
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='orderId'>
        /// Id of the order that needs to be deleted
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task DeleteOrderAsync(this ISwaggerPetstoreV2 operations, string orderId, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.DeleteOrderWithHttpMessagesAsync(orderId, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Create user
        /// </summary>
        /// <remarks>
        /// This can only be done by the logged in user.
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='body'>
        /// Created user object
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task CreateUserAsync(this ISwaggerPetstoreV2 operations, User body, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.CreateUserWithHttpMessagesAsync(body, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Creates list of users with given input array
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='body'>
        /// List of user object
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task CreateUsersWithArrayInputAsync(this ISwaggerPetstoreV2 operations, IList<User> body, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.CreateUsersWithArrayInputWithHttpMessagesAsync(body, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Creates list of users with given input array
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='body'>
        /// List of user object
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task CreateUsersWithListInputAsync(this ISwaggerPetstoreV2 operations, IList<User> body, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.CreateUsersWithListInputWithHttpMessagesAsync(body, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Logs user into the system
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='username'>
        /// The user name for login
        /// </param>
        /// <param name='password'>
        /// The password for login in clear text
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The session token returned by the service.</returns>
        public static async Task<string> LoginUserAsync(this ISwaggerPetstoreV2 operations, string username, string password, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.LoginUserWithHttpMessagesAsync(username, password, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Logs out current logged in user session
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task LogoutUserAsync(this ISwaggerPetstoreV2 operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.LogoutUserWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Get user by user name
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='username'>
        /// The name that needs to be fetched. Use user1 for testing.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The requested user.</returns>
        public static async Task<User> GetUserByNameAsync(this ISwaggerPetstoreV2 operations, string username, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.GetUserByNameWithHttpMessagesAsync(username, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Updated user
        /// </summary>
        /// <remarks>
        /// This can only be done by the logged in user.
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='username'>
        /// name that need to be deleted
        /// </param>
        /// <param name='body'>
        /// Updated user object
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task UpdateUserAsync(this ISwaggerPetstoreV2 operations, string username, User body, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.UpdateUserWithHttpMessagesAsync(username, body, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Delete user
        /// </summary>
        /// <remarks>
        /// This can only be done by the logged in user.
        /// </remarks>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='username'>
        /// The name that needs to be deleted
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task DeleteUserAsync(this ISwaggerPetstoreV2 operations, string username, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.DeleteUserWithHttpMessagesAsync(username, null, cancellationToken).ConfigureAwait(false);
        }
    }
}
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
//   contributors may be used to endorse or promote products derived from this
//   software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//

namespace NLog.UnitTests.Config
{
    using System.IO;
    using MyExtensionNamespace;
    using NLog.Filters;
    using NLog.Layouts;
    using NLog.Targets;
    using Xunit;

    /// <summary>
    /// Tests for registering NLog extensions (custom targets, layouts, layout
    /// renderers and filters) through the configuration &lt;extensions&gt; element,
    /// by assembly name, assembly file path, fully-qualified type name, and
    /// automatic loading.
    /// </summary>
    public class ExtensionTests : NLogTestBase
    {
        // the sample extension assembly, referenced by simple name...
        private string extensionAssemblyName1 = "SampleExtensions";
        // ...and by absolute path for the assemblyFile-based variants
        private string extensionAssemblyFullPath1 = Path.GetFullPath("SampleExtensions.dll");

        // Extensions registered via <add assemblyFile='...'/> should expose the
        // assembly's custom target, layout renderer, layout and filter.
        [Fact]
        public void ExtensionTest1()
        {
            // typeof(...) is never null; this line exists to force a compile-time
            // reference so the SampleExtensions assembly is present at runtime
            Assert.NotNull(typeof(FooLayout));

            var configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true'> <extensions> <add assemblyFile='" + this.extensionAssemblyFullPath1 + @"' /> </extensions> <targets> <target name='t' type='MyTarget' /> <target name='d1' type='Debug' layout='${foo}' /> <target name='d2' type='Debug'> <layout type='FooLayout' x='1'> </layout> </target> </targets> <rules> <logger name='*' writeTo='t'> <filters> <whenFoo x='44' action='Ignore' /> </filters> </logger> </rules> </nlog>");

            // custom target resolved by short name
            Target myTarget = configuration.FindTargetByName("t");
            Assert.Equal("MyExtensionNamespace.MyTarget", myTarget.GetType().FullName);

            // custom layout renderer resolved inside a simple layout
            var d1Target = (DebugTarget)configuration.FindTargetByName("d1");
            var layout = d1Target.Layout as SimpleLayout;
            Assert.NotNull(layout);
            Assert.Equal(1, layout.Renderers.Count);
            Assert.Equal("MyExtensionNamespace.FooLayoutRenderer", layout.Renderers[0].GetType().FullName);

            // custom layout resolved as a nested element
            var d2Target = (DebugTarget)configuration.FindTargetByName("d2");
            Assert.Equal("MyExtensionNamespace.FooLayout", d2Target.Layout.GetType().FullName);

            // custom filter resolved inside the logging rule
            Assert.Equal(1, configuration.LoggingRules[0].Filters.Count);
            Assert.Equal("MyExtensionNamespace.WhenFooFilter", configuration.LoggingRules[0].Filters[0].GetType().FullName);
        }

        // Same as ExtensionTest1, but registered via <add assembly='name'/> and
        // additionally exercising a condition-based filter alongside the custom one.
        [Fact]
        public void ExtensionTest2()
        {
            var configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true'> <extensions> <add assembly='" + this.extensionAssemblyName1 + @"' /> </extensions> <targets> <target name='t' type='MyTarget' /> <target name='d1' type='Debug' layout='${foo}' /> <target name='d2' type='Debug'> <layout type='FooLayout' x='1'> </layout> </target> </targets> <rules> <logger name='*' writeTo='t'> <filters> <whenFoo x='44' action='Ignore' /> <when condition='myrandom(10)==3' action='Log' /> </filters> </logger> </rules> </nlog>");

            Target myTarget = configuration.FindTargetByName("t");
            Assert.Equal("MyExtensionNamespace.MyTarget", myTarget.GetType().FullName);

            var d1Target = (DebugTarget)configuration.FindTargetByName("d1");
            var layout = d1Target.Layout as SimpleLayout;
            Assert.NotNull(layout);
            Assert.Equal(1, layout.Renderers.Count);
            Assert.Equal("MyExtensionNamespace.FooLayoutRenderer", layout.Renderers[0].GetType().FullName);

            var d2Target = (DebugTarget)configuration.FindTargetByName("d2");
            Assert.Equal("MyExtensionNamespace.FooLayout", d2Target.Layout.GetType().FullName);

            // both the custom filter and the condition filter (using the custom
            // 'myrandom' condition method) must be parsed
            Assert.Equal(2, configuration.LoggingRules[0].Filters.Count);
            Assert.Equal("MyExtensionNamespace.WhenFooFilter", configuration.LoggingRules[0].Filters[0].GetType().FullName);
            var cbf = configuration.LoggingRules[0].Filters[1] as ConditionBasedFilter;
            Assert.NotNull(cbf);
            Assert.Equal("(myrandom(10) == 3)", cbf.Condition.ToString());
        }

        // An <add prefix='...'/> registration requires all extension item names
        // to be qualified with that prefix.
        [Fact]
        public void ExtensionWithPrefixTest()
        {
            var configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true'> <extensions> <add prefix='myprefix' assemblyFile='" + this.extensionAssemblyFullPath1 + @"' /> </extensions> <targets> <target name='t' type='myprefix.MyTarget' /> <target name='d1' type='Debug' layout='${myprefix.foo}' /> <target name='d2' type='Debug'> <layout type='myprefix.FooLayout' x='1'> </layout> </target> </targets> <rules> <logger name='*' writeTo='t'> <filters> <myprefix.whenFoo x='44' action='Ignore' /> </filters> </logger> </rules> </nlog>");

            Target myTarget = configuration.FindTargetByName("t");
            Assert.Equal("MyExtensionNamespace.MyTarget", myTarget.GetType().FullName);

            var d1Target = (DebugTarget)configuration.FindTargetByName("d1");
            var layout = d1Target.Layout as SimpleLayout;
            Assert.NotNull(layout);
            Assert.Equal(1, layout.Renderers.Count);
            Assert.Equal("MyExtensionNamespace.FooLayoutRenderer", layout.Renderers[0].GetType().FullName);

            var d2Target = (DebugTarget)configuration.FindTargetByName("d2");
            Assert.Equal("MyExtensionNamespace.FooLayout", d2Target.Layout.GetType().FullName);

            Assert.Equal(1, configuration.LoggingRules[0].Filters.Count);
            Assert.Equal("MyExtensionNamespace.WhenFooFilter", configuration.LoggingRules[0].Filters[0].GetType().FullName);
        }

        // Extensions can also be registered one type at a time via
        // <add type='AssemblyQualifiedName'/>.
        [Fact]
        public void ExtensionTest4()
        {
            // force a compile-time reference to the extension assembly
            Assert.NotNull(typeof(FooLayout));

            var configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true'> <extensions> <add type='" + typeof(MyTarget).AssemblyQualifiedName + @"' /> <add type='" + typeof(FooLayout).AssemblyQualifiedName + @"' /> <add type='" + typeof(FooLayoutRenderer).AssemblyQualifiedName + @"' /> <add type='" + typeof(WhenFooFilter).AssemblyQualifiedName + @"' /> </extensions> <targets> <target name='t' type='MyTarget' /> <target name='d1' type='Debug' layout='${foo}' /> <target name='d2' type='Debug'> <layout type='FooLayout' x='1'> </layout> </target> </targets> <rules> <logger name='*' writeTo='t'> <filters> <whenFoo x='44' action='Ignore' /> </filters> </logger> </rules> </nlog>");

            Target myTarget = configuration.FindTargetByName("t");
            Assert.Equal("MyExtensionNamespace.MyTarget", myTarget.GetType().FullName);

            var d1Target = (DebugTarget)configuration.FindTargetByName("d1");
            var layout = d1Target.Layout as SimpleLayout;
            Assert.NotNull(layout);
            Assert.Equal(1, layout.Renderers.Count);
            Assert.Equal("MyExtensionNamespace.FooLayoutRenderer", layout.Renderers[0].GetType().FullName);

            var d2Target = (DebugTarget)configuration.FindTargetByName("d2");
            Assert.Equal("MyExtensionNamespace.FooLayout", d2Target.Layout.GetType().FullName);

            Assert.Equal(1, configuration.LoggingRules[0].Filters.Count);
            Assert.Equal("MyExtensionNamespace.WhenFooFilter", configuration.LoggingRules[0].Filters[0].GetType().FullName);
        }

        // The <extensions> element should work even when it appears AFTER the
        // targets/rules that use the extension types.
        [Fact]
        public void ExtensionTest_extensions_not_top_and_used()
        {
            // force a compile-time reference to the extension assembly
            Assert.NotNull(typeof(FooLayout));

            var configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true'> <targets> <target name='t' type='MyTarget' /> <target name='d1' type='Debug' layout='${foo}' /> <target name='d2' type='Debug'> <layout type='FooLayout' x='1'> </layout> </target> </targets> <rules> <logger name='*' writeTo='t'> <filters> <whenFoo x='44' action='Ignore' /> </filters> </logger> </rules> <extensions> <add assemblyFile='" + this.extensionAssemblyFullPath1 + @"' /> </extensions> </nlog>");

            Target myTarget = configuration.FindTargetByName("t");
            Assert.Equal("MyExtensionNamespace.MyTarget", myTarget.GetType().FullName);

            var d1Target = (DebugTarget)configuration.FindTargetByName("d1");
            var layout = d1Target.Layout as SimpleLayout;
            Assert.NotNull(layout);
            Assert.Equal(1, layout.Renderers.Count);
            Assert.Equal("MyExtensionNamespace.FooLayoutRenderer", layout.Renderers[0].GetType().FullName);

            var d2Target = (DebugTarget)configuration.FindTargetByName("d2");
            Assert.Equal("MyExtensionNamespace.FooLayout", d2Target.Layout.GetType().FullName);

            Assert.Equal(1, configuration.LoggingRules[0].Filters.Count);
            Assert.Equal("MyExtensionNamespace.WhenFooFilter", configuration.LoggingRules[0].Filters[0].GetType().FullName);
        }

        // With throwConfigExceptions='true', an unknown type name must fail loudly.
        [Fact]
        public void ExtensionShouldThrowNLogConfiguratonExceptionWhenRegisteringInvalidType()
        {
            var configXml = @" <nlog throwConfigExceptions='true'> <extensions> <add type='some_type_that_doesnt_exist'/> </extensions> </nlog>";
            Assert.Throws<NLogConfigurationException>(()=>CreateConfigurationFromString(configXml));
        }

        // With throwConfigExceptions='true', an unknown assembly name must fail loudly.
        [Fact]
        public void ExtensionShouldThrowNLogConfiguratonExceptionWhenRegisteringInvalidAssembly()
        {
            var configXml = @" <nlog throwConfigExceptions='true'> <extensions> <add assembly='some_assembly_that_doesnt_exist'/> </extensions> </nlog>";
            Assert.Throws<NLogConfigurationException>(() => CreateConfigurationFromString(configXml));
        }

        // With throwConfigExceptions='true', a missing assembly file must fail loudly.
        [Fact]
        public void ExtensionShouldThrowNLogConfiguratonExceptionWhenRegisteringInvalidAssemblyFile()
        {
            var configXml = @" <nlog throwConfigExceptions='true'> <extensions> <add assemblyfile='some_file_that_doesnt_exist'/> </extensions> </nlog>";
            Assert.Throws<NLogConfigurationException>(() => CreateConfigurationFromString(configXml));
        }

        // With throwConfigExceptions='false', the same registration errors must be
        // swallowed and configuration loading must still succeed.
        [Fact]
        public void ExtensionShouldNotThrowWhenRegisteringInvalidTypeIfThrowConfigExceptionsFalse()
        {
            var configXml = @" <nlog throwConfigExceptions='false'> <extensions> <add type='some_type_that_doesnt_exist'/> <add assembly='NLog'/> </extensions> </nlog>";
            CreateConfigurationFromString(configXml);
        }

        [Fact]
        public void ExtensionShouldNotThrowWhenRegisteringInvalidAssemblyIfThrowConfigExceptionsFalse()
        {
            var configXml = @" <nlog throwConfigExceptions='false'> <extensions> <add assembly='some_assembly_that_doesnt_exist'/> </extensions> </nlog>";
            CreateConfigurationFromString(configXml);
        }

        [Fact]
        public void ExtensionShouldNotThrowWhenRegisteringInvalidAssemblyFileIfThrowConfigExceptionsFalse()
        {
            var configXml = @" <nlog throwConfigExceptions='false'> <extensions> <add assemblyfile='some_file_that_doesnt_exist'/> </extensions> </nlog>";
            CreateConfigurationFromString(configXml);
        }

        // A custom xml namespace prefix on the target type should be tolerated
        // when resolving built-in types.
        [Fact]
        public void CustomXmlNamespaceTest()
        {
            var configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true' xmlns:foo='http://bar'> <targets> <target name='d' type='foo:Debug' /> </targets> </nlog>");

            var d1Target = (DebugTarget)configuration.FindTargetByName("d");
            Assert.NotNull(d1Target);
        }

        // Assemblies following the NLog extension dll naming convention should be
        // picked up without any explicit <extensions> registration.
        [Fact]
        public void Extension_should_be_auto_loaded_when_following_NLog_dll_format()
        {
            var configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true'> <targets> <target name='t' type='AutoLoadTarget' /> </targets> <rules> <logger name='*' writeTo='t'> </logger> </rules> </nlog>");

            var autoLoadedTarget = configuration.FindTargetByName("t");
            Assert.Equal("NLogAutloadExtension.AutoLoadTarget", autoLoadedTarget.GetType().FullName);
        }
    }
}
using System;
using System.IO;
using System.Drawing;
using System.Globalization;

namespace BlueOnion
{
    /// <summary>
    /// Serializable class to save program state. Persists to a simple
    /// "key=value" line format via <see cref="Serialize"/> and
    /// <see cref="Deserialize"/>.
    /// </summary>
    internal class Settings : ICloneable, IDisposable
    {
        private bool disposed;

        // Key names used in the settings file.
        // NOTE(review): the VALUES of colorTitleBackS and colorTitleForeS are
        // swapped ("..._fore" is stored under the Back constant and vice
        // versa). Serialize and Deserialize both use the same constants, so
        // round-tripping is self-consistent; the values are deliberately kept
        // as-is so existing settings files remain readable.
        private const string topMostS = "top_most";
        private const string borderS = "border";
        private const string showWeekNumbersS = "show_week_numbers";
        private const string showTodayS = "show_today";
        private const string showTodayCircleS = "show_today_circle";
        private const string opacityS = "opacity";
        private const string positionS = "position";
        private const string firstDayS = "first_day";
        private const string fontS = "font";
        private const string colorBackS = "color_back";
        private const string colorForeS = "color_fore";
        private const string colorTitleBackS = "color_title_fore";
        private const string colorTitleForeS = "color_title_back";
        private const string colorTrailingForeS = "color_trailing_fore";
        private const string colorHighlightDayForeS = "color_highlight_day_fore";
        private const string colorGridlinesS = "color_gridlines";
        private const string trayIconS = "tray_icon";
        private const string gridlinesS = "gridlines";
        private const string colorWeekdayForeS = "color_weekday_fore";
        private const string colorWeekdayBackS = "color_weekday_back";
        private const string colorWeeknumberForeS = "color_weeknumber_fore";
        private const string colorWeeknumberBackS = "color_weeknumber_back";
        private const string colorWeekdayBarS = "color_weekday_bar";
        private const string startMonthJanuaryS = "start_month_january";
        private const string startMonthPreviousS = "start_month_previous";

        // Backing fields with their default values.
        private bool topMost;
        private Calendar.BorderStyle border = Calendar.BorderStyle.Thick;
        private bool showWeekNumbers;
        private bool showToday = true;
        private bool showTodayCircle = true;
        private double opacity = 1.0;
        private Rectangle position = new Rectangle(10, 10, 0, 0);
        private System.DayOfWeek firstDay = System.DayOfWeek.Sunday;
        private Font font = new Font("Microsoft Sans Serif", 8.25f);
        private Color colorFore = SystemColors.WindowText;
        private Color colorBack = SystemColors.Window;
        private Color colorTitleFore = SystemColors.ActiveCaptionText;
        private Color colorTitleBack = SystemColors.ActiveCaption;
        private Color colorTrailingFore = SystemColors.AppWorkspace;
        private Color colorHighlightDayFore = Color.DarkRed;
        private Color colorGridlines = SystemColors.GrayText;
        private Color colorWeekdayFore = SystemColors.ActiveCaption;
        private Color colorWeekdayBack = SystemColors.ActiveCaptionText;
        private Color colorWeeknumberFore = SystemColors.ActiveCaption;
        private Color colorWeeknumberBack = SystemColors.ActiveCaptionText;
        private Color colorWeekdayBar = SystemColors.WindowText;
        private bool trayIcon;
        private bool gridlines;
        private bool startMonthJanuary;
        private int startMonthPrevious;

        // ---------------------------------------------------------------------
        public Settings()
        {
            startMonthPrevious = 0;
        }

        // ---------------------------------------------------------------------
        public bool Topmost
        {
            get { return topMost; }
            set { topMost = value; }
        }

        // ---------------------------------------------------------------------
        public Calendar.BorderStyle Border
        {
            get { return border; }
            set { border = value; }
        }

        // ---------------------------------------------------------------------
        public bool ShowWeekNumbers
        {
            get { return showWeekNumbers; }
            set { showWeekNumbers = value; }
        }

        // ---------------------------------------------------------------------
        public bool ShowToday
        {
            get { return showToday; }
            set { showToday = value; }
        }

        // ---------------------------------------------------------------------
        public bool ShowTodayCircle
        {
            get { return showTodayCircle; }
            set { showTodayCircle = value; }
        }

        // ---------------------------------------------------------------------
        public double Opacity
        {
            get { return opacity; }
            set { opacity = value; }
        }

        // ---------------------------------------------------------------------
        public Rectangle Position
        {
            get { return position; }
            set { position = value; }
        }

        // ---------------------------------------------------------------------
        public System.DayOfWeek FirstDay
        {
            get { return firstDay; }
            set { firstDay = value; }
        }

        // ---------------------------------------------------------------------
        // NOTE(review): the setter replaces the font without disposing the old
        // one; callers own the previous Font's lifetime.
        public Font Font
        {
            get { return font; }
            set { font = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorFore
        {
            get { return colorFore; }
            set { colorFore = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorBack
        {
            get { return colorBack; }
            set { colorBack = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorTitleFore
        {
            get { return colorTitleFore; }
            set { colorTitleFore = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorTitleBack
        {
            get { return colorTitleBack; }
            set { colorTitleBack = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorTrailingFore
        {
            get { return colorTrailingFore; }
            set { colorTrailingFore = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorHighlightDayFore
        {
            get { return colorHighlightDayFore; }
            set { colorHighlightDayFore = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorGridlines
        {
            get { return colorGridlines; }
            set { colorGridlines = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorWeekdayFore
        {
            get { return colorWeekdayFore; }
            set { colorWeekdayFore = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorWeekdayBack
        {
            get { return colorWeekdayBack; }
            set { colorWeekdayBack = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorWeeknumberFore
        {
            get { return colorWeeknumberFore; }
            set { colorWeeknumberFore = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorWeeknumberBack
        {
            get { return colorWeeknumberBack; }
            set { colorWeeknumberBack = value; }
        }

        // ---------------------------------------------------------------------
        public Color ColorWeekdayBar
        {
            get { return colorWeekdayBar; }
            set { colorWeekdayBar = value; }
        }

        // ---------------------------------------------------------------------
        public bool TrayIcon
        {
            get { return trayIcon; }
            set { trayIcon = value; }
        }

        // ---------------------------------------------------------------------
        public bool Gridlines
        {
            get { return this.gridlines; }
            set { this.gridlines = value; }
        }

        // ---------------------------------------------------------------------
        public bool StartMonthJanuary
        {
            get { return this.startMonthJanuary; }
            set { this.startMonthJanuary = value; }
        }

        // ---------------------------------------------------------------------
        public int StartMonthPrevious
        {
            get { return this.startMonthPrevious; }
            set { this.startMonthPrevious = value; }
        }

        // ---------------------------------------------------------------------
        /// <summary>
        /// Creates a copy of this instance.
        /// </summary>
        /// <returns>A copy that shares no mutable state with this instance.</returns>
        public object Clone()
        {
            // FIX: the previous comment claimed everything is a value type, but
            // Font is a reference type.  A plain MemberwiseClone would share
            // one Font between both instances, so disposing either Settings
            // object would destroy the font still used by the other.  Clone the
            // font explicitly so each instance owns its own copy.
            Settings clone = (Settings)this.MemberwiseClone();
            if (this.font != null)
            {
                clone.font = (Font)this.font.Clone();
            }

            return clone;
        }

        // ---------------------------------------------------------------------
        /// <summary>
        /// Computes a hash over all persisted fields.  Kept byte-for-byte
        /// compatible with the original algorithm; equal instances (per
        /// <see cref="Equals(object)"/>) produce equal hashes because the
        /// value is a deterministic function of the same fields.
        /// </summary>
        public override int GetHashCode()
        {
            int counter = 10000;
            return (this.topMost.GetHashCode() + counter++) ^
                (this.border.GetHashCode() + counter++) ^
                (this.showWeekNumbers.GetHashCode() + counter++) ^
                (this.showToday.GetHashCode() + counter++) ^
                (this.showTodayCircle.GetHashCode() + counter++) ^
                this.opacity.GetHashCode() ^
                this.position.GetHashCode() ^
                this.firstDay.GetHashCode() ^
                this.font.GetHashCode() ^
                this.colorFore.GetHashCode() ^
                this.colorBack.GetHashCode() ^
                this.colorTitleFore.GetHashCode() ^
                this.colorTitleBack.GetHashCode() ^
                this.colorTrailingFore.GetHashCode() ^
                this.colorHighlightDayFore.GetHashCode() ^
                this.colorGridlines.GetHashCode() ^
                this.colorWeekdayFore.GetHashCode() ^
                this.colorWeekdayBack.GetHashCode() ^
                this.colorWeeknumberFore.GetHashCode() ^
                this.colorWeeknumberBack.GetHashCode() ^
                this.colorWeekdayBar.GetHashCode() ^
                (this.trayIcon.GetHashCode() + counter++) ^
                (this.gridlines.GetHashCode() + counter++) ^
                (this.startMonthJanuary.GetHashCode() + counter++) ^
                this.StartMonthPrevious.GetHashCode();
        }

        // ---------------------------------------------------------------------
        /// <summary>
        /// Value equality over every persisted field.
        /// </summary>
        public override bool Equals(object obj)
        {
            // FIX: the previous implementation compared GetHashCode() values,
            // which reports two different settings as equal whenever their
            // hashes collide.  Compare the fields directly instead.
            Settings other = obj as Settings;
            if (object.ReferenceEquals(other, null))
            {
                return false;
            }

            return this.topMost == other.topMost &&
                this.border == other.border &&
                this.showWeekNumbers == other.showWeekNumbers &&
                this.showToday == other.showToday &&
                this.showTodayCircle == other.showTodayCircle &&
                this.opacity == other.opacity &&
                this.position == other.position &&
                this.firstDay == other.firstDay &&
                object.Equals(this.font, other.font) &&
                this.colorFore == other.colorFore &&
                this.colorBack == other.colorBack &&
                this.colorTitleFore == other.colorTitleFore &&
                this.colorTitleBack == other.colorTitleBack &&
                this.colorTrailingFore == other.colorTrailingFore &&
                this.colorHighlightDayFore == other.colorHighlightDayFore &&
                this.colorGridlines == other.colorGridlines &&
                this.colorWeekdayFore == other.colorWeekdayFore &&
                this.colorWeekdayBack == other.colorWeekdayBack &&
                this.colorWeeknumberFore == other.colorWeeknumberFore &&
                this.colorWeeknumberBack == other.colorWeeknumberBack &&
                this.colorWeekdayBar == other.colorWeekdayBar &&
                this.trayIcon == other.trayIcon &&
                this.gridlines == other.gridlines &&
                this.startMonthJanuary == other.startMonthJanuary &&
                this.startMonthPrevious == other.startMonthPrevious;
        }

        // ---------------------------------------------------------------------
        public static bool operator ==(Settings lhs, Settings rightHandOperand)
        {
            // FIX: the previous implementation dereferenced lhs directly and
            // threw NullReferenceException for "null == x".  Handle nulls via
            // ReferenceEquals (which does not re-enter this operator).
            if (object.ReferenceEquals(lhs, rightHandOperand))
            {
                return true;
            }

            if (object.ReferenceEquals(lhs, null) ||
                object.ReferenceEquals(rightHandOperand, null))
            {
                return false;
            }

            return lhs.Equals(rightHandOperand);
        }

        // ---------------------------------------------------------------------
        public static bool operator !=(Settings lhs, Settings rightHandOperand)
        {
            return !(lhs == rightHandOperand);
        }

        // ---------------------------------------------------------------------
        /// <summary>
        /// Writes every setting as a "key=value" line.  Numeric values use the
        /// invariant culture so files are machine-readable regardless of the
        /// user's locale.
        /// </summary>
        /// <param name="textWriter">Destination writer; not closed here.</param>
        public void Serialize(TextWriter textWriter)
        {
            textWriter.WriteLine(topMostS + "=" + topMost.ToString());
            textWriter.WriteLine(borderS + "=" + border.ToString());
            textWriter.WriteLine(showWeekNumbersS + "=" + showWeekNumbers.ToString());
            textWriter.WriteLine(showTodayS + "=" + showToday.ToString());
            textWriter.WriteLine(showTodayCircleS + "=" + showTodayCircle.ToString());
            textWriter.WriteLine(opacityS + "=" + opacity.ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(positionS + "=" + position.ToString());
            textWriter.WriteLine(firstDayS + "=" + firstDay.ToString());
            textWriter.WriteLine(fontS + "=" + Settings.FontToString(font));
            textWriter.WriteLine(colorForeS + "=" + colorFore.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorBackS + "=" + colorBack.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorTitleForeS + "=" + colorTitleFore.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorTitleBackS + "=" + colorTitleBack.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorTrailingForeS + "=" + colorTrailingFore.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorHighlightDayForeS + "=" + colorHighlightDayFore.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorGridlinesS + "=" + colorGridlines.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorWeekdayForeS + "=" + colorWeekdayFore.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorWeekdayBackS + "=" + colorWeekdayBack.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorWeeknumberForeS + "=" + colorWeeknumberFore.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorWeeknumberBackS + "=" + colorWeeknumberBack.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(colorWeekdayBarS + "=" + colorWeekdayBar.ToArgb().ToString(CultureInfo.InvariantCulture));
            textWriter.WriteLine(trayIconS + "=" + trayIcon.ToString());
            textWriter.WriteLine(gridlinesS + "=" + gridlines.ToString());
            textWriter.WriteLine(startMonthJanuaryS + "=" + startMonthJanuary.ToString());
            textWriter.WriteLine(startMonthPreviousS + "=" + startMonthPrevious.ToString(CultureInfo.InvariantCulture));
        }

        // ---------------------------------------------------------------------
        /// <summary>
        /// Reads a settings stream produced by <see cref="Serialize"/>.
        /// Unknown keys are ignored; defaults apply to missing keys.
        /// </summary>
        /// <param name="textReader">Source reader; read to end, not closed.</param>
        /// <returns>A populated <see cref="Settings"/> instance.</returns>
        public static Settings Deserialize(TextReader textReader)
        {
            string line;
            char[] delimiter = { '=' };
            Settings settings = new Settings();

            while ((line = textReader.ReadLine()) != null)
            {
                string[] nameValue = line.Split(delimiter, 2);

                // FIX: previously a line without '=' caused an
                // IndexOutOfRangeException when nameValue[1] was read below.
                if (nameValue.Length != 2)
                {
                    continue;
                }

                switch (nameValue[0])
                {
                    case topMostS:
                        settings.Topmost = bool.Parse(nameValue[1]);
                        break;

                    case borderS:
                        try
                        {
                            settings.Border = (Calendar.BorderStyle)Enum.Parse
                                (typeof(Calendar.BorderStyle), nameValue[1], true);
                        }
                        catch
                        {
                            // Unrecognized border names fall back to the default.
                            settings.Border = Calendar.BorderStyle.Thick;
                        }

                        break;

                    case showWeekNumbersS:
                        settings.ShowWeekNumbers = bool.Parse(nameValue[1]);
                        break;

                    case showTodayS:
                        settings.ShowToday = bool.Parse(nameValue[1]);
                        break;

                    case showTodayCircleS:
                        settings.ShowTodayCircle = bool.Parse(nameValue[1]);
                        break;

                    case opacityS:
                        // FIX: the field is a double but was parsed with
                        // float.Parse, losing precision on round-trips.
                        settings.opacity = double.Parse(nameValue[1],
                            NumberStyles.Float, CultureInfo.InvariantCulture);
                        break;

                    case positionS:
                        // Parses Rectangle.ToString() output of the form
                        // "{X=10,Y=10,Width=0,Height=0}".
                        char[] delimiters = { '=', ',', '}' };
                        string[] coords = nameValue[1].Split(delimiters);
                        settings.position.X = EventsCollection.ToInt(coords[1]);
                        settings.position.Y = EventsCollection.ToInt(coords[3]);
                        settings.position.Width = EventsCollection.ToInt(coords[5]);
                        settings.position.Height = EventsCollection.ToInt(coords[7]);
                        break;

                    case firstDayS:
                        settings.FirstDay = (System.DayOfWeek)
                            Enum.Parse(typeof(System.DayOfWeek), nameValue[1]);
                        break;

                    case fontS:
                        settings.font = Settings.StringToFont(nameValue[1]);
                        break;

                    case colorForeS:
                        settings.colorFore = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorBackS:
                        settings.colorBack = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorTitleForeS:
                        settings.colorTitleFore = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorTitleBackS:
                        settings.colorTitleBack = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorTrailingForeS:
                        settings.colorTrailingFore = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorHighlightDayForeS:
                        settings.colorHighlightDayFore = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorGridlinesS:
                        settings.colorGridlines = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorWeekdayForeS:
                        settings.ColorWeekdayFore = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorWeekdayBackS:
                        settings.ColorWeekdayBack = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorWeeknumberForeS:
                        settings.ColorWeeknumberFore = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorWeeknumberBackS:
                        settings.ColorWeeknumberBack = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case colorWeekdayBarS:
                        settings.ColorWeekdayBar = Color.FromArgb(EventsCollection.ToInt(nameValue[1]));
                        break;

                    case trayIconS:
                        settings.trayIcon = bool.Parse(nameValue[1]);
                        break;

                    case gridlinesS:
                        settings.gridlines = bool.Parse(nameValue[1]);
                        break;

                    case startMonthJanuaryS:
                        settings.startMonthJanuary = bool.Parse(nameValue[1]);
                        break;

                    case startMonthPreviousS:
                        settings.StartMonthPrevious = int.Parse(nameValue[1], CultureInfo.InvariantCulture);
                        break;
                }
            }

            return settings;
        }

        // ---------------------------------------------------------------------
        /// <summary>
        /// Parses "name, size[, style]" as produced by <see cref="FontToString"/>;
        /// falls back to the default font when the string is malformed.
        /// </summary>
        private static Font StringToFont(string font_string)
        {
            string[] font = font_string.Split(',');

            if (font.Length >= 3)
            {
                // Enum.Parse tolerates the leading space left by FontToString.
                return new Font(font[0],
                    Convert.ToSingle(font[1], CultureInfo.InvariantCulture),
                    (FontStyle)Enum.Parse(typeof(FontStyle), font[2]));
            }

            if (font.Length == 2)
            {
                return new Font(font[0],
                    Convert.ToSingle(font[1], CultureInfo.InvariantCulture));
            }

            return new Font("Microsoft Sans Serif", 8.25f);
        }

        // ---------------------------------------------------------------------
        /// <summary>
        /// Formats a font as "name, size, style" with an invariant-culture size.
        /// </summary>
        public static string FontToString(Font font)
        {
            string fontString = font.Name + ", " +
                font.SizeInPoints.ToString(CultureInfo.InvariantCulture) + ", " +
                font.Style;
            return fontString;
        }

        #region IDisposable Members
        // ---------------------------------------------------------------------
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        // ---------------------------------------------------------------------
        /// <param name="disposing">True when called from Dispose(); false from the finalizer.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (this.disposed == false)
            {
                this.disposed = true;

                if (disposing == true)
                {
                    // Font is the only disposable resource owned here.
                    this.font.Dispose();
                }
            }
        }

        // ---------------------------------------------------------------------
        ~Settings()
        {
            Dispose(false);
        }
        #endregion
    }
}
/* Genuine Channels product.
 *
 * Copyright (c) 2002-2007 Dmitry Belikov. All rights reserved.
 *
 * This source code comes under and must be used and distributed according to the Genuine Channels license agreement.
 */

using System;
using System.Collections;
using System.IO;
using System.Runtime.Remoting.Channels;

using Belikov.GenuineChannels.Connection;
using Belikov.GenuineChannels.DotNetRemotingLayer;
using Belikov.GenuineChannels.Logbook;
using Belikov.GenuineChannels.Messaging;
using Belikov.GenuineChannels.TransportContext;

namespace Belikov.GenuineChannels.DirectExchange
{
    /// <summary>
    /// Provides methods for using and publishing remoted objects and stream handlers.
    /// Server services can be registered per Transport Context (instance methods) or
    /// process-wide (static "global" methods); incoming requests are matched first
    /// against the local collection, then against the global one.
    /// </summary>
    public class DirectExchangeManager : MarshalByRefObject
    {
        /// <summary>
        /// Constructs an instance of the DirectExchangeManager class.
        /// </summary>
        /// <param name="iTransportContext">The Transport Context.</param>
        public DirectExchangeManager(ITransportContext iTransportContext)
        {
            this.ITransportContext = iTransportContext;
        }

        /// <summary>
        /// The Transport Context.
        /// </summary>
        public ITransportContext ITransportContext;

        #region -- Server local service support ----------------------------------------------------

        /// <summary>
        /// Associates the server service provider with the specified entry.
        /// NOTE(review): the read-then-write pair is not atomic; Hashtable.Synchronized
        /// only protects individual operations, so two concurrent registrations of the
        /// same entry may both report a null previous value — confirm whether callers care.
        /// </summary>
        /// <param name="entryName">The name of the entry.</param>
        /// <param name="iServerServiceEntry">The server service provider.</param>
        /// <returns>The server service provider previously associated with this entry or a null reference.</returns>
        public IServerServiceEntry RegisterServerService(string entryName, IServerServiceEntry iServerServiceEntry)
        {
            IServerServiceEntry previousServerServiceEntry = this._services[entryName] as IServerServiceEntry;
            this._services[entryName] = iServerServiceEntry;
            return previousServerServiceEntry;
        }

        /// <summary>
        /// Unregisters the server service entry.
        /// </summary>
        /// <param name="entryName">The name of the entry.</param>
        /// <returns>The server service provider previously associated with this entry or a null reference.</returns>
        public IServerServiceEntry UnregisterServerService(string entryName)
        {
            IServerServiceEntry previousServerServiceEntry = this._services[entryName] as IServerServiceEntry;
            this._services.Remove(entryName);
            return previousServerServiceEntry;
        }

        /// <summary>
        /// Gets a service associated with the specified entry name.
        /// </summary>
        /// <param name="entryName">The name of the entry.</param>
        /// <returns>The service associated with the specified entry name.</returns>
        public IServerServiceEntry GetServerService(string entryName)
        {
            return this._services[entryName] as IServerServiceEntry;
        }

        /// <summary>
        /// Returns a list containing the names of all registered server service entries.
        /// Used for debugging purposes.
        /// </summary>
        /// <returns>A list containing the names of all registered server service entries.</returns>
        public string[] GetListOfRegisteredServices()
        {
            // Lock so Count and CopyTo see a consistent snapshot.
            lock (this._services.SyncRoot)
            {
                string[] services = new string[this._services.Count];
                this._services.Keys.CopyTo(services, 0);
                return services;
            }
        }

        /// <summary>
        /// All registered services.
        /// </summary>
        private Hashtable _services = Hashtable.Synchronized(new Hashtable());

        #endregion

        #region -- Server global service support ---------------------------------------------------

        /// <summary>
        /// Associates the global server service provider with the specified entry.
        /// NOTE(review): same non-atomic read-then-write caveat as RegisterServerService.
        /// </summary>
        /// <param name="entryName">The name of the entry.</param>
        /// <param name="iServerServiceEntry">The server service provider.</param>
        /// <returns>The server service provider previously associated with this entry or a null reference.</returns>
        public static IServerServiceEntry RegisterGlobalServerService(string entryName, IServerServiceEntry iServerServiceEntry)
        {
            IServerServiceEntry previousServerServiceEntry = _globalServices[entryName] as IServerServiceEntry;
            _globalServices[entryName] = iServerServiceEntry;
            return previousServerServiceEntry;
        }

        /// <summary>
        /// Unregisters the global server service entry.
        /// </summary>
        /// <param name="entryName">The name of the entry.</param>
        /// <returns>The global server service provider previously associated with this entry or a null reference.</returns>
        public static IServerServiceEntry UnregisterGlobalServerService(string entryName)
        {
            IServerServiceEntry previousServerServiceEntry = _globalServices[entryName] as IServerServiceEntry;
            _globalServices.Remove(entryName);
            return previousServerServiceEntry;
        }

        /// <summary>
        /// Returns a list containing the names of all registered global server service entries.
        /// Used for debugging purposes.
        /// </summary>
        /// <returns>A list containing the names of all registered global server service entries.</returns>
        public static string[] GetListOfGlobalRegisteredServices()
        {
            // Lock so Count and CopyTo see a consistent snapshot.
            lock (_globalServices.SyncRoot)
            {
                string[] services = new string[_globalServices.Count];
                _globalServices.Keys.CopyTo(services, 0);
                return services;
            }
        }

        /// <summary>
        /// All registered services.
        /// </summary>
        private static Hashtable _globalServices = Hashtable.Synchronized(new Hashtable());

        #endregion

        #region -- Direct sending support ----------------------------------------------------------

        /// <summary>
        /// Sends the content to the specified remote host and returns a response sent by the remote
        /// host in reply.  Blocks the calling thread until the response arrives or the
        /// message's deadline expires.
        /// </summary>
        /// <param name="destination">The remote host.</param>
        /// <param name="serviceName">The name of the service.</param>
        /// <param name="content">The content.</param>
        /// <returns>The response.</returns>
        public Stream SendSync(HostInformation destination, string serviceName, Stream content)
        {
            // create the message
            Message message = new Message(this.ITransportContext, destination, 0, new TransportHeaders(), content);
            message.IsSynchronous = true;
            message.GenuineMessageType = GenuineMessageType.ExternalStreamConsumer;
            message.DestinationMarshalByRef = serviceName;

            // register the response catcher BEFORE sending so a fast reply
            // cannot arrive without a processor to receive it
            SyncResponseProcessorWithEvent syncResponseProcessorWithEvent = new SyncResponseProcessorWithEvent(message);
            this.ITransportContext.IIncomingStreamHandler.RegisterResponseProcessor(message.MessageId, syncResponseProcessorWithEvent);

            // and send the message
            this.ITransportContext.ConnectionManager.Send(message);

            // deadline already expired before the wait could even start
            int timeSpanInMilliseconds = GenuineUtility.GetMillisecondsLeft(message.FinishTime);
            if (timeSpanInMilliseconds <= 0)
                throw GenuineExceptions.Get_Send_ServerDidNotReply();

            // wait for the response or the remaining portion of the timeout
            if (! syncResponseProcessorWithEvent.IsReceivedEvent.WaitOne(timeSpanInMilliseconds, false))
                throw GenuineExceptions.Get_Send_ServerDidNotReply();

            // a response arrived but carried a remote exception - rethrow it locally
            if (syncResponseProcessorWithEvent.DispatchedException != null)
                throw OperationException.WrapException(syncResponseProcessorWithEvent.DispatchedException);

            return syncResponseProcessorWithEvent.Response.Stream;
        }

        /// <summary>
        /// Sends the content to the specified remote host and redirects a response to the callback.
        /// </summary>
        /// <param name="destination">The remote host.</param>
        /// <param name="serviceName">The name of the service.</param>
        /// <param name="content">The content.</param>
        /// <param name="streamResponseEventHandler">The callback.</param>
        public void SendAsync(HostInformation destination, string serviceName, Stream content, StreamResponseEventHandler streamResponseEventHandler)
        {
            // convenience overload: no caller-supplied tag
            this.SendAsync(destination, serviceName, content, streamResponseEventHandler, null);
        }

        /// <summary>
        /// Sends the content to the specified remote host and redirects a response to the callback.
        /// </summary>
        /// <param name="destination">The remote host.</param>
        /// <param name="serviceName">The name of the service.</param>
        /// <param name="content">The content.</param>
        /// <param name="streamResponseEventHandler">The callback.</param>
        /// <param name="tag">The object that contains data about this invocation.</param>
        public void SendAsync(HostInformation destination, string serviceName, Stream content, StreamResponseEventHandler streamResponseEventHandler, object tag)
        {
            // create the message
            Message message = new Message(this.ITransportContext, destination, 0, new TransportHeaders(), content);
            message.IsSynchronous = false;
            message.GenuineMessageType = GenuineMessageType.ExternalStreamConsumer;
            message.DestinationMarshalByRef = serviceName;
            message.Tag = tag;

            // register the response catcher (delegate-based variant)
            UniversalAsyncResponseProcessor universalAsyncResponseProcessor = new UniversalAsyncResponseProcessor(message, streamResponseEventHandler, null);
            this.ITransportContext.IIncomingStreamHandler.RegisterResponseProcessor(message.MessageId, universalAsyncResponseProcessor);

            // and send the message
            this.ITransportContext.ConnectionManager.Send(message);
        }

        /// <summary>
        /// Sends the content to the specified remote host and redirects a response to the callback.
        /// </summary>
        /// <param name="destination">The remote host.</param>
        /// <param name="serviceName">The name of the service.</param>
        /// <param name="content">The content.</param>
        /// <param name="iStreamResponseHandler">The response handler.</param>
        public void SendAsync(HostInformation destination, string serviceName, Stream content, IStreamResponseHandler iStreamResponseHandler)
        {
            // convenience overload: no caller-supplied tag
            this.SendAsync(destination, serviceName, content, iStreamResponseHandler, null);
        }

        /// <summary>
        /// Sends the content to the specified remote host and redirects a response to the callback.
        /// </summary>
        /// <param name="destination">The remote host.</param>
        /// <param name="serviceName">The name of the service.</param>
        /// <param name="content">The content.</param>
        /// <param name="iStreamResponseHandler">The response handler.</param>
        /// <param name="tag">The object that contains data about this invocation.</param>
        public void SendAsync(HostInformation destination, string serviceName, Stream content, IStreamResponseHandler iStreamResponseHandler, object tag)
        {
            // create the message
            Message message = new Message(this.ITransportContext, destination, 0, new TransportHeaders(), content);
            message.IsSynchronous = false;
            message.GenuineMessageType = GenuineMessageType.ExternalStreamConsumer;
            message.DestinationMarshalByRef = serviceName;
            message.Tag = tag;

            // register the response catcher (interface-based variant)
            UniversalAsyncResponseProcessor universalAsyncResponseProcessor = new UniversalAsyncResponseProcessor(message, null, iStreamResponseHandler);
            this.ITransportContext.IIncomingStreamHandler.RegisterResponseProcessor(message.MessageId, universalAsyncResponseProcessor);

            // and send the message
            this.ITransportContext.ConnectionManager.Send(message);
        }

        /// <summary>
        /// Sends one-way message to the remote host. Ignores all exceptions and does not
        /// receive the response.
        /// </summary>
        /// <param name="destination">The remote host.</param>
        /// <param name="serviceName">The name of the entry.</param>
        /// <param name="content">The content.</param>
        public void SendOneWay(HostInformation destination, string serviceName, Stream content)
        {
            try
            {
                // create the message
                Message message = new Message(this.ITransportContext, destination, 0, new TransportHeaders(), content);
                message.IsOneWay = true;
                message.GenuineMessageType = GenuineMessageType.ExternalStreamConsumer;
                message.DestinationMarshalByRef = serviceName;

                // and send the message
                this.ITransportContext.ConnectionManager.Send(message);
            }
            catch (Exception)
            {
                // Intentionally swallowed: the documented contract of a one-way
                // send is fire-and-forget with all failures ignored.
            }
        }

        #endregion

        #region -- Direct receiving support --------------------------------------------------------

        /// <summary>
        /// Handles the incoming request by dispatching it to a registered
        /// service entry: the local collection is consulted first, then the
        /// process-wide global collection.
        /// </summary>
        /// <param name="message">The response.</param>
        /// <returns>The response.</returns>
        public Stream HandleRequest(Message message)
        {
            // fetch the name of the server service
            IServerServiceEntry iServerServiceEntry = null;
            string entryName = message.DestinationMarshalByRef as string;
            if (entryName != null)
                iServerServiceEntry = this._services[entryName] as IServerServiceEntry;

            // there is no service registered in the local collection, try the global collection
            if (entryName != null && iServerServiceEntry == null)
                iServerServiceEntry = _globalServices[entryName] as IServerServiceEntry;

            if (iServerServiceEntry == null)
            {
                // no services are registered to handle this request
                // message.ITransportContext.IEventLogger.Log(LogMessageCategory.Error, null, "DirectExchangeManager.HandlerRequest",
                // null, "There are no services associated with the \"{0}\" name. Incoming request is ignored.", entryName == null ? "<null!>" : entryName);

                // notify subscribers about the unknown service, then fail the request
                message.ITransportContext.IGenuineEventProvider.Fire(new GenuineEventArgs(GenuineEventType.UnknownServerService, null, message.Sender, entryName));
                throw GenuineExceptions.Get_Receive_NoServices(entryName);
            }

            return iServerServiceEntry.HandleMessage(message.Stream, message.Sender);
        }

        #endregion
    }
}
// This file is part of YamlDotNet - A .NET library for YAML. // Copyright (c) Antoine Aubry and contributors // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies // of the Software, and to permit persons to whom the Software is furnished to do // so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. using System; using System.Collections.Generic; using System.IO; using YamlDotNet.Core; using YamlDotNet.Core.Events; using YamlDotNet.Serialization.EventEmitters; using YamlDotNet.Serialization.NamingConventions; using YamlDotNet.Serialization.ObjectGraphTraversalStrategies; using YamlDotNet.Serialization.ObjectGraphVisitors; using YamlDotNet.Serialization.TypeInspectors; using YamlDotNet.Serialization.TypeResolvers; namespace YamlDotNet.Serialization { /// <summary> /// Writes objects to YAML. 
/// </summary>
public sealed class Serializer
{
    /// <summary>
    /// The list of type converters that are consulted when serializing values.
    /// </summary>
    internal IList<IYamlTypeConverter> Converters { get; private set; }

    private readonly SerializationOptions options;
    private readonly INamingConvention namingConvention;
    private readonly ITypeResolver typeResolver;

    /// <summary>
    /// Initializes a new instance of the <see cref="Serializer" /> class.
    /// </summary>
    /// <param name="options">Options that control how the serialization is to be performed.</param>
    /// <param name="namingConvention">Naming strategy to use for serialized property names</param>
    public Serializer(SerializationOptions options = SerializationOptions.None, INamingConvention namingConvention = null)
    {
        this.options = options;
        this.namingConvention = namingConvention ?? new NullNamingConvention();

        // Seed the converter list with the built-in converters; user converters
        // are appended later through RegisterTypeConverter.
        var initialConverters = new List<IYamlTypeConverter>();
        foreach (IYamlTypeConverter builtInConverter in Utilities.YamlTypeConverters.BuiltInConverters)
        {
            initialConverters.Add(builtInConverter);
        }
        Converters = initialConverters;

        if (IsOptionSet(SerializationOptions.DefaultToStaticType))
        {
            typeResolver = new StaticTypeResolver();
        }
        else
        {
            typeResolver = new DynamicTypeResolver();
        }
    }

    /// <summary>
    /// Indicates whether the specified serialization option was requested.
    /// </summary>
    private bool IsOptionSet(SerializationOptions option)
    {
        return (options & option) != 0;
    }

    /// <summary>
    /// Registers a type converter to be used to serialize and deserialize specific types.
    /// </summary>
    public void RegisterTypeConverter(IYamlTypeConverter converter)
    {
        Converters.Add(converter);
    }

    /// <summary>
    /// Serializes the specified object.
    /// </summary>
    /// <param name="writer">The <see cref="TextWriter" /> where to serialize the object.</param>
    /// <param name="graph">The object to serialize.</param>
    public void Serialize(TextWriter writer, object graph)
    {
        var emitter = new Emitter(writer);
        Serialize(emitter, graph);
    }

    /// <summary>
    /// Serializes the specified object.
    /// </summary>
    /// <param name="writer">The <see cref="TextWriter" /> where to serialize the object.</param>
    /// <param name="graph">The object to serialize.</param>
    /// <param name="type">The static type of the object to serialize.</param>
    public void Serialize(TextWriter writer, object graph, Type type)
    {
        var emitter = new Emitter(writer);
        Serialize(emitter, graph, type);
    }

    /// <summary>
    /// Serializes the specified object.
    /// </summary>
    /// <param name="emitter">The <see cref="IEmitter" /> where to serialize the object.</param>
    /// <param name="graph">The object to serialize.</param>
    public void Serialize(IEmitter emitter, object graph)
    {
        if (emitter == null)
        {
            throw new ArgumentNullException("emitter");
        }

        // When the graph is null its actual type defaults to object.
        var actualType = graph != null ? graph.GetType() : typeof(object);
        EmitDocument(emitter, new ObjectDescriptor(graph, actualType, typeof(object)));
    }

    /// <summary>
    /// Serializes the specified object.
    /// </summary>
    /// <param name="emitter">The <see cref="IEmitter" /> where to serialize the object.</param>
    /// <param name="graph">The object to serialize.</param>
    /// <param name="type">The static type of the object to serialize.</param>
    public void Serialize(IEmitter emitter, object graph, Type type)
    {
        if (emitter == null)
        {
            throw new ArgumentNullException("emitter");
        }

        if (type == null)
        {
            throw new ArgumentNullException("type");
        }

        EmitDocument(emitter, new ObjectDescriptor(graph, type, type));
    }

    // Emits one complete YAML document (including stream start/end events)
    // for the given object graph.
    private void EmitDocument(IEmitter emitter, IObjectDescriptor graph)
    {
        var traversalStrategy = CreateTraversalStrategy();
        var eventEmitter = CreateEventEmitter(emitter);
        var emittingVisitor = CreateEmittingVisitor(emitter, traversalStrategy, eventEmitter, graph);

        emitter.Emit(new StreamStart());
        emitter.Emit(new DocumentStart());

        traversalStrategy.Traverse(graph, emittingVisitor);

        emitter.Emit(new DocumentEnd(true));
        emitter.Emit(new StreamEnd());
    }

    // Builds the decorator chain of graph visitors that translates traversal
    // callbacks into emitted YAML events. The wrapping order is significant.
    private IObjectGraphVisitor CreateEmittingVisitor(IEmitter emitter, IObjectGraphTraversalStrategy traversalStrategy, IEventEmitter eventEmitter, IObjectDescriptor graph)
    {
        IObjectGraphVisitor visitor = new EmittingObjectGraphVisitor(eventEmitter);
        visitor = new CustomSerializationObjectGraphVisitor(emitter, visitor, Converters);

        if (!IsOptionSet(SerializationOptions.DisableAliases))
        {
            // A pre-pass over the graph assigns anchors so that repeated
            // objects can later be emitted as aliases.
            var anchorAssigner = new AnchorAssigner();
            traversalStrategy.Traverse(graph, anchorAssigner);
            visitor = new AnchorAssigningObjectGraphVisitor(visitor, eventEmitter, anchorAssigner);
        }

        if (!IsOptionSet(SerializationOptions.EmitDefaults))
        {
            visitor = new DefaultExclusiveObjectGraphVisitor(visitor);
        }

        return visitor;
    }

    // Chooses the event emitter that matches the requested output flavor
    // (JSON-compatible or tagged YAML).
    private IEventEmitter CreateEventEmitter(IEmitter emitter)
    {
        var writer = new WriterEventEmitter(emitter);

        return IsOptionSet(SerializationOptions.JsonCompatible)
            ? (IEventEmitter)new JsonEventEmitter(writer)
            : (IEventEmitter)new TypeAssigningEventEmitter(writer, IsOptionSet(SerializationOptions.Roundtrip));
    }

    // Assembles the type-inspector pipeline and picks the traversal strategy
    // according to the configured options.
    private IObjectGraphTraversalStrategy CreateTraversalStrategy()
    {
        var roundtrip = IsOptionSet(SerializationOptions.Roundtrip);

        ITypeInspector typeDescriptor = new ReadablePropertiesTypeInspector(typeResolver);
        if (roundtrip)
        {
            typeDescriptor = new ReadableAndWritablePropertiesTypeInspector(typeDescriptor);
        }

        typeDescriptor = new NamingConventionTypeInspector(typeDescriptor, namingConvention);
        typeDescriptor = new YamlAttributesTypeInspector(typeDescriptor);

        if (IsOptionSet(SerializationOptions.DefaultToStaticType))
        {
            typeDescriptor = new CachedTypeInspector(typeDescriptor);
        }

        if (roundtrip)
        {
            return new RoundtripObjectGraphTraversalStrategy(this, typeDescriptor, typeResolver, 50);
        }

        return new FullObjectGraphTraversalStrategy(this, typeDescriptor, typeResolver, 50, namingConvention);
    }
}
}
// // TrackActions.cs // // Author: // Gabriel Burt <gburt@novell.com> // // Copyright (C) 2007-2008 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Collections.Generic;
using Mono.Unix;
using Gtk;

using Hyena;
using Hyena.Widgets;

using Banshee.Query;
using Banshee.Sources;
using Banshee.Library;
using Banshee.Playlist;
using Banshee.Collection;
using Banshee.Collection.Database;
using Banshee.ServiceStack;
using Banshee.MediaEngine;
using Banshee.Widgets;
using Banshee.Gui;
using Banshee.Gui.Widgets;
using Hyena.Data;

namespace Banshee.Gui
{
    // Action group holding every track-related action: options for the
    // currently-playing track, options for the selected track(s), selection
    // helpers, and search-by-album/artist. Sensitivity and visibility of the
    // actions are recomputed in UpdateActions () whenever the active source,
    // the selection, or the player state changes.
    public class TrackActions : BansheeActionGroup
    {
        // Proxies that render the star-rating widget inside menus; created
        // lazily in HandleActionsChanged once the menu paths exist.
        private RatingActionProxy selected_tracks_rating_proxy;
        private RatingActionProxy playing_track_rating_proxy;

        // Remembers whether the "Add to Playlist" submenu was opened from the
        // Playing Track menu (true) or the Selected Tracks menu (false); read
        // later by AddToPlaylist.
        private bool chosen_from_playing_track_submenu;

        // Actions that are only sensitive when at least one track is selected.
        private static readonly string [] require_selection_actions = new string [] {
            // Selected Track(s) >
            "SelectedTracksAction", "AddSelectedTracksToPlaylistAction", "RateSelectedTracksAction",
            "RemoveSelectedTracksAction", "RemoveTracksFromLibraryAction", "OpenSelectedTracksFolderAction",
            "DeleteSelectedTracksFromDriveAction", "SelectedTracksPropertiesAction",
            // Others >
            "SelectNoneAction", "PlayTrack"
        };

        // Actions disabled while a filter (search) entry has focus.
        private static readonly string [] disable_for_filter_actions = new string [] {
            "SelectAllAction", "SelectNoneAction", "SearchMenuAction",
            // FIXME should be able to do this, just need to implement it
            "RateSelectedTracksAction"
        };

        // Synthetic selection used while the filter is focused; UpdateActions
        // keeps it covering all filtered tracks (select-all).
        private Hyena.Collections.Selection filter_selection = new Hyena.Collections.Selection ();

        private bool filter_focused;

        // When true, the track actions operate on all tracks matching the
        // current filter (filter_selection) instead of the track-list
        // selection. Toggling swaps the Selection property and
        // (un)suppresses the Select All/None accelerators.
        public bool FilterFocused {
            get { return filter_focused; }
            set {
                if (filter_focused == value)
                    return;

                filter_focused = value;
                if (value) {
                    Selection = filter_selection;
                    SuppressSelectActions ();
                } else {
                    Selection = current_source.TrackModel.Selection;
                    UnsuppressSelectActions ();
                }

                UpdateActions ();
                OnSelectionChanged ();
            }
        }

        // Raised whenever the logical selection changes (track list or filter).
        public event EventHandler SelectionChanged;

        // The selection the actions currently operate on (track-list selection
        // or filter_selection, depending on FilterFocused).
        public Hyena.Collections.Selection Selection { get; private set; }

        // The tracks covered by Selection; wraps the filter selection when the
        // filter is focused, otherwise the model's own selected items.
        public ModelSelection<TrackInfo> SelectedTracks {
            get {
                return FilterFocused ?
                    new ModelSelection<TrackInfo> (current_source.TrackModel, Selection) :
                    current_source.TrackModel.SelectedItems;
            }
        }

        // Registers every action entry and subscribes to source, player and
        // UI-manager events that drive action sensitivity.
        public TrackActions () : base ("Track")
        {
            Add (new ActionEntry [] {
                /*
                 * Playing Track ActionsEntries
                 */

                new ActionEntry ("PlayingTrackAction", null,
                    Catalog.GetString ("Playing Track"), "",
                    Catalog.GetString ("Options for playing track"), (o, e) => { ResetRating (); }),

                new ActionEntry ("AddPlayingTrackToPlaylistAction", null,
                    Catalog.GetString ("Add _to Playlist"), "",
                    Catalog.GetString ("Append playing items to playlist or create new playlist from playing track"),
                    OnAddPlayingTrackToPlaylistMenu),

                new ActionEntry ("RatePlayingTrackAction", null, String.Empty, null, null, OnRatePlayingTrack),

                new ActionEntry ("PlayingTrackEditorAction", Stock.Edit,
                    Catalog.GetString ("_Edit Track Information"), "E",
                    Catalog.GetString ("Edit information on playing track"), OnPlayingTrackEditor),

                new ActionEntry ("RemovePlayingTrackAction", Stock.Remove,
                    Catalog.GetString ("_Remove"), "",
                    Catalog.GetString ("Remove playing track from this source"), OnRemovePlayingTrack),

                new ActionEntry ("DeletePlayingTrackFromDriveAction", null,
                    Catalog.GetString ("_Delete From Drive"), "",
                    Catalog.GetString ("Permanently delete playing item from medium"), OnDeletePlayingTrackFromDrive),

                new ActionEntry ("OpenPlayingTrackFolderAction", null,
                    Catalog.GetString ("_Open Containing Folder"), "",
                    Catalog.GetString ("Open the folder that contains playing item"), OnOpenPlayingTrackFolder),

                new ActionEntry ("PlayingTrackPropertiesAction", Stock.Properties,
                    Catalog.GetString ("Properties"), "",
                    Catalog.GetString ("View information on playing track"), OnPlayingTrackProperties),

                /*
                 * Selected Track(s) ActionEntries
                 */

                new ActionEntry ("SelectedTracksAction", null,
                    Catalog.GetString ("Selected Track(s)"), "",
                    Catalog.GetString ("Options for selected track(s)"), (o, e) => { ResetRating (); }),

                new ActionEntry ("AddSelectedTracksToPlaylistAction", null,
                    Catalog.GetString ("Add _to Playlist"), "",
                    Catalog.GetString ("Append selected items to playlist or create new playlist from selection"),
                    OnAddSelectedTracksToPlaylistMenu),

                new ActionEntry ("RateSelectedTracksAction", null, String.Empty, null, null, OnRateSelectedTracks),

                new ActionEntry ("SelectedTracksEditorAction", Stock.Edit,
                    Catalog.GetString ("_Edit Track Information"), "",
                    Catalog.GetString ("Edit information on selected tracks"), OnSelectedTracksEditor),

                new ActionEntry ("RemoveSelectedTracksAction", Stock.Remove,
                    Catalog.GetString ("_Remove"), "Delete",
                    Catalog.GetString ("Remove selected track(s) from this source"), OnRemoveSelectedTracks),

                new ActionEntry ("RemoveTracksFromLibraryAction", null,
                    Catalog.GetString ("Remove From _Library"), "",
                    Catalog.GetString ("Remove selected track(s) from library"), OnRemoveTracksFromLibrary),

                new ActionEntry ("DeleteSelectedTracksFromDriveAction", null,
                    Catalog.GetString ("_Delete From Drive"), "",
                    Catalog.GetString ("Permanently delete selected item(s) from medium"), OnDeleteSelectedTracksFromDrive),

                new ActionEntry ("OpenSelectedTracksFolderAction", null,
                    Catalog.GetString ("_Open Containing Folder"), "",
                    Catalog.GetString ("Open the folder that contains the selected item"), OnOpenSelectedTracksFolder),

                new ActionEntry ("SelectedTracksPropertiesAction", Stock.Properties,
                    Catalog.GetString ("Properties"), "",
                    Catalog.GetString ("View information on selected tracks"), OnSelectedTracksProperties),

                /*
                 * Others
                 */

                new ActionEntry ("AddToNewPlaylistAction", Stock.New,
                    Catalog.GetString ("New Playlist"), null,
                    Catalog.GetString ("Create new playlist from selected tracks"), OnAddToNewPlaylist),

                new ActionEntry("SelectAllAction", null,
                    Catalog.GetString("Select _All"), "<control>A",
                    Catalog.GetString("Select all tracks"), OnSelectAll),

                new ActionEntry("SelectNoneAction", null,
                    Catalog.GetString("Select _None"), "<control><shift>A",
                    Catalog.GetString("Unselect all tracks"), OnSelectNone),

                new ActionEntry("TrackContextMenuAction", null,
                    String.Empty, null, null, OnTrackContextMenu),

                new ActionEntry ("PlayTrack", null,
                    Catalog.GetString ("_Play"), "",
                    Catalog.GetString ("Play the selected item"), OnPlayTrack),

                new ActionEntry ("SearchMenuAction", Stock.Find,
                    Catalog.GetString ("_Search"), null,
                    Catalog.GetString ("Search for items matching certain criteria"), null),

                new ActionEntry ("SearchForSameAlbumAction", null,
                    Catalog.GetString ("By Matching _Album"), "",
                    Catalog.GetString ("Search all songs of this album"), OnSearchForSameAlbum),

                new ActionEntry ("SearchForSameArtistAction", null,
                    Catalog.GetString ("By Matching A_rtist"), "",
                    Catalog.GetString ("Search all songs of this artist"), OnSearchForSameArtist),
            });

            Actions.UIManager.ActionsChanged += HandleActionsChanged;
            Actions.GlobalActions["EditMenuAction"].Activated += HandleEditMenuActivated;
            ServiceManager.SourceManager.ActiveSourceChanged += HandleActiveSourceChanged;
            ServiceManager.PlayerEngine.ConnectEvent (OnPlayerEvent, PlayerEvent.StateChange);

            this["AddPlayingTrackToPlaylistAction"].HideIfEmpty = false;
            this["AddSelectedTracksToPlaylistAction"].HideIfEmpty = false;
            this["PlayTrack"].StockId = Gtk.Stock.MediaPlay;
        }

#region State Event Handlers

        // The active source, when it exposes a track model; null otherwise.
        private ITrackModelSource current_source;

        // Moves the Reloaded/Selection.Changed subscriptions from the old
        // active source to the new one, then refreshes action state.
        private void HandleActiveSourceChanged (SourceEventArgs args)
        {
            FilterFocused = false;

            if (current_source != null && current_source.TrackModel != null) {
                current_source.TrackModel.Reloaded -= OnReloaded;
                current_source.TrackModel.Selection.Changed -= HandleSelectionChanged;
                current_source = null;
            }

            ITrackModelSource new_source = ActiveSource as ITrackModelSource;
            if (new_source != null) {
                new_source.TrackModel.Selection.Changed += HandleSelectionChanged;
                new_source.TrackModel.Reloaded += OnReloaded;
                current_source = new_source;
                Selection = new_source.TrackModel.Selection;
            }

            ThreadAssist.ProxyToMain (UpdateActions);
        }

        // Track model reloaded: re-evaluate action state on the main thread.
        private void OnReloaded (object sender, EventArgs args)
        {
            ThreadAssist.ProxyToMain (delegate {
                UpdateActions ();
            });
        }

        // Player state changed (only StateChange events are subscribed):
        // re-evaluate action state on the main thread.
        private void OnPlayerEvent (PlayerEventArgs args)
        {
            ThreadAssist.ProxyToMain (() => {
                UpdateActions ();
            });
        }

        // Once both Edit-menu paths exist in the UIManager, attach the rating
        // proxies to them and to the context menu, then detach this handler
        // (one-shot initialization).
        private void HandleActionsChanged (object sender, EventArgs args)
        {
            if (Actions.UIManager.GetAction ("/MainMenu/EditMenu/SelectedTracks") != null &&
                Actions.UIManager.GetAction ("/MainMenu/EditMenu/PlayingTrack") != null) {
                selected_tracks_rating_proxy = new RatingActionProxy (Actions.UIManager, this["RateSelectedTracksAction"]);
                playing_track_rating_proxy = new RatingActionProxy (Actions.UIManager, this["RatePlayingTrackAction"]);
                playing_track_rating_proxy.AddPath ("/MainMenu/EditMenu/PlayingTrack", "AddToPlaylist");
                selected_tracks_rating_proxy.AddPath ("/MainMenu/EditMenu/SelectedTracks", "AddToPlaylist");
                selected_tracks_rating_proxy.AddPath ("/TrackContextMenu", "AddToPlaylist");
                Actions.UIManager.ActionsChanged -= HandleActionsChanged;
            }
        }

        // Track-list selection changed: raise SelectionChanged and refresh
        // action state on the main thread.
        private void HandleSelectionChanged (object sender, EventArgs args)
        {
            ThreadAssist.ProxyToMain (delegate {
                OnSelectionChanged ();
                UpdateActions ();
            });
        }

        private void HandleEditMenuActivated (object sender, EventArgs args)
        {
            // inside the "Edit" menu it's a bit redundant to have a label that starts as "Edit Track..."
            this["PlayingTrackEditorAction"].Label = Catalog.GetString ("Track _Information");
            this["SelectedTracksEditorAction"].Label = Catalog.GetString ("Track _Information");

            // Singular/plural label for the Selected Track(s) submenu.
            if (Selection.Count > 1) {
                this ["SelectedTracksAction"].Label = Catalog.GetString ("Selected Tracks");
            } else {
                this ["SelectedTracksAction"].Label = Catalog.GetString ("Selected Track");
            }
        }

        // Raises the SelectionChanged event.
        private void OnSelectionChanged ()
        {
            EventHandler handler = SelectionChanged;
            if (handler != null) {
                handler (this, EventArgs.Empty);
            }
        }

#endregion

#region Utility Methods

        private bool select_actions_suppressed = false;

        // Disconnects the Select All/None accelerators while the filter entry
        // has focus — presumably so those key combos reach the entry instead;
        // TODO confirm against the filter widget's key handling.
        private void SuppressSelectActions ()
        {
            if (!select_actions_suppressed) {
                this ["SelectAllAction"].DisconnectAccelerator ();
                this ["SelectNoneAction"].DisconnectAccelerator ();
                select_actions_suppressed = true;
            }
        }

        // Restores the Select All/None accelerators when the filter loses focus.
        private void UnsuppressSelectActions ()
        {
            if (select_actions_suppressed) {
                this ["SelectAllAction"].ConnectAccelerator ();
                this ["SelectNoneAction"].ConnectAccelerator ();
                select_actions_suppressed = false;
            }
        }

        // Recomputes sensitivity and visibility of every action in this group
        // from the active source, the current selection and the player state.
        public void UpdateActions ()
        {
            Source source = ServiceManager.SourceManager.ActiveSource;
            if (source == null) {
                Sensitive = Visible = false;
                return;
            }

            bool in_database = source is DatabaseSource;
            PrimarySource primary_source = (source as PrimarySource) ?? (source.Parent as PrimarySource);

            var track_source = source as ITrackModelSource;
            if (track_source != null) {
                if (FilterFocused) {
                    if (Selection == filter_selection) {
                        // Keep the synthetic filter selection covering every
                        // track that matches the current filter.
                        filter_selection.MaxIndex = track_source.TrackModel.Selection.MaxIndex;
                        filter_selection.Clear (false);
                        filter_selection.SelectAll ();
                    } else {
                        Log.Error (new Exception ("Filter focused, but selection is not filter selection!"));
                    }
                } else {
                    UpdateActions (true, true, disable_for_filter_actions);
                }

                var selection = Selection;
                var playing_track = ServiceManager.PlayerEngine.CurrentTrack;
                // NOTE(review): hard cast — assumes the playback controller's
                // source is always a DatabaseSource here; verify.
                var playback_source = (DatabaseSource)ServiceManager.PlaybackController.Source;
                int count = selection.Count;
                Sensitive = Visible = true;
                bool has_selection = count > 0;
                bool has_single_selection = count == 1;
                bool is_playing_or_paused = ServiceManager.PlayerEngine.CurrentState == PlayerState.Playing ||
                    ServiceManager.PlayerEngine.CurrentState == PlayerState.Paused;
                bool is_idle = ServiceManager.PlayerEngine.CurrentState == PlayerState.Idle;

                foreach (string action in require_selection_actions) {
                    this[action].Sensitive = has_selection;
                }

                UpdateActions (source.CanSearch && !FilterFocused, has_single_selection,
                    "SearchMenuAction", "SearchForSameArtistAction", "SearchForSameAlbumAction"
                );

                this["SelectAllAction"].Sensitive = track_source.Count > 0 && !selection.AllSelected;

                UpdateAction ("PlayingTrackAction", !is_idle && playing_track is DatabaseTrackInfo, is_playing_or_paused, null);
                UpdateAction ("AddPlayingTrackToPlaylistAction", source is MusicLibrarySource, is_playing_or_paused, null);
                UpdateAction ("RatePlayingTrackAction", playback_source.HasEditableTrackProperties, is_playing_or_paused, null);
                UpdateAction ("PlayingTrackPropertiesAction", playback_source.HasViewableTrackProperties, is_playing_or_paused, source);
                UpdateAction ("PlayingTrackEditorAction", playback_source.HasEditableTrackProperties, is_playing_or_paused, source);
                UpdateAction ("RemovePlayingTrackAction", playback_source.CanRemoveTracks, is_playing_or_paused, source);
                UpdateAction ("DeletePlayingTrackFromDriveAction", playback_source.CanDeleteTracks, is_playing_or_paused, source);
                UpdateAction ("OpenPlayingTrackFolderAction", playback_source.CanDeleteTracks, is_playing_or_paused, source);

                UpdateAction ("SelectedTracksAction", has_selection, has_selection, null);
                UpdateAction ("AddSelectedTracksToPlaylistAction", in_database && primary_source != null &&
                    primary_source.SupportsPlaylists && !primary_source.PlaylistsReadOnly, has_selection, null);
                UpdateAction ("RateSelectedTracksAction", source.HasEditableTrackProperties, has_selection, null);
                UpdateAction ("SelectedTracksEditorAction", source.HasEditableTrackProperties, has_selection, source);
                UpdateAction ("RemoveSelectedTracksAction", track_source.CanRemoveTracks, has_selection, source);
                UpdateAction ("RemoveTracksFromLibraryAction", source.Parent is LibrarySource, has_selection, null);
                UpdateAction ("DeleteSelectedTracksFromDriveAction", track_source.CanDeleteTracks, has_selection, source);

                //if it can delete tracks, most likely it can open their folder
                UpdateAction ("OpenSelectedTracksFolderAction", track_source.CanDeleteTracks, has_single_selection, source);

                UpdateAction ("SelectedTracksPropertiesAction", source.HasViewableTrackProperties, has_selection, source);

                // Name the physical medium in the delete labels for removable sources.
                if (primary_source != null && !(primary_source is LibrarySource) && primary_source.StorageName != null) {
                    this["DeleteSelectedTracksFromDriveAction"].Label = String.Format (
                        Catalog.GetString ("_Delete From \"{0}\""), primary_source.StorageName);
                    this["DeletePlayingTrackFromDriveAction"].Label = String.Format (
                        Catalog.GetString ("_Delete From \"{0}\""), primary_source.StorageName);
                }

                if (FilterFocused) {
                    UpdateActions (false, false, disable_for_filter_actions);
                }
            } else {
                Sensitive = Visible = false;
            }
        }

        // Primes the rating proxies with the current rating(s) so the star
        // widget shows the right value when its menu opens.
        private void ResetRating ()
        {
            if (current_source != null) {
                int rating = 0;

                // If there is only one track, get the preset rating
                if (Selection.Count == 1) {
                    foreach (TrackInfo track in SelectedTracks) {
                        rating = track.Rating;
                    }
                }
                selected_tracks_rating_proxy.Reset (rating);

                var playing_track = ServiceManager.PlayerEngine.CurrentTrack as TrackInfo;
                if (playing_track != null) {
                    rating = playing_track.Rating;
                    playing_track_rating_proxy.Reset (rating);
                }
            }
        }

#endregion

#region Action Handlers

        private void OnSelectAll (object o, EventArgs args)
        {
            if (current_source != null)
                current_source.TrackModel.Selection.SelectAll ();
        }

        private void OnSelectNone (object o, EventArgs args)
        {
            if (current_source != null)
                current_source.TrackModel.Selection.Clear ();
        }

        // Shows the track context menu after refreshing ratings and action state.
        private void OnTrackContextMenu (object o, EventArgs args)
        {
            ResetRating ();
            UpdateActions ();

            ShowContextMenu ("/TrackContextMenu");
        }

        // Invokes a handler the source may provide to override one of the
        // actions below; returns true when an override was found and run.
        private bool RunSourceOverrideHandler (string sourceOverrideHandler)
        {
            Source source = current_source as Source;
            InvokeHandler handler = source != null
                ? source.GetInheritedProperty<InvokeHandler> (sourceOverrideHandler)
                : null;

            if (handler != null) {
                handler ();
                return true;
            }

            return false;
        }

        // Opens the (possibly read-only) properties view for the playing track.
        private void OnPlayingTrackProperties (object o, EventArgs args)
        {
            var track = ServiceManager.PlayerEngine.CurrentTrack as TrackInfo;
            if (track != null && current_source != null && !RunSourceOverrideHandler ("PlayingTrackPropertiesActionHandler")) {
                var s = current_source as Source;
                var readonly_tabs = s != null && !s.HasEditableTrackProperties;
                TrackEditor.TrackEditorDialog.RunView (track, readonly_tabs);
            }
        }

        // Opens the (possibly read-only) properties view for the selected tracks.
        private void OnSelectedTracksProperties (object o, EventArgs args)
        {
            if (current_source != null && !RunSourceOverrideHandler ("SelectedTracksPropertiesActionHandler")) {
                var s = current_source as Source;
                var readonly_tabs = s != null && !s.HasEditableTrackProperties;
                TrackEditor.TrackEditorDialog.RunView (current_source.TrackModel, Selection, readonly_tabs);
            }
        }

        // Opens the track editor for the playing track.
        private void OnPlayingTrackEditor (object o, EventArgs args)
        {
            var track = ServiceManager.PlayerEngine.CurrentTrack as TrackInfo;
            if (track != null && current_source != null && !RunSourceOverrideHandler ("PlayingTrackEditorActionHandler")) {
                TrackEditor.TrackEditorDialog.RunEdit (track);
            }
        }

        // Opens the track editor for the selected tracks.
        private void OnSelectedTracksEditor (object o, EventArgs args)
        {
            if (current_source != null && !RunSourceOverrideHandler ("SelectedTracksEditorActionHandler")) {
                TrackEditor.TrackEditorDialog.RunEdit (current_source.TrackModel, Selection);
            }
        }

        // Plays the focused track; external resources are handed to the OS
        // instead of the player engine.
        private void OnPlayTrack (object o, EventArgs args)
        {
            var source = ServiceManager.SourceManager.ActiveSource as ITrackModelSource;
            if (source != null) {
                var track = source.TrackModel [FilterFocused ? 0 : source.TrackModel.Selection.FocusedIndex];
                if (track.HasAttribute (TrackMediaAttributes.ExternalResource)) {
                    System.Diagnostics.Process.Start (track.Uri);
                } else {
                    ServiceManager.PlaybackController.Source = source;
                    ServiceManager.PlayerEngine.OpenPlay (track);
                }
            }
        }

        // TODO This function works only for music library source now
        // but it should act on a source where a track is playing.
        private void OnAddPlayingTrackToPlaylistMenu (object o, EventArgs args)
        {
            List<Source> children;
            chosen_from_playing_track_submenu = true;
            // Snapshot under the lock so the list can be iterated safely later.
            lock (ServiceManager.SourceManager.MusicLibrary.Children) {
                children = new List<Source> (ServiceManager.SourceManager.MusicLibrary.Children);
            }
            OnAddToPlaylistMenu (o, children);
        }

        // Builds the Add to Playlist submenu from the active primary source's
        // child playlists.
        private void OnAddSelectedTracksToPlaylistMenu (object o, EventArgs args)
        {
            List<Source> children;
            chosen_from_playing_track_submenu = false;
            lock (ActivePrimarySource.Children) {
                children = new List<Source> (ActivePrimarySource.Children);
            }
            OnAddToPlaylistMenu (o, children);
        }

        // Called when the Add to Playlist action is highlighted.
        // Generates the menu of playlists to which you can add the selected tracks.
        private void OnAddToPlaylistMenu (object o, List<Source> children)
        {
            Source active_source = ServiceManager.SourceManager.ActiveSource;

            // TODO find just the menu that was activated instead of modifying all proxies
            foreach (Widget proxy_widget in ((Gtk.Action)o).Proxies) {
                MenuItem menu = proxy_widget as MenuItem;
                if (menu == null)
                    continue;

                Menu submenu = new Menu ();
                menu.Submenu = submenu;

                submenu.Append (this ["AddToNewPlaylistAction"].CreateMenuItem ());
                bool separator_added = false;

                foreach (Source child in children) {
                    PlaylistSource playlist = child as PlaylistSource;
                    if (playlist != null) {
                        if (!separator_added) {
                            submenu.Append (new SeparatorMenuItem ());
                            separator_added = true;
                        }

                        PlaylistMenuItem item = new PlaylistMenuItem (playlist);
                        item.Image = new Gtk.Image ("playlist-source", IconSize.Menu);
                        item.Activated += OnAddToExistingPlaylist;
                        // Can't add a playlist's own tracks to itself.
                        item.Sensitive = playlist != active_source;
                        submenu.Append (item);
                    }
                }
                submenu.ShowAll ();
            }
        }

        // Creates a new playlist and adds the current tracks to it.
        private void OnAddToNewPlaylist (object o, EventArgs args)
        {
            // TODO generate name based on the track selection, or begin editing it
            PlaylistSource playlist = new PlaylistSource (Catalog.GetString ("New Playlist"), ActivePrimarySource);
            playlist.Save ();
            playlist.PrimarySource.AddChildSource (playlist);
            AddToPlaylist (playlist);
        }

        private void OnAddToExistingPlaylist (object o, EventArgs args)
        {
            AddToPlaylist (((PlaylistMenuItem)o).Playlist);
        }

        // Adds either the playing track, the selected tracks, or (when the
        // filter is focused) every filtered track to the given playlist.
        private void AddToPlaylist (PlaylistSource playlist)
        {
            if (!FilterFocused) {
                var track = ServiceManager.PlayerEngine.CurrentTrack as DatabaseTrackInfo;
                if (chosen_from_playing_track_submenu && track != null) {
                    playlist.AddTrack (track);
                } else {
                    playlist.AddSelectedTracks (ActiveSource);
                }
            } else {
                playlist.AddAllTracks (ActiveSource);
            }
        }

        // Removes the playing track from its playback source after confirmation.
        private void OnRemovePlayingTrack (object o, EventArgs args)
        {
            var playback_src = ServiceManager.PlaybackController.Source as DatabaseSource;
            var track = ServiceManager.PlayerEngine.CurrentTrack as DatabaseTrackInfo;
            if (playback_src != null && track != null) {
                if (!ConfirmRemove (playback_src, false, 1)) {
                    return;
                }
                // (second playback_src null check is redundant — already inside the guard)
                if (playback_src != null && playback_src.CanRemoveTracks) {
                    playback_src.RemoveTrack (track);
                }
            }
        }

        // Removes the selected tracks from the active source after confirmation.
        private void OnRemoveSelectedTracks (object o, EventArgs args)
        {
            ITrackModelSource source = ActiveSource as ITrackModelSource;
            if (!ConfirmRemove (source, false, Selection.Count))
                return;

            if (source != null && source.CanRemoveTracks) {
                source.RemoveTracks (Selection);
            }
        }

        // Removes the selected tracks from the parent library (background thread).
        private void OnRemoveTracksFromLibrary (object o, EventArgs args)
        {
            ITrackModelSource source = ActiveSource as ITrackModelSource;
            if (source != null) {
                LibrarySource library = source.Parent as LibrarySource;
                if (library != null) {
                    if (!ConfirmRemove (library, false, Selection.Count)) {
                        return;
                    }

                    ThreadAssist.SpawnFromMain (delegate {
                        library.RemoveTracks (source.TrackModel as DatabaseTrackListModel, Selection);
                    });
                }
            }
        }

        // Opens the folder containing the playing track's file.
        private void OnOpenPlayingTrackFolder (object o, EventArgs args)
        {
            var track = ServiceManager.PlayerEngine.CurrentTrack as TrackInfo;
            if (track != null) {
                var path = System.IO.Path.GetDirectoryName (track.Uri.AbsolutePath);
                OpenContainingFolder (path);
            }
        }

        // Opens the folder containing the single selected track's file.
        private void OnOpenSelectedTracksFolder (object o, EventArgs args)
        {
            var source = ActiveSource as ITrackModelSource;
            if (source == null || source.TrackModel == null)
                return;
            var items = SelectedTracks;
            // The action is only sensitive for single selections (see UpdateActions).
            if (items == null || items.Count != 1) {
                Log.Error ("Could not open containing folder");
                return;
            }
            foreach (var track in items) {
                var path = System.IO.Path.GetDirectoryName (track.Uri.AbsolutePath);
                OpenContainingFolder (path);
            }
        }

        // Opens the given directory with the OS handler, or warns if it is missing.
        private void OpenContainingFolder (String path)
        {
            if (Banshee.IO.Directory.Exists (path)) {
                System.Diagnostics.Process.Start (path);
                return;
            }

            var md = new HigMessageDialog (
                ServiceManager.Get<GtkElementsService> ().PrimaryWindow,
                DialogFlags.DestroyWithParent, MessageType.Warning,
                ButtonsType.None, Catalog.GetString ("The folder could not be found."),
                Catalog.GetString ("Please check that the track's location is accessible by the system.")
            );
            md.AddButton ("gtk-ok", ResponseType.Ok, true);
            try {
                md.Run ();
            } finally {
                md.Destroy ();
            }
        }

        // Permanently deletes the playing track from disk after confirmation.
        private void OnDeletePlayingTrackFromDrive (object o, EventArgs args)
        {
            var playback_src = ServiceManager.PlaybackController.Source as DatabaseSource;
            var track = ServiceManager.PlayerEngine.CurrentTrack as DatabaseTrackInfo;
            if (playback_src != null && track != null) {
                if (!ConfirmRemove (playback_src, true, 1)) {
                    return;
                }
                if (playback_src != null && playback_src.CanDeleteTracks) {
                    // Build a one-item selection covering just the playing track.
                    var selection = new Hyena.Collections.Selection ();
                    selection.Select (playback_src.TrackModel.IndexOf (track));
                    playback_src.DeleteTracks (selection);
                }
            }
        }

        // Permanently deletes the selected tracks from disk after confirmation.
        private void OnDeleteSelectedTracksFromDrive (object o, EventArgs args)
        {
            ITrackModelSource source = ActiveSource as ITrackModelSource;
            if (!ConfirmRemove (source, true, Selection.Count))
                return;

            if (source != null && source.CanDeleteTracks) {
                source.DeleteTracks (Selection);
            }
        }

        // FIXME filter
        private void OnRateSelectedTracks (object o, EventArgs args)
        {
            ThreadAssist.SpawnFromMain (delegate {
                ((DatabaseSource)ActiveSource).RateSelectedTracks (selected_tracks_rating_proxy.LastRating);
            });
        }

        // Saves the proxy's rating onto the playing track.
        private void OnRatePlayingTrack (object o, EventArgs args)
        {
            var track = ServiceManager.PlayerEngine.CurrentTrack as DatabaseTrackInfo;
            if (track != null) {
                track.SavedRating = playing_track_rating_proxy.LastRating;
            }
        }

        // Filters the active source by the first selected track's artist.
        private void OnSearchForSameArtist (object o, EventArgs args)
        {
            if (current_source != null) {
                foreach (TrackInfo track in current_source.TrackModel.SelectedItems) {
                    if (!String.IsNullOrEmpty (track.ArtistName)) {
                        ActiveSource.FilterQuery = BansheeQuery.ArtistField.ToTermString (":", track.ArtistName);
                    }
                    break;
                }
            }
        }

        // Filters the active source by the first selected track's album.
        private void OnSearchForSameAlbum (object o, EventArgs args)
        {
            if (current_source != null) {
                foreach (TrackInfo track in current_source.TrackModel.SelectedItems) {
                    if (!String.IsNullOrEmpty (track.AlbumTitle)) {
                        ActiveSource.FilterQuery = BansheeQuery.AlbumField.ToTermString (":", track.AlbumTitle);
                    }
                    break;
                }
            }
        }

#endregion

        // Asks the user to confirm removing (or, when delete is true,
        // permanently deleting) selCount tracks from the source; returns true
        // to proceed. Skipped entirely when the source opts out of confirmation.
        private static bool ConfirmRemove (ITrackModelSource source, bool delete, int selCount)
        {
            if (!source.ConfirmRemoveTracks) {
                return true;
            }

            bool ret = false;
            string header = null;
            string message = null;
            string button_label = null;

            if (delete) {
                header = String.Format (
                    Catalog.GetPluralString (
                        "Are you sure you want to permanently delete this item?",
                        "Are you sure you want to permanently delete the selected {0} items?", selCount
                    ), selCount
                );
                message = Catalog.GetString ("If you delete the selection, it will be permanently lost.");
                button_label = "gtk-delete";
            } else {
                header = String.Format (Catalog.GetString ("Remove selection from {0}?"), source.Name);
                message = String.Format (
                    Catalog.GetPluralString (
                        "Are you sure you want to remove the selected item from your {1}?",
                        "Are you sure you want to remove the selected {0} items from your {1}?", selCount
                    ), selCount, source.GenericName
                );
                button_label = "gtk-remove";
            }

            HigMessageDialog md = new HigMessageDialog (
                ServiceManager.Get<GtkElementsService> ().PrimaryWindow,
                DialogFlags.DestroyWithParent, delete ? MessageType.Warning : MessageType.Question,
                ButtonsType.None, header, message
            );
            // Delete from Disk defaults to Cancel and the others to OK/Confirm.
            md.AddButton ("gtk-cancel", ResponseType.No, delete);
            md.AddButton (button_label, ResponseType.Yes, !delete);

            try {
                if (md.Run () == (int) ResponseType.Yes) {
                    ret = true;
                }
            } finally {
                md.Destroy ();
            }

            return ret;
        }

        // Shows a generic remove-confirmation dialog with the given header,
        // blocking on the main thread; returns true if the user confirmed.
        public static bool ConfirmRemove (string header)
        {
            string message = Catalog.GetString ("Are you sure you want to continue?");

            bool remove_tracks = false;
            ThreadAssist.BlockingProxyToMain (() => {
                var md = new HigMessageDialog (
                    ServiceManager.Get<GtkElementsService> ().PrimaryWindow,
                    DialogFlags.DestroyWithParent, MessageType.Warning,
                    ButtonsType.None, header, message
                );
                md.AddButton ("gtk-cancel", ResponseType.No, true);
                md.AddButton (Catalog.GetString ("Remove tracks"), ResponseType.Yes, false);

                try {
                    if (md.Run () == (int) ResponseType.Yes) {
                        remove_tracks = true;
                    }
                } finally {
                    md.Destroy ();
                }
            });
            return remove_tracks;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics.CodeAnalysis;

namespace System.Drawing
{
    /// <summary>
    /// Identifies the predefined colors: operating-system ("System") colors,
    /// named "Web" colors, and later additions. Each member's numeric value is
    /// an index into an internal color table.
    /// </summary>
    [SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue")]
    public enum KnownColor
    {
        // This enum is order dependant!!!
        //
        // The value of these known colors are indexes into a color array.
        // Do not modify this enum without updating KnownColorTable.
        //
        // 0 - reserved for "not a known color"

        /// <summary>Marks the first valid slot; 0 is reserved for "not a known color".</summary>
        FirstColor = 0,

        // "System" colors — values mirror the OS UI element colors.
        // Members rely on implicit sequential numbering; do not reorder.
        ActiveBorder = 1,
        ActiveCaption,
        ActiveCaptionText,
        AppWorkspace,
        Control,
        ControlDark,
        ControlDarkDark,
        ControlLight,
        ControlLightLight,
        ControlText,
        Desktop,
        GrayText,
        Highlight,
        HighlightText,
        HotTrack,
        InactiveBorder,
        InactiveCaption,
        InactiveCaptionText,
        Info,
        InfoText,
        Menu,
        MenuText,
        ScrollBar,
        Window,
        WindowFrame,
        WindowText,

        // "Web" Colors — named colors in alphabetical order (after Transparent).
        // Implicit values continue from the system colors above; do not reorder.
        Transparent,
        AliceBlue, AntiqueWhite, Aqua, Aquamarine, Azure,
        Beige, Bisque, Black, BlanchedAlmond, Blue, BlueViolet, Brown, BurlyWood,
        CadetBlue, Chartreuse, Chocolate, Coral, CornflowerBlue, Cornsilk, Crimson, Cyan,
        DarkBlue, DarkCyan, DarkGoldenrod, DarkGray, DarkGreen, DarkKhaki, DarkMagenta,
        DarkOliveGreen, DarkOrange, DarkOrchid, DarkRed, DarkSalmon, DarkSeaGreen,
        DarkSlateBlue, DarkSlateGray, DarkTurquoise, DarkViolet, DeepPink, DeepSkyBlue,
        DimGray, DodgerBlue,
        Firebrick, FloralWhite, ForestGreen, Fuchsia,
        Gainsboro, GhostWhite, Gold, Goldenrod, Gray, Green, GreenYellow,
        Honeydew, HotPink,
        IndianRed, Indigo, Ivory,
        Khaki,
        Lavender, LavenderBlush, LawnGreen, LemonChiffon, LightBlue, LightCoral,
        LightCyan, LightGoldenrodYellow, LightGray, LightGreen, LightPink, LightSalmon,
        LightSeaGreen, LightSkyBlue, LightSlateGray, LightSteelBlue, LightYellow,
        Lime, LimeGreen, Linen,
        Magenta, Maroon, MediumAquamarine, MediumBlue, MediumOrchid, MediumPurple,
        MediumSeaGreen, MediumSlateBlue, MediumSpringGreen, MediumTurquoise,
        MediumVioletRed, MidnightBlue, MintCream, MistyRose, Moccasin,
        NavajoWhite, Navy,
        OldLace, Olive, OliveDrab, Orange, OrangeRed, Orchid,
        PaleGoldenrod, PaleGreen, PaleTurquoise, PaleVioletRed, PapayaWhip, PeachPuff,
        Peru, Pink, Plum, PowderBlue, Purple,
        Red, RosyBrown, RoyalBlue,
        SaddleBrown, Salmon, SandyBrown, SeaGreen, SeaShell, Sienna, Silver, SkyBlue,
        SlateBlue, SlateGray, Snow, SpringGreen, SteelBlue,
        Tan, Teal, Thistle, Tomato, Turquoise,
        Violet,
        Wheat, White, WhiteSmoke,
        Yellow, YellowGreen,

        // NEW ADDITIONS IN WHIDBEY - DO NOT MOVE THESE UP OR IT WILL BE A BREAKING CHANGE
        // (additional system colors appended in .NET 2.0 to preserve existing values)
        ButtonFace,
        ButtonHighlight,
        ButtonShadow,
        GradientActiveCaption,
        GradientInactiveCaption,
        MenuBar,
        MenuHighlight,

        /// <summary>Sentinel equal to the last defined color; update if members are appended.</summary>
        LastColor = MenuHighlight,
    }
}
using DevTreks.Models;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;

namespace DevTreks.Data.AppHelpers
{
    /// <summary>
    ///Purpose: Support class holding constants, enums, and common methods
    /// for service agreements
    ///Author: www.devtreks.org
    ///Date: 2016, March
    ///References: www.devtreks.org/helptreks/linkedviews/help/linkedview/HelpFile/148
    /// </summary>
    /// NOTE:
    public class Agreement
    {
        public Agreement()
        {
        }
        //switch to apps
        public const string SERVICE_BASE_QRY = "servicebase";
        //base tables inserts/edits schema
        public const string AGREEMENTSBASE_SCHEMA = "ServiceAgreementBaseEdits.xml";
        //namespaces (value deliberately includes surrounding single quotes)
        public const string AGREEMENTS_NAMESPACE = "'urn:DevTreks-support-schemas:ServiceAgreement'";
        //attribute names for edits
        private const string ISOWNER = "IsOwner";
        public const string SERVICE_ID = "ServiceId";
        public const string SERVICEGROUP_ID = "ServiceGroupId";
        //serviceaccountgroup is accountclass -needed to that can navigate to other networks in selects panel
        //serviceaccount is account
        //service is accounttoservice
        //incentive is accounttoincentive
        public enum AGREEMENT_TYPES
        {
            serviceaccountgroup = 1,
            serviceaccount = 2,
            service = 3,
            incentive = 4
        }
        //xml elements found in base table schemas (i.e. servicebase in all apps)
        public enum AGREEMENT_BASE_TYPES
        {
            //phase out accountbase
            servicebasegroup = 1,
            servicebase = 2,
            incentivebase = 3
        }
        public enum SERVICE_STATUS_TYPES
        {
            notcurrent = 0,
            current = 1
        }
        public enum PUBLIC_AUTHORIZATION_TYPES
        {
            public_not_authorized = 0,
            public_is_authorized = 1
        }
        public enum SERVICE_UNIT_TYPES
        {
            day = 0,
            month = 1,
            year = 2
        }
        public enum SERVICE_CURRENCY_TYPES
        {
            usdollar = 0,
            euro = 1
        }
        /// <summary>
        /// Builds a lookup of public authorization levels keyed by the enum's
        /// integer value (as a string) with the enum name as the display value.
        /// </summary>
        public static Dictionary<string, string> GetAuthorizationLevelDictionary()
        {
            Dictionary<string, string> auths = new Dictionary<string, string>();
            int iValue = (int)PUBLIC_AUTHORIZATION_TYPES.public_not_authorized;
            auths.Add(iValue.ToString(),
                PUBLIC_AUTHORIZATION_TYPES.public_not_authorized.ToString());
            iValue = (int)PUBLIC_AUTHORIZATION_TYPES.public_is_authorized;
            auths.Add(iValue.ToString(),
                PUBLIC_AUTHORIZATION_TYPES.public_is_authorized.ToString());
            return auths;
        }
        /// <summary>
        /// Builds a lookup of service billing units; key and value are both the
        /// enum name (unlike the authorization dictionary, which keys by number).
        /// </summary>
        public static Dictionary<string, string> GetServiceUnitDictionary()
        {
            Dictionary<string, string> units = new Dictionary<string, string>();
            units.Add(SERVICE_UNIT_TYPES.day.ToString(),
                SERVICE_UNIT_TYPES.day.ToString());
            units.Add(SERVICE_UNIT_TYPES.month.ToString(),
                SERVICE_UNIT_TYPES.month.ToString());
            units.Add(SERVICE_UNIT_TYPES.year.ToString(),
                SERVICE_UNIT_TYPES.year.ToString());
            return units;
        }
        /// <summary>
        /// Builds a lookup of supported currencies; key and value are both the enum name.
        /// </summary>
        public static Dictionary<string, string> GetCurrencyDictionary()
        {
            Dictionary<string, string> curs = new Dictionary<string, string>();
            curs.Add(SERVICE_CURRENCY_TYPES.usdollar.ToString(),
                SERVICE_CURRENCY_TYPES.usdollar.ToString());
            curs.Add(SERVICE_CURRENCY_TYPES.euro.ToString(),
                SERVICE_CURRENCY_TYPES.euro.ToString());
            return curs;
        }
        /// <summary>
        /// Configures the edit/select view types and the parent/child panel
        /// navigation on <paramref name="uri"/> according to the node type being
        /// displayed and the member's private authorization level.
        /// </summary>
        /// <param name="currentNodeName">Node name; matched against AGREEMENT_TYPES/AGREEMENT_BASE_TYPES names.</param>
        /// <param name="currentId">Unused in this method body.</param>
        /// <param name="uri">URI whose URIDataManager navigation state is mutated in place.</param>
        public static void SetAppSearchView(string currentNodeName, int currentId,
            ContentURI uri)
        {
            //the current params change depending on the node type
            if (currentNodeName == AGREEMENT_TYPES.serviceaccount.ToString()
                || currentNodeName == AGREEMENT_TYPES.serviceaccountgroup.ToString())
            {
                if (uri.URIMember.ClubInUse.PrivateAuthorizationLevel ==
                    AccountHelper.AUTHORIZATION_LEVELS.fulledits)
                {
                    uri.URIDataManager.EditViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.full;
                    //checkboxes for node insertions
                    uri.URIDataManager.SelectViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.part;
                }
                else
                {
                    uri.URIDataManager.EditViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.print;
                    uri.URIDataManager.SelectViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.part;
                }
                //link forwards
                uri.URIDataManager.ChildrenPanelType
                    = Helpers.GeneralHelpers.UPDATE_PANEL_TYPES.select;
                if (currentNodeName == AGREEMENT_TYPES.serviceaccountgroup.ToString())
                {
                    uri.URIDataManager.ChildrenNodeName
                        = AGREEMENT_TYPES.serviceaccount.ToString();
                    //no link backwards (group is the root of the navigation tree)
                    uri.URIDataManager.ParentPanelType
                        = Helpers.GeneralHelpers.UPDATE_PANEL_TYPES.none;
                }
                else
                {
                    uri.URIDataManager.ChildrenNodeName
                        = AGREEMENT_TYPES.service.ToString();
                    uri.URIDataManager.ParentPanelType
                        = Helpers.GeneralHelpers.UPDATE_PANEL_TYPES.select;
                }
            }
            else if (currentNodeName == AGREEMENT_TYPES.service.ToString())
            {
                if (uri.URIMember.ClubInUse.PrivateAuthorizationLevel ==
                    AccountHelper.AUTHORIZATION_LEVELS.fulledits)
                {
                    uri.URIDataManager.EditViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.full;
                    //checkboxes for node insertions
                    uri.URIDataManager.SelectViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.part;
                }
                else
                {
                    uri.URIDataManager.EditViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.print;
                    uri.URIDataManager.SelectViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.part;
                }
                //link backwards
                uri.URIDataManager.ParentPanelType
                    = Helpers.GeneralHelpers.UPDATE_PANEL_TYPES.select;
                //link forwards
                uri.URIDataManager.ChildrenPanelType
                    = Helpers.GeneralHelpers.UPDATE_PANEL_TYPES.select;
                uri.URIDataManager.ChildrenNodeName
                    = AGREEMENT_BASE_TYPES.servicebase.ToString();
            }
            else if (currentNodeName == AGREEMENT_BASE_TYPES.servicebase.ToString())
            {
                if (uri.URIMember.ClubInUse.PrivateAuthorizationLevel ==
                    AccountHelper.AUTHORIZATION_LEVELS.fulledits)
                {
                    uri.URIDataManager.EditViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.full;
                    //checkboxes for node insertions
                    uri.URIDataManager.SelectViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.part;
                }
                else
                {
                    uri.URIDataManager.EditViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.print;
                    uri.URIDataManager.SelectViewEditType
                        = Helpers.GeneralHelpers.VIEW_EDIT_TYPES.part;
                }
                //link backwards
                uri.URIDataManager.ParentPanelType
                    = Helpers.GeneralHelpers.UPDATE_PANEL_TYPES.select;
                //link forwards
                uri.URIDataManager.ChildrenPanelType
                    = Helpers.GeneralHelpers.UPDATE_PANEL_TYPES.select;
                //can't navigate to incentives (edits made in service agreements)
                uri.URIDataManager.ChildrenNodeName = string.Empty;
            }
        }
        /// <summary>
        /// Maps a parent node name to the foreign-key column names used by its
        /// child (join) table and its base table. Both out params are empty when
        /// the node name is not recognized.
        /// </summary>
        public static void GetChildForeignKeyNames(string parentNodeName,
            out string childForeignKeyName, out string baseForeignKeyName)
        {
            childForeignKeyName = string.Empty;
            baseForeignKeyName = string.Empty;
            if (parentNodeName == AGREEMENT_TYPES.serviceaccountgroup.ToString())
            {
                baseForeignKeyName = "AccountId";
                childForeignKeyName = "AccountId";
            }
            else if (parentNodeName == AGREEMENT_BASE_TYPES.servicebase.ToString()
                || parentNodeName == AGREEMENT_TYPES.service.ToString())
            {
                baseForeignKeyName = Agreement.SERVICE_ID;
                childForeignKeyName = "AccountToServiceId";
            }
            else if (parentNodeName == AGREEMENT_BASE_TYPES.incentivebase.ToString()
                || parentNodeName == AGREEMENT_TYPES.incentive.ToString())
            {
                baseForeignKeyName = "IncentiveId";
                childForeignKeyName = string.Empty;
            }
        }
        /// <summary>
        /// Returns the base-table parent foreign-key column name for a node, or
        /// an empty string when the node name is not a service/incentive type.
        /// </summary>
        public static string GetBaseTableParentForeignKeyName(string currentNodeName)
        {
            string sKeyName = string.Empty;
            if (currentNodeName == AGREEMENT_BASE_TYPES.servicebase.ToString()
                || currentNodeName == AGREEMENT_TYPES.service.ToString())
            {
                sKeyName = "ServiceGroupId";
            }
            else if (currentNodeName == AGREEMENT_BASE_TYPES.incentivebase.ToString()
                || currentNodeName == AGREEMENT_TYPES.incentive.ToString())
            {
                sKeyName = "IncentiveGroupId";
            }
            return sKeyName;
        }
        /// <summary>
        /// Runs the 0GetServiceGroups stored procedure and materializes the rows
        /// into ServiceClass objects (ordinals: 0=PKId, 1=Num, 2=Name, 3=Desc).
        /// Returns an empty list when the reader is null.
        /// </summary>
        public async Task<List<ServiceClass>> GetServiceGroupsAsync(ContentURI uri)
        {
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlDataReader serviceGroups = await sqlIO.RunProcAsync("0GetServiceGroups");
            List<ServiceClass> colServiceGroups = new List<ServiceClass>();
            if (serviceGroups != null)
            {
                using (serviceGroups)
                {
                    //build a related service list to return to the client
                    while (await serviceGroups.ReadAsync())
                    {
                        ServiceClass newServiceGroup = new ServiceClass();
                        newServiceGroup.PKId = serviceGroups.GetInt32(0);
                        newServiceGroup.ServiceClassNum = serviceGroups.GetString(1);
                        newServiceGroup.ServiceClassName = serviceGroups.GetString(2);
                        newServiceGroup.ServiceClassDesc = serviceGroups.GetString(3);
                        newServiceGroup.Service = new List<Service>();
                        //nondb (client-side selection state only)
                        newServiceGroup.IsSelected = false;
                        colServiceGroups.Add(newServiceGroup);
                    }
                }
            }
            sqlIO.Dispose();
            return colServiceGroups;
        }
        /// <summary>
        /// Runs 0GetServices for the searcher's club/network/service group and
        /// converts the result set via FillClubServiceList.
        /// </summary>
        public async Task<List<AccountToService>> GetServiceAsync(SearchManager searcher)
        {
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(searcher.SearchResult);
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@AccountId", SqlDbType.Int, 4,
                    searcher.SearchResult.URIMember.ClubInUse.PKId),
                sqlIO.MakeInParam("@NetworkId", SqlDbType.Int, 4,
                    searcher.SearchResult.URINetwork.PKId),
                sqlIO.MakeInParam("@NetworkType", SqlDbType.NVarChar, 25,
                    searcher.NetworkType.ToString()),
                sqlIO.MakeInParam("@ServiceGroupId", SqlDbType.Int, 4,
                    searcher.ServiceGroupSelected.PKId)
            };
            SqlDataReader services = await sqlIO.RunProcAsync("0GetServices", colPrams);
            List<AccountToService> colService = FillClubServiceList(services);
            sqlIO.Dispose();
            return colService;
        }
        /// <summary>
        /// Returns the services subscribed to (or owned) by a club. When
        /// <paramref name="accountId"/> is 0, returns a single default object
        /// instead of querying the database.
        /// </summary>
        public async Task<List<AccountToService>> GetServiceByClubIdAsync(
            ContentURI uri, int accountId)
        {
            List<AccountToService> colClubService = new List<AccountToService>();
            if (accountId == 0)
            {
                //set default objects
                AccountToService atos = new AccountToService(true);
                colClubService.Add(atos);
                return colClubService;
            }
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@AccountId", SqlDbType.Int, 4, accountId)
            };
            //this sp must also run a subquery that sets the owning club id
            SqlDataReader dataReader = await sqlIO.RunProcAsync(
                "0GetServicesByClubId", colPrams);
            colClubService = FillClubServiceList(dataReader);
            sqlIO.Dispose();
            return colClubService;
        }
        /// <summary>
        /// Returns the club-to-service rows for one base service id, optionally
        /// filtered to owners only. When <paramref name="serviceId"/> is 0,
        /// returns a single default object instead of querying the database.
        /// </summary>
        public async Task<List<AccountToService>> GetServiceByServiceIdAsync(
            ContentURI uri, int serviceId, bool isOwner)
        {
            List<AccountToService> colClubService = new List<AccountToService>();
            if (serviceId == 0)
            {
                //set default objects
                AccountToService atos = new AccountToService(true);
                colClubService.Add(atos);
                return colClubService;
            }
            //bit parameter expects 0/1, not a bool
            int iIsOwner = (isOwner) ? 1 : 0;
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@ServiceId", SqlDbType.Int, 4, serviceId),
                sqlIO.MakeInParam("@IsOwner", SqlDbType.Bit, 1, iIsOwner)
            };
            //this sp must also run a subquery that sets the owning club id
            SqlDataReader dataReader = await sqlIO.RunProcAsync(
                "0GetServicesByServiceId", colPrams);
            colClubService = FillClubServiceList(dataReader);
            sqlIO.Dispose();
            return colClubService;
        }
        /// <summary>
        /// Materializes a multi-row services reader (29 ordinal columns) into
        /// AccountToService objects, including nested Service and Account data.
        /// Returns an empty list when the reader is null.
        /// </summary>
        public List<AccountToService> FillClubServiceList(SqlDataReader services)
        {
            List<AccountToService> colService = new List<AccountToService>();
            if (services != null)
            {
                using (services)
                {
                    //build a related service list to return to the client
                    while (services.Read())
                    {
                        AccountToService newService = new AccountToService();
                        newService.Service = new Service();
                        newService.PKId = services.GetInt32(0);
                        newService.Name = services.GetString(1);
                        newService.Amount1 = services.GetInt32(2);
                        newService.Status = services.GetString(3);
                        //NOTE(review): LastChangedDate is assigned twice (ordinals 4
                        //and 8); FillServiceObject reads ordinal 4 into StatusDate —
                        //verify whether ordinal 4 should be StatusDate here too
                        newService.LastChangedDate = services.GetDateTime(4);
                        newService.AuthorizationLevel = services.GetInt16(5);
                        newService.StartDate = services.GetDateTime(6);
                        newService.EndDate = services.GetDateTime(7);
                        newService.LastChangedDate = services.GetDateTime(8);
                        newService.IsOwner = services.GetBoolean(9);
                        newService.AccountId = services.GetInt32(10);
                        newService.ServiceId = services.GetInt32(11);
                        //base table (service being subscribed to or owned)
                        newService.Service = new Service();
                        newService.Service.ServiceNum = services.GetString(12);
                        newService.Service.ServiceName = services.GetString(13);
                        newService.Service.ServiceDesc = services.GetString(14);
                        newService.Service.ServicePrice1 = services.GetDecimal(15);
                        newService.Service.ServiceUnit1 = services.GetString(16);
                        newService.Service.ServiceClassId = services.GetInt32(17);
                        newService.Service.NetworkId = services.GetInt32(18);
                        newService.OwningClubId = services.GetInt32(19);
                        newService.Service.ServiceCurrency1 = services.GetString(20);
                        newService.Account = new Account();
                        newService.Account.PKId = services.GetInt32(21);
                        newService.Account.AccountName = services.GetString(22);
                        newService.Account.AccountDesc = services.GetString(23);
                        newService.Account.AccountLongDesc = services.GetString(24);
                        newService.Account.AccountEmail = services.GetString(25);
                        newService.Account.AccountURI = services.GetString(26);
                        newService.Account.AccountClassId = services.GetInt32(27);
                        newService.Account.GeoRegionId = services.GetInt32(28);
                        //not in db (initialize client-side-only properties)
                        newService.Account.ClubDocFullPath = string.Empty;
                        newService.Account.PrivateAuthorizationLevel
                            = AccountHelper.AUTHORIZATION_LEVELS.none;
                        newService.Account.NetCost = 0;
                        newService.Account.TotalCost = 0;
                        newService.Account.URIFull = string.Empty;
                        colService.Add(newService);
                    }
                }
            }
            return colService;
        }
        /// <summary>
        /// Runs 0GetServiceBase (base service, keyed by URIService.ServiceId) or
        /// 0GetService (join row, keyed by URIService.PKId) and returns the open
        /// reader; the caller owns disposal of both the reader and sqlIO.
        /// </summary>
        public async Task<SqlDataReader> GetServiceAsync(
            Helpers.SqlIOAsync sqlIO, ContentURI uri, bool isBaseService)
        {
            int iServiceId = (isBaseService == true)
                ? uri.URIService.ServiceId : uri.URIService.PKId;
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@Id", SqlDbType.Int, 4, iServiceId)
            };
            string sQry = (isBaseService == true) ? "0GetServiceBase" : "0GetService";
            SqlDataReader dataReader = await sqlIO.RunProcAsync(sQry, colPrams);
            return dataReader;
        }
        /// <summary>
        /// Loads the join-table service row for <paramref name="serviceId"/> into
        /// uri.URIService and, when <paramref name="needsNewApp"/> is set and a
        /// service class is present, switches the app types on the uri.
        /// Always returns false (the bIsBaseService flag used for the lookup).
        /// </summary>
        public async Task<bool> ChangeApplicationAndServiceAsync(ContentURI uri,
            int serviceId, bool needsNewApp)
        {
            bool bIsBaseService = false;
            uri.URIService.PKId = serviceId;
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlDataReader service = await GetServiceAsync(sqlIO, uri, bIsBaseService);
            if (service != null)
            {
                //set the uri's service object
                uri.URIService = FillServiceObject(service);
            }
            sqlIO.Dispose();
            if (needsNewApp == true)
            {
                if (uri.URIService.Service != null)
                {
                    if (uri.URIService.Service.ServiceClassId != 0)
                    {
                        Helpers.GeneralHelpers.SetAppTypes(
                            uri.URIService.Service.ServiceClassId, uri);
                    }
                }
            }
            return bIsBaseService;
        }
        /// <summary>
        /// Loads the base service row identified by uri.URIId into uri.URIService
        /// and unconditionally switches the app types. Always returns true (the
        /// bIsBaseService flag used for the lookup).
        /// </summary>
        public async Task<bool> ChangeApplicationAndServiceFromBaseServiceAsync(
            ContentURI uri)
        {
            bool bIsBaseService = true;
            uri.URIService.ServiceId = uri.URIId;
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlDataReader service = await GetServiceAsync(sqlIO, uri, bIsBaseService);
            if (service != null)
            {
                //set the uri's service object
                uri.URIService = FillServiceObject(service);
            }
            sqlIO.Dispose();
            Helpers.GeneralHelpers.SetAppTypes(
                uri.URIService.Service.ServiceClassId, uri);
            return bIsBaseService;
        }
        /// <summary>
        /// Materializes the first row of a one-record reader (ordinals 0-19) into
        /// an AccountToService; extra rows are iterated but ignored. Returns a
        /// default-constructed object when the reader is null or empty.
        /// </summary>
        public AccountToService FillServiceObject(SqlDataReader service)
        {
            //this is a one-record reader
            int i = 0;
            //avoid null refs with object props
            AccountToService newService = new AccountToService(true);
            if (service != null)
            {
                using (service)
                {
                    while (service.Read())
                    {
                        if (i == 0)
                        {
                            newService.PKId = service.GetInt32(0);
                            newService.Name = service.GetString(1);
                            newService.Amount1 = service.GetInt32(2);
                            newService.Status = service.GetString(3);
                            newService.StatusDate = service.GetDateTime(4);
                            newService.AuthorizationLevel = service.GetInt16(5);
                            newService.StartDate = service.GetDateTime(6);
                            newService.EndDate = service.GetDateTime(7);
                            newService.LastChangedDate = service.GetDateTime(8);
                            newService.IsOwner = service.GetBoolean(9);
                            newService.AccountId = service.GetInt32(10);
                            newService.ServiceId = service.GetInt32(11);
                            //base table (service being subscribed to or owned)
                            newService.Service = new Service();
                            newService.Service.PKId = newService.ServiceId;
                            newService.Service.ServiceNum = service.GetString(12);
                            newService.Service.ServiceName = service.GetString(13);
                            newService.Service.ServiceDesc = service.GetString(14);
                            newService.Service.ServicePrice1 = service.GetDecimal(15);
                            newService.Service.ServiceUnit1 = service.GetString(16);
                            newService.Service.ServiceClassId = service.GetInt32(17);
                            //must switch uripatterns from now on to this networkid (on service layer)
                            newService.Service.NetworkId = service.GetInt32(18);
                            newService.OwningClubId = service.GetInt32(19);
                        }
                        i++;
                    }
                }
            }
            return newService;
        }
        /// <summary>
        /// Parses a status string into SERVICE_STATUS_TYPES; anything other than
        /// "current" maps to notcurrent.
        /// </summary>
        public static SERVICE_STATUS_TYPES GetStatusType(string statusType)
        {
            //(AppHelpers.SERVICE_STATUS_TYPES)Enum.Parse(typeof(AppHelpers.SERVICE_STATUS_TYPES), status);
            SERVICE_STATUS_TYPES eStatusType = SERVICE_STATUS_TYPES.notcurrent;
            if (statusType == SERVICE_STATUS_TYPES.current.ToString())
            {
                eStatusType = SERVICE_STATUS_TYPES.current;
            }
            return eStatusType;
        }
        /// <summary>
        /// Runs 0GetAncestorNamesAgreement to get the delimited ancestor-name
        /// array for the current node, corrects uri.URIName/URIPattern to the
        /// db-stored name, and refreshes uri.URIService when the sp reports a
        /// different service id. Returns the ancestor-name array (may be empty).
        /// </summary>
        public async Task<string> GetAncestorsAndSetServiceAsync(ContentURI uri)
        {
            string ancestorArray = string.Empty;
            int iId = uri.URIId;
            string sNodeName = uri.URINodeName;
            if (uri.URINodeName == AGREEMENT_TYPES.service.ToString())
            {
                //same rule enforced with contenthelper.geturichildren
                //only show the ancestors and children of owned services, not subscribed
                if (uri.URIService != null)
                {
                    if (uri.URIService.ServiceId != 0)
                    {
                        iId = uri.URIService.ServiceId;
                        sNodeName = AGREEMENT_BASE_TYPES.servicebase.ToString();
                    }
                }
            }
            //commontreks
            string sAdminNetworkPartName
                = Data.Helpers.GeneralHelpers.GetDefaultNetworkPartName();
            //service object can be filled in from the same stored procedure
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlParameter[] oPrams = {
                sqlIO.MakeInParam("@Id", SqlDbType.Int, 4, iId),
                sqlIO.MakeInParam("@NodeName", SqlDbType.NVarChar, 50, sNodeName),
                sqlIO.MakeInParam("@NetworkPartName", SqlDbType.NVarChar, 20,
                    uri.URINetworkPartName),
                sqlIO.MakeInParam("@AdminNetworkPartName", SqlDbType.NVarChar, 20,
                    sAdminNetworkPartName),
                sqlIO.MakeInParam("@ParamDelimiter", SqlDbType.NVarChar, 2,
                    Helpers.GeneralHelpers.PARAMETER_DELIMITER),
                sqlIO.MakeOutParam("@AncestorNameArray", SqlDbType.NVarChar, 1000),
                sqlIO.MakeOutParam("@CurrentName", SqlDbType.NVarChar, 150),
                sqlIO.MakeOutParam("@ServiceId", SqlDbType.Int, 8),
            };
            int iNotUsed = await sqlIO.RunProcIntAsync(
                "0GetAncestorNamesAgreement", oPrams);
            if (oPrams[5].Value != System.DBNull.Value)
            {
                ancestorArray = oPrams[5].Value.ToString();
            }
            if (oPrams[6].Value != System.DBNull.Value)
            {
                //always use the name stored in db, not the name passed in the search url
                //if needed, fix the uri's name
                string sCurrentNameFromClient = uri.URIName;
                string sNewName = oPrams[6].Value.ToString();
                RuleHelpers.ResourceRules.ValidateScriptArgument(ref sNewName);
                uri.URIName = sNewName;
                //make sure this is the same name sent by the client (i.e. no edit*)
                if (sCurrentNameFromClient.Equals(uri.URIName) == false)
                {
                    uri.URIPattern = uri.URIPattern.Replace(
                        sCurrentNameFromClient, uri.URIName);
                }
            }
            if (oPrams[7].Value != System.DBNull.Value)
            {
                //only applicable for service nodes
                //(supports switching between agreements and app groups)
                int iServiceId = (int)oPrams[7].Value;
                if (uri.URIService.ServiceId != iServiceId)
                {
                    uri.URIService.ServiceId = iServiceId;
                    bool bIsBaseService = true;
                    //data readers always close sqlio connections, use a new connection
                    Helpers.SqlIOAsync sqlIO2 = new Helpers.SqlIOAsync(uri);
                    SqlDataReader services = await GetServiceAsync(
                        sqlIO2, uri, bIsBaseService);
                    uri.URIService = FillServiceObject(services);
                    sqlIO2.Dispose();
                }
            }
            sqlIO.Dispose();
            return ancestorArray;
        }
        /// <summary>
        /// Runs 0GetAncestorNamesAgreementByServiceId and, when the sp returns an
        /// authorization level, stores it on uri.URIMember.ClubInUse (creating the
        /// member/club objects if needed). Returns the ancestor-name array.
        /// </summary>
        public async Task<string> GetAncestorsAndAuthorizationsAsync(
            ContentURI uri, int clubOrMemberId)
        {
            //NOTE(review): 'ancestors' is never used in this method
            Dictionary<string, int> ancestors = new Dictionary<string,int>();
            string ancestorArray = string.Empty;
            int iAuthorizationLevel = 0;
            string sAdminNetworkPartName
                = Data.Helpers.GeneralHelpers.GetDefaultNetworkPartName();
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlParameter[] oPrams = {
                sqlIO.MakeInParam("@AccountId", SqlDbType.Int, 4, clubOrMemberId),
                sqlIO.MakeInParam("@ServiceId", SqlDbType.Int, 4,
                    uri.URIService.ServiceId),
                sqlIO.MakeInParam("@NetworkPartName", SqlDbType.NVarChar, 20,
                    uri.URINetworkPartName),
                sqlIO.MakeInParam("@AdminNetworkPartName", SqlDbType.NVarChar, 20,
                    sAdminNetworkPartName),
                sqlIO.MakeInParam("@ParamDelimiter", SqlDbType.NVarChar, 2,
                    Helpers.GeneralHelpers.PARAMETER_DELIMITER),
                sqlIO.MakeOutParam("@AncestorNameArray", SqlDbType.NVarChar, 1000),
                sqlIO.MakeOutParam("@AuthorizationLevel", SqlDbType.SmallInt, 2)
            };
            string sQryName = "0GetAncestorNamesAgreementByServiceId";
            int iNotUsed = await sqlIO.RunProcIntAsync(sQryName, oPrams);
            if (oPrams[5].Value != System.DBNull.Value)
            {
                ancestorArray = oPrams[5].Value.ToString();
            }
            if (oPrams[6].Value != System.DBNull.Value)
            {
                AccountHelper.AUTHORIZATION_LEVELS publicOrPrivateAL
                    = AccountHelper.AUTHORIZATION_LEVELS.viewonly;
                iAuthorizationLevel = Helpers.GeneralHelpers.ConvertStringToInt(
                    oPrams[6].Value.ToString());
                publicOrPrivateAL
                    = AccountHelper.GetAuthorizationLevel(iAuthorizationLevel);
                //new in 1.5.2
                if (uri.URIMember == null)
                    uri.URIMember = new AccountToMember(true);
                if (uri.URIMember.ClubInUse == null)
                    uri.URIMember.ClubInUse = new Account(true);
                //set what the db says is their authorization level
                uri.URIMember.ClubInUse.PrivateAuthorizationLevel = publicOrPrivateAL;
            }
            sqlIO.Dispose();
            return ancestorArray;
        }
        /// <summary>
        /// Looks up the base-table group id for a base-table row via
        /// 0GetAgreementBaseTableGroupId. Returns empty when the sp yields null.
        /// </summary>
        /// <param name="connect">Unused in this method body.</param>
        public async Task<string> GetBaseTableGroupIdFromBaseTableIdAync(
            ContentURI uri, string connect, string baseTableId, string currentNodeName)
        {
            string sBaseTableGroupId = string.Empty;
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@Id", SqlDbType.Int, 4, baseTableId),
                sqlIO.MakeInParam("@NodeName", SqlDbType.NVarChar, 25, currentNodeName),
                sqlIO.MakeOutParam("@BaseTableGroupId", SqlDbType.Int, 4)
            };
            string sQryName = "0GetAgreementBaseTableGroupId";
            int iNotUsed = await sqlIO.RunProcIntAsync(sQryName, colPrams);
            if (colPrams[2].Value != System.DBNull.Value)
            {
                sBaseTableGroupId = colPrams[2].Value.ToString();
            }
            sqlIO.Dispose();
            return sBaseTableGroupId;
        }
        /// <summary>
        /// Name of the stored procedure that deletes agreement base and join rows.
        /// </summary>
        public static string GetDeleteBaseandJoinQry()
        {
            return "0DeleteAgreements";
        }
        /// <summary>
        /// Before a service node is inserted, stamps the element with the
        /// logged-in club's account id and sets IsOwner/authorization attributes:
        /// default insertions are owned (IsOwner="1"), other selections are not
        /// (IsOwner="0"); both start at viewonly authorization.
        /// </summary>
        public static void ChangeAttributesForInsertion(
            EditHelpers.EditHelper.ArgumentsEdits addsArguments,
            ContentURI selectedURI, XElement selectedElement)
        {
            //before inserting service nodes, change app-specific attributes
            if (selectedURI.URINodeName
                == AppHelpers.Agreement.AGREEMENT_TYPES.service.ToString())
            {
                //accountid is always currently logged-in user (for all db-inserted contracts)
                selectedElement.SetAttributeValue(General.ACCOUNTID,
                    addsArguments.URIToEdit.URIMember.ClubInUse.PKId.ToString());
                if (addsArguments.URIToEdit.URIDataManager.ServerSubActionType !=
                    Helpers.GeneralHelpers.SERVER_SUBACTION_TYPES.adddefaults)
                {
                    //selections always false - they are not owned
                    selectedElement.SetAttributeValue(ISOWNER, "0");
                    int iAuthorizationLevel
                        = (int)AccountHelper.AUTHORIZATION_LEVELS.viewonly;
                    //selections start as view only
                    selectedElement.SetAttributeValue(Members.AUTHORIZATION_LEVEL,
                        iAuthorizationLevel.ToString());
                }
                else
                {
                    //default insertions are owned
                    selectedElement.SetAttributeValue(ISOWNER, "1");
                    int iAuthorizationLevel
                        = (int)AccountHelper.AUTHORIZATION_LEVELS.viewonly;
                    //public can view but not edit
                    selectedElement.SetAttributeValue(Members.AUTHORIZATION_LEVEL,
                        iAuthorizationLevel.ToString());
                }
            }
        }
        /// <summary>
        /// Delegates the subscribed-member-count update to the member repository.
        /// </summary>
        public async Task<bool> UpdateSubscribedMemberCountAsync(ContentURI uri,
            int currentClubMemberCount, int accountId, int joinServiceId,
            int memberCount)
        {
            bool bHasUpdated = false;
            IMemberRepositoryEF memberReposit
                = new SqlRepositories.MemberRepository(uri);
            bHasUpdated = await memberReposit.UpdateSubscribedMemberCountAsync(uri,
                currentClubMemberCount, accountId, joinServiceId, memberCount);
            return bHasUpdated;
        }
        /// <summary>
        /// Runs 0GetCategories for a network/service group and materializes the
        /// rows into SearchType objects (ordinals: 0=Id, 1=Label, 2=Name,
        /// 3=NetworkId, 4=ServiceClassId).
        /// </summary>
        public async Task<List<SearchManager.SearchType>> GetTypesAsync(
            ContentURI uri, int networkId, int serviceGroupId)
        {
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@NetworkId", SqlDbType.Int, 4, networkId),
                sqlIO.MakeInParam("@ServiceGroupId", SqlDbType.Int, 4, serviceGroupId)
            };
            SqlDataReader categories = await sqlIO.RunProcAsync(
                "0GetCategories", colPrams);
            List<SearchManager.SearchType> colSearchTypes
                = new List<SearchManager.SearchType>();
            if (categories != null)
            {
                using (categories)
                {
                    //categories return id, label, name, servicegroupid and networkid sorted by label
                    //contenturi not strictly needed
                    while (categories.Read())
                    {
                        SearchManager.SearchType searchType
                            = new SearchManager.SearchType();
                        searchType.Id = categories.GetInt32(0);
                        searchType.Label = categories.GetString(1);
                        searchType.Name = categories.GetString(2);
                        searchType.NetworkId = categories.GetInt32(3);
                        searchType.ServiceClassId = categories.GetInt32(4);
                        colSearchTypes.Add(searchType);
                    }
                }
            }
            sqlIO.Dispose();
            return colSearchTypes;
        }
        /// <summary>
        /// Runs 0GetCategories for the uri's service network/class and wraps each
        /// row in a ContentURI for display/navigation.
        /// </summary>
        public async Task<List<ContentURI>> GetNetworkCategoriesAsync(
            ContentURI serviceURI)
        {
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(serviceURI);
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@NetworkId", SqlDbType.Int, 4,
                    serviceURI.URIService.Service.NetworkId),
                sqlIO.MakeInParam("@ServiceGroupId", SqlDbType.Int, 4,
                    serviceURI.URIService.Service.ServiceClassId)
            };
            SqlDataReader categories = await sqlIO.RunProcAsync(
                "0GetCategories", colPrams);
            List<ContentURI> colCategories = new List<ContentURI>();
            string sTypeNodeName = Helpers.GeneralHelpers.GetCategoryNodeName(
                serviceURI.URIService.Service.ServiceClassId);
            if (categories != null)
            {
                //categories return id, label, name, servicegroupid and networkid sorted by label
                //contenturi not strictly needed
                //NOTE(review): unlike the sibling reader methods, this reader is
                //not wrapped in a using block - confirm disposal is handled elsewhere
                while (categories.Read())
                {
                    ContentURI category = new ContentURI(
                        categories.GetString(2), categories.GetInt32(0),
                        serviceURI.URINetworkPartName, sTypeNodeName, string.Empty);
                    category.URIDataManager.Label = categories.GetString(1);
                    if (category.URIService == null)
                        category.URIService = new AccountToService(true);
                    //networkid and servicegroupid can be 0 if it's a static DevTreks-wide category
                    category.URIService.Service.NetworkId = categories.GetInt32(3);
                    category.URIService.Service.ServiceClassId = categories.GetInt32(4);
                    colCategories.Add(category);
                }
            }
            sqlIO.Dispose();
            return colCategories;
        }
        /// <summary>
        /// Inserts <paramref name="numberToAdd"/> categories for a network/service
        /// group via 0InsertCategories; true when the sp affected at least one row.
        /// </summary>
        public async Task<bool> AddNetworkCategoriesAsync(ContentURI uri,
            int serviceGroupId, int networkId, int numberToAdd)
        {
            bool bIsOkToSave = false;
            Helpers.SqlIOAsync sqlIO = new Helpers.SqlIOAsync(uri);
            SqlParameter[] colPrams = {
                sqlIO.MakeInParam("@NetworkId", SqlDbType.Int, 4, networkId),
                sqlIO.MakeInParam("@ServiceGroupId", SqlDbType.Int, 4, serviceGroupId),
                sqlIO.MakeInParam("@NumberToAdd", SqlDbType.Int, 4, numberToAdd)
            };
            string sQryName = "0InsertCategories";
            int iRowCount = await sqlIO.RunProcIntAsync(sQryName, colPrams);
            //no error means sp ran successfully
            if (iRowCount > 0)
                bIsOkToSave = true;
            sqlIO.Dispose();
            return bIsOkToSave;
        }
        /// <summary>
        /// Returns the category doc file name ("[nodename].xml") for the uri's
        /// service class, or empty when no category node name is defined.
        /// </summary>
        public static string GetServiceGroupCategoryName(ContentURI uri)
        {
            string sTypeDocFileName = string.Empty;
            string sTypeName = Helpers.GeneralHelpers.GetCategoryNodeName(
                uri.URIService.Service.ServiceClassId);
            if (!string.IsNullOrEmpty(sTypeName))
            {
                sTypeDocFileName = string.Concat(sTypeName,
                    Helpers.GeneralHelpers.EXTENSION_XML);
            }
            return sTypeDocFileName;
        }
        /// <summary>
        /// Loads the category xml doc at <paramref name="typeDocPath"/> (when it
        /// exists) and converts each child element's id/name/label attributes into
        /// a minimal ContentURI for display. Returns an empty list otherwise.
        /// </summary>
        public static IList<ContentURI> MakeNetworkCategoryListForDisplay(
            ContentURI uri, string typeDocPath)
        {
            IList<ContentURI> categories = new List<ContentURI>();
            if (Helpers.FileStorageIO.URIAbsoluteExists(uri, typeDocPath))
            {
                XElement categoryRoot
                    = Helpers.FileStorageIO.LoadXmlElement(uri, typeDocPath);
                if (categoryRoot != null)
                {
                    if (categoryRoot.HasElements)
                    {
                        foreach (XElement categoryEl in categoryRoot.Elements())
                        {
                            //need a minimal list for display purposes
                            ContentURI category = new ContentURI();
                            string sId = EditHelpers.XmlLinq.GetAttributeValue(
                                categoryEl, AppHelpers.Calculator.cId);
                            category.URIId
                                = Helpers.GeneralHelpers.ConvertStringToInt(sId);
                            category.URIName = EditHelpers.XmlLinq.GetAttributeValue(
                                categoryEl, AppHelpers.Calculator.cName);
                            if (category.URIDataManager == null)
                                category.URIDataManager = new ContentURI.DataManager();
                            category.URIDataManager.Label
                                = EditHelpers.XmlLinq.GetAttributeValue(
                                    categoryEl, AppHelpers.Calculator.cLabel);
                            categories.Add(category);
                        }
                    }
                }
            }
            return categories;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Net;
using Primelabs.Twingly.KestrelApi.Configuration;
using System.Linq;
using Primelabs.Twingly.KestrelApi.Exceptions;
using System.Configuration;

namespace Primelabs.Twingly.KestrelApi
{
    /*
    public interface IEndPointSelectionAlgorithm
    {
        IEnumerable<EndPoint> GetSuitableServer(KestrelOperation operation, string queueName);
    }

    public class RandomizedEndpointSelection : IEndPointSelectionAlgorithm
    {
        private List<EndPoint> _endpoints;

        public RandomizedEndpointSelection(IEnumerable<EndPoint> endpoints)
        {
            _endpoints = endpoints.ToList();
        }

        public IEnumerable<EndPoint> GetSuitableServer(KestrelOperation operation, string queueName)
        {
            for (int i = 0; i < _endpoints.Count; i++)
            {
                yield return null;
            }
        }
    }
    */

    /// <summary>
    /// Client for a cluster of Kestrel queue servers: sends to the first server
    /// that accepts a message, reads in batches while rotating between servers,
    /// and pools connections via <see cref="ServerPool"/>.
    /// </summary>
    public class KestrelClusterManager : IKestrelClusterApi, IDisposable
    {
        //all known server endpoints in the cluster
        protected List<IPEndPoint> _serverEndpoints;
        //connection pool shared by all operations
        protected ServerPool _serverPool;
        IKestrelClusterConfiguration _configuration;

        /// <summary>
        /// Builds the cluster from explicit host addresses using a default
        /// configuration. ("adresses" spelling is kept for source compatibility.)
        /// </summary>
        public KestrelClusterManager(string[] adresses)
        {
            _serverEndpoints = new List<IPEndPoint>();
            foreach (var host in adresses)
                _serverEndpoints.Add(Utils.GetEndPoint(host));

            _configuration = new KestrelClusterConfiguration(_serverEndpoints);
            _serverPool = new ServerPool(_configuration);
        }

        /// <summary>
        /// Builds the cluster from an app.config/web.config section.
        /// </summary>
        /// <exception cref="ConfigurationErrorsException">The section does not exist.</exception>
        public KestrelClusterManager(string sectionName)
        {
            KestrelClusterConfigurationSection section = (KestrelClusterConfigurationSection)ConfigurationManager.GetSection(sectionName);
            if (section == null)
                throw new ConfigurationErrorsException("Section " + sectionName + " is not found.");

            _configuration = section;
            _serverPool = new ServerPool(_configuration);
            _serverEndpoints = _configuration.Servers.ToList();
        }

        /// <summary>
        /// Lazily yields a pooled-client wrapper for each endpoint that can be
        /// acquired (dead servers are skipped). When <paramref name="dispose"/> is
        /// true each wrapper is returned to the pool after the caller's iteration
        /// step; otherwise the caller owns disposal.
        /// </summary>
        /// <param name="randomOrder">Shuffle the endpoint order before iterating.</param>
        /// <param name="dispose">Dispose each wrapper after it is yielded.</param>
        protected IEnumerable<ServerWrapper> IterateServers(bool randomOrder, bool dispose)
        {
            // TODO: Some kind of smarter selection wrt to multiple readers ?
            // Ideally we would like all readers spread out as evenly as possible
            // throughout the servers among the cluster
            var list = _serverEndpoints.ToList();
            if (randomOrder)
                Utils.Shuffle(list);

            foreach (var endpoint in list)
            {
                var client = _serverPool.Acquire(endpoint);
                if (client != null)
                {
                    if (dispose)
                    {
                        using (var retval = new ServerWrapper(_serverPool, client))
                        {
                            yield return retval;
                        }
                    }
                    else
                    {
                        yield return new ServerWrapper(_serverPool, client);
                    }
                }
            }
        }

        /// <summary>
        /// Returns the first acquirable server in random order; the caller owns
        /// disposal of the returned wrapper.
        /// </summary>
        /// <exception cref="NoServersAvailable">No endpoint could be acquired.</exception>
        protected ServerWrapper GetServer()
        {
            foreach (var server in IterateServers(true, false))
                return server;
            throw new NoServersAvailable("Tried " + _serverEndpoints.Count + " endpoints, all found dead!");
        }

        /// <summary>
        /// Splits a user-supplied timeout evenly across the cluster's servers,
        /// capped at the configured MinServerTimeout.
        /// </summary>
        private TimeSpan GetTimeoutPerServer(TimeSpan userTimeout)
        {
            double tmp = userTimeout.TotalSeconds/_serverEndpoints.Count;
            //NOTE(review): this caps the per-server timeout AT MinServerTimeout
            //(returns the configured value when the share is larger) - the "Min"
            //name suggests a floor was intended; confirm against configuration docs
            if (tmp > _configuration.MinServerTimeout.TotalSeconds)
                return _configuration.MinServerTimeout;
            else
                return TimeSpan.FromSeconds(tmp);
        }

        /// <summary>
        /// Flushes the named queue on a single (randomly chosen) server.
        /// </summary>
        public void Flush(string queueName)
        {
            using (var wrapper = GetServer())
            {
                wrapper.Client.Flush(queueName);
            }
        }

        /// <summary>
        /// Flushes all queues on a single (randomly chosen) server.
        /// </summary>
        public void FlushAll()
        {
            using (var wrapper = GetServer())
            {
                wrapper.Client.FlushAll();
            }
        }

        /// <summary>
        /// Collects stats from every reachable server, keyed by endpoint.
        /// Servers that fail mid-collection are silently omitted from the result.
        /// </summary>
        public Dictionary<IPEndPoint, Stats> GetStats()
        {
            var retval = new Dictionary<IPEndPoint, Stats>();
            foreach (var wrapper in IterateServers(false, true))
            {
                try
                {
                    retval[wrapper.Client.Protocol.Socket.EndPoint] = wrapper.Client.GetStats();
                }
                catch (Exception)
                {
                    // Empty on purpose, we just keep on iterating, trying to collect something.
                }
            }
            return retval;
        }

        /// <summary>
        /// Runs <paramref name="action"/> against servers in random order until
        /// one returns true; per-server exceptions are swallowed and iteration
        /// continues with the next server.
        /// </summary>
        /// <exception cref="NoServersAvailable">No server accepted the message.</exception>
        protected bool IterateServersUntilSendSucceeds(Func<ServerWrapper, bool> action)
        {
            foreach (var wrapper in IterateServers(true, true))
            {
                bool retval;
                try
                {
                    retval = action(wrapper);
                }
                catch (Exception)
                {
                    //best-effort: try the next server instead of failing the send
                    continue;
                }
                if (retval)
                    return true;
            }
            throw new NoServersAvailable("Tried " + _serverEndpoints.Count + " endpoints, no server accepted the message!");
        }

        /// <summary>
        /// Enqueues a message on the first server that accepts it.
        /// </summary>
        public bool Send(string queueName, object value)
        {
            return IterateServersUntilSendSucceeds(x => x.Client.Send(queueName, value));
        }

        /// <summary>
        /// Enqueues a message with a relative expiry on the first server that accepts it.
        /// </summary>
        public bool Send(string queueName, object value, TimeSpan expiresIn)
        {
            return IterateServersUntilSendSucceeds(x => x.Client.Send(queueName, value, expiresIn));
        }

        /// <summary>
        /// Enqueues a message with an absolute expiry on the first server that accepts it.
        /// </summary>
        public bool Send(string queueName, object value, DateTime expiresAt)
        {
            return IterateServersUntilSendSucceeds(x => x.Client.Send(queueName, value, expiresAt));
        }

        /// <summary>
        /// Opens a reliable-read stream over the queue using the default message
        /// factory; see the factory overload for semantics.
        /// </summary>
        public IEnumerable<IOpenMessage<T>> Open<T>(string queueName, TimeSpan timeout)
        {
            return Open<T>(queueName, timeout, () => new OpenMessage<T>());
        }

        /// <summary>
        /// Reliably reads messages from the queue until <paramref name="timeout"/>
        /// elapses with no message, the caller breaks the iteration, or a batch
        /// fills up (then a new server is chosen). Each yielded message is
        /// acknowledged (Close) unless the caller sets msg.Close = false before
        /// resuming, in which case it is aborted back onto the queue.
        /// </summary>
        public IEnumerable<IOpenMessage<T>> Open<T>(string queueName, TimeSpan timeout, OpenMessageFactoryDelegate<T> factory)
        {
            bool shouldQuit = false;
            DateTime timeoutAt = DateTime.Now.AddSeconds(timeout.TotalSeconds);
            bool hasOpenUnacknowledgeMessage = false;
            while (!shouldQuit)
            {
                using (var server = GetServer())
                {
                    uint messageThisBatch = 0;
                    while (!shouldQuit)
                    {
                        IOpenMessage<T> msg = factory();
                        T value = default(T);
                        //assume the caller breaks until the yield actually resumes
                        bool userBrokeIteration = true;

                        TimeSpan thisRoundTimeout = timeoutAt - DateTime.Now;
                        if (thisRoundTimeout.TotalSeconds < 0)
                        {
                            // We got a real timeout, ack any outstanding message, and make sure we quit
                            if (hasOpenUnacknowledgeMessage)
                            {
                                server.Client.Close(queueName);
                            }
                            shouldQuit = true;
                            break;
                        }

                        // As we ack the message inside the Open below
                        hasOpenUnacknowledgeMessage = false;
                        if (! server.Client.Open<T>(queueName, out value, GetTimeoutPerServer(thisRoundTimeout), true))
                        {
                            // we got a timeout. break to choose a new server (or timeout), and go again
                            break;
                        }

                        // we need to keep track of this here, in order to close any outstanding message in case
                        // we timeout next time.
                        hasOpenUnacknowledgeMessage = true;
                        try
                        {
                            // We successfully got a message, prepare message and give it to the caller.
                            msg.Value = value;
                            msg.Close = true;
                            yield return msg;
                            //only reached when the caller resumed the iteration
                            userBrokeIteration = false;
                            messageThisBatch += 1;
                            //each delivered message resets the idle timeout window
                            timeoutAt = DateTime.Now + timeout;
                        }
                        finally
                        {
                            if (!msg.Close)
                            {
                                //caller rejected the message - put it back on the queue
                                server.Client.Abort(queueName);
                            }
                            else if (userBrokeIteration || BreakDueToBatchSize(messageThisBatch))
                            {
                                server.Client.Close(queueName);
                            }
                            shouldQuit = userBrokeIteration;
                        }

                        // User didn't break the iteration
                        // If we have read enough message this batch, we should switch server.
                        // otherwise, just continue on the same server
                        if (BreakDueToBatchSize(messageThisBatch))
                            break;
                    }
                }
            }
        }

        /// <summary>
        /// Checks if we should switch server due to batch size being exhausted..
        /// A configured MaxNumberOfMessageInBatch of 0 disables batching.
        /// </summary>
        /// <param name="messageThisBatch"></param>
        /// <returns></returns>
        protected bool BreakDueToBatchSize(uint messageThisBatch)
        {
            return (_configuration.MaxNumberOfMessageInBatch > 0 && messageThisBatch >= _configuration.MaxNumberOfMessageInBatch);
        }

        void IDisposable.Dispose()
        {
            // foreach server, close connections a.s.o...
            _serverPool.Dispose();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Linq;

namespace System.Collections.Immutable
{
    /// <summary>
    /// A set of initialization methods for instances of <see cref="ImmutableArray{T}"/>.
    /// </summary>
    public static class ImmutableArray
    {
        /// <summary>
        /// A two element array useful for throwing exceptions the way LINQ does.
        /// </summary>
        internal static readonly byte[] TwoElementArray = new byte[2];

        /// <summary>Returns the shared empty <see cref="ImmutableArray{T}"/>.</summary>
        /// <typeparam name="T">The type of element stored in the array.</typeparam>
        [Pure]
        public static ImmutableArray<T> Create<T>() => ImmutableArray<T>.Empty;

        /// <summary>Creates an array holding the single element <paramref name="item"/>.</summary>
        /// <typeparam name="T">The type of element stored in the array.</typeparam>
        [Pure]
        public static ImmutableArray<T> Create<T>(T item) =>
            new ImmutableArray<T>(new[] { item });

        /// <summary>Creates a 2-element array from the given elements.</summary>
        /// <typeparam name="T">The type of element stored in the array.</typeparam>
        [Pure]
        public static ImmutableArray<T> Create<T>(T item1, T item2) =>
            new ImmutableArray<T>(new[] { item1, item2 });

        /// <summary>Creates a 3-element array from the given elements.</summary>
        /// <typeparam name="T">The type of element stored in the array.</typeparam>
        [Pure]
        public static ImmutableArray<T> Create<T>(T item1, T item2, T item3) =>
            new ImmutableArray<T>(new[] { item1, item2, item3 });

        /// <summary>Creates a 4-element array from the given elements.</summary>
        /// <typeparam name="T">The type of element stored in the array.</typeparam>
        [Pure]
        public static ImmutableArray<T> Create<T>(T item1, T item2, T item3, T item4) =>
            new ImmutableArray<T>(new[] { item1, item2, item3, item4 });

        /// <summary>
        /// Creates an <see cref="ImmutableArray{T}"/> populated with the contents of <paramref name="items"/>.
        /// </summary>
        /// <typeparam name="T">The type of element stored in the array.</typeparam>
        /// <param name="items">The elements to store in the array.</param>
        /// <returns>An immutable array.</returns>
        [Pure]
        public static ImmutableArray<T> CreateRange<T>(IEnumerable<T> items)
        {
            Requires.NotNull(items, nameof(items));

            // Fast path: if the sequence is secretly a boxed ImmutableArray (possibly of an
            // element type derived from T, via array covariance) we can reuse its backing
            // array directly instead of copying.
            var boxed = items as IImmutableArray;
            if (boxed != null)
            {
                Array underlying = boxed.Array;
                if (underlying == null)
                {
                    throw new InvalidOperationException(SR.InvalidOperationOnDefaultArray);
                }

                // Only ImmutableArray<T> / ImmutableArray<DerivedFromT> can implement both
                // IEnumerable<T> and IImmutableArray, so this cast cannot fail.
                return new ImmutableArray<T>((T[])underlying);
            }

            // Unknown source: snapshot it into a fresh array that we exclusively own.
            // Our own TryGetCount is more thorough than LINQ at discovering the length,
            // which avoids reallocating while enumerating.
            int count;
            return items.TryGetCount(out count)
                ? new ImmutableArray<T>(items.ToArray(count))
                : new ImmutableArray<T>(items.ToArray());
        }

        /// <summary>
        /// Creates an <see cref="ImmutableArray{T}"/> with the specified elements.
        /// </summary>
        /// <typeparam name="T">The type of element stored in the array.</typeparam>
        /// <param name="items">The elements to store in the array.</param>
        /// <returns>An immutable array.</returns>
        [Pure]
        public static ImmutableArray<T> Create<T>(params T[] items)
        {
            // The caller may have passed the array explicitly (not via the params keyword)
            // and could mutate it afterwards, so we always take a defensive copy to uphold
            // the immutability guarantee.
            return items == null ? Create<T>() : CreateDefensiveCopy(items);
        }

        /// <summary>
        /// Creates an <see cref="ImmutableArray{T}"/> from a segment of an existing array.
        /// A defensive copy of the segment is made.
        /// </summary>
        /// <param name="items">The source array.</param>
        /// <param name="start">Index of the first element to include.</param>
        /// <param name="length">Number of elements to include.</param>
        [Pure]
        public static ImmutableArray<T> Create<T>(T[] items, int start, int length)
        {
            Requires.NotNull(items, nameof(items));
            Requires.Range(start >= 0 && start <= items.Length, nameof(start));
            Requires.Range(length >= 0 && start + length <= items.Length, nameof(length));

            if (length == 0)
            {
                // Avoid allocating an array for the empty case.
                return Create<T>();
            }

            var copy = new T[length];
            Array.Copy(items, start, copy, 0, length);
            return new ImmutableArray<T>(copy);
        }

        /// <summary>
        /// Creates an <see cref="ImmutableArray{T}"/> from a segment of an existing immutable array.
        /// The whole-array slice is returned without copying.
        /// </summary>
        /// <param name="items">The source array.</param>
        /// <param name="start">Index of the first element to include.</param>
        /// <param name="length">Number of elements to include.</param>
        [Pure]
        public static ImmutableArray<T> Create<T>(ImmutableArray<T> items, int start, int length)
        {
            Requires.Range(start >= 0 && start <= items.Length, nameof(start));
            Requires.Range(length >= 0 && start + length <= items.Length, nameof(length));

            if (length == 0)
            {
                return Create<T>();
            }

            if (start == 0 && length == items.Length)
            {
                // The requested slice is the whole (already immutable) array; no copy needed.
                return items;
            }

            var slice = new T[length];
            for (int i = 0; i < slice.Length; i++)
            {
                slice[i] = items.array[start + i];
            }

            return new ImmutableArray<T>(slice);
        }

        /// <summary>
        /// Maps every element of <paramref name="items"/> through <paramref name="selector"/>
        /// into a new <see cref="ImmutableArray{T}"/>.
        /// </summary>
        /// <param name="items">The source array.</param>
        /// <param name="selector">The mapping function applied to each element.</param>
        [Pure]
        public static ImmutableArray<TResult> CreateRange<TSource, TResult>(ImmutableArray<TSource> items, Func<TSource, TResult> selector)
        {
            Requires.NotNull(selector, nameof(selector));

            int length = items.Length;
            if (length == 0)
            {
                return Create<TResult>();
            }

            var mapped = new TResult[length];
            for (int i = 0; i < length; i++)
            {
                mapped[i] = selector(items[i]);
            }

            return new ImmutableArray<TResult>(mapped);
        }

        /// <summary>
        /// Maps a slice of <paramref name="items"/> through <paramref name="selector"/>
        /// into a new <see cref="ImmutableArray{T}"/>.
        /// </summary>
        /// <param name="items">The source array.</param>
        /// <param name="start">Index of the first element to include.</param>
        /// <param name="length">Number of elements to include.</param>
        /// <param name="selector">The mapping function applied to each included element.</param>
        [Pure]
        public static ImmutableArray<TResult> CreateRange<TSource, TResult>(ImmutableArray<TSource> items, int start, int length, Func<TSource, TResult> selector)
        {
            int itemsLength = items.Length;
            Requires.Range(start >= 0 && start <= itemsLength, nameof(start));
            Requires.Range(length >= 0 && start + length <= itemsLength, nameof(length));
            Requires.NotNull(selector, nameof(selector));

            if (length == 0)
            {
                return Create<TResult>();
            }

            var mapped = new TResult[length];
            for (int i = 0; i < length; i++)
            {
                mapped[i] = selector(items[start + i]);
            }

            return new ImmutableArray<TResult>(mapped);
        }

        /// <summary>
        /// Maps every element of <paramref name="items"/> through <paramref name="selector"/>,
        /// passing <paramref name="arg"/> to each invocation.
        /// </summary>
        /// <param name="items">The source array.</param>
        /// <param name="selector">The mapping function applied to each element.</param>
        /// <param name="arg">An argument forwarded to the selector.</param>
        [Pure]
        public static ImmutableArray<TResult> CreateRange<TSource, TArg, TResult>(ImmutableArray<TSource> items, Func<TSource, TArg, TResult> selector, TArg arg)
        {
            Requires.NotNull(selector, nameof(selector));

            int length = items.Length;
            if (length == 0)
            {
                return Create<TResult>();
            }

            var mapped = new TResult[length];
            for (int i = 0; i < length; i++)
            {
                mapped[i] = selector(items[i], arg);
            }

            return new ImmutableArray<TResult>(mapped);
        }

        /// <summary>
        /// Maps a slice of <paramref name="items"/> through <paramref name="selector"/>,
        /// passing <paramref name="arg"/> to each invocation.
        /// </summary>
        /// <param name="items">The source array.</param>
        /// <param name="start">Index of the first element to include.</param>
        /// <param name="length">Number of elements to include.</param>
        /// <param name="selector">The mapping function applied to each included element.</param>
        /// <param name="arg">An argument forwarded to the selector.</param>
        [Pure]
        public static ImmutableArray<TResult> CreateRange<TSource, TArg, TResult>(ImmutableArray<TSource> items, int start, int length, Func<TSource, TArg, TResult> selector, TArg arg)
        {
            int itemsLength = items.Length;
            Requires.Range(start >= 0 && start <= itemsLength, nameof(start));
            Requires.Range(length >= 0 && start + length <= itemsLength, nameof(length));
            Requires.NotNull(selector, nameof(selector));

            if (length == 0)
            {
                return Create<TResult>();
            }

            var mapped = new TResult[length];
            for (int i = 0; i < length; i++)
            {
                mapped[i] = selector(items[start + i], arg);
            }

            return new ImmutableArray<TResult>(mapped);
        }

        /// <summary>Creates a new, empty <see cref="ImmutableArray{T}.Builder"/>.</summary>
        /// <typeparam name="T">The type of elements stored in the array.</typeparam>
        [Pure]
        public static ImmutableArray<T>.Builder CreateBuilder<T>() => Create<T>().ToBuilder();

        /// <summary>Creates a new <see cref="ImmutableArray{T}.Builder"/> with the given initial capacity.</summary>
        /// <typeparam name="T">The type of elements stored in the array.</typeparam>
        /// <param name="initialCapacity">The size of the initial array backing the builder.</param>
        [Pure]
        public static ImmutableArray<T>.Builder CreateBuilder<T>(int initialCapacity) =>
            new ImmutableArray<T>.Builder(initialCapacity);

        /// <summary>
        /// Enumerates a sequence exactly once and produces an immutable array of its contents.
        /// If the sequence already is an <see cref="ImmutableArray{T}"/> it is returned as-is.
        /// </summary>
        /// <typeparam name="TSource">The type of element in the sequence.</typeparam>
        /// <param name="items">The sequence to enumerate.</param>
        [Pure]
        public static ImmutableArray<TSource> ToImmutableArray<TSource>(this IEnumerable<TSource> items)
        {
            return items is ImmutableArray<TSource>
                ? (ImmutableArray<TSource>)items
                : CreateRange(items);
        }

        /// <summary>
        /// Searches the entire sorted array for <paramref name="value"/> using each element's
        /// <see cref="IComparable{T}"/> implementation.
        /// </summary>
        /// <returns>
        /// The index of <paramref name="value"/> if found; otherwise the bitwise complement of
        /// the index of the first larger element (or of Length when all elements are smaller).
        /// </returns>
        /// <exception cref="InvalidOperationException">An element does not implement <see cref="IComparable{T}"/>.</exception>
        [Pure]
        public static int BinarySearch<T>(this ImmutableArray<T> array, T value) =>
            Array.BinarySearch<T>(array.array, value);

        /// <summary>
        /// Searches the entire sorted array for <paramref name="value"/> using the supplied
        /// <paramref name="comparer"/> (or each element's <see cref="IComparable{T}"/> when null).
        /// </summary>
        /// <returns>
        /// The index of <paramref name="value"/> if found; otherwise the bitwise complement of
        /// the index of the first larger element (or of Length when all elements are smaller).
        /// </returns>
        /// <exception cref="InvalidOperationException">No usable comparison is available for the elements.</exception>
        [Pure]
        public static int BinarySearch<T>(this ImmutableArray<T> array, T value, IComparer<T> comparer) =>
            Array.BinarySearch<T>(array.array, value, comparer);

        /// <summary>
        /// Searches the sorted range [<paramref name="index"/>, <paramref name="index"/> + <paramref name="length"/>)
        /// for <paramref name="value"/> using each element's <see cref="IComparable{T}"/> implementation.
        /// </summary>
        /// <returns>
        /// The index of <paramref name="value"/> if found; otherwise the bitwise complement of
        /// the index of the first larger element in the range.
        /// </returns>
        /// <exception cref="InvalidOperationException">An element does not implement <see cref="IComparable{T}"/>.</exception>
        /// <exception cref="ArgumentException"><paramref name="index"/> and <paramref name="length"/> do not specify a valid range.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> or <paramref name="length"/> is negative.</exception>
        [Pure]
        public static int BinarySearch<T>(this ImmutableArray<T> array, int index, int length, T value) =>
            Array.BinarySearch<T>(array.array, index, length, value);

        /// <summary>
        /// Searches the sorted range [<paramref name="index"/>, <paramref name="index"/> + <paramref name="length"/>)
        /// for <paramref name="value"/> using the supplied <paramref name="comparer"/>
        /// (or each element's <see cref="IComparable{T}"/> when null).
        /// </summary>
        /// <returns>
        /// The index of <paramref name="value"/> if found; otherwise the bitwise complement of
        /// the index of the first larger element in the range.
        /// </returns>
        /// <exception cref="InvalidOperationException">No usable comparison is available for the elements.</exception>
        /// <exception cref="ArgumentException"><paramref name="index"/> and <paramref name="length"/> do not specify a valid range.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> or <paramref name="length"/> is negative.</exception>
        [Pure]
        public static int BinarySearch<T>(this ImmutableArray<T> array, int index, int length, T value, IComparer<T> comparer) =>
            Array.BinarySearch<T>(array.array, index, length, value, comparer);

        /// <summary>
        /// Wraps a defensive copy of <paramref name="items"/> in a new <see cref="ImmutableArray{T}"/>.
        /// </summary>
        /// <param name="items">The array from which to copy.</param>
        internal static ImmutableArray<T> CreateDefensiveCopy<T>(T[] items)
        {
            Debug.Assert(items != null);

            if (items.Length == 0)
            {
                // Use the shared empty array, allowing the input array to be potentially GC'd.
                return ImmutableArray<T>.Empty;
            }

            var copy = new T[items.Length];
            Array.Copy(items, 0, copy, 0, items.Length);
            return new ImmutableArray<T>(copy);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Text;
using System.Buffers.Binary;
using System.Diagnostics;
using System.Runtime.InteropServices;

namespace System.Reflection.Emit
{
    // Incrementally builds a metadata signature blob (method, field, local-var, property or
    // method-spec signature) into an in-memory byte buffer.
    public sealed class SignatureHelper
    {
        #region Consts Fields
        // Sentinel for m_sizeLoc: this signature kind carries no argument-count byte.
        private const int NO_SIZE_IN_SIG = -1;
        #endregion

        #region Static Members
        /// <summary>Creates a helper for a standard managed method signature.</summary>
        public static SignatureHelper GetMethodSigHelper(Module? mod, Type? returnType, Type[]? parameterTypes)
        {
            return GetMethodSigHelper(mod, CallingConventions.Standard, returnType, null, null, parameterTypes, null, null);
        }

        internal static SignatureHelper GetMethodSigHelper(Module? mod, CallingConventions callingConvention, Type? returnType, int cGenericParam)
        {
            return GetMethodSigHelper(mod, callingConvention, cGenericParam, returnType, null, null, null, null, null);
        }

        public static SignatureHelper GetMethodSigHelper(Module? mod, CallingConventions callingConvention, Type? returnType)
        {
            return GetMethodSigHelper(mod, callingConvention, returnType, null, null, null, null, null);
        }

        // Builds a MethodSpec (generic instantiation) signature: GENERICINST followed by the
        // argument count and each type argument.
        internal static SignatureHelper GetMethodSpecSigHelper(Module? scope, Type[] inst)
        {
            SignatureHelper sigHelp = new SignatureHelper(scope, MdSigCallingConvention.GenericInst);
            sigHelp.AddData(inst.Length);
            foreach (Type t in inst)
                sigHelp.AddArgument(t);
            return sigHelp;
        }

        internal static SignatureHelper GetMethodSigHelper(
            Module? scope, CallingConventions callingConvention,
            Type? returnType, Type[]? requiredReturnTypeCustomModifiers, Type[]? optionalReturnTypeCustomModifiers,
            Type[]? parameterTypes, Type[][]? requiredParameterTypeCustomModifiers, Type[][]? optionalParameterTypeCustomModifiers)
        {
            return GetMethodSigHelper(scope, callingConvention, 0, returnType, requiredReturnTypeCustomModifiers,
                optionalReturnTypeCustomModifiers, parameterTypes, requiredParameterTypeCustomModifiers, optionalParameterTypeCustomModifiers);
        }

        // Core factory: maps managed CallingConventions onto the metadata calling-convention
        // byte, writes the return type (with its custom modifiers), then the parameters.
        internal static SignatureHelper GetMethodSigHelper(
            Module? scope, CallingConventions callingConvention, int cGenericParam,
            Type? returnType, Type[]? requiredReturnTypeCustomModifiers, Type[]? optionalReturnTypeCustomModifiers,
            Type[]? parameterTypes, Type[][]? requiredParameterTypeCustomModifiers, Type[][]? optionalParameterTypeCustomModifiers)
        {
            SignatureHelper sigHelp;
            MdSigCallingConvention intCall;

            // A null return type means void.
            if (returnType == null)
            {
                returnType = typeof(void);
            }

            intCall = MdSigCallingConvention.Default;

            if ((callingConvention & CallingConventions.VarArgs) == CallingConventions.VarArgs)
                intCall = MdSigCallingConvention.Vararg;

            if (cGenericParam > 0)
            {
                intCall |= MdSigCallingConvention.Generic;
            }

            if ((callingConvention & CallingConventions.HasThis) == CallingConventions.HasThis)
                intCall |= MdSigCallingConvention.HasThis;

            sigHelp = new SignatureHelper(scope, intCall, cGenericParam, returnType,
                requiredReturnTypeCustomModifiers, optionalReturnTypeCustomModifiers);
            sigHelp.AddArguments(parameterTypes, requiredParameterTypeCustomModifiers, optionalParameterTypeCustomModifiers);

            return sigHelp;
        }

        /// <summary>Creates a helper for an unmanaged (P/Invoke-style) method signature.</summary>
        public static SignatureHelper GetMethodSigHelper(Module? mod, CallingConvention unmanagedCallConv, Type? returnType)
        {
            MdSigCallingConvention intCall;

            returnType ??= typeof(void);

            // Map the interop CallingConvention enum to the metadata calling-convention byte.
            // Note Winapi is treated as StdCall here.
            if (unmanagedCallConv == CallingConvention.Cdecl)
            {
                intCall = MdSigCallingConvention.C;
            }
            else if (unmanagedCallConv == CallingConvention.StdCall || unmanagedCallConv == CallingConvention.Winapi)
            {
                intCall = MdSigCallingConvention.StdCall;
            }
            else if (unmanagedCallConv == CallingConvention.ThisCall)
            {
                intCall = MdSigCallingConvention.ThisCall;
            }
            else if (unmanagedCallConv == CallingConvention.FastCall)
            {
                intCall = MdSigCallingConvention.FastCall;
            }
            else
            {
                throw new ArgumentException(SR.Argument_UnknownUnmanagedCallConv, nameof(unmanagedCallConv));
            }

            return new SignatureHelper(mod, intCall, returnType, null, null);
        }

        public static SignatureHelper GetLocalVarSigHelper()
        {
            return GetLocalVarSigHelper(null);
        }

        public static SignatureHelper GetMethodSigHelper(CallingConventions callingConvention, Type? returnType)
        {
            return GetMethodSigHelper(null, callingConvention, returnType);
        }

        public static SignatureHelper GetMethodSigHelper(CallingConvention unmanagedCallingConvention, Type? returnType)
        {
            return GetMethodSigHelper(null, unmanagedCallingConvention, returnType);
        }

        public static SignatureHelper GetLocalVarSigHelper(Module? mod)
        {
            return new SignatureHelper(mod, MdSigCallingConvention.LocalSig);
        }

        public static SignatureHelper GetFieldSigHelper(Module? mod)
        {
            return new SignatureHelper(mod, MdSigCallingConvention.Field);
        }

        public static SignatureHelper GetPropertySigHelper(Module? mod, Type? returnType, Type[]? parameterTypes)
        {
            return GetPropertySigHelper(mod, returnType, null, null, parameterTypes, null, null);
        }

        public static SignatureHelper GetPropertySigHelper(Module? mod, Type? returnType,
            Type[]? requiredReturnTypeCustomModifiers, Type[]? optionalReturnTypeCustomModifiers,
            Type[]? parameterTypes, Type[][]? requiredParameterTypeCustomModifiers, Type[][]? optionalParameterTypeCustomModifiers)
        {
            return GetPropertySigHelper(mod, (CallingConventions)0, returnType, requiredReturnTypeCustomModifiers,
                optionalReturnTypeCustomModifiers, parameterTypes, requiredParameterTypeCustomModifiers, optionalParameterTypeCustomModifiers);
        }

        // Builds a Property signature: PROPERTY byte (plus HASTHIS), return type with its
        // modifiers, then the indexer parameters.
        public static SignatureHelper GetPropertySigHelper(Module? mod, CallingConventions callingConvention,
            Type? returnType, Type[]? requiredReturnTypeCustomModifiers, Type[]? optionalReturnTypeCustomModifiers,
            Type[]? parameterTypes, Type[][]? requiredParameterTypeCustomModifiers, Type[][]? optionalParameterTypeCustomModifiers)
        {
            SignatureHelper sigHelp;

            if (returnType == null)
            {
                returnType = typeof(void);
            }

            MdSigCallingConvention intCall = MdSigCallingConvention.Property;

            if ((callingConvention & CallingConventions.HasThis) == CallingConventions.HasThis)
                intCall |= MdSigCallingConvention.HasThis;

            sigHelp = new SignatureHelper(mod, intCall, returnType,
                requiredReturnTypeCustomModifiers, optionalReturnTypeCustomModifiers);
            sigHelp.AddArguments(parameterTypes, requiredParameterTypeCustomModifiers, optionalParameterTypeCustomModifiers);

            return sigHelp;
        }

        // Produces a helper whose buffer contains only the encoding of a single type.
        internal static SignatureHelper GetTypeSigToken(Module module, Type type)
        {
            if (module == null)
                throw new ArgumentNullException(nameof(module));

            if (type == null)
                throw new ArgumentNullException(nameof(type));

            return new SignatureHelper(module, type);
        }
        #endregion

        #region Private Data Members
        private byte[] m_signature = null!; // growable signature blob buffer (set in Init)
        private int m_currSig; // index into m_signature buffer for next available byte
        private int m_sizeLoc; // index into m_signature buffer to put m_argCount (will be NO_SIZE_IN_SIG if no arg count is needed)
        private ModuleBuilder? m_module; // module used to resolve type tokens; null for runtime-type-only signatures
        private bool m_sigDone; // true once the blob has been finalized
        private int m_argCount; // tracking number of arguments in the signature
        #endregion

        #region Constructor
        private SignatureHelper(Module? mod, MdSigCallingConvention callingConvention)
        {
            // Use this constructor to instantiate a local var sig or Field where return type is not applied.
            Init(mod, callingConvention);
        }

        private SignatureHelper(Module? mod, MdSigCallingConvention callingConvention, int cGenericParameters,
            Type returnType, Type[]? requiredCustomModifiers, Type[]? optionalCustomModifiers)
        {
            // Use this constructor to instantiate any signatures that will require a return type.
            Init(mod, callingConvention, cGenericParameters);

            // Field signatures have no return type slot.
            if (callingConvention == MdSigCallingConvention.Field)
                throw new ArgumentException(SR.Argument_BadFieldSig);

            AddOneArgTypeHelper(returnType, requiredCustomModifiers, optionalCustomModifiers);
        }

        private SignatureHelper(Module? mod, MdSigCallingConvention callingConvention,
            Type returnType, Type[]? requiredCustomModifiers, Type[]? optionalCustomModifiers)
            : this(mod, callingConvention, 0, returnType, requiredCustomModifiers, optionalCustomModifiers)
        {
        }

        private SignatureHelper(Module mod, Type type)
        {
            Init(mod);

            AddOneArgTypeHelper(type);
        }

        private void Init(Module? mod)
        {
            m_signature = new byte[32]; // initial capacity; grown on demand via ExpandArray
            m_currSig = 0;
            m_module = mod as ModuleBuilder;
            m_argCount = 0;
            m_sigDone = false;
            m_sizeLoc = NO_SIZE_IN_SIG;

            // Only ModuleBuilder modules can resolve tokens for us.
            if (m_module == null && mod != null)
                throw new ArgumentException(SR.NotSupported_MustBeModuleBuilder);
        }

        private void Init(Module? mod, MdSigCallingConvention callingConvention)
        {
            Init(mod, callingConvention, 0);
        }

        private void Init(Module? mod, MdSigCallingConvention callingConvention, int cGenericParam)
        {
            Init(mod);

            // First byte of every signature blob is the calling-convention byte.
            AddData((byte)callingConvention);

            if (callingConvention == MdSigCallingConvention.Field ||
                callingConvention == MdSigCallingConvention.GenericInst)
            {
                // These signature kinds carry no argument count.
                m_sizeLoc = NO_SIZE_IN_SIG;
            }
            else
            {
                // Generic methods encode their generic parameter count before the arg count.
                if (cGenericParam > 0)
                    AddData(cGenericParam);

                // Reserve one byte for the argument count, back-filled later.
                // NOTE(review): a single reserved byte presumably assumes the final count fits
                // in one compressed byte (<= 0x7F) or is fixed up elsewhere — confirm in the
                // (unseen) finalization code.
                m_sizeLoc = m_currSig++;
            }
        }
        #endregion

        #region Private Members
        private void AddOneArgTypeHelper(Type argument, bool pinned)
        {
            if (pinned)
                AddElementType(CorElementType.ELEMENT_TYPE_PINNED);

            AddOneArgTypeHelper(argument);
        }

        private void AddOneArgTypeHelper(Type clsArgument, Type[]? requiredCustomModifiers, Type[]? optionalCustomModifiers)
        {
            // This function will not increase the argument count. It only fills in bytes
            // in the signature based on clsArgument. This helper is called for return type.

            Debug.Assert(clsArgument != null);

            // Optional modifiers are emitted before required ones; each is CMOD byte + token.
            if (optionalCustomModifiers != null)
            {
                for (int i = 0; i < optionalCustomModifiers.Length; i++)
                {
                    Type t = optionalCustomModifiers[i];

                    if (t == null)
                        throw new ArgumentNullException(nameof(optionalCustomModifiers));

                    if (t.HasElementType)
                        throw new ArgumentException(SR.Argument_ArraysInvalid, nameof(optionalCustomModifiers));

                    if (t.ContainsGenericParameters)
                        throw new ArgumentException(SR.Argument_GenericsInvalid, nameof(optionalCustomModifiers));

                    AddElementType(CorElementType.ELEMENT_TYPE_CMOD_OPT);

                    int token = m_module!.GetTypeToken(t).Token;
                    Debug.Assert(!MetadataToken.IsNullToken(token));
                    AddToken(token);
                }
            }

            if (requiredCustomModifiers != null)
            {
                for (int i = 0; i < requiredCustomModifiers.Length; i++)
                {
                    Type t = requiredCustomModifiers[i];

                    if (t == null)
                        throw new ArgumentNullException(nameof(requiredCustomModifiers));

                    if (t.HasElementType)
                        throw new ArgumentException(SR.Argument_ArraysInvalid, nameof(requiredCustomModifiers));

                    if (t.ContainsGenericParameters)
                        throw new ArgumentException(SR.Argument_GenericsInvalid, nameof(requiredCustomModifiers));

                    AddElementType(CorElementType.ELEMENT_TYPE_CMOD_REQD);

                    int token = m_module!.GetTypeToken(t).Token;
                    Debug.Assert(!MetadataToken.IsNullToken(token));
                    AddToken(token);
                }
            }

            AddOneArgTypeHelper(clsArgument);
        }

        private void AddOneArgTypeHelper(Type clsArgument) { AddOneArgTypeHelperWorker(clsArgument, false); }

        // Recursively encodes one type into the signature blob. lastWasGenericInst suppresses
        // re-emitting GENERICINST for the generic type definition we just descended into.
        private void AddOneArgTypeHelperWorker(Type clsArgument, bool lastWasGenericInst)
        {
            if (clsArgument.IsGenericParameter)
            {
                // MVAR for method generic parameters, VAR for type generic parameters,
                // followed by the parameter position.
                if (clsArgument.DeclaringMethod != null)
                    AddElementType(CorElementType.ELEMENT_TYPE_MVAR);
                else
                    AddElementType(CorElementType.ELEMENT_TYPE_VAR);

                AddData(clsArgument.GenericParameterPosition);
            }
            else if (clsArgument.IsGenericType && (!clsArgument.IsGenericTypeDefinition || !lastWasGenericInst))
            {
                // GENERICINST <definition> <arg count> <args...>
                AddElementType(CorElementType.ELEMENT_TYPE_GENERICINST);

                AddOneArgTypeHelperWorker(clsArgument.GetGenericTypeDefinition(), true);

                Type[] args = clsArgument.GetGenericArguments();
                AddData(args.Length);

                foreach (Type t in args)
                    AddOneArgTypeHelper(t);
            }
            else if (clsArgument is TypeBuilder)
            {
                TypeBuilder clsBuilder = (TypeBuilder)clsArgument;
                TypeToken tkType;

                // Use the builder's own token when it lives in our module; otherwise resolve
                // a token through our module.
                if (clsBuilder.Module.Equals(m_module))
                {
                    tkType = clsBuilder.TypeToken;
                }
                else
                {
                    tkType = m_module!.GetTypeToken(clsArgument);
                }

                if (clsArgument.IsValueType)
                {
                    InternalAddTypeToken(tkType, CorElementType.ELEMENT_TYPE_VALUETYPE);
                }
                else
                {
                    InternalAddTypeToken(tkType, CorElementType.ELEMENT_TYPE_CLASS);
                }
            }
            else if (clsArgument is EnumBuilder)
            {
                // Same as the TypeBuilder case, via the enum's underlying TypeBuilder.
                TypeBuilder clsBuilder = ((EnumBuilder)clsArgument).m_typeBuilder;
                TypeToken tkType;

                if (clsBuilder.Module.Equals(m_module))
                {
                    tkType = clsBuilder.TypeToken;
                }
                else
                {
                    tkType = m_module!.GetTypeToken(clsArgument);
                }

                if (clsArgument.IsValueType)
                {
                    InternalAddTypeToken(tkType, CorElementType.ELEMENT_TYPE_VALUETYPE);
                }
                else
                {
                    InternalAddTypeToken(tkType, CorElementType.ELEMENT_TYPE_CLASS);
                }
            }
            else if (clsArgument.IsByRef)
            {
                AddElementType(CorElementType.ELEMENT_TYPE_BYREF);
                clsArgument = clsArgument.GetElementType()!;
                AddOneArgTypeHelper(clsArgument);
            }
            else if (clsArgument.IsPointer)
            {
                AddElementType(CorElementType.ELEMENT_TYPE_PTR);
                AddOneArgTypeHelper(clsArgument.GetElementType()!);
            }
            else if (clsArgument.IsArray)
            {
                if (clsArgument.IsSZArray)
                {
                    // Single-dimension, zero-based array: SZARRAY <element type>.
                    AddElementType(CorElementType.ELEMENT_TYPE_SZARRAY);
                    AddOneArgTypeHelper(clsArgument.GetElementType()!);
                }
                else
                {
                    // General array: ARRAY <element type> <rank> <sizes> <lower bounds>.
                    AddElementType(CorElementType.ELEMENT_TYPE_ARRAY);
                    AddOneArgTypeHelper(clsArgument.GetElementType()!);

                    // put the rank information
                    int rank = clsArgument.GetArrayRank();
                    AddData(rank); // rank
                    AddData(0); // upper bounds
                    AddData(rank); // lower bound
                    for (int i = 0; i < rank; i++)
                        AddData(0);
                }
            }
            else
            {
                CorElementType type = CorElementType.ELEMENT_TYPE_MAX;

                if (clsArgument is RuntimeType)
                {
                    type = RuntimeTypeHandle.GetCorElementType((RuntimeType)clsArgument);

                    // GetCorElementType returns CorElementType.ELEMENT_TYPE_CLASS for both object and string
                    if (type == CorElementType.ELEMENT_TYPE_CLASS)
                    {
                        if (clsArgument == typeof(object))
                            type = CorElementType.ELEMENT_TYPE_OBJECT;
                        else if (clsArgument == typeof(string))
                            type = CorElementType.ELEMENT_TYPE_STRING;
                    }
                }

                if (IsSimpleType(type))
                {
                    // Primitive/special types are encoded as a single element-type byte.
                    AddElementType(type);
                }
                else if (m_module == null)
                {
                    InternalAddRuntimeType(clsArgument);
                }
                else if (clsArgument.IsValueType)
                {
                    InternalAddTypeToken(m_module.GetTypeToken(clsArgument), CorElementType.ELEMENT_TYPE_VALUETYPE);
                }
                else
                {
                    InternalAddTypeToken(m_module.GetTypeToken(clsArgument), CorElementType.ELEMENT_TYPE_CLASS);
                }
            }
        }

        // Appends a value using the ECMA-335 compressed unsigned integer encoding:
        // 1 byte for values <= 0x7F, 2 bytes (high bit 0x80 set) for <= 0x3FFF,
        // 4 bytes (high bits 0xC0 set) for <= 0x1FFFFFFF; larger values are invalid.
        private void AddData(int data)
        {
            if (m_currSig + 4 > m_signature.Length)
            {
                m_signature = ExpandArray(m_signature);
            }

            if (data <= 0x7F)
            {
                m_signature[m_currSig++] = (byte)data;
            }
            else if (data <= 0x3F_FF)
            {
                BinaryPrimitives.WriteInt16BigEndian(m_signature.AsSpan(m_currSig), (short)(data | 0x80_00));
                m_currSig += 2;
            }
            else if (data <= 0x1F_FF_FF_FF)
            {
                BinaryPrimitives.WriteInt32BigEndian(m_signature.AsSpan(m_currSig), (int)(data | 0xC0_00_00_00));
                m_currSig += 4;
            }
            else
            {
                throw new ArgumentException(SR.Argument_LargeInteger);
            }
        }

        private void AddElementType(CorElementType cvt)
        {
            // Adds an
element to the signature. A managed represenation of CorSigCompressElement if (m_currSig + 1 > m_signature.Length) m_signature = ExpandArray(m_signature); m_signature[m_currSig++] = (byte)cvt; } private void AddToken(int token) { // A managed represenation of CompressToken // Pulls the token appart to get a rid, adds some appropriate bits // to the token and then adds this to the signature. int rid = (token & 0x00FFFFFF); // This is RidFromToken; MetadataTokenType type = (MetadataTokenType)(token & unchecked((int)0xFF000000)); // This is TypeFromToken; if (rid > 0x3FFFFFF) { // token is too big to be compressed throw new ArgumentException(SR.Argument_LargeInteger); } rid <<= 2; // TypeDef is encoded with low bits 00 // TypeRef is encoded with low bits 01 // TypeSpec is encoded with low bits 10 if (type == MetadataTokenType.TypeRef) { // if type is mdtTypeRef rid |= 0x1; } else if (type == MetadataTokenType.TypeSpec) { // if type is mdtTypeSpec rid |= 0x2; } AddData(rid); } private void InternalAddTypeToken(TypeToken clsToken, CorElementType CorType) { // Add a type token into signature. CorType will be either CorElementType.ELEMENT_TYPE_CLASS or CorElementType.ELEMENT_TYPE_VALUETYPE AddElementType(CorType); AddToken(clsToken.Token); } private unsafe void InternalAddRuntimeType(Type type) { // Add a runtime type into the signature. AddElementType(CorElementType.ELEMENT_TYPE_INTERNAL); IntPtr handle = type.GetTypeHandleInternal().Value; // Internal types must have their pointer written into the signature directly (we don't // want to convert to little-endian format on big-endian machines because the value is // going to be extracted and used directly as a pointer (and only within this process)). 
if (m_currSig + sizeof(void*) > m_signature.Length) m_signature = ExpandArray(m_signature); byte* phandle = (byte*)&handle; for (int i = 0; i < sizeof(void*); i++) m_signature[m_currSig++] = phandle[i]; } private static byte[] ExpandArray(byte[] inArray) { // Expand the signature buffer size return ExpandArray(inArray, inArray.Length * 2); } private static byte[] ExpandArray(byte[] inArray, int requiredLength) { // Expand the signature buffer size if (requiredLength < inArray.Length) requiredLength = inArray.Length * 2; byte[] outArray = new byte[requiredLength]; Buffer.BlockCopy(inArray, 0, outArray, 0, inArray.Length); return outArray; } private void IncrementArgCounts() { if (m_sizeLoc == NO_SIZE_IN_SIG) { // We don't have a size if this is a field. return; } m_argCount++; } private void SetNumberOfSignatureElements(bool forceCopy) { // For most signatures, this will set the number of elements in a byte which we have reserved for it. // However, if we have a field signature, we don't set the length and return. // If we have a signature with more than 128 arguments, we can't just set the number of elements, // we actually have to allocate more space (e.g. shift everything in the array one or more spaces to the // right. We do this by making a copy of the array and leaving the correct number of blanks. This new // array is now set to be m_signature and we use the AddData method to set the number of elements properly. // The forceCopy argument can be used to force SetNumberOfSignatureElements to make a copy of // the array. This is useful for GetSignature which promises to trim the array to be the correct size anyway. byte[] temp; int newSigSize; int currSigHolder = m_currSig; if (m_sizeLoc == NO_SIZE_IN_SIG) return; // If we have fewer than 128 arguments and we haven't been told to copy the // array, we can just set the appropriate bit and return. 
if (m_argCount < 0x80 && !forceCopy) { m_signature[m_sizeLoc] = (byte)m_argCount; return; } // We need to have more bytes for the size. Figure out how many bytes here. // Since we need to copy anyway, we're just going to take the cost of doing a // new allocation. if (m_argCount < 0x80) { newSigSize = 1; } else if (m_argCount < 0x4000) { newSigSize = 2; } else { newSigSize = 4; } // Allocate the new array. temp = new byte[m_currSig + newSigSize - 1]; // Copy the calling convention. The calling convention is always just one byte // so we just copy that byte. Then copy the rest of the array, shifting everything // to make room for the new number of elements. temp[0] = m_signature[0]; Buffer.BlockCopy(m_signature, m_sizeLoc + 1, temp, m_sizeLoc + newSigSize, currSigHolder - (m_sizeLoc + 1)); m_signature = temp; // Use the AddData method to add the number of elements appropriately compressed. m_currSig = m_sizeLoc; AddData(m_argCount); m_currSig = currSigHolder + (newSigSize - 1); } #endregion #region Internal Members internal int ArgumentCount => m_argCount; internal static bool IsSimpleType(CorElementType type) { if (type <= CorElementType.ELEMENT_TYPE_STRING) return true; if (type == CorElementType.ELEMENT_TYPE_TYPEDBYREF || type == CorElementType.ELEMENT_TYPE_I || type == CorElementType.ELEMENT_TYPE_U || type == CorElementType.ELEMENT_TYPE_OBJECT) return true; return false; } internal byte[] InternalGetSignature(out int length) { // An internal method to return the signature. Does not trim the // array, but passes out the length of the array in an out parameter. // This is the actual array -- not a copy -- so the callee must agree // to not copy it. // // param length : an out param indicating the length of the array. // return : A reference to the internal ubyte array. if (!m_sigDone) { m_sigDone = true; // If we have more than 128 variables, we can't just set the length, we need // to compress it. 
Unfortunately, this means that we need to copy the entire // array. SetNumberOfSignatureElements(false); } length = m_currSig; return m_signature; } internal byte[] InternalGetSignatureArray() { int argCount = m_argCount; int currSigLength = m_currSig; int newSigSize = currSigLength; // Allocate the new array. if (argCount < 0x7F) newSigSize++; else if (argCount < 0x3FFF) newSigSize += 2; else newSigSize += 4; byte[] temp = new byte[newSigSize]; // copy the sig int sigCopyIndex = 0; // calling convention temp[sigCopyIndex++] = m_signature[0]; // arg size if (argCount <= 0x7F) temp[sigCopyIndex++] = (byte)(argCount & 0xFF); else if (argCount <= 0x3FFF) { temp[sigCopyIndex++] = (byte)((argCount >> 8) | 0x80); temp[sigCopyIndex++] = (byte)(argCount & 0xFF); } else if (argCount <= 0x1FFFFFFF) { temp[sigCopyIndex++] = (byte)((argCount >> 24) | 0xC0); temp[sigCopyIndex++] = (byte)((argCount >> 16) & 0xFF); temp[sigCopyIndex++] = (byte)((argCount >> 8) & 0xFF); temp[sigCopyIndex++] = (byte)(argCount & 0xFF); } else throw new ArgumentException(SR.Argument_LargeInteger); // copy the sig part of the sig Buffer.BlockCopy(m_signature, 2, temp, sigCopyIndex, currSigLength - 2); // mark the end of sig temp[newSigSize - 1] = (byte)CorElementType.ELEMENT_TYPE_END; return temp; } #endregion #region Public Methods public void AddArgument(Type clsArgument) { AddArgument(clsArgument, null, null); } public void AddArgument(Type argument, bool pinned) { if (argument == null) throw new ArgumentNullException(nameof(argument)); IncrementArgCounts(); AddOneArgTypeHelper(argument, pinned); } public void AddArguments(Type[]? arguments, Type[][]? requiredCustomModifiers, Type[][]? 
optionalCustomModifiers) { if (requiredCustomModifiers != null && (arguments == null || requiredCustomModifiers.Length != arguments.Length)) throw new ArgumentException(SR.Format(SR.Argument_MismatchedArrays, nameof(requiredCustomModifiers), nameof(arguments))); if (optionalCustomModifiers != null && (arguments == null || optionalCustomModifiers.Length != arguments.Length)) throw new ArgumentException(SR.Format(SR.Argument_MismatchedArrays, nameof(optionalCustomModifiers), nameof(arguments))); if (arguments != null) { for (int i = 0; i < arguments.Length; i++) { AddArgument(arguments[i], requiredCustomModifiers?[i], optionalCustomModifiers?[i]); } } } public void AddArgument(Type argument, Type[]? requiredCustomModifiers, Type[]? optionalCustomModifiers) { if (m_sigDone) throw new ArgumentException(SR.Argument_SigIsFinalized); if (argument == null) throw new ArgumentNullException(nameof(argument)); IncrementArgCounts(); // Add an argument to the signature. Takes a Type and determines whether it // is one of the primitive types of which we have special knowledge or a more // general class. In the former case, we only add the appropriate short cut encoding, // otherwise we will calculate proper description for the type. AddOneArgTypeHelper(argument, requiredCustomModifiers, optionalCustomModifiers); } public void AddSentinel() { AddElementType(CorElementType.ELEMENT_TYPE_SENTINEL); } public override bool Equals(object? obj) { if (!(obj is SignatureHelper)) { return false; } SignatureHelper temp = (SignatureHelper)obj; if (!temp.m_module!.Equals(m_module) || temp.m_currSig != m_currSig || temp.m_sizeLoc != m_sizeLoc || temp.m_sigDone != m_sigDone) { return false; } for (int i = 0; i < m_currSig; i++) { if (m_signature[i] != temp.m_signature[i]) return false; } return true; } public override int GetHashCode() { // Start the hash code with the hash code of the module and the values of the member variables. 
int HashCode = m_module!.GetHashCode() + m_currSig + m_sizeLoc; // Add one if the sig is done. if (m_sigDone) HashCode++; // Then add the hash code of all the arguments. for (int i = 0; i < m_currSig; i++) HashCode += m_signature[i].GetHashCode(); return HashCode; } public byte[] GetSignature() { return GetSignature(false); } internal byte[] GetSignature(bool appendEndOfSig) { // Chops the internal signature to the appropriate length. Adds the // end token to the signature and marks the signature as finished so that // no further tokens can be added. Return the full signature in a trimmed array. if (!m_sigDone) { if (appendEndOfSig) AddElementType(CorElementType.ELEMENT_TYPE_END); SetNumberOfSignatureElements(true); m_sigDone = true; } // This case will only happen if the user got the signature through // InternalGetSignature first and then called GetSignature. if (m_signature.Length > m_currSig) { byte[] temp = new byte[m_currSig]; Array.Copy(m_signature, 0, temp, 0, m_currSig); m_signature = temp; } return m_signature; } public override string ToString() { StringBuilder sb = new StringBuilder(); sb.Append("Length: ").Append(m_currSig).AppendLine(); if (m_sizeLoc != -1) { sb.Append("Arguments: ").Append(m_signature[m_sizeLoc]).AppendLine(); } else { sb.AppendLine("Field Signature"); } sb.AppendLine("Signature: "); for (int i = 0; i <= m_currSig; i++) { sb.Append(m_signature[i]).Append(" "); } sb.AppendLine(); return sb.ToString(); } #endregion } }
//! \file ArcNitro.cs //! \date Wed Feb 25 20:28:14 2015 //! \brief Nitro+ PAK archives implementation. // // Copyright (C) 2015-2016 by morkt // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
//
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.IO;
using System.Text;
using GameRes.Compression;
using GameRes.Utility;

namespace GameRes.Formats.NitroPlus
{
    /// <summary>Archive entry carrying the per-entry XOR key used by version 3 PAK archives.</summary>
    internal class PakEntry : PackedEntry
    {
        public uint Key;
    }

    /// <summary>Opened Nitro+ PAK archive; remembers the container version (2 or 3).</summary>
    internal class NitroPak : ArcFile
    {
        public int Version;

        public NitroPak (ArcView arc, ArchiveFormat impl, ICollection<Entry> dir, int version)
            : base (arc, impl, dir)
        {
            Version = version;
        }
    }

    /// <summary>
    /// Reader for Nitro+ PAK resource archives, versions 2 and 3.
    /// The archive starts with a little-endian version number; the directory is a
    /// zlib-compressed table beginning at offset 0x114.
    /// </summary>
    [Export(typeof(ArchiveFormat))]
    public class PakOpener : ArchiveFormat
    {
        public override string Tag { get { return "PAK/NITRO+"; } }
        public override string Description { get { return "Nitro+ resource archive"; } }
        public override uint Signature { get { return 0x03; } }
        public override bool IsHierarchic { get { return true; } }
        public override bool CanWrite { get { return false; } }

        public PakOpener ()
        {
            Extensions = new string[] { "pak" };
            Signatures = new uint[] { 2, 3 };
        }

        public override ArcFile TryOpen (ArcView file)
        {
            // First dword selects the directory layout.
            int version = file.View.ReadInt32 (0);
            List<Entry> dir = null;
            if (2 == version)
                dir = OpenPakV2 (file);
            else if (3 == version)
                dir = OpenPakV3 (file);
            if (null == dir)
                return null;
            return new NitroPak (file, this, dir, version);
        }

        // Version 2: plain (unencrypted) zlib-compressed directory at 0x114.
        private List<Entry> OpenPakV2 (ArcView file)
        {
            int count = file.View.ReadInt32 (4);
            if (!IsSaneCount (count))
                return null;
            // Read but unused; kept to document the header layout (uncompressed directory size).
            int unpacked_size = file.View.ReadInt32 (8);
            uint packed_size = file.View.ReadUInt32 (0xC);
            using (var input = file.CreateStream (0x114, packed_size))
            using (var header_stream = new ZLibStream (input, CompressionMode.Decompress))
            using (var header = new BinaryReader (header_stream, Encoding.ASCII, true))
            {
                // Entry data begins right after the compressed directory.
                long base_offset = 0x114 + packed_size;
                var name_buf = new byte[0x40];
                var dir = new List<Entry> (count);
                for (int i = 0; i < count; ++i)
                {
                    // Directory record: name length, name bytes (CP932), offset,
                    // unpacked size, size, is-packed flag, packed size.
                    int name_length = header.ReadInt32();
                    if (name_length <= 0)
                        return null;
                    if (name_length > name_buf.Length)
                        name_buf = new byte[name_length];
                    if (name_length != header.Read (name_buf, 0, name_length))
                        return null;
                    var name = Encodings.cp932.GetString (name_buf, 0, name_length);
                    var entry = FormatCatalog.Instance.Create<PackedEntry> (name);
                    entry.Offset = base_offset + header.ReadUInt32();
                    entry.UnpackedSize = header.ReadUInt32();
                    entry.Size = header.ReadUInt32();
                    entry.IsPacked = header.ReadInt32() != 0;
                    uint psize = header.ReadUInt32();
                    if (entry.IsPacked)
                        entry.Size = psize;
                    if (!entry.CheckPlacement (file.MaxOffset))
                        return null;
                    dir.Add (entry);
                }
                return dir;
            }
        }

        // Version 3: directory fields are XOR-obfuscated with keys derived from an
        // ASCII name stored at offset 4 (header fields) and from each entry name.
        private List<Entry> OpenPakV3 (ArcView file)
        {
            if (0x110 > file.View.Reserve (4, 0x110))
                return null;
            uint size_xor = file.View.ReadUInt32 (0x104);
            // presumably a format marker: header-size XOR key is the constant 0x64
            if (0x64 != size_xor)
                return null;
            byte[] name_buf = file.View.ReadBytes (4, 0x100);
            int name_len = 0;
            for (int i = 0; i < name_buf.Length; ++i)
            {
                // Key name must be printable 7-bit ASCII, NUL-terminated.
                if (0 == name_buf[i])
                    break;
                if (name_buf[i] >= 0x80 || name_buf[i] < 0x20)
                    return null;
                ++name_len;
            }
            if (0 == name_len || name_len > 0x10)
                return null;

            uint header_key = GetKey (name_buf, name_len);
            // Read but unused; documents the header layout (total unpacked size).
            uint unpacked = file.View.ReadUInt32 (0x108) ^ header_key;
            int count = (int)(file.View.ReadUInt32 (0x10c) ^ header_key);
            if (!IsSaneCount (count))
                return null;
            var dir = new List<Entry> (count);
            uint header_size = file.View.ReadUInt32 (0x110) ^ size_xor;
            long base_offset = 0x114 + header_size;
            using (var input = file.CreateStream (0x114, header_size))
            using (var header_stream = new ZLibStream (input, CompressionMode.Decompress))
            using (var header = new BinaryReader (header_stream, Encoding.ASCII, true))
            {
                for (int i = 0; i < count; ++i)
                {
                    name_len = header.ReadInt32();
                    if (name_len <= 0 || name_len > name_buf.Length)
                        return null;
                    if (name_len != header.Read (name_buf, 0, name_len))
                        return null;
                    // Each entry's numeric fields are XORed with a key hashed from its own name.
                    uint key = GetKey (name_buf, name_len);
                    var name = Encodings.cp932.GetString (name_buf, 0, name_len);
                    var entry = FormatCatalog.Instance.Create<PakEntry> (name);
                    entry.Offset = (header.ReadUInt32() ^ key) + base_offset;
                    entry.UnpackedSize = (header.ReadUInt32() ^ key);
                    // This field must still be read to keep the stream position in sync.
                    uint ignored = (header.ReadUInt32() ^ key);
                    entry.IsPacked = (header.ReadUInt32() ^ key) != 0;
                    uint packed_size = (header.ReadUInt32() ^ key);
                    entry.Key = key;
                    entry.Size = entry.IsPacked ? packed_size : entry.UnpackedSize;
                    if (!entry.CheckPlacement (file.MaxOffset))
                        return null;
                    dir.Add (entry);
                }
                return dir;
            }
        }

        // Hash of an entry name: key = key * 0x89 + (signed)byte, truncated to 32 bits.
        // Note the bytes are interpreted as *signed*, which matters for names >= 0x80.
        static uint GetKey (byte[] name, int length)
        {
            int key = 0;
            for (int i = 0; i < length; ++i)
            {
                key *= 0x89;
                key += (sbyte)name[i];
            }
            return (uint)key;
        }

        public override Stream OpenEntry (ArcFile arc, Entry entry)
        {
            // Unpacked v3 entries have their first bytes XOR-encrypted and need special handling.
            var pak_entry = entry as PakEntry;
            if (pak_entry != null && !pak_entry.IsPacked)
                return OpenV3Entry (arc, pak_entry);

            Stream input = arc.File.CreateStream (entry.Offset, entry.Size);
            var packed_entry = entry as PackedEntry;
            if (packed_entry != null && packed_entry.IsPacked)
                input = new ZLibStream (input, CompressionMode.Decompress);
            return input;
        }

        // Decrypts the first min(size, 16) bytes of a stored v3 entry with the entry's
        // rotating XOR key and prepends them to the untouched remainder of the stream.
        private Stream OpenV3Entry (ArcFile arc, PakEntry entry)
        {
            uint enc_size = Math.Min (entry.Size, 0x10u);
            if (0 == enc_size)
                return Stream.Null;
            var buf = arc.File.View.ReadBytes (entry.Offset, enc_size);
            uint key = entry.Key;
            for (int i = 0; i < buf.Length; ++i)
            {
                buf[i] ^= (byte)key;
                key = Binary.RotR (key, 8); // next key byte: rotate right by 8 bits
            }
            if (enc_size == entry.Size)
                return new BinMemoryStream (buf, entry.Name);
            return new PrefixStream (buf, arc.File.CreateStream (entry.Offset+enc_size, entry.Size-enc_size));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime;
using Internal.Runtime.Augments;

namespace System.Collections.ObjectModel
{
    /// <summary>
    /// Base class for a generic collection that wraps an <see cref="IList{T}"/> and exposes
    /// protected virtual mutation hooks (InsertItem/RemoveItem/SetItem/ClearItems) so derived
    /// classes can intercept changes. All public mutators reject a read-only backing list.
    /// </summary>
    [System.Runtime.InteropServices.ComVisible(false)]
    [DebuggerTypeProxy(typeof(Mscorlib_CollectionDebugView<>))]
    [DebuggerDisplay("Count = {Count}")]
    public class Collection<T> : IList<T>, IList, IReadOnlyList<T>
    {
        // The wrapped list; all reads and writes delegate to it.
        private IList<T> _items;
        // Lazily created sync root for the non-generic ICollection contract.
        private Object _syncRoot;

        public Collection()
        {
            // Legacy compat: We must implement our backing list using List<T>() as we have store apps that call Collection<T>.Items and cast
            // the result to List<T>.
            _items = WinRTInterop.Callbacks.CreateSystemCollectionsGenericList<T>();
        }

        /// <summary>Wraps the given list; the collection is a view over it, not a copy.</summary>
        public Collection(IList<T> list)
        {
            if (list == null)
            {
                throw new ArgumentNullException("list");
            }
            _items = list;
        }

        public int Count
        {
            get { return _items.Count; }
        }

        /// <summary>The wrapped backing list, exposed to derived classes.</summary>
        protected IList<T> Items
        {
            get { return _items; }
        }

        public T this[int index]
        {
            get { return _items[index]; }
            set
            {
                // Validation order (read-only first, then range) is part of the observed contract.
                if (_items.IsReadOnly)
                {
                    throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
                }

                if (index < 0 || index >= _items.Count)
                {
                    throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_ListItem);
                }

                SetItem(index, value);
            }
        }

        public void Add(T item)
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            int index = _items.Count;
            InsertItem(index, item);
        }

        public void Clear()
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            ClearItems();
        }

        public void CopyTo(T[] array, int index)
        {
            _items.CopyTo(array, index);
        }

        public bool Contains(T item)
        {
            return _items.Contains(item);
        }

        public IEnumerator<T> GetEnumerator()
        {
            return _items.GetEnumerator();
        }

        public int IndexOf(T item)
        {
            return _items.IndexOf(item);
        }

        public void Insert(int index, T item)
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            // index == Count is valid here (append).
            if (index < 0 || index > _items.Count)
            {
                throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_ListInsert);
            }

            InsertItem(index, item);
        }

        public bool Remove(T item)
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            int index = _items.IndexOf(item);
            if (index < 0) return false;
            RemoveItem(index);
            return true;
        }

        public void RemoveAt(int index)
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            if (index < 0 || index >= _items.Count)
            {
                throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_ListRemoveAt);
            }

            RemoveItem(index);
        }

        // Virtual mutation hooks: every public mutator funnels through one of these,
        // so a derived class overriding them observes all changes.
        protected virtual void ClearItems()
        {
            _items.Clear();
        }

        protected virtual void InsertItem(int index, T item)
        {
            _items.Insert(index, item);
        }

        protected virtual void RemoveItem(int index)
        {
            _items.RemoveAt(index);
        }

        protected virtual void SetItem(int index, T item)
        {
            _items[index] = item;
        }

        bool ICollection<T>.IsReadOnly
        {
            get { return _items.IsReadOnly; }
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return ((IEnumerable)_items).GetEnumerator();
        }

        bool ICollection.IsSynchronized
        {
            get { return false; }
        }

        object ICollection.SyncRoot
        {
            get
            {
                if (_syncRoot == null)
                {
                    // Prefer the backing collection's own sync root; otherwise create one lazily
                    // and race-safely via CompareExchange.
                    ICollection c = _items as ICollection;
                    if (c != null)
                    {
                        _syncRoot = c.SyncRoot;
                    }
                    else
                    {
                        System.Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new Object(), null);
                    }
                }
                return _syncRoot;
            }
        }

        void ICollection.CopyTo(Array array, int index)
        {
            if (array == null)
            {
                throw new ArgumentNullException("array");
            }

            if (array.Rank != 1)
            {
                throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);
            }

            if (array.GetLowerBound(0) != 0)
            {
                throw new ArgumentException(SR.Arg_NonZeroLowerBound);
            }

            if (index < 0)
            {
                throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_NeedNonNegNum);
            }

            if (array.Length - index < Count)
            {
                throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
            }

            // Fast path: the target is exactly T[].
            T[] tArray = array as T[];
            if (tArray != null)
            {
                _items.CopyTo(tArray, index);
            }
            else
            {
                /*
                    ProjectN port note: IsAssignable no longer available on Type surface area. This is a non-reliable check so we should
                    be able to do without.

                    //
                    // Catch the obvious case assignment will fail.
                    // We can found all possible problems by doing the check though.
                    // For example, if the element type of the Array is derived from T,
                    // we can't figure out if we can successfully copy the element beforehand.
                    //
                    IResolvedRuntimeType targetType = array.GetType().GetElementType().ResolvedType;
                    IResolvedRuntimeType sourceType = typeof(T).ResolvedType;
                    if(!(targetType.IsAssignableFrom(sourceType) || sourceType.IsAssignableFrom(targetType)))
                    {
                        throw new ArgumentException(SR.Argument_InvalidArrayType);
                    }
                */

                //
                // We can't cast array of value type to object[], so we don't support
                // widening of primitive types here.
                //
                object[] objects = array as object[];
                if (objects == null)
                {
                    throw new ArgumentException(SR.Argument_InvalidArrayType);
                }

                int count = _items.Count;
                try
                {
                    for (int i = 0; i < count; i++)
                    {
                        objects[index++] = _items[i];
                    }
                }
                catch (ArrayTypeMismatchException)
                {
                    throw new ArgumentException(SR.Argument_InvalidArrayType);
                }
            }
        }

        object IList.this[int index]
        {
            get { return _items[index]; }
            set
            {
                // null is only a valid value when T is a reference type or Nullable<U>.
                if (value == null && !(default(T) == null))
                {
                    throw new ArgumentNullException("value");
                }

                try
                {
                    this[index] = (T)value;
                }
                catch (InvalidCastException)
                {
                    throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(T)), "value");
                }
            }
        }

        bool IList.IsReadOnly
        {
            get { return _items.IsReadOnly; }
        }

        bool IList.IsFixedSize
        {
            get
            {
                // There is no IList<T>.IsFixedSize, so we must assume that only
                // readonly collections are fixed size, if our internal item
                // collection does not implement IList.  Note that Array implements
                // IList, and therefore T[] and U[] will be fixed-size.
                IList list = _items as IList;
                if (list != null)
                {
                    return list.IsFixedSize;
                }
                return _items.IsReadOnly;
            }
        }

        int IList.Add(object value)
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            if (value == null && !(default(T) == null))
            {
                throw new ArgumentNullException("value");
            }

            try
            {
                Add((T)value);
            }
            catch (InvalidCastException)
            {
                throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(T)), "value");
            }

            // Index of the item just appended.
            return this.Count - 1;
        }

        bool IList.Contains(object value)
        {
            if (IsCompatibleObject(value))
            {
                return Contains((T)value);
            }
            return false;
        }

        int IList.IndexOf(object value)
        {
            if (IsCompatibleObject(value))
            {
                return IndexOf((T)value);
            }
            return -1;
        }

        void IList.Insert(int index, object value)
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            if (value == null && !(default(T) == null))
            {
                throw new ArgumentNullException("value");
            }

            try
            {
                Insert(index, (T)value);
            }
            catch (InvalidCastException)
            {
                throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(T)), "value");
            }
        }

        void IList.Remove(object value)
        {
            if (_items.IsReadOnly)
            {
                throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
            }

            if (IsCompatibleObject(value))
            {
                Remove((T)value);
            }
        }

        private static bool IsCompatibleObject(object value)
        {
            // Non-null values are fine.  Only accept nulls if T is a class or Nullable<U>.
            // Note that default(T) is not equal to null for value types except when T is Nullable<U>.
            return ((value is T) || (value == null && default(T) == null));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
** Classes:  Common Object Security class
**
**
===========================================================*/

using Microsoft.Win32;
using System;
using System.Collections;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.InteropServices;
using System.Security.Principal;

namespace System.Security.AccessControl
{
    // Base class for security objects whose DACL/SACL are manipulated through
    // strongly typed AccessRule/AuditRule objects. Descendants supply the
    // concrete rule factories (AccessRuleFactory / AuditRuleFactory).
    public abstract class CommonObjectSecurity : ObjectSecurity
    {
        #region Constructors

        protected CommonObjectSecurity(bool isContainer)
            : base(isContainer, false)
        {
        }

        internal CommonObjectSecurity(CommonSecurityDescriptor securityDescriptor)
            : base(securityDescriptor)
        {
        }

        #endregion

        #region Private Methods

        // Ported from NDP\clr\src\BCL\System\Security\Principal\SID.cs since we can't access System.Security.Principal.IdentityReference's internals
        private static bool IsValidTargetTypeStatic(Type targetType)
        {
            if (targetType == typeof(NTAccount))
            {
                return true;
            }
            else if (targetType == typeof(SecurityIdentifier))
            {
                return true;
            }
            else
            {
                return false;
            }
        }

        // Extracts rules from the DACL (access == true) or SACL (access == false),
        // translating identities to the requested targetType (NTAccount or
        // SecurityIdentifier). Runs entirely under the read lock.
        private AuthorizationRuleCollection GetRules(bool access, bool includeExplicit, bool includeInherited, System.Type targetType)
        {
            ReadLock();

            try
            {
                AuthorizationRuleCollection result = new AuthorizationRuleCollection();

                if (!IsValidTargetTypeStatic(targetType))
                {
                    throw new ArgumentException(
                        SR.Arg_MustBeIdentityReferenceType,
                        nameof(targetType));
                }

                CommonAcl acl = null;

                if (access)
                {
                    if ((_securityDescriptor.ControlFlags & ControlFlags.DiscretionaryAclPresent) != 0)
                    {
                        acl = _securityDescriptor.DiscretionaryAcl;
                    }
                }
                else // !access == audit
                {
                    if ((_securityDescriptor.ControlFlags & ControlFlags.SystemAclPresent) != 0)
                    {
                        acl = _securityDescriptor.SystemAcl;
                    }
                }

                if (acl == null)
                {
                    //
                    // The required ACL was not present; return an empty collection.
                    //
                    return result;
                }

                IdentityReferenceCollection irTarget = null;

                if (targetType != typeof(SecurityIdentifier))
                {
                    // First pass: collect the SIDs of every ACE we will surface, so
                    // they can be translated to the target type in one batch call.
                    IdentityReferenceCollection irSource = new IdentityReferenceCollection(acl.Count);

                    for (int i = 0; i < acl.Count; i++)
                    {
                        //
                        // Calling the indexer on a common ACL results in cloning,
                        // (which would not be the case if we were to use the internal RawAcl property)
                        // but also ensures that the resulting order of ACEs is proper
                        // However, this is a big price to pay - cloning all the ACEs just so that
                        // the canonical order could be ascertained just once.
                        // A better way would be to have an internal method that would canonicalize the ACL
                        // and call it once, then use the RawAcl.
                        //
                        CommonAce ace = acl[i] as CommonAce;
                        if (AceNeedsTranslation(ace, access, includeExplicit, includeInherited))
                        {
                            irSource.Add(ace.SecurityIdentifier);
                        }
                    }

                    irTarget = irSource.Translate(targetType);
                }

                // Second pass: build the rule objects. targetIndex walks irTarget in
                // lockstep with the ACEs selected by AceNeedsTranslation above, so
                // both passes must apply the identical filter.
                int targetIndex = 0;
                for (int i = 0; i < acl.Count; i++)
                {
                    //
                    // See the cloning note on the first pass above - the indexer clones
                    // each ACE but guarantees canonical ordering.
                    //
                    CommonAce ace = acl[i] as CommonAce;

                    if (AceNeedsTranslation(ace, access, includeExplicit, includeInherited))
                    {
                        IdentityReference iref = (targetType == typeof(SecurityIdentifier)) ? ace.SecurityIdentifier : irTarget[targetIndex++];

                        if (access)
                        {
                            AccessControlType type;

                            if (ace.AceQualifier == AceQualifier.AccessAllowed)
                            {
                                type = AccessControlType.Allow;
                            }
                            else
                            {
                                type = AccessControlType.Deny;
                            }

                            result.AddRule(
                                AccessRuleFactory(
                                    iref,
                                    ace.AccessMask,
                                    ace.IsInherited,
                                    ace.InheritanceFlags,
                                    ace.PropagationFlags,
                                    type));
                        }
                        else
                        {
                            result.AddRule(
                                AuditRuleFactory(
                                    iref,
                                    ace.AccessMask,
                                    ace.IsInherited,
                                    ace.InheritanceFlags,
                                    ace.PropagationFlags,
                                    ace.AuditFlags));
                        }
                    }
                }

                return result;
            }
            finally
            {
                ReadUnlock();
            }
        }

        // Decides whether an ACE should be surfaced as a rule: it must be a
        // CommonAce of the right qualifier for the requested ACL kind, and match
        // the explicit/inherited filter.
        private bool AceNeedsTranslation(CommonAce ace, bool isAccessAce, bool includeExplicit, bool includeInherited)
        {
            if (ace == null)
            {
                //
                // Only consider common ACEs
                //
                return false;
            }

            if (isAccessAce)
            {
                if (ace.AceQualifier != AceQualifier.AccessAllowed &&
                    ace.AceQualifier != AceQualifier.AccessDenied)
                {
                    return false;
                }
            }
            else
            {
                if (ace.AceQualifier != AceQualifier.SystemAudit)
                {
                    return false;
                }
            }

            if ((includeExplicit &&
                ((ace.AceFlags & AceFlags.Inherited) == 0)) ||
                (includeInherited &&
                ((ace.AceFlags & AceFlags.Inherited) != 0)))
            {
                return true;
            }

            return false;
        }

        //
        // Modifies the DACL
        //
        protected override bool ModifyAccess(AccessControlModification modification, AccessRule rule, out bool modified)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool result = true;

                if (_securityDescriptor.DiscretionaryAcl == null)
                {
                    // Removals against a missing DACL are trivially "not modified";
                    // any other modification lazily creates an empty DACL first.
                    if (modification == AccessControlModification.Remove || modification == AccessControlModification.RemoveAll || modification == AccessControlModification.RemoveSpecific)
                    {
                        modified = false;
                        return result;
                    }

                    _securityDescriptor.DiscretionaryAcl = new DiscretionaryAcl(IsContainer, IsDS, GenericAcl.AclRevision, 1);
                    _securityDescriptor.AddControlFlags(ControlFlags.DiscretionaryAclPresent);
                }

                SecurityIdentifier sid = rule.IdentityReference.Translate(typeof(SecurityIdentifier)) as SecurityIdentifier;

                if (rule.AccessControlType == AccessControlType.Allow)
                {
                    switch (modification)
                    {
                        case AccessControlModification.Add:
                            _securityDescriptor.DiscretionaryAcl.AddAccess(AccessControlType.Allow, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.Set:
                            _securityDescriptor.DiscretionaryAcl.SetAccess(AccessControlType.Allow, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.Reset:
                            // Reset clears all Deny entries for the SID before setting Allow.
                            _securityDescriptor.DiscretionaryAcl.RemoveAccess(AccessControlType.Deny, sid, -1, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, 0);
                            _securityDescriptor.DiscretionaryAcl.SetAccess(AccessControlType.Allow, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.Remove:
                            result = _securityDescriptor.DiscretionaryAcl.RemoveAccess(AccessControlType.Allow, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.RemoveAll:
                            result = _securityDescriptor.DiscretionaryAcl.RemoveAccess(AccessControlType.Allow, sid, -1, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, 0);
                            if (result == false)
                            {
                                Debug.Assert(false, "Invalid operation");
                                throw new InvalidOperationException();
                            }

                            break;

                        case AccessControlModification.RemoveSpecific:
                            _securityDescriptor.DiscretionaryAcl.RemoveAccessSpecific(AccessControlType.Allow, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        default:
                            throw new ArgumentOutOfRangeException(
                                nameof(modification),
                                SR.ArgumentOutOfRange_Enum);
                    }
                }
                else if (rule.AccessControlType == AccessControlType.Deny)
                {
                    switch (modification)
                    {
                        case AccessControlModification.Add:
                            _securityDescriptor.DiscretionaryAcl.AddAccess(AccessControlType.Deny, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.Set:
                            _securityDescriptor.DiscretionaryAcl.SetAccess(AccessControlType.Deny, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.Reset:
                            // Mirror of the Allow case: clear all Allow entries, then set Deny.
                            _securityDescriptor.DiscretionaryAcl.RemoveAccess(AccessControlType.Allow, sid, -1, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, 0);
                            _securityDescriptor.DiscretionaryAcl.SetAccess(AccessControlType.Deny, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.Remove:
                            result = _securityDescriptor.DiscretionaryAcl.RemoveAccess(AccessControlType.Deny, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        case AccessControlModification.RemoveAll:
                            result = _securityDescriptor.DiscretionaryAcl.RemoveAccess(AccessControlType.Deny, sid, -1, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, 0);
                            if (result == false)
                            {
                                Debug.Assert(false, "Invalid operation");
                                throw new InvalidOperationException();
                            }

                            break;

                        case AccessControlModification.RemoveSpecific:
                            _securityDescriptor.DiscretionaryAcl.RemoveAccessSpecific(AccessControlType.Deny, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                            break;

                        default:
                            throw new ArgumentOutOfRangeException(
                                nameof(modification),
                                SR.ArgumentOutOfRange_Enum);
                    }
                }
                else
                {
                    Debug.Assert(false, "rule.AccessControlType unrecognized");
                    throw new ArgumentException(SR.Format(SR.Arg_EnumIllegalVal, (int)rule.AccessControlType), "rule.AccessControlType");
                }

                modified = result;
                AccessRulesModified |= modified;
                return result;
            }
            finally
            {
                WriteUnlock();
            }
        }

        //
        // Modifies the SACL
        //
        protected override bool ModifyAudit(AccessControlModification modification, AuditRule rule, out bool modified)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool result = true;

                if (_securityDescriptor.SystemAcl == null)
                {
                    // Same lazy-creation policy as ModifyAccess, but for the SACL.
                    if (modification == AccessControlModification.Remove || modification == AccessControlModification.RemoveAll || modification == AccessControlModification.RemoveSpecific)
                    {
                        modified = false;
                        return result;
                    }

                    _securityDescriptor.SystemAcl = new SystemAcl(IsContainer, IsDS, GenericAcl.AclRevision, 1);
                    _securityDescriptor.AddControlFlags(ControlFlags.SystemAclPresent);
                }

                SecurityIdentifier sid = rule.IdentityReference.Translate(typeof(SecurityIdentifier)) as SecurityIdentifier;

                switch (modification)
                {
                    case AccessControlModification.Add:
                        _securityDescriptor.SystemAcl.AddAudit(rule.AuditFlags, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                        break;

                    case AccessControlModification.Set:
                        _securityDescriptor.SystemAcl.SetAudit(rule.AuditFlags, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                        break;

                    case AccessControlModification.Reset:
                        // NOTE: for audit rules Reset is identical to Set (there is no
                        // opposite rule type to clear, unlike Allow/Deny in the DACL).
                        _securityDescriptor.SystemAcl.SetAudit(rule.AuditFlags, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                        break;

                    case AccessControlModification.Remove:
                        result = _securityDescriptor.SystemAcl.RemoveAudit(rule.AuditFlags, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                        break;

                    case AccessControlModification.RemoveAll:
                        result = _securityDescriptor.SystemAcl.RemoveAudit(AuditFlags.Failure | AuditFlags.Success, sid, -1, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, 0);
                        if (result == false)
                        {
                            throw new InvalidOperationException();
                        }

                        break;

                    case AccessControlModification.RemoveSpecific:
                        _securityDescriptor.SystemAcl.RemoveAuditSpecific(rule.AuditFlags, sid, rule.AccessMask, rule.InheritanceFlags, rule.PropagationFlags);
                        break;

                    default:
                        throw new ArgumentOutOfRangeException(
                            nameof(modification),
                            SR.ArgumentOutOfRange_Enum);
                }

                modified = result;
                AuditRulesModified |= modified;
                return result;
            }
            finally
            {
                WriteUnlock();
            }
        }

        #endregion

        #region Protected Methods

        #endregion

        #region Public Methods

        // The wrappers below validate the rule, take the write lock, and delegate
        // to ModifyAccess/ModifyAudit with the corresponding modification kind.
        // (WriteLock is taken here and again inside Modify*; the lock is assumed
        // reentrant for the same thread - preserved from the original design.)
        protected void AddAccessRule(AccessRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                ModifyAccess(AccessControlModification.Add, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected void SetAccessRule(AccessRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                ModifyAccess(AccessControlModification.Set, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected void ResetAccessRule(AccessRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                ModifyAccess(AccessControlModification.Reset, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
            return;
        }

        // Returns false when no matching rule was found to remove.
        protected bool RemoveAccessRule(AccessRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                if (_securityDescriptor == null)
                {
                    return true;
                }

                bool modified;
                return ModifyAccess(AccessControlModification.Remove, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected void RemoveAccessRuleAll(AccessRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                if (_securityDescriptor == null)
                {
                    return;
                }

                bool modified;
                ModifyAccess(AccessControlModification.RemoveAll, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
            return;
        }

        protected void RemoveAccessRuleSpecific(AccessRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                if (_securityDescriptor == null)
                {
                    return;
                }

                bool modified;
                ModifyAccess(AccessControlModification.RemoveSpecific, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected void AddAuditRule(AuditRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                ModifyAudit(AccessControlModification.Add, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected void SetAuditRule(AuditRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                ModifyAudit(AccessControlModification.Set, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected bool RemoveAuditRule(AuditRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                return ModifyAudit(AccessControlModification.Remove, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected void RemoveAuditRuleAll(AuditRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                ModifyAudit(AccessControlModification.RemoveAll, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        protected void RemoveAuditRuleSpecific(AuditRule rule)
        {
            if (rule == null)
            {
                throw new ArgumentNullException(nameof(rule));
            }
            Contract.EndContractBlock();

            WriteLock();

            try
            {
                bool modified;
                ModifyAudit(AccessControlModification.RemoveSpecific, rule, out modified);
            }
            finally
            {
                WriteUnlock();
            }
        }

        public AuthorizationRuleCollection GetAccessRules(bool includeExplicit, bool includeInherited, System.Type targetType)
        {
            return GetRules(true, includeExplicit, includeInherited, targetType);
        }

        public AuthorizationRuleCollection GetAuditRules(bool includeExplicit, bool includeInherited, System.Type targetType)
        {
            return GetRules(false, includeExplicit, includeInherited, targetType);
        }
        #endregion
    }
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Threading;
using Rimss.GraphicsProcessing.Palette.Helpers;
using Rimss.GraphicsProcessing.Palette.PathProviders;

namespace Rimss.GraphicsProcessing.Palette.Quantizers
{
    /// <summary>
    /// Base class for color quantizers. Tracks the set of unique colors seen in
    /// an image and provides default palette / palette-index behavior that
    /// descendants can build on.
    /// </summary>
    public abstract class BaseColorQuantizer : IColorQuantizer
    {
        #region | Constants |

        /// <summary>
        /// This index will represent invalid palette index.
        /// </summary>
        protected const Int32 InvalidIndex = -1;

        #endregion

        #region | Fields |

        // set by OnGetPalette when the palette could be produced directly from UniqueColors
        private Boolean paletteFound;

        // last index handed out for a newly seen unique color (incremented atomically)
        private Int64 uniqueColorIndex;

        private IPathProvider pathProvider;

        /// <summary>
        /// Maps an ARGB color key to the palette index assigned when the color was first seen.
        /// </summary>
        protected readonly ConcurrentDictionary<Int32, Int16> UniqueColors;

        #endregion

        #region | Constructors |

        /// <summary>
        /// Initializes a new instance of the <see cref="BaseColorQuantizer"/> class.
        /// </summary>
        protected BaseColorQuantizer()
        {
            pathProvider = null;
            uniqueColorIndex = -1;
            UniqueColors = new ConcurrentDictionary<Int32, Int16>();
        }

        #endregion

        #region | Methods |

        /// <summary>
        /// Changes the path provider.
        /// </summary>
        /// <param name="pathProvider">The path provider.</param>
        public void ChangePathProvider(IPathProvider pathProvider)
        {
            this.pathProvider = pathProvider;
        }

        #endregion

        #region | Helper methods |

        /// <summary>
        /// Returns the configured path provider, lazily creating the default one if none was set.
        /// </summary>
        private IPathProvider GetPathProvider()
        {
            // if there is no path provider, it attempts to create a default one; integrated in the quantizer
            IPathProvider result = pathProvider ?? (pathProvider = OnCreateDefaultPathProvider());

            // if the provider exists; or default one was created for these purposes.. use it
            if (result == null)
            {
                // NOTE(review): ArgumentNullException(String) treats the string as a parameter
                // name rather than a message; exception type kept as-is so existing catch
                // clauses keep working (original also used a no-op string.Format here).
                String message = "The path provider is not initialized! Please use SetPathProvider() method on quantizer.";
                throw new ArgumentNullException(message);
            }

            // provider was obtained somehow, use it
            return result;
        }

        #endregion

        #region | Abstract/virtual methods |

        /// <summary>
        /// Called when quantizer is about to be prepared for next round.
        /// Resets all per-image state.
        /// </summary>
        protected virtual void OnPrepare(ImageBuffer image)
        {
            uniqueColorIndex = -1;
            paletteFound = false;
            UniqueColors.Clear();
        }

        /// <summary>
        /// Called when color is to be added. Registers the color key with the next
        /// free index; an already-seen key keeps its existing index.
        /// </summary>
        protected virtual void OnAddColor(Color color, Int32 key, Int32 x, Int32 y)
        {
            // NOTE(review): the Byte cast wraps the running index at 256, so with more
            // than 256 unique colors the stored indexes collide and OnGetPalette's
            // ordering degrades. Kept as-is to preserve existing behavior - confirm
            // intent before supporting palettes larger than 256 colors.
            UniqueColors.AddOrUpdate(key,
                colorKey => (Byte) Interlocked.Increment(ref uniqueColorIndex),
                (colorKey, colorIndex) => colorIndex);
        }

        /// <summary>
        /// Called when a need to create default path provider arisen.
        /// </summary>
        protected virtual IPathProvider OnCreateDefaultPathProvider()
        {
            // Fixed: the original assigned one StandardPathProvider to the field and
            // returned a second, distinct instance (which the caller then stored,
            // overwriting the first). Create a single instance and return it.
            pathProvider = new StandardPathProvider();
            return pathProvider;
        }

        /// <summary>
        /// Called when quantized palette is needed. Returns the palette directly from
        /// the tracked unique colors when they fit in the requested color count;
        /// otherwise returns null so a descendant can quantize.
        /// </summary>
        protected virtual List<Color> OnGetPalette(Int32 colorCount)
        {
            // early optimalization, in case the color count is lower than total unique color count
            if (UniqueColors.Count > 0 && colorCount >= UniqueColors.Count)
            {
                // palette was found
                paletteFound = true;

                // generates the palette from unique numbers, in first-seen order,
                // with alpha forced to fully opaque
                return UniqueColors.
                    OrderBy(pair => pair.Value).
                    Select(pair => Color.FromArgb(pair.Key)).
                    Select(color => Color.FromArgb(255, color.R, color.G, color.B)).
                    ToList();
            }

            // otherwise make it descendant responsibility
            return null;
        }

        /// <summary>
        /// Called when get palette index for a given color should be returned.
        /// Yields <see cref="InvalidIndex"/> unless the fast palette path was taken.
        /// </summary>
        protected virtual void OnGetPaletteIndex(Color color, Int32 key, Int32 x, Int32 y, out Int32 paletteIndex)
        {
            // by default unknown index is returned
            paletteIndex = InvalidIndex;
            Int16 foundIndex;

            // if we previously found palette quickly (without quantization), use it
            if (paletteFound && UniqueColors.TryGetValue(key, out foundIndex))
            {
                paletteIndex = foundIndex;
            }
        }

        /// <summary>
        /// Called when get color count.
        /// </summary>
        protected virtual Int32 OnGetColorCount()
        {
            return UniqueColors.Count;
        }

        /// <summary>
        /// Called when about to clear left-overs after quantization.
        /// </summary>
        protected virtual void OnFinish()
        {
            // do nothing here
        }

        #endregion

        #region << IPathProvider >>

        /// <summary>
        /// See <see cref="IPathProvider.GetPointPath"/> for more details.
        /// </summary>
        public IList<Point> GetPointPath(Int32 width, Int32 heigth)
        {
            return GetPathProvider().GetPointPath(width, heigth);
        }

        #endregion

        #region << IColorQuantizer >>

        /// <summary>
        /// See <see cref="IColorQuantizer.AllowParallel"/> for more details.
        /// </summary>
        public abstract Boolean AllowParallel { get; }

        /// <summary>
        /// See <see cref="IColorQuantizer.Prepare"/> for more details.
        /// </summary>
        public void Prepare(ImageBuffer image)
        {
            OnPrepare(image);
        }

        /// <summary>
        /// See <see cref="IColorQuantizer.AddColor"/> for more details.
        /// </summary>
        public void AddColor(Color color, Int32 x, Int32 y)
        {
            Int32 key;
            color = QuantizationHelper.ConvertAlpha(color, out key);
            OnAddColor(color, key, x, y);
        }

        /// <summary>
        /// See <see cref="IColorQuantizer.GetColorCount"/> for more details.
        /// </summary>
        public Int32 GetColorCount()
        {
            return OnGetColorCount();
        }

        /// <summary>
        /// See <see cref="IColorQuantizer.GetPalette"/> for more details.
        /// </summary>
        public List<Color> GetPalette(Int32 colorCount)
        {
            return OnGetPalette(colorCount);
        }

        /// <summary>
        /// See <see cref="IColorQuantizer.GetPaletteIndex"/> for more details.
        /// </summary>
        public Int32 GetPaletteIndex(Color color, Int32 x, Int32 y)
        {
            Int32 result, key;
            color = QuantizationHelper.ConvertAlpha(color, out key);
            OnGetPaletteIndex(color, key, x, y, out result);
            return result;
        }

        /// <summary>
        /// See <see cref="IColorQuantizer.Finish"/> for more details.
        /// </summary>
        public void Finish()
        {
            OnFinish();
        }

        #endregion
    }
}
/* Generated SBE (Simple Binary Encoding) message codec */
// NOTE(review): generated code - do not hand-edit the field offsets/constants;
// regenerate from the SBE schema instead. Comments below are review annotations only.
using System;
using System.Text;
using System.Collections.Generic;
using Adaptive.Agrona;

namespace Adaptive.Cluster.Codecs {

// Flyweight encoder for the fixed-length (36-byte) ServiceAck message,
// schema id 111, template id 33. All fields are little-endian.
public class ServiceAckEncoder
{
    public const ushort BLOCK_LENGTH = 36;
    public const ushort TEMPLATE_ID = 33;
    public const ushort SCHEMA_ID = 111;
    public const ushort SCHEMA_VERSION = 7;

    private ServiceAckEncoder _parentMessage;
    private IMutableDirectBuffer _buffer;
    protected int _offset;
    protected int _limit;

    public ServiceAckEncoder()
    {
        _parentMessage = this;
    }

    public ushort SbeBlockLength()
    {
        return BLOCK_LENGTH;
    }

    public ushort SbeTemplateId()
    {
        return TEMPLATE_ID;
    }

    public ushort SbeSchemaId()
    {
        return SCHEMA_ID;
    }

    public ushort SbeSchemaVersion()
    {
        return SCHEMA_VERSION;
    }

    public string SbeSemanticType()
    {
        return "";
    }

    public IMutableDirectBuffer Buffer()
    {
        return _buffer;
    }

    public int Offset()
    {
        return _offset;
    }

    // Positions this flyweight over the buffer at the given offset (no header).
    public ServiceAckEncoder Wrap(IMutableDirectBuffer buffer, int offset)
    {
        this._buffer = buffer;
        this._offset = offset;
        Limit(offset + BLOCK_LENGTH);

        return this;
    }

    // Writes the standard SBE message header first, then wraps past it.
    public ServiceAckEncoder WrapAndApplyHeader(
        IMutableDirectBuffer buffer, int offset, MessageHeaderEncoder headerEncoder)
    {
        headerEncoder
            .Wrap(buffer, offset)
            .BlockLength(BLOCK_LENGTH)
            .TemplateId(TEMPLATE_ID)
            .SchemaId(SCHEMA_ID)
            .Version(SCHEMA_VERSION);

        return Wrap(buffer, offset + MessageHeaderEncoder.ENCODED_LENGTH);
    }

    public int EncodedLength()
    {
        return _limit - _offset;
    }

    public int Limit()
    {
        return _limit;
    }

    public void Limit(int limit)
    {
        this._limit = limit;
    }

    // logPosition: int64 at offset 0
    public static int LogPositionEncodingOffset()
    {
        return 0;
    }

    public static int LogPositionEncodingLength()
    {
        return 8;
    }

    public static long LogPositionNullValue()
    {
        return -9223372036854775808L;
    }

    public static long LogPositionMinValue()
    {
        return -9223372036854775807L;
    }

    public static long LogPositionMaxValue()
    {
        return 9223372036854775807L;
    }

    public ServiceAckEncoder LogPosition(long value)
    {
        _buffer.PutLong(_offset + 0, value, ByteOrder.LittleEndian);
        return this;
    }


    // timestamp: int64 at offset 8
    public static int TimestampEncodingOffset()
    {
        return 8;
    }

    public static int TimestampEncodingLength()
    {
        return 8;
    }

    public static long TimestampNullValue()
    {
        return -9223372036854775808L;
    }

    public static long TimestampMinValue()
    {
        return -9223372036854775807L;
    }

    public static long TimestampMaxValue()
    {
        return 9223372036854775807L;
    }

    public ServiceAckEncoder Timestamp(long value)
    {
        _buffer.PutLong(_offset + 8, value, ByteOrder.LittleEndian);
        return this;
    }


    // ackId: int64 at offset 16
    public static int AckIdEncodingOffset()
    {
        return 16;
    }

    public static int AckIdEncodingLength()
    {
        return 8;
    }

    public static long AckIdNullValue()
    {
        return -9223372036854775808L;
    }

    public static long AckIdMinValue()
    {
        return -9223372036854775807L;
    }

    public static long AckIdMaxValue()
    {
        return 9223372036854775807L;
    }

    public ServiceAckEncoder AckId(long value)
    {
        _buffer.PutLong(_offset + 16, value, ByteOrder.LittleEndian);
        return this;
    }


    // relevantId: int64 at offset 24
    public static int RelevantIdEncodingOffset()
    {
        return 24;
    }

    public static int RelevantIdEncodingLength()
    {
        return 8;
    }

    public static long RelevantIdNullValue()
    {
        return -9223372036854775808L;
    }

    public static long RelevantIdMinValue()
    {
        return -9223372036854775807L;
    }

    public static long RelevantIdMaxValue()
    {
        return 9223372036854775807L;
    }

    public ServiceAckEncoder RelevantId(long value)
    {
        _buffer.PutLong(_offset + 24, value, ByteOrder.LittleEndian);
        return this;
    }


    // serviceId: int32 at offset 32
    public static int ServiceIdEncodingOffset()
    {
        return 32;
    }

    public static int ServiceIdEncodingLength()
    {
        return 4;
    }

    public static int ServiceIdNullValue()
    {
        return -2147483648;
    }

    public static int ServiceIdMinValue()
    {
        return -2147483647;
    }

    public static int ServiceIdMaxValue()
    {
        return 2147483647;
    }

    public ServiceAckEncoder ServiceId(int value)
    {
        _buffer.PutInt(_offset + 32, value, ByteOrder.LittleEndian);
        return this;
    }


    public override string ToString()
    {
        return AppendTo(new StringBuilder(100)).ToString();
    }

    // Renders the currently encoded bytes by round-tripping through the decoder.
    public StringBuilder AppendTo(StringBuilder builder)
    {
        ServiceAckDecoder writer = new ServiceAckDecoder();
        writer.Wrap(_buffer, _offset, BLOCK_LENGTH, SCHEMA_VERSION);

        return writer.AppendTo(builder);
    }
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;

namespace System.Runtime.Intrinsics.X86
{
    /// <summary>
    /// This class provides access to Intel SSE hardware instructions via intrinsics
    /// </summary>
    // NOTE(review): every member body below is deliberately self-referential
    // (e.g. "Add(left, right) => Add(left, right)"). This is the recognized
    // [Intrinsic] stub pattern - the bodies are replaced by the compiler/runtime
    // with the actual instruction - so the apparent infinite recursion must NOT
    // be "fixed".
    [Intrinsic]
    [CLSCompliant(false)]
    public abstract class Sse
    {
        internal Sse() { }

        public static bool IsSupported { get => IsSupported; }

        [Intrinsic]
        public abstract class X64
        {
            internal X64() { }

            public static bool IsSupported { get => IsSupported; }

            /// <summary>
            /// __int64 _mm_cvtss_si64 (__m128 a)
            ///   CVTSS2SI r64, xmm/m32
            /// This intrinisc is only available on 64-bit processes
            /// </summary>
            public static long ConvertToInt64(Vector128<float> value) => ConvertToInt64(value);

            /// <summary>
            /// __m128 _mm_cvtsi64_ss (__m128 a, __int64 b)
            ///   CVTSI2SS xmm, reg/m64
            /// This intrinisc is only available on 64-bit processes
            /// </summary>
            public static Vector128<float> ConvertScalarToVector128Single(Vector128<float> upper, long value) => ConvertScalarToVector128Single(upper, value);

            /// <summary>
            /// __int64 _mm_cvttss_si64 (__m128 a)
            ///   CVTTSS2SI r64, xmm/m32
            /// This intrinisc is only available on 64-bit processes
            /// </summary>
            public static long ConvertToInt64WithTruncation(Vector128<float> value) => ConvertToInt64WithTruncation(value);
        }

        /// <summary>
        /// __m128 _mm_add_ps (__m128 a,  __m128 b)
        ///   ADDPS xmm, xmm/m128
        /// </summary>
        public static Vector128<float> Add(Vector128<float> left, Vector128<float> right) => Add(left, right);

        /// <summary>
        /// __m128 _mm_add_ss (__m128 a,  __m128 b)
        ///   ADDSS xmm, xmm/m32
        /// </summary>
        public static Vector128<float> AddScalar(Vector128<float> left, Vector128<float> right) => AddScalar(left, right);

        /// <summary>
        /// __m128 _mm_and_ps (__m128 a, __m128 b)
        ///   ANDPS xmm, xmm/m128
        /// </summary>
        public static Vector128<float> And(Vector128<float> left, Vector128<float> right) => And(left, right);

        /// <summary>
        /// __m128 _mm_andnot_ps (__m128 a, __m128 b)
        ///   ANDNPS xmm, xmm/m128
        /// </summary>
        public static Vector128<float> AndNot(Vector128<float> left, Vector128<float> right) => AndNot(left, right);

        /// <summary>
        /// __m128 _mm_cmpeq_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(0)
        /// </summary>
        public static Vector128<float> CompareEqual(Vector128<float> left, Vector128<float> right) => CompareEqual(left, right);

        /// <summary>
        /// int _mm_comieq_ss (__m128 a, __m128 b)
        ///   COMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareEqualOrderedScalar(Vector128<float> left, Vector128<float> right) => CompareEqualOrderedScalar(left, right);

        /// <summary>
        /// int _mm_ucomieq_ss (__m128 a, __m128 b)
        ///   UCOMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareEqualUnorderedScalar(Vector128<float> left, Vector128<float> right) => CompareEqualUnorderedScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpeq_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(0)
        /// </summary>
        public static Vector128<float> CompareEqualScalar(Vector128<float> left, Vector128<float> right) => CompareEqualScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpgt_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(6)
        /// </summary>
        public static Vector128<float> CompareGreaterThan(Vector128<float> left, Vector128<float> right) => CompareGreaterThan(left, right);

        /// <summary>
        /// int _mm_comigt_ss (__m128 a, __m128 b)
        ///   COMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareGreaterThanOrderedScalar(Vector128<float> left, Vector128<float> right) => CompareGreaterThanOrderedScalar(left, right);

        /// <summary>
        /// int _mm_ucomigt_ss (__m128 a, __m128 b)
        ///   UCOMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareGreaterThanUnorderedScalar(Vector128<float> left, Vector128<float> right) => CompareGreaterThanUnorderedScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpgt_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(6)
        /// </summary>
        public static Vector128<float> CompareGreaterThanScalar(Vector128<float> left, Vector128<float> right) => CompareGreaterThanScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpge_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(5)
        /// </summary>
        public static Vector128<float> CompareGreaterThanOrEqual(Vector128<float> left, Vector128<float> right) => CompareGreaterThanOrEqual(left, right);

        /// <summary>
        /// int _mm_comige_ss (__m128 a, __m128 b)
        ///   COMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareGreaterThanOrEqualOrderedScalar(Vector128<float> left, Vector128<float> right) => CompareGreaterThanOrEqualOrderedScalar(left, right);

        /// <summary>
        /// int _mm_ucomige_ss (__m128 a, __m128 b)
        ///   UCOMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareGreaterThanOrEqualUnorderedScalar(Vector128<float> left, Vector128<float> right) => CompareGreaterThanOrEqualUnorderedScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpge_ss (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m32, imm8(5)
        /// </summary>
        public static Vector128<float> CompareGreaterThanOrEqualScalar(Vector128<float> left, Vector128<float> right) => CompareGreaterThanOrEqualScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmplt_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(1)
        /// </summary>
        public static Vector128<float> CompareLessThan(Vector128<float> left, Vector128<float> right) => CompareLessThan(left, right);

        /// <summary>
        /// int _mm_comilt_ss (__m128 a, __m128 b)
        ///   COMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareLessThanOrderedScalar(Vector128<float> left, Vector128<float> right) => CompareLessThanOrderedScalar(left, right);

        /// <summary>
        /// int _mm_ucomilt_ss (__m128 a, __m128 b)
        ///   UCOMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareLessThanUnorderedScalar(Vector128<float> left, Vector128<float> right) => CompareLessThanUnorderedScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmplt_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(1)
        /// </summary>
        public static Vector128<float> CompareLessThanScalar(Vector128<float> left, Vector128<float> right) => CompareLessThanScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmple_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(2)
        /// </summary>
        public static Vector128<float> CompareLessThanOrEqual(Vector128<float> left, Vector128<float> right) => CompareLessThanOrEqual(left, right);

        /// <summary>
        /// int _mm_comile_ss (__m128 a, __m128 b)
        ///   COMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareLessThanOrEqualOrderedScalar(Vector128<float> left, Vector128<float> right) => CompareLessThanOrEqualOrderedScalar(left, right);

        /// <summary>
        /// int _mm_ucomile_ss (__m128 a, __m128 b)
        ///   UCOMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareLessThanOrEqualUnorderedScalar(Vector128<float> left, Vector128<float> right) => CompareLessThanOrEqualUnorderedScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmple_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(2)
        /// </summary>
        public static Vector128<float> CompareLessThanOrEqualScalar(Vector128<float> left, Vector128<float> right) => CompareLessThanOrEqualScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpneq_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(4)
        /// </summary>
        public static Vector128<float> CompareNotEqual(Vector128<float> left, Vector128<float> right) => CompareNotEqual(left, right);

        /// <summary>
        /// int _mm_comineq_ss (__m128 a, __m128 b)
        ///   COMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareNotEqualOrderedScalar(Vector128<float> left, Vector128<float> right) => CompareNotEqualOrderedScalar(left, right);

        /// <summary>
        /// int _mm_ucomineq_ss (__m128 a, __m128 b)
        ///   UCOMISS xmm, xmm/m32
        /// </summary>
        public static bool CompareNotEqualUnorderedScalar(Vector128<float> left, Vector128<float> right) => CompareNotEqualUnorderedScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpneq_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(4)
        /// </summary>
        public static Vector128<float> CompareNotEqualScalar(Vector128<float> left, Vector128<float> right) => CompareNotEqualScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpngt_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(2)
        /// </summary>
        public static Vector128<float> CompareNotGreaterThan(Vector128<float> left, Vector128<float> right) => CompareNotGreaterThan(left, right);

        /// <summary>
        /// __m128 _mm_cmpngt_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(2)
        /// </summary>
        public static Vector128<float> CompareNotGreaterThanScalar(Vector128<float> left, Vector128<float> right) => CompareNotGreaterThanScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpnge_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(1)
        /// </summary>
        public static Vector128<float> CompareNotGreaterThanOrEqual(Vector128<float> left, Vector128<float> right) => CompareNotGreaterThanOrEqual(left, right);

        /// <summary>
        /// __m128 _mm_cmpnge_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(1)
        /// </summary>
        public static Vector128<float> CompareNotGreaterThanOrEqualScalar(Vector128<float> left, Vector128<float> right) => CompareNotGreaterThanOrEqualScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpnlt_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(5)
        /// </summary>
        public static Vector128<float> CompareNotLessThan(Vector128<float> left, Vector128<float> right) => CompareNotLessThan(left, right);

        /// <summary>
        /// __m128 _mm_cmpnlt_ss (__m128 a,  __m128 b)
        ///   CMPSS xmm, xmm/m32, imm8(5)
        /// </summary>
        public static Vector128<float> CompareNotLessThanScalar(Vector128<float> left, Vector128<float> right) => CompareNotLessThanScalar(left, right);

        /// <summary>
        /// __m128 _mm_cmpnle_ps (__m128 a,  __m128 b)
        ///   CMPPS xmm, xmm/m128, imm8(6)
        /// </summary>
        public static Vector128<float> CompareNotLessThanOrEqual(Vector128<float> left, Vector128<float> right) =>
CompareNotLessThanOrEqual(left, right); /// <summary> /// __m128 _mm_cmpnle_ss (__m128 a, __m128 b) /// CMPSS xmm, xmm/m32, imm8(6) /// </summary> public static Vector128<float> CompareNotLessThanOrEqualScalar(Vector128<float> left, Vector128<float> right) => CompareNotLessThanOrEqualScalar(left, right); /// <summary> /// __m128 _mm_cmpord_ps (__m128 a, __m128 b) /// CMPPS xmm, xmm/m128, imm8(7) /// </summary> public static Vector128<float> CompareOrdered(Vector128<float> left, Vector128<float> right) => CompareOrdered(left, right); /// <summary> /// __m128 _mm_cmpord_ss (__m128 a, __m128 b) /// CMPSS xmm, xmm/m32, imm8(7) /// </summary> public static Vector128<float> CompareOrderedScalar(Vector128<float> left, Vector128<float> right) => CompareOrderedScalar(left, right); /// <summary> /// __m128 _mm_cmpunord_ps (__m128 a, __m128 b) /// CMPPS xmm, xmm/m128, imm8(3) /// </summary> public static Vector128<float> CompareUnordered(Vector128<float> left, Vector128<float> right) => CompareUnordered(left, right); /// <summary> /// __m128 _mm_cmpunord_ss (__m128 a, __m128 b) /// CMPSS xmm, xmm/m32, imm8(3) /// </summary> public static Vector128<float> CompareUnorderedScalar(Vector128<float> left, Vector128<float> right) => CompareUnorderedScalar(left, right); /// <summary> /// int _mm_cvtss_si32 (__m128 a) /// CVTSS2SI r32, xmm/m32 /// </summary> public static int ConvertToInt32(Vector128<float> value) => ConvertToInt32(value); /// <summary> /// __m128 _mm_cvtsi32_ss (__m128 a, int b) /// CVTSI2SS xmm, reg/m32 /// </summary> public static Vector128<float> ConvertScalarToVector128Single(Vector128<float> upper, int value) => ConvertScalarToVector128Single(upper, value); /// <summary> /// int _mm_cvttss_si32 (__m128 a) /// CVTTSS2SI r32, xmm/m32 /// </summary> public static int ConvertToInt32WithTruncation(Vector128<float> value) => ConvertToInt32WithTruncation(value); /// <summary> /// __m128 _mm_div_ps (__m128 a, __m128 b) /// DIVPS xmm, xmm/m128 /// </summary> public 
static Vector128<float> Divide(Vector128<float> left, Vector128<float> right) => Divide(left, right); /// <summary> /// __m128 _mm_div_ss (__m128 a, __m128 b) /// DIVSS xmm, xmm/m32 /// </summary> public static Vector128<float> DivideScalar(Vector128<float> left, Vector128<float> right) => DivideScalar(left, right); /// <summary> /// __m128 _mm_loadu_ps (float const* mem_address) /// MOVUPS xmm, m128 /// </summary> public static unsafe Vector128<float> LoadVector128(float* address) => LoadVector128(address); /// <summary> /// __m128 _mm_load_ss (float const* mem_address) /// MOVSS xmm, m32 /// </summary> public static unsafe Vector128<float> LoadScalarVector128(float* address) => LoadScalarVector128(address); /// <summary> /// __m128 _mm_load_ps (float const* mem_address) /// MOVAPS xmm, m128 /// </summary> public static unsafe Vector128<float> LoadAlignedVector128(float* address) => LoadAlignedVector128(address); /// <summary> /// __m128 _mm_loadh_pi (__m128 a, __m64 const* mem_addr) /// MOVHPS xmm, m64 /// </summary> public static unsafe Vector128<float> LoadHigh(Vector128<float> lower, float* address) => LoadHigh(lower, address); /// <summary> /// __m128 _mm_loadl_pi (__m128 a, __m64 const* mem_addr) /// MOVLPS xmm, m64 /// </summary> public static unsafe Vector128<float> LoadLow(Vector128<float> upper, float* address) => LoadLow(upper, address); /// <summary> /// __m128 _mm_max_ps (__m128 a, __m128 b) /// MAXPS xmm, xmm/m128 /// </summary> public static Vector128<float> Max(Vector128<float> left, Vector128<float> right) => Max(left, right); /// <summary> /// __m128 _mm_max_ss (__m128 a, __m128 b) /// MAXSS xmm, xmm/m32 /// </summary> public static Vector128<float> MaxScalar(Vector128<float> left, Vector128<float> right) => MaxScalar(left, right); /// <summary> /// __m128 _mm_min_ps (__m128 a, __m128 b) /// MINPS xmm, xmm/m128 /// </summary> public static Vector128<float> Min(Vector128<float> left, Vector128<float> right) => Min(left, right); /// <summary> /// 
__m128 _mm_min_ss (__m128 a, __m128 b) /// MINSS xmm, xmm/m32 /// </summary> public static Vector128<float> MinScalar(Vector128<float> left, Vector128<float> right) => MinScalar(left, right); /// <summary> /// __m128 _mm_move_ss (__m128 a, __m128 b) /// MOVSS xmm, xmm /// </summary> public static Vector128<float> MoveScalar(Vector128<float> upper, Vector128<float> value) => MoveScalar(upper, value); /// <summary> /// __m128 _mm_movehl_ps (__m128 a, __m128 b) /// MOVHLPS xmm, xmm /// </summary> public static Vector128<float> MoveHighToLow(Vector128<float> left, Vector128<float> right) => MoveHighToLow(left, right); /// <summary> /// __m128 _mm_movelh_ps (__m128 a, __m128 b) /// MOVLHPS xmm, xmm /// </summary> public static Vector128<float> MoveLowToHigh(Vector128<float> left, Vector128<float> right) => MoveLowToHigh(left, right); /// <summary> /// int _mm_movemask_ps (__m128 a) /// MOVMSKPS reg, xmm /// </summary> public static int MoveMask(Vector128<float> value) => MoveMask(value); /// <summary> /// __m128 _mm_mul_ps (__m128 a, __m128 b) /// MULPS xmm, xmm/m128 /// </summary> public static Vector128<float> Multiply(Vector128<float> left, Vector128<float> right) => Multiply(left, right); /// <summary> /// __m128 _mm_mul_ss (__m128 a, __m128 b) /// MULPS xmm, xmm/m32 /// </summary> public static Vector128<float> MultiplyScalar(Vector128<float> left, Vector128<float> right) => MultiplyScalar(left, right); /// <summary> /// __m128 _mm_or_ps (__m128 a, __m128 b) /// ORPS xmm, xmm/m128 /// </summary> public static Vector128<float> Or(Vector128<float> left, Vector128<float> right) => Or(left, right); /// <summary> /// void _mm_prefetch(char* p, int i) /// PREFETCHT0 m8 /// </summary> public static unsafe void Prefetch0(void* address) => Prefetch0(address); /// <summary> /// void _mm_prefetch(char* p, int i) /// PREFETCHT1 m8 /// </summary> public static unsafe void Prefetch1(void* address) => Prefetch1(address); /// <summary> /// void _mm_prefetch(char* p, int i) /// 
PREFETCHT2 m8 /// </summary> public static unsafe void Prefetch2(void* address) => Prefetch2(address); /// <summary> /// void _mm_prefetch(char* p, int i) /// PREFETCHNTA m8 /// </summary> public static unsafe void PrefetchNonTemporal(void* address) => PrefetchNonTemporal(address); /// <summary> /// __m128 _mm_rcp_ps (__m128 a) /// RCPPS xmm, xmm/m128 /// </summary> public static Vector128<float> Reciprocal(Vector128<float> value) => Reciprocal(value); /// <summary> /// __m128 _mm_rcp_ss (__m128 a) /// RCPSS xmm, xmm/m32 /// </summary> public static Vector128<float> ReciprocalScalar(Vector128<float> value) => ReciprocalScalar(value); /// <summary> /// __m128 _mm_rcp_ss (__m128 a, __m128 b) /// RCPSS xmm, xmm/m32 /// The above native signature does not exist. We provide this additional overload for consistency with the other scalar APIs. /// </summary> public static Vector128<float> ReciprocalScalar(Vector128<float> upper, Vector128<float> value) => ReciprocalScalar(upper, value); /// <summary> /// __m128 _mm_rsqrt_ps (__m128 a) /// RSQRTPS xmm, xmm/m128 /// </summary> public static Vector128<float> ReciprocalSqrt(Vector128<float> value) => ReciprocalSqrt(value); /// <summary> /// __m128 _mm_rsqrt_ss (__m128 a) /// RSQRTSS xmm, xmm/m32 /// </summary> public static Vector128<float> ReciprocalSqrtScalar(Vector128<float> value) => ReciprocalSqrtScalar(value); /// <summary> /// __m128 _mm_rsqrt_ss (__m128 a, __m128 b) /// RSQRTSS xmm, xmm/m32 /// The above native signature does not exist. We provide this additional overload for consistency with the other scalar APIs. 
/// </summary> public static Vector128<float> ReciprocalSqrtScalar(Vector128<float> upper, Vector128<float> value) => ReciprocalSqrtScalar(upper, value); /// <summary> /// __m128 _mm_shuffle_ps (__m128 a, __m128 b, unsigned int control) /// SHUFPS xmm, xmm/m128, imm8 /// </summary> public static Vector128<float> Shuffle(Vector128<float> left, Vector128<float> right, byte control) => Shuffle(left, right, control); /// <summary> /// __m128 _mm_sqrt_ps (__m128 a) /// SQRTPS xmm, xmm/m128 /// </summary> public static Vector128<float> Sqrt(Vector128<float> value) => Sqrt(value); /// <summary> /// __m128 _mm_sqrt_ss (__m128 a) /// SQRTSS xmm, xmm/m32 /// </summary> public static Vector128<float> SqrtScalar(Vector128<float> value) => SqrtScalar(value); /// <summary> /// __m128 _mm_sqrt_ss (__m128 a, __m128 b) /// SQRTSS xmm, xmm/m32 /// The above native signature does not exist. We provide this additional overload for consistency with the other scalar APIs. /// </summary> public static Vector128<float> SqrtScalar(Vector128<float> upper, Vector128<float> value) => SqrtScalar(upper, value); /// <summary> /// void _mm_store_ps (float* mem_addr, __m128 a) /// MOVAPS m128, xmm /// </summary> public static unsafe void StoreAligned(float* address, Vector128<float> source) => StoreAligned(address, source); /// <summary> /// void _mm_stream_ps (float* mem_addr, __m128 a) /// MOVNTPS m128, xmm /// </summary> public static unsafe void StoreAlignedNonTemporal(float* address, Vector128<float> source) => StoreAlignedNonTemporal(address, source); /// <summary> /// void _mm_storeu_ps (float* mem_addr, __m128 a) /// MOVUPS m128, xmm /// </summary> public static unsafe void Store(float* address, Vector128<float> source) => Store(address, source); /// <summary> /// void _mm_sfence(void) /// SFENCE /// </summary> public static void StoreFence() => StoreFence(); /// <summary> /// void _mm_store_ss (float* mem_addr, __m128 a) /// MOVSS m32, xmm /// </summary> public static unsafe void 
StoreScalar(float* address, Vector128<float> source) => StoreScalar(address, source); /// <summary> /// void _mm_storeh_pi (__m64* mem_addr, __m128 a) /// MOVHPS m64, xmm /// </summary> public static unsafe void StoreHigh(float* address, Vector128<float> source) => StoreHigh(address, source); /// <summary> /// void _mm_storel_pi (__m64* mem_addr, __m128 a) /// MOVLPS m64, xmm /// </summary> public static unsafe void StoreLow(float* address, Vector128<float> source) => StoreLow(address, source); /// <summary> /// __m128d _mm_sub_ps (__m128d a, __m128d b) /// SUBPS xmm, xmm/m128 /// </summary> public static Vector128<float> Subtract(Vector128<float> left, Vector128<float> right) => Subtract(left, right); /// <summary> /// __m128 _mm_sub_ss (__m128 a, __m128 b) /// SUBSS xmm, xmm/m32 /// </summary> public static Vector128<float> SubtractScalar(Vector128<float> left, Vector128<float> right) => SubtractScalar(left, right); /// <summary> /// __m128 _mm_unpackhi_ps (__m128 a, __m128 b) /// UNPCKHPS xmm, xmm/m128 /// </summary> public static Vector128<float> UnpackHigh(Vector128<float> left, Vector128<float> right) => UnpackHigh(left, right); /// <summary> /// __m128 _mm_unpacklo_ps (__m128 a, __m128 b) /// UNPCKLPS xmm, xmm/m128 /// </summary> public static Vector128<float> UnpackLow(Vector128<float> left, Vector128<float> right) => UnpackLow(left, right); /// <summary> /// __m128 _mm_xor_ps (__m128 a, __m128 b) /// XORPS xmm, xmm/m128 /// </summary> public static Vector128<float> Xor(Vector128<float> left, Vector128<float> right) => Xor(left, right); } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Globalization;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;

namespace System
{
    [Serializable]
    [CLSCompliant(false)]
    [StructLayout(LayoutKind.Sequential)]
    [TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
    public readonly struct SByte : IComparable, IConvertible, IFormattable, IComparable<sbyte>, IEquatable<sbyte>, ISpanFormattable
    {
        private readonly sbyte m_value; // Do not rename (binary serialization)

        // The maximum value that an SByte may represent: 127.
        public const sbyte MaxValue = (sbyte)0x7F;

        // The minimum value that an SByte may represent: -128.
        public const sbyte MinValue = unchecked((sbyte)0x80);

        // Compares this object to another object, returning an integer that
        // indicates the relationship. Returns a value less than zero if this
        // instance is less than obj; null is considered to be less than any instance.
        // If obj is not of type SByte, this method throws an ArgumentException.
        public int CompareTo(object obj)
        {
            if (obj == null)
            {
                return 1;
            }
            if (!(obj is sbyte))
            {
                throw new ArgumentException(SR.Arg_MustBeSByte);
            }
            // Subtraction cannot overflow here: both operands fit in [-128, 127].
            return m_value - ((sbyte)obj).m_value;
        }

        public int CompareTo(sbyte value)
        {
            return m_value - value;
        }

        // Determines whether two SByte objects are equal.
        public override bool Equals(object obj)
        {
            if (!(obj is sbyte))
            {
                return false;
            }
            return m_value == ((sbyte)obj).m_value;
        }

        [NonVersionable]
        public bool Equals(sbyte obj)
        {
            return m_value == obj;
        }

        // Gets a hash code for this instance.
        public override int GetHashCode()
        {
            return m_value;
        }

        // Provides a string representation of an sbyte.
        public override string ToString()
        {
            return Number.FormatInt32(m_value, null, null);
        }

        public string ToString(IFormatProvider provider)
        {
            return Number.FormatInt32(m_value, null, provider);
        }

        public string ToString(string format)
        {
            return ToString(format, null);
        }

        public string ToString(string format, IFormatProvider provider)
        {
            // Negative values formatted as hex are reinterpreted as their unsigned
            // byte pattern (e.g. -1 formats as "FF" rather than a negative int32).
            if (m_value < 0 && format != null && format.Length > 0 && (format[0] == 'X' || format[0] == 'x'))
            {
                uint temp = (uint)(m_value & 0x000000FF);
                return Number.FormatUInt32(temp, format, provider);
            }
            return Number.FormatInt32(m_value, format, provider);
        }

        public bool TryFormat(Span<char> destination, out int charsWritten, ReadOnlySpan<char> format = default, IFormatProvider provider = null)
        {
            // Same hex special-case as ToString(string, IFormatProvider) above.
            if (m_value < 0 && format.Length > 0 && (format[0] == 'X' || format[0] == 'x'))
            {
                uint temp = (uint)(m_value & 0x000000FF);
                return Number.TryFormatUInt32(temp, format, provider, destination, out charsWritten);
            }
            return Number.TryFormatInt32(m_value, format, provider, destination, out charsWritten);
        }

        [CLSCompliant(false)]
        public static sbyte Parse(string s)
        {
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse((ReadOnlySpan<char>)s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo);
        }

        [CLSCompliant(false)]
        public static sbyte Parse(string s, NumberStyles style)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse((ReadOnlySpan<char>)s, style, NumberFormatInfo.CurrentInfo);
        }

        [CLSCompliant(false)]
        public static sbyte Parse(string s, IFormatProvider provider)
        {
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse((ReadOnlySpan<char>)s, NumberStyles.Integer, NumberFormatInfo.GetInstance(provider));
        }

        // Parses a signed byte from a String in the given style. If
        // a NumberFormatInfo isn't specified, the current culture's
        // NumberFormatInfo is assumed.
        [CLSCompliant(false)]
        public static sbyte Parse(string s, NumberStyles style, IFormatProvider provider)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse((ReadOnlySpan<char>)s, style, NumberFormatInfo.GetInstance(provider));
        }

        [CLSCompliant(false)]
        public static sbyte Parse(ReadOnlySpan<char> s, NumberStyles style = NumberStyles.Integer, IFormatProvider provider = null)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            return Parse(s, style, NumberFormatInfo.GetInstance(provider));
        }

        private static sbyte Parse(string s, NumberStyles style, NumberFormatInfo info)
        {
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse((ReadOnlySpan<char>)s, style, info);
        }

        private static sbyte Parse(ReadOnlySpan<char> s, NumberStyles style, NumberFormatInfo info)
        {
            // Parse as int32 first, then range-check down to sbyte.
            Number.ParsingStatus status = Number.TryParseInt32(s, style, info, out int i);
            if (status != Number.ParsingStatus.OK)
            {
                Number.ThrowOverflowOrFormatException(status, TypeCode.SByte);
            }
            // For hex number styles AllowHexSpecifier >> 2 == 0x80 and cancels out MinValue so the check is effectively: (uint)i > byte.MaxValue
            // For integer styles it's zero and the effective check is (uint)(i - MinValue) > byte.MaxValue
            if ((uint)(i - MinValue - ((int)(style & NumberStyles.AllowHexSpecifier) >> 2)) > byte.MaxValue)
            {
                Number.ThrowOverflowException(TypeCode.SByte);
            }
            return (sbyte)i;
        }

        [CLSCompliant(false)]
        public static bool TryParse(string s, out sbyte result)
        {
            if (s == null)
            {
                result = 0;
                return false;
            }
            return TryParse((ReadOnlySpan<char>)s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo, out result);
        }

        [CLSCompliant(false)]
        public static bool TryParse(ReadOnlySpan<char> s, out sbyte result)
        {
            return TryParse(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo, out result);
        }

        [CLSCompliant(false)]
        public static bool TryParse(string s, NumberStyles style, IFormatProvider provider, out sbyte result)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            if (s == null)
            {
                result = 0;
                return false;
            }
            return TryParse((ReadOnlySpan<char>)s, style, NumberFormatInfo.GetInstance(provider), out result);
        }

        [CLSCompliant(false)]
        public static bool TryParse(ReadOnlySpan<char> s, NumberStyles style, IFormatProvider provider, out sbyte result)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            return TryParse(s, style, NumberFormatInfo.GetInstance(provider), out result);
        }

        private static bool TryParse(ReadOnlySpan<char> s, NumberStyles style, NumberFormatInfo info, out sbyte result)
        {
            // For hex number styles AllowHexSpecifier >> 2 == 0x80 and cancels out MinValue so the check is effectively: (uint)i > byte.MaxValue
            // For integer styles it's zero and the effective check is (uint)(i - MinValue) > byte.MaxValue
            if (Number.TryParseInt32(s, style, info, out int i) != Number.ParsingStatus.OK
                || (uint)(i - MinValue - ((int)(style & NumberStyles.AllowHexSpecifier) >> 2)) > byte.MaxValue)
            {
                result = 0;
                return false;
            }
            result = (sbyte)i;
            return true;
        }

        //
        // IConvertible implementation
        //
        public TypeCode GetTypeCode()
        {
            return TypeCode.SByte;
        }

        bool IConvertible.ToBoolean(IFormatProvider provider)
        {
            return Convert.ToBoolean(m_value);
        }

        char IConvertible.ToChar(IFormatProvider provider)
        {
            return Convert.ToChar(m_value);
        }

        sbyte IConvertible.ToSByte(IFormatProvider provider)
        {
            return m_value;
        }

        byte IConvertible.ToByte(IFormatProvider provider)
        {
            return Convert.ToByte(m_value);
        }

        short IConvertible.ToInt16(IFormatProvider provider)
        {
            return Convert.ToInt16(m_value);
        }

        ushort IConvertible.ToUInt16(IFormatProvider provider)
        {
            return Convert.ToUInt16(m_value);
        }

        int IConvertible.ToInt32(IFormatProvider provider)
        {
            return m_value;
        }

        uint IConvertible.ToUInt32(IFormatProvider provider)
        {
            return Convert.ToUInt32(m_value);
        }

        long IConvertible.ToInt64(IFormatProvider provider)
        {
            return Convert.ToInt64(m_value);
        }

        ulong IConvertible.ToUInt64(IFormatProvider provider)
        {
            return Convert.ToUInt64(m_value);
        }

        float IConvertible.ToSingle(IFormatProvider provider)
        {
            return Convert.ToSingle(m_value);
        }

        double IConvertible.ToDouble(IFormatProvider provider)
        {
            return Convert.ToDouble(m_value);
        }

        decimal IConvertible.ToDecimal(IFormatProvider provider)
        {
            return Convert.ToDecimal(m_value);
        }

        DateTime IConvertible.ToDateTime(IFormatProvider provider)
        {
            throw new InvalidCastException(SR.Format(SR.InvalidCast_FromTo, "SByte", "DateTime"));
        }

        object IConvertible.ToType(Type type, IFormatProvider provider)
        {
            return Convert.DefaultToType((IConvertible)this, type, provider);
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Diagnostics;

namespace Kekstoaster.Syntax
{
	/// <summary>
	/// Ebnf Element for Syntax Parsing of text files
	///
	/// <![CDATA[
	/// There are 9 types of Ebnf-Elements that can be created
	/// * Char        -   - base-element, a single character, that must be matched
	/// * Range       -   - base-element, a range of characters, where one in that range must be matched
	/// * Any         -   - base-element, any single character that can be found, including EOF
	/// * EOF         -   - base-element, end of file or end of stream
	/// * List        - & - a list of Ebnf-Elements that must occur in the exact order specified
	/// * Optional    - ! - an optional element can occur at the given position or is ignored
	/// * Choice      - | - a list of Ebnf-elements where at least one element must match
	/// * Repeat      - ~ - a single Ebnf-element that can occur 0 or arbitrarily many times
	/// * Not         - - - an element that must not occur at that position
	/// * Permutation - ^ - a list of Ebnf-elements that occur exactly once but in random order
	///                     (this is not a real EBNF-Element and can be created with the above elements,
	///                     but creates much easier syntax-style with better performance)
	///
	/// By default only Text-Elements can be created as new instances; all other elements
	/// can only be created by the use of operations
	///
	///
	///
	/// **************************
	/// ********  CHAR    ********
	/// **************************
	///
	/// A single char that must be matched
	/// The character can be any byte value, from 0 to 255
	///
	/// Creation: Ebnf x = new Ebnf('x');
	///           Ebnf x = 'x';
	///
	///
	/// **************************
	/// *******  RANGE    ********
	/// **************************
	///
	/// A single char in a given character range that must be matched
	/// Range must be in between the byte value range 0 to 255
	///
	/// Creation: Ebnf x = Ebnf.Range('a', 'z');
	///
	///
	/// **************************
	/// ********  Any     ********
	/// **************************
	///
	/// A single char.
	/// It always matches, and it also matches EOF.
	///
	/// Creation: Ebnf any = Ebnf.AnyChar;
	///
	/// At the end of the stream, checking for AnyChar always succeeds, so having a list of
	/// repeat(Any) as in
	///
	/// Ebnf r = ~(Ebnf.AnyChar);
	///
	/// will never terminate, ending up in an endless loop. So be sure to always combine
	/// the Any repeat statement with a Not combination.
	///
	/// Example: Match until end of line:
	///
	/// Ebnf endOfLine = ~(Ebnf.EmptyList + -(Ebnf.EOF) + Ebnf.AnyChar);
	///
	///
	/// **************************
	/// ********  EOF     ********
	/// **************************
	///
	/// Matches only end of file or end of stream, so all other characters in that stream must
	/// be matched before it is true.
	///
	/// Creation: Ebnf eof = Ebnf.EOF;
	///
	/// It is always a good idea to have a final Element matching the entire file like
	///
	/// Ebnf file = whitespaces & content & whitespaces & eof;
	///
	/// When an Ebnf element is matched, the parsing ends. So if any characters follow the
	/// defined element that are not included in the syntax, they will not be recognized.
	/// If you want all following characters to be ignored, this is what you want. Otherwise
	/// ending with an Ebnf.EOF element ensures that the entire file matches the syntax you
	/// defined.
	///
	///
	/// **************************
	/// ********  List    ********
	/// **************************
	///
	/// Matches all specified elements in the given order. If parsing of one element fails,
	/// parsing of the entire list is considered a failure.
	///
	/// Creation: Ebnf list = Ebnf.EmptyList + element1 + element2 + ...;
	///           Ebnf list = element1 & element2 & ...;
	///
	/// Can be flagged as Unique. If a unique element is partly matched and an error occurs,
	/// such as the element is not completely matched, the entire parsing process is aborted.
	/// Example: match [x] or {x} or (x), where [x], {x} and (x) are 3 unique lists
	///          text found: [x
	///
	///          so instead of trying to also parse {x} and (x) the parsing process is
	///          canceled because no other element can match [x
	///
	///          whereas [x] or [y], with [x] and [y] being lists, cannot be made unique,
	///          because both start with [
	///
	/// The common usage is the & concatenation of elements.
	/// Use the EmptyList method for reference combinations; the & method just copies values.
	/// Example: match xyxyxyxyxyxyx...
	///          Ebnf x = Ebnf.EmptyList;
	///          Ebnf y = Ebnf.EmptyList + (new Ebnf('y')) + !x;
	///          x = Ebnf.EmptyList + (new Ebnf('x')) + !y;
	///
	/// If two lists a with n elements and b with m elements are combined with a & b,
	/// a new list with m + n elements is created. If the + syntax is used a new list
	/// with only 2 elements is created, the first element being a, the second element
	/// being b.
	///
	/// **************************
	/// ******  Optional   *******
	/// **************************
	///
	/// Matches the specified element or continues with the next without a failure.
	///
	/// Creation: Ebnf opt = !element;
	///
	/// Example: match 1 with optional -, so 1 or -1:
	///          Ebnf one = !(new Ebnf('-')) & (new Ebnf('1'));
	///
	///
	/// **************************
	/// *******  Choice    *******
	/// **************************
	///
	/// Returns the first matching element of a list of Ebnf elements.
	///
	/// Creation: Ebnf choice = Ebnf.EmptyChoise + element1 + element2 + ...;
	///           Ebnf choice = element1 | element2 | ...;
	///
	/// Like the list, the common syntax is the |-combination of elements. Use the EmptyChoise
	/// method for reference.
	/// Example: Match x, or (x) or ((x)) or (((x))) or ...
	///          Ebnf x = Ebnf.EmptyChoise;
	///          Ebnf l = '(', r = ')';
	///          x = x + (new Ebnf('x')) + (Ebnf.EmptyList + l + x + r);
	///
	///
	/// **************************
	/// *******  Repeat    *******
	/// **************************
	///
	/// Matches any number of occurrences of a single Ebnf element.
	///
	/// Creation: Ebnf repeat = ~element
	///
	/// Example: match any number of spaces, but at least one
	///          Ebnf space = ' ';
	///          Ebnf spaces = space & ~space;
	///
	///
	/// **************************
	/// ********  Not     ********
	/// **************************
	///
	/// Matches any element that is not the specified element.
	/// In contrast to all other elements, the stream position pointer is not set to the end
	/// but reset to the start of the element. So any number of Not-elements can be tested.
	///
	/// Creation: Ebnf repeat = -element
	///
	/// Example: match a string like "anyString", meaning the quotes are leading and
	///          trailing, but do not appear within the string
	///          Ebnf quote = '"';
	///          Ebnf str = Ebnf.EmptyList + quote + ~(-quote & Ebnf.AnyChar) + quote
	///
	///
	/// **************************
	/// ****  Permutation    *****
	/// **************************
	///
	/// Matches a list of Ebnf-elements. Each element is matched just once, but the order
	/// is not specified.
	///
	/// Creation: Ebnf perm = Ebnf.EmptyPermutation + element1 + element2 + ...;
	///           Ebnf perm = element1 ^ element2 ^ ...
	///
	/// Can be flagged as Unique. If a unique element is partly matched and an error occurs,
	/// such as the element is not completely matched, the entire parsing process is aborted.
	/// (See List for more explanation)
	///
	/// If a permutation is combined with elements that are optional, like Optional,
	/// Repeat or Not, the permutation itself is optional. If none of the elements are matched,
	/// it is ignored itself.
	/// Example: 'function', or 'public static function', or 'static public function'
	///          or 'static function', or 'public function'
	///          Ebnf pub = "public ";
	///          Ebnf sta = "static ";
	///          Ebnf fnc = "function";
	///          Ebnf funcCombination = (!pub ^ !sta) & fnc
	/// Important in this example is to include the spaces in "public " and "static ",
	/// or it will not work.
/// 
/// Example: match: exactly xy or yx
/// Ebnf x = 'x';
/// Ebnf y = 'y';
/// Ebnf xy = x ^ y;
/// ]]>
/// </summary>
public abstract partial class Ebnf : ICloneable
{
	// Error message reported when parsing of this element fails (see ErrorMessage).
	protected string _error = null;

	// unique flag, only applicable to list and permutation
	// if unique elements are partly matched but fail parsing,
	// the entire document fails parsing
	// i.e. a wrong string "foo bar
	// fails and cannot be something else so the document is broken
	//private bool _unique = false;

	// Label for identifying the element.
	// Elements with an equal label are considered to be the same element.
	private string _label = null;
	// Optional custom parse action; DefaultParseAction is used while this is null.
	private ParseAction _parse = null;
	// Optional custom compile action; null means no compilation step.
	private CompileAction _compile = null;
	// Scope type used when parsing this element (see GetScopeType).
	protected ScopeType _scopeType;

	/// <summary>
	/// Initializes a new instance of the <see cref="Kekstoaster.Syntax.Ebnf"/> class.
	/// </summary>
	/// <param name="scopetype">The scopetype when parsing the element.</param>
	internal Ebnf (ScopeType scopetype = ScopeType.Default)
	{
		this._scopeType = scopetype;
	}

	/// <summary>
	/// Gets or sets the label of the Ebnf element. It is used to identify the element among others, i.e. for initialization.
	/// Elements with equal label are considered to be the same element/same purpose.
	/// </summary>
	/// <value>The label of the element.</value>
	public string Label {
		get { return this._label; }
		set { this._label = value; }
	}

	/// <summary>
	/// Gets a value indicating whether this element can match empty input.
	/// </summary>
	public abstract bool CanBeEmpty { get; }

	/// <summary>
	/// Gets a value indicating whether this element is generic,
	/// i.e. <see cref="CheckGeneric"/> succeeds with no visited set.
	/// </summary>
	public bool IsGeneric {
		get { return CheckGeneric (null); }
	}

	// Implemented by subclasses; the hashset presumably tracks already-visited
	// elements to guard against cycles in recursive grammars — TODO confirm.
	internal protected abstract bool CheckGeneric (HashSet<Ebnf> hashset);

	// ********************************************************************
	// ***************************** Choice *******************************
	// ********************************************************************

	/// <param name="x1">The first value for the choice.</param>
	/// <param name="x2">The second value for the choice.</param>
	public static EbnfChoice operator | (Ebnf x1, Ebnf x2)
	{
		return Or (x1, x2);
	}

	/// <summary>
	/// Creates a choice element combining x1 and x2.
	/// If either x1, x2 or both are already choices, their entries are added to the new choice rather than adding both elements directly.
	/// But if any element x1 or x2 already has a compile action, it is added directly to ensure the compilation.
	/// </summary>
	/// <param name="x1">The first value for the choice.</param>
	/// <param name="x2">The second value for the choice.</param>
	public static EbnfChoice Or (Ebnf x1, Ebnf x2)
	{
		// create a new choice to get a new reference
		EbnfChoice n = new EbnfChoice ();
		// if x1 or x2 is a generic choice, merge its elements instead of nesting it
		if (x1 is EbnfChoice && x1.IsGeneric) {
			n._list.AddRange (((EbnfChoice)x1)._list);
			if (x2 is EbnfChoice && x2.IsGeneric) {
				foreach (var item in ((EbnfChoice)x2)._list) {
					if (!(n._list.Contains (item))) {
						n._list.Add (item);
					}
				}
			} else {
				if (!(n._list.Contains (x2))) {
					n._list.Add (x2);
				}
			}
		} else {
			n._list.Add (x1);
			if (x2 is EbnfChoice && x2.IsGeneric) {
				foreach (var item in ((EbnfChoice)x2)._list) {
					if (!(n._list.Contains (item))) {
						n._list.Add (item);
					}
				}
			} else {
				// if no choice is involved, simply add both elements (skip duplicates)
				if (x1 != x2) {
					n._list.Add (x2);
				}
			}
		}
		return n;
	}

	/// <summary>
	/// Creates a choice of this element and <paramref name="x"/>.
	/// </summary>
	/// <param name="x">The second value for the choice.</param>
	public EbnfChoice Or (Ebnf x)
	{
		return Or (this, x);
	}

	// ********************************************************************
	// ****************************** List ********************************
	// ********************************************************************

	/// <param name="x1">The first value for the list.</param>
	/// <param name="x2">The second value for the list.</param>
	public static EbnfList operator & (Ebnf x1, Ebnf x2)
	{
		return And (x1, x2);
	}

	/// <summary>
	/// Creates a list element combining x1 and x2.
	/// If either x1, x2 or both are already lists, their entries are added to the new list rather than adding both elements directly.
	/// But if any element x1 or x2 already has a compile action, it is added directly to ensure the compilation.
	/// </summary>
	/// <param name="x1">The first x value.</param>
	/// <param name="x2">The second x value.</param>
	public static EbnfList And (Ebnf x1, Ebnf x2)
	{
		EbnfList n = new EbnfList ();
		n._list = new List<Ebnf> ();
		// if x1 or x2 is a generic list, splice its elements in sequence
		if (x1 is EbnfList && x1.IsGeneric) {
			n._list.AddRange (((EbnfList)x1)._list);
			if (x2 is EbnfList && x2.IsGeneric) {
				n._list.AddRange (((EbnfList)x2)._list);
			} else {
				n._list.Add (x2);
			}
		} else {
			n._list.Add (x1);
			if (x2 is EbnfList && x2.IsGeneric) {
				n._list.AddRange (((EbnfList)x2)._list);
			} else {
				n._list.Add (x2);
			}
		}
		return n;
	}

	/// <summary>
	/// Creates a list of this element followed by <paramref name="x"/>.
	/// </summary>
	/// <param name="x">The second value for the list.</param>
	public EbnfList And (Ebnf x)
	{
		return And (this, x);
	}

	// ********************************************************************
	// ***************************** Repeat *******************************
	// ********************************************************************

	/// <param name="x">The element that will be repeated.</param>
	public static EbnfRepeat operator ~ (Ebnf x)
	{
		return Repeat (x);
	}

	/// <summary>
	/// Creates a Repeat element which is matched 0 or an arbitrary number of times.
	/// If it is matched 0 times, the element will be ignored.
	/// </summary>
	/// <param name="x">The element that will be repeated.</param>
	public static EbnfRepeat Repeat (Ebnf x)
	{
		EbnfRepeat n;
		// repeating a repeat is a no-op; reuse the existing element
		if (x is EbnfRepeat) {
			n = (EbnfRepeat)x;
		} else {
			n = new EbnfRepeat (x);
		}
		return n;
	}

	/// <summary>
	/// Creates a Repeat element which is matched at least min times or an arbitrary number of times.
	/// If min equals 0 and it is matched 0 times, the element will be ignored.
	/// </summary>
	/// <param name="x">The element that will be repeated.</param>
	/// <param name="min">Minimum number of occurrences.</param>
	public static EbnfRepeat Repeat (Ebnf x, int min)
	{
		EbnfRepeat n = new EbnfRepeat (x, min);
		return n;
	}

	/// <summary>
	/// Creates a Repeat element which is matched at least min times and a maximum of max times.
	/// If min equals 0 and it is matched 0 times, the element will be ignored.
	/// </summary>
	/// <param name="x">The element that will be repeated.</param>
	/// <param name="min">Minimum number of occurrences.</param>
	/// <param name="max">Maximum number of occurrences.</param>
	public static EbnfRepeat Repeat (Ebnf x, int min, int max)
	{
		// BUG FIX: this previously called Repeat (x, min, max) — itself —
		// causing unconditional infinite recursion (StackOverflowException).
		// Construct the repeat element directly, as the other overloads do.
		EbnfRepeat n = new EbnfRepeat (x, min, max);
		return n;
	}

	/// <summary>Repeats this element 0 or an arbitrary number of times.</summary>
	public EbnfRepeat Repeat ()
	{
		return Repeat (this);
	}

	/// <summary>Repeats this element at least <paramref name="min"/> times.</summary>
	/// <param name="min">Minimum number of occurrences.</param>
	public EbnfRepeat Repeat (int min)
	{
		return Repeat (this, min);
	}

	/// <summary>Repeats this element between <paramref name="min"/> and <paramref name="max"/> times.</summary>
	/// <param name="min">Minimum number of occurrences.</param>
	/// <param name="max">Maximum number of occurrences.</param>
	public EbnfRepeat Repeat (int min, int max)
	{
		return Repeat (this, min, max);
	}

	// ********************************************************************
	// **************************** Optional ******************************
	// ********************************************************************

	/// <param name="x">The element that will be made optional.</param>
	public static EbnfOptional operator ! (Ebnf x)
	{
		return Optional (x);
	}

	/// <summary>
	/// Creates an optional element.
	/// The same behaviour can be achieved by using <code>Ebnf.Repeat(x, 0, 1)</code>
	/// </summary>
	/// <param name="x">The element that will be made optional.</param>
	public static EbnfOptional Optional (Ebnf x)
	{
		EbnfOptional n;
		// making an optional element optional again is a no-op
		if (x is EbnfOptional) {
			n = (EbnfOptional)x;
		} else {
			n = new EbnfOptional (x);
		}
		return n;
	}

	/// <summary>Makes this element optional.</summary>
	public EbnfOptional Optional ()
	{
		return Optional (this);
	}

	// ********************************************************************
	// **************************** Negation ******************************
	// ********************************************************************

	/// <param name="x">The element that must not be matched.</param>
	public static EbnfExclusion operator - (Ebnf x)
	{
		return Not (x);
	}

	/// <summary>
	/// Negates the specified element.
	/// </summary>
	/// <param name="x">The element that must not be matched.</param>
	public static EbnfExclusion Not (Ebnf x)
	{
		EbnfExclusion n = new EbnfExclusion (x);
		return n;
	}

	/// <summary>Negates this element.</summary>
	public EbnfExclusion Exclude ()
	{
		return Not (this);
	}

	// ********************************************************************
	// *************************** Permutation ****************************
	// ********************************************************************

	/// <param name="x1">The first element in the permutation.</param>
	/// <param name="x2">The second element in the permutation.</param>
	public static Ebnf operator ^ (Ebnf x1, Ebnf x2)
	{
		return Permutation (x1, x2);
	}

	/// <summary>
	/// Creates a new permutation element combining the two elements x1 and x2.
	/// If either x1, x2 or both are already permutations, their entries are added to the new permutation rather than adding both elements directly.
	/// But if any element x1 or x2 already has a compile action, it is added directly to ensure the compilation.
	/// <remarks>Please notice that, unlike list and choice, parsing shows a different behavior when a compilation action is present.
	/// (a^b) ^ (c^d) matches any combination of 'abcd' without compilation action, but only abcd, bacd, abdc, badc, cdab, cdba, dcab, dcba with compilation action.
	/// </remarks>
	/// </summary>
	/// <param name="x1">The first element in the permutation.</param>
	/// <param name="x2">The second element in the permutation.</param>
	public static EbnfPermutation Permutation (Ebnf x1, Ebnf x2)
	{
		EbnfPermutation n = new EbnfPermutation ();
		// if x1 or x2 is a generic permutation, merge its elements (skip duplicates)
		if (x1 is EbnfPermutation && x1.IsGeneric) {
			n._list.AddRange (((EbnfPermutation)x1)._list);
			if (x2 is EbnfPermutation && x2.IsGeneric) {
				foreach (var item in ((EbnfPermutation)x2)._list) {
					if (!n._list.Contains (item)) {
						n._list.Add (item);
					}
				}
			} else {
				if (!n._list.Contains (x2)) {
					n._list.Add (x2);
				}
			}
		} else {
			n._list.Add (x1);
			if (x2 is EbnfPermutation && x2.IsGeneric) {
				foreach (var item in ((EbnfPermutation)x2)._list) {
					if (!n._list.Contains (item)) {
						n._list.Add (item);
					}
				}
			} else {
				if (x1 != x2) {
					n._list.Add (x2);
				}
			}
		}
		return n;
	}

	/// <param name="s">The string that is converted into an Ebnf list, matching exactly the passed string.</param>
	public static implicit operator Ebnf (string s)
	{
		EbnfList n = new EbnfList ();
		foreach (char c in s) {
			n._list.Add (new EbnfChar (c));
		}
		// the source string doubles as the element's label
		n._label = s;
		return n;
	}

	/// <param name="c">The character that is converted to an Ebnf character, that must be matched.</param>
	public static implicit operator Ebnf (char c)
	{
		return new EbnfChar (c);
	}

	#region ICloneable implementation

	object ICloneable.Clone ()
	{
		return this.Clone ();
	}

	#endregion

	/// <summary>
	/// Creates a clone of this instance.
	/// </summary>
	public abstract Ebnf Clone ();

	/// <param name="x1">The first element to compare.</param>
	/// <param name="x2">The second element to compare.</param>
	public static bool operator == (Ebnf x1, Ebnf x2)
	{
		// BUG FIX: previously this dereferenced x1 unconditionally, so
		// "ebnf == null" (or any null left operand) threw a
		// NullReferenceException. Handle null on either side explicitly.
		if (object.ReferenceEquals (x1, x2)) {
			return true;
		}
		if (object.ReferenceEquals (x1, null) || object.ReferenceEquals (x2, null)) {
			return false;
		}
		return x1.Equals (x2);
	}

	/// <param name="x1">The first element to compare.</param>
	/// <param name="x2">The second element to compare.</param>
	public static bool operator != (Ebnf x1, Ebnf x2)
	{
		// delegate to == so null handling stays in one place
		return !(x1 == x2);
	}

	/// <summary>
	/// Gets or sets the compile action. If set, the scopetype will be set to Force.
	/// </summary>
	/// <value>The compile action.</value>
	public CompileAction CompileAction {
		get { return this._compile; }
		set { this._compile = value; }
	}

	/// <summary>
	/// Gets or sets the parse action. If set, the scopetype will be set to Force.
	/// </summary>
	/// <value>The parse action.</value>
	public ParseAction ParseAction {
		get { return this._parse == null ? DefaultParseAction : this._parse; }
		set { this._parse = value; }
	}

	// Resolves the effective scope for parsing: generic elements inherit the
	// surrounding scope, non-generic elements (custom parse/compile) force
	// their own scope.
	internal ScopeType GetScopeType (ScopeType stdScope)
	{
		ScopeType scope = this._scopeType;
		switch (this._scopeType) {
		case ScopeType.Default:
			if (this.IsGeneric) {
				scope = stdScope == ScopeType.Inhired ? ScopeType.Default : stdScope;
			} else {
				scope = ScopeType.Force;
			}
			break;
		case ScopeType.Inhired:
			if (this.IsGeneric) {
				scope = ScopeType.Default;
			} else {
				scope = ScopeType.Force;
			}
			break;
		default:
			break;
		}
		return scope;
	}

	/// <summary>
	/// Gets or sets the scope type.
	/// </summary>
	/// <value>The scope type.</value>
	public ScopeType ScopeType {
		get { return this._scopeType; }
		set { this._scopeType = value; }
	}

	/// <summary>
	/// Gets a new EOF element.
	/// </summary>
	/// <value>The EOF element.</value>
	public static EbnfEOF EOF {
		get {
			EbnfEOF result = new EbnfEOF ();
			return result;
		}
	}

	/// <summary>
	/// Gets a new any-char element.
	/// </summary>
	/// <value>The Any-char element.</value>
	public static EbnfAny AnyChar {
		get {
			EbnfAny result = new EbnfAny ();
			return result;
		}
	}

	/// <summary>
	/// Gets a new any-char element that also accepts the end of input.
	/// </summary>
	/// <value>The Any-char element.</value>
	public static EbnfAny AnyCharOrEOF {
		get {
			EbnfAny result = new EbnfAny ();
			result._allowEOF = true;
			return result;
		}
	}

	/// <summary>
	/// Gets a new empty list.
	/// </summary>
	/// <value>The new empty list.</value>
	public static EbnfList EmptyList {
		get {
			EbnfList n = new EbnfList ();
			return n;
		}
	}

	/// <summary>
	/// Gets a new empty choice.
	/// (Property name spelling kept for source compatibility.)
	/// </summary>
	/// <value>The new empty choice.</value>
	public static EbnfChoice EmptyChoise {
		get {
			EbnfChoice n = new EbnfChoice ();
			return n;
		}
	}

	/// <summary>
	/// Gets a new empty permutation.
	/// </summary>
	/// <value>The new empty permutation.</value>
	public static EbnfPermutation EmptyPermutation {
		get {
			EbnfPermutation n = new EbnfPermutation ();
			return n;
		}
	}

	// Pretty-prints the element; depth limits/indents recursion — TODO confirm.
	internal abstract string ToString (int depth);

	/// <summary>
	/// Gets or sets the error message reported when parsing this element fails.
	/// </summary>
	public string ErrorMessage {
		get { return _error; }
		set { _error = value; }
	}
}
}
using System;
using Microsoft.Data.Entity;
using Microsoft.Data.Entity.Metadata;
using Microsoft.Data.Entity.Relational.Migrations;
using Microsoft.Data.Entity.Relational.Migrations.Builders;
using Microsoft.Data.Entity.Relational.Migrations.Infrastructure;

namespace WebApplication.Migrations
{
    // Scaffolded EF7 (7.0.0-beta2) migration that creates the ASP.NET Identity
    // schema: roles, role claims, user claims, user logins, user-role join
    // table and the users table, plus the foreign keys between them.
    // NOTE(review): AspNetUserRoles gets no foreign keys here — the target
    // model below declares none for IdentityUserRole either; presumably
    // intentional for this scaffold, but verify against the context.
    public partial class CreateIdentitySchema : Migration
    {
        // Applies the migration: create all six tables, then wire up the
        // foreign keys (FKs added after table creation so ordering is safe).
        public override void Up(MigrationBuilder migrationBuilder)
        {
            // Role store.
            migrationBuilder.CreateTable("AspNetRoles",
                c => new
                {
                    Id = c.String(),
                    Name = c.String()
                })
                .PrimaryKey("PK_AspNetRoles", t => t.Id);

            // Claims attached to roles; Id is an identity column.
            migrationBuilder.CreateTable("AspNetRoleClaims",
                c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    ClaimType = c.String(),
                    ClaimValue = c.String(),
                    RoleId = c.String()
                })
                .PrimaryKey("PK_AspNetRoleClaims", t => t.Id);

            // Claims attached directly to users; Id is an identity column.
            migrationBuilder.CreateTable("AspNetUserClaims",
                c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    ClaimType = c.String(),
                    ClaimValue = c.String(),
                    UserId = c.String()
                })
                .PrimaryKey("PK_AspNetUserClaims", t => t.Id);

            // External login registrations; composite key (provider, provider key).
            migrationBuilder.CreateTable("AspNetUserLogins",
                c => new
                {
                    LoginProvider = c.String(),
                    ProviderKey = c.String(),
                    ProviderDisplayName = c.String(),
                    UserId = c.String()
                })
                .PrimaryKey("PK_AspNetUserLogins", t => new { t.LoginProvider, t.ProviderKey });

            // User-to-role join table; composite key (user, role).
            migrationBuilder.CreateTable("AspNetUserRoles",
                c => new
                {
                    UserId = c.String(),
                    RoleId = c.String()
                })
                .PrimaryKey("PK_AspNetUserRoles", t => new { t.UserId, t.RoleId });

            // User store.
            migrationBuilder.CreateTable("AspNetUsers",
                c => new
                {
                    Id = c.String(),
                    AccessFailedCount = c.Int(nullable: false),
                    ConcurrencyStamp = c.String(),
                    Email = c.String(),
                    EmailConfirmed = c.Boolean(nullable: false),
                    LockoutEnabled = c.Boolean(nullable: false),
                    LockoutEnd = c.DateTimeOffset(),
                    NormalizedEmail = c.String(),
                    NormalizedUserName = c.String(),
                    PasswordHash = c.String(),
                    PhoneNumber = c.String(),
                    PhoneNumberConfirmed = c.Boolean(nullable: false),
                    SecurityStamp = c.String(),
                    TwoFactorEnabled = c.Boolean(nullable: false),
                    UserName = c.String()
                })
                .PrimaryKey("PK_AspNetUsers", t => t.Id);

            // Foreign keys (no cascade delete on any of them).
            migrationBuilder.AddForeignKey(
                "AspNetRoleClaims",
                "FK_AspNetRoleClaims_AspNetRoles_RoleId",
                new[] { "RoleId" },
                "AspNetRoles",
                new[] { "Id" },
                cascadeDelete: false);

            migrationBuilder.AddForeignKey(
                "AspNetUserClaims",
                "FK_AspNetUserClaims_AspNetUsers_UserId",
                new[] { "UserId" },
                "AspNetUsers",
                new[] { "Id" },
                cascadeDelete: false);

            migrationBuilder.AddForeignKey(
                "AspNetUserLogins",
                "FK_AspNetUserLogins_AspNetUsers_UserId",
                new[] { "UserId" },
                "AspNetUsers",
                new[] { "Id" },
                cascadeDelete: false);
        }

        // Reverts the migration: drop foreign keys first, then all tables.
        public override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropForeignKey("AspNetRoleClaims", "FK_AspNetRoleClaims_AspNetRoles_RoleId");

            migrationBuilder.DropForeignKey("AspNetUserClaims", "FK_AspNetUserClaims_AspNetUsers_UserId");

            migrationBuilder.DropForeignKey("AspNetUserLogins", "FK_AspNetUserLogins_AspNetUsers_UserId");

            migrationBuilder.DropTable("AspNetRoles");

            migrationBuilder.DropTable("AspNetRoleClaims");

            migrationBuilder.DropTable("AspNetUserClaims");

            migrationBuilder.DropTable("AspNetUserLogins");

            migrationBuilder.DropTable("AspNetUserRoles");

            migrationBuilder.DropTable("AspNetUsers");
        }
    }

    // Scaffolded migration metadata: id, tooling version and a snapshot of the
    // model this migration targets (used by EF to diff subsequent migrations).
    [ContextType(typeof(Models.ApplicationDbContext))]
    public partial class CreateIdentitySchema : IMigrationMetadata
    {
        string IMigrationMetadata.MigrationId
        {
            get
            {
                return "000000000000000_CreateIdentitySchema";
            }
        }

        string IMigrationMetadata.ProductVersion
        {
            get
            {
                return "7.0.0-beta2";
            }
        }

        IModel IMigrationMetadata.TargetModel
        {
            get
            {
                var builder = new BasicModelBuilder();

                builder.Entity("Microsoft.AspNet.Identity.IdentityRole", b =>
                    {
                        b.Property<string>("Id");
                        b.Property<string>("Name");
                        b.Key("Id");
                        b.ForRelational().Table("AspNetRoles");
                    });

                builder.Entity("Microsoft.AspNet.Identity.IdentityRoleClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]", b =>
                    {
                        b.Property<string>("ClaimType");
                        b.Property<string>("ClaimValue");
                        // Store-generated identity value.
                        b.Property<int>("Id")
                            .GenerateValueOnAdd();
                        b.Property<string>("RoleId");
                        b.Key("Id");
                        b.ForRelational().Table("AspNetRoleClaims");
                    });

                builder.Entity("Microsoft.AspNet.Identity.IdentityUserClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]", b =>
                    {
                        b.Property<string>("ClaimType");
                        b.Property<string>("ClaimValue");
                        // Store-generated identity value.
                        b.Property<int>("Id")
                            .GenerateValueOnAdd();
                        b.Property<string>("UserId");
                        b.Key("Id");
                        b.ForRelational().Table("AspNetUserClaims");
                    });

                builder.Entity("Microsoft.AspNet.Identity.IdentityUserLogin`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]", b =>
                    {
                        b.Property<string>("LoginProvider");
                        b.Property<string>("ProviderDisplayName");
                        b.Property<string>("ProviderKey");
                        b.Property<string>("UserId");
                        b.Key("LoginProvider", "ProviderKey");
                        b.ForRelational().Table("AspNetUserLogins");
                    });

                builder.Entity("Microsoft.AspNet.Identity.IdentityUserRole`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]", b =>
                    {
                        b.Property<string>("RoleId");
                        b.Property<string>("UserId");
                        b.Key("UserId", "RoleId");
                        b.ForRelational().Table("AspNetUserRoles");
                    });

                builder.Entity("WebApplication.Models.ApplicationUser", b =>
                    {
                        b.Property<int>("AccessFailedCount");
                        b.Property<string>("ConcurrencyStamp");
                        b.Property<string>("Email");
                        b.Property<bool>("EmailConfirmed");
                        b.Property<string>("Id");
                        b.Property<bool>("LockoutEnabled");
                        b.Property<DateTimeOffset?>("LockoutEnd");
                        b.Property<string>("NormalizedEmail");
                        b.Property<string>("NormalizedUserName");
                        b.Property<string>("PasswordHash");
                        b.Property<string>("PhoneNumber");
                        b.Property<bool>("PhoneNumberConfirmed");
                        b.Property<string>("SecurityStamp");
                        b.Property<bool>("TwoFactorEnabled");
                        b.Property<string>("UserName");
                        b.Key("Id");
                        b.ForRelational().Table("AspNetUsers");
                    });

                // Relationships, declared after all entities exist.
                builder.Entity("Microsoft.AspNet.Identity.IdentityRoleClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]", b =>
                    {
                        b.ForeignKey("Microsoft.AspNet.Identity.IdentityRole", "RoleId");
                    });

                builder.Entity("Microsoft.AspNet.Identity.IdentityUserClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]", b =>
                    {
                        b.ForeignKey("WebApplication.Models.ApplicationUser", "UserId");
                    });

                builder.Entity("Microsoft.AspNet.Identity.IdentityUserLogin`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]", b =>
                    {
                        b.ForeignKey("WebApplication.Models.ApplicationUser", "UserId");
                    });

                return builder.Model;
            }
        }
    }
}
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

namespace Microsoft.WindowsAzure.Management.ServiceManagement.Test.FunctionalTests
{
    using System;
    using System.IO;
    using System.Collections.ObjectModel;
    using System.Reflection;
    using System.Threading;
    using Microsoft.VisualStudio.TestTools.UnitTesting;
    using Microsoft.WindowsAzure.Management.ServiceManagement.Model;
    using Microsoft.WindowsAzure.Management.ServiceManagement.Test.FunctionalTests.ConfigDataInfo;
    using System.Collections.Generic;
    using Microsoft.WindowsAzure.Management.ServiceManagement.PlatformImageRepository.Model;

    /// <summary>
    /// Functional tests for the platform image repository (PIR) cmdlets:
    /// Get/Set/Remove-AzurePlatformVMImage. The tests switch between a
    /// "publisher" subscription (which replicates and publishes the image)
    /// and a "normal user" subscription (which consumes it).
    /// </summary>
    [TestClass]
    public class PIRTest : ServiceManagementTest
    {
        private const string vhdNamePrefix = "pirtestosvhd";
        private const string imageNamePrefix = "pirtestosimage";
        private string vhdName;
        private string vhdBlobLocation;
        // Name of the OS image created from the uploaded VHD for each test.
        private string image;

        private const string location1 = "West US";
        private const string location2 = "North Central US";
        private const string location3 = "East US";

        // Named subscriptions the tests switch between.
        private const string publisher = "publisher1";
        private const string normaluser = "normaluser2";
        private const string storageNormalUser = "normalstorage";

        [ClassInitialize]
        public static void ClassInit(TestContext context)
        {
            // BUG FIX: the original used defaultAzureSubscription.Equals(null),
            // which throws a NullReferenceException in exactly the situation it
            // is meant to detect (no subscription selected). Use a null check.
            if (defaultAzureSubscription == null)
            {
                Assert.Inconclusive("No Subscription is selected!");
            }
        }

        /// <summary>
        /// Per-test setup: upload (or reuse) a test VHD and register an OS
        /// image from it under a unique name.
        /// </summary>
        [TestInitialize]
        public void Initialize()
        {
            vhdName = Utilities.GetUniqueShortName(vhdNamePrefix);
            image = Utilities.GetUniqueShortName(imageNamePrefix);
            vhdBlobLocation = string.Format("{0}{1}/{2}", blobUrlRoot, vhdContainerName, vhdName);

            try
            {
                if (string.IsNullOrEmpty(localFile))
                {
                    // No local VHD configured: copy the shared test data blob.
                    CredentialHelper.CopyTestData(testDataContainer, osVhdName, vhdContainerName, vhdName);
                }
                else
                {
                    vmPowershellCmdlets.AddAzureVhd(new FileInfo(localFile), vhdBlobLocation);
                }
            }
            catch (Exception e)
            {
                if (e.ToString().Contains("already exists") || e.ToString().Contains("currently a lease"))
                {
                    // Use the already uploaded vhd.
                    Console.WriteLine("Using already uploaded blob..");
                }
                else
                {
                    Console.WriteLine(e.ToString());
                    Assert.Inconclusive("Upload vhd is not set!");
                }
            }

            try
            {
                vmPowershellCmdlets.AddAzureVMImage(image, vhdBlobLocation, OS.Windows);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }

            pass = false;
            testStartTime = DateTime.Now;
        }

        /// <summary>
        /// This test covers Get-AzurePlatformVMImage, Set-AzurePlatformVMImage and Remove-AzurePlatformVMImage cmdlets
        /// </summary>
        [TestMethod(), TestCategory("PIRTest"), TestProperty("Feature", "IAAS"), Priority(1), Owner("hylee"), Description("Test the cmdlet (Get,Set,Remove)-AzurePlatformVMImage)")]
        public void AzurePlatformVMImageSingleLocationTest()
        {
            StartTest(MethodBase.GetCurrentMethod().Name, testStartTime);

            try
            {
                // starting the test.
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Replicate the user image to "West US" and wait until the replication process is completed.
                vmPowershellCmdlets.SetAzurePlatformVMImageReplicate(image, new string[] { location1 });
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));
                WaitForReplicationComplete(image);

                // Make the replicated image public and wait until the PIR image shows up.
                vmPowershellCmdlets.SetAzurePlatformVMImagePublic(image);
                OSImageContext pirImage = WaitForPIRAppear(image, publisher);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Check the locations of the PIR image: only location1 expected.
                string pirlocations = vmPowershellCmdlets.GetAzureVMImage(pirImage.ImageName)[0].Location;
                Assert.IsTrue(pirlocations.Contains(location1));
                Assert.IsFalse(pirlocations.Contains(location2));
                Assert.IsFalse(pirlocations.Contains(location3));

                // Switch to the normal User and check the PIR image.
                SwitchToNormalUser();
                Assert.IsTrue(Utilities.CheckRemove(vmPowershellCmdlets.GetAzureVMImage, image));
                WaitForPIRAppear(image, publisher);

                // Switch to the publisher and make the PIR image private.
                SwitchToPublisher();
                vmPowershellCmdlets.SetAzurePlatformVMImagePrivate(image);

                // Switch to the normal User and wait until the PIR image disappears.
                SwitchToNormalUser();
                WaitForPIRDisappear(pirImage.ImageName);

                // Switch to the publisher and remove the PIR image.
                SwitchToPublisher();
                vmPowershellCmdlets.RemoveAzurePlatformVMImage(image);
                Assert.AreEqual(0, vmPowershellCmdlets.GetAzurePlatformVMImage(image).ReplicationProgress.Count);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                pass = true;
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// This test covers Get-AzurePlatformVMImage, Set-AzurePlatformVMImage and Remove-AzurePlatformVMImage cmdlets
        /// </summary>
        [TestMethod(), TestCategory("PIRTest"), TestProperty("Feature", "IAAS"), Priority(1), Owner("hylee"), Description("Test the cmdlet (Get,Set,Remove)-AzurePlatformVMImage)")]
        public void AzurePlatformVMImageMultipleLocationsTest()
        {
            StartTest(MethodBase.GetCurrentMethod().Name, testStartTime);

            try
            {
                // starting the test.
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Replicate the user image to two regions and wait until the replication process is completed.
                vmPowershellCmdlets.SetAzurePlatformVMImageReplicate(image, new string[] { location1, location2 });
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));
                WaitForReplicationComplete(image);

                // Make the replicated image public and wait until the PIR image shows up.
                vmPowershellCmdlets.SetAzurePlatformVMImagePublic(image);
                OSImageContext pirImage = WaitForPIRAppear(image, publisher);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                // Check the locations of the PIR image: location1 and location2 expected.
                string pirlocations = vmPowershellCmdlets.GetAzureVMImage(pirImage.ImageName)[0].Location;
                Assert.IsTrue(pirlocations.Contains(location1));
                Assert.IsTrue(pirlocations.Contains(location2));
                Assert.IsFalse(pirlocations.Contains(location3));

                // Switch to the normal User and check the PIR image.
                SwitchToNormalUser();
                Assert.IsTrue(Utilities.CheckRemove(vmPowershellCmdlets.GetAzureVMImage, image));
                WaitForPIRAppear(image, publisher);

                // Switch to the publisher and make the PIR image private.
                SwitchToPublisher();
                vmPowershellCmdlets.SetAzurePlatformVMImagePrivate(image);

                // Switch to the normal User and wait until the PIR image disappears.
                SwitchToNormalUser();
                WaitForPIRDisappear(pirImage.ImageName);

                // Switch to the publisher and remove the PIR image.
                SwitchToPublisher();
                vmPowershellCmdlets.RemoveAzurePlatformVMImage(image);
                Assert.AreEqual(0, vmPowershellCmdlets.GetAzurePlatformVMImage(image).ReplicationProgress.Count);
                PrintOSImageDetailsContext(vmPowershellCmdlets.GetAzurePlatformVMImage(image));

                pass = true;
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// End-to-end scenario: publish a PIR image and create a VM from it as
        /// a normal (non-publisher) user.
        /// </summary>
        [TestMethod(), TestCategory("PIRTest"), TestProperty("Feature", "IAAS"), Priority(1), Owner("hylee"), Description("Test the cmdlet (Get,Set,Remove)-AzurePlatformVMImage)")]
        public void AzurePlatformVMImageScenarioTest()
        {
            StartTest(MethodBase.GetCurrentMethod().Name, testStartTime);

            string vmName = Utilities.GetUniqueShortName("pirtestvm");
            string svcName = Utilities.GetUniqueShortName("pirtestservice");

            try
            {
                SwitchToNormalUser();

                // Make sure the normal user's storage account exists; create it on demand.
                try
                {
                    vmPowershellCmdlets.GetAzureStorageAccount(storageNormalUser);
                }
                catch (Exception e)
                {
                    if (e.ToString().Contains("ResourceNotFound"))
                    {
                        vmPowershellCmdlets.NewAzureStorageAccount(storageNormalUser, location1);
                    }
                    else
                    {
                        Console.WriteLine(e.ToString());
                        throw;
                    }
                }
                vmPowershellCmdlets.SetAzureSubscription(normaluser, storageNormalUser);

                // Replicate the user image to "West US" and wait until the replication process is completed.
                SwitchToPublisher();
                vmPowershellCmdlets.SetAzurePlatformVMImageReplicate(image, new string[] { location1 });

                // Make the replicated image public and wait until the PIR image shows up.
                vmPowershellCmdlets.SetAzurePlatformVMImagePublic(image);
                OSImageContext pirImage = WaitForPIRAppear(image, publisher);

                // Switch to the normal User and check the PIR image.
                SwitchToNormalUser();
                WaitForPIRAppear(image, publisher);

                // Create a VM using the PIR image.
                vmPowershellCmdlets.NewAzureQuickVM(OS.Windows, vmName, svcName, pirImage.ImageName, username, password, location1);
                Console.WriteLine("VM, {0}, is successfully created using the uploaded PIR image", vmPowershellCmdlets.GetAzureVM(vmName, svcName).Name);

                // Remove the service and VM.
                vmPowershellCmdlets.RemoveAzureService(svcName);

                // Switch to the publisher and remove the PIR image.
                SwitchToPublisher();
                vmPowershellCmdlets.RemoveAzurePlatformVMImage(image);

                pass = true;
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// Per-test cleanup: always switch back to the publisher, then remove
        /// the OS image if the cleanup flags request it.
        /// </summary>
        [TestCleanup]
        public virtual void CleanUp()
        {
            SwitchToPublisher();
            Console.WriteLine("Test {0}", pass ? "passed" : "failed");

            if ((cleanupIfPassed && pass) || (cleanupIfFailed && !pass))
            {
                Console.WriteLine("Starting to clean up created VM and service.");
                try
                {
                    vmPowershellCmdlets.RemoveAzureVMImage(image, true);
                }
                catch (Exception e)
                {
                    Console.WriteLine("Exception occurs during cleanup: {0}", e.ToString());
                }
                // NOTE(review): an empty try { } catch block followed here in
                // the original; it executed nothing and has been removed.
            }
        }

        // Select the publisher subscription as the default.
        private void SwitchToPublisher()
        {
            vmPowershellCmdlets.SetDefaultAzureSubscription(publisher);
        }

        // Select the normal-user subscription as the default.
        private void SwitchToNormalUser()
        {
            vmPowershellCmdlets.SetDefaultAzureSubscription(normaluser);
        }

        // Polls the image's replication progress until every region reports "100".
        // NOTE(review): this loop has no timeout and no sleep between polls, so a
        // stalled replication busy-polls forever — consider a max wait; left
        // unchanged to preserve behavior.
        private void WaitForReplicationComplete(string imageName)
        {
            DateTime startTime = DateTime.Now;
            OSImageDetailsContext state;

            try
            {
                do
                {
                    state = vmPowershellCmdlets.GetAzurePlatformVMImage(imageName);
                    foreach (var repro in state.ReplicationProgress)
                    {
                        Console.WriteLine(repro.ToString());
                    }
                }
                while (!state.ReplicationProgress.TrueForAll((s) => (s.Progress.Equals("100"))));

                Console.WriteLine("Replication completed after {0} minutes.", (DateTime.Now - startTime).TotalMinutes);
                PrintOSImageDetailsContext(state);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
                throw;
            }
        }

        // Polls the image list until a PIR image matching the name and publisher
        // appears, or fails the test after maxWaitTimeInMin minutes.
        private OSImageContext WaitForPIRAppear(string imageName, string publisherName, int waitTimeInMin = 1, int maxWaitTimeInMin = 30)
        {
            DateTime startTime = DateTime.Now;

            while (true)
            {
                Collection<OSImageContext> vmImages = vmPowershellCmdlets.GetAzureVMImage();
                // Renamed loop variable (was "image") to stop shadowing the
                // string field of the same name.
                foreach (OSImageContext vmImage in vmImages)
                {
                    if (Utilities.MatchKeywords(vmImage.ImageName, new[] { imageName }, false) >= 0 && vmImage.PublisherName.Equals(publisherName))
                    {
                        Console.WriteLine("MATCHED PIR image found after {0} minutes:", (DateTime.Now - startTime).TotalMinutes);
                        PrintContext<OSImageContext>(vmImage);
                        return vmImage;
                    }
                }

                if ((DateTime.Now - startTime).TotalMinutes < maxWaitTimeInMin)
                {
                    Thread.Sleep(waitTimeInMin * 1000 * 60);
                }
                else
                {
                    Assert.Fail("Cannot get PIR image, {0}, within {1} minutes!", imageName, maxWaitTimeInMin);
                }
            }
        }

        // Polls until fetching the image throws ResourceNotFound (i.e. the PIR
        // image is gone), or fails the test after maxWaitTimeInMin minutes.
        private bool WaitForPIRDisappear(string imageName, int waitTimeInMin = 1, int maxWaitTimeInMin = 30)
        {
            DateTime startTime = DateTime.Now;

            while (true)
            {
                try
                {
                    // The [0] index is part of the probe: either the fetch throws
                    // ResourceNotFound (image gone) or the element exists. The
                    // value itself is intentionally unused.
                    OSImageContext imageContext = vmPowershellCmdlets.GetAzureVMImage(imageName)[0];

                    if ((DateTime.Now - startTime).TotalMinutes < maxWaitTimeInMin)
                    {
                        Thread.Sleep(waitTimeInMin * 1000 * 60);
                    }
                    else
                    {
                        Assert.Fail("Still has image, {0}, after {1} minutes!", imageName, maxWaitTimeInMin);
                    }
                }
                catch (Exception e)
                {
                    if (e.ToString().Contains("ResourceNotFound"))
                    {
                        Console.WriteLine("Image {0} disappered after {1} minutes.", imageName, (DateTime.Now - startTime).TotalMinutes);
                        return true;
                    }
                    else
                    {
                        Console.WriteLine(e.ToString());
                        throw;
                    }
                }
            }
        }

        // Dumps the simple (string/int/Uri/nullable) declared public instance
        // properties of any context object to the console.
        private void PrintContext<T>(T obj)
        {
            Type type = typeof(T);

            foreach (PropertyInfo property in type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly))
            {
                string typeName = property.PropertyType.FullName;
                if (typeName.Equals("System.String") || typeName.Equals("System.Int32") || typeName.Equals("System.Uri") || typeName.Contains("Nullable"))
                {
                    Console.WriteLine("{0}: {1}", property.Name, property.GetValue(obj, null));
                }
            }
        }

        // Dumps an image details context including its per-region replication progress.
        private void PrintOSImageDetailsContext(OSImageDetailsContext context)
        {
            PrintContext<OSImageContext>(context);
            foreach (var repro in context.ReplicationProgress)
            {
                Console.WriteLine("ReplicationProgress: {0}", repro.ToString());
            }
            if (context.ReplicationProgress.Count == 0)
            {
                Console.WriteLine("There is no replication!");
            }
            Console.WriteLine("IsCorrupted {0}", context.IsCorrupted);
        }
    }
}
namespace XenAdmin.Controls.Wlb
{
    // Designer half of the WlbReportView partial class (WinForms ReportViewer host
    // for Workload Balancing reports). This file is designer-generated: code changes
    // belong in the other half of the partial class, not here.
    partial class WlbReportView
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            // Localized text/layout comes from WlbReportView.resx via ApplyResources below.
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(WlbReportView));
            this.labelHostCombo = new System.Windows.Forms.Label();
            this.hostComboBox = new System.Windows.Forms.ComboBox();
            this.btnRunReport = new System.Windows.Forms.Button();
            this.btnLaterReport = new System.Windows.Forms.Button();
            this.labelEndDate = new System.Windows.Forms.Label();
            this.EndDatePicker = new System.Windows.Forms.DateTimePicker();
            this.labelStartDate = new System.Windows.Forms.Label();
            this.StartDatePicker = new System.Windows.Forms.DateTimePicker();
            this.reportViewer1 = new Microsoft.Reporting.WinForms.ReportViewer();
            this.btnSubscribe = new System.Windows.Forms.Button();
            this.btnClose = new System.Windows.Forms.Button();
            this.comboBoxView = new System.Windows.Forms.ComboBox();
            this.labelShow = new System.Windows.Forms.Label();
            this.panelHosts = new System.Windows.Forms.Panel();
            this.panelShow = new System.Windows.Forms.Panel();
            this.flowLayoutPanel1 = new System.Windows.Forms.FlowLayoutPanel();
            this.timer1 = new System.Windows.Forms.Timer(this.components);
            this.lblExported = new System.Windows.Forms.Label();
            this.flowLayoutPanelButtons = new System.Windows.Forms.FlowLayoutPanel();
            this.toolTip1 = new System.Windows.Forms.ToolTip(this.components);
            this.flowLayoutPanel2 = new System.Windows.Forms.FlowLayoutPanel();
            this.panelUsers = new System.Windows.Forms.Panel();
            this.userComboBox = new XenAdmin.Controls.LongStringComboBox();
            this.labelUsers = new System.Windows.Forms.Label();
            this.panelObjects = new System.Windows.Forms.Panel();
            this.labelObjects = new System.Windows.Forms.Label();
            this.objectComboBox = new System.Windows.Forms.ComboBox();
            this.panelHosts.SuspendLayout();
            this.panelShow.SuspendLayout();
            this.flowLayoutPanel1.SuspendLayout();
            this.flowLayoutPanelButtons.SuspendLayout();
            this.flowLayoutPanel2.SuspendLayout();
            this.panelUsers.SuspendLayout();
            this.panelObjects.SuspendLayout();
            this.SuspendLayout();
            //
            // labelHostCombo
            //
            resources.ApplyResources(this.labelHostCombo, "labelHostCombo");
            this.labelHostCombo.Name = "labelHostCombo";
            //
            // hostComboBox
            //
            this.hostComboBox.DrawMode = System.Windows.Forms.DrawMode.OwnerDrawFixed;
            this.hostComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.hostComboBox.FormattingEnabled = true;
            resources.ApplyResources(this.hostComboBox, "hostComboBox");
            this.hostComboBox.Name = "hostComboBox";
            this.hostComboBox.DrawItem += new System.Windows.Forms.DrawItemEventHandler(this.comboBox_DrawItem);
            this.hostComboBox.Leave += new System.EventHandler(this.comboBox_DropDownClosed);
            this.hostComboBox.DropDownClosed += new System.EventHandler(this.comboBox_DropDownClosed);
            //
            // btnRunReport
            //
            resources.ApplyResources(this.btnRunReport, "btnRunReport");
            this.btnRunReport.Name = "btnRunReport";
            this.btnRunReport.UseVisualStyleBackColor = true;
            this.btnRunReport.Click += new System.EventHandler(this.btnRunReport_Click);
            //
            // btnLaterReport
            //
            resources.ApplyResources(this.btnLaterReport, "btnLaterReport");
            this.btnLaterReport.Name = "btnLaterReport";
            this.btnLaterReport.UseVisualStyleBackColor = true;
            this.btnLaterReport.Click += new System.EventHandler(this.btnLaterReport_Click);
            //
            // labelEndDate
            //
            resources.ApplyResources(this.labelEndDate, "labelEndDate");
            this.labelEndDate.Name = "labelEndDate";
            //
            // EndDatePicker
            //
            resources.ApplyResources(this.EndDatePicker, "EndDatePicker");
            this.EndDatePicker.Name = "EndDatePicker";
            this.EndDatePicker.ValueChanged += new System.EventHandler(this.comboBox_SelectionChanged);
            //
            // labelStartDate
            //
            resources.ApplyResources(this.labelStartDate, "labelStartDate");
            this.labelStartDate.Name = "labelStartDate";
            //
            // StartDatePicker
            //
            resources.ApplyResources(this.StartDatePicker, "StartDatePicker");
            this.StartDatePicker.Name = "StartDatePicker";
            this.StartDatePicker.ValueChanged += new System.EventHandler(this.comboBox_SelectionChanged);
            //
            // reportViewer1
            //
            resources.ApplyResources(this.reportViewer1, "reportViewer1");
            this.reportViewer1.Name = "reportViewer1";
            this.reportViewer1.ShowRefreshButton = false;
            this.reportViewer1.ReportExport += new Microsoft.Reporting.WinForms.ExportEventHandler(this.reportViewer1_ReportExport);
            this.reportViewer1.Back += new Microsoft.Reporting.WinForms.BackEventHandler(this.reportViewer1_Back);
            this.reportViewer1.Drillthrough += new Microsoft.Reporting.WinForms.DrillthroughEventHandler(this.reportViewer1_Drillthrough);
            //
            // btnSubscribe
            //
            resources.ApplyResources(this.btnSubscribe, "btnSubscribe");
            this.btnSubscribe.Name = "btnSubscribe";
            this.btnSubscribe.UseVisualStyleBackColor = true;
            this.btnSubscribe.Click += new System.EventHandler(this.btnSubscribe_Click);
            //
            // btnClose
            //
            resources.ApplyResources(this.btnClose, "btnClose");
            this.btnClose.Name = "btnClose";
            this.btnClose.UseVisualStyleBackColor = true;
            this.btnClose.Click += new System.EventHandler(this.btnClose_Click);
            //
            // comboBoxView
            //
            this.comboBoxView.DrawMode = System.Windows.Forms.DrawMode.OwnerDrawFixed;
            this.comboBoxView.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBoxView.FormattingEnabled = true;
            resources.ApplyResources(this.comboBoxView, "comboBoxView");
            this.comboBoxView.Name = "comboBoxView";
            this.comboBoxView.DrawItem += new System.Windows.Forms.DrawItemEventHandler(this.comboBox_DrawItem);
            this.comboBoxView.SelectedIndexChanged += new System.EventHandler(this.comboBoxView_SelectedIndexChanged);
            this.comboBoxView.DropDownClosed += new System.EventHandler(this.comboBox_DropDownClosed);
            //
            // labelShow
            //
            resources.ApplyResources(this.labelShow, "labelShow");
            this.labelShow.Name = "labelShow";
            //
            // panelHosts
            //
            resources.ApplyResources(this.panelHosts, "panelHosts");
            this.panelHosts.Controls.Add(this.hostComboBox);
            this.panelHosts.Controls.Add(this.labelHostCombo);
            this.panelHosts.Name = "panelHosts";
            //
            // panelShow
            //
            resources.ApplyResources(this.panelShow, "panelShow");
            this.panelShow.Controls.Add(this.labelShow);
            this.panelShow.Controls.Add(this.comboBoxView);
            this.panelShow.Name = "panelShow";
            //
            // flowLayoutPanel1
            //
            resources.ApplyResources(this.flowLayoutPanel1, "flowLayoutPanel1");
            this.flowLayoutPanel1.Controls.Add(this.panelHosts);
            this.flowLayoutPanel1.Controls.Add(this.panelShow);
            this.flowLayoutPanel1.Name = "flowLayoutPanel1";
            //
            // timer1
            //
            this.timer1.Interval = 5000;
            this.timer1.Tick += new System.EventHandler(this.timer1_Tick);
            //
            // lblExported
            //
            resources.ApplyResources(this.lblExported, "lblExported");
            this.lblExported.BackColor = System.Drawing.Color.Khaki;
            this.lblExported.Name = "lblExported";
            //
            // flowLayoutPanelButtons
            //
            resources.ApplyResources(this.flowLayoutPanelButtons, "flowLayoutPanelButtons");
            this.flowLayoutPanelButtons.Controls.Add(this.btnClose);
            this.flowLayoutPanelButtons.Controls.Add(this.btnSubscribe);
            this.flowLayoutPanelButtons.Controls.Add(this.btnRunReport);
            this.flowLayoutPanelButtons.Controls.Add(this.btnLaterReport);
            this.flowLayoutPanelButtons.Name = "flowLayoutPanelButtons";
            //
            // flowLayoutPanel2
            //
            resources.ApplyResources(this.flowLayoutPanel2, "flowLayoutPanel2");
            this.flowLayoutPanel2.Controls.Add(this.panelUsers);
            this.flowLayoutPanel2.Controls.Add(this.panelObjects);
            this.flowLayoutPanel2.Name = "flowLayoutPanel2";
            //
            // panelUsers
            //
            resources.ApplyResources(this.panelUsers, "panelUsers");
            this.panelUsers.Controls.Add(this.userComboBox);
            this.panelUsers.Controls.Add(this.labelUsers);
            this.panelUsers.Name = "panelUsers";
            //
            // userComboBox
            //
            this.userComboBox.DrawMode = System.Windows.Forms.DrawMode.OwnerDrawFixed;
            this.userComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.userComboBox.FormattingEnabled = true;
            resources.ApplyResources(this.userComboBox, "userComboBox");
            this.userComboBox.Name = "userComboBox";
            this.userComboBox.DrawItem += new System.Windows.Forms.DrawItemEventHandler(this.comboBox_DrawItem);
            this.userComboBox.DropDownClosed += new System.EventHandler(this.comboBox_DropDownClosed);
            this.userComboBox.TextChanged += new System.EventHandler(this.comboBox_SelectionChanged);
            this.userComboBox.Leave += new System.EventHandler(this.comboBox_DropDownClosed);
            //
            // labelUsers
            //
            resources.ApplyResources(this.labelUsers, "labelUsers");
            this.labelUsers.Name = "labelUsers";
            //
            // panelObjects
            //
            resources.ApplyResources(this.panelObjects, "panelObjects");
            this.panelObjects.Controls.Add(this.labelObjects);
            this.panelObjects.Controls.Add(this.objectComboBox);
            this.panelObjects.Name = "panelObjects";
            //
            // labelObjects
            //
            resources.ApplyResources(this.labelObjects, "labelObjects");
            this.labelObjects.Name = "labelObjects";
            //
            // objectComboBox
            //
            this.objectComboBox.DrawMode = System.Windows.Forms.DrawMode.OwnerDrawFixed;
            this.objectComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.objectComboBox.FormattingEnabled = true;
            resources.ApplyResources(this.objectComboBox, "objectComboBox");
            this.objectComboBox.Name = "objectComboBox";
            this.objectComboBox.DrawItem += new System.Windows.Forms.DrawItemEventHandler(this.comboBox_DrawItem);
            this.objectComboBox.DropDownClosed += new System.EventHandler(this.comboBox_DropDownClosed);
            this.objectComboBox.TextChanged += new System.EventHandler(this.comboBox_SelectionChanged);
            this.objectComboBox.Leave += new System.EventHandler(this.comboBox_DropDownClosed);
            //
            // WlbReportView
            //
            resources.ApplyResources(this, "$this");
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
            this.Controls.Add(this.flowLayoutPanel2);
            this.Controls.Add(this.flowLayoutPanelButtons);
            this.Controls.Add(this.flowLayoutPanel1);
            this.Controls.Add(this.lblExported);
            this.Controls.Add(this.labelEndDate);
            this.Controls.Add(this.EndDatePicker);
            this.Controls.Add(this.labelStartDate);
            this.Controls.Add(this.StartDatePicker);
            this.Controls.Add(this.reportViewer1);
            this.Name = "WlbReportView";
            this.Load += new System.EventHandler(this.ReportView_Load);
            this.panelHosts.ResumeLayout(false);
            this.panelHosts.PerformLayout();
            this.panelShow.ResumeLayout(false);
            this.flowLayoutPanel1.ResumeLayout(false);
            this.flowLayoutPanel1.PerformLayout();
            this.flowLayoutPanelButtons.ResumeLayout(false);
            this.flowLayoutPanel2.ResumeLayout(false);
            this.flowLayoutPanel2.PerformLayout();
            this.panelUsers.ResumeLayout(false);
            this.panelUsers.PerformLayout();
            this.panelObjects.ResumeLayout(false);
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Control fields. Note the mixed accessibility: the public members are
        // touched from outside this class (presumably by the non-designer partial
        // or sibling WLB code — TODO confirm before changing visibility).
        public System.Windows.Forms.Label labelHostCombo;
        public System.Windows.Forms.ComboBox hostComboBox;
        public System.Windows.Forms.Button btnRunReport;
        public System.Windows.Forms.Button btnLaterReport;
        private System.Windows.Forms.Label labelEndDate;
        private System.Windows.Forms.DateTimePicker EndDatePicker;
        private System.Windows.Forms.Label labelStartDate;
        private System.Windows.Forms.DateTimePicker StartDatePicker;
        private Microsoft.Reporting.WinForms.ReportViewer reportViewer1;
        public System.Windows.Forms.Button btnSubscribe;
        public System.Windows.Forms.Button btnClose;
        private System.Windows.Forms.ComboBox comboBoxView;
        private System.Windows.Forms.Label labelShow;
        private System.Windows.Forms.Panel panelHosts;
        private System.Windows.Forms.Panel panelShow;
        private System.Windows.Forms.FlowLayoutPanel flowLayoutPanel1;
        private System.Windows.Forms.Timer timer1;
        private System.Windows.Forms.Label lblExported;
        private System.Windows.Forms.FlowLayoutPanel flowLayoutPanelButtons;
        private System.Windows.Forms.ToolTip toolTip1;
        private System.Windows.Forms.FlowLayoutPanel flowLayoutPanel2;
        private System.Windows.Forms.Panel panelUsers;
        public XenAdmin.Controls.LongStringComboBox userComboBox;
        public System.Windows.Forms.Label labelUsers;
        private System.Windows.Forms.Panel panelObjects;
        private System.Windows.Forms.Label labelObjects;
        private System.Windows.Forms.ComboBox objectComboBox;
    }
}
// Copyright (c) MOSA Project. Licensed under the New BSD License.

using Mosa.Compiler.Framework;
using Mosa.Compiler.Framework.Platform;

namespace Mosa.Platform.x86
{
    /// <summary>
    /// Base class for x86 transformation stages. Provides a default (empty)
    /// implementation of every <see cref="IX86Visitor"/> method so that concrete
    /// stages only override the instructions they care about, plus the shared
    /// <see cref="GetMove"/> instruction-selection helper.
    /// </summary>
    public abstract class BaseTransformationStage : BasePlatformTransformationStage, IX86Visitor
    {
        /// <summary>Platform name reported by the base transformation stage.</summary>
        protected override string Platform { get { return "x86"; } }

        /// <summary>
        /// Selects the x86 move instruction appropriate for copying
        /// <paramref name="Source"/> into <paramref name="Destination"/>,
        /// based on the R4 (single) / R8 (double) flags of the two operands.
        /// </summary>
        /// <param name="Destination">Destination operand.</param>
        /// <param name="Source">Source operand.</param>
        /// <returns>
        /// Movsd/Movss for same-precision FP moves (including FP-to-memory),
        /// Cvtss2sd/Cvtsd2ss for cross-precision moves, and Mov for everything else.
        /// </returns>
        public static X86Instruction GetMove(Operand Destination, Operand Source)
        {
            if (Source.IsR8 && Destination.IsR8)
            {
                // double -> double
                return X86.Movsd;
            }
            else if (Source.IsR4 && Destination.IsR4)
            {
                // float -> float
                return X86.Movss;
            }
            else if (Source.IsR4 && Destination.IsR8)
            {
                // float -> double: convert while moving
                return X86.Cvtss2sd;
            }
            else if (Source.IsR8 && Destination.IsR4)
            {
                // double -> float: convert while moving
                return X86.Cvtsd2ss;
            }
            else if (Source.IsR8 && Destination.IsMemoryAddress)
            {
                return X86.Movsd;
            }
            else if (Source.IsR4 && Destination.IsMemoryAddress)
            {
                return X86.Movss;
            }
            else
            {
                // Integer / pointer / everything else.
                return X86.Mov;
            }
        }

        #region IX86Visitor

        // Default no-op visitor implementations; derived stages override as needed.
        // (Naming is inconsistent — e.g. AddSs vs. SubSS — but the names are fixed
        // by the IX86Visitor interface and cannot be changed here.)
        public virtual void Add(Context context) { }
        public virtual void Adc(Context context) { }
        public virtual void And(Context context) { }
        public virtual void Call(Context context) { }
        public virtual void DirectCompare(Context context) { }
        public virtual void Cmp(Context context) { }
        public virtual void Cmov(Context context) { }
        public virtual void Or(Context context) { }
        public virtual void Xor(Context context) { }
        public virtual void PXor(Context context) { }
        public virtual void Sub(Context context) { }
        public virtual void Sbb(Context context) { }
        public virtual void Mul(Context context) { }
        public virtual void Div(Context context) { }
        public virtual void IDiv(Context context) { }
        public virtual void IMul(Context context) { }
        public virtual void AddSs(Context context) { }
        public virtual void SubSS(Context context) { }
        public virtual void SubSD(Context context) { }
        public virtual void MulSS(Context context) { }
        public virtual void MulSD(Context context) { }
        public virtual void DivSS(Context context) { }
        public virtual void DivSD(Context context) { }
        public virtual void Sar(Context context) { }
        public virtual void Sal(Context context) { }
        public virtual void Shl(Context context) { }
        public virtual void Shr(Context context) { }
        public virtual void Rcr(Context context) { }
        public virtual void Cvtsi2ss(Context context) { }
        public virtual void Cvtsi2sd(Context context) { }
        public virtual void Cvtsd2ss(Context context) { }
        public virtual void Cvtss2sd(Context context) { }
        public virtual void Cvttsd2si(Context context) { }
        public virtual void Cvttss2si(Context context) { }
        public virtual void Setcc(Context context) { }
        public virtual void Cdq(Context context) { }
        public virtual void Shld(Context context) { }
        public virtual void Shrd(Context context) { }
        public virtual void Comisd(Context context) { }
        public virtual void Comiss(Context context) { }
        public virtual void Ucomisd(Context context) { }
        public virtual void Ucomiss(Context context) { }
        public virtual void Jns(Context context) { }
        public virtual void Branch(Context context) { }
        public virtual void Jump(Context context) { }
        public virtual void BochsDebug(Context context) { }
        public virtual void Cli(Context context) { }
        public virtual void Cld(Context context) { }
        public virtual void CmpXchg(Context context) { }
        public virtual void CpuId(Context context) { }
        public virtual void Hlt(Context context) { }
        public virtual void Invlpg(Context context) { }
        public virtual void In(Context context) { }
        public virtual void Inc(Context context) { }
        public virtual void Dec(Context context) { }
        public virtual void Int(Context context) { }
        public virtual void Iretd(Context context) { }
        public virtual void Lea(Context context) { }
        public virtual void Lgdt(Context context) { }
        public virtual void Lidt(Context context) { }
        public virtual void Lock(Context context) { }
        public virtual void Neg(Context context) { }
        public virtual void Mov(Context context) { }
        public virtual void Movsx(Context context) { }
        public virtual void Movss(Context context) { }
        public virtual void Movsd(Context context) { }
        public virtual void MovAPS(Context context) { }
        public virtual void Movzx(Context context) { }
        public virtual void Nop(Context context) { }
        public virtual void Out(Context context) { }
        public virtual void Pause(Context context) { }
        public virtual void Pop(Context context) { }
        public virtual void Popad(Context context) { }
        public virtual void Popfd(Context context) { }
        public virtual void Push(Context context) { }
        public virtual void Pushad(Context context) { }
        public virtual void Pushfd(Context context) { }
        public virtual void Rdmsr(Context context) { }
        public virtual void Rdpmc(Context context) { }
        public virtual void Rdtsc(Context context) { }
        public virtual void Rep(Context context) { }
        public virtual void Sti(Context context) { }
        public virtual void Stosb(Context context) { }
        public virtual void Stosd(Context context) { }
        public virtual void Xchg(Context context) { }
        public virtual void Not(Context context) { }
        public virtual void RoundSS(Context context) { }
        public virtual void RoundSD(Context context) { }
        public virtual void Test(Context context) { }

        #endregion IX86Visitor
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace HTLib2.Bioinfo
{
    public partial class Universe
    {
        // Conjugate-gradient energy minimization helpers.
        // See http://en.wikipedia.org/wiki/Energy_minimization
        //
        // Algorithm (Fletcher–Reeves style update):
        //   coord[i]_(t) = coord[i]_(t-1) + k * h[i]_(t)
        //   h[i]_(t)     = F[i]_(t) + r[i]_(t-1) * h[i]_(t-1)
        //   F[i]_(t)     = force on atom i at coord[i]_(t)
        //   r[i]_(t-1)   = F[i]_(t) . F[i]_(t) / F[i]_(t-1) . F[i]_(t-1)
        // where coord[i] is the coordinate of atom i, F.F is an inner product,
        // and h[i]_0 = 0 (first step is identical to steepest descent).
        //
        // Iteration outline:
        //   initial (step 1): h = 0, forces0 = forces
        //   step n > 1:       a. get forces
        //                     b. r = forces . forces / forces0 . forces0
        //                     c. h = forces + r * h
        //                     d. coords = coords + k * h
        //
        // Procedure:
        //   0. initial configuration of atoms
        //   1. save the position of atoms
        //   2. compute potential energy and net forces
        //   3. if every force is (near) zero -> END
        //   4. move atoms with conjugate gradient
        //   5. predict energy/forces
        //   6. if prediction within limit -> goto 1
        //   7. otherwise restore saved configuration
        //   8. move atoms with simple gradient, goto 1

        /// Adds movesAdd into the per-atom move accumulators, then applies only
        /// those accumulated moves that are "large enough" (squared length above
        /// an adaptive threshold) to movable atoms; applied moves are reset to zero.
        static Vector[] AddConditional(Vector[] coords, bool[] movables, Vector[] moves, Vector[] movesAdd, double leastMove)
        {
            HDebug.Assert(coords.Length == moves.Length, moves.Length == movables.Length);
            int countUpdated = 0;   // NOTE(review): counted but never read — debug leftover?
            int size = coords.Length;
            Vector[] coordsUpdated = new Vector[size];
            double leastMove2 = leastMove*leastMove;
            double[] dist2s = new double[size];
            for(int i=0; i<size; i++)
            {
                // NOTE(review): "moves[i]*0" zeroes the previously accumulated move,
                // so this is effectively moves[i] = movesAdd[i]. Looks like a
                // deliberately disabled accumulation term — confirm before changing.
                moves[i] = moves[i]*0 + movesAdd[i];
                dist2s[i] = moves[i].Dist2;
            }
            {
                // Adaptive threshold: never larger than the 33rd-percentile move,
                // so at least ~2/3 of candidate moves pass the size test.
                double[] sorteds = dist2s.HSort().ToArray();
                leastMove2 = Math.Min(leastMove2, sorteds[sorteds.Length/3]);
            }
            for(int i=0; i<size; i++)
            {
                if(movables[i] && (dist2s[i] > leastMove2))
                {
                    countUpdated++;
                    coordsUpdated[i] = coords[i] + moves[i];
                    moves[i].SetZero();   // consumed: reset accumulator (mutates caller's array)
                }
                else
                    coordsUpdated[i] = coords[i];
            }
            return coordsUpdated;
        }

        /// Returns coords + movements for movable atoms, original coords otherwise.
        static Vector[] AddConditional(Vector[] coords, Vector[] movements, bool[] movables)
        {
            int countupdated = 0;   // NOTE(review): counted but never read — debug leftover?
            HDebug.Assert(coords.Length == movements.Length, movements.Length == movables.Length);
            int size = coords.Length;
            Vector[] coordsUpdated = new Vector[size];
            for(int i=0; i<size; i++)
            {
                if(movables[i])
                {
                    countupdated++;
                    coordsUpdated[i] = coords[i] + movements[i];
                }
                else
                    coordsUpdated[i] = coords[i];
            }
            return coordsUpdated;
        }

        /// Infinity norm of the forces, restricted to movable atoms.
        static double NormInf(Vector[] forces, bool[] movables)
        {
            HDebug.Assert(forces.Length == movables.Length);
            int size = forces.Length;
            List<double> norms = new List<double>();
            for(int i=0; i<size; i++)
                if(movables[i])
                    norms.Add(forces[i].NormInf());
            return (new Vector(norms.ToArray())).NormInf();
        }

        /// p-norm of the forces, restricted to movable atoms.
        static double Norm(int p, Vector[] forces, bool[] movables)
        {
            HDebug.Assert(forces.Length == movables.Length);
            int size = forces.Length;
            List<double> norms = new List<double>();
            for(int i=0; i<size; i++)
                if(movables[i])
                    norms.Add(forces[i].Norm(p));
            return (new Vector(norms.ToArray())).Norm(p);
        }

        /// Convenience overload: runs CG minimization logging to a file-backed
        /// MinimizeLogger_PrintEnergyForceMag. Delegates to Minimize_ConjugateGradient_v1.
        public int Minimize_ConjugateGradient(List<ForceField.IForceField> frcflds
            ,string  logpath           = ""
            ,int     iterInitial       = 0
            ,double? k                 = null
            ,int?    max_iteration     = null
            ,double  max_atom_movement = 0.1
            ,double  threshold         = 0.001
            ,int     randomPurturb     = 0
            ,bool[]  atomsMovable      = null
            ,InfoPack extra            = null
            )
        {
            IMinimizeLogger logger = new MinimizeLogger_PrintEnergyForceMag(logpath);
            return Minimize_ConjugateGradient_v1(iterInitial, frcflds, k, max_atom_movement, max_iteration, threshold, randomPurturb, atomsMovable, logger, extra, null);
        }

        /// Convenience overload: caller supplies the logger; no iteration cap
        /// (max_iteration is passed as null to Minimize_ConjugateGradient_v1).
        public int Minimize_ConjugateGradient(List<ForceField.IForceField> frcflds
            ,IMinimizeLogger logger
            ,int     iterInitial       = 0
            ,double? k                 = null
            ,double  max_atom_movement = 0.1
            ,double  threshold         = 0.001
            ,int     randomPurturb     = 0
            ,bool[]  atomsMovable      = null
            ,InfoPack extra            = null
            )
        {
            return Minimize_ConjugateGradient_v1(iterInitial, frcflds, k, max_atom_movement, null, threshold, randomPurturb, atomsMovable, logger, extra, null);
        }

        //public int Minimize_ConjugateGradient(List<ForceField.IForceField> frcflds)
        //{
        //    double k = 0.0001;
        //    double threshold = 0.001;
        //    double max_atom_movement = 0.1;
        //    IMinimizeLogger logger = null;
        //    int randomPurturb = 0; // no random purturbation
        //    bool[] atomsMovable = null;
        //    return Minimize_ConjugateGradient_v1(0, frcflds, k, max_atom_movement, threshold, randomPurturb, atomsMovable, logger);
        //}
        //public int Minimize_ConjugateGradient(List<ForceField.IForceField> frcflds, double k, double threshold)
        //{
        //    IMinimizeLogger logger = null;
        //    double max_atom_movement = 0.1;
        //    int randomPurturb = 0; // no random purturbation
        //    bool[] atomsMovable = null;
        //    return Minimize_ConjugateGradient_v1(0, frcflds, k, max_atom_movement, threshold, randomPurturb, atomsMovable, logger);
        //}
        //public int Minimize_ConjugateGradient(List<ForceField.IForceField> frcflds, double k, double threshold, int randomPurturb)
        //{
        //    IMinimizeLogger logger = null;
        //    double max_atom_movement = 0.1;
        //    bool[] atomsMovable = null;
        //    return Minimize_ConjugateGradient_v1(0, frcflds, k, max_atom_movement, threshold, randomPurturb, atomsMovable, logger);
        //}

        /// Writes one fixed-format log line per iteration: energy plus the 1-, 2-
        /// and infinity-norms of the forces on movable atoms. No-op if logwriter is null.
        public static void Minimize_ConjugateGradient_WriteLog(System.IO.TextWriter logwriter, long iter, double energy, Vectors forces, bool[] movables, string message="")
        {
            if(logwriter == null)
                return;
            double forces_NormInf = NormInf(forces, movables);
            double forces_Norm1   = Norm(1, forces, movables);
            double forces_Norm2   = Norm(2, forces, movables);
            //logwriter.Write("" + iter + "-iter: energy(" + energy + ")");
            //logwriter.Write( ", force-norm-1(" + forces_Norm1 + ")");
            //logwriter.Write( ", force-norm-2(" + forces_Norm2 + ")");
            //logwriter.Write( ", force-norm-inf(" + forces_NormInf + ")");
            logwriter.Write("{0,2:####}-iter: energy({1:0.00000000000000})", iter, energy);
            logwriter.Write( ", force-norm-1({0:0.00000000000000})",forces_Norm1);
            logwriter.Write( ", force-norm-2({0:0.00000000000000})",forces_Norm2);
            logwriter.Write( ", force-norm-inf({0:0.00000000000000})",forces_NormInf);
            if(message.Length != 0)
                logwriter.Write(" - " + message);
            logwriter.WriteLine();
        }

        /// Progress callback interface for the minimizers.
        public interface IMinimizeLogger
        {
            // Called once per iteration with the current state.
            void log(long iter, Vectors coords, double energy, Vectors forces, bool[] movables, string message="");
            // Called once per iteration to optionally persist a trajectory snapshot.
            void logTrajectory(Universe univ, long iter, Vectors coords);
        }

        /// Null-object logger: discards everything.
        public class MinimizeLogger : IMinimizeLogger
        {
            public void log(long iter, Vectors coords, double energy, Vectors forces, bool[] movables, string message="")
            {
            }
            public void logTrajectory(Universe univ, long iter, Vectors coords)
            {
            }
        }

        /// Logger that prints energy/force magnitudes to stderr and periodically
        /// dumps PDB trajectory frames (plus a PyMOL animation script) under logpath.
        public class MinimizeLogger_PrintEnergyForceMag : IMinimizeLogger
        {
            string logpath = null;                 // trajectory output directory ("" => "output-<pdbid>", null => no trajectory)
            public int logTrajectoryFrequency = 10; // write a PDB frame every N iterations

            public MinimizeLogger_PrintEnergyForceMag(string logpath = null)
            {
                this.logpath = logpath;
            }

            // Overload without the movables mask (logs norms over all atoms).
            public void log(long iter, Vectors coords, double energy, Vectors forces, string message="")
            {
                Minimize_ConjugateGradient_WriteLog(System.Console.Error, iter, energy, forces, null, message);
            }

            public void log(long iter, Vectors coords, double energy, Vectors forces, bool[] movables, string message="")
            {
                Minimize_ConjugateGradient_WriteLog(System.Console.Error, iter, energy, forces, movables, message);
            }

            public void logTrajectory(Universe univ, long iter, Vectors coords)
            {
                if(logpath == null)
                    return;
                if(iter % logTrajectoryFrequency != 0)
                    return;
                string pdbid = "";
                if(univ.refs.ContainsKey("pdbid"))
                    pdbid = univ.refs["pdbid"].String;
                string dir = logpath;
                if(dir == "")
                    dir = "output-"+pdbid;
                System.IO.Directory.CreateDirectory(dir);
                string pdbname = string.Format("mini.conju.{0:D5}.pdb", iter);
                univ.pdb.ToFile(dir+"\\"+pdbname, coords.ToArray());
                // Checkpoint full coordinates every 100 iterations.
                if(iter %100 == 0)
                    univ.SaveCoords(dir+"\\conformation.coords", coords);
                string pmlloadname = (pdbid != "") ? pdbid : "eqlib";
                try
                {
                    // Append a "load" line to the PyMOL animation script.
                    // NOTE(review): exceptions are swallowed by design (best-effort
                    // logging must not abort minimization), but nothing is reported.
                    System.IO.File.AppendAllLines(dir+"\\mini.conju.[animation].pml", new string[] { "load "+pdbname+", "+pmlloadname });
                }
                catch(Exception)
                {
                }
            }
        }
    }
}
/* * FastObjectListView - A listview that behaves like an ObjectListView but has the speed of a virtual list * * Author: Phillip Piper * Date: 27/09/2008 9:15 AM * * Change log: * 2014-10-15 JPP - Fire Filter event when applying filters * v2.8 * 2012-06-11 JPP - Added more efficient version of FilteredObjects * v2.5.1 * 2011-04-25 JPP - Fixed problem with removing objects from filtered or sorted list * v2.4 * 2010-04-05 JPP - Added filtering * v2.3 * 2009-08-27 JPP - Added GroupingStrategy * - Added optimized Objects property * v2.2.1 * 2009-01-07 JPP - Made all public and protected methods virtual * 2008-09-27 JPP - Separated from ObjectListView.cs * * Copyright (C) 2006-2014 Phillip Piper * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * If you wish to use this code in a closed source application, please contact phillip.piper@gmail.com. */ using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Windows.Forms; namespace BrightIdeasSoftware { /// <summary> /// A FastObjectListView trades function for speed. /// </summary> /// <remarks> /// <para>On my mid-range laptop, this view builds a list of 10,000 objects in 0.1 seconds, /// as opposed to a normal ObjectListView which takes 10-15 seconds. 
Lists of up to 50,000 items should be /// able to be handled with sub-second response times even on low end machines.</para> /// <para> /// A FastObjectListView is implemented as a virtual list with many of the virtual modes limits (e.g. no sorting) /// fixed through coding. There are some functions that simply cannot be provided. Specifically, a FastObjectListView cannot: /// <list type="bullet"> /// <item><description>use Tile view</description></item> /// <item><description>show groups on XP</description></item> /// </list> /// </para> /// </remarks> public class FastObjectListView : VirtualObjectListView { /// <summary> /// Make a FastObjectListView /// </summary> public FastObjectListView() { this.VirtualListDataSource = new FastObjectListDataSource(this); this.GroupingStrategy = new FastListGroupingStrategy(); } /// <summary> /// Gets the collection of objects that survive any filtering that may be in place. /// </summary> [Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)] public override IEnumerable FilteredObjects { get { // This is much faster than the base method return ((FastObjectListDataSource)this.VirtualListDataSource).FilteredObjectList; } } /// <summary> /// Get/set the collection of objects that this list will show /// </summary> /// <remarks> /// <para> /// The contents of the control will be updated immediately after setting this property. /// </para> /// <para>This method preserves selection, if possible. Use SetObjects() if /// you do not want to preserve the selection. 
/// Preserving selection is the slowest part of this
/// code and performance is O(n) where n is the number of selected rows.</para>
/// <para>This method is not thread safe.</para>
/// </remarks>
[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public override IEnumerable Objects {
    get {
        // This is much faster than the base method: the data source already
        // holds the full model list, so we can hand it back directly.
        return ((FastObjectListDataSource)this.VirtualListDataSource).ObjectList;
    }
    set { base.Objects = value; }
}

/// <summary>
/// Move the given collection of objects to the given index.
/// </summary>
/// <remarks>This operation only makes sense on non-grouped ObjectListViews.</remarks>
/// <param name="index">Index before which the objects will be inserted</param>
/// <param name="modelObjects">The models to move</param>
public override void MoveObjects(int index, ICollection modelObjects) {
    // Re-invoke on the UI thread if we were called from another thread.
    if (this.InvokeRequired) {
        this.Invoke((MethodInvoker)delegate() { this.MoveObjects(index, modelObjects); });
        return;
    }

    // If any object that is going to be moved is before the point where the insertion
    // will occur, then we have to reduce the location of our insertion point
    int displacedObjectCount = 0;
    foreach (object modelObject in modelObjects) {
        int i = this.IndexOf(modelObject);
        if (i >= 0 && i <= index)
            displacedObjectCount++;
    }
    index -= displacedObjectCount;

    // Batch the remove/insert pair so the control repaints only once.
    this.BeginUpdate();
    try {
        this.RemoveObjects(modelObjects);
        this.InsertObjects(index, modelObjects);
    } finally {
        this.EndUpdate();
    }
}

/// <summary>
/// Remove any sorting and revert to the given order of the model objects
/// </summary>
/// <remarks>To be really honest, Unsort() doesn't work on FastObjectListViews since
/// the original ordering of model objects is lost when Sort() is called. So this method
/// effectively just turns off sorting.</remarks>
public override void Unsort() {
    this.ShowGroups = false;
    this.PrimarySortColumn = null;
    this.PrimarySortOrder = SortOrder.None;
    this.SetObjects(this.Objects);
}
}

/// <summary>
/// Provide a data source for a FastObjectListView
/// </summary>
/// <remarks>
/// This class isn't intended to be used directly, but it is left as a public
/// class just in case someone wants to subclass it.
/// </remarks>
public class FastObjectListDataSource : AbstractVirtualListDataSource
{
    /// <summary>
    /// Create a FastObjectListDataSource
    /// </summary>
    /// <param name="listView">The FastObjectListView this source will feed</param>
    public FastObjectListDataSource(FastObjectListView listView)
        : base(listView) {
    }

    #region IVirtualListDataSource Members

    /// <summary>
    /// Get n'th object
    /// </summary>
    /// <param name="n">Zero-based index into the filtered list</param>
    /// <returns>The model at that index, or null if n is out of range</returns>
    public override object GetNthObject(int n) {
        if (n >= 0 && n < this.filteredObjectList.Count)
            return this.filteredObjectList[n];
        return null;
    }

    /// <summary>
    /// How many items are in the data source (after filtering)
    /// </summary>
    /// <returns>Count of the filtered list</returns>
    public override int GetObjectCount() {
        return this.filteredObjectList.Count;
    }

    /// <summary>
    /// Get the index of the given model
    /// </summary>
    /// <param name="model">The model to locate</param>
    /// <returns>Its index in the filtered list, or -1 if not present</returns>
    public override int GetObjectIndex(object model) {
        int index;
        // objectsToIndexMap gives O(1) lookup instead of a linear scan.
        if (model != null && this.objectsToIndexMap.TryGetValue(model, out index))
            return index;
        return -1;
    }

    /// <summary>
    /// Find the first row whose cell text in the given column starts with the given text.
    /// </summary>
    /// <param name="text">The prefix to match (case-insensitive, current culture)</param>
    /// <param name="first">Index at which to start searching</param>
    /// <param name="last">Index at which to stop (inclusive)</param>
    /// <param name="column">The column whose string value is matched</param>
    /// <returns>The display-order index of the first match, or -1</returns>
    public override int SearchText(string text, int first, int last, OLVColumn column) {
        // When first > last, the search runs backwards.
        if (first <= last) {
            for (int i = first; i <= last; i++) {
                string data = column.GetStringValue(this.listView.GetNthItemInDisplayOrder(i).RowObject);
                if (data.StartsWith(text, StringComparison.CurrentCultureIgnoreCase))
                    return i;
            }
        } else {
            for (int i = first; i >= last; i--) {
                string data = column.GetStringValue(this.listView.GetNthItemInDisplayOrder(i).RowObject);
                if (data.StartsWith(text, StringComparison.CurrentCultureIgnoreCase))
                    return i;
            }
        }
        return -1;
    }

    /// <summary>
    /// Sort both the full and filtered lists by the given column.
    /// </summary>
    /// <param name="column">Primary sort column</param>
    /// <param name="sortOrder">Primary sort order; SortOrder.None leaves order untouched</param>
    public override void Sort(OLVColumn column, SortOrder sortOrder) {
        if (sortOrder != SortOrder.None) {
            ModelObjectComparer comparer = new ModelObjectComparer(column, sortOrder, this.listView.SecondarySortColumn, this.listView.SecondarySortOrder);
            this.fullObjectList.Sort(comparer);
            this.filteredObjectList.Sort(comparer);
        }
        // Row positions changed, so the model->index map must be rebuilt.
        this.RebuildIndexMap();
    }

    /// <summary>
    /// Add the given models to the end of the full list, then refilter and reindex.
    /// </summary>
    /// <param name="modelObjects">Models to add; null entries are skipped</param>
    public override void AddObjects(ICollection modelObjects) {
        foreach (object modelObject in modelObjects) {
            if (modelObject != null)
                this.fullObjectList.Add(modelObject);
        }
        this.FilterObjects();
        this.RebuildIndexMap();
    }

    /// <summary>
    /// Insert the given models into the full list at the given index.
    /// </summary>
    /// <param name="index">Position in the full (unfiltered) list</param>
    /// <param name="modelObjects">Models to insert</param>
    public override void InsertObjects(int index, ICollection modelObjects) {
        this.fullObjectList.InsertRange(index, modelObjects);
        this.FilterObjects();
        this.RebuildIndexMap();
    }

    /// <summary>
    /// Remove the given collection of models from this source.
    /// </summary>
    /// <param name="modelObjects">Models to remove; models not present are ignored</param>
    public override void RemoveObjects(ICollection modelObjects) {
        // We have to unselect any object that is about to be deleted
        List<int> indicesToRemove = new List<int>();
        foreach (object modelObject in modelObjects) {
            int i = this.GetObjectIndex(modelObject);
            if (i >= 0)
                indicesToRemove.Add(i);
        }

        // Sort the indices from highest to lowest so that we
        // remove latter ones before earlier ones. In this way, the
        // indices of the rows doesn't change after the deletes.
        indicesToRemove.Sort();
        indicesToRemove.Reverse();
        foreach (int i in indicesToRemove)
            this.listView.SelectedIndices.Remove(i);

        // Remove the objects from the unfiltered list
        foreach (object modelObject in modelObjects)
            this.fullObjectList.Remove(modelObject);

        this.FilterObjects();
        this.RebuildIndexMap();
    }

    /// <summary>
    /// Replace the contents of this source with the given collection.
    /// </summary>
    /// <param name="collection">The new models; materialized into an ArrayList</param>
    public override void SetObjects(IEnumerable collection) {
        ArrayList newObjects = ObjectListView.EnumerableToArray(collection, true);
        this.fullObjectList = newObjects;
        this.FilterObjects();
        this.RebuildIndexMap();
    }

    /// <summary>
    /// Update/replace the nth object with the given object
    /// </summary>
    /// <param name="index">Index in the filtered list of the model to replace</param>
    /// <param name="modelObject">The replacement model</param>
    public override void UpdateObject(int index, object modelObject) {
        if (index < 0 || index >= this.filteredObjectList.Count)
            return;

        int i = this.fullObjectList.IndexOf(this.filteredObjectList[index]);
        if (i < 0)
            return;

        // Nothing to do when the same instance is already in place.
        if (ReferenceEquals(this.fullObjectList[i], modelObject))
            return;

        this.fullObjectList[i] = modelObject;
        this.filteredObjectList[index] = modelObject;
        // NOTE(review): only the new model is added to the map here; the old
        // model's stale entry survives until the next RebuildIndexMap() — verify
        // whether that is intentional.
        this.objectsToIndexMap[modelObject] = index;
    }

    // fullObjectList: every model given to this source, unfiltered.
    private ArrayList fullObjectList = new ArrayList();
    // filteredObjectList: the subset of fullObjectList that survives the filters.
    private ArrayList filteredObjectList = new ArrayList();
    private IModelFilter modelFilter;
    private IListFilter listFilter;

    #endregion

    #region IFilterableDataSource Members

    /// <summary>
    /// Apply the given filters to this data source. One or both may be null.
    /// </summary>
    /// <param name="iModelFilter">Per-model filter, or null</param>
    /// <param name="iListFilter">Whole-list filter, or null</param>
    public override void ApplyFilters(IModelFilter iModelFilter, IListFilter iListFilter) {
        this.modelFilter = iModelFilter;
        this.listFilter = iListFilter;
        // Re-setting the objects re-runs FilterObjects() with the new filters.
        this.SetObjects(this.fullObjectList);
    }

    #endregion

    #region Implementation

    /// <summary>
    /// Gets the full list of objects being used for this fast list.
    /// This list is unfiltered.
    /// </summary>
    public ArrayList ObjectList {
        get { return fullObjectList; }
    }

    /// <summary>
    /// Gets the list of objects from ObjectList which survive any installed filters.
    /// </summary>
    public ArrayList FilteredObjectList {
        get { return filteredObjectList; }
    }

    /// <summary>
    /// Rebuild the map that remembers which model object is displayed at which line
    /// </summary>
    protected void RebuildIndexMap() {
        this.objectsToIndexMap.Clear();
        for (int i = 0; i < this.filteredObjectList.Count; i++)
            this.objectsToIndexMap[this.filteredObjectList[i]] = i;
    }
    readonly Dictionary<Object, int> objectsToIndexMap = new Dictionary<Object, int>();

    /// <summary>
    /// Build our filtered list from our full list.
    /// </summary>
    protected void FilterObjects() {
        // If this list isn't filtered, we don't need to do anything else
        if (!this.listView.UseFiltering) {
            this.filteredObjectList = new ArrayList(this.fullObjectList);
            return;
        }

        // Tell the world to filter the objects. If they do so, don't do anything else
        // ReSharper disable PossibleMultipleEnumeration
        FilterEventArgs args = new FilterEventArgs(this.fullObjectList);
        this.listView.OnFilter(args);
        if (args.FilteredObjects != null) {
            this.filteredObjectList = ObjectListView.EnumerableToArray(args.FilteredObjects, false);
            return;
        }

        // Apply the list filter first (it sees the whole collection) ...
        IEnumerable objects = (this.listFilter == null) ?
            this.fullObjectList : this.listFilter.Filter(this.fullObjectList);

        // ... then the object filter if there is one
        if (this.modelFilter == null) {
            this.filteredObjectList = ObjectListView.EnumerableToArray(objects, false);
        } else {
            this.filteredObjectList = new ArrayList();
            foreach (object model in objects) {
                if (this.modelFilter.Filter(model))
                    this.filteredObjectList.Add(model);
            }
        }
    }

    #endregion
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.IO;
using System.Net.Security;
using System.Net.Sockets;
using System.Security.Authentication;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

namespace System.Net.Test.Common
{
    /// <summary>
    /// An in-process HTTP/2 server used by loopback tests: it accepts raw socket
    /// connections, performs the HTTP/2 settings exchange, and lets tests drive
    /// frames explicitly via <see cref="Http2LoopbackConnection"/>.
    /// </summary>
    public class Http2LoopbackServer : GenericLoopbackServer, IDisposable
    {
        private Socket _listenSocket;
        private Http2Options _options;
        private Uri _uri;
        private List<Http2LoopbackConnection> _connections = new List<Http2LoopbackConnection>();

        /// <summary>When false (the default), accepting a second live connection throws.</summary>
        public bool AllowMultipleConnections { get; set; }

        /// <summary>The first still-valid accepted connection.</summary>
        private Http2LoopbackConnection Connection
        {
            get
            {
                RemoveInvalidConnections();
                return _connections[0];
            }
        }

        /// <summary>Default timeout used when reading frames from the client.</summary>
        public static readonly TimeSpan Timeout = TimeSpan.FromSeconds(30);

        /// <summary>The URI clients should connect to; reflects the dynamically bound port.</summary>
        public Uri Address
        {
            get
            {
                var localEndPoint = (IPEndPoint)_listenSocket.LocalEndPoint;

                // IPv6 literals must be bracketed inside a URI authority.
                string host = _options.Address.AddressFamily == AddressFamily.InterNetworkV6 ?
                    $"[{localEndPoint.Address}]" :
                    localEndPoint.Address.ToString();

                string scheme = _options.UseSsl ? "https" : "http";

                _uri = new Uri($"{scheme}://{host}:{localEndPoint.Port}/");

                return _uri;
            }
        }

        /// <summary>Creates a server with default <see cref="Http2Options"/>.</summary>
        public static Http2LoopbackServer CreateServer()
        {
            return new Http2LoopbackServer(new Http2Options());
        }

        /// <summary>Creates a server with the supplied options.</summary>
        public static Http2LoopbackServer CreateServer(Http2Options options)
        {
            return new Http2LoopbackServer(options);
        }

        private Http2LoopbackServer(Http2Options options)
        {
            _options = options;
            _listenSocket = new Socket(_options.Address.AddressFamily, SocketType.Stream, ProtocolType.Tcp);
            // Port 0 => let the OS pick a free port; Address exposes the result.
            _listenSocket.Bind(new IPEndPoint(_options.Address, 0));
            _listenSocket.Listen(_options.ListenBacklog);
        }

        private void RemoveInvalidConnections()
        {
            _connections.RemoveAll((c) => c.IsInvalid);
        }

        /// <summary>
        /// Accepts one TCP connection and wraps it in an <see cref="Http2LoopbackConnection"/>.
        /// Does not perform the HTTP/2 settings exchange.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// A connection is already established and <see cref="AllowMultipleConnections"/> is false.
        /// </exception>
        public async Task<Http2LoopbackConnection> AcceptConnectionAsync()
        {
            RemoveInvalidConnections();

            if (!AllowMultipleConnections && _connections.Count != 0)
            {
                throw new InvalidOperationException("Connection already established. Set `AllowMultipleConnections = true` to bypass.");
            }

            Socket connectionSocket = await _listenSocket.AcceptAsync().ConfigureAwait(false);

            Http2LoopbackConnection connection = new Http2LoopbackConnection(connectionSocket, _options);
            _connections.Add(connection);

            return connection;
        }

        /// <summary>
        /// Accepts a connection and completes the HTTP/2 settings exchange, sending the
        /// given entries in the server's initial SETTINGS frame.
        /// </summary>
        public async Task<Http2LoopbackConnection> EstablishConnectionAsync(params SettingsEntry[] settingsEntries)
        {
            // Forward settingsEntries: previously this called
            // EstablishConnectionGetSettingsAsync() with no arguments, so any
            // caller-supplied server settings were silently dropped.
            (Http2LoopbackConnection connection, _) = await EstablishConnectionGetSettingsAsync(settingsEntries).ConfigureAwait(false);
            return connection;
        }

        /// <summary>
        /// Accepts a connection, performs the HTTP/2 settings exchange, and returns both
        /// the connection and the client's initial SETTINGS frame.
        /// </summary>
        public async Task<(Http2LoopbackConnection, SettingsFrame)> EstablishConnectionGetSettingsAsync(params SettingsEntry[] settingsEntries)
        {
            Http2LoopbackConnection connection = await AcceptConnectionAsync().ConfigureAwait(false);

            // Receive the initial client settings frame.
            Frame receivedFrame = await connection.ReadFrameAsync(Timeout).ConfigureAwait(false);
            Assert.Equal(FrameType.Settings, receivedFrame.Type);
            Assert.Equal(FrameFlags.None, receivedFrame.Flags);
            Assert.Equal(0, receivedFrame.StreamId);
            var clientSettingsFrame = (SettingsFrame)receivedFrame;

            // Receive the initial client window update frame.
            receivedFrame = await connection.ReadFrameAsync(Timeout).ConfigureAwait(false);
            Assert.Equal(FrameType.WindowUpdate, receivedFrame.Type);
            Assert.Equal(FrameFlags.None, receivedFrame.Flags);
            Assert.Equal(0, receivedFrame.StreamId);

            // Send the initial server settings frame.
            SettingsFrame settingsFrame = new SettingsFrame(settingsEntries);
            await connection.WriteFrameAsync(settingsFrame).ConfigureAwait(false);

            // Send the client settings frame ACK.
            Frame settingsAck = new Frame(0, FrameType.Settings, FrameFlags.Ack, 0);
            await connection.WriteFrameAsync(settingsAck).ConfigureAwait(false);

            // The client will send us a SETTINGS ACK eventually, but not necessarily right away.
            await connection.ExpectSettingsAckAsync();

            return (connection, clientSettingsFrame);
        }

        /// <summary>Closes the listen socket. Accepted connections are not disposed here.</summary>
        public override void Dispose()
        {
            if (_listenSocket != null)
            {
                _listenSocket.Dispose();
                _listenSocket = null;
            }
        }

        //
        // GenericLoopbackServer implementation
        //

        /// <summary>
        /// Handles a single request end-to-end: establish, read the request, respond,
        /// then shut the connection down.
        /// </summary>
        public override async Task<HttpRequestData> HandleRequestAsync(HttpStatusCode statusCode = HttpStatusCode.OK, IList<HttpHeaderData> headers = null, string content = "")
        {
            Http2LoopbackConnection connection = await EstablishConnectionAsync().ConfigureAwait(false);

            (int streamId, HttpRequestData requestData) = await connection.ReadAndParseRequestHeaderAsync().ConfigureAwait(false);

            // We are about to close the connection, after we send the response.
            // So, send a GOAWAY frame now so the client won't inadvertently try to reuse the connection.
            await connection.SendGoAway(streamId).ConfigureAwait(false);

            if (string.IsNullOrEmpty(content))
            {
                await connection.SendResponseHeadersAsync(streamId, endStream: true, statusCode, isTrailingHeader: false, headers: headers).ConfigureAwait(false);
            }
            else
            {
                await connection.SendResponseHeadersAsync(streamId, endStream: false, statusCode, isTrailingHeader: false, headers: headers).ConfigureAwait(false);
                await connection.SendResponseBodyAsync(streamId, Encoding.ASCII.GetBytes(content)).ConfigureAwait(false);
            }

            await connection.WaitForConnectionShutdownAsync().ConfigureAwait(false);

            return requestData;
        }

        /// <summary>Establishes a connection and hands it to the supplied callback.</summary>
        public override async Task AcceptConnectionAsync(Func<GenericLoopbackConnection, Task> funcAsync)
        {
            using (Http2LoopbackConnection connection = await EstablishConnectionAsync().ConfigureAwait(false))
            {
                await funcAsync(connection).ConfigureAwait(false);
            }
        }

        /// <summary>
        /// Runs a client function and a server function in parallel against a fresh server,
        /// failing fast if either faults.
        /// </summary>
        public static async Task CreateClientAndServerAsync(Func<Uri, Task> clientFunc, Func<Http2LoopbackServer, Task> serverFunc, int timeout = 60_000)
        {
            using (var server = Http2LoopbackServer.CreateServer())
            {
                Task clientTask = clientFunc(server.Address);
                Task serverTask = serverFunc(server);

                await new Task[] { clientTask, serverTask }.WhenAllOrAnyFailed(timeout).ConfigureAwait(false);
            }
        }
    }

    /// <summary>Configuration knobs for <see cref="Http2LoopbackServer"/>.</summary>
    public class Http2Options : GenericLoopbackOptions
    {
        public int ListenBacklog { get; set; } = 1;
        // SSL is used only when the platform supports ALPN and the environment
        // does not force unencrypted loopback.
        public bool UseSsl { get; set; } = PlatformDetection.SupportsAlpn && !Capability.Http2ForceUnencryptedLoopback();
        public SslProtocols SslProtocols { get; set; } = SslProtocols.Tls12;
    }

    /// <summary>Factory that plugs <see cref="Http2LoopbackServer"/> into generic loopback tests.</summary>
    public sealed class Http2LoopbackServerFactory : LoopbackServerFactory
    {
        public static readonly Http2LoopbackServerFactory Singleton = new Http2LoopbackServerFactory();

        /// <summary>Creates a server, runs the callback, and enforces a timeout.</summary>
        public static async Task CreateServerAsync(Func<Http2LoopbackServer, Uri, Task> funcAsync, int millisecondsTimeout = 60_000)
        {
            using (var server = Http2LoopbackServer.CreateServer())
            {
                await funcAsync(server, server.Address).TimeoutAfter(millisecondsTimeout).ConfigureAwait(false);
            }
        }

        /// <summary>Generic overload; only the listen address is taken from <paramref name="options"/>.</summary>
        public override async Task CreateServerAsync(Func<GenericLoopbackServer, Uri, Task> funcAsync, int millisecondsTimeout = 60_000, GenericLoopbackOptions options = null)
        {
            Http2Options http2Options = new Http2Options();
            if (options != null)
            {
                http2Options.Address = options.Address;
            }

            using (var server = Http2LoopbackServer.CreateServer(http2Options))
            {
                await funcAsync(server, server.Address).TimeoutAfter(millisecondsTimeout).ConfigureAwait(false);
            }
        }

        public override bool IsHttp11 => false;
        public override bool IsHttp2 => true;
    }

    /// <summary>HTTP/2 error codes (RFC 7540 section 7).</summary>
    public enum ProtocolErrors
    {
        NO_ERROR = 0x0,
        PROTOCOL_ERROR = 0x1,
        INTERNAL_ERROR = 0x2,
        FLOW_CONTROL_ERROR = 0x3,
        SETTINGS_TIMEOUT = 0x4,
        STREAM_CLOSED = 0x5,
        FRAME_SIZE_ERROR = 0x6,
        REFUSED_STREAM = 0x7,
        CANCEL = 0x8,
        COMPRESSION_ERROR = 0x9,
        CONNECT_ERROR = 0xa,
        ENHANCE_YOUR_CALM = 0xb,
        INADEQUATE_SECURITY = 0xc,
        HTTP_1_1_REQUIRED = 0xd
    }
}
using System;
using System.Collections.Generic;
using Blueprint41;
using Blueprint41.Core;
using Blueprint41.Query;

// Blueprint41 query-model types for the "SalesTerritory" Neo4j label.
// NOTE(review): this file follows the repetitive shape of Blueprint41's code
// generator — presumably generated; confirm before hand-editing.
namespace Domain.Data.Query
{
    public partial class Node
    {
        // Entry point: Node.SalesTerritory starts a query pattern on this label.
        public static SalesTerritoryNode SalesTerritory { get { return new SalesTerritoryNode(); } }
    }

    /// <summary>
    /// Query node for the SalesTerritory entity; exposes typed In/Out relationship starts.
    /// </summary>
    public partial class SalesTerritoryNode : Blueprint41.Query.Node
    {
        // The Neo4j label this node matches.
        protected override string GetNeo4jLabel()
        {
            return "SalesTerritory";
        }

        internal SalesTerritoryNode() { }
        internal SalesTerritoryNode(SalesTerritoryAlias alias, bool isReference = false)
        {
            NodeAlias = alias;
            IsReference = isReference;
        }
        internal SalesTerritoryNode(RELATIONSHIP relationship, DirectionEnum direction, string neo4jLabel = null) : base(relationship, direction, neo4jLabel) { }

        /// <summary>Assigns a typed alias to this node and returns the node for chaining.</summary>
        public SalesTerritoryNode Alias(out SalesTerritoryAlias alias)
        {
            alias = new SalesTerritoryAlias(this);
            NodeAlias = alias;
            return this;
        }
        /// <summary>Reuses an alias created elsewhere in the query.</summary>
        public SalesTerritoryNode UseExistingAlias(AliasResult alias)
        {
            NodeAlias = alias;
            return this;
        }

        // Incoming relationships from this node.
        public SalesTerritoryIn In { get { return new SalesTerritoryIn(this); } }
        public class SalesTerritoryIn
        {
            private SalesTerritoryNode Parent;
            internal SalesTerritoryIn(SalesTerritoryNode parent)
            {
                Parent = parent;
            }
            public IFromIn_SALESTERRITORY_HAS_SALESTERRITORYHISTORY_REL SALESTERRITORY_HAS_SALESTERRITORYHISTORY { get { return new SALESTERRITORY_HAS_SALESTERRITORYHISTORY_REL(Parent, DirectionEnum.In); } }
        }
        // Outgoing relationships from this node.
        public SalesTerritoryOut Out { get { return new SalesTerritoryOut(this); } }
        public class SalesTerritoryOut
        {
            private SalesTerritoryNode Parent;
            internal SalesTerritoryOut(SalesTerritoryNode parent)
            {
                Parent = parent;
            }
            public IFromOut_CUSTOMER_HAS_SALESTERRITORY_REL CUSTOMER_HAS_SALESTERRITORY { get { return new CUSTOMER_HAS_SALESTERRITORY_REL(Parent, DirectionEnum.Out); } }
            public IFromOut_SALESORDERHEADER_CONTAINS_SALESTERRITORY_REL SALESORDERHEADER_CONTAINS_SALESTERRITORY { get { return new SALESORDERHEADER_CONTAINS_SALESTERRITORY_REL(Parent, DirectionEnum.Out); } }
            public IFromOut_SALESPERSON_HAS_SALESTERRITORY_REL SALESPERSON_HAS_SALESTERRITORY { get { return new SALESPERSON_HAS_SALESTERRITORY_REL(Parent, DirectionEnum.Out); } }
            public IFromOut_STATEPROVINCE_HAS_SALESTERRITORY_REL STATEPROVINCE_HAS_SALESTERRITORY { get { return new STATEPROVINCE_HAS_SALESTERRITORY_REL(Parent, DirectionEnum.Out); } }
        }
    }

    /// <summary>
    /// Typed alias for a SalesTerritory node: exposes each entity property as a FieldResult
    /// for use in WHERE/RETURN clauses.
    /// </summary>
    public class SalesTerritoryAlias : AliasResult
    {
        internal SalesTerritoryAlias(SalesTerritoryNode parent)
        {
            Node = parent;
        }

        // Lazily built map of property name -> typed field result.
        // Note: ModifiedDate/Uid property metadata comes from the SchemaBase/Neo4jBase
        // base entities, while the owning entity is still SalesTerritory.
        // Note: SalesYTD/SalesLastYear/CostYTD/CostLastYear are exposed as StringResult here.
        public override IReadOnlyDictionary<string, FieldResult> AliasFields
        {
            get
            {
                if (m_AliasFields == null)
                {
                    m_AliasFields = new Dictionary<string, FieldResult>()
                    {
                        { "Name", new StringResult(this, "Name", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["Name"]) },
                        { "CountryRegionCode", new StringResult(this, "CountryRegionCode", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["CountryRegionCode"]) },
                        { "Group", new StringResult(this, "Group", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["Group"]) },
                        { "SalesYTD", new StringResult(this, "SalesYTD", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["SalesYTD"]) },
                        { "SalesLastYear", new StringResult(this, "SalesLastYear", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["SalesLastYear"]) },
                        { "CostYTD", new StringResult(this, "CostYTD", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["CostYTD"]) },
                        { "CostLastYear", new StringResult(this, "CostLastYear", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["CostLastYear"]) },
                        { "rowguid", new StringResult(this, "rowguid", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SalesTerritory"].Properties["rowguid"]) },
                        { "ModifiedDate", new DateTimeResult(this, "ModifiedDate", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["SchemaBase"].Properties["ModifiedDate"]) },
                        { "Uid", new StringResult(this, "Uid", Datastore.AdventureWorks.Model.Entities["SalesTerritory"], Datastore.AdventureWorks.Model.Entities["Neo4jBase"].Properties["Uid"]) },
                    };
                }
                return m_AliasFields;
            }
        }
        private IReadOnlyDictionary<string, FieldResult> m_AliasFields = null;

        // Continue the pattern from this alias; `true` marks the new node as a reference.
        public SalesTerritoryNode.SalesTerritoryIn In { get { return new SalesTerritoryNode.SalesTerritoryIn(new SalesTerritoryNode(this, true)); } }
        public SalesTerritoryNode.SalesTerritoryOut Out { get { return new SalesTerritoryNode.SalesTerritoryOut(new SalesTerritoryNode(this, true)); } }

        // Each property below is a lazily cached, typed view into AliasFields.
        public StringResult Name
        {
            get
            {
                if ((object)m_Name == null)
                    m_Name = (StringResult)AliasFields["Name"];
                return m_Name;
            }
        }
        private StringResult m_Name = null;
        public StringResult CountryRegionCode
        {
            get
            {
                if ((object)m_CountryRegionCode == null)
                    m_CountryRegionCode = (StringResult)AliasFields["CountryRegionCode"];
                return m_CountryRegionCode;
            }
        }
        private StringResult m_CountryRegionCode = null;
        public StringResult Group
        {
            get
            {
                if ((object)m_Group == null)
                    m_Group = (StringResult)AliasFields["Group"];
                return m_Group;
            }
        }
        private StringResult m_Group = null;
        public StringResult SalesYTD
        {
            get
            {
                if ((object)m_SalesYTD == null)
                    m_SalesYTD = (StringResult)AliasFields["SalesYTD"];
                return m_SalesYTD;
            }
        }
        private StringResult m_SalesYTD = null;
        public StringResult SalesLastYear
        {
            get
            {
                if ((object)m_SalesLastYear == null)
                    m_SalesLastYear = (StringResult)AliasFields["SalesLastYear"];
                return m_SalesLastYear;
            }
        }
        private StringResult m_SalesLastYear = null;
        public StringResult CostYTD
        {
            get
            {
                if ((object)m_CostYTD == null)
                    m_CostYTD = (StringResult)AliasFields["CostYTD"];
                return m_CostYTD;
            }
        }
        private StringResult m_CostYTD = null;
        public StringResult CostLastYear
        {
            get
            {
                if ((object)m_CostLastYear == null)
                    m_CostLastYear = (StringResult)AliasFields["CostLastYear"];
                return m_CostLastYear;
            }
        }
        private StringResult m_CostLastYear = null;
        public StringResult rowguid
        {
            get
            {
                if ((object)m_rowguid == null)
                    m_rowguid = (StringResult)AliasFields["rowguid"];
                return m_rowguid;
            }
        }
        private StringResult m_rowguid = null;
        public DateTimeResult ModifiedDate
        {
            get
            {
                if ((object)m_ModifiedDate == null)
                    m_ModifiedDate = (DateTimeResult)AliasFields["ModifiedDate"];
                return m_ModifiedDate;
            }
        }
        private DateTimeResult m_ModifiedDate = null;
        public StringResult Uid
        {
            get
            {
                if ((object)m_Uid == null)
                    m_Uid = (StringResult)AliasFields["Uid"];
                return m_Uid;
            }
        }
        private StringResult m_Uid = null;
    }
}
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gax = Google.Api.Gax; using gcrv = Google.Cloud.ResourceSettings.V1; using sys = System; namespace Google.Cloud.ResourceSettings.V1 { /// <summary>Resource name for the <c>Setting</c> resource.</summary> public sealed partial class SettingName : gax::IResourceName, sys::IEquatable<SettingName> { /// <summary>The possible contents of <see cref="SettingName"/>.</summary> public enum ResourceNameType { /// <summary>An unparsed resource name.</summary> Unparsed = 0, /// <summary> /// A resource name with pattern <c>projects/{project_number}/settings/{setting_name}</c>. /// </summary> ProjectNumberSettingName = 1, /// <summary>A resource name with pattern <c>folders/{folder}/settings/{setting_name}</c>.</summary> FolderSettingName = 2, /// <summary> /// A resource name with pattern <c>organizations/{organization}/settings/{setting_name}</c>. 
/// </summary> OrganizationSettingName = 3, } private static gax::PathTemplate s_projectNumberSettingName = new gax::PathTemplate("projects/{project_number}/settings/{setting_name}"); private static gax::PathTemplate s_folderSettingName = new gax::PathTemplate("folders/{folder}/settings/{setting_name}"); private static gax::PathTemplate s_organizationSettingName = new gax::PathTemplate("organizations/{organization}/settings/{setting_name}"); /// <summary>Creates a <see cref="SettingName"/> containing an unparsed resource name.</summary> /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param> /// <returns> /// A new instance of <see cref="SettingName"/> containing the provided <paramref name="unparsedResourceName"/>. /// </returns> public static SettingName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) => new SettingName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName))); /// <summary> /// Creates a <see cref="SettingName"/> with the pattern <c>projects/{project_number}/settings/{setting_name}</c> /// . /// </summary> /// <param name="projectNumberId">The <c>ProjectNumber</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param> /// <returns>A new instance of <see cref="SettingName"/> constructed from the provided ids.</returns> public static SettingName FromProjectNumberSettingName(string projectNumberId, string settingNameId) => new SettingName(ResourceNameType.ProjectNumberSettingName, projectNumberId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectNumberId, nameof(projectNumberId)), settingNameId: gax::GaxPreconditions.CheckNotNullOrEmpty(settingNameId, nameof(settingNameId))); /// <summary> /// Creates a <see cref="SettingName"/> with the pattern <c>folders/{folder}/settings/{setting_name}</c>. 
/// </summary> /// <param name="folderId">The <c>Folder</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param> /// <returns>A new instance of <see cref="SettingName"/> constructed from the provided ids.</returns> public static SettingName FromFolderSettingName(string folderId, string settingNameId) => new SettingName(ResourceNameType.FolderSettingName, folderId: gax::GaxPreconditions.CheckNotNullOrEmpty(folderId, nameof(folderId)), settingNameId: gax::GaxPreconditions.CheckNotNullOrEmpty(settingNameId, nameof(settingNameId))); /// <summary> /// Creates a <see cref="SettingName"/> with the pattern <c>organizations/{organization}/settings/{setting_name}</c> /// . /// </summary> /// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param> /// <returns>A new instance of <see cref="SettingName"/> constructed from the provided ids.</returns> public static SettingName FromOrganizationSettingName(string organizationId, string settingNameId) => new SettingName(ResourceNameType.OrganizationSettingName, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), settingNameId: gax::GaxPreconditions.CheckNotNullOrEmpty(settingNameId, nameof(settingNameId))); /// <summary> /// Formats the IDs into the string representation of this <see cref="SettingName"/> with pattern /// <c>projects/{project_number}/settings/{setting_name}</c>. /// </summary> /// <param name="projectNumberId">The <c>ProjectNumber</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="SettingName"/> with pattern /// <c>projects/{project_number}/settings/{setting_name}</c>. 
/// </returns> public static string Format(string projectNumberId, string settingNameId) => FormatProjectNumberSettingName(projectNumberId, settingNameId); /// <summary> /// Formats the IDs into the string representation of this <see cref="SettingName"/> with pattern /// <c>projects/{project_number}/settings/{setting_name}</c>. /// </summary> /// <param name="projectNumberId">The <c>ProjectNumber</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="SettingName"/> with pattern /// <c>projects/{project_number}/settings/{setting_name}</c>. /// </returns> public static string FormatProjectNumberSettingName(string projectNumberId, string settingNameId) => s_projectNumberSettingName.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectNumberId, nameof(projectNumberId)), gax::GaxPreconditions.CheckNotNullOrEmpty(settingNameId, nameof(settingNameId))); /// <summary> /// Formats the IDs into the string representation of this <see cref="SettingName"/> with pattern /// <c>folders/{folder}/settings/{setting_name}</c>. /// </summary> /// <param name="folderId">The <c>Folder</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="SettingName"/> with pattern /// <c>folders/{folder}/settings/{setting_name}</c>. /// </returns> public static string FormatFolderSettingName(string folderId, string settingNameId) => s_folderSettingName.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(folderId, nameof(folderId)), gax::GaxPreconditions.CheckNotNullOrEmpty(settingNameId, nameof(settingNameId))); /// <summary> /// Formats the IDs into the string representation of this <see cref="SettingName"/> with pattern /// <c>organizations/{organization}/settings/{setting_name}</c>. 
/// </summary> /// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="SettingName"/> with pattern /// <c>organizations/{organization}/settings/{setting_name}</c>. /// </returns> public static string FormatOrganizationSettingName(string organizationId, string settingNameId) => s_organizationSettingName.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(settingNameId, nameof(settingNameId))); /// <summary>Parses the given resource name string into a new <see cref="SettingName"/> instance.</summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>projects/{project_number}/settings/{setting_name}</c></description></item> /// <item><description><c>folders/{folder}/settings/{setting_name}</c></description></item> /// <item><description><c>organizations/{organization}/settings/{setting_name}</c></description></item> /// </list> /// </remarks> /// <param name="settingName">The resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="SettingName"/> if successful.</returns> public static SettingName Parse(string settingName) => Parse(settingName, false); /// <summary> /// Parses the given resource name string into a new <see cref="SettingName"/> instance; optionally allowing an /// unparseable resource name. 
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>projects/{project_number}/settings/{setting_name}</c></description></item>
        /// <item><description><c>folders/{folder}/settings/{setting_name}</c></description></item>
        /// <item><description><c>organizations/{organization}/settings/{setting_name}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="settingName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="SettingName"/> if successful.</returns>
        public static SettingName Parse(string settingName, bool allowUnparsed) =>
            TryParse(settingName, allowUnparsed, out SettingName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="SettingName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>projects/{project_number}/settings/{setting_name}</c></description></item>
        /// <item><description><c>folders/{folder}/settings/{setting_name}</c></description></item>
        /// <item><description><c>organizations/{organization}/settings/{setting_name}</c></description></item>
        /// </list>
        /// </remarks>
        /// <param name="settingName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="SettingName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string settingName, out SettingName result) => TryParse(settingName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="SettingName"/> instance; optionally
        /// allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>projects/{project_number}/settings/{setting_name}</c></description></item>
        /// <item><description><c>folders/{folder}/settings/{setting_name}</c></description></item>
        /// <item><description><c>organizations/{organization}/settings/{setting_name}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="settingName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="SettingName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string settingName, bool allowUnparsed, out SettingName result)
        {
            gax::GaxPreconditions.CheckNotNull(settingName, nameof(settingName));
            gax::TemplatedResourceName resourceName;
            // Try each known pattern in declaration order; the first template that matches wins.
            if (s_projectNumberSettingName.TryParseName(settingName, out resourceName))
            {
                result = FromProjectNumberSettingName(resourceName[0], resourceName[1]);
                return true;
            }
            if (s_folderSettingName.TryParseName(settingName, out resourceName))
            {
                result = FromFolderSettingName(resourceName[0], resourceName[1]);
                return true;
            }
            if (s_organizationSettingName.TryParseName(settingName, out resourceName))
            {
                result = FromOrganizationSettingName(resourceName[0], resourceName[1]);
                return true;
            }
            // No template matched; optionally fall back to an unparsed wrapper.
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(settingName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        // Single private constructor used by all factory paths; exactly one ID pair is expected
        // to be non-null for a parsed name (none for an unparsed one).
        private SettingName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string folderId = null, string organizationId = null, string projectNumberId = null, string settingNameId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            FolderId = folderId;
            OrganizationId = organizationId;
            ProjectNumberId = projectNumberId;
            SettingNameId = settingNameId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="SettingName"/> class from the component parts of pattern
        /// <c>projects/{project_number}/settings/{setting_name}</c>
        /// </summary>
        /// <param name="projectNumberId">The <c>ProjectNumber</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="settingNameId">The <c>SettingName</c> ID. Must not be <c>null</c> or empty.</param>
        public SettingName(string projectNumberId, string settingNameId) : this(ResourceNameType.ProjectNumberSettingName, projectNumberId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectNumberId, nameof(projectNumberId)), settingNameId: gax::GaxPreconditions.CheckNotNullOrEmpty(settingNameId, nameof(settingNameId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>Folder</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
        /// </summary>
        public string FolderId { get; }

        /// <summary>
        /// The <c>Organization</c> ID. May be <c>null</c>, depending on which resource name is contained by this
        /// instance.
        /// </summary>
        public string OrganizationId { get; }

        /// <summary>
        /// The <c>ProjectNumber</c> ID. May be <c>null</c>, depending on which resource name is contained by this
        /// instance.
        /// </summary>
        public string ProjectNumberId { get; }

        /// <summary>
        /// The <c>SettingName</c> ID. May be <c>null</c>, depending on which resource name is contained by this
        /// instance.
        /// </summary>
        public string SettingNameId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.ProjectNumberSettingName: return s_projectNumberSettingName.Expand(ProjectNumberId, SettingNameId);
                case ResourceNameType.FolderSettingName: return s_folderSettingName.Expand(FolderId, SettingNameId);
                case ResourceNameType.OrganizationSettingName: return s_organizationSettingName.Expand(OrganizationId, SettingNameId);
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        /// <summary>Returns a hash code for this resource name.</summary>
        // Equality and hashing are delegated to the canonical string form.
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as SettingName);

        /// <inheritdoc/>
        public bool Equals(SettingName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        public static bool operator ==(SettingName a, SettingName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(SettingName a, SettingName b) => !(a == b);
    }

    public partial class Setting
    {
        /// <summary>
        /// <see cref="gcrv::SettingName"/>-typed view over the <see cref="Name"/> resource name property.
        /// </summary>
        public gcrv::SettingName SettingName
        {
            get => string.IsNullOrEmpty(Name) ? null : gcrv::SettingName.Parse(Name, allowUnparsed: true);
            set => Name = value?.ToString() ?? "";
        }
    }

    public partial class ListSettingsRequest
    {
        /// <summary>
        /// <see cref="gax::IResourceName"/>-typed view over the <see cref="Parent"/> resource name property.
        /// </summary>
        public gax::IResourceName ParentAsResourceName
        {
            get => string.IsNullOrEmpty(Parent) ? null : gax::UnparsedResourceName.Parse(Parent);
            set => Parent = value?.ToString() ?? "";
        }
    }

    public partial class GetSettingRequest
    {
        /// <summary>
        /// <see cref="gcrv::SettingName"/>-typed view over the <see cref="Name"/> resource name property.
        /// </summary>
        public gcrv::SettingName SettingName
        {
            get => string.IsNullOrEmpty(Name) ? null : gcrv::SettingName.Parse(Name, allowUnparsed: true);
            set => Name = value?.ToString() ?? "";
        }
    }
}
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml.Linq;

namespace MagicChunks.Helpers
{
    /// <summary>
    /// Extension methods for querying and mutating <see cref="XDocument"/>/<see cref="XElement"/> trees.
    /// Path elements may carry a zero-based index suffix (<c>name[2]</c>), may be namespace-prefixed
    /// (<c>prefix:name</c>), and processing instructions are addressed by their target name.
    /// </summary>
    public static class XmlExtensions
    {
        // Matches a trailing zero-based index suffix, e.g. the "[2]" in "item[2]".
        private static readonly Regex NodeIndexEndingRegex = new Regex(@"\[\d+\]$", RegexOptions.CultureInvariant | RegexOptions.Compiled);

        // Matches path elements addressing a processing instruction ("?name").
        // NOTE(review): not referenced anywhere in this class; looks dead — confirm and remove.
        private static readonly Regex ProcessingInstructionsPathElementRegex = new Regex(@"^\?.+", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase | RegexOptions.Singleline);

        // Extracts attrName="attrValue" pairs from a processing instruction's data string.
        private static readonly Regex AttributeNodeRegex = new Regex(@"(?<attrName>\w+)\s*\=\s*\""(?<attrValue>.+?)\""", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase | RegexOptions.Singleline);

        /// <summary>
        /// Serializes <paramref name="document"/> including its XML declaration (if present),
        /// which <see cref="XDocument.ToString()"/> normally omits.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="document"/> is <c>null</c>.</exception>
        public static string ToStringWithDeclaration(this XDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return document.ToStringWithDeclaration(SaveOptions.None);
        }

        /// <summary>
        /// Serializes <paramref name="document"/> including its XML declaration (if present) using the
        /// supplied <paramref name="options"/>. No newline is inserted between the declaration and the
        /// root element when formatting is disabled.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="document"/> is <c>null</c>.</exception>
        public static string ToStringWithDeclaration(this XDocument document, SaveOptions options)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            var newLine = (options & SaveOptions.DisableFormatting) == SaveOptions.DisableFormatting
                ? string.Empty
                : Environment.NewLine;

            return document.Declaration == null
                ? document.ToString(options)
                : document.Declaration + newLine + document.ToString(options);
        }

        /// <summary>
        /// Resolves <paramref name="name"/> (optionally "prefix:local") to an <see cref="XName"/>.
        /// A declared prefix maps to its namespace; an unknown prefix — and a prefix-less name —
        /// fall back to <paramref name="defaultNamespace"/>.
        /// </summary>
        public static XName GetNameWithNamespace(this string name, XElement element, string defaultNamespace)
        {
            if (name.Contains(':') == false)
            {
                return XName.Get(name, defaultNamespace);
            }

            var nameParts = name.Split(':');
            var prefixNamespace = element.GetNamespaceOfPrefix(nameParts[0]);

            return prefixNamespace != null
                ? XName.Get(nameParts[1], prefixNamespace.NamespaceName)
                : XName.Get(nameParts[1], defaultNamespace);
        }

        /// <summary>
        /// Splits an indexed path element ("name[N]") into its bare name and zero-based index.
        /// Any malformed input is surfaced as an <see cref="ArgumentException"/> naming the element.
        /// </summary>
        private static void ParseIndexedName(string name, out string nodeName, out int nodeIndex)
        {
            try
            {
                nodeName = NodeIndexEndingRegex.Replace(name, String.Empty);
                nodeIndex = int.Parse(NodeIndexEndingRegex.Match(name).Value.Trim('[', ']'));
                if (nodeIndex < 0)
                    // The index is zero-based, so 0 itself is valid (message fixed accordingly).
                    throw new ArgumentException("Index should be greater than or equal to 0.");
            }
            catch (Exception ex) when (ex is ArgumentException || ex is FormatException || ex is OverflowException)
            {
                throw new ArgumentException($"Wrong element name: {name}", ex);
            }
        }

        /// <summary>
        /// Returns the child element addressed by <paramref name="name"/>: a plain (case-insensitive)
        /// local name, a namespace-prefixed name, or an indexed name such as <c>item[1]</c>.
        /// Returns <c>null</c> when <paramref name="source"/> is <c>null</c> or no child matches.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown when an indexed name is malformed.</exception>
        public static XElement GetChildElementByName(this XElement source, string name)
        {
            if (!NodeIndexEndingRegex.IsMatch(name))
            {
                // A quoted "prefix" means the colon is part of a value, not a namespace prefix.
                bool isElementNameWithNamespace = name.IndexOf(':') > 0
                    && !(name.Split(':')[0].Contains("'") || name.Split(':')[0].Contains(@""""));

                return source?.Elements()
                    .FirstOrDefault(e => !isElementNameWithNamespace
                        ? String.Compare(e.Name.LocalName, name, StringComparison.OrdinalIgnoreCase) == 0
                        : e.Name == name.GetNameWithNamespace(source, String.Empty));
            }

            string nodeName;
            int nodeIndex;
            ParseIndexedName(name, out nodeName, out nodeIndex);

            var candidates = source?.Elements()
                .Where(e => name.IndexOf(':') == -1
                    ? String.Compare(e.Name.LocalName, nodeName, StringComparison.OrdinalIgnoreCase) == 0
                    : e.Name == nodeName.GetNameWithNamespace(source, String.Empty));

            // Null-propagate instead of dereferencing a null sequence when source is null.
            return candidates?.Skip(nodeIndex).FirstOrDefault();
        }

        /// <summary>
        /// Returns the child processing instruction whose target matches <paramref name="name"/>
        /// (case-insensitive), optionally indexed (<c>name[N]</c>). Returns <c>null</c> when
        /// <paramref name="source"/> is <c>null</c> or no instruction matches.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown when an indexed name is malformed.</exception>
        public static XProcessingInstruction GetChildProcessingInstructionByName(this XElement source, string name)
        {
            if (!NodeIndexEndingRegex.IsMatch(name))
            {
                return source?.Nodes().OfType<XProcessingInstruction>()
                    .FirstOrDefault(e => String.Compare(e.Target, name, StringComparison.OrdinalIgnoreCase) == 0);
            }

            string nodeName;
            int nodeIndex;
            ParseIndexedName(name, out nodeName, out nodeIndex);

            var candidates = source?.Nodes().OfType<XProcessingInstruction>()
                .Where(e => String.Compare(e.Target, nodeName, StringComparison.OrdinalIgnoreCase) == 0);

            return candidates?.Skip(nodeIndex).FirstOrDefault();
        }

        /// <summary>
        /// Returns the first child element named <paramref name="name"/> (case-insensitive) that carries
        /// attribute <paramref name="attr"/> equal to <paramref name="attrValue"/>, or <c>null</c> when
        /// <paramref name="source"/> is <c>null</c> or nothing matches.
        /// </summary>
        public static XElement GetChildElementByAttrValue(this XElement source, string name, string attr, string attrValue)
        {
            return source?.Elements()
                .Where(e => String.Compare(e.Name.LocalName, name, StringComparison.OrdinalIgnoreCase) == 0)
                .FirstOrDefault(e => e.Attributes().Any(a =>
                    (a.Name == attr.GetNameWithNamespace(source, String.Empty)) && (a.Value == attrValue)));
        }

        /// <summary>
        /// Returns the first child processing instruction targeting <paramref name="name"/>
        /// (case-insensitive) whose data contains an <c>attr="attrValue"</c> pair, or <c>null</c> when
        /// <paramref name="source"/> is <c>null</c> or nothing matches.
        /// </summary>
        public static XProcessingInstruction GetChildProcessingInstructionByAttrValue(this XElement source, string name, string attr, string attrValue)
        {
            return source?.Nodes().OfType<XProcessingInstruction>()
                .Where(e => String.Compare(e.Target, name, StringComparison.OrdinalIgnoreCase) == 0)
                .FirstOrDefault(e => AttributeNodeRegex.Matches(e.Data).OfType<Match>().Any(a =>
                {
                    var eAttrName = a.Groups["attrName"]?.Value;
                    var eAttrValue = a.Groups["attrValue"]?.Value;
                    return (String.Compare(eAttrName, attr, StringComparison.OrdinalIgnoreCase) == 0) && (eAttrValue == attrValue);
                }));
        }

        /// <summary>
        /// Creates a new child element named <paramref name="elementName"/> under <paramref name="source"/>,
        /// optionally with a single attribute, and returns the new element.
        /// </summary>
        public static XElement CreateChildElement(this XElement source, string documentNamespace, string elementName, string attrName = null, string attrValue = null)
        {
            var item = new XElement(elementName.GetNameWithNamespace(source, documentNamespace));

            if (!String.IsNullOrWhiteSpace(attrName) && !String.IsNullOrWhiteSpace(attrValue))
            {
                item.SetAttributeValue(attrName.GetNameWithNamespace(source, documentNamespace), attrValue);
            }

            source.Add(item);
            return item;
        }

        /// <summary>
        /// Creates a new child processing instruction targeting <paramref name="elementName"/> under
        /// <paramref name="source"/>, with data <c>attrName="attrValue"</c> when both are supplied,
        /// and returns the new instruction.
        /// </summary>
        public static XProcessingInstruction CreateChildProcessingInstruction(this XElement source, string documentNamespace, string elementName, string attrName = null, string attrValue = null)
        {
            var data = (!string.IsNullOrWhiteSpace(attrName) && !string.IsNullOrWhiteSpace(attrValue))
                ? $"{attrName}=\"{attrValue}\""
                : string.Empty;

            var item = new XProcessingInstruction(elementName, data);
            source.Add(item);
            return item;
        }

        /// <summary>
        /// Finds the child element described by an attribute-filter match (groups: <c>element</c>,
        /// <c>key</c>, <c>value</c>), creating it when absent, and returns it.
        /// </summary>
        public static XElement FindChildByAttrFilterMatch(this XElement source, Match attributeFilterMatch, string documentNamespace)
        {
            var elementName = attributeFilterMatch.Groups["element"].Value;
            var attrName = attributeFilterMatch.Groups["key"].Value;
            var attrValue = attributeFilterMatch.Groups["value"].Value;

            var item = source?.GetChildElementByAttrValue(elementName, attrName, attrValue);
            return item ?? source.CreateChildElement(documentNamespace, elementName, attrName, attrValue);
        }

        /// <summary>
        /// Finds the child processing instruction described by an attribute-filter match (groups:
        /// <c>element</c> — a leading '?' is stripped — <c>key</c>, <c>value</c>), creating it when
        /// absent, and returns it.
        /// </summary>
        public static XProcessingInstruction FindProcessingInstructionByAttrFilterMatch(this XElement source, Match attributeFilterMatch, string documentNamespace)
        {
            var elementName = attributeFilterMatch.Groups["element"].Value?.TrimStart('?');
            var attrName = attributeFilterMatch.Groups["key"].Value;
            var attrValue = attributeFilterMatch.Groups["value"].Value;

            var item = source?.GetChildProcessingInstructionByAttrValue(elementName, attrName, attrValue);
            return item ?? source.CreateChildProcessingInstruction(documentNamespace, elementName, attrName, attrValue);
        }
    }
}
using EdiEngine.Common.Enums;
using EdiEngine.Common.Definitions;
using EdiEngine.Standards.X12_004010.Segments;

namespace EdiEngine.Standards.X12_004010.Maps
{
    // Map definition for X12 004010 transaction set 820 (commonly the Payment Order /
    // Remittance Advice set). Purely declarative: each loop lists its segments/sub-loops
    // in order, with requirement designator and maximum occurrence count.
    public class M_820 : MapLoop
    {
        public M_820() : base(null)
        {
            Content.AddRange(new MapBaseEntity[] {
                new BPR() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new TRN() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new CUR() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new L_N1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new L_ENT(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new L_TXP(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new DED() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new L_LX(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                new L_N9(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new L_RYL(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
            });
        }

        // Loop 1000 (N1-led)
        public class L_N1 : MapLoop
        {
            public L_N1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new RDM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        // Loop 2000 (ENT-led)
        public class L_ENT : MapLoop
        {
            public L_ENT(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new ENT() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_NM1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_ADX(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_RMR(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2100 (NM1-led)
        public class L_NM1 : MapLoop
        {
            public L_NM1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new NM1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2200 (ADX-led)
        public class L_ADX : MapLoop
        {
            public L_ADX(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new ADX() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new L_REF(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_IT1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2210 (REF-led)
        public class L_REF : MapLoop
        {
            public L_REF(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2220 (IT1-led)
        public class L_IT1 : MapLoop
        {
            public L_IT1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new IT1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_REF_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SAC(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SLN(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2221 (REF-led)
        public class L_REF_1 : MapLoop
        {
            public L_REF_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        // Loop 2222 (SAC-led)
        public class L_SAC : MapLoop
        {
            public L_SAC(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SAC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new TXI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2223 (SLN-led)
        public class L_SLN : MapLoop
        {
            public L_SLN(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SLN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_REF_2(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SAC_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2224 (REF-led)
        public class L_REF_2 : MapLoop
        {
            public L_REF_2(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2225 (SAC-led)
        public class L_SAC_1 : MapLoop
        {
            public L_SAC_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SAC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new TXI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2300 (RMR-led)
        public class L_RMR : MapLoop
        {
            public L_RMR(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new RMR() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_IT1_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_ADX_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2310 (IT1-led)
        public class L_IT1_1 : MapLoop
        {
            public L_IT1_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new IT1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_REF_3(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SAC_2(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SLN_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2311 (REF-led)
        public class L_REF_3 : MapLoop
        {
            public L_REF_3(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        // Loop 2312 (SAC-led)
        public class L_SAC_2 : MapLoop
        {
            public L_SAC_2(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SAC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new TXI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2313 (SLN-led)
        public class L_SLN_1 : MapLoop
        {
            public L_SLN_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SLN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_REF_4(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SAC_3(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2314 (REF-led)
        public class L_REF_4 : MapLoop
        {
            public L_REF_4(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2315 (SAC-led)
        public class L_SAC_3 : MapLoop
        {
            public L_SAC_3(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SAC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new TXI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2320 (ADX-led)
        public class L_ADX_1 : MapLoop
        {
            public L_ADX_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new ADX() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_REF_5(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_IT1_2(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2321 (REF-led)
        public class L_REF_5 : MapLoop
        {
            public L_REF_5(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2322 (IT1-led)
        public class L_IT1_2 : MapLoop
        {
            public L_IT1_2(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new IT1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_REF_6(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SAC_4(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SLN_2(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2323 (REF-led)
        public class L_REF_6 : MapLoop
        {
            public L_REF_6(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        // Loop 2324 (SAC-led)
        public class L_SAC_4 : MapLoop
        {
            public L_SAC_4(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SAC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new TXI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2325 (SLN-led)
        public class L_SLN_2 : MapLoop
        {
            public L_SLN_2(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SLN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_REF_7(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_SAC_5(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2326 (REF-led)
        public class L_REF_7 : MapLoop
        {
            public L_REF_7(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 2327 (SAC-led)
        public class L_SAC_5 : MapLoop
        {
            public L_SAC_5(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new SAC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new TXI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 3000 (TXP-led)
        public class L_TXP : MapLoop
        {
            public L_TXP(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new TXP() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new TXI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 4000 (LX-led)
        public class L_LX : MapLoop
        {
            public L_LX(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new LX() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new TRN() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_NM1_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 4100 (NM1-led)
        public class L_NM1_1 : MapLoop
        {
            public L_NM1_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new NM1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new G53() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new L_AIN(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_PEN(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 4110 (AIN-led)
        public class L_AIN : MapLoop
        {
            public L_AIN(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new AIN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new QTY() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 4120 (PEN-led)
        public class L_PEN : MapLoop
        {
            public L_PEN(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new PEN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new AMT() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_INV(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 4121 (INV-led)
        public class L_INV : MapLoop
        {
            public L_INV(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new INV() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 5000 (N9-led)
        public class L_N9 : MapLoop
        {
            public L_N9(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new N9() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_AMT(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_N1_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 5100 (AMT-led)
        public class L_AMT : MapLoop
        {
            public L_AMT(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new AMT() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 5200 (N1-led)
        public class L_N1_1 : MapLoop
        {
            public L_N1_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_EMS(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 5210 (EMS-led)
        public class L_EMS : MapLoop
        {
            public L_EMS(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new EMS() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new ATN() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new AIN() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new PYD() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 6000 (RYL-led)
        public class L_RYL : MapLoop
        {
            public L_RYL(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new RYL() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_NM1_2(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 6100 (NM1-led)
        public class L_NM1_2 : MapLoop
        {
            public L_NM1_2(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new NM1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_LOC(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                    new L_ASM(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        // Loop 6110 (LOC-led)
        public class L_LOC : MapLoop
        {
            public L_LOC(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new LOC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_PID(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 6111 (PID-led)
        public class L_PID : MapLoop
        {
            public L_PID(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new PID() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new L_PCT(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 6112 (PCT-led)
        public class L_PCT : MapLoop
        {
            public L_PCT(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new PCT() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new QTY() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new L_AMT_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 6113 (AMT-led)
        public class L_AMT_1 : MapLoop
        {
            public L_AMT_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new AMT() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new ADX() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
                });
            }
        }

        // Loop 6120 (ASM-led)
        public class L_ASM : MapLoop
        {
            public L_ASM(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new ASM() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new ADX() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }
    }
}
// Copyright 2014 The Rector & Visitors of the University of Virginia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using Xamarin.Forms;
using Sensus.UI.UiProperties;
using Newtonsoft.Json;
using System.Collections.Generic;
using System.Linq;
using Sensus.UI.Inputs;
using Sensus.Probes.User.Scripts;
using Sensus.Exceptions;
using System.ComponentModel;

// register the input effect group
[assembly: ResolutionGroupName(Input.EFFECT_RESOLUTION_GROUP_NAME)]

namespace Sensus.UI.Inputs
{
    /// <summary>
    /// Base class for a single user-facing survey field. Tracks completion state,
    /// location/timestamp metadata, display conditions, and (optionally) a history
    /// of completion records. Serialized with Json.NET; members marked
    /// [JsonIgnore] are transient UI state.
    /// </summary>
    public abstract class Input : INotifyPropertyChanged
    {
        public event PropertyChangedEventHandler PropertyChanged;

        public const string EFFECT_RESOLUTION_GROUP_NAME = "InputEffects";

        private string _name;
        private string _id;
        private string _groupId;
        private string _labelText;
        private int _labelFontSize;
        private View _view;
        private bool _displayNumber;
        private bool _complete;
        private bool _needsToBeStored;
        private double? _latitude;
        private double? _longitude;
        private DateTimeOffset? _locationUpdateTimestamp;
        private bool _required;
        private bool _viewed;
        private DateTimeOffset? _completionTimestamp;
        private List<InputDisplayCondition> _displayConditions;
        private Color? _backgroundColor;
        private Thickness? _padding;
        private bool _frame;
        private List<InputCompletionRecord> _completionRecords;
        private DateTimeOffset? _submissionTimestamp;

        /// <summary>
        /// The name by which this input will be referred to within the Sensus app.
        /// </summary>
        /// <value>The name.</value>
        [EntryStringUiProperty("Name:", true, 0)]
        public string Name
        {
            get { return _name; }
            set
            {
                if (value != _name)
                {
                    _name = value;

                    // Caption is derived from _name, so notify on Caption rather than Name.
                    PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Caption)));
                }
            }
        }

        public string Id
        {
            get { return _id; }
            set { _id = value; }
        }

        public string GroupId
        {
            get { return _groupId; }
            set { _groupId = value; }
        }

        /// <summary>
        /// The text to display next to the input when showing the field to the user for completion. If you would like to
        /// use the value of a survey-triggering <see cref="Script.CurrentDatum"/> within the input's label, you can do so
        /// by placing a <c>{0}</c> within <see cref="LabelText"/> as a placeholder. The placeholder will be replaced with
        /// the value of the triggering <see cref="Datum"/> at runtime. You can read more about the format of the
        /// placeholder [here](https://msdn.microsoft.com/en-us/library/system.string.format(v=vs.110).aspx).
        /// </summary>
        /// <value>The label text.</value>
        [EntryStringUiProperty("Label Text:", true, 1)]
        public string LabelText
        {
            get { return _labelText; }
            set { _labelText = value; }
        }

        public int LabelFontSize
        {
            get { return _labelFontSize; }
            set { _labelFontSize = value; }
        }

        public bool DisplayNumber
        {
            get { return _displayNumber; }
            set { _displayNumber = value; }
        }

        // The current value the user has entered; concrete subclasses define it.
        [JsonIgnore]
        public abstract object Value { get; }

        /// <summary>
        /// Gets or sets a value indicating whether the user has interacted with this <see cref="Input"/>,
        /// leaving it in a state of completion. Contrast with Valid, which merely indicates that the
        /// state of the input will not prevent the user from moving through an input request (e.g., in the case
        /// of inputs that are not required).
        /// </summary>
        /// <value><c>true</c> if complete; otherwise, <c>false</c>.</value>
        [JsonIgnore]
        public bool Complete
        {
            get { return _complete; }
            protected set
            {
                _complete = value;

                DateTimeOffset timestamp = DateTimeOffset.UtcNow;
                object inputValue = null;

                _completionTimestamp = null;

                if (_complete)
                {
                    _completionTimestamp = timestamp;

                    // get a deep copy of the value. some inputs have list values, and simply using the list reference wouldn't track the history, since the most up-to-date list would be used for all history values.
                    inputValue = JsonConvert.DeserializeObject<object>(JsonConvert.SerializeObject(Value, SensusServiceHelper.JSON_SERIALIZER_SETTINGS), SensusServiceHelper.JSON_SERIALIZER_SETTINGS);
                }

                // NOTE(review): an incomplete transition also appends a record (with a null
                // value) — presumably intentional so history captures un-completions; confirm.
                if (StoreCompletionRecords)
                {
                    _completionRecords.Add(new InputCompletionRecord(timestamp, inputValue));
                }

                // if this input defines a protocol variable, set that variable here.
                if (this is IVariableDefiningInput)
                {
                    IVariableDefiningInput input = this as IVariableDefiningInput;
                    string definedVariable = input.DefinedVariable;
                    if (definedVariable != null)
                    {
                        Protocol protocolForInput = GetProtocolForInput();

                        // if the input is complete, set the variable on the protocol
                        if (_complete)
                        {
                            protocolForInput.VariableValue[definedVariable] = inputValue.ToString();
                        }
                        // if the input is incomplete, set the value to null on the protocol
                        else
                        {
                            protocolForInput.VariableValue[definedVariable] = null;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Gets a value indicating whether this <see cref="Input"/> is valid. A valid input is one that
        /// is complete, one that has been viewed but is not required, or one that isn't displayed. It is
        /// an input in a state that should not prevent the user from proceeding through an input request.
        /// </summary>
        /// <value><c>true</c> if valid; otherwise, <c>false</c>.</value>
        [JsonIgnore]
        public bool Valid
        {
            // operator precedence: _complete || (_viewed && !_required) || !Display
            get { return _complete || _viewed && !_required || !Display; }
        }

        public bool NeedsToBeStored
        {
            get { return _needsToBeStored; }
            set { _needsToBeStored = value; }
        }

        public double? Latitude
        {
            get { return _latitude; }
            set { _latitude = value; }
        }

        public double? Longitude
        {
            get { return _longitude; }
            set { _longitude = value; }
        }

        public DateTimeOffset? LocationUpdateTimestamp
        {
            get { return _locationUpdateTimestamp; }
            set { _locationUpdateTimestamp = value; }
        }

        [JsonIgnore]
        public abstract bool Enabled { get; set; }

        [JsonIgnore]
        public abstract string DefaultName { get; }

        /// <summary>
        /// Whether or not a valid value is required for this input. Also see <see cref="InputGroup.ForceValidInputs"/>.
        /// </summary>
        /// <value><c>true</c> if required; otherwise, <c>false</c>.</value>
        [OnOffUiProperty(null, true, 5)]
        public bool Required
        {
            get { return _required; }
            set
            {
                if (value != _required)
                {
                    _required = value;

                    // Caption appends "*" for required inputs, so notify on Caption.
                    PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Caption)));
                }
            }
        }

        public bool Viewed
        {
            get { return _viewed; }
            set { _viewed = value; }
        }

        [JsonIgnore]
        public DateTimeOffset? CompletionTimestamp
        {
            get { return _completionTimestamp; }
        }

        public List<InputDisplayCondition> DisplayConditions
        {
            get { return _displayConditions; }
        }

        public Color? BackgroundColor
        {
            get { return _backgroundColor; }
            set { _backgroundColor = value; }
        }

        public Thickness? Padding
        {
            get { return _padding; }
            set { _padding = value; }
        }

        public bool Frame
        {
            get { return _frame; }
            set { _frame = value; }
        }

        public List<InputCompletionRecord> CompletionRecords
        {
            get { return _completionRecords; }
        }

        // Subclasses may override to opt out of completion-record history.
        public virtual bool StoreCompletionRecords
        {
            get { return true; }
        }

        /// <summary>
        /// Whether this input should currently be shown: all conjunctive display
        /// conditions must be satisfied, and at least one disjunctive condition
        /// must be satisfied (when any of either kind exist).
        /// </summary>
        [JsonIgnore]
        public bool Display
        {
            get
            {
                List<InputDisplayCondition> conjuncts = _displayConditions.Where(displayCondition => displayCondition.Conjunctive).ToList();
                if (conjuncts.Count > 0 && conjuncts.Any(displayCondition => !displayCondition.Satisfied))
                {
                    return false;
                }

                List<InputDisplayCondition> disjuncts = _displayConditions.Where(displayCondition => !displayCondition.Conjunctive).ToList();
                if (disjuncts.Count > 0 && disjuncts.All(displayCondition => !displayCondition.Satisfied))
                {
                    return false;
                }

                return true;
            }
        }

        public DateTimeOffset? SubmissionTimestamp
        {
            get { return _submissionTimestamp; }
            set { _submissionTimestamp = value; }
        }

        // Display caption: "<name> -- <default name>" (default suppressed when equal), plus "*" when required.
        [JsonIgnore]
        public string Caption
        {
            get { return _name + (_name == DefaultName ? "" : " -- " + DefaultName) + (_required ? "*" : ""); }
        }

        /// <summary>
        /// Gets or sets the <see cref="Datum"/> that triggered the deployment of this <see cref="Input"/>. This
        /// is what will be used when formatting placeholder text in the input <see cref="LabelText"/>.
        /// </summary>
        /// <value>The triggering datum.</value>
        [JsonIgnore]
        public Datum TriggeringDatum { get; set; }

        public Input()
        {
            _name = DefaultName;
            _id = Guid.NewGuid().ToString();
            _displayNumber = true;
            _complete = false;
            _needsToBeStored = true;
            _required = true;
            _viewed = false;
            _completionTimestamp = null;
            _labelFontSize = 20;
            _displayConditions = new List<InputDisplayCondition>();
            _backgroundColor = null;
            _padding = null;
            _frame = true;
            _completionRecords = new List<InputCompletionRecord>();
            _submissionTimestamp = null;
        }

        public Input(string labelText)
            : this()
        {
            _labelText = labelText;
        }

        public Input(string labelText, int labelFontSize)
            : this(labelText)
        {
            _labelFontSize = labelFontSize;
        }

        public Input(string labelText, string name)
            : this(labelText)
        {
            _name = name;
        }

        // Builds the label shown above/next to the input view.
        protected Label CreateLabel(int index)
        {
            return new Label
            {
                Text = GetLabelText(index),
                FontSize = _labelFontSize

                // set the style ID on the label so that we can retrieve it when UI testing
#if UI_TESTING
                , StyleId = Name + " Label"
#endif
            };
        }

        /// <summary>
        /// Formats the label text: prepends "*" when required and "N) " numbering when
        /// requested, substitutes {variable} references with protocol variable values,
        /// and formats any {0} placeholder with the triggering datum's placeholder value.
        /// </summary>
        protected string GetLabelText(int index)
        {
            if (string.IsNullOrWhiteSpace(_labelText))
            {
                return "";
            }
            else
            {
                string requiredStr = _required ? "*" : "";
                string indexStr = index > 0 && _displayNumber ? index + ") " : "";
                string labelTextStr = _labelText;

                // get the protocol that contains the current input in a script runner (if any)
                Protocol protocolForInput = GetProtocolForInput();

                if (protocolForInput != null)
                {
                    // replace all variables with their values
                    foreach (string variable in protocolForInput.VariableValue.Keys)
                    {
                        // get the value for the variable as defined on the protocol
                        string variableValue = protocolForInput.VariableValue[variable];

                        // if the variable's value has not been defined, then just use the variable name as a fallback.
                        if (variableValue == null)
                        {
                            variableValue = variable;
                        }

                        // replace variable references with its value
                        labelTextStr = labelTextStr.Replace("{" + variable + "}", variableValue);
                    }
                }

                // if this input is being shown as part of a datum-triggered script, format the label
                // text of the input to replace any {0} references with the triggering datum's placeholder
                // value.
                if (TriggeringDatum != null)
                {
                    labelTextStr = string.Format(labelTextStr, TriggeringDatum.StringPlaceholderValue.ToString().ToLower());
                }

                return requiredStr + indexStr + labelTextStr;
            }
        }

        // Finds the registered protocol whose script probe contains this input (by Id), or null.
        private Protocol GetProtocolForInput()
        {
            return SensusServiceHelper.Get().RegisteredProtocols.SingleOrDefault(protocol => protocol.Probes.OfType<ScriptProbe>()  // get script probes
                                                                                                            .Single()               // must be only 1
                                                                                                            .ScriptRunners          // get runners
                                                                                                            .SelectMany(runner => runner.Script.InputGroups)  // get input groups for each runner
                                                                                                            .SelectMany(inputGroup => inputGroup.Inputs)      // get inputs for each input group
                                                                                                            .Any(input => input.Id == _id));                  // check if any inputs are the current one
        }

        public virtual View GetView(int index)
        {
            return _view;
        }

        // Wraps the subclass-provided view in a container so background/padding apply uniformly.
        protected virtual void SetView(View value)
        {
            ContentView viewContainer = new ContentView
            {
                Content = value
            };

            if (_backgroundColor != null)
            {
                viewContainer.BackgroundColor = _backgroundColor.GetValueOrDefault();
            }

            if (_padding != null)
            {
                viewContainer.Padding = _padding.GetValueOrDefault();
            }

            _view = viewContainer;
        }

        // Returns the input to its pristine, un-answered state (does not change Id).
        public void Reset()
        {
            _view = null;
            _complete = false;
            _needsToBeStored = true;
            _latitude = null;
            _longitude = null;
            _locationUpdateTimestamp = null;
            _viewed = false;
            _completionTimestamp = null;
            _backgroundColor = null;
            _padding = null;
        }

        /// <summary>
        /// Compares the current <see cref="Value"/> against a display-condition value.
        /// Reports (without throwing) when the two values have mismatched types.
        /// </summary>
        public virtual bool ValueMatches(object conditionValue, bool conjunctive)
        {
            // if either is null, both must be null to be equal
            if (Value == null || conditionValue == null)
            {
                return Value == null && conditionValue == null;
            }
            // if they're of the same type, compare
            else if (Value.GetType().Equals(conditionValue.GetType()))
            {
                return Value.Equals(conditionValue);
            }
            else
            {
                // this should never happen
                SensusException.Report(new Exception("Called Input.ValueMatches with conditionValue of type " + conditionValue.GetType() + ". Comparing with value of type " + Value.GetType() + "."));

                return false;
            }
        }

        // Deep-copies this input via JSON round-trip; optionally assigns a fresh Id.
        public Input Copy(bool newId)
        {
            Input copy = JsonConvert.DeserializeObject<Input>(JsonConvert.SerializeObject(this, SensusServiceHelper.JSON_SERIALIZER_SETTINGS), SensusServiceHelper.JSON_SERIALIZER_SETTINGS);

            copy.Reset();

            // the reset on the previous line only resets the state of the input. it does not assign it a new/unique ID, which all inputs normally require.
            if (newId)
            {
                copy.Id = Guid.NewGuid().ToString();
            }

            return copy;
        }

        /// <summary>
        /// Returns a <see cref="T:System.String"/> that represents the current <see cref="T:Sensus.UI.Inputs.Input"/>. This is needed
        /// when adding display conditions.
        /// </summary>
        /// <returns>A <see cref="T:System.String"/> that represents the current <see cref="T:Sensus.UI.Inputs.Input"/>.</returns>
        public override string ToString()
        {
            return _name;
        }
    }
}
using UnityEngine.Rendering;

namespace UnityEngine.PostProcessing
{
    using Settings = MotionBlurModel.Settings;

    /// <summary>
    /// Post-processing component that renders motion blur via a velocity-reconstruction
    /// filter (shutter-angle based) and/or multi-frame blending, driven through a
    /// camera CommandBuffer at the BeforeImageEffects event.
    /// </summary>
    public sealed class MotionBlurComponent : PostProcessingComponentCommandBuffer<MotionBlurModel>
    {
        // Cached shader property IDs used by the motion-blur material.
        static class Uniforms
        {
            internal static readonly int _VelocityScale     = Shader.PropertyToID("_VelocityScale");
            internal static readonly int _MaxBlurRadius     = Shader.PropertyToID("_MaxBlurRadius");
            internal static readonly int _RcpMaxBlurRadius  = Shader.PropertyToID("_RcpMaxBlurRadius");
            internal static readonly int _VelocityTex       = Shader.PropertyToID("_VelocityTex");
            internal static readonly int _MainTex           = Shader.PropertyToID("_MainTex");
            internal static readonly int _Tile2RT           = Shader.PropertyToID("_Tile2RT");
            internal static readonly int _Tile4RT           = Shader.PropertyToID("_Tile4RT");
            internal static readonly int _Tile8RT           = Shader.PropertyToID("_Tile8RT");
            internal static readonly int _TileMaxOffs       = Shader.PropertyToID("_TileMaxOffs");
            internal static readonly int _TileMaxLoop       = Shader.PropertyToID("_TileMaxLoop");
            internal static readonly int _TileVRT           = Shader.PropertyToID("_TileVRT");
            internal static readonly int _NeighborMaxTex    = Shader.PropertyToID("_NeighborMaxTex");
            internal static readonly int _LoopCount         = Shader.PropertyToID("_LoopCount");
            internal static readonly int _TempRT            = Shader.PropertyToID("_TempRT");

            internal static readonly int _History1LumaTex   = Shader.PropertyToID("_History1LumaTex");
            internal static readonly int _History2LumaTex   = Shader.PropertyToID("_History2LumaTex");
            internal static readonly int _History3LumaTex   = Shader.PropertyToID("_History3LumaTex");
            internal static readonly int _History4LumaTex   = Shader.PropertyToID("_History4LumaTex");

            internal static readonly int _History1ChromaTex = Shader.PropertyToID("_History1ChromaTex");
            internal static readonly int _History2ChromaTex = Shader.PropertyToID("_History2ChromaTex");
            internal static readonly int _History3ChromaTex = Shader.PropertyToID("_History3ChromaTex");
            internal static readonly int _History4ChromaTex = Shader.PropertyToID("_History4ChromaTex");

            internal static readonly int _History1Weight    = Shader.PropertyToID("_History1Weight");
            internal static readonly int _History2Weight    = Shader.PropertyToID("_History2Weight");
            internal static readonly int _History3Weight    = Shader.PropertyToID("_History3Weight");
            internal static readonly int _History4Weight    = Shader.PropertyToID("_History4Weight");
        }

        // Shader pass indices; must match the pass order in the motion-blur shader.
        enum Pass
        {
            VelocitySetup,
            TileMax1,
            TileMax2,
            TileMaxV,
            NeighborMax,
            Reconstruction,
            FrameCompression,
            FrameBlendingChroma,
            FrameBlendingRaw
        }

        /// <summary>
        /// Velocity-reconstruction motion blur (TileMax/NeighborMax pyramid + reconstruction pass).
        /// </summary>
        public class ReconstructionFilter
        {
            // Texture format for storing 2D vectors.
            RenderTextureFormat m_VectorRTFormat = RenderTextureFormat.RGHalf;

            // Texture format for storing packed velocity/depth.
            RenderTextureFormat m_PackedRTFormat = RenderTextureFormat.ARGB2101010;

            public ReconstructionFilter()
            {
                CheckTextureFormatSupport();
            }

            void CheckTextureFormatSupport()
            {
                // If 2:10:10:10 isn't supported, use ARGB32 instead.
                if (!SystemInfo.SupportsRenderTextureFormat(m_PackedRTFormat))
                    m_PackedRTFormat = RenderTextureFormat.ARGB32;
            }

            public bool IsSupported()
            {
                return SystemInfo.supportsMotionVectors;
            }

            /// <summary>
            /// Records the full reconstruction pipeline into <paramref name="cb"/>:
            /// velocity packing, three 1/2-downsize TileMax passes, a final TileMax
            /// reduction, NeighborMax, and the reconstruction blit to destination.
            /// All temporary RTs acquired here are released before returning.
            /// </summary>
            public void ProcessImage(PostProcessingContext context, CommandBuffer cb, ref Settings settings, RenderTargetIdentifier source, RenderTargetIdentifier destination, Material material)
            {
                const float kMaxBlurRadius = 5f;

                // Calculate the maximum blur radius in pixels.
                int maxBlurPixels = (int)(kMaxBlurRadius * context.height / 100);

                // Calculate the TileMax size.
                // It should be a multiple of 8 and larger than maxBlur.
                int tileSize = ((maxBlurPixels - 1) / 8 + 1) * 8;

                // Pass 1 - Velocity/depth packing
                var velocityScale = settings.shutterAngle / 360f;
                cb.SetGlobalFloat(Uniforms._VelocityScale, velocityScale);
                cb.SetGlobalFloat(Uniforms._MaxBlurRadius, maxBlurPixels);
                cb.SetGlobalFloat(Uniforms._RcpMaxBlurRadius, 1f / maxBlurPixels);

                int vbuffer = Uniforms._VelocityTex;
                cb.GetTemporaryRT(vbuffer, context.width, context.height, 0, FilterMode.Point, m_PackedRTFormat, RenderTextureReadWrite.Linear);
                cb.Blit((Texture)null, vbuffer, material, (int)Pass.VelocitySetup);

                // Pass 2 - First TileMax filter (1/2 downsize)
                int tile2 = Uniforms._Tile2RT;
                cb.GetTemporaryRT(tile2, context.width / 2, context.height / 2, 0, FilterMode.Point, m_VectorRTFormat, RenderTextureReadWrite.Linear);
                cb.SetGlobalTexture(Uniforms._MainTex, vbuffer);
                cb.Blit(vbuffer, tile2, material, (int)Pass.TileMax1);

                // Pass 3 - Second TileMax filter (1/2 downsize)
                int tile4 = Uniforms._Tile4RT;
                cb.GetTemporaryRT(tile4, context.width / 4, context.height / 4, 0, FilterMode.Point, m_VectorRTFormat, RenderTextureReadWrite.Linear);
                cb.SetGlobalTexture(Uniforms._MainTex, tile2);
                cb.Blit(tile2, tile4, material, (int)Pass.TileMax2);
                cb.ReleaseTemporaryRT(tile2);

                // Pass 4 - Third TileMax filter (1/2 downsize)
                int tile8 = Uniforms._Tile8RT;
                cb.GetTemporaryRT(tile8, context.width / 8, context.height / 8, 0, FilterMode.Point, m_VectorRTFormat, RenderTextureReadWrite.Linear);
                cb.SetGlobalTexture(Uniforms._MainTex, tile4);
                cb.Blit(tile4, tile8, material, (int)Pass.TileMax2);
                cb.ReleaseTemporaryRT(tile4);

                // Pass 5 - Fourth TileMax filter (reduce to tileSize)
                var tileMaxOffs = Vector2.one * (tileSize / 8f - 1f) * -0.5f;
                cb.SetGlobalVector(Uniforms._TileMaxOffs, tileMaxOffs);
                cb.SetGlobalFloat(Uniforms._TileMaxLoop, (int)(tileSize / 8f));

                int tile = Uniforms._TileVRT;
                cb.GetTemporaryRT(tile, context.width / tileSize, context.height / tileSize, 0, FilterMode.Point, m_VectorRTFormat, RenderTextureReadWrite.Linear);
                cb.SetGlobalTexture(Uniforms._MainTex, tile8);
                cb.Blit(tile8, tile, material, (int)Pass.TileMaxV);
                cb.ReleaseTemporaryRT(tile8);

                // Pass 6 - NeighborMax filter
                int neighborMax = Uniforms._NeighborMaxTex;
                int neighborMaxWidth = context.width / tileSize;
                int neighborMaxHeight = context.height / tileSize;
                cb.GetTemporaryRT(neighborMax, neighborMaxWidth, neighborMaxHeight, 0, FilterMode.Point, m_VectorRTFormat, RenderTextureReadWrite.Linear);
                cb.SetGlobalTexture(Uniforms._MainTex, tile);
                cb.Blit(tile, neighborMax, material, (int)Pass.NeighborMax);
                cb.ReleaseTemporaryRT(tile);

                // Pass 7 - Reconstruction pass
                cb.SetGlobalFloat(Uniforms._LoopCount, Mathf.Clamp(settings.sampleCount / 2, 1, 64));
                cb.SetGlobalTexture(Uniforms._MainTex, source);
                cb.Blit(source, destination, material, (int)Pass.Reconstruction);

                cb.ReleaseTemporaryRT(vbuffer);
                cb.ReleaseTemporaryRT(neighborMax);
            }
        }

        /// <summary>
        /// Blends the current frame with up to four history frames, optionally stored
        /// compressed as separate luma/chroma R8 textures (when MRT + R8 are supported).
        /// </summary>
        public class FrameBlendingFilter
        {
            // One history frame: its texture(s) and the time it was recorded.
            struct Frame
            {
                public RenderTexture lumaTexture;
                public RenderTexture chromaTexture;

                float m_Time;
                RenderTargetIdentifier[] m_MRT;

                // Exponential falloff weight; 0 for a never-recorded frame (m_Time == 0).
                public float CalculateWeight(float strength, float currentTime)
                {
                    if (Mathf.Approximately(m_Time, 0f))
                        return 0f;

                    var coeff = Mathf.Lerp(80f, 16f, strength);
                    return Mathf.Exp((m_Time - currentTime) * coeff);
                }

                public void Release()
                {
                    if (lumaTexture != null)
                        RenderTexture.ReleaseTemporary(lumaTexture);

                    if (chromaTexture != null)
                        RenderTexture.ReleaseTemporary(chromaTexture);

                    lumaTexture = null;
                    chromaTexture = null;
                }

                // Record the source frame compressed into separate luma/chroma R8 targets (MRT).
                public void MakeRecord(CommandBuffer cb, RenderTargetIdentifier source, int width, int height, Material material)
                {
                    Release();

                    lumaTexture = RenderTexture.GetTemporary(width, height, 0, RenderTextureFormat.R8, RenderTextureReadWrite.Linear);
                    chromaTexture = RenderTexture.GetTemporary(width, height, 0, RenderTextureFormat.R8, RenderTextureReadWrite.Linear);

                    lumaTexture.filterMode = FilterMode.Point;
                    chromaTexture.filterMode = FilterMode.Point;

                    if (m_MRT == null)
                        m_MRT = new RenderTargetIdentifier[2];

                    m_MRT[0] = lumaTexture;
                    m_MRT[1] = chromaTexture;

                    cb.SetGlobalTexture(Uniforms._MainTex, source);
                    cb.SetRenderTarget(m_MRT, lumaTexture);
                    cb.DrawMesh(GraphicsUtils.quad, Matrix4x4.identity, material, 0, (int)Pass.FrameCompression);

                    m_Time = Time.time;
                }

                // Record the source frame uncompressed (single texture; lumaTexture doubles as raw storage).
                public void MakeRecordRaw(CommandBuffer cb, RenderTargetIdentifier source, int width, int height, RenderTextureFormat format)
                {
                    Release();

                    lumaTexture = RenderTexture.GetTemporary(width, height, 0, format);
                    lumaTexture.filterMode = FilterMode.Point;

                    cb.SetGlobalTexture(Uniforms._MainTex, source);
                    cb.Blit(source, lumaTexture);

                    m_Time = Time.time;
                }
            }

            bool m_UseCompression;
            RenderTextureFormat m_RawTextureFormat;

            Frame[] m_FrameList;
            int m_LastFrameCount;

            public FrameBlendingFilter()
            {
                m_UseCompression = CheckSupportCompression();
                m_RawTextureFormat = GetPreferredRenderTextureFormat();
                m_FrameList = new Frame[4];
            }

            public void Dispose()
            {
                foreach (var frame in m_FrameList)
                    frame.Release();
            }

            public void PushFrame(CommandBuffer cb, RenderTargetIdentifier source, int width, int height, Material material)
            {
                // Push only when actual update (do nothing while pausing)
                var frameCount = Time.frameCount;
                if (frameCount == m_LastFrameCount) return;

                // Update the frame record.
                var index = frameCount % m_FrameList.Length;

                if (m_UseCompression)
                    m_FrameList[index].MakeRecord(cb, source, width, height, material);
                else
                    m_FrameList[index].MakeRecordRaw(cb, source, width, height, m_RawTextureFormat);

                m_LastFrameCount = frameCount;
            }

            // Bind the four most recent history frames and their weights, then blend into destination.
            public void BlendFrames(CommandBuffer cb, float strength, RenderTargetIdentifier source, RenderTargetIdentifier destination, Material material)
            {
                var t = Time.time;

                var f1 = GetFrameRelative(-1);
                var f2 = GetFrameRelative(-2);
                var f3 = GetFrameRelative(-3);
                var f4 = GetFrameRelative(-4);

                cb.SetGlobalTexture(Uniforms._History1LumaTex, f1.lumaTexture);
                cb.SetGlobalTexture(Uniforms._History2LumaTex, f2.lumaTexture);
                cb.SetGlobalTexture(Uniforms._History3LumaTex, f3.lumaTexture);
                cb.SetGlobalTexture(Uniforms._History4LumaTex, f4.lumaTexture);

                cb.SetGlobalTexture(Uniforms._History1ChromaTex, f1.chromaTexture);
                cb.SetGlobalTexture(Uniforms._History2ChromaTex, f2.chromaTexture);
                cb.SetGlobalTexture(Uniforms._History3ChromaTex, f3.chromaTexture);
                cb.SetGlobalTexture(Uniforms._History4ChromaTex, f4.chromaTexture);

                cb.SetGlobalFloat(Uniforms._History1Weight, f1.CalculateWeight(strength, t));
                cb.SetGlobalFloat(Uniforms._History2Weight, f2.CalculateWeight(strength, t));
                cb.SetGlobalFloat(Uniforms._History3Weight, f3.CalculateWeight(strength, t));
                cb.SetGlobalFloat(Uniforms._History4Weight, f4.CalculateWeight(strength, t));

                cb.SetGlobalTexture(Uniforms._MainTex, source);
                cb.Blit(source, destination, material, m_UseCompression ? (int)Pass.FrameBlendingChroma : (int)Pass.FrameBlendingRaw);
            }

            // Check if the platform has the capability of compression.
            static bool CheckSupportCompression()
            {
                return SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.R8) && SystemInfo.supportedRenderTargetCount > 1;
            }

            // Determine which 16-bit render texture format is available.
            static RenderTextureFormat GetPreferredRenderTextureFormat()
            {
                RenderTextureFormat[] formats =
                {
                    RenderTextureFormat.RGB565,
                    RenderTextureFormat.ARGB1555,
                    RenderTextureFormat.ARGB4444
                };

                foreach (var f in formats)
                    if (SystemInfo.SupportsRenderTextureFormat(f)) return f;

                return RenderTextureFormat.Default;
            }

            // Retrieve a frame record with relative indexing.
            // Use a negative index to refer to previous frames.
            Frame GetFrameRelative(int offset)
            {
                var index = (Time.frameCount + m_FrameList.Length + offset) % m_FrameList.Length;
                return m_FrameList[index];
            }
        }

        ReconstructionFilter m_ReconstructionFilter;
        // Lazily created reconstruction filter.
        public ReconstructionFilter reconstructionFilter
        {
            get
            {
                if (m_ReconstructionFilter == null)
                    m_ReconstructionFilter = new ReconstructionFilter();

                return m_ReconstructionFilter;
            }
        }

        FrameBlendingFilter m_FrameBlendingFilter;
        // Lazily created frame-blending filter.
        public FrameBlendingFilter frameBlendingFilter
        {
            get
            {
                if (m_FrameBlendingFilter == null)
                    m_FrameBlendingFilter = new FrameBlendingFilter();

                return m_FrameBlendingFilter;
            }
        }

        bool m_FirstFrame = true;

        public override bool active
        {
            get
            {
                var settings = model.settings;
                return model.enabled
                    && ((settings.shutterAngle > 0f && reconstructionFilter.IsSupported()) || settings.frameBlending > 0f)
                    && SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2 // No movecs on GLES2 platforms
                    && !context.interrupted;
            }
        }

        public override string GetName()
        {
            return "Motion Blur";
        }

        // Drops the frame-blending history (textures released, filter recreated on demand).
        public void ResetHistory()
        {
            if (m_FrameBlendingFilter != null)
                m_FrameBlendingFilter.Dispose();

            m_FrameBlendingFilter = null;
        }

        public override DepthTextureMode GetCameraFlags()
        {
            return DepthTextureMode.Depth | DepthTextureMode.MotionVectors;
        }

        public override CameraEvent GetCameraEvent()
        {
            return CameraEvent.BeforeImageEffects;
        }

        public override void OnEnable()
        {
            m_FirstFrame = true;
        }

        public override void PopulateCommandBuffer(CommandBuffer cb)
        {
#if UNITY_EDITOR
            // Don't render motion blur preview when the editor is not playing as it can in some
            // cases result in ugly artifacts (i.e. when resizing the game view).
            if (!Application.isPlaying)
                return;
#endif

            // Skip rendering in the first frame as motion vectors won't be available until the
            // next one
            if (m_FirstFrame)
            {
                m_FirstFrame = false;
                return;
            }

            var material = context.materialFactory.Get("Hidden/Post FX/Motion Blur");
            var blitMaterial = context.materialFactory.Get("Hidden/Post FX/Blit");
            var settings = model.settings;
            var fbFormat = context.isHdr ? RenderTextureFormat.DefaultHDR : RenderTextureFormat.Default;

            int tempRT = Uniforms._TempRT;
            cb.GetTemporaryRT(tempRT, context.width, context.height, 0, FilterMode.Point, fbFormat);

            if (settings.shutterAngle > 0f && settings.frameBlending > 0f)
            {
                // Motion blur + frame blending
                reconstructionFilter.ProcessImage(context, cb, ref settings, BuiltinRenderTextureType.CameraTarget, tempRT, material);
                frameBlendingFilter.BlendFrames(cb, settings.frameBlending, tempRT, BuiltinRenderTextureType.CameraTarget, material);
                frameBlendingFilter.PushFrame(cb, tempRT, context.width, context.height, material);
            }
            else if (settings.shutterAngle > 0f)
            {
                // No frame blending
                cb.SetGlobalTexture(Uniforms._MainTex, BuiltinRenderTextureType.CameraTarget);
                cb.Blit(BuiltinRenderTextureType.CameraTarget, tempRT, blitMaterial, 0);
                reconstructionFilter.ProcessImage(context, cb, ref settings, tempRT, BuiltinRenderTextureType.CameraTarget, material);
            }
            else if (settings.frameBlending > 0f)
            {
                // Frame blending only
                cb.SetGlobalTexture(Uniforms._MainTex, BuiltinRenderTextureType.CameraTarget);
                cb.Blit(BuiltinRenderTextureType.CameraTarget, tempRT, blitMaterial, 0);
                frameBlendingFilter.BlendFrames(cb, settings.frameBlending, tempRT, BuiltinRenderTextureType.CameraTarget, material);
                frameBlendingFilter.PushFrame(cb, tempRT, context.width, context.height, material);
            }

            // Cleaning up
            cb.ReleaseTemporaryRT(tempRT);
        }

        public override void OnDisable()
        {
            if (m_FrameBlendingFilter != null)
                m_FrameBlendingFilter.Dispose();
        }
    }
}
//#define USE_SharpZipLib

#if !UNITY_WEBPLAYER
#define USE_FileIO
#endif
/* * * * *
 * A simple JSON Parser / builder
 * ------------------------------
 *
 * It mainly has been written as a simple JSON parser. It can build a JSON string
 * from the node-tree, or generate a node tree from any valid JSON string.
 *
 * If you want to use compression when saving to file / stream / B64 you have to include
 * SharpZipLib ( http://www.icsharpcode.net/opensource/sharpziplib/ ) in your project and
 * define "USE_SharpZipLib" at the top of the file
 *
 * Written by Bunny83
 * 2012-06-09
 *
 * Modified by oPless, 2014-09-21 to round-trip properly
 *
 * Features / attributes:
 * - provides strongly typed node classes and lists / dictionaries
 * - provides easy access to class members / array items / data values
 * - the parser ignores data types. Each value is a string.
 * - only double quotes (") are used for quoting strings.
 * - values and names are not restricted to quoted strings. They simply add up and are trimmed.
 * - There are only 3 types: arrays(JSONArray), objects(JSONClass) and values(JSONData)
 * - provides "casting" properties to easily convert to / from those types:
 *   int / float / double / bool
 * - provides a common interface for each node so no explicit casting is required.
 * - the parser tries to avoid errors, but if malformed JSON is parsed the result is undefined
 *
 *
 * 2012-12-17 Update:
 * - Added internal JSONLazyCreator class which simplifies the construction of a JSON tree
 *   Now you can simply reference any item that doesn't exist yet and it will return a JSONLazyCreator
 *   The class determines the required type by its further use, creates the type and removes itself.
 * - Added binary serialization / deserialization.
 * - Added support for BZip2 zipped binary format. Requires the SharpZipLib ( http://www.icsharpcode.net/opensource/sharpziplib/ )
 *   The usage of the SharpZipLib library can be disabled by removing or commenting out the USE_SharpZipLib define at the top
 * - The serializer uses different types when it comes to store the values. Since my data values
 *   are all of type string, the serializer will "try" which format fits best. The order is: int, float, double, bool, string.
 *   It's not the most efficient way but for a moderate amount of data it should work on all platforms.
 *
 * * * * */
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;

namespace SimpleJSON
{
    // Type tags written into the binary serialization stream.
    public enum JSONBinaryTag
    {
        Array = 1,
        Class = 2,
        Value = 3,
        IntValue = 4,
        DoubleValue = 5,
        BoolValue = 6,
        FloatValue = 7,
    }

    /// <summary>
    /// Abstract base for all JSON tree nodes (arrays, objects, leaf values).
    /// Provides no-op default implementations that subclasses override, plus
    /// text parsing, binary (de)serialization, and typed-value conversions.
    /// </summary>
    public abstract class JSONNode
    {
        #region common interface

        public virtual void Add(string aKey, JSONNode aItem)
        {
        }

        public virtual JSONNode this[int aIndex]
        {
            get { return null; }
            set { }
        }

        public virtual JSONNode this[string aKey]
        {
            get { return null; }
            set { }
        }

        public virtual string Value
        {
            get { return ""; }
            set { }
        }

        public virtual int Count
        {
            get { return 0; }
        }

        public virtual void Add(JSONNode aItem)
        {
            Add("", aItem);
        }

        public virtual JSONNode Remove(string aKey)
        {
            return null;
        }

        public virtual JSONNode Remove(int aIndex)
        {
            return null;
        }

        public virtual JSONNode Remove(JSONNode aNode)
        {
            return aNode;
        }

        public virtual IEnumerable<JSONNode> Children
        {
            get { yield break; }
        }

        // Depth-first enumeration of all descendants.
        public IEnumerable<JSONNode> DeepChildren
        {
            get
            {
                foreach (var C in Children)
                    foreach (var D in C.DeepChildren)
                        yield return D;
            }
        }

        public override string ToString()
        {
            return "JSONNode";
        }

        public virtual string ToString(string aPrefix)
        {
            return "JSONNode";
        }

        public abstract string ToJSON(int prefix);

        #endregion common interface

        #region typecasting properties

        public virtual JSONBinaryTag Tag { get; set; }

        // Each AsXxx getter parses Value leniently (0 / 0.0 / false on failure);
        // each setter stores the string form and records the type in Tag.
        public virtual int AsInt
        {
            get
            {
                int v = 0;
                if (int.TryParse(Value, out v))
                    return v;
                return 0;
            }
            set
            {
                Value = value.ToString();
                Tag = JSONBinaryTag.IntValue;
            }
        }

        public virtual float AsFloat
        {
            get
            {
                float v = 0.0f;
                if (float.TryParse(Value, out v))
                    return v;
                return 0.0f;
            }
            set
            {
                Value = value.ToString();
                Tag = JSONBinaryTag.FloatValue;
            }
        }

        public virtual double AsDouble
        {
            get
            {
                double v = 0.0;
                if (double.TryParse(Value, out v))
                    return v;
                return 0.0;
            }
            set
            {
                Value = value.ToString();
                Tag = JSONBinaryTag.DoubleValue;
            }
        }

        public virtual bool AsBool
        {
            get
            {
                bool v = false;
                if (bool.TryParse(Value, out v))
                    return v;
                // non-boolean text: treat any non-empty value as true
                return !string.IsNullOrEmpty(Value);
            }
            set
            {
                Value = (value) ? "true" : "false";
                Tag = JSONBinaryTag.BoolValue;
            }
        }

        public virtual JSONArray AsArray
        {
            get { return this as JSONArray; }
        }

        public virtual JSONClass AsObject
        {
            get { return this as JSONClass; }
        }

        #endregion typecasting properties

        #region operators

        public static implicit operator JSONNode(string s)
        {
            return new JSONData(s);
        }

        public static implicit operator string(JSONNode d)
        {
            return (d == null) ? null : d.Value;
        }

        // A JSONLazyCreator placeholder compares equal to null so that
        // "node["missing"] == null" behaves as expected.
        public static bool operator ==(JSONNode a, object b)
        {
            if (b == null && a is JSONLazyCreator)
                return true;
            return System.Object.ReferenceEquals(a, b);
        }

        public static bool operator !=(JSONNode a, object b)
        {
            return !(a == b);
        }

        public override bool Equals(object obj)
        {
            return System.Object.ReferenceEquals(this, obj);
        }

        public override int GetHashCode()
        {
            return base.GetHashCode();
        }

        #endregion operators

        // Escapes control/quote characters for emission inside a JSON string literal.
        internal static string Escape(string aText)
        {
            string result = "";
            foreach (char c in aText)
            {
                switch (c)
                {
                    case '\\':
                        result += "\\\\";
                        break;
                    case '\"':
                        result += "\\\"";
                        break;
                    case '\n':
                        result += "\\n";
                        break;
                    case '\r':
                        result += "\\r";
                        break;
                    case '\t':
                        result += "\\t";
                        break;
                    case '\b':
                        result += "\\b";
                        break;
                    case '\f':
                        result += "\\f";
                        break;
                    default:
                        result += c;
                        break;
                }
            }
            return result;
        }

        // Converts an unquoted token to a typed JSONData (int, then double, then bool).
        // NOTE(review): any other unquoted token (e.g. "null") throws NotImplementedException.
        private static JSONData Numberize(string token)
        {
            bool flag = false;
            int integer = 0;
            double real = 0;

            if (int.TryParse(token, out integer))
            {
                return new JSONData(integer);
            }

            if (double.TryParse(token, out real))
            {
                return new JSONData(real);
            }

            if (bool.TryParse(token, out flag))
            {
                return new JSONData(flag);
            }

            throw new NotImplementedException(token);
        }

        // Adds a parsed token to the current container, as a string when it was quoted
        // and as a typed value otherwise.
        private static void AddElement(JSONNode ctx, string token, string tokenName, bool tokenIsString)
        {
            if (tokenIsString)
            {
                if (ctx is JSONArray)
                    ctx.Add(token);
                else
                    ctx.Add(tokenName, token); // assume dictionary/object
            }
            else
            {
                JSONData number = Numberize(token);
                if (ctx is JSONArray)
                    ctx.Add(number);
                else
                    ctx.Add(tokenName, number);
            }
        }

        /// <summary>
        /// Parses a JSON string into a node tree using a single-pass character
        /// state machine (a stack of open containers plus the current token).
        /// Returns the last container closed; throws on unbalanced brackets or quotes.
        /// </summary>
        public static JSONNode Parse(string aJSON)
        {
            Stack<JSONNode> stack = new Stack<JSONNode>();
            JSONNode ctx = null;
            int i = 0;
            string Token = "";
            string TokenName = "";
            bool QuoteMode = false;
            bool TokenIsString = false;
            while (i < aJSON.Length)
            {
                switch (aJSON[i])
                {
                    case '{':
                        if (QuoteMode)
                        {
                            Token += aJSON[i];
                            break;
                        }
                        stack.Push(new JSONClass());
                        if (ctx != null)
                        {
                            TokenName = TokenName.Trim();
                            if (ctx is JSONArray)
                                ctx.Add(stack.Peek());
                            else if (TokenName != "")
                                ctx.Add(TokenName, stack.Peek());
                        }
                        TokenName = "";
                        Token = "";
                        ctx = stack.Peek();
                        break;

                    case '[':
                        if (QuoteMode)
                        {
                            Token += aJSON[i];
                            break;
                        }
                        stack.Push(new JSONArray());
                        if (ctx != null)
                        {
                            TokenName = TokenName.Trim();
                            if (ctx is JSONArray)
                                ctx.Add(stack.Peek());
                            else if (TokenName != "")
                                ctx.Add(TokenName, stack.Peek());
                        }
                        TokenName = "";
                        Token = "";
                        ctx = stack.Peek();
                        break;

                    case '}':
                    case ']':
                        if (QuoteMode)
                        {
                            Token += aJSON[i];
                            break;
                        }
                        if (stack.Count == 0)
                            throw new Exception("JSON Parse: Too many closing brackets");

                        stack.Pop();
                        if (Token != "")
                        {
                            TokenName = TokenName.Trim();
                            /*
                            if (ctx is JSONArray)
                                ctx.Add (Token);
                            else if (TokenName != "")
                                ctx.Add (TokenName, Token);
                            */
                            AddElement(ctx, Token, TokenName, TokenIsString);
                            TokenIsString = false;
                        }
                        TokenName = "";
                        Token = "";
                        if (stack.Count > 0)
                            ctx = stack.Peek();
                        break;

                    case ':':
                        if (QuoteMode)
                        {
                            Token += aJSON[i];
                            break;
                        }
                        TokenName = Token;
                        Token = "";
                        TokenIsString = false;
                        break;

                    case '"':
                        QuoteMode ^= true;
                        // entering quote mode marks the token as a string; leaving it keeps the flag
                        TokenIsString = QuoteMode == true ? true : TokenIsString;
                        break;

                    case ',':
                        if (QuoteMode)
                        {
                            Token += aJSON[i];
                            break;
                        }
                        if (Token != "")
                        {
                            /*
                            if (ctx is JSONArray) {
                                ctx.Add (Token);
                            } else if (TokenName != "") {
                                ctx.Add (TokenName, Token);
                            }
                            */
                            AddElement(ctx, Token, TokenName, TokenIsString);
                            TokenIsString = false;
                        }
                        TokenName = "";
                        Token = "";
                        TokenIsString = false;
                        break;

                    case '\r':
                    case '\n':
                        break;

                    case ' ':
                    case '\t':
                        if (QuoteMode)
                            Token += aJSON[i];
                        break;

                    case '\\':
                        ++i;
                        if (QuoteMode)
                        {
                            char C = aJSON[i];
                            switch (C)
                            {
                                case 't':
                                    Token += '\t';
                                    break;
                                case 'r':
                                    Token += '\r';
                                    break;
                                case 'n':
                                    Token += '\n';
                                    break;
                                case 'b':
                                    Token += '\b';
                                    break;
                                case 'f':
                                    Token += '\f';
                                    break;
                                case 'u':
                                {
                                    // \uXXXX escape: consume the next four hex digits
                                    string s = aJSON.Substring(i + 1, 4);
                                    Token += (char)int.Parse(s, System.Globalization.NumberStyles.AllowHexSpecifier);
                                    i += 4;
                                    break;
                                }
                                default:
                                    Token += C;
                                    break;
                            }
                        }
                        break;

                    default:
                        Token += aJSON[i];
                        break;
                }
                ++i;
            }
            if (QuoteMode)
            {
                throw new Exception("JSON Parse: Quotation marks seems to be messed up.");
            }
            return ctx;
        }

        public virtual void Serialize(System.IO.BinaryWriter aWriter)
        {
        }

        public void SaveToStream(System.IO.Stream aData)
        {
            var W = new System.IO.BinaryWriter(aData);
            Serialize(W);
        }

#if USE_SharpZipLib
        public void SaveToCompressedStream(System.IO.Stream aData)
        {
            using (var gzipOut = new ICSharpCode.SharpZipLib.BZip2.BZip2OutputStream(aData))
            {
                gzipOut.IsStreamOwner = false;
                SaveToStream(gzipOut);
                gzipOut.Close();
            }
        }

        public void SaveToCompressedFile(string aFileName)
        {
#if USE_FileIO
            System.IO.Directory.CreateDirectory((new System.IO.FileInfo(aFileName)).Directory.FullName);
            using (var F = System.IO.File.OpenWrite(aFileName))
            {
                SaveToCompressedStream(F);
            }
#else
            throw new Exception("Can't use File IO stuff in webplayer");
#endif
        }

        public string SaveToCompressedBase64()
        {
            using (var stream = new System.IO.MemoryStream())
            {
                SaveToCompressedStream(stream);
                stream.Position = 0;
                return System.Convert.ToBase64String(stream.ToArray());
            }
        }
#else
        // Stubs used when SharpZipLib support is compiled out; they always throw.
        public void SaveToCompressedStream(System.IO.Stream aData)
        {
            throw new Exception("Can't use compressed functions. You need include the SharpZipLib and uncomment the define at the top of SimpleJSON");
        }

        public void SaveToCompressedFile(string aFileName)
        {
            throw new Exception("Can't use compressed functions. You need include the SharpZipLib and uncomment the define at the top of SimpleJSON");
        }

        public string SaveToCompressedBase64()
        {
            throw new Exception("Can't use compressed functions. You need include the SharpZipLib and uncomment the define at the top of SimpleJSON");
        }
#endif

        public void SaveToFile(string aFileName)
        {
#if USE_FileIO
            System.IO.Directory.CreateDirectory((new System.IO.FileInfo(aFileName)).Directory.FullName);
            using (var F = System.IO.File.OpenWrite(aFileName))
            {
                SaveToStream(F);
            }
#else
            throw new Exception ("Can't use File IO stuff in webplayer");
#endif
        }

        public string SaveToBase64()
        {
            using (var stream = new System.IO.MemoryStream())
            {
                SaveToStream(stream);
                stream.Position = 0;
                return System.Convert.ToBase64String(stream.ToArray());
            }
        }

        /// <summary>
        /// Reads one node from the binary stream: a type tag byte, then (for
        /// containers) a count followed by that many recursively-read children,
        /// or (for values) the typed payload.
        /// </summary>
        public static JSONNode Deserialize(System.IO.BinaryReader aReader)
        {
            JSONBinaryTag type = (JSONBinaryTag)aReader.ReadByte();
            switch (type)
            {
                case JSONBinaryTag.Array:
                {
                    int count = aReader.ReadInt32();
                    JSONArray tmp = new JSONArray();
                    for (int i = 0; i < count; i++)
                        tmp.Add(Deserialize(aReader));
                    return tmp;
                }
                case JSONBinaryTag.Class:
                {
                    int count = aReader.ReadInt32();
                    JSONClass tmp = new JSONClass();
                    for (int i = 0; i < count; i++)
                    {
                        string key = aReader.ReadString();
                        var val = Deserialize(aReader);
                        tmp.Add(key, val);
                    }
                    return tmp;
                }
                case JSONBinaryTag.Value:
                {
                    return new JSONData(aReader.ReadString());
                }
                case JSONBinaryTag.IntValue:
                {
                    return new JSONData(aReader.ReadInt32());
                }
                case JSONBinaryTag.DoubleValue:
                {
                    return new JSONData(aReader.ReadDouble());
                }
                case JSONBinaryTag.BoolValue:
                {
                    return new JSONData(aReader.ReadBoolean());
                }
                case JSONBinaryTag.FloatValue:
                {
                    return new JSONData(aReader.ReadSingle());
                }
                default:
                {
throw new Exception("Error deserializing JSON. Unknown tag: " + type); } } } #if USE_SharpZipLib public static JSONNode LoadFromCompressedStream(System.IO.Stream aData) { var zin = new ICSharpCode.SharpZipLib.BZip2.BZip2InputStream(aData); return LoadFromStream(zin); } public static JSONNode LoadFromCompressedFile(string aFileName) { #if USE_FileIO using(var F = System.IO.File.OpenRead(aFileName)) { return LoadFromCompressedStream(F); } #else throw new Exception("Can't use File IO stuff in webplayer"); #endif } public static JSONNode LoadFromCompressedBase64(string aBase64) { var tmp = System.Convert.FromBase64String(aBase64); var stream = new System.IO.MemoryStream(tmp); stream.Position = 0; return LoadFromCompressedStream(stream); } #else public static JSONNode LoadFromCompressedFile(string aFileName) { throw new Exception("Can't use compressed functions. You need include the SharpZipLib and uncomment the define at the top of SimpleJSON"); } public static JSONNode LoadFromCompressedStream(System.IO.Stream aData) { throw new Exception("Can't use compressed functions. You need include the SharpZipLib and uncomment the define at the top of SimpleJSON"); } public static JSONNode LoadFromCompressedBase64(string aBase64) { throw new Exception("Can't use compressed functions. 
You need include the SharpZipLib and uncomment the define at the top of SimpleJSON"); } #endif public static JSONNode LoadFromStream(System.IO.Stream aData) { using (var R = new System.IO.BinaryReader(aData)) { return Deserialize(R); } } public static JSONNode LoadFromFile(string aFileName) { #if USE_FileIO using (var F = System.IO.File.OpenRead(aFileName)) { return LoadFromStream(F); } #else throw new Exception ("Can't use File IO stuff in webplayer"); #endif } public static JSONNode LoadFromBase64(string aBase64) { var tmp = System.Convert.FromBase64String(aBase64); var stream = new System.IO.MemoryStream(tmp); stream.Position = 0; return LoadFromStream(stream); } } // End of JSONNode public class JSONArray : JSONNode, IEnumerable { private List<JSONNode> m_List = new List<JSONNode>(); public override JSONNode this[int aIndex] { get { if (aIndex < 0 || aIndex >= m_List.Count) return new JSONLazyCreator(this); return m_List[aIndex]; } set { if (aIndex < 0 || aIndex >= m_List.Count) m_List.Add(value); else m_List[aIndex] = value; } } public override JSONNode this[string aKey] { get { return new JSONLazyCreator(this); } set { m_List.Add(value); } } public override int Count { get { return m_List.Count; } } public override void Add(string aKey, JSONNode aItem) { m_List.Add(aItem); } public override JSONNode Remove(int aIndex) { if (aIndex < 0 || aIndex >= m_List.Count) return null; JSONNode tmp = m_List[aIndex]; m_List.RemoveAt(aIndex); return tmp; } public override JSONNode Remove(JSONNode aNode) { m_List.Remove(aNode); return aNode; } public override IEnumerable<JSONNode> Children { get { foreach (JSONNode N in m_List) yield return N; } } public IEnumerator GetEnumerator() { foreach (JSONNode N in m_List) yield return N; } public override string ToString() { string result = "[ "; foreach (JSONNode N in m_List) { if (result.Length > 2) result += ", "; result += N.ToString(); } result += " ]"; return result; } public override string ToString(string aPrefix) { string 
result = "[ "; foreach (JSONNode N in m_List) { if (result.Length > 3) result += ", "; result += "\n" + aPrefix + " "; result += N.ToString(aPrefix + " "); } result += "\n" + aPrefix + "]"; return result; } public override string ToJSON(int prefix) { string s = new string(' ', (prefix + 1) * 2); string ret = "[ "; foreach (JSONNode n in m_List) { if (ret.Length > 3) ret += ", "; ret += "\n" + s; ret += n.ToJSON(prefix + 1); } ret += "\n" + s + "]"; return ret; } public override void Serialize(System.IO.BinaryWriter aWriter) { aWriter.Write((byte) JSONBinaryTag.Array); aWriter.Write(m_List.Count); for (int i = 0; i < m_List.Count; i++) { m_List[i].Serialize(aWriter); } } } // End of JSONArray public class JSONClass : JSONNode, IEnumerable { private Dictionary<string, JSONNode> m_Dict = new Dictionary<string, JSONNode>(); public Dictionary<string, JSONNode> Dict { get { return m_Dict; } } public override JSONNode this[string aKey] { get { if (m_Dict.ContainsKey(aKey)) return m_Dict[aKey]; else return new JSONLazyCreator(this, aKey); } set { if (m_Dict.ContainsKey(aKey)) m_Dict[aKey] = value; else m_Dict.Add(aKey, value); } } public override JSONNode this[int aIndex] { get { if (aIndex < 0 || aIndex >= m_Dict.Count) return null; return m_Dict.ElementAt(aIndex).Value; } set { if (aIndex < 0 || aIndex >= m_Dict.Count) return; string key = m_Dict.ElementAt(aIndex).Key; m_Dict[key] = value; } } public override int Count { get { return m_Dict.Count; } } public override void Add(string aKey, JSONNode aItem) { if (!string.IsNullOrEmpty(aKey)) { if (m_Dict.ContainsKey(aKey)) m_Dict[aKey] = aItem; else m_Dict.Add(aKey, aItem); } else m_Dict.Add(Guid.NewGuid().ToString(), aItem); } public override JSONNode Remove(string aKey) { if (!m_Dict.ContainsKey(aKey)) return null; JSONNode tmp = m_Dict[aKey]; m_Dict.Remove(aKey); return tmp; } public override JSONNode Remove(int aIndex) { if (aIndex < 0 || aIndex >= m_Dict.Count) return null; var item = m_Dict.ElementAt(aIndex); 
m_Dict.Remove(item.Key); return item.Value; } public override JSONNode Remove(JSONNode aNode) { try { var item = m_Dict.Where(k => k.Value == aNode).First(); m_Dict.Remove(item.Key); return aNode; } catch { return null; } } public override IEnumerable<JSONNode> Children { get { foreach (KeyValuePair<string, JSONNode> N in m_Dict) yield return N.Value; } } public IEnumerator GetEnumerator() { foreach (KeyValuePair<string, JSONNode> N in m_Dict) yield return N; } public override string ToString() { string result = "{"; foreach (KeyValuePair<string, JSONNode> N in m_Dict) { if (result.Length > 2) result += ", "; result += "\"" + Escape(N.Key) + "\":" + N.Value.ToString(); } result += "}"; return result; } public override string ToString(string aPrefix) { string result = "{ "; foreach (KeyValuePair<string, JSONNode> N in m_Dict) { if (result.Length > 3) result += ", "; result += "\n" + aPrefix + " "; result += "\"" + Escape(N.Key) + "\" : " + N.Value.ToString(aPrefix + " "); } result += "\n" + aPrefix + "}"; return result; } public override string ToJSON(int prefix) { string s = new string(' ', (prefix + 1) * 2); string ret = "{ "; foreach (KeyValuePair<string, JSONNode> n in m_Dict) { if (ret.Length > 3) ret += ", "; ret += "\n" + s; ret += string.Format("\"{0}\": {1}", n.Key, n.Value.ToJSON(prefix + 1)); } ret += "\n" + s + "}"; return ret; } public override void Serialize(System.IO.BinaryWriter aWriter) { aWriter.Write((byte) JSONBinaryTag.Class); aWriter.Write(m_Dict.Count); foreach (string K in m_Dict.Keys) { aWriter.Write(K); m_Dict[K].Serialize(aWriter); } } } // End of JSONClass public class JSONData : JSONNode { private string m_Data; public override string Value { get { return m_Data; } set { m_Data = value; Tag = JSONBinaryTag.Value; } } public JSONData(string aData) { m_Data = aData; Tag = JSONBinaryTag.Value; } public JSONData(float aData) { AsFloat = aData; Tag = JSONBinaryTag.FloatValue; } public JSONData(double aData) { AsDouble = aData; Tag = 
JSONBinaryTag.DoubleValue; } public JSONData(bool aData) { AsBool = aData; Tag = JSONBinaryTag.BoolValue; } public JSONData(int aData) { AsInt = aData; Tag = JSONBinaryTag.IntValue; } public override string ToString() { return "\"" + Escape(m_Data) + "\""; } public override string ToString(string aPrefix) { return "\"" + Escape(m_Data) + "\""; } public override string ToJSON(int prefix) { switch (Tag) { case JSONBinaryTag.DoubleValue: case JSONBinaryTag.FloatValue: case JSONBinaryTag.IntValue: case JSONBinaryTag.BoolValue: return m_Data; case JSONBinaryTag.Value: return string.Format("\"{0}\"", Escape(m_Data)); default: throw new NotSupportedException("This shouldn't be here: " + Tag.ToString()); } } public override void Serialize(System.IO.BinaryWriter aWriter) { var tmp = new JSONData(""); tmp.AsInt = AsInt; if (tmp.m_Data == this.m_Data) { aWriter.Write((byte) JSONBinaryTag.IntValue); aWriter.Write(AsInt); return; } tmp.AsFloat = AsFloat; if (tmp.m_Data == this.m_Data) { aWriter.Write((byte) JSONBinaryTag.FloatValue); aWriter.Write(AsFloat); return; } tmp.AsDouble = AsDouble; if (tmp.m_Data == this.m_Data) { aWriter.Write((byte) JSONBinaryTag.DoubleValue); aWriter.Write(AsDouble); return; } tmp.AsBool = AsBool; if (tmp.m_Data == this.m_Data) { aWriter.Write((byte) JSONBinaryTag.BoolValue); aWriter.Write(AsBool); return; } aWriter.Write((byte) JSONBinaryTag.Value); aWriter.Write(m_Data); } } // End of JSONData internal class JSONLazyCreator : JSONNode { private JSONNode m_Node = null; private string m_Key = null; public JSONLazyCreator(JSONNode aNode) { m_Node = aNode; m_Key = null; } public JSONLazyCreator(JSONNode aNode, string aKey) { m_Node = aNode; m_Key = aKey; } private void Set(JSONNode aVal) { if (m_Key == null) { m_Node.Add(aVal); } else { m_Node.Add(m_Key, aVal); } m_Node = null; // Be GC friendly. 
} public override JSONNode this[int aIndex] { get { return new JSONLazyCreator(this); } set { var tmp = new JSONArray(); tmp.Add(value); Set(tmp); } } public override JSONNode this[string aKey] { get { return new JSONLazyCreator(this, aKey); } set { var tmp = new JSONClass(); tmp.Add(aKey, value); Set(tmp); } } public override void Add(JSONNode aItem) { var tmp = new JSONArray(); tmp.Add(aItem); Set(tmp); } public override void Add(string aKey, JSONNode aItem) { var tmp = new JSONClass(); tmp.Add(aKey, aItem); Set(tmp); } public static bool operator ==(JSONLazyCreator a, object b) { if (b == null) return true; return System.Object.ReferenceEquals(a, b); } public static bool operator !=(JSONLazyCreator a, object b) { return !(a == b); } public override bool Equals(object obj) { if (obj == null) return true; return System.Object.ReferenceEquals(this, obj); } public override int GetHashCode() { return base.GetHashCode(); } public override string ToString() { return ""; } public override string ToString(string aPrefix) { return ""; } public override string ToJSON(int prefix) { return ""; } public override int AsInt { get { JSONData tmp = new JSONData(0); Set(tmp); return 0; } set { JSONData tmp = new JSONData(value); Set(tmp); } } public override float AsFloat { get { JSONData tmp = new JSONData(0.0f); Set(tmp); return 0.0f; } set { JSONData tmp = new JSONData(value); Set(tmp); } } public override double AsDouble { get { JSONData tmp = new JSONData(0.0); Set(tmp); return 0.0; } set { JSONData tmp = new JSONData(value); Set(tmp); } } public override bool AsBool { get { JSONData tmp = new JSONData(false); Set(tmp); return false; } set { JSONData tmp = new JSONData(value); Set(tmp); } } public override JSONArray AsArray { get { JSONArray tmp = new JSONArray(); Set(tmp); return tmp; } } public override JSONClass AsObject { get { JSONClass tmp = new JSONClass(); Set(tmp); return tmp; } } } // End of JSONLazyCreator public static class JSON { public static JSONNode 
Parse(string aJSON) { return JSONNode.Parse(aJSON); } } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Insights { using Azure; using Rest; using Rest.Azure; using Rest.Azure.OData; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// TenantActivityLogsOperations operations. /// </summary> internal partial class TenantActivityLogsOperations : IServiceOperations<MonitorClient>, ITenantActivityLogsOperations { /// <summary> /// Initializes a new instance of the TenantActivityLogsOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal TenantActivityLogsOperations(MonitorClient client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the MonitorClient /// </summary> public MonitorClient Client { get; private set; } /// <summary> /// get the Activity Logs for the Tenant. Everything that is applicable to the /// API to get the Activity Log for the subscription is applicable to this API /// (the parameters, $filter, etc.). One thing to point out here is that this /// API does *not* retrieve the logs at the individual subscription of the /// tenant but only surfaces the logs that were generated at the tenant level. /// The **$filter** is very restricted and allows only the following patterns. 
/// - List events for a resource group: $filter=eventTimestamp ge '&lt;Start /// Time&gt;' and eventTimestamp le '&lt;End Time&gt;' and eventChannels eq /// 'Admin, Operation' and resourceGroupName eq '&lt;ResourceGroupName&gt;'. - /// List events for resource: $filter=eventTimestamp ge '&lt;Start Time&gt;' /// and eventTimestamp le '&lt;End Time&gt;' and eventChannels eq 'Admin, /// Operation' and resourceUri eq '&lt;ResourceURI&gt;'. - List events for a /// subscription: $filter=eventTimestamp ge '&lt;Start Time&gt;' and /// eventTimestamp le '&lt;End Time&gt;' and eventChannels eq 'Admin, /// Operation'. - List evetns for a resource provider: $filter=eventTimestamp /// ge '&lt;Start Time&gt;' and eventTimestamp le '&lt;End Time&gt;' and /// eventChannels eq 'Admin, Operation' and resourceProvider eq /// '&lt;ResourceProviderName&gt;'. - List events for a correlation Id: /// api-version=2014-04-01&amp;$filter=eventTimestamp ge /// '2014-07-16T04:36:37.6407898Z' and eventTimestamp le /// '2014-07-20T04:36:37.6407898Z' and eventChannels eq 'Admin, Operation' and /// correlationId eq '&lt;CorrelationID&gt;'. No other syntax is allowed. /// </summary> /// <param name='odataQuery'> /// OData parameters to apply to the operation. /// </param> /// <param name='select'> /// Used to fetch events with only the given properties. The filter is a comma /// separated list of property names to be returned. Possible values are: /// authorization, channels, claims, correlationId, description, eventDataId, /// eventName, eventTimestamp, httpRequest, level, operationId, operationName, /// properties, resourceGroupName, resourceProviderName, resourceId, status, /// submissionTimestamp, subStatus, subscriptionId /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<EventData>>> ListWithHttpMessagesAsync(ODataQuery<EventData> odataQuery = default(ODataQuery<EventData>), string select = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { string apiVersion = "2015-04-01"; // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("odataQuery", odataQuery); tracingParameters.Add("apiVersion", apiVersion); tracingParameters.Add("select", select); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "providers/microsoft.insights/eventtypes/management/values").ToString(); List<string> _queryParameters = new List<string>(); if (odataQuery != null) { var _odataFilter = odataQuery.ToString(); if (!string.IsNullOrEmpty(_odataFilter)) { _queryParameters.Add(_odataFilter); } } if (apiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion))); } if (select != null) { _queryParameters.Add(string.Format("$select={0}", System.Uri.EscapeDataString(select))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = 
Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<EventData>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<EventData>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// get the Activity Logs for the Tenant. Everything that is applicable to the /// API to get the Activity Log for the subscription is applicable to this API /// (the parameters, $filter, etc.). 
One thing to point out here is that this /// API does *not* retrieve the logs at the individual subscription of the /// tenant but only surfaces the logs that were generated at the tenant level. /// The **$filter** is very restricted and allows only the following patterns. /// - List events for a resource group: $filter=eventTimestamp ge '&lt;Start /// Time&gt;' and eventTimestamp le '&lt;End Time&gt;' and eventChannels eq /// 'Admin, Operation' and resourceGroupName eq '&lt;ResourceGroupName&gt;'. - /// List events for resource: $filter=eventTimestamp ge '&lt;Start Time&gt;' /// and eventTimestamp le '&lt;End Time&gt;' and eventChannels eq 'Admin, /// Operation' and resourceUri eq '&lt;ResourceURI&gt;'. - List events for a /// subscription: $filter=eventTimestamp ge '&lt;Start Time&gt;' and /// eventTimestamp le '&lt;End Time&gt;' and eventChannels eq 'Admin, /// Operation'. - List evetns for a resource provider: $filter=eventTimestamp /// ge '&lt;Start Time&gt;' and eventTimestamp le '&lt;End Time&gt;' and /// eventChannels eq 'Admin, Operation' and resourceProvider eq /// '&lt;ResourceProviderName&gt;'. - List events for a correlation Id: /// api-version=2014-04-01&amp;$filter=eventTimestamp ge /// '2014-07-16T04:36:37.6407898Z' and eventTimestamp le /// '2014-07-20T04:36:37.6407898Z' and eventChannels eq 'Admin, Operation' and /// correlationId eq '&lt;CorrelationID&gt;'. No other syntax is allowed. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<EventData>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (nextPageLink == null) { throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("nextPageLink", nextPageLink); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters); } // Construct URL string _url = "{nextLink}"; _url = _url.Replace("{nextLink}", nextPageLink); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = 
Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<EventData>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<EventData>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using System;
using System.Collections.Generic;
using System.Linq;
using Avro;
using Avro.IO;
using Avro.Generic;

namespace Avro.Specific
{
    /// <summary>
    /// Generic wrapper class for writing data from specific objects.
    /// </summary>
    /// <typeparam name="T">type name of specific object</typeparam>
    public class SpecificWriter<T> : GenericWriter<T>
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="SpecificWriter{T}"/> class.
        /// </summary>
        /// <param name="schema">Schema to use when writing.</param>
        public SpecificWriter(Schema schema) : base(new SpecificDefaultWriter(schema)) { }

        /// <summary>
        /// Initializes a new instance of the <see cref="SpecificWriter{T}"/> class using the
        /// provided <see cref="SpecificDefaultWriter"/>.
        /// </summary>
        /// <param name="writer">Default writer to use.</param>
        public SpecificWriter(SpecificDefaultWriter writer) : base(writer) { }
    }

    /// <summary>
    /// Class for writing data from any specific objects.
    /// </summary>
    public class SpecificDefaultWriter : DefaultWriter
    {
        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="schema">schema of the object to be written</param>
        public SpecificDefaultWriter(Schema schema) : base(schema) { }

        /// <summary>
        /// Serializes a record using the given RecordSchema. It uses the ISpecificRecord.Get
        /// accessor (by field position) to extract each field value from the given object.
        /// </summary>
        /// <param name="schema">The RecordSchema to use for serialization</param>
        /// <param name="value">The value to be serialized</param>
        /// <param name="encoder">The Encoder for serialization</param>
        /// <exception cref="AvroTypeException">Thrown when <paramref name="value"/> is not an ISpecificRecord.</exception>
        /// <exception cref="AvroException">Thrown when writing any individual field fails; the field name is included.</exception>
        protected override void WriteRecord(RecordSchema schema, object value, Encoder encoder)
        {
            var rec = value as ISpecificRecord;
            if (rec == null)
                throw new AvroTypeException("Record object is not derived from ISpecificRecord");

            foreach (Field field in schema)
            {
                try
                {
                    Write(field.Schema, rec.Get(field.Pos), encoder);
                }
                catch (Exception ex)
                {
                    // Wrap so the failing field is identifiable by the caller.
                    throw new AvroException(ex.Message + " in field " + field.Name, ex);
                }
            }
        }

        /// <summary>
        /// Validates that the record is a fixed record object and that the schema in the object is the
        /// same as the given writer schema. Writes the given fixed record into the given encoder.
        /// </summary>
        /// <param name="schema">writer schema</param>
        /// <param name="value">fixed object to write</param>
        /// <param name="encoder">encoder to write to</param>
        /// <exception cref="AvroTypeException">Thrown when <paramref name="value"/> is not a SpecificFixed.</exception>
        protected override void WriteFixed(FixedSchema schema, object value, Encoder encoder)
        {
            var fixedrec = value as SpecificFixed;
            if (fixedrec == null)
                throw new AvroTypeException("Fixed object is not derived from SpecificFixed");

            encoder.WriteFixed(fixedrec.Value);
        }

        /// <summary>
        /// Writes the given enum value into the given encoder, encoding it as the ordinal of
        /// the value's symbol name within the writer schema.
        /// </summary>
        /// <param name="schema">writer schema</param>
        /// <param name="value">enum value</param>
        /// <param name="encoder">encoder to write to</param>
        /// <exception cref="AvroTypeException">Thrown when <paramref name="value"/> is null.</exception>
        protected override void WriteEnum(EnumSchema schema, object value, Encoder encoder)
        {
            if (value == null)
                throw new AvroTypeException("value is null in SpecificDefaultWriter.WriteEnum");

            encoder.WriteEnum(schema.Ordinal(value.ToString()));
        }

        /// <summary>
        /// Serializes an array. The value must implement the non-generic IList; each element is
        /// written against the schema's item schema.
        /// </summary>
        /// <param name="schema">The ArraySchema for serialization</param>
        /// <param name="value">The value being serialized</param>
        /// <param name="encoder">The encoder for serialization</param>
        /// <exception cref="AvroTypeException">Thrown when <paramref name="value"/> is not an IList.</exception>
        protected override void WriteArray(ArraySchema schema, object value, Encoder encoder)
        {
            var arr = value as System.Collections.IList;
            if (arr == null)
                throw new AvroTypeException("Array does not implement non-generic IList");

            long l = arr.Count;
            encoder.WriteArrayStart();
            encoder.SetItemCount(l);
            for (int i = 0; i < l; i++)
            {
                encoder.StartItem();
                Write(schema.ItemSchema, arr[i], encoder);
            }
            encoder.WriteArrayEnd();
        }

        /// <summary>
        /// Writes the given map into the given encoder. Keys are written as strings and values
        /// against the schema's value schema.
        /// </summary>
        /// <param name="schema">writer schema</param>
        /// <param name="value">map to write</param>
        /// <param name="encoder">encoder to write to</param>
        /// <exception cref="AvroTypeException">Thrown when <paramref name="value"/> is not an IDictionary.</exception>
        protected override void WriteMap(MapSchema schema, object value, Encoder encoder)
        {
            var map = value as System.Collections.IDictionary;
            if (map == null)
                throw new AvroTypeException("Map does not implement non-generic IDictionary");

            // BUGFIX: previously called WriteArrayStart() here while closing with
            // WriteMapEnd(); Encoder pairs WriteMapStart with WriteMapEnd.
            encoder.WriteMapStart();
            encoder.SetItemCount(map.Count);
            foreach (System.Collections.DictionaryEntry de in map)
            {
                encoder.StartItem();
                encoder.WriteString(de.Key as string);
                Write(schema.ValueSchema, de.Value, encoder);
            }
            encoder.WriteMapEnd();
        }

        /// <summary>
        /// Resolves the given value against the given UnionSchema and serializes the object against
        /// the resolved schema member. The first branch whose schema Matches() the value is used.
        /// </summary>
        /// <param name="us">The UnionSchema to resolve against</param>
        /// <param name="value">The value to be serialized</param>
        /// <param name="encoder">The encoder for serialization</param>
        /// <exception cref="AvroException">Thrown when no union branch matches the value.</exception>
        protected override void WriteUnion(UnionSchema us, object value, Encoder encoder)
        {
            for (int i = 0; i < us.Count; i++)
            {
                if (Matches(us[i], value))
                {
                    encoder.WriteUnionIndex(i);
                    Write(us[i], value, encoder);
                    return;
                }
            }
            throw new AvroException("Cannot find a match for " + value.GetType() + " in " + us);
        }

        /// <inheritdoc/>
        protected override bool Matches(Schema sc, object obj)
        {
            // Only the null schema may match a null value.
            if (obj == null && sc.Tag != Avro.Schema.Type.Null) return false;

            switch (sc.Tag)
            {
                case Schema.Type.Null:
                    return obj == null;
                case Schema.Type.Boolean:
                    return obj is bool;
                case Schema.Type.Int:
                    return obj is int;
                case Schema.Type.Long:
                    return obj is long;
                case Schema.Type.Float:
                    return obj is float;
                case Schema.Type.Double:
                    return obj is double;
                case Schema.Type.Bytes:
                    return obj is byte[];
                case Schema.Type.String:
                    return obj is string;
                case Schema.Type.Error:
                case Schema.Type.Record:
                    // Match by full schema name, not just by CLR type.
                    return obj is ISpecificRecord &&
                           ((obj as ISpecificRecord).Schema as RecordSchema).SchemaName.Equals((sc as RecordSchema).SchemaName);
                case Schema.Type.Enumeration:
                    return obj.GetType().IsEnum && (sc as EnumSchema).Symbols.Contains(obj.ToString());
                case Schema.Type.Array:
                    return obj is System.Collections.IList;
                case Schema.Type.Map:
                    return obj is System.Collections.IDictionary;
                case Schema.Type.Union:
                    return false; // Union directly within another union not allowed!
                case Schema.Type.Fixed:
                    return obj is SpecificFixed &&
                           ((obj as SpecificFixed).Schema as FixedSchema).SchemaName.Equals((sc as FixedSchema).SchemaName);
                case Schema.Type.Logical:
                    return (sc as LogicalSchema).LogicalType.IsInstanceOfLogicalType(obj);
                default:
                    throw new AvroException("Unknown schema type: " + sc.Tag);
            }
        }
    }
}
using UnityEngine;

/// <summary>
/// Image effect that captures (an optionally cropped region of) the camera image into a
/// shared, downsampled, blurred RenderTexture exposed to shaders as _BlurBehindTex, with
/// its screen-space placement in _BlurBehindRect. UI shaders can sample it to render a
/// "blur behind" backdrop. The stored texture is shared across all BlurBehind instances.
/// </summary>
[AddComponentMenu("Image Effects/Blur Behind")]
[ExecuteInEditMode]
[RequireComponent(typeof(Camera))]
public class BlurBehind : MonoBehaviour
{
    // STATIC — state shared by every BlurBehind instance.

    // The blurred output sampled by UI shaders (_BlurBehindTex).
    static RenderTexture storedTexture = null;
    // Number of enabled BlurBehind components; storedTexture is torn down when it reaches 0.
    static int count = 0;
    // Normalized screen rect that storedTexture covers (_BlurBehindRect).
    static Rect storedRect = new Rect(0, 0, 1, 1);

    /// <summary>
    /// Adjusts _BlurBehindRect for the current camera's viewport rect so the stored
    /// texture lines up when the camera does not cover the full screen.
    /// Expected to be called per-camera (see OnPreRender); pair with ResetViewport.
    /// </summary>
    public static void SetViewport()
    {
        Rect cameraRect = Camera.current.rect;
        if (cameraRect != new Rect(0f, 0f, 1f, 1f))
        {
            // Size of the camera's render target in pixels (screen when no target texture).
            Vector2 cameraTargetSize;
            if (Camera.current.targetTexture == null)
            {
                cameraTargetSize = new Vector2(Screen.width, Screen.height);
            }
            else
            {
                cameraTargetSize = new Vector2(Camera.current.targetTexture.width, Camera.current.targetTexture.height);
            }
            // Snap the normalized camera rect to pixel boundaries (width/height are computed
            // from the far edge first, then x/y are snapped, then width/height re-derived).
            cameraRect.width = Mathf.Round(Mathf.Clamp01(cameraRect.width + cameraRect.x) * cameraTargetSize.x) / cameraTargetSize.x;
            cameraRect.height = Mathf.Round(Mathf.Clamp01(cameraRect.height + cameraRect.y) * cameraTargetSize.y) / cameraTargetSize.y;
            cameraRect.x = Mathf.Round(Mathf.Clamp01(cameraRect.x) * cameraTargetSize.x) / cameraTargetSize.x;
            cameraRect.y = Mathf.Round(Mathf.Clamp01(cameraRect.y) * cameraTargetSize.y) / cameraTargetSize.y;
            cameraRect.width -= cameraRect.x;
            cameraRect.height -= cameraRect.y;
            // NOTE(review): the y component uses "storedRect.y / height + cameraRect.y",
            // which is not symmetric with the x component's "(storedRect.x - cameraRect.x) / width".
            // Preserved as-is — confirm against the companion shader before changing.
            Shader.SetGlobalVector("_BlurBehindRect", new Vector4((storedRect.x - cameraRect.x) / cameraRect.width, storedRect.y / cameraRect.height + cameraRect.y, storedRect.width / cameraRect.width, storedRect.height / cameraRect.height));
        }
    }

    /// <summary>Restores _BlurBehindRect to the unadjusted stored rect.</summary>
    public static void ResetViewport()
    {
        Shader.SetGlobalVector("_BlurBehindRect", new Vector4(storedRect.x, storedRect.y, storedRect.width, storedRect.height));
    }

    // NON-STATIC

    // Shader implementing the blur passes (0: directional blur, 1: downsample, 2: crop).
    public Shader blurShader = null;
    Material blurMaterial = null;
    // Absolute: radius in (approximate) pixels; Relative: radius as a percentage of the larger source dimension.
    public enum Mode { Absolute = 0, Relative = 1 };
    public Mode mode = Mode.Relative;
    public float radius = 2.5f;
    // Standard/Smooth derive downsample+iterations from radius; Manual uses the fields directly.
    public enum Settings { Standard = 0, Smooth = 1, Manual = 2 };
    public Settings settings = Settings.Standard;
    public float downsample = 1;
    public int iterations = 1;
    // Normalized crop region of the source, plus a pixel-space offset applied to it.
    public Rect cropRect = new Rect(0f, 0f, 1f, 1f);
    public Rect pixelOffset = new Rect(0f, 0f, 0f, 0f);

    /// <summary>
    /// Clamps the public settings and, unless Manual, derives iterations and downsample
    /// from the requested radius (Standard uses a coarser per-iteration rate than Smooth).
    /// </summary>
    /// <param name="sourceSize">Larger dimension of the (cropped) source in pixels.</param>
    void CheckSettings(int sourceSize)
    {
        if (radius < 0f) { radius = 0f; }
        if (downsample < 1f) { downsample = 1f; }
        if (iterations < 0) { iterations = 0; }
        if (settings != Settings.Manual)
        {
            float iterationRate = settings == Settings.Standard ? 36f : 6f;
            if (mode == Mode.Absolute)
            {
                if (radius > 0f)
                {
                    if (radius < iterationRate) { iterations = 1; }
                    else { iterations = Mathf.FloorToInt(Mathf.Log(radius, iterationRate)) + 1; }
                    downsample = (radius / Mathf.Pow(3f, iterations));
                    if (downsample < 1f) { downsample = 1f; }
                }
                else
                {
                    downsample = 1f;
                    iterations = 0;
                }
            }
            else
            {
                if (radius > 0f)
                {
                    // Relative radius is a percentage of the source size.
                    float pixelRadius = radius / 100f * sourceSize;
                    if (pixelRadius < iterationRate) { iterations = 1; }
                    else { iterations = Mathf.FloorToInt(Mathf.Log(pixelRadius, iterationRate)) + 1; }
                    downsample = sourceSize / (pixelRadius / Mathf.Pow(3f, iterations));
                }
                else
                {
                    // In Relative mode "downsample" is a target size; infinity → minimum output.
                    downsample = float.PositiveInfinity;
                    iterations = 0;
                }
            }
        }
    }

    /// <summary>
    /// Ensures the shared output texture exists and matches the requested size/format,
    /// recreating it when it doesn't and discarding stale contents when it does.
    /// </summary>
    void CheckOutput(int rtW, int rtH, RenderTextureFormat format)
    {
        if (storedTexture == null)
        {
            CreateOutput(rtW, rtH, format);
        }
        else if (storedTexture.width != rtW || storedTexture.height != rtH || storedTexture.format != format)
        {
            storedTexture.Release();
            DestroyImmediate(storedTexture);
            CreateOutput(rtW, rtH, format);
        }
        else
        {
            storedTexture.DiscardContents();
        }
    }

    /// <summary>Lazily creates the blur material; returns false (and logs) when the shader is missing/unsupported.</summary>
    bool CheckResources()
    {
        if (blurMaterial == null)
        {
            if (blurShader != null)
            {
                if (blurShader.isSupported)
                {
                    blurMaterial = new Material(blurShader);
                    blurMaterial.hideFlags = HideFlags.DontSave;
                }
                else
                {
                    Debug.Log("Blur Behind UI: Shader not supported");
                    return false;
                }
            }
            else
            {
                Debug.Log("Blur Behind UI: Shader reference missing");
                return false;
            }
        }
        return true;
    }

    /// <summary>Returns false (and logs) when the platform lacks image effects or render textures.</summary>
    bool CheckSupport()
    {
        if (!SystemInfo.supportsImageEffects)
        {
            Debug.Log("Blur Behind UI: Image effects not supported");
            return false;
        }
        if (!SystemInfo.supportsRenderTextures)
        {
            Debug.Log("Blur Behind UI: Render textures not supported");
            return false;
        }
        return true;
    }

    /// <summary>Allocates the shared output texture and publishes it to shaders.</summary>
    void CreateOutput(int width, int height, RenderTextureFormat format)
    {
        storedTexture = new RenderTexture(width, height, 0, format);
        storedTexture.filterMode = FilterMode.Bilinear;
        storedTexture.hideFlags = HideFlags.DontSave;
        Shader.SetGlobalTexture("_BlurBehindTex", storedTexture);
        Shader.EnableKeyword("BLUR_BEHIND_SET");
    }

    /// <summary>
    /// Crops the source to cropRect (+pixelOffset), snapped to pixel boundaries, updating
    /// storedRect and _BlurBehindRect. Returns the source itself when no crop is needed;
    /// otherwise returns a temporary RenderTexture the caller must release.
    /// </summary>
    RenderTexture CropSource(RenderTexture source)
    {
        // Crop region in source pixels...
        Rect effectRect = new Rect(cropRect.x * source.width + pixelOffset.x, cropRect.y * source.height + pixelOffset.y, cropRect.width * source.width + pixelOffset.width, cropRect.height * source.height + pixelOffset.height);
        // ...snapped to pixels and converted back to normalized coordinates
        // (far edge first, then origin, then width/height re-derived).
        effectRect.width = Mathf.Clamp01(Mathf.Round(effectRect.width + effectRect.x) / source.width);
        effectRect.height = Mathf.Clamp01(Mathf.Round(effectRect.height + effectRect.y) / source.height);
        effectRect.x = Mathf.Clamp01(Mathf.Round(effectRect.x) / source.width);
        effectRect.y = Mathf.Clamp01(Mathf.Round(effectRect.y) / source.height);
        effectRect.width -= effectRect.x;
        effectRect.height -= effectRect.y;
        RenderTexture croppedSource;
        if (effectRect != new Rect(0f, 0f, 1f, 1f))
        {
            croppedSource = RenderTexture.GetTemporary(Mathf.RoundToInt(effectRect.width * source.width), Mathf.RoundToInt(effectRect.height * source.height), 0, source.format);
            blurMaterial.SetVector("_Parameter", new Vector4(effectRect.x, effectRect.y, effectRect.width, effectRect.height));
            Graphics.Blit(source, croppedSource, blurMaterial, 2); // pass 2: crop
            storedRect = effectRect;
        }
        else
        {
            croppedSource = source;
            storedRect = new Rect(0f, 0f, 1f, 1f);
        }
        // When the camera covers only part of the target, map storedRect into full-screen space.
        Rect cameraRect = Camera.current.rect;
        if (cameraRect != new Rect(0f, 0f, 1f, 1f))
        {
            Vector2 cameraTargetSize;
            if (Camera.current.targetTexture == null)
            {
                cameraTargetSize = new Vector2(Screen.width, Screen.height);
            }
            else
            {
                cameraTargetSize = new Vector2(Camera.current.targetTexture.width, Camera.current.targetTexture.height);
            }
            cameraRect.width = Mathf.Round(Mathf.Clamp01(cameraRect.width + cameraRect.x) * cameraTargetSize.x) / cameraTargetSize.x;
            cameraRect.height = Mathf.Round(Mathf.Clamp01(cameraRect.height + cameraRect.y) * cameraTargetSize.y) / cameraTargetSize.y;
            cameraRect.x = Mathf.Round(Mathf.Clamp01(cameraRect.x) * cameraTargetSize.x) / cameraTargetSize.x;
            cameraRect.y = Mathf.Round(Mathf.Clamp01(cameraRect.y) * cameraTargetSize.y) / cameraTargetSize.y;
            cameraRect.width -= cameraRect.x;
            cameraRect.height -= cameraRect.y;
            storedRect = new Rect(cameraRect.x + storedRect.x * cameraRect.width, cameraRect.y + storedRect.y * cameraRect.height, cameraRect.width * storedRect.width, cameraRect.height * storedRect.height);
        }
        Shader.SetGlobalVector("_BlurBehindRect", new Vector4(storedRect.x, storedRect.y, storedRect.width, storedRect.height));
        return croppedSource;
    }

    /// <summary>
    /// Downsamples source into dest. Large reductions are done in repeated 1/4 steps via
    /// shader pass 1 (a filtered downsample) through temporary textures; small reductions
    /// fall back to a plain bilinear Blit.
    /// </summary>
    void Downsample(RenderTexture source, RenderTexture dest, int width, int height)
    {
        // Number of halvings needed on the larger axis.
        int downsampleLeft = 0;
        if (source.width > source.height)
        {
            int testLength = source.width;
            while (testLength > width) { downsampleLeft++; testLength = testLength >> 1; }
        }
        else
        {
            int testLength = source.height;
            while (testLength > height) { downsampleLeft++; testLength = testLength >> 1; }
        }
        if (downsampleLeft > 1)
        {
            RenderTexture downsampleRT = source;
            // Each pass-1 blit covers two halvings (a 1/4 reduction).
            while (downsampleLeft > 2)
            {
                int rtWidth = downsampleRT.width >> 2;
                if (rtWidth < 1) { rtWidth = 1; }
                int rtHeight = downsampleRT.height >> 2;
                if (rtHeight < 1) { rtHeight = 1; }
                downsampleRT.filterMode = FilterMode.Bilinear;
                RenderTexture tempRT = RenderTexture.GetTemporary(rtWidth, rtHeight, 0, downsampleRT.format);
                Graphics.Blit(downsampleRT, tempRT, blurMaterial, 1);
                if (downsampleRT != source) { RenderTexture.ReleaseTemporary(downsampleRT); }
                downsampleRT = tempRT;
                downsampleLeft -= 2;
            }
            if (downsampleLeft > 1)
            {
                Graphics.Blit(downsampleRT, dest, blurMaterial, 1);
            }
            else
            {
                Graphics.Blit(downsampleRT, dest);
            }
            if (downsampleRT != source) { RenderTexture.ReleaseTemporary(downsampleRT); }
        }
        else
        {
            Graphics.Blit(source, dest);
        }
    }

    void OnDisable()
    {
        if (blurMaterial)
        {
#if UNITY_EDITOR
            DestroyImmediate(blurMaterial);
#else
            Destroy(blurMaterial);
#endif
            blurMaterial = null;
        }
        count--;
        // Last instance gone: tear down the shared texture and shader globals.
        if (count == 0)
        {
            if (storedTexture)
            {
                storedTexture.Release();
#if UNITY_EDITOR
                DestroyImmediate(storedTexture);
#else
                Destroy(storedTexture);
#endif
                storedTexture = null;
                Shader.SetGlobalTexture("_BlurBehindTex", null);
                Shader.DisableKeyword("BLUR_BEHIND_SET");
            }
        }
    }

    void OnEnable()
    {
        count++;
    }

    void OnPreRender()
    {
        SetViewport();
    }

    /// <summary>
    /// Captures, crops, downsamples, and iteratively blurs the camera image into the
    /// shared storedTexture, then passes the unmodified source through to destination.
    /// </summary>
    void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        if (!CheckSupport() || !CheckResources())
        {
            enabled = false;
            Graphics.Blit(source, destination);
            return;
        }
        RenderTexture croppedSource = CropSource(source);
        int sourceSize = croppedSource.width > croppedSource.height ? croppedSource.width : croppedSource.height;
        CheckSettings(sourceSize);
        int outputWidth;
        int outputHeight;
        OutputSetting(croppedSource, out outputWidth, out outputHeight);
        CheckOutput(outputWidth, outputHeight, croppedSource.format);
        Downsample(croppedSource, storedTexture, outputWidth, outputHeight);
        // BUGFIX: capture the format BEFORE releasing the temporary; the original read
        // croppedSource.format after ReleaseTemporary returned the texture to the pool.
        RenderTextureFormat workingFormat = croppedSource.format;
        // Release cropped if was used
        if (croppedSource != source)
        {
            RenderTexture.ReleaseTemporary(croppedSource);
        }
        // BLUR
        if (iterations > 0 && radius > 0f)
        {
            RenderTexture blurRT = RenderTexture.GetTemporary(outputWidth, outputHeight, 0, workingFormat);
            blurRT.filterMode = FilterMode.Bilinear;
            for (int i = 0; i < iterations; i++)
            {
                // Radius grows geometrically per iteration; rotate the blur axis each pass
                // to avoid directional banding.
                float iterationRadius = radius / 300f * Mathf.Pow(3f, i) / Mathf.Pow(3f, iterations - 1);
                if (mode == Mode.Absolute) { iterationRadius *= 100f / sourceSize; }
                float angle = i * 0.7853982f / iterations; // Iteration rotation (pi/4 spread)
                Vector2 vector = new Vector2(Mathf.Sin(angle), Mathf.Cos(angle)) * iterationRadius;
                Vector2 viewMultiplier = outputWidth > outputHeight ? new Vector2(1f, 1f / outputHeight * outputWidth) : new Vector2(1f / outputWidth * outputHeight, 1f);
                // VERTICAL
                Vector4 parameter = new Vector4(vector.x * viewMultiplier.x, vector.y * viewMultiplier.y, 0f, 0f);
                parameter.z = -parameter.x;
                parameter.w = -parameter.y;
                blurMaterial.SetVector("_Parameter", parameter);
                Graphics.Blit(storedTexture, blurRT, blurMaterial, 0);
                storedTexture.DiscardContents();
                // HORIZONTAL (perpendicular to the vertical pass)
                parameter = new Vector4(vector.y * viewMultiplier.x, -vector.x * viewMultiplier.y, 0f, 0f);
                parameter.z = -parameter.x;
                parameter.w = -parameter.y;
                blurMaterial.SetVector("_Parameter", parameter);
                Graphics.Blit(blurRT, storedTexture, blurMaterial, 0);
                blurRT.DiscardContents();
            }
            RenderTexture.ReleaseTemporary(blurRT);
        }
        // The visible camera image is untouched; only _BlurBehindTex was updated.
        Graphics.Blit(source, destination);
    }

    /// <summary>
    /// Computes the output texture size: Absolute divides the source by the downsample
    /// factor; Relative treats downsample as a target size for the larger axis. The
    /// result is clamped to [1, source size].
    /// </summary>
    void OutputSetting(RenderTexture source, out int width, out int height)
    {
        if (mode == Mode.Absolute)
        {
            width = Mathf.RoundToInt((float)source.width / downsample);
            height = Mathf.RoundToInt((float)source.height / downsample);
        }
        else
        {
            if (source.width > source.height)
            {
                if (source.width > downsample)
                {
                    width = Mathf.RoundToInt(downsample);
                    height = Mathf.RoundToInt(downsample / source.width * source.height);
                }
                else
                {
                    width = source.width;
                    height = source.height;
                }
            }
            else
            {
                if (source.height > downsample)
                {
                    width = Mathf.RoundToInt(downsample / source.height * source.width);
                    height = Mathf.RoundToInt(downsample);
                }
                else
                {
                    width = source.width;
                    height = source.height;
                }
            }
        }
        if (width < 1) { width = 1; } else if (width > source.width) { width = source.width; }
        if (height < 1) { height = 1; } else if (height > source.height) { height = source.height; }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;

namespace System.Diagnostics
{
    // Overview
    // --------
    // We have a few constraints we're working under here:
    // - waitpid is used on Unix to get the exit status (including exit code) of a child process, but the first call
    //   to it after the child has completed will reap the child removing the chance of subsequent calls getting status.
    // - The Process design allows for multiple independent Process objects to be handed out, and each of those
    //   objects may be used concurrently with each other, even if they refer to the same underlying process.
    //   Same with ProcessWaitHandle objects. This is based on the Windows design where anyone with a handle to the
    //   process can retrieve completion information about that process.
    // - There is no good Unix equivalent to a process handle nor to being able to asynchronously be notified
    //   of a process' exit (without more intrusive mechanisms like ptrace), which means such support
    //   needs to be layered on top of waitpid.
    //
    // As a result, we have the following scheme:
    // - We maintain a static/shared table that maps process ID to ProcessWaitState objects.
    //   Access to this table requires taking a global lock, so we try to minimize the number of
    //   times we need to access the table, primarily just the first time a Process object needs
    //   access to process exit/wait information and subsequently when that Process object gets GC'd.
    // - Each process holds a ProcessWaitState.Holder object; when that object is constructed,
    //   it ensures there's an appropriate entry in the mapping table and increments that entry's ref count.
    // - When a Process object is dropped and its ProcessWaitState.Holder is finalized, it'll
    //   decrement the ref count, and when no more process objects exist for a particular process ID,
    //   that entry in the table will be cleaned up.
    // - This approach effectively allows for multiple independent Process objects for the same process ID to all
    //   share the same ProcessWaitState. And since they are sharing the same wait state object, the wait
    //   state object uses its own lock to protect the per-process state. This includes
    //   caching exit / exit code / exit time information so that a Process object for a process that's already
    //   had waitpid called for it can get at its exit information.
    //
    // A negative ramification of this is that if a process exits, but there are outstanding wait handles
    // handed out (and rooted, so they can't be GC'd), and then a new process is created and the pid is recycled,
    // new calls to get that process's wait state will get the old process's wait state. However, pid recycling
    // will be a more general issue, since pids are the only identifier we have to a process, so if a Process
    // object is created for a particular pid, then that process goes away and a new one comes in with the same pid,
    // our Process object will silently switch to referring to the new pid. Unix systems typically have a simple
    // policy for pid recycling, which is that they start at a low value, increment up to a system maximum (e.g.
    // 32768), and then wrap around and start reusing value that aren't currently in use. On Linux,
    // proc/sys/kernel/pid_max defines the max pid value. Given the conditions that would be required for this
    // to happen, it's possible but unlikely.

    /// <summary>Exit information and waiting capabilities for a process.</summary>
    internal sealed class ProcessWaitState : IDisposable
    {
        /// <summary>
        /// Finalizable holder for a process wait state. Instantiating one
        /// will ensure that a wait state object exists for a process, will
        /// grab it, and will increment its ref count. Dropping or disposing
        /// one will decrement the ref count and clean up after it if the ref
        /// count hits zero.
        /// </summary>
        internal sealed class Holder : IDisposable
        {
            // The shared wait state this holder has a ref count on; null after Dispose.
            internal ProcessWaitState _state;

            internal Holder(int processId)
            {
                _state = ProcessWaitState.AddRef(processId);
            }

            // Finalizer releases the ref if Dispose was never called.
            ~Holder()
            {
                if (_state != null)
                {
                    _state.ReleaseRef();
                }
            }

            public void Dispose()
            {
                if (_state != null)
                {
                    _state.ReleaseRef();
                    _state = null;
                    GC.SuppressFinalize(this);
                }
            }
        }

        /// <summary>
        /// Global table that maps process IDs to the associated shared wait state information.
        /// Also serves as the lock protecting access to the table.
        /// </summary>
        private static readonly Dictionary<int, ProcessWaitState> s_processWaitStates =
            new Dictionary<int, ProcessWaitState>();

        /// <summary>
        /// Ensures that the mapping table contains an entry for the process ID,
        /// increments its ref count, and returns it.
        /// </summary>
        /// <param name="processId">The process ID for which we need wait state.</param>
        /// <returns>The wait state object.</returns>
        internal static ProcessWaitState AddRef(int processId)
        {
            lock (s_processWaitStates)
            {
                ProcessWaitState pws;
                if (!s_processWaitStates.TryGetValue(processId, out pws))
                {
                    pws = new ProcessWaitState(processId);
                    s_processWaitStates.Add(processId, pws);
                }
                pws._outstandingRefCount++;
                return pws;
            }
        }

        /// <summary>
        /// Decrements the ref count on the wait state object, and if it's the last one,
        /// removes it from the table and disposes it.
        /// </summary>
        internal void ReleaseRef()
        {
            lock (ProcessWaitState.s_processWaitStates)
            {
                ProcessWaitState pws;
                bool foundState = ProcessWaitState.s_processWaitStates.TryGetValue(_processId, out pws);
                Debug.Assert(foundState);
                if (foundState)
                {
                    --pws._outstandingRefCount;
                    if (pws._outstandingRefCount == 0)
                    {
                        ProcessWaitState.s_processWaitStates.Remove(_processId);
                        pws.Dispose();
                    }
                }
            }
        }

        /// <summary>
        /// Synchronization object used to protect all instance state. Any number of
        /// Process and ProcessWaitHandle objects may be using a ProcessWaitState
        /// instance concurrently.
        /// </summary>
        private readonly object _gate = new object();
        /// <summary>ID of the associated process.</summary>
        private readonly int _processId;

        /// <summary>If a wait operation is in progress, the Task that represents it; otherwise, null.</summary>
        private Task _waitInProgress;
        /// <summary>The number of alive users of this object (guarded by the static table lock, not _gate).</summary>
        private int _outstandingRefCount;

        /// <summary>Whether the associated process exited.</summary>
        private bool _exited;
        /// <summary>If the process exited, its exit code, or null if we were unable to determine one.</summary>
        private int? _exitCode;
        /// <summary>
        /// The approximate time the process exited. We do not have the ability to know the exact time a process
        /// exited, so we approximate it by storing the time that we discovered it exited.
        /// </summary>
        private DateTime _exitTime;
        /// <summary>A lazily-initialized event set when the process exits.</summary>
        private ManualResetEvent _exitedEvent;

        /// <summary>Initialize the wait state object.</summary>
        /// <param name="processId">The associated process' ID.</param>
        private ProcessWaitState(int processId)
        {
            Debug.Assert(processId >= 0);
            _processId = processId;
        }

        /// <summary>Releases managed resources used by the ProcessWaitState.</summary>
        public void Dispose()
        {
            Debug.Assert(!Monitor.IsEntered(_gate));
            lock (_gate)
            {
                if (_exitedEvent != null)
                {
                    _exitedEvent.Dispose();
                    _exitedEvent = null;
                }
            }
        }

        /// <summary>Notes that the process has exited. Caller must hold _gate.</summary>
        private void SetExited()
        {
            Debug.Assert(Monitor.IsEntered(_gate));

            _exited = true;
            _exitTime = DateTime.Now;
            if (_exitedEvent != null)
            {
                _exitedEvent.Set();
            }
        }

        /// <summary>Ensures an exited event has been initialized and returns it.</summary>
        /// <returns>The event, set once the process has been observed to exit.</returns>
        internal ManualResetEvent EnsureExitedEvent()
        {
            Debug.Assert(!Monitor.IsEntered(_gate));

            lock (_gate)
            {
                // If we already have an initialized event, just return it.
                if (_exitedEvent == null)
                {
                    // If we don't, create one, and if the process hasn't yet exited,
                    // make sure we have a task that's actively monitoring the completion state.
                    _exitedEvent = new ManualResetEvent(initialState: _exited);
                    if (!_exited)
                    {
                        // If we haven't exited, we need to spin up an asynchronous operation that
                        // will complete the exitedEvent when the other process exits. If there's already
                        // another operation underway, then we'll just tack ours onto the end of it.
                        _waitInProgress = _waitInProgress == null ?
                            WaitForExitAsync() :
                            _waitInProgress.ContinueWith((_, state) => ((ProcessWaitState)state).WaitForExitAsync(), this,
                                CancellationToken.None, TaskContinuationOptions.None, TaskScheduler.Default).Unwrap();
                    }
                }
                return _exitedEvent;
            }
        }

        /// <summary>Approximate exit time; only valid (asserted) after the process has exited.</summary>
        internal DateTime ExitTime
        {
            get
            {
                lock (_gate)
                {
                    Debug.Assert(_exited);
                    return _exitTime;
                }
            }
        }

        /// <summary>Whether the process has been observed to exit (polls via GetExited).</summary>
        internal bool HasExited
        {
            get
            {
                int? ignored;
                return GetExited(out ignored);
            }
        }

        /// <summary>
        /// Gets whether the process has exited, and if so its exit code (null when one
        /// could not be determined, e.g. for a non-child process).
        /// </summary>
        internal bool GetExited(out int? exitCode)
        {
            lock (_gate)
            {
                // Have we already exited?  If so, return the cached results.
                if (_exited)
                {
                    exitCode = _exitCode;
                    return true;
                }

                // Is another wait operation in progress?  If so, then we haven't exited,
                // and that task owns the right to call CheckForExit.
                if (_waitInProgress != null)
                {
                    exitCode = null;
                    return false;
                }

                // We don't know if we've exited, but no one else is currently
                // checking, so check.
                CheckForExit();

                // We now have an up-to-date snapshot for whether we've exited,
                // and if we have, what the exit code is (if we were able to find out).
                exitCode = _exitCode;
                return _exited;
            }
        }

        /// <summary>
        /// Polls waitpid (non-blocking) for the process' exit status and caches the result
        /// via SetExited. Caller must hold _gate. Falls back to kill(pid, 0) liveness
        /// probing when the pid is not our child (ECHILD), in which case no exit code
        /// is available.
        /// </summary>
        private void CheckForExit(bool blockingAllowed = false)
        {
            Debug.Assert(Monitor.IsEntered(_gate));
            Debug.Assert(!blockingAllowed); // see "PERF NOTE" comment in WaitForExit

            while (true) // in case of EINTR during system call
            {
                // Try to get the state of the (child) process
                int status;
                int waitResult = Interop.libc.waitpid(_processId, out status,
                    blockingAllowed ? Interop.libc.WaitPidOptions.None : Interop.libc.WaitPidOptions.WNOHANG);

                if (waitResult == _processId)
                {
                    // Process has exited
                    if (Interop.libc.WIFEXITED(status))
                    {
                        _exitCode = Interop.libc.WEXITSTATUS(status);
                    }
                    else if (Interop.libc.WIFSIGNALED(status))
                    {
                        // Shell convention: signal-terminated processes report 128 + signal number.
                        const int ExitCodeSignalOffset = 128;
                        _exitCode = ExitCodeSignalOffset + Interop.libc.WTERMSIG(status);
                    }
                    SetExited();
                    return;
                }
                else if (waitResult == 0)
                {
                    // Process is still running
                    return;
                }
                else if (waitResult == -1)
                {
                    // Something went wrong, e.g. it's not a child process,
                    // or waitpid was already called for this child, or
                    // that the call was interrupted by a signal.
                    int errno = Marshal.GetLastWin32Error();
                    if (errno == Interop.Errors.EINTR)
                    {
                        // waitpid was interrupted. Try again.
                        continue;
                    }
                    else if (errno == Interop.Errors.ECHILD)
                    {
                        // waitpid was used with a non-child process.  We won't be
                        // able to get an exit code, but we'll at least be able
                        // to determine if the process is still running (assuming
                        // there's not a race on its id).
                        int killResult = Interop.libc.kill(_processId, 0); // 0 means don't send a signal
                        if (killResult == 0)
                        {
                            // Process is still running
                            return;
                        }
                        else // error from kill
                        {
                            errno = Marshal.GetLastWin32Error();
                            if (errno == Interop.Errors.ESRCH)
                            {
                                // Couldn't find the process; assume it's exited
                                SetExited();
                                return;
                            }
                            else if (errno == Interop.Errors.EPERM)
                            {
                                // Don't have permissions to the process; assume it's alive
                                return;
                            }
                            else Debug.Fail("Unexpected errno value from kill");
                        }
                    }
                    else Debug.Fail("Unexpected errno value from waitpid");
                }
                else Debug.Fail("Unexpected process ID from waitpid.");

                // Unreachable in normal operation (only after a Debug.Fail in release builds):
                // treat any unexpected result as "exited" so callers don't spin forever.
                SetExited();
                return;
            }
        }

        /// <summary>Waits for the associated process to exit.</summary>
        /// <param name="millisecondsTimeout">The amount of time to wait, or -1 to wait indefinitely.</param>
        /// <returns>true if the process exited; false if the timeout occurred.</returns>
        internal bool WaitForExit(int millisecondsTimeout)
        {
            Debug.Assert(!Monitor.IsEntered(_gate));

            // Track the time the we start waiting.
            long startTime = Stopwatch.GetTimestamp();

            // Polling loop
            while (true)
            {
                bool createdTask = false;
                CancellationTokenSource cts = null;
                Task waitTask;

                // We're in a polling loop... determine how much time remains
                int remainingTimeout = millisecondsTimeout == Timeout.Infinite ?
                    Timeout.Infinite :
                    (int)Math.Max(millisecondsTimeout - ((Stopwatch.GetTimestamp() - startTime) / (double)Stopwatch.Frequency * 1000), 0);

                lock (_gate)
                {
                    // If we already know that the process exited, we're done.
                    if (_exited)
                    {
                        return true;
                    }

                    // If a timeout of 0 was supplied, then we simply need to poll
                    // to see if the process has already exited.
                    if (remainingTimeout == 0)
                    {
                        // If there's currently a wait-in-progress, then we know the other process
                        // hasn't exited (barring races and the polling interval).
                        if (_waitInProgress != null)
                        {
                            return false;
                        }

                        // No one else is checking for the process' exit... so check.
                        // We're currently holding the _gate lock, so we don't want to
                        // allow CheckForExit to block indefinitely.
                        CheckForExit();
                        return _exited;
                    }

                    // The process has not yet exited (or at least we don't know it yet)
                    // so we need to wait for it to exit, outside of the lock.
                    // If there's already a wait in progress, we'll do so later
                    // by waiting on that existing task.  Otherwise, we'll spin up
                    // such a task.
                    if (_waitInProgress != null)
                    {
                        waitTask = _waitInProgress;
                    }
                    else
                    {
                        createdTask = true;
                        CancellationToken token = remainingTimeout == Timeout.Infinite ?
                            CancellationToken.None :
                            (cts = new CancellationTokenSource(remainingTimeout)).Token;
                        waitTask = WaitForExitAsync(token);

                        // PERF NOTE:
                        // At the moment, we never call CheckForExit(true) (which in turn allows
                        // waitpid to block until the child has completed) because we currently call it while
                        // holding the _gate lock.  This is probably unnecessary in some situations, and in particular
                        // here if remainingTimeout == Timeout.Infinite.  In that case, we should be able to set
                        // _waitInProgress to be a TaskCompletionSource task, and then below outside of the lock
                        // we could do a CheckForExit(blockingAllowed:true) and complete the TaskCompletionSource
                        // after that.  We would just need to make sure that there's no risk of the other state
                        // on this instance experiencing torn reads.
                    }
                } // lock(_gate)

                if (createdTask)
                {
                    // We created this task, and it'll get canceled automatically after our timeout.
                    // This Wait should only wake up when either the process has exited or the timeout
                    // has expired.  Either way, we'll loop around again; if the process exited, that'll
                    // be caught first thing in the loop where we check _exited, and if it didn't exit,
                    // our remaining time will be zero, so we'll do a quick remaining check and bail.
                    waitTask.Wait();
                    if (cts != null)
                    {
                        cts.Dispose();
                    }
                }
                else
                {
                    // It's someone else's task.  We'll wait for it to complete. This could complete
                    // either because our remainingTimeout expired or because the task completed,
                    // which could happen because the process exited or because whoever created
                    // that task gave it a timeout.  In any case, we'll loop around again, and the loop
                    // will catch these cases, potentially issuing another wait to make up any
                    // remaining time.
                    waitTask.Wait(remainingTimeout);
                }
            }
        }

        /// <summary>Spawns an asynchronous polling loop for process completion.</summary>
        /// <param name="cancellationToken">A token to monitor to exit the polling loop.</param>
        /// <returns>The task representing the loop.</returns>
        private Task WaitForExitAsync(CancellationToken cancellationToken = default(CancellationToken))
        {
            Debug.Assert(Monitor.IsEntered(_gate));
            // NOTE(review): EnsureExitedEvent chains a second WaitForExitAsync via ContinueWith,
            // whose continuation runs without holding _gate; presumably the prior task's finally
            // block has nulled _waitInProgress by then, satisfying this assert — TODO confirm.
            Debug.Assert(_waitInProgress == null);

            return _waitInProgress = Task.Run(async delegate // Task.Run used because of potential blocking in CheckForExit
            {
                try
                {
                    // While we're not canceled
                    while (!cancellationToken.IsCancellationRequested)
                    {
                        // Poll
                        lock (_gate)
                        {
                            if (!_exited)
                            {
                                CheckForExit();
                            }
                            if (_exited) // may have been updated by CheckForExit
                            {
                                return;
                            }
                        }

                        // Wait
                        try
                        {
                            const int PollingIntervalMs = 100; // arbitrary value chosen to balance delays with polling overhead
                            await Task.Delay(PollingIntervalMs, cancellationToken);
                        }
                        catch (OperationCanceledException) { }
                    }
                }
                finally
                {
                    // Task is no longer active
                    lock (_gate)
                    {
                        _waitInProgress = null;
                    }
                }
            });
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using Xunit;

namespace System.Collections.Immutable.Tests
{
    /// <summary>Unit tests for <see cref="ImmutableStack{T}"/>.</summary>
    public class ImmutableStackTest : SimpleElementImmutablesTestBase
    {
        /// <summary>
        /// A test for Empty
        /// </summary>
        /// <typeparam name="T">The type of elements held in the stack.</typeparam>
        private void EmptyTestHelper<T>() where T : new()
        {
            IImmutableStack<T> actual = ImmutableStack<T>.Empty;
            Assert.NotNull(actual);
            Assert.True(actual.IsEmpty);
            // Clear() on an empty (or freshly-pushed) stack must return the canonical Empty singleton.
            AssertAreSame(ImmutableStack<T>.Empty, actual.Clear());
            AssertAreSame(ImmutableStack<T>.Empty, actual.Push(new T()).Clear());
        }

        /// <summary>
        /// Builds a stack by pushing <paramref name="values"/> in array order,
        /// so the last element of the array ends up on top.
        /// </summary>
        private ImmutableStack<T> InitStackHelper<T>(params T[] values)
        {
            Assert.NotNull(values);

            var result = ImmutableStack<T>.Empty;
            foreach (var value in values)
            {
                result = result.Push(value);
            }

            return result;
        }

        /// <summary>
        /// Verifies that Push produces a new stack with an incremented count
        /// while leaving the prior instances unchanged.
        /// </summary>
        private void PushAndCountTestHelper<T>() where T : new()
        {
            var actual0 = ImmutableStack<T>.Empty;
            Assert.Equal(0, actual0.Count());
            var actual1 = actual0.Push(new T());
            Assert.Equal(1, actual1.Count());
            Assert.Equal(0, actual0.Count());
            var actual2 = actual1.Push(new T());
            Assert.Equal(2, actual2.Count());
            Assert.Equal(0, actual0.Count());
        }

        /// <summary>
        /// Verifies that Pop never mutates the instance it is called on and that
        /// repeated Pop calls on the same instance yield the same (shared) tail.
        /// </summary>
        private void PopTestHelper<T>(params T[] values)
        {
            Assert.NotNull(values);
            Assert.InRange(values.Length, 1, int.MaxValue);

            var full = this.InitStackHelper(values);
            var currentStack = full;

            // This loop tests the immutable properties of Pop.
            for (int expectedCount = values.Length; expectedCount > 0; expectedCount--)
            {
                Assert.Equal(expectedCount, currentStack.Count());
                currentStack.Pop(); // discarded result: must not mutate currentStack
                Assert.Equal(expectedCount, currentStack.Count());
                var nextStack = currentStack.Pop();
                Assert.Equal(expectedCount, currentStack.Count());
                Assert.NotSame(currentStack, nextStack);
                AssertAreSame(currentStack.Pop(), currentStack.Pop(), "Popping the stack 2X should yield the same shorter stack.");
                currentStack = nextStack;
            }
        }

        /// <summary>
        /// Verifies Peek returns the most recently pushed element and that both
        /// Pop overloads leave the original instance untouched.
        /// </summary>
        private void PeekTestHelper<T>(params T[] values)
        {
            Assert.NotNull(values);
            Assert.InRange(values.Length, 1, int.MaxValue);

            var current = this.InitStackHelper(values);
            for (int i = values.Length - 1; i >= 0; i--)
            {
                AssertAreSame(values[i], current.Peek());
                T element;
                current.Pop(out element);
                AssertAreSame(current.Peek(), element);
                var next = current.Pop();
                AssertAreSame(values[i], current.Peek(), "Pop mutated the stack instance.");
                current = next;
            }
        }

        /// <summary>
        /// Verifies that both the generic and non-generic enumerators yield the
        /// pushed values in LIFO (reverse-push) order.
        /// </summary>
        private void EnumeratorTestHelper<T>(params T[] values)
        {
            var full = this.InitStackHelper(values);

            int i = values.Length - 1;
            foreach (var element in full)
            {
                AssertAreSame(values[i--], element);
            }

            Assert.Equal(-1, i);

            i = values.Length - 1;
            foreach (T element in (System.Collections.IEnumerable)full)
            {
                AssertAreSame(values[i--], element);
            }

            Assert.Equal(-1, i);
        }

        [Fact]
        public void EmptyTest()
        {
            this.EmptyTestHelper<GenericParameterHelper>();
            this.EmptyTestHelper<int>();
        }

        [Fact]
        public void PushAndCountTest()
        {
            this.PushAndCountTestHelper<GenericParameterHelper>();
            this.PushAndCountTestHelper<int>();
        }

        [Fact]
        public void PopTest()
        {
            this.PopTestHelper(new GenericParameterHelper(1), new GenericParameterHelper(2), new GenericParameterHelper(3));
            this.PopTestHelper(1, 2, 3);
        }

        /// <summary>Verifies the Pop overload that also returns the popped value.</summary>
        [Fact]
        public void PopOutValue()
        {
            var stack = ImmutableStack<int>.Empty.Push(5).Push(6);
            int top;
            stack = stack.Pop(out top);
            Assert.Equal(6, top);
            var empty = stack.Pop(out top);
            Assert.Equal(5, top);
            Assert.True(empty.IsEmpty);

            // Try again with the interface to verify extension method behavior.
            IImmutableStack<int> stackInterface = stack;
            Assert.Same(empty, stackInterface.Pop(out top));
            Assert.Equal(5, top);
        }

        [Fact]
        public void PeekTest()
        {
            this.PeekTestHelper(new GenericParameterHelper(1), new GenericParameterHelper(2), new GenericParameterHelper(3));
            this.PeekTestHelper(1, 2, 3);
        }

        /// <summary>
        /// Exercises both the struct enumerator and the boxed IEnumerable enumerator,
        /// including Current-before-MoveNext, exhausted-enumerator, Reset, and
        /// use-after-Dispose error behavior.
        /// </summary>
        [Fact]
        public void EnumeratorTest()
        {
            this.EnumeratorTestHelper(new GenericParameterHelper(1), new GenericParameterHelper(2));
            this.EnumeratorTestHelper<GenericParameterHelper>();
            this.EnumeratorTestHelper(1, 2);
            this.EnumeratorTestHelper<int>();

            var stack = ImmutableStack.Create<int>(5);
            var enumeratorStruct = stack.GetEnumerator();
            // Current is invalid before the first MoveNext and after enumeration completes.
            Assert.Throws<InvalidOperationException>(() => enumeratorStruct.Current);
            Assert.True(enumeratorStruct.MoveNext());
            Assert.Equal(5, enumeratorStruct.Current);
            Assert.False(enumeratorStruct.MoveNext());
            Assert.Throws<InvalidOperationException>(() => enumeratorStruct.Current);
            Assert.False(enumeratorStruct.MoveNext());

            var enumerator = ((IEnumerable<int>)stack).GetEnumerator();
            Assert.Throws<InvalidOperationException>(() => enumerator.Current);
            Assert.True(enumerator.MoveNext());
            Assert.Equal(5, enumerator.Current);
            Assert.False(enumerator.MoveNext());
            Assert.Throws<InvalidOperationException>(() => enumerator.Current);
            Assert.False(enumerator.MoveNext());

            // Reset restores the enumerator to its pre-first-MoveNext state.
            enumerator.Reset();
            Assert.Throws<InvalidOperationException>(() => enumerator.Current);
            Assert.True(enumerator.MoveNext());
            Assert.Equal(5, enumerator.Current);
            Assert.False(enumerator.MoveNext());

            // All members throw once the enumerator has been disposed.
            enumerator.Dispose();
            Assert.Throws<ObjectDisposedException>(() => enumerator.Reset());
            Assert.Throws<ObjectDisposedException>(() => enumerator.MoveNext());
            Assert.Throws<ObjectDisposedException>(() => enumerator.Current);
        }

        /// <summary>Verifies structural (sequence) equality semantics of stacks.</summary>
        [Fact]
        public void EqualityTest()
        {
            Assert.False(ImmutableStack<int>.Empty.Equals(null));
            Assert.False(ImmutableStack<int>.Empty.Equals("hi"));
            Assert.Equal(ImmutableStack<int>.Empty, ImmutableStack<int>.Empty);
            Assert.Equal(ImmutableStack<int>.Empty.Push(3), ImmutableStack<int>.Empty.Push(3));
            Assert.NotEqual(ImmutableStack<int>.Empty.Push(5), ImmutableStack<int>.Empty.Push(3));
            Assert.NotEqual(ImmutableStack<int>.Empty.Push(3).Push(5), ImmutableStack<int>.Empty.Push(3));
            Assert.NotEqual(ImmutableStack<int>.Empty.Push(3), ImmutableStack<int>.Empty.Push(3).Push(5));
        }

        [Fact]
        public void GetEnumerator_EmptyStackMoveNext_ReturnsFalse()
        {
            ImmutableStack<int> stack = ImmutableStack<int>.Empty;
            Assert.False(stack.GetEnumerator().MoveNext());
        }

        [Fact]
        public void EmptyPeekThrows()
        {
            Assert.Throws<InvalidOperationException>(() => ImmutableStack<GenericParameterHelper>.Empty.Peek());
        }

        [Fact]
        public void EmptyPopThrows()
        {
            Assert.Throws<InvalidOperationException>(() => ImmutableStack<GenericParameterHelper>.Empty.Pop());
        }

        /// <summary>Verifies the ImmutableStack.Create/CreateRange factory methods.</summary>
        [Fact]
        public void Create()
        {
            ImmutableStack<int> stack = ImmutableStack.Create<int>();
            Assert.True(stack.IsEmpty);

            stack = ImmutableStack.Create(1);
            Assert.False(stack.IsEmpty);
            Assert.Equal(new[] { 1 }, stack);

            // Elements are pushed in order, so enumeration is reversed relative to the input.
            stack = ImmutableStack.Create(1, 2);
            Assert.False(stack.IsEmpty);
            Assert.Equal(new[] { 2, 1 }, stack);

            stack = ImmutableStack.CreateRange((IEnumerable<int>)new[] { 1, 2 });
            Assert.False(stack.IsEmpty);
            Assert.Equal(new[] { 2, 1 }, stack);

            AssertExtensions.Throws<ArgumentNullException>("items", () => ImmutableStack.CreateRange((IEnumerable<int>)null));
            AssertExtensions.Throws<ArgumentNullException>("items", () => ImmutableStack.Create((int[])null));
        }

        /// <summary>Verifies the debugger display and debugger type proxy of the stack.</summary>
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Cannot do DebuggerAttribute testing on UapAot: requires internal Reflection on framework types.")]
        public void DebuggerAttributesValid()
        {
            DebuggerAttributes.ValidateDebuggerDisplayReferences(ImmutableStack.Create<int>());
            ImmutableStack<string> stack = ImmutableStack.Create<string>("1", "2", "3");
            DebuggerAttributeInfo info = DebuggerAttributes.ValidateDebuggerTypeProxyProperties(stack);

            PropertyInfo itemProperty = info.Properties.Single(pr => pr.GetCustomAttribute<DebuggerBrowsableAttribute>().State == DebuggerBrowsableState.RootHidden);
            string[] items = itemProperty.GetValue(info.Instance) as string[];
            Assert.Equal(stack, items);
        }

        /// <summary>Verifies the debugger proxy rejects a null stack argument.</summary>
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Cannot do DebuggerAttribute testing on UapAot: requires internal Reflection on framework types.")]
        public static void TestDebuggerAttributes_Null()
        {
            Type proxyType = DebuggerAttributes.GetProxyType(ImmutableStack.Create<string>("1", "2", "3"));
            TargetInvocationException tie = Assert.Throws<TargetInvocationException>(() => Activator.CreateInstance(proxyType, (object)null));
            Assert.IsType<ArgumentNullException>(tie.InnerException);
        }

        /// <summary>
        /// Base-class hook: produces a stack that enumerates in the same order as
        /// <paramref name="contents"/> (values are pushed in reverse).
        /// </summary>
        protected override IEnumerable<T> GetEnumerableOf<T>(params T[] contents)
        {
            var stack = ImmutableStack<T>.Empty;
            foreach (var value in contents.Reverse())
            {
                stack = stack.Push(value);
            }

            return stack;
        }
    }
}
// Copyright 2011 Chris Edwards // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Globalization; using System.Text; // From SimpleTalk.WaffleEngine. namespace Fluency.DataGeneration { public class WaffleEngine { static readonly string[] s_Adverbs = new[] { "substantively", "intuitively", "uniquely", "semantically", "necessarily", "stringently", "precisely", "rigorously", "broadly", "generally", "implicitly", "inherently", "presumably", "preeminently", "analytically", "logically", "ontologically", "wholly", "basically", "demonstrably", "strictly", "functionally", "radically", "definitely", "positively", "intrinsically", "generally", "overwhelmingly", "essentially", "vitally", "operably", "fundamentally", "significantly", "retroactively", "retrospectively", "globally", "clearly", "disconcertingly" }; static readonly string[] s_ArtyNouns = new[] { "discordance", "legitimisation", "principle", "transposition", "dimension", "reciprocity", "fragmentation", "projection", "dichotomy", "concept", "theme", "teleology", "symbolism", "transformation", "antithesis", "desiderata", "metaphor", "metalanguage", "reciprocity", "consciousness", "feeling", "fact", "individuality", "comparison", "awareness", "expression", "appreciation", "correspondence", "interpretation", "interpolation", "interpenetration", "statement", "emphasis", "feeling", "empathy", "sensibility", "insight", "attitude", "consciousness", "absorbtion", "self-forgetfulness", "parallelism", 
"classification", "evidence", "aspect", "distinction", "idealism", "naturalism", "disposition", "apprehension", "morality", "object", "idealism", "quality", "romanticism", "realism", "idealism", "quality", "transposition", "determinism", "attitude", "terminology", "individuality", "category", "integration", "concept", "phenomenon", "element", "analogy", "perception", "principle", "aesthetic", "spirituality", "aspiration", "quality", "disposition", "subjectivism", "objectivism", "contemplation", "vivacity", "feeling", "empathy", "value", "sensation", "causation", "affectability", "impulse", "attitude", "sensibility", "material", "aspect", "problem", "implication", "hierarchy", "process", "provenance", "discord", "milieu" }; static readonly string[] s_Buzzphrases = new[] { "|1 |2 |3", "|1 |2 |3", "|2 |3", "|1 |2 |3", "|1 |2 |3", "|4", "|4" }; static readonly string[] s_CardinalSequence = new[] { "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven", "twelve" }; static readonly string[] s_Cliches = new[] { "|o of |o", "|o of |o", "|o of |o", "|o of |o", "|o of |o", "|o of |o", "|o of |o", "|o of |o", "development strategy", "decision support", "fourth-generation environment", "application systems", "feedback process", "function hierarchy analysis", "structured business analysis", "base information", "final consolidation", "design criteria", "iterative design process", "common interface", "ongoing support", "relational flexibility", "referential integrity", "strategic framework", "dynamic systems strategy", "functional decomposition", "operational situation", "individual action plan", "key behavioural skills", "set of constraints", "structure plan", "contingency planning", "resource planning", "participant feedback", "referential function", "passive result" , "aims and constraints", "strategic opportunity", "development of systems resource", "major theme of the |c", "technical coherence", "cost-effective application", "high leverage 
area", "key leveraging technology", "known strategic opportunity", "internal resource capability", "interactive concern-control system", "key technology", "prime objective", "key area of opportunity", "present infrastructure", "enabling technology", "key objective", "areas of particular expertise", "overall business benefit", "competitive practice and technology", "flexible manufacturing system", "adequate resource level", "|e sophisticated hardware", "external agencies", "anticipated fourth-generation equipment", "maintenance of current standards", "adequate development of any necessary measures", "critical component in the", "active process of information gathering", "general milestones", "adequate timing control", "quantitative and discrete targets", "subsystem compatibility testing", "structural design, based on system engineering concepts", "key principles behind the |c", "constraints of manpower resourcing", "necessity for budgetary control", "discipline of resource planning", "diverse hardware environment", "product lead times", "access to corporate systems", "overall certification project", "commitment to industry standards", "general increase in office efficiency", "preliminary qualification limit", "calculus of consequence", "corollary", "reverse image", "logical data structure", "philosophy of commonality and standardization", "impact on overall performance", "multilingual cynicism", "functional synergy", "backbone of connectivity", "integrated set of requirements", "ongoing |3 philosophy", "strategic requirements", "integration of |c with strategic initiatives", "established analysis and design methodology", "corporate information exchange", "separate roles and significances of the |c", "formal strategic direction", "integrated set of facilities", "appreciation of vested responsibilities", "potential globalisation candidate", "tentative priority", "performance objectives", "global business practice", "functionality matrix", "priority sequence", "system 
elements", "life cycle phase", "operations scenario", " total system rationale", "conceptual baseline", "incremental delivery", "requirements hierarchy", "functional baseline", "system critical design", "capability constraint", "matrix of supporting elements", "lead group concept", "dominant factor", "modest correction", "element of volatility", "inevitability of amelioration", "attenuation of subsequent feedback", "chance of entropy within the system", "associated supporting element", "intrinsic homeostasis within the metasystem", "characterization of specific information", "organization structure", "constant flow of effective information", "key business objectives", "life cycle", "large portion of the co-ordination of communication", "corporate procedure", "proposed scenario" }; static readonly string[] s_FirstAdjectivePhrases = new[] { "comprehensive", "targeted", "realigned", "client focussed", "best practice", "value added", "quality driven", "basic", "principal", "central", "essential", "primary", "indicative", "continuous", "critical", "prevalent", "preeminent", "unequivocal", "sanctioned", "logical", "reproducible", "methodological", "relative", "integrated", "fundamental", "cohesive", "interactive", "comprehensive", "critical", "potential", "vibrant", "total", "additional", "secondary", "primary", "heuristic", "complex", "pivotal", "quasi-effectual", "dominant", "characteristic", "ideal", "doctrine of the", "key", "independent", "deterministic", "assumptions about the", "heuristic", "crucial", "meaningful", "implicit", "analogous", "explicit", "integrational", "non-viable", "directive", "consultative", "collaborative", "delegative", "tentative", "privileged", "common", "hypothetical", "metathetical", "marginalised", "systematised", "evolutional", "parallel", "functional", "responsive", "optical", "inductive", "objective", "synchronised", "compatible", "prominent", "three-phase", "two-phase", "balanced", "legitimate", "subordinated", "complementary", 
"proactive", "truly global", "interdisciplinary", "homogeneous" , "hierarchical", "technical", "alternative", "strategic", "environmental", "closely monitored", "three-tier", "inductive", "fully integrated", "fully interactive", "ad-hoc", "ongoing", "proactive", "dynamic", "flexible", "verifiable", "falsifiable" , "transitional", "mechanism-independent", "synergistic", "high-level" }; static readonly string[] s_Forenames = new[] { "Abraham", "Reginald", "Cheryl", "Michel", "Innes", "Ann", "Marjorie", "Matthew", "Mark", "Luke", "John", "Burt", "Lionel", "Humphrey", "Andrew", "Jenny", "Sheryl", "Livia", "Charlene", "Winston", "Heather", "Michael", "Sylvia", "Albert", "Anne", "Meander", "Dean", "Dirk", "Desmond", "Akiko" }; static readonly string[] s_MaybeHeading = new[] { "", "", "\nThe |uc.\n", "" }; static readonly string[] s_MaybeParagraph = new[] { "", "", "|n", "" }; static readonly string[] s_NounPhrases = new[] { "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "|o", "development", "program", "baseline", "reconstruction", "discordance", "monologism", "substructure", "legitimisation", "principle", "constraints", "management option", "strategy", "transposition", "auto-interruption", "derivation", "option", "flexibility", "proposal", "formulation", "item", "issue", "capability", "mobility", "programming", "concept", "time-phase", "dimension", "faculty", "capacity", "proficiency", "reciprocity", "fragmentation", "consolidation", "projection", "interface", "hardware", "contingency", "dialog", "dichotomy", "concept", "parameter", "algorithm", "milieu", "terms of reference", "item", "vibrancy", "reaction", "casuistry", "theme", "teleology", "symbolism", "resource allocation", "certification project", "functionality", "specification", "matrix", "rationalization", "consolidation", "remediation", "facilitation", "simulation" , "evaluation", 
"competence", "familiarisation", "transformation", "apriorism", "conventionalism", "verification", "functionality", "component", "factor", "antitheseis", "desiderata", "metaphor", "metalanguage", "globalisation", "initiative", "projection", "partnership", "priority", "service", "support", "best-practice", "change", "delivery", "funding", "resources" }; static readonly string[] s_ObjectPhrases = new[] { "the overall game-plan", "the slippery slope", "the strategic fit", "The total quality objectives", "the |c. This should be considered in the light of the |c" , "the |c. One must therefore dedicate resources to the |c immediately." , "the |c on a strictly limited basis", "this |c. This should present few practical problems", "what should be termed the |c", "the applicability and value of the |c", "the |c or the |c", "the negative aspects of any |c", "an unambiguous concept of the |c", "the thematic reconstruction of |c", "the scientific |o of the |c", "the evolution of |2 |o over a given time limit", "any commonality between the |c and the |c", "the greater |c of the |c", "the universe of |o", "any discrete or |2 configuration mode", "the |4", "an elemental change in the |c", "the work being done at the 'coal-face'", "what is beginning to be termed the '|c'", "the |c. We need to be able to rationalize |D", "the |c. We can then |e play back our understanding of |D" , "the |c. Everything should be done to expedite |D", "The |c. The advent of the |c |e |d |D", "the |c. The |c makes this |e inevitable", "the |c. The |3 is of a |2 nature", "the |c. This may be due to a lack of a |c.", "the |c. Therefore a maximum of flexibility is required", "any |c. This can be deduced from the |c", "the |c. This may |e flounder on the |c", "the |c. This may explain why the |c |e |d |D", "the |c. 
This trend may dissipate due to the |c" }; static readonly string[] s_OrdinalSequences = new[] { "first", "second", "third", "fourth", "fifth" }; static readonly string[] s_PreamblePhrases = new[] { "We have heard it said, tongue-in-cheek, that", "To be quite frank,", "Focussing on the agreed facts, we can say that", "To be perfectly truthful,", "In broad terms,", "To be perfectly honest,", "It was |f |s who first pointed out that", "Since |f |s's first formulation of the |c, it has become fairly obvious that" , "Since the seminal work of |f |s it has generally been accepted that" , "Without a doubt, |f |s iwas right in saying that", "As regards |h |c, We should put this one to bed. On the other hand," , "As regards |h |c, This may have a knock-on effect. On the other hand," , "We must take on board that fact that", "Without a doubt, |B |C |D. So, where to from here? Persumably," , "It has hitherto been accepted that", "At the end of the day,", "Under the provision of the overall |1 plan,", "Firming up the gaps, one can say that", "Within the bounds of |h |c,", "The |h |c provides us with a win-win situation. Especially if one considers that " , "There are swings and roundabouts in considering that", "To be precise,", "Whilst taking the subject of |h |c offline, one must add that" , "For example,", "An orthodox view is that", "To reiterate,", "To recapitulate,", "Strictly speaking,", "In a very real sense,", "Regarding the nature of |h |c,", "In a strictly mechanistic sense,", "One is struck quite forcibly by the fact that", "In any event,", "In particular,", "In assessing the |c, one should think outside the box. 
on the other hand," , "On the other hand,", "It is recognized that", "Focusing specifically on the relationship between |h |c and any |c," , "Although it is fair to say that |B |C |D, one should take this out of the loop" , "|bly,", "|bly,", "|bly,", "Be that as it may,", "Taking everything into consideration,", "As in so many cases, we can state that", "The |c cannot explain all the problems in maximizing the efficacy of |h |c. Generally" , "We can confidently base our case on an assumption that" , "An initial appraisal makes it evident that", "An investigation of the |1 factors suggests that", "It is common knowledge that", "Despite an element of volatility,", "The less obviously co-existential factors imply that", "To coin a phrase,", "One might venture to suggest that", "In all foreseeable circumstances,", "However,", "Similarly,", "As a resultant implication,", "There is a strong body of opinion that affirms that", "Up to a point,", "Quite frankly,", "In this regard,", "Based on integral subsystems,", "For example,", "Therefore,", "Within current constraints on manpower resources,", "Up to a certain point,", "In an ideal environment,", "It might seem reasonable to think of |h |c as involving |h |c. Nevertheless," , "It can be forcibly emphasized that", "Thus,", "Within the restrictions of |h |c,", "In respect to specific goals,", "It is important to realize that", "To put it concisely,", "To be perfectly frank,", "On any rational basis,", "In any event,", "On the basis of |h |2 |3,", "With all the relevant considerations taken into account, it can be stated that" , "Few would disagree, however, that", "It goes without saying that", "Only in the case of the |c can one state that", "if one considers the |c in the light of |h |c,", "The |c is taken to be a |c. Presumably,", "So far,", "It is quite instructive to compare |h |c and |h |c. 
In the latter case," , "Obviously,", "By and large,", "Possibly,", "One can, with a certain degree of confidence, conclude that" , "Without doubt,", "With due caution, one can postulate that", "The |c is clearly related to |h |c. Nevertheless,", "There is probably no causal link between the |c and |h |c. However" , "In the light of |h |c, it is clear that", "No one can deny the relevance of |h |c. Equally it is certain that" , "Albeit,", "It is precisely the influence of |h |c for |t that makes the |c inevitable, Equally," , "One must clearly state that", "In connection with |h |c,", "Normally", "one can, quite consistently, say that", "Clearly, it is becoming possible to resolve the difficulties in assuming that" , "Within normal variability,", "There can be little doubt that", "Few would deny that", "It is not often |e stated that", "In real terms,", "In this day and age,", "It is |e stated that", "The position in regard to the |c is that", "On one hand |B |C |D, but on the other hand", "One hears it stated that |B |C |D, but it is more likely that" , "Whilst it may be true that |B |C |D, one must not lose sight of the fact that" }; static readonly string[] s_Prefixes = new[] { "the", "the", "the", "the", "the", "the", "the", "the", "the", "any", "any", "what might be described as the", "what amounts to the", "a large proportion of the", "what has been termed the", "a unique facet of the", "a significant aspect of the", "the all-inclusiveness of the", "any inherent dangers of the", "the obvious necessity for the", "the basis of any", "the basis of the", "any formalization of the", "the quest for the", "any significant enhancements in the", "the underlying surrealism of the ", "the feasibility of the", "the requirements of", "an implementation strategy for", "any fundamental dichotomies of the", "a concept of what we have come to call the", "the infrastructure of the", "a proven solution to the", "a percentage of the", "a proportion of the", "an issue of the", 
"any consideration of the", "a factor within the", "the adequate functionality of the", "the principle of the", "the constraints of the", "a realization the importance of the" , "the criterion of", "a unique facet of", "the consolidation of the", "a preponderance of the" }; static readonly string[] s_SecondAdjectivePhrases = new[] { "fast-track", "transparent", "results-driven", "subsystem", "test", "configuration", "mission", "functional", "referential", "numinous", "paralyptic", "radical", "paratheoretical", "consistent", "macro", "interpersonal", "auxiliary", "empirical", "theoretical", "corroborated", "management", "organizational", "monitored", "consensus", "reciprocal", "unprejudiced", "digital", "logic", "transitional", "incremental", "equivalent", "universal", "sub-logical", "hypothetical", "conjectural", "conceptual", "empirical", "spatio-temporal", "third-generation", "epistemological", "diffusible", "specific", "non-referent", "overriding", "politico-strategical", "economico-social", "on-going", "extrinsic", "intrinsic", "multi-media", "integrated", "effective", "overall", "principal", "prime", "major", "empirical", "definitive", "explicit", "determinant", "precise", "cardinal", "principal", "affirming", "harmonizing", "central", "essential", "primary", "indicative", "mechanistic", "continuous", "critical", "prevalent", "preeminent", "unequivocal", "sanctioned", "logical ", "reproducible", "methodological", "relative", "integrated", "fundamental", "cohesive", "interactive", "comprehensive", "critical", "potential", "total", "additional", "secondary", "primary", "heuristic", "complex", "pivotal", "quasi-effectual", "dominant", "characteristic", "ideal", "independent", "deterministic", "heuristic", "crucial", "meaningful", "implicit", "analogous", "explicit", "integrational", "directive", "collaborative", "entative", "privileged", "common", "hypothetical", "metathetical", "marginalised", "systematised", "evolutional", "parallel", "functional", 
"responsive", "optical" , "inductive", "objective", "synchronised", "compatible", "prominent", "legitimate", "subordinated ", "complementary", "homogeneous", "hierarchical", "alternative", "environmental", "inductive", "transitional", "Philosophical", "latent", "conscious", "practical", "temperamental", "impersonal", "personal", "subjective", "objective", "dynamic", "inclusive", "paradoxical", "pure", "central", "psychic", "associative", "intuitive", "free-floating", "empirical", "superficial", "predominant", "actual", "mutual", "arbitrary", "inevitable", "immediate", "affirming", "functional", "referential", "numinous", "paralyptic", "radical", "paratheoretical", "consistent", "interpersonal", "auxiliary", "empirical", "theoretical", "reciprocal", "unprejudiced", "transitional", "incremental", "equivalent", "universal", "sub-logical", "hypothetical", "conjectural", "conceptual ", "empirical", "spatio-temporal", "epistemological", "diffusible", "specific", "non-referent", "overriding", "politico-strategical", "economico-social", "on-going", "extrinsic", "intrinsic", "effective", "principal", "prime", "major", "empirical", "definitive", "explicit", "determinant", "precise", "cardinal", "geometric", "naturalistic", "linear", "distinctive", "phylogenetic", "ethical", "theoretical", "economic", "aesthetic", "personal", "social", "discordant", "political", "religious", "artificial", "collective", "permanent", "metaphysical", "organic", "mensurable", "expressive", "governing", "subjective", "empathic", "imaginative", "ethical", "expressionistic", "resonant", "vibrant" }; static readonly string[] s_SubjectPhrases = new[] { "|h strategic goals", "|h gap analysis", "|h hardball", "|h purchaser - provider", "|h skill set", "|h knock-on effect", "|h strategic plan ", "|h the bottom line", "|h mindset", "|h benchmark", "|h core business", "|h big picture", "|h take home message", "|h lessons learnt", "|h movers and shakers", "|h knowledge base", "the ball-park figures for the 
|c", "The core drivers", "a particular factor, such as the |c, the |c, the |c or the |c" , "there is an apparent contradiction between the |c and |h |c. However, |h |c" , "the question of |h |c", "the desirability of attaining |h |c, as far as the |c is concerned," , "a persistent instability in |h |c", "examination of |2 instances", "the classic definition of |h |c", "firm assumptions about |c", "the |c and the resources needed to support it are mandatory. |A |B" , "significant progress has been made in the |c. |A |B", "efforts are already underway in the development of the |c. |A |B" , "a |2 operation of |h |c", "subdivisions of |h |c", "an anticipation of the effects of any |c", "an overall understanding of |h |c", "the assertion of the importance of the |c", "an understanding of the necessary relationship between the |c and any |c" , "the possibility, that the |c plays a decisive part in influencing |h |c, " , "any solution to the problem of |h |c", "the lack of understanding of |h |c", "the |c in its relation to |h |c", "parameters within |h |c", "the target population for |h |c", "initiation of |h |c", "both |c and |c", "|h |c", "an extrapolation of the |c", "|h |c", "the assessment of any significant weaknesses in the |c", "any subsequent interpolation", "|h |c is |e significant. On the other hand |h |c", "|h |c relates |e to any |c. Conversely, |h |c", "|h |c may be |e important. 
The |c", "the incorporation of the |c", "the quest for the |c", "the dangers inherent in the |c", "the value of the |c", "the |c", "an unambiguous concept of the |c", "a metonymic reconstruction of the |c", "a primary interrelationship between system and subsystem technologies" }; static readonly string[] s_Surnames = new[] { "Bennet", "Blotchet-Halls", "Carson", "Clarke", "DeFrance", "del Castillo", "Dull", "Green", "Greene", "Gringlesby", "Hunter", "Karsen", "Locksley", "MacFeather", "McBadden", "O'Leary", "Panteley", "Poel", "Powys-Lybbe", "Smith", "Straight", "Stringer", "White", "Yokomoto" }; static readonly string[] s_VerbPhrases = new[] { "|d the overall efficiency of", "|d the |4 and |C", "can fully utilize", "will move the goal posts for", "would stretch the envelope of", "enables us to tick the boxes of", "could go the extra mile for", "should empower employees to produce", "should touch base with", "probably |d", "is generally compatible with", "provides the bandwidth for", "gives a win-win situation for", "has clear ramifications for", "has been made imperative in view of", "provides the context for", "underpins the importance of", "focuses our attention on", "will require a substantial amount of effort. |A |B |C", "represents a different business risk. |A |B |C", "is of considerable importance from the production aspect. |A |B |C" , "should facilitate information exchange. |A |B |C", "has the intrinsic benefit of resilience, unlike the", "cannot be shown to be relevant. This is in contrast to", "cannot always help us. |A |B |C", "|C |D. A priority should be established based on a combination of |c and |c" , "|C |D. 
The objective of the |c is to delineate", "shows an interesting ambivalence with", "underlines the essential paradigm of", "can be taken in juxtaposition with", "provides an interesting insight into", "must seem oversimplistic in the light of", "seems to |e reinforce the importance of", "leads clearly to the rejection of the supremacy of", "allows us to see the clear significance of", "underlines the significance of", "reinforces the weaknesses in", "confuses the |c and", "|d the |c and", "|d", "|d", "|d", "|d", "|d", "|d", "provides a harmonic integration with", "is constantly directing the course of", "must intrinsically determine", "has fundamental repercussions for", "provides an idealized framework for", "|e alters the importance of", "|e changes the interrelationship between the|c and", "|e legitimises the significance of", "must utilize and be functionally interwoven with", "|d the probability of project success and", "|e |d the |c and", "|e |d the |c and", "|e |d the |c and", "|e |d the |c in its relationship with", "|d the dangers quite |e of", "has confirmed an expressed desire for", "is reciprocated by", "has no other function than to provide", "adds explicit performance limits to", "must be considered proactively, rather than reactively, in the light of" , "necessitates that urgent consideration be applied to", "requires considerable systems analysis and trade-off studies to arrive at" , "provides a heterogenous environment to", "cannot compare in its potential exigencies with", "is further compounded, when taking into account", "presents extremely interesting challenges to", "|d the importance of other systems and the necessity for", "provides one of the dominant factors of", "forms the basis for", "enhances the efficiency of", "develops a vision to leverage", "produces diagnostic feedback to", "capitalises on the strengths of", "effects a significant implementation of", "seems to counterpoint", "adds overriding performance constraints to", "manages to 
subsume", "provides a balanced perspective to", "rivals, in terms of resource implications,", "contrives through the medium of the |c to emphasize", "can be developed in parallel with", "commits resources to", "confounds the essential conformity of", "provides the bridge between the |c and", "should be provided to expedite investigation into", "poses problems and challenges for both the |c and", "should not divert attention from", "provides an insight into", "has considerable manpower implications when considered in the light of" , "may mean a wide diffusion of the |c into", "makes little difference to", "focuses our attention on", "exceeds the functionality of", "recognizes deficiencies in ", "needs to be factored into the equation alongside the", "needs to be addessed along with the" }; static readonly string[] s_Verbs = new[] { "stimulates", "spreads", "improves", "energises", "emphasizes", "subordinates", "posits", "perceives", "de-stabilizes", "Revisits", "connotes", "signifies", "indicates", "increases", "supports", "rationalises", "provokes", "de-actualises", "relocates", "yields", "implies", "designates", "reflects", "sustains", "supplements", "represents", "re-iterates", "juxtasposes", "provides", "maximizes", "identifies", "furnishes", "supplies", "affords", "yields", "formulates", "focuses on", "depicts", "embodies", "exemplifies", "expresses", "personifies", "symbolizes", "typifies", "replaces", "supplants", "denotes", "depicts", "expresses", "illustrates", "implies", "symbolizes", "delineates", "depicts", "illustrates", "portrays", "clarifies", "depicts", "interprets", "delineates", "reflects", "evinces", "expresses", "indicates", "manifests", "reveals", "shows", "delineates", "represents", "anticipates", "denotes", "identifies", "indicates", "symbolizes", "diminishes", "lessens", " represses", "suppresses", "weakens", "accentuates", "amplifies", "heightens", "highlights", "spotlights", "stresses", "underlines", "underscores", "asserts", "reiterates", 
"restates", "stresses", "enhances", "amends", "translates", "specifies" }; readonly Random m_Random; int m_CardinalSequence; int m_OrdinalSequence; string m_Title; public WaffleEngine( Random random ) { m_Random = random; } void EvaluateRandomPhrase( string[] phrases, StringBuilder output ) { EvaluatePhrase( phrases[m_Random.Next( 0, phrases.Length )], output ); } void EvaluatePhrase( string phrase, StringBuilder result ) { for ( int i = 0; i < phrase.Length; i++ ) { if ( phrase[i] == '|' && i + 1 < phrase.Length ) { i++; StringBuilder escape = result; bool titleCase = false; if ( phrase[i] == 'u' && i + 1 < phrase.Length ) { escape = new StringBuilder(); titleCase = true; i++; } switch ( phrase[i] ) { case 'a': EvaluateCardinalSequence( escape ); break; case 'b': EvaluateOrdinalSequence( escape ); break; case 'c': EvaluateRandomPhrase( s_Buzzphrases, escape ); break; case 'd': EvaluateRandomPhrase( s_Verbs, escape ); break; case 'e': EvaluateRandomPhrase( s_Adverbs, escape ); break; case 'f': EvaluateRandomPhrase( s_Forenames, escape ); break; case 's': EvaluateRandomPhrase( s_Surnames, escape ); break; case 'o': EvaluateRandomPhrase( s_ArtyNouns, escape ); break; case 'y': RandomDate( escape ); break; case 'h': EvaluateRandomPhrase( s_Prefixes, escape ); break; case 'A': EvaluateRandomPhrase( s_PreamblePhrases, escape ); break; case 'B': EvaluateRandomPhrase( s_SubjectPhrases, escape ); break; case 'C': EvaluateRandomPhrase( s_VerbPhrases, escape ); break; case 'D': EvaluateRandomPhrase( s_ObjectPhrases, escape ); break; case '1': EvaluateRandomPhrase( s_FirstAdjectivePhrases, escape ); break; case '2': EvaluateRandomPhrase( s_SecondAdjectivePhrases, escape ); break; case '3': EvaluateRandomPhrase( s_NounPhrases, escape ); break; case '4': EvaluateRandomPhrase( s_Cliches, escape ); break; case 't': escape.Append( m_Title ); break; case 'n': escape.Append( "\n" ); break; } if ( titleCase ) result.Append( TitleCaseWords( escape.ToString() ) ); } else result.Append( 
phrase[i] ); } } void EvaluateCardinalSequence( StringBuilder output ) { if ( m_CardinalSequence >= s_CardinalSequence.Length ) m_CardinalSequence = 0; output.Append( s_CardinalSequence[m_CardinalSequence++] ); } void EvaluateOrdinalSequence( StringBuilder output ) { if ( m_OrdinalSequence >= s_OrdinalSequences.Length ) m_OrdinalSequence = 0; output.Append( s_OrdinalSequences[m_OrdinalSequence++] ); } void RandomDate( StringBuilder output ) { output.AppendFormat( "{0:04u}", DateTime.Now.Year - m_Random.Next( 0, 31 ) ); } public static string TitleCaseWords( string input ) { return CultureInfo.CurrentCulture.TextInfo.ToTitleCase( input ); } public void HtmlWaffle( int paragraphs, Boolean includeHeading, StringBuilder result ) { m_Title = string.Empty; m_CardinalSequence = 0; m_OrdinalSequence = 0; if ( includeHeading ) { var title = new StringBuilder(); EvaluatePhrase( "the |o of |2 |o", title ); m_Title = TitleCaseWords( title.ToString() ); result.AppendLine( "<html>" ); result.AppendLine( "<head>" ); result.AppendFormat( "<title>{0}</title>", m_Title ); result.AppendLine(); result.AppendLine( "</head>" ); result.AppendLine( "<body>" ); result.AppendFormat( @"<h1>{0}</h1>", m_Title ); result.AppendLine(); EvaluatePhrase( "<blockquote>\"|A |B |C |t\"<br>", result ); EvaluatePhrase( "<cite>|f |s in The Journal of the |uc (|uy)</cite></blockquote>", result ); result.AppendLine(); EvaluatePhrase( "<h2>|c.</h2>", result ); result.AppendLine(); } result.Append( "<p>" ); for ( int i = 0; i < paragraphs; i++ ) { if ( i != 0 ) EvaluateRandomPhrase( s_MaybeHeading, result ); EvaluatePhrase( "|A |B |C |D. 
", result ); EvaluateRandomPhrase( s_MaybeParagraph, result ); } result.AppendLine( "</p>" ); result.AppendLine( "</body>" ); result.AppendLine( "</html>" ); } public void TextWaffle( int paragraphs, Boolean includeHeading, StringBuilder result ) { m_Title = string.Empty; m_CardinalSequence = 0; m_OrdinalSequence = 0; if ( includeHeading ) { var title = new StringBuilder(); EvaluatePhrase( "the |o of |2 |o", title ); m_Title = TitleCaseWords( title.ToString() ); result.AppendLine( m_Title ); result.AppendLine(); EvaluatePhrase( "\"|A |B |C |t\"\n", result ); EvaluatePhrase( "(|f |s in The Journal of the |uc (|uy))", result ); result.AppendLine(); EvaluatePhrase( "|c.", result ); result.AppendLine(); } for ( int i = 0; i < paragraphs; i++ ) { if ( i != 0 ) EvaluateRandomPhrase( s_MaybeHeading, result ); EvaluatePhrase( "|A |B |C |D. ", result ); EvaluateRandomPhrase( s_MaybeParagraph, result ); } } public string GenerateTitle() { m_CardinalSequence = 0; m_OrdinalSequence = 0; var title = new StringBuilder(); EvaluatePhrase( "the |o of |2 |o", title ); return TitleCaseWords( title.ToString() ); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using Document = Lucene.Net.Documents.Document;
using FieldSelector = Lucene.Net.Documents.FieldSelector;
using Directory = Lucene.Net.Store.Directory;

namespace Lucene.Net.Index
{
    /// <summary>A <code>FilterIndexReader</code> contains another IndexReader, which it
    /// uses as its basic source of data, possibly transforming the data along the
    /// way or providing additional functionality. The class
    /// <code>FilterIndexReader</code> itself simply implements all abstract methods
    /// of <code>IndexReader</code> with versions that pass all requests to the
    /// contained index reader. Subclasses of <code>FilterIndexReader</code> may
    /// further override some of these methods and may also provide additional
    /// methods and fields.
    /// </summary>
    public class FilterIndexReader:IndexReader
    {
        /// <summary>Base class for filtering {@link TermDocs} implementations.
        /// Every call is forwarded verbatim to the wrapped <c>TermDocs</c>;
        /// subclasses override only the calls they want to intercept.
        /// </summary>
        public class FilterTermDocs : TermDocs
        {
            // The wrapped enumerator that actually supplies the data.
            protected internal TermDocs in_Renamed;

            /// <summary>Wraps the given <c>TermDocs</c> instance.</summary>
            public FilterTermDocs(TermDocs in_Renamed)
            {
                this.in_Renamed = in_Renamed;
            }

            public virtual void Seek(Term term)
            {
                in_Renamed.Seek(term);
            }

            public virtual void Seek(TermEnum termEnum)
            {
                in_Renamed.Seek(termEnum);
            }

            public virtual int Doc()
            {
                return in_Renamed.Doc();
            }

            public virtual int Freq()
            {
                return in_Renamed.Freq();
            }

            public virtual bool Next()
            {
                return in_Renamed.Next();
            }

            public virtual int Read(int[] docs, int[] freqs)
            {
                return in_Renamed.Read(docs, freqs);
            }

            public virtual bool SkipTo(int i)
            {
                return in_Renamed.SkipTo(i);
            }

            public virtual void Close()
            {
                in_Renamed.Close();
            }
        }

        /// <summary>Base class for filtering {@link TermPositions} implementations.
        /// Extends <c>FilterTermDocs</c>; the position-specific calls are forwarded
        /// after down-casting the wrapped enumerator (the base-class field holds it
        /// as a <c>TermDocs</c>).
        /// </summary>
        public class FilterTermPositions:FilterTermDocs, TermPositions
        {
            /// <summary>Wraps the given <c>TermPositions</c> instance.</summary>
            public FilterTermPositions(TermPositions in_Renamed):base(in_Renamed)
            {
            }

            public virtual int NextPosition()
            {
                return ((TermPositions) this.in_Renamed).NextPosition();
            }

            public virtual int GetPayloadLength()
            {
                return ((TermPositions) this.in_Renamed).GetPayloadLength();
            }

            public virtual byte[] GetPayload(byte[] data, int offset)
            {
                return ((TermPositions) this.in_Renamed).GetPayload(data, offset);
            }

            // TODO: Remove warning after API has been finalized
            public virtual bool IsPayloadAvailable()
            {
                return ((TermPositions) this.in_Renamed).IsPayloadAvailable();
            }
        }

        /// <summary>Base class for filtering {@link TermEnum} implementations.
        /// Forwards every override to the wrapped <c>TermEnum</c>.
        /// </summary>
        public class FilterTermEnum:TermEnum
        {
            // The wrapped enumerator that actually supplies the data.
            protected internal TermEnum in_Renamed;

            /// <summary>Wraps the given <c>TermEnum</c> instance.</summary>
            public FilterTermEnum(TermEnum in_Renamed)
            {
                this.in_Renamed = in_Renamed;
            }

            public override bool Next()
            {
                return in_Renamed.Next();
            }

            public override Term Term()
            {
                return in_Renamed.Term();
            }

            public override int DocFreq()
            {
                return in_Renamed.DocFreq();
            }

            public override void Close()
            {
                in_Renamed.Close();
            }
        }

        // The wrapped reader that all requests are delegated to.
        protected internal IndexReader in_Renamed;

        /// <summary> <p/>Construct a FilterIndexReader based on the specified base reader.
        /// Directory locking for delete, undeleteAll, and setNorm operations is
        /// left to the base reader.<p/>
        /// <p/>Note that base reader is closed if this FilterIndexReader is closed.<p/>
        /// </summary>
        /// <param name="in_Renamed">specified base reader.
        /// </param>
        public FilterIndexReader(IndexReader in_Renamed):base()
        {
            this.in_Renamed = in_Renamed;
        }

        public override Directory Directory()
        {
            return in_Renamed.Directory();
        }

        public override TermFreqVector[] GetTermFreqVectors(int docNumber)
        {
            EnsureOpen();
            return in_Renamed.GetTermFreqVectors(docNumber);
        }

        public override TermFreqVector GetTermFreqVector(int docNumber, System.String field)
        {
            EnsureOpen();
            return in_Renamed.GetTermFreqVector(docNumber, field);
        }

        public override void GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)
        {
            EnsureOpen();
            in_Renamed.GetTermFreqVector(docNumber, field, mapper);
        }

        public override void GetTermFreqVector(int docNumber, TermVectorMapper mapper)
        {
            EnsureOpen();
            in_Renamed.GetTermFreqVector(docNumber, mapper);
        }

        public override int NumDocs()
        {
            // Don't call ensureOpen() here (it could affect performance)
            return in_Renamed.NumDocs();
        }

        public override int MaxDoc()
        {
            // Don't call ensureOpen() here (it could affect performance)
            return in_Renamed.MaxDoc();
        }

        public override Document Document(int n, FieldSelector fieldSelector)
        {
            EnsureOpen();
            return in_Renamed.Document(n, fieldSelector);
        }

        public override bool IsDeleted(int n)
        {
            // Don't call ensureOpen() here (it could affect performance)
            return in_Renamed.IsDeleted(n);
        }

        public override bool HasDeletions()
        {
            // Don't call ensureOpen() here (it could affect performance)
            return in_Renamed.HasDeletions();
        }

        protected internal override void DoUndeleteAll()
        {
            in_Renamed.UndeleteAll();
        }

        public override bool HasNorms(System.String field)
        {
            EnsureOpen();
            return in_Renamed.HasNorms(field);
        }

        public override byte[] Norms(System.String f)
        {
            EnsureOpen();
            return in_Renamed.Norms(f);
        }

        public override void Norms(System.String f, byte[] bytes, int offset)
        {
            EnsureOpen();
            in_Renamed.Norms(f, bytes, offset);
        }

        protected internal override void DoSetNorm(int d, System.String f, byte b)
        {
            in_Renamed.SetNorm(d, f, b);
        }

        public override TermEnum Terms()
        {
            EnsureOpen();
            return in_Renamed.Terms();
        }

        public override TermEnum Terms(Term t)
        {
            EnsureOpen();
            return in_Renamed.Terms(t);
        }

        public override int DocFreq(Term t)
        {
            EnsureOpen();
            return in_Renamed.DocFreq(t);
        }

        public override TermDocs TermDocs()
        {
            EnsureOpen();
            return in_Renamed.TermDocs();
        }

        public override TermDocs TermDocs(Term term)
        {
            EnsureOpen();
            return in_Renamed.TermDocs(term);
        }

        public override TermPositions TermPositions()
        {
            EnsureOpen();
            return in_Renamed.TermPositions();
        }

        protected internal override void DoDelete(int n)
        {
            in_Renamed.DeleteDocument(n);
        }

        /// <deprecated>
        /// </deprecated>
        [Obsolete]
        protected internal override void DoCommit()
        {
            // Delegates to the commitUserData overload with no user data.
            DoCommit(null);
        }

        protected internal override void DoCommit(System.Collections.Generic.IDictionary<string, string> commitUserData)
        {
            in_Renamed.Commit(commitUserData);
        }

        protected internal override void DoClose()
        {
            in_Renamed.Close();

            // NOTE: only needed in case someone had asked for
            // FieldCache for top-level reader (which is generally
            // not a good idea):
            Lucene.Net.Search.FieldCache_Fields.DEFAULT.Purge(this);
        }

        public override System.Collections.Generic.ICollection<string> GetFieldNames(IndexReader.FieldOption fieldNames)
        {
            EnsureOpen();
            return in_Renamed.GetFieldNames(fieldNames);
        }

        public override long GetVersion()
        {
            EnsureOpen();
            return in_Renamed.GetVersion();
        }

        public override bool IsCurrent()
        {
            EnsureOpen();
            return in_Renamed.IsCurrent();
        }

        public override bool IsOptimized()
        {
            EnsureOpen();
            return in_Renamed.IsOptimized();
        }

        public override IndexReader[] GetSequentialSubReaders()
        {
            return in_Renamed.GetSequentialSubReaders();
        }

        // NOTE(review): unimplemented port placeholder — fails under a debugger
        // and returns null otherwise. Callers relying on Clone() will break;
        // confirm whether a real implementation is needed.
        override public System.Object Clone()
        {
            System.Diagnostics.Debug.Fail("Port issue:", "Lets see if we need this FilterIndexReader.Clone()"); // {{Aroush-2.9}}
            return null;
        }

        /// <summary>
        /// If the subclass of FilteredIndexReader modifies the
        /// contents of the FieldCache, you must override this
        /// method to provide a different key
        /// </summary>
        public override object GetFieldCacheKey()
        {
            return in_Renamed.GetFieldCacheKey();
        }

        /// <summary>
        /// If the subclass of FilteredIndexReader modifies the
        /// deleted docs, you must override this method to provide
        /// a different key
        /// </summary>
        public override object GetDeletesCacheKey()
        {
            return in_Renamed.GetDeletesCacheKey();
        }
    }
}
//
// (C) Copyright 2003-2011 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE.  AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
namespace Revit.SDK.Samples.ViewPrinter.CS
{
    // Designer half of the View/Sheet Set dialog: declares the controls and
    // builds the layout in InitializeComponent(). Event-handler bodies live
    // in the other half of this partial class.
    partial class viewSheetSetForm
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.label1 = new System.Windows.Forms.Label();
            this.viewSheetSetNameComboBox = new System.Windows.Forms.ComboBox();
            this.groupBox1 = new System.Windows.Forms.GroupBox();
            this.showViewsCheckBox = new System.Windows.Forms.CheckBox();
            this.showSheetsCheckBox = new System.Windows.Forms.CheckBox();
            this.saveButton = new System.Windows.Forms.Button();
            this.saveAsButton = new System.Windows.Forms.Button();
            this.revertButton = new System.Windows.Forms.Button();
            this.reNameButton = new System.Windows.Forms.Button();
            this.deleteButton = new System.Windows.Forms.Button();
            this.checkAllButton = new System.Windows.Forms.Button();
            this.checkNoneButton = new System.Windows.Forms.Button();
            this.cancelButton = new System.Windows.Forms.Button();
            this.okButton = new System.Windows.Forms.Button();
            this.viewSheetSetListView = new System.Windows.Forms.ListView();
            this.groupBox1.SuspendLayout();
            this.SuspendLayout();
            //
            // label1
            //
            this.label1.AutoSize = true;
            this.label1.Location = new System.Drawing.Point(12, 21);
            this.label1.Name = "label1";
            this.label1.Size = new System.Drawing.Size(38, 13);
            this.label1.TabIndex = 0;
            this.label1.Text = "Name:";
            //
            // viewSheetSetNameComboBox
            //
            this.viewSheetSetNameComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.viewSheetSetNameComboBox.FormattingEnabled = true;
            this.viewSheetSetNameComboBox.Location = new System.Drawing.Point(56, 18);
            this.viewSheetSetNameComboBox.Name = "viewSheetSetNameComboBox";
            this.viewSheetSetNameComboBox.Size = new System.Drawing.Size(243, 21);
            this.viewSheetSetNameComboBox.TabIndex = 1;
            //
            // groupBox1
            //
            this.groupBox1.Controls.Add(this.showViewsCheckBox);
            this.groupBox1.Controls.Add(this.showSheetsCheckBox);
            this.groupBox1.Location = new System.Drawing.Point(12, 293);
            this.groupBox1.Name = "groupBox1";
            this.groupBox1.Size = new System.Drawing.Size(287, 61);
            this.groupBox1.TabIndex = 4;
            this.groupBox1.TabStop = false;
            this.groupBox1.Text = "Show";
            //
            // showViewsCheckBox
            //
            this.showViewsCheckBox.AutoSize = true;
            this.showViewsCheckBox.Checked = true;
            this.showViewsCheckBox.CheckState = System.Windows.Forms.CheckState.Checked;
            this.showViewsCheckBox.Location = new System.Drawing.Point(118, 28);
            this.showViewsCheckBox.Name = "showViewsCheckBox";
            this.showViewsCheckBox.Size = new System.Drawing.Size(54, 17);
            this.showViewsCheckBox.TabIndex = 7;
            this.showViewsCheckBox.Text = "&Views";
            this.showViewsCheckBox.UseVisualStyleBackColor = true;
            this.showViewsCheckBox.CheckedChanged += new System.EventHandler(this.showViewsCheckBox_CheckedChanged);
            //
            // showSheetsCheckBox
            //
            this.showSheetsCheckBox.AutoSize = true;
            this.showSheetsCheckBox.Checked = true;
            this.showSheetsCheckBox.CheckState = System.Windows.Forms.CheckState.Checked;
            this.showSheetsCheckBox.Location = new System.Drawing.Point(6, 28);
            this.showSheetsCheckBox.Name = "showSheetsCheckBox";
            this.showSheetsCheckBox.Size = new System.Drawing.Size(57, 17);
            this.showSheetsCheckBox.TabIndex = 7;
            // NOTE(review): caption is lower-case "s&heets" while its sibling is
            // "&Views" — looks like a casing typo, but it is a user-visible string,
            // so it is left unchanged here; confirm intended caption.
            this.showSheetsCheckBox.Text = "s&heets";
            this.showSheetsCheckBox.UseVisualStyleBackColor = true;
            this.showSheetsCheckBox.CheckedChanged += new System.EventHandler(this.showSheetsCheckBox_CheckedChanged);
            //
            // saveButton
            //
            this.saveButton.Enabled = false;
            this.saveButton.Location = new System.Drawing.Point(328, 21);
            this.saveButton.Name = "saveButton";
            this.saveButton.Size = new System.Drawing.Size(172, 23);
            this.saveButton.TabIndex = 5;
            this.saveButton.Text = "&Save";
            this.saveButton.UseVisualStyleBackColor = true;
            this.saveButton.Click += new System.EventHandler(this.saveButton_Click);
            //
            // saveAsButton
            //
            this.saveAsButton.Location = new System.Drawing.Point(328, 50);
            this.saveAsButton.Name = "saveAsButton";
            this.saveAsButton.Size = new System.Drawing.Size(172, 23);
            this.saveAsButton.TabIndex = 5;
            this.saveAsButton.Text = "Sa&veAs...";
            this.saveAsButton.UseVisualStyleBackColor = true;
            this.saveAsButton.Click += new System.EventHandler(this.saveAsButton_Click);
            //
            // revertButton
            //
            this.revertButton.Enabled = false;
            this.revertButton.Location = new System.Drawing.Point(328, 79);
            this.revertButton.Name = "revertButton";
            this.revertButton.Size = new System.Drawing.Size(172, 23);
            this.revertButton.TabIndex = 5;
            this.revertButton.Text = "&Revert";
            this.revertButton.UseVisualStyleBackColor = true;
            this.revertButton.Click += new System.EventHandler(this.revertButton_Click);
            //
            // reNameButton
            //
            this.reNameButton.Location = new System.Drawing.Point(328, 108);
            this.reNameButton.Name = "reNameButton";
            this.reNameButton.Size = new System.Drawing.Size(172, 23);
            this.reNameButton.TabIndex = 5;
            this.reNameButton.Text = "Ren&ame";
            this.reNameButton.UseVisualStyleBackColor = true;
            this.reNameButton.Click += new System.EventHandler(this.reNameButton_Click);
            //
            // deleteButton
            //
            this.deleteButton.Location = new System.Drawing.Point(328, 137);
            this.deleteButton.Name = "deleteButton";
            this.deleteButton.Size = new System.Drawing.Size(172, 23);
            this.deleteButton.TabIndex = 5;
            this.deleteButton.Text = "&Delete";
            this.deleteButton.UseVisualStyleBackColor = true;
            this.deleteButton.Click += new System.EventHandler(this.deleteButton_Click);
            //
            // checkAllButton
            //
            this.checkAllButton.Location = new System.Drawing.Point(328, 186);
            this.checkAllButton.Name = "checkAllButton";
            this.checkAllButton.Size = new System.Drawing.Size(172, 23);
            this.checkAllButton.TabIndex = 5;
            this.checkAllButton.Text = "&Check All";
            this.checkAllButton.UseVisualStyleBackColor = true;
            this.checkAllButton.Click += new System.EventHandler(this.checkAllButton_Click);
            //
            // checkNoneButton
            //
            this.checkNoneButton.Location = new System.Drawing.Point(328, 215);
            this.checkNoneButton.Name = "checkNoneButton";
            this.checkNoneButton.Size = new System.Drawing.Size(172, 23);
            this.checkNoneButton.TabIndex = 5;
            this.checkNoneButton.Text = "Check &None";
            this.checkNoneButton.UseVisualStyleBackColor = true;
            this.checkNoneButton.Click += new System.EventHandler(this.checkNoneButton_Click);
            //
            // cancelButton
            //
            this.cancelButton.DialogResult = System.Windows.Forms.DialogResult.Cancel;
            this.cancelButton.Location = new System.Drawing.Point(425, 370);
            this.cancelButton.Name = "cancelButton";
            this.cancelButton.Size = new System.Drawing.Size(75, 23);
            this.cancelButton.TabIndex = 6;
            this.cancelButton.Text = "Cancel";
            this.cancelButton.UseVisualStyleBackColor = true;
            //
            // okButton
            //
            this.okButton.DialogResult = System.Windows.Forms.DialogResult.OK;
            this.okButton.Location = new System.Drawing.Point(344, 370);
            this.okButton.Name = "okButton";
            this.okButton.Size = new System.Drawing.Size(75, 23);
            this.okButton.TabIndex = 6;
            this.okButton.Text = "OK";
            this.okButton.UseVisualStyleBackColor = true;
            //
            // viewSheetSetListView
            //
            this.viewSheetSetListView.CheckBoxes = true;
            this.viewSheetSetListView.Location = new System.Drawing.Point(12, 45);
            this.viewSheetSetListView.Name = "viewSheetSetListView";
            this.viewSheetSetListView.Size = new System.Drawing.Size(287, 242);
            this.viewSheetSetListView.Sorting = System.Windows.Forms.SortOrder.Descending;
            this.viewSheetSetListView.TabIndex = 7;
            this.viewSheetSetListView.UseCompatibleStateImageBehavior = false;
            this.viewSheetSetListView.View = System.Windows.Forms.View.List;
            //
            // viewSheetSetForm
            //
            // NOTE(review): several controls share TabIndex values (buttons at 5,
            // check boxes at 7) — tab order within those groups falls back to add
            // order; designer-generated, left as-is.
            this.AcceptButton = this.okButton;
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.CancelButton = this.cancelButton;
            this.ClientSize = new System.Drawing.Size(512, 405);
            this.Controls.Add(this.viewSheetSetListView);
            this.Controls.Add(this.okButton);
            this.Controls.Add(this.cancelButton);
            this.Controls.Add(this.checkNoneButton);
            this.Controls.Add(this.checkAllButton);
            this.Controls.Add(this.deleteButton);
            this.Controls.Add(this.reNameButton);
            this.Controls.Add(this.revertButton);
            this.Controls.Add(this.saveAsButton);
            this.Controls.Add(this.saveButton);
            this.Controls.Add(this.groupBox1);
            this.Controls.Add(this.viewSheetSetNameComboBox);
            this.Controls.Add(this.label1);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "viewSheetSetForm";
            this.ShowInTaskbar = false;
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "View/Sheet Set";
            this.Load += new System.EventHandler(this.ViewSheetSetForm_Load);
            this.groupBox1.ResumeLayout(false);
            this.groupBox1.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.ComboBox viewSheetSetNameComboBox;
        private System.Windows.Forms.GroupBox groupBox1;
        private System.Windows.Forms.Button saveButton;
        private System.Windows.Forms.Button saveAsButton;
        private System.Windows.Forms.Button revertButton;
        private System.Windows.Forms.Button reNameButton;
        private System.Windows.Forms.Button deleteButton;
        private System.Windows.Forms.Button checkAllButton;
        private System.Windows.Forms.Button checkNoneButton;
        private System.Windows.Forms.CheckBox showViewsCheckBox;
        private System.Windows.Forms.CheckBox showSheetsCheckBox;
        private System.Windows.Forms.Button cancelButton;
        private System.Windows.Forms.Button okButton;
        private System.Windows.Forms.ListView viewSheetSetListView;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ using System; using System.Linq; using System.Diagnostics; using System.Collections.Generic; using System.Threading; using System.Security.Cryptography.X509Certificates; using Thrift.Collections; using Thrift.Protocol; using Thrift.Transport; using Thrift.Test; using System.Security.Authentication; namespace Test { public class TestClient { private class TestParams { public int numIterations = 1; public string host = "localhost"; public int port = 9090; public string url; public string pipe; public bool buffered; public bool framed; public string protocol; public bool encrypted = false; protected bool _isFirstTransport = true; public TTransport CreateTransport() { if (url == null) { // endpoint transport TTransport trans = null; if (pipe != null) trans = new TNamedPipeClientTransport(pipe); else { if (encrypted) { string certPath = "../../../../test/keys/client.p12"; X509Certificate cert = new X509Certificate2(certPath, "thrift"); trans = new TTLSSocket(host, port, 0, cert, (o, c, chain, errors) => true, null, SslProtocols.Tls); } else { trans = new TSocket(host, port); } } // layered transport if (buffered) trans = new TBufferedTransport(trans); if (framed) trans = new TFramedTransport(trans); 
if (_isFirstTransport) { //ensure proper open/close of transport trans.Open(); trans.Close(); _isFirstTransport = false; } return trans; } else { return new THttpClient(new Uri(url)); } } public TProtocol CreateProtocol(TTransport transport) { if (protocol == "compact") return new TCompactProtocol(transport); else if (protocol == "json") return new TJSONProtocol(transport); else return new TBinaryProtocol(transport); } }; private const int ErrorBaseTypes = 1; private const int ErrorStructs = 2; private const int ErrorContainers = 4; private const int ErrorExceptions = 8; private const int ErrorUnknown = 64; private class ClientTest { private readonly TTransport transport; private readonly ThriftTest.Client client; private readonly int numIterations; private bool done; public int ReturnCode { get; set; } public ClientTest(TestParams param) { transport = param.CreateTransport(); client = new ThriftTest.Client(param.CreateProtocol(transport)); numIterations = param.numIterations; } public void Execute() { if (done) { Console.WriteLine("Execute called more than once"); throw new InvalidOperationException(); } for (int i = 0; i < numIterations; i++) { try { if (!transport.IsOpen) transport.Open(); } catch (TTransportException ex) { Console.WriteLine("*** FAILED ***"); Console.WriteLine("Connect failed: " + ex.Message); ReturnCode |= ErrorUnknown; Console.WriteLine(ex.Message + " ST: " + ex.StackTrace); continue; } try { ReturnCode |= ExecuteClientTest(client); } catch (Exception ex) { Console.WriteLine("*** FAILED ***"); Console.WriteLine(ex.Message + " ST: " + ex.StackTrace); ReturnCode |= ErrorUnknown; } } try { transport.Close(); } catch(Exception ex) { Console.WriteLine("Error while closing transport"); Console.WriteLine(ex.Message + " ST: " + ex.StackTrace); } done = true; } } public static int Execute(string[] args) { try { TestParams param = new TestParams(); int numThreads = 1; try { for (int i = 0; i < args.Length; i++) { if (args[i] == "-u") { param.url = 
args[++i]; } else if (args[i] == "-n") { param.numIterations = Convert.ToInt32(args[++i]); } else if (args[i] == "-pipe") // -pipe <name> { param.pipe = args[++i]; Console.WriteLine("Using named pipes transport"); } else if (args[i].Contains("--host=")) { param.host = args[i].Substring(args[i].IndexOf("=") + 1); } else if (args[i].Contains("--port=")) { param.port = int.Parse(args[i].Substring(args[i].IndexOf("=")+1)); } else if (args[i] == "-b" || args[i] == "--buffered" || args[i] == "--transport=buffered") { param.buffered = true; Console.WriteLine("Using buffered sockets"); } else if (args[i] == "-f" || args[i] == "--framed" || args[i] == "--transport=framed") { param.framed = true; Console.WriteLine("Using framed transport"); } else if (args[i] == "-t") { numThreads = Convert.ToInt32(args[++i]); } else if (args[i] == "--compact" || args[i] == "--protocol=compact") { param.protocol = "compact"; Console.WriteLine("Using compact protocol"); } else if (args[i] == "--json" || args[i] == "--protocol=json") { param.protocol = "json"; Console.WriteLine("Using JSON protocol"); } else if (args[i] == "--ssl") { param.encrypted = true; Console.WriteLine("Using encrypted transport"); } } } catch (Exception ex) { Console.WriteLine("*** FAILED ***"); Console.WriteLine("Error while parsing arguments"); Console.WriteLine(ex.Message + " ST: " + ex.StackTrace); return ErrorUnknown; } var tests = Enumerable.Range(0, numThreads).Select(_ => new ClientTest(param)).ToArray(); //issue tests on separate threads simultaneously var threads = tests.Select(test => new Thread(test.Execute)).ToArray(); DateTime start = DateTime.Now; foreach (var t in threads) t.Start(); foreach (var t in threads) t.Join(); Console.WriteLine("Total time: " + (DateTime.Now - start)); Console.WriteLine(); return tests.Select(t => t.ReturnCode).Aggregate((r1, r2) => r1 | r2); } catch (Exception outerEx) { Console.WriteLine("*** FAILED ***"); Console.WriteLine("Unexpected error"); 
Console.WriteLine(outerEx.Message + " ST: " + outerEx.StackTrace);
                return ErrorUnknown;
            }
        }

        /// <summary>
        /// Renders a byte array as an uppercase hex string with no separators.
        /// </summary>
        public static string BytesToHex(byte[] data)
        {
            return BitConverter.ToString(data).Replace("-", string.Empty);
        }

        /// <summary>
        /// Produces a 256-byte buffer containing each value 0x00..0xFF:
        /// ascending order by default, or shuffled when <paramref name="randomDist"/> is true.
        /// </summary>
        public static byte[] PrepareTestData(bool randomDist)
        {
            byte[] retval = new byte[0x100];
            int initLen = Math.Min(0x100,retval.Length);

            // linear distribution, unless random is requested
            if (!randomDist)
            {
                for (var i = 0; i < initLen; ++i)
                {
                    retval[i] = (byte)i;
                }
                return retval;
            }

            // random distribution
            for (var i = 0; i < initLen; ++i)
            {
                retval[i] = (byte)0;
            }
            var rnd = new Random();
            // place values 1..255 at random free slots; exactly one slot keeps value 0
            for (var i = 1; i < initLen; ++i)
            {
                while( true)
                {
                    int nextPos = rnd.Next() % initLen;
                    if (retval[nextPos] == 0)
                    {
                        retval[nextPos] = (byte)i;
                        break;
                    }
                }
            }
            return retval;
        }

        /// <summary>
        /// Runs the full cross-language test suite against a connected server and
        /// returns a bitmask of the Error* flags (0 on complete success).
        /// </summary>
        public static int ExecuteClientTest(ThriftTest.Client client)
        {
            int returnCode = 0;

            Console.Write("testVoid()");
            client.testVoid();
            Console.WriteLine(" = void");

            // Each base-type test echoes a value and checks it round-trips unchanged.
            Console.Write("testString(\"Test\")");
            string s = client.testString("Test");
            Console.WriteLine(" = \"" + s + "\"");
            if ("Test" != s)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            Console.Write("testBool(true)");
            bool t = client.testBool((bool)true);
            Console.WriteLine(" = " + t);
            if (!t)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }
            Console.Write("testBool(false)");
            bool f = client.testBool((bool)false);
            Console.WriteLine(" = " + f);
            if (f)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            Console.Write("testByte(1)");
            sbyte i8 = client.testByte((sbyte)1);
            Console.WriteLine(" = " + i8);
            if (1 != i8)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            Console.Write("testI32(-1)");
            int i32 = client.testI32(-1);
            Console.WriteLine(" = " + i32);
            if (-1 != i32)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            // -34359738368 exceeds 32 bits, so truncation bugs show up here
            Console.Write("testI64(-34359738368)");
            long i64 = client.testI64(-34359738368);
            Console.WriteLine(" = " + i64);
            if (-34359738368 != i64)
            {
Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            // TODO: Validate received message
            Console.Write("testDouble(5.325098235)");
            double dub = client.testDouble(5.325098235);
            Console.WriteLine(" = " + dub);
            // exact == on doubles is deliberate: the value must round-trip unchanged
            if (5.325098235 != dub)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            Console.Write("testDouble(-0.000341012439638598279)");
            dub = client.testDouble(-0.000341012439638598279);
            Console.WriteLine(" = " + dub);
            if (-0.000341012439638598279 != dub)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            // testBinary: echo a 256-byte randomly distributed buffer and compare byte-for-byte
            byte[] binOut = PrepareTestData(true);
            Console.Write("testBinary(" + BytesToHex(binOut) + ")");
            try
            {
                byte[] binIn = client.testBinary(binOut);
                Console.WriteLine(" = " + BytesToHex(binIn));
                if (binIn.Length != binOut.Length)
                {
                    Console.WriteLine("*** FAILED ***");
                    returnCode |= ErrorBaseTypes;
                }
                for (int ofs = 0; ofs < Math.Min(binIn.Length, binOut.Length); ++ofs)
                    if (binIn[ofs] != binOut[ofs])
                    {
                        Console.WriteLine("*** FAILED ***");
                        returnCode |= ErrorBaseTypes;
                    }
            }
            catch (Thrift.TApplicationException ex)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
                Console.WriteLine(ex.Message + " ST: " + ex.StackTrace);
            }

            // binary equals? only with hashcode option enabled ...
Console.WriteLine("Test CrazyNesting");
            // Equality only applies when the generated CrazyNesting type declares its
            // own Equals override (hashcode/equality generator option enabled).
            if( typeof(CrazyNesting).GetMethod("Equals").DeclaringType == typeof(CrazyNesting))
            {
                CrazyNesting one = new CrazyNesting();
                CrazyNesting two = new CrazyNesting();
                one.String_field = "crazy";
                two.String_field = "crazy";
                one.Binary_field = new byte[10] { 0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0xFF };
                two.Binary_field = new byte[10] { 0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0xFF };
                if (!one.Equals(two))
                {
                    Console.WriteLine("*** FAILED ***");
                    returnCode |= ErrorContainers;
                    throw new Exception("CrazyNesting.Equals failed");
                }
            }

            // TODO: Validate received message
            Console.Write("testStruct({\"Zero\", 1, -3, -5})");
            Xtruct o = new Xtruct();
            o.String_thing = "Zero";
            o.Byte_thing = (sbyte)1;
            o.I32_thing = -3;
            o.I64_thing = -5;
            Xtruct i = client.testStruct(o);
            Console.WriteLine(" = {\"" + i.String_thing + "\", " + i.Byte_thing + ", " + i.I32_thing + ", " + i.I64_thing + "}");

            // TODO: Validate received message
            Console.Write("testNest({1, {\"Zero\", 1, -3, -5}, 5})");
            Xtruct2 o2 = new Xtruct2();
            o2.Byte_thing = (sbyte)1;
            o2.Struct_thing = o;
            o2.I32_thing = 5;
            Xtruct2 i2 = client.testNest(o2);
            i = i2.Struct_thing;
            Console.WriteLine(" = {" + i2.Byte_thing + ", {\"" + i.String_thing + "\", " + i.Byte_thing + ", " + i.I32_thing + ", " + i.I64_thing + "}, " + i2.I32_thing + "}");

            // testMap: send {0=>-10, 1=>-9, ..., 4=>-6} and print the echoed map
            Dictionary<int, int> mapout = new Dictionary<int, int>();
            for (int j = 0; j < 5; j++)
            {
                mapout[j] = j - 10;
            }
            Console.Write("testMap({");
            bool first = true;
            foreach (int key in mapout.Keys)
            {
                if (first)
                {
                    first = false;
                }
                else
                {
                    Console.Write(", ");
                }
                Console.Write(key + " => " + mapout[key]);
            }
            Console.Write("})");
            Dictionary<int, int> mapin = client.testMap(mapout);
            Console.Write(" = {");
            first = true;
            foreach (int key in mapin.Keys)
            {
                if (first)
                {
                    first = false;
                }
                else
                {
                    Console.Write(", ");
                }
                Console.Write(key + " => " + mapin[key]);
            }
            Console.WriteLine("}");

            // TODO: Validate received message
            List<int> listout = new List<int>();
            for
(int j = -2; j < 3; j++)
            {
                listout.Add(j);
            }
            Console.Write("testList({");
            first = true;
            foreach (int j in listout)
            {
                if (first)
                {
                    first = false;
                }
                else
                {
                    Console.Write(", ");
                }
                Console.Write(j);
            }
            Console.Write("})");
            List<int> listin = client.testList(listout);
            Console.Write(" = {");
            first = true;
            foreach (int j in listin)
            {
                if (first)
                {
                    first = false;
                }
                else
                {
                    Console.Write(", ");
                }
                Console.Write(j);
            }
            Console.WriteLine("}");

            //set
            // TODO: Validate received message
            THashSet<int> setout = new THashSet<int>();
            for (int j = -2; j < 3; j++)
            {
                setout.Add(j);
            }
            Console.Write("testSet({");
            first = true;
            foreach (int j in setout)
            {
                if (first)
                {
                    first = false;
                }
                else
                {
                    Console.Write(", ");
                }
                Console.Write(j);
            }
            Console.Write("})");
            THashSet<int> setin = client.testSet(setout);
            Console.Write(" = {");
            first = true;
            foreach (int j in setin)
            {
                if (first)
                {
                    first = false;
                }
                else
                {
                    Console.Write(", ");
                }
                Console.Write(j);
            }
            Console.WriteLine("}");

            // testEnum: each enum constant must round-trip unchanged
            Console.Write("testEnum(ONE)");
            Numberz ret = client.testEnum(Numberz.ONE);
            Console.WriteLine(" = " + ret);
            if (Numberz.ONE != ret)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorStructs;
            }
            Console.Write("testEnum(TWO)");
            ret = client.testEnum(Numberz.TWO);
            Console.WriteLine(" = " + ret);
            if (Numberz.TWO != ret)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorStructs;
            }
            Console.Write("testEnum(THREE)");
            ret = client.testEnum(Numberz.THREE);
            Console.WriteLine(" = " + ret);
            if (Numberz.THREE != ret)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorStructs;
            }
            Console.Write("testEnum(FIVE)");
            ret = client.testEnum(Numberz.FIVE);
            Console.WriteLine(" = " + ret);
            if (Numberz.FIVE != ret)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorStructs;
            }
            Console.Write("testEnum(EIGHT)");
            ret = client.testEnum(Numberz.EIGHT);
            Console.WriteLine(" = " + ret);
            if (Numberz.EIGHT != ret)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorStructs;
            }

            Console.Write("testTypedef(309858235082523)");
            long uid =
client.testTypedef(309858235082523L);
            Console.WriteLine(" = " + uid);
            if (309858235082523L != uid)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorStructs;
            }

            // TODO: Validate received message
            Console.Write("testMapMap(1)");
            Dictionary<int, Dictionary<int, int>> mm = client.testMapMap(1);
            Console.Write(" = {");
            foreach (int key in mm.Keys)
            {
                Console.Write(key + " => {");
                Dictionary<int, int> m2 = mm[key];
                foreach (int k2 in m2.Keys)
                {
                    Console.Write(k2 + " => " + m2[k2] + ", ");
                }
                Console.Write("}, ");
            }
            Console.WriteLine("}");

            // TODO: Validate received message
            // Build the Insanity argument: one UserMap entry and one Xtruct element.
            Insanity insane = new Insanity();
            insane.UserMap = new Dictionary<Numberz, long>();
            insane.UserMap[Numberz.FIVE] = 5000L;
            Xtruct truck = new Xtruct();
            truck.String_thing = "Truck";
            truck.Byte_thing = (sbyte)8;
            truck.I32_thing = 8;
            truck.I64_thing = 8;
            insane.Xtructs = new List<Xtruct>();
            insane.Xtructs.Add(truck);
            Console.Write("testInsanity()");
            Dictionary<long, Dictionary<Numberz, Insanity>> whoa = client.testInsanity(insane);
            Console.Write(" = {");
            foreach (long key in whoa.Keys)
            {
                Dictionary<Numberz, Insanity> val = whoa[key];
                Console.Write(key + " => {");
                foreach (Numberz k2 in val.Keys)
                {
                    Insanity v2 = val[k2];
                    Console.Write(k2 + " => {");
                    Dictionary<Numberz, long> userMap = v2.UserMap;
                    Console.Write("{");
                    if (userMap != null)
                    {
                        foreach (Numberz k3 in userMap.Keys)
                        {
                            Console.Write(k3 + " => " + userMap[k3] + ", ");
                        }
                    }
                    else
                    {
                        Console.Write("null");
                    }
                    Console.Write("}, ");
                    List<Xtruct> xtructs = v2.Xtructs;
                    Console.Write("{");
                    if (xtructs != null)
                    {
                        foreach (Xtruct x in xtructs)
                        {
                            // BUGFIX: I32_thing was printed twice; the fourth field of the
                            // {"str", byte, i32, i64} format (see the testStruct output above)
                            // must be I64_thing.
                            Console.Write("{\"" + x.String_thing + "\", " + x.Byte_thing + ", " + x.I32_thing + ", " + x.I64_thing + "}, ");
                        }
                    }
                    else
                    {
                        Console.Write("null");
                    }
                    Console.Write("}");
                    Console.Write("}, ");
                }
                Console.Write("}, ");
            }
            Console.WriteLine("}");

            // Arguments for the testMulti call issued on the next statement block.
            sbyte arg0 = 1;
            int arg1 = 2;
            long arg2 = long.MaxValue;
            Dictionary<short, string> multiDict = new Dictionary<short, string>();
            multiDict[1] = "one";
            Numberz arg4 = Numberz.FIVE;
long arg5 = 5000000;
            Console.Write("Test Multi(" + arg0 + "," + arg1 + "," + arg2 + "," + multiDict + "," + arg4 + "," + arg5 + ")");
            Xtruct multiResponse = client.testMulti(arg0, arg1, arg2, multiDict, arg4, arg5);
            Console.Write(" = Xtruct(byte_thing:" + multiResponse.Byte_thing + ",String_thing:" + multiResponse.String_thing + ",i32_thing:" + multiResponse.I32_thing + ",i64_thing:" + multiResponse.I64_thing + ")\n");

            // testException("Xception") must throw an Xception with code 1001
            try
            {
                Console.WriteLine("testException(\"Xception\")");
                client.testException("Xception");
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
            }
            catch (Xception ex)
            {
                if (ex.ErrorCode != 1001 || ex.Message != "Xception")
                {
                    Console.WriteLine("*** FAILED ***");
                    returnCode |= ErrorExceptions;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
                Console.WriteLine(ex.Message + " ST: " + ex.StackTrace);
            }

            // testException("TException") must throw some TException; exact subtype not checked
            try
            {
                Console.WriteLine("testException(\"TException\")");
                client.testException("TException");
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
            }
            catch (Thrift.TException)
            {
                // OK
            }
            catch (Exception ex)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
                Console.WriteLine(ex.Message + " ST: " + ex.StackTrace);
            }

            // testException("ok") must not throw at all
            try
            {
                Console.WriteLine("testException(\"ok\")");
                client.testException("ok");
                // OK
            }
            catch (Exception ex)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
                Console.WriteLine(ex.Message + " ST: " + ex.StackTrace);
            }

            // testMultiException("Xception", ...) must throw Xception(1001, "This is an Xception")
            try
            {
                Console.WriteLine("testMultiException(\"Xception\", ...)");
                client.testMultiException("Xception", "ignore");
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
            }
            catch (Xception ex)
            {
                if (ex.ErrorCode != 1001 || ex.Message != "This is an Xception")
                {
                    Console.WriteLine("*** FAILED ***");
                    returnCode |= ErrorExceptions;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
                Console.WriteLine(ex.Message + " ST: " + ex.StackTrace);
            }

            try
            {
Console.WriteLine("testMultiException(\"Xception2\", ...)");
                client.testMultiException("Xception2", "ignore");
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
            }
            catch (Xception2 ex)
            {
                // expected: Xception2 carrying code 2002 and a nested struct message
                if (ex.ErrorCode != 2002 || ex.Struct_thing.String_thing != "This is an Xception2")
                {
                    Console.WriteLine("*** FAILED ***");
                    returnCode |= ErrorExceptions;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
                Console.WriteLine(ex.Message + " ST: " + ex.StackTrace);
            }

            // testMultiException("success", "OK") must return normally, echoing "OK"
            try
            {
                Console.WriteLine("testMultiException(\"success\", \"OK\")");
                if ("OK" != client.testMultiException("success", "OK").String_thing)
                {
                    Console.WriteLine("*** FAILED ***");
                    returnCode |= ErrorExceptions;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorExceptions;
                Console.WriteLine(ex.Message + " ST: " + ex.StackTrace);
            }

            // A oneway call must return immediately; > 1s means the server blocked on it
            Stopwatch sw = new Stopwatch();
            sw.Start();
            Console.WriteLine("Test Oneway(1)");
            client.testOneway(1);
            sw.Stop();
            if (sw.ElapsedMilliseconds > 1000)
            {
                Console.WriteLine("*** FAILED ***");
                returnCode |= ErrorBaseTypes;
            }

            // Rough round-trip benchmark: 1000 testVoid() calls.
            // (total seconds for 1000 calls == milliseconds per call, hence the "ms" label)
            Console.Write("Test Calltime()");
            var startt = DateTime.UtcNow;
            for ( int k=0; k<1000; ++k )
                client.testVoid();
            Console.WriteLine(" = " + (DateTime.UtcNow - startt).TotalSeconds.ToString() + " ms a testVoid() call" );
            return returnCode;
        }
    }
}
using System; using System.Globalization; using System.IO; using System.Runtime.CompilerServices; using System.Text; namespace YAF.Lucene.Net.Search { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AtomicReader = YAF.Lucene.Net.Index.AtomicReader; using BinaryDocValues = YAF.Lucene.Net.Index.BinaryDocValues; using IBits = YAF.Lucene.Net.Util.IBits; using BytesRef = YAF.Lucene.Net.Util.BytesRef; using NumericUtils = YAF.Lucene.Net.Util.NumericUtils; using RamUsageEstimator = YAF.Lucene.Net.Util.RamUsageEstimator; using SortedDocValues = YAF.Lucene.Net.Index.SortedDocValues; using SortedSetDocValues = YAF.Lucene.Net.Index.SortedSetDocValues; using Terms = YAF.Lucene.Net.Index.Terms; using TermsEnum = YAF.Lucene.Net.Index.TermsEnum; /// <summary> /// Expert: Maintains caches of term values. 
/// /// <para/>Created: May 19, 2004 11:13:14 AM /// <para/> /// @lucene.internal /// <para/> /// @since lucene 1.4 </summary> /// <seealso cref="Lucene.Net.Util.FieldCacheSanityChecker"/> public interface IFieldCache { /// <summary> /// Checks the internal cache for an appropriate entry, and if none is found, /// reads the terms in <paramref name="field"/> and returns a bit set at the size of /// <c>reader.MaxDoc</c>, with turned on bits for each docid that /// does have a value for this field. /// </summary> IBits GetDocsWithField(AtomicReader reader, string field); /// <summary> /// Checks the internal cache for an appropriate entry, and if none is /// found, reads the terms in <paramref name="field"/> as a single <see cref="byte"/> and returns an array /// of size <c>reader.MaxDoc</c> of the value each document /// has in the given field. </summary> /// <param name="reader"> Used to get field values. </param> /// <param name="field"> Which field contains the single <see cref="byte"/> values. </param> /// <param name="setDocsWithField"> If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will /// also be computed and stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> If any error occurs. </exception> [Obsolete("(4.4) Index as a numeric field using Int32Field and then use GetInt32s(AtomicReader, string, bool) instead.")] FieldCache.Bytes GetBytes(AtomicReader reader, string field, bool setDocsWithField); /// <summary> /// Checks the internal cache for an appropriate entry, and if none is found, /// reads the terms in <paramref name="field"/> as bytes and returns an array of /// size <c>reader.MaxDoc</c> of the value each document has in the /// given field. </summary> /// <param name="reader"> Used to get field values. </param> /// <param name="field"> Which field contains the <see cref="byte"/>s. 
</param> /// <param name="parser"> Computes <see cref="byte"/> for string values. </param> /// <param name="setDocsWithField"> If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will /// also be computed and stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> If any error occurs. </exception> [Obsolete("(4.4) Index as a numeric field using Int32Field and then use GetInt32s(AtomicReader, string, bool) instead.")] FieldCache.Bytes GetBytes(AtomicReader reader, string field, FieldCache.IByteParser parser, bool setDocsWithField); /// <summary> /// Checks the internal cache for an appropriate entry, and if none is /// found, reads the terms in <paramref name="field"/> as <see cref="short"/>s and returns an array /// of size <c>reader.MaxDoc</c> of the value each document /// has in the given field. /// <para/> /// NOTE: this was getShorts() in Lucene /// </summary> /// <param name="reader"> Used to get field values. </param> /// <param name="field"> Which field contains the <see cref="short"/>s. </param> /// <param name="setDocsWithField"> If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will /// also be computed and stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> If any error occurs. </exception> [Obsolete("(4.4) Index as a numeric field using Int32Field and then use GetInt32s(AtomicReader, string, bool) instead.")] FieldCache.Int16s GetInt16s(AtomicReader reader, string field, bool setDocsWithField); /// <summary> /// Checks the internal cache for an appropriate entry, and if none is found, /// reads the terms in <paramref name="field"/> as shorts and returns an array of /// size <c>reader.MaxDoc</c> of the value each document has in the /// given field. 
/// <para/> /// NOTE: this was getShorts() in Lucene /// </summary> /// <param name="reader"> Used to get field values. </param> /// <param name="field"> Which field contains the <see cref="short"/>s. </param> /// <param name="parser"> Computes <see cref="short"/> for string values. </param> /// <param name="setDocsWithField"> If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will /// also be computed and stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> If any error occurs. </exception> [Obsolete("(4.4) Index as a numeric field using Int32Field and then use GetInt32s(AtomicReader, string, bool) instead.")] FieldCache.Int16s GetInt16s(AtomicReader reader, string field, FieldCache.IInt16Parser parser, bool setDocsWithField); /// <summary> /// Returns an <see cref="FieldCache.Int32s"/> over the values found in documents in the given /// field. /// <para/> /// NOTE: this was getInts() in Lucene /// </summary> /// <seealso cref="GetInt32s(AtomicReader, string, FieldCache.IInt32Parser, bool)"/> FieldCache.Int32s GetInt32s(AtomicReader reader, string field, bool setDocsWithField); /// <summary> /// Returns an <see cref="FieldCache.Int32s"/> over the values found in documents in the given /// field. If the field was indexed as <see cref="Documents.NumericDocValuesField"/>, it simply /// uses <see cref="AtomicReader.GetNumericDocValues(string)"/> to read the values. /// Otherwise, it checks the internal cache for an appropriate entry, and if /// none is found, reads the terms in <paramref name="field"/> as <see cref="int"/>s and returns /// an array of size <c>reader.MaxDoc</c> of the value each document /// has in the given field. /// <para/> /// NOTE: this was getInts() in Lucene /// </summary> /// <param name="reader"> /// Used to get field values. </param> /// <param name="field"> /// Which field contains the <see cref="int"/>s. 
</param> /// <param name="parser"> /// Computes <see cref="int"/> for string values. May be <c>null</c> if the /// requested field was indexed as <see cref="Documents.NumericDocValuesField"/> or /// <see cref="Documents.Int32Field"/>. </param> /// <param name="setDocsWithField"> /// If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will also be computed and /// stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> /// If any error occurs. </exception> FieldCache.Int32s GetInt32s(AtomicReader reader, string field, FieldCache.IInt32Parser parser, bool setDocsWithField); /// <summary> /// Returns a <see cref="FieldCache.Singles"/> over the values found in documents in the given /// field. /// <para/> /// NOTE: this was getFloats() in Lucene /// </summary> /// <seealso cref="GetSingles(AtomicReader, string, FieldCache.ISingleParser, bool)"/> FieldCache.Singles GetSingles(AtomicReader reader, string field, bool setDocsWithField); /// <summary> /// Returns a <see cref="FieldCache.Singles"/> over the values found in documents in the given /// field. If the field was indexed as <see cref="Documents.NumericDocValuesField"/>, it simply /// uses <see cref="AtomicReader.GetNumericDocValues(string)"/> to read the values. /// Otherwise, it checks the internal cache for an appropriate entry, and if /// none is found, reads the terms in <paramref name="field"/> as <see cref="float"/>s and returns /// an array of size <c>reader.MaxDoc</c> of the value each document /// has in the given field. /// <para/> /// NOTE: this was getFloats() in Lucene /// </summary> /// <param name="reader"> /// Used to get field values. </param> /// <param name="field"> /// Which field contains the <see cref="float"/>s. </param> /// <param name="parser"> /// Computes <see cref="float"/> for string values. 
May be <c>null</c> if the /// requested field was indexed as <see cref="Documents.NumericDocValuesField"/> or /// <see cref="Documents.SingleField"/>. </param> /// <param name="setDocsWithField"> /// If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will also be computed and /// stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> /// If any error occurs. </exception> FieldCache.Singles GetSingles(AtomicReader reader, string field, FieldCache.ISingleParser parser, bool setDocsWithField); /// <summary> /// Returns a <see cref="FieldCache.Int64s"/> over the values found in documents in the given /// field. /// <para/> /// NOTE: this was getLongs() in Lucene /// </summary> /// <seealso cref="GetInt64s(AtomicReader, string, FieldCache.IInt64Parser, bool)"/> FieldCache.Int64s GetInt64s(AtomicReader reader, string field, bool setDocsWithField); /// <summary> /// Returns a <see cref="FieldCache.Int64s"/> over the values found in documents in the given /// field. If the field was indexed as <see cref="Documents.NumericDocValuesField"/>, it simply /// uses <see cref="AtomicReader.GetNumericDocValues(string)"/> to read the values. /// Otherwise, it checks the internal cache for an appropriate entry, and if /// none is found, reads the terms in <paramref name="field"/> as <see cref="long"/>s and returns /// an array of size <c>reader.MaxDoc</c> of the value each document /// has in the given field. /// <para/> /// NOTE: this was getLongs() in Lucene /// </summary> /// <param name="reader"> /// Used to get field values. </param> /// <param name="field"> /// Which field contains the <see cref="long"/>s. </param> /// <param name="parser"> /// Computes <see cref="long"/> for string values. May be <c>null</c> if the /// requested field was indexed as <see cref="Documents.NumericDocValuesField"/> or /// <see cref="Documents.Int64Field"/>. 
</param> /// <param name="setDocsWithField"> /// If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will also be computed and /// stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> /// If any error occurs. </exception> FieldCache.Int64s GetInt64s(AtomicReader reader, string field, FieldCache.IInt64Parser parser, bool setDocsWithField); /// <summary> /// Returns a <see cref="FieldCache.Doubles"/> over the values found in documents in the given /// field. /// </summary> /// <seealso cref="GetDoubles(AtomicReader, string, FieldCache.IDoubleParser, bool)"/> FieldCache.Doubles GetDoubles(AtomicReader reader, string field, bool setDocsWithField); /// <summary> /// Returns a <see cref="FieldCache.Doubles"/> over the values found in documents in the given /// field. If the field was indexed as <see cref="Documents.NumericDocValuesField"/>, it simply /// uses <see cref="AtomicReader.GetNumericDocValues(string)"/> to read the values. /// Otherwise, it checks the internal cache for an appropriate entry, and if /// none is found, reads the terms in <paramref name="field"/> as <see cref="double"/>s and returns /// an array of size <c>reader.MaxDoc</c> of the value each document /// has in the given field. /// </summary> /// <param name="reader"> /// Used to get field values. </param> /// <param name="field"> /// Which field contains the <see cref="double"/>s. </param> /// <param name="parser"> /// Computes <see cref="double"/> for string values. May be <c>null</c> if the /// requested field was indexed as <see cref="Documents.NumericDocValuesField"/> or /// <see cref="Documents.DoubleField"/>. </param> /// <param name="setDocsWithField"> /// If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will also be computed and /// stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. 
</returns> /// <exception cref="IOException"> /// If any error occurs. </exception> FieldCache.Doubles GetDoubles(AtomicReader reader, string field, FieldCache.IDoubleParser parser, bool setDocsWithField); /// <summary> /// Checks the internal cache for an appropriate entry, and if none /// is found, reads the term values in <paramref name="field"/> /// and returns a <see cref="BinaryDocValues"/> instance, providing a /// method to retrieve the term (as a <see cref="BytesRef"/>) per document. </summary> /// <param name="reader"> Used to get field values. </param> /// <param name="field"> Which field contains the strings. </param> /// <param name="setDocsWithField"> If true then <see cref="GetDocsWithField(AtomicReader, string)"/> will /// also be computed and stored in the <see cref="IFieldCache"/>. </param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> If any error occurs. </exception> BinaryDocValues GetTerms(AtomicReader reader, string field, bool setDocsWithField); /// <summary> /// Expert: just like <see cref="GetTerms(AtomicReader, string, bool)"/>, /// but you can specify whether more RAM should be consumed in exchange for /// faster lookups (default is "true"). Note that the /// first call for a given reader and field "wins", /// subsequent calls will share the same cache entry. /// </summary> BinaryDocValues GetTerms(AtomicReader reader, string field, bool setDocsWithField, float acceptableOverheadRatio); /// <summary> /// Checks the internal cache for an appropriate entry, and if none /// is found, reads the term values in <paramref name="field"/> /// and returns a <see cref="SortedDocValues"/> instance, /// providing methods to retrieve sort ordinals and terms /// (as a <see cref="BytesRef"/>) per document. </summary> /// <param name="reader"> Used to get field values. </param> /// <param name="field"> Which field contains the strings. 
</param> /// <returns> The values in the given field for each document. </returns> /// <exception cref="IOException"> If any error occurs. </exception> SortedDocValues GetTermsIndex(AtomicReader reader, string field); /// <summary> /// Expert: just like /// <see cref="GetTermsIndex(AtomicReader, string)"/>, but you can specify /// whether more RAM should be consumed in exchange for /// faster lookups (default is "true"). Note that the /// first call for a given reader and field "wins", /// subsequent calls will share the same cache entry. /// </summary> SortedDocValues GetTermsIndex(AtomicReader reader, string field, float acceptableOverheadRatio); /// <summary> /// Checks the internal cache for an appropriate entry, and if none is found, reads the term values /// in <paramref name="field"/> and returns a <see cref="SortedSetDocValues"/> instance, providing a method to retrieve /// the terms (as ords) per document. /// </summary> /// <param name="reader"> Used to build a <see cref="SortedSetDocValues"/> instance </param> /// <param name="field"> Which field contains the strings. </param> /// <returns> a <see cref="SortedSetDocValues"/> instance </returns> /// <exception cref="IOException"> If any error occurs. </exception> SortedSetDocValues GetDocTermOrds(AtomicReader reader, string field); // LUCENENET specific CacheEntry moved to FieldCache static class /// <summary> /// EXPERT: Generates an array of <see cref="FieldCache.CacheEntry"/> objects representing all items /// currently in the <see cref="IFieldCache"/>. /// <para> /// NOTE: These <see cref="FieldCache.CacheEntry"/> objects maintain a strong reference to the /// Cached Values. Maintaining references to a <see cref="FieldCache.CacheEntry"/> the <see cref="AtomicReader"/> /// associated with it has garbage collected will prevent the Value itself /// from being garbage collected when the Cache drops the <see cref="WeakReference"/>. 
/// </para> /// @lucene.experimental /// </summary> FieldCache.CacheEntry[] GetCacheEntries(); /// <summary> /// <para> /// EXPERT: Instructs the FieldCache to forcibly expunge all entries /// from the underlying caches. This is intended only to be used for /// test methods as a way to ensure a known base state of the Cache /// (with out needing to rely on GC to free <see cref="WeakReference"/>s). /// It should not be relied on for "Cache maintenance" in general /// application code. /// </para> /// @lucene.experimental /// </summary> void PurgeAllCaches(); /// <summary> /// Expert: drops all cache entries associated with this /// reader <see cref="Index.IndexReader.CoreCacheKey"/>. NOTE: this cache key must /// precisely match the reader that the cache entry is /// keyed on. If you pass a top-level reader, it usually /// will have no effect as Lucene now caches at the segment /// reader level. /// </summary> void PurgeByCacheKey(object coreCacheKey); /// <summary> /// If non-null, <see cref="FieldCacheImpl"/> will warn whenever /// entries are created that are not sane according to /// <see cref="Lucene.Net.Util.FieldCacheSanityChecker"/>. /// </summary> TextWriter InfoStream { set; get; } } public static class FieldCache { /// <summary> /// Field values as 8-bit signed bytes /// </summary> public abstract class Bytes { /// <summary> /// Return a single Byte representation of this field's value. /// </summary> public abstract byte Get(int docID); /// <summary> /// Zero value for every document /// </summary> public static readonly Bytes EMPTY = new EmptyBytes(); private sealed class EmptyBytes : Bytes { public override byte Get(int docID) { return 0; } } } /// <summary> /// Field values as 16-bit signed shorts /// <para/> /// NOTE: This was Shorts in Lucene /// </summary> public abstract class Int16s { /// <summary> /// Return a <see cref="short"/> representation of this field's value. 
/// </summary> public abstract short Get(int docID); /// <summary> /// Zero value for every document /// </summary> public static readonly Int16s EMPTY = new EmptyInt16s(); private sealed class EmptyInt16s : Int16s { public override short Get(int docID) { return 0; } } } /// <summary> /// Field values as 32-bit signed integers /// <para/> /// NOTE: This was Ints in Lucene /// </summary> public abstract class Int32s { /// <summary> /// Return an <see cref="int"/> representation of this field's value. /// </summary> public abstract int Get(int docID); /// <summary> /// Zero value for every document /// </summary> public static readonly Int32s EMPTY = new EmptyInt32s(); private sealed class EmptyInt32s : Int32s { public override int Get(int docID) { return 0; } } } /// <summary> /// Field values as 64-bit signed long integers /// <para/> /// NOTE: This was Longs in Lucene /// </summary> public abstract class Int64s { /// <summary> /// Return an <see cref="long"/> representation of this field's value. /// </summary> public abstract long Get(int docID); /// <summary> /// Zero value for every document /// </summary> public static readonly Int64s EMPTY = new EmptyInt64s(); private sealed class EmptyInt64s : Int64s { public override long Get(int docID) { return 0; } } } /// <summary> /// Field values as 32-bit floats /// <para/> /// NOTE: This was Floats in Lucene /// </summary> public abstract class Singles { /// <summary> /// Return an <see cref="float"/> representation of this field's value. /// </summary> public abstract float Get(int docID); /// <summary> /// Zero value for every document /// </summary> public static readonly Singles EMPTY = new EmptySingles(); private sealed class EmptySingles : Singles { public override float Get(int docID) { return 0; } } } /// <summary> /// Field values as 64-bit doubles /// </summary> public abstract class Doubles { /// <summary> /// Return a <see cref="double"/> representation of this field's value. 
            /// </summary>
            /// <param name="docID">Document ID to look up.</param>
            public abstract double Get(int docID);

            /// <summary>
            /// Zero value for every document
            /// </summary>
            public static readonly Doubles EMPTY = new EmptyDoubles();

            // Immutable implementation backing EMPTY: always returns zero.
            private sealed class EmptyDoubles : Doubles
            {
                public override double Get(int docID)
                {
                    return 0;
                }
            }
        }

        /// <summary>
        /// Placeholder indicating creation of this cache is currently in-progress.
        /// </summary>
        public sealed class CreationPlaceholder
        {
            // The value being created; populated once creation completes.
            internal object Value { get; set; }
        }

        /// <summary>
        /// Marker interface as super-interface to all parsers. It
        /// is used to specify a custom parser to
        /// <see cref="SortField.SortField(string, IParser)"/>.
        /// </summary>
        public interface IParser
        {
            /// <summary>
            /// Pulls a <see cref="Index.TermsEnum"/> from the given <see cref="Index.Terms"/>. This method allows certain parsers
            /// to filter the actual <see cref="Index.TermsEnum"/> before the field cache is filled.
            /// </summary>
            /// <param name="terms">The <see cref="Index.Terms"/> instance to create the <see cref="Index.TermsEnum"/> from.</param>
            /// <returns>A possibly filtered <see cref="Index.TermsEnum"/> instance, this method must not return <c>null</c>.</returns>
            /// <exception cref="System.IO.IOException">If an <see cref="IOException"/> occurs</exception>
            TermsEnum TermsEnum(Terms terms);
        }

        /// <summary>
        /// Interface to parse bytes from document fields.
        /// </summary>
        /// <seealso cref="IFieldCache.GetBytes(AtomicReader, string, IByteParser, bool)"/>
        [Obsolete]
        public interface IByteParser : IParser
        {
            /// <summary>
            /// Return a single Byte representation of this field's value.
            /// </summary>
            byte ParseByte(BytesRef term);
        }

        /// <summary>
        /// Interface to parse <see cref="short"/>s from document fields.
        /// <para/>
        /// NOTE: This was ShortParser in Lucene
        /// </summary>
        /// <seealso cref="IFieldCache.GetInt16s(AtomicReader, string, IInt16Parser, bool)"/>
        [Obsolete]
        public interface IInt16Parser : IParser
        {
            /// <summary>
            /// Return a <see cref="short"/> representation of this field's value.
            /// <para/>
            /// NOTE: This was parseShort() in Lucene
            /// </summary>
            short ParseInt16(BytesRef term);
        }

        /// <summary>
        /// Interface to parse <see cref="int"/>s from document fields.
        /// <para/>
        /// NOTE: This was IntParser in Lucene
        /// </summary>
        /// <seealso cref="IFieldCache.GetInt32s(AtomicReader, string, IInt32Parser, bool)"/>
        public interface IInt32Parser : IParser
        {
            /// <summary>
            /// Return an <see cref="int"/> representation of this field's value.
            /// <para/>
            /// NOTE: This was parseInt() in Lucene
            /// </summary>
            int ParseInt32(BytesRef term);
        }

        /// <summary>
        /// Interface to parse <see cref="float"/>s from document fields.
        /// <para/>
        /// NOTE: This was FloatParser in Lucene
        /// </summary>
        public interface ISingleParser : IParser
        {
            /// <summary>
            /// Return an <see cref="float"/> representation of this field's value.
            /// <para/>
            /// NOTE: This was parseFloat() in Lucene
            /// </summary>
            float ParseSingle(BytesRef term);
        }

        /// <summary>
        /// Interface to parse <see cref="long"/> from document fields.
        /// <para/>
        /// NOTE: This was LongParser in Lucene
        /// </summary>
        /// <seealso cref="IFieldCache.GetInt64s(AtomicReader, string, IInt64Parser, bool)"/>
        public interface IInt64Parser : IParser
        {
            /// <summary>
            /// Return a <see cref="long"/> representation of this field's value.
            /// <para/>
            /// NOTE: This was parseLong() in Lucene
            /// </summary>
            long ParseInt64(BytesRef term);
        }

        /// <summary>
        /// Interface to parse <see cref="double"/>s from document fields.
        /// </summary>
        /// <seealso cref="IFieldCache.GetDoubles(AtomicReader, string, IDoubleParser, bool)"/>
        public interface IDoubleParser : IParser
        {
            /// <summary>
            /// Return an <see cref="double"/> representation of this field's value.
            /// </summary>
            double ParseDouble(BytesRef term);
        }

        /// <summary>
        /// Expert: The cache used internally by sorting and range query classes.
        /// </summary>
        public static IFieldCache DEFAULT = new FieldCacheImpl();

        /// <summary>
        /// The default parser for byte values, which are encoded by <see cref="sbyte.ToString(string, IFormatProvider)"/>
        /// using <see cref="CultureInfo.InvariantCulture"/>.
        /// </summary>
        [Obsolete]
        public static readonly IByteParser DEFAULT_BYTE_PARSER = new ByteParser();

        // Parses the term's UTF-8 text as an invariant-culture signed byte.
        [Obsolete]
        private sealed class ByteParser : IByteParser
        {
            public byte ParseByte(BytesRef term)
            {
                // TODO: would be far better to directly parse from
                // UTF8 bytes... but really users should use
                // IntField, instead, which already decodes
                // directly from byte[]
                return (byte)sbyte.Parse(term.Utf8ToString(), CultureInfo.InvariantCulture);
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".DEFAULT_BYTE_PARSER";
            }

            // Uses the raw terms iterator; no filtering needed for plain text terms.
            public TermsEnum TermsEnum(Terms terms)
            {
                return terms.GetIterator(null);
            }
        }

        /// <summary>
        /// The default parser for <see cref="short"/> values, which are encoded by <see cref="short.ToString(string, IFormatProvider)"/>
        /// using <see cref="CultureInfo.InvariantCulture"/>.
        /// <para/>
        /// NOTE: This was DEFAULT_SHORT_PARSER in Lucene
        /// </summary>
        [Obsolete]
        public static readonly IInt16Parser DEFAULT_INT16_PARSER = new Int16Parser();

        // Parses the term's UTF-8 text as an invariant-culture short.
        [Obsolete]
        private sealed class Int16Parser : IInt16Parser
        {
            /// <summary>
            /// NOTE: This was parseShort() in Lucene
            /// </summary>
            public short ParseInt16(BytesRef term)
            {
                // TODO: would be far better to directly parse from
                // UTF8 bytes...
but really users should use
                // IntField, instead, which already decodes
                // directly from byte[]
                return short.Parse(term.Utf8ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture);
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".DEFAULT_INT16_PARSER";
            }

            // Uses the raw terms iterator; no filtering needed for plain text terms.
            public TermsEnum TermsEnum(Terms terms)
            {
                return terms.GetIterator(null);
            }
        }

        /// <summary>
        /// The default parser for <see cref="int"/> values, which are encoded by <see cref="int.ToString(string, IFormatProvider)"/>
        /// using <see cref="CultureInfo.InvariantCulture"/>.
        /// <para/>
        /// NOTE: This was DEFAULT_INT_PARSER in Lucene
        /// </summary>
        [Obsolete]
        public static readonly IInt32Parser DEFAULT_INT32_PARSER = new Int32Parser();

        // Parses the term's UTF-8 text as an invariant-culture int.
        [Obsolete]
        private sealed class Int32Parser : IInt32Parser
        {
            /// <summary>
            /// NOTE: This was parseInt() in Lucene
            /// </summary>
            public int ParseInt32(BytesRef term)
            {
                // TODO: would be far better to directly parse from
                // UTF8 bytes... but really users should use
                // IntField, instead, which already decodes
                // directly from byte[]
                return int.Parse(term.Utf8ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture);
            }

            // Uses the raw terms iterator; no filtering needed for plain text terms.
            public TermsEnum TermsEnum(Terms terms)
            {
                return terms.GetIterator(null);
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".DEFAULT_INT32_PARSER";
            }
        }

        /// <summary>
        /// The default parser for <see cref="float"/> values, which are encoded by <see cref="float.ToString(string, IFormatProvider)"/>
        /// using <see cref="CultureInfo.InvariantCulture"/>.
        /// <para/>
        /// NOTE: This was DEFAULT_FLOAT_PARSER in Lucene
        /// </summary>
        [Obsolete]
        public static readonly ISingleParser DEFAULT_SINGLE_PARSER = new SingleParser();

        // Parses the term's UTF-8 text as an invariant-culture float (via double, see below).
        [Obsolete]
        private sealed class SingleParser : ISingleParser
        {
            /// <summary>
            /// NOTE: This was parseFloat() in Lucene
            /// </summary>
            public float ParseSingle(BytesRef term)
            {
                // TODO: would be far better to directly parse from
                // UTF8 bytes...
but really users should use
                // FloatField, instead, which already decodes
                // directly from byte[]

                // LUCENENET: We parse to double first and then cast to float, which allows us to parse
                // double.MaxValue.ToString("R") (resulting in Infinity). This is how it worked in Java
                // and the TestFieldCache.TestInfoStream() test depends on this behavior to pass.

                // We also need to use the same logic as DEFAULT_DOUBLE_PARSER to ensure we have signed zero
                // support, so just call it directly rather than duplicating the logic here.
                return (float)DEFAULT_DOUBLE_PARSER.ParseDouble(term);
            }

            // Uses the raw terms iterator; no filtering needed for plain text terms.
            public TermsEnum TermsEnum(Terms terms)
            {
                return terms.GetIterator(null);
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".DEFAULT_SINGLE_PARSER";
            }
        }

        /// <summary>
        /// The default parser for <see cref="long"/> values, which are encoded by <see cref="long.ToString(string, IFormatProvider)"/>
        /// using <see cref="CultureInfo.InvariantCulture"/>.
        /// <para/>
        /// NOTE: This was DEFAULT_LONG_PARSER in Lucene
        /// </summary>
        [Obsolete]
        public static readonly IInt64Parser DEFAULT_INT64_PARSER = new Int64Parser();

        // Parses the term's UTF-8 text as an invariant-culture long.
        [Obsolete]
        private sealed class Int64Parser : IInt64Parser
        {
            /// <summary>
            /// NOTE: This was parseLong() in Lucene
            /// </summary>
            public long ParseInt64(BytesRef term)
            {
                // TODO: would be far better to directly parse from
                // UTF8 bytes... but really users should use
                // LongField, instead, which already decodes
                // directly from byte[]
                return long.Parse(term.Utf8ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture);
            }

            // Uses the raw terms iterator; no filtering needed for plain text terms.
            public TermsEnum TermsEnum(Terms terms)
            {
                return terms.GetIterator(null);
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".DEFAULT_INT64_PARSER";
            }
        }

        /// <summary>
        /// The default parser for <see cref="double"/> values, which are encoded by <see cref="double.ToString(string, IFormatProvider)"/>
        /// using <see cref="CultureInfo.InvariantCulture"/>.
        /// </summary>
        [Obsolete]
        public static readonly IDoubleParser DEFAULT_DOUBLE_PARSER = new DoubleParser();

        // Parses the term's UTF-8 text as an invariant-culture double, preserving negative zero.
        [Obsolete]
        private sealed class DoubleParser : IDoubleParser
        {
            public double ParseDouble(BytesRef term)
            {
                // TODO: would be far better to directly parse from
                // UTF8 bytes... but really users should use
                // DoubleField, instead, which already decodes
                // directly from byte[]
                string text = term.Utf8ToString();
                double value = double.Parse(text, NumberStyles.Float, CultureInfo.InvariantCulture);

                // LUCENENET specific special case - check whether a negative
                // zero was passed in and, if so, convert the sign. Unfortunately, double.Parse()
                // doesn't take care of this for us.
                if (value == 0 && text.TrimStart().StartsWith("-", StringComparison.Ordinal))
                {
                    value = -0d; // Hard-coding the value in case double.Parse() works right someday (which would break if we did value * -1)
                }
                return value;
            }

            // Uses the raw terms iterator; no filtering needed for plain text terms.
            public TermsEnum TermsEnum(Terms terms)
            {
                return terms.GetIterator(null);
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".DEFAULT_DOUBLE_PARSER";
            }
        }

        /// <summary>
        /// A parser instance for <see cref="int"/> values encoded by <see cref="NumericUtils"/>, e.g. when indexed
        /// via <see cref="Documents.Int32Field"/>/<see cref="Analysis.NumericTokenStream"/>.
        /// <para/>
        /// NOTE: This was NUMERIC_UTILS_INT_PARSER in Lucene
        /// </summary>
        public static readonly IInt32Parser NUMERIC_UTILS_INT32_PARSER = new NumericUtilsInt32Parser();

        // Decodes prefix-coded numeric terms produced by NumericUtils into ints.
        private sealed class NumericUtilsInt32Parser : IInt32Parser
        {
            /// <summary>
            /// NOTE: This was parseInt() in Lucene
            /// </summary>
            public int ParseInt32(BytesRef term)
            {
                return NumericUtils.PrefixCodedToInt32(term);
            }

            // Filters the enum down to full-precision prefix-coded terms only.
            public TermsEnum TermsEnum(Terms terms)
            {
                return NumericUtils.FilterPrefixCodedInt32s(terms.GetIterator(null));
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_INT32_PARSER";
            }
        }

        /// <summary>
        /// A parser instance for <see cref="float"/> values encoded with <see cref="NumericUtils"/>, e.g. when indexed
        /// via <see cref="Documents.SingleField"/>/<see cref="Analysis.NumericTokenStream"/>.
        /// <para/>
        /// NOTE: This was NUMERIC_UTILS_FLOAT_PARSER in Lucene
        /// </summary>
        public static readonly ISingleParser NUMERIC_UTILS_SINGLE_PARSER = new NumericUtilsSingleParser();

        // Decodes prefix-coded numeric terms produced by NumericUtils into floats.
        private sealed class NumericUtilsSingleParser : ISingleParser
        {
            /// <summary>
            /// NOTE: This was parseFloat() in Lucene
            /// </summary>
            public float ParseSingle(BytesRef term)
            {
                return NumericUtils.SortableInt32ToSingle(NumericUtils.PrefixCodedToInt32(term));
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_SINGLE_PARSER";
            }

            // Filters the enum down to full-precision prefix-coded terms only.
            public TermsEnum TermsEnum(Terms terms)
            {
                return NumericUtils.FilterPrefixCodedInt32s(terms.GetIterator(null));
            }
        }

        /// <summary>
        /// A parser instance for <see cref="long"/> values encoded by <see cref="NumericUtils"/>, e.g. when indexed
        /// via <see cref="Documents.Int64Field"/>/<see cref="Analysis.NumericTokenStream"/>.
        /// <para/>
        /// NOTE: This was NUMERIC_UTILS_LONG_PARSER in Lucene
        /// </summary>
        public static readonly IInt64Parser NUMERIC_UTILS_INT64_PARSER = new NumericUtilsInt64Parser();

        // Decodes prefix-coded numeric terms produced by NumericUtils into longs.
        private sealed class NumericUtilsInt64Parser : IInt64Parser
        {
            /// <summary>
            /// NOTE: This was parseLong() in Lucene
            /// </summary>
            public long ParseInt64(BytesRef term)
            {
                return NumericUtils.PrefixCodedToInt64(term);
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_INT64_PARSER";
            }

            // Filters the enum down to full-precision prefix-coded terms only.
            public TermsEnum TermsEnum(Terms terms)
            {
                return NumericUtils.FilterPrefixCodedInt64s(terms.GetIterator(null));
            }
        }

        /// <summary>
        /// A parser instance for <see cref="double"/> values encoded with <see cref="NumericUtils"/>, e.g. when indexed
        /// via <see cref="Documents.DoubleField"/>/<see cref="Analysis.NumericTokenStream"/>.
        /// </summary>
        public static readonly IDoubleParser NUMERIC_UTILS_DOUBLE_PARSER = new NumericUtilsDoubleParser();

        // Decodes prefix-coded numeric terms produced by NumericUtils into doubles.
        private sealed class NumericUtilsDoubleParser : IDoubleParser
        {
            public double ParseDouble(BytesRef term)
            {
                return NumericUtils.SortableInt64ToDouble(NumericUtils.PrefixCodedToInt64(term));
            }

            public override string ToString()
            {
                return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_DOUBLE_PARSER";
            }

            // Filters the enum down to full-precision prefix-coded terms only.
            public TermsEnum TermsEnum(Terms terms)
            {
                return NumericUtils.FilterPrefixCodedInt64s(terms.GetIterator(null));
            }
        }

        // .NET Port: skipping down to about line 681 of java version. The actual interface methods of FieldCache are in IFieldCache below.

        /// <summary>
        /// EXPERT: A unique Identifier/Description for each item in the <see cref="IFieldCache"/>.
        /// Can be useful for logging/debugging.
        /// <para/>
        /// @lucene.experimental
        /// </summary>
        public sealed class CacheEntry
        {
            // Immutable identity of the entry: reader cache key, field, cache type,
            // optional custom parser, and the cached value itself.
            private readonly object readerKey;
            private readonly string fieldName;
            private readonly Type cacheType;
            private readonly object custom;
            private readonly object value;
            // Human-readable size estimate; null until EstimateSize() is called.
            private string size;

            public CacheEntry(object readerKey, string fieldName, Type cacheType, object custom, object value)
            {
                this.readerKey = readerKey;
                this.fieldName = fieldName;
                this.cacheType = cacheType;
                this.custom = custom;
                this.value = value;
            }

            public object ReaderKey
            {
                get { return readerKey; }
            }

            public string FieldName
            {
                get { return fieldName; }
            }

            public Type CacheType
            {
                get { return cacheType; }
            }

            public object Custom
            {
                get { return custom; }
            }

            public object Value
            {
                get { return value; }
            }

            /// <summary>
            /// Computes (and stores) the estimated size of the cache <see cref="Value"/>
            /// </summary>
            /// <seealso cref="EstimatedSize"/>
            public void EstimateSize()
            {
                long bytesUsed = RamUsageEstimator.SizeOf(Value);
                size = RamUsageEstimator.HumanReadableUnits(bytesUsed);
            }

            /// <summary>
            /// The most recently estimated size of the value, <c>null</c> unless
            /// <see cref="EstimateSize()"/> has been called.
            /// </summary>
            public string EstimatedSize
            {
                get { return size; }
            }

            public override string ToString()
            {
                StringBuilder b = new StringBuilder();
                b.Append("'").Append(ReaderKey).Append("'=>");
                b.Append("'").Append(FieldName).Append("',");
                b.Append(CacheType).Append(",").Append(Custom);
                b.Append("=>").Append(Value.GetType().FullName).Append("#");
                b.Append(RuntimeHelpers.GetHashCode(Value));

                String s = EstimatedSize;
                if (null != s)
                {
                    b.Append(" (size =~ ").Append(s).Append(')');
                }

                return b.ToString();
            }
        }
    }
}
/* * Copyright 2001-2010 Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * */ using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Reflection; using System.Threading; using System.Threading.Tasks; using System.Xml; using System.Xml.Schema; using System.Xml.Serialization; using Quartz.Impl.Matchers; using Quartz.Logging; using Quartz.Spi; using Quartz.Util; using Quartz.Xml.JobSchedulingData20; namespace Quartz.Xml { /// <summary> /// Parses an XML file that declares Jobs and their schedules (Triggers). /// </summary> /// <remarks> /// <para> /// The xml document must conform to the format defined in "job_scheduling_data_2_0.xsd" /// </para> /// /// <para> /// After creating an instance of this class, you should call one of the <see cref="ProcessFile(CancellationToken)" /> /// functions, after which you may call the ScheduledJobs() /// function to get a handle to the defined Jobs and Triggers, which can then be /// scheduled with the <see cref="IScheduler" />. Alternatively, you could call /// the <see cref="ProcessFileAndScheduleJobs(Quartz.IScheduler, CancellationToken)" /> function to do all of this /// in one step. /// </para> /// /// <para> /// The same instance can be used again and again, with the list of defined Jobs /// being cleared each time you call a <see cref="ProcessFile(CancellationToken)" /> method, /// however a single instance is not thread-safe. 
    /// </para>
    /// </remarks>
    /// <author><a href="mailto:bonhamcm@thirdeyeconsulting.com">Chris Bonham</a></author>
    /// <author>James House</author>
    /// <author>Marko Lahma (.NET)</author>
    /// <author>Christian Krumm (.NET Bugfix)</author>
    public class XMLSchedulingDataProcessor
    {
        // Default file name looked for by the parameterless ProcessFile overload.
        public const string QuartzXmlFileName = "quartz_jobs.xml";
        // Embedded XSD resource used by ValidateXml to validate the document.
        public const string QuartzXsdResourceName = "Quartz.Xml.job_scheduling_data_2_0.xsd";

        // pre-processing commands (collected from the XML, executed before scheduling)
        private readonly List<string> jobGroupsToDelete = new List<string>();
        private readonly List<string> triggerGroupsToDelete = new List<string>();
        private readonly List<JobKey> jobsToDelete = new List<JobKey>();
        private readonly List<TriggerKey> triggersToDelete = new List<TriggerKey>();

        // scheduling commands (jobs/triggers parsed from the XML)
        private readonly List<IJobDetail> loadedJobs = new List<IJobDetail>();
        private readonly List<ITrigger> loadedTriggers = new List<ITrigger>();

        // directives / validation state
        private readonly List<Exception> validationExceptions = new List<Exception>();
        private readonly List<string> jobGroupsToNeverDelete = new List<string>();
        private readonly List<string> triggerGroupsToNeverDelete = new List<string>();

        /// <summary>
        /// Constructor for XMLSchedulingDataProcessor.
        /// </summary>
        public XMLSchedulingDataProcessor(ITypeLoadHelper typeLoadHelper)
        {
            OverWriteExistingData = true;
            IgnoreDuplicates = false;
            Log = LogProvider.GetLogger(GetType());
            TypeLoadHelper = typeLoadHelper;
        }

        /// <summary>
        /// Whether the existing scheduling data (with same identifiers) will be
        /// overwritten.
        /// </summary>
        /// <remarks>
        /// If false, and <see cref="IgnoreDuplicates" /> is not false, and jobs or
        /// triggers with the same names already exist as those in the file, an
        /// error will occur.
        /// </remarks>
        /// <seealso cref="IgnoreDuplicates" />
        public virtual bool OverWriteExistingData { get; set; }

        /// <summary>
        /// If true (and <see cref="OverWriteExistingData" /> is false) then any
        /// job/triggers encountered in this file that have names that already exist
        /// in the scheduler will be ignored, and no error will be produced.
        /// </summary>
        /// <seealso cref="OverWriteExistingData"/>
        public virtual bool IgnoreDuplicates { get; set; }

        /// <summary>
        /// If true (and <see cref="OverWriteExistingData" /> is true) then any
        /// job/triggers encountered in this file that already exist in the scheduler
        /// will be updated with start time relative to old trigger. Effectively
        /// new trigger's last fire time will be updated to old trigger's last fire time
        /// and trigger's next fire time will be updated to be next from this last fire time.
        /// </summary>
        public virtual bool ScheduleTriggerRelativeToReplacedTrigger { get; set; }

        /// <summary>
        /// Gets the log.
        /// </summary>
        /// <value>The log.</value>
        private ILog Log { get; }

        // Read-only views over the jobs/triggers parsed by the most recent ProcessXxx call.
        protected virtual IReadOnlyList<IJobDetail> LoadedJobs => loadedJobs.AsReadOnly();

        protected virtual IReadOnlyList<ITrigger> LoadedTriggers => loadedTriggers.AsReadOnly();

        // Resolves job type names from the XML into .NET types.
        protected ITypeLoadHelper TypeLoadHelper { get; }

        /// <summary>
        /// Process the xml file in the default location (a file named
        /// "quartz_jobs.xml" in the current working directory).
        /// </summary>
        /// <param name="cancellationToken">The cancellation instruction.</param>
        public virtual Task ProcessFile(CancellationToken cancellationToken = default)
        {
            return ProcessFile(QuartzXmlFileName, cancellationToken);
        }

        /// <summary>
        /// Process the xml file named <paramref name="fileName" />.
        /// </summary>
        /// <param name="fileName">meta data file name.</param>
        /// <param name="cancellationToken">The cancellation instruction.</param>
        public virtual Task ProcessFile(
            string fileName,
            CancellationToken cancellationToken = default)
        {
            // Use the file name itself as the system id.
            return ProcessFile(fileName, fileName, cancellationToken);
        }

        /// <summary>
        /// Process the xml file named <paramref name="fileName" /> with the given system
        /// ID.
        /// </summary>
        /// <param name="fileName">Name of the file.</param>
        /// <param name="systemId">The system id.</param>
        /// <param name="cancellationToken">The cancellation instruction.</param>
        public virtual async Task ProcessFile(
            string fileName,
            string systemId,
            CancellationToken cancellationToken = default)
        {
            // resolve file name first
            fileName = FileUtil.ResolveFile(fileName) ?? fileName;

            Log.InfoFormat("Parsing XML file: {0} with systemId: {1}", fileName, systemId);
            using (var stream = File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.Read))
            using (StreamReader sr = new StreamReader(stream))
            {
                ProcessInternal(await sr.ReadToEndAsync().ConfigureAwait(false));
            }
        }

        /// <summary>
        /// Process the xml read from the given stream with the given system
        /// ID.
        /// </summary>
        /// <param name="stream">The stream.</param>
        /// <param name="systemId">The system id.</param>
        /// <param name="cancellationToken">The cancellation instruction.</param>
        public virtual async Task ProcessStream(
            Stream stream,
            string?
systemId,
            CancellationToken cancellationToken = default)
        {
            Log.InfoFormat("Parsing XML from stream with systemId: {0}", systemId);
            using StreamReader sr = new StreamReader(stream);
            ProcessInternal(await sr.ReadToEndAsync().ConfigureAwait(false));
        }

        // Resets all parse state so the same instance can be reused for another file.
        protected virtual void PrepForProcessing()
        {
            ClearValidationExceptions();

            OverWriteExistingData = true;
            IgnoreDuplicates = false;

            jobGroupsToDelete.Clear();
            jobsToDelete.Clear();
            triggerGroupsToDelete.Clear();
            triggersToDelete.Clear();

            loadedJobs.Clear();
            loadedTriggers.Clear();
        }

        // Validates, deserializes and walks the XML document, filling the
        // pre-processing command lists and the loaded jobs/triggers lists.
        protected virtual void ProcessInternal(string xml)
        {
            PrepForProcessing();

            ValidateXml(xml);
            MaybeThrowValidationException();

            // deserialize as object model
            var xs = new XmlSerializer(typeof (QuartzXmlConfiguration20));
            var data = (QuartzXmlConfiguration20?) xs.Deserialize(new StringReader(xml));

            if (data == null)
            {
                throw new SchedulerConfigException("Job definition data from XML was null after deserialization");
            }

            //
            // Extract pre-processing commands
            //
            if (data.preprocessingcommands != null)
            {
                foreach (preprocessingcommandsType command in data.preprocessingcommands)
                {
                    if (command.deletejobsingroup != null)
                    {
                        foreach (string s in command.deletejobsingroup)
                        {
                            var deleteJobGroup = s.NullSafeTrim();
                            if (!string.IsNullOrEmpty(deleteJobGroup))
                            {
                                jobGroupsToDelete.Add(deleteJobGroup);
                            }
                        }
                    }
                    if (command.deletetriggersingroup != null)
                    {
                        foreach (string s in command.deletetriggersingroup)
                        {
                            var deleteTriggerGroup = s.NullSafeTrim();
                            if (!string.IsNullOrEmpty(deleteTriggerGroup))
                            {
                                triggerGroupsToDelete.Add(deleteTriggerGroup);
                            }
                        }
                    }
                    if (command.deletejob != null)
                    {
                        foreach (preprocessingcommandsTypeDeletejob s in command.deletejob)
                        {
                            var name = s.name.TrimEmptyToNull();
                            var group = s.group.TrimEmptyToNull();

                            if (name == null)
                            {
                                throw new SchedulerConfigException("Encountered a 'delete-job' command without a name specified.");
                            }
                            jobsToDelete.Add(new JobKey(name, group!));
                        }
                    }
                    if (command.deletetrigger != null)
                    {
                        foreach
(preprocessingcommandsTypeDeletetrigger s in command.deletetrigger)
                        {
                            var name = s.name.TrimEmptyToNull();
                            var group = s.group.TrimEmptyToNull();

                            if (name == null)
                            {
                                throw new SchedulerConfigException("Encountered a 'delete-trigger' command without a name specified.");
                            }
                            triggersToDelete.Add(new TriggerKey(name, group!));
                        }
                    }
                }
            }

            if (Log.IsDebugEnabled())
            {
                Log.Debug("Found " + jobGroupsToDelete.Count + " delete job group commands.");
                Log.Debug("Found " + triggerGroupsToDelete.Count + " delete trigger group commands.");
                Log.Debug("Found " + jobsToDelete.Count + " delete job commands.");
                Log.Debug("Found " + triggersToDelete.Count + " delete trigger commands.");
            }

            //
            // Extract directives
            //
            if (data.processingdirectives != null && data.processingdirectives.Length > 0)
            {
                bool overWrite = data.processingdirectives[0].overwriteexistingdata;
                Log.Debug("Directive 'overwrite-existing-data' specified as: " + overWrite);
                OverWriteExistingData = overWrite;
            }
            else
            {
                Log.Debug("Directive 'overwrite-existing-data' not specified, defaulting to " + OverWriteExistingData);
            }

            if (data.processingdirectives != null && data.processingdirectives.Length > 0)
            {
                bool ignoreduplicates = data.processingdirectives[0].ignoreduplicates;
                Log.Debug("Directive 'ignore-duplicates' specified as: " + ignoreduplicates);
                IgnoreDuplicates = ignoreduplicates;
            }
            else
            {
                Log.Debug("Directive 'ignore-duplicates' not specified, defaulting to " + IgnoreDuplicates);
            }

            if (data.processingdirectives != null && data.processingdirectives.Length > 0)
            {
                bool scheduleRelative = data.processingdirectives[0].scheduletriggerrelativetoreplacedtrigger;
                Log.Debug("Directive 'schedule-trigger-relative-to-replaced-trigger' specified as: " + scheduleRelative);
                ScheduleTriggerRelativeToReplacedTrigger = scheduleRelative;
            }
            else
            {
                Log.Debug("Directive 'schedule-trigger-relative-to-replaced-trigger' not specified, defaulting to " + ScheduleTriggerRelativeToReplacedTrigger);
            }

            //
            // Extract Job definitions...
//
            List<jobdetailType> jobNodes = new List<jobdetailType>();
            if (data.schedule != null)
            {
                foreach (var schedule in data.schedule)
                {
                    if (schedule?.job != null)
                    {
                        jobNodes.AddRange(schedule.job);
                    }
                }
            }

            Log.Debug("Found " + jobNodes.Count + " job definitions.");
            foreach (jobdetailType jobDetailType in jobNodes)
            {
                var jobName = jobDetailType.name.TrimEmptyToNull();
                var jobGroup = jobDetailType.group.TrimEmptyToNull();
                var jobDescription = jobDetailType.description.TrimEmptyToNull();
                var jobTypeName = jobDetailType.jobtype.TrimEmptyToNull();
                bool jobDurability = jobDetailType.durable;
                bool jobRecoveryRequested = jobDetailType.recover;

                // Resolve the job's .NET type from the name declared in the XML.
                Type jobType = TypeLoadHelper.LoadType(jobTypeName!)!;

                IJobDetail jobDetail = JobBuilder.Create(jobType!)
                    .WithIdentity(jobName!, jobGroup!)
                    .WithDescription(jobDescription)
                    .StoreDurably(jobDurability)
                    .RequestRecovery(jobRecoveryRequested)
                    .Build();

                // Copy <job-data-map> entries into the job's data map.
                if (jobDetailType.jobdatamap != null && jobDetailType.jobdatamap.entry != null)
                {
                    foreach (entryType entry in jobDetailType.jobdatamap.entry)
                    {
                        var key = entry.key.Trim();
                        var value = entry.value.TrimEmptyToNull();
                        jobDetail.JobDataMap.Add(key, value!);
                    }
                }

                if (Log.IsDebugEnabled())
                {
                    Log.Debug("Parsed job definition: " + jobDetail);
                }

                AddJobToSchedule(jobDetail);
            }

            //
            // Extract Trigger definitions...
//
            List<triggerType> triggerEntries = new List<triggerType>();
            if (data.schedule != null)
            {
                foreach (var schedule in data.schedule)
                {
                    if (schedule != null && schedule.trigger != null)
                    {
                        triggerEntries.AddRange(schedule.trigger);
                    }
                }
            }

            Log.Debug("Found " + triggerEntries.Count + " trigger definitions.");
            foreach (triggerType triggerNode in triggerEntries)
            {
                var triggerName = triggerNode.Item.name.TrimEmptyToNull()!;
                var triggerGroup = triggerNode.Item.group.TrimEmptyToNull()!;
                var triggerDescription = triggerNode.Item.description.TrimEmptyToNull();
                var triggerCalendarRef = triggerNode.Item.calendarname.TrimEmptyToNull();
                string triggerJobName = triggerNode.Item.jobname.TrimEmptyToNull()!;
                string triggerJobGroup = triggerNode.Item.jobgroup.TrimEmptyToNull()!;

                int triggerPriority = TriggerConstants.DefaultPriority;
                if (!triggerNode.Item.priority.IsNullOrWhiteSpace())
                {
                    triggerPriority = Convert.ToInt32(triggerNode.Item.priority);
                }

                // Start time is either an absolute date-time or a seconds offset from now.
                DateTimeOffset triggerStartTime = SystemTime.UtcNow();
                if (triggerNode.Item.Item != null)
                {
                    if (triggerNode.Item.Item is DateTime time)
                    {
                        triggerStartTime = new DateTimeOffset(time);
                    }
                    else
                    {
                        triggerStartTime = triggerStartTime.AddSeconds(Convert.ToInt32(triggerNode.Item.Item));
                    }
                }

                DateTime? triggerEndTime = triggerNode.Item.endtimeSpecified ? triggerNode.Item.endtime : (DateTime?) null;

                // Build the schedule according to the concrete trigger element type.
                IScheduleBuilder sched;
                if (triggerNode.Item is simpleTriggerType simpleTrigger)
                {
                    var repeatCountString = simpleTrigger.repeatcount.TrimEmptyToNull();
                    var repeatIntervalString = simpleTrigger.repeatinterval.TrimEmptyToNull();

                    int repeatCount = ParseSimpleTriggerRepeatCount(repeatCountString!);
                    TimeSpan repeatInterval = repeatIntervalString == null ?
TimeSpan.Zero : TimeSpan.FromMilliseconds(Convert.ToInt64(repeatIntervalString));

                    sched = SimpleScheduleBuilder.Create()
                        .WithInterval(repeatInterval)
                        .WithRepeatCount(repeatCount);

                    if (!simpleTrigger.misfireinstruction.IsNullOrWhiteSpace())
                    {
                        ((SimpleScheduleBuilder) sched).WithMisfireHandlingInstruction(ReadMisfireInstructionFromString(simpleTrigger.misfireinstruction));
                    }
                }
                else if (triggerNode.Item is cronTriggerType)
                {
                    cronTriggerType cronTrigger = (cronTriggerType) triggerNode.Item;
                    var cronExpression = cronTrigger.cronexpression.TrimEmptyToNull();
                    var timezoneString = cronTrigger.timezone.TrimEmptyToNull();

                    // Time zone is optional; null lets the builder use its default.
                    TimeZoneInfo? tz = timezoneString != null ? TimeZoneUtil.FindTimeZoneById(timezoneString) : null;
                    sched = CronScheduleBuilder.CronSchedule(cronExpression!)
                        .InTimeZone(tz!);

                    if (!cronTrigger.misfireinstruction.IsNullOrWhiteSpace())
                    {
                        ((CronScheduleBuilder) sched).WithMisfireHandlingInstruction(ReadMisfireInstructionFromString(cronTrigger.misfireinstruction));
                    }
                }
                else if (triggerNode.Item is calendarIntervalTriggerType)
                {
                    calendarIntervalTriggerType calendarIntervalTrigger = (calendarIntervalTriggerType) triggerNode.Item;
                    var repeatIntervalString = calendarIntervalTrigger.repeatinterval.TrimEmptyToNull();

                    IntervalUnit intervalUnit = ParseDateIntervalTriggerIntervalUnit(calendarIntervalTrigger.repeatintervalunit.TrimEmptyToNull());
                    int repeatInterval = repeatIntervalString == null ?
0 : Convert.ToInt32(repeatIntervalString);

                    sched = CalendarIntervalScheduleBuilder.Create()
                        .WithInterval(repeatInterval, intervalUnit);

                    if (!calendarIntervalTrigger.misfireinstruction.IsNullOrWhiteSpace())
                    {
                        ((CalendarIntervalScheduleBuilder) sched).WithMisfireHandlingInstruction(ReadMisfireInstructionFromString(calendarIntervalTrigger.misfireinstruction));
                    }
                }
                else
                {
                    throw new SchedulerConfigException("Unknown trigger type in XML configuration");
                }

                IMutableTrigger trigger = (IMutableTrigger) TriggerBuilder.Create()
                    .WithIdentity(triggerName, triggerGroup)
                    .WithDescription(triggerDescription)
                    .ForJob(triggerJobName, triggerJobGroup)
                    .StartAt(triggerStartTime)
                    .EndAt(triggerEndTime)
                    .WithPriority(triggerPriority)
                    .ModifiedByCalendar(triggerCalendarRef)
                    .WithSchedule(sched)
                    .Build();

                // Copy <job-data-map> entries declared on the trigger into its data map.
                if (triggerNode.Item.jobdatamap != null && triggerNode.Item.jobdatamap.entry != null)
                {
                    foreach (entryType entry in triggerNode.Item.jobdatamap.entry)
                    {
                        string key = entry.key.TrimEmptyToNull()!;
                        var value = entry.value.TrimEmptyToNull();
                        trigger.JobDataMap.Add(key, value!);
                    }
                }

                if (Log.IsDebugEnabled())
                {
                    Log.Debug("Parsed trigger definition: " + trigger);
                }

                AddTriggerToSchedule(trigger);
            }
        }

        // Records a parsed job; overridable so subclasses can intercept.
        protected virtual void AddJobToSchedule(IJobDetail job)
        {
            loadedJobs.Add(job);
        }

        // Records a parsed trigger; overridable so subclasses can intercept.
        protected virtual void AddTriggerToSchedule(IMutableTrigger trigger)
        {
            loadedTriggers.Add(trigger);
        }

        // Parses the simple trigger's repeat count using the invariant culture.
        protected virtual int ParseSimpleTriggerRepeatCount(string repeatcount)
        {
            int value = Convert.ToInt32(repeatcount, CultureInfo.InvariantCulture);
            return value;
        }

        // Maps a misfire instruction name from the XML to its numeric constant.
        protected virtual int ReadMisfireInstructionFromString(string misfireinstruction)
        {
            Constants c = new Constants(typeof (MisfireInstruction), typeof (MisfireInstruction.CronTrigger), typeof (MisfireInstruction.SimpleTrigger));
            return c.AsNumber(misfireinstruction);
        }

        // Maps an interval unit name from the XML to the IntervalUnit enum; defaults to Day.
        protected virtual IntervalUnit ParseDateIntervalTriggerIntervalUnit(string?
intervalUnit)
        {
            if (string.IsNullOrEmpty(intervalUnit))
            {
                return IntervalUnit.Day;
            }

            if (!TryParseEnum(intervalUnit, out IntervalUnit retValue))
            {
                throw new SchedulerConfigException("Unknown interval unit for DateIntervalTrigger: " + intervalUnit);
            }

            return retValue;
        }

        // Case-sensitive enum name lookup; leaves value at the enum's first member on failure.
        protected virtual bool TryParseEnum<T>(string str, out T value) where T : struct
        {
            var names = Enum.GetNames(typeof (T));
            value = (T) Enum.GetValues(typeof (T)).GetValue(0)!;
            foreach (var name in names)
            {
                if (name == str)
                {
                    value = (T) Enum.Parse(typeof (T), name);
                    return true;
                }
            }
            return false;
        }

        // Validates the XML against the embedded XSD; validation errors are
        // collected via XmlValidationCallBack, infrastructure failures only logged.
        private void ValidateXml(string xml)
        {
            try
            {
                var settings = new XmlReaderSettings
                {
                    ValidationType = ValidationType.Schema,
                    ValidationFlags = XmlSchemaValidationFlags.ProcessInlineSchema | XmlSchemaValidationFlags.ProcessSchemaLocation | XmlSchemaValidationFlags.ReportValidationWarnings
                };

                using var stream = typeof(XMLSchedulingDataProcessor).Assembly.GetManifestResourceStream(QuartzXsdResourceName);
                if (stream is null)
                {
                    throw new Exception("Could not read XSD from embedded resource");
                }
                var schema = XmlSchema.Read(stream, XmlValidationCallBack);
                settings.Schemas.Add(schema!);
                settings.ValidationEventHandler += XmlValidationCallBack;

                // stream to validate
                using var reader = XmlReader.Create(new StringReader(xml), settings);
                while (reader.Read())
                {
                }
            }
            catch (Exception ex)
            {
                Log.WarnException("Unable to validate XML with schema: " + ex.Message, ex);
            }
        }

        // Collects schema validation errors; warnings are only logged.
        private void XmlValidationCallBack(object? sender, ValidationEventArgs e)
        {
            if (e.Severity == XmlSeverityType.Error)
            {
                validationExceptions.Add(e.Exception);
            }
            else
            {
                Log.Warn(e.Message);
            }
        }

        /// <summary>
        /// Process the xml file in the default location, and schedule all of the jobs defined within it.
/// </summary>
/// <remarks>Note that we will set overWriteExistingJobs after the default xml is parsed.</remarks>
public async Task ProcessFileAndScheduleJobs(
    IScheduler sched,
    bool overWriteExistingJobs,
    CancellationToken cancellationToken = default)
{
    await ProcessFile(QuartzXmlFileName, QuartzXmlFileName, cancellationToken).ConfigureAwait(false);

    // The overWriteExistingJobs flag was set by processFile() -> prepForProcessing(), then by xml parsing, and then now
    // we need to reset it again here by this method parameter to override it.
    OverWriteExistingData = overWriteExistingJobs;

    await ExecutePreProcessCommands(sched, cancellationToken).ConfigureAwait(false);
    await ScheduleJobs(sched, cancellationToken).ConfigureAwait(false);
}

/// <summary>
/// Process the xml file in the default location, and schedule all of the
/// jobs defined within it.
/// </summary>
/// <param name="sched">The scheduler to add the parsed jobs and triggers to.</param>
/// <param name="cancellationToken">The cancellation instruction.</param>
public virtual Task ProcessFileAndScheduleJobs(
    IScheduler sched,
    CancellationToken cancellationToken = default)
{
    return ProcessFileAndScheduleJobs(QuartzXmlFileName, sched, cancellationToken);
}

/// <summary>
/// Process the xml file in the given location, and schedule all of the
/// jobs defined within it.
/// </summary>
/// <param name="fileName">meta data file name (also used as the system id).</param>
/// <param name="sched">The scheduler.</param>
/// <param name="cancellationToken">The cancellation instruction.</param>
public virtual Task ProcessFileAndScheduleJobs(
    string fileName,
    IScheduler sched,
    CancellationToken cancellationToken = default)
{
    return ProcessFileAndScheduleJobs(fileName, fileName, sched, cancellationToken);
}

/// <summary>
/// Process the xml file in the given location, and schedule all of the
/// jobs defined within it.
/// </summary>
/// <param name="fileName">Name of the file.</param>
/// <param name="systemId">The system id.</param>
/// <param name="sched">The sched.</param>
/// <param name="cancellationToken">The cancellation instruction.</param>
public virtual async Task ProcessFileAndScheduleJobs(
    string fileName,
    string systemId,
    IScheduler sched,
    CancellationToken cancellationToken = default)
{
    // Parse first, then run delete commands, then schedule what was parsed.
    await ProcessFile(fileName, systemId, cancellationToken).ConfigureAwait(false);
    await ExecutePreProcessCommands(sched, cancellationToken).ConfigureAwait(false);
    await ScheduleJobs(sched, cancellationToken).ConfigureAwait(false);
}

/// <summary>
/// Process the xml file in the given location, and schedule all of the
/// jobs defined within it.
/// </summary>
/// <param name="stream">stream to read XML data from.</param>
/// <param name="sched">The sched.</param>
/// <param name="cancellationToken">The cancellation instruction.</param>
public virtual async Task ProcessStreamAndScheduleJobs(
    Stream stream,
    IScheduler sched,
    CancellationToken cancellationToken = default)
{
    using (var sr = new StreamReader(stream))
    {
        ProcessInternal(await sr.ReadToEndAsync().ConfigureAwait(false));
    }
    await ExecutePreProcessCommands(sched, cancellationToken).ConfigureAwait(false);
    await ScheduleJobs(sched, cancellationToken).ConfigureAwait(false);
}

/// <summary>
/// Schedules the given sets of jobs and triggers.
/// </summary>
/// <param name="sched">The sched.</param>
/// <param name="cancellationToken">The cancellation instruction.</param>
public virtual async Task ScheduleJobs(
    IScheduler sched,
    CancellationToken cancellationToken = default)
{
    // Work on copies so the loaded definitions can be consumed destructively below.
    List<IJobDetail> jobs = new List<IJobDetail>(LoadedJobs);
    List<ITrigger> triggers = new List<ITrigger>(LoadedTriggers);

    Log.Info("Adding " + jobs.Count + " jobs, " + triggers.Count + " triggers.");

    IDictionary<JobKey, List<IMutableTrigger>> triggersByFQJobName = BuildTriggersByFQJobNameMap(triggers);

    // add each job, and it's associated triggers
    while (jobs.Count > 0)
    {
        // remove jobs as we handle them...
        IJobDetail detail = jobs[0];
        jobs.Remove(detail);

        IJobDetail? dupeJ = null;
        try
        {
            // The existing job could have been deleted, and Quartz API doesn't allow us to query this without
            // loading the job class, so use try/catch to handle it.
            dupeJ = await sched.GetJobDetail(detail.Key, cancellationToken).ConfigureAwait(false);
        }
        catch (JobPersistenceException e)
        {
            if (e.InnerException is TypeLoadException && OverWriteExistingData)
            {
                // We are going to replace jobDetail anyway, so just delete it first.
                Log.Info("Removing job: " + detail.Key);
                await sched.DeleteJob(detail.Key, cancellationToken).ConfigureAwait(false);
            }
            else
            {
                throw;
            }
        }

        if (dupeJ != null)
        {
            if (!OverWriteExistingData && IgnoreDuplicates)
            {
                Log.Info("Not overwriting existing job: " + dupeJ.Key);
                continue; // just ignore the entry
            }
            if (!OverWriteExistingData && !IgnoreDuplicates)
            {
                throw new ObjectAlreadyExistsException(detail);
            }
        }

        if (dupeJ != null)
        {
            Log.Info("Replacing job: " + detail.Key);
        }
        else
        {
            Log.Info("Adding job: " + detail.Key);
        }

        triggersByFQJobName.TryGetValue(detail.Key, out var triggersOfJob);

        if (!detail.Durable && (triggersOfJob == null || triggersOfJob.Count == 0))
        {
            if (dupeJ == null)
            {
                throw new SchedulerException(
                    "A new job defined without any triggers must be durable: " + detail.Key);
            }

            if (dupeJ.Durable && (await sched.GetTriggersOfJob(detail.Key, cancellationToken).ConfigureAwait(false)).Count == 0)
            {
                throw new SchedulerException(
                    "Can't change existing durable job without triggers to non-durable: " + detail.Key);
            }
        }

        if (dupeJ != null || detail.Durable)
        {
            if (triggersOfJob != null && triggersOfJob.Count > 0)
            {
                await sched.AddJob(detail, true, true, cancellationToken).ConfigureAwait(false); // add the job regardless is durable or not b/c we have trigger to add
            }
            else
            {
                await sched.AddJob(detail, true, false, cancellationToken).ConfigureAwait(false); // add the job only if a replacement or durable, else exception will throw!
            }
        }
        else
        {
            bool addJobWithFirstSchedule = true;

            // Add triggers related to the job...
            while (triggersOfJob!.Count > 0)
            {
                IMutableTrigger trigger = triggersOfJob[0];
                // remove triggers as we handle them...
                triggersOfJob.Remove(trigger);

                ITrigger? dupeT = await sched.GetTrigger(trigger.Key, cancellationToken).ConfigureAwait(false);
                if (dupeT != null)
                {
                    if (OverWriteExistingData)
                    {
                        if (Log.IsDebugEnabled())
                        {
                            Log.DebugFormat("Rescheduling job: {0} with updated trigger: {1}", trigger.JobKey, trigger.Key);
                        }
                    }
                    else if (IgnoreDuplicates)
                    {
                        Log.Info("Not overwriting existing trigger: " + dupeT.Key);
                        continue; // just ignore the trigger (and possibly job)
                    }
                    else
                    {
                        throw new ObjectAlreadyExistsException(trigger);
                    }

                    if (!dupeT.JobKey.Equals(trigger.JobKey))
                    {
                        // same trigger key, different target job - almost certainly a config mistake
                        ReportDuplicateTrigger(trigger);
                    }

                    await DoRescheduleJob(sched, trigger, dupeT, cancellationToken).ConfigureAwait(false);
                }
                else
                {
                    if (Log.IsDebugEnabled())
                    {
                        Log.DebugFormat("Scheduling job: {0} with trigger: {1}", trigger.JobKey, trigger.Key);
                    }

                    try
                    {
                        if (addJobWithFirstSchedule)
                        {
                            await sched.ScheduleJob(detail, trigger, cancellationToken).ConfigureAwait(false); // add the job if it's not in yet...
                            addJobWithFirstSchedule = false;
                        }
                        else
                        {
                            await sched.ScheduleJob(trigger, cancellationToken).ConfigureAwait(false);
                        }
                    }
                    catch (ObjectAlreadyExistsException)
                    {
                        if (Log.IsDebugEnabled())
                        {
                            Log.DebugFormat("Adding trigger: {0} for job: {1} failed because the trigger already existed. " +
                                            "This is likely due to a race condition between multiple instances " +
                                            "in the cluster. Will try to reschedule instead.", trigger.Key, detail.Key);
                        }

                        // Let's try one more time as reschedule.
                        var oldTrigger = await sched.GetTrigger(trigger.Key, cancellationToken).ConfigureAwait(false);
                        await DoRescheduleJob(sched, trigger, oldTrigger, cancellationToken).ConfigureAwait(false);
                    }
                }
            }
        }
    }

    // add triggers that weren't associated with a new job... (those we already handled were removed above)
    foreach (IMutableTrigger trigger in triggers)
    {
        ITrigger? dupeT = await sched.GetTrigger(trigger.Key, cancellationToken).ConfigureAwait(false);
        if (dupeT != null)
        {
            if (OverWriteExistingData)
            {
                if (Log.IsDebugEnabled())
                {
                    Log.DebugFormat("Rescheduling job: " + trigger.JobKey + " with updated trigger: " + trigger.Key);
                }
            }
            else if (IgnoreDuplicates)
            {
                Log.Info("Not overwriting existing trigger: " + dupeT.Key);
                continue; // just ignore the trigger
            }
            else
            {
                throw new ObjectAlreadyExistsException(trigger);
            }

            if (!dupeT.JobKey.Equals(trigger.JobKey))
            {
                ReportDuplicateTrigger(trigger);
            }

            await DoRescheduleJob(sched, trigger, dupeT, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            if (Log.IsDebugEnabled())
            {
                Log.DebugFormat("Scheduling job: {0} with trigger: {1}", trigger.JobKey, trigger.Key);
            }

            try
            {
                await sched.ScheduleJob(trigger, cancellationToken).ConfigureAwait(false);
            }
            catch (ObjectAlreadyExistsException)
            {
                if (Log.IsDebugEnabled())
                {
                    Log.Debug(
                        "Adding trigger: " + trigger.Key + " for job: " + trigger.JobKey +
                        " failed because the trigger already existed. " +
                        "This is likely due to a race condition between multiple instances " +
                        "in the cluster. Will try to reschedule instead.");
                }

                // Let's rescheduleJob one more time.
                var oldTrigger = await sched.GetTrigger(trigger.Key, cancellationToken).ConfigureAwait(false);
                await DoRescheduleJob(sched, trigger, oldTrigger, cancellationToken).ConfigureAwait(false);
            }
        }
    }
}

// Warns about a trigger whose key collides with an existing trigger that points at a different job.
private void ReportDuplicateTrigger(IMutableTrigger trigger)
{
    Log.WarnFormat("Possibly duplicately named ({0}) trigger in configuration, this can be caused by not having a fixed job key for targeted jobs", trigger.Key);
}

private Task DoRescheduleJob(
    IScheduler sched,
    IMutableTrigger trigger,
    ITrigger?
oldTrigger,
    CancellationToken cancellationToken = default)
{
    // if this is a trigger with default start time we can consider relative scheduling
    if (oldTrigger != null && trigger.StartTimeUtc - SystemTime.UtcNow() < TimeSpan.FromSeconds(5) && ScheduleTriggerRelativeToReplacedTrigger)
    {
        Log.DebugFormat("Using relative scheduling for trigger with key {0}", trigger.Key);

        var oldTriggerPreviousFireTime = oldTrigger.GetPreviousFireTimeUtc();
        trigger.StartTimeUtc = oldTrigger.StartTimeUtc;
        ((IOperableTrigger)trigger).SetPreviousFireTimeUtc(oldTriggerPreviousFireTime);
        // if oldTriggerPreviousFireTime is null then NextFireTime should be set relative to oldTrigger.StartTimeUtc
        // to be able to handle misfiring for an existing trigger that has never been executed before.
        ((IOperableTrigger)trigger).SetNextFireTimeUtc(trigger.GetFireTimeAfter(oldTriggerPreviousFireTime ?? oldTrigger.StartTimeUtc));
    }

    return sched.RescheduleJob(trigger.Key, trigger, cancellationToken);
}

/// <summary>Groups the given triggers by the job key they are attached to.</summary>
protected virtual IDictionary<JobKey, List<IMutableTrigger>> BuildTriggersByFQJobNameMap(List<ITrigger> triggers)
{
    Dictionary<JobKey, List<IMutableTrigger>> triggersByFQJobName = new Dictionary<JobKey, List<IMutableTrigger>>();
    foreach (IMutableTrigger trigger in triggers)
    {
        if (!triggersByFQJobName.TryGetValue(trigger.JobKey, out var triggersOfJob))
        {
            triggersOfJob = new List<IMutableTrigger>();
            triggersByFQJobName[trigger.JobKey] = triggersOfJob;
        }
        triggersOfJob.Add(trigger);
    }
    return triggersByFQJobName;
}

/// <summary>
/// Executes the delete commands parsed from the XML (job/trigger group deletes and
/// individual job/trigger deletes), honoring the never-delete protection lists.
/// "*" deletes across all groups except the protected ones.
/// </summary>
protected async Task ExecutePreProcessCommands(
    IScheduler scheduler,
    CancellationToken cancellationToken = default)
{
    foreach (string group in jobGroupsToDelete)
    {
        if (group.Equals("*"))
        {
            Log.Info("Deleting all jobs in ALL groups.");
            foreach (string groupName in await scheduler.GetJobGroupNames(cancellationToken).ConfigureAwait(false))
            {
                if (!jobGroupsToNeverDelete.Contains(groupName))
                {
                    foreach (JobKey key in await scheduler.GetJobKeys(GroupMatcher<JobKey>.GroupEquals(groupName), cancellationToken).ConfigureAwait(false))
                    {
                        await scheduler.DeleteJob(key, cancellationToken).ConfigureAwait(false);
                    }
                }
            }
        }
        else
        {
            if (!jobGroupsToNeverDelete.Contains(group))
            {
                Log.InfoFormat("Deleting all jobs in group: {0}", group);
                foreach (JobKey key in await scheduler.GetJobKeys(GroupMatcher<JobKey>.GroupEquals(group), cancellationToken).ConfigureAwait(false))
                {
                    await scheduler.DeleteJob(key, cancellationToken).ConfigureAwait(false);
                }
            }
        }
    }

    foreach (string group in triggerGroupsToDelete)
    {
        if (group.Equals("*"))
        {
            Log.Info("Deleting all triggers in ALL groups.");
            foreach (string groupName in await scheduler.GetTriggerGroupNames(cancellationToken).ConfigureAwait(false))
            {
                if (!triggerGroupsToNeverDelete.Contains(groupName))
                {
                    foreach (TriggerKey key in await scheduler.GetTriggerKeys(GroupMatcher<TriggerKey>.GroupEquals(groupName), cancellationToken).ConfigureAwait(false))
                    {
                        await scheduler.UnscheduleJob(key, cancellationToken).ConfigureAwait(false);
                    }
                }
            }
        }
        else
        {
            if (!triggerGroupsToNeverDelete.Contains(group))
            {
                Log.InfoFormat("Deleting all triggers in group: {0}", group);
                foreach (TriggerKey key in await scheduler.GetTriggerKeys(GroupMatcher<TriggerKey>.GroupEquals(group), cancellationToken).ConfigureAwait(false))
                {
                    await scheduler.UnscheduleJob(key, cancellationToken).ConfigureAwait(false);
                }
            }
        }
    }

    foreach (JobKey key in jobsToDelete)
    {
        if (!jobGroupsToNeverDelete.Contains(key.Group))
        {
            Log.InfoFormat("Deleting job: {0}", key);
            await scheduler.DeleteJob(key, cancellationToken).ConfigureAwait(false);
        }
    }

    foreach (TriggerKey key in triggersToDelete)
    {
        if (!triggerGroupsToNeverDelete.Contains(key.Group))
        {
            Log.InfoFormat("Deleting trigger: {0}", key);
            await scheduler.UnscheduleJob(key, cancellationToken).ConfigureAwait(false);
        }
    }
}

/// <summary>
/// Adds a detected validation exception.
/// </summary> /// <param name="e">The exception.</param> protected virtual void AddValidationException(XmlException e) { validationExceptions.Add(e); } /// <summary> /// Resets the number of detected validation exceptions. /// </summary> protected virtual void ClearValidationExceptions() { validationExceptions.Clear(); } /// <summary> /// Throws a ValidationException if the number of validationExceptions /// detected is greater than zero. /// </summary> /// <exception cref="ValidationException"> /// DTD validation exception. /// </exception> protected virtual void MaybeThrowValidationException() { if (validationExceptions.Count > 0) { throw new ValidationException(validationExceptions); } } public void AddJobGroupToNeverDelete(string jobGroupName) { jobGroupsToNeverDelete.Add(jobGroupName); } public void AddTriggerGroupToNeverDelete(string triggerGroupName) { triggerGroupsToNeverDelete.Add(triggerGroupName); } /// <summary> /// Helper class to map constant names to their values. /// </summary> internal class Constants { private readonly Type[] types; public Constants(params Type[] reflectedTypes) { types = reflectedTypes; } public int AsNumber(string field) { foreach (Type type in types) { FieldInfo? fi = type.GetField(field); if (fi != null) { return Convert.ToInt32(fi.GetValue(null), CultureInfo.InvariantCulture); } } // not found throw new Exception($"Unknown field '{field}'"); } } } }
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Threading;

namespace OpenSim.Framework
{
    // Opaque handle that lets callers address an element in a MinHeap
    // without a linear search (see MinHeap<T>.Add(T, IHandle)).
    public interface IHandle { }

    [Serializable, ComVisible(false)]
    public class MinHeap<T> : ICollection<T>, ICollection
    {
        // Concrete handle: records the owning heap and the element's current array index.
        private class Handle : IHandle
        {
            internal int index = -1;       // -1 means "not associated to a value"
            internal MinHeap<T> heap = null;

            internal void Clear()
            {
                this.index = -1;
                this.heap = null;
            }
        }

        // One heap slot: the stored value plus its (optional) external handle.
        private struct HeapItem
        {
            internal T value;
            internal Handle handle;

            internal HeapItem(T value, Handle handle)
            {
                this.value = value;
                this.handle = handle;
            }

            // Invalidates the external handle, then drops references.
            internal void Clear()
            {
                if (this.handle != null)
                    this.handle.Clear();
                ClearRef();
            }

            // Drops references without invalidating the handle (used when a slot is moved).
            internal void ClearRef()
            {
                this.value = default(T);
                this.handle = null;
            }
        }

        public const int DEFAULT_CAPACITY = 4;

        private HeapItem[] items;
        private int size;
        private object sync_root;       // lazily created; see SyncRoot
        private int version;            // bumped on structural change; guards enumeration

        private Comparison<T> comparison;

        public MinHeap() : this(DEFAULT_CAPACITY, Comparer<T>.Default) { }
        public MinHeap(int capacity) : this(capacity, Comparer<T>.Default) { }
        public MinHeap(IComparer<T> comparer) : this(DEFAULT_CAPACITY, comparer) { }
        public MinHeap(int capacity, IComparer<T> comparer) : this(capacity, new Comparison<T>(comparer.Compare)) { }
        public MinHeap(Comparison<T> comparison) : this(DEFAULT_CAPACITY, comparison) { }
        public MinHeap(int capacity, Comparison<T> comparison)
        {
            this.items = new HeapItem[capacity];
            this.comparison = comparison;
            this.size = this.version = 0;
        }

        public int Count { get { return this.size; } }

        public bool IsReadOnly { get { return false; } }

        public bool IsSynchronized { get { return false; } }

        // Indexer keyed by handle: setting a value re-heapifies in whichever direction is needed.
        public T this[IHandle key]
        {
            get
            {
                Handle handle = ValidateThisHandle(key);
                return this.items[handle.index].value;
            }

            set
            {
                Handle handle = ValidateThisHandle(key);
                this.items[handle.index].value = value;
                if (!BubbleUp(handle.index))
                    BubbleDown(handle.index);
            }
        }

        public object SyncRoot
        {
            get
            {
                // Lazily create the sync object; CompareExchange keeps it single-instance under races.
                if (this.sync_root == null)
                    Interlocked.CompareExchange<object>(ref this.sync_root, new object(), null);
                return this.sync_root;
            }
        }

        // Checks the handle is non-null and of our concrete Handle type.
        private Handle ValidateHandle(IHandle ihandle)
        {
            if (ihandle == null)
                throw new ArgumentNullException("handle");
            Handle handle = ihandle as Handle;
            if (handle == null)
                throw new InvalidOperationException("handle is not valid");
            return handle;
        }

        // Additionally checks the handle belongs to this heap and is currently attached.
        private Handle ValidateThisHandle(IHandle ihandle)
        {
            Handle handle = ValidateHandle(ihandle);
            if (!object.ReferenceEquals(handle.heap, this))
                throw new InvalidOperationException("handle is not valid for this heap");
            if (handle.index < 0)
                throw new InvalidOperationException("handle is not associated to a value");
            return handle;
        }

        // Places an item into a slot and keeps its external handle's index in sync.
        private void Set(HeapItem item, int index)
        {
            this.items[index] = item;
            if (item.handle != null)
                item.handle.index = index;
        }

        // Sifts the item at 'index' towards the root; returns true if it moved (version bumped).
        private bool BubbleUp(int index)
        {
            HeapItem item = this.items[index];
            int current, parent;

            for (current = index, parent = (current - 1) / 2;
                (current > 0) && (this.comparison(this.items[parent].value, item.value)) > 0;
                current = parent, parent = (current - 1) / 2)
            {
                Set(this.items[parent], current);
            }

            if (current != index)
            {
                Set(item, current);
                ++this.version;
                return true;
            }
            return false;
        }

        // Sifts the item at 'index' towards the leaves, always swapping with the smaller child.
        private void BubbleDown(int index)
        {
            HeapItem item = this.items[index];
            int current, child;

            for (current = index, child = (2 * current) + 1;
                current < this.size / 2;
                current = child, child = (2 * current) + 1)
            {
                if ((child < this.size - 1) && this.comparison(this.items[child].value, this.items[child + 1].value) > 0)
                    ++child;
                if (this.comparison(this.items[child].value, item.value) >= 0)
                    break;
                Set(this.items[child], current);
            }

            if (current != index)
            {
                Set(item, current);
                ++this.version;
            }
        }

        // Fetches a handle's value without requiring it to belong to this heap's live range.
        public bool TryGetValue(IHandle key, out T value)
        {
            Handle handle = ValidateHandle(key);
            if (handle.index > -1)
            {
                value = this.items[handle.index].value;
                return true;
            }
            value = default(T);
            return false;
        }

        public bool ContainsHandle(IHandle ihandle)
        {
            Handle handle = ValidateHandle(ihandle);
            return object.ReferenceEquals(handle.heap, this) &&
handle.index > -1; } public void Add(T value, ref IHandle handle) { if (handle == null) handle = new Handle(); Add(value, handle); } public void Add(T value, IHandle ihandle) { if (this.size == this.items.Length) { int capacity = (int)((this.items.Length * 200L) / 100L); if (capacity < (this.items.Length + DEFAULT_CAPACITY)) capacity = this.items.Length + DEFAULT_CAPACITY; Array.Resize<HeapItem>(ref this.items, capacity); } Handle handle = null; if (ihandle != null) { handle = ValidateHandle(ihandle); handle.heap = this; } HeapItem item = new MinHeap<T>.HeapItem(value, handle); Set(item, this.size); BubbleUp(this.size++); } public void Add(T value) { Add(value, null); } public T Min() { if (this.size == 0) throw new InvalidOperationException("Heap is empty"); return this.items[0].value; } public void Clear() { for (int index = 0; index < this.size; ++index) this.items[index].Clear(); this.size = 0; ++this.version; } public void TrimExcess() { int length = (int)(this.items.Length * 0.9); if (this.size < length) Array.Resize<HeapItem>(ref this.items, Math.Min(this.size, DEFAULT_CAPACITY)); } private void RemoveAt(int index) { if (this.size == 0) throw new InvalidOperationException("Heap is empty"); if (index >= this.size) throw new ArgumentOutOfRangeException("index"); this.items[index].Clear(); if (--this.size > 0 && index != this.size) { Set(this.items[this.size], index); this.items[this.size].ClearRef(); if (!BubbleUp(index)) BubbleDown(index); } } public T RemoveMin() { if (this.size == 0) throw new InvalidOperationException("Heap is empty"); HeapItem item = this.items[0]; RemoveAt(0); return item.value; } public T Remove(IHandle ihandle) { Handle handle = ValidateThisHandle(ihandle); HeapItem item = this.items[handle.index]; RemoveAt(handle.index); return item.value; } private int GetIndex(T value) { EqualityComparer<T> comparer = EqualityComparer<T>.Default; int index; for (index = 0; index < this.size; ++index) { if (comparer.Equals(this.items[index].value, 
value)) return index; } return -1; } public bool Contains(T value) { return GetIndex(value) != -1; } public bool Remove(T value) { int index = GetIndex(value); if (index != -1) { RemoveAt(index); return true; } return false; } public void CopyTo(T[] array, int index) { if (array == null) throw new ArgumentNullException("array"); if (array.Rank != 1) throw new ArgumentException("Multidimensional array not supported"); if (array.GetLowerBound(0) != 0) throw new ArgumentException("Non-zero lower bound array not supported"); int length = array.Length; if ((index < 0) || (index > length)) throw new ArgumentOutOfRangeException("index"); if ((length - index) < this.size) throw new ArgumentException("Not enough space available in array starting at index"); for (int i = 0; i < this.size; ++i) array[index + i] = this.items[i].value; } public void CopyTo(Array array, int index) { if (array == null) throw new ArgumentNullException("array"); if (array.Rank != 1) throw new ArgumentException("Multidimensional array not supported"); if (array.GetLowerBound(0) != 0) throw new ArgumentException("Non-zero lower bound array not supported"); int length = array.Length; if ((index < 0) || (index > length)) throw new ArgumentOutOfRangeException("index"); if ((length - index) < this.size) throw new ArgumentException("Not enough space available in array starting at index"); try { for (int i = 0; i < this.size; ++i) array.SetValue(this.items[i].value, index + i); } catch (ArrayTypeMismatchException) { throw new ArgumentException("Invalid array type"); } } public IEnumerator<T> GetEnumerator() { int version = this.version; for (int index = 0; index < this.size; ++index) { if (version != this.version) throw new InvalidOperationException("Heap was modified while enumerating"); yield return this.items[index].value; } } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } } }
using System;
using Microsoft.SPOT;
using System.Net.Sockets;
using System.Net;
using System.Text;
using System.Collections;

namespace uPLibrary.IoT.ThingSpeak
{
    /// <summary>
    /// Class of the ThingSpeak platform client
    /// </summary>
    public class ThingSpeakClient
    {
        internal const string CRLF = "\r\n";

        // max dimension for receive buffer
        private const int RECEIVE_BUFFER_SIZE = 1024;

        // host name and ports
        internal const string THING_SPEAK_HOST = "api.thingspeak.com";
        internal const int THING_SPEAK_PORT = 80;
        internal const int THING_SPEAK_SSL_PORT = 443;

        // path for channel updating
        internal const string THING_SPEAK_UPDATE_PATH = "/update";

        // max dimensions
        internal const int THING_SPEAK_MAX_FIELDS = 8;
        internal const int THING_SPEAK_MAX_STATUS = 140;

        // socket for connecting to the host
        private Socket socket;
        // host endpoint
        private IPEndPoint hostIpEndPoint;
        // if HTTPS connection is requested
        private bool SSL;

        // buffers for send/receive data by socket
        private byte[] sendBuffer;
        private byte[] receiveBuffer;

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="SSL">HTTPS connection requested</param>
        public ThingSpeakClient(bool SSL)
        {
            this.SSL = SSL;

            // get ip address for the host
            // NOTE(review): resolved once at construction; a later DNS change requires a new client
            IPHostEntry hostEntry = Dns.GetHostEntry(THING_SPEAK_HOST);
            // create host endpoint
            this.hostIpEndPoint = new IPEndPoint(hostEntry.AddressList[0], this.SSL ? THING_SPEAK_SSL_PORT : THING_SPEAK_PORT);
        }

        /// <summary>
        /// Update a channel
        /// </summary>
        /// <param name="writeApiKey">Write API Key for the channel to update</param>
        /// <param name="dataEntry">Data entry for updating channel</param>
        /// <returns>Update result</returns>
        public bool Update(
            string writeApiKey,
            DataEntry dataEntry)
        {
            // check for a mandatory write API Key
            if ((writeApiKey == null) || (writeApiKey == String.Empty))
                throw new ArgumentNullException("writeApiKey", "You must specify a write API Key");

            // check max fields number
            // FIX: ArgumentException takes (message, paramName) - the original had the
            // arguments swapped (it used the ArgumentNullException parameter order).
            if (dataEntry.Fields.Length > THING_SPEAK_MAX_FIELDS)
                throw new ArgumentException("Max number of field is " + THING_SPEAK_MAX_FIELDS, "fields");

            // check at least one field value not empty
            bool checkFields = false;
            for (int i = 0; i < dataEntry.Fields.Length; i++)
            {
                if (dataEntry.Fields[i] != null)
                {
                    checkFields = true;
                    break;
                }
            }
            if (!checkFields)
                throw new ArgumentNullException("fields", "You must specify a field value at least");

            // check status message (FIX: swapped arguments, as above)
            if ((dataEntry.Status != null) && (dataEntry.Status.Length > THING_SPEAK_MAX_STATUS))
                throw new ArgumentException("Max status length is " + THING_SPEAK_MAX_STATUS, "status");

            // check twitter account and message (must be provided together)
            if (((dataEntry.Twitter == null) && (dataEntry.Tweet != null)) ||
                ((dataEntry.Twitter != null) && (dataEntry.Tweet == null)))
                throw new ArgumentException("twitter and tweet parameters must be both valued");

            // build body
            string body = String.Empty;
            // fields...
            for (int i = 0; i < dataEntry.Fields.Length; i++)
            {
                if ((dataEntry.Fields[i] != null) && (dataEntry.Fields[i] != String.Empty))
                {
                    if (i > 0)
                        body += "&";
                    body += "field" + (i + 1) + "=" + dataEntry.Fields[i];
                }
            }
            // ...location...
            if (dataEntry.Location != null)
                body += "&lat=" + dataEntry.Location.Latitude + "&long=" + dataEntry.Location.Longitude + "&elevation=" + dataEntry.Location.Elevation;
            // ...status...
            if (dataEntry.Status != null)
                body += "&status=" + dataEntry.Status;
            // ...twitter...
            if ((dataEntry.Twitter != null) && (dataEntry.Tweet != null))
                body += "&twitter=" + dataEntry.Twitter + "&tweet=" + dataEntry.Tweet;

            // build HTTP request
            string request = "POST " + THING_SPEAK_UPDATE_PATH + " HTTP/1.1" + CRLF;
            request += "Host: " + THING_SPEAK_HOST + CRLF;
            request += "Connection: close" + CRLF;
            request += "X-THINGSPEAKAPIKEY: " + writeApiKey + CRLF;
            request += "Content-Type: application/x-www-form-urlencoded" + CRLF;
            // FIX: Content-Length must be the BYTE count of the body, not the character
            // count - they differ as soon as a field value contains non-ASCII characters.
            request += "Content-Length: " + Encoding.UTF8.GetBytes(body).Length + CRLF;
            request += CRLF;
            request += body + CRLF;

            string result = String.Empty;

            // open socket and connect to the host
            using (this.socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
            {
                this.socket.Connect(this.hostIpEndPoint);

                // send HTTP request
                this.sendBuffer = Encoding.UTF8.GetBytes(request);

                if (this.SSL)
                {
                    // TODO : HTTPS communication
                }
                else
                {
                    // send HTTP request
                    this.socket.Send(this.sendBuffer);

                    // receive HTTP response
                    this.receiveBuffer = new byte[RECEIVE_BUFFER_SIZE];

                    // poll on socket for reading (timeout 30 sec)
                    while (this.socket.Poll(30 * 1000000, SelectMode.SelectRead))
                    {
                        // no data on the socket (closed or timeout)
                        if (this.socket.Available == 0)
                            break;

                        // empty buffer
                        Array.Clear(this.receiveBuffer, 0, this.receiveBuffer.Length);
                        // read data
                        this.socket.Receive(this.receiveBuffer);
                        // append response
                        result += new String(Encoding.UTF8.GetChars(this.receiveBuffer));
                    }
                }
            }

            // decode HTTP response
            ThingSpeakHttpResponse httpResponse = ThingSpeakHttpResponse.Parse(result);
            Debug.Print(result);

            if (httpResponse.StatusCode == 200)
            {
                // set entry Id received from the server
                dataEntry.Id = Convert.ToInt32(httpResponse.Body);
                return true;
            }
            else
                return false;
        }

        /// <summary>
        /// Read last entry in a field feed
        /// </summary>
        /// <param name="readApiKey">Read API Key for the channel to read (null if channel is public)</param>
        /// <param name="channelId">Channel ID</param>
        /// <param name="fieldId">Field ID</param>
        /// <param name="status">Include
status update in feed</param>
        /// <param name="location">Include latitude, longitude and elevation in feed</param>
        /// <returns>List of all data entries read</returns>
        public ArrayList ReadLastFieldEntry(string readApiKey, int channelId, int fieldId, bool status = false, bool location = false)
        {
            return this.ReadChannel(readApiKey, channelId, "/field/" + fieldId + "/last.csv" + this.ReadMakeQueryString(status, location));
        }

        /// <summary>
        /// Read a field feed
        /// </summary>
        /// <param name="readApiKey">Read API Key for the channel to read (null if channel is public)</param>
        /// <param name="channelId">Channel ID</param>
        /// <param name="fieldId">Field ID</param>
        /// <param name="status">Include status update in feed</param>
        /// <param name="location">Include latitude, longitude and elevation in feed</param>
        /// <returns>List of all data entries read</returns>
        public ArrayList ReadField(string readApiKey, int channelId, int fieldId, bool status = false, bool location = false)
        {
            return this.ReadChannel(readApiKey, channelId, "/field/" + fieldId + ".csv" + this.ReadMakeQueryString(status, location));
        }

        /// <summary>
        /// Read channel feeds
        /// </summary>
        /// <param name="readApiKey">Read API Key for the channel to read (null if channel is public)</param>
        /// <param name="channelId">Channel ID</param>
        /// <param name="status">Include status update in feed</param>
        /// <param name="location">Include latitude, longitude and elevation in feed</param>
        /// <returns>List of all data entries read</returns>
        public ArrayList ReadFeeds(string readApiKey, int channelId, bool status = false, bool location = false)
        {
            return this.ReadChannel(readApiKey, channelId, "/feed.csv" + this.ReadMakeQueryString(status, location));
        }

        /// <summary>
        /// Read last entry in channel feed
        /// </summary>
        /// <param name="readApiKey">Read API Key for the channel to read (null if channel is public)</param>
        /// <param name="channelId">Channel ID</param>
        /// <param name="status">Include status update in feed</param>
        /// <param name="location">Include latitude, longitude and elevation in feed</param>
        /// <returns>List of all data entries read</returns>
        public ArrayList ReadLastFeedEntry(string readApiKey, int channelId, bool status = false, bool location = false)
        {
            return this.ReadChannel(readApiKey, channelId, "/feed/last.csv" + this.ReadMakeQueryString(status, location));
        }

        /// <summary>
        /// Read status updates
        /// </summary>
        /// <param name="readApiKey">Read API Key for the channel to read (null if channel is public)</param>
        /// <param name="channelId">Channel ID</param>
        /// <returns>List of all data entries read with only status update</returns>
        public ArrayList ReadStatusUpdate(string readApiKey, int channelId)
        {
            return this.ReadChannel(readApiKey, channelId, "/status.csv");
        }

        /// <summary>
        /// Make query string for reading commands
        /// </summary>
        /// <param name="status">Include status update in feed</param>
        /// <param name="location">Include latitude, longitude and elevation in feed</param>
        /// <returns>Query string made</returns>
        private string ReadMakeQueryString(bool status, bool location)
        {
            string queryString = String.Empty;
            if (status)
                queryString += "?status=true";
            if (location)
            {
                // '?' opens the query string only when status didn't already open it
                if (queryString != String.Empty)
                    queryString += "&";
                else
                    queryString += "?";
                queryString += "location=true";
            }
            return queryString;
        }

        /// <summary>
        /// Read channel data entries
        /// </summary>
        /// <param name="readApiKey">Read API Key for the channel to read (null if channel is public)</param>
        /// <param name="channelId">Channel ID</param>
        /// <param name="path">Path for reading channel</param>
        /// <returns>List of all data entries read, or null when the server response is not 200 OK</returns>
        private ArrayList ReadChannel(string readApiKey, int channelId, string path)
        {
            // build HTTP request
            string request = "GET /channels/" + channelId + path + " HTTP/1.1" + CRLF;
            request += "Host: " + THING_SPEAK_HOST + CRLF;
            request += "Connection: close" + CRLF;
            // API key header is only needed for private channels
            if ((readApiKey != null) && (readApiKey != String.Empty))
                request += "X-THINGSPEAKAPIKEY: " + readApiKey + CRLF;
            request += CRLF;

            string result = String.Empty;

            // open socket and connect to the host
            using (this.socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
            {
                this.socket.Connect(this.hostIpEndPoint);

                this.sendBuffer = Encoding.UTF8.GetBytes(request);

                if (this.SSL)
                {
                    // TODO : HTTPS communication
                }
                else
                {
                    // send HTTP request
                    this.socket.Send(this.sendBuffer);

                    // receive HTTP response
                    this.receiveBuffer = new byte[RECEIVE_BUFFER_SIZE];

                    // poll on socket for reading (timeout 30 sec)
                    while (this.socket.Poll(30 * 1000000, SelectMode.SelectRead))
                    {
                        // no data on the socket (closed or timeout)
                        if (this.socket.Available == 0)
                            break;

                        // empty buffer
                        Array.Clear(this.receiveBuffer, 0, this.receiveBuffer.Length);
                        // read data
                        this.socket.Receive(this.receiveBuffer);
                        // append response
                        result += new String(Encoding.UTF8.GetChars(this.receiveBuffer));
                    }
                }
            }

            // decode HTTP response
            ThingSpeakHttpResponse httpResponse = ThingSpeakHttpResponse.Parse(result);
            Debug.Print(result);

            if (httpResponse.StatusCode == 200)
                return DataEntry.ParseCsv(httpResponse.Body);
            else
                // NOTE(review): returns null (not an empty list) on any non-200 response
                return null;
        }
    }
}
using System;
using System.Collections;
using System.Text;
using System.Threading;
using ScanMaster.Acquire.Plugin;
using ScanMaster.GUI;

namespace ScanMaster
{
    /// <summary>
    /// This class has its Run method called by the controller in its own thread.
    /// The class reads input from the command manager window and processes it.
    /// </summary>
    public class CommandProcessor
    {
        // Reflection helper used to list/get/set plugin settings by name.
        SettingsReflector sr = new SettingsReflector();
        ProfileManager manager;
        // When true, "xxx:setting value" commands are applied to every profile in the current group.
        public bool groupEditMode = false;

        public CommandProcessor( ProfileManager manager )
        {
            this.manager = manager;
        }

        /// <summary>
        /// Starts the background thread that runs the command shell loop.
        /// </summary>
        public void Start()
        {
            Thread commandThread = new Thread(new ThreadStart(Run));
            commandThread.Name = "Command proccessor";
            // Fix: mark the thread as background *before* starting it. Run() never
            // returns, so setting IsBackground after Start() left a window in which
            // a foreground command thread could keep the process alive on shutdown.
            commandThread.IsBackground = true;
            commandThread.Start();
        }

        /// <summary>
        /// The command shell main loop: reads a line from the window, interprets it,
        /// and writes the result back. Never returns.
        /// </summary>
        public void Run()
        {
            manager.Window.WriteLine("ScanMaster command shell.");
            for (;;)
            {
                String command = manager.Window.GetNextLine();

                // nothing can be done without a profile
                if (manager.CurrentProfile == null)
                {
                    manager.Window.WriteLine("No profile selected !");
                    continue;
                }

                // while acquiring, only tweak mode is allowed
                if (Controller.GetController().appState != Controller.AppState.stopped)
                {
                    if (command.StartsWith("tweak"))
                    {
                        manager.Window.WriteLine("Entering tweak mode ...");
                        TweakMode(command);
                        continue;
                    }
                    manager.Window.WriteLine("Only tweak is available when acquiring.");
                    continue;
                }

                // info on the current profile
                if (command == "i")
                {
                    manager.Window.WriteLine(manager.CurrentProfile.ToString());
                    continue;
                }

                // update profile set to incorporate any newly introduced settings
                if (command == "refresh")
                {
                    manager.UpdateProfiles();
                    manager.Window.WriteLine("Updated profiles.");
                    continue;
                }

                // toggle group edit mode; the prompt and colour give visual feedback
                if (command == "g")
                {
                    if (groupEditMode)
                    {
                        groupEditMode = false;
                        manager.Window.WriteLine("Group edit mode is off");
                        manager.Window.Prompt = ":> ";
                        manager.Window.OutputColor = System.Drawing.Color.Lime;
                        continue;
                    }
                    else
                    {
                        groupEditMode = true;
                        manager.Window.WriteLine("Group edit mode is on. Current group " + manager.CurrentProfile.Group);
                        manager.Window.Prompt = manager.CurrentProfile.Group + ":> ";
                        manager.Window.OutputColor = System.Drawing.Color.White;
                        continue;
                    }
                }

                // anything after here (apart from a syntax error) will change the profiles,
                // so this is an appropriate point to flag them as modified
                manager.ProfilesChanged = true;

                if (command.StartsWith("set") && groupEditMode)
                {
                    manager.Window.WriteLine("You can't set things in group mode.");
                    continue;
                }

                // changing plugins
                if (command == "set out")
                {
                    String[] plugins = PluginRegistry.GetRegistry().GetOutputPlugins();
                    int r = ChoosePluginDialog(plugins);
                    if (r != -1) manager.CurrentProfile.AcquisitorConfig.SetOutputPlugin(plugins[r]);
                    continue;
                }
                if (command == "set shot")
                {
                    String[] plugins = PluginRegistry.GetRegistry().GetShotGathererPlugins();
                    int r = ChoosePluginDialog(plugins);
                    if (r != -1) manager.CurrentProfile.AcquisitorConfig.SetShotGathererPlugin(plugins[r]);
                    continue;
                }
                if (command == "set pg")
                {
                    String[] plugins = PluginRegistry.GetRegistry().GetPatternPlugins();
                    int r = ChoosePluginDialog(plugins);
                    if (r != -1) manager.CurrentProfile.AcquisitorConfig.SetPatternPlugin(plugins[r]);
                    continue;
                }
                if (command == "set yag")
                {
                    String[] plugins = PluginRegistry.GetRegistry().GetYAGPlugins();
                    int r = ChoosePluginDialog(plugins);
                    if (r != -1) manager.CurrentProfile.AcquisitorConfig.SetYAGPlugin(plugins[r]);
                    continue;
                }
                if (command == "set analog")
                {
                    String[] plugins = PluginRegistry.GetRegistry().GetAnalogPlugins();
                    int r = ChoosePluginDialog(plugins);
                    if (r != -1) manager.CurrentProfile.AcquisitorConfig.SetAnalogPlugin(plugins[r]);
                    continue;
                }
                if (command == "set switch")
                {
                    String[] plugins = PluginRegistry.GetRegistry().GetSwitchPlugins();
                    int r = ChoosePluginDialog(plugins);
                    if (r != -1) manager.CurrentProfile.AcquisitorConfig.SetSwitchPlugin(plugins[r]);
                    continue;
                }
                if (command == "set gpib")
                {
                    String[] plugins = PluginRegistry.GetRegistry().GetGPIBPlugins();
                    int r = ChoosePluginDialog(plugins);
                    if (r != -1) manager.CurrentProfile.AcquisitorConfig.SetGPIBPlugin(plugins[r]);
                    continue;
                }

                // changing group
                if (command.StartsWith("set group"))
                {
                    String[] bits = command.Split(new char[] {' '});
                    if (bits.Length != 3)
                    {
                        manager.Window.WriteLine("Syntax error.");
                        continue;
                    }
                    manager.CurrentProfile.Group = bits[2];
                    manager.Window.WriteLine("Group changed");
                    continue;
                }

                // listing plugin settings
                if (command == "out")
                {
                    String settings = sr.ListSettings(manager.CurrentProfile.AcquisitorConfig.outputPlugin);
                    manager.Window.WriteLine(settings);
                    continue;
                }
                if (command == "analog")
                {
                    String settings = sr.ListSettings(manager.CurrentProfile.AcquisitorConfig.analogPlugin);
                    manager.Window.WriteLine(settings);
                    continue;
                }
                if (command == "switch")
                {
                    String settings = sr.ListSettings(manager.CurrentProfile.AcquisitorConfig.switchPlugin);
                    manager.Window.WriteLine(settings);
                    continue;
                }
                if (command == "pg")
                {
                    String settings = sr.ListSettings(manager.CurrentProfile.AcquisitorConfig.pgPlugin);
                    manager.Window.WriteLine(settings);
                    continue;
                }
                if (command == "yag")
                {
                    String settings = sr.ListSettings(manager.CurrentProfile.AcquisitorConfig.yagPlugin);
                    manager.Window.WriteLine(settings);
                    continue;
                }
                if (command == "shot")
                {
                    String settings = sr.ListSettings(manager.CurrentProfile.AcquisitorConfig.shotGathererPlugin);
                    manager.Window.WriteLine(settings);
                    continue;
                }
                if (command == "gpib")
                {
                    String settings = sr.ListSettings(manager.CurrentProfile.AcquisitorConfig.gpibPlugin);
                    manager.Window.WriteLine(settings);
                    continue;
                }
                if (command == "gui")
                {
                    manager.Window.WriteLine("tofUpdate " + manager.CurrentProfile.GUIConfig.updateTOFsEvery);
                    manager.Window.WriteLine("spectraUpdate " + manager.CurrentProfile.GUIConfig.updateSpectraEvery);
                    manager.Window.WriteLine("switch " + manager.CurrentProfile.GUIConfig.displaySwitch);
                    manager.Window.WriteLine("average " + manager.CurrentProfile.GUIConfig.average);
                    continue;
                }

                // changing plugin settings ("prefix:setting value" or "prefix:setting ?")
                // Fix: use short-circuit || rather than non-short-circuit | on booleans.
                if (command.StartsWith("out:") || command.StartsWith("analog:")
                    || command.StartsWith("pg:") || command.StartsWith("yag:")
                    || command.StartsWith("switch:") || command.StartsWith("shot:")
                    || command.StartsWith("gui:") || command.StartsWith("gpib:"))
                {
                    String[] bits = command.Split(new char[] {':', ' '});
                    if (bits.Length != 3)
                    {
                        manager.Window.WriteLine("Syntax error.");
                        continue;
                    }
                    // special case for GUI settings (it's not a plugin)
                    if (bits[0] == "gui")
                    {
                        if (groupEditMode)
                        {
                            manager.Window.WriteLine("Sorry, but, hilariously, there is no "
                                + "group edit mode for GUI settings.");
                            continue;
                        }
                        GUIConfiguration guiConfig = manager.CurrentProfile.GUIConfig;
                        try
                        {
                            if (bits[1] == "tofUpdate")
                            {
                                guiConfig.updateTOFsEvery = Convert.ToInt32(bits[2]);
                                manager.Window.WriteLine("GUI:tofUpdate updated.");
                                continue;
                            }
                            if (bits[1] == "spectraUpdate")
                            {
                                guiConfig.updateSpectraEvery = Convert.ToInt32(bits[2]);
                                manager.Window.WriteLine("GUI:spectraUpdate updated.");
                                continue;
                            }
                            if (bits[1] == "switch")
                            {
                                guiConfig.displaySwitch = Convert.ToBoolean(bits[2]);
                                manager.Window.WriteLine("GUI:switch updated.");
                                continue;
                            }
                            if (bits[1] == "average")
                            {
                                guiConfig.average = Convert.ToBoolean(bits[2]);
                                manager.Window.WriteLine("GUI:average updated.");
                                continue;
                            }
                            manager.Window.WriteLine("Unrecognised parameter");
                        }
                        catch (Exception)
                        {
                            manager.Window.WriteLine("Error.");
                        }
                    }
                    else
                    {
                        if (groupEditMode)
                        {
                            // first, check to make sure that every profile in the group has such
                            // a setting.
                            ArrayList groupProfiles = manager.ProfilesInGroup(manager.CurrentProfile.Group);
                            bool fieldFlag = true;
                            foreach (Profile p in groupProfiles)
                            {
                                AcquisitorPlugin pl = PluginForString(p, bits[0]);
                                if (!sr.HasField(pl, bits[1])) fieldFlag = false;
                            }
                            if (!fieldFlag)
                            {
                                manager.Window.WriteLine("You can only change the value of a setting in group "
                                    + "edit mode if all profiles in the group have that setting.");
                                continue;
                            }
                            // if so, then set them all
                            foreach (Profile p in groupProfiles)
                            {
                                AcquisitorPlugin plugin = PluginForString(p, bits[0]);
                                if (sr.SetField(plugin, bits[1], bits[2]))
                                    manager.Window.WriteLine(p.Name + ":" + bits[0] + ":" + bits[1] + " modified.");
                                else manager.Window.WriteLine("Error setting field");
                            }
                        }
                        else
                        {
                            AcquisitorPlugin plugin = PluginForString(manager.CurrentProfile, bits[0]);
                            // "?" queries the current value instead of setting it
                            if (bits[2] == "?")
                                manager.Window.WriteLine(bits[0] + ":" + bits[1] + " = "
                                    + sr.GetField(plugin, bits[1]).ToString());
                            else if (sr.SetField(plugin, bits[1], bits[2]))
                                manager.Window.WriteLine(bits[0] + ":" + bits[1] + " modified.");
                            else manager.Window.WriteLine("Error setting field");
                        }
                    }
                    continue;
                }

                // if we reach here there must be a syntax error
                manager.Window.WriteLine("Syntax error");
            }
        }

        /// <summary>
        /// Returns tab-completion suggestions for a partial "prefix:setting" command,
        /// or null when the stub is not in that form or the prefix names no plugin.
        /// </summary>
        public String[] GetCommandSuggestions(String commandStub)
        {
            String[] bits = commandStub.Split(new char[] {':'});
            // return null if can't help
            if (bits.Length != 2) return null;
            ArrayList suggestions = new ArrayList();
            AcquisitorPlugin plugin = PluginForString( manager.CurrentProfile, bits[0]);
            if (plugin == null) return null;
            String[] fieldNames = sr.ListSettingNames(plugin);
            if (fieldNames == null) return null;
            for (int i = 0 ; i < fieldNames.Length ; i++)
                if (fieldNames[i].StartsWith(bits[1])) suggestions.Add(bits[0] + ":" + fieldNames[i]);
            String[] r = new String[suggestions.Count];
            suggestions.CopyTo(r, 0);
            return r;
        }

        /// <summary>
        /// Maps a command prefix ("out", "pg", ...) to the corresponding plugin of the
        /// given profile. Returns null for an unrecognised prefix.
        /// </summary>
        public AcquisitorPlugin PluginForString(Profile p, String pluginType)
        {
            AcquisitorPlugin plugin = null;
            switch(pluginType)
            {
                case "out":
                    plugin = p.AcquisitorConfig.outputPlugin;
                    break;
                case "pg":
                    plugin = p.AcquisitorConfig.pgPlugin;
                    break;
                case "switch":
                    plugin = p.AcquisitorConfig.switchPlugin;
                    break;
                case "shot":
                    plugin = p.AcquisitorConfig.shotGathererPlugin;
                    break;
                case "analog":
                    plugin = p.AcquisitorConfig.analogPlugin;
                    break;
                case "yag":
                    plugin = p.AcquisitorConfig.yagPlugin;
                    break;
                // Fix: Run() accepts "gpib:" commands but this switch was missing the
                // gpib case, so gpib settings could never be read or set via the shell.
                case "gpib":
                    plugin = p.AcquisitorConfig.gpibPlugin;
                    break;
            }
            return plugin;
        }

        /// <summary>
        /// Lists the available plugins in the window and reads the user's numeric choice.
        /// Returns the chosen index, or -1 for invalid input.
        /// </summary>
        private int ChoosePluginDialog(String[] plugins)
        {
            StringBuilder sb = new StringBuilder();
            for (int i = 0 ; i < plugins.Length ; i++)
                sb.Append(" " + i + ": " + plugins[i] + Environment.NewLine);
            sb.Append("Choose a plugin:");
            manager.Window.WriteLine(sb.ToString());
            String pluginNumber = manager.Window.GetNextLine();
            try
            {
                int index = Convert.ToInt32(pluginNumber);
                if (index >= plugins.Length || index < 0)
                {
                    manager.Window.WriteLine("Invalid input");
                    return -1;
                }
                manager.Window.WriteLine(plugins[index] + " selected.");
                return index;
            }
            catch (System.FormatException)
            {
                manager.Window.WriteLine("Invalid input.");
            }
            return -1;
        }

        /// <summary>
        /// Interactive tweak loop, used while acquiring: "tweak &lt;field&gt; &lt;increment&gt;"
        /// then i/d to increment/decrement the pg plugin field, e to exit.
        /// </summary>
        private void TweakMode(String command)
        {
            String[] bits = command.Split(new char[] {' '});
            if (bits.Length != 3)
            {
                manager.Window.WriteLine("Syntax error.");
                return;
            }
            // check if this is a valid parameter to tweak
            PatternPlugin plugin = manager.CurrentProfile.AcquisitorConfig.pgPlugin;
            if (!sr.HasField(plugin, bits[1]))
            {
                manager.Window.WriteLine("The current profile's pg plugin has no field named " + bits[1]);
                return;
            }
            // is the increment valid
            int increment = 0;
            try
            {
                increment = Convert.ToInt32(bits[2]);
            }
            catch (Exception)
            {
                manager.Window.WriteLine("Invalid increment");
                return;
            }
            manager.Window.WriteLine("Tweaking - i for increment, d for decrement, e for exit.");
            for (;;)
            {
                String s = manager.Window.GetNextLine();
                // check if the user wants to exit. Also check if acquisition has stopped.
                // This is not ideal but should stop anything terrible happening.
                if (s == "e" || Controller.GetController().appState != Controller.AppState.running)
                {
                    manager.Window.WriteLine("Exiting tweak mode");
                    return;
                }
                if (s == "i")
                {
                    manager.Window.WriteLine("Incrementing " + bits[1] + " by " + increment + ".");
                    int oldValue = (int)sr.GetField(plugin, bits[1]);
                    int newValue = oldValue + increment;
                    manager.Window.WriteLine("New value: " + newValue);
                    sr.SetField(plugin, bits[1], newValue.ToString());
                    manager.FireTweak(new TweakEventArgs(bits[1], newValue));
                    continue;
                }
                if (s == "d")
                {
                    manager.Window.WriteLine("Decrementing " + bits[1] + " by " + increment + ".");
                    int oldValue = (int)sr.GetField(plugin, bits[1]);
                    int newValue = oldValue - increment;
                    manager.Window.WriteLine("New value: " + newValue);
                    sr.SetField(plugin, bits[1], newValue.ToString());
                    manager.FireTweak(new TweakEventArgs(bits[1], newValue));
                    continue;
                }
                manager.Window.WriteLine("Syntax error");
            }
        }
    }
}
/*
 * Exchange Web Services Managed API
 *
 * Copyright (c) Microsoft Corporation
 * All rights reserved.
 *
 * MIT License
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
 * to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or
 * substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

namespace Microsoft.Exchange.WebServices.Data
{
    using System;
    using System.Collections.Generic;
    using System.Collections.ObjectModel;

    /// <summary>
    /// Represents a collection of Conversation related properties.
    /// Properties available on this object are defined in the ConversationSchema class.
    /// </summary>
    [ServiceObjectDefinition(XmlElementNames.Conversation)]
    public class Conversation : ServiceObject
    {
        /// <summary>
        /// Initializes an unsaved local instance of <see cref="Conversation"/>.
        /// </summary>
        /// <param name="service">The ExchangeService object to which the item will be bound.</param>
        internal Conversation(ExchangeService service)
            : base(service)
        {
        }

        /// <summary>
        /// Internal method to return the schema associated with this type of object.
        /// </summary>
        /// <returns>The schema associated with this type of object.</returns>
        internal override ServiceObjectSchema GetSchema()
        {
            return ConversationSchema.Instance;
        }

        /// <summary>
        /// Gets the minimum required server version.
        /// </summary>
        /// <returns>Earliest Exchange version in which this service object type is supported.</returns>
        internal override ExchangeVersion GetMinimumRequiredServerVersion()
        {
            return ExchangeVersion.Exchange2010_SP1;
        }

        /// <summary>
        /// The property definition for the Id of this object.
        /// </summary>
        /// <returns>A PropertyDefinition instance.</returns>
        internal override PropertyDefinition GetIdPropertyDefinition()
        {
            return ConversationSchema.Id;
        }

        #region Not Supported Methods or properties

        /// <summary>
        /// This method is not supported in this object.
        /// Loads the specified set of properties on the object.
        /// </summary>
        /// <param name="propertySet">The properties to load.</param>
        internal override void InternalLoad(PropertySet propertySet)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// This is not supported in this object.
        /// Deletes the object.
        /// </summary>
        /// <param name="deleteMode">The deletion mode.</param>
        /// <param name="sendCancellationsMode">Indicates whether meeting cancellation messages should be sent.</param>
        /// <param name="affectedTaskOccurrences">Indicate which occurrence of a recurring task should be deleted.</param>
        internal override void InternalDelete(DeleteMode deleteMode, SendCancellationsMode? sendCancellationsMode, AffectedTaskOccurrence? affectedTaskOccurrences)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// This method is not supported in this object.
        /// Gets the name of the change XML element.
        /// </summary>
        /// <returns>XML element name.</returns>
        internal override string GetChangeXmlElementName()
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// This method is not supported in this object.
        /// Gets the name of the delete field XML element.
        /// </summary>
        /// <returns>XML element name.</returns>
        internal override string GetDeleteFieldXmlElementName()
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// This method is not supported in this object.
        /// Gets the name of the set field XML element.
        /// </summary>
        /// <returns>XML element name.</returns>
        internal override string GetSetFieldXmlElementName()
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// This method is not supported in this object.
        /// Gets a value indicating whether a time zone SOAP header should be emitted in a CreateItem
        /// or UpdateItem request so this item can be properly saved or updated.
        /// </summary>
        /// <param name="isUpdateOperation">Indicates whether the operation being performed is an update operation.</param>
        /// <returns><c>true</c> if a time zone SOAP header should be emitted; otherwise, <c>false</c>.</returns>
        internal override bool GetIsTimeZoneHeaderRequired(bool isUpdateOperation)
        {
            throw new NotSupportedException();
        }

        #endregion

        #region Conversation Action Methods

        /// <summary>
        /// Sets up a conversation so that any item received within that conversation is always categorized.
        /// Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="categories">The categories that should be stamped on items in the conversation.</param>
        /// <param name="processSynchronously">Indicates whether the method should return only once enabling this rule and stamping existing items
        /// in the conversation is completely done. If processSynchronously is false, the method returns immediately.
        /// </param>
        public void EnableAlwaysCategorizeItems(IEnumerable<string> categories, bool processSynchronously)
        {
            this.Service.EnableAlwaysCategorizeItemsInConversations(
                new ConversationId[] { this.Id },
                categories,
                processSynchronously)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets up a conversation so that any item received within that conversation is no longer categorized.
        /// Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="processSynchronously">Indicates whether the method should return only once disabling this rule and removing the categories from existing items
        /// in the conversation is completely done. If processSynchronously is false, the method returns immediately.
        /// </param>
        public void DisableAlwaysCategorizeItems(bool processSynchronously)
        {
            this.Service.DisableAlwaysCategorizeItemsInConversations(
                new ConversationId[] { this.Id },
                processSynchronously)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets up a conversation so that any item received within that conversation is always moved to Deleted Items folder.
        /// Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="processSynchronously">Indicates whether the method should return only once enabling this rule and deleting existing items
        /// in the conversation is completely done. If processSynchronously is false, the method returns immediately.
        /// </param>
        public void EnableAlwaysDeleteItems(bool processSynchronously)
        {
            this.Service.EnableAlwaysDeleteItemsInConversations(
                new ConversationId[] { this.Id },
                processSynchronously)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets up a conversation so that any item received within that conversation is no longer moved to Deleted Items folder.
        /// Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="processSynchronously">Indicates whether the method should return only once disabling this rule and restoring the items
        /// in the conversation is completely done. If processSynchronously is false, the method returns immediately.
        /// </param>
        public void DisableAlwaysDeleteItems(bool processSynchronously)
        {
            this.Service.DisableAlwaysDeleteItemsInConversations(
                new ConversationId[] { this.Id },
                processSynchronously)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets up a conversation so that any item received within that conversation is always moved to a specific folder.
        /// Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="destinationFolderId">The Id of the folder to which conversation items should be moved.</param>
        /// <param name="processSynchronously">Indicates whether the method should return only once enabling this rule
        /// and moving existing items in the conversation is completely done. If processSynchronously is false, the method
        /// returns immediately.
        /// </param>
        public void EnableAlwaysMoveItems(FolderId destinationFolderId, bool processSynchronously)
        {
            this.Service.EnableAlwaysMoveItemsInConversations(
                new ConversationId[] { this.Id },
                destinationFolderId,
                processSynchronously)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets up a conversation so that any item received within that conversation is no longer moved to a specific
        /// folder. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="processSynchronously">Indicates whether the method should return only once disabling this
        /// rule is completely done. If processSynchronously is false, the method returns immediately.
        /// </param>
        public void DisableAlwaysMoveItemsInConversation(bool processSynchronously)
        {
            this.Service.DisableAlwaysMoveItemsInConversations(
                new ConversationId[] { this.Id },
                processSynchronously)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Deletes items in the specified conversation.
        /// Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order to be deleted. If contextFolderId is
        /// null, items across the entire mailbox are deleted.</param>
        /// <param name="deleteMode">The deletion mode.</param>
        public void DeleteItems(
            FolderId contextFolderId,
            DeleteMode deleteMode)
        {
            this.Service.DeleteItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                deleteMode)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Moves items in the specified conversation to a specific folder.
        /// Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order to be moved. If contextFolderId is null,
        /// items across the entire mailbox are moved.</param>
        /// <param name="destinationFolderId">The Id of the destination folder.</param>
        public void MoveItemsInConversation(
            FolderId contextFolderId,
            FolderId destinationFolderId)
        {
            this.Service.MoveItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                destinationFolderId)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Copies items in the specified conversation to a specific folder. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order to be copied. If contextFolderId
        /// is null, items across the entire mailbox are copied.</param>
        /// <param name="destinationFolderId">The Id of the destination folder.</param>
        public void CopyItemsInConversation(
            FolderId contextFolderId,
            FolderId destinationFolderId)
        {
            this.Service.CopyItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                destinationFolderId)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets the read state of items in the specified conversation. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order for their read state to
        /// be set. If contextFolderId is null, the read states of items across the entire mailbox are set.</param>
        /// <param name="isRead">if set to <c>true</c>, conversation items are marked as read; otherwise they are
        /// marked as unread.</param>
        public void SetReadStateForItemsInConversation(
            FolderId contextFolderId,
            bool isRead)
        {
            this.Service.SetReadStateForItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                isRead)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets the read state of items in the specified conversation. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order for their read state to
        /// be set. If contextFolderId is null, the read states of items across the entire mailbox are set.</param>
        /// <param name="isRead">if set to <c>true</c>, conversation items are marked as read; otherwise they are
        /// marked as unread.</param>
        /// <param name="suppressReadReceipts">if set to <c>true</c> read receipts are suppressed.</param>
        public void SetReadStateForItemsInConversation(
            FolderId contextFolderId,
            bool isRead,
            bool suppressReadReceipts)
        {
            this.Service.SetReadStateForItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                isRead,
                suppressReadReceipts)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Sets the retention policy of items in the specified conversation. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order for their retention policy to
        /// be set. If contextFolderId is null, the retention policy of items across the entire mailbox are set.</param>
        /// <param name="retentionPolicyType">Retention policy type.</param>
        /// <param name="retentionPolicyTagId">Retention policy tag id. Null will clear the policy.</param>
        public void SetRetentionPolicyForItemsInConversation(
            FolderId contextFolderId,
            RetentionType retentionPolicyType,
            Guid? retentionPolicyTagId)
        {
            this.Service.SetRetentionPolicyForItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                retentionPolicyType,
                retentionPolicyTagId)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Flag conversation items as complete. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order to be flagged as complete. If contextFolderId is
        /// null, items in conversation across the entire mailbox are marked as complete.</param>
        /// <param name="completeDate">The complete date (can be null).</param>
        public void FlagItemsComplete(
            FolderId contextFolderId,
            DateTime? completeDate)
        {
            Flag flag = new Flag() { FlagStatus = ItemFlagStatus.Complete };
            if (completeDate.HasValue)
            {
                flag.CompleteDate = completeDate.Value;
            }
            this.Service.SetFlagStatusForItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                flag)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Clear flags for conversation items. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order to be unflagged. If contextFolderId is
        /// null, flags for items in conversation across the entire mailbox are cleared.</param>
        public void ClearItemFlags(FolderId contextFolderId)
        {
            Flag flag = new Flag() { FlagStatus = ItemFlagStatus.NotFlagged };
            this.Service.SetFlagStatusForItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                flag)[0].ThrowIfNecessary();
        }

        /// <summary>
        /// Flags conversation items. Calling this method results in a call to EWS.
        /// </summary>
        /// <param name="contextFolderId">The Id of the folder items must belong to in order to be flagged. If contextFolderId is
        /// null, items in conversation across the entire mailbox are flagged.</param>
        /// <param name="startDate">The start date (can be null).</param>
        /// <param name="dueDate">The due date (can be null).</param>
        public void FlagItems(
            FolderId contextFolderId,
            DateTime? startDate,
            DateTime? dueDate)
        {
            Flag flag = new Flag() { FlagStatus = ItemFlagStatus.Flagged };
            if (startDate.HasValue)
            {
                flag.StartDate = startDate.Value;
            }
            if (dueDate.HasValue)
            {
                flag.DueDate = dueDate.Value;
            }
            this.Service.SetFlagStatusForItemsInConversations(
                new KeyValuePair<ConversationId, DateTime?>[]
                {
                    new KeyValuePair<ConversationId, DateTime?>(
                        this.Id,
                        this.GlobalLastDeliveryTime)
                },
                contextFolderId,
                flag)[0].ThrowIfNecessary();
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets the Id of this Conversation.
        /// </summary>
        public ConversationId Id
        {
            get { return (ConversationId)this.PropertyBag[this.GetIdPropertyDefinition()]; }
        }

        /// <summary>
        /// Gets the topic of this Conversation.
        /// </summary>
        public String Topic
        {
            get
            {
                String returnValue = String.Empty;
                // This property need not be present hence the property bag may not contain it.
                // Check for the presence of this property before accessing it.
                if (this.PropertyBag.Contains(ConversationSchema.Topic))
                {
                    this.PropertyBag.TryGetProperty<string>(
                        ConversationSchema.Topic,
                        out returnValue);
                }
                return returnValue;
            }
        }

        /// <summary>
        /// Gets a list of all the people who have received messages in this conversation in the current folder only.
        /// </summary>
        public StringList UniqueRecipients
        {
            get { return (StringList)this.PropertyBag[ConversationSchema.UniqueRecipients]; }
        }

        /// <summary>
        /// Gets a list of all the people who have received messages in this conversation across all folders in the mailbox.
        /// </summary>
        public StringList GlobalUniqueRecipients
        {
            get { return (StringList)this.PropertyBag[ConversationSchema.GlobalUniqueRecipients]; }
        }

        /// <summary>
        /// Gets a list of all the people who have sent messages that are currently unread in this conversation in the current folder only.
        /// </summary>
        public StringList UniqueUnreadSenders
        {
            get
            {
                StringList unreadSenders = null;
                // This property need not be present hence the property bag may not contain it.
                // Check for the presence of this property before accessing it.
                if (this.PropertyBag.Contains(ConversationSchema.UniqueUnreadSenders))
                {
                    this.PropertyBag.TryGetProperty<StringList>(
                        ConversationSchema.UniqueUnreadSenders,
                        out unreadSenders);
                }
                return unreadSenders;
            }
        }

        /// <summary>
        /// Gets a list of all the people who have sent messages that are currently unread in this conversation across all folders in the mailbox.
        /// </summary>
        public StringList GlobalUniqueUnreadSenders
        {
            get
            {
                StringList unreadSenders = null;
                // This property need not be present hence the property bag may not contain it.
                // Check for the presence of this property before accessing it.
                if (this.PropertyBag.Contains(ConversationSchema.GlobalUniqueUnreadSenders))
                {
                    this.PropertyBag.TryGetProperty<StringList>(
                        ConversationSchema.GlobalUniqueUnreadSenders,
                        out unreadSenders);
                }
                return unreadSenders;
            }
        }

        /// <summary>
        /// Gets a list of all the people who have sent messages in this conversation in the current folder only.
        /// </summary>
        public StringList UniqueSenders
        {
            get { return (StringList)this.PropertyBag[ConversationSchema.UniqueSenders]; }
        }

        /// <summary>
        /// Gets a list of all the people who have sent messages in this conversation across all folders in the mailbox.
        /// </summary>
        public StringList GlobalUniqueSenders
        {
            get { return (StringList)this.PropertyBag[ConversationSchema.GlobalUniqueSenders]; }
        }

        /// <summary>
        /// Gets the delivery time of the message that was last received in this conversation in the current folder only.
        /// </summary>
        public DateTime LastDeliveryTime
        {
            get { return (DateTime)this.PropertyBag[ConversationSchema.LastDeliveryTime]; }
        }

        /// <summary>
        /// Gets the delivery time of the message that was last received in this conversation across all folders in the mailbox.
        /// </summary>
        public DateTime GlobalLastDeliveryTime
        {
            get { return (DateTime)this.PropertyBag[ConversationSchema.GlobalLastDeliveryTime]; }
        }

        /// <summary>
        /// Gets a list summarizing the categories stamped on messages in this conversation, in the current folder only.
        /// </summary>
        public StringList Categories
        {
            get
            {
                StringList returnValue = null;
                // This property need not be present hence the property bag may not contain it.
                // Check for the presence of this property before accessing it.
                if (this.PropertyBag.Contains(ConversationSchema.Categories))
                {
                    this.PropertyBag.TryGetProperty<StringList>(
                        ConversationSchema.Categories,
                        out returnValue);
                }
                return returnValue;
            }
        }

        /// <summary>
        /// Gets a list summarizing the categories stamped on messages in this conversation, across all folders in the mailbox.
        /// </summary>
        public StringList GlobalCategories
        {
            get
            {
                StringList returnValue = null;
                // This property need not be present hence the property bag may not contain it.
                // Check for the presence of this property before accessing it.
                if (this.PropertyBag.Contains(ConversationSchema.GlobalCategories))
                {
                    this.PropertyBag.TryGetProperty<StringList>(
                        ConversationSchema.GlobalCategories,
                        out returnValue);
                }
                return returnValue;
            }
        }

        /// <summary>
        /// Gets the flag status for this conversation, calculated by aggregating individual messages flag status in the current folder.
        /// </summary>
        public ConversationFlagStatus FlagStatus
        {
            get
            {
                ConversationFlagStatus returnValue = ConversationFlagStatus.NotFlagged;
                // This property need not be present hence the property bag may not contain it.
                // Check for the presence of this property before accessing it.
                if (this.PropertyBag.Contains(ConversationSchema.FlagStatus))
                {
                    this.PropertyBag.TryGetProperty<ConversationFlagStatus>(ConversationSchema.FlagStatus, out returnValue);
                }
                return returnValue;
            }
        }

        /// <summary>
        /// Gets the flag status for this conversation, calculated by aggregating individual messages flag status across all folders in the mailbox.
        /// </summary>
        public ConversationFlagStatus GlobalFlagStatus
        {
            get
            {
                ConversationFlagStatus returnValue = ConversationFlagStatus.NotFlagged;
                // This property need not be present hence the property bag may not contain it.
                // Check for the presence of this property before accessing it.
                if (this.PropertyBag.Contains(ConversationSchema.GlobalFlagStatus))
                {
                    this.PropertyBag.TryGetProperty<ConversationFlagStatus>(
                        ConversationSchema.GlobalFlagStatus,
                        out returnValue);
                }
                return returnValue;
            }
        }

        /// <summary>
        /// Gets a value indicating if at least one message in this conversation, in the current folder only, has an attachment.
        /// </summary>
        public bool HasAttachments
        {
            get { return (bool)this.PropertyBag[ConversationSchema.HasAttachments]; }
        }

        /// <summary>
        /// Gets a value indicating if at least one message in this conversation, across all folders in the mailbox, has an attachment.
        /// </summary>
        public bool GlobalHasAttachments
        {
            get { return (bool)this.PropertyBag[ConversationSchema.GlobalHasAttachments]; }
        }

        /// <summary>
        /// Gets the total number of messages in this conversation in the current folder only.
        /// </summary>
        public int MessageCount
        {
            get { return (int)this.PropertyBag[ConversationSchema.MessageCount]; }
        }

        /// <summary>
        /// Gets the total number of messages in this conversation across all folders in the mailbox.
        /// </summary>
        public int GlobalMessageCount
        {
            get { return (int)this.PropertyBag[ConversationSchema.GlobalMessageCount]; }
        }

        /// <summary>
        /// Gets the total number of unread messages in this conversation in the current folder only.
/// </summary> public int UnreadCount { get { int returnValue = 0; // This property need not be present hence the property bag may not contain it. // Check for the presence of this property before accessing it. if (this.PropertyBag.Contains(ConversationSchema.UnreadCount)) { this.PropertyBag.TryGetProperty<int>( ConversationSchema.UnreadCount, out returnValue); } return returnValue; } } /// <summary> /// Gets the total number of unread messages in this conversation across all folders in the mailbox. /// </summary> public int GlobalUnreadCount { get { int returnValue = 0; // This property need not be present hence the property bag may not contain it. // Check for the presence of this property before accessing it. if (this.PropertyBag.Contains(ConversationSchema.GlobalUnreadCount)) { this.PropertyBag.TryGetProperty<int>( ConversationSchema.GlobalUnreadCount, out returnValue); } return returnValue; } } /// <summary> /// Gets the size of this conversation, calculated by adding the sizes of all messages in the conversation in the current folder only. /// </summary> public int Size { get { return (int)this.PropertyBag[ConversationSchema.Size]; } } /// <summary> /// Gets the size of this conversation, calculated by adding the sizes of all messages in the conversation across all folders in the mailbox. /// </summary> public int GlobalSize { get { return (int)this.PropertyBag[ConversationSchema.GlobalSize]; } } /// <summary> /// Gets a list summarizing the classes of the items in this conversation, in the current folder only. /// </summary> public StringList ItemClasses { get { return (StringList)this.PropertyBag[ConversationSchema.ItemClasses]; } } /// <summary> /// Gets a list summarizing the classes of the items in this conversation, across all folders in the mailbox. 
/// </summary> public StringList GlobalItemClasses { get { return (StringList)this.PropertyBag[ConversationSchema.GlobalItemClasses]; } } /// <summary> /// Gets the importance of this conversation, calculated by aggregating individual messages importance in the current folder only. /// </summary> public Importance Importance { get { return (Importance)this.PropertyBag[ConversationSchema.Importance]; } } /// <summary> /// Gets the importance of this conversation, calculated by aggregating individual messages importance across all folders in the mailbox. /// </summary> public Importance GlobalImportance { get { return (Importance)this.PropertyBag[ConversationSchema.GlobalImportance]; } } /// <summary> /// Gets the Ids of the messages in this conversation, in the current folder only. /// </summary> public ItemIdCollection ItemIds { get { return (ItemIdCollection)this.PropertyBag[ConversationSchema.ItemIds]; } } /// <summary> /// Gets the Ids of the messages in this conversation, across all folders in the mailbox. /// </summary> public ItemIdCollection GlobalItemIds { get { return (ItemIdCollection)this.PropertyBag[ConversationSchema.GlobalItemIds]; } } /// <summary> /// Gets the date and time this conversation was last modified. /// </summary> public DateTime LastModifiedTime { get { return (DateTime)this.PropertyBag[ConversationSchema.LastModifiedTime]; } } /// <summary> /// Gets the conversation instance key. /// </summary> public byte[] InstanceKey { get { return (byte[])this.PropertyBag[ConversationSchema.InstanceKey]; } } /// <summary> /// Gets the conversation Preview. /// </summary> public string Preview { get { return (string)this.PropertyBag[ConversationSchema.Preview]; } } /// <summary> /// Gets the conversation IconIndex. /// </summary> public IconIndex IconIndex { get { return (IconIndex)this.PropertyBag[ConversationSchema.IconIndex]; } } /// <summary> /// Gets the conversation global IconIndex. 
/// </summary> public IconIndex GlobalIconIndex { get { return (IconIndex)this.PropertyBag[ConversationSchema.GlobalIconIndex]; } } /// <summary> /// Gets the draft item ids. /// </summary> public ItemIdCollection DraftItemIds { get { return (ItemIdCollection)this.PropertyBag[ConversationSchema.DraftItemIds]; } } /// <summary> /// Gets a value indicating if at least one message in this conversation, in the current folder only, is an IRM. /// </summary> public bool HasIrm { get { return (bool)this.PropertyBag[ConversationSchema.HasIrm]; } } /// <summary> /// Gets a value indicating if at least one message in this conversation, across all folders in the mailbox, is an IRM. /// </summary> public bool GlobalHasIrm { get { return (bool)this.PropertyBag[ConversationSchema.GlobalHasIrm]; } } #endregion } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Text
{
    using System.Runtime.Serialization;
    using System.Security.Permissions;
    using System.Text;
    using System;
    using System.Diagnostics.Contracts;

    // A Decoder is used to decode a sequence of blocks of bytes into a
    // sequence of blocks of characters. Following instantiation of a decoder,
    // sequential blocks of bytes are converted into blocks of characters through
    // calls to the GetChars method. The decoder maintains state between the
    // conversions, allowing it to correctly decode byte sequences that span
    // adjacent blocks.
    //
    // Instances of specific implementations of the Decoder abstract base
    // class are typically obtained through calls to the GetDecoder method
    // of Encoding objects.
    //
#if FEATURE_SERIALIZATION
    [Serializable]
#endif
    internal class DecoderNLS : Decoder
#if FEATURE_SERIALIZATION
        , ISerializable
#endif
    {
        // Remember our encoding
        protected Encoding m_encoding;

        // Per-call state handed to the encoding worker (not serialized):
        // m_mustFlush      — the 'flush' argument of the current Get*/Convert call.
        // m_throwOnOverflow — true for GetChars/GetCharCount (throw on small buffer),
        //                     false for Convert (report partial progress instead).
        // m_bytesUsed      — written back by the encoding during Convert.
        [NonSerialized] protected bool m_mustFlush;
        [NonSerialized] internal bool m_throwOnOverflow;
        [NonSerialized] internal int m_bytesUsed;

        #region Serialization

        // Constructor called by serialization. called during deserialization.
        // Direct deserialization of DecoderNLS is not supported; GetObjectData
        // below substitutes Encoding.DefaultDecoder as the serialized type.
        internal DecoderNLS(SerializationInfo info, StreamingContext context)
        {
            throw new NotSupportedException(
                String.Format(
                    System.Globalization.CultureInfo.CurrentCulture,
                    Environment.GetResourceString("NotSupported_TypeCannotDeserialized"), this.GetType()));
        }

#if FEATURE_SERIALIZATION
        // ISerializable implementation. called during serialization.
        [System.Security.SecurityCritical]  // auto-generated_required
        void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context)
        {
            SerializeDecoder(info);
            info.AddValue("encoding", this.m_encoding);
            // Deserialize as the generic default decoder rather than this internal type.
            info.SetType(typeof(Encoding.DefaultDecoder));
        }
#endif

        #endregion Serialization

        internal DecoderNLS( Encoding encoding )
        {
            this.m_encoding = encoding;
            this.m_fallback = this.m_encoding.DecoderFallback;
            this.Reset();
        }

        // This is used by our child deserializers
        internal DecoderNLS( )
        {
            this.m_encoding = null;
            this.Reset();
        }

        public override void Reset()
        {
            // Only the fallback buffer carries state at this level; subclasses
            // with real state override HasState/Reset as needed.
            if (m_fallbackBuffer != null)
                m_fallbackBuffer.Reset();
        }

        public override unsafe int GetCharCount(byte[] bytes, int index, int count)
        {
            return GetCharCount(bytes, index, count, false);
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        public override unsafe int GetCharCount(byte[] bytes, int index, int count, bool flush)
        {
            // Validate Parameters
            if (bytes == null)
                throw new ArgumentNullException("bytes",
                      Environment.GetResourceString("ArgumentNull_Array"));

            if (index < 0 || count < 0)
                throw new ArgumentOutOfRangeException((index<0 ? "index" : "count"),
                      Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));

            if (bytes.Length - index < count)
                throw new ArgumentOutOfRangeException("bytes",
                      Environment.GetResourceString("ArgumentOutOfRange_IndexCountBuffer"));

            Contract.EndContractBlock();

            // Avoid null fixed problem
            // (fixed over a zero-length array yields a null pointer; substitute a 1-byte array)
            if (bytes.Length == 0)
                bytes = new byte[1];

            // Just call pointer version
            fixed (byte* pBytes = bytes)
                return GetCharCount(pBytes + index, count, flush);
        }

        [System.Security.SecurityCritical]  // auto-generated
        public unsafe override int GetCharCount(byte* bytes, int count, bool flush)
        {
            // Validate parameters
            if (bytes == null)
                throw new ArgumentNullException("bytes",
                      Environment.GetResourceString("ArgumentNull_Array"));

            if (count < 0)
                throw new ArgumentOutOfRangeException("count",
                      Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));

            Contract.EndContractBlock();

            // Remember the flush
            this.m_mustFlush = flush;
            this.m_throwOnOverflow = true;

            // By default just call the encoding version, no flush by default
            return m_encoding.GetCharCount(bytes, count, this);
        }

        public override unsafe int GetChars(byte[] bytes, int byteIndex, int byteCount,
                                              char[] chars, int charIndex)
        {
            return GetChars(bytes, byteIndex, byteCount, chars, charIndex, false);
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        public override unsafe int GetChars(byte[] bytes, int byteIndex, int byteCount,
                                              char[] chars, int charIndex, bool flush)
        {
            // Validate Parameters
            if (bytes == null || chars == null)
                throw new ArgumentNullException(bytes == null ? "bytes" : "chars",
                      Environment.GetResourceString("ArgumentNull_Array"));

            if (byteIndex < 0 || byteCount < 0)
                throw new ArgumentOutOfRangeException((byteIndex<0 ? "byteIndex" : "byteCount"),
                      Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));

            if ( bytes.Length - byteIndex < byteCount)
                throw new ArgumentOutOfRangeException("bytes",
                      Environment.GetResourceString("ArgumentOutOfRange_IndexCountBuffer"));

            if (charIndex < 0 || charIndex > chars.Length)
                throw new ArgumentOutOfRangeException("charIndex",
                      Environment.GetResourceString("ArgumentOutOfRange_Index"));

            Contract.EndContractBlock();

            // Avoid empty input fixed problem
            if (bytes.Length == 0)
                bytes = new byte[1];

            // Capture remaining capacity BEFORE the empty-array substitution below.
            int charCount = chars.Length - charIndex;
            if (chars.Length == 0)
                chars = new char[1];

            // Just call pointer version
            fixed (byte* pBytes = bytes)
                fixed (char* pChars = chars)
                    // Remember that charCount is # to decode, not size of array
                    return GetChars(pBytes + byteIndex, byteCount,
                                    pChars + charIndex, charCount, flush);
        }

        [System.Security.SecurityCritical]  // auto-generated
        public unsafe override int GetChars(byte* bytes, int byteCount,
                                              char* chars, int charCount, bool flush)
        {
            // Validate parameters
            if (chars == null || bytes == null)
                throw new ArgumentNullException((chars == null ? "chars" : "bytes"),
                      Environment.GetResourceString("ArgumentNull_Array"));

            if (byteCount < 0 || charCount < 0)
                throw new ArgumentOutOfRangeException((byteCount<0 ? "byteCount" : "charCount"),
                      Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));

            Contract.EndContractBlock();

            // Remember our flush
            m_mustFlush = flush;
            m_throwOnOverflow = true;

            // By default just call the encoding's version
            return m_encoding.GetChars(bytes, byteCount, chars, charCount, this);
        }

        // This method is used when the output buffer might not be big enough.
        // Just call the pointer version.  (This gets chars)
        [System.Security.SecuritySafeCritical]  // auto-generated
        public override unsafe void Convert(byte[] bytes, int byteIndex, int byteCount,
                                              char[] chars, int charIndex, int charCount, bool flush,
                                              out int bytesUsed, out int charsUsed, out bool completed)
        {
            // Validate parameters
            if (bytes == null || chars == null)
                throw new ArgumentNullException((bytes == null ? "bytes" : "chars"),
                      Environment.GetResourceString("ArgumentNull_Array"));

            if (byteIndex < 0 || byteCount < 0)
                throw new ArgumentOutOfRangeException((byteIndex<0 ? "byteIndex" : "byteCount"),
                      Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));

            if (charIndex < 0 || charCount < 0)
                throw new ArgumentOutOfRangeException((charIndex<0 ? "charIndex" : "charCount"),
                      Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));

            if (bytes.Length - byteIndex < byteCount)
                throw new ArgumentOutOfRangeException("bytes",
                      Environment.GetResourceString("ArgumentOutOfRange_IndexCountBuffer"));

            if (chars.Length - charIndex < charCount)
                throw new ArgumentOutOfRangeException("chars",
                      Environment.GetResourceString("ArgumentOutOfRange_IndexCountBuffer"));

            Contract.EndContractBlock();

            // Avoid empty input problem
            if (bytes.Length == 0)
                bytes = new byte[1];
            if (chars.Length == 0)
                chars = new char[1];

            // Just call the pointer version (public overrides can't do this)
            fixed (byte* pBytes = bytes)
            {
                fixed (char* pChars = chars)
                {
                    Convert(pBytes + byteIndex, byteCount, pChars + charIndex, charCount, flush,
                        out bytesUsed, out charsUsed, out completed);
                }
            }
        }

        // This is the version that used pointers.  We call the base encoding worker function
        // after setting our appropriate internal variables.  This is getting chars
        [System.Security.SecurityCritical]  // auto-generated
        public unsafe override void Convert(byte* bytes, int byteCount,
                                              char* chars, int charCount, bool flush,
                                              out int bytesUsed, out int charsUsed, out bool completed)
        {
            // Validate input parameters
            if (chars == null || bytes == null)
                throw new ArgumentNullException(chars == null ? "chars" : "bytes",
                      Environment.GetResourceString("ArgumentNull_Array"));

            if (byteCount < 0 || charCount < 0)
                throw new ArgumentOutOfRangeException((byteCount<0 ? "byteCount" : "charCount"),
                      Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));

            Contract.EndContractBlock();

            // We don't want to throw
            // (unlike GetChars, Convert reports partial progress instead of throwing on overflow)
            this.m_mustFlush = flush;
            this.m_throwOnOverflow = false;
            this.m_bytesUsed = 0;

            // Do conversion
            charsUsed = this.m_encoding.GetChars(bytes, byteCount, chars, charCount, this);
            bytesUsed = this.m_bytesUsed;

            // Its completed if they've used what they wanted AND if they didn't want flush or if we are flushed
            completed = (bytesUsed == byteCount) && (!flush || !this.HasState) &&
                (m_fallbackBuffer == null || m_fallbackBuffer.Remaining == 0);

            // Our data thingys are now full, we can return
        }

        public bool MustFlush
        {
            get
            {
                return m_mustFlush;
            }
        }

        // Anything left in our decoder?
        internal virtual bool HasState
        {
            get
            {
                return false;
            }
        }

        // Allow encoding to clear our must flush instead of throwing (in ThrowCharsOverflow)
        internal void ClearMustFlush()
        {
            m_mustFlush = false;
        }
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using QuantConnect.Data;
using QuantConnect.Interfaces;
using QuantConnect.Orders;
using QuantConnect.Securities;

namespace QuantConnect.Algorithm.CSharp
{
    /// <summary>
    /// This regression algorithm tests Out of The Money (OTM) index option expiry for short calls.
    /// We expect 2 orders from the algorithm, which are:
    ///
    ///   * Initial entry, sell SPX Call Option (expiring OTM)
    ///     - Profit the option premium, since the option was not assigned.
    ///
    ///   * Liquidation of SPX call OTM contract on the last trade date
    ///
    /// Additionally, we test delistings for index options and assert that our
    /// portfolio holdings reflect the orders the algorithm has submitted.
    /// </summary>
    public class IndexOptionShortCallOTMExpiryRegressionAlgorithm : QCAlgorithm, IRegressionAlgorithmDefinition
    {
        private Symbol _spx;
        private Symbol _spxOption;
        // The contract we expect the chain filter below to select (4250 call, 2021-01-15).
        private Symbol _expectedContract;

        public override void Initialize()
        {
            SetStartDate(2021, 1, 4);
            SetEndDate(2021, 1, 31);

            _spx = AddIndex("SPX", Resolution.Minute).Symbol;

            // Select an index option expiring OTM, and add it to the algorithm.
            // (Lowest-strike Jan 2021 call at or above 4250 — above the market, so it expires OTM.)
            _spxOption = AddIndexOptionContract(OptionChainProvider.GetOptionContractList(_spx, Time)
                .Where(x => x.ID.StrikePrice >= 4250m && x.ID.OptionRight == OptionRight.Call && x.ID.Date.Year == 2021 && x.ID.Date.Month == 1)
                .OrderBy(x => x.ID.StrikePrice)
                .Take(1)
                .Single(), Resolution.Minute).Symbol;

            _expectedContract = QuantConnect.Symbol.CreateOption(_spx, Market.USA, OptionStyle.European, OptionRight.Call, 4250m, new DateTime(2021, 1, 15));
            if (_spxOption != _expectedContract)
            {
                throw new Exception($"Contract {_expectedContract} was not found in the chain");
            }

            // Enter the short call one minute after the open on the next trading day.
            Schedule.On(DateRules.Tomorrow, TimeRules.AfterMarketOpen(_spx, 1), () =>
            {
                MarketOrder(_spxOption, -1);
            });
        }

        public override void OnData(Slice data)
        {
            // Assert delistings, so that we can make sure that we receive the delisting warnings at
            // the expected time. These assertions detect bug #4872
            foreach (var delisting in data.Delistings.Values)
            {
                if (delisting.Type == DelistingType.Warning)
                {
                    if (delisting.Time != new DateTime(2021, 1, 15))
                    {
                        throw new Exception($"Delisting warning issued at unexpected date: {delisting.Time}");
                    }
                }
                if (delisting.Type == DelistingType.Delisted)
                {
                    if (delisting.Time != new DateTime(2021, 1, 16))
                    {
                        throw new Exception($"Delisting happened at unexpected date: {delisting.Time}");
                    }
                }
            }
        }

        public override void OnOrderEvent(OrderEvent orderEvent)
        {
            if (orderEvent.Status != OrderStatus.Filled)
            {
                // There's lots of noise with OnOrderEvent, but we're only interested in fills.
                return;
            }

            if (!Securities.ContainsKey(orderEvent.Symbol))
            {
                throw new Exception($"Order event Symbol not found in Securities collection: {orderEvent.Symbol}");
            }

            var security = Securities[orderEvent.Symbol];
            if (security.Symbol == _spx)
            {
                // Only the option contract should trade; the underlying index never does.
                throw new Exception($"Expected no order events for underlying Symbol {security.Symbol}");
            }
            if (security.Symbol == _expectedContract)
            {
                AssertIndexOptionContractOrder(orderEvent, security);
            }
            else
            {
                throw new Exception($"Received order event for unknown Symbol: {orderEvent.Symbol}");
            }

            Log($"{orderEvent}");
        }

        private void AssertIndexOptionContractOrder(OrderEvent orderEvent, Security optionContract)
        {
            // Sell fill = entry (short one contract); Buy fill = liquidation (flat).
            if (orderEvent.Direction == OrderDirection.Sell && optionContract.Holdings.Quantity != -1)
            {
                throw new Exception($"No holdings were created for option contract {optionContract.Symbol}");
            }
            if (orderEvent.Direction == OrderDirection.Buy && optionContract.Holdings.Quantity != 0)
            {
                throw new Exception("Expected no options holdings after closing position");
            }
            // The contract expires OTM, so it must never be assigned.
            if (orderEvent.IsAssignment)
            {
                throw new Exception($"Assignment was not expected for {orderEvent.Symbol}");
            }
        }

        /// <summary>
        /// Ran at the end of the algorithm to ensure the algorithm has no holdings
        /// </summary>
        /// <exception cref="Exception">The algorithm has holdings</exception>
        public override void OnEndOfAlgorithm()
        {
            if (Portfolio.Invested)
            {
                throw new Exception($"Expected no holdings at end of algorithm, but are invested in: {string.Join(", ", Portfolio.Keys)}");
            }
        }

        /// <summary>
        /// This is used by the regression test system to indicate if the open source Lean repository has the required data to run this algorithm.
        /// </summary>
        public bool CanRunLocally { get; } = true;

        /// <summary>
        /// This is used by the regression test system to indicate which languages this algorithm is written in.
        /// </summary>
        public Language[] Languages { get; } = { Language.CSharp, Language.Python };

        /// <summary>
        /// This is used by the regression test system to indicate what the expected statistics are from running the algorithm
        /// </summary>
        public Dictionary<string, string> ExpectedStatistics => new Dictionary<string, string>
        {
            {"Total Trades", "2"},
            {"Average Win", "0.01%"},
            {"Average Loss", "0%"},
            {"Compounding Annual Return", "0.142%"},
            {"Drawdown", "0%"},
            {"Expectancy", "0"},
            {"Net Profit", "0.010%"},
            {"Sharpe Ratio", "5.161"},
            {"Probabilistic Sharpe Ratio", "99.241%"},
            {"Loss Rate", "0%"},
            {"Win Rate", "100%"},
            {"Profit-Loss Ratio", "0"},
            {"Alpha", "0.001"},
            {"Beta", "-0"},
            {"Annual Standard Deviation", "0"},
            {"Annual Variance", "0"},
            {"Information Ratio", "-0.358"},
            {"Tracking Error", "0.155"},
            {"Treynor Ratio", "-4.064"},
            {"Total Fees", "$0.00"},
            {"Estimated Strategy Capacity", "$22000.00"},
            {"Lowest Capacity Asset", "SPX XL80P59H5E6M|SPX 31"},
            {"Fitness Score", "0"},
            {"Kelly Criterion Estimate", "0"},
            {"Kelly Criterion Probability Value", "0"},
            {"Sortino Ratio", "79228162514264337593543950335"},
            {"Return Over Maximum Drawdown", "79228162514264337593543950335"},
            {"Portfolio Turnover", "0"},
            {"Total Insights Generated", "0"},
            {"Total Insights Closed", "0"},
            {"Total Insights Analysis Completed", "0"},
            {"Long Insight Count", "0"},
            {"Short Insight Count", "0"},
            {"Long/Short Ratio", "100%"},
            {"Estimated Monthly Alpha Value", "$0"},
            {"Total Accumulated Estimated Alpha Value", "$0"},
            {"Mean Population Estimated Insight Value", "$0"},
            {"Mean Population Direction", "0%"},
            {"Mean Population Magnitude", "0%"},
            {"Rolling Averaged Population Direction", "0%"},
            {"Rolling Averaged Population Magnitude", "0%"},
            {"OrderListHash", "b55e2b2bd35bc3200e228b4e6e77dd90"}
        };
    }
}
using System;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;

namespace karl_assign1_pong
{
    #region Game Variables
    /// <summary>
    /// This is the main type for your game
    /// </summary>
    public class Game1 : Microsoft.Xna.Framework.Game
    {
        GraphicsDeviceManager graphics;
        SpriteBatch spriteBatch;

        // Game entities
        Paddle paddle1;
        Paddle paddle2;
        Ball ball;
        Computer computer;           // AI controller for paddle2 in single player
        SpeedPower speedPower;       // power-up: ball speed
        StrobePower strobePower;     // power-up: strobe effect

        int score1 = 0;
        int score2 = 0;

        Texture2D midLine;
        SpriteFont Font1;
        SpriteFont Font2;
        Random Rand;

        // Indices 0-2: main-menu entries; 3-4: end-screen entries. Red marks the selection.
        Color[] menuColor = {Color.White, Color.White, Color.White, Color.White, Color.White};

        SoundEffect paddleSound;
        SoundEffect wallSound;
        SoundEffect scoreSound;

        /// <summary>
        /// Different states for the menu (menu options)
        /// </summary>
        enum MenuState
        {
            SinglePlayer,
            Multiplayer,
            Exit,
        }
        MenuState menuSelect = MenuState.SinglePlayer;

        /// <summary>
        /// Options for the end screen menu
        /// </summary>
        enum EndState
        {
            MainMenu,
            Exit,
        }
        EndState endSelect = EndState.MainMenu;

        // Milliseconds remaining before the menu selection may change again (debounce).
        int menuDelay = 0;

        /// <summary>
        /// enum for the different gamestates
        /// </summary>
        enum GameState
        {
            MainMenu,
            SinglePlayer,
            Multiplayer,
            ScoreSreen,
        }
        GameState currentGamesState = GameState.MainMenu;

        // Gamepad states used to determine button presses
        GamePadState currentGamePadState1;
        GamePadState previousGamePadState1;
        GamePadState currentGamePadState2;
        GamePadState previousGamePadState2;
        #endregion

        public Game1()
        {
            graphics = new GraphicsDeviceManager(this);
            Content.RootDirectory = "Content";
        }

        #region Initialize and Reset
        /// <summary>
        /// Allows the game to perform any initialization it needs to before starting to run.
        /// This is where it can query for any required services and load any non-graphic
        /// related content.  Calling base.Initialize will enumerate through any components
        /// and initialize them as well.
        /// </summary>
        protected override void Initialize()
        {
            // Initialize paddles
            paddle1 = new Paddle();
            paddle2 = new Paddle();

            // Initialize ball
            ball = new Ball();

            // Initialize the computer
            computer = new Computer();

            // Initialize the powerUps
            speedPower = new SpeedPower();
            strobePower = new StrobePower();

            Rand = new Random();

            base.Initialize();
        }

        /// <summary>
        /// Function to reset the game to play again
        /// </summary>
        private void resetGame()
        {
            score1 = 0;
            score2 = 0;

            // Reset the ball: serve toward a random side (-1 or +1) with a random vertical component.
            float direction = Rand.Next(2) * 2 - 1;
            Vector2 ballPosition = new Vector2(GraphicsDevice.Viewport.TitleSafeArea.X + GraphicsDevice.Viewport.TitleSafeArea.Width / 2,
                GraphicsDevice.Viewport.TitleSafeArea.Y + GraphicsDevice.Viewport.TitleSafeArea.Height / 2);
            Vector2 ballDirection = new Vector2(direction, (float)(Rand.NextDouble() * 1.5 - 1));
            // NOTE(review): reset speed is 500 but LoadContent initializes the ball with 1000 —
            // confirm which speed is intended.
            ball.Reset(ballPosition, ballDirection, 500);

            // Reset the powerups
            speedPower.Reset();
            strobePower.Reset();
        }
        #endregion

        #region Load
        /// <summary>
        /// LoadContent will be called once per game and is the place to load
        /// all of your content.
        /// </summary>
        protected override void LoadContent()
        {
            // Create a new SpriteBatch, which can be used to draw textures.
            spriteBatch = new SpriteBatch(GraphicsDevice);

            //graphics.PreferredBackBufferWidth = 400;
            //graphics.PreferredBackBufferHeight = 220;
            //graphics.IsFullScreen = true;
            graphics.ApplyChanges();

            // Load the paddles; 'buffer' is the margin from the screen edge.
            float buffer = 18;
            Vector2 paddlePosition1 = new Vector2(GraphicsDevice.Viewport.TitleSafeArea.X + buffer,
                GraphicsDevice.Viewport.TitleSafeArea.Y + GraphicsDevice.Viewport.TitleSafeArea.Height / 2);
            paddle1.Initialize(Content.Load<Texture2D>("paddle"), paddlePosition1);

            Vector2 paddlePosition2 = new Vector2(GraphicsDevice.Viewport.TitleSafeArea.X + GraphicsDevice.Viewport.TitleSafeArea.Width - buffer,
                GraphicsDevice.Viewport.TitleSafeArea.Y + GraphicsDevice.Viewport.TitleSafeArea.Height / 2);
            paddle2.Initialize(Content.Load<Texture2D>("paddle"), paddlePosition2);

            // Load the ball
            float direction = Rand.Next(2) * 2 - 1;
            Vector2 ballPosition = new Vector2(GraphicsDevice.Viewport.TitleSafeArea.X + GraphicsDevice.Viewport.TitleSafeArea.Width / 2,
                GraphicsDevice.Viewport.TitleSafeArea.Y + GraphicsDevice.Viewport.TitleSafeArea.Height / 2);
            Vector2 ballDirection = new Vector2(direction, (float)(Rand.NextDouble() * 1.5 - 1));
            ball.Initialize(Content.Load<Texture2D>("ball"), ballPosition, ballDirection, 1000);

            // load midline
            midLine = Content.Load<Texture2D>("midLine");

            // load the powerUps; spawnArea keeps them between the paddles and inside the margins.
            Rectangle spawnArea = new Rectangle((int)paddlePosition1.X + paddle1.Width,
                GraphicsDevice.Viewport.Y + (int)buffer,
                GraphicsDevice.Viewport.Width - (int)buffer * 4 - paddle1.Width * 2 - Content.Load<Texture2D>("speed").Width,
                GraphicsDevice.Viewport.Height - (int)buffer * 2 - Content.Load<Texture2D>("speed").Height);
            speedPower.Initialize(Content.Load<Texture2D>("speed3"), Content.Load<SoundEffect>("sound/powerSpeed"), spawnArea);
            strobePower.Initialize(Content.Load<Texture2D>("strobe2"), Content.Load<SoundEffect>("sound/power1"), spawnArea);

            // load the fonts
            Font1 = Content.Load<SpriteFont>("Font1");
            Font2 = Content.Load<SpriteFont>("Font2");

            //load sounds
            paddleSound = Content.Load<SoundEffect>("sound/paddle");
            wallSound = Content.Load<SoundEffect>("sound/wall");
            scoreSound = Content.Load<SoundEffect>("sound/score");
        }

        /// <summary>
        /// UnloadContent will be called once per game and is the place to unload
        /// all content.
        /// </summary>
        protected override void UnloadContent()
        {
            // TODO: Unload any non ContentManager content here
        }
        #endregion

        #region Update
        /// <summary>
        /// Allows the game to run logic such as updating the world,
        /// checking for collisions, gathering input, and playing audio.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        protected override void Update(GameTime gameTime)
        {
            // Track previous/current pad states so A-button edges can be detected.
            previousGamePadState1 = currentGamePadState1;
            previousGamePadState2 = currentGamePadState2;
            currentGamePadState1 = GamePad.GetState(PlayerIndex.One);
            currentGamePadState2 = GamePad.GetState(PlayerIndex.Two);

            switch (currentGamesState)
            {
                #region MainMenu
                case GameState.MainMenu:
                    // Allows the game to exit
                    if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed)
                        this.Exit();

                    MenuState NewMenuSelect = menuSelect;
                    // NOTE(review): resetGame() runs every frame while on the main menu —
                    // harmless but redundant; confirm intended.
                    resetGame();

                    // Thumbstick up/down moves the highlight; A (on release) activates the entry.
                    switch (menuSelect)
                    {
                        case MenuState.SinglePlayer:
                            menuColor[0] = Color.Red;
                            menuColor[1] = Color.White;
                            menuColor[2] = Color.White;
                            if (GamePad.GetState(PlayerIndex.One).ThumbSticks.Left.Y < -0.9)
                            {
                                NewMenuSelect = MenuState.Multiplayer;
                            }
                            else if (currentGamePadState1.Buttons.A == ButtonState.Released && previousGamePadState1.Buttons.A == ButtonState.Pressed)
                            {
                                currentGamesState = GameState.SinglePlayer;
                            }
                            break;
                        case MenuState.Multiplayer:
                            menuColor[0] = Color.White;
                            menuColor[1] = Color.Red;
                            menuColor[2] = Color.White;
                            if (GamePad.GetState(PlayerIndex.One).ThumbSticks.Left.Y > 0.9)
                            {
                                NewMenuSelect = MenuState.SinglePlayer;
                            }
                            else if (GamePad.GetState(PlayerIndex.One).ThumbSticks.Left.Y < -0.9)
                            {
                                NewMenuSelect = MenuState.Exit;
                            }
                            else if (currentGamePadState1.Buttons.A == ButtonState.Released && previousGamePadState1.Buttons.A == ButtonState.Pressed)
                            {
                                currentGamesState = GameState.Multiplayer;
                            }
                            break;
                        case MenuState.Exit:
                            menuColor[0] = Color.White;
                            menuColor[1] = Color.White;
                            menuColor[2] = Color.Red;
                            if (GamePad.GetState(PlayerIndex.One).ThumbSticks.Left.Y > 0.9)
                            {
                                NewMenuSelect = MenuState.Multiplayer;
                            }
                            else if (currentGamePadState1.Buttons.A == ButtonState.Released && previousGamePadState1.Buttons.A == ButtonState.Pressed)
                            {
                                this.Exit();
                            }
                            break;
                    }

                    // Debounce: only commit a selection change after menuDelay ms have elapsed.
                    if (gameTime.ElapsedGameTime.Milliseconds > menuDelay)
                    {
                        menuSelect = NewMenuSelect;
                        menuDelay = 100;
                    }
                    else
                    {
                        menuDelay -= gameTime.ElapsedGameTime.Milliseconds;
                    }
                    break;
                #endregion

                #region SinglePlayer
                case GameState.SinglePlayer:
                    // Paddle2 is driven by the AI, clamped to the visible play field.
                    paddle2.PaddleMoveSpeed = 10f * computer.maxSpeed;
                    paddle2.Update(gameTime, computer.Move(ball, paddle2));
                    paddle2.Position.Y = MathHelper.Clamp(paddle2.Position.Y, 0, GraphicsDevice.Viewport.Height - paddle2.Height);
                    UpdateGame(gameTime);
                    break;
                #endregion

                #region Multiplayer
                case GameState.Multiplayer:
                    // Paddle2 is driven by player two's thumbstick.
                    paddle2.Update(gameTime, currentGamePadState2.ThumbSticks.Left.Y);
                    paddle2.Position.Y = MathHelper.Clamp(paddle2.Position.Y, 0, GraphicsDevice.Viewport.Height - paddle2.Height);
                    UpdateGame(gameTime);
                    break;
                #endregion

                #region ScoreScreen
                case GameState.ScoreSreen:
                    EndState newEndSelect = endSelect;
                    switch (endSelect)
                    {
                        case EndState.MainMenu:
                            menuColor[3] = Color.Red;
                            menuColor[4] = Color.White;
                            if (GamePad.GetState(PlayerIndex.One).ThumbSticks.Left.Y < -0.9)
                            {
                                newEndSelect = EndState.Exit;
                            }
                            else if (currentGamePadState1.Buttons.A == ButtonState.Released && previousGamePadState1.Buttons.A == ButtonState.Pressed)
                            {
                                currentGamesState = GameState.MainMenu;
                            }
                            break;
                        case EndState.Exit:
                            menuColor[3] = Color.White;
                            menuColor[4] = Color.Red;
                            if (GamePad.GetState(PlayerIndex.One).ThumbSticks.Left.Y > 0.9)
                            {
                                newEndSelect = EndState.MainMenu;
                            }
                            else if (currentGamePadState1.Buttons.A == ButtonState.Released && previousGamePadState1.Buttons.A == ButtonState.Pressed)
                            {
                                this.Exit();
                            }
                            break;
                    }

                    // Same debounce scheme as the main menu.
                    if (gameTime.ElapsedGameTime.Milliseconds > menuDelay)
                    {
                        endSelect = newEndSelect;
                        menuDelay = 100;
                    }
                    else
                    {
                        menuDelay -= gameTime.ElapsedGameTime.Milliseconds;
                    }
                    break;
                #endregion
            }

            base.Update(gameTime);
        }

        /// <summary>
        /// function to update the parts of the game loop that are common for singleplayer and multiplayer
        /// </summary>
        /// <param name="gameTime"></param>
        private void UpdateGame(GameTime gameTime)
        {
            paddle1.Update(gameTime, currentGamePadState1.ThumbSticks.Left.Y);
            paddle1.Position.Y = MathHelper.Clamp(paddle1.Position.Y, 0, GraphicsDevice.Viewport.Height - paddle1.Height);

            ball.Update(gameTime, GraphicsDevice.Viewport.Height, wallSound);

            UpdateCollision();
            CheckForScore();

            speedPower.Update(gameTime, Rand);
            strobePower.Update(gameTime, Rand);
        }
        #endregion

        #region Collision and Score
        /// <summary>
        /// check to see if the ball is colliding with anything else and handle the collision
        /// </summary>
        private void UpdateCollision()
        {
            Rectangle ballBox;
            Rectangle paddle1Box;
            Rectangle paddle2Box;
            Rectangle speedBox;
            Rectangle strobeBox;

            // Paddle hit-boxes cover only the quarter of the paddle facing the ball.
            ballBox = new Rectangle((int)ball.Position.X,(int)ball.Position.Y,ball.Width,ball.Height);
            paddle1Box = new Rectangle((int)paddle1.Position.X + paddle1.Width * 3 / 4, (int)paddle1.Position.Y, paddle1.Width / 4, paddle1.Height);
            paddle2Box = new Rectangle((int)paddle2.Position.X, (int)paddle2.Position.Y, paddle2.Width / 4, paddle2.Height);
            speedBox = new Rectangle((int)speedPower.Position.X, (int)speedPower.Position.Y, speedPower.Width, speedPower.Height);
            strobeBox = new Rectangle((int)strobePower.Position.X, (int)strobePower.Position.Y, strobePower.Width, strobePower.Height);

            if (ballBox.Intersects(paddle1Box))
            {
                ball.Collide(true, paddle1.InputSpeed);
                paddleSound.Play();
            }
            if (ballBox.Intersects(paddle2Box))
            {
                ball.Collide(false, paddle2.InputSpeed);
                paddleSound.Play();
            }
            if (ballBox.Intersects(speedBox))
            {
                speedPower.Collide(ball, Rand);
            }
            if (ballBox.Intersects(strobeBox))
            {
                strobePower.Collide(ball);
            }
        }

        /// <summary>
        /// check to see if someone scored by seeing
if the ball has left the screen, update the score appropriately /// </summary> private void CheckForScore() { Rectangle ballBox; Rectangle screenBox; ballBox = new Rectangle((int)ball.Position.X, (int)ball.Position.Y, ball.Width, ball.Height); screenBox = new Rectangle(GraphicsDevice.Viewport.X, GraphicsDevice.Viewport.Y, GraphicsDevice.Viewport.Width, GraphicsDevice.Viewport.Height); if (!ballBox.Intersects(screenBox)) { scoreSound.Play(); float direction = Rand.Next(2) * 2 - 1; Vector2 ballPosition = new Vector2(GraphicsDevice.Viewport.X + GraphicsDevice.Viewport.Width / 2, GraphicsDevice.Viewport.Y + GraphicsDevice.Viewport.Height / 2); if (ball.Position.X < 0) { score2++; direction = 1.0f; if (score2 >= 7) { currentGamesState = GameState.ScoreSreen; } } if (ball.Position.X > GraphicsDevice.Viewport.X + GraphicsDevice.Viewport.Width) { score1++; direction = -1.0f; if (score1 >= 7) { currentGamesState = GameState.ScoreSreen; } } Vector2 ballDirection = new Vector2(direction, (float)(Rand.NextDouble() * 1.5 - 1)); ball.Reset(ballPosition, ballDirection, 500); } } #endregion #region Draw /// <summary> /// This is called when the game should draw itself. 
/// </summary> /// <param name="gameTime">Provides a snapshot of timing values.</param> protected override void Draw(GameTime gameTime) { GraphicsDevice.Clear(Color.Black); spriteBatch.Begin(); switch (currentGamesState) { case GameState.MainMenu: Vector2 origin = Font2.MeasureString("Spin") / 2; spriteBatch.DrawString(Font2, "Spin", new Vector2(GraphicsDevice.Viewport.Width / 2, 100), Color.White, 0f, origin, 1f, SpriteEffects.None, 0.5f); spriteBatch.DrawString(Font1, "Singleplayer", new Vector2(GraphicsDevice.Viewport.Width / 2, 160), menuColor[0], 0f, origin, 1f, SpriteEffects.None, 0.5f); spriteBatch.DrawString(Font1, "Multiplayer", new Vector2(GraphicsDevice.Viewport.Width / 2, 180), menuColor[1], 0f, origin, 1f, SpriteEffects.None, 0.5f); spriteBatch.DrawString(Font1, "Exit", new Vector2(GraphicsDevice.Viewport.Width / 2, 200), menuColor[2], 0f, origin, 1f, SpriteEffects.None, 0.5f); break; case GameState.ScoreSreen: if (score1 > score2) { Vector2 origin1 = Font2.MeasureString("Player One Wins!") / 2; spriteBatch.DrawString(Font2, "Player One Wins!", new Vector2(GraphicsDevice.Viewport.Width / 2, 100), Color.White, 0f, origin1, 1f, SpriteEffects.None, 0.5f); } else { Vector2 origin2 = Font2.MeasureString("Player Two Wins!") / 2; spriteBatch.DrawString(Font2, "Player Two Wins!", new Vector2(GraphicsDevice.Viewport.Width / 2, 100), Color.White, 0f, origin2, 1f, SpriteEffects.None, 0.5f); } spriteBatch.DrawString(Font1, "MainMenu", new Vector2(GraphicsDevice.Viewport.Width / 2 + 50, 200), menuColor[3], 0f, new Vector2(0,0), 1f, SpriteEffects.None, 0.5f); spriteBatch.DrawString(Font1, "Exit", new Vector2(GraphicsDevice.Viewport.Width / 2 + 50, 220), menuColor[4], 0f, new Vector2(0,0), 1f, SpriteEffects.None, 0.5f); DrawGame(); break; case GameState.Multiplayer: case GameState.SinglePlayer: DrawGame(); break; } spriteBatch.End(); base.Draw(gameTime); } private void DrawGame() { Vector2 midLinePos = new Vector2(0, -36); while (midLinePos.Y < 
GraphicsDevice.Viewport.Width) { midLinePos = new Vector2(GraphicsDevice.Viewport.TitleSafeArea.X + GraphicsDevice.Viewport.TitleSafeArea.Width / 2 - Content.Load<Texture2D>("midLine").Width / 2, midLinePos.Y + 48); spriteBatch.Draw(midLine, midLinePos, null, Color.White, 0f, Vector2.Zero, 1f, SpriteEffects.None, 0f); } paddle1.Draw(spriteBatch); paddle2.Draw(spriteBatch); ball.Draw(spriteBatch); speedPower.Draw(spriteBatch); strobePower.Draw(spriteBatch); Vector2 origin1 = Font2.MeasureString(score1.ToString()) / 2; Vector2 origin2 = Font2.MeasureString(score2.ToString()) / 2; spriteBatch.DrawString(Font2, score1.ToString(), new Vector2(GraphicsDevice.Viewport.Width * 7 / 16, GraphicsDevice.Viewport.Height / 10), Color.White, 0f, origin1, 1f, SpriteEffects.None, 0.5f); spriteBatch.DrawString(Font2, score2.ToString(), new Vector2(GraphicsDevice.Viewport.Width * 9 / 16, GraphicsDevice.Viewport.Height / 10), Color.White, 0f, origin1, 1f, SpriteEffects.None, 0.5f); } #endregion } }