context
stringlengths
2.52k
185k
gt
stringclasses
1 value
using System; using HBYW.Utils; using System.Collections.Generic; using UnityEngine; using System.IO; using HBYW.Templates;

namespace HBYW.Data
{
    /// <summary>
    /// IAngDataSource implementation backed by a JSON file, either on disk or
    /// loaded from a Unity Resources folder. All saved properties live in a
    /// single root JSONObject; CommitDS() flushes it to the backing file.
    /// </summary>
    public class AngJSONDataSource : IAngDataSource
    {
        JSONObject jsonObject;              // root object holding all saved properties
        string fileDSN;                     // full file path, or the resource path when use_resources is true
        string dSN;                         // logical datasource (table) name
        bool use_resources = false;         // true when loading via Resources.Load instead of the file system
        string jsonstring = string.Empty;   // raw JSON text cached from the last import

        /// <summary>Logical datasource name (the table/file name).</summary>
        public string DSN
        {
            get { return dSN; }
            set { dSN = value; }
        }

        /// <summary>Creates an in-memory datasource pre-populated from a JSON string.</summary>
        public AngJSONDataSource(string inputstring)
        {
            jsonObject = new JSONObject(JSONObject.Type.OBJECT);
            InputJSON(inputstring, false);
        }

        /// <summary>Creates an empty, unbacked datasource.</summary>
        public AngJSONDataSource()
        {
            jsonObject = new JSONObject(JSONObject.Type.OBJECT);
        }

        /// <summary>Creates a datasource bound to a file, optionally creating the file.</summary>
        public AngJSONDataSource(string path, string table, bool create)
        {
            BuildJSONDataSource(path, table, create);
        }

        /// <summary>Removes every property from the in-memory object.</summary>
        public void Clear()
        {
            jsonObject.Clear();
        }

        /// <summary>
        /// Merges the given JSON text into the in-memory object; when
        /// <paramref name="clear"/> is true the existing content is discarded first.
        /// </summary>
        public void InputJSON(string inputstring, bool clear)
        {
            if (clear)
            {
                jsonObject.Clear();
            }
            jsonObject.Absorb(new JSONObject(inputstring));
        }

        /// <summary>
        /// Binds this datasource to a file (or resource), importing it when it
        /// exists and optionally creating it when it does not. Returns this
        /// instance for chaining.
        /// </summary>
        public AngJSONDataSource BuildJSONDataSource(string path, string table, bool create)
        {
            jsonObject = new JSONObject(JSONObject.Type.OBJECT);
            DSN = table;
            // There is (currently at least) no way to check a resource other than
            // trying to load it, so resources skip the existence check below.
            use_resources = path.Contains(AngDS.TOKEN_RESOURCES);
            if (use_resources)
            {
                fileDSN = DSN;
            }
            else
            {
                fileDSN = LocalPath.CombinePathsPrettily(path, table);
            }

            if (use_resources || CheckDataSource())
            {
                ImportDataSource();
            }
            else if (create)
            {
                Directory.CreateDirectory(path);
                CreateDataSource();
                ImportDataSource();
            }
            return this;
        }

        /// <summary>True when the backing file exists; logs a severe error otherwise.</summary>
        public bool CheckDataSource()
        {
            if (File.Exists(fileDSN))
            {
                return true;
            }
            AngLog.Severe("JSON datasource not found: " + fileDSN);
            return false;
        }

        /// <summary>Returns the names of all top-level properties.</summary>
        public string[] GetProperties()
        {
            return jsonObject.keys.ToArray();
        }

        /// <summary>True when a top-level property with this name exists.</summary>
        public bool PropertyExists(string property)
        {
            return jsonObject.HasField(property);
        }

        public void Save(string property, Vector3 value)
        {
            jsonObject.SetField(property, JSONTemplates.FromVector3(value));
        }

        public void Save(string property, Vector2 value)
        {
            jsonObject.SetField(property, JSONTemplates.FromVector2(value));
        }

        public void Save(string property, bool value)
        {
            jsonObject.SetField(property, value);
        }

        public void Save(string property, float value)
        {
            jsonObject.SetField(property, value);
        }

        public void Save(string property, int value)
        {
            jsonObject.SetField(property, value);
        }

        public void Save(string property, string value)
        {
            jsonObject.SetField(property, value);
        }

        public void Save(string property, Transform value)
        {
            jsonObject.SetField(property, Templates.JSONTemplates.FromTransform(value));
        }

        public void Save(string property, string[] value)
        {
            JSONObject sArray = new JSONObject(JSONObject.Type.ARRAY);
            foreach (string s in value)
            {
                sArray.Add(s);
            }
            jsonObject.SetField(property, sArray);
        }

        public void Save(string property, Color value)
        {
            jsonObject.SetField(property, JSONTemplates.FromColor(value));
        }

        public void Save(string property, Dictionary<string, string> value)
        {
            JSONObject d = new JSONObject(value);
            jsonObject.SetField(property, d);
        }

        public void Save(string property, LayerMask value)
        {
            jsonObject.SetField(property, JSONTemplates.FromLayerMask(value));
        }

        /// <summary>
        /// Writes the current object to the backing file, pretty-printed so the
        /// file stays human readable.
        /// </summary>
        public void CommitDS()
        {
            string outputstring = jsonObject.Print(true); //turn on pretty so we can read the file
            // using guarantees the writer is closed even if Write throws.
            using (StreamWriter sw = new StreamWriter(fileDSN))
            {
                sw.Write(outputstring);
            }
        }

        /// <summary>Returns the current content as pretty-printed JSON text.</summary>
        public string GetJSON()
        {
            return jsonObject.Print(true);
        }

        /// <summary>Deletes the backing file. Does not clear the in-memory object.</summary>
        public void DeleteDataSource()
        {
            File.Delete(fileDSN);
        }

        /// <summary>
        /// Loads the backing file (or resource) into the in-memory object.
        /// Returns true when an object with at least one key was imported.
        /// BUG FIX: the original treated an already-cached jsonstring as a
        /// failure (logging "CheckDataSource failed" and returning false);
        /// cached text is now re-parsed instead.
        /// </summary>
        public bool ImportDataSource()
        {
            if (jsonstring == string.Empty)
            {
                if (!use_resources && !CheckDataSource())
                {
                    AngLog.Severe("AngJSONDataSource.ImportDataSource(): CheckDataSource failed.");
                    return false;
                }
                if (use_resources)
                {
                    TextAsset loaded_resource = Resources.Load<TextAsset>(fileDSN);
                    if (loaded_resource == null || loaded_resource.text == "")
                    {
                        AngLog.Severe("AngJSONDataSource.ImportDataSource(): Unable to load datasource " + fileDSN + " from resources!");
                        return false;
                    }
                    jsonstring = loaded_resource.text;
                }
                else
                {
                    // using guarantees the reader is closed even if ReadToEnd throws.
                    using (StreamReader sr = new StreamReader(fileDSN))
                    {
                        jsonstring = sr.ReadToEnd();
                    }
                }
            }
            // Rebuild the root object from the (possibly cached) text.
            jsonObject = new JSONObject(JSONObject.Type.OBJECT);
            jsonObject.Absorb(new JSONObject(jsonstring));
            // Check that an object with KEYS was imported.
            return jsonObject.keys.Count > 0;
        }

        /// <summary>Creates an empty backing file when none exists yet.</summary>
        public void CreateDataSource()
        {
            if (!CheckDataSource())
            {
                jsonObject = new JSONObject(JSONObject.Type.OBJECT);
                CommitDS();
            }
        }

        /// <summary>
        /// Generic convenience wrapper that dispatches to the matching typed
        /// Save overload. BUG FIX: the original body called Save(property, value),
        /// for which overload resolution selects this generic method itself,
        /// recursing until the stack overflowed. Unsupported types now throw
        /// instead of crashing the process.
        /// </summary>
        public void Save<T>(string property, T value)
        {
            object boxed = value;
            if (boxed is Vector3) Save(property, (Vector3)boxed);
            else if (boxed is Vector2) Save(property, (Vector2)boxed);
            else if (boxed is bool) Save(property, (bool)boxed);
            else if (boxed is float) Save(property, (float)boxed);
            else if (boxed is int) Save(property, (int)boxed);
            else if (boxed is string) Save(property, (string)boxed);
            else if (boxed is Transform) Save(property, (Transform)boxed);
            else if (boxed is string[]) Save(property, (string[])boxed);
            else if (boxed is Color) Save(property, (Color)boxed);
            else if (boxed is Dictionary<string, string>) Save(property, (Dictionary<string, string>)boxed);
            else if (boxed is LayerMask) Save(property, (LayerMask)boxed);
            else throw new NotSupportedException("AngJSONDataSource.Save<T>: unsupported type " + typeof(T));
        }

        // NOTE(review): the typed Load methods below (except LoadString and
        // LoadLayerMask) do not check PropertyExists; a missing property will
        // fault. Preserved as-is since callers may rely on the exception.

        public float LoadFloat(string property)
        {
            return jsonObject[property].f;
        }

        public int LoadInt(string property)
        {
            return (int)jsonObject[property].n;
        }

        public bool LoadBool(string property)
        {
            return jsonObject[property].b;
        }

        public string LoadString(string property)
        {
            if (PropertyExists(property))
                return jsonObject[property].str;
            else
                return null;
        }

        public Vector3 LoadVector3(string property)
        {
            return JSONTemplates.ToVector3(jsonObject[property]);
        }

        public Vector2 LoadVector2(string property)
        {
            return JSONTemplates.ToVector2(jsonObject[property]);
        }

        public void LoadTransform(string property, Transform t)
        {
            Templates.JSONTemplates.ToTransform(jsonObject[property], t);
        }

        public string[] LoadStringArr(string property)
        {
            List<string> templist = new List<string>();
            JSONObject tempobj = jsonObject[property];
            if (tempobj != null && tempobj.list.Count > 0)
            {
                foreach (JSONObject obj in tempobj.list)
                {
                    templist.Add(obj.str);
                }
            }
            return templist.ToArray();
        }

        public Color LoadColor(string property)
        {
            return JSONTemplates.ToColor(jsonObject[property]);
        }

        public LayerMask LoadLayerMask(string property)
        {
            JSONObject j = jsonObject[property];
            if (j != null)
            {
                return JSONTemplates.ToLayerMask(j);
            }
            else
                return (LayerMask)0;
        }

        public void LoadDictionary(string property, out Dictionary<string, string> dict)
        {
            string val = jsonObject[property].ToString();
            dict = new JSONObject(val).ToDictionary();
        }
    }
}
/*
 * REST API Documentation for the MOTI School Bus Application
 *
 * The School Bus application tracks that inspections are performed in a timely fashion. For each school bus the application tracks information about the bus (including data from ICBC, NSC, etc.), it's past and next inspection dates and results, contacts, and the inspector responsible for next inspecting the bus.
 *
 * OpenAPI spec version: v1
 *
 */

using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using System.ComponentModel.DataAnnotations.Schema;

namespace SchoolBusAPI.Models
{
    /// <summary>
    /// A School Bus owner, with links to its primary contact, service area and buses.
    /// </summary>
    public partial class SchoolBusOwner : IEquatable<SchoolBusOwner>
    {
        /// <summary>
        /// Default constructor, required by entity framework
        /// </summary>
        public SchoolBusOwner()
        {
            this.Id = 0;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SchoolBusOwner" /> class.
        /// </summary>
        /// <param name="Id">Primary Key (required).</param>
        /// <param name="Name">The name of the School Bus owner as defined by the user/Inspector. Not tied to the ICBC or NSC names, but whatever is most useful for the Inspectors..</param>
        /// <param name="Status">Status of the School Bus owner - enumerated value Active, Archived.</param>
        /// <param name="DateCreated">The date-time of the creation of the record from the audit fields. Since this might be surfaced in the API, adding it to the definition..</param>
        /// <param name="PrimaryContact">Link to the designated Primary Contact for the Inspector to the School Bus Owner organization..</param>
        /// <param name="ServiceArea">The District to which this School Bus is affliated..</param>
        /// <param name="NextInspectionDate">The calculated next inspection date from across the School Buses associated with this School Bus Owner.</param>
        /// <param name="NumberOfBuses">The calculated count of the number of School Buses associated with this School Bus Owner.</param>
        /// <param name="Contacts">Contacts.</param>
        public SchoolBusOwner(int Id, string Name = null, string Status = null, DateTime? DateCreated = null, SchoolBusOwnerContact PrimaryContact = null, ServiceArea ServiceArea = null, DateTime? NextInspectionDate = null, int? NumberOfBuses = null, List<SchoolBusOwnerContact> Contacts = null)
        {
            this.Id = Id;
            this.Name = Name;
            this.Status = Status;
            this.DateCreated = DateCreated;
            this.PrimaryContact = PrimaryContact;
            this.ServiceArea = ServiceArea;
            this.NextInspectionDate = NextInspectionDate;
            this.NumberOfBuses = NumberOfBuses;
            this.Contacts = Contacts;
        }

        /// <summary>
        /// Primary Key
        /// </summary>
        [MetaDataExtension (Description = "Primary Key")]
        public int Id { get; set; }

        /// <summary>
        /// The name of the School Bus owner as defined by the user/Inspector. Not tied to the ICBC or NSC names, but whatever is most useful for the Inspectors.
        /// </summary>
        [MetaDataExtension (Description = "The name of the School Bus owner as defined by the user/Inspector. Not tied to the ICBC or NSC names, but whatever is most useful for the Inspectors.")]
        public string Name { get; set; }

        /// <summary>
        /// Status of the School Bus owner - enumerated value Active, Archived
        /// </summary>
        [MetaDataExtension (Description = "Status of the School Bus owner - enumerated value Active, Archived")]
        public string Status { get; set; }

        /// <summary>
        /// The date-time of the creation of the record from the audit fields. Since this might be surfaced in the API, adding it to the definition.
        /// </summary>
        [MetaDataExtension (Description = "The date-time of the creation of the record from the audit fields. Since this might be surfaced in the API, adding it to the definition.")]
        public DateTime? DateCreated { get; set; }

        /// <summary>
        /// Link to the designated Primary Contact for the Inspector to the School Bus Owner organization.
        /// </summary>
        [MetaDataExtension (Description = "Link to the designated Primary Contact for the Inspector to the School Bus Owner organization.")]
        public SchoolBusOwnerContact PrimaryContact { get; set; }

        [ForeignKey("PrimaryContact")]
        public int? PrimaryContactRefId { get; set; }

        /// <summary>
        /// The District to which this School Bus is affliated.
        /// </summary>
        [MetaDataExtension (Description = "The District to which this School Bus is affliated.")]
        public ServiceArea ServiceArea { get; set; }

        [ForeignKey("ServiceArea")]
        public int? ServiceAreaRefId { get; set; }

        /// <summary>
        /// The calculated next inspection date from across the School Buses associated with this School Bus Owner
        /// </summary>
        [MetaDataExtension (Description = "The calculated next inspection date from across the School Buses associated with this School Bus Owner")]
        public DateTime? NextInspectionDate { get; set; }

        /// <summary>
        /// The calculated count of the number of School Buses associated with this School Bus Owner
        /// </summary>
        [MetaDataExtension (Description = "The calculated count of the number of School Buses associated with this School Bus Owner")]
        public int? NumberOfBuses { get; set; }

        /// <summary>
        /// Gets or Sets Contacts
        /// </summary>
        public List<SchoolBusOwnerContact> Contacts { get; set; }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class SchoolBusOwner {\n");
            sb.Append("  Id: ").Append(Id).Append("\n");
            sb.Append("  Name: ").Append(Name).Append("\n");
            sb.Append("  Status: ").Append(Status).Append("\n");
            sb.Append("  DateCreated: ").Append(DateCreated).Append("\n");
            sb.Append("  PrimaryContact: ").Append(PrimaryContact).Append("\n");
            sb.Append("  ServiceArea: ").Append(ServiceArea).Append("\n");
            sb.Append("  NextInspectionDate: ").Append(NextInspectionDate).Append("\n");
            sb.Append("  NumberOfBuses: ").Append(NumberOfBuses).Append("\n");
            sb.Append("  Contacts: ").Append(Contacts).Append("\n");
            sb.Append("}\n");
            return sb.ToString();
        }

        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="obj">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object obj)
        {
            if (ReferenceEquals(null, obj)) { return false; }
            if (ReferenceEquals(this, obj)) { return true; }
            if (obj.GetType() != GetType()) { return false; }
            return Equals((SchoolBusOwner)obj);
        }

        /// <summary>
        /// Returns true if SchoolBusOwner instances are equal.
        /// BUG FIX: the original called this.Contacts.SequenceEqual(other.Contacts)
        /// without checking other.Contacts for null, which threw
        /// ArgumentNullException; it also redundantly compared the value-type
        /// Id twice (== followed by Equals).
        /// </summary>
        /// <param name="other">Instance of SchoolBusOwner to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(SchoolBusOwner other)
        {
            if (ReferenceEquals(null, other)) { return false; }
            if (ReferenceEquals(this, other)) { return true; }

            return
                this.Id == other.Id &&
                string.Equals(this.Name, other.Name) &&
                string.Equals(this.Status, other.Status) &&
                Nullable.Equals(this.DateCreated, other.DateCreated) &&
                Equals(this.PrimaryContact, other.PrimaryContact) &&
                Equals(this.ServiceArea, other.ServiceArea) &&
                Nullable.Equals(this.NextInspectionDate, other.NextInspectionDate) &&
                this.NumberOfBuses == other.NumberOfBuses &&
                (
                    ReferenceEquals(this.Contacts, other.Contacts) ||
                    (this.Contacts != null && other.Contacts != null && this.Contacts.SequenceEqual(other.Contacts))
                );
        }

        /// <summary>
        /// Gets the hash code.
        /// BUG FIX: the original guarded the non-nullable int Id with a null
        /// check (always true, compiler warning CS0472); Id is now hashed
        /// unconditionally.
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            // credit: http://stackoverflow.com/a/263416/677735
            unchecked // Overflow is fine, just wrap
            {
                int hash = 41;
                hash = hash * 59 + this.Id.GetHashCode();
                if (this.Name != null) { hash = hash * 59 + this.Name.GetHashCode(); }
                if (this.Status != null) { hash = hash * 59 + this.Status.GetHashCode(); }
                if (this.DateCreated != null) { hash = hash * 59 + this.DateCreated.GetHashCode(); }
                if (this.PrimaryContact != null) { hash = hash * 59 + this.PrimaryContact.GetHashCode(); }
                if (this.ServiceArea != null) { hash = hash * 59 + this.ServiceArea.GetHashCode(); }
                if (this.NextInspectionDate != null) { hash = hash * 59 + this.NextInspectionDate.GetHashCode(); }
                if (this.NumberOfBuses != null) { hash = hash * 59 + this.NumberOfBuses.GetHashCode(); }
                if (this.Contacts != null) { hash = hash * 59 + this.Contacts.GetHashCode(); }
                return hash;
            }
        }

        #region Operators

        public static bool operator ==(SchoolBusOwner left, SchoolBusOwner right)
        {
            return Equals(left, right);
        }

        public static bool operator !=(SchoolBusOwner left, SchoolBusOwner right)
        {
            return !Equals(left, right);
        }

        #endregion Operators
    }
}
#region License // Copyright (c) 2010-2019, Mark Final // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of BuildAMation nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#endregion // License

namespace EmbedStaticIntoDynamicLibrary
{
    /// <summary>
    /// Compiles the shared static-library sources as C object files so they
    /// can be embedded directly into the C dynamic library.
    /// </summary>
    class CProxyForStaticLibrary : C.CObjectFileCollection
    {
        protected override void Init()
        {
            base.Init();
            this.AddFiles("$(packagedir)/source/static/*.c");
            this.PublicPatch((settings, appliedTo) =>
            {
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.IncludePaths.AddUnique(this.CreateTokenizedString("$(packagedir)/include/static"));
                }
            });
            this.PrivatePatch(settings =>
            {
                // Pattern match (not an unchecked 'as' cast, which could NRE)
                // for consistency with the PublicPatch above.
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.PreprocessorDefines.Add("STATICLIB_SOURCE");
                    // the source files are shared for both C and C++ compilation
                    // but this option will only be set when compiled as C - the preprocessor checks this
                    preprocessor.PreprocessorDefines.Add("COMPILE_AS_C");
                }
            });
        }
    }

    /// <summary>
    /// Compiles the shared static-library sources as C++ object files so they
    /// can be embedded directly into the C++ dynamic library.
    /// </summary>
    class CxxProxyForStaticLibrary : C.Cxx.ObjectFileCollection
    {
        protected override void Init()
        {
            base.Init();
            this.AddFiles("$(packagedir)/source/static/*.c");
            this.PublicPatch((settings, appliedTo) =>
            {
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.IncludePaths.AddUnique(this.CreateTokenizedString("$(packagedir)/include/static"));
                }
            });
            this.PrivatePatch(settings =>
            {
                // Pattern match (not an unchecked 'as' cast, which could NRE)
                // for consistency with the PublicPatch above.
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.PreprocessorDefines.Add("STATICLIB_SOURCE");
                    // the source files are shared for both C and C++ compilation
                    // but this option will only be set when compiled as C++ - the preprocessor checks this
                    preprocessor.PreprocessorDefines.Add("COMPILE_AS_CXX");
                }
            });
        }
    }

    /// <summary>
    /// C dynamic library that embeds the static-library object files.
    /// </summary>
    sealed class CDynamicLibrary : C.DynamicLibrary
    {
        protected override void Init()
        {
            base.Init();
            this.SetSemanticVersion(Bam.Core.Graph.Instance.ProcessState as Bam.Core.ISemanticVersion);
            this.Macros["Description"] = Bam.Core.TokenizedString.CreateVerbatim("EmbedStaticIntoDynamicLibrary: Example C dynamic library");
            var source = this.CreateCSourceCollection("$(packagedir)/source/dynamic/*.c");
            source.PrivatePatch(settings =>
            {
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.PreprocessorDefines.Add("DYNAMICLIB_SOURCE");
                }
            });
            // publicly because the app requires the include path from the dependent
            this.ExtendSourcePublicly<CProxyForStaticLibrary>(source);
            this.PublicPatch((settings, appliedTo) =>
            {
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.IncludePaths.AddUnique(this.CreateTokenizedString("$(packagedir)/include/dynamic"));
                }
            });
        }
    }

    /// <summary>
    /// C++ dynamic library that embeds the static-library object files.
    /// </summary>
    sealed class CxxDynamicLibrary : C.Cxx.DynamicLibrary
    {
        protected override void Init()
        {
            base.Init();
            this.SetSemanticVersion(Bam.Core.Graph.Instance.ProcessState as Bam.Core.ISemanticVersion);
            this.Macros["Description"] = Bam.Core.TokenizedString.CreateVerbatim("EmbedStaticIntoDynamicLibrary: Example C++ dynamic library");
            var source = this.CreateCxxSourceCollection("$(packagedir)/source/dynamic/*.c");
            source.PrivatePatch(settings =>
            {
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.PreprocessorDefines.Add("DYNAMICLIB_SOURCE");
                }
            });
            // publicly because the app requires the include path from the dependent
            this.ExtendSourcePublicly<CxxProxyForStaticLibrary>(source);
            this.PublicPatch((settings, appliedTo) =>
            {
                if (settings is C.ICommonPreprocessorSettings preprocessor)
                {
                    preprocessor.IncludePaths.AddUnique(this.CreateTokenizedString("$(packagedir)/include/dynamic"));
                }
            });
        }
    }

    /// <summary>
    /// Console application linking against the C dynamic library.
    /// </summary>
    class CApp : C.ConsoleApplication
    {
        protected override void Init()
        {
            base.Init();
            var source = this.CreateCSourceCollection("$(packagedir)/source/app/*.c");
            this.CompileAndLinkAgainst<CDynamicLibrary>(source);
            if (this.Linker is GccCommon.LinkerBase)
            {
                this.PrivatePatch(settings =>
                {
                    // $ORIGIN rpath lets the app find the library next to itself.
                    var gccLinker = settings as GccCommon.ICommonLinkerSettings;
                    gccLinker.CanUseOrigin = true;
                    gccLinker.RPath.AddUnique("$ORIGIN");
                });
            }
        }
    }

    /// <summary>
    /// Console application linking against the C++ dynamic library.
    /// </summary>
    class CxxApp : C.Cxx.ConsoleApplication
    {
        protected override void Init()
        {
            base.Init();
            var source = this.CreateCxxSourceCollection("$(packagedir)/source/app/*.c");
            this.CompileAndLinkAgainst<CxxDynamicLibrary>(source);
            if (this.Linker is GccCommon.LinkerBase)
            {
                this.PrivatePatch(settings =>
                {
                    // $ORIGIN rpath lets the app find the library next to itself.
                    var gccLinker = settings as GccCommon.ICommonLinkerSettings;
                    gccLinker.CanUseOrigin = true;
                    gccLinker.RPath.AddUnique("$ORIGIN");
                });
            }
        }
    }

    /// <summary>Publishes the C application and its runtime dependencies.</summary>
    sealed class CAppRuntime : Publisher.Collation
    {
        protected override void Init()
        {
            base.Init();
            this.SetDefaultMacrosAndMappings(EPublishingType.ConsoleApplication);
            this.Include<CApp>(C.ConsoleApplication.ExecutableKey);
        }
    }

    /// <summary>Publishes the C++ application and its runtime dependencies.</summary>
    sealed class CxxAppRuntime : Publisher.Collation
    {
        protected override void Init()
        {
            base.Init();
            this.SetDefaultMacrosAndMappings(EPublishingType.ConsoleApplication);
            this.Include<CxxApp>(C.Cxx.ConsoleApplication.ExecutableKey);
        }
    }
}
#region Disclaimer/Info

///////////////////////////////////////////////////////////////////////////////////////////////////
// Subtext WebLog
// 
// Subtext is an open source weblog system that is a fork of the .TEXT
// weblog system.
//
// For updated news and information please visit http://subtextproject.com/
// Subtext is hosted at Google Code at http://code.google.com/p/subtext/
// The development mailing list is at subtext@googlegroups.com 
//
// This project is licensed under the BSD license.  See the License.txt file for more information.
///////////////////////////////////////////////////////////////////////////////////////////////////
#endregion

using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using Subtext.Framework.Configuration;
using Subtext.Framework.Routing;
using Subtext.Framework.Text;
using Subtext.Framework.Tracking;

namespace Subtext.Framework.Syndication
{
    /// <summary>
    /// Abstract base class used to write RSS feeds.
    /// </summary>
    public abstract class GenericRssWriter<T> : BaseSyndicationWriter<T>
    {
        private bool _isBuilt; // guards against building the document twice

        protected GenericRssWriter(TextWriter writer, DateTime dateLastViewedFeedItemPublished, bool useDeltaEncoding,
                                   ISubtextContext context)
            : base(writer, dateLastViewedFeedItemPublished, useDeltaEncoding, context)
        {
        }

        /// <summary>
        /// Builds the RSS feed.
        /// </summary>
        protected override void Build()
        {
            Build(DateLastViewedFeedItemPublishedUtc);
        }

        /// <summary>
        /// Builds the specified last id viewed.
        /// </summary>
        /// <param name="dateLastViewedFeedItemPublished">Last id viewed.</param>
        protected override void Build(DateTime dateLastViewedFeedItemPublished)
        {
            if (!_isBuilt)
            {
                StartDocument();
                SetNamespaces();
                StartChannel();
                WriteChannel();
                WriteEntries();
                EndChannel();
                EndDocument();
                _isBuilt = true;
            }
        }

        /// <summary>
        /// Sets the namespaces used within the RSS feed.
        /// </summary>
        protected virtual void SetNamespaces()
        {
            WriteAttributeString("xmlns:dc", "http://purl.org/dc/elements/1.1/");
            WriteAttributeString("xmlns:trackback", "http://madskills.com/public/xml/rss/module/trackback/");
            WriteAttributeString("xmlns:wfw", "http://wellformedweb.org/CommentAPI/");
            WriteAttributeString("xmlns:slash", "http://purl.org/rss/1.0/modules/slash/");

            // Copyright notice
            WriteAttributeString("xmlns:copyright", "http://blogs.law.harvard.edu/tech/rss");

            if (!string.IsNullOrEmpty(Blog.LicenseUrl))
            {
                // Used to specify a license. Does not have to be a creative commons license.
                // see http://backend.userland.com/creativeCommonsRssModule
                WriteAttributeString("xmlns:creativeCommons", "http://backend.userland.com/creativeCommonsRssModule");
            }
            // Similar to a favicon image.
            WriteAttributeString("xmlns:image", "http://purl.org/rss/1.0/modules/image/");
        }

        /// <summary>
        /// Starts the RSS document.
        /// </summary>
        protected virtual void StartDocument()
        {
            WriteStartElement("rss");
            WriteAttributeString("version", "2.0");
        }

        /// <summary>
        /// Ends the document.
        /// </summary>
        protected void EndDocument()
        {
            WriteEndElement();
        }

        /// <summary>
        /// Writes the channel Start element.
        /// </summary>
        protected void StartChannel()
        {
            WriteStartElement("channel");
        }

        /// <summary>
        /// Writes the channel.
        /// </summary>
        protected virtual void WriteChannel()
        {
            var blogUrl = new Uri(UrlHelper.BlogUrl().ToFullyQualifiedUrl(Blog), "Default.aspx");
            var image = new RssImageElement(GetRssImage(), Blog.Title, blogUrl, 77, 60, null);
            BuildChannel(Blog.Title, blogUrl, Blog.Email, Blog.SubTitle, Blog.Language, Blog.Author, Blog.LicenseUrl,
                         image);
        }

        /// <summary>
        /// Returns the image that will be displayed in an RSS aggregator that supports RSS images.
        /// </summary>
        /// <returns></returns>
        public virtual Uri GetRssImage()
        {
            VirtualPath url = UrlHelper.ImageUrl("RSS2Image.gif");
            return url.ToFullyQualifiedUrl(Blog);
        }

        /// <summary>
        /// Builds the RSS channel starting XML section.
        /// </summary>
        /// <param name="title">The title.</param>
        /// <param name="url">The url.</param>
        /// <param name="authorEmail">The author email.</param>
        /// <param name="description">The description.</param>
        /// <param name="lang">The lang.</param>
        /// <param name="copyright">The copyright.</param>
        /// <param name="cclicense">The cclicense.</param>
        protected void BuildChannel(string title, Uri url, string authorEmail, string description, string lang,
                                    string copyright, string cclicense)
        {
            BuildChannel(title, url, authorEmail, description, lang, copyright, cclicense, null);
        }

        /// <summary>
        /// Builds the RSS channel starting XML section.
        /// </summary>
        /// <param name="title">The title.</param>
        /// <param name="url">The url.</param>
        /// <param name="authorEmail">The author email.</param>
        /// <param name="description">The description.</param>
        /// <param name="lang">The lang.</param>
        /// <param name="copyright">The copyright.</param>
        /// <param name="cclicense">The cclicense.</param>
        /// <param name="image">An optional sub-element of channel for rendering an image for the channel.</param>
        protected void BuildChannel(string title, Uri url, string authorEmail, string description, string lang,
                                    string copyright, string cclicense, RssImageElement image)
        {
            //Required Channel Elements
            WriteElementString("title", HtmlHelper.RemoveHtml(title));
            WriteElementString("link", url.ToString());
            WriteElementString("description", HtmlHelper.RemoveHtml(description));

            //Optional Channel Elements
            WriteElementString("language", lang);
            //TODO: Implement this element.
            WriteElementString("copyright", copyright);

            // Char-based IndexOf is an ordinal search (the string overload is
            // culture-sensitive, CA1307) - an email address is machine data.
            if (!string.IsNullOrEmpty(authorEmail)
                && authorEmail.IndexOf('@') > 0
                && authorEmail.IndexOf('.') > 0
                && Blog.ShowEmailAddressInRss)
            {
                WriteElementString("managingEditor", authorEmail);
            }

            //TODO: <category>One or more categories</category>
            WriteElementString("generator", VersionInfo.VersionDisplayText);

            if (!string.IsNullOrEmpty(cclicense))
            {
                WriteElementString("creativeCommons:license", cclicense);
            }

            if (image != null)
            {
                image.WriteToXmlWriter(this);
            }
        }

        protected void EndChannel()
        {
            WriteEndElement();
        }

        private void WriteEntries()
        {
            BlogConfigurationSettings settings = Config.Settings;
            ClientHasAllFeedItems = true;
            LatestPublishDateUtc = DateLastViewedFeedItemPublishedUtc;

            foreach (T entry in Items)
            {
                // Hoisted: GetSyndicationDate may be arbitrarily expensive in subclasses.
                DateTime syndicationDate = GetSyndicationDate(entry);
                if (UseDeltaEncoding && syndicationDate <= DateLastViewedFeedItemPublishedUtc)
                {
                    // Since Entries are ordered by DatePublished descending, as soon
                    // as we encounter one that is smaller than or equal to
                    // one the client has already seen, we're done as we
                    // know the client already has the rest of the items in
                    // the collection.
                    return;
                }

                // If we're here, we know that entry.EntryId is larger than
                // the LastViewedFeedItemId.  Thus we can send it.
                WriteStartElement("item");
                EntryXml(entry, settings);
                WriteEndElement();
                if (syndicationDate > LatestPublishDateUtc)
                {
                    LatestPublishDateUtc = syndicationDate;
                }

                ClientHasAllFeedItems = false;
            }
        }

        protected virtual string GetGuid(T item)
        {
            return GetLinkFromItem(item);
        }

        /// <summary>
        /// Writes the XML for a single entry.
        /// </summary>
        protected virtual void EntryXml(T item, BlogConfigurationSettings settings)
        {
            //core
            WriteElementString("title", GetTitleFromItem(item));

            ICollection<string> categories = GetCategoriesFromItem(item);
            if (categories != null)
            {
                foreach (string category in categories)
                {
                    WriteElementString("category", category);
                }
            }

            string fullUrl = GetLinkFromItem(item);
            WriteElementString("link", fullUrl);

            WriteElementString
                (
                    "description", //Tag
                    string.Format
                        (
                            CultureInfo.InvariantCulture, "{0}{1}", //tag def
                            GetBodyFromItem(item),
                            (UseAggBugs && settings.Tracking.EnableAggBugs)
                                ? TrackingUrls.AggBugImage(GetAggBugUrl(item))
                                : null //use aggbugs
                        )
                );

            string author = GetAuthorFromItem(item);
            if (!String.IsNullOrEmpty(author))
            {
                WriteElementString("dc:creator", author);
            }

            WriteElementString("guid", GetGuid(item));
            WriteElementString("pubDate", GetPublishedDateUtc(item).ToString("r", CultureInfo.InvariantCulture));

            if (ItemCouldContainComments(item))
            {
                if (AllowComments && Blog.CommentsEnabled && ItemAllowsComments(item) && !CommentsClosedOnItem(item))
                {
                    // Comment API (http://wellformedweb.org/story/9)
                    WriteElementString("wfw:comment", GetCommentApiUrl(item));
                }

                WriteElementString("comments", fullUrl + "#feedback");

                if (GetFeedbackCount(item) > 0)
                {
                    WriteElementString("slash:comments", GetFeedbackCount(item).ToString(CultureInfo.InvariantCulture));
                }

                WriteElementString("wfw:commentRss", GetCommentRssUrl(item));

                if (Blog.TrackbacksEnabled)
                {
                    WriteElementString("trackback:ping", GetTrackBackUrl(item));
                }
            }

            EnclosureItem encItem = GetEnclosureFromItem(item);
            if (encItem != null)
            {
                WriteStartElement("enclosure");
                WriteAttributeString("url", encItem.Url);
                WriteAttributeString("length", encItem.Size.ToString(CultureInfo.InvariantCulture));
                WriteAttributeString("type", encItem.MimeType);
                WriteEndElement();
            }
        }

        protected abstract string GetCommentRssUrl(T item);
        protected abstract string GetTrackBackUrl(T item);
        protected abstract string GetCommentApiUrl(T item);
        protected abstract string GetAggBugUrl(T item);

        /// <summary>
        /// Gets the categories from entry.
        /// </summary>
        /// <param name="item">The entry.</param>
        /// <returns></returns>
        protected abstract ICollection<string> GetCategoriesFromItem(T item);

        /// <summary>
        /// Gets the title from item.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract string GetTitleFromItem(T item);

        /// <summary>
        /// Gets the link from item.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract string GetLinkFromItem(T item);

        /// <summary>
        /// Gets the body from item.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract string GetBodyFromItem(T item);

        /// <summary>
        /// Gets the author from item.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract string GetAuthorFromItem(T item);

        /// <summary>
        /// Gets the publish date from item.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract DateTime GetPublishedDateUtc(T item);

        /// <summary>
        /// Returns true if the Item could contain comments.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract bool ItemCouldContainComments(T item);

        /// <summary>
        /// Returns true if the item allows comments, otherwise false.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract bool ItemAllowsComments(T item);

        /// <summary>
        /// Returns true if comments are closed, otherwise false.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract bool CommentsClosedOnItem(T item);

        /// <summary>
        /// Gets the feedback count for the item.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract int GetFeedbackCount(T item);

        /// <summary>
        /// Obtains the syndication date for the specified entry, since
        /// we don't necessarily know if the type has that field, we
        /// can delegate this to the inheriting class.
        /// </summary>
        /// <param name="item"></param>
        /// <returns></returns>
        protected abstract DateTime GetSyndicationDate(T item);

        /// <summary>
        /// Gets the enclosure for the item.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns></returns>
        protected abstract EnclosureItem GetEnclosureFromItem(T item);

        #region Nested type: EnclosureItem

        protected class EnclosureItem
        {
            public string MimeType { get; set; }
            public long Size { get; set; }
            public string Url { get; set; }
        }

        #endregion
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Insights
{
    using System.Linq;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// EventsOperations operations.
    /// </summary>
    // NOTE(review): AutoRest-generated client. Do not hand-edit logic here — changes
    // will be overwritten on regeneration (see file header). Names are fully qualified
    // by the generator on purpose, to avoid collisions with user namespaces.
    internal partial class EventsOperations : Microsoft.Rest.IServiceOperations<InsightsClient>, IEventsOperations
    {
        /// <summary>
        /// Initializes a new instance of the EventsOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        internal EventsOperations(InsightsClient client)
        {
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            this.Client = client;
        }

        /// <summary>
        /// Gets a reference to the InsightsClient
        /// </summary>
        public InsightsClient Client { get; private set; }

        /// <summary>
        /// Provides the list of events.
        /// </summary>
        /// <param name='odataQuery'>
        /// OData parameters to apply to the operation.
        /// </param>
        /// <param name='select'>
        /// The list of property names to be returned. Possible values are:
        /// authorization, channels, claims, correlationId, description, eventDataId,
        /// eventName, eventTimestamp, httpRequest, level, operationId,
        /// operationName, properties, resourceGroupName, resourceProviderName,
        /// resourceId, status, submissionTimestamp, subStatus, subscriptionId
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<EventData>>> ListWithHttpMessagesAsync(Microsoft.Rest.Azure.OData.ODataQuery<EventData> odataQuery = default(Microsoft.Rest.Azure.OData.ODataQuery<EventData>), string select = default(string), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
        {
            if (this.Client.SubscriptionId == null)
            {
                throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            // Service API version this client was generated against.
            string apiVersion = "2015-04-01";
            // Tracing
            bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
                System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
                tracingParameters.Add("odataQuery", odataQuery);
                tracingParameters.Add("apiVersion", apiVersion);
                tracingParameters.Add("select", select);
                tracingParameters.Add("cancellationToken", cancellationToken);
                Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
            }
            // Construct URL
            var _baseUrl = this.Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/microsoft.insights/eventtypes/management/values").ToString();
            _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
            System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
            if (odataQuery != null)
            {
                // ODataQuery.ToString() renders the $filter/$top/etc. expression directly.
                var _odataFilter = odataQuery.ToString();
                if (!string.IsNullOrEmpty(_odataFilter))
                {
                    _queryParameters.Add(_odataFilter);
                }
            }
            if (apiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
            }
            if (select != null)
            {
                _queryParameters.Add(string.Format("$select={0}", System.Uri.EscapeDataString(select)));
            }
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
            System.Net.Http.HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                // Caller-supplied headers override any defaults set above.
                foreach (var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            // GET carries no body; the variable exists so the exception wrappers below
            // can always reference the (possibly null) request content.
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new Microsoft.Rest.Azure.CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    // Try to upgrade the generic error to the service's structured CloudError.
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new Microsoft.Rest.Azure.CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (Newtonsoft.Json.JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<EventData>>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<EventData>>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (Newtonsoft.Json.JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// Provides the list of events.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<EventData>>> ListNextWithHttpMessagesAsync(string nextPageLink, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
        {
            if (nextPageLink == null)
            {
                throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "nextPageLink");
            }
            // Tracing
            bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
                System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
                tracingParameters.Add("nextPageLink", nextPageLink);
                tracingParameters.Add("cancellationToken", cancellationToken);
                Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
            }
            // Construct URL
            // nextPageLink is already an absolute URL returned by the service.
            string _url = "{nextLink}";
            _url = _url.Replace("{nextLink}", nextPageLink);
            // Generated boilerplate: the list stays empty for this operation, so the
            // branch below never fires.
            System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
            System.Net.Http.HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach (var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new Microsoft.Rest.Azure.CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new Microsoft.Rest.Azure.CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (Newtonsoft.Json.JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<EventData>>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<EventData>>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (Newtonsoft.Json.JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;

namespace JitTest
{
    // JIT regression test: verifies that checked ulong multiplication raises
    // OverflowException exactly when the mathematical product exceeds 64 bits.
    internal class Test
    {
        // Operands are stored in statics before each multiply — presumably to keep
        // the JIT from constant-folding the checked multiply away (TODO confirm;
        // the fields are never read back in this file).
        private static ulong s_A, s_B;

        // Runs one (a, b) pair through checked multiplication and cross-checks the
        // outcome. Throws a plain Exception if the JIT's overflow behavior is wrong.
        private static void testNumbers(ulong a, ulong b)
        {
            s_A = a;
            s_B = b;
            ulong c = 0;
            try
            {
                c = checked(a * b);
            }
            catch (OverflowException)
            {
                // The multiply reported overflow. Re-derive the product from 32-bit
                // halves (a = AH:AL, b = BH:BL) to confirm the overflow was genuine.
                ulong AH = a >> 32;
                ulong AL = a & 0xffffffff;
                ulong BH = b >> 32;
                ulong BL = b & 0xffffffff;
                // Sanity-check the decomposition itself.
                if (checked(AH * 0x100000000 + AL) != a)
                    throw new Exception();
                if (checked(BH * 0x100000000 + BL) != b)
                    throw new Exception();
                if (AH == 0 || BH == 0)
                {
                    // With at most one non-zero high word, a*b = (AL*BH + AH*BL)<<32 + AL*BL.
                    // If that whole expression fits in 64 bits, the reported overflow
                    // was spurious — fail the test.
                    ulong sum = checked(AL * BH + AH * BL);
                    if (sum < 0x100000000)
                    {
                        sum = checked(sum * 0x100000000);
                        if (checked(AL * BL) <= checked(0xffffffffffffffff - sum))
                            throw new Exception();
                    }
                }
                return;
            }
            // No overflow reported: verify the product by dividing back out.
            // Division by zero is expected (and tolerated) only when the operand is 0.
            try
            {
                if (c / b != a)
                    throw new Exception();
            }
            catch (DivideByZeroException)
            {
                if (b != 0)
                    throw new Exception();
            }
            try
            {
                if (c / a != b)
                    throw new Exception();
            }
            catch (DivideByZeroException)
            {
                if (a != 0)
                    throw new Exception();
            }
        }

        // Exercises testNumbers over operand pairs chosen to cover every combination
        // of operand magnitudes (byte/word/dword/qword ranges), straddling each
        // 32-bit-boundary overflow case. Returns 100 on success (test-harness
        // convention), 1 on an unexpected DivideByZeroException.
        private static int Main()
        {
            try
            {
                // 1-byte x {1,2,4,8}-byte operands
                testNumbers(0x0000000000000009, 0x00000000000000b8);
                testNumbers(0x0000000000000009, 0x00000000000000f9);
                testNumbers(0x000000000000006e, 0x0000000000000093);
                testNumbers(0x000000000000001e, 0x0000000000000086);
                testNumbers(0x00000000000000cc, 0x000000000000583f);
                testNumbers(0x00000000000000c9, 0x000000000000a94c);
                testNumbers(0x0000000000000054, 0x0000000000002d06);
                testNumbers(0x0000000000000030, 0x0000000000009921);
                testNumbers(0x000000000000001d, 0x0000000000450842);
                testNumbers(0x000000000000002a, 0x0000000000999f6c);
                testNumbers(0x00000000000000c5, 0x000000000090faa7);
                testNumbers(0x0000000000000050, 0x000000000069de08);
                testNumbers(0x000000000000009a, 0x000000000cd715be);
                testNumbers(0x0000000000000039, 0x0000000016a61eb5);
                testNumbers(0x00000000000000e0, 0x0000000095575fef);
                testNumbers(0x0000000000000093, 0x00000000209e58c5);
                testNumbers(0x000000000000003b, 0x0000000c3c34b48c);
                testNumbers(0x00000000000000c2, 0x0000006a671c470f);
                testNumbers(0x000000000000004b, 0x000000f538cede2b);
                testNumbers(0x0000000000000099, 0x0000005ba885d43b);
                testNumbers(0x0000000000000068, 0x00009f692f98ac45);
                testNumbers(0x00000000000000d9, 0x00008d5eaa7f0a8e);
                testNumbers(0x00000000000000ac, 0x0000ba1316512e4c);
                testNumbers(0x000000000000001c, 0x00008c4fbf2f14aa);
                testNumbers(0x00000000000000c0, 0x0069a9eb9a9bc822);
                testNumbers(0x0000000000000074, 0x003f8f5a893de200);
                testNumbers(0x0000000000000027, 0x000650eb1747a5bc);
                testNumbers(0x00000000000000d9, 0x00d3d50809c70fda);
                testNumbers(0x00000000000000c0, 0xac6556a4ca94513e);
                testNumbers(0x0000000000000020, 0xa697fcbfd6d232d1);
                testNumbers(0x000000000000009c, 0xc4421a4f5147b9b8);
                testNumbers(0x000000000000009e, 0xc5ef494112a7b33f);
                // 2-byte x {1,2,4,8}-byte operands
                testNumbers(0x000000000000f7fa, 0x00000000000000af);
                testNumbers(0x000000000000ad17, 0x00000000000000e8);
                testNumbers(0x000000000000c9c4, 0x0000000000000045);
                testNumbers(0x000000000000a704, 0x0000000000000012);
                testNumbers(0x000000000000c55b, 0x000000000000a33a);
                testNumbers(0x000000000000ab88, 0x0000000000009a3c);
                testNumbers(0x000000000000a539, 0x000000000000cf3a);
                testNumbers(0x0000000000005890, 0x000000000000eec8);
                testNumbers(0x000000000000e9e2, 0x0000000000fe7c46);
                testNumbers(0x0000000000007303, 0x0000000000419f2a);
                testNumbers(0x000000000000e105, 0x000000000013f913);
                testNumbers(0x0000000000008191, 0x0000000000fa2458);
                testNumbers(0x00000000000006d9, 0x0000000091cf14f7);
                testNumbers(0x000000000000bdb1, 0x0000000086c2a97c);
                testNumbers(0x000000000000e905, 0x0000000064f702f4);
                testNumbers(0x0000000000002fdc, 0x00000000f059caf6);
                testNumbers(0x000000000000f8fd, 0x00000013f0265b1e);
                testNumbers(0x000000000000e8b8, 0x0000000aa69a6308);
                testNumbers(0x0000000000003d00, 0x000000fbcb67879b);
                testNumbers(0x000000000000aa46, 0x00000085c3d371d5);
                testNumbers(0x0000000000005f60, 0x000008cde4a63203);
                testNumbers(0x00000000000092b5, 0x00007ca86ba2f30e);
                testNumbers(0x00000000000093c6, 0x0000a2d73fc4eac0);
                testNumbers(0x0000000000004156, 0x000006dbd08f2fda);
                testNumbers(0x0000000000004597, 0x006cfb0ba5962826);
                testNumbers(0x0000000000006bac, 0x001e79315071480f);
                testNumbers(0x0000000000002c3a, 0x0092f12cbd82df69);
                testNumbers(0x0000000000009859, 0x00b0f0cd9dc019f2);
                testNumbers(0x000000000000b37f, 0x4966447d15850076);
                testNumbers(0x0000000000005e34, 0x7c1869c9ed2cad38);
                testNumbers(0x0000000000005c54, 0x7cee70ee82837a08);
                testNumbers(0x000000000000967f, 0x4eb98adf4b8b0d32);
                // 3-byte x {1..8}-byte operands
                testNumbers(0x0000000000fd2919, 0x000000000000005d);
                testNumbers(0x0000000000abd5b1, 0x0000000000000098);
                testNumbers(0x0000000000ab1887, 0x00000000000000ef);
                testNumbers(0x000000000096034a, 0x000000000000002f);
                testNumbers(0x0000000000d5bb94, 0x00000000000057d2);
                testNumbers(0x0000000000d7b2cb, 0x00000000000080f5);
                testNumbers(0x00000000004ccc6d, 0x000000000000087c);
                testNumbers(0x0000000000ec0c50, 0x000000000000bdff);
                testNumbers(0x00000000008a6865, 0x000000000076c014);
                testNumbers(0x0000000000ac38dd, 0x0000000000f12b09);
                testNumbers(0x0000000000615e2a, 0x0000000000e7cbf8);
                testNumbers(0x00000000000e214f, 0x00000000005b8e2f);
                testNumbers(0x00000000003bd7c6, 0x00000000c1db4e46);
                testNumbers(0x0000000000ae208d, 0x0000000001c9aa7a);
                testNumbers(0x00000000008a9cef, 0x0000000003930b07);
                testNumbers(0x000000000036b866, 0x00000000d64b7bef);
                testNumbers(0x0000000000d337cd, 0x000000a2b45fb7de);
                testNumbers(0x0000000000024471, 0x0000005c5de3da89);
                testNumbers(0x0000000000012b15, 0x0000007cd40030fe);
                testNumbers(0x0000000000d38af2, 0x0000005905921572);
                testNumbers(0x0000000000aca0d7, 0x0000c632301abeb8);
                testNumbers(0x00000000004eadc2, 0x00006a1ebf37403c);
                testNumbers(0x00000000005d909c, 0x00004021bfa15862);
                testNumbers(0x0000000000710e08, 0x0000e9a1a030b230);
                testNumbers(0x0000000000478b9b, 0x00804add8afc31d9);
                testNumbers(0x00000000005754ed, 0x00af85e7ebb1ce33);
                testNumbers(0x00000000003ab44e, 0x00f41b9f70360f78);
                testNumbers(0x00000000007aa129, 0x00eb6e4eddf7eb87);
                testNumbers(0x00000000003b036f, 0x333874e4330fbfa4);
                testNumbers(0x0000000000a33186, 0xec8607412503fc4c);
                testNumbers(0x00000000009af471, 0xe7ad0935fdbff151);
                testNumbers(0x0000000000c04e8c, 0x58ee406ab936ac24);
                // 4-byte x {1..8}-byte operands
                testNumbers(0x0000000054fdd28b, 0x0000000000000034);
                testNumbers(0x0000000033736b36, 0x00000000000000fd);
                testNumbers(0x0000000069cfe4b7, 0x0000000000000026);
                testNumbers(0x00000000fd078d36, 0x00000000000000dc);
                testNumbers(0x0000000075cc3f36, 0x0000000000001617);
                testNumbers(0x00000000075d660e, 0x0000000000008511);
                testNumbers(0x0000000052acb037, 0x00000000000043cb);
                testNumbers(0x00000000a0db7bf5, 0x0000000000002c98);
                testNumbers(0x0000000083d4be11, 0x0000000000ba37c9);
                testNumbers(0x0000000083d04f94, 0x00000000003ddbd0);
                testNumbers(0x000000005ed41f6a, 0x0000000000eaf1d5);
                testNumbers(0x000000000e364a9a, 0x000000000085880c);
                testNumbers(0x0000000012657ecb, 0x00000000a88b8a68);
                testNumbers(0x000000009897a4ac, 0x0000000076707981);
                testNumbers(0x00000000469cd1cf, 0x00000000cf40f67a);
                testNumbers(0x00000000ee7444c8, 0x00000000d1b0d7de);
                testNumbers(0x00000000fbb6f547, 0x000000c1ef3c4d9b);
                testNumbers(0x000000000e20dd53, 0x000000b05833c7cf);
                testNumbers(0x00000000e5733fb8, 0x0000008eae18a855);
                testNumbers(0x000000005db1c271, 0x000000c4a2f7c27d);
                testNumbers(0x0000000007add22a, 0x00000ed9fd23dc3e);
                testNumbers(0x000000002239d1d5, 0x0000a1ae07a62635);
                testNumbers(0x00000000410d4d58, 0x0000c05c5205bed2);
                testNumbers(0x000000004c3c435e, 0x00001e30c1bf628a);
                testNumbers(0x00000000096f44d5, 0x005488c521a6072b);
                testNumbers(0x0000000017f28913, 0x00796ff3891c44ff);
                testNumbers(0x0000000065be69cf, 0x00dd5c6f9b3f3119);
                testNumbers(0x000000002200f221, 0x00ab6c98c90cfe9d);
                testNumbers(0x00000000d48bee1a, 0x64b76d7491a58799);
                testNumbers(0x000000006cb93100, 0xa515fe27402dad45);
                testNumbers(0x00000000bed95abe, 0xc9924098acc74be9);
                testNumbers(0x0000000092781a2e, 0x67ada9ef3f9e39b7);
                // 5-byte x {1..8}-byte operands
                testNumbers(0x000000e3aafcdae2, 0x000000000000009c);
                testNumbers(0x000000d8dad80c34, 0x0000000000000099);
                testNumbers(0x000000addcd074d6, 0x00000000000000ea);
                testNumbers(0x00000096735bc25a, 0x00000000000000ba);
                testNumbers(0x000000f492ef7446, 0x00000000000039b1);
                testNumbers(0x000000bc86816119, 0x0000000000001520);
                testNumbers(0x00000060a36818e7, 0x000000000000c5a8);
                testNumbers(0x000000317121d508, 0x000000000000ac3d);
                testNumbers(0x0000004abfdaf232, 0x00000000005cea57);
                testNumbers(0x000000acc458f392, 0x0000000000a9c3e3);
                testNumbers(0x0000001020993532, 0x0000000000df6042);
                testNumbers(0x000000ad25b80abb, 0x0000000000cec15b);
                testNumbers(0x0000002305d2c443, 0x000000002a26131c);
                testNumbers(0x00000007c42e2ce0, 0x000000009768024f);
                testNumbers(0x00000076f674816c, 0x000000008d33c7b4);
                testNumbers(0x000000bf567b23bc, 0x00000000ef264890);
                testNumbers(0x000000e3283681a0, 0x0000002e66850719);
                testNumbers(0x000000011fe13754, 0x00000066fad0b407);
                testNumbers(0x00000052f259009f, 0x000000a2886ef414);
                testNumbers(0x000000a9ebb540fc, 0x0000009d27ba694f);
                testNumbers(0x00000083af60d7eb, 0x0000b6f2a0f51f4c);
                testNumbers(0x000000f2ec42d13a, 0x000046855f279407);
                testNumbers(0x00000094e71cb562, 0x00002d9566618e56);
                testNumbers(0x000000c0ee690ddc, 0x000054295c8ca584);
                testNumbers(0x0000002683cd5206, 0x00a5a2d269bcd188);
                testNumbers(0x0000002e77038305, 0x00c727f0f3787e22);
                testNumbers(0x0000008323b9d026, 0x00fed29f8575c120);
                testNumbers(0x0000007b3231f0fc, 0x0091080854b27d3e);
                testNumbers(0x00000084522a7708, 0x91ba8f22fccd6222);
                testNumbers(0x000000afb1b50d90, 0x3261a532b65c7838);
                testNumbers(0x0000002c65e838c6, 0x5b858452c9bf6f39);
                testNumbers(0x000000219e837734, 0x97873bed5bb0a44b);
                // 6-byte x {1..8}-byte operands
                testNumbers(0x00009f133e2f116f, 0x0000000000000073);
                testNumbers(0x0000887577574766, 0x0000000000000048);
                testNumbers(0x0000ba4c778d4aa8, 0x000000000000003a);
                testNumbers(0x00002683df421474, 0x0000000000000056);
                testNumbers(0x00006ff76294c275, 0x00000000000089f7);
                testNumbers(0x0000fdf053abefa2, 0x000000000000eb65);
                testNumbers(0x0000ea4b254b24eb, 0x000000000000ba27);
                testNumbers(0x000009f7ce21b811, 0x000000000000e8f6);
                testNumbers(0x00009cc645fa08a1, 0x0000000000a29ea3);
                testNumbers(0x0000726f9a9f816e, 0x000000000070dce1);
                testNumbers(0x0000a4be34825ef6, 0x0000000000bb2be7);
                testNumbers(0x000057ff147cb7c1, 0x0000000000e255af);
                testNumbers(0x0000ab9d6f546dd4, 0x000000007e2772a5);
                testNumbers(0x0000b148e3446e89, 0x0000000051ed3c28);
                testNumbers(0x00001e3abfe9725e, 0x00000000d4dec3f4);
                testNumbers(0x0000f61bcaba115e, 0x00000000fade149f);
                testNumbers(0x0000ae642b9a6626, 0x000000d8de0e0b9a);
                testNumbers(0x00009d015a13c8ae, 0x000000afc8827997);
                testNumbers(0x0000ecc72cc2df89, 0x00000070d47ec7c4);
                testNumbers(0x0000fdbf05894fd2, 0x00000012aec393bd);
                testNumbers(0x0000cd7675a70874, 0x0000d7d696a62cbc);
                testNumbers(0x0000fad44a89216d, 0x0000cb8cfc8ada4c);
                testNumbers(0x0000f41eb5363551, 0x00009c040aa7775e);
                testNumbers(0x00003c02d93e01f6, 0x0000f1f4e68a14f8);
                testNumbers(0x0000e0d99954b598, 0x00b2a2de4e453485);
                testNumbers(0x0000a6081be866d9, 0x00f2a12e845e4f2e);
                testNumbers(0x0000ae56a5680dfd, 0x00c96cd7c15d5bec);
                testNumbers(0x0000360363e37938, 0x00d4ed572e1937e0);
                testNumbers(0x00001f052aebf185, 0x3584e582d1c6db1a);
                testNumbers(0x00003fac9c7b3d1b, 0xa4b120f080d69113);
                testNumbers(0x00005330d51c3217, 0xc16dd32ffd822c0e);
                testNumbers(0x0000cd0694ff5ab0, 0x29673fe67245fbfc);
                // 7-byte x {1..8}-byte operands
                testNumbers(0x0098265e5a308523, 0x000000000000007d);
                testNumbers(0x00560863350df217, 0x00000000000000c8);
                testNumbers(0x00798ce804d829a1, 0x00000000000000b1);
                testNumbers(0x007994c0051256fd, 0x000000000000005c);
                testNumbers(0x00ff1a2838e69f42, 0x0000000000003c16);
                testNumbers(0x009e7e95ac5de2c7, 0x000000000000ed49);
                testNumbers(0x00fd6867eabba5c0, 0x000000000000c689);
                testNumbers(0x009d1632daf20de0, 0x000000000000b74f);
                testNumbers(0x00ee29d8f76d4e9c, 0x00000000008020d4);
                testNumbers(0x0089e03ecf8daa0a, 0x00000000003e7587);
                testNumbers(0x00115763be4beb44, 0x000000000088f762);
                testNumbers(0x00815cfc87c427d0, 0x00000000009eec06);
                testNumbers(0x001d9c3c9ded0c1a, 0x00000000b9f6d331);
                testNumbers(0x00932225412f1222, 0x00000000130ff743);
                testNumbers(0x00fe82151e2e0bf3, 0x00000000781cd6f9);
                testNumbers(0x002222abb5061b12, 0x000000000491f1df);
                testNumbers(0x0012ce0cf0452748, 0x000000a8566274aa);
                testNumbers(0x00e570484e9937e1, 0x000000ac81f171be);
                testNumbers(0x00eb371f7f8f514e, 0x000000df0248189c);
                testNumbers(0x003777a7cc43dfd7, 0x0000003a7b8eaf40);
                testNumbers(0x00e181db76238786, 0x00004126e572a568);
                testNumbers(0x00ac1df87977e122, 0x0000e1e8cfde6678);
                testNumbers(0x001c858763a2c23b, 0x000004ef61f3964f);
                testNumbers(0x00bd786bbb71ce46, 0x00002cda097a464f);
                testNumbers(0x00a7a6de21a46360, 0x00007afda16f98c3);
                testNumbers(0x006fed70a6ccfdf2, 0x009771441e8e00e8);
                testNumbers(0x005ad2782dcd5e60, 0x000d170d518385f6);
                testNumbers(0x001fd67b153bc9b9, 0x007b3366dff66c6c);
                testNumbers(0x00bf00203beb73f4, 0x693495fefab1c77e);
                testNumbers(0x002faac1b1b068f8, 0x1cb11cc5c3aaff86);
                testNumbers(0x00bb63cfbffe7648, 0x84f5b0c583f9e77b);
                testNumbers(0x00615db89673241c, 0x8de5f125247eba0f);
                // 8-byte x {1..8}-byte operands
                testNumbers(0x9be183a6b293dffe, 0x0000000000000072);
                testNumbers(0xa3df9b76d8a51b19, 0x00000000000000c4);
                testNumbers(0xb4cc300f0ea7566d, 0x000000000000007e);
                testNumbers(0xfdac12a8e23e16e7, 0x0000000000000015);
                testNumbers(0xc0805405aadc0f47, 0x00000000000019d4);
                testNumbers(0x843a391f8d9f8972, 0x000000000000317a);
                testNumbers(0x5a0d124c427ed453, 0x00000000000034fe);
                testNumbers(0x8631150f34008f1b, 0x0000000000002ecd);
                testNumbers(0x3ff4c18715ad3a76, 0x000000000072d22a);
                testNumbers(0x3ef93e5a649422bd, 0x0000000000db5c60);
                testNumbers(0x6bdd1056ae58fe0e, 0x0000000000805c75);
                testNumbers(0xeff1fa30f3ad9ded, 0x00000000000c83ca);
                testNumbers(0xbbc143ac147e56a9, 0x00000000161179b7);
                testNumbers(0x0829dde88caa2e45, 0x000000001443ab62);
                testNumbers(0x97ac43ff797a4514, 0x0000000033eef42b);
                testNumbers(0x703e9cdf96a148aa, 0x000000008e08f3d8);
                testNumbers(0x75cbb739b54e2ad6, 0x0000007a8b12628c);
                testNumbers(0x91e42fafe97d638f, 0x0000000fbe867c51);
                testNumbers(0x9159d77deec116c1, 0x00000096c0c774fc);
                testNumbers(0xb59dbb4c15761d88, 0x0000004a033a73e7);
                testNumbers(0xab668e9783af9617, 0x00005aa18404076c);
                testNumbers(0x54c68e5b5c4127df, 0x0000f2934fd8dd1f);
                testNumbers(0xf490d3936184c9f9, 0x00004007477e2110);
                testNumbers(0x349e577c9d5c44e2, 0x0000bdb2235af963);
                testNumbers(0x58f3ac26cdafde28, 0x0017d4f4ade9ec35);
                testNumbers(0xa4a263c316d21f4c, 0x00a7ec1e6fda834b);
                testNumbers(0x6ab14771c448666f, 0x005b0f49593c3a27);
                testNumbers(0x15f392c3602aa4f7, 0x0018af171045f88e);
                testNumbers(0xf17de69c0063f62c, 0xee2a164c2c3a46f8);
                testNumbers(0xf34b743eeff8e5c6, 0x4f4067f1a0e404ad);
                testNumbers(0xee0296f678756647, 0xf1bbfdc6f0280d36);
                testNumbers(0x65c33db0c952b829, 0xa7ab9c39dcffbcf3);
                Console.WriteLine("All tests passed.");
                return 100;
            }
            catch (DivideByZeroException)
            {
                // Reaching here means a division-by-zero escaped testNumbers' own
                // handling — treated as a test failure.
                return 1;
            }
        }
    }
}
// // PicasaWebExport.cs // // Author: // Stephane Delcroix <stephane@delcroix.org> // Stephen Shaw <sshaw@decriptor.com> // // Copyright (C) 2006-2009 Novell, Inc. // Copyright (C) 2006-2009 Stephane Delcroix // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//
/*
 * PicasaWebExport.cs
 *
 * Authors:
 *	Stephane Delcroix <stephane@delcroix.org>
 *
 * Copyright (C) 2006 Stephane Delcroix
 */
using System;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;

using Mono.Unix;

using Hyena;

using FSpot;
using FSpot.Core;
using FSpot.Database;
using FSpot.Filters;
using FSpot.Settings;
using FSpot.Widgets;
using FSpot.UI.Dialog;

using Mono.Google;
using Mono.Google.Picasa;
using System.Linq;

namespace FSpot.Exporters.PicasaWeb
{
	// F-Spot exporter plugin that uploads a photo selection to PicasaWeb.
	// NOTE(review): this class continues beyond the visible chunk (Dialog property,
	// PopulateGoogleOptionMenu, Connect, LoadPreference, etc. are defined further down).
	public class GoogleExport : FSpot.Extensions.IExporter
	{
		public GoogleExport ()
		{
		}

		// Entry point invoked by the exporter framework: builds the export dialog
		// from the Glade/GtkBuilder UI file, wires up event handlers, and loads
		// the user's previous export preferences.
		public void Run (IBrowsableCollection selection)
		{
			builder = new GtkBeans.Builder (null, "google_export_dialog.ui", null);
			builder.Autoconnect (this);

			// Combo boxes are created in code (not in the .ui file) and inserted
			// next to the edit/album buttons inside their parent HBoxes.
			gallery_optionmenu = Gtk.ComboBox.NewText ();
			album_optionmenu = Gtk.ComboBox.NewText ();
			(edit_button.Parent as Gtk.HBox).PackStart (gallery_optionmenu);
			(album_button.Parent as Gtk.HBox).PackStart (album_optionmenu);
			(edit_button.Parent as Gtk.HBox).ReorderChild (gallery_optionmenu, 1);
			(album_button.Parent as Gtk.HBox).ReorderChild (album_optionmenu, 1);
			gallery_optionmenu.Show ();
			album_optionmenu.Show ();

			this.items = selection.Items.ToArray ();
			album_button.Sensitive = false;
			var view = new TrayView (selection);
			view.DisplayDates = false;
			view.DisplayTags = false;

			Dialog.Modal = false;
			Dialog.TransientFor = null;

			thumb_scrolledwindow.Add (view);
			view.Show ();
			Dialog.Show ();

			GoogleAccountManager manager = GoogleAccountManager.GetInstance ();
			manager.AccountListChanged += PopulateGoogleOptionMenu;
			PopulateGoogleOptionMenu (manager, null);
			album_optionmenu.Changed += HandleAlbumOptionMenuChanged;

			if (edit_button != null)
				edit_button.Clicked += HandleEditGallery;

			Dialog.Response += HandleResponse;
			connect = true;
			HandleSizeActive (null, null);
			Connect ();

			// Restore the previously saved export options.
			LoadPreference (SCALE_KEY);
			LoadPreference (SIZE_KEY);
			LoadPreference (BROWSER_KEY);
			LoadPreference (TAG_KEY);
		}

		// Export options captured from the dialog when the user confirms.
		private bool scale;
		private int size;
		private bool browser;
		private bool export_tag;
		private bool connect = false;

		// Upload progress bookkeeping (bytes), updated from HandleUploadProgress.
		private long approx_size = 0;
		private long sent_bytes = 0;

		IPhoto [] items;
		int photo_index;
		ThreadProgressDialog progress_dialog;

		List<GoogleAccount> accounts;
		private GoogleAccount account;
		private PicasaAlbum album;
		private PicasaAlbumCollection albums = null;

		private GtkBeans.Builder builder;
		private string dialog_name = "google_export_dialog";

		// Preference keys under the F-Spot export preferences root.
		public const string EXPORT_SERVICE = "picasaweb/";
		public const string SCALE_KEY = Preferences.APP_FSPOT_EXPORT + EXPORT_SERVICE + "scale";
		public const string SIZE_KEY = Preferences.APP_FSPOT_EXPORT + EXPORT_SERVICE + "size";
		public const string BROWSER_KEY = Preferences.APP_FSPOT_EXPORT + EXPORT_SERVICE + "browser";
		public const string TAG_KEY = Preferences.APP_FSPOT_EXPORT + EXPORT_SERVICE + "tag";

		// widgets
		[GtkBeans.Builder.Object] Gtk.Dialog dialog;

		Gtk.ComboBox gallery_optionmenu;
		Gtk.ComboBox album_optionmenu;

#pragma warning disable 649
		// Fields below are assigned by builder.Autoconnect via reflection,
		// hence the suppressed "never assigned" warning.
		[GtkBeans.Builder.Object] Gtk.Label status_label;
		[GtkBeans.Builder.Object] Gtk.Label album_status_label;
		[GtkBeans.Builder.Object] Gtk.CheckButton browser_check;
		[GtkBeans.Builder.Object] Gtk.CheckButton scale_check;
		[GtkBeans.Builder.Object] Gtk.CheckButton tag_check;
		[GtkBeans.Builder.Object] Gtk.SpinButton size_spin;
		[GtkBeans.Builder.Object] Gtk.Button album_button;
		[GtkBeans.Builder.Object] Gtk.Button edit_button;
		[GtkBeans.Builder.Object] Gtk.Button export_button;
		[GtkBeans.Builder.Object] Gtk.ScrolledWindow thumb_scrolledwindow;
#pragma warning restore 649

		System.Threading.Thread command_thread;

		// Dialog response handler: on OK, snapshots the dialog options, resolves the
		// target album, kicks off the upload on a background thread with a progress
		// dialog, and persists the chosen options.
		private void HandleResponse (object sender, Gtk.ResponseArgs args)
		{
			if (args.ResponseId != Gtk.ResponseType.Ok) {
				Dialog.Destroy ();
				return;
			}

			if (scale_check != null) {
				scale = scale_check.Active;
				size = size_spin.ValueAsInt;
			} else
				scale = false;

			browser = browser_check.Active;
			export_tag = tag_check.Active;

			if (account != null) {
				// Math.Max guards against no combo selection (Active == -1).
				album = (PicasaAlbum) account.Picasa.GetAlbums () [Math.Max (0, album_optionmenu.Active)];
				photo_index = 0;

				Dialog.Destroy ();

				command_thread = new System.Threading.Thread (new System.Threading.ThreadStart (this.Upload));
				command_thread.Name = Catalog.GetString ("Uploading Pictures");

				progress_dialog = new ThreadProgressDialog (command_thread, items.Length);
				progress_dialog.Start ();

				// Save these settings for next time
				Preferences.Set (SCALE_KEY, scale);
				Preferences.Set (SIZE_KEY, size);
				Preferences.Set (BROWSER_KEY, browser);
				// Preferences.Set (Preferences.EXPORT_GALLERY_META, meta);
				Preferences.Set (TAG_KEY, export_tag);
			}
		}

		// Keeps the size spinner enabled only while "scale" is checked.
		public void HandleSizeActive (object sender, EventArgs args)
		{
			size_spin.Sensitive = scale_check.Active;
		}

		// Progress callback from the Picasa upload: updates the progress dialog's
		// text and fraction. When approx_size is 0 the total is unknown, so only
		// the sent-byte count is shown.
		void HandleUploadProgress (object o, UploadProgressEventArgs args)
		{
			if (approx_size == 0)
				progress_dialog.ProgressText = string.Format (Catalog.GetString ("{0} Sent"), GLib.Format.SizeForDisplay (args.BytesSent));
			else
				progress_dialog.ProgressText = string.Format (Catalog.GetString ("{0} of approx. {1}"), GLib.Format.SizeForDisplay (sent_bytes + args.BytesSent), GLib.Format.SizeForDisplay (approx_size));
			// Completed photos plus the in-flight photo's partial progress.
			progress_dialog.Fraction = ((photo_index - 1) / (double) items.Length) + (args.BytesSent / (args.BytesTotal * (double) items.Length));
		}

		// Orders photos chronologically for upload (non-generic IComparer for Array.Sort).
		class DateComparer : IComparer
		{
			public int Compare (object left, object right)
			{
				return DateTime.Compare ((left as IPhoto).Time, (right as IPhoto).Time);
			}
		}

		// Background-thread worker: uploads each selected photo (JPEG-converted and
		// optionally resized) to the chosen album in date order.
		// NOTE(review): this method is truncated at the end of the visible chunk;
		// it continues in the remainder of the file.
		void Upload ()
		{
			album.UploadProgress += HandleUploadProgress;
			sent_bytes = 0;
			approx_size = 0;

			Log.Debug ("Starting Upload to Picasa");

			FilterSet filters = new FilterSet ();
			filters.Add (new JpegFilter ());

			if (scale)
				filters.Add (new ResizeFilter ((uint) size));

			Array.Sort (items, new DateComparer ());

			while (photo_index < items.Length) {
				try {
					IPhoto item = items [photo_index];

					FileInfo file_info;
					Log.Debug ("Picasa uploading " + photo_index);

					progress_dialog.Message = string.Format (Catalog.GetString ("Uploading picture \"{0}\" ({1} of {2})"),
										 item.Name, photo_index + 1, items.Length);
					photo_index++;

					PicasaPicture picture;
					using 
(FilterRequest request = new FilterRequest (item.DefaultVersion.Uri)) { filters.Convert (request); file_info = new FileInfo (request.Current.LocalPath); if (approx_size == 0) //first image approx_size = file_info.Length * items.Length; else approx_size = sent_bytes * items.Length / (photo_index - 1); picture = album.UploadPicture (request.Current.LocalPath, Path.ChangeExtension (item.Name, "jpg"), item.Description); sent_bytes += file_info.Length; } if (App.Instance.Database != null && item is Photo) App.Instance.Database.Exports.Create ((item as Photo).Id, (item as Photo).DefaultVersionId, ExportStore.PicasaExportType, picture.Link); //tagging if (item.Tags != null && export_tag) foreach (Tag tag in item.Tags) { picture.AddTag (tag.Name); } } catch (System.Threading.ThreadAbortException te) { Log.Exception (te); System.Threading.Thread.ResetAbort (); } catch (System.Exception e) { progress_dialog.Message = string.Format (Catalog.GetString ("Error Uploading To Gallery: {0}"), e.Message); progress_dialog.ProgressText = Catalog.GetString ("Error"); Log.DebugException (e); if (progress_dialog.PerformRetrySkip ()) { photo_index--; if (photo_index == 0) approx_size = 0; } } } progress_dialog.Message = Catalog.GetString ("Done Sending Photos"); progress_dialog.Fraction = 1.0; progress_dialog.ProgressText = Catalog.GetString ("Upload Complete"); progress_dialog.ButtonLabel = Gtk.Stock.Ok; if (browser) GtkBeans.Global.ShowUri (Dialog.Screen, album.Link); } private void PopulateGoogleOptionMenu (GoogleAccountManager manager, GoogleAccount changed_account) { this.account = changed_account; int pos = -1; accounts = manager.GetAccounts (); if (accounts == null || accounts.Count == 0) { if (accounts == null) Log.Debug ("accounts == null"); else Log.Debug ("accounts != null"); Log.DebugFormat ("accounts.Count = {0}", accounts.Count); gallery_optionmenu.AppendText (Catalog.GetString ("(No Gallery)")); gallery_optionmenu.Sensitive = false; edit_button.Sensitive = false; pos = 0; } 
else { int i = 0; pos = 0; foreach (GoogleAccount account in accounts) { if (account == changed_account) pos = i; gallery_optionmenu.AppendText (account.Username); i++; } gallery_optionmenu.Sensitive = true; edit_button.Sensitive = true; } Log.DebugFormat ("Setting gallery_optionmenu.Active = {0}", pos); gallery_optionmenu.Active = pos; } private void Connect () { Connect (null); } private void Connect (GoogleAccount selected) { Connect (selected, null, null); } private void Connect (GoogleAccount selected, string token, string text) { try { if (accounts.Count != 0 && connect) { if (selected == null) account = (GoogleAccount)accounts [gallery_optionmenu.Active]; else account = selected; if (!account.Connected) account.Connect (); PopulateAlbumOptionMenu (account.Picasa); long qu = account.Picasa.QuotaUsed; long ql = account.Picasa.QuotaLimit; StringBuilder sb = new StringBuilder ("<small>"); sb.Append (string.Format (Catalog.GetString ("Available space: {0}, {1}% used out of {2}"), GLib.Format.SizeForDisplay (ql - qu), (100 * qu / ql), GLib.Format.SizeForDisplay (ql))); sb.Append ("</small>"); status_label.Text = sb.ToString (); status_label.UseMarkup = true; album_button.Sensitive = true; } } catch (CaptchaException exc) { Log.Debug ("Your Google account is locked"); if (selected != null) account = selected; PopulateAlbumOptionMenu (account.Picasa); album_button.Sensitive = false; new GoogleAccountDialog (this.Dialog, account, false, exc); Log.Warning ("Your Google account is locked, you can unlock it by visiting: {0}", CaptchaException.UnlockCaptchaURL); } catch (System.Exception) { Log.Warning ("Can not connect to Picasa. Bad username? password? 
network connection?"); if (selected != null) account = selected; PopulateAlbumOptionMenu (account.Picasa); status_label.Text = string.Empty; album_button.Sensitive = false; new GoogleAccountDialog (this.Dialog, account, true, null); } } private void HandleAccountSelected (object sender, System.EventArgs args) { Connect (); } public void HandleAlbumAdded (string title) { GoogleAccount account = (GoogleAccount)accounts [gallery_optionmenu.Active]; PopulateAlbumOptionMenu (account.Picasa); // make the newly created album selected // PicasaAlbumCollection albums = account.Picasa.GetAlbums(); for (int i=0; i < albums.Count; i++) { if (((PicasaAlbum)albums [i]).Title == title) album_optionmenu.Active = i; } } private void PopulateAlbumOptionMenu (Mono.Google.Picasa.PicasaWeb picasa) { if (picasa != null) try { albums = picasa.GetAlbums(); } catch { Log.Warning ("Picasa: can't get the albums"); albums = null; picasa = null; } bool disconnected = picasa == null || !account.Connected || albums == null; if (disconnected || albums.Count == 0) { string msg = disconnected ? 
Catalog.GetString ("(Not Connected)") : Catalog.GetString ("(No Albums)"); album_optionmenu.AppendText (msg); export_button.Sensitive = false; album_optionmenu.Sensitive = false; album_button.Sensitive = false; if (disconnected) album_button.Sensitive = false; } else { foreach (PicasaAlbum album in albums.AllValues) { System.Text.StringBuilder label_builder = new System.Text.StringBuilder (); label_builder.Append (album.Title); label_builder.Append (" (" + album.PicturesCount + ")"); album_optionmenu.AppendText (label_builder.ToString ()); } export_button.Sensitive = items.Length > 0; album_optionmenu.Sensitive = true; album_button.Sensitive = true; } } public void HandleAlbumOptionMenuChanged (object sender, System.EventArgs args) { if (albums == null || albums.Count == 0) return; PicasaAlbum a = albums [album_optionmenu.Active]; export_button.Sensitive = a.PicturesRemaining >= items.Length; if (album_status_label.Visible = !export_button.Sensitive) { StringBuilder sb = new StringBuilder ("<small>"); sb.Append (string.Format (Catalog.GetString ("The selected album has a limit of {0} pictures,\n" + "which would be passed with the current selection of {1} images"), a.PicturesCount + a.PicturesRemaining, items.Length)); sb.Append ("</small>"); album_status_label.Text = string.Format (sb.ToString ()); album_status_label.UseMarkup = true; } else album_status_label.Text = string.Empty; } public void HandleAddGallery (object sender, System.EventArgs args) { new GoogleAccountDialog (this.Dialog); } public void HandleEditGallery (object sender, System.EventArgs args) { new GoogleAccountDialog (this.Dialog, account, false, null); } public void HandleAddAlbum (object sender, System.EventArgs args) { if (account == null) throw new Exception (Catalog.GetString ("No account selected")); new GoogleAddAlbum (this, account.Picasa); } void LoadPreference (string key) { switch (key) { case SCALE_KEY: if (scale_check.Active != Preferences.Get<bool> (key)) scale_check.Active = 
Preferences.Get<bool> (key); break; case SIZE_KEY: size_spin.Value = (double)Preferences.Get<int> (key); break; case BROWSER_KEY: if (browser_check.Active != Preferences.Get<bool> (key)) browser_check.Active = Preferences.Get<bool> (key); break; case TAG_KEY: if (tag_check.Active != Preferences.Get<bool> (key)) tag_check.Active = Preferences.Get<bool> (key); break; } } private Gtk.Dialog Dialog { get { if (dialog == null) dialog = new Gtk.Dialog (builder.GetRawObject (dialog_name)); return dialog; } } } }
using UnityEngine; using System.Collections; using System.Collections.Generic; using Pathfinding; using Pathfinding.RVO; /** AI for following paths. * This AI is the default movement script which comes with the A* Pathfinding Project. * It is in no way required by the rest of the system, so feel free to write your own. But I hope this script will make it easier * to set up movement for the characters in your game. This script is not written for high performance, so I do not recommend using it for large groups of units. * \n * \n * This script will try to follow a target transform, in regular intervals, the path to that target will be recalculated. * It will on FixedUpdate try to move towards the next point in the path. * However it will only move in the forward direction, but it will rotate around it's Y-axis * to make it reach the target. * * \section variables Quick overview of the variables * In the inspector in Unity, you will see a bunch of variables. You can view detailed information further down, but here's a quick overview.\n * The #repathRate determines how often it will search for new paths, if you have fast moving targets, you might want to set it to a lower value.\n * The #target variable is where the AI will try to move, it can be a point on the ground where the player has clicked in an RTS for example. * Or it can be the player object in a zombie game.\n * The speed is self-explanatory, so is turningSpeed, however #slowdownDistance might require some explanation. * It is the approximate distance from the target where the AI will start to slow down. 
Note that this doesn't only affect the end point of the path * but also any intermediate points, so be sure to set #forwardLook and #pickNextWaypointDist to a higher value than this.\n * #pickNextWaypointDist is simply determines within what range it will switch to target the next waypoint in the path.\n * #forwardLook will try to calculate an interpolated target point on the current segment in the path so that it has a distance of #forwardLook from the AI\n * Below is an image illustrating several variables as well as some internal ones, but which are relevant for understanding how it works. * Note that the #forwardLook range will not match up exactly with the target point practically, even though that's the goal. * \shadowimage{aipath_variables.png} * This script has many movement fallbacks. * If it finds a NavmeshController, it will use that, otherwise it will look for a character controller, then for a rigidbody and if it hasn't been able to find any * it will use Transform.Translate which is guaranteed to always work. */ [RequireComponent(typeof(Seeker))] [AddComponentMenu("Pathfinding/AI/AIPath (3D)")] [HelpURL("http://arongranberg.com/astar/docs/class_a_i_path.php")] public class AIPath : MonoBehaviour { /** Determines how often it will search for new paths. * If you have fast moving targets or AIs, you might want to set it to a lower value. * The value is in seconds between path requests. */ public float repathRate = 0.5F; /** Target to move towards. * The AI will try to follow/move towards this target. * It can be a point on the ground where the player has clicked in an RTS for example, or it can be the player object in a zombie game. */ public Transform target; /** Enables or disables searching for paths. * Setting this to false does not stop any active path requests from being calculated or stop it from continuing to follow the current path. * \see #canMove */ public bool canSearch = true; /** Enables or disables movement. 
* \see #canSearch */ public bool canMove = true; /** Maximum velocity. * This is the maximum speed in world units per second. */ public float speed = 3; /** Rotation speed. * Rotation is calculated using Quaternion.SLerp. This variable represents the damping, the higher, the faster it will be able to rotate. */ public float turningSpeed = 5; /** Distance from the target point where the AI will start to slow down. * Note that this doesn't only affect the end point of the path * but also any intermediate points, so be sure to set #forwardLook and #pickNextWaypointDist to a higher value than this */ public float slowdownDistance = 0.6F; /** Determines within what range it will switch to target the next waypoint in the path */ public float pickNextWaypointDist = 2; /** Target point is Interpolated on the current segment in the path so that it has a distance of #forwardLook from the AI. * See the detailed description of AIPath for an illustrative image */ public float forwardLook = 1; /** Distance to the end point to consider the end of path to be reached. * When this has been reached, the AI will not move anymore until the target changes and OnTargetReached will be called. */ public float endReachedDistance = 0.2F; /** Do a closest point on path check when receiving path callback. * Usually the AI has moved a bit between requesting the path, and getting it back, and there is usually a small gap between the AI * and the closest node. * If this option is enabled, it will simulate, when the path callback is received, movement between the closest node and the current * AI position. This helps to reduce the moments when the AI just get a new path back, and thinks it ought to move backwards to the start of the new path * even though it really should just proceed forward. 
*/ public bool closestOnPathCheck = true; protected float minMoveScale = 0.05F; /** Cached Seeker component */ protected Seeker seeker; /** Cached Transform component */ protected Transform tr; /** Time when the last path request was sent */ protected float lastRepath = -9999; /** Current path which is followed */ protected Path path; /** Cached CharacterController component */ protected CharacterController controller; /** Cached Rigidbody component */ protected Rigidbody rigid; /** Current index in the path which is current target */ protected int currentWaypointIndex = 0; /** Holds if the end-of-path is reached * \see TargetReached */ protected bool targetReached = false; /** Only when the previous path has been returned should be search for a new path */ protected bool canSearchAgain = true; protected Vector3 lastFoundWaypointPosition; protected float lastFoundWaypointTime = -9999; /** Returns if the end-of-path has been reached * \see targetReached */ public bool TargetReached { get { return targetReached; } } /** Holds if the Start function has been run. * Used to test if coroutines should be started in OnEnable to prevent calculating paths * in the awake stage (or rather before start on frame 0). */ private bool startHasRun = false; /** Initializes reference variables. * If you override this function you should in most cases call base.Awake () at the start of it. * */ protected virtual void Awake () { seeker = GetComponent<Seeker>(); //This is a simple optimization, cache the transform component lookup tr = transform; //Cache some other components (not all are necessarily there) controller = GetComponent<CharacterController>(); rigid = GetComponent<Rigidbody>(); } /** Starts searching for paths. * If you override this function you should in most cases call base.Start () at the start of it. * \see OnEnable * \see RepeatTrySearchPath */ protected virtual void Start () { startHasRun = true; OnEnable(); } /** Run at start and when reenabled. 
* Starts RepeatTrySearchPath. * * \see Start */ protected virtual void OnEnable () { lastRepath = -9999; canSearchAgain = true; lastFoundWaypointPosition = GetFeetPosition(); if (startHasRun) { //Make sure we receive callbacks when paths complete seeker.pathCallback += OnPathComplete; StartCoroutine(RepeatTrySearchPath()); } } public void OnDisable () { // Abort calculation of path if (seeker != null && !seeker.IsDone()) seeker.GetCurrentPath().Error(); // Release current path if (path != null) path.Release(this); path = null; //Make sure we receive callbacks when paths complete seeker.pathCallback -= OnPathComplete; } /** Tries to search for a path every #repathRate seconds. * \see TrySearchPath */ protected IEnumerator RepeatTrySearchPath () { while (true) { float v = TrySearchPath(); yield return new WaitForSeconds(v); } } /** Tries to search for a path. * Will search for a new path if there was a sufficient time since the last repath and both * #canSearchAgain and #canSearch are true and there is a target. * * \returns The time to wait until calling this function again (based on #repathRate) */ public float TrySearchPath () { if (Time.time - lastRepath >= repathRate && canSearchAgain && canSearch && target != null) { SearchPath(); return repathRate; } else { //StartCoroutine (WaitForRepath ()); float v = repathRate - (Time.time-lastRepath); return v < 0 ? 
0 : v; } } /** Requests a path to the target */ public virtual void SearchPath () { if (target == null) throw new System.InvalidOperationException("Target is null"); lastRepath = Time.time; //This is where we should search to Vector3 targetPosition = target.position; canSearchAgain = false; //Alternative way of requesting the path //ABPath p = ABPath.Construct (GetFeetPosition(),targetPosition,null); //seeker.StartPath (p); //We should search from the current position seeker.StartPath(GetFeetPosition(), targetPosition); } public virtual void OnTargetReached () { //End of path has been reached //If you want custom logic for when the AI has reached it's destination //add it here //You can also create a new script which inherits from this one //and override the function in that script } /** Called when a requested path has finished calculation. * A path is first requested by #SearchPath, it is then calculated, probably in the same or the next frame. * Finally it is returned to the seeker which forwards it to this function.\n */ public virtual void OnPathComplete (Path _p) { ABPath p = _p as ABPath; if (p == null) throw new System.Exception("This function only handles ABPaths, do not use special path types"); canSearchAgain = true; //Claim the new path p.Claim(this); // Path couldn't be calculated of some reason. // More info in p.errorLog (debug string) if (p.error) { p.Release(this); return; } //Release the previous path if (path != null) path.Release(this); //Replace the old path path = p; //Reset some variables currentWaypointIndex = 0; targetReached = false; //The next row can be used to find out if the path could be found or not //If it couldn't (error == true), then a message has probably been logged to the console //however it can also be got using p.errorLog //if (p.error) if (closestOnPathCheck) { // Simulate movement from the point where the path was requested // to where we are right now. 
This reduces the risk that the agent // gets confused because the first point in the path is far away // from the current position (possibly behind it which could cause // the agent to turn around, and that looks pretty bad). Vector3 p1 = Time.time - lastFoundWaypointTime < 0.3f ? lastFoundWaypointPosition : p.originalStartPoint; Vector3 p2 = GetFeetPosition(); Vector3 dir = p2-p1; float magn = dir.magnitude; dir /= magn; int steps = (int)(magn/pickNextWaypointDist); for (int i = 0; i <= steps; i++) { CalculateVelocity(p1); p1 += dir; } } } public virtual Vector3 GetFeetPosition () { if (controller != null) { return tr.position - Vector3.up*controller.height*0.5F; } return tr.position; } public virtual void Update () { if (!canMove) { return; } Vector3 dir = CalculateVelocity(GetFeetPosition()); //Rotate towards targetDirection (filled in by CalculateVelocity) RotateTowards(targetDirection); if (controller != null) { controller.SimpleMove(dir); } else if (rigid != null) { rigid.AddForce(dir); } else { tr.Translate(dir*Time.deltaTime, Space.World); } } /** Point to where the AI is heading. * Filled in by #CalculateVelocity */ protected Vector3 targetPoint; /** Relative direction to where the AI is heading. * Filled in by #CalculateVelocity */ protected Vector3 targetDirection; protected float XZSqrMagnitude (Vector3 a, Vector3 b) { float dx = b.x-a.x; float dz = b.z-a.z; return dx*dx + dz*dz; } /** Calculates desired velocity. * Finds the target path segment and returns the forward direction, scaled with speed. * A whole bunch of restrictions on the velocity is applied to make sure it doesn't overshoot, does not look too far ahead, * and slows down when close to the target. 
* /see speed * /see endReachedDistance * /see slowdownDistance * /see CalculateTargetPoint * /see targetPoint * /see targetDirection * /see currentWaypointIndex */ protected Vector3 CalculateVelocity (Vector3 currentPosition) { if (path == null || path.vectorPath == null || path.vectorPath.Count == 0) return Vector3.zero; List<Vector3> vPath = path.vectorPath; if (vPath.Count == 1) { vPath.Insert(0, currentPosition); } if (currentWaypointIndex >= vPath.Count) { currentWaypointIndex = vPath.Count-1; } if (currentWaypointIndex <= 1) currentWaypointIndex = 1; while (true) { if (currentWaypointIndex < vPath.Count-1) { //There is a "next path segment" float dist = XZSqrMagnitude(vPath[currentWaypointIndex], currentPosition); //Mathfx.DistancePointSegmentStrict (vPath[currentWaypointIndex+1],vPath[currentWaypointIndex+2],currentPosition); if (dist < pickNextWaypointDist*pickNextWaypointDist) { lastFoundWaypointPosition = currentPosition; lastFoundWaypointTime = Time.time; currentWaypointIndex++; } else { break; } } else { break; } } Vector3 dir = vPath[currentWaypointIndex] - vPath[currentWaypointIndex-1]; Vector3 targetPosition = CalculateTargetPoint(currentPosition, vPath[currentWaypointIndex-1], vPath[currentWaypointIndex]); dir = targetPosition-currentPosition; dir.y = 0; float targetDist = dir.magnitude; float slowdown = Mathf.Clamp01(targetDist / slowdownDistance); this.targetDirection = dir; this.targetPoint = targetPosition; if (currentWaypointIndex == vPath.Count-1 && targetDist <= endReachedDistance) { if (!targetReached) { targetReached = true; OnTargetReached(); } //Send a move request, this ensures gravity is applied return Vector3.zero; } Vector3 forward = tr.forward; float dot = Vector3.Dot(dir.normalized, forward); float sp = speed * Mathf.Max(dot, minMoveScale) * slowdown; if (Time.deltaTime > 0) { sp = Mathf.Clamp(sp, 0, targetDist/(Time.deltaTime*2)); } return forward*sp; } /** Rotates in the specified direction. * Rotates around the Y-axis. 
* \see turningSpeed */ protected virtual void RotateTowards (Vector3 dir) { if (dir == Vector3.zero) return; Quaternion rot = tr.rotation; Quaternion toTarget = Quaternion.LookRotation(dir); rot = Quaternion.Slerp(rot, toTarget, turningSpeed*Time.deltaTime); Vector3 euler = rot.eulerAngles; euler.z = 0; euler.x = 0; rot = Quaternion.Euler(euler); tr.rotation = rot; } /** Calculates target point from the current line segment. * \param p Current position * \param a Line segment start * \param b Line segment end * The returned point will lie somewhere on the line segment. * \see #forwardLook * \todo This function uses .magnitude quite a lot, can it be optimized? */ protected Vector3 CalculateTargetPoint (Vector3 p, Vector3 a, Vector3 b) { a.y = p.y; b.y = p.y; float magn = (a-b).magnitude; if (magn == 0) return a; float closest = Mathf.Clamp01(VectorMath.ClosestPointOnLineFactor(a, b, p)); Vector3 point = (b-a)*closest + a; float distance = (point-p).magnitude; float lookAhead = Mathf.Clamp(forwardLook - distance, 0.0F, forwardLook); float offset = lookAhead / magn; offset = Mathf.Clamp(offset+closest, 0.0F, 1.0F); return (b-a)*offset + a; } }
/* * NameValueCollection.cs - Implementation of * "System.Collections.Specialized.NameValueCollection". * * Copyright (C) 2002 Southern Storm Software, Pty Ltd. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace System.Collections.Specialized { using System; using System.Collections; using System.Runtime.Serialization; using System.Text; public class NameValueCollection : NameObjectCollectionBase { // Internal state. private String[] allKeysResult; private String[] copyToResult; // Constructors. public NameValueCollection() : base(0, null, null) { // Nothing to do here. } public NameValueCollection(NameValueCollection col) : base(0, null, null) { Add(col); } public NameValueCollection(IHashCodeProvider hashProvider, IComparer comparer) : base(0, hashProvider, comparer) { // Nothing to do here. } public NameValueCollection(int capacity) : base(capacity, null, null) { // Nothing to do here. } public NameValueCollection(int capacity, NameValueCollection col) : base(capacity, null, null) { Add(col); } public NameValueCollection(int capacity, IHashCodeProvider hashProvider, IComparer comparer) : base(capacity, hashProvider, comparer) { // Nothing to do here. 
} #if CONFIG_SERIALIZATION protected NameValueCollection(SerializationInfo info, StreamingContext context) : base(info, context) {} #endif // Add a name/value pair to this collection. public virtual void Add(String name, String value) { if(IsReadOnly) { throw new NotSupportedException(S._("NotSupp_ReadOnly")); } InvalidateCachedArrays(); ArrayList strings = (ArrayList)(BaseGet(name)); if(strings == null) { strings = new ArrayList(1); if(value != null) { strings.Add(value); } BaseAdd(name, strings); } else if(value != null) { strings.Add(value); } } // Add the contents of another name/value collection to this collection. public void Add(NameValueCollection c) { if(c == null) { throw new ArgumentNullException("c"); } int count = c.Count; int posn; String name; ArrayList strings; for(posn = 0; posn < count; ++posn) { name = c.BaseGetKey(posn); strings = (ArrayList)(c.BaseGet(posn)); foreach(String value in strings) { Add(name, value); } } } // Copy the strings in this collection to an array. public void CopyTo(Array array, int index) { if(copyToResult == null) { int count = Count; int posn; copyToResult = new String [count]; for(posn = 0; posn < count; ++posn) { copyToResult[posn] = Get(posn); } } copyToResult.CopyTo(array, index); } // Clear the contents of this collection. public void Clear() { if(!IsReadOnly) { InvalidateCachedArrays(); } BaseClear(); } // Get a key at a particular index within this collection. public virtual String GetKey(int index) { return BaseGetKey(index); } // Collapse an array list of strings into a comma-separated value. 
private static String CollapseToString(ArrayList strings) { if(strings == null) { return null; } int count = strings.Count; if(count == 0) { return null; } else if(count == 1) { return (String)(strings[0]); } else { StringBuilder builder = new StringBuilder(); builder.Append((String)(strings[0])); int posn; for(posn = 1; posn < count; ++posn) { builder.Append(','); builder.Append((String)(strings[posn])); } return builder.ToString(); } } // Collapse an array list of strings into an array. private static String[] CollapseToArray(ArrayList strings) { if (strings == null) return null; String[] result = new String [strings.Count]; strings.CopyTo(result, 0); return result; } // Get a value at a particular index within this collection. public virtual String Get(int index) { return CollapseToString((ArrayList)(BaseGet(index))); } // Get an array of values at a particular index within this collection. public virtual String[] GetValues(int index) { return CollapseToArray((ArrayList)(BaseGet(index))); } // Get the value associcated with a particular name. public virtual String Get(String name) { return CollapseToString((ArrayList)(BaseGet(name))); } // Get the array of values associcated with a particular name. public virtual String[] GetValues(String name) { return CollapseToArray((ArrayList)(BaseGet(name))); } // Determine if the collection has keys that are not null. public bool HasKeys() { return BaseHasKeys(); } // Invalidate cached arrays within this collection. protected void InvalidateCachedArrays() { allKeysResult = null; copyToResult = null; } // Remove an entry with a specified name from this collection. public virtual void Remove(String name) { if(!IsReadOnly) { InvalidateCachedArrays(); } BaseRemove(name); } // Set the value associated with a specified name in this collection. 
public virtual void Set(String name, String value) { if(!IsReadOnly) { InvalidateCachedArrays(); } ArrayList strings = new ArrayList(1); if(value != null) { strings.Add(value); } BaseSet(name, strings); } // Get a list of all keys in this collection. public virtual String[] AllKeys { get { if(allKeysResult == null) { allKeysResult = BaseGetAllKeys(); } return allKeysResult; } } // Get or set a specific item within this collection by name. public String this[String name] { get { return Get(name); } set { Set(name, value); } } // Get a specific item within this collection by index. public String this[int index] { get { return Get(index); } } }; // class NameValueCollection }; // namespace System.Collections.Specialized
using System; using System.Data; using System.Data.OleDb; using System.Collections; using System.Configuration; using PCSComUtils.DataAccess; using PCSComUtils.PCSExc; using PCSComUtils.Common; namespace PCSComUtils.Admin.DS { /// <summary> /// Data-access class for the Sys_VisibilityItem table: CRUD operations plus /// DataSet-based listing and batch update. Every database failure is translated /// into a PCSDBException carrying a PCS error code. /// </summary> public class Sys_VisibilityItemDS { public Sys_VisibilityItemDS() { } private const string THIS = "PCSComUtils.Admin.DS.Sys_VisibilityItemDS"; //************************************************************************** /// <Description> /// Returns true when any error in the OleDbException carries the given /// native error code. /// </Description> /// <Notes> /// The previous code read ex.Errors[1] unconditionally, which raises /// IndexOutOfRangeException inside the catch block whenever the provider /// reports fewer than two errors, escaping the PCSDBException contract. /// Scanning the whole collection is safe and backward compatible. /// </Notes> //************************************************************************** private static bool HasNativeError(OleDbException ex, int pintNativeError) { foreach (OleDbError oerrError in ex.Errors) { if (oerrError.NativeError == pintNativeError) { return true; } } return false; } //************************************************************************** /// <Description> /// Inserts a new row into Sys_VisibilityItem. /// </Description> /// <Inputs> /// Sys_VisibilityItemVO (Name, GroupID, Type are written) /// </Inputs> /// <Exceptions> /// PCSDBException: DUPLICATE_KEY on unique-key violation, ERROR_DB on /// other database errors, OTHER_ERROR otherwise. /// </Exceptions> //************************************************************************** public void Add(object pobjObjectVO) { const string METHOD_NAME = THIS + ".Add()"; OleDbConnection oconPCS = null; OleDbCommand ocmdPCS = null; try { Sys_VisibilityItemVO objObject = (Sys_VisibilityItemVO) pobjObjectVO; string strSql = "INSERT INTO Sys_VisibilityItem(" + Sys_VisibilityItemTable.NAME_FLD + "," + Sys_VisibilityItemTable.GROUPID_FLD + "," + Sys_VisibilityItemTable.TYPE_FLD + ")" + " VALUES(?,?,?)"; oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); // OleDb parameters are positional; they must be added in the same // order as the '?' placeholders above. ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.NAME_FLD, OleDbType.WChar)); ocmdPCS.Parameters[Sys_VisibilityItemTable.NAME_FLD].Value = objObject.Name; ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.GROUPID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[Sys_VisibilityItemTable.GROUPID_FLD].Value = objObject.GroupID; ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.TYPE_FLD, OleDbType.Integer)); ocmdPCS.Parameters[Sys_VisibilityItemTable.TYPE_FLD].Value = objObject.Type; ocmdPCS.Connection.Open(); ocmdPCS.ExecuteNonQuery(); } catch (OleDbException ex) { if (HasNativeError(ex, ErrorCode.SQLDUPLICATE_KEYCODE)) { throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex); } throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (InvalidOperationException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS != null && oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } //************************************************************************** /// <Description> /// Deletes the row with the given primary key from Sys_VisibilityItem. /// </Description> /// <Inputs> /// pintID - VisibilityItemID of the row to delete /// </Inputs> /// <Exceptions> /// PCSDBException: CASCADE_DELETE_PREVENT when referential integrity /// blocks the delete, ERROR_DB / OTHER_ERROR otherwise. /// </Exceptions> /// <Notes> /// Now parameterized and using the field-name constant instead of the /// hard-coded "VisibilityItemID" literal of the original. /// </Notes> //************************************************************************** public void Delete(int pintID) { const string METHOD_NAME = THIS + ".Delete()"; string strSql = "DELETE FROM " + Sys_VisibilityItemTable.TABLE_NAME + " WHERE " + Sys_VisibilityItemTable.VISIBILITYITEMID_FLD + "= ?"; OleDbConnection oconPCS = null; OleDbCommand ocmdPCS = null; try { oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.VISIBILITYITEMID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[Sys_VisibilityItemTable.VISIBILITYITEMID_FLD].Value = pintID; ocmdPCS.Connection.Open(); ocmdPCS.ExecuteNonQuery(); } catch (OleDbException ex) { if (HasNativeError(ex, ErrorCode.SQLCASCADE_PREVENT_KEYCODE)) { throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex); } throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS != null && oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } //************************************************************************** /// <Description> /// Loads one Sys_VisibilityItem row into a value object. /// </Description> /// <Inputs> /// pintID - VisibilityItemID of the row to load /// </Inputs> /// <Outputs> /// Sys_VisibilityItemVO; when no row matches, an empty VO is returned /// (same behavior as the original implementation). /// </Outputs> //************************************************************************** public object GetObjectVO(int pintID) { const string METHOD_NAME = THIS + ".GetObjectVO()"; OleDbConnection oconPCS = null; OleDbCommand ocmdPCS = null; OleDbDataReader odrPCS = null; try { string strSql = "SELECT " + Sys_VisibilityItemTable.VISIBILITYITEMID_FLD + "," + Sys_VisibilityItemTable.NAME_FLD + "," + Sys_VisibilityItemTable.GROUPID_FLD + "," + Sys_VisibilityItemTable.TYPE_FLD + " FROM " + Sys_VisibilityItemTable.TABLE_NAME + " WHERE " + Sys_VisibilityItemTable.VISIBILITYITEMID_FLD + "= ?"; oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.VISIBILITYITEMID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[Sys_VisibilityItemTable.VISIBILITYITEMID_FLD].Value = pintID; ocmdPCS.Connection.Open(); odrPCS = ocmdPCS.ExecuteReader(); Sys_VisibilityItemVO objObject = new Sys_VisibilityItemVO(); while (odrPCS.Read()) { objObject.VisibilityItemID = int.Parse(odrPCS[Sys_VisibilityItemTable.VISIBILITYITEMID_FLD].ToString().Trim()); objObject.Name = odrPCS[Sys_VisibilityItemTable.NAME_FLD].ToString().Trim(); objObject.GroupID = int.Parse(odrPCS[Sys_VisibilityItemTable.GROUPID_FLD].ToString().Trim()); objObject.Type = int.Parse(odrPCS[Sys_VisibilityItemTable.TYPE_FLD].ToString().Trim()); } return objObject; } catch (OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { // Close the reader before the connection; the original left it open // until the connection itself was closed. if (odrPCS != null && !odrPCS.IsClosed) { odrPCS.Close(); } if (oconPCS != null && oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } //************************************************************************** /// <Description> /// Updates an existing Sys_VisibilityItem row keyed by VisibilityItemID. /// </Description> /// <Inputs> /// Sys_VisibilityItemVO /// </Inputs> /// <Exceptions> /// PCSDBException: DUPLICATE_KEY / ERROR_DB / OTHER_ERROR. /// </Exceptions> //************************************************************************** public void Update(object pobjObjecVO) { const string METHOD_NAME = THIS + ".Update()"; Sys_VisibilityItemVO objObject = (Sys_VisibilityItemVO) pobjObjecVO; OleDbConnection oconPCS = null; OleDbCommand ocmdPCS = null; try { string strSql = "UPDATE Sys_VisibilityItem SET " + Sys_VisibilityItemTable.NAME_FLD + "= ?," + Sys_VisibilityItemTable.GROUPID_FLD + "= ?," + Sys_VisibilityItemTable.TYPE_FLD + "= ?" + " WHERE " + Sys_VisibilityItemTable.VISIBILITYITEMID_FLD + "= ?"; oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); // Positional parameters: SET fields first, WHERE key last. ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.NAME_FLD, OleDbType.WChar)); ocmdPCS.Parameters[Sys_VisibilityItemTable.NAME_FLD].Value = objObject.Name; ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.GROUPID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[Sys_VisibilityItemTable.GROUPID_FLD].Value = objObject.GroupID; ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.TYPE_FLD, OleDbType.Integer)); ocmdPCS.Parameters[Sys_VisibilityItemTable.TYPE_FLD].Value = objObject.Type; ocmdPCS.Parameters.Add(new OleDbParameter(Sys_VisibilityItemTable.VISIBILITYITEMID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[Sys_VisibilityItemTable.VISIBILITYITEMID_FLD].Value = objObject.VisibilityItemID; ocmdPCS.Connection.Open(); ocmdPCS.ExecuteNonQuery(); } catch (OleDbException ex) { if (HasNativeError(ex, ErrorCode.SQLDUPLICATE_KEYCODE)) { throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex); } throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (InvalidOperationException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS != null && oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } //************************************************************************** /// <Description> /// Returns all rows of Sys_VisibilityItem in a DataSet keyed by table name. /// </Description> /// <Outputs> /// DataSet /// </Outputs> //************************************************************************** public DataSet List() { const string METHOD_NAME = THIS + ".List()"; DataSet dstPCS = new DataSet(); OleDbConnection oconPCS = null; try { string strSql = "SELECT " + Sys_VisibilityItemTable.VISIBILITYITEMID_FLD + "," + Sys_VisibilityItemTable.NAME_FLD + "," + Sys_VisibilityItemTable.GROUPID_FLD + "," + Sys_VisibilityItemTable.TYPE_FLD + " FROM " + Sys_VisibilityItemTable.TABLE_NAME; oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS); odadPCS.Fill(dstPCS, Sys_VisibilityItemTable.TABLE_NAME); return dstPCS; } catch (OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS != null && oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } //************************************************************************** /// <Description> /// Pushes the changes recorded in a DataSet back to Sys_VisibilityItem. /// </Description> /// <Inputs> /// DataSet containing a Sys_VisibilityItem table with pending changes /// </Inputs> /// <Exceptions> /// PCSDBException: DUPLICATE_KEY / CASCADE_DELETE_PREVENT / ERROR_DB / /// OTHER_ERROR. /// </Exceptions> //************************************************************************** public void UpdateDataSet(DataSet pData) { const string METHOD_NAME = THIS + ".UpdateDataSet()"; OleDbConnection oconPCS = null; OleDbDataAdapter odadPCS = new OleDbDataAdapter(); try { string strSql = "SELECT " + Sys_VisibilityItemTable.VISIBILITYITEMID_FLD + "," + Sys_VisibilityItemTable.NAME_FLD + "," + Sys_VisibilityItemTable.GROUPID_FLD + "," + Sys_VisibilityItemTable.TYPE_FLD + " FROM " + Sys_VisibilityItemTable.TABLE_NAME; oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); odadPCS.SelectCommand = new OleDbCommand(strSql, oconPCS); // The command builder derives INSERT/UPDATE/DELETE commands from the // SELECT; it must stay referenced until Update() completes. OleDbCommandBuilder odcbPCS = new OleDbCommandBuilder(odadPCS); pData.EnforceConstraints = false; odadPCS.Update(pData, Sys_VisibilityItemTable.TABLE_NAME); } catch (OleDbException ex) { if (HasNativeError(ex, ErrorCode.SQLDUPLICATE_KEYCODE)) { throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex); } if (HasNativeError(ex, ErrorCode.SQLCASCADE_PREVENT_KEYCODE)) { throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex); } throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (InvalidOperationException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS != null && oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } } 
/* * Originally part of #Punk, a port of FlashPunk to C# * https://bitbucket.org/jacobalbano/punk/overview */ using System; namespace Otter { /// <summary> /// Static class with useful easer functions that can be used by Tweens. /// Each function maps an elapsed, normalized time t to an eased timescale. /// </summary> public static class Ease { const float PI = 3.14159f; const float PI2 = PI / 2; // Precomputed breakpoints shared by the bounce easers. const float B1 = 1 / 2.75f; const float B2 = 2 / 2.75f; const float B3 = 1.5f / 2.75f; const float B4 = 2.5f / 2.75f; const float B5 = 2.25f / 2.75f; const float B6 = 2.625f / 2.75f; /// <summary> /// Elastic in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float ElasticIn(float t) { double eased = Math.Sin(13 * PI2 * t) * Math.Pow(2, 10 * (t - 1)); return (float)eased; } /// <summary> /// Elastic out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float ElasticOut(float t) { double eased = Math.Sin(-13 * PI2 * (t + 1)) * Math.Pow(2, -10 * t) + 1; return (float)eased; } /// <summary> /// Elastic in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float ElasticInOut(float t) { if (t < 0.5) { return (float)(0.5 * Math.Sin(13 * PI2 * (2 * t)) * Math.Pow(2, 10 * ((2 * t) - 1))); } return (float)(0.5 * (Math.Sin(-13 * PI2 * ((2 * t - 1) + 1)) * Math.Pow(2, -10 * (2 * t - 1)) + 2)); } /// <summary> /// Quadratic in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuadIn(float t) { return (float)(t * t); } /// <summary> /// Quadratic out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuadOut(float t) { return (float)(-t * (t - 2)); } /// <summary> /// Quadratic in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuadInOut(float t) { if (t <= .5) { return (float)(t * t * 2); } t -= 1; return (float)(1 - t * t * 2); } /// <summary> /// Cubic in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float CubeIn(float t) { return (float)(t * t * t); } /// <summary> /// Cubic out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float CubeOut(float t) { t -= 1; return (float)(1 + t * t * t); } /// <summary> /// Cubic in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float CubeInOut(float t) { if (t <= .5) { return (float)(t * t * t * 4); } t -= 1; return (float)(1 + t * t * t * 4); } /// <summary> /// Quart in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuartIn(float t) { return (float)(t * t * t * t); } /// <summary> /// Quart out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuartOut(float t) { t -= 1; return (float)(1 - t * t * t * t); } /// <summary> /// Quart in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuartInOut(float t) { if (t <= .5) { return (float)(t * t * t * t * 8); } t = t * 2 - 2; return (float)((1 - t * t * t * t) / 2 + .5); } /// <summary> /// Quint in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuintIn(float t) { return (float)(t * t * t * t * t); } /// <summary> /// Quint out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuintOut(float t) { t = t - 1; return (float)(t * t * t * t * t + 1); } /// <summary> /// Quint in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float QuintInOut(float t) { t *= 2; if (t < 1) { return (float)((t * t * t * t * t) / 2); } t -= 2; return (float)((t * t * t * t * t + 2) / 2); } /// <summary> /// Sine in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float SineIn(float t) { return (float)(-Math.Cos(PI2 * t) + 1); } /// <summary> /// Sine out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float SineOut(float t) { return (float)(Math.Sin(PI2 * t)); } /// <summary> /// Sine in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float SineInOut(float t) { return (float)(-Math.Cos(PI * t) / 2 + .5); } /// <summary> /// Bounce in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float BounceIn(float t) { // Mirror of BounceOut: reflect time, then reflect the result. t = 1 - t; if (t < B1) { return (float)(1 - 7.5625 * t * t); } if (t < B2) { return (float)(1 - (7.5625 * (t - B3) * (t - B3) + .75)); } if (t < B4) { return (float)(1 - (7.5625 * (t - B5) * (t - B5) + .9375)); } return (float)(1 - (7.5625 * (t - B6) * (t - B6) + .984375)); } /// <summary> /// Bounce out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float BounceOut(float t) { if (t < B1) { return (float)(7.5625 * t * t); } if (t < B2) { return (float)(7.5625 * (t - B3) * (t - B3) + .75); } if (t < B4) { return (float)(7.5625 * (t - B5) * (t - B5) + .9375); } return (float)(7.5625 * (t - B6) * (t - B6) + .984375); } /// <summary> /// Bounce in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float BounceInOut(float t) { if (t < .5) { // First half: scaled-down BounceIn. t = 1 - t * 2; if (t < B1) { return (float)((1 - 7.5625 * t * t) / 2); } if (t < B2) { return (float)((1 - (7.5625 * (t - B3) * (t - B3) + .75)) / 2); } if (t < B4) { return (float)((1 - (7.5625 * (t - B5) * (t - B5) + .9375)) / 2); } return (float)((1 - (7.5625 * (t - B6) * (t - B6) + .984375)) / 2); } // Second half: scaled-down BounceOut shifted up. t = t * 2 - 1; if (t < B1) { return (float)((7.5625 * t * t) / 2 + .5); } if (t < B2) { return (float)((7.5625 * (t - B3) * (t - B3) + .75) / 2 + .5); } if (t < B4) { return (float)((7.5625 * (t - B5) * (t - B5) + .9375) / 2 + .5); } return (float)((7.5625 * (t - B6) * (t - B6) + .984375) / 2 + .5); } /// <summary> /// Circle in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float CircIn(float t) { return (float)(-(Math.Sqrt(1 - t * t) - 1)); } /// <summary> /// Circle out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float CircOut(float t) { return (float)(Math.Sqrt(1 - (t - 1) * (t - 1))); } /// <summary> /// Circle in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float CircInOut(float t) { if (t <= .5) { return (float)((Math.Sqrt(1 - t * t * 4) - 1) / -2); } return (float)((Math.Sqrt(1 - (t * 2 - 2) * (t * 2 - 2)) + 1) / 2); } /// <summary> /// Exponential in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float ExpoIn(float t) { return (float)(Math.Pow(2, 10 * (t - 1))); } /// <summary> /// Exponential out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float ExpoOut(float t) { return (float)(-Math.Pow(2, -10 * t) + 1); } /// <summary> /// Exponential in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float ExpoInOut(float t) { if (t < .5) { return (float)(Math.Pow(2, 10 * (t * 2 - 1)) / 2); } return (float)((-Math.Pow(2, -10 * (t * 2 - 1)) + 2) / 2); } /// <summary> /// Back in. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float BackIn(float t) { return (float)(t * t * (2.70158 * t - 1.70158)); } /// <summary> /// Back out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float BackOut(float t) { t -= 1; return (float)(1 - t * (t) * (-2.70158 * t - 1.70158)); } /// <summary> /// Back in and out. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float BackInOut(float t) { t *= 2; if (t < 1) { return (float)(t * t * (2.70158 * t - 1.70158) / 2); } // Equivalent to the original's double decrement (t-- followed by --t). t -= 2; return (float)((1 - t * (t) * (-2.70158 * t - 1.70158)) / 2 + .5); } /// <summary> /// Linear. /// </summary> /// <param name="t">Time elapsed.</param> /// <returns>Eased timescale.</returns> public static float Linear(float t) { return t; } } } 
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq.Expressions; using System.Reflection; using System.Runtime.CompilerServices; using System.Threading; using System.Dynamic.Utils; namespace System.Linq.Expressions.Interpreter { /// <summary> /// A single activation frame of the expression-tree interpreter. It holds the /// combined locals + evaluation stack (<see cref="Data"/>), the closure cells, /// and the continuation bookkeeping used to run finally blocks during jumps. /// </summary> internal sealed class InterpretedFrame { [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes")] [ThreadStatic] // Innermost frame currently executing on this thread; frames chain via _parent. public static InterpretedFrame CurrentFrame; internal readonly Interpreter Interpreter; internal InterpretedFrame _parent; [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2105:ArrayFieldsShouldNotBeReadOnly")] // Stack of label indices for the continuations (finally blocks) currently in effect. private int[] _continuations; private int _continuationIndex; // Label index of a jump whose finally blocks are still running; -1 when no jump is pending. private int _pendingContinuation; // Value carried by the pending jump; Interpreter.NoValue when the jump carries none. private object _pendingValue; [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2105:ArrayFieldsShouldNotBeReadOnly")] // Slots [0, Interpreter.LocalCount) hold locals; the evaluation stack grows above them. public readonly object[] Data; [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2105:ArrayFieldsShouldNotBeReadOnly")] public readonly IStrongBox[] Closure; // Index of the first free evaluation-stack slot in Data. public int StackIndex; public int InstructionIndex; #if FEATURE_THREAD_ABORT // When a ThreadAbortException is raised from interpreted code this is the first frame that caught it. // No handlers within this handler re-abort the current thread when left. public ExceptionHandler CurrentAbortHandler; #endif internal InterpretedFrame(Interpreter interpreter, IStrongBox[] closure) { Interpreter = interpreter; // The stack starts immediately after the locals area. StackIndex = interpreter.LocalCount; Data = new object[StackIndex + interpreter.Instructions.MaxStackDepth]; int c = interpreter.Instructions.MaxContinuationDepth; if (c > 0) { _continuations = new int[c]; } Closure = closure; _pendingContinuation = -1; _pendingValue = Interpreter.NoValue; } /// <summary>Finds the debug info (sequence point) matching an instruction index.</summary> public DebugInfo GetDebugInfo(int instructionIndex) { return DebugInfo.GetMatchingDebugInfo(Interpreter._debugInfos, instructionIndex); } public string Name { get { return Interpreter._name; } } #region Data Stack Operations public void Push(object value) { Data[StackIndex++] = value; } public void Push(bool value) { // Use the cached boxed booleans to avoid allocating on every push. Data[StackIndex++] = value ? ScriptingRuntimeHelpers.True : ScriptingRuntimeHelpers.False; } public void Push(int value) { // Int32ToObject returns cached boxes where available. Data[StackIndex++] = ScriptingRuntimeHelpers.Int32ToObject(value); } public void Push(byte value) { Data[StackIndex++] = value; } public void Push(sbyte value) { Data[StackIndex++] = value; } public void Push(Int16 value) { Data[StackIndex++] = value; } public void Push(UInt16 value) { Data[StackIndex++] = value; } public object Pop() { return Data[--StackIndex]; } // Sets the evaluation-stack depth relative to the locals area. internal void SetStackDepth(int depth) { StackIndex = Interpreter.LocalCount + depth; } public object Peek() { return Data[StackIndex - 1]; } public void Dup() { int i = StackIndex; Data[i] = Data[i - 1]; StackIndex = i + 1; } #endregion #region Stack Trace public InterpretedFrame Parent { get { return _parent; } } // True when the method is the interpreter's dispatch loop (Interpreter.Run), // i.e. a CLR frame that stands in for interpreted code. public static bool IsInterpretedFrame(MethodBase method) { //ContractUtils.RequiresNotNull(method, "method"); return method.DeclaringType == typeof(Interpreter) && method.Name == "Run"; } /// <summary>Walks this frame and its parents, yielding name + debug info for each.</summary> public IEnumerable<InterpretedFrameInfo> GetStackTraceDebugInfo() { var frame = this; do { yield return new InterpretedFrameInfo(frame.Name, frame.GetDebugInfo(frame.InstructionIndex)); frame = frame.Parent; } while (frame != null); } // Stores the interpreted stack trace into exception.Data so it survives unwinding; // only the first (innermost) frame to observe the exception records it. internal void SaveTraceToException(Exception exception) { if (exception.Data[typeof(InterpretedFrameInfo)] == null) { exception.Data[typeof(InterpretedFrameInfo)] = new List<InterpretedFrameInfo>(GetStackTraceDebugInfo()).ToArray(); } } public static InterpretedFrameInfo[] GetExceptionStackTrace(Exception exception) { return exception.Data[typeof(InterpretedFrameInfo)] as InterpretedFrameInfo[]; } #if DEBUG // Debug-only view of the frame chain as a list of frame names. internal string[] Trace { get { var trace = new List<string>(); var frame = this; do { trace.Add(frame.Name); frame = frame.Parent; } while (frame != null); return trace.ToArray(); } } #endif // Makes this frame the thread's current frame and returns the previous one, // which the caller must pass back to Leave(). internal InterpretedFrame Enter() { var currentFrame = CurrentFrame; CurrentFrame = this; return _parent = currentFrame; } internal void Leave(InterpretedFrame prevFrame) { CurrentFrame = prevFrame; } #endregion #region Continuations internal bool IsJumpHappened() { return _pendingContinuation >= 0; } public void RemoveContinuation() { _continuationIndex--; } public void PushContinuation(int continuation) { _continuations[_continuationIndex++] = continuation; } // Jumps to the innermost active continuation; returns the offset to add to InstructionIndex. public int YieldToCurrentContinuation() { var target = Interpreter._labels[_continuations[_continuationIndex - 1]]; SetStackDepth(target.StackDepth); return target.Index - InstructionIndex; } /// <summary> /// Get called from the LeaveFinallyInstruction /// </summary> public int YieldToPendingContinuation() { Debug.Assert(_pendingContinuation >= 0); RuntimeLabel pendingTarget = Interpreter._labels[_pendingContinuation]; // the current continuation might have higher priority (continuationIndex is the depth of the current continuation): if (pendingTarget.ContinuationStackDepth < _continuationIndex) { RuntimeLabel currentTarget = Interpreter._labels[_continuations[_continuationIndex - 1]]; SetStackDepth(currentTarget.StackDepth); return currentTarget.Index - InstructionIndex; } SetStackDepth(pendingTarget.StackDepth); if (_pendingValue != Interpreter.NoValue) { Data[StackIndex - 1] = _pendingValue; } // Reset _pendingContinuation and _pendingValue to the default values once we finally reach the Goto target. _pendingContinuation = -1; _pendingValue = Interpreter.NoValue; return pendingTarget.Index - InstructionIndex; } // Saves the pending jump state onto the data stack (and clears it), so a nested // region can run without clobbering it; restored by PopPendingContinuation. internal void PushPendingContinuation() { Push(_pendingContinuation); Push(_pendingValue); _pendingContinuation = -1; _pendingValue = Interpreter.NoValue; } // Restores jump state previously saved by PushPendingContinuation (reverse order of pushes). internal void PopPendingContinuation() { _pendingValue = Pop(); _pendingContinuation = (int)Pop(); } private static MethodInfo s_goto; private static MethodInfo s_voidGoto; // Lazily cached reflection handles for Goto/VoidGoto, used when emitting jumps. internal static MethodInfo GotoMethod { get { return s_goto ?? (s_goto = typeof(InterpretedFrame).GetMethod("Goto")); } } internal static MethodInfo VoidGotoMethod { get { return s_voidGoto ?? (s_voidGoto = typeof(InterpretedFrame).GetMethod("VoidGoto")); } } public int VoidGoto(int labelIndex) { return Goto(labelIndex, Interpreter.NoValue, gotoExceptionHandler: false); } // Jumps to the given label, first yielding to any intervening finally blocks; // returns the offset to add to InstructionIndex. public int Goto(int labelIndex, object value, bool gotoExceptionHandler) { // TODO: we know this at compile time (except for compiled loop): RuntimeLabel target = Interpreter._labels[labelIndex]; Debug.Assert(!gotoExceptionHandler || (gotoExceptionHandler && _continuationIndex == target.ContinuationStackDepth), "When it's time to jump to the exception handler, all previous finally blocks should already be processed"); if (_continuationIndex == target.ContinuationStackDepth) { SetStackDepth(target.StackDepth); if (value != Interpreter.NoValue) { Data[StackIndex - 1] = value; } return target.Index - InstructionIndex; } // if we are in the middle of executing jump we forget the previous target and replace it by a new one: _pendingContinuation = labelIndex; _pendingValue = value; return YieldToCurrentContinuation(); } #endregion } } 
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// LongRangePartitionerTests.cs - Tests for range partitioner for long integer range.
//
// PLEASE NOTE !! - For tests that need to iterate the elements inside the partitions more
// than once, we need to call GetPartitions for the second time. Iterating a second times
// over the first enumerable<tuples> / IList<IEnumerator<tuples> will yield no elements
//
// PLEASE NOTE!! - we use lazy evaluation wherever possible to allow for more than Int32.MaxValue
// elements. ToArray / toList will result in an OOM
//
// Taken from dev11 branch:
// \qa\clr\testsrc\pfx\Functional\Common\Partitioner\YetiTests\RangePartitioner\LongRangePartitionerTests.cs
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using Xunit;

namespace System.Collections.Concurrent.Tests
{
    public class LongRangePartitionerTests
    {
        /// <summary>
        /// Ensure that the partitioner returned has properties set correctly
        /// </summary>
        [Fact]
        public static void CheckKeyProperties()
        {
            var partitioner = Partitioner.Create(0, 9223372036854774807);
            Assert.True(partitioner.KeysOrderedInEachPartition, "Expected KeysOrderedInEachPartition to be set to true");
            Assert.False(partitioner.KeysOrderedAcrossPartitions, "KeysOrderedAcrossPartitions to be set to false");
            Assert.True(partitioner.KeysNormalized, "Expected KeysNormalized to be set to true");

            partitioner = Partitioner.Create(0, 9223372036854774807, 90);
            Assert.True(partitioner.KeysOrderedInEachPartition, "Expected KeysOrderedInEachPartition to be set to true");
            Assert.False(partitioner.KeysOrderedAcrossPartitions, "KeysOrderedAcrossPartitions to be set to false");
            Assert.True(partitioner.KeysNormalized, "Expected KeysNormalized to be set to true");
        }

        /// <summary>
        /// GetPartitions returns an IList<IEnumerator<Tuple<long, long>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// </summary>
        [Fact]
        public static void CheckGetPartitions()
        {
            CheckGetPartitions(0, 1, 1);
            CheckGetPartitions(1, 1999, 3);
            CheckGetPartitions(2147473647, 9999, 4);
            CheckGetPartitions(2147484647, 1000, 8);
            CheckGetPartitions(-2147484647, 1000, 16);
            CheckGetPartitions(-1999, 5000, 63);
            CheckGetPartitions(9223372036854774807, 999, 13); // close to Int64.Max
        }

        // Partitions [from, from+count) across 'dop' partitions and verifies the
        // flattened elements equal the original range.
        public static void CheckGetPartitions(long from, long count, int dop)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to);

            //var elements = partitioner.GetPartitions(dop).SelectMany(enumerator => enumerator.UnRoll());
            IList<long> elements = new List<long>();
            foreach (var partition in partitioner.GetPartitions(dop))
            {
                foreach (var item in partition.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetPartitions element mismatch");
        }

        /// <summary>
        /// CheckGetDynamicPartitions returns an IEnumerable<Tuple<long, long>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// </summary>
        [Fact]
        public static void CheckGetDynamicPartitions()
        {
            CheckGetDynamicPartitions(0, 1);
            CheckGetDynamicPartitions(1, 1999);
            CheckGetDynamicPartitions(2147473647, 9999);
            CheckGetDynamicPartitions(2147484647, 1000);
            CheckGetDynamicPartitions(-2147484647, 1000);
            CheckGetDynamicPartitions(-1999, 5000);
            CheckGetDynamicPartitions(9223372036854774807, 999); // close to Int64.Max
        }

        // Dynamic-partitioning analogue of CheckGetPartitions.
        public static void CheckGetDynamicPartitions(long from, long count)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to);

            //var elements = partitioner.GetDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
            IList<long> elements = new List<long>();
            foreach (var partition in partitioner.GetDynamicPartitions())
            {
                foreach (var item in partition.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetDynamicPartitions Element mismatch");
        }

        /// <summary>
        /// GetOrderablePartitions returns an IList<IEnumerator<KeyValuePair<long, Tuple<long, long>>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// Also the indices are extracted to ensure that they are ordered & normalized
        /// </summary>
        [Fact]
        public static void CheckGetOrderablePartitions()
        {
            CheckGetOrderablePartitions(0, 1, 1);
            CheckGetOrderablePartitions(1, 1999, 3);
            CheckGetOrderablePartitions(2147473647, 9999, 4);
            CheckGetOrderablePartitions(2147484647, 1000, 8);
            CheckGetOrderablePartitions(-2147484647, 1000, 16);
            CheckGetOrderablePartitions(-1999, 5000, 63);
            CheckGetOrderablePartitions(9223372036854774807, 999, 13); // close to Int64.Max
        }

        // Verifies both the elements and the ordering keys of the orderable partitions.
        public static void CheckGetOrderablePartitions(long from, long count, int dop)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to);

            //var elements = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRoll());
            IList<long> elements = new List<long>();
            // BUGFIX: this loop previously iterated partitioner.GetPartitions(dop),
            // so the elements check never exercised the orderable API this test is
            // named for (compare the keys loop below and the LINQ comment above).
            foreach (var partition in partitioner.GetOrderablePartitions(dop))
            {
                foreach (var item in partition.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetOrderablePartitions Element mismatch");

            //var keys = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRollIndices()).ToArray();
            IList<long> keys = new List<long>();
            foreach (var partition in partitioner.GetOrderablePartitions(dop))
            {
                foreach (var item in partition.UnRollIndices())
                    keys.Add(item);
            }
            Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderablePartitions key mismatch");
        }

        /// <summary>
        /// GetOrderableDynamicPartitions returns an IEnumerable<KeyValuePair<long, Tuple<long, long>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// Also the indices are extracted to ensure that they are ordered & normalized
        /// </summary>
        /// <param name="from"></param>
        /// <param name="count"></param>
        [Fact]
        public static void GetOrderableDynamicPartitions()
        {
            GetOrderableDynamicPartitions(0, 1);
            GetOrderableDynamicPartitions(1, 1999);
            GetOrderableDynamicPartitions(2147473647, 9999);
            GetOrderableDynamicPartitions(2147484647, 1000);
            GetOrderableDynamicPartitions(-2147484647, 1000);
            GetOrderableDynamicPartitions(-1999, 5000);
            GetOrderableDynamicPartitions(9223372036854774807, 999); // close to Int64.Max
        }

        // Verifies both the elements and the ordering keys of the orderable dynamic partitions.
        public static void GetOrderableDynamicPartitions(long from, long count)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to);

            //var elements = partitioner.GetOrderableDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
            IList<long> elements = new List<long>();
            foreach (var partition in partitioner.GetOrderableDynamicPartitions())
            {
                foreach (var item in partition.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetOrderableDynamicPartitions Element mismatch");

            //var keys = partitioner.GetOrderableDynamicPartitions().Select(tuple => tuple.Key).ToArray();
            IList<long> keys = new List<long>();
            foreach (var tuple in partitioner.GetOrderableDynamicPartitions())
            {
                keys.Add(tuple.Key);
            }
            Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderableDynamicPartitions key mismatch");
        }

        /// <summary>
        /// GetPartitions returns an IList<IEnumerator<Tuple<long, long>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// This method tests the partitioner created with user provided desiredRangeSize
        /// The range sizes for individual ranges are checked to see if they are equal to
        /// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
        /// </summary>
        [Fact]
        public static void CheckGetPartitionsWithRange()
        {
            CheckGetPartitionsWithRange(1999, 1000, 20, 1);
            CheckGetPartitionsWithRange(-1999, 1000, 100, 2);
            CheckGetPartitionsWithRange(1999, 1, 2000, 3);
            CheckGetPartitionsWithRange(9223372036854774807, 999, 600, 4);
            CheckGetPartitionsWithRange(-9223372036854774807, 1000, 19, 63);
        }

        // Verifies elements and per-range sizes for a partitioner built with an
        // explicit desiredRangeSize.
        public static void CheckGetPartitionsWithRange(long from, long count, long desiredRangeSize, int dop)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to, desiredRangeSize);

            //var elements = partitioner.GetPartitions(dop).SelectMany(enumerator => enumerator.UnRoll());
            IList<long> elements = new List<long>();
            foreach (var partition in partitioner.GetPartitions(dop))
            {
                foreach (var item in partition.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetPartitions element mismatch");

            //var rangeSizes = partitioner.GetPartitions(dop).SelectMany(enumerator => enumerator.GetRangeSize()).ToArray();
            IList<long> rangeSizes = new List<long>();
            foreach (var partition in partitioner.GetPartitions(dop))
            {
                foreach (var item in partition.GetRangeSize())
                    rangeSizes.Add(item);
            }
            ValidateRangeSize(desiredRangeSize, rangeSizes);
        }

        /// <summary>
        /// CheckGetDynamicPartitionsWithRange returns an IEnumerable<Tuple<long, long>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// This method tests the partitioner created with user provided desiredRangeSize
        /// The range sizes for individual ranges are checked to see if they are equal to
        /// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
        /// </summary>
        [Fact]
        public static void CheckGetDynamicPartitionsWithRange()
        {
            CheckGetDynamicPartitionsWithRange(1999, 1000, 20);
            CheckGetDynamicPartitionsWithRange(-1999, 1000, 100);
            CheckGetDynamicPartitionsWithRange(1999, 1, 2000);
            CheckGetDynamicPartitionsWithRange(9223372036854774807, 999, 600);
            CheckGetDynamicPartitionsWithRange(-9223372036854774807, 1000, 19);
        }

        // Dynamic-partitioning analogue of CheckGetPartitionsWithRange.
        public static void CheckGetDynamicPartitionsWithRange(long from, long count, long desiredRangeSize)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to, desiredRangeSize);

            //var elements = partitioner.GetDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
            IList<long> elements = new List<long>();
            foreach (var partition in partitioner.GetDynamicPartitions())
            {
                foreach (var item in partition.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetDynamicPartitions Element mismatch");

            //var rangeSizes = partitioner.GetDynamicPartitions().Select(tuple => tuple.GetRangeSize()).ToArray();
            IList<long> rangeSizes = new List<long>();
            foreach (var partition in partitioner.GetDynamicPartitions())
            {
                rangeSizes.Add(partition.GetRangeSize());
            }
            ValidateRangeSize(desiredRangeSize, rangeSizes);
        }

        /// <summary>
        /// GetOrderablePartitions returns an IList<IEnumerator<KeyValuePair<long, Tuple<long, long>>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// Also the indices are extracted to ensure that they are ordered & normalized
        /// This method tests the partitioner created with user provided desiredRangeSize
        /// The range sizes for individual ranges are checked to see if they are equal to
        /// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
        /// </summary>
        [Fact]
        public static void CheckGetOrderablePartitionsWithRange()
        {
            CheckGetOrderablePartitionsWithRange(1999, 1000, 20, 1);
            CheckGetOrderablePartitionsWithRange(-1999, 1000, 100, 2);
            CheckGetOrderablePartitionsWithRange(1999, 1, 2000, 3);
            CheckGetOrderablePartitionsWithRange(9223372036854774807, 999, 600, 4);
            CheckGetOrderablePartitionsWithRange(-9223372036854774807, 1000, 19, 63);
        }

        // Verifies elements, keys and per-range sizes for the orderable API with an
        // explicit desiredRangeSize.
        public static void CheckGetOrderablePartitionsWithRange(long from, long count, long desiredRangeSize, int dop)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to, desiredRangeSize);

            //var elements = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRoll());
            IList<long> elements = new List<long>();
            foreach (var partition in partitioner.GetOrderablePartitions(dop))
            {
                foreach (var item in partition.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetOrderablePartitions Element mismatch");

            //var keys = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.UnRollIndices()).ToArray();
            IList<long> keys = new List<long>();
            foreach (var partition in partitioner.GetOrderablePartitions(dop))
            {
                foreach (var item in partition.UnRollIndices())
                    keys.Add(item);
            }
            Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderablePartitions key mismatch");

            //var rangeSizes = partitioner.GetOrderablePartitions(dop).SelectMany(enumerator => enumerator.GetRangeSize()).ToArray();
            IList<long> rangeSizes = new List<long>();
            foreach (var partition in partitioner.GetOrderablePartitions(dop))
            {
                foreach (var item in partition.GetRangeSize())
                    rangeSizes.Add(item);
            }
            ValidateRangeSize(desiredRangeSize, rangeSizes);
        }

        /// <summary>
        /// GetOrderableDynamicPartitions returns an IEnumerable<KeyValuePair<long, Tuple<long, long>>
        /// We unroll the tuples and flatten them to a single sequence
        /// The single sequence is compared to the original range for verification
        /// Also the indices are extracted to ensure that they are ordered & normalized
        /// This method tests the partitioner created with user provided desiredRangeSize
        /// The range sizes for individual ranges are checked to see if they are equal to
        /// desiredRangeSize. The last range may have less than or equal to desiredRangeSize.
        /// </summary>
        [Fact]
        public static void GetOrderableDynamicPartitionsWithRange()
        {
            GetOrderableDynamicPartitionsWithRange(1999, 1000, 20);
            GetOrderableDynamicPartitionsWithRange(-1999, 1000, 100);
            GetOrderableDynamicPartitionsWithRange(1999, 1, 2000);
            GetOrderableDynamicPartitionsWithRange(9223372036854774807, 999, 600);
            GetOrderableDynamicPartitionsWithRange(-9223372036854774807, 1000, 19);
        }

        // Verifies elements, keys and per-range sizes for the orderable dynamic API
        // with an explicit desiredRangeSize.
        public static void GetOrderableDynamicPartitionsWithRange(long from, long count, long desiredRangeSize)
        {
            long to = from + count;
            var partitioner = Partitioner.Create(from, to, desiredRangeSize);

            //var elements = partitioner.GetOrderableDynamicPartitions().SelectMany(tuple => tuple.UnRoll());
            IList<long> elements = new List<long>();
            foreach (var tuple in partitioner.GetOrderableDynamicPartitions())
            {
                foreach (var item in tuple.UnRoll())
                    elements.Add(item);
            }
            Assert.True(elements.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(from, to)), "GetOrderableDynamicPartitions Element mismatch");

            //var keys = partitioner.GetOrderableDynamicPartitions().Select(tuple => tuple.Key).ToArray();
            IList<long> keys = new List<long>();
            foreach (var tuple in partitioner.GetOrderableDynamicPartitions())
            {
                keys.Add(tuple.Key);
            }
            Assert.True(keys.CompareSequences<long>(RangePartitionerHelpers.LongEnumerable(keys[0], keys.Count)), "GetOrderableDynamicPartitions key mismatch");

            //var rangeSizes = partitioner.GetOrderableDynamicPartitions().Select(tuple => tuple.GetRangeSize()).ToArray();
            IList<long> rangeSizes = new List<long>();
            foreach (var partition in partitioner.GetOrderableDynamicPartitions())
            {
                rangeSizes.Add(partition.GetRangeSize());
            }
            ValidateRangeSize(desiredRangeSize, rangeSizes);
        }

        /// <summary>
        /// Helper function to validate the range size of the partitioners match what the user specified
        /// (desiredRangeSize).
        /// The last range may have less than or equal to desiredRangeSize.
        /// </summary>
        /// <param name="desiredRangeSize"></param>
        /// <param name="rangeSizes"></param>
        /// <returns></returns>
        public static void ValidateRangeSize(long desiredRangeSize, IList<long> rangeSizes)
        {
            //var rangesWithDifferentRangeSize = rangeSizes.Take(rangeSizes.Length - 1).Where(r => r != desiredRangeSize).ToArray();
            IList<long> rangesWithDifferentRangeSize = new List<long>();
            // ensure that every range except the last one has exactly the desired size
            int numToTake = rangeSizes.Count - 1;
            for (int i = 0; i < numToTake; i++)
            {
                long range = rangeSizes[i];
                if (range != desiredRangeSize)
                    rangesWithDifferentRangeSize.Add(range);
            }
            Assert.Equal(0, rangesWithDifferentRangeSize.Count);
            Assert.InRange(rangeSizes[rangeSizes.Count - 1], 0, desiredRangeSize);
        }

        /// <summary>
        /// Ensure that the range partitioner doesn't chunk up elements i.e. uses chunk size = 1
        /// </summary>
        [Fact]
        public static void RangePartitionerChunking()
        {
            RangePartitionerChunking(2147473647, 9999, 4);
            RangePartitionerChunking(2147484647, 1000, -1);
        }

        // Alternates two static partitions and asserts the ranges arrive strictly in
        // sequence (proving the partitioner hands out one range at a time, chunk size 1).
        // rangeSize == -1 means "use the partitioner's default range size".
        public static void RangePartitionerChunking(long from, long count, long rangeSize)
        {
            long to = from + count;

            var partitioner = (rangeSize == -1) ? Partitioner.Create(from, to) : Partitioner.Create(from, to, rangeSize);

            // Check static partitions
            var partitions = partitioner.GetPartitions(2);

            // Initialize the from / to values from the first element
            if (!partitions[0].MoveNext()) return;
            Assert.Equal(from, partitions[0].Current.Item1);
            if (rangeSize == -1)
            {
                rangeSize = partitions[0].Current.Item2 - partitions[0].Current.Item1;
            }

            long nextExpectedFrom = partitions[0].Current.Item2;
            long nextExpectedTo = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);

            // Ensure that each partition gets one range only
            // we check this by alternating partitions asking for elements and make sure
            // that we get ranges in a sequence. If chunking were to happen then we wouldn't see a sequence
            long actualCount = partitions[0].Current.Item2 - partitions[0].Current.Item1;
            while (true)
            {
                if (!partitions[0].MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partitions[0].Current.Item1);
                Assert.Equal(nextExpectedTo, partitions[0].Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partitions[0].Current.Item2 - partitions[0].Current.Item1;

                if (!partitions[1].MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partitions[1].Current.Item1);
                Assert.Equal(nextExpectedTo, partitions[1].Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partitions[1].Current.Item2 - partitions[1].Current.Item1;

                if (!partitions[1].MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partitions[1].Current.Item1);
                Assert.Equal(nextExpectedTo, partitions[1].Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partitions[1].Current.Item2 - partitions[1].Current.Item1;

                if (!partitions[0].MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partitions[0].Current.Item1);
                Assert.Equal(nextExpectedTo, partitions[0].Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partitions[0].Current.Item2 - partitions[0].Current.Item1;
            }

            // Verifying that all items are there
            Assert.Equal(count, actualCount);
        }

        /// <summary>
        /// Ensure that the range partitioner doesn't chunk up elements i.e. uses chunk size = 1
        /// </summary>
        [Fact]
        public static void RangePartitionerDynamicChunking()
        {
            RangePartitionerDynamicChunking(2147473647, 9999, 4);
            RangePartitionerDynamicChunking(2147484647, 1000, -1);
        }

        // Dynamic-partitioning analogue of RangePartitionerChunking.
        public static void RangePartitionerDynamicChunking(long from, long count, long rangeSize)
        {
            long to = from + count;

            var partitioner = (rangeSize == -1) ? Partitioner.Create(from, to) : Partitioner.Create(from, to, rangeSize);

            // Check static partitions
            var partitions = partitioner.GetDynamicPartitions();
            var partition1 = partitions.GetEnumerator();
            var partition2 = partitions.GetEnumerator();

            // Initialize the from / to values from the first element
            if (!partition1.MoveNext()) return;
            Assert.Equal(from, partition1.Current.Item1);
            if (rangeSize == -1)
            {
                rangeSize = partition1.Current.Item2 - partition1.Current.Item1;
            }

            long nextExpectedFrom = partition1.Current.Item2;
            long nextExpectedTo = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);

            // Ensure that each partition gets one range only
            // we check this by alternating partitions asking for elements and make sure
            // that we get ranges in a sequence. If chunking were to happen then we wouldn't see a sequence
            long actualCount = partition1.Current.Item2 - partition1.Current.Item1;
            while (true)
            {
                if (!partition1.MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partition1.Current.Item1);
                Assert.Equal(nextExpectedTo, partition1.Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partition1.Current.Item2 - partition1.Current.Item1;

                if (!partition2.MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partition2.Current.Item1);
                Assert.Equal(nextExpectedTo, partition2.Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partition2.Current.Item2 - partition2.Current.Item1;

                if (!partition2.MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partition2.Current.Item1);
                Assert.Equal(nextExpectedTo, partition2.Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partition2.Current.Item2 - partition2.Current.Item1;

                if (!partition1.MoveNext()) break;
                Assert.Equal(nextExpectedFrom, partition1.Current.Item1);
                Assert.Equal(nextExpectedTo, partition1.Current.Item2);
                nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
                nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
                actualCount += partition1.Current.Item2 - partition1.Current.Item1;
            }

            // Verifying that all items are there
            Assert.Equal(count, actualCount);
        }
    }
}
// NOTE(review): This entire class is the output of an automatic C++-to-C# converter
// run over libgd's tests/xpm/bug00166.c plus its transitively included headers
// (gd.h, gd_io.h, gdfx.h, gdtest.h). The converter stripped every macro body and
// left bare `#define NAME` directives; in C# these are only legal before the first
// token of a file, so this block does not compile as-is and is preserved verbatim
// for reference. Only comments have been added below — no tokens were changed.
public static class GlobalMembersBug00166
{

/* $Id$ */

#if __cplusplus
#endif

#define GD_H
#define GD_MAJOR_VERSION
#define GD_MINOR_VERSION
#define GD_RELEASE_VERSION
#define GD_EXTRA_VERSION
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdXXX_VERSION_STR(mjr, mnr, rev, ext) mjr "." mnr "." rev ext
#define GDXXX_VERSION_STR
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdXXX_STR(s) gd.gdXXX_SSTR(s)
#define GDXXX_STR
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdXXX_SSTR(s) #s
#define GDXXX_SSTR
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define GD_VERSION_STRING "GD_MAJOR_VERSION" "." "GD_MINOR_VERSION" "." "GD_RELEASE_VERSION" GD_EXTRA_VERSION
#define GD_VERSION_STRING
// Export/calling-convention plumbing carried over from gd.h's Windows/Unix split.
#if _WIN32 || CYGWIN || _WIN32_WCE
#if BGDWIN32
#if NONDLL
#define BGD_EXPORT_DATA_PROT
#else
#if __GNUC__
#define BGD_EXPORT_DATA_PROT
#else
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_EXPORT_DATA_PROT __declspec(dllexport)
#define BGD_EXPORT_DATA_PROT
#endif
#endif
#else
#if __GNUC__
#define BGD_EXPORT_DATA_PROT
#else
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_EXPORT_DATA_PROT __declspec(dllimport)
#define BGD_EXPORT_DATA_PROT
#endif
#endif
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_STDCALL __stdcall
#define BGD_STDCALL
#define BGD_EXPORT_DATA_IMPL
#else
#if HAVE_VISIBILITY
#define BGD_EXPORT_DATA_PROT
#define BGD_EXPORT_DATA_IMPL
#else
#define BGD_EXPORT_DATA_PROT
#define BGD_EXPORT_DATA_IMPL
#endif
#define BGD_STDCALL
#endif
// BGD_DECLARE expansion matrix generated by the converter for each combination of
// export-protection and calling-convention settings.
#if BGD_EXPORT_DATA_PROT_ConditionalDefinition1
#if BGD_STDCALL_ConditionalDefinition1
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) rt __stdcall
#define BGD_DECLARE
#elif BGD_STDCALL_ConditionalDefinition2
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) rt
#define BGD_DECLARE
#else
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) rt BGD_STDCALLTangibleTempImmunity
#define BGD_DECLARE
#endif
#elif BGD_EXPORT_DATA_PROT_ConditionalDefinition2
#if BGD_STDCALL_ConditionalDefinition1
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) rt __stdcall
#define BGD_DECLARE
#elif BGD_STDCALL_ConditionalDefinition2
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) rt
#define BGD_DECLARE
#else
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) rt BGD_STDCALLTangibleTempImmunity
#define BGD_DECLARE
#endif
#elif BGD_EXPORT_DATA_PROT_ConditionalDefinition3
#if BGD_STDCALL_ConditionalDefinition1
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) __declspec(dllexport) rt __stdcall
#define BGD_DECLARE
#elif BGD_STDCALL_ConditionalDefinition2
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) __declspec(dllexport) rt
#define BGD_DECLARE
#else
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) __declspec(dllexport) rt BGD_STDCALLTangibleTempImmunity
#define BGD_DECLARE
#endif
#elif BGD_EXPORT_DATA_PROT_ConditionalDefinition4
#if BGD_STDCALL_ConditionalDefinition1
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) __declspec(dllimport) rt __stdcall
#define BGD_DECLARE
#elif BGD_STDCALL_ConditionalDefinition2
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) __declspec(dllimport) rt
#define BGD_DECLARE
#else
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) __declspec(dllimport) rt BGD_STDCALLTangibleTempImmunity
#define BGD_DECLARE
#endif
#else
#if BGD_STDCALL_ConditionalDefinition1
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) BGD_EXPORT_DATA_PROTTangibleTempImmunity rt __stdcall
#define BGD_DECLARE
#elif BGD_STDCALL_ConditionalDefinition2
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define BGD_DECLARE (rt) BGD_EXPORT_DATA_PROTTangibleTempImmunity rt
#define BGD_DECLARE
#else
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define Bgd.gd_DECLARE(rt) BGD_EXPORT_DATA_PROTTangibleTempImmunity rt BGD_STDCALLTangibleTempImmunity
#define BGD_DECLARE
#endif
#endif
#if __cplusplus
#endif
#if __cplusplus
#endif
#define GD_IO_H
#if VMS
#endif
#if __cplusplus
#endif
// Constants and accessor macros from gd.h (bodies stripped by the converter).
#define gdMaxColors
#define gdAlphaMax
#define gdAlphaOpaque
#define gdAlphaTransparent
#define gdRedMax
#define gdGreenMax
#define gdBlueMax
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTrueColorGetAlpha(c) (((c) & 0x7F000000) >> 24)
#define gdTrueColorGetAlpha
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTrueColorGetRed(c) (((c) & 0xFF0000) >> 16)
#define gdTrueColorGetRed
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTrueColorGetGreen(c) (((c) & 0x00FF00) >> 8)
#define gdTrueColorGetGreen
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTrueColorGetBlue(c) ((c) & 0x0000FF)
#define gdTrueColorGetBlue
#define gdEffectReplace
#define gdEffectAlphaBlend
#define gdEffectNormal
#define gdEffectOverlay
#define GD_TRUE
#define GD_FALSE
#define GD_EPSILON
#define M_PI
#define gdDashSize
#define gdStyled
#define gdBrushed
#define gdStyledBrushed
#define gdTiled
#define gdTransparent
#define gdAntiAliased
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gdImageCreatePalette gdImageCreate
#define gdImageCreatePalette
#define gdFTEX_LINESPACE
#define gdFTEX_CHARMAP
#define gdFTEX_RESOLUTION
#define gdFTEX_DISABLE_KERNING
#define gdFTEX_XSHOW
#define gdFTEX_FONTPATHNAME
#define gdFTEX_FONTCONFIG
#define gdFTEX_RETURNFONTPATHNAME
#define gdFTEX_Unicode
#define gdFTEX_Shift_JIS
#define gdFTEX_Big5
#define gdFTEX_Adobe_Custom
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTrueColor(r, g, b) (((r) << 16) + ((g) << 8) + (b))
#define gdTrueColor
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTrueColorAlpha(r, g, b, a) (((a) << 24) + ((r) << 16) + ((g) << 8) + (b))
#define gdTrueColorAlpha
#define gdArc
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gdPie gdArc
#define gdPie
#define gdChord
#define gdNoFill
#define gdEdged
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageTrueColor(im) ((im)->trueColor)
#define gdImageTrueColor
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageSX(im) ((im)->sx)
#define gdImageSX
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageSY(im) ((im)->sy)
#define gdImageSY
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageColorsTotal(im) ((im)->colorsTotal)
#define gdImageColorsTotal
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageRed(im, c) ((im)->trueColor ? (((c) & 0xFF0000) >> 16) : (im)->red[(c)])
#define gdImageRed
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageGreen(im, c) ((im)->trueColor ? (((c) & 0x00FF00) >> 8) : (im)->green[(c)])
#define gdImageGreen
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageBlue(im, c) ((im)->trueColor ? ((c) & 0x0000FF) : (im)->blue[(c)])
#define gdImageBlue
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageAlpha(im, c) ((im)->trueColor ? (((c) & 0x7F000000) >> 24) : (im)->alpha[(c)])
#define gdImageAlpha
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageGetTransparent(im) ((im)->transparent)
#define gdImageGetTransparent
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageGetInterlaced(im) ((im)->interlace)
#define gdImageGetInterlaced
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImagePalettePixel(im, x, y) (im)->pixels[(y)][(x)]
#define gdImagePalettePixel
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageTrueColorPixel(im, x, y) (im)->tpixels[(y)][(x)]
#define gdImageTrueColorPixel
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageResolutionX(im) (im)->res_x
#define gdImageResolutionX
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdImageResolutionY(im) (im)->res_y
#define gdImageResolutionY
#define GD2_CHUNKSIZE
#define GD2_CHUNKSIZE_MIN
#define GD2_CHUNKSIZE_MAX
#define GD2_VERS
#define GD2_ID
#define GD2_FMT_RAW
#define GD2_FMT_COMPRESSED
#define GD_FLIP_HORINZONTAL
#define GD_FLIP_VERTICAL
#define GD_FLIP_BOTH
#define GD_CMP_IMAGE
#define GD_CMP_NUM_COLORS
#define GD_CMP_COLOR
#define GD_CMP_SIZE_X
#define GD_CMP_SIZE_Y
#define GD_CMP_TRANSPARENT
#define GD_CMP_BACKGROUND
#define GD_CMP_INTERLACE
#define GD_CMP_TRUECOLOR
#define GD_RESOLUTION
#if __cplusplus
#endif
#if __cplusplus
#endif
#define GDFX_H
#if __cplusplus
#endif
#if __cplusplus
#endif
// Test-harness macros from gdtest.h (bodies stripped by the converter).
#define GDTEST_TOP_DIR
#define GDTEST_STRING_MAX
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdAssertImageEqualsToFile(ex,ac) gd.gdTestImageCompareToFile(__FILE__,__LINE__,NULL,(ex),(ac))
#define gdAssertImageEqualsToFile
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdAssertImageFileEqualsMsg(ex,ac) gd.gdTestImageCompareFiles(__FILE__,__LINE__,(ms),(ex),(ac))
#define gdAssertImageFileEqualsMsg
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdAssertImageEquals(tc,ex,ac) CuAssertImageEquals_LineMsg((tc),__FILE__,__LINE__,NULL,(ex),(ac))
#define gdAssertImageEquals
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdAssertImageEqualsMsg(tc,ex,ac) CuAssertImageEquals_LineMsg((tc),__FILE__,__LINE__,(ms),(ex),(ac))
#define gdAssertImageEqualsMsg
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTestAssert(cond) _gd.gdTestAssert(__FILE__, __LINE__, "assert failed in <%s:%i>\n", (cond))
#define gdTestAssert
//C++ TO C# CONVERTER NOTE: The following #define macro was replaced in-line:
//ORIGINAL LINE: #define gd.gdTestErrorMsg(...) _gd.gdTestErrorMsg(__FILE__, __LINE__, __VA_ARGS__)
#define gdTestErrorMsg

// Regression test for libgd bug 166: loads an XPM image and checks that pixel
// (1, 1) decodes to RGB 0xAA/0xBB/0xCC. Returns 0 on success, 1 on color
// mismatch, 2 if the image could not be loaded. The ternaries are the inlined
// expansions of gdImageRed/Green/Blue (see the ORIGINAL LINE comments above).
static int Main()
{
    gdImageStruct im;
    string path = new string(new char[1024]);
    int c;
    int result;

    path = string.Format("{0}/xpm/bug00166.xpm", DefineConstants.GDTEST_TOP_DIR);
    im = gd.gdImageCreateFromXpm(path);
    if (im == null)
    {
        // Load failure is reported distinctly from a color mismatch.
        return 2;
    }

    c = gd.gdImageGetPixel(im, 1, 1);
    // Inlined gdImageRed/gdImageGreen/gdImageBlue: true-color images decode the
    // channel from the packed pixel; palette images index the color tables.
    if (((im).trueColor != 0 ? (((c) & 0xFF0000) >> 16) : (im).red[(c)]) == 0xAA && ((im).trueColor != 0 ? (((c) & 0x00FF00) >> 8) : (im).green[(c)]) == 0xBB && ((im).trueColor != 0 ? ((c) & 0x0000FF) : (im).blue[(c)]) == 0xCC)
    {
        result = 0;
    }
    else
    {
        result = 1;
    }
    gd.gdImageDestroy(im);
    return result;
}
}
namespace OpenRiaServices.DomainServices.Tools.TextTemplate
{
    using System;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.ComponentModel.DataAnnotations;
    using System.Globalization;
    using System.Linq;
    using System.Reflection;
    using System.Runtime.Serialization;
    using OpenRiaServices.DomainServices;
    using OpenRiaServices.DomainServices.Server;
    using OpenRiaServices.DomainServices.Tools;

    /// <summary>
    /// Base class to generate proxy for a data contract type.
    /// </summary>
    public abstract partial class DataContractProxyGenerator
    {
        // Mirrors Type.IsAbstract; set in Initialize().
        internal bool IsAbstract { get; set; }

        List<PropertyDescriptor> _notificationMethodList;

        // Properties for which change-notification partial methods
        // (OnXxxChanging/OnXxxChanged) should be generated. Populated as a
        // side effect of GetPropertiesToGenerate(); lazily created if read
        // before Initialize() runs.
        internal IEnumerable<PropertyDescriptor> NotificationMethodList
        {
            get
            {
                if (this._notificationMethodList == null)
                {
                    this._notificationMethodList = new List<PropertyDescriptor>();
                }
                return this._notificationMethodList;
            }
        }

        /// <summary>
        /// Gets or sets the type for which the proxy is to be generated.
        /// </summary>
        protected Type Type { get; set; }

        /// <summary>
        /// Gets or sets the ClientCodeGenerator object.
        /// </summary>
        protected ClientCodeGenerator ClientCodeGenerator { get; set; }

        /// <summary>
        /// Generates proxy code in a specific language.
        /// </summary>
        /// <returns>Actual code for the proxy.</returns>
        protected abstract string GenerateDataContractProxy();

        // Complex types associated with this contract; used to decide whether
        // a property type will be generated on the client (see
        // GetPropertiesToGenerate). Supplied by derived generators.
        internal abstract IEnumerable<Type> ComplexTypes { get; }

        private IEnumerable<PropertyDescriptor> _properties;

        // Properties selected for generation by GetPropertiesToGenerate();
        // lazily created if read before Initialize() runs.
        internal IEnumerable<PropertyDescriptor> Properties
        {
            get
            {
                if (this._properties == null)
                {
                    this._properties = new List<PropertyDescriptor>();
                }
                return this._properties;
            }
        }

        // Resets per-type state and computes the property/notification lists
        // for the current Type. Must be called before generation starts.
        internal virtual void Initialize()
        {
            this.IsAbstract = this.Type.IsAbstract;
            this._notificationMethodList = new List<PropertyDescriptor>();
            this._properties = this.GetPropertiesToGenerate();
            this.GenerationEnvironment.Clear();
        }

        // Name of the base type the generated proxy should derive from.
        internal abstract string GetBaseTypeName();

        // True when the proxied type derives from another generated type.
        internal abstract bool IsDerivedType { get; }

        // A property is read-only when marked [ReadOnly(true)] or
        // [Editable(false)].
        internal bool IsPropertyReadOnly(PropertyDescriptor property)
        {
            // Here, we continue to respect the [ReadOnly] attribute because TypeDescriptor
            // will materialize this when a property setter is not available.
            ReadOnlyAttribute readOnlyAttr = property.Attributes[typeof(ReadOnlyAttribute)] as ReadOnlyAttribute;
            if (readOnlyAttr != null && readOnlyAttr.IsReadOnly)
            {
                return true;
            }

            EditableAttribute editableAttribute = property.Attributes[typeof(EditableAttribute)] as EditableAttribute;
            if (editableAttribute != null && !editableAttribute.AllowEdit)
            {
                return true;
            }

            return false;
        }

        // Computes the attribute set to emit on a generated property:
        // ensures [DataMember], derives [Editable] from [ReadOnly], hides
        // complex-type members from auto-generated UI, and strips
        // per-property [RoundtripOriginal] when the type carries it.
        internal IEnumerable<Attribute> GetPropertyAttributes(PropertyDescriptor propertyDescriptor, Type propertyType)
        {
            List<Attribute> propertyAttributes = propertyDescriptor.ExplicitAttributes().Cast<Attribute>().ToList();

            if (!propertyAttributes.OfType<DataMemberAttribute>().Any())
            {
                propertyAttributes.Add(new DataMemberAttribute());
            }

            ReadOnlyAttribute readOnlyAttr = propertyAttributes.OfType<ReadOnlyAttribute>().SingleOrDefault();
            if (readOnlyAttr != null && !propertyAttributes.OfType<EditableAttribute>().Any())
            {
                propertyAttributes.Add(new EditableAttribute(!readOnlyAttr.IsReadOnly));
            }

            if (TypeUtility.IsSupportedComplexType(propertyType) && !propertyAttributes.OfType<DisplayAttribute>().Any())
            {
                DisplayAttribute displayAttribute = new DisplayAttribute() { AutoGenerateField = false };
                propertyAttributes.Add(displayAttribute);
            }

            // If the data contract type already contains the RoundtripOriginalAttribute, then we remove the attribute from properties.
            if (this.Type.Attributes()[typeof(RoundtripOriginalAttribute)] != null)
            {
                propertyAttributes.RemoveAll(attr => attr.GetType() == typeof(RoundtripOriginalAttribute));
            }

            return propertyAttributes;
        }

        // Type-level attributes to emit on the proxy, excluding those handled
        // elsewhere ([DataContract], [KnownType]) and indexer-related
        // [DefaultMember] attributes.
        internal IEnumerable<Attribute> GetTypeAttributes()
        {
            AttributeCollection typeAttributes = this.Type.Attributes();
            List<Attribute> filteredAttributes = new List<Attribute>();

            // Ignore DefaultMemberAttribute if it has been put for an indexer
            IEnumerable<Attribute> defaultMemberAttribs = typeAttributes.Cast<Attribute>().Where(a => a.GetType() == typeof(DefaultMemberAttribute));
            if (defaultMemberAttribs.Any())
            {
                HashSet<string> properties = new HashSet<string>(TypeDescriptor.GetProperties(this.Type).Cast<PropertyDescriptor>().Select(p => p.Name), StringComparer.Ordinal);
                foreach (DefaultMemberAttribute attrib in defaultMemberAttribs)
                {
                    if (!properties.Contains(attrib.MemberName))
                    {
                        filteredAttributes.Add(attrib);
                    }
                }
            }

            // Filter out attributes in filteredAttributes as well as DataContractAttribute and KnownTypeAttribute (since they are already handled in GenerateTypeAttributes())
            return typeAttributes.Cast<Attribute>().Where(a => a.GetType() != typeof(DataContractAttribute) && a.GetType() != typeof(KnownTypeAttribute) && !(filteredAttributes.Contains(a)));
        }

        // Selects the properties to generate on the proxy, ordered by name.
        // Also registers enum types with the ClientCodeGenerator and records
        // which properties need change-notification partial methods.
        internal IEnumerable<PropertyDescriptor> GetPropertiesToGenerate()
        {
            IEnumerable<PropertyDescriptor> properties = TypeDescriptor.GetProperties(this.Type)
                .Cast<PropertyDescriptor>()
                .OrderBy(p => p.Name);

            List<PropertyDescriptor> propertiesToGenerate = new List<PropertyDescriptor>();

            foreach (PropertyDescriptor pd in properties)
            {
                if (!this.ShouldDeclareProperty(pd))
                {
                    continue;
                }

                // Generate a property getter/setter pair for every property whose type
                // we support. Non supported property types will be skipped.
                if (this.CanGenerateProperty(pd))
                {
                    // Ensure the property is not virtual, abstract or new
                    // If there is a violation, we log the error and keep
                    // running to accumulate all such errors. This function
                    // may return an "okay" for non-error case polymorphics.
                    if (!this.CanGeneratePropertyIfPolymorphic(pd))
                    {
                        continue;
                    }

                    if (!this.HandleNonSerializableProperty(pd))
                    {
                        Type propType = CodeGenUtilities.TranslateType(pd.PropertyType);
                        List<Type> typesToCodeGen = new List<Type>();
                        bool isTypeSafeToGenerate = true;

                        // Create a list containing the types we will require on the client
                        if (TypeUtility.IsPredefinedDictionaryType(propType))
                        {
                            typesToCodeGen.AddRange(CodeGenUtilities.GetDictionaryGenericArgumentTypes(propType));
                        }
                        else
                        {
                            typesToCodeGen.Add(TypeUtility.GetElementType(propType));
                        }

                        // We consider all predefined types as legal to code-gen *except* those
                        // that would generate a compile error on the client due to missing reference.
                        // We treat "don't know" and "false" as grounds for a warning.
                        // Note that we do this *after* TranslateType so that types like System.Data.Linq.Binary
                        // which cannot exist on the client anyway has been translated
                        foreach (Type type in typesToCodeGen)
                        {
                            // Enum (and nullable<enum>) types may require generation on client
                            Type nonNullableType = TypeUtility.GetNonNullableType(type);
                            if (nonNullableType.IsEnum)
                            {
                                // Register use of this enum type, which could cause deferred generation
                                this.ClientCodeGenerator.AddEnumTypeToGenerate(nonNullableType);
                            }
                            // If this is not an enum or nullable<enum> and we're not generating the complex type, determine whether this
                            // property type is visible to the client. If it is not, log a warning.
                            else if (!this.ComplexTypes.Contains(type))
                            {
                                // "Don't know" counts as "no"
                                CodeMemberShareKind enumShareKind = this.ClientCodeGenerator.GetTypeShareKind(nonNullableType);
                                if ((enumShareKind & CodeMemberShareKind.Shared) == 0)
                                {
                                    this.ClientCodeGenerator.CodeGenerationHost.LogWarning(string.Format(CultureInfo.CurrentCulture,
                                        Resource.ClientCodeGen_PropertyType_Not_Shared,
                                        pd.Name,
                                        this.Type.FullName,
                                        type.FullName,
                                        this.ClientCodeGenerator.ClientProjectName));
                                    isTypeSafeToGenerate = false; // Flag error but continue to allow accumulation of additional errors.
                                }
                            }
                        }

                        if (isTypeSafeToGenerate)
                        {
                            // Generate OnMethodXxChanging/Changed partial methods.
                            this._notificationMethodList.Add(pd);
                            propertiesToGenerate.Add(pd);
                        }
                    }
                }
                else
                {
                    this.OnPropertySkipped(pd);
                }
            }
            return propertiesToGenerate;
        }

        // Hook for derived generators to react to a property that could not
        // be generated; default is a no-op.
        internal virtual void OnPropertySkipped(PropertyDescriptor pd)
        {
        }

        // True when a property is serializable and (for enum properties) the
        // enum type can be exposed on the client. Logs a warning and returns
        // false for non-exposable enum types.
        internal virtual bool CanGenerateProperty(PropertyDescriptor propertyDescriptor)
        {
            Type type = propertyDescriptor.PropertyType;

            // Make sure the member is serializable (based on data contract attributes, [Exclude], type support, etc.).
            if (SerializationUtility.IsSerializableDataMember(propertyDescriptor))
            {
                // If property type is an enum that cannot be generated, we cannot expose this property, but only log a warning
                string errorMessage = null;
                Type enumType = TypeUtility.GetNonNullableType(type);
                if (enumType.IsEnum)
                {
                    if (!this.ClientCodeGenerator.CanExposeEnumType(enumType, out errorMessage))
                    {
                        this.ClientCodeGenerator.CodeGenerationHost.LogWarning(String.Format(CultureInfo.CurrentCulture,
                            Resource.ClientCodeGen_Property_Enum_Error,
                            this.Type,
                            propertyDescriptor.Name,
                            enumType.FullName,
                            errorMessage));
                        return false;
                    }
                    else
                    {
                        return true;
                    }
                }
                return true;
            }
            else
            {
                return false;
            }
        }

        // Known derived types of the proxied type; supplied by derived generators.
        internal abstract IEnumerable<Type> GetDerivedTypes();

        // Polymorphism check hook; default allows generation.
        internal virtual bool CanGeneratePropertyIfPolymorphic(PropertyDescriptor pd)
        {
            return true;
        }

        // Returns true when a non-serializable property has been fully
        // handled by the override (i.e. default generation should be
        // skipped); default is false.
        internal virtual bool HandleNonSerializableProperty(PropertyDescriptor pd)
        {
            return false;
        }

        // A property is declared unless it is [Exclude]d or already visible
        // on the client via a shared partial type.
        internal virtual bool ShouldDeclareProperty(PropertyDescriptor pd)
        {
            AttributeCollection propertyAttributes = pd.ExplicitAttributes();
            if (this.IsExcluded(pd, propertyAttributes))
            {
                // Ignore the [Include] because that's what we do during serialization as well. (We don't want to
                // check for [Exclude] + [Include] everywhere in our code base.)
                return false;
            }

            if (this.IsPropertyShared(pd))
            {
                return false;
            }

            return true;
        }

        private bool IsExcluded(PropertyDescriptor pd, AttributeCollection propertyAttributes)
        {
            // The [Exclude] attribute is a signal simply to omit this property, no matter what
            bool hasExcludeAttr = (propertyAttributes[typeof(ExcludeAttribute)] != null);
            if (hasExcludeAttr)
            {
                // If we also see an [Include], warn the user.
                if (propertyAttributes[typeof(IncludeAttribute)] != null)
                {
                    this.ClientCodeGenerator.CodeGenerationHost.LogWarning(String.Format(CultureInfo.CurrentCulture,
                        Resource.ClientCodeGen_Cannot_Have_Include_And_Exclude,
                        pd.Name,
                        this.Type));
                }
            }
            return hasExcludeAttr;
        }

        internal virtual bool IsPropertyShared(PropertyDescriptor pd)
        {
            // If this property is visible to the client already because of partial types,
            // do not generate it again, or we will get a compile error
            CodeMemberShareKind shareKind = this.ClientCodeGenerator.GetPropertyShareKind(this.Type, pd.Name);
            return ((shareKind & CodeMemberShareKind.Shared) != 0);
        }
    }
}
using System;
using System.Diagnostics;
using System.Linq;
using FluentNHibernate.Conventions.Inspections;
using FluentNHibernate.MappingModel;
using NHibernate.UserTypes;

namespace FluentNHibernate.Conventions.Instances
{
    // Convention-layer mutator for a property mapping. The "new" modifiers
    // shadow the read-only inspector members on PropertyInspector with
    // writers that record values at the Conventions layer.
    public class PropertyInstance : PropertyInspector, IPropertyInstance
    {
        private readonly PropertyMapping mapping;
        // One-shot negation flag driven by the Not property: each mutator
        // consumes the current value and resets the flag to true.
        private bool nextBool = true;
        // All writes from this class are recorded at the Conventions layer.
        const int layer = Layer.Conventions;

        public PropertyInstance(PropertyMapping mapping)
            : base(mapping)
        {
            this.mapping = mapping;
        }

        public new void Insert()
        {
            mapping.Set(x => x.Insert, layer, nextBool);
            nextBool = true;
        }

        public new void Update()
        {
            mapping.Set(x => x.Update, layer, nextBool);
            nextBool = true;
        }

        // ReadOnly() disables both insert and update (hence the inversion of
        // nextBool); Not.ReadOnly() re-enables them.
        public new void ReadOnly()
        {
            mapping.Set(x => x.Insert, layer, !nextBool);
            mapping.Set(x => x.Update, layer, !nextBool);
            nextBool = true;
        }

        // Nullable maps onto the inverse NotNull column flag.
        public new void Nullable()
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.NotNull, layer, !nextBool);
            nextBool = true;
        }

        public new IAccessInstance Access
        {
            get { return new AccessInstance(value => mapping.Set(x => x.Access, layer, value)); }
        }

        public void CustomType(TypeReference type)
        {
            // Use "PropertyName_" as default prefix to avoid breaking existing code
            CustomType(type, Property.Name + "_");
        }

        // Sets the property type; for ICompositeUserType implementations the
        // mapped columns are expanded to one per component property.
        public void CustomType(TypeReference type, string columnPrefix)
        {
            mapping.Set(x => x.Type, layer, type);

            if (typeof(ICompositeUserType).IsAssignableFrom(mapping.Type.GetUnderlyingSystemType()))
                AddColumnsForCompositeUserType(columnPrefix);
        }

        public void CustomType<T>(string columnPrefix)
        {
            CustomType(typeof(T), columnPrefix);
        }

        public void CustomType<T>()
        {
            CustomType(typeof(T));
        }

        public void CustomType(Type type)
        {
            CustomType(new TypeReference(type));
        }

        public void CustomType(Type type, string columnPrefix)
        {
            CustomType(new TypeReference(type), columnPrefix);
        }

        public void CustomType(string type)
        {
            CustomType(new TypeReference(type));
        }

        public void CustomType(string type, string columnPrefix)
        {
            CustomType(new TypeReference(type), columnPrefix);
        }

        public void CustomSqlType(string sqlType)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.SqlType, layer, sqlType);
        }

        public new void Precision(int precision)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.Precision, layer, precision);
        }

        public new void Scale(int scale)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.Scale, layer, scale);
        }

        public new void Default(string value)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.Default, layer, value);
        }

        public new void Unique()
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.Unique, layer, nextBool);
            nextBool = true;
        }

        public new void UniqueKey(string keyName)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.UniqueKey, layer, keyName);
        }

        // Fluent negation: flips nextBool so the next mutator records the
        // opposite value (e.g. Not.Nullable()).
        [DebuggerBrowsable(DebuggerBrowsableState.Never)]
        public IPropertyInstance Not
        {
            get
            {
                nextBool = !nextBool;
                return this;
            }
        }

        // Adds a column with the given name, cloning an existing column (if
        // any) so that its other attributes carry over.
        public void Column(string columnName)
        {
            var originalColumn = mapping.Columns.FirstOrDefault();
            var column = originalColumn == null ? new ColumnMapping() : originalColumn.Clone();

            column.Set(x => x.Name, layer, columnName);

            mapping.AddColumn(Layer.Conventions, column);
        }

        // A formula replaces the column list supplied by the user.
        public new void Formula(string formula)
        {
            mapping.Set(x => x.Formula, layer, formula);
            mapping.MakeColumnsEmpty(Layer.UserSupplied);
        }

        public new IGeneratedInstance Generated
        {
            get { return new GeneratedInstance(value => mapping.Set(x => x.Generated, layer, value)); }
        }

        public new void OptimisticLock()
        {
            mapping.Set(x => x.OptimisticLock, layer, nextBool);
            nextBool = true;
        }

        public new void Length(int length)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.Length, layer, length);
        }

        public new void LazyLoad()
        {
            mapping.Set(x => x.Lazy, layer, nextBool);
            nextBool = true;
        }

        public new void Index(string value)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.Index, layer, value);
        }

        public new void Check(string constraint)
        {
            foreach (var column in mapping.Columns)
                column.Set(x => x.Check, layer, constraint);
        }

        // Expands a multi-property ICompositeUserType into one column per
        // component property, each named columnPrefix + propertyName and
        // cloned from the single user-declared column.
        private void AddColumnsForCompositeUserType(string columnPrefix)
        {
            var inst = (ICompositeUserType)Activator.CreateInstance(mapping.Type.GetUnderlyingSystemType());

            if (inst.PropertyNames.Length > 1)
            {
                var existingColumn = mapping.Columns.Single();
                mapping.MakeColumnsEmpty(Layer.Conventions);

                foreach (var propertyName in inst.PropertyNames)
                {
                    var column = existingColumn.Clone();
                    column.Set(x => x.Name, layer, columnPrefix + propertyName);
                    mapping.AddColumn(Layer.Conventions, column);
                }
            }
        }
    }
}
//-----------------------------------------------------------------------------
// <copyright file="Decoder.cs" company="Dropbox Inc">
//  Copyright (c) Dropbox Inc. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------------

namespace Dropbox.Api.Stone
{
    using System;
    using System.Collections.Generic;
    using System.Globalization;

    /// <summary>
    /// The factory class for decoders.
    /// </summary>
    internal static class Decoder
    {
        /// <summary>
        /// Create an instance of the <see cref="ListDecoder{T}"/> class.
        /// </summary>
        /// <typeparam name="T">The item type.</typeparam>
        /// <param name="itemDecoder">The item decoder.</param>
        /// <returns>The list decoder.</returns>
        public static IDecoder<List<T>> CreateListDecoder<T>(IDecoder<T> itemDecoder)
        {
            return new ListDecoder<T>(itemDecoder);
        }
    }

    /// <summary>
    /// Decoder for nullable struct.
    /// </summary>
    /// <typeparam name="T">Type of the struct.</typeparam>
    internal sealed class NullableDecoder<T> : IDecoder<T?>
        where T : struct
    {
        /// <summary>
        /// The decoder.
        /// </summary>
        private readonly IDecoder<T> decoder;

        /// <summary>
        /// Initializes a new instance of the <see cref="NullableDecoder{T}"/> class.
        /// </summary>
        /// <param name="decoder">The decoder.</param>
        public NullableDecoder(IDecoder<T> decoder)
        {
            this.decoder = decoder;
        }

        /// <summary>
        /// The decode. Returns null when the current token is a json null;
        /// otherwise delegates to the wrapped decoder.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public T? Decode(IJsonReader reader)
        {
            if (reader.IsNull)
            {
                reader.Read();
                return null;
            }

            return this.decoder.Decode(reader);
        }
    }

    /// <summary>
    /// Decoder for Int32.
    /// </summary>
    internal sealed class Int32Decoder : IDecoder<int>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<int> Instance = new Int32Decoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<int?> NullableInstance = new NullableDecoder<int>(Instance);

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public int Decode(IJsonReader reader)
        {
            return reader.ReadInt32();
        }
    }

    /// <summary>
    /// Decoder for Int64.
    /// </summary>
    internal sealed class Int64Decoder : IDecoder<long>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<long> Instance = new Int64Decoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<long?> NullableInstance = new NullableDecoder<long>(Instance);

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public long Decode(IJsonReader reader)
        {
            return reader.ReadInt64();
        }
    }

    /// <summary>
    /// Decoder for UInt32.
    /// </summary>
    internal sealed class UInt32Decoder : IDecoder<uint>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<uint> Instance = new UInt32Decoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<uint?> NullableInstance = new NullableDecoder<uint>(Instance);

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public uint Decode(IJsonReader reader)
        {
            return reader.ReadUInt32();
        }
    }

    /// <summary>
    /// Decoder for UInt64.
    /// </summary>
    internal sealed class UInt64Decoder : IDecoder<ulong>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<ulong> Instance = new UInt64Decoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<ulong?> NullableInstance = new NullableDecoder<ulong>(Instance);

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public ulong Decode(IJsonReader reader)
        {
            return reader.ReadUInt64();
        }
    }

    /// <summary>
    /// Decoder for Float.
    /// </summary>
    internal sealed class SingleDecoder : IDecoder<float>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<float> Instance = new SingleDecoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<float?> NullableInstance = new NullableDecoder<float>(Instance);

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public float Decode(IJsonReader reader)
        {
            return reader.ReadSingle();
        }
    }

    /// <summary>
    /// Decoder for double.
    /// </summary>
    internal sealed class DoubleDecoder : IDecoder<double>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<double> Instance = new DoubleDecoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<double?> NullableInstance = new NullableDecoder<double>(Instance);

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public double Decode(IJsonReader reader)
        {
            return reader.ReadDouble();
        }
    }

    /// <summary>
    /// Decoder for boolean.
    /// </summary>
    internal sealed class BooleanDecoder : IDecoder<bool>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<bool> Instance = new BooleanDecoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<bool?> NullableInstance = new NullableDecoder<bool>(Instance);

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public bool Decode(IJsonReader reader)
        {
            return reader.ReadBoolean();
        }
    }

    /// <summary>
    /// Decoder for DateTime.
    /// </summary>
    internal sealed class DateTimeDecoder : IDecoder<DateTime>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<DateTime> Instance = new DateTimeDecoder();

        /// <summary>
        /// The instance for nullable.
        /// </summary>
        public static readonly IDecoder<DateTime?> NullableInstance = new NullableDecoder<DateTime>(Instance);

        /// <summary>
        /// The decode. Parses a round-trip ("O") formatted timestamp,
        /// preserving the original Kind information.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public DateTime Decode(IJsonReader reader)
        {
            return DateTime.Parse(reader.ReadString(), CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
        }
    }

    /// <summary>
    /// Decoder for bytes.
    /// </summary>
    internal sealed class BytesDecoder : IDecoder<byte[]>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<byte[]> Instance = new BytesDecoder();

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public byte[] Decode(IJsonReader reader)
        {
            return reader.ReadBytes();
        }
    }

    /// <summary>
    /// Decoder for string.
    /// </summary>
    internal sealed class StringDecoder : IDecoder<string>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<string> Instance = new StringDecoder();

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public string Decode(IJsonReader reader)
        {
            return reader.ReadString();
        }
    }

    /// <summary>
    /// Decoder for struct type.
    /// </summary>
    /// <typeparam name="T">The struct type.</typeparam>
    internal abstract class StructDecoder<T> : IDecoder<T>
        where T : class
    {
        /// <summary>
        /// The decode. Returns null for a json null; otherwise consumes a
        /// complete json object and decodes its fields.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public T Decode(IJsonReader reader)
        {
            if (reader.IsNull)
            {
                reader.Read();
                return null;
            }

            EnsureStartObject(reader);
            var obj = this.DecodeFields(reader);
            EnsureEndObject(reader);
            return obj;
        }

        /// <summary>
        /// Decode fields without ensuring start and end object.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        /// <returns>The decoded object.</returns>
        public virtual T DecodeFields(IJsonReader reader)
        {
            var obj = this.Create();

            while (TryReadPropertyName(reader, out string fieldName))
            {
                this.SetField(obj, fieldName, reader);
            }

            return obj;
        }

        /// <summary>
        /// Try read next token as property name.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        /// <param name="propertyName">The property name.</param>
        /// <returns>If succeeded.</returns>
        protected static bool TryReadPropertyName(IJsonReader reader, out string propertyName)
        {
            if (reader.IsPropertyName)
            {
                propertyName = reader.ReadString();
                return true;
            }

            propertyName = null;
            return false;
        }

        /// <summary>
        /// Read list of specific type.
        /// </summary>
        /// <typeparam name="TItem">The item type.</typeparam>
        /// <param name="reader">The json reader.</param>
        /// <param name="itemDecoder">The item decoder.</param>
        /// <returns>The decoded list.</returns>
        protected static List<TItem> ReadList<TItem>(IJsonReader reader, IDecoder<TItem> itemDecoder)
        {
            return ListDecoder<TItem>.Decode(reader, itemDecoder);
        }

        /// <summary>
        /// Create a struct instance.
        /// </summary>
        /// <returns>The struct instance.</returns>
        protected abstract T Create();

        /// <summary>
        /// Set given field. The default implementation skips the value of
        /// any field the subclass does not recognize.
        /// </summary>
        /// <param name="value">The field value.</param>
        /// <param name="fieldName">The field name.</param>
        /// <param name="reader">The json reader.</param>
        protected virtual void SetField(T value, string fieldName, IJsonReader reader)
        {
            reader.Skip();
        }

        /// <summary>
        /// Ensure current token is start object.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        private static void EnsureStartObject(IJsonReader reader)
        {
            if (!reader.IsStartObject)
            {
                throw new InvalidOperationException("Invalid json token. Expect start object");
            }

            reader.Read();
        }

        /// <summary>
        /// Ensure next token is end object.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        private static void EnsureEndObject(IJsonReader reader)
        {
            if (!reader.IsEndObject)
            {
                throw new InvalidOperationException("Invalid json token. Expect end object");
            }

            reader.Read();
        }
    }

    /// <summary>
    /// Decoder for union type.
    /// </summary>
    /// <typeparam name="T">The union type.</typeparam>
    internal abstract class UnionDecoder<T> : StructDecoder<T>
        where T : class
    {
        /// <summary>
        /// Decode fields without ensuring start and end object. A union is
        /// dispatched on the value of the mandatory leading ".tag" field.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        /// <returns>The decoded object.</returns>
        public override T DecodeFields(IJsonReader reader)
        {
            if (!StructDecoder<T>.TryReadPropertyName(reader, out string fieldName))
            {
                // Fixed message wording: was "Not property found."
                throw new InvalidOperationException("No property found.");
            }

            if (fieldName != ".tag")
            {
                throw new InvalidOperationException(
                    string.Format(
                        CultureInfo.InvariantCulture,
                        "Expect '.tag' field, got {0}",
                        fieldName));
            }

            return this.Decode(StringDecoder.Instance.Decode(reader), reader);
        }

        /// <summary>
        /// Decode based on given tag.
        /// </summary>
        /// <param name="tag">The tag.</param>
        /// <param name="reader">The reader.</param>
        /// <returns>The decoded object.</returns>
        protected abstract T Decode(string tag, IJsonReader reader);
    }

    /// <summary>
    /// The decoder for Empty Type.
    /// </summary>
    internal sealed class EmptyDecoder : IDecoder<Empty>
    {
        /// <summary>
        /// The instance.
        /// </summary>
        public static readonly IDecoder<Empty> Instance = new EmptyDecoder();

        /// <summary>
        /// Skips the current value and returns the shared Empty instance.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The empty instance.</returns>
        public Empty Decode(IJsonReader reader)
        {
            reader.Skip();
            return Empty.Instance;
        }
    }

    /// <summary>
    /// Decoder for generic list.
    /// </summary>
    /// <typeparam name="T">The list item type.</typeparam>
    internal sealed class ListDecoder<T> : IDecoder<List<T>>
    {
        /// <summary>
        /// Decoder for list item.
        /// </summary>
        private readonly IDecoder<T> itemDecoder;

        /// <summary>
        /// Initializes a new instance of the <see cref="ListDecoder{T}"/> class.
        /// </summary>
        /// <param name="itemDecoder">The item decoder.</param>
        public ListDecoder(IDecoder<T> itemDecoder)
        {
            this.itemDecoder = itemDecoder;
        }

        /// <summary>
        /// Decode into list of specific type.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        /// <param name="itemDecoder">The item decoder.</param>
        /// <returns>The list.</returns>
        public static List<T> Decode(IJsonReader reader, IDecoder<T> itemDecoder)
        {
            var list = new List<T>();

            EnsureStartArray(reader);

            while (TryReadArrayItem(reader, itemDecoder, out T item))
            {
                list.Add(item);
            }

            EnsureEndArray(reader);
            return list;
        }

        /// <summary>
        /// The decode.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <returns>The value.</returns>
        public List<T> Decode(IJsonReader reader)
        {
            return Decode(reader, this.itemDecoder);
        }

        /// <summary>
        /// Ensure current token is start array.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        private static void EnsureStartArray(IJsonReader reader)
        {
            if (!reader.IsStartArray)
            {
                throw new InvalidOperationException("Invalid json token. Expect start array");
            }

            reader.Read();
        }

        /// <summary>
        /// Ensure next token is end array.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        private static void EnsureEndArray(IJsonReader reader)
        {
            if (!reader.IsEndArray)
            {
                throw new InvalidOperationException("Invalid json token. Expect end array");
            }

            reader.Read();
        }

        /// <summary>
        /// Try read next array item.
        /// </summary>
        /// <param name="reader">The json reader.</param>
        /// <param name="decoder">The decoder.</param>
        /// <param name="value">The value of the array item.</param>
        /// <returns>If succeeded.</returns>
        private static bool TryReadArrayItem(IJsonReader reader, IDecoder<T> decoder, out T value)
        {
            value = default;

            // Was written as a while-loop that always returned on the first
            // iteration; an if-statement expresses the intent directly.
            if (!reader.IsEndArray)
            {
                value = decoder.Decode(reader);
                return true;
            }

            return false;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

namespace Apache.Ignite.Core.Impl
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics.CodeAnalysis;
    using System.Globalization;
    using System.IO;
    using System.Linq;
    using System.Reflection;
    using System.Runtime.InteropServices;
    using System.Text;
    using Apache.Ignite.Core.Binary;
    using Apache.Ignite.Core.Cluster;
    using Apache.Ignite.Core.Common;
    using Apache.Ignite.Core.Impl.Binary;
    using Apache.Ignite.Core.Impl.Cluster;
    using Apache.Ignite.Core.Impl.Common;
    using Apache.Ignite.Core.Impl.Unmanaged;
    using BinaryReader = Apache.Ignite.Core.Impl.Binary.BinaryReader;

    /// <summary>
    /// Native utility methods.
    /// </summary>
    internal static class IgniteUtils
    {
        /** Environment variable: JAVA_HOME. */
        private const string EnvJavaHome = "JAVA_HOME";

        /** Lookup paths. */
        private static readonly string[] JvmDllLookupPaths = {@"jre\bin\server", @"jre\bin\default"};

        /** File: jvm.dll. */
        internal const string FileJvmDll = "jvm.dll";

        /** File: Ignite.Common.dll. */
        internal const string FileIgniteJniDll = "ignite.common.dll";

        /** Prefix for temp directory names. */
        private const string DirIgniteTmp = "Ignite_";

        /** Loaded. NOTE(review): not synchronized — presumably LoadDlls is only called from a
         * single startup thread; confirm at call sites. */
        private static bool _loaded;

        /** Thread-local random. */
        [ThreadStatic]
        private static Random _rnd;

        /// <summary>
        /// Initializes the <see cref="IgniteUtils"/> class.
        /// </summary>
        [SuppressMessage("Microsoft.Performance", "CA1810:InitializeReferenceTypeStaticFieldsInline",
            Justification = "Readability.")]
        static IgniteUtils()
        {
            // Best-effort cleanup of temp directories left over from previous runs.
            TryCleanTempDirectories();
        }

        /// <summary>
        /// Gets thread local random.
        /// </summary>
        /// <value>Thread local random.</value>
        public static Random ThreadLocalRandom
        {
            get { return _rnd ?? (_rnd = new Random()); }
        }

        /// <summary>
        /// Returns shuffled list copy.
        /// </summary>
        /// <returns>Shuffled list copy.</returns>
        public static IList<T> Shuffle<T>(IList<T> list)
        {
            int cnt = list.Count;

            // Always return a copy, as documented, even when there is nothing to shuffle.
            // (Previously the original list reference was returned for 0 or 1 elements.)
            if (cnt <= 1)
                return new List<T>(list);

            List<T> res = new List<T>(list);

            Random rnd = ThreadLocalRandom;

            // Fisher-Yates shuffle.
            while (cnt > 1)
            {
                cnt--;

                int idx = rnd.Next(cnt + 1);

                T val = res[idx];
                res[idx] = res[cnt];
                res[cnt] = val;
            }

            return res;
        }

        /// <summary>
        /// Load JVM DLL if needed.
        /// </summary>
        /// <param name="configJvmDllPath">JVM DLL path from config.</param>
        public static void LoadDlls(string configJvmDllPath)
        {
            if (_loaded)
                return;

            // 1. Load JNI dll.
            LoadJvmDll(configJvmDllPath);

            // 2. Load GG JNI dll.
            UnmanagedUtils.Initialize();

            _loaded = true;
        }

        /// <summary>
        /// Create new instance of specified class.
        /// </summary>
        /// <param name="typeName">Class name</param>
        /// <returns>New Instance.</returns>
        public static T CreateInstance<T>(string typeName)
        {
            IgniteArgumentCheck.NotNullOrEmpty(typeName, "typeName");

            var type = new TypeResolver().ResolveType(typeName);

            if (type == null)
                throw new IgniteException("Failed to create class instance [className=" + typeName + ']');

            return (T) Activator.CreateInstance(type);
        }

        /// <summary>
        /// Set properties on the object.
        /// </summary>
        /// <param name="target">Target object.</param>
        /// <param name="props">Properties.</param>
        public static void SetProperties(object target, IEnumerable<KeyValuePair<string, object>> props)
        {
            if (props == null)
                return;

            IgniteArgumentCheck.NotNull(target, "target");

            Type typ = target.GetType();

            foreach (KeyValuePair<string, object> prop in props)
            {
                PropertyInfo prop0 = typ.GetProperty(prop.Key,
                    BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);

                if (prop0 == null)
                    throw new IgniteException("Property is not found [type=" + typ.Name +
                        ", property=" + prop.Key + ']');

                prop0.SetValue(target, prop.Value, null);
            }
        }

        /// <summary>
        /// Loads the JVM DLL.
        /// </summary>
        private static void LoadJvmDll(string configJvmDllPath)
        {
            var messages = new List<string>();

            foreach (var dllPath in GetJvmDllPaths(configJvmDllPath))
            {
                var errCode = LoadDll(dllPath.Value, FileJvmDll);

                if (errCode == 0)
                    return;

                messages.Add(string.Format(CultureInfo.InvariantCulture, "[option={0}, path={1}, errorCode={2}]",
                    dllPath.Key, dllPath.Value, errCode));

                if (dllPath.Value == configJvmDllPath)
                    break;  // if configJvmDllPath is specified and is invalid - do not try other options
            }

            if (!messages.Any())  // not loaded and no messages - everything was null
                messages.Add(string.Format(CultureInfo.InvariantCulture,
                    "Please specify IgniteConfiguration.JvmDllPath or {0}.", EnvJavaHome));

            if (messages.Count == 1)
                throw new IgniteException(string.Format(CultureInfo.InvariantCulture, "Failed to load {0} ({1})",
                    FileJvmDll, messages[0]));

            var combinedMessage = messages.Aggregate((x, y) =>
                string.Format(CultureInfo.InvariantCulture, "{0}\n{1}", x, y));

            throw new IgniteException(string.Format(CultureInfo.InvariantCulture, "Failed to load {0}:\n{1}",
                FileJvmDll, combinedMessage));
        }

        /// <summary>
        /// Try loading DLLs first using file path, then using its simple name.
        /// </summary>
        /// <param name="filePath"></param>
        /// <param name="simpleName"></param>
        /// <returns>Zero in case of success, error code in case of failure.</returns>
        private static int LoadDll(string filePath, string simpleName)
        {
            int res = 0;

            IntPtr ptr;

            if (filePath != null)
            {
                ptr = NativeMethods.LoadLibrary(filePath);

                if (ptr == IntPtr.Zero)
                    res = Marshal.GetLastWin32Error();
                else
                    return res;
            }

            // Failed to load using file path, fallback to simple name.
            ptr = NativeMethods.LoadLibrary(simpleName);

            if (ptr == IntPtr.Zero)
            {
                // Preserve the first error code, if any.
                if (res == 0)
                    res = Marshal.GetLastWin32Error();
            }
            else
                res = 0;

            return res;
        }

        /// <summary>
        /// Gets the JVM DLL paths in order of lookup priority.
        /// </summary>
        private static IEnumerable<KeyValuePair<string, string>> GetJvmDllPaths(string configJvmDllPath)
        {
            if (!string.IsNullOrEmpty(configJvmDllPath))
                yield return new KeyValuePair<string, string>("IgniteConfiguration.JvmDllPath", configJvmDllPath);

            var javaHomeDir = Environment.GetEnvironmentVariable(EnvJavaHome);

            if (!string.IsNullOrEmpty(javaHomeDir))
                foreach (var path in JvmDllLookupPaths)
                    yield return
                        new KeyValuePair<string, string>(EnvJavaHome, Path.Combine(javaHomeDir, path, FileJvmDll));
        }

        /// <summary>
        /// Unpacks an embedded resource into a temporary folder and returns the full path of resulting file.
        /// </summary>
        /// <param name="resourceName">Resource name.</param>
        /// <returns>Path to a temp file with an unpacked resource.</returns>
        public static string UnpackEmbeddedResource(string resourceName)
        {
            var dllRes = Assembly.GetExecutingAssembly().GetManifestResourceNames()
                .Single(x => x.EndsWith(resourceName, StringComparison.OrdinalIgnoreCase));

            return WriteResourceToTempFile(dllRes, resourceName);
        }

        /// <summary>
        /// Writes the resource to temporary file.
        /// </summary>
        /// <param name="resource">The resource.</param>
        /// <param name="name">File name prefix</param>
        /// <returns>Path to the resulting temp file.</returns>
        private static string WriteResourceToTempFile(string resource, string name)
        {
            // Dll file name should not be changed, so we create a temp folder with random name instead.
            var file = Path.Combine(GetTempDirectoryName(), name);

            using (var src = Assembly.GetExecutingAssembly().GetManifestResourceStream(resource))
            using (var dest = File.OpenWrite(file))
            {
                // ReSharper disable once PossibleNullReferenceException
                src.CopyTo(dest);

                return file;
            }
        }

        /// <summary>
        /// Tries to clean temporary directories created with <see cref="GetTempDirectoryName"/>.
        /// </summary>
        private static void TryCleanTempDirectories()
        {
            foreach (var dir in Directory.GetDirectories(Path.GetTempPath(), DirIgniteTmp + "*"))
            {
                try
                {
                    Directory.Delete(dir, true);
                }
                catch (IOException)
                {
                    // Expected: the directory may be in use by another Ignite process.
                }
                catch (UnauthorizedAccessException)
                {
                    // Expected
                }
            }
        }

        /// <summary>
        /// Creates a uniquely named, empty temporary directory on disk and returns the full path of that directory.
        /// </summary>
        /// <returns>The full path of the temporary directory.</returns>
        private static string GetTempDirectoryName()
        {
            while (true)
            {
                var dir = Path.Combine(Path.GetTempPath(), DirIgniteTmp + Path.GetRandomFileName());

                try
                {
                    return Directory.CreateDirectory(dir).FullName;
                }
                catch (IOException)
                {
                    // Expected: name collision with another process - retry with a new random name.
                }
                catch (UnauthorizedAccessException)
                {
                    // Expected
                }
            }
        }

        /// <summary>
        /// Convert unmanaged char array to string.
        /// </summary>
        /// <param name="chars">Char array.</param>
        /// <param name="charsLen">Char array length.</param>
        /// <returns></returns>
        public static unsafe string Utf8UnmanagedToString(sbyte* chars, int charsLen)
        {
            IntPtr ptr = new IntPtr(chars);

            if (ptr == IntPtr.Zero)
                return null;

            byte[] arr = new byte[charsLen];

            Marshal.Copy(ptr, arr, 0, arr.Length);

            return Encoding.UTF8.GetString(arr);
        }

        /// <summary>
        /// Convert string to unmanaged byte array.
        /// </summary>
        /// <param name="str">String.</param>
        /// <returns>Unmanaged byte array.</returns>
        public static unsafe sbyte* StringToUtf8Unmanaged(string str)
        {
            var ptr = IntPtr.Zero;

            if (str != null)
            {
                byte[] strBytes = Encoding.UTF8.GetBytes(str);

                // NOTE(review): caller owns this allocation and must release it with
                // Marshal.FreeHGlobal - confirm at call sites.
                ptr = Marshal.AllocHGlobal(strBytes.Length + 1);

                Marshal.Copy(strBytes, 0, ptr, strBytes.Length);

                *((byte*)ptr.ToPointer() + strBytes.Length) = 0;  // NULL-terminator.
            }

            return (sbyte*)ptr.ToPointer();
        }

        /// <summary>
        /// Reads node collection from stream.
        /// </summary>
        /// <param name="reader">Reader.</param>
        /// <param name="pred">The predicate.</param>
        /// <returns> Nodes list or null. </returns>
        public static List<IClusterNode> ReadNodes(IBinaryRawReader reader, Func<ClusterNodeImpl, bool> pred = null)
        {
            var cnt = reader.ReadInt();

            // Negative count marks a null collection on the wire.
            if (cnt < 0)
                return null;

            var res = new List<IClusterNode>(cnt);

            var ignite = ((BinaryReader)reader).Marshaller.Ignite;

            if (pred == null)
            {
                for (var i = 0; i < cnt; i++)
                    res.Add(ignite.GetNode(reader.ReadGuid()));
            }
            else
            {
                for (var i = 0; i < cnt; i++)
                {
                    var node = ignite.GetNode(reader.ReadGuid());

                    if (pred(node))
                        res.Add(node);
                }
            }

            return res;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Threading;
using System.Reflection;
using System.Xml;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenMetaverse.Utilities;

namespace OpenMetaverse.TestClient
{
    /// <summary>
    /// A single logged-in bot. Discovers and dispatches <see cref="Command"/> implementations,
    /// tracks group membership/appearances, and obeys instructions from its configured master.
    /// </summary>
    public class TestClient : GridClient
    {
        public UUID GroupID = UUID.Zero;
        public Dictionary<UUID, GroupMember> GroupMembers;
        public Dictionary<UUID, AvatarAppearancePacket> Appearances = new Dictionary<UUID, AvatarAppearancePacket>();
        public Dictionary<string, Command> Commands = new Dictionary<string, Command>();
        public bool Running = true;
        public bool GroupCommands = false;
        public string MasterName = String.Empty;
        public UUID MasterKey = UUID.Zero;
        public bool AllowObjectMaster = false;
        public ClientManager ClientManager;
        public VoiceManager VoiceManager;
        // Shell-like inventory commands need to be aware of the 'current' inventory folder.
        public InventoryFolder CurrentDirectory = null;

        private System.Timers.Timer updateTimer;
        private UUID GroupMembersRequestID;
        public Dictionary<UUID, Group> GroupsCache = null;
        private ManualResetEvent GroupsEvent = new ManualResetEvent(false);

        /// <summary>
        /// Creates the bot, registers every Command found in the executing assembly,
        /// configures client settings and wires all network/object/IM callbacks.
        /// </summary>
        public TestClient(ClientManager manager)
        {
            ClientManager = manager;

            // Drives Command.Think() for active commands twice a second.
            updateTimer = new System.Timers.Timer(500);
            updateTimer.Elapsed += new System.Timers.ElapsedEventHandler(updateTimer_Elapsed);

            RegisterAllCommands(Assembly.GetExecutingAssembly());

            Settings.LOG_LEVEL = Helpers.LogLevel.Debug;
            Settings.LOG_RESENDS = false;
            Settings.STORE_LAND_PATCHES = true;
            Settings.ALWAYS_DECODE_OBJECTS = true;
            Settings.ALWAYS_REQUEST_OBJECTS = true;
            Settings.SEND_AGENT_UPDATES = true;
            Settings.USE_ASSET_CACHE = true;

            Network.RegisterCallback(PacketType.AgentDataUpdate, AgentDataUpdateHandler);
            Network.LoginProgress += LoginHandler;
            Objects.AvatarUpdate += new EventHandler<AvatarUpdateEventArgs>(Objects_AvatarUpdate);
            Objects.TerseObjectUpdate += new EventHandler<TerseObjectUpdateEventArgs>(Objects_TerseObjectUpdate);
            Network.SimChanged += new EventHandler<SimChangedEventArgs>(Network_SimChanged);
            Self.IM += Self_IM;
            Groups.GroupMembersReply += GroupMembersHandler;
            Inventory.InventoryObjectOffered += Inventory_OnInventoryObjectReceived;

            Network.RegisterCallback(PacketType.AvatarAppearance, AvatarAppearanceHandler);
            Network.RegisterCallback(PacketType.AlertMessage, AlertMessageHandler);

            VoiceManager = new VoiceManager(this);

            updateTimer.Start();
        }

        // Keep the camera glued to the avatar whenever our own object is updated.
        void Objects_TerseObjectUpdate(object sender, TerseObjectUpdateEventArgs e)
        {
            if (e.Prim.LocalID == Self.LocalID)
            {
                SetDefaultCamera();
            }
        }

        void Objects_AvatarUpdate(object sender, AvatarUpdateEventArgs e)
        {
            if (e.Avatar.LocalID == Self.LocalID)
            {
                SetDefaultCamera();
            }
        }

        void Network_SimChanged(object sender, SimChangedEventArgs e)
        {
            // Widen the draw FOV after a region change.
            Self.Movement.SetFOVVerticalAngle(Utils.TWO_PI - 0.05f);
        }

        public void SetDefaultCamera()
        {
            // SetCamera 5m behind the avatar
            Self.Movement.Camera.LookAt(
                Self.SimPosition + new Vector3(-5, 0, 0) * Self.Movement.BodyRotation,
                Self.SimPosition
            );
        }

        /// <summary>
        /// Handles incoming instant messages: teleport lures and commands are honored only
        /// when the sender is the master (or a group member, if group commands are enabled).
        /// </summary>
        void Self_IM(object sender, InstantMessageEventArgs e)
        {
            // Redundant "? true : false" removed; the boolean expression already is the value.
            bool groupIM = e.IM.GroupIM && GroupMembers != null && GroupMembers.ContainsKey(e.IM.FromAgentID);

            if (e.IM.FromAgentID == MasterKey || (GroupCommands && groupIM))
            {
                // Received an IM from someone that is authenticated
                Console.WriteLine("<{0} ({1})> {2}: {3} (@{4}:{5})", e.IM.GroupIM ? "GroupIM" : "IM",
                    e.IM.Dialog, e.IM.FromAgentName, e.IM.Message, e.IM.RegionID, e.IM.Position);

                if (e.IM.Dialog == InstantMessageDialog.RequestTeleport)
                {
                    Console.WriteLine("Accepting teleport lure.");
                    Self.TeleportLureRespond(e.IM.FromAgentID, e.IM.IMSessionID, true);
                }
                else if (
                    e.IM.Dialog == InstantMessageDialog.MessageFromAgent ||
                    e.IM.Dialog == InstantMessageDialog.MessageFromObject)
                {
                    ClientManager.Instance.DoCommandAll(e.IM.Message, e.IM.FromAgentID);
                }
            }
            else
            {
                // Received an IM from someone that is not the bot's master, ignore
                Console.WriteLine("<{0} ({1})> {2} (not master): {3} (@{4}:{5})", e.IM.GroupIM ? "GroupIM" : "IM",
                    e.IM.Dialog, e.IM.FromAgentName, e.IM.Message, e.IM.RegionID, e.IM.Position);
            }
        }

        /// <summary>
        /// Initialize everything that needs to be initialized once we're logged in.
        /// </summary>
        /// <param name="sender">Event source.</param>
        /// <param name="e">Login progress data; only Success is acted upon.</param>
        public void LoginHandler(object sender, LoginProgressEventArgs e)
        {
            if (e.Status == LoginStatus.Success)
            {
                // Start in the inventory root folder.
                CurrentDirectory = Inventory.Store.RootFolder;
            }
        }

        /// <summary>
        /// Instantiates and registers every non-abstract <see cref="Command"/> subclass in the assembly.
        /// Construction failures are logged and skipped rather than aborting registration.
        /// </summary>
        public void RegisterAllCommands(Assembly assembly)
        {
            foreach (Type t in assembly.GetTypes())
            {
                try
                {
                    if (t.IsSubclassOf(typeof(Command)))
                    {
                        ConstructorInfo info = t.GetConstructor(new Type[] { typeof(TestClient) });
                        Command command = (Command)info.Invoke(new object[] { this });
                        RegisterCommand(command);
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.ToString());
                }
            }
        }

        /// <summary>
        /// Registers a command under its lower-cased name; first registration wins.
        /// </summary>
        public void RegisterCommand(Command command)
        {
            command.Client = this;

            // Lower-case once instead of twice.
            string name = command.Name.ToLower();

            if (!Commands.ContainsKey(name))
            {
                Commands.Add(name, command);
            }
        }

        /// <summary>
        /// Synchronously refreshes <see cref="GroupsCache"/>, waiting up to 10 seconds for the reply.
        /// </summary>
        public void ReloadGroupsCache()
        {
            Groups.CurrentGroups += Groups_CurrentGroups;
            Groups.RequestCurrentGroups();
            GroupsEvent.WaitOne(10000, false);
            Groups.CurrentGroups -= Groups_CurrentGroups;
            GroupsEvent.Reset();
        }

        void Groups_CurrentGroups(object sender, CurrentGroupsEventArgs e)
        {
            if (null == GroupsCache)
                GroupsCache = e.Groups;
            else
                lock (GroupsCache) { GroupsCache = e.Groups; }
            GroupsEvent.Set();
        }

        /// <summary>
        /// Resolves a group name (or UUID string) to a group UUID, consulting and,
        /// if necessary, (re)loading the groups cache.
        /// </summary>
        /// <returns>The group's UUID, or <see cref="UUID.Zero"/> when not found.</returns>
        public UUID GroupName2UUID(String groupName)
        {
            UUID tryUUID;
            if (UUID.TryParse(groupName, out tryUUID))
                return tryUUID;

            if (null == GroupsCache)
            {
                ReloadGroupsCache();
                if (null == GroupsCache)
                    return UUID.Zero;
            }

            lock (GroupsCache)
            {
                if (GroupsCache.Count > 0)
                {
                    // Ordinal case-insensitive match instead of culture-sensitive
                    // ToLower() comparison (avoids e.g. the Turkish-I pitfall).
                    foreach (Group currentGroup in GroupsCache.Values)
                        if (string.Equals(currentGroup.Name, groupName, StringComparison.OrdinalIgnoreCase))
                            return currentGroup.ID;
                }
            }

            return UUID.Zero;
        }

        // Periodic tick: give every active command a chance to do background work.
        private void updateTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e)
        {
            foreach (Command c in Commands.Values)
                if (c.Active)
                    c.Think();
        }

        // Tracks the bot's active group and kicks off a member-list request for it.
        private void AgentDataUpdateHandler(object sender, PacketReceivedEventArgs e)
        {
            AgentDataUpdatePacket p = (AgentDataUpdatePacket)e.Packet;
            if (p.AgentData.AgentID == e.Simulator.Client.Self.AgentID && p.AgentData.ActiveGroupID != UUID.Zero)
            {
                GroupID = p.AgentData.ActiveGroupID;

                GroupMembersRequestID = e.Simulator.Client.Groups.RequestGroupMembers(GroupID);
            }
        }

        private void GroupMembersHandler(object sender, GroupMembersReplyEventArgs e)
        {
            // Ignore replies for requests we did not issue.
            if (e.RequestID != GroupMembersRequestID)
                return;

            GroupMembers = e.Members;
        }

        private void AvatarAppearanceHandler(object sender, PacketReceivedEventArgs e)
        {
            Packet packet = e.Packet;

            AvatarAppearancePacket appearance = (AvatarAppearancePacket)packet;

            lock (Appearances)
                Appearances[appearance.Sender.ID] = appearance;
        }

        private void AlertMessageHandler(object sender, PacketReceivedEventArgs e)
        {
            Packet packet = e.Packet;

            AlertMessagePacket message = (AlertMessagePacket)packet;

            Logger.Log("[AlertMessage] " + Utils.BytesToString(message.AlertData.Message),
                Helpers.LogLevel.Info, this);
        }

        // Accept inventory offers only from the master (or, absent a master key, group members).
        private void Inventory_OnInventoryObjectReceived(object sender, InventoryObjectOfferedEventArgs e)
        {
            if (MasterKey != UUID.Zero)
            {
                if (e.Offer.FromAgentID != MasterKey)
                    return;
            }
            else if (GroupMembers != null && !GroupMembers.ContainsKey(e.Offer.FromAgentID))
            {
                return;
            }

            e.Accept = true;
        }
    }
}
#nullable enable
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using Content.Shared.GameObjects.Components.Body.Part;
using Content.Shared.GameObjects.Components.Body.Part.Property;
using Content.Shared.GameObjects.Components.Body.Preset;
using Content.Shared.GameObjects.Components.Body.Template;
using Content.Shared.GameObjects.Components.Damage;
using Content.Shared.GameObjects.Components.Movement;
using Content.Shared.GameObjects.EntitySystems;
using Robust.Shared.GameObjects;
using Robust.Shared.GameObjects.Systems;
using Robust.Shared.Interfaces.GameObjects;
using Robust.Shared.IoC;
using Robust.Shared.Prototypes;
using Robust.Shared.Serialization;
using Robust.Shared.Utility;
using Robust.Shared.ViewVariables;

namespace Content.Shared.GameObjects.Components.Body
{
    // TODO BODY Damage methods for collections of IDamageableComponents
    public abstract class SharedBodyComponent : Component, IBody
    {
        [Dependency] private readonly IPrototypeManager _prototypeManager = default!;

        public override string Name => "Body";

        public override uint? NetID => ContentNetIDs.BODY;

        private string? _centerSlot;

        // Slot name -> body part prototype id, as defined by the preset/map.
        private Dictionary<string, string> _partIds = new Dictionary<string, string>();

        // Slot name -> part instance currently filling that slot.
        private readonly Dictionary<string, IBodyPart> _parts = new Dictionary<string, IBodyPart>();

        [ViewVariables]
        public string? TemplateName { get; private set; }

        [ViewVariables]
        public string? PresetName { get; private set; }

        [ViewVariables]
        public Dictionary<string, BodyPartType> Slots { get; private set; } =
            new Dictionary<string, BodyPartType>();

        [ViewVariables]
        public Dictionary<string, List<string>> Connections { get; private set; } =
            new Dictionary<string, List<string>>();

        /// <summary>
        ///     Maps slots to the part filling each one.
        /// </summary>
        [ViewVariables]
        public IReadOnlyDictionary<string, IBodyPart> Parts => _parts;

        public IReadOnlyDictionary<string, string> PartIds => _partIds;

        [ViewVariables]
        public IReadOnlyDictionary<string, string> PartIDs => _partIds;

        protected virtual bool CanAddPart(string slot, IBodyPart part)
        {
            // TryAdd both validates (slot free) and inserts in one step.
            if (!HasSlot(slot) || !_parts.TryAdd(slot, part))
            {
                return false;
            }

            return true;
        }

        protected virtual void OnAddPart(string slot, IBodyPart part)
        {
            part.Body = this;

            var argsAdded = new BodyPartAddedEventArgs(part, slot);

            foreach (var component in Owner.GetAllComponents<IBodyPartAdded>().ToArray())
            {
                component.BodyPartAdded(argsAdded);
            }

            // TODO BODY Sort this duplicate out
            OnBodyChanged();
        }

        protected virtual void OnRemovePart(string slot, IBodyPart part)
        {
            part.Body = null;

            var args = new BodyPartRemovedEventArgs(part, slot);

            foreach (var component in Owner.GetAllComponents<IBodyPartRemoved>())
            {
                component.BodyPartRemoved(args);
            }

            // creadth: fall down if no legs
            if (part.PartType == BodyPartType.Leg &&
                Parts.Count(x => x.Value.PartType == BodyPartType.Leg) == 0)
            {
                EntitySystem.Get<SharedStandingStateSystem>().Down(Owner);
            }

            // creadth: immediately kill entity if last vital part removed
            if (Owner.TryGetComponent(out IDamageableComponent? damageable))
            {
                if (part.IsVital && Parts.Count(x => x.Value.PartType == part.PartType) == 0)
                {
                    damageable.CurrentState = DamageState.Dead;
                    damageable.ForceHealthChangedEvent();
                }
            }

            OnBodyChanged();
        }

        /// <summary>
        ///     Adds a part to a slot. With <paramref name="force"/>, the slot is created
        ///     if missing and any occupant is overwritten.
        /// </summary>
        public bool TryAddPart(string slot, IBodyPart part, bool force = false)
        {
            DebugTools.AssertNotNull(part);
            DebugTools.AssertNotNull(slot);

            if (force)
            {
                if (!HasSlot(slot))
                {
                    Slots[slot] = part.PartType;
                }

                _parts[slot] = part;
            }
            else
            {
                if (!CanAddPart(slot, part))
                {
                    return false;
                }
            }

            OnAddPart(slot, part);

            return true;
        }

        public bool HasPart(string slot)
        {
            DebugTools.AssertNotNull(slot);

            return _parts.ContainsKey(slot);
        }

        public bool HasPart(IBodyPart part)
        {
            DebugTools.AssertNotNull(part);

            return _parts.ContainsValue(part);
        }

        public void RemovePart(IBodyPart part)
        {
            DebugTools.AssertNotNull(part);

            var slotName = _parts.FirstOrDefault(x => x.Value == part).Key;

            if (string.IsNullOrEmpty(slotName))
            {
                return;
            }

            RemovePart(slotName);
        }

        // TODO BODY invert this behavior with the one above
        public bool RemovePart(string slot)
        {
            DebugTools.AssertNotNull(slot);

            if (!_parts.Remove(slot, out var part))
            {
                return false;
            }

            OnRemovePart(slot, part);

            // Detach any parts that are no longer connected to the center.
            if (TryGetSlotConnections(slot, out var connections))
            {
                foreach (var connectionName in connections)
                {
                    if (TryGetPart(connectionName, out var result) && !ConnectedToCenter(result))
                    {
                        RemovePart(connectionName);
                    }
                }
            }

            return true;
        }

        public bool RemovePart(IBodyPart part, [NotNullWhen(true)] out string? slotName)
        {
            DebugTools.AssertNotNull(part);

            var pair = _parts.FirstOrDefault(kvPair => kvPair.Value == part);

            if (pair.Equals(default))
            {
                slotName = null;
                return false;
            }

            if (RemovePart(pair.Key))
            {
                slotName = pair.Key;
                return true;
            }

            slotName = null;
            return false;
        }

        public bool TryDropPart(IBodyPart part, [NotNullWhen(true)] out List<IBodyPart>? dropped)
        {
            DebugTools.AssertNotNull(part);

            if (!_parts.ContainsValue(part))
            {
                dropped = null;
                return false;
            }

            if (!RemovePart(part, out var slotName))
            {
                dropped = null;
                return false;
            }

            dropped = new List<IBodyPart> {part};

            // Call disconnect on all limbs that were hanging off this limb.
            if (TryGetSlotConnections(slotName, out var connections))
            {
                // TODO BODY optimize
                foreach (var connectionName in connections)
                {
                    if (TryGetPart(connectionName, out var result) &&
                        !ConnectedToCenter(result) &&
                        RemovePart(connectionName))
                    {
                        dropped.Add(result);
                    }
                }
            }

            OnBodyChanged();
            return true;
        }

        public bool ConnectedToCenter(IBodyPart part)
        {
            var searchedSlots = new List<string>();

            return TryGetSlot(part, out var result) &&
                   ConnectedToCenterPartRecursion(searchedSlots, result);
        }

        private bool ConnectedToCenterPartRecursion(ICollection<string> searchedSlots, string slotName)
        {
            if (!TryGetPart(slotName, out var part))
            {
                return false;
            }

            if (part == CenterPart())
            {
                return true;
            }

            searchedSlots.Add(slotName);

            if (!TryGetSlotConnections(slotName, out var connections))
            {
                return false;
            }

            foreach (var connection in connections)
            {
                if (!searchedSlots.Contains(connection) &&
                    ConnectedToCenterPartRecursion(searchedSlots, connection))
                {
                    return true;
                }
            }

            return false;
        }

        public IBodyPart? CenterPart()
        {
            if (_centerSlot == null) return null;

            return Parts.GetValueOrDefault(_centerSlot);
        }

        public bool HasSlot(string slot)
        {
            return Slots.ContainsKey(slot);
        }

        public bool TryGetPart(string slot, [NotNullWhen(true)] out IBodyPart? result)
        {
            return Parts.TryGetValue(slot, out result);
        }

        public bool TryGetSlot(IBodyPart part, [NotNullWhen(true)] out string? slot)
        {
            // We enforce that there is only one of each value in the dictionary,
            // so we can iterate through the dictionary values to get the key from there.
            var pair = Parts.FirstOrDefault(x => x.Value == part);

            slot = pair.Key;

            return !pair.Equals(default);
        }

        public bool TryGetSlotType(string slot, out BodyPartType result)
        {
            return Slots.TryGetValue(slot, out result);
        }

        public bool TryGetSlotConnections(string slot, [NotNullWhen(true)] out List<string>? connections)
        {
            return Connections.TryGetValue(slot, out connections);
        }

        public bool TryGetPartConnections(string slot, [NotNullWhen(true)] out List<IBodyPart>? connections)
        {
            if (!Connections.TryGetValue(slot, out var slotConnections))
            {
                connections = null;
                return false;
            }

            connections = new List<IBodyPart>();

            foreach (var connection in slotConnections)
            {
                if (TryGetPart(connection, out var part))
                {
                    connections.Add(part);
                }
            }

            if (connections.Count <= 0)
            {
                connections = null;
                return false;
            }

            return true;
        }

        public bool TryGetPartConnections(IBodyPart part, [NotNullWhen(true)] out List<IBodyPart>? connections)
        {
            connections = null;

            return TryGetSlot(part, out var slotName) &&
                   TryGetPartConnections(slotName, out connections);
        }

        public List<IBodyPart> GetPartsOfType(BodyPartType type)
        {
            var parts = new List<IBodyPart>();

            foreach (var part in Parts.Values)
            {
                if (part.PartType == type)
                {
                    parts.Add(part);
                }
            }

            return parts;
        }

        public List<(IBodyPart part, IBodyPartProperty property)> GetPartsWithProperty(Type type)
        {
            var parts = new List<(IBodyPart, IBodyPartProperty)>();

            foreach (var part in Parts.Values)
            {
                if (part.TryGetProperty(type, out var property))
                {
                    parts.Add((part, property));
                }
            }

            return parts;
        }

        public List<(IBodyPart part, T property)> GetPartsWithProperty<T>() where T : class, IBodyPartProperty
        {
            var parts = new List<(IBodyPart, T)>();

            foreach (var part in Parts.Values)
            {
                if (part.TryGetProperty<T>(out var property))
                {
                    parts.Add((part, property));
                }
            }

            return parts;
        }

        // Recomputes walk/sprint speed from the legs currently attached to this body.
        private void CalculateSpeed()
        {
            if (!Owner.TryGetComponent(out MovementSpeedModifierComponent? playerMover))
            {
                return;
            }

            var legs = GetPartsWithProperty<LegComponent>();
            float speedSum = 0;

            // Reuse the list computed above instead of scanning the parts a second time.
            foreach (var leg in legs)
            {
                var footDistance = DistanceToNearestFoot(leg.part);

                // float.MinValue marks "no foot reachable from this leg".
                if (Math.Abs(footDistance - float.MinValue) <= 0.001f)
                {
                    continue;
                }

                speedSum += leg.property.Speed * (1 + (float) Math.Log(footDistance, 1024.0));
            }

            if (speedSum <= 0.001f)
            {
                playerMover.BaseWalkSpeed = 0.8f;
                playerMover.BaseSprintSpeed = 2.0f;
            }
            else
            {
                // Extra legs stack diminishingly.
                playerMover.BaseWalkSpeed =
                    speedSum / (legs.Count - (float) Math.Log(legs.Count, 4.0));

                playerMover.BaseSprintSpeed = playerMover.BaseWalkSpeed * 1.75f;
            }
        }

        /// <summary>
        ///     Called when the layout of this body changes.
        /// </summary>
        private void OnBodyChanged()
        {
            // Calculate move speed based on this body.
            if (Owner.HasComponent<MovementSpeedModifierComponent>())
            {
                CalculateSpeed();
            }

            Dirty();
        }

        /// <summary>
        ///     Returns the combined length of the distance to the nearest
        ///     <see cref="IBodyPart"/> that is a foot.
        ///     If you consider a <see cref="IBody"/> a node map, then it will
        ///     look for a foot node from the given node. It can only search
        ///     through <see cref="IBodyPart"/>s with an
        ///     <see cref="ExtensionComponent"/>.
        /// </summary>
        /// <returns>
        ///     The distance to the foot if found, <see cref="float.MinValue"/>
        ///     otherwise.
        /// </returns>
        public float DistanceToNearestFoot(IBodyPart source)
        {
            if (source.PartType == BodyPartType.Foot &&
                source.TryGetProperty<ExtensionComponent>(out var extension))
            {
                return extension.Distance;
            }

            return LookForFootRecursion(source, new List<IBodyPart>());
        }

        private float LookForFootRecursion(IBodyPart current, ICollection<IBodyPart> searchedParts)
        {
            if (!current.TryGetProperty<ExtensionComponent>(out var extProperty))
            {
                return float.MinValue;
            }

            // Get all connected parts if the current part has an extension property
            if (!TryGetPartConnections(current, out var connections))
            {
                return float.MinValue;
            }

            // Mark the current part as visited so cycles in the body graph terminate.
            // (Previously nothing was ever added to searchedParts.)
            searchedParts.Add(current);

            // If a connected BodyPart is a foot, return this BodyPart's length.
            foreach (var connection in connections)
            {
                if (connection.PartType == BodyPartType.Foot &&
                    !searchedParts.Contains(connection))
                {
                    return extProperty.Distance;
                }
            }

            // Otherwise, get the recursion values of all connected BodyParts and
            // store them in a list.
            var distances = new List<float>();

            foreach (var connection in connections)
            {
                // BUG FIX: the original check was inverted ("skip if NOT searched"),
                // which skipped every unvisited part and made the recursion dead code.
                // Skip parts we have already visited instead.
                if (searchedParts.Contains(connection))
                {
                    continue;
                }

                var result = LookForFootRecursion(connection, searchedParts);

                if (Math.Abs(result - float.MinValue) > 0.001f)
                {
                    distances.Add(result);
                }
            }

            // If one or more of the searches found a foot, return the smallest one
            // and add this ones length.
            if (distances.Count > 0)
            {
                return distances.Min<float>() + extProperty.Distance;
            }

            return float.MinValue;
        }

        // TODO BODY optimize this
        public KeyValuePair<string, BodyPartType> SlotAt(int index)
        {
            return Slots.ElementAt(index);
        }

        public KeyValuePair<string, IBodyPart> PartAt(int index)
        {
            return Parts.ElementAt(index);
        }

        public override void ExposeData(ObjectSerializer serializer)
        {
            base.ExposeData(serializer);

            serializer.DataReadWriteFunction(
                "template",
                null,
                name =>
                {
                    if (string.IsNullOrEmpty(name))
                    {
                        return;
                    }

                    var template = _prototypeManager.Index<BodyTemplatePrototype>(name);

                    Connections = template.Connections;
                    Slots = template.Slots;
                    _centerSlot = template.CenterSlot;

                    TemplateName = name;
                },
                () => TemplateName);

            serializer.DataReadWriteFunction(
                "preset",
                null,
                name =>
                {
                    if (string.IsNullOrEmpty(name))
                    {
                        return;
                    }

                    var preset = _prototypeManager.Index<BodyPresetPrototype>(name);

                    _partIds = preset.PartIDs;

                    // BUG FIX: PresetName was never assigned on read, so the value
                    // written back via "() => PresetName" was always lost. Mirror
                    // the template handler above.
                    PresetName = name;
                },
                () => PresetName);

            serializer.DataReadWriteFunction(
                "connections",
                new Dictionary<string, List<string>>(),
                connections =>
                {
                    foreach (var (from, to) in connections)
                    {
                        Connections.GetOrNew(from).AddRange(to);
                    }
                },
                () => Connections);

            serializer.DataReadWriteFunction(
                "slots",
                new Dictionary<string, BodyPartType>(),
                slots =>
                {
                    foreach (var (part, type) in slots)
                    {
                        Slots[part] = type;
                    }
                },
                () => Slots);

            // TODO BODY Move to template or somewhere else
            serializer.DataReadWriteFunction(
                "centerSlot",
                null,
                slot => _centerSlot = slot,
                () => _centerSlot);

            serializer.DataReadWriteFunction(
                "partIds",
                new Dictionary<string, string>(),
                partIds =>
                {
                    foreach (var (slot, part) in partIds)
                    {
                        _partIds[slot] = part;
                    }
                },
                () => _partIds);

            // Our prototypes don't force the user to define a BodyPart connection twice. E.g. Head: Torso v.s. Torso: Head.
            // The user only has to do one. We want it to be that way in the code, though, so this cleans that up.
            var cleanedConnections = new Dictionary<string, List<string>>();

            foreach (var targetSlotName in Slots.Keys)
            {
                var tempConnections = new List<string>();

                foreach (var (slotName, slotConnections) in Connections)
                {
                    if (slotName == targetSlotName)
                    {
                        foreach (var connection in slotConnections)
                        {
                            if (!tempConnections.Contains(connection))
                            {
                                tempConnections.Add(connection);
                            }
                        }
                    }
                    else if (slotConnections.Contains(targetSlotName))
                    {
                        tempConnections.Add(slotName);
                    }
                }

                if (tempConnections.Count > 0)
                {
                    cleanedConnections.Add(targetSlotName, tempConnections);
                }
            }

            Connections = cleanedConnections;
        }

        public override ComponentState GetComponentState()
        {
            var parts = new (string slot, EntityUid partId)[_parts.Count];

            var i = 0;
            foreach (var (slot, part) in _parts)
            {
                parts[i] = (slot, part.Owner.Uid);
                i++;
            }

            return new BodyComponentState(parts);
        }

        public override void HandleComponentState(ComponentState? curState, ComponentState? nextState)
        {
            base.HandleComponentState(curState, nextState);

            if (!(curState is BodyComponentState state))
            {
                return;
            }

            var newParts = state.Parts();

            // Drop parts that are gone or replaced, then add/replace with the new set.
            foreach (var (slot, oldPart) in _parts)
            {
                if (!newParts.TryGetValue(slot, out var newPart) || newPart != oldPart)
                {
                    RemovePart(oldPart);
                }
            }

            foreach (var (slot, newPart) in newParts)
            {
                if (!_parts.TryGetValue(slot, out var oldPart) || oldPart != newPart)
                {
                    TryAddPart(slot, newPart, true);
                }
            }
        }
    }

    [Serializable, NetSerializable]
    public class BodyComponentState : ComponentState
    {
        // Lazily resolved slot -> part cache; built on first Parts() call.
        private Dictionary<string, IBodyPart>? _parts;

        public readonly (string slot, EntityUid partId)[] PartIds;

        public BodyComponentState((string slot, EntityUid partId)[] partIds) : base(ContentNetIDs.BODY)
        {
            PartIds = partIds;
        }

        public Dictionary<string, IBodyPart> Parts(IEntityManager? entityManager = null)
        {
            if (_parts != null)
            {
                return _parts;
            }

            entityManager ??= IoCManager.Resolve<IEntityManager>();

            var parts = new Dictionary<string, IBodyPart>(PartIds.Length);

            foreach (var (slot, partId) in PartIds)
            {
                // Entities may not have arrived on this side yet; skip silently.
                if (!entityManager.TryGetEntity(partId, out var entity))
                {
                    continue;
                }

                if (!entity.TryGetComponent(out IBodyPart? part))
                {
                    continue;
                }

                parts[slot] = part;
            }

            return _parts = parts;
        }
    }
}
using BellRichM.Helpers.Test;
using BellRichM.Identity.Api.Data;
using BellRichM.Identity.Api.Exceptions;
using BellRichM.Identity.Api.Repositories;
using BellRichM.Logging;
using FluentAssertions;
using Machine.Specifications;
using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Storage;
using Moq;
using System;
using System.Collections.Generic;
using System.Security.Claims;
using IT = Moq.It;
using It = Machine.Specifications.It;

namespace BellRichM.Identity.Api.Test
{
    /// <summary>
    /// Shared MSpec fixture for RoleRepository: builds a mocked RoleManager,
    /// IdentityDbContext (with a mocked transaction) and a default role with
    /// an (initially empty) claims list. Derived spec classes override
    /// individual mock setups in their own Establish blocks.
    /// </summary>
    internal class RoleRepositorySpecs
    {
        protected static LoggingData loggingData;
        protected static Mock<ILoggerAdapter<RoleRepository>> loggerMock;
        protected static Mock<RoleManager<Role>> roleManagerMock;
        protected static Mock<IdentityDbContext> identityDbContextMock;
        protected static Mock<IRoleStore<Role>> roleStoreMock;
        protected static Mock<IDbContextTransaction> dbTransactionMock;
        protected static RoleRepository roleRepository;
        protected static Role role;
        protected static List<Claim> claims;
        protected static Claim claim;
        protected static List<ClaimValue> claimValues;

        Establish context = () =>
        {
            loggerMock = new Mock<ILoggerAdapter<RoleRepository>>();
            roleStoreMock = new Mock<IRoleStore<Role>>();
            // RoleManager's remaining ctor dependencies are not exercised, hence null.
            roleManagerMock = new Mock<RoleManager<Role>>(roleStoreMock.Object, null, null, null, null);
            dbTransactionMock = new Mock<IDbContextTransaction>();
            identityDbContextMock = new Mock<IdentityDbContext>(new DbContextOptions<IdentityDbContext>());
            identityDbContextMock.Setup(x => x.BeginTransaction()).Returns(dbTransactionMock.Object);
            role = new Role
            {
                Id = "id",
                Description = "description",
                Name = "name"
            };
            claimValues = new List<ClaimValue>();
            claimValues.Add(new ClaimValue { Type = "type", Value = "value" });
            claims = new List<Claim>();
            roleManagerMock.Setup(x => x.FindByIdAsync(role.Id))
                .ReturnsAsync(role);
            roleManagerMock.Setup(x => x.FindByNameAsync(role.Name))
                .ReturnsAsync(role);
            roleManagerMock.Setup(x => x.GetClaimsAsync(role))
                .ReturnsAsync(claims);
            roleRepository = new RoleRepository(loggerMock.Object, roleManagerMock.Object, identityDbContextMock.Object);
        };

        Cleanup after = () =>
            roleRepository.Dispose();
    }

    // GetById when the id resolves to no role: expects a null result.
    [Subject("Get Role")]
    internal class When_role_id_does_not_exist : RoleRepositorySpecs
    {
        private static Role roleResult;

        Establish context = () =>
        {
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };

            // Override the base setup: the role id now resolves to nothing.
            roleManagerMock.Setup(x => x.FindByIdAsync(role.Id))
                .ReturnsAsync((Role)null);
        };

        Because of = () =>
            roleResult = roleRepository.GetById(role.Id).Result;

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_correct_role = () =>
            roleResult.ShouldBeNull();
    }

    // GetById for an existing role with no claims attached.
    internal class When_getting_role_by_id_without_claims : RoleRepositorySpecs
    {
        private static Role roleResult;

        Establish context = () =>
        {
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            roleResult = roleRepository.GetById(role.Id).Result;

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_correct_role = () =>
            roleResult.Should().BeEquivalentTo(role);

        It should_have_no_claims = () =>
            roleResult.ClaimValues.Should().BeEmpty();
    }

    // GetById for an existing role that has one claim.
    internal class When_getting_role_by_id_with_claims : RoleRepositorySpecs
    {
        private static Role roleResult;

        Establish context = () =>
        {
            // Populate the shared list so GetClaimsAsync returns one claim.
            claim = new Claim("type", "value", "description");
            claims.Add(claim);
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            roleResult = roleRepository.GetById(role.Id).Result;

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_correct_role = () =>
            roleResult.Should().BeEquivalentTo(role);

        It should_have_one_claim = () =>
            roleResult.ClaimValues.Should().ContainSingle();

        It should_have_correct_claim_values = () =>
        {
            roleResult.ClaimValues.Should()
                .AllBeEquivalentTo(claim, config => config
                    .Excluding(ctx => ctx.OriginalIssuer)
                    .Excluding(ctx => ctx.Properties)
                    .Excluding(ctx => ctx.Subject)
                    .Excluding(ctx => ctx.Issuer));
        };
    }

    // GetByName when the name resolves to no role: expects a null result.
    internal class When_role_name_does_not_exist : RoleRepositorySpecs
    {
        private static Role roleResult;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.FindByNameAsync(role.Name))
                .ReturnsAsync((Role)null);
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        // NOTE(review): this passes role.Id to GetByName; the not-found outcome still
        // holds (no setup matches "id"), but role.Name was presumably intended — confirm.
        Because of = () =>
            roleResult = roleRepository.GetByName(role.Id).Result;

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_correct_role = () =>
            roleResult.ShouldBeNull();
    }

    // GetByName for an existing role with no claims attached.
    internal class When_getting_role_by_name_without_claims : RoleRepositorySpecs
    {
        private static Role roleResult;

        Establish context = () =>
        {
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            roleResult = roleRepository.GetByName(role.Name).Result;

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_correct_role = () =>
            roleResult.Should().BeEquivalentTo(role);

        It should_have_no_claims = () =>
            roleResult.ClaimValues.Should().BeEmpty();
    }

    // GetByName for an existing role that has one claim.
    internal class When_getting_role_by_name_with_claims : RoleRepositorySpecs
    {
        private static Role roleResult;

        Establish context = () =>
        {
            claim = new Claim("type", "value", "description");
            claims.Add(claim);
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            roleResult = roleRepository.GetByName(role.Name).Result;

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_correct_role = () =>
            roleResult.Should().BeEquivalentTo(role);

        It should_have_one_claim = () =>
            roleResult.ClaimValues.Should().ContainSingle();

        It should_have_correct_claim_values = () =>
        {
            roleResult.ClaimValues.Should()
                .AllBeEquivalentTo(claim, config => config
                    .Excluding(ctx => ctx.OriginalIssuer)
                    .Excluding(ctx => ctx.Properties)
                    .Excluding(ctx => ctx.Subject)
                    .Excluding(ctx => ctx.Issuer));
        };
    }

    // Create when RoleManager.CreateAsync fails: expects CreateRoleException + rollback.
    [Subject("creating Role")]
    internal class When_error_creating_role : RoleRepositorySpecs
    {
        private static Role roleResult;
        private static Exception exception;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.CreateAsync(role))
                .ReturnsAsync(IdentityResult.Failed());
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            exception = Catch.Exception(() => roleResult = roleRepository.Create(role).Await());

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_throw_correct_exception_type = () =>
            exception.ShouldBeOfExactType<CreateRoleException>();

        It should_have_correct_exception_code = () =>
            ((CreateRoleException)exception).Code.ShouldEqual(CreateRoleExceptionCode.CreateRoleFailed);

        It should_not_return_a_role = () =>
            roleResult.ShouldBeNull();

        It should_rollback_the_work = () =>
            dbTransactionMock.Verify(x => x.Rollback(), Times.Once);

        It should_not_commit_the_work = () =>
            dbTransactionMock.Verify(x => x.Commit(), Times.Never);
    }

    // Create succeeds but AddClaimAsync fails: expects AddClaimFailed + rollback.
    internal class When_error_adding_claim : RoleRepositorySpecs
    {
        private static Role roleResult;
        private static Exception exception;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.CreateAsync(role))
                .ReturnsAsync(IdentityResult.Success);
            roleManagerMock
                .Setup(x => x.AddClaimAsync(role, IT.IsAny<Claim>()))
                .ReturnsAsync(IdentityResult.Failed());
            role.ClaimValues = claimValues;
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            exception = Catch.Exception(() => roleResult = roleRepository.Create(role).Await());

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_throw_correct_exception_type = () =>
            exception.ShouldBeOfExactType<CreateRoleException>();

        It should_have_correct_exception_code = () =>
            ((CreateRoleException)exception).Code.ShouldEqual(CreateRoleExceptionCode.AddClaimFailed);

        It should_not_return_a_role = () =>
            roleResult.ShouldBeNull();

        It should_rollback_the_work = () =>
            dbTransactionMock.Verify(x => x.Rollback(), Times.Once);

        It should_not_commit_the_work = () =>
            dbTransactionMock.Verify(x => x.Commit(), Times.Never);
    }

    // Happy-path Create with no claims: expects a commit and no rollback.
    internal class When_creating_role_without_claims : RoleRepositorySpecs
    {
        private static Role roleResult;
        private static Exception exception;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.CreateAsync(role))
                .ReturnsAsync(IdentityResult.Success);
            loggingData = new LoggingData
            {
                DebugTimes = 2,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            exception = Catch.Exception(() => roleResult = roleRepository.Create(role).Await());

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_a_role = () =>
            roleResult.ShouldNotBeNull();

        It should_not_rollback_the_work = () =>
            dbTransactionMock.Verify(x => x.Rollback(), Times.Never);

        It should_commit_the_work = () =>
            dbTransactionMock.Verify(x => x.Commit(), Times.Once);
    }

    // Happy-path Create with one claim: expects the claim added once plus a commit.
    internal class When_creating_role_with_claims : RoleRepositorySpecs
    {
        private static Role roleResult;
        private static Exception exception;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.CreateAsync(role))
                .ReturnsAsync(IdentityResult.Success);
            roleManagerMock
                .Setup(x => x.AddClaimAsync(role, IT.IsAny<Claim>()))
                .ReturnsAsync(IdentityResult.Success);
            role.ClaimValues = claimValues;
            loggingData = new LoggingData
            {
                DebugTimes = 2,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            exception = Catch.Exception(() => roleResult = roleRepository.Create(role).Await());

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_return_a_role = () =>
            roleResult.ShouldNotBeNull();

        It should_add_the_claim = () =>
            roleManagerMock.Verify(
                x => x.AddClaimAsync(
                    IT.IsAny<Role>(),
                    IT.Is<Claim>(c => c.Type == "type" && c.Value == "value")),
                Times.Once);

        It should_not_rollback_the_work = () =>
            dbTransactionMock.Verify(x => x.Rollback(), Times.Never);

        It should_commit_the_work = () =>
            dbTransactionMock.Verify(x => x.Commit(), Times.Once);
    }

    // Delete of an existing role that succeeds: expects no exception.
    [Subject("Delete Role")]
    internal class When_deleting_role_succeeds : RoleRepositorySpecs
    {
        private static Exception exception;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.FindByIdAsync(role.Id))
                .ReturnsAsync(role);
            roleManagerMock.Setup(x => x.DeleteAsync(role))
                .ReturnsAsync(IdentityResult.Success);
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            exception = Catch.Exception(() => roleRepository.Delete(role.Id).Await());

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_not_throw_exception = () =>
            exception.ShouldBeNull();
    }

    // Delete when RoleManager.DeleteAsync fails: expects DeleteRoleFailed.
    internal class When_deleting_role_fails : RoleRepositorySpecs
    {
        private static Exception exception;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.FindByIdAsync(role.Id))
                .ReturnsAsync(role);
            roleManagerMock.Setup(x => x.DeleteAsync(role))
                .ReturnsAsync(IdentityResult.Failed());
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            exception = Catch.Exception(() => roleRepository.Delete(role.Id).Await());

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_throw_correct_exception_type = () =>
            exception.ShouldBeOfExactType<DeleteRoleException>();

        It should_have_correct_exception_code = () =>
            ((DeleteRoleException)exception).Code.ShouldEqual(DeleteRoleExceptionCode.DeleteRoleFailed);
    }

    // Delete of a role id that does not resolve: expects RoleNotFound.
    internal class When_deleting_nonexistant_role : RoleRepositorySpecs
    {
        private static Exception exception;

        Establish context = () =>
        {
            roleManagerMock.Setup(x => x.FindByIdAsync(role.Id))
                .ReturnsAsync((Role)null);
            loggingData = new LoggingData
            {
                DebugTimes = 1,
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };
        };

        Because of = () =>
            exception = Catch.Exception(() => roleRepository.Delete(role.Id).Await());

        Behaves_like<LoggingBehaviors<RoleRepository>> correct_logging = () => { };

        It should_throw_correct_exception_type = () =>
            exception.ShouldBeOfExactType<DeleteRoleException>();

        It should_have_correct_exception_code = () =>
            ((DeleteRoleException)exception).Code.ShouldEqual(DeleteRoleExceptionCode.RoleNotFound);
    }
}
using System;
using System.Configuration;

namespace Miracle.Diagnostics.Logging
{
    /// <summary>
    /// Static class for logging through logging framework.
    /// </summary>
    public class Log
    {
        // NOTE: GetSection re-reads configuration on every access, so Add() takes a
        // single snapshot per call to keep one consistent rule set per log entry.
        private static ConfigSection Config
        {
            get { return (ConfigSection) ConfigurationManager.GetSection("log"); }
        }

        /// <summary>
        /// Get/Set object used to get context from.
        /// </summary>
        public static object Context { get; set; }

        /// <summary>
        /// Add log entry to global logging infrastructure.
        /// The entry is forwarded to every active rule whose severity window contains
        /// the entry's severity, provided the global severity window also matches.
        /// </summary>
        /// <param name="entry">Entry to route to the configured log sinks.</param>
        public static void Add(ILogEntry entry)
        {
            var config = Config;

            // Fix: ConfigurationManager.GetSection returns null when no "log" section
            // exists; the previous code dereferenced it unconditionally and threw a
            // NullReferenceException. Treat a missing section as "logging disabled".
            if (config == null)
            {
                return;
            }

            if (
                (config.MinSeveritySpecified == false || entry.Severity >= config.MinSeverity) &&
                (config.MaxSeveritySpecified == false || entry.Severity <= config.MaxSeverity)
                )
            {
                foreach (var rule in config.Rules)
                {
                    if (!rule.IsActive)
                    {
                        continue;
                    }

                    if (
                        (rule.MinSeveritySpecified == false || entry.Severity >= rule.MinSeverity) &&
                        (rule.MaxSeveritySpecified == false || entry.Severity <= rule.MaxSeverity)
                        )
                    {
                        ILog log = rule.CreateInstance();
                        log.Add(entry);
                    }
                }
            }
        }

        #region Debug helpers

        /// <summary>
        /// Log message using Debug severity
        /// </summary>
        /// <param name="message">Message to log</param>
        public static void Debug(string message)
        {
            Add(new LogEntry(SeverityEnum.Debug, message));
        }

        /// <summary>
        /// Log formatted message using Debug severity
        /// </summary>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Debug(string messageFormat, params object[] args)
        {
            Debug(string.Format(messageFormat, args));
        }

        #endregion

        #region Information helpers

        /// <summary>
        /// Log message using Information severity
        /// </summary>
        /// <param name="message">Message to log</param>
        public static void Information(string message)
        {
            Add(new LogEntry(SeverityEnum.Information, message));
        }

        /// <summary>
        /// Log formatted message using Information severity
        /// </summary>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Information(string messageFormat, params object[] args)
        {
            Information(string.Format(messageFormat, args));
        }

        #endregion

        #region Warning helpers

        /// <summary>
        /// Log message using Warning severity
        /// </summary>
        /// <param name="message">Message to log</param>
        public static void Warning(string message)
        {
            Add(new LogEntry(SeverityEnum.Warning, message));
        }

        /// <summary>
        /// Log formatted message using Warning severity
        /// </summary>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Warning(string messageFormat, params object[] args)
        {
            Warning(string.Format(messageFormat, args));
        }

        /// <summary>
        /// Log exception using Warning severity
        /// </summary>
        /// <param name="ex">Exception to log</param>
        public static void Warning(Exception ex)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Warning, ex));
        }

        /// <summary>
        /// Log exception using Warning severity
        /// </summary>
        /// <param name="message">Message to log</param>
        /// <param name="ex">Exception to log</param>
        public static void Warning(Exception ex, string message)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Warning, message, ex));
        }

        /// <summary>
        /// Log exception using Warning severity
        /// </summary>
        /// <param name="ex">Exception to log</param>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Warning(Exception ex, string messageFormat, params object[] args)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Warning, string.Format(messageFormat, args), ex));
        }

        #endregion

        #region Error helpers

        /// <summary>
        /// Log message using Error severity
        /// </summary>
        /// <param name="message">Message to log</param>
        public static void Error(string message)
        {
            Add(new LogEntry(SeverityEnum.Error, message));
        }

        /// <summary>
        /// Log formatted message using Error severity
        /// </summary>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Error(string messageFormat, params object[] args)
        {
            Error(string.Format(messageFormat, args));
        }

        /// <summary>
        /// Log exception using Error severity
        /// </summary>
        /// <param name="ex">Exception to log</param>
        public static void Error(Exception ex)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Error, ex));
        }

        /// <summary>
        /// Log exception using Error severity
        /// </summary>
        /// <param name="message">Message to log</param>
        /// <param name="ex">Exception to log</param>
        public static void Error(Exception ex, string message)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Error, message, ex));
        }

        /// <summary>
        /// Log exception using Error severity
        /// </summary>
        /// <param name="ex">Exception to log</param>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Error(Exception ex, string messageFormat, params object[] args)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Error, string.Format(messageFormat, args), ex));
        }

        #endregion

        #region Fatal helpers

        /// <summary>
        /// Log message using Fatal severity
        /// </summary>
        /// <param name="message">Message to log</param>
        public static void Fatal(string message)
        {
            Add(new LogEntry(SeverityEnum.Fatal, message));
        }

        /// <summary>
        /// Log formatted message using Fatal severity
        /// </summary>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Fatal(string messageFormat, params object[] args)
        {
            Fatal(string.Format(messageFormat, args));
        }

        /// <summary>
        /// Log exception using Fatal severity
        /// </summary>
        /// <param name="ex">Exception to log</param>
        public static void Fatal(Exception ex)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Fatal, ex));
        }

        /// <summary>
        /// Log exception using Fatal severity
        /// </summary>
        /// <param name="message">Message to log</param>
        /// <param name="ex">Exception to log</param>
        public static void Fatal(Exception ex, string message)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Fatal, message, ex));
        }

        /// <summary>
        /// Log exception using Fatal severity
        /// </summary>
        /// <param name="ex">Exception to log</param>
        /// <param name="messageFormat">Message format to log</param>
        /// <param name="args">Argument for message format</param>
        public static void Fatal(Exception ex, string messageFormat, params object[] args)
        {
            Add(new ExceptionLogEntry(SeverityEnum.Fatal, string.Format(messageFormat, args), ex));
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: A random number generator.
**
**
===========================================================*/
namespace System {

    using System;
    using System.Runtime;
    using System.Runtime.CompilerServices;
    using System.Globalization;
    using System.Diagnostics.Contracts;

    // Knuth's subtractive lagged-Fibonacci generator (TAOCP Vol. 2).
    // NOTE(review): instances use mutable state with no synchronization — not safe
    // for concurrent use from multiple threads.
    [System.Runtime.InteropServices.ComVisible(true)]
    [Serializable]
    public class Random {
        //
        // Private Constants
        //
        private const int MBIG =  Int32.MaxValue;   // modulus of the generator
        private const int MSEED = 161803398;        // "magic" seed constant (golden ratio digits)
        private const int MZ = 0;

        //
        // Member Variables
        //
        private int inext;                          // index of the first lag
        private int inextp;                         // index of the second lag (21 ahead)
        private int[] SeedArray = new int[56];      // state; slots [1..55] are used

        //
        // Public Constants
        //

        //
        // Native Declarations
        //

        //
        // Constructors
        //

        public Random()
          : this(Environment.TickCount) {
        }

        public Random(int Seed) {
          int ii;
          int mj, mk;

          //Initialize our Seed array.
          //This algorithm comes from Numerical Recipes in C (2nd Ed.)
          // Int32.MinValue has no positive Math.Abs counterpart, so special-case it.
          int subtraction = (Seed == Int32.MinValue) ? Int32.MaxValue : Math.Abs(Seed);
          mj = MSEED - subtraction;
          SeedArray[55]=mj;
          mk=1;
          // NOTE(review): the loop below runs i = 1..54 only, so one slot keeps its
          // initial value. This matches the historical implementation; changing the
          // bound would change every generated sequence for a given seed.
          for (int i=1; i<55; i++) {  //Apparently the range [1..55] is special (Knuth) and so we're wasting the 0'th position.
            ii = (21*i)%55;
            SeedArray[ii]=mk;
            mk = mj - mk;
            if (mk<0) mk+=MBIG;
            mj=SeedArray[ii];
          }
          // "Warm up" the state with four scrambling passes.
          for (int k=1; k<5; k++) {
            for (int i=1; i<56; i++) {
              SeedArray[i] -= SeedArray[1+(i+30)%55];
              if (SeedArray[i]<0) SeedArray[i]+=MBIG;
            }
          }
          inext=0;
          inextp = 21;
          // NOTE(review): dead store — Seed is a by-value parameter and is never read
          // after this point.
          Seed = 1;
        }

        //
        // Package Private Methods
        //

        /*====================================Sample====================================
        **Action: Return a new random number [0..1) and reSeed the Seed array.
        **Returns: A double [0..1)
        **Arguments: None
        **Exceptions: None
        ==============================================================================*/
        protected virtual double Sample() {
            //Including this division at the end gives us significantly improved
            //random number distribution.
            return (InternalSample()*(1.0/MBIG));
        }

        // Core step: subtractive difference of the two lagged state values,
        // wrapped back into [0, MBIG) and written back into the state.
        private int InternalSample() {
          int retVal;
          int locINext = inext;
          int locINextp = inextp;

          if (++locINext >=56) locINext=1;
          if (++locINextp>= 56) locINextp = 1;

          retVal = SeedArray[locINext]-SeedArray[locINextp];

          // Keep the result strictly below MBIG so Sample() stays < 1.0.
          if (retVal == MBIG) retVal--;
          if (retVal<0) retVal+=MBIG;

          SeedArray[locINext]=retVal;

          inext = locINext;
          inextp = locINextp;

          return retVal;
        }

        //
        // Public Instance Methods
        //

        /*=====================================Next=====================================
        **Returns: An int [0..Int32.MaxValue)
        **Arguments: None
        **Exceptions: None.
        ==============================================================================*/
        public virtual int Next() {
          return InternalSample();
        }

        private double GetSampleForLargeRange() {
            // The distribution of double value returned by Sample
            // is not distributed well enough for a large range.
            // If we use Sample for a range [Int32.MinValue..Int32.MaxValue)
            // We will end up getting even numbers only.

            int result = InternalSample();
            // Note we can't use addition here. The distribution will be bad if we do that.
            bool negative = (InternalSample()%2 == 0) ? true : false;  // decide the sign based on second sample
            if( negative) {
                result = -result;
            }
            double d = result;
            d += (Int32.MaxValue - 1); // get a number in range [0 .. 2 * Int32MaxValue - 1)
            d /= 2*(uint)Int32.MaxValue - 1  ;
            return d;
        }

        /*=====================================Next=====================================
        **Returns: An int [minvalue..maxvalue)
        **Arguments: minValue -- the least legal value for the Random number.
        **           maxValue -- One greater than the greatest legal return value.
        **Exceptions: ArgumentOutOfRangeException when minValue > maxValue.
        ==============================================================================*/
        public virtual int Next(int minValue, int maxValue) {
            if (minValue>maxValue) {
                throw new ArgumentOutOfRangeException("minValue",Environment.GetResourceString("Argument_MinMaxValue", "minValue", "maxValue"));
            }
            Contract.EndContractBlock();

            // Ranges wider than Int32.MaxValue need the two-sample path for an
            // acceptable distribution (see GetSampleForLargeRange).
            long range = (long)maxValue-minValue;
            if( range <= (long)Int32.MaxValue) {
                return ((int)(Sample() * range) + minValue);
            }
            else {
                return (int)((long)(GetSampleForLargeRange() * range) + minValue);
            }
        }

        /*=====================================Next=====================================
        **Returns: An int [0..maxValue)
        **Arguments: maxValue -- One more than the greatest legal return value.
        **Exceptions: ArgumentOutOfRangeException when maxValue is negative.
        ==============================================================================*/
        public virtual int Next(int maxValue) {
            if (maxValue<0) {
                throw new ArgumentOutOfRangeException("maxValue", Environment.GetResourceString("ArgumentOutOfRange_MustBePositive", "maxValue"));
            }
            Contract.EndContractBlock();
            return (int)(Sample()*maxValue);
        }

        /*=====================================Next=====================================
        **Returns: A double [0..1)
        **Arguments: None
        **Exceptions: None
        ==============================================================================*/
        public virtual double NextDouble() {
            return Sample();
        }

        /*==================================NextBytes===================================
        **Action:  Fills the byte array with random bytes [0..0xFF].  The entire array is filled.
        **         (Fixed comment: InternalSample() % (Byte.MaxValue + 1) yields 0..255,
        **         not 0..0x7f as previously documented.)
        **Returns:Void
        **Arugments:  buffer -- the array to be filled.
        **Exceptions: ArgumentNullException when buffer is null.
        ==============================================================================*/
        public virtual void NextBytes(byte [] buffer){
          if (buffer==null) throw new ArgumentNullException("buffer");
          Contract.EndContractBlock();
          for (int i=0; i<buffer.Length; i++) {
            buffer[i]=(byte)(InternalSample()%(Byte.MaxValue+1));
          }
        }
    }
}
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using Microsoft.Build.Construction;
using Microsoft.DotNet.Internal.ProjectModel;
using Microsoft.DotNet.Internal.ProjectModel.Graph;
using Microsoft.DotNet.Cli;
using System.Linq;
using System.IO;
using Microsoft.DotNet.ProjectJsonMigration.Rules;
using Microsoft.DotNet.Tools.Common;

namespace Microsoft.DotNet.ProjectJsonMigration
{
    /// <summary>
    /// Drives migration of a project.json project — and optionally its transitive
    /// project-to-project dependency closure — to the csproj/MSBuild format by
    /// applying an <see cref="IMigrationRule"/> set to each project.
    /// </summary>
    internal class ProjectMigrator
    {
        private readonly IMigrationRule _ruleSet;
        private readonly ProjectDependencyFinder _projectDependencyFinder = new ProjectDependencyFinder();

        public ProjectMigrator() : this(new DefaultMigrationRuleSet()) { }

        public ProjectMigrator(IMigrationRule ruleSet)
        {
            _ruleSet = ruleSet;
        }

        /// <summary>
        /// Migrates the root project and, unless <paramref name="skipProjectReferences"/>
        /// is set, every project in its transitive dependency closure.
        /// </summary>
        /// <param name="rootSettings">Settings describing the root project to migrate.</param>
        /// <param name="skipProjectReferences">When true, only the root project is migrated.</param>
        /// <returns>A report aggregating one entry per migrated (or failed) project.</returns>
        public MigrationReport Migrate(MigrationSettings rootSettings, bool skipProjectReferences = false)
        {
            if (rootSettings == null)
            {
                // Fix: name the offending argument instead of throwing a bare exception.
                throw new ArgumentNullException(nameof(rootSettings));
            }

            // Try to read the project dependencies, ignore an unresolved exception for now
            MigrationRuleInputs rootInputs = ComputeMigrationRuleInputs(rootSettings);
            IEnumerable<ProjectDependency> projectDependencies = null;
            var tempMSBuildProjectTemplate = rootSettings.CloneMSBuildProjectTemplate();

            try
            {
                // Verify up front so we can prefer these errors over an unresolved project dependency
                VerifyInputs(rootInputs, rootSettings);
                projectDependencies = ResolveTransitiveClosureProjectDependencies(
                    rootSettings.ProjectDirectory,
                    rootSettings.ProjectXProjFilePath);
            }
            catch (MigrationException e)
            {
                return new MigrationReport(
                    new List<ProjectMigrationReport>
                    {
                        new ProjectMigrationReport(
                            rootSettings.ProjectDirectory,
                            rootInputs?.DefaultProjectContext.GetProjectName(),
                            new List<MigrationError> { e.Error },
                            null)
                    });
            }

            var projectMigrationReports = new List<ProjectMigrationReport>();
            projectMigrationReports.Add(MigrateProject(rootSettings));

            if (skipProjectReferences)
            {
                return new MigrationReport(projectMigrationReports);
            }

            foreach (var project in projectDependencies)
            {
                var projectDir = Path.GetDirectoryName(project.ProjectFilePath);
                var settings = new MigrationSettings(
                    projectDir,
                    projectDir,
                    rootSettings.SdkPackageVersion,
                    tempMSBuildProjectTemplate);

                // Fix: MigrateProject was previously invoked twice per dependency (once
                // with the result discarded, once for the report), migrating each
                // dependency project twice. Migrate each one exactly once.
                projectMigrationReports.Add(MigrateProject(settings));
            }

            return new MigrationReport(projectMigrationReports);
        }

        // Best-effort cleanup of project.json files; failures are deliberately
        // swallowed (the files may be read-only or already gone).
        private void DeleteProjectJsons(MigrationSettings rootsettings, IEnumerable<ProjectDependency> projectDependencies)
        {
            try
            {
                File.Delete(Path.Combine(rootsettings.ProjectDirectory, "project.json"));
            }
            catch { }

            foreach (var projectDependency in projectDependencies)
            {
                try
                {
                    File.Delete(projectDependency.ProjectFilePath);
                }
                catch { }
            }
        }

        /// <summary>
        /// Breadth-first walk of project-to-project references starting at
        /// <paramref name="rootProject"/>, returning the de-duplicated closure.
        /// </summary>
        private IEnumerable<ProjectDependency> ResolveTransitiveClosureProjectDependencies(string rootProject, string xprojFile)
        {
            HashSet<ProjectDependency> projectsMap = new HashSet<ProjectDependency>(new ProjectDependencyComparer());
            var projectDependencies = _projectDependencyFinder.ResolveProjectDependencies(rootProject, xprojFile);
            Queue<ProjectDependency> projectsQueue = new Queue<ProjectDependency>(projectDependencies);

            while (projectsQueue.Count > 0)
            {
                var projectDependency = projectsQueue.Dequeue();

                if (projectsMap.Contains(projectDependency))
                {
                    // Already visited; avoids cycles and duplicate work.
                    continue;
                }

                projectsMap.Add(projectDependency);

                var projectDir = Path.GetDirectoryName(projectDependency.ProjectFilePath);
                projectDependencies = _projectDependencyFinder.ResolveProjectDependencies(projectDir);

                foreach (var project in projectDependencies)
                {
                    projectsQueue.Enqueue(project);
                }
            }

            return projectsMap;
        }

        /// <summary>
        /// Migrates a single project, returning a per-project report. Already-migrated
        /// projects are skipped; rule failures are captured as report errors.
        /// </summary>
        private ProjectMigrationReport MigrateProject(MigrationSettings migrationSettings)
        {
            var migrationRuleInputs = ComputeMigrationRuleInputs(migrationSettings);
            var projectName = migrationRuleInputs.DefaultProjectContext.GetProjectName();

            try
            {
                if (IsMigrated(migrationSettings, migrationRuleInputs))
                {
                    MigrationTrace.Instance.WriteLine($"{nameof(ProjectMigrator)}: Skip migrating {migrationSettings.ProjectDirectory}, it is already migrated.");
                    return new ProjectMigrationReport(migrationSettings.ProjectDirectory, projectName, skipped: true);
                }

                VerifyInputs(migrationRuleInputs, migrationSettings);

                SetupOutputDirectory(migrationSettings.ProjectDirectory, migrationSettings.OutputDirectory);

                _ruleSet.Apply(migrationSettings, migrationRuleInputs);
            }
            catch (MigrationException exc)
            {
                var error = new List<MigrationError> { exc.Error };
                return new ProjectMigrationReport(migrationSettings.ProjectDirectory, projectName, error, null);
            }

            var outputProject = Path.Combine(migrationSettings.OutputDirectory, projectName + ".csproj");

            return new ProjectMigrationReport(migrationSettings.ProjectDirectory, projectName, outputProject, null);
        }

        // Builds the rule inputs: per-framework project contexts, the (cloned) MSBuild
        // template plus one fresh property group and item group, and the xproj if any.
        private MigrationRuleInputs ComputeMigrationRuleInputs(MigrationSettings migrationSettings)
        {
            var projectContexts = ProjectContext.CreateContextForEachFramework(migrationSettings.ProjectDirectory);

            var xprojFile = migrationSettings.ProjectXProjFilePath ??
                            _projectDependencyFinder.FindXprojFile(migrationSettings.ProjectDirectory);

            ProjectRootElement xproj = null;
            if (xprojFile != null)
            {
                xproj = ProjectRootElement.Open(xprojFile);
            }

            var templateMSBuildProject = migrationSettings.MSBuildProjectTemplate;
            if (templateMSBuildProject == null)
            {
                throw new Exception("Expected non-null MSBuildProjectTemplate in MigrationSettings");
            }

            var propertyGroup = templateMSBuildProject.AddPropertyGroup();
            var itemGroup = templateMSBuildProject.AddItemGroup();

            return new MigrationRuleInputs(projectContexts, templateMSBuildProject, itemGroup, propertyGroup, xproj);
        }

        private void VerifyInputs(MigrationRuleInputs migrationRuleInputs, MigrationSettings migrationSettings)
        {
            VerifyProject(migrationRuleInputs.ProjectContexts, migrationSettings.ProjectDirectory);
        }

        // Rejects projects with no frameworks, project.json diagnostics, or a
        // non-C# compiler — all unmigratable conditions.
        private void VerifyProject(IEnumerable<ProjectContext> projectContexts, string projectDirectory)
        {
            if (!projectContexts.Any())
            {
                MigrationErrorCodes.MIGRATE1013($"No projects found in {projectDirectory}").Throw();
            }

            var defaultProjectContext = projectContexts.First();

            var diagnostics = defaultProjectContext.ProjectFile.Diagnostics;
            if (diagnostics.Any())
            {
                MigrationErrorCodes.MIGRATE1011(
                    $"{projectDirectory}{Environment.NewLine}{string.Join(Environment.NewLine, diagnostics.Select(d => FormatDiagnosticMessage(d)))}")
                    .Throw();
            }

            var compilerName =
                defaultProjectContext.ProjectFile.GetCompilerOptions(defaultProjectContext.TargetFramework, "_")
                    .CompilerName;
            if (!compilerName.Equals("csc", StringComparison.OrdinalIgnoreCase))
            {
                MigrationErrorCodes.MIGRATE20013(
                    $"Cannot migrate project {defaultProjectContext.ProjectFile.ProjectFilePath} using compiler {compilerName}").Throw();
            }
        }

        private string FormatDiagnosticMessage(DiagnosticMessage d)
        {
            return $"{d.Message} (line: {d.StartLine}, file: {d.SourceFilePath})";
        }

        // Ensures the output directory exists; when migrating out-of-place, also
        // copies the project tree into it.
        private void SetupOutputDirectory(string projectDirectory, string outputDirectory)
        {
            if (!Directory.Exists(outputDirectory))
            {
                Directory.CreateDirectory(outputDirectory);
            }

            if (projectDirectory != outputDirectory)
            {
                CopyProjectToOutputDirectory(projectDirectory, outputDirectory);
            }
        }

        private void CopyProjectToOutputDirectory(string projectDirectory, string outputDirectory)
        {
            var sourceFilePaths = Directory.EnumerateFiles(projectDirectory, "*", SearchOption.AllDirectories);

            foreach (var sourceFilePath in sourceFilePaths)
            {
                var relativeFilePath = PathUtility.GetRelativePath(projectDirectory, sourceFilePath);
                var destinationFilePath = Path.Combine(outputDirectory, relativeFilePath);
                var destinationDirectory = Path.GetDirectoryName(destinationFilePath);

                if (!Directory.Exists(destinationDirectory))
                {
                    Directory.CreateDirectory(destinationDirectory);
                }

                File.Copy(sourceFilePath, destinationFilePath);
            }
        }

        /// <summary>
        /// A project counts as migrated when its output csproj already exists.
        /// </summary>
        public bool IsMigrated(MigrationSettings migrationSettings, MigrationRuleInputs migrationRuleInputs)
        {
            var outputName = migrationRuleInputs.DefaultProjectContext.GetProjectName();

            var outputProject = Path.Combine(migrationSettings.OutputDirectory, outputName + ".csproj");

            return File.Exists(outputProject);
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using System.Threading; using EnvDTE; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Editor.Shared.Utilities; using Microsoft.CodeAnalysis.Host; using Microsoft.CodeAnalysis.Host.Mef; using Microsoft.CodeAnalysis.Notification; using Microsoft.CodeAnalysis.Options; using Microsoft.CodeAnalysis.Shared.Utilities; using Microsoft.CodeAnalysis.SolutionCrawler; using Microsoft.CodeAnalysis.Storage; using Microsoft.CodeAnalysis.Text; using Microsoft.VisualStudio.ComponentModelHost; using Microsoft.VisualStudio.Editor; using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem.Extensions; using Microsoft.VisualStudio.LanguageServices.Utilities; using Microsoft.VisualStudio.Shell; using Microsoft.VisualStudio.Shell.Interop; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.TextManager.Interop; using Roslyn.Utilities; using Roslyn.VisualStudio.ProjectSystem; using VSLangProj; using VSLangProj140; using OLEServiceProvider = Microsoft.VisualStudio.OLE.Interop.IServiceProvider; using OleInterop = Microsoft.VisualStudio.OLE.Interop; namespace Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem { /// <summary> /// The Workspace for running inside Visual Studio. /// </summary> internal abstract partial class VisualStudioWorkspaceImpl : VisualStudioWorkspace { private static readonly IntPtr s_docDataExisting_Unknown = new IntPtr(-1); private const string AppCodeFolderName = "App_Code"; protected readonly IServiceProvider ServiceProvider; private readonly IVsUIShellOpenDocument _shellOpenDocument; private readonly IVsTextManager _textManager; // Not readonly because it needs to be set in the derived class' constructor. 
private VisualStudioProjectTracker _projectTracker;

// document worker coordinator
private ISolutionCrawlerRegistrationService _registrationService;

private readonly ForegroundThreadAffinitizedObject _foregroundObject = new ForegroundThreadAffinitizedObject();

public VisualStudioWorkspaceImpl(
    SVsServiceProvider serviceProvider,
    WorkspaceBackgroundWork backgroundWork)
    : base(
        CreateHostServices(serviceProvider),
        backgroundWork)
{
    this.ServiceProvider = serviceProvider;
    _textManager = serviceProvider.GetService(typeof(SVsTextManager)) as IVsTextManager;
    _shellOpenDocument = serviceProvider.GetService(typeof(SVsUIShellOpenDocument)) as IVsUIShellOpenDocument;

    // Ensure the options factory services are initialized on the UI thread
    this.Services.GetService<IOptionService>();
}

// Builds MEF-v1-backed host services from the VS component model.
internal static HostServices CreateHostServices(SVsServiceProvider serviceProvider)
{
    var composition = (IComponentModel)serviceProvider.GetService(typeof(SComponentModel));
    return MefV1HostServices.Create(composition.DefaultExportProvider);
}

// Wires up the project tracker, document/metadata/ruleset providers and the
// workspace host, then starts forwarding save events.
protected void InitializeStandardVisualStudioWorkspace(SVsServiceProvider serviceProvider, SaveEventsService saveEventsService)
{
    var projectTracker = new VisualStudioProjectTracker(serviceProvider, this.Services);

    // Ensure the document tracking service is initialized on the UI thread
    var documentTrackingService = (VisualStudioDocumentTrackingService)this.Services.GetService<IDocumentTrackingService>();
    var documentProvider = new DocumentProvider(projectTracker, serviceProvider, documentTrackingService);
    var metadataReferenceProvider = this.Services.GetService<VisualStudioMetadataReferenceManager>();
    var ruleSetFileProvider = this.Services.GetService<VisualStudioRuleSetManager>();
    projectTracker.InitializeProviders(documentProvider, metadataReferenceProvider, ruleSetFileProvider);

    this.SetProjectTracker(projectTracker);

    var workspaceHost = new VisualStudioWorkspaceHost(this);
    projectTracker.RegisterWorkspaceHost(workspaceHost);
    projectTracker.StartSendingEventsToWorkspaceHost(workspaceHost);

    saveEventsService.StartSendingSaveEvents();

    // Ensure the options factory services are initialized on the UI thread
    this.Services.GetService<IOptionService>();
}

/// <summary>NOTE: Call only from derived class constructor</summary>
protected void SetProjectTracker(VisualStudioProjectTracker projectTracker)
{
    _projectTracker = projectTracker;
}

internal VisualStudioProjectTracker ProjectTracker
{
    get
    {
        return _projectTracker;
    }
}

// Drops cached metadata references held by the project tracker's provider.
internal void ClearReferenceCache()
{
    _projectTracker.MetadataReferenceProvider.ClearCache();
}

// Returns the host document (regular or additional) for the id, or null when
// the owning project is not tracked.
internal IVisualStudioHostDocument GetHostDocument(DocumentId documentId)
{
    var project = GetHostProject(documentId.ProjectId);
    if (project != null)
    {
        return project.GetDocumentOrAdditionalDocument(documentId);
    }

    return null;
}

internal IVisualStudioHostProject GetHostProject(ProjectId projectId)
{
    return this.ProjectTracker.GetProject(projectId);
}

private bool TryGetHostProject(ProjectId projectId, out IVisualStudioHostProject project)
{
    project = GetHostProject(projectId);
    return project != null;
}

// Ensures projects whose references/documents change are loaded, checks changed
// files out of source control, then defers to the base implementation.
internal override bool TryApplyChanges(
    Microsoft.CodeAnalysis.Solution newSolution,
    IProgressTracker progressTracker)
{
    var projectChanges = newSolution.GetChanges(this.CurrentSolution).GetProjectChanges().ToList();
    var projectsToLoad = new HashSet<Guid>();
    foreach (var pc in projectChanges)
    {
        if (pc.GetAddedAdditionalDocuments().Any() ||
            pc.GetAddedAnalyzerReferences().Any() ||
            pc.GetAddedDocuments().Any() ||
            pc.GetAddedMetadataReferences().Any() ||
            pc.GetAddedProjectReferences().Any() ||
            pc.GetRemovedAdditionalDocuments().Any() ||
            pc.GetRemovedAnalyzerReferences().Any() ||
            pc.GetRemovedDocuments().Any() ||
            pc.GetRemovedMetadataReferences().Any() ||
            pc.GetRemovedProjectReferences().Any())
        {
            projectsToLoad.Add(GetHostProject(pc.ProjectId).Guid);
        }
    }

    if (projectsToLoad.Any())
    {
        var vsSolution4 = GetVsService(typeof(SVsSolution)) as IVsSolution4;
        vsSolution4.EnsureProjectsAreLoaded(
            (uint)projectsToLoad.Count,
            projectsToLoad.ToArray(),
            (uint)__VSBSLFLAGS.VSBSLFLAGS_None);
    }

    // first make sure we can edit the document we will be updating (check them out from source control, etc)
    var changedDocs = projectChanges.SelectMany(pd => pd.GetChangedDocuments()).ToList();
    if (changedDocs.Count > 0)
    {
        this.EnsureEditableDocuments(changedDocs);
    }

    return base.TryApplyChanges(newSolution, progressTracker);
}

public override bool CanOpenDocuments
{
    get
    {
        return true;
    }
}

internal override bool CanChangeActiveContextDocument
{
    get
    {
        return true;
    }
}

// Declares which solution-change kinds this workspace can apply back to VS.
public override bool CanApplyChange(ApplyChangesKind feature)
{
    switch (feature)
    {
        case ApplyChangesKind.AddDocument:
        case ApplyChangesKind.RemoveDocument:
        case ApplyChangesKind.ChangeDocument:
        case ApplyChangesKind.AddMetadataReference:
        case ApplyChangesKind.RemoveMetadataReference:
        case ApplyChangesKind.AddProjectReference:
        case ApplyChangesKind.RemoveProjectReference:
        case ApplyChangesKind.AddAnalyzerReference:
        case ApplyChangesKind.RemoveAnalyzerReference:
        case ApplyChangesKind.AddAdditionalDocument:
        case ApplyChangesKind.RemoveAdditionalDocument:
        case ApplyChangesKind.ChangeAdditionalDocument:
            return true;

        default:
            return false;
    }
}

// Resolves the host project, IVsHierarchy and DTE project for an id; false if
// any of the three cannot be found.
private bool TryGetProjectData(ProjectId projectId, out IVisualStudioHostProject hostProject, out IVsHierarchy hierarchy, out EnvDTE.Project project)
{
    hierarchy = null;
    project = null;

    return this.TryGetHostProject(projectId, out hostProject)
        && this.TryGetHierarchy(projectId, out hierarchy)
        && hierarchy.TryGetProject(out project);
}

// Throwing variant of TryGetProjectData.
internal void GetProjectData(ProjectId projectId, out IVisualStudioHostProject hostProject, out IVsHierarchy hierarchy, out EnvDTE.Project project)
{
    if (!TryGetProjectData(projectId, out hostProject, out hierarchy, out project))
    {
        throw new ArgumentException(string.Format(ServicesVSResources.Could_not_find_project_0, projectId));
    }
}

internal EnvDTE.Project TryGetDTEProject(ProjectId projectId)
{
    return TryGetProjectData(projectId, out var
hostProject, out var hierarchy, out var project) ? project : null;
}

// Adds an assembly reference through DTE; returns false on any failure
// (unknown project or the VSProject add call throwing).
internal bool TryAddReferenceToProject(ProjectId projectId, string assemblyName)
{
    EnvDTE.Project project;
    try
    {
        GetProjectData(projectId, out var hostProject, out var hierarchy, out project);
    }
    catch (ArgumentException)
    {
        return false;
    }

    var vsProject = (VSProject)project.Object;
    try
    {
        vsProject.References.Add(assemblyName);
    }
    catch (Exception)
    {
        return false;
    }

    return true;
}

private string GetAnalyzerPath(AnalyzerReference analyzerReference)
{
    return analyzerReference.FullPath;
}

// Adds an analyzer reference to the project through the VSProject3 DTE object.
protected override void ApplyAnalyzerReferenceAdded(ProjectId projectId, AnalyzerReference analyzerReference)
{
    if (projectId == null)
    {
        throw new ArgumentNullException(nameof(projectId));
    }

    if (analyzerReference == null)
    {
        throw new ArgumentNullException(nameof(analyzerReference));
    }

    GetProjectData(projectId, out var hostProject, out var hierarchy, out var project);

    string filePath = GetAnalyzerPath(analyzerReference);
    if (filePath != null)
    {
        VSProject3 vsProject = (VSProject3)project.Object;
        vsProject.AnalyzerReferences.Add(filePath);
    }
}

// Removes an analyzer reference from the project through the VSProject3 DTE object.
protected override void ApplyAnalyzerReferenceRemoved(ProjectId projectId, AnalyzerReference analyzerReference)
{
    if (projectId == null)
    {
        throw new ArgumentNullException(nameof(projectId));
    }

    if (analyzerReference == null)
    {
        throw new ArgumentNullException(nameof(analyzerReference));
    }

    GetProjectData(projectId, out var hostProject, out var hierarchy, out var project);

    string filePath = GetAnalyzerPath(analyzerReference);
    if (filePath != null)
    {
        VSProject3 vsProject = (VSProject3)project.Object;
        vsProject.AnalyzerReferences.Remove(filePath);
    }
}

// Only file-based (PortableExecutable) references have a usable path here.
private string GetMetadataPath(MetadataReference metadataReference)
{
    var fileMetadata = metadataReference as PortableExecutableReference;
    if (fileMetadata != null)
    {
        return fileMetadata.FilePath;
    }

    return null;
}

// Adds a metadata (file) reference and registers an undo unit to remove it.
protected override void ApplyMetadataReferenceAdded(
    ProjectId projectId, MetadataReference metadataReference)
{
    if (projectId ==
// namespace name matches the name of the project, then we don't want to
    // generate a folder for that. The project is implicitly a folder with that name.
    var folders = info.Folders.AsEnumerable();
    if (folders.FirstOrDefault() == project.Name)
    {
        folders = folders.Skip(1);
    }

    folders = FilterFolderForProjectType(project, folders);

    if (IsWebsite(project))
    {
        // Website projects get their code under the App_Code folder.
        AddDocumentToFolder(hostProject, project, info.Id, SpecializedCollections.SingletonEnumerable(AppCodeFolderName), info.Name, info.SourceCodeKind, initialText, isAdditionalDocument: isAdditionalDocument);
    }
    else if (folders.Any())
    {
        AddDocumentToFolder(hostProject, project, info.Id, folders, info.Name, info.SourceCodeKind, initialText, isAdditionalDocument: isAdditionalDocument);
    }
    else
    {
        AddDocumentToProject(hostProject, project, info.Id, info.Name, info.SourceCodeKind, initialText, isAdditionalDocument: isAdditionalDocument);
    }

    // Register the inverse operation so the add can be undone.
    var undoManager = TryGetUndoManager();
    if (isAdditionalDocument)
    {
        undoManager?.Add(new RemoveAdditionalDocumentUndoUnit(this, info.Id));
    }
    else
    {
        undoManager?.Add(new RemoveDocumentUndoUnit(this, info.Id));
    }
}

private bool IsWebsite(EnvDTE.Project project)
{
    return project.Kind == VsWebSite.PrjKind.prjKindVenusProject;
}

// Yields only the folder names that do not collide with an existing physical
// file project item (case-insensitive name match).
private IEnumerable<string> FilterFolderForProjectType(EnvDTE.Project project, IEnumerable<string> folders)
{
    foreach (var folder in folders)
    {
        var items = GetAllItems(project.ProjectItems);
        var folderItem = items.FirstOrDefault(p => StringComparer.OrdinalIgnoreCase.Compare(p.Name, folder) == 0);
        if (folderItem == null || folderItem.Kind != EnvDTE.Constants.vsProjectItemKindPhysicalFile)
        {
            yield return folder;
        }
    }
}

// Recursively flattens the project-item tree.
private IEnumerable<ProjectItem> GetAllItems(ProjectItems projectItems)
{
    if (projectItems == null)
    {
        return SpecializedCollections.EmptyEnumerable<ProjectItem>();
    }

    var items = projectItems.OfType<ProjectItem>();
    return items.Concat(items.SelectMany(i => GetAllItems(i.ProjectItems)));
}

#if false
protected override void AddExistingDocument(DocumentId documentId, string filePath, IEnumerable<string> folders)
{
    IVsHierarchy hierarchy;
    EnvDTE.Project project;
    IVisualStudioHostProject hostProject;
    GetProjectData(documentId.ProjectId, out hostProject, out hierarchy, out project);

    // If the first namespace name matches the name of the project, then we don't want to
    // generate a folder for that. The project is implicitly a folder with that name.
    if (folders.FirstOrDefault() == project.Name)
    {
        folders = folders.Skip(1);
    }

    var name = Path.GetFileName(filePath);

    if (folders.Any())
    {
        AddDocumentToFolder(hostProject, project, documentId, folders, name, SourceCodeKind.Regular, initialText: null, filePath: filePath);
    }
    else
    {
        AddDocumentToProject(hostProject, project, documentId, name, SourceCodeKind.Regular, initialText: null, filePath: filePath);
    }
}
#endif

// Adds a document at the project root via the project's ProjectItems.
private ProjectItem AddDocumentToProject(
    IVisualStudioHostProject hostProject,
    EnvDTE.Project project,
    DocumentId documentId,
    string documentName,
    SourceCodeKind sourceCodeKind,
    SourceText initialText = null,
    string filePath = null,
    bool isAdditionalDocument = false)
{
    if (!project.TryGetFullPath(out var folderPath))
    {
        // TODO(cyrusn): Throw an appropriate exception here.
        throw new Exception(ServicesVSResources.Could_not_find_location_of_folder_on_disk);
    }

    return AddDocumentToProjectItems(hostProject, project.ProjectItems, documentId, folderPath, documentName, sourceCodeKind, initialText, filePath, isAdditionalDocument);
}

// Adds a document inside a (possibly newly created) folder chain.
private ProjectItem AddDocumentToFolder(
    IVisualStudioHostProject hostProject,
    EnvDTE.Project project,
    DocumentId documentId,
    IEnumerable<string> folders,
    string documentName,
    SourceCodeKind sourceCodeKind,
    SourceText initialText = null,
    string filePath = null,
    bool isAdditionalDocument = false)
{
    var folder = project.FindOrCreateFolder(folders);
    if (!folder.TryGetFullPath(out var folderPath))
    {
        // TODO(cyrusn): Throw an appropriate exception here.
        throw new Exception(ServicesVSResources.Could_not_find_location_of_folder_on_disk);
    }

    return AddDocumentToProjectItems(hostProject, folder.ProjectItems, documentId, folderPath, documentName, sourceCodeKind, initialText, filePath, isAdditionalDocument);
}

// Writes the initial text to disk (if any), then registers the file with the
// project via ProjectItems.AddFromFile, hinting the intended DocumentId.
private ProjectItem AddDocumentToProjectItems(
    IVisualStudioHostProject hostProject,
    ProjectItems projectItems,
    DocumentId documentId,
    string folderPath,
    string documentName,
    SourceCodeKind sourceCodeKind,
    SourceText initialText,
    string filePath,
    bool isAdditionalDocument)
{
    if (filePath == null)
    {
        var baseName = Path.GetFileNameWithoutExtension(documentName);
        // Additional documents keep their given extension; source documents
        // get the language-preferred one.
        var extension = isAdditionalDocument ? Path.GetExtension(documentName) : GetPreferredExtension(hostProject, sourceCodeKind);
        var uniqueName = projectItems.GetUniqueName(baseName, extension);
        filePath = Path.Combine(folderPath, uniqueName);
    }

    if (initialText != null)
    {
        using (var writer = new StreamWriter(filePath, append: false, encoding: initialText.Encoding ?? Encoding.UTF8))
        {
            initialText.Write(writer);
        }
    }

    using (var documentIdHint = _projectTracker.DocumentProvider.ProvideDocumentIdHint(filePath, documentId))
    {
        return projectItems.AddFromFile(filePath);
    }
}

// Removes a document from its host project and registers an undo unit that can
// re-add it with its former text.
protected void RemoveDocumentCore(
    DocumentId documentId, bool isAdditionalDocument)
{
    if (documentId == null)
    {
        throw new ArgumentNullException(nameof(documentId));
    }

    var hostDocument = this.GetHostDocument(documentId);
    if (hostDocument != null)
    {
        var document = this.CurrentSolution.GetDocument(documentId);
        var text = this.GetTextForced(document);
        var project = hostDocument.Project.Hierarchy as IVsProject3;
        var itemId = hostDocument.GetItemId();
        if (itemId == (uint)VSConstants.VSITEMID.Nil)
        {
            // it is no longer part of the solution
            return;
        }

        project.RemoveItem(0, itemId, out var result);

        var undoManager = TryGetUndoManager();
        var docInfo = CreateDocumentInfoWithoutText(document);
        if (isAdditionalDocument)
        {
            undoManager?.Add(new AddAdditionalDocumentUndoUnit(this, docInfo, text));
        }
        else
        {
undoManager?.Add(new AddDocumentUndoUnit(this, docInfo, text));
        }
    }
}

protected override void ApplyDocumentRemoved(DocumentId documentId)
{
    RemoveDocumentCore(documentId, isAdditionalDocument: false);
}

protected override void ApplyAdditionalDocumentRemoved(DocumentId documentId)
{
    RemoveDocumentCore(documentId, isAdditionalDocument: true);
}

public override void OpenDocument(DocumentId documentId, bool activate = true)
{
    OpenDocumentCore(documentId, activate);
}

public override void OpenAdditionalDocument(DocumentId documentId, bool activate = true)
{
    OpenDocumentCore(documentId, activate);
}

public override void CloseDocument(DocumentId documentId)
{
    CloseDocumentCore(documentId);
}

public override void CloseAdditionalDocument(DocumentId documentId)
{
    CloseDocumentCore(documentId);
}

// Resolves the currently focused window frame and the info-bar factory; true
// only when both are available.
public bool TryGetInfoBarData(out IVsWindowFrame frame, out IVsInfoBarUIFactory factory)
{
    frame = null;
    factory = null;
    var monitorSelectionService = ServiceProvider.GetService(typeof(SVsShellMonitorSelection)) as IVsMonitorSelection;

    // We want to get whichever window is currently in focus (including toolbars) as we could have had an exception thrown from the error list or interactive window
    if (monitorSelectionService != null &&
        ErrorHandler.Succeeded(monitorSelectionService.GetCurrentElementValue((uint)VSConstants.VSSELELEMID.SEID_WindowFrame, out var value)))
    {
        frame = value as IVsWindowFrame;
    }
    else
    {
        return false;
    }

    factory = ServiceProvider.GetService(typeof(SVsInfoBarUIFactory)) as IVsInfoBarUIFactory;
    return frame != null && factory != null;
}

// Opens (and optionally activates) a document's editor window. Must be called
// on the UI thread.
public void OpenDocumentCore(DocumentId documentId, bool activate = true)
{
    if (documentId == null)
    {
        throw new ArgumentNullException(nameof(documentId));
    }

    if (!_foregroundObject.IsForeground())
    {
        throw new InvalidOperationException(ServicesVSResources.This_workspace_only_supports_opening_documents_on_the_UI_thread);
    }

    var document = this.GetHostDocument(documentId);
    if (document != null && document.Project != null)
    {
        if (TryGetFrame(document, out var frame))
        {
            if (activate)
            {
                frame.Show();
            }
            else
            {
                frame.ShowNoActivate();
            }
        }
    }
}

// Obtains the window frame for a host document, choosing the open mechanism
// based on whether the document has a valid project item id.
private bool TryGetFrame(IVisualStudioHostDocument document, out IVsWindowFrame frame)
{
    frame = null;
    var itemId = document.GetItemId();
    if (itemId == (uint)VSConstants.VSITEMID.Nil)
    {
        // If the ItemId is Nil, then IVsProject would not be able to open the
        // document using its ItemId. Thus, we must use OpenDocumentViaProject, which only
        // depends on the file path.
        return ErrorHandler.Succeeded(_shellOpenDocument.OpenDocumentViaProject(
            document.FilePath,
            VSConstants.LOGVIEWID.TextView_guid,
            out var oleServiceProvider,
            out var uiHierarchy,
            out var itemid,
            out frame));
    }
    else
    {
        // If the ItemId is not Nil, then we should not call IVsUIShellDocument
        // .OpenDocumentViaProject here because that simply takes a file path and opens the
        // file within the context of the first project it finds. That would cause problems
        // if the document we're trying to open is actually a linked file in another
        // project. So, we get the project's hierarchy and open the document using its item
        // ID.

        // It's conceivable that IVsHierarchy might not implement IVsProject. However,
        // OpenDocumentViaProject itself relies upon this QI working, so it should be OK to
        // use here.
        var vsProject = document.Project.Hierarchy as IVsProject;
        return vsProject != null &&
            ErrorHandler.Succeeded(vsProject.OpenItem(itemId, VSConstants.LOGVIEWID.TextView_guid, s_docDataExisting_Unknown, out frame));
    }
}

// Closes a document's editor frame without saving, if it is currently open.
public void CloseDocumentCore(DocumentId documentId)
{
    if (documentId == null)
    {
        throw new ArgumentNullException(nameof(documentId));
    }

    if (this.IsDocumentOpen(documentId))
    {
        var document = this.GetHostDocument(documentId);
        if (document != null)
        {
            if (ErrorHandler.Succeeded(_shellOpenDocument.IsDocumentOpen(null, 0, document.FilePath, Guid.Empty, 0, out var uiHierarchy, null, out var frame, out var isOpen)))
            {
                // TODO: do we need save argument for CloseDocument?
                frame.CloseFrame((uint)__FRAMECLOSE.FRAMECLOSE_NoSave);
            }
        }
    }
}

// Applies a text change to a regular document after source-control checkout.
protected override void ApplyDocumentTextChanged(DocumentId documentId, SourceText newText)
{
    EnsureEditableDocuments(documentId);
    var hostDocument = GetHostDocument(documentId);
    hostDocument.UpdateText(newText);
}

// Applies a text change to an additional document after source-control checkout.
protected override void ApplyAdditionalDocumentTextChanged(DocumentId documentId, SourceText newText)
{
    EnsureEditableDocuments(documentId);
    var hostDocument = GetHostDocument(documentId);
    hostDocument.UpdateText(newText);
}

// Chooses a file extension for a newly created document based on the host
// project's language.
private static string GetPreferredExtension(IVisualStudioHostProject hostProject, SourceCodeKind sourceCodeKind)
{
    // No extension was provided.  Pick a good one based on the type of host project.
    switch (hostProject.Language)
    {
        case LanguageNames.CSharp:
            // TODO: uncomment when fixing https://github.com/dotnet/roslyn/issues/5325
            //return sourceCodeKind == SourceCodeKind.Regular ? ".cs" : ".csx";
            return ".cs";
        case LanguageNames.VisualBasic:
            // TODO: uncomment when fixing https://github.com/dotnet/roslyn/issues/5325
            //return sourceCodeKind == SourceCodeKind.Regular ?
".vb" : ".vbx"; return ".vb"; default: throw new InvalidOperationException(); } } public override IVsHierarchy GetHierarchy(ProjectId projectId) { var project = this.GetHostProject(projectId); if (project == null) { return null; } return project.Hierarchy; } internal override void SetDocumentContext(DocumentId documentId) { var hostDocument = GetHostDocument(documentId); if (hostDocument == null) { // the document or project is not being tracked return; } var itemId = hostDocument.GetItemId(); if (itemId == (uint)VSConstants.VSITEMID.Nil) { // the document has been removed from the solution return; } var hierarchy = hostDocument.Project.Hierarchy; var sharedHierarchy = LinkedFileUtilities.GetSharedHierarchyForItem(hierarchy, itemId); if (sharedHierarchy != null) { if (sharedHierarchy.SetProperty( (uint)VSConstants.VSITEMID.Root, (int)__VSHPROPID8.VSHPROPID_ActiveIntellisenseProjectContext, ProjectTracker.GetProject(documentId.ProjectId).ProjectSystemName) == VSConstants.S_OK) { // The ASP.NET 5 intellisense project is now updated. return; } else { // Universal Project shared files // Change the SharedItemContextHierarchy of the project's parent hierarchy, then // hierarchy events will trigger the workspace to update. var hr = sharedHierarchy.SetProperty((uint)VSConstants.VSITEMID.Root, (int)__VSHPROPID7.VSHPROPID_SharedItemContextHierarchy, hierarchy); } } else { // Regular linked files // Transfer the item (open buffer) to the new hierarchy, and then hierarchy events // will trigger the workspace to update. var vsproj = hierarchy as IVsProject3; var hr = vsproj.TransferItem(hostDocument.FilePath, hostDocument.FilePath, punkWindowFrame: null); } } internal void UpdateDocumentContextIfContainsDocument(IVsHierarchy sharedHierarchy, DocumentId documentId) { // TODO: This is a very roundabout way to update the context // The sharedHierarchy passed in has a new context, but we don't know what it is. 
// The documentId passed in is associated with this sharedHierarchy, and this method // will be called once for each such documentId. During this process, one of these // documentIds will actually belong to the new SharedItemContextHierarchy. Once we // find that one, we can map back to the open buffer and set its active context to // the appropriate project. // Note that if there is a single head project and it's in the process of being unloaded // there might not be a host project. var hostProject = LinkedFileUtilities.GetContextHostProject(sharedHierarchy, ProjectTracker); if (hostProject?.Hierarchy == sharedHierarchy) { return; } if (hostProject.Id != documentId.ProjectId) { // While this documentId is associated with one of the head projects for this // sharedHierarchy, it is not associated with the new context hierarchy. Another // documentId will be passed to this method and update the context. return; } // This documentId belongs to the new SharedItemContextHierarchy. Update the associated // buffer. OnDocumentContextUpdated(documentId); } /// <summary> /// Finds the <see cref="DocumentId"/> related to the given <see cref="DocumentId"/> that /// is in the current context. For regular files (non-shared and non-linked) and closed /// linked files, this is always the provided <see cref="DocumentId"/>. For open linked /// files and open shared files, the active context is already tracked by the /// <see cref="Workspace"/> and can be looked up directly. For closed shared files, the /// document in the shared project's <see cref="__VSHPROPID7.VSHPROPID_SharedItemContextHierarchy"/> /// is preferred. 
/// </summary> internal override DocumentId GetDocumentIdInCurrentContext(DocumentId documentId) { // If the document is open, then the Workspace knows the current context for both // linked and shared files if (IsDocumentOpen(documentId)) { return base.GetDocumentIdInCurrentContext(documentId); } var hostDocument = GetHostDocument(documentId); var itemId = hostDocument.GetItemId(); if (itemId == (uint)VSConstants.VSITEMID.Nil) { // An itemid is required to determine whether the file belongs to a Shared Project return base.GetDocumentIdInCurrentContext(documentId); } // If this is a regular document or a closed linked (non-shared) document, then use the // default logic for determining current context. var sharedHierarchy = LinkedFileUtilities.GetSharedHierarchyForItem(hostDocument.Project.Hierarchy, itemId); if (sharedHierarchy == null) { return base.GetDocumentIdInCurrentContext(documentId); } // This is a closed shared document, so we must determine the correct context. var hostProject = LinkedFileUtilities.GetContextHostProject(sharedHierarchy, ProjectTracker); var matchingProject = CurrentSolution.GetProject(hostProject.Id); if (matchingProject == null || hostProject.Hierarchy == sharedHierarchy) { return base.GetDocumentIdInCurrentContext(documentId); } if (matchingProject.ContainsDocument(documentId)) { // The provided documentId is in the current context project return documentId; } // The current context document is from another project. var linkedDocumentIds = CurrentSolution.GetDocument(documentId).GetLinkedDocumentIds(); var matchingDocumentId = linkedDocumentIds.FirstOrDefault(id => id.ProjectId == matchingProject.Id); return matchingDocumentId ?? 
base.GetDocumentIdInCurrentContext(documentId);
}

internal bool TryGetHierarchy(ProjectId projectId, out IVsHierarchy hierarchy)
{
    hierarchy = this.GetHierarchy(projectId);
    return hierarchy != null;
}

public override string GetFilePath(DocumentId documentId)
{
    var document = this.GetHostDocument(documentId);

    if (document == null)
    {
        return null;
    }
    else
    {
        return document.FilePath;
    }
}

// Registers this workspace with the solution crawler (double-checked under the
// workspace lock so registration happens only once).
internal void StartSolutionCrawler()
{
    if (_registrationService == null)
    {
        lock (this)
        {
            if (_registrationService == null)
            {
                _registrationService = this.Services.GetService<ISolutionCrawlerRegistrationService>();
                _registrationService.Register(this);
            }
        }
    }
}

// Unregisters from the solution crawler, blocking until shutdown completes.
internal void StopSolutionCrawler()
{
    if (_registrationService != null)
    {
        lock (this)
        {
            if (_registrationService != null)
            {
                _registrationService.Unregister(this, blockingShutdown: true);
                _registrationService = null;
            }
        }
    }
}

protected override void Dispose(bool finalize)
{
    // workspace is going away. unregister this workspace from work coordinator
    StopSolutionCrawler();

    base.Dispose(finalize);
}

// Checks the given documents out of source control via QueryEditFiles; throws
// if checkout fails or a file was changed/reloaded during checkout.
public void EnsureEditableDocuments(IEnumerable<DocumentId> documents)
{
    var queryEdit = (IVsQueryEditQuerySave2)ServiceProvider.GetService(typeof(SVsQueryEditQuerySave));

    var fileNames = documents.Select(GetFilePath).ToArray();

    // TODO: meditate about the flags we can pass to this and decide what is most appropriate for Roslyn
    int result = queryEdit.QueryEditFiles(
        rgfQueryEdit: 0,
        cFiles: fileNames.Length,
        rgpszMkDocuments: fileNames,
        rgrgf: new uint[fileNames.Length],
        rgFileInfo: new VSQEQS_FILE_ATTRIBUTE_DATA[fileNames.Length],
        pfEditVerdict: out var editVerdict,
        prgfMoreInfo: out var editResultFlags);

    if (ErrorHandler.Failed(result) ||
        editVerdict != (uint)tagVSQueryEditResult.QER_EditOK)
    {
        throw new Exception("Unable to check out the files from source control.");
    }

    if ((editResultFlags & (uint)(tagVSQueryEditResultFlags2.QER_Changed | tagVSQueryEditResultFlags2.QER_Reloaded)) != 0)
    {
        throw new Exception("A file was reloaded during the source control checkout.");
    }
}

public void EnsureEditableDocuments(params DocumentId[] documents)
{
    this.EnsureEditableDocuments((IEnumerable<DocumentId>)documents);
}

// Refreshes a document's text from disk through its loader.
internal void OnDocumentTextUpdatedOnDisk(DocumentId documentId)
{
    var vsDoc = this.GetHostDocument(documentId);
    this.OnDocumentTextLoaderChanged(documentId, vsDoc.Loader);
}

// Refreshes an additional document's text from disk through its loader.
internal void OnAdditionalDocumentTextUpdatedOnDisk(DocumentId documentId)
{
    var vsDoc = this.GetHostDocument(documentId);
    this.OnAdditionalDocumentTextLoaderChanged(documentId, vsDoc.Loader);
}

public TInterface GetVsService<TService, TInterface>()
    where TService : class
    where TInterface : class
{
    return this.ServiceProvider.GetService(typeof(TService)) as TInterface;
}

public object GetVsService(Type serviceType)
{
    return ServiceProvider.GetService(serviceType);
}

public DTE GetVsDte()
{
    return GetVsService<SDTE, DTE>();
}

// Asks both project flavors (and, if still unknown, the reference manager)
// whether a project-to-project reference may be added. UI thread only.
internal override bool CanAddProjectReference(ProjectId referencingProject, ProjectId referencedProject)
{
    _foregroundObject.AssertIsForeground();
    if (!TryGetHierarchy(referencingProject, out var referencingHierarchy) ||
        !TryGetHierarchy(referencedProject, out var referencedHierarchy))
    {
        // Couldn't even get a hierarchy for this project. So we have to assume
        // that adding a reference is disallowed.
        return false;
    }

    // First we have to see if either project disallows the reference being added.
    const int ContextFlags = (int)__VSQUERYFLAVORREFERENCESCONTEXT.VSQUERYFLAVORREFERENCESCONTEXT_RefreshReference;

    uint canAddProjectReference = (uint)__VSREFERENCEQUERYRESULT.REFERENCE_UNKNOWN;
    uint canBeReferenced = (uint)__VSREFERENCEQUERYRESULT.REFERENCE_UNKNOWN;

    var referencingProjectFlavor3 = referencingHierarchy as IVsProjectFlavorReferences3;
    if (referencingProjectFlavor3 != null)
    {
        if (ErrorHandler.Failed(referencingProjectFlavor3.QueryAddProjectReferenceEx(referencedHierarchy, ContextFlags, out canAddProjectReference, out var unused)))
        {
            // Something went wrong even trying to see if the reference would be allowed.
            // Assume it won't be allowed.
            return false;
        }

        if (canAddProjectReference == (uint)__VSREFERENCEQUERYRESULT.REFERENCE_DENY)
        {
            // Adding this project reference is not allowed.
            return false;
        }
    }

    var referencedProjectFlavor3 = referencedHierarchy as IVsProjectFlavorReferences3;
    if (referencedProjectFlavor3 != null)
    {
        if (ErrorHandler.Failed(referencedProjectFlavor3.QueryCanBeReferencedEx(referencingHierarchy, ContextFlags, out canBeReferenced, out var unused)))
        {
            // Something went wrong even trying to see if the reference would be allowed.
            // Assume it won't be allowed.
            return false;
        }

        if (canBeReferenced == (uint)__VSREFERENCEQUERYRESULT.REFERENCE_DENY)
        {
            // Adding this project reference is not allowed.
            return false;
        }
    }

    // Neither project denied the reference being added.  At this point, if either project
    // allows the reference to be added, and the other doesn't block it, then we can add
    // the reference.
    if (canAddProjectReference == (int)__VSREFERENCEQUERYRESULT.REFERENCE_ALLOW ||
        canBeReferenced == (int)__VSREFERENCEQUERYRESULT.REFERENCE_ALLOW)
    {
        return true;
    }

    // In both directions things are still unknown.  Fallback to the reference manager
    // to make the determination here.
    var referenceManager = GetVsService<SVsReferenceManager, IVsReferenceManager>();
    if (referenceManager == null)
    {
        // Couldn't get the reference manager.  Have to assume it's not allowed.
        return false;
    }

    // As long as the reference manager does not deny things, then we allow the
    // reference to be added.
    var result = referenceManager.QueryCanReferenceProject(referencingHierarchy, referencedHierarchy);
    return result != (uint)__VSREFERENCEQUERYRESULT.REFERENCE_DENY;
}

/// <summary>
/// A trivial implementation of <see cref="IVisualStudioWorkspaceHost" /> that just
/// forwards the calls down to the underlying Workspace.
/// </summary>
protected sealed class VisualStudioWorkspaceHost : IVisualStudioWorkspaceHost, IVisualStudioWorkspaceHost2, IVisualStudioWorkingFolder
{
    private readonly VisualStudioWorkspaceImpl _workspace;

    private readonly Dictionary<DocumentId, uint> _documentIdToHierarchyEventsCookieMap = new Dictionary<DocumentId, uint>();

    public VisualStudioWorkspaceHost(VisualStudioWorkspaceImpl workspace)
    {
        _workspace = workspace;
    }

    void IVisualStudioWorkspaceHost.OnOptionsChanged(ProjectId projectId, CompilationOptions compilationOptions, ParseOptions parseOptions)
    {
        _workspace.OnCompilationOptionsChanged(projectId, compilationOptions);
        _workspace.OnParseOptionsChanged(projectId, parseOptions);
    }

    void IVisualStudioWorkspaceHost.OnDocumentAdded(DocumentInfo documentInfo)
    {
        _workspace.OnDocumentAdded(documentInfo);
    }

    void IVisualStudioWorkspaceHost.OnDocumentClosed(DocumentId documentId, ITextBuffer textBuffer, TextLoader loader, bool updateActiveContext)
    {
        // TODO: Move this out to DocumentProvider. As is, this depends on being able to
        // access the host document which will already be deleted in some cases, causing
        // a crash. Until this is fixed, we will leak a HierarchyEventsSink every time a
        // Mercury shared document is closed.
// UnsubscribeFromSharedHierarchyEvents(documentId); using (_workspace.Services.GetService<IGlobalOperationNotificationService>().Start("Document Closed")) { _workspace.OnDocumentClosed(documentId, loader, updateActiveContext); } } void IVisualStudioWorkspaceHost.OnDocumentOpened(DocumentId documentId, ITextBuffer textBuffer, bool currentContext) { SubscribeToSharedHierarchyEvents(documentId); _workspace.OnDocumentOpened(documentId, textBuffer.AsTextContainer(), currentContext); } private void SubscribeToSharedHierarchyEvents(DocumentId documentId) { // Todo: maybe avoid double alerts. var hostDocument = _workspace.GetHostDocument(documentId); if (hostDocument == null) { return; } var hierarchy = hostDocument.Project.Hierarchy; var itemId = hostDocument.GetItemId(); if (itemId == (uint)VSConstants.VSITEMID.Nil) { // the document has been removed from the solution return; } var sharedHierarchy = LinkedFileUtilities.GetSharedHierarchyForItem(hierarchy, itemId); if (sharedHierarchy != null) { var eventSink = new HierarchyEventsSink(_workspace, sharedHierarchy, documentId); var hr = sharedHierarchy.AdviseHierarchyEvents(eventSink, out var cookie); if (hr == VSConstants.S_OK && !_documentIdToHierarchyEventsCookieMap.ContainsKey(documentId)) { _documentIdToHierarchyEventsCookieMap.Add(documentId, cookie); } } } private void UnsubscribeFromSharedHierarchyEvents(DocumentId documentId) { var hostDocument = _workspace.GetHostDocument(documentId); var itemId = hostDocument.GetItemId(); if (itemId == (uint)VSConstants.VSITEMID.Nil) { // the document has been removed from the solution return; } var sharedHierarchy = LinkedFileUtilities.GetSharedHierarchyForItem(hostDocument.Project.Hierarchy, itemId); if (sharedHierarchy != null) { if (_documentIdToHierarchyEventsCookieMap.TryGetValue(documentId, out var cookie)) { var hr = sharedHierarchy.UnadviseHierarchyEvents(cookie); _documentIdToHierarchyEventsCookieMap.Remove(documentId); } } } private void 
RegisterPrimarySolutionForPersistentStorage( SolutionId solutionId) { var service = _workspace.Services.GetService<IPersistentStorageService>() as PersistentStorageService; if (service == null) { return; } service.RegisterPrimarySolution(solutionId); } private void UnregisterPrimarySolutionForPersistentStorage( SolutionId solutionId, bool synchronousShutdown) { var service = _workspace.Services.GetService<IPersistentStorageService>() as PersistentStorageService; if (service == null) { return; } service.UnregisterPrimarySolution(solutionId, synchronousShutdown); } void IVisualStudioWorkspaceHost.OnDocumentRemoved(DocumentId documentId) { _workspace.OnDocumentRemoved(documentId); } void IVisualStudioWorkspaceHost.OnMetadataReferenceAdded(ProjectId projectId, PortableExecutableReference metadataReference) { _workspace.OnMetadataReferenceAdded(projectId, metadataReference); } void IVisualStudioWorkspaceHost.OnMetadataReferenceRemoved(ProjectId projectId, PortableExecutableReference metadataReference) { _workspace.OnMetadataReferenceRemoved(projectId, metadataReference); } void IVisualStudioWorkspaceHost.OnProjectAdded(ProjectInfo projectInfo) { using (_workspace.Services.GetService<IGlobalOperationNotificationService>()?.Start("Add Project")) { _workspace.OnProjectAdded(projectInfo); } } void IVisualStudioWorkspaceHost.OnProjectReferenceAdded(ProjectId projectId, ProjectReference projectReference) { _workspace.OnProjectReferenceAdded(projectId, projectReference); } void IVisualStudioWorkspaceHost.OnProjectReferenceRemoved(ProjectId projectId, ProjectReference projectReference) { _workspace.OnProjectReferenceRemoved(projectId, projectReference); } void IVisualStudioWorkspaceHost.OnProjectRemoved(ProjectId projectId) { using (_workspace.Services.GetService<IGlobalOperationNotificationService>()?.Start("Remove Project")) { _workspace.OnProjectRemoved(projectId); } } void IVisualStudioWorkspaceHost.OnSolutionAdded(SolutionInfo solutionInfo) { 
RegisterPrimarySolutionForPersistentStorage(solutionInfo.Id); _workspace.OnSolutionAdded(solutionInfo); } void IVisualStudioWorkspaceHost.OnSolutionRemoved() { var solutionId = _workspace.CurrentSolution.Id; _workspace.OnSolutionRemoved(); _workspace.ClearReferenceCache(); UnregisterPrimarySolutionForPersistentStorage(solutionId, synchronousShutdown: false); } void IVisualStudioWorkspaceHost.ClearSolution() { _workspace.ClearSolution(); _workspace.ClearReferenceCache(); } void IVisualStudioWorkspaceHost.OnDocumentTextUpdatedOnDisk(DocumentId id) { _workspace.OnDocumentTextUpdatedOnDisk(id); } void IVisualStudioWorkspaceHost.OnAssemblyNameChanged(ProjectId id, string assemblyName) { _workspace.OnAssemblyNameChanged(id, assemblyName); } void IVisualStudioWorkspaceHost.OnOutputFilePathChanged(ProjectId id, string outputFilePath) { _workspace.OnOutputFilePathChanged(id, outputFilePath); } void IVisualStudioWorkspaceHost.OnProjectNameChanged(ProjectId projectId, string name, string filePath) { _workspace.OnProjectNameChanged(projectId, name, filePath); } void IVisualStudioWorkspaceHost.OnAnalyzerReferenceAdded(ProjectId projectId, AnalyzerReference analyzerReference) { _workspace.OnAnalyzerReferenceAdded(projectId, analyzerReference); } void IVisualStudioWorkspaceHost.OnAnalyzerReferenceRemoved(ProjectId projectId, AnalyzerReference analyzerReference) { _workspace.OnAnalyzerReferenceRemoved(projectId, analyzerReference); } void IVisualStudioWorkspaceHost.OnAdditionalDocumentAdded(DocumentInfo documentInfo) { _workspace.OnAdditionalDocumentAdded(documentInfo); } void IVisualStudioWorkspaceHost.OnAdditionalDocumentRemoved(DocumentId documentInfo) { _workspace.OnAdditionalDocumentRemoved(documentInfo); } void IVisualStudioWorkspaceHost.OnAdditionalDocumentOpened(DocumentId documentId, ITextBuffer textBuffer, bool isCurrentContext) { _workspace.OnAdditionalDocumentOpened(documentId, textBuffer.AsTextContainer(), isCurrentContext); } void 
IVisualStudioWorkspaceHost.OnAdditionalDocumentClosed(DocumentId documentId, ITextBuffer textBuffer, TextLoader loader) { _workspace.OnAdditionalDocumentClosed(documentId, loader); } void IVisualStudioWorkspaceHost.OnAdditionalDocumentTextUpdatedOnDisk(DocumentId id) { _workspace.OnAdditionalDocumentTextUpdatedOnDisk(id); } void IVisualStudioWorkspaceHost2.OnHasAllInformation(ProjectId projectId, bool hasAllInformation) { _workspace.OnHasAllInformationChanged(projectId, hasAllInformation); } void IVisualStudioWorkingFolder.OnBeforeWorkingFolderChange() { UnregisterPrimarySolutionForPersistentStorage(_workspace.CurrentSolution.Id, synchronousShutdown: true); } void IVisualStudioWorkingFolder.OnAfterWorkingFolderChange() { var solutionId = _workspace.CurrentSolution.Id; _workspace.ProjectTracker.UpdateSolutionProperties(solutionId); RegisterPrimarySolutionForPersistentStorage(solutionId); } } } }
using Orleans.Runtime.Configuration;
using TestExtensions;

namespace UnitTests.Serialization
{
    using System;
    using System.Collections.Generic;
    using System.Reflection;
    using Orleans.CodeGeneration;
    using Orleans.Runtime;
    using Orleans.Serialization;
    using Xunit;

    /// <summary>
    /// Tests that verify the precedence order the serialization manager applies when both
    /// external serializers and attribute-defined (on-type) serializer methods could handle
    /// a type, and the order in which registered external serializers are consulted.
    /// </summary>
    public class SerializationOrderTests
    {
        private readonly SerializationTestEnvironment environment;

        public SerializationOrderTests()
        {
            // All fakes record invocations in static flags, so reset them before each test
            // (xUnit creates a fresh instance per test, running this constructor each time).
            FakeTypeToSerialize.Reset();
            FakeSerializer1.Reset();
            FakeSerializer2.Reset();

            // Register FakeSerializer1 before FakeSerializer2; the registration order is the
            // order under test in SerializationOrder_VerifyExternalSerializersInvokedInOrder.
            var config = new ClientConfiguration()
            {
                SerializationProviders =
                {
                    typeof(FakeSerializer1).GetTypeInfo(),
                    typeof(FakeSerializer2).GetTypeInfo()
                }
            };
            this.environment = SerializationTestEnvironment.InitializeWithDefaults(config);
        }

        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Serialization")]
        public void SerializationOrder_VerifyThatExternalIsHigherPriorityThanAttributeDefined()
        {
            // Both external serializers claim support for the type; the type's own
            // attribute-defined serializer methods should then never be used.
            FakeSerializer1.SupportedTypes = FakeSerializer2.SupportedTypes = new[] { typeof(FakeTypeToSerialize) };

            var serializationItem = new FakeTypeToSerialize { SomeValue = 1 };
            this.environment.SerializationManager.RoundTripSerializationForTesting(serializationItem);

            Assert.True(
                FakeSerializer1.SerializeCalled,
                "IExternalSerializer.Serialize should have been called on FakeSerializer1");
            Assert.True(
                FakeSerializer1.DeserializeCalled,
                "IExternalSerializer.Deserialize should have been called on FakeSerializer1");
            Assert.False(
                FakeTypeToSerialize.SerializeWasCalled,
                "Serialize on the type should NOT have been called");
            Assert.False(
                FakeTypeToSerialize.DeserializeWasCalled,
                "Deserialize on the type should NOT have been called");
        }

        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Serialization")]
        public void SerializationOrder_VerifyThatAttributeDefinedCalledIfNoExternalSerializersSupportType()
        {
            // With no external serializer claiming the type, the attribute-defined methods on
            // the type itself are the fallback.
            var serializationItem = new FakeTypeToSerialize { SomeValue = 1 };
            FakeSerializer1.SupportedTypes = FakeSerializer2.SupportedTypes = null;
            this.environment.SerializationManager.RoundTripSerializationForTesting(serializationItem);
            Assert.True(FakeTypeToSerialize.SerializeWasCalled, "FakeTypeToSerialize.Serialize should have been called");
            Assert.True(FakeTypeToSerialize.DeserializeWasCalled, "FakeTypeToSerialize.Deserialize should have been called");
        }

        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Serialization")]
        public void SerializationOrder_VerifyExternalSerializersInvokedInOrder()
        {
            // Both serializers support the type, but only the first-registered one
            // (FakeSerializer1) should actually be invoked.
            FakeSerializer1.SupportedTypes = FakeSerializer2.SupportedTypes = new[] { typeof(FakeTypeToSerialize) };
            var serializationItem = new FakeTypeToSerialize { SomeValue = 1 };
            this.environment.SerializationManager.RoundTripSerializationForTesting(serializationItem);
            Assert.True(FakeSerializer1.SerializeCalled, "IExternalSerializer.Serialize should have been called on FakeSerializer1");
            Assert.True(FakeSerializer1.DeserializeCalled, "IExternalSerializer.Deserialize should have been called on FakeSerializer1");
            Assert.False(FakeSerializer2.SerializeCalled, "IExternalSerializer.Serialize should NOT have been called on FakeSerializer2");
            Assert.False(FakeSerializer2.DeserializeCalled, "IExternalSerializer.Deserialize should NOT have been called on FakeSerializer2");
            Assert.False(FakeTypeToSerialize.SerializeWasCalled, "Serialize on the type should NOT have been called");
            Assert.False(FakeTypeToSerialize.DeserializeWasCalled, "Deserialize on the type should NOT have been called");
        }

        /// <summary>
        /// External serializer stub that records which IExternalSerializer members were
        /// invoked. Supported types are configured via the static <see cref="SupportedTypes"/>.
        /// </summary>
        private class FakeSerializer1 : IExternalSerializer
        {
            public static bool IsSupportedTypeCalled { get; private set; }

            public static bool DeepCopyCalled { get; private set; }

            public static bool SerializeCalled { get; private set; }

            public static bool DeserializeCalled { get; private set; }

            // Null means "supports nothing".
            public static IList<Type> SupportedTypes { get; set; }

            public static void Reset()
            {
                IsSupportedTypeCalled = DeepCopyCalled = SerializeCalled = DeserializeCalled = false;
            }

            public void Initialize(Logger logger)
            {
            }

            public bool IsSupportedType(Type itemType)
            {
                IsSupportedTypeCalled = true;
                return SupportedTypes == null ? false : SupportedTypes.Contains(itemType);
            }

            public object DeepCopy(object source, ICopyContext context)
            {
                DeepCopyCalled = true;
                return source;
            }

            public void Serialize(object item, ISerializationContext context, Type expectedType)
            {
                SerializeCalled = true;
            }

            public object Deserialize(Type expectedType, IDeserializationContext context)
            {
                DeserializeCalled = true;
                return null;
            }
        }

        /// <summary>
        /// Second external serializer stub, registered after <see cref="FakeSerializer1"/>;
        /// used to verify it is NOT consulted when the first serializer handles the type.
        /// </summary>
        private class FakeSerializer2 : IExternalSerializer
        {
            public static bool IsSupportedTypeCalled { get; private set; }

            public static bool DeepCopyCalled { get; private set; }

            public static bool SerializeCalled { get; private set; }

            public static bool DeserializeCalled { get; private set; }

            // Null means "supports nothing".
            public static IList<Type> SupportedTypes { get; set; }

            public static void Reset()
            {
                IsSupportedTypeCalled = DeepCopyCalled = SerializeCalled = DeserializeCalled = false;
            }

            public void Initialize(Logger logger)
            {
            }

            public bool IsSupportedType(Type itemType)
            {
                IsSupportedTypeCalled = true;
                return SupportedTypes == null ? false : SupportedTypes.Contains(itemType);
            }

            public object DeepCopy(object source, ICopyContext context)
            {
                DeepCopyCalled = true;
                return source;
            }

            public void Serialize(object item, ISerializationContext context, Type expectedType)
            {
                SerializeCalled = true;
            }

            public object Deserialize(Type expectedType, IDeserializationContext context)
            {
                DeserializeCalled = true;
                return null;
            }
        }

        /// <summary>
        /// Type with attribute-defined serializer methods; the static methods below are
        /// discovered via the [CopierMethod]/[SerializerMethod]/[DeserializerMethod]
        /// attributes, so their signatures must not change.
        /// </summary>
        private class FakeTypeToSerialize
        {
            public int SomeValue { get; set; }

            public static bool CopyWasCalled { get; private set; }

            public static bool SerializeWasCalled { get; private set; }

            public static bool DeserializeWasCalled { get; private set; }

            public static void Reset()
            {
                CopyWasCalled = SerializeWasCalled = DeserializeWasCalled = false;
            }

            [CopierMethod]
            private static object Copy(object input, ICopyContext context)
            {
                CopyWasCalled = true;
                return input;
            }

            [SerializerMethod]
            private static void Serialize(object input, ISerializationContext context, Type expected)
            {
                SerializeWasCalled = true;
            }

            [DeserializerMethod]
            private static object Deserialize(Type expected, IDeserializationContext context)
            {
                DeserializeWasCalled = true;
                return null;
            }
        }
    }
}
using System;
using System.Collections;
using Google.Maps.Examples.Shared;
using UnityEngine;
using UnityEngine.UI;

namespace Google.Maps.Examples {
  /// <summary>
  /// Class for connecting a <see cref="Slider"/> to an <see cref="Action"/>, animating the
  /// <see cref="Slider"/> moving up and down until player input is detected.
  /// </summary>
  public sealed class SliderController : MonoBehaviour {
    [Tooltip("Actual UI Slider.")]
    public Slider Slider;

    // NOTE(review): a value of 0 would divide by zero inside Animate — presumably always set to
    // a positive value in the inspector; confirm.
    [Tooltip(
        "Time in seconds to animate slider moving up and down. Animation continues until input " +
        "is received from the player.")]
    public float AnimationTime = 6f;

    [Tooltip(
        "Apply smoothing to ends of slider animation (true) or leave ends as linear/sharp " +
        "(false)?")]
    public bool Smooth = true;

    [Tooltip("Move up, then down, then up (true), or up, then reset and up again (false)")]
    public bool BackAndForth = true;

    /// <summary>Is this <see cref="Slider"/> current animating?</summary>
    internal bool IsAnimating { get; private set; }

    /// <summary>
    /// Action called when this <see cref="Slider"/>'s value is changed, either as part of animation
    /// or in response to player input.
    /// </summary>
    internal Action<float> OnChange;

    /// <summary>Current animation value.</summary>
    private float AnimationCycle;

    /// <summary>
    /// Has 'no action defined' error message been debugged yet? This flag is used to prevent
    /// printing an error every single frame <see cref="Slider"/>'s value is changed (printing an
    /// error on the first frame only).
    /// </summary>
    private bool NoOnSliderErrorShown;

    /// <summary>Optional value when animation should stop (null if no stop time set).</summary>
    private float? StopValue;

    /// <summary>
    /// Flag used to make sure <see cref="Slider"/> is currently approaching <see
    /// cref="StopValue"/>.
    /// </summary>
    /// <remarks>
    /// This is used to make sure the <see cref="Slider"/> does not instantly stop if receives a
    /// <see cref="StopValue"/> that is less than the current value. Instead, the <see
    /// cref="Slider"/> must be less than the <see cref="StopValue"/> for at least one frame before
    /// can be considered to have passed the <see cref="StopValue"/>, and trigger a stop.
    /// </remarks>
    private bool CanStop;

    /// <summary>Is a starting value currently being set for <see cref="Slider"/>.</summary>
    /// <remarks>
    /// This flag is used to allow <see cref="TryOnChange"/> to skip trying to call
    /// <see cref="OnChange"/> while this value is set.
    /// </remarks>
    private bool SettingStartingValue;

    /// <summary>
    /// Setup <see cref="Slider"/>.
    /// </summary>
    private void Awake() {
      // Make sure we have a UI Slider to work with, printing an error if not.
      if (Slider == null) {
        Debug.LogError(ExampleErrors.MissingParameter(this, Slider, "Slider"));
        return;
      }

      // Connect changes in Slider's value to given action.
      Slider.onValueChanged.AddListener(TryOnChange);
    }

    /// <summary>Set the starting value to use for this <see cref="Slider"/>.</summary>
    /// <remarks>
    /// This will not trigger <see cref="OnChange"/> to be called (nor an error to be shown if
    /// <see cref="OnChange"/> has not yet been defined).
    /// </remarks>
    /// <param name="value">
    /// Starting value of <see cref="Slider"/> (assumed to be a valid 0f to 1f value).
    /// </param>
    internal void SetStartingValue(float value) {
      // Use a flag to prevent any Actions being called while the Slider's value is changed.
      SettingStartingValue = true;
      Slider.value = value;
      SettingStartingValue = false;
    }

    /// <summary>Start animating <see cref="Slider"/>.</summary>
    internal void StartAnimating() {
      // Only start animation coroutine if it has not already been started.
      AnimationCycle = 0f;
      if (!IsAnimating) {
        StartCoroutine(Animate());
      }
    }

    /// <summary>Start animating <see cref="Slider"/> at a given value.</summary>
    /// <param name="startValue">
    /// Value to start animation at (error printed if not between 0f and 1f inclusive).
    /// </param>
    internal void StartAnimatingAt(float startValue) {
      // Verify value is between 0f and 1f.
      if (startValue < 0f || startValue > 1f) {
        Debug.LogErrorFormat(
            "Invalid value of {0:N2} given to {1}.{2}.StartAnimatingAt function\n" +
            "Valid values are within the range 0f to 1f inclusive.\nDefaulting to 0f.",
            startValue, name, GetType());
        startValue = 0f;
      }

      // Set current value, and start animation coroutine if it has not already been started.
      AnimationCycle = startValue;
      if (!IsAnimating) {
        StartCoroutine(Animate());
      }
    }

    /// <summary>Stop animating <see cref="Slider"/>.</summary>
    internal void StopAnimating() {
      // Use flag to immediately stop Animate coroutine (if it is currently in progress).
      IsAnimating = false;
    }

    /// <summary>Stop animating <see cref="Slider"/> when reach a given value.</summary>
    /// <param name="stopValue">
    /// Value to stop animation at (error printed if not between 0f and 1f inclusive).
    /// </param>
    internal void StopAnimatingAt(float stopValue) {
      // Verify value is between 0f and 1f.
      if (stopValue < 0f || stopValue > 1f) {
        Debug.LogErrorFormat(
            "Invalid value of {0:N2} given to {1}.{2}.StopAnimatingAt function.\n" +
            "Valid values are within the range 0f to 1f inclusive.\nDefaulting to 1f.",
            stopValue, name, GetType());
        stopValue = 1f;
      }

      // Store value to stop animation at, and reset flag used to ensure animation does not
      // immediately stop if it has already passed this stop value.
      StopValue = stopValue;
      CanStop = false;
    }

    /// <summary>Animate slider moving.</summary>
    private IEnumerator Animate() {
      // Flag that animation is now in progress (prevents this Coroutine being redundant started
      // while it is already in progress).
      IsAnimating = true;
      float animationDirection = 1f;
      bool travellingUp = true;

      // Loop runs once per frame until explicitly stopped or any player input is detected.
      while (IsAnimating && !Input.anyKey) {
        // Use Time.smoothDeltaTime for slider movement to automatically smooth out changes in frame
        // rate.
        AnimationCycle += Time.smoothDeltaTime * animationDirection / AnimationTime;

        // If using back and forth animation, then when we reach top of slider, start moving towards
        // bottom (and vice versa).
        float animationPercent;
        if (BackAndForth) {
          if (AnimationCycle >= 1f) {
            animationDirection = -1f;
            animationPercent = 1f;
            travellingUp = false;
          } else if (AnimationCycle <= 0f) {
            animationDirection = 1f;
            animationPercent = 0f;
            travellingUp = true;
          } else {
            // When between top and bottom, optionally apply smoothing to ease the transition
            // between going up and down (and vice versa).
            animationPercent = TrySmooth(AnimationCycle);
          }

          // See if animation should be stopped this frame, adjusting animation value if so. Exactly
          // how this is measured is based on the direction the slider is currently travelling (up
          // or down).
          animationPercent =
              travellingUp ? TryStopIfAbove(animationPercent) : TryStopIfBelow(animationPercent);
        } else {
          // For regular (non back and forth) animation, optionally smoothing in between.
          if (AnimationCycle > 1f) {
            AnimationCycle %= 1f;
          }
          animationPercent = TrySmooth(AnimationCycle);

          // See if animation should be stopped this frame, adjusting animation value if so.
          animationPercent = TryStopIfAbove(animationPercent);
        }
        Slider.value = animationPercent;

        // Wait for next frame, at which point animation will stop if any input is received from the
        // player.
        yield return null;
      }
    }

    /// <summary>
    /// See if should stop animation this frame, adjusting given value if have just past stop value
    /// by going downwards, leaving value as is otherwise.
    /// </summary>
    /// <remarks>This version is used when the <see cref="Slider"/>'s value is ascending.</remarks>
    private float TryStopIfAbove(float value) {
      // If a stop value has been given, see if have passed it yet.
      if (StopValue.HasValue) {
        // Make sure that animation has been below Stop Value for at least one frame. This check
        // prevents the animation from stopping immediately if a Stop Value is given that is less
        // than the current value, waiting instead until the moment the value passes the Stop Value
        // before stopping the animation.
        if (CanStop) {
          // If have just past Stop Value, clamp to exactly this value and stop animation.
          if (value >= StopValue.Value) {
            value = StopValue.Value;
            StopValue = null;
            CanStop = false;
            IsAnimating = false;
          }
        } else {
          CanStop = value < StopValue.Value;
        }
      }
      return value;
    }

    /// <summary>
    /// See if should stop animation this frame, adjusting given value if have just past stop value
    /// by going upwards, leaving value as is otherwise.
    /// </summary>
    /// <remarks>This version is used when the <see cref="Slider"/>'s value is descending.</remarks>
    private float TryStopIfBelow(float value) {
      // If a stop value has been given, see if have passed it yet.
      if (StopValue.HasValue) {
        // Make sure that animation has been above Stop Value for at least one frame. This check
        // prevents the animation from stopping immediately if a Stop Value is given that is greater
        // than the current value, waiting instead until the moment the value passes the Stop Value
        // before stopping the animation.
        if (CanStop) {
          // If have just past Stop Value, clamp to exactly this value and stop animation.
          if (value <= StopValue.Value) {
            value = StopValue.Value;
            StopValue = null;
            CanStop = false;
            IsAnimating = false;
          }
        } else {
          CanStop = value > StopValue.Value;
        }
      }
      return value;
    }

    /// <summary>
    /// Optionally apply smoothing to a given value (if <see cref="Smooth"/> is enabled).
    /// </summary>
    // Maps [0, 1] through a half sine wave, easing both ends while preserving the endpoints.
    private float TrySmooth(float value) {
      return Smooth ? (Mathf.Sin((value - 0.5f) * Mathf.PI) + 1f) / 2f : value;
    }

    /// <summary>Respond to <see cref="Slider"/> changing value.</summary>
    private void TryOnChange(float value) {
      // Skip trying to call Action/s if the change in value was in response to the user setting a
      // starting value for the Slider.
      if (SettingStartingValue) {
        return;
      }

      // If the change was not caused by setting a starting value, confirm an Action has been given
      // to call on value changes.
      if (OnChange == null) {
        // If no Action is defined, only show an error once, skipping further errors. This avoids
        // error spamming every frame the Slider's value is changed by the user.
        if (!NoOnSliderErrorShown) {
          Debug.LogErrorFormat(
              "No OnSlider Action set for {0}.{1}, so cannot inform other classes " +
              "of new slider value of {2:N2}.",
              name, GetType(), value);
          NoOnSliderErrorShown = true;
        }
      } else {
        OnChange(value);
      }
    }
  }
}
// // Copyright (c) 2004-2021 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
// using JetBrains.Annotations;

namespace NLog
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.Diagnostics;
    using System.Globalization;
    using System.Threading;
    using NLog.Common;
    using NLog.Internal;
    using NLog.Layouts;
    using NLog.MessageTemplates;
    using NLog.Time;

    /// <summary>
    /// Represents the logging event.
    /// </summary>
    public class LogEventInfo
    {
        /// <summary>
        /// Gets the date of the first log event created.
        /// </summary>
        public static readonly DateTime ZeroDate = DateTime.UtcNow;

        // Method-group conversion: formats via string.Format(...) semantics (see GetStringFormatMessageFormatter below).
        internal static readonly LogMessageFormatter StringFormatMessageFormatter = GetStringFormatMessageFormatter;

        // Process-wide default formatter; switched at runtime by SetDefaultMessageFormatter(bool?).
        internal static LogMessageFormatter DefaultMessageFormatter { get; private set; } = LogMessageTemplateFormatter.DefaultAuto.MessageFormatter;

        // Monotonically increasing counter shared by all events; incremented with Interlocked in the constructor.
        private static int globalSequenceId;

        /// <summary>
        /// The formatted log message.
        /// </summary>
        private string _formattedMessage;

        /// <summary>
        /// The log message including any parameter placeholders
        /// </summary>
        private string _message;

        private object[] _parameters;
        private IFormatProvider _formatProvider;
        private LogMessageFormatter _messageFormatter = DefaultMessageFormatter;
        private IDictionary<Layout, object> _layoutCache;
        private PropertiesDictionary _properties;

        /// <summary>
        /// Initializes a new instance of the <see cref="LogEventInfo" /> class.
        /// </summary>
        public LogEventInfo()
        {
            TimeStamp = TimeSource.Current.Time;
            // Atomic increment keeps SequenceID unique and monotonic across threads.
            SequenceID = Interlocked.Increment(ref globalSequenceId);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="LogEventInfo" /> class.
        /// </summary>
        /// <param name="level">Log level.</param>
        /// <param name="loggerName">Logger name.</param>
        /// <param name="message">Log message including parameter placeholders.</param>
        public LogEventInfo(LogLevel level, string loggerName, [Localizable(false)] string message)
            : this(level, loggerName, null, message, null, null)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="LogEventInfo" /> class.
        /// </summary>
        /// <param name="level">Log level.</param>
        /// <param name="loggerName">Logger name.</param>
        /// <param name="message">Log message including parameter placeholders.</param>
        /// <param name="messageTemplateParameters">Already-captured message-template parameters, used to pre-populate the event properties (may be null).</param>
        public LogEventInfo(LogLevel level, string loggerName, [Localizable(false)] string message, IList<MessageTemplateParameter> messageTemplateParameters)
            : this(level, loggerName, null, message, null, null)
        {
            if (messageTemplateParameters?.Count > 0)
            {
                // Copy into a private array so later mutation of the caller's list cannot affect this event.
                var messageProperties = new MessageTemplateParameter[messageTemplateParameters.Count];
                for (int i = 0; i < messageTemplateParameters.Count; ++i)
                    messageProperties[i] = messageTemplateParameters[i];
                _properties = new PropertiesDictionary(messageProperties);
            }
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="LogEventInfo" /> class.
        /// </summary>
        /// <param name="level">Log level.</param>
        /// <param name="loggerName">Logger name.</param>
        /// <param name="formatProvider">An IFormatProvider that supplies culture-specific formatting information.</param>
        /// <param name="message">Log message including parameter placeholders.</param>
        /// <param name="parameters">Parameter array.</param>
        public LogEventInfo(LogLevel level, string loggerName, IFormatProvider formatProvider, [Localizable(false)] string message, object[] parameters)
            : this(level, loggerName, formatProvider, message, parameters, null)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="LogEventInfo" /> class.
        /// </summary>
        /// <param name="level">Log level.</param>
        /// <param name="loggerName">Logger name.</param>
        /// <param name="formatProvider">An IFormatProvider that supplies culture-specific formatting information.</param>
        /// <param name="message">Log message including parameter placeholders.</param>
        /// <param name="parameters">Parameter array.</param>
        /// <param name="exception">Exception information.</param>
        public LogEventInfo(LogLevel level, string loggerName, IFormatProvider formatProvider, [Localizable(false)] string message, object[] parameters, Exception exception) : this()
        {
            Level = level;
            LoggerName = loggerName;
            Message = message;
            Parameters = parameters;
            FormatProvider = formatProvider;
            Exception = exception;

            // Eagerly format when any parameter might itself log during ToString() (see NeedToPreformatMessage).
            if (NeedToPreformatMessage(parameters))
            {
                CalcFormattedMessage();
            }
        }

        /// <summary>
        /// Gets the unique identifier of log event which is automatically generated
        /// and monotonously increasing.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "ID", Justification = "Backwards compatibility")]
        // ReSharper disable once InconsistentNaming
        public int SequenceID { get; private set; }

        /// <summary>
        /// Gets or sets the timestamp of the logging event.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1702:CompoundWordsShouldBeCasedCorrectly", MessageId = "TimeStamp", Justification = "Backwards compatibility.")]
        public DateTime TimeStamp { get; set; }

        /// <summary>
        /// Gets or sets the level of the logging event.
        /// </summary>
        public LogLevel Level { get; set; }

        // Lazily-created holder for stack-trace / caller-info details; null until first requested.
        [CanBeNull]
        internal CallSiteInformation CallSiteInformation { get; private set; }

        // Returns the existing CallSiteInformation or creates one on first use.
        [NotNull]
        internal CallSiteInformation GetCallSiteInformationInternal()
        {
            return CallSiteInformation ?? (CallSiteInformation = new CallSiteInformation());
        }

        /// <summary>
        /// Gets a value indicating whether stack trace has been set for this event.
        /// </summary>
        public bool HasStackTrace => CallSiteInformation?.StackTrace != null;

        /// <summary>
        /// Gets the stack frame of the method that did the logging.
        /// </summary>
        public StackFrame UserStackFrame => CallSiteInformation?.UserStackFrame;

        /// <summary>
        /// Gets the number index of the stack frame that represents the user
        /// code (not the NLog code).
        /// </summary>
        public int UserStackFrameNumber => CallSiteInformation?.UserStackFrameNumberLegacy ?? CallSiteInformation?.UserStackFrameNumber ?? 0;

        /// <summary>
        /// Gets the entire stack trace.
        /// </summary>
        public StackTrace StackTrace => CallSiteInformation?.StackTrace;

        /// <summary>
        /// Gets the callsite class name
        /// </summary>
        public string CallerClassName => CallSiteInformation?.GetCallerClassName(null, true, true, true);

        /// <summary>
        /// Gets the callsite member function name
        /// </summary>
        public string CallerMemberName => CallSiteInformation?.GetCallerMemberName(null, false, true, true);

        /// <summary>
        /// Gets the callsite source file path
        /// </summary>
        public string CallerFilePath => CallSiteInformation?.GetCallerFilePath(0);

        /// <summary>
        /// Gets the callsite source file line number
        /// </summary>
        public int CallerLineNumber => CallSiteInformation?.GetCallerLineNumber(0) ?? 0;

        /// <summary>
        /// Gets or sets the exception information.
        /// </summary>
        [CanBeNull]
        public Exception Exception { get; set; }

        /// <summary>
        /// Gets or sets the logger name.
        /// </summary>
        [CanBeNull]
        public string LoggerName { get; set; }

        /// <summary>
        /// Gets the logger short name (the part of <see cref="LoggerName"/> after the last dot).
        /// </summary>
        /// <remarks>This property was marked as obsolete on NLog 2.0 and it may be removed in a future release.</remarks>
        [Obsolete("This property should not be used. Marked obsolete on NLog 2.0")]
        public string LoggerShortName
        {
            // NOTE: This property is not referenced by NLog code anymore.
            get
            {
                if (LoggerName == null)
                    return LoggerName;

                int lastDot = LoggerName.LastIndexOf('.');
                if (lastDot >= 0)
                {
                    return LoggerName.Substring(lastDot + 1);
                }

                return LoggerName;
            }
        }

        /// <summary>
        /// Gets or sets the log message including any parameter placeholders.
        /// </summary>
        public string Message
        {
            get => _message;
            set
            {
                // Capture whether template parameters must be rebuilt BEFORE replacing the message,
                // since ResetMessageTemplateParameters inspects the current state.
                bool rebuildMessageTemplateParameters = ResetMessageTemplateParameters();
                _message = value;
                ResetFormattedMessage(rebuildMessageTemplateParameters);
            }
        }

        /// <summary>
        /// Gets or sets the parameter values or null if no parameters have been specified.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1819:PropertiesShouldNotReturnArrays", Justification = "For backwards compatibility.")]
        public object[] Parameters
        {
            get => _parameters;
            set
            {
                // Same ordering requirement as the Message setter above.
                bool rebuildMessageTemplateParameters = ResetMessageTemplateParameters();
                _parameters = value;
                ResetFormattedMessage(rebuildMessageTemplateParameters);
            }
        }

        /// <summary>
        /// Gets or sets the format provider that was provided while logging or <see langword="null" />
        /// when no formatProvider was specified.
        /// </summary>
        public IFormatProvider FormatProvider
        {
            get => _formatProvider;
            set
            {
                if (_formatProvider != value)
                {
                    _formatProvider = value;
                    // Cached formatted message is culture-dependent, so invalidate it.
                    ResetFormattedMessage(false);
                }
            }
        }

        /// <summary>
        /// Gets or sets the message formatter for generating <see cref="LogEventInfo.FormattedMessage"/>
        /// Uses string.Format(...) when nothing else has been configured.
        /// </summary>
        public LogMessageFormatter MessageFormatter
        {
            get => _messageFormatter;
            set
            {
                // Null falls back to plain string.Format semantics rather than throwing.
                _messageFormatter = value ?? StringFormatMessageFormatter;
                ResetFormattedMessage(false);
            }
        }

        /// <summary>
        /// Gets the formatted message (rendered lazily on first access and then cached).
        /// </summary>
        public string FormattedMessage
        {
            get
            {
                if (_formattedMessage == null)
                {
                    CalcFormattedMessage();
                }

                return _formattedMessage;
            }
        }

        /// <summary>
        /// Checks if any per-event properties (Without allocation)
        /// </summary>
        public bool HasProperties
        {
            get
            {
                if (_properties != null)
                {
                    return _properties.Count > 0;
                }
                else
                {
                    // forceCreate:false means the dictionary is only materialized when
                    // message-template parameters actually exist.
                    return CreateOrUpdatePropertiesInternal(false)?.Count > 0;
                }
            }
        }

        /// <summary>
        /// Gets the dictionary of per-event context properties.
        /// </summary>
        public IDictionary<object, object> Properties => CreateOrUpdatePropertiesInternal();

        /// <summary>
        /// Gets the dictionary of per-event context properties.
        /// Internal helper for the PropertiesDictionary type.
        /// </summary>
        /// <param name="forceCreate">Create the event-properties dictionary, even if no initial template parameters</param>
        /// <param name="templateParameters">Provided when having parsed the message template and capture template parameters (else null)</param>
        /// <returns>The (possibly newly created) properties dictionary, or null when not forced and nothing to hold.</returns>
        internal PropertiesDictionary CreateOrUpdatePropertiesInternal(bool forceCreate = true, IList<MessageTemplateParameter> templateParameters = null)
        {
            var properties = _properties;
            if (properties == null)
            {
                if (forceCreate || templateParameters?.Count > 0 || (templateParameters == null && HasMessageTemplateParameters))
                {
                    properties = new PropertiesDictionary(templateParameters);
                    // Lock-free lazy init: only the first thread's dictionary wins; losers' allocations are discarded.
                    Interlocked.CompareExchange(ref _properties, properties, null);
                    if (templateParameters == null && (!forceCreate || HasMessageTemplateParameters))
                    {
                        // Trigger capture of MessageTemplateParameters from logevent-message
                        CalcFormattedMessage();
                    }
                }
            }
            else if (templateParameters != null)
            {
                properties.MessageProperties = templateParameters;
            }

            return _properties;
        }

        // True when the (not yet rendered) message template would yield named properties.
        private bool HasMessageTemplateParameters
        {
            get
            {
                // Have not yet parsed/rendered the FormattedMessage, so check with ILogMessageFormatter
                if (_formattedMessage == null && _parameters?.Length > 0)
                {
                    var logMessageFormatter = _messageFormatter?.Target as ILogMessageFormatter;
                    return logMessageFormatter?.HasProperties(this) ?? false;
                }

                return false;
            }
        }

        /// <summary>
        /// Gets the named parameters extracted from parsing <see cref="Message"/> as MessageTemplate
        /// </summary>
        public MessageTemplateParameters MessageTemplateParameters
        {
            get
            {
                if (_properties != null && _properties.MessageProperties.Count > 0)
                {
                    return new MessageTemplateParameters(_properties.MessageProperties, _message, _parameters);
                }
                else if (_parameters?.Length > 0)
                {
                    return new MessageTemplateParameters(_message, _parameters);
                }
                else
                {
                    return MessageTemplateParameters.Empty; // No parameters, means nothing to parse
                }
            }
        }

        /// <summary>
        /// Gets the dictionary of per-event context properties.
        /// </summary>
        /// <remarks>This property was marked as obsolete on NLog 2.0 and it may be removed in a future release.</remarks>
        [Obsolete("Use LogEventInfo.Properties instead. Marked obsolete on NLog 2.0", true)]
        public IDictionary Context => CreateOrUpdatePropertiesInternal().EventContext;

        /// <summary>
        /// Creates the null event.
        /// </summary>
        /// <returns>Null log event.</returns>
        public static LogEventInfo CreateNullEvent()
        {
            return new LogEventInfo(LogLevel.Off, string.Empty, string.Empty);
        }

        /// <summary>
        /// Creates the log event.
        /// </summary>
        /// <param name="logLevel">The log level.</param>
        /// <param name="loggerName">Name of the logger.</param>
        /// <param name="message">The message.</param>
        /// <returns>Instance of <see cref="LogEventInfo"/>.</returns>
        public static LogEventInfo Create(LogLevel logLevel, string loggerName, [Localizable(false)] string message)
        {
            return new LogEventInfo(logLevel, loggerName, null, message, null);
        }

        /// <summary>
        /// Creates the log event.
        /// </summary>
        /// <param name="logLevel">The log level.</param>
        /// <param name="loggerName">Name of the logger.</param>
        /// <param name="formatProvider">The format provider.</param>
        /// <param name="message">The message.</param>
        /// <param name="parameters">The parameters.</param>
        /// <returns>Instance of <see cref="LogEventInfo"/>.</returns>
        public static LogEventInfo Create(LogLevel logLevel, string loggerName, IFormatProvider formatProvider, [Localizable(false)] string message, object[] parameters)
        {
            return new LogEventInfo(logLevel, loggerName, formatProvider, message, parameters);
        }

        /// <summary>
        /// Creates the log event.
        /// </summary>
        /// <param name="logLevel">The log level.</param>
        /// <param name="loggerName">Name of the logger.</param>
        /// <param name="formatProvider">The format provider.</param>
        /// <param name="message">The message.</param>
        /// <returns>Instance of <see cref="LogEventInfo"/>.</returns>
        public static LogEventInfo Create(LogLevel logLevel, string loggerName, IFormatProvider formatProvider, object message)
        {
            Exception exception = message as Exception;
            if (exception == null && message is LogEventInfo logEvent)
            {
                // Reuse an existing event in place instead of wrapping it as a "{0}" parameter.
                logEvent.LoggerName = loggerName;
                logEvent.Level = logLevel;
                logEvent.FormatProvider = formatProvider ?? logEvent.FormatProvider;
                return logEvent;
            }

            // Anything else (including an Exception) is rendered through the "{0}" placeholder.
            return new LogEventInfo(logLevel, loggerName, formatProvider, "{0}", new[] { message }, exception);
        }

        /// <summary>
        /// Creates the log event.
        /// </summary>
        /// <param name="logLevel">The log level.</param>
        /// <param name="loggerName">Name of the logger.</param>
        /// <param name="message">The message.</param>
        /// <param name="exception">The exception.</param>
        /// <returns>Instance of <see cref="LogEventInfo"/>.</returns>
        /// <remarks>This method was marked as obsolete before NLog 4.3.11 and it may be removed in a future release.</remarks>
        [Obsolete("use Create(LogLevel logLevel, string loggerName, Exception exception, IFormatProvider formatProvider, string message) instead. Marked obsolete before v4.3.11")]
        public static LogEventInfo Create(LogLevel logLevel, string loggerName, [Localizable(false)] string message, Exception exception)
        {
            return new LogEventInfo(logLevel, loggerName, null, message, null, exception);
        }

        /// <summary>
        /// Creates the log event.
        /// </summary>
        /// <param name="logLevel">The log level.</param>
        /// <param name="loggerName">Name of the logger.</param>
        /// <param name="exception">The exception.</param>
        /// <param name="formatProvider">The format provider.</param>
        /// <param name="message">The message.</param>
        /// <returns>Instance of <see cref="LogEventInfo"/>.</returns>
        public static LogEventInfo Create(LogLevel logLevel, string loggerName, Exception exception, IFormatProvider formatProvider, [Localizable(false)] string message)
        {
            return Create(logLevel, loggerName, exception, formatProvider, message, null);
        }

        /// <summary>
        /// Creates the log event.
        /// </summary>
        /// <param name="logLevel">The log level.</param>
        /// <param name="loggerName">Name of the logger.</param>
        /// <param name="exception">The exception.</param>
        /// <param name="formatProvider">The format provider.</param>
        /// <param name="message">The message.</param>
        /// <param name="parameters">The parameters.</param>
        /// <returns>Instance of <see cref="LogEventInfo"/>.</returns>
        public static LogEventInfo Create(LogLevel logLevel, string loggerName, Exception exception, IFormatProvider formatProvider, [Localizable(false)] string message, object[] parameters)
        {
            return new LogEventInfo(logLevel, loggerName, formatProvider, message, parameters, exception);
        }

        /// <summary>
        /// Creates <see cref="AsyncLogEventInfo"/> from this <see cref="LogEventInfo"/> by attaching the specified asynchronous continuation.
        /// </summary>
        /// <param name="asyncContinuation">The asynchronous continuation.</param>
        /// <returns>Instance of <see cref="AsyncLogEventInfo"/> with attached continuation.</returns>
        public AsyncLogEventInfo WithContinuation(AsyncContinuation asyncContinuation)
        {
            return new AsyncLogEventInfo(this, asyncContinuation);
        }

        /// <summary>
        /// Returns a string representation of this log event.
        /// </summary>
        /// <returns>String representation of the log event.</returns>
        public override string ToString()
        {
            return $"Log Event: Logger='{LoggerName}' Level={Level} Message='{FormattedMessage}' SequenceID={SequenceID}";
        }

        /// <summary>
        /// Sets the stack trace for the event info.
        /// </summary>
        /// <param name="stackTrace">The stack trace.</param>
        /// <param name="userStackFrame">Index of the first user stack frame within the stack trace (Negative means NLog should skip stackframes from System-assemblies).</param>
        public void SetStackTrace(StackTrace stackTrace, int userStackFrame)
        {
            GetCallSiteInformationInternal().SetStackTrace(stackTrace, userStackFrame >= 0 ? userStackFrame : (int?)null);
        }

        /// <summary>
        /// Sets the details retrieved from the Caller Information Attributes
        /// </summary>
        /// <param name="callerClassName"></param>
        /// <param name="callerMemberName"></param>
        /// <param name="callerFilePath"></param>
        /// <param name="callerLineNumber"></param>
        public void SetCallerInfo(string callerClassName, string callerMemberName, string callerFilePath, int callerLineNumber)
        {
            GetCallSiteInformationInternal().SetCallerInfo(callerClassName, callerMemberName, callerFilePath, callerLineNumber);
        }

        // Caches a rendered layout value for this event; thread-safe via CompareExchange + lock.
        internal void AddCachedLayoutValue(Layout layout, object value)
        {
            if (_layoutCache == null)
            {
                var dictionary = new Dictionary<Layout, object>();
                dictionary[layout] = value;     // Faster than collection initializer
                if (Interlocked.CompareExchange(ref _layoutCache, dictionary, null) == null)
                {
                    return;     // No need to use lock
                }
            }

            lock (_layoutCache)
            {
                _layoutCache[layout] = value;
            }
        }

        // Looks up a previously cached layout value; returns false when nothing has been cached.
        internal bool TryGetCachedLayoutValue(Layout layout, out object value)
        {
            if (_layoutCache == null)
            {
                // We don't need lock to see if dictionary has been created
                value = null;
                return false;
            }

            lock (_layoutCache)
            {
                if (_layoutCache.Count == 0)
                {
                    value = null;
                    return false;
                }

                return _layoutCache.TryGetValue(layout, out value);
            }
        }

        // Decides whether the message must be formatted eagerly in the constructor.
        private static bool NeedToPreformatMessage(object[] parameters)
        {
            // we need to preformat message if it contains any parameters which could possibly
            // do logging in their ToString()
            if (parameters == null || parameters.Length == 0)
            {
                return false;
            }

            if (parameters.Length > 5)
            {
                // too many parameters, too costly to check
                return true;
            }

            for (int i = 0; i < parameters.Length; ++i)
            {
                if (!IsSafeToDeferFormatting(parameters[i]))
                    return true;
            }

            return false;
        }

        // Primitive-ish values (anything with a TypeCode other than Object) have stable ToString() and are safe to defer.
        private static bool IsSafeToDeferFormatting(object value)
        {
            return Convert.GetTypeCode(value) != TypeCode.Object;
        }

        // True when this event holds no mutable state, so a target may safely capture it without cloning.
        internal bool IsLogEventMutableSafe()
        {
            if (Exception != null || _formattedMessage != null)
                return false;

            var properties = CreateOrUpdatePropertiesInternal(false);
            if (properties == null || properties.Count == 0)
                return true;    // No mutable state, no need to precalculate

            if (properties.Count > 5)
                return false;   // too many properties, too costly to check

            if (properties.Count == _parameters?.Length && properties.Count == properties.MessageProperties.Count)
                return true;    // Already checked formatted message, no need to do it twice

            return HasImmutableProperties(properties);
        }

        // Checks every property value with IsSafeToDeferFormatting.
        private static bool HasImmutableProperties(PropertiesDictionary properties)
        {
            if (properties.Count == properties.MessageProperties.Count)
            {
                // Skip enumerator allocation when all properties comes from the message-template
                for (int i = 0; i < properties.MessageProperties.Count; ++i)
                {
                    var property = properties.MessageProperties[i];
                    if (!IsSafeToDeferFormatting(property.Value))
                        return false;
                }
            }
            else
            {
                // Already spent the time on allocating a Dictionary, also have time for an enumerator
                foreach (var property in properties)
                {
                    if (!IsSafeToDeferFormatting(property.Value))
                        return false;
                }
            }

            return true;
        }

        // True when rendering can be postponed until the target actually needs the message.
        internal bool CanLogEventDeferMessageFormat()
        {
            if (_formattedMessage != null)
                return false;   // Already formatted, cannot be deferred

            if (_parameters == null || _parameters.Length == 0)
                return false;   // No parameters to format

            if (_message?.Length < 256 && ReferenceEquals(MessageFormatter, LogMessageTemplateFormatter.DefaultAuto.MessageFormatter))
                return true;    // Not too expensive to scan for properties
            else
                return false;
        }

        // Fallback formatter: plain string.Format with CurrentCulture when no FormatProvider was supplied.
        private static string GetStringFormatMessageFormatter(LogEventInfo logEvent)
        {
            if (logEvent.Parameters == null || logEvent.Parameters.Length == 0)
            {
                return logEvent.Message;
            }
            else
            {
                return string.Format(logEvent.FormatProvider ?? CultureInfo.CurrentCulture, logEvent.Message, logEvent.Parameters);
            }
        }

        // Renders and caches the formatted message; formatting failures fall back to the raw message.
        private void CalcFormattedMessage()
        {
            try
            {
                _formattedMessage = _messageFormatter(this);
            }
            catch (Exception exception)
            {
                _formattedMessage = Message;
                InternalLogger.Warn(exception, "Error when formatting a message.");

                if (exception.MustBeRethrown())
                {
                    throw;
                }
            }
        }

        // Appends the formatted message directly to a StringBuilder; on failure the partial output is
        // rolled back (builder.Length reset) and the raw message appended instead.
        internal void AppendFormattedMessage(ILogMessageFormatter messageFormatter, System.Text.StringBuilder builder)
        {
            if (_formattedMessage != null)
            {
                builder.Append(_formattedMessage);
            }
            else
            {
                int originalLength = builder.Length;
                try
                {
                    messageFormatter.AppendFormattedMessage(this, builder);
                }
                catch (Exception ex)
                {
                    builder.Length = originalLength;
                    builder.Append(_message ?? string.Empty);
                    InternalLogger.Warn(ex, "Error when formatting a message.");
                    if (ex.MustBeRethrown())
                    {
                        throw;
                    }
                }
            }
        }

        // Invalidates the cached formatted message; optionally re-renders immediately to
        // recapture the message-template parameters.
        private void ResetFormattedMessage(bool rebuildMessageTemplateParameters)
        {
            _formattedMessage = null;
            if (rebuildMessageTemplateParameters && HasMessageTemplateParameters)
            {
                CalcFormattedMessage();
            }
        }

        // Clears captured template parameters; returns true when the properties dictionary exists
        // and ends up with no message properties (caller uses this to decide whether to rebuild).
        private bool ResetMessageTemplateParameters()
        {
            if (_properties != null)
            {
                if (HasMessageTemplateParameters)
                    _properties.MessageProperties = null;

                return _properties.MessageProperties.Count == 0;
            }

            return false;
        }

        /// <summary>
        /// Set the <see cref="DefaultMessageFormatter"/>
        /// </summary>
        /// <param name="mode">true = Always, false = Never, null = Auto Detect</param>
        internal static void SetDefaultMessageFormatter(bool? mode)
        {
            if (mode == true)
            {
                InternalLogger.Info("Message Template Format always enabled");
                DefaultMessageFormatter = LogMessageTemplateFormatter.Default.MessageFormatter;
            }
            else if (mode == false)
            {
                InternalLogger.Info("Message Template String Format always enabled");
                DefaultMessageFormatter = StringFormatMessageFormatter;
            }
            else
            {
                //null = auto
                InternalLogger.Info("Message Template Auto Format enabled");
                DefaultMessageFormatter = LogMessageTemplateFormatter.DefaultAuto.MessageFormatter;
            }
        }
    }
}
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; using System.Net.Http; using System.Net.Http.Formatting; using System.Net.Http.Headers; using System.Web.Http.Description; using System.Xml.Linq; using Newtonsoft.Json; namespace TheBigCatProject.Server.Areas.HelpPage { /// <summary> /// This class will generate the samples for the help page. /// </summary> public class HelpPageSampleGenerator { /// <summary> /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class. /// </summary> public HelpPageSampleGenerator() { ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>(); ActionSamples = new Dictionary<HelpPageSampleKey, object>(); SampleObjects = new Dictionary<Type, object>(); SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>> { DefaultSampleObjectFactory, }; } /// <summary> /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>. /// </summary> public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; } /// <summary> /// Gets the objects that are used directly as samples for certain actions. /// </summary> public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; } /// <summary> /// Gets the objects that are serialized as samples by the supported formatters. /// </summary> public IDictionary<Type, object> SampleObjects { get; internal set; } /// <summary> /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order, /// stopping when the factory successfully returns a non-<see langref="null"/> object. /// </summary> /// <remarks> /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. 
Use /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks> [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Justification = "This is an appropriate nesting of generic types")] public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; } /// <summary> /// Gets the request body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api) { return GetSample(api, SampleDirection.Request); } /// <summary> /// Gets the response body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api) { return GetSample(api, SampleDirection.Response); } /// <summary> /// Gets the request or response body samples. 
/// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The samples keyed by media type.</returns> public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection) { if (api == null) { throw new ArgumentNullException("api"); } string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName; string actionName = api.ActionDescriptor.ActionName; IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name); Collection<MediaTypeFormatter> formatters; Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters); var samples = new Dictionary<MediaTypeHeaderValue, object>(); // Use the samples provided directly for actions var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection); foreach (var actionSample in actionSamples) { samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value)); } // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage. // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters. 
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type)) { object sampleObject = GetSampleObject(type); foreach (var formatter in formatters) { foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes) { if (!samples.ContainsKey(mediaType)) { object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection); // If no sample found, try generate sample using formatter and sample object if (sample == null && sampleObject != null) { sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType); } samples.Add(mediaType, WrapSampleIfString(sample)); } } } } return samples; } /// <summary> /// Search for samples that are provided directly through <see cref="ActionSamples"/>. /// </summary> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="type">The CLR type.</param> /// <param name="formatter">The formatter.</param> /// <param name="mediaType">The media type.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The sample that matches the parameters.</returns> public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection) { object sample; // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames. // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames. // If still not found, try to get the sample provided for the specified mediaType and type. 
// Finally, try to get the sample provided for the specified mediaType. if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample)) { return sample; } return null; } /// <summary> /// Gets the sample object that will be serialized by the formatters. /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other /// factories in <see cref="SampleObjectFactories"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>The sample object.</returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")] public virtual object GetSampleObject(Type type) { object sampleObject; if (!SampleObjects.TryGetValue(type, out sampleObject)) { // No specific object available, try our factories. foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories) { if (factory == null) { continue; } try { sampleObject = factory(this, type); if (sampleObject != null) { break; } } catch { // Ignore any problems encountered in the factory; go on to the next one (if any). } } } return sampleObject; } /// <summary> /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. 
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
    string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
    string actionName = api.ActionDescriptor.ActionName;
    IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
    Collection<MediaTypeFormatter> formatters;
    return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}

/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters supporting the resolved type.</param>
/// <returns>The resolved type; <c>null</c> for a request action with no FromBody parameter.</returns>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
    if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
    {
        throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
    }
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }
    Type type;
    // A type explicitly registered in ActualHttpMessageTypes (exact parameter-name match, or the
    // "*" wildcard entry) overrides whatever the ApiDescription reports.
    if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
        ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
    {
        // Re-compute the supported formatters based on type
        Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
        foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
        {
            if (IsFormatSupported(sampleDirection, formatter, type))
            {
                newFormatters.Add(formatter);
            }
        }
        formatters = newFormatters;
    }
    else
    {
        switch (sampleDirection)
        {
            case SampleDirection.Request:
                // The request body type is the (single) FromBody parameter, if any.
                ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                formatters = api.SupportedRequestBodyFormatters;
                break;
            case SampleDirection.Response:
            default:
                type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                formatters = api.SupportedResponseFormatters;
                break;
        }
    }
    return type;
}

/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
    if (formatter == null)
    {
        throw new ArgumentNullException("formatter");
    }
    if (mediaType == null)
    {
        throw new ArgumentNullException("mediaType");
    }

    object sample = String.Empty;
    MemoryStream ms = null;
    HttpContent content = null;
    try
    {
        if (formatter.CanWriteType(type))
        {
            ms = new MemoryStream();
            content = new ObjectContent(type, value, formatter, mediaType);
            formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
            ms.Position = 0;
            // FIX: dispose the reader deterministically. Disposing it also disposes `ms`,
            // which is safe: MemoryStream tolerates the second Dispose() in the finally block.
            string serializedSampleString;
            using (StreamReader reader = new StreamReader(ms))
            {
                serializedSampleString = reader.ReadToEnd();
            }
            // Pretty-print known text formats so the help page is readable.
            if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
            {
                serializedSampleString = TryFormatXml(serializedSampleString);
            }
            else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
            {
                serializedSampleString = TryFormatJson(serializedSampleString);
            }

            sample = new TextSample(serializedSampleString);
        }
        else
        {
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                mediaType,
                formatter.GetType().Name,
                type.Name));
        }
    }
    catch (Exception e)
    {
        sample = new InvalidSample(String.Format(
            CultureInfo.CurrentCulture,
            "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
            formatter.GetType().Name,
            mediaType.MediaType,
            UnwrapException(e).Message));
    }
    finally
    {
        if (ms != null)
        {
            ms.Dispose();
        }
        if (content != null)
        {
            content.Dispose();
        }
    }

    return sample;
}

// Unwraps the first inner exception of an AggregateException (e.g. from Task.Wait) so the
// recorded message describes the real failure.
internal static Exception UnwrapException(Exception exception)
{
    AggregateException aggregateException = exception as AggregateException;
    if (aggregateException != null)
    {
        return aggregateException.Flatten().InnerException;
    }
    return exception;
}

// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
    // Try to create a default sample object
    ObjectGenerator objectGenerator = new ObjectGenerator();
    return objectGenerator.GenerateObject(type);
}

[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
    try
    {
        object parsedJson = JsonConvert.DeserializeObject(str);
        return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
    }
    catch
    {
        // can't parse JSON, return the original string
        return str;
    }
}

[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
    try
    {
        XDocument xml = XDocument.Parse(str);
        return xml.ToString();
    }
    catch
    {
        // can't parse XML, return the original string
        return str;
    }
}

// A formatter supports a sample when it can read (request) or write (response) the type.
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
    switch (sampleDirection)
    {
        case SampleDirection.Request:
            return formatter.CanReadType(type);
        case SampleDirection.Response:
            return formatter.CanWriteType(type);
    }
    return false;
}

// Enumerates registered action samples matching the controller/action (case-insensitive),
// the direction, and either the exact parameter set or the "*" wildcard.
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
    HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
    foreach (var sample in ActionSamples)
    {
        HelpPageSampleKey sampleKey = sample.Key;
        if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
            String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
            (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
            sampleDirection == sampleKey.SampleDirection)
        {
            yield return sample;
        }
    }
}

// Raw string samples are wrapped in TextSample so views render them uniformly.
private static object WrapSampleIfString(object sample)
{
    string stringSample = sample as string;
    if (stringSample != null)
    {
        return new TextSample(stringSample);
    }

    return sample;
}
}
}
/* * Farseer Physics Engine: * Copyright (c) 2012 Ian Qvist * * Original source Box2D: * Copyright (c) 2006-2011 Erin Catto http://www.box2d.org * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. */ #pragma warning disable 0162 namespace TrueSync.Physics2D { // Linear constraint (point-to-line) // d = pB - pA = xB + rB - xA - rA // C = dot(ay, d) // Cdot = dot(d, cross(wA, ay)) + dot(ay, vB + cross(wB, rB) - vA - cross(wA, rA)) // = -dot(ay, vA) - dot(cross(d + rA, ay), wA) + dot(ay, vB) + dot(cross(rB, ay), vB) // J = [-ay, -cross(d + rA, ay), ay, cross(rB, ay)] // Spring linear constraint // C = dot(ax, d) // Cdot = = -dot(ax, vA) - dot(cross(d + rA, ax), wA) + dot(ax, vB) + dot(cross(rB, ax), vB) // J = [-ax -cross(d+rA, ax) ax cross(rB, ax)] // Motor rotational constraint // Cdot = wB - wA // J = [0 0 -1 0 0 1] /// <summary> /// A wheel joint. This joint provides two degrees of freedom: translation /// along an axis fixed in bodyA and rotation in the plane. You can use a /// joint limit to restrict the range of motion and a joint motor to drive /// the rotation or to model rotational friction. /// This joint is designed for vehicle suspensions. 
/// </summary>
public class WheelJoint : Joint2D
{
    // Solver shared
    private TSVector2 _localYAxis;

    private FP _impulse;
    private FP _motorImpulse;
    private FP _springImpulse;

    private FP _maxMotorTorque;
    private FP _motorSpeed;
    private bool _enableMotor;

    // Solver temp
    private int _indexA;
    private int _indexB;
    private TSVector2 _localCenterA;
    private TSVector2 _localCenterB;
    private FP _invMassA;
    private FP _invMassB;
    private FP _invIA;
    private FP _invIB;

    private TSVector2 _ax, _ay;
    private FP _sAx, _sBx; // moment arms of the spring (x) axis about each body's center
    private FP _sAy, _sBy; // moment arms of the constraint (y) axis about each body's center

    private FP _mass;       // effective mass of the point-to-line constraint
    private FP _motorMass;  // effective mass of the rotational motor
    private FP _springMass; // effective mass of the soft suspension constraint

    private FP _bias;
    private FP _gamma;
    private TSVector2 _axis;

    internal WheelJoint()
    {
        JointType = JointType.Wheel;
    }

    /// <summary>
    /// Constructor for WheelJoint
    /// </summary>
    /// <param name="bodyA">The first body</param>
    /// <param name="bodyB">The second body</param>
    /// <param name="anchor">The anchor point</param>
    /// <param name="axis">The axis</param>
    /// <param name="useWorldCoordinates">Set to true if you are using world coordinates as anchors.</param>
    public WheelJoint(Body bodyA, Body bodyB, TSVector2 anchor, TSVector2 axis, bool useWorldCoordinates = false)
        : base(bodyA, bodyB)
    {
        JointType = JointType.Wheel;

        if (useWorldCoordinates)
        {
            LocalAnchorA = bodyA.GetLocalPoint(anchor);
            LocalAnchorB = bodyB.GetLocalPoint(anchor);
        }
        else
        {
            // `anchor` is interpreted as local to bodyB; map it through world space into bodyA.
            LocalAnchorA = bodyA.GetLocalPoint(bodyB.GetWorldPoint(anchor));
            LocalAnchorB = anchor;
        }

        Axis = axis; //FPE only: We maintain the original value as it is supposed to.
    }

    /// <summary>
    /// The local anchor point on BodyA
    /// </summary>
    public TSVector2 LocalAnchorA { get; set; }

    /// <summary>
    /// The local anchor point on BodyB
    /// </summary>
    public TSVector2 LocalAnchorB { get; set; }

    public override TSVector2 WorldAnchorA
    {
        get { return BodyA.GetWorldPoint(LocalAnchorA); }
        set { LocalAnchorA = BodyA.GetLocalPoint(value); }
    }

    public override TSVector2 WorldAnchorB
    {
        get { return BodyB.GetWorldPoint(LocalAnchorB); }
        set { LocalAnchorB = BodyB.GetLocalPoint(value); }
    }

    /// <summary>
    /// The axis at which the suspension moves.
    /// </summary>
    public TSVector2 Axis
    {
        get { return _axis; }
        set
        {
            _axis = value;
            LocalXAxis = BodyA.GetLocalVector(_axis);
            // The constraint (y) axis is perpendicular to the suspension (x) axis.
            _localYAxis = MathUtils.Cross(1.0f, LocalXAxis);
        }
    }

    /// <summary>
    /// The axis in local coordinates relative to BodyA
    /// </summary>
    public TSVector2 LocalXAxis { get; private set; }

    /// <summary>
    /// The desired motor speed in radians per second.
    /// </summary>
    public FP MotorSpeed
    {
        get { return _motorSpeed; }
        set
        {
            WakeBodies();
            _motorSpeed = value;
        }
    }

    /// <summary>
    /// The maximum motor torque, usually in N-m.
    /// </summary>
    public FP MaxMotorTorque
    {
        get { return _maxMotorTorque; }
        set
        {
            WakeBodies();
            _maxMotorTorque = value;
        }
    }

    /// <summary>
    /// Suspension frequency, zero indicates no suspension
    /// </summary>
    public FP Frequency { get; set; }

    /// <summary>
    /// Suspension damping ratio, one indicates critical damping
    /// </summary>
    public FP DampingRatio { get; set; }

    /// <summary>
    /// Gets the translation along the axis
    /// </summary>
    public FP JointTranslation
    {
        get
        {
            Body bA = BodyA;
            Body bB = BodyB;

            TSVector2 pA = bA.GetWorldPoint(LocalAnchorA);
            TSVector2 pB = bB.GetWorldPoint(LocalAnchorB);
            TSVector2 d = pB - pA;
            TSVector2 axis = bA.GetWorldVector(LocalXAxis);

            FP translation = TSVector2.Dot(d, axis);
            return translation;
        }
    }

    /// <summary>
    /// Gets the angular velocity of the joint
    /// </summary>
    public FP JointSpeed
    {
        get
        {
            FP wA = BodyA.AngularVelocity;
            FP wB = BodyB.AngularVelocity;
            return wB - wA;
        }
    }

    /// <summary>
    /// Enable/disable the joint motor.
    /// </summary>
    public bool MotorEnabled
    {
        get { return _enableMotor; }
        set
        {
            WakeBodies();
            _enableMotor = value;
        }
    }

    /// <summary>
    /// Gets the torque of the motor
    /// </summary>
    /// <param name="invDt">inverse delta time</param>
    public FP GetMotorTorque(FP invDt)
    {
        return invDt * _motorImpulse;
    }

    public override TSVector2 GetReactionForce(FP invDt)
    {
        return invDt * (_impulse * _ay + _springImpulse * _ax);
    }

    public override FP GetReactionTorque(FP invDt)
    {
        return invDt * _motorImpulse;
    }

    internal override void InitVelocityConstraints(ref SolverData data)
    {
        _indexA = BodyA.IslandIndex;
        _indexB = BodyB.IslandIndex;
        _localCenterA = BodyA._sweep.LocalCenter;
        _localCenterB = BodyB._sweep.LocalCenter;
        _invMassA = BodyA._invMass;
        _invMassB = BodyB._invMass;
        _invIA = BodyA._invI;
        _invIB = BodyB._invI;

        FP mA = _invMassA, mB = _invMassB;
        FP iA = _invIA, iB = _invIB;

        TSVector2 cA = data.positions[_indexA].c;
        FP aA = data.positions[_indexA].a;
        TSVector2 vA = data.velocities[_indexA].v;
        FP wA = data.velocities[_indexA].w;

        TSVector2 cB = data.positions[_indexB].c;
        FP aB = data.positions[_indexB].a;
        TSVector2 vB = data.velocities[_indexB].v;
        FP wB = data.velocities[_indexB].w;

        Rot qA = new Rot(aA), qB = new Rot(aB);

        // Compute the effective masses.
        TSVector2 rA = MathUtils.Mul(qA, LocalAnchorA - _localCenterA);
        TSVector2 rB = MathUtils.Mul(qB, LocalAnchorB - _localCenterB);
        TSVector2 d1 = cB + rB - cA - rA;

        // Point to line constraint
        {
            _ay = MathUtils.Mul(qA, _localYAxis);
            _sAy = MathUtils.Cross(d1 + rA, _ay);
            _sBy = MathUtils.Cross(rB, _ay);

            _mass = mA + mB + iA * _sAy * _sAy + iB * _sBy * _sBy;

            if (_mass > 0.0f)
            {
                _mass = 1.0f / _mass;
            }
        }

        // Spring constraint
        _springMass = 0.0f;
        _bias = 0.0f;
        _gamma = 0.0f;

        if (Frequency > 0.0f)
        {
            _ax = MathUtils.Mul(qA, LocalXAxis);
            _sAx = MathUtils.Cross(d1 + rA, _ax);
            _sBx = MathUtils.Cross(rB, _ax);

            FP invMass = mA + mB + iA * _sAx * _sAx + iB * _sBx * _sBx;

            if (invMass > 0.0f)
            {
                _springMass = 1.0f / invMass;

                FP C = TSVector2.Dot(d1, _ax);

                // Frequency
                FP omega = 2.0f * Settings.Pi * Frequency;

                // Damping coefficient
                FP d = 2.0f * _springMass * DampingRatio * omega;

                // Spring stiffness
                FP k = _springMass * omega * omega;

                // magic formulas (soft-constraint parameters, see Box2D)
                FP h = data.step.dt;
                _gamma = h * (d + h * k);
                if (_gamma > 0.0f)
                {
                    _gamma = 1.0f / _gamma;
                }

                _bias = C * h * k * _gamma;

                _springMass = invMass + _gamma;
                if (_springMass > 0.0f)
                {
                    _springMass = 1.0f / _springMass;
                }
            }
        }
        else
        {
            _springImpulse = 0.0f;
        }

        // Rotational motor
        if (_enableMotor)
        {
            _motorMass = iA + iB;
            if (_motorMass > 0.0f)
            {
                _motorMass = 1.0f / _motorMass;
            }
        }
        else
        {
            _motorMass = 0.0f;
            _motorImpulse = 0.0f;
        }

        if (Settings.EnableWarmstarting)
        {
            // Account for variable time step.
            _impulse *= data.step.dtRatio;
            _springImpulse *= data.step.dtRatio;
            _motorImpulse *= data.step.dtRatio;

            TSVector2 P = _impulse * _ay + _springImpulse * _ax;
            FP LA = _impulse * _sAy + _springImpulse * _sAx + _motorImpulse;
            FP LB = _impulse * _sBy + _springImpulse * _sBx + _motorImpulse;

            vA -= _invMassA * P;
            wA -= _invIA * LA;

            vB += _invMassB * P;
            wB += _invIB * LB;
        }
        else
        {
            _impulse = 0.0f;
            _springImpulse = 0.0f;
            _motorImpulse = 0.0f;
        }

        data.velocities[_indexA].v = vA;
        data.velocities[_indexA].w = wA;
        data.velocities[_indexB].v = vB;
        data.velocities[_indexB].w = wB;
    }

    internal override void SolveVelocityConstraints(ref SolverData data)
    {
        FP mA = _invMassA, mB = _invMassB;
        FP iA = _invIA, iB = _invIB;

        TSVector2 vA = data.velocities[_indexA].v;
        FP wA = data.velocities[_indexA].w;
        TSVector2 vB = data.velocities[_indexB].v;
        FP wB = data.velocities[_indexB].w;

        // Solve spring constraint
        {
            FP Cdot = TSVector2.Dot(_ax, vB - vA) + _sBx * wB - _sAx * wA;
            FP impulse = -_springMass * (Cdot + _bias + _gamma * _springImpulse);
            _springImpulse += impulse;

            TSVector2 P = impulse * _ax;
            FP LA = impulse * _sAx;
            FP LB = impulse * _sBx;

            vA -= mA * P;
            wA -= iA * LA;

            vB += mB * P;
            wB += iB * LB;
        }

        // Solve rotational motor constraint
        {
            FP Cdot = wB - wA - _motorSpeed;
            FP impulse = -_motorMass * Cdot;

            // Clamp the accumulated impulse, not the increment (standard Box2D motor clamping).
            FP oldImpulse = _motorImpulse;
            FP maxImpulse = data.step.dt * _maxMotorTorque;
            _motorImpulse = MathUtils.Clamp(_motorImpulse + impulse, -maxImpulse, maxImpulse);
            impulse = _motorImpulse - oldImpulse;

            wA -= iA * impulse;
            wB += iB * impulse;
        }

        // Solve point to line constraint
        {
            FP Cdot = TSVector2.Dot(_ay, vB - vA) + _sBy * wB - _sAy * wA;
            FP impulse = -_mass * Cdot;
            _impulse += impulse;

            TSVector2 P = impulse * _ay;
            FP LA = impulse * _sAy;
            FP LB = impulse * _sBy;

            vA -= mA * P;
            wA -= iA * LA;

            vB += mB * P;
            wB += iB * LB;
        }

        data.velocities[_indexA].v = vA;
        data.velocities[_indexA].w = wA;
        data.velocities[_indexB].v = vB;
        data.velocities[_indexB].w = wB;
    }

    internal override bool SolvePositionConstraints(ref SolverData data)
    {
        TSVector2 cA = data.positions[_indexA].c;
        FP aA = data.positions[_indexA].a;
        TSVector2 cB = data.positions[_indexB].c;
        FP aB = data.positions[_indexB].a;

        Rot qA = new Rot(aA), qB = new Rot(aB);

        TSVector2 rA = MathUtils.Mul(qA, LocalAnchorA - _localCenterA);
        TSVector2 rB = MathUtils.Mul(qB, LocalAnchorB - _localCenterB);
        TSVector2 d = (cB - cA) + rB - rA;

        TSVector2 ay = MathUtils.Mul(qA, _localYAxis);

        FP sAy = MathUtils.Cross(d + rA, ay);
        FP sBy = MathUtils.Cross(rB, ay);

        FP C = TSVector2.Dot(d, ay);

        // BUGFIX: compute the effective mass from the moment arms (sAy/sBy) derived from the
        // *current* positions above. The original used the stale member fields _sAy/_sBy cached
        // during InitVelocityConstraints, which is inconsistent with the impulse application
        // below; Box2D's b2WheelJoint uses the locals here.
        FP k = _invMassA + _invMassB + _invIA * sAy * sAy + _invIB * sBy * sBy;

        FP impulse;
        if (k != 0.0f)
        {
            impulse = -C / k;
        }
        else
        {
            impulse = 0.0f;
        }

        TSVector2 P = impulse * ay;
        FP LA = impulse * sAy;
        FP LB = impulse * sBy;

        cA -= _invMassA * P;
        aA -= _invIA * LA;
        cB += _invMassB * P;
        aB += _invIB * LB;

        data.positions[_indexA].c = cA;
        data.positions[_indexA].a = aA;
        data.positions[_indexB].c = cB;
        data.positions[_indexB].a = aB;

        return FP.Abs(C) <= Settings.LinearSlop;
    }
}
}
/* * Based on the Source Map library: * https://github.com/mozilla/source-map * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause * * Based on the Base 64 VLQ implementation in Closure Compiler: * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java * * Copyright 2011 The Closure Compiler Authors. All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Mono.Cecil.Cil;

namespace JSIL.Internal
{
    /// <summary>
    /// Accumulates generated-to-original position mappings and serializes them as a
    /// Source Map (revision 3) JSON file with Base64-VLQ-encoded "mappings".
    /// </summary>
    public class SourceMapBuilder
    {
        // Mappings in insertion order; sorted by generated position at Build() time.
        private readonly List<Mapping> _mappings = new List<Mapping>();

        // Records one mapping for a generated position. Only the first sequence point of
        // `info` is used; a null/empty `info` records a mapping with no original source.
        // NOTE(review): parameter name "genaratedLine" is a typo kept for source compatibility
        // (renaming would break callers using named arguments).
        public void AddInfo(int genaratedLine, int generatedColumn, IEnumerable<SequencePoint> info)
        {
            var point = info != null ? info.FirstOrDefault() : null;
            // Generated line is stored 1-based; original column converted to 0-based.
            _mappings.Add(new Mapping(
                genaratedLine + 1,
                generatedColumn,
                point == null ? -1 : point.StartLine,
                point == null ? -1 : point.StartColumn - 1,
                point == null ? null : point.Document.Url,
                null));
        }

        // Writes "<path>/<sourceName>.map". Returns false when no mappings were recorded.
        // The encoder is a delta state machine: every emitted VLQ value is relative to the
        // matching previous* accumulator, per the Source Map v3 spec.
        public bool Build(string path, string sourceName)
        {
            if (!_mappings.Any())
            {
                return false;
            }

            var mappings = _mappings.OrderBy(item => item.GeneratedLine).ThenBy(item => item.GeneratedColumn).ToList();

            var sources = new List<string>();
            var names = new List<string>();

            // Delta-encoding state. Generated column resets per line; the others persist
            // across the whole "mappings" string.
            var previousGeneratedColumn = 0;
            var previousGeneratedLine = 1;
            var previousOriginalColumn = 0;
            var previousOriginalLine = 0;
            var previousName = 0;
            var previousSource = 0;

            StringBuilder result = new StringBuilder();

            for (int i = 0; i < mappings.Count; i++)
            {
                var mapping = mappings[i];

                // First occurrence of a source/name registers it; IndexOf below yields its index.
                if (mapping.Source != null && !sources.Contains(mapping.Source))
                {
                    sources.Add(mapping.Source);
                }

                if (mapping.Name != null && !names.Contains(mapping.Name))
                {
                    names.Add(mapping.Name);
                }

                if (mapping.GeneratedLine != previousGeneratedLine)
                {
                    // New generated line: column delta restarts, and one ';' is emitted
                    // per line advanced (empty lines produce consecutive semicolons).
                    previousGeneratedColumn = 0;
                    while (mapping.GeneratedLine != previousGeneratedLine)
                    {
                        result.Append(';');
                        previousGeneratedLine++;
                    }
                }
                else
                {
                    if (i > 0)
                    {
                        // Skip exact duplicates (Mapping has value equality via operator==).
                        if (mapping == mappings[i - 1])
                        {
                            continue;
                        }
                        result.Append(',');
                    }
                }

                result.Append(Base64Vlq.Encode(mapping.GeneratedColumn - previousGeneratedColumn));
                previousGeneratedColumn = mapping.GeneratedColumn;

                if (mapping.Source != null)
                {
                    var sourceIdx = sources.IndexOf(mapping.Source);
                    result.Append(Base64Vlq.Encode(sourceIdx - previousSource));
                    previousSource = sourceIdx;

                    // lines are stored 0-based in SourceMap spec version 3
                    result.Append(Base64Vlq.Encode(mapping.OriginalLine - 1 - previousOriginalLine));
                    previousOriginalLine = mapping.OriginalLine - 1;

                    result.Append(Base64Vlq.Encode(mapping.OriginalColumn - previousOriginalColumn));
                    previousOriginalColumn = mapping.OriginalColumn;

                    if (mapping.Name != null)
                    {
                        var nameIdx = names.IndexOf(mapping.Name);
                        result.Append(Base64Vlq.Encode(nameIdx - previousName));
                        previousName = nameIdx;
                    }
                }
                //else
                //{
                ////IK: I'm not sure if we should do it?
                //    previousOriginalLine = 0;
                //    previousOriginalColumn = 0;
                //}
            }

            // Hand-built JSON document; field order/format matches the Source Map v3 layout.
            var sourceMap = new StringBuilder();
            sourceMap.AppendLine("{");
            sourceMap.AppendLine("\t\"version\" : 3,");
            sourceMap.AppendLine(string.Format("\t\"file\" : \"{0}\",", sourceName));
            sourceMap.AppendLine(string.Format("\t\"sourceRoot\" : \"{0}\",", new Uri(Path.GetFullPath(path))));
            sourceMap.AppendLine(string.Format("\t\"sources\" : [{0}],", string.Join(", ", sources.Select(item => "\"" + MakeRelativePath(path, item) + "\""))));
            sourceMap.AppendLine(string.Format("\t\"names\" : [{0}],", string.Join(", ", names.Select(item => "\"" + item + "\""))));
            sourceMap.AppendLine(string.Format("\t\"mappings\" : \"{0}\"", result));
            sourceMap.AppendLine("}");

            using (var file = File.Create(GetFullMapPath(path, sourceName)))
            {
                using (var tw = new StreamWriter(file))
                {
                    tw.Write(sourceMap.ToString());
                    tw.Flush();
                }
            }

            return true;
        }

        // Returns `toPath` expressed relative to `fromPath` where possible; falls back to
        // the absolute URI when the schemes differ.
        public static Uri MakeRelativePath(string fromPath, string toPath)
        {
            if (string.IsNullOrEmpty(fromPath)) throw new ArgumentNullException("fromPath");
            if (string.IsNullOrEmpty(toPath)) throw new ArgumentNullException("toPath");

            Uri fromUri = new Uri(Path.GetFullPath(fromPath));
            Uri toUri = new Uri(Path.GetFullPath(toPath));

            if (fromUri.Scheme != toUri.Scheme)
            {
                return toUri; // path can't be made relative.
            }

            return fromUri.MakeRelativeUri(toUri);
        }

        // Immutable value type describing one generated->original position pair.
        // Value equality is used by Build() to drop duplicate consecutive mappings.
        private sealed class Mapping
        {
            public int GeneratedLine { get; private set; }
            public int GeneratedColumn { get; private set; }
            public int OriginalLine { get; private set; }
            public int OriginalColumn { get; private set; }
            public string Source { get; private set; }
            public string Name { get; private set; }

            public Mapping(int generatedLine, int generatedColumn, int originalLine, int originalColumn, string source, string name)
            {
                GeneratedLine = generatedLine;
                GeneratedColumn = generatedColumn;
                OriginalLine = originalLine;
                OriginalColumn = originalColumn;
                Source = source;
                Name = name;
            }

            private bool Equals(Mapping other)
            {
                return GeneratedLine == other.GeneratedLine && GeneratedColumn == other.GeneratedColumn && OriginalLine == other.OriginalLine && OriginalColumn == other.OriginalColumn && string.Equals(Source, other.Source) && string.Equals(Name, other.Name);
            }

            public override bool Equals(object obj)
            {
                if (ReferenceEquals(null, obj)) return false;
                if (ReferenceEquals(this, obj)) return true;
                if (obj.GetType() != this.GetType()) return false;
                return Equals((Mapping) obj);
            }

            public override int GetHashCode()
            {
                unchecked
                {
                    var hashCode = GeneratedLine.GetHashCode();
                    hashCode = (hashCode*397) ^ GeneratedColumn.GetHashCode();
                    hashCode = (hashCode*397) ^ OriginalLine.GetHashCode();
                    hashCode = (hashCode*397) ^ OriginalColumn.GetHashCode();
                    hashCode = (hashCode*397) ^ (Source != null ? Source.GetHashCode() : 0);
                    hashCode = (hashCode*397) ^ (Name != null ? Name.GetHashCode() : 0);
                    return hashCode;
                }
            }

            public static bool operator ==(Mapping left, Mapping right)
            {
                return Equals(left, right);
            }

            public static bool operator !=(Mapping left, Mapping right)
            {
                return !Equals(left, right);
            }
        }

        private static class Base64Vlq
        {
            private const string IntToCharMap = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

            // A single base 64 digit can contain 6 bits of data. For the base 64 variable
            // length quantities we use in the source map spec, the first bit is the sign,
            // the next four bits are the actual value, and the 6th bit is the
            // continuation bit. The continuation bit tells us whether there are more
            // digits in this value following this digit.
            //
            //   Continuation
            //   |    Sign
            //   |    |
            //   V    V
            //   101011

            private const int VLQ_BASE_SHIFT = 5;

            // binary: 100000
            private const int VLQ_BASE = 1 << VLQ_BASE_SHIFT;

            // binary: 011111
            private const int VLQ_BASE_MASK = VLQ_BASE - 1;

            // binary: 100000
            private const int VLQ_CONTINUATION_BIT = VLQ_BASE;

            /// <summary>
            /// Converts from a two-complement value to a value where the sign bit is
            /// placed in the least significant bit.For example, as decimals:
            /// 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
            /// 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
            /// </summary>
            private static uint ToVlqSigned(int aValue)
            {
                return (uint) (aValue < 0 ? ((-aValue) << 1) + 1 : (aValue << 1) + 0);
            }

            // Encodes a signed integer as a little-endian chain of base-64 digits, each
            // carrying 5 value bits plus a continuation bit.
            public static string Encode(int aValue)
            {
                var encoded = "";
                var vlq = ToVlqSigned(aValue);

                do
                {
                    var digit = vlq & VLQ_BASE_MASK;
                    vlq >>= VLQ_BASE_SHIFT;
                    if (vlq > 0)
                    {
                        // There are still more digits in this value, so we must make sure the
                        // continuation bit is marked.
                        digit |= VLQ_CONTINUATION_BIT;
                    }
                    encoded += EncodeDigit((int) digit);
                } while (vlq > 0);

                return encoded;
            }

            private static char EncodeDigit(int number)
            {
                if (number >= 0 && number < IntToCharMap.Length)
                {
                    return IntToCharMap[number];
                }
                throw new ArgumentException("Must be between 0 and 63: " + number);
            }
        }

        // Appends a "//# sourceMappingURL=..." trailer to the generated output.
        // NOTE(review): the StreamWriter is deliberately only flushed, not disposed,
        // presumably so the caller's stream stays open — confirm before changing.
        public void WriteSourceMapLink(Stream outputStream, string path, string sourceName)
        {
            var writer = new StreamWriter(outputStream);
            writer.Write("//# sourceMappingURL=" + new Uri(GetFullMapPath(path, sourceName)));
            writer.Flush();
        }

        // Absolute "<path>/<sourceName>.map" with all spaces stripped.
        // NOTE(review): Replace(" ", "") alters paths containing spaces — verify this
        // matches how the map is written and referenced elsewhere.
        private string GetFullMapPath(string path, string sourceName)
        {
            return Path.GetFullPath(Path.Combine(path, sourceName + ".map")).Replace(" ", "");
        }
    }
}
/* Copyright (c) 2008-2012 Peter Palotas * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/

using System;
using System.Collections.Generic;
using System.Linq;
using AlphaShadow.Options;
using System.Text;
using System.Diagnostics;

namespace AlphaShadow
{
    /// <summary>
    /// <see cref="IUIHost"/> implementation that renders output to the console with
    /// colored, optionally word-wrapped, indented text.
    /// </summary>
    public class ConsoleHost : IUIHost
    {
        // Current indentation in spaces; adjusted in steps of 3 by Push/PopIndent.
        private int m_indent = 0;

        public ConsoleHost()
        {
            IsWordWrapEnabled = true;
        }

        // Header lines are rendered in cyan.
        public void WriteHeader(string message, params object[] args)
        {
            WriteLine(ConsoleColor.Cyan, WordWrap(message, args));
        }

        public void WriteLine(string message, params object[] args)
        {
            WriteLine(Console.ForegroundColor, WordWrap(message, args));
        }

        // NOTE(review): warnings use red, the same color as errors — confirm this is intended.
        public void WriteWarning(string message, params object[] args)
        {
            WriteMessage(ConsoleColor.Red, "Warning:", message, args);
        }

        public void WriteError(string message, params object[] args)
        {
            WriteMessage(ConsoleColor.Red, "Error:", message, args);
        }

        public void WriteVerbose(string message, params object[] args)
        {
            if (VerboseOutputEnabled)
                WriteLine(ConsoleColor.DarkGray, WordWrap(message, args));
        }

        // Renders "<label> <message>" in two columns when wrapping is enabled.
        private void WriteMessage(ConsoleColor color, string label, string message, params object[] args)
        {
            if (IsWordWrapEnabled)
            {
                int col1Width = label.Length;
                int col2Width = Math.Max(1, Console.WindowWidth - col1Width - 2);
                string text = StringFormatter.FormatInColumns(m_indent, 1,
                    new StringFormatter.ColumnInfo(col1Width, label),
                    new StringFormatter.ColumnInfo(col2Width, String.Format(message, args)));
                WriteLine(color, text);
            }
            else
            {
                WriteLine(color, label + " " + String.Format(message, args));
            }
        }

        // Writes a line in the given color, restoring the previous foreground color afterwards.
        private void WriteLine(ConsoleColor color, string message)
        {
            ConsoleColor temp = Console.ForegroundColor;
            Console.ForegroundColor = color;
            Console.WriteLine(message);
            Console.ForegroundColor = temp;
        }

        // Formats the message, wraps it to the console width (minus indent) and prefixes
        // each resulting line with the current indentation.
        private string WordWrap(string message, params object[] args)
        {
            if (IsWordWrapEnabled)
            {
                StringBuilder sb = new StringBuilder();
                string wrappedString = StringFormatter.WordWrap(String.Format(message, args),
                    Console.WindowWidth - 5 - m_indent, StringFormatter.WordWrappingMethod.Greedy);
                IList<string> splitString = StringFormatter.SplitAtLineBreaks(wrappedString);
                for (int i = 0; i < splitString.Count; i++)
                {
                    if (i != 0)
                        sb.AppendLine();
                    sb.Append(String.Format("{0}{1}", new String(' ', m_indent), splitString[i]));
                }
                return sb.ToString();
            }
            else
            {
                return new String(' ', m_indent) + String.Format(message, args);
            }
        }

        public void WriteLine()
        {
            Console.WriteLine();
        }

        public bool VerboseOutputEnabled { get; set; }

        public bool IsWordWrapEnabled { get; set; }

        /// <summary>
        /// Writes a two-column label/value table, wrapping values to the console width
        /// when word-wrap is enabled.
        /// </summary>
        public void WriteTable(StringTable table, int columnSpacing = 3, bool addVerticalSeparation = false)
        {
            if (table == null)
                throw new ArgumentNullException("table", "table is null.");

            if (IsWordWrapEnabled)
            {
                int indent = m_indent;
                // Drop the indent entirely if it would leave no room for the table.
                if (indent >= Console.WindowWidth - columnSpacing - 2)
                    indent = 0;

                int maxWidth = Console.WindowWidth - indent;
                // Label column is capped at half the available width.
                int col1Width = Math.Min(table.Labels.Max(text => text.Length), maxWidth / 2);
                int colSpacing = columnSpacing;
                int col2Width = maxWidth - col1Width - colSpacing - 1;

                for (int i = 0; i < table.Count; i++)
                {
                    if (i > 0 && addVerticalSeparation)
                        Console.WriteLine();

                    Console.WriteLine(
                        StringFormatter.FormatInColumns(indent, colSpacing,
                            new StringFormatter.ColumnInfo(col1Width, table.Labels[i]),
                            new StringFormatter.ColumnInfo(col2Width, table.Values[i])));
                }
            }
            else
            {
                for (int i = 0; i < table.Count; i++)
                {
                    Console.WriteLine("{0}{1}{2}{3}", new String(' ', m_indent), table.Labels[i], new String(' ', columnSpacing), table.Values[i]);
                }
            }
        }

        public void PushIndent()
        {
            m_indent += 3;
        }

        public void PopIndent()
        {
            m_indent -= 3;
            if (m_indent < 0)
                m_indent = 0;
        }

        // Returns a disposable scope that pushes an indent level and pops it on Dispose.
        public IDisposable GetIndent()
        {
            return new Indenter(this);
        }

        private class Indenter : IDisposable
        {
            IUIHost m_host;

            public Indenter(IUIHost host)
            {
                m_host = host;
                m_host.PushIndent();
            }

            public void Dispose()
            {
                m_host.PopIndent();
            }
        }

        /// <summary>
        /// Runs an external command, waits for it to finish, and throws
        /// <see cref="CommandAbortedException"/> on a non-zero exit code.
        /// </summary>
        public void ExecCommand(string execCommand, string args)
        {
            WriteLine("- Executing command '{0}' ...", execCommand);
            WriteLine("-----------------------------------------------------");
            ProcessStartInfo ps = new ProcessStartInfo(execCommand, args);
            ps.CreateNoWindow = false;
            ps.UseShellExecute = false;
            // FIX: dispose the Process handle (was leaked in the original).
            using (Process p = Process.Start(ps))
            {
                p.WaitForExit();
                WriteLine("-----------------------------------------------------");
                if (p.ExitCode != 0)
                {
                    WriteError("Command line '{0}' failed!. Aborting the backup...", execCommand);
                    WriteError("Returned error code: {0}", p.ExitCode);
                    throw new CommandAbortedException();
                }
            }
        }

        /// <summary>
        /// Prompts the user and returns true for a "y"/"yes" answer (case-insensitive).
        /// </summary>
        public bool ShouldContinue()
        {
            WriteHeader("Continue? [Y/N]");
            string response = Console.ReadLine();
            // FIX: ReadLine() returns null on EOF/redirected input; treat that as "no"
            // instead of throwing a NullReferenceException.
            return response != null &&
                   (response.Equals("y", StringComparison.OrdinalIgnoreCase) ||
                    response.Equals("yes", StringComparison.OrdinalIgnoreCase));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using Xunit;
using Xunit.NetCore.Extensions;

namespace System.IO.Compression.Tests
{
    /// <summary>
    /// Tests for the ZipFile convenience methods (CreateFromDirectory /
    /// ExtractToDirectory) and for the ZipFileExtensions extension methods
    /// (CreateEntryFromFile, ExtractToFile, ExtractToDirectory).
    /// Fixture folders/files are resolved through the zfolder/zfile/zmodified
    /// helpers inherited from ZipFileTestBase.
    /// </summary>
    public class ZipFileTest_ConvenienceMethods : ZipFileTestBase
    {
        [Fact]
        public async Task CreateFromDirectoryNormal()
        {
            await TestCreateDirectory(zfolder("normal"), true);
        }

        [Fact]
        [Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Jenkins fails with unicode characters [JENKINS-12610]
        public async Task CreateFromDirectoryUnicodel()
        {
            await TestCreateDirectory(zfolder("unicode"), true);
        }

        // Zips folderName without the base directory and verifies the archive
        // against the source folder; when testWithBaseDir is true, also zips it
        // WITH the base directory included and checks the two archives agree.
        private async Task TestCreateDirectory(string folderName, bool testWithBaseDir)
        {
            string noBaseDir = GetTestFilePath();
            ZipFile.CreateFromDirectory(folderName, noBaseDir);

            await IsZipSameAsDirAsync(noBaseDir, folderName, ZipArchiveMode.Read, true, true);

            if (testWithBaseDir)
            {
                string withBaseDir = GetTestFilePath();
                ZipFile.CreateFromDirectory(folderName, withBaseDir, CompressionLevel.Optimal, true);
                SameExceptForBaseDir(noBaseDir, withBaseDir, folderName);
            }
        }

        // Asserts that zipBaseDir matches zipNoBaseDir entry-for-entry, except
        // that every entry name in zipBaseDir carries the base directory prefix.
        private static void SameExceptForBaseDir(string zipNoBaseDir, string zipBaseDir, string baseDir)
        {
            //b has the base dir
            using (ZipArchive a = ZipFile.Open(zipNoBaseDir, ZipArchiveMode.Read),
                              b = ZipFile.Open(zipBaseDir, ZipArchiveMode.Read))
            {
                var aCount = a.Entries.Count;
                var bCount = b.Entries.Count;
                Assert.Equal(aCount, bCount);

                int bIdx = 0;
                foreach (ZipArchiveEntry aEntry in a.Entries)
                {
                    // Entries are compared positionally; zip entry names always use '/'.
                    ZipArchiveEntry bEntry = b.Entries[bIdx++];
                    Assert.Equal(Path.GetFileName(baseDir) + "/" + aEntry.FullName, bEntry.FullName);
                    Assert.Equal(aEntry.Name, bEntry.Name);
                    Assert.Equal(aEntry.Length, bEntry.Length);
                    Assert.Equal(aEntry.CompressedLength, bEntry.CompressedLength);
                    using (Stream aStream = aEntry.Open(), bStream = bEntry.Open())
                    {
                        StreamsEqual(aStream, bStream);
                    }
                }
            }
        }

        [Fact]
        public void ExtractToDirectoryNormal()
        {
            TestExtract(zfile("normal.zip"), zfolder("normal"));
            TestExtract(zfile("empty.zip"), zfolder("empty"));
            TestExtract(zfile("explicitdir1.zip"), zfolder("explicitdir"));
            TestExtract(zfile("explicitdir2.zip"), zfolder("explicitdir"));
            TestExtract(zfile("appended.zip"), zfolder("small"));
            TestExtract(zfile("prepended.zip"), zfolder("small"));
            TestExtract(zfile("noexplicitdir.zip"), zfolder("explicitdir"));
        }

        [Fact]
        [Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Jenkins fails with unicode characters [JENKINS-12610]
        public void ExtractToDirectoryUnicode()
        {
            TestExtract(zfile("unicode.zip"), zfolder("unicode"));
        }

        // Extracts zipFileName into a fresh temp directory and verifies the
        // result matches folderName; also pins the null-argument contract.
        private void TestExtract(string zipFileName, string folderName)
        {
            using (var tempFolder = new TempDirectory(GetTestFilePath()))
            {
                ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path);
                DirsEqual(tempFolder.Path, folderName);

                Assert.Throws<ArgumentNullException>(() => ZipFile.ExtractToDirectory(null, tempFolder.Path));
            }
        }

        #region "Extension Methods"

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public async Task CreateEntryFromFileTest(bool withCompressionLevel)
        {
            //add file
            using (TempFile testArchive = CreateTempCopyFile(zfile("normal.zip"), GetTestFilePath()))
            {
                using (ZipArchive archive = ZipFile.Open(testArchive.Path, ZipArchiveMode.Update))
                {
                    string entryName = "added.txt";
                    string sourceFilePath = zmodified(Path.Combine("addFile", entryName));

                    Assert.Throws<ArgumentNullException>(() => ((ZipArchive)null).CreateEntryFromFile(sourceFilePath, entryName));
                    Assert.Throws<ArgumentNullException>(() => archive.CreateEntryFromFile(null, entryName));
                    Assert.Throws<ArgumentNullException>(() => archive.CreateEntryFromFile(sourceFilePath, null));

                    // BUGFIX: the ternary arms were swapped - when withCompressionLevel
                    // is true the CompressionLevel overload must be the one exercised.
                    ZipArchiveEntry e = withCompressionLevel ?
                        archive.CreateEntryFromFile(sourceFilePath, entryName, CompressionLevel.Fastest) :
                        archive.CreateEntryFromFile(sourceFilePath, entryName);
                    Assert.NotNull(e);
                }
                await IsZipSameAsDirAsync(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read, true, true);
            }
        }

        [Fact]
        public void ExtractToFileTest()
        {
            using (ZipArchive archive = ZipFile.Open(zfile("normal.zip"), ZipArchiveMode.Read))
            {
                string file = GetTestFilePath();
                ZipArchiveEntry e = archive.GetEntry("first.txt");

                Assert.Throws<ArgumentNullException>(() => ((ZipArchiveEntry)null).ExtractToFile(file));
                Assert.Throws<ArgumentNullException>(() => e.ExtractToFile(null));

                //extract when there is nothing there
                e.ExtractToFile(file);

                using (Stream fs = File.Open(file, FileMode.Open), es = e.Open())
                {
                    StreamsEqual(fs, es);
                }

                // The destination now exists, so extraction without overwrite must fail.
                Assert.Throws<IOException>(() => e.ExtractToFile(file, false));

                //truncate file
                using (Stream fs = File.Open(file, FileMode.Truncate)) { }

                //now use overwrite mode
                e.ExtractToFile(file, true);

                using (Stream fs = File.Open(file, FileMode.Open), es = e.Open())
                {
                    StreamsEqual(fs, es);
                }
            }
        }

        [Fact]
        public void ExtractToDirectoryTest()
        {
            using (ZipArchive archive = ZipFile.Open(zfile("normal.zip"), ZipArchiveMode.Read))
            {
                string tempFolder = GetTestFilePath();
                Assert.Throws<ArgumentNullException>(() => ((ZipArchive)null).ExtractToDirectory(tempFolder));
                Assert.Throws<ArgumentNullException>(() => archive.ExtractToDirectory(null));
                archive.ExtractToDirectory(tempFolder);

                DirsEqual(tempFolder, zfolder("normal"));
            }
        }

        [Fact]
        [Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Jenkins fails with unicode characters [JENKINS-12610]
        public void ExtractToDirectoryTest_Unicode()
        {
            using (ZipArchive archive = ZipFile.OpenRead(zfile("unicode.zip")))
            {
                string tempFolder = GetTestFilePath();
                archive.ExtractToDirectory(tempFolder);

                DirsEqual(tempFolder, zfolder("unicode"));
            }
        }

        [Fact]
        public void CreatedEmptyDirectoriesRoundtrip()
        {
            using (var tempFolder = new TempDirectory(GetTestFilePath()))
            {
                DirectoryInfo rootDir = new DirectoryInfo(tempFolder.Path);
                rootDir.CreateSubdirectory("empty1");

                string archivePath = GetTestFilePath();
                ZipFile.CreateFromDirectory(
                    rootDir.FullName, archivePath,
                    CompressionLevel.Optimal, false, Encoding.UTF8);

                using (ZipArchive archive = ZipFile.OpenRead(archivePath))
                {
                    // The empty subdirectory must survive as an explicit entry.
                    Assert.Equal(1, archive.Entries.Count);
                    Assert.True(archive.Entries[0].FullName.StartsWith("empty1"));
                }
            }
        }

        [Fact]
        public void CreatedEmptyRootDirectoryRoundtrips()
        {
            using (var tempFolder = new TempDirectory(GetTestFilePath()))
            {
                DirectoryInfo emptyRoot = new DirectoryInfo(tempFolder.Path);

                string archivePath = GetTestFilePath();
                ZipFile.CreateFromDirectory(
                    emptyRoot.FullName, archivePath,
                    CompressionLevel.Optimal, true);

                using (ZipArchive archive = ZipFile.OpenRead(archivePath))
                {
                    // includeBaseDirectory=true: the empty root itself becomes one entry.
                    Assert.Equal(1, archive.Entries.Count);
                }
            }
        }

        #endregion
    }
}
// Copyright (c) Charlie Poole, Rob Prouse and Contributors. MIT License - see LICENSE.txt

using System;
using System.IO;
using System.Text;
using System.Reflection;
using System.Collections.Generic;
using System.Runtime.InteropServices;

namespace NUnit.Engine.Internal
{
    /// <summary>
    /// Static methods for manipulating project paths, including both directories
    /// and files. Some synonyms for System.Path methods are included as well.
    /// </summary>
    public class PathUtils
    {
        public const uint FILE_ATTRIBUTE_DIRECTORY = 0x00000010;
        public const uint FILE_ATTRIBUTE_NORMAL = 0x00000080;
        public const int MAX_PATH = 256;

        // Overridable in tests by derived classes (hence protected, not const).
        protected static char DirectorySeparatorChar = Path.DirectorySeparatorChar;
        protected static char AltDirectorySeparatorChar = Path.AltDirectorySeparatorChar;

        /// <summary>
        /// Returns a boolean indicating whether the specified path
        /// is that of an assembly - that is a dll or exe file.
        /// </summary>
        /// <param name="path">Path to a file.</param>
        /// <returns>True if the file extension is dll or exe, otherwise false.</returns>
        public static bool IsAssemblyFileType(string path)
        {
            // Ordinal, case-insensitive extension check; avoids the culture-sensitive
            // ToLower() comparison (e.g. the Turkish-I problem).
            string extension = Path.GetExtension(path);
            return extension.Equals(".dll", StringComparison.OrdinalIgnoreCase)
                || extension.Equals(".exe", StringComparison.OrdinalIgnoreCase);
        }

        /// <summary>
        /// Returns the relative path from a base directory to another
        /// directory or file.
        /// </summary>
        /// <param name="from">The base directory.</param>
        /// <param name="to">The target directory or file.</param>
        /// <returns>
        /// The relative path, <paramref name="to"/> itself if it has no root,
        /// or null if the two paths have different roots.
        /// </returns>
        /// <exception cref="ArgumentNullException">Either argument is null.</exception>
        public static string RelativePath( string from, string to )
        {
            // BUGFIX: previously passed the (possibly null) VALUE as the
            // ArgumentNullException paramName; use nameof instead.
            if (from == null)
                throw new ArgumentNullException(nameof(from));
            if (to == null)
                throw new ArgumentNullException(nameof(to));

            string toPathRoot = Path.GetPathRoot(to);
            if (toPathRoot == null || toPathRoot == string.Empty)
                return to;
            string fromPathRoot = Path.GetPathRoot(from);

            // Paths on different roots (drives/shares) have no relative form.
            if (!PathsEqual(toPathRoot, fromPathRoot))
                return null;

            string fromNoRoot = from.Substring(fromPathRoot.Length);
            string toNoRoot = to.Substring(toPathRoot.Length);

            string[] _from = SplitPath(fromNoRoot);
            string[] _to = SplitPath(toNoRoot);

            StringBuilder sb = new StringBuilder (Math.Max (from.Length, to.Length));

            // Find the last path component the two paths share.
            int last_common, min = Math.Min (_from.Length, _to.Length);
            for (last_common = 0; last_common < min; ++last_common)
            {
                if (!PathsEqual(_from[last_common], _to[last_common]))
                    break;
            }

            // One ".." for every remaining component of 'from' ...
            if (last_common < _from.Length)
                sb.Append ("..");
            for (int i = last_common + 1; i < _from.Length; ++i)
            {
                sb.Append (PathUtils.DirectorySeparatorChar).Append ("..");
            }

            // ... then the remaining components of 'to'.
            if (sb.Length > 0)
                sb.Append (PathUtils.DirectorySeparatorChar);
            if (last_common < _to.Length)
                sb.Append (_to [last_common]);
            for (int i = last_common + 1; i < _to.Length; ++i)
            {
                sb.Append (PathUtils.DirectorySeparatorChar).Append (_to [i]);
            }

            return sb.ToString ();
        }

        /// <summary>
        /// Return the canonical form of a path: "." components removed,
        /// ".." components resolved and a trailing separator dropped.
        /// </summary>
        public static string Canonicalize( string path )
        {
            List<string> parts = new List<string>(
                path.Split( DirectorySeparatorChar, AltDirectorySeparatorChar ) );

            for( int index = 0; index < parts.Count; )
            {
                string part = parts[index];

                switch( part )
                {
                    case ".":
                        parts.RemoveAt( index );
                        break;

                    case "..":
                        parts.RemoveAt( index );
                        if ( index > 0 )
                            parts.RemoveAt( --index );
                        break;
                    default:
                        index++;
                        break;
                }
            }

            // Trailing separator removal
            if (parts.Count > 1 && path.Length > 1 && parts[parts.Count - 1] == "")
                parts.RemoveAt(parts.Count - 1);

            return String.Join(DirectorySeparatorChar.ToString(), parts.ToArray());
        }

        /// <summary>
        /// True if the two paths are the same or if the second is
        /// directly or indirectly under the first. Note that paths
        /// using different network shares or drive letters are
        /// considered unrelated, even if they end up referencing
        /// the same subtrees in the file system.
        /// </summary>
        public static bool SamePathOrUnder( string path1, string path2 )
        {
            path1 = Canonicalize( path1 );
            path2 = Canonicalize( path2 );

            int length1 = path1.Length;
            int length2 = path2.Length;

            // if path1 is longer, then path2 can't be under it
            if ( length1 > length2 )
                return false;

            // Case-insensitive only on Windows; ordinal (not culture) comparison
            // replaces the old culture-sensitive string.Compare(..., bool) overload.
            StringComparison comparison = IsWindows()
                ? StringComparison.OrdinalIgnoreCase
                : StringComparison.Ordinal;

            // if lengths are the same, check for equality
            if ( length1 == length2 )
                return string.Equals( path1, path2, comparison );

            // path 2 is longer than path 1: see if initial parts match
            if ( !string.Equals( path1, path2.Substring( 0, length1 ), comparison ) )
                return false;

            // must match through or up to a directory separator boundary
            return path2[length1-1] == DirectorySeparatorChar
                || path2[length1] == DirectorySeparatorChar;
        }

        /// <summary>
        /// Combines all the arguments into a single path
        /// </summary>
        public static string Combine(string path1, params string[] morePaths)
        {
            string result = path1;
            foreach (string path in morePaths)
                result = Path.Combine(result, path);
            return result;
        }

        /// <summary>
        /// Returns a value that indicates whether the specified file path is absolute or not on Windows operating systems.
        /// </summary>
        /// <param name="path">Path to check</param>
        /// <returns><see langword="true"/> if <paramref name="path"/> is an absolute or UNC path; otherwhise, false.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="path"/> is <see langword="null"/>.</exception>
        public static bool IsFullyQualifiedWindowsPath(string path)
        {
            if (path == null)
            {
                throw new ArgumentNullException(nameof(path));
            }

            if (path.Length > 2)
            {
                // Either "X:\..." / "X:/..." or a UNC path "\\server\share".
                return (IsValidDriveSpecifier(path[0]) && path[1] == ':' && IsWindowsDirectorySeparator(path[2]))
                    || (IsWindowsDirectorySeparator(path[0]) && IsWindowsDirectorySeparator(path[1]));
            }
            else
            {
                return false;
            }
        }

        /// <summary>
        /// Returns a value that indicates whether the specified file path is absolute or not on Linux/macOS/Unix.
        /// </summary>
        /// <param name="path">Path to check</param>
        /// <returns><see langword="true"/> if <paramref name="path"/> is an absolute path; otherwhise, false.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="path"/></exception>
        public static bool IsFullyQualifiedUnixPath(string path)
        {
            if (path == null)
            {
                throw new ArgumentNullException(nameof(path));
            }

            return path.Length > 0 && path[0] == '/';
        }

        private static bool IsWindowsDirectorySeparator(char c)
        {
            return c == '\\' || c == '/';
        }

        private static bool IsValidDriveSpecifier(char c)
        {
            return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
        }

        private static bool IsWindows()
        {
            return PathUtils.DirectorySeparatorChar == '\\';
        }

        // Splits a path on both separator chars, dropping empty entries.
        private static string[] SplitPath(string path)
        {
            char[] separators = new char[] { PathUtils.DirectorySeparatorChar, PathUtils.AltDirectorySeparatorChar };

            string[] trialSplit = path.Split(separators);

            int emptyEntries = 0;
            foreach (string piece in trialSplit)
                if (piece == string.Empty)
                    emptyEntries++;

            if (emptyEntries == 0)
                return trialSplit;

            string[] finalSplit = new string[trialSplit.Length - emptyEntries];
            int index = 0;
            foreach (string piece in trialSplit)
                if (piece != string.Empty)
                    finalSplit[index++] = piece;

            return finalSplit;
        }

        // Compares path components: case-insensitive on Windows, exact elsewhere.
        // Uses ordinal comparison instead of the old culture-sensitive ToLower().
        private static bool PathsEqual(string path1, string path2)
        {
            if (PathUtils.IsWindows())
                return path1.Equals(path2, StringComparison.OrdinalIgnoreCase);
            else
                return path1.Equals(path2);
        }
    }
}
// Copyright (c) Aspose 2002-2014. All Rights Reserved.

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.IO;
using Aspose.CreateProjectWizard.Library;

namespace Aspose.CreateProjectWizard
{
    /// <summary>
    /// Wizard shell form. Hosts an ordered list of InstallerControl pages in
    /// contentPanel and drives navigation between them with the Prev/Next/Cancel
    /// buttons. Visual members (titlePanel, logoPicture, contentPanel, buttons)
    /// are declared in the designer half of this partial class.
    /// </summary>
    public partial class InstallerForm : Form
    {
        // Ordered wizard pages; populated by callers through ContentControls.
        private readonly InstallerControlList contentControls;
        // Page currently shown in contentPanel (null until first Load).
        private InstallerControl currentContentControl;
        // Index into contentControls of the page currently shown.
        private int currentContentControlIndex = 0;

        public InstallerForm()
        {
            this.contentControls = new InstallerControlList();
            // (A dead, empty try/catch(FileNotFoundException) block was removed here.)
            InitializeComponent();
            this.Load += new EventHandler(InstallerForm_Load);
        }

        #region Event Handlers

        // Applies the configured banner/logo images ("Default" selects the
        // embedded resource) and shows the first wizard page.
        private void InstallerForm_Load(object sender, EventArgs e)
        {
            string bannerImageFile = ApplicationSettings.BannerImage;
            if (!String.IsNullOrEmpty(bannerImageFile))
            {
                this.titlePanel.BackgroundImageLayout = System.Windows.Forms.ImageLayout.Stretch;
                if (bannerImageFile != "Default")
                {
                    this.titlePanel.BackgroundImage = LoadImage(bannerImageFile);
                }
                else
                {
                    this.titlePanel.BackgroundImage = global::Aspose.CreateProjectWizard.Properties.Resources.Banner;
                }
            }

            string logoImageFile = ApplicationSettings.LogoImage;
            if (!String.IsNullOrEmpty(logoImageFile))
            {
                if (logoImageFile != "Default")
                {
                    this.logoPicture.BackgroundImage = LoadImage(logoImageFile);
                }
            }

            ReplaceContentControl(0);
        }

        // Records the products checked on the product-selection page into
        // GlobalData, then advances to the next wizard page.
        private void nextButton_Click(object sender, EventArgs e)
        {
            GlobalData.SelectedProductsList.Clear();

            // NOTE(review): assumes a control named "productsCheckedListBox"
            // always exists somewhere on the form - confirm against the designer.
            CheckedListBox productsCheckedListBox = (CheckedListBox)this.Controls.Find("productsCheckedListBox", true)[0];
            if (productsCheckedListBox.CheckedItems.Count > 0)
            {
                foreach (object itemChecked in productsCheckedListBox.CheckedItems)
                {
                    if (!GlobalData.SelectedProductsList.Contains(itemChecked.ToString()))
                        GlobalData.SelectedProductsList.Add(itemChecked.ToString());
                }
            }

            currentContentControlIndex++;
            ReplaceContentControl(currentContentControlIndex);
        }

        // Steps back one page and cancels/disposes any background work the
        // abandoned page may have started.
        private void prevButton_Click(object sender, EventArgs e)
        {
            currentContentControlIndex--;
            ReplaceContentControl(currentContentControlIndex);

            if (GlobalData.backgroundWorker != null)
            {
                if (GlobalData.backgroundWorker.IsBusy == true)
                {
                    // BUGFIX: CancelAsync() was called twice in a row; once suffices.
                    GlobalData.backgroundWorker.CancelAsync();
                    GlobalData.backgroundWorker.Dispose();
                    GlobalData.backgroundWorker = null;
                    // NOTE(review): forcing a collection is an anti-pattern;
                    // kept to preserve existing behavior.
                    GC.Collect();
                }
            }
        }

        private void cancelButton_Click(object sender, EventArgs e)
        {
            // NOTE(review): Dispose() rather than Close() skips FormClosing;
            // kept as-is to preserve existing behavior.
            this.Dispose();
        }

        // Best-effort cleanup of any running background worker on close.
        private void InstallerForm_FormClosing(object sender, FormClosingEventArgs e)
        {
            try
            {
                if (GlobalData.backgroundWorker != null)
                {
                    if (GlobalData.backgroundWorker.IsBusy == true)
                    {
                        GlobalData.backgroundWorker.CancelAsync();
                        GlobalData.backgroundWorker.Dispose();
                        GlobalData.backgroundWorker = null;
                        GC.Collect();
                    }
                }
            }
            catch (Exception)
            {
                // Deliberately swallowed: closing must never fail on cleanup.
            }
        }

        #endregion

        #region Public Properties

        /// <summary>The ordered list of wizard pages; callers add pages here.</summary>
        public InstallerControlList ContentControls
        {
            get { return contentControls; }
        }

        /// <summary>The Cancel button, exposed so pages can enable/disable it.</summary>
        public Button AbortButton
        {
            get { return cancelButton; }
        }

        /// <summary>The Previous button, exposed so pages can enable/disable it.</summary>
        public Button PrevButton
        {
            get { return prevButton; }
        }

        /// <summary>The Next button, exposed so pages can enable/disable it.</summary>
        public Button NextButton
        {
            get { return nextButton; }
        }

        #endregion

        #region Public Methods

        /// <summary>Sets the large title text in the banner area.</summary>
        public void SetTitle(string title)
        {
            titleLabel.Text = title;
        }

        /// <summary>Sets the subtitle text in the banner area.</summary>
        public void SetSubTitle(string title)
        {
            subTitleLabel.Text = title;
        }

        #endregion

        #region Private Methods

        // Swaps the page at 'index' into contentPanel, updates the banner text
        // and enables/disables Prev/Next according to the page's position.
        private void ReplaceContentControl(int index)
        {
            // (A dead, empty "if (currentContentControl != null) { }" was removed here.)
            if (index == 0)
            {
                prevButton.Enabled = false;
                nextButton.Enabled = true;
            }
            else if (index == (contentControls.Count - 1))
            {
                prevButton.Enabled = true;
                nextButton.Enabled = false;
            }
            else
            {
                prevButton.Enabled = true;
                nextButton.Enabled = true;
            }

            InstallerControl newContentControl = contentControls[index];
            newContentControl.Dock = DockStyle.Fill;

            titleLabel.Text = newContentControl.Title;
            subTitleLabel.Text = newContentControl.SubTitle;

            contentPanel.Controls.Clear();
            contentPanel.Controls.Add(newContentControl);
            newContentControl.Open();

            currentContentControl = newContentControl;
        }

        // Loads an image from disk; returns null if the file cannot be read.
        private Image LoadImage(string filename)
        {
            try
            {
                return Image.FromFile(filename);
            }
            catch (IOException)
            {
                return null;
            }
        }

        #endregion

        // Banner/logo clicks all open the Aspose home page.
        private void logoPicture_Click(object sender, EventArgs e)
        {
            AsposeManager.RunAsposeHomePage();
        }

        private void titlePanel_Click(object sender, EventArgs e)
        {
            AsposeManager.RunAsposeHomePage();
        }

        private void subTitleLabel_Click(object sender, EventArgs e)
        {
            AsposeManager.RunAsposeHomePage();
        }

        private void titleLabel_Click(object sender, EventArgs e)
        {
            AsposeManager.RunAsposeHomePage();
        }
    }
}
// (c) Copyright Microsoft Corporation. // This source is subject to the Microsoft Permissive License. // See http://www.microsoft.com/opensource/licenses.mspx#Ms-PL. // All other rights reserved. using System; using System.Collections.Generic; using System.Text; using System.Windows.Forms; using Drawing = System.Drawing; using System.Drawing.Design; using System.Windows; using System.Windows.Automation; using System.Diagnostics; using System.ComponentModel; using System.Collections; namespace VisualUIAVerify.Features { class AutomationElementPropertyObject { const string constGeneralAccessibilityCategory = "General Accessibility"; const string constStateCategory = "State"; const string constIdentificationCategory = "Identification"; const string constVisibilityCategory = "Visibility"; const string constPatternsCategory = "Patterns"; const string constInvokeMethod = "(invoke method)"; const string constMessageCaption = "Message"; public readonly AutomationElement AutomationElement; DockPatternPropertyObject _dockPatternPropertyObject; ExpandCollapsePatternPropertyObject _expandCollapsePatternPropertyObject; GridPatternPropertyObject _gridPatternPropertyObject; InvokePatternPropertyObject _invokePatternPropertyObject; MultipleViewPatternPropertyObject _multipleViewPatternPropertyObject; RangeValuePatternPropertyObject _rangeValuePatternPropertyObject; ScrollItemPatternPropertyObject _scrollItemPatternPropertyObject; ScrollPatternPropertyObject _scrollPatternPropertyObject; SelectionItemPatternPropertyObject _selectionItemPatternPropertyObject; SelectionPatternPropertyObject _selectionPatternPropertyObject; TableItemPatternPropertyObject _tableItemPatternPropertyObject; TablePatternPropertyObject _tablePatternPropertyObject; TextPatternPropertyObject _textPatternPropertyObject; TogglePatternPropertyObject _togglePatternPropertyObject; TransformPatternPropertyObject _transformPatternPropertyObject; ValuePatternPropertyObject _valuePatternPropertyObject; 
WindowPatternPropertyObject _windowPatternPropertyObject; /// <summary> /// Initializes all pattern objects within the property grid with the AutomationElement patterns supported /// </summary> public AutomationElementPropertyObject(AutomationElement automationElement) { this.AutomationElement = automationElement; foreach (AutomationPattern patternId in automationElement.GetSupportedPatterns()) { object pattern = null; if (false == automationElement.TryGetCurrentPattern(patternId, out pattern)) continue; if (pattern is DockPattern) this._dockPatternPropertyObject = new DockPatternPropertyObject((DockPattern)pattern); else if (pattern is ExpandCollapsePattern) this._expandCollapsePatternPropertyObject = new ExpandCollapsePatternPropertyObject((ExpandCollapsePattern)pattern); else if (pattern is GridPattern) this._gridPatternPropertyObject = new GridPatternPropertyObject((GridPattern)pattern); else if (pattern is InvokePattern) this._invokePatternPropertyObject = new InvokePatternPropertyObject((InvokePattern)pattern); else if (pattern is MultipleViewPattern) this._multipleViewPatternPropertyObject = new MultipleViewPatternPropertyObject((MultipleViewPattern)pattern); else if (pattern is RangeValuePattern) this._rangeValuePatternPropertyObject = new RangeValuePatternPropertyObject((RangeValuePattern)pattern); else if (pattern is ScrollItemPattern) this._scrollItemPatternPropertyObject = new ScrollItemPatternPropertyObject((ScrollItemPattern)pattern); else if (pattern is ScrollPattern) this._scrollPatternPropertyObject = new ScrollPatternPropertyObject((ScrollPattern)pattern); else if (pattern is SelectionItemPattern) this._selectionItemPatternPropertyObject = new SelectionItemPatternPropertyObject((SelectionItemPattern)pattern); else if (pattern is SelectionPattern) this._selectionPatternPropertyObject = new SelectionPatternPropertyObject((SelectionPattern)pattern); else if (pattern is TableItemPattern) this._tableItemPatternPropertyObject = new 
TableItemPatternPropertyObject((TableItemPattern)pattern); else if (pattern is TablePattern) this._tablePatternPropertyObject = new TablePatternPropertyObject((TablePattern)pattern); else if (pattern is TextPattern) this._textPatternPropertyObject = new TextPatternPropertyObject((TextPattern)pattern); else if (pattern is TogglePattern) this._togglePatternPropertyObject = new TogglePatternPropertyObject((TogglePattern)pattern); else if (pattern is TransformPattern) this._transformPatternPropertyObject = new TransformPatternPropertyObject((TransformPattern)pattern); else if (pattern is ValuePattern) this._valuePatternPropertyObject = new ValuePatternPropertyObject((ValuePattern)pattern); else if (pattern is WindowPattern) this._windowPatternPropertyObject = new WindowPatternPropertyObject((WindowPattern)pattern); } } #region general properties [Category(constGeneralAccessibilityCategory)] public string AccessKey { get { return this.AutomationElement.Current.AccessKey; } } [Category(constGeneralAccessibilityCategory)] public string AcceleratorKey { get { return this.AutomationElement.Current.AcceleratorKey; } } [Category(constGeneralAccessibilityCategory)] public bool IsKeyboardFocusable { get { return this.AutomationElement.Current.IsKeyboardFocusable; } } [Category(constGeneralAccessibilityCategory)] public string LabeledBy { get { AutomationElement labeledBy = this.AutomationElement.Current.LabeledBy; if (labeledBy != null) return Controls.TreeHelper.GetAutomationElementTreeNodeText(labeledBy); return null; } } [Category(constGeneralAccessibilityCategory)] public string HelpText { get { return this.AutomationElement.Current.HelpText; } } [Category(constStateCategory)] public bool IsEnabled { get { return this.AutomationElement.Current.IsEnabled; } } [Category(constStateCategory)] public bool HasKeyboardFocus { get { return this.AutomationElement.Current.HasKeyboardFocus; } } [Category(constIdentificationCategory)] public string ClassName { get { return 
this.AutomationElement.Current.ClassName; } } [Category(constIdentificationCategory)] public string ControlType { get { return this.AutomationElement.Current.ControlType.ProgrammaticName; } } [Category(constIdentificationCategory)] public string AutomationId { get { return this.AutomationElement.Current.AutomationId; } } //Adds the MainWindow Handle as Hex to the properties window [Category(constIdentificationCategory)] public string hWnd { get { return "0x" + this.AutomationElement.Current.NativeWindowHandle.ToString("X"); } } [Category(constIdentificationCategory)] public string LocalizedControlType { get { return this.AutomationElement.Current.LocalizedControlType; } } [Category(constIdentificationCategory)] public string Name { get { return this.AutomationElement.Current.Name; } } [Category(constIdentificationCategory)] public int ProcessId { get { return this.AutomationElement.Current.ProcessId; } } [Category(constIdentificationCategory)] public string FrameworkId { get { return this.AutomationElement.Current.FrameworkId; } } [Category(constIdentificationCategory)] public bool IsPassword { get { return this.AutomationElement.Current.IsPassword; } } [Category(constIdentificationCategory)] public bool IsControlElement { get { return this.AutomationElement.Current.IsControlElement; } } [Category(constIdentificationCategory)] public bool IsContentElement { get { return this.AutomationElement.Current.IsContentElement; } } [Category(constVisibilityCategory)] //[TypeConverter(typeof(ExpandableObjectConverter))] public Rect BoundingRectangle { get { return this.AutomationElement.Current.BoundingRectangle; } } [Category(constVisibilityCategory)] public bool IsOffscreen { get { return this.AutomationElement.Current.IsOffscreen; } } #endregion #region Pattern /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public DockPatternPropertyObject DockPattern { get { return 
this._dockPatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public ExpandCollapsePatternPropertyObject ExpandCollapsePattern { get { return this._expandCollapsePatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public GridPatternPropertyObject GridPattern { get { return this._gridPatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public InvokePatternPropertyObject InvokePattern { get { return this._invokePatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public MultipleViewPatternPropertyObject MultipleViewPattern { get { return this._multipleViewPatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public RangeValuePatternPropertyObject RangeValuePattern { get { return this._rangeValuePatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public ScrollItemPatternPropertyObject ScrollItemPattern { get { return this._scrollItemPatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] [TypeConverter(typeof(ExpandableObjectConverter))] public ScrollPatternPropertyObject ScrollPattern { get { return this._scrollPatternPropertyObject; } } /// <summary>Actual pattern object in the property grid</summary> [Category(constPatternsCategory)] 
[TypeConverter(typeof(ExpandableObjectConverter))]
public SelectionItemPatternPropertyObject SelectionItemPattern { get { return this._selectionItemPatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public SelectionPatternPropertyObject SelectionPattern { get { return this._selectionPatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public TableItemPatternPropertyObject TableItemPattern { get { return this._tableItemPatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public TablePatternPropertyObject TablePattern { get { return this._tablePatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public TextPatternPropertyObject TextPattern { get { return this._textPatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public TogglePatternPropertyObject TogglePattern { get { return this._togglePatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public TransformPatternPropertyObject TransformPattern { get { return this._transformPatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public ValuePatternPropertyObject ValuePattern { get { return this._valuePatternPropertyObject; } }

/// <summary>Actual pattern object in the property grid</summary>
[Category(constPatternsCategory)]
[TypeConverter(typeof(ExpandableObjectConverter))]
public WindowPatternPropertyObject WindowPattern { get { return this._windowPatternPropertyObject; } }

#endregion Patterns

#region DockPattern
/// <summary>PropertyGrid wrapper around <see cref="DockPattern"/>: exposes Current and an invokable SetDockPosition.</summary>
public class DockPatternPropertyObject
{
    DockPattern _pattern;
    DockPositionArg _setDockPositionArg = new DockPositionArg();

    public DockPatternPropertyObject(DockPattern dockPattern)
    {
        this._pattern = dockPattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.SetDockPosition - setter invokes the pattern using the expandable argument object returned by the getter.</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object SetDockPosition
    {
        set { _pattern.SetDockPosition(_setDockPositionArg.DockPosition); }
        get { return _setDockPositionArg; }
    }
}
#endregion

#region ExpandCollapsePattern
/// <summary>PropertyGrid wrapper around <see cref="ExpandCollapsePattern"/>.</summary>
public class ExpandCollapsePatternPropertyObject
{
    ExpandCollapsePattern _pattern;

    public ExpandCollapsePatternPropertyObject(ExpandCollapsePattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.Collapse</summary>
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object Collapse
    {
        get { return constInvokeMethod; }
        set { this._pattern.Collapse(); }
    }

    /// <summary>_pattern.Expand</summary>
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object Expand
    {
        get { return constInvokeMethod; }
        set { this._pattern.Expand(); }
    }
}
#endregion

#region GridPattern
/// <summary>PropertyGrid wrapper around <see cref="GridPattern"/>; GetItem shows the name of the cell at the chosen coordinates.</summary>
public class GridPatternPropertyObject
{
    GridPattern _pattern;
    GridCoordinate _getItemArgs = new GridCoordinate();

    public GridPatternPropertyObject(GridPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.GetItem</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object GetItem
    {
        get { return _getItemArgs; }
        set { MessageBox.Show(_pattern.GetItem(_getItemArgs.Column, _getItemArgs.Row).Current.Name); }
    }
}
#endregion

#region InvokePattern
/// <summary>PropertyGrid wrapper around <see cref="InvokePattern"/>.</summary>
public class InvokePatternPropertyObject
{
    InvokePattern _pattern;

    public InvokePatternPropertyObject(InvokePattern pattern)
    {
        this._pattern = pattern;
    }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object InvokeMethod
    {
        get { return constInvokeMethod; }
        set { this._pattern.Invoke(); }
    }
}
#endregion

#region MultipleViewPattern
/// <summary>PropertyGrid wrapper around <see cref="MultipleViewPattern"/>.</summary>
public class MultipleViewPatternPropertyObject
{
    MultipleViewPattern _pattern;

    public MultipleViewPatternPropertyObject(MultipleViewPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.SetCurrentView</summary>
    public int SetCurrentView
    {
        get { return this._pattern.Current.CurrentView; }
        set { this._pattern.SetCurrentView(value); }
    }

    /// <summary>_pattern.Current.GetSupportedViews - rendered as "Views(a,b,c)".</summary>
    public string GetSupportedViews
    {
        get
        {
            StringBuilder buffer = new StringBuilder("Views(");
            string delim = ",";
            foreach (int view in _pattern.Current.GetSupportedViews())
            {
                buffer.Append(view + delim);
            }
            // drop the trailing delimiter left by the loop (if any view was appended)
            if (buffer[buffer.Length - 1].ToString() == delim)
                buffer.Remove(buffer.Length - 1, 1);
            return buffer.Append(")").ToString();
        }
    }

    /// <summary>_pattern.GetViewName - setter shows the name for the given view id.</summary>
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public int GetViewName
    {
        get { return _pattern.Current.CurrentView; }
        set { MessageBox.Show(this._pattern.GetViewName(value), constMessageCaption); }
    }
}
#endregion

#region RangeValuePattern properties
/// <summary>PropertyGrid wrapper around <see cref="RangeValuePattern"/>.</summary>
public class RangeValuePatternPropertyObject
{
    RangeValuePattern _pattern;

    /// <summary></summary>
    public RangeValuePatternPropertyObject(RangeValuePattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.Current.SetValue</summary>
    public double SetValue
    {
        get { return _pattern.Current.Value; }
        set { this._pattern.SetValue(value); }
    }
}
#endregion

#region ScrollItemPattern properties
/// <summary>PropertyGrid wrapper around <see cref="ScrollItemPattern"/>.</summary>
public class ScrollItemPatternPropertyObject
{
    ScrollItemPattern _pattern;

    public ScrollItemPatternPropertyObject(ScrollItemPattern pattern)
    {
        this._pattern = pattern;
    }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object ScrollIntoView
    {
        get { return constInvokeMethod; }
        set { this._pattern.ScrollIntoView(); }
    }
}
#endregion

#region ScrollPattern properties
/// <summary>PropertyGrid wrapper around <see cref="ScrollPattern"/>; each invokable property keeps its own argument holder.</summary>
public class ScrollPatternPropertyObject
{
    ScrollPattern _pattern;
    ScrollPatternProperty_ScrollMethodArgs _scrollMethodArgs = new ScrollPatternProperty_ScrollMethodArgs();
    ScrollPatternProperty_ScrollAmountArgs _scrollAmountArgsHorz = new ScrollPatternProperty_ScrollAmountArgs();
    ScrollPatternProperty_ScrollAmountArgs _scrollAmountArgsVert = new ScrollPatternProperty_ScrollAmountArgs();
    ScrollPatternProperty_SetScrollPercentArgs _setScrollPercentArgs = new ScrollPatternProperty_SetScrollPercentArgs();

    public ScrollPatternPropertyObject(ScrollPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.Scroll</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object Scroll
    {
        set { _pattern.Scroll(_scrollMethodArgs._horizontalAmount, _scrollMethodArgs._verticalAmount); }
        get { return _scrollMethodArgs; }
    }

    /// <summary>_pattern.ScrollHorizontal</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object ScrollHorizontal
    {
        set { _pattern.ScrollHorizontal(_scrollAmountArgsHorz.ScrollAmount); }
        get { return _scrollAmountArgsHorz; }
    }

    /// <summary>_pattern.ScrollVertical</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object ScrollVertical
    {
        set { _pattern.ScrollVertical(_scrollAmountArgsVert.ScrollAmount); }
        get { return _scrollAmountArgsVert; }
    }

    /// <summary>_pattern.SetScrollPercent</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object SetScrollPercent
    {
        set { _pattern.SetScrollPercent(_setScrollPercentArgs.HorizontalPercent, _setScrollPercentArgs.VerticalPercent); }
        get { return _setScrollPercentArgs; }
    }
}
#endregion

#region SelectionItemPattern properties
/// <summary>PropertyGrid wrapper around <see cref="SelectionItemPattern"/>.</summary>
public class SelectionItemPatternPropertyObject
{
    SelectionItemPattern _pattern;

    public SelectionItemPatternPropertyObject(SelectionItemPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object AddToSelection
    {
        get { return constInvokeMethod; }
        set { this._pattern.AddToSelection(); }
    }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object RemoveFromSelection
    {
        get { return constInvokeMethod; }
        set { this._pattern.RemoveFromSelection(); }
    }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object Select
    {
        get { return constInvokeMethod; }
        set { this._pattern.Select(); }
    }
}
#endregion

#region SelectionPattern
/// <summary>PropertyGrid wrapper around <see cref="SelectionPattern"/>; GetSelection lists the selected element names.</summary>
public class SelectionPatternPropertyObject
{
    SelectionPattern _pattern;

    public SelectionPatternPropertyObject(SelectionPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public string GetSelection
    {
        get
        {
            StringBuilder buffer = new StringBuilder();
            foreach (AutomationElement element in _pattern.Current.GetSelection())
            {
                buffer.Append(element.Current.Name + "\n");
            }
            return buffer.ToString();
        }
    }
}
#endregion

#region TableItemPattern properties
/// <summary>PropertyGrid wrapper around <see cref="TableItemPattern"/>.</summary>
public class TableItemPatternPropertyObject
{
    TableItemPattern _pattern;

    public TableItemPatternPropertyObject(TableItemPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }
}
#endregion

#region TablePattern properties
/// <summary>PropertyGrid wrapper around <see cref="TablePattern"/>; GetItem shows the name of the cell at (X, Y).</summary>
public class TablePatternPropertyObject
{
    TablePattern _pattern;
    CoordinateArgs<int> _getItemArgs = new CoordinateArgs<int>(0, 0);

    public TablePatternPropertyObject(TablePattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.GetItem</summary>
    public CoordinateArgs<int> GetItem
    {
        get { return _getItemArgs; }
        set { MessageBox.Show(_pattern.GetItem(_getItemArgs.X, _getItemArgs.Y).Current.Name); }
    }
}
#endregion

#region TextPattern properties
/// <summary>PropertyGrid wrapper around <see cref="TextPattern"/> (no members are surfaced yet).</summary>
public class TextPatternPropertyObject
{
    TextPattern _pattern;

    public TextPatternPropertyObject(TextPattern pattern)
    {
        this._pattern = pattern;
    }
}
#endregion TextPattern properties

#region TogglePattern properties
/// <summary>PropertyGrid wrapper around <see cref="TogglePattern"/>.</summary>
public class TogglePatternPropertyObject
{
    TogglePattern _pattern;

    public TogglePatternPropertyObject(TogglePattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object Toggle
    {
        get { return constInvokeMethod; }
        set { this._pattern.Toggle(); }
    }
}
#endregion

#region TransformPattern properties
/// <summary>PropertyGrid wrapper around <see cref="TransformPattern"/>: Move/Resize/Rotate with editable argument holders.</summary>
public class TransformPatternPropertyObject
{
    TransformPattern _pattern;
    CoordinateArgs<double> _moveArgs = new CoordinateArgs<double>();
    CoordinateArgs<double> _resizeArgs = new CoordinateArgs<double>();
    NumericArgument<double> _rotateArg = new NumericArgument<double>();

    public TransformPatternPropertyObject(TransformPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Move
    {
        get { return _moveArgs; }
        set { _pattern.Move(_moveArgs.X, _moveArgs.Y); }
    }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Resize
    {
        get { return _resizeArgs; }
        set { _pattern.Resize(_resizeArgs.X, _resizeArgs.Y); }
    }

    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public NumericArgument<double> Rotate
    {
        get { return _rotateArg; }
        set { _pattern.Rotate(_rotateArg.X); }
    }
}
#endregion

#region ValuePattern properties
/// <summary>PropertyGrid wrapper around <see cref="ValuePattern"/>.</summary>
public class ValuePatternPropertyObject
{
    ValuePattern _pattern;

    public ValuePatternPropertyObject(ValuePattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.SetValue</summary>
    public string SetValue
    {
        get { return this._pattern.Current.Value; }
        set { this._pattern.SetValue(value); }
    }
}
#endregion

#region WindowPattern properties
/// <summary>PropertyGrid wrapper around <see cref="WindowPattern"/>.</summary>
public class WindowPatternPropertyObject
{
    WindowPattern _pattern;

    public WindowPatternPropertyObject(WindowPattern pattern)
    {
        this._pattern = pattern;
    }

    /// <summary>_pattern.Current</summary>
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public object Current { get { return _pattern.Current; } }

    /// <summary>_pattern.SetWindowVisualState</summary>
    public WindowVisualState SetWindowVisualState
    {
        get { return this._pattern.Current.WindowVisualState; }
        set { this._pattern.SetWindowVisualState(value); }
    }

    /// <summary>_pattern.Close</summary>
    [Editor(typeof(InvokeMethodButtonEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public object Close
    {
        get { return constInvokeMethod; }
        set { this._pattern.Close(); }
    }
}
#endregion

#region Args
/// <summary>Argument holder for <see cref="DockPatternPropertyObject.SetDockPosition"/>.</summary>
public class DockPositionArg
{
    DockPosition _dockPosition = DockPosition.None;
    public DockPosition DockPosition { get { return _dockPosition; } set { _dockPosition = value; } }
}

/// <summary>Row/column argument holder for <see cref="GridPatternPropertyObject.GetItem"/>.</summary>
public class GridCoordinate
{
    int _row;
    public int Row { get { return _row; } set { _row = value; } }
    int _column;
    public int Column { get { return _column; } set { _column = value; } }
}

/// <summary>Generic (X, Y) argument holder used by the table and transform pattern wrappers.</summary>
public class CoordinateArgs<T>
{
    T _x;
    T _y;
    public T X { get { return _x; } set { _x = value; } }
    public T Y { get { return _y; } set { _y = value; } }

    // BUGFIX: the assignments were reversed (x = X; y = Y;), which discarded the
    // constructor arguments and left both fields at default(T).
    public CoordinateArgs(T x, T y)
    {
        X = x;
        Y = y;
    }

    public CoordinateArgs() { }
}

/// <summary>Single numeric argument holder (e.g. rotation degrees for TransformPattern.Rotate).</summary>
public class NumericArgument<T>
{
    T _x;
    public T X { get { return _x; } set { _x = value; } }
    public NumericArgument() { }
}

/// <summary>Argument holder for <see cref="ScrollPatternPropertyObject.Scroll"/>.</summary>
public class ScrollPatternProperty_ScrollMethodArgs
{
    internal ScrollAmount _horizontalAmount = ScrollAmount.NoAmount;
    internal ScrollAmount _verticalAmount = ScrollAmount.NoAmount;

    public ScrollPatternProperty_ScrollMethodArgs() { }

    // NOTE: "HorizontallyAmount" is a typo, but it is a public property name shown in the
    // grid and possibly referenced by callers, so it is kept for backward compatibility.
    public ScrollAmount HorizontallyAmount { get { return _horizontalAmount; } set { _horizontalAmount = value; } }
    public ScrollAmount VerticalAmount { get { return _verticalAmount; } set { _verticalAmount = value; } }
}

/// <summary>Argument holder for ScrollHorizontal/ScrollVertical.</summary>
public class ScrollPatternProperty_ScrollAmountArgs
{
    ScrollAmount _scrollAmount = ScrollAmount.NoAmount;
    internal ScrollPatternProperty_ScrollAmountArgs() { }
    public ScrollAmount ScrollAmount { get { return _scrollAmount; } set { _scrollAmount = value; } }
}

/// <summary>Argument holder for SetScrollPercent; -1 means "no change" per the UIA convention.</summary>
public class ScrollPatternProperty_SetScrollPercentArgs
{
    double horizontalPercent = -1;
    double verticalPercent = -1;
    public double HorizontalPercent { get { return horizontalPercent; } set { horizontalPercent = value; } }
    public double VerticalPercent { get { return verticalPercent; } set { verticalPercent = value; } }
}
#endregion Args
}
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using QuantConnect.Configuration;
using QuantConnect.Data;
using QuantConnect.Data.Custom;
using QuantConnect.Data.Custom.Tiingo;
using QuantConnect.Data.Market;
using QuantConnect.Data.UniverseSelection;
using QuantConnect.Interfaces;
using QuantConnect.Lean.Engine.DataFeeds.Enumerators;
using QuantConnect.Lean.Engine.DataFeeds.Enumerators.Factories;
using QuantConnect.Lean.Engine.Results;
using QuantConnect.Logging;
using QuantConnect.Packets;
using QuantConnect.Securities;
using QuantConnect.Util;

namespace QuantConnect.Lean.Engine.DataFeeds
{
    /// <summary>
    /// Provides an implementation of <see cref="IDataFeed"/> that is designed to deal with
    /// live, remote data sources
    /// </summary>
    public class LiveTradingDataFeed : IDataFeed
    {
        private LiveNodePacket _job;

        // used to get current time
        private ITimeProvider _timeProvider;
        private ITimeProvider _frontierTimeProvider;
        private IDataProvider _dataProvider;
        private IMapFileProvider _mapFileProvider;

        private IDataQueueHandler _dataQueueHandler;
        private BaseDataExchange _customExchange;
        private SubscriptionCollection _subscriptions;
        private CancellationTokenSource _cancellationTokenSource = new CancellationTokenSource();
        private IDataChannelProvider _channelProvider;

        /// <summary>
        /// Public flag indicator that the thread is still busy.
        /// </summary>
        public bool IsActive { get; private set; }

        /// <summary>
        /// Initializes the data feed for the specified job and algorithm
        /// </summary>
        public void Initialize(IAlgorithm algorithm,
            AlgorithmNodePacket job,
            IResultHandler resultHandler,
            IMapFileProvider mapFileProvider,
            IFactorFileProvider factorFileProvider,
            IDataProvider dataProvider,
            IDataFeedSubscriptionManager subscriptionManager,
            IDataFeedTimeProvider dataFeedTimeProvider,
            IDataChannelProvider dataChannelProvider)
        {
            if (!(job is LiveNodePacket))
            {
                throw new ArgumentException("The LiveTradingDataFeed requires a LiveNodePacket.");
            }

            _cancellationTokenSource = new CancellationTokenSource();

            _job = (LiveNodePacket) job;
            _timeProvider = dataFeedTimeProvider.TimeProvider;
            _dataProvider = dataProvider;
            _mapFileProvider = mapFileProvider;
            _channelProvider = dataChannelProvider;
            _frontierTimeProvider = dataFeedTimeProvider.FrontierTimeProvider;
            _customExchange = new BaseDataExchange("CustomDataExchange") {SleepInterval = 10};
            _subscriptions = subscriptionManager.DataFeedSubscriptions;

            _dataQueueHandler = GetDataQueueHandler();
            _dataQueueHandler?.SetJob(_job);

            // run the custom data exchange; wait until the task has actually started
            // before returning so the exchange is live when subscriptions arrive
            var manualEvent = new ManualResetEventSlim(false);
            Task.Factory.StartNew(() =>
            {
                manualEvent.Set();
                _customExchange.Start(_cancellationTokenSource.Token);
            }, TaskCreationOptions.LongRunning);
            manualEvent.Wait();
            manualEvent.DisposeSafely();

            IsActive = true;
        }

        /// <summary>
        /// Creates a new subscription to provide data for the specified security.
        /// </summary>
        /// <param name="request">Defines the subscription to be added, including start/end times the universe and security</param>
        /// <returns>The created <see cref="Subscription"/> if successful, null otherwise</returns>
        public Subscription CreateSubscription(SubscriptionRequest request)
        {
            // create and add the subscription to our collection
            var subscription = request.IsUniverseSubscription
                ? CreateUniverseSubscription(request)
                : CreateDataSubscription(request);

            return subscription;
        }

        /// <summary>
        /// Removes the subscription from the data feed, if it exists
        /// </summary>
        /// <param name="subscription">The subscription to remove</param>
        public void RemoveSubscription(Subscription subscription)
        {
            var symbol = subscription.Configuration.Symbol;

            // remove the subscriptions
            if (!_channelProvider.ShouldStreamSubscription(subscription.Configuration))
            {
                _customExchange.RemoveEnumerator(symbol);
                _customExchange.RemoveDataHandler(symbol);
            }
            else
            {
                // FIX: guard with '?.' - Initialize() treats the data queue handler as
                // potentially null (_dataQueueHandler?.SetJob), so match that here
                _dataQueueHandler?.Unsubscribe(subscription.Configuration);

                if (subscription.Configuration.SecurityType == SecurityType.Equity && !subscription.Configuration.IsInternalFeed)
                {
                    _dataQueueHandler?.Unsubscribe(new SubscriptionDataConfig(subscription.Configuration, typeof(Dividend)));
                    _dataQueueHandler?.Unsubscribe(new SubscriptionDataConfig(subscription.Configuration, typeof(Split)));
                }
            }
        }

        /// <summary>
        /// External controller calls to signal a terminate of the thread.
        /// </summary>
        public virtual void Exit()
        {
            if (IsActive)
            {
                IsActive = false;
                Log.Trace("LiveTradingDataFeed.Exit(): Start. Setting cancellation token...");
                _cancellationTokenSource.Cancel();
                _customExchange?.Stop();
                // FIX: the token source was cancelled but never disposed (resource leak)
                _cancellationTokenSource.DisposeSafely();
                Log.Trace("LiveTradingDataFeed.Exit(): Exit Finished.");
            }
        }

        /// <summary>
        /// Gets the <see cref="IDataQueueHandler"/> to use. By default this will try to load
        /// the type specified in the configuration via the 'data-queue-handler'
        /// </summary>
        /// <returns>The loaded <see cref="IDataQueueHandler"/></returns>
        protected virtual IDataQueueHandler GetDataQueueHandler()
        {
            Log.Trace($"LiveTradingDataFeed.GetDataQueueHandler(): will use {_job.DataQueueHandler}");
            return Composer.Instance.GetExportedValueByTypeName<IDataQueueHandler>(_job.DataQueueHandler);
        }

        /// <summary>
        /// Creates a new subscription for the specified security
        /// </summary>
        /// <param name="request">The subscription request</param>
        /// <returns>A new subscription instance of the specified security</returns>
        protected Subscription CreateDataSubscription(SubscriptionRequest request)
        {
            Subscription subscription = null;
            try
            {
                var localEndTime = request.EndTimeUtc.ConvertFromUtc(request.Security.Exchange.TimeZone);
                var timeZoneOffsetProvider = new TimeZoneOffsetProvider(request.Security.Exchange.TimeZone, request.StartTimeUtc, request.EndTimeUtc);

                IEnumerator<BaseData> enumerator;
                if (!_channelProvider.ShouldStreamSubscription(request.Configuration))
                {
                    if (!Quandl.IsAuthCodeSet)
                    {
                        // we're not using the SubscriptionDataReader, so be sure to set the auth token here
                        Quandl.SetAuthCode(Config.Get("quandl-auth-token"));
                    }

                    if (!Tiingo.IsAuthCodeSet)
                    {
                        // we're not using the SubscriptionDataReader, so be sure to set the auth token here
                        Tiingo.SetAuthCode(Config.Get("tiingo-auth-token"));
                    }

                    var factory = new LiveCustomDataSubscriptionEnumeratorFactory(_timeProvider);
                    var enumeratorStack = factory.CreateEnumerator(request, _dataProvider);

                    _customExchange.AddEnumerator(request.Configuration.Symbol, enumeratorStack);

                    var enqueable = new EnqueueableEnumerator<BaseData>();
                    _customExchange.SetDataHandler(request.Configuration.Symbol, data =>
                    {
                        enqueable.Enqueue(data);

                        subscription.OnNewDataAvailable();
                    });
                    enumerator = enqueable;
                }
                else
                {
                    EventHandler handler = (sender, args) => subscription?.OnNewDataAvailable();
                    enumerator = _dataQueueHandler.Subscribe(request.Configuration, handler);

                    var securityType = request.Configuration.SecurityType;
                    var auxEnumerators = new List<IEnumerator<BaseData>>();

                    if (securityType == SecurityType.Equity)
                    {
                        auxEnumerators.Add(_dataQueueHandler.Subscribe(new SubscriptionDataConfig(request.Configuration, typeof(Dividend)), handler));
                        auxEnumerators.Add(_dataQueueHandler.Subscribe(new SubscriptionDataConfig(request.Configuration, typeof(Split)), handler));
                    }

                    IEnumerator<BaseData> delistingEnumerator;
                    if (LiveDelistingEventProviderEnumerator.TryCreate(request.Configuration, _timeProvider, _dataQueueHandler, request.Security.Cache, _mapFileProvider, out delistingEnumerator))
                    {
                        auxEnumerators.Add(delistingEnumerator);
                    }

                    if (auxEnumerators.Count > 0)
                    {
                        enumerator = new LiveAuxiliaryDataSynchronizingEnumerator(_timeProvider, request.Configuration.ExchangeTimeZone, enumerator, auxEnumerators.ToArray());
                    }
                }

                if (request.Configuration.FillDataForward)
                {
                    var fillForwardResolution = _subscriptions.UpdateAndGetFillForwardResolution(request.Configuration);

                    enumerator = new LiveFillForwardEnumerator(_frontierTimeProvider, enumerator, request.Security.Exchange, fillForwardResolution, request.Configuration.ExtendedMarketHours, localEndTime, request.Configuration.Increment, request.Configuration.DataTimeZone);
                }

                // define market hours and user filters to incoming data
                if (request.Configuration.IsFilteredSubscription)
                {
                    enumerator = new SubscriptionFilterEnumerator(enumerator, request.Security, localEndTime, request.Configuration.ExtendedMarketHours, true, request.ExchangeHours);
                }

                // finally, make our subscriptions aware of the frontier of the data feed, prevents future data from spewing into the feed
                enumerator = new FrontierAwareEnumerator(enumerator, _frontierTimeProvider, timeZoneOffsetProvider);

                var subscriptionDataEnumerator = new SubscriptionDataEnumerator(request.Configuration, request.Security.Exchange.Hours, timeZoneOffsetProvider, enumerator, request.IsUniverseSubscription);
                subscription = new Subscription(request, subscriptionDataEnumerator, timeZoneOffsetProvider);
            }
            catch (Exception err)
            {
                Log.Error(err);
            }

            return subscription;
        }

        /// <summary>
        /// Creates a new subscription for universe selection
        /// </summary>
        /// <param name="request">The subscription request</param>
        private Subscription CreateUniverseSubscription(SubscriptionRequest request)
        {
            Subscription subscription = null;

            // TODO : Consider moving the creating of universe subscriptions to a separate, testable class

            // grab the relevant exchange hours
            var config = request.Universe.Configuration;
            var localEndTime = request.EndTimeUtc.ConvertFromUtc(request.Security.Exchange.TimeZone);
            var tzOffsetProvider = new TimeZoneOffsetProvider(request.Security.Exchange.TimeZone, request.StartTimeUtc, request.EndTimeUtc);

            IEnumerator<BaseData> enumerator = null;

            var timeTriggered = request.Universe as ITimeTriggeredUniverse;
            if (timeTriggered != null)
            {
                Log.Trace($"LiveTradingDataFeed.CreateUniverseSubscription(): Creating user defined universe: {config.Symbol.ID}");

                // spoof a tick on the requested interval to trigger the universe selection function
                var enumeratorFactory = new TimeTriggeredUniverseSubscriptionEnumeratorFactory(timeTriggered, MarketHoursDatabase.FromDataFolder(), _frontierTimeProvider);
                enumerator = enumeratorFactory.CreateEnumerator(request, _dataProvider);

                enumerator = new FrontierAwareEnumerator(enumerator, _timeProvider, tzOffsetProvider);

                var enqueueable = new EnqueueableEnumerator<BaseData>();
                _customExchange.AddEnumerator(new EnumeratorHandler(config.Symbol, enumerator, enqueueable));
                enumerator = enqueueable;
            }
            else if (config.Type == typeof (CoarseFundamental))
            {
                Log.Trace($"LiveTradingDataFeed.CreateUniverseSubscription(): Creating coarse universe: {config.Symbol.ID}");

                // Will try to pull coarse data from the data folder every 10min, file with today's date.
                // If lean is started today it will trigger initial coarse universe selection
                var factory = new LiveCustomDataSubscriptionEnumeratorFactory(_timeProvider,
                    // we adjust time to the previous tradable date
                    time => Time.GetStartTimeForTradeBars(request.Security.Exchange.Hours, time, Time.OneDay, 1, false, config.DataTimeZone),
                    TimeSpan.FromMinutes(10)
                );
                var enumeratorStack = factory.CreateEnumerator(request, _dataProvider);

                // aggregates each coarse data point into a single BaseDataCollection
                var aggregator = new BaseDataCollectionAggregatorEnumerator(enumeratorStack, config.Symbol, true);
                _customExchange.AddEnumerator(config.Symbol, aggregator);

                var enqueable = new EnqueueableEnumerator<BaseData>();
                _customExchange.SetDataHandler(config.Symbol, data =>
                {
                    var coarseData = data as BaseDataCollection;
                    enqueable.Enqueue(new BaseDataCollection(coarseData.Time, config.Symbol, coarseData.Data));
                    subscription.OnNewDataAvailable();
                });

                enumerator = GetConfiguredFrontierAwareEnumerator(enqueable, tzOffsetProvider,
                    // advance time if before 23pm or after 5am and not on Saturdays
                    time => time.Hour < 23 && time.Hour > 5 && time.DayOfWeek != DayOfWeek.Saturday);
            }
            else if (request.Universe is OptionChainUniverse)
            {
                Log.Trace("LiveTradingDataFeed.CreateUniverseSubscription(): Creating option chain universe: " + config.Symbol.ID);

                Func<SubscriptionRequest, IEnumerator<BaseData>> configure = (subRequest) =>
                {
                    var fillForwardResolution = _subscriptions.UpdateAndGetFillForwardResolution(subRequest.Configuration);
                    var input = _dataQueueHandler.Subscribe(subRequest.Configuration, (sender, args) => subscription.OnNewDataAvailable());
                    return new LiveFillForwardEnumerator(_frontierTimeProvider, input, subRequest.Security.Exchange, fillForwardResolution, subRequest.Configuration.ExtendedMarketHours, localEndTime, subRequest.Configuration.Increment, subRequest.Configuration.DataTimeZone);
                };

                var symbolUniverse = _dataQueueHandler as IDataQueueUniverseProvider;
                if (symbolUniverse == null)
                {
                    throw new NotSupportedException("The DataQueueHandler does not support Options.");
                }

                var enumeratorFactory = new OptionChainUniverseSubscriptionEnumeratorFactory(configure, symbolUniverse, _timeProvider);
                enumerator = enumeratorFactory.CreateEnumerator(request, _dataProvider);

                enumerator = new FrontierAwareEnumerator(enumerator, _frontierTimeProvider, tzOffsetProvider);
            }
            else if (request.Universe is FuturesChainUniverse)
            {
                Log.Trace("LiveTradingDataFeed.CreateUniverseSubscription(): Creating futures chain universe: " + config.Symbol.ID);

                var symbolUniverse = _dataQueueHandler as IDataQueueUniverseProvider;
                if (symbolUniverse == null)
                {
                    throw new NotSupportedException("The DataQueueHandler does not support Futures.");
                }

                var enumeratorFactory = new FuturesChainUniverseSubscriptionEnumeratorFactory(symbolUniverse, _timeProvider);
                enumerator = enumeratorFactory.CreateEnumerator(request, _dataProvider);

                enumerator = new FrontierAwareEnumerator(enumerator, _frontierTimeProvider, tzOffsetProvider);
            }
            else
            {
                Log.Trace("LiveTradingDataFeed.CreateUniverseSubscription(): Creating custom universe: " + config.Symbol.ID);

                var factory = new LiveCustomDataSubscriptionEnumeratorFactory(_timeProvider);
                var enumeratorStack = factory.CreateEnumerator(request, _dataProvider);
                enumerator = new BaseDataCollectionAggregatorEnumerator(enumeratorStack, config.Symbol, liveMode:true);

                var enqueueable = new EnqueueableEnumerator<BaseData>();
                _customExchange.AddEnumerator(new EnumeratorHandler(config.Symbol, enumerator, enqueueable));
                enumerator = enqueueable;
            }

            // create the subscription
            var subscriptionDataEnumerator = new SubscriptionDataEnumerator(request.Configuration, request.Security.Exchange.Hours, tzOffsetProvider, enumerator, request.IsUniverseSubscription);
            subscription = new Subscription(request, subscriptionDataEnumerator, tzOffsetProvider);

            // send the subscription for the new symbol through to the data queuehandler
            if (_channelProvider.ShouldStreamSubscription(subscription.Configuration))
            {
                _dataQueueHandler.Subscribe(request.Configuration, (sender, args) => subscription.OnNewDataAvailable());
            }

            return subscription;
        }

        /// <summary>
        /// Will wrap the provided enumerator with a <see cref="FrontierAwareEnumerator"/>
        /// using a <see cref="PredicateTimeProvider"/> that will advance time based on the provided
        /// function
        /// </summary>
        /// <remarks>Won't advance time if now.Hour is bigger or equal than 23pm, less or equal than 5am or Saturday.
        /// This is done to prevent universe selection occurring in those hours so that the subscription changes
        /// are handled correctly.</remarks>
        private IEnumerator<BaseData> GetConfiguredFrontierAwareEnumerator(
            IEnumerator<BaseData> enumerator,
            TimeZoneOffsetProvider tzOffsetProvider,
            Func<DateTime, bool> customStepEvaluator)
        {
            var stepTimeProvider = new PredicateTimeProvider(_frontierTimeProvider, customStepEvaluator);

            return new FrontierAwareEnumerator(enumerator, stepTimeProvider, tzOffsetProvider);
        }

        /// <summary>
        /// Overrides methods of the base data exchange implementation
        /// </summary>
        class EnumeratorHandler : BaseDataExchange.EnumeratorHandler
        {
            private readonly EnqueueableEnumerator<BaseData> _enqueueable;

            public EnumeratorHandler(Symbol symbol, IEnumerator<BaseData> enumerator, EnqueueableEnumerator<BaseData> enqueueable)
                : base(symbol, enumerator, true)
            {
                _enqueueable = enqueueable;
            }

            /// <summary>
            /// Returns true if this enumerator should move next
            /// </summary>
            public override bool ShouldMoveNext() { return true; }

            /// <summary>
            /// Calls stop on the internal enqueueable enumerator
            /// </summary>
            public override void OnEnumeratorFinished() { _enqueueable.Stop(); }

            /// <summary>
            /// Enqueues the data
            /// </summary>
            /// <param name="data">The data to be handled</param>
            public override void HandleData(BaseData data)
            {
                _enqueueable.Enqueue(data);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using DotVVM.Framework.Controls;
using DotVVM.Framework.Storage;
using System.Diagnostics;
using System.Reflection;
using System.Text.RegularExpressions;
using DotVVM.Framework.Hosting.Middlewares;
using DotVVM.Framework.ViewModel.Serialization;
using Microsoft.Extensions.DependencyInjection;
using System.Threading;
using System.Threading.Tasks;
using DotVVM.Framework.Routing;
using DotVVM.Framework.Hosting;
using DotVVM.Core.Storage;
using DotVVM.Framework.Runtime;
using DotVVM.Framework.ViewModel.Validation;

/// <summary>
/// Extension methods on <see cref="IDotvvmRequestContext"/> covering culture access, redirects,
/// file returns, model-state validation and request interruption. Several methods intentionally
/// end request processing by throwing <see cref="DotvvmInterruptRequestExecutionException"/>.
/// </summary>
public static class DotvvmRequestContextExtensions
{
    /// <summary>
    /// Gets the unique id of the SpaContentPlaceHolder that should be loaded.
    /// </summary>
    public static string? GetSpaContentPlaceHolderUniqueId(this IDotvvmRequestContext context)
    {
        return DotvvmPresenter.DetermineSpaContentPlaceHolderUniqueId(context.HttpContext);
    }

    /// <summary>
    /// Gets cancellation token for the request
    /// </summary>
    public static CancellationToken GetCancellationToken(this IDotvvmRequestContext context)
    {
        var cancellationService = context.Services.GetRequiredService<IRequestCancellationTokenProvider>();
        return cancellationService.GetCancellationToken(context);
    }

    /// <summary>
    /// Changes the current culture of this HTTP request.
    /// </summary>
    [Obsolete("This method only assigns CultureInfo.CurrentCulture, which is not preserved in async methods. You should assign it manually, or use RequestLocalization middleware or LocalizablePresenter.")]
    public static void ChangeCurrentCulture(this IDotvvmRequestContext context, string cultureName) =>
        context.ChangeCurrentCulture(cultureName, cultureName);

    /// <summary>
    /// Changes the current culture of this HTTP request.
    /// </summary>
    [Obsolete("This method only assigns CultureInfo.CurrentCulture, which is not preserved in async methods. You should assign it manually, or use RequestLocalization middleware or LocalizablePresenter.")]
    public static void ChangeCurrentCulture(this IDotvvmRequestContext context, string cultureName, string uiCultureName)
    {
        // On .NET Core the CultureInfo statics write through to the current async context;
        // on .NET Framework the thread properties must be assigned directly.
#if DotNetCore
        CultureInfo.CurrentCulture = new CultureInfo(cultureName);
        CultureInfo.CurrentUICulture = new CultureInfo(uiCultureName);
#else
        Thread.CurrentThread.CurrentCulture = new CultureInfo(cultureName);
        Thread.CurrentThread.CurrentUICulture = new CultureInfo(uiCultureName);
#endif
    }

    /// <summary>
    /// Gets the current UI culture of this HTTP request.
    /// </summary>
    [Obsolete("This just returns CultureInfo.CurrentUICulture")]
    public static CultureInfo GetCurrentUICulture(this IDotvvmRequestContext context)
    {
        return CultureInfo.CurrentUICulture;
    }

    /// <summary>
    /// Gets the current culture of this HTTP request.
    /// </summary>
    [Obsolete("This just returns CultureInfo.CurrentCulture")]
    public static CultureInfo GetCurrentCulture(this IDotvvmRequestContext context)
    {
        return CultureInfo.CurrentCulture;
    }

    /// <summary>
    /// Interrupts the execution of the current request.
    /// </summary>
    [DebuggerHidden]
    public static void InterruptRequest(this IDotvvmRequestContext context)
    {
        throw new DotvvmInterruptRequestExecutionException();
    }

    /// <summary>
    /// Returns the redirect response and interrupts the execution of current request.
    /// </summary>
    public static void RedirectToUrl(this IDotvvmRequestContext context, string url, bool replaceInHistory = false, bool allowSpaRedirect = false)
    {
        context.SetRedirectResponse(context.TranslateVirtualPath(url), (int)HttpStatusCode.Redirect, replaceInHistory, allowSpaRedirect);
        throw new DotvvmInterruptRequestExecutionException(InterruptReason.Redirect, url);
    }

    /// <summary>
    /// Verifies that the URL is local and returns the redirect response and interrupts the execution of current request.
    /// </summary>
    public static void RedirectToLocalUrl(this IDotvvmRequestContext context, string url, bool replaceInHistory = false, bool allowSpaRedirect = false)
    {
        if (!UrlHelper.IsLocalUrl(url))
        {
            throw new InvalidOperationException($"The URL '{url}' is not local or contains invalid characters!");
        }

        context.RedirectToUrl(url, replaceInHistory, allowSpaRedirect);
    }

    /// <summary>
    /// Returns the redirect response and interrupts the execution of current request.
    /// </summary>
    // NOTE: unlike RedirectToUrl, route-based redirects default allowSpaRedirect to true.
    public static void RedirectToRoute(this IDotvvmRequestContext context, string routeName, object? newRouteValues = null, bool replaceInHistory = false, bool allowSpaRedirect = true, string? urlSuffix = null, object? query = null)
    {
        var route = context.Configuration.RouteTable[routeName];
        var url = route.BuildUrl(context.Parameters!, newRouteValues) + UrlHelper.BuildUrlSuffix(urlSuffix, query);

        context.RedirectToUrl(url, replaceInHistory, allowSpaRedirect);
    }

    /// <summary>
    /// Returns the permanent redirect response and interrupts the execution of current request.
    /// </summary>
    public static void RedirectToUrlPermanent(this IDotvvmRequestContext context, string url, bool replaceInHistory = false, bool allowSpaRedirect = false)
    {
        context.SetRedirectResponse(context.TranslateVirtualPath(url), (int)HttpStatusCode.MovedPermanently, replaceInHistory, allowSpaRedirect);
        throw new DotvvmInterruptRequestExecutionException(InterruptReason.RedirectPermanent, url);
    }

    /// <summary>
    /// Returns the permanent redirect response and interrupts the execution of current request.
    /// </summary>
    public static void RedirectToRoutePermanent(this IDotvvmRequestContext context, string routeName, object? newRouteValues = null, bool replaceInHistory = false, bool allowSpaRedirect = true, string? urlSuffix = null, object? query = null)
    {
        var route = context.Configuration.RouteTable[routeName];
        var url = route.BuildUrl(context.Parameters!, newRouteValues) + UrlHelper.BuildUrlSuffix(urlSuffix, query);

        context.RedirectToUrlPermanent(url, replaceInHistory, allowSpaRedirect);
    }

    /// <summary>
    /// Writes the redirect response headers/body without interrupting the request; delegated to <see cref="IHttpRedirectService"/>.
    /// </summary>
    public static void SetRedirectResponse(this IDotvvmRequestContext context, string url, int statusCode = (int)HttpStatusCode.Redirect, bool replaceInHistory = false, bool allowSpaRedirect = false) =>
        context.Configuration.ServiceProvider.GetRequiredService<IHttpRedirectService>().WriteRedirectResponse(context.HttpContext, url, statusCode, replaceInHistory, allowSpaRedirect);

    /// <summary>
    /// Writes the JSON response telling the client its cached viewmodel is no longer available.
    /// </summary>
    internal static Task SetCachedViewModelMissingResponse(this IDotvvmRequestContext context)
    {
        context.HttpContext.Response.StatusCode = 200;
        context.HttpContext.Response.ContentType = "application/json";
        return context.HttpContext.Response.WriteAsync(DefaultViewModelSerializer.GenerateMissingCachedViewModelResponse());
    }

    /// <summary>
    /// Ends the request execution when the <see cref="ModelState"/> is not valid and displays the validation errors in <see cref="ValidationSummary"/> control.
    /// If it is valid, it does nothing.
    /// </summary>
    public static void FailOnInvalidModelState(this IDotvvmRequestContext context)
    {
        context.PreprocessModelState();
        if (!context.ModelState.IsValid)
        {
            context.HttpContext.Response.ContentType = "application/json";
            context.HttpContext.Response
                .WriteAsync(context.Services.GetRequiredService<IViewModelSerializer>().SerializeModelState(context))
                .GetAwaiter().GetResult();
            // ^ we just wait for this Task. This API never was async and the response size is small enough that we can quite safely wait for the result
            // .GetAwaiter().GetResult() preserves stack traces across async calls, thus I like it more than .Wait()
            throw new DotvvmInterruptRequestExecutionException(InterruptReason.ModelValidationFailed, "The ViewModel contains validation errors!");
        }
    }

    /// <summary>
    /// Expands validation error paths against the current viewmodel before the state is serialized or inspected.
    /// </summary>
    private static void PreprocessModelState(this IDotvvmRequestContext context)
    {
        if (!context.ModelState.IsValid)
        {
            var modelStateErrorExpander = context.Services.GetRequiredService<IValidationErrorPathExpander>();
            modelStateErrorExpander.Expand(context.ModelState, context.ViewModel);
        }
    }

    /// <summary>
    /// Gets the serialized view model.
    /// </summary>
    public static string GetSerializedViewModel(this IDotvvmRequestContext context)
    {
        return context.Services.GetRequiredService<IViewModelSerializer>().SerializeViewModel(context);
    }

    /// <summary>
    /// Translates the virtual path (~/something) to the domain relative path (/virtualDirectory/something).
    /// For example, when the app is configured to run in a virtual directory '/virtDir', the URL '~/myPage.dothtml' will be translated to '/virtDir/myPage.dothtml'.
    /// </summary>
    public static string TranslateVirtualPath(this IDotvvmRequestContext context, string virtualUrl)
    {
        return TranslateVirtualPath(virtualUrl, context.HttpContext);
    }

    /// <summary>
    /// Translates the virtual path (~/something) to the domain relative path (/virtualDirectory/something).
    /// For example, when the app is configured to run in a virtual directory '/virtDir', the URL '~/myPage.dothtml' will be translated to '/virtDir/myPage.dothtml'.
    /// </summary>
    public static string TranslateVirtualPath(string virtualUrl, IHttpContext httpContext)
    {
        if (virtualUrl.StartsWith("~/", StringComparison.Ordinal))
        {
            var url = DotvvmMiddlewareBase.GetVirtualDirectory(httpContext) + "/" + virtualUrl.Substring(2);
            if (!url.StartsWith("/", StringComparison.Ordinal))
            {
                url = "/" + url;
            }
            return url;
        }
        else
        {
            return virtualUrl;
        }
    }

    /// <summary>
    /// Redirects the client to the specified file.
    /// </summary>
    [Obsolete("Use ReturnFileAsync() instead")]
    public static void ReturnFile(this IDotvvmRequestContext context, byte[] bytes, string fileName, string mimeType, IEnumerable<KeyValuePair<string, string>>? additionalHeaders = null, string? attachmentDispositionType = null) =>
        context.ReturnFile(new MemoryStream(bytes), fileName, mimeType, additionalHeaders, attachmentDispositionType);

    /// <summary>
    /// Redirects the client to the specified file.
    /// </summary>
    [Obsolete("Use ReturnFileAsync() instead")]
    public static void ReturnFile(this IDotvvmRequestContext context, Stream stream, string fileName, string mimeType, IEnumerable<KeyValuePair<string, string>>? additionalHeaders = null, string? attachmentDispositionType = null) =>
        context.ReturnFileAsync(stream, fileName, mimeType, additionalHeaders, attachmentDispositionType).GetAwaiter().GetResult();

    /// <summary>
    /// Redirects the client to the specified file.
    /// </summary>
    public static Task ReturnFileAsync(this IDotvvmRequestContext context, byte[] bytes, string fileName, string mimeType, IEnumerable<KeyValuePair<string, string>>? additionalHeaders = null, string? attachmentDispositionType = null) =>
        context.ReturnFileAsync(new MemoryStream(bytes), fileName, mimeType, additionalHeaders, attachmentDispositionType);

    /// <summary>
    /// Redirects the client to the specified file.
    /// </summary>
    public static async Task ReturnFileAsync(this IDotvvmRequestContext context, Stream stream, string fileName, string mimeType, IEnumerable<KeyValuePair<string, string>>? additionalHeaders = null, string? attachmentDispositionType = null)
    {
        var returnedFileStorage = context.Services.GetService<IReturnedFileStorage>();

        if (returnedFileStorage == null)
        {
            throw new DotvvmFileStorageMissingException($"Unable to resolve service for type '{typeof(IReturnedFileStorage).Name}'. " +
                $"Visit https://www.dotvvm.com/docs/tutorials/advanced-returning-files for more details!");
        }

        var metadata = new ReturnedFileMetadata()
        {
            FileName = fileName,
            MimeType = mimeType,
            // collapse duplicate header names into a single entry with all their values
            AdditionalHeaders = additionalHeaders?.GroupBy(k => k.Key, k => k.Value)?.ToDictionary(k => k.Key, k => k.ToArray()),
            AttachmentDispositionType = attachmentDispositionType ?? "attachment"
        };

        var generatedFileId = await returnedFileStorage.StoreFileAsync(stream, metadata).ConfigureAwait(false);
        // the client is redirected to a handler URL that serves the stored file by id
        context.SetRedirectResponse(context.TranslateVirtualPath("~/dotvvmReturnedFile?id=" + generatedFileId));
        throw new DotvvmInterruptRequestExecutionException(InterruptReason.ReturnFile, fileName);
    }

    /// <summary>
    /// Writes a plain-text rejection response with the given status code, records a runtime warning, and interrupts the request.
    /// </summary>
    internal static async Task RejectRequest(this IDotvvmRequestContext context, string message, int statusCode = 403)
    {
        // push it as a warning to ILogger
        var msg = "request rejected: " + message;
        context.Services
            .GetRequiredService<RuntimeWarningCollector>()
            .Warn(new DotvvmRuntimeWarning(msg));
        context.HttpContext.Response.StatusCode = statusCode;
        context.HttpContext.Response.ContentType = "text/plain";
        await context.HttpContext.Response.WriteAsync(msg);

        throw new DotvvmInterruptRequestExecutionException(InterruptReason.RequestRejected, msg);
    }

    /// <summary>
    /// Records a runtime warning, but only when the application runs in Debug mode.
    /// </summary>
    public static void DebugWarning(this IDotvvmRequestContext context, string message, Exception? relatedException = null, DotvvmBindableObject? relatedControl = null)
    {
        if (context.Configuration.Debug)
        {
            context.Services
                .GetRequiredService<RuntimeWarningCollector>()
                .Warn(new DotvvmRuntimeWarning(message, relatedException, relatedControl));
        }
    }
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// File: StringBuilderExtFormat.cs
// Date: 11th March 2010
// Author: Gavin Pugh
// Details: Extension methods for the 'StringBuilder' standard .NET class, to allow garbage-free concatenation of
//          formatted strings with a variable set of arguments.
//
// Copyright (c) Gavin Pugh 2010 - Released under the zlib license: http://www.opensource.org/licenses/zlib-license.php
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

using System;
using System.Text;
using UnityEngine;
using Debug = System.Diagnostics.Debug;

namespace GCMonitor
{
    public static partial class StringBuilderExtensions
    {
        //! Concatenate a formatted string with one argument (garbage-free).
        public static StringBuilder ConcatFormat<A>( this StringBuilder string_builder, String format_string, A arg1 )
            where A : IConvertible
        {
            return string_builder.ConcatFormat<A, int, int, int>( format_string, arg1, 0, 0, 0 );
        }

        //! Concatenate a formatted string with two arguments (garbage-free).
        public static StringBuilder ConcatFormat<A, B>( this StringBuilder string_builder, String format_string, A arg1, B arg2 )
            where A : IConvertible
            where B : IConvertible
        {
            return string_builder.ConcatFormat<A, B, int, int>( format_string, arg1, arg2, 0, 0 );
        }

        //! Concatenate a formatted string with three arguments (garbage-free).
        public static StringBuilder ConcatFormat<A, B, C>( this StringBuilder string_builder, String format_string, A arg1, B arg2, C arg3 )
            where A : IConvertible
            where B : IConvertible
            where C : IConvertible
        {
            return string_builder.ConcatFormat<A, B, C, int>( format_string, arg1, arg2, arg3, 0 );
        }

        //! Concatenate a formatted string with four arguments (garbage-free).
        //! Supports "{N}" placeholders (N in 0..3), the "{{" escape, and a crude subset of
        //! standard .NET format specifiers: zero-padding ("{0:000}"), hex ("{0:X}" / "{0:X8}")
        //! and decimal places ("{0:.00}").
        public static StringBuilder ConcatFormat<A, B, C, D>( this StringBuilder string_builder, String format_string, A arg1, B arg2, C arg3, D arg4 )
            where A : IConvertible
            where B : IConvertible
            where C : IConvertible
            where D : IConvertible
        {
            int verbatim_range_start = 0;

            for ( int index = 0; index < format_string.Length; index++ )
            {
                if ( format_string[index] == '{' )
                {
                    // Formatting bit now, so make sure the last block of the string is written out verbatim.
                    if ( verbatim_range_start < index )
                    {
                        // Write out unformatted string portion
                        string_builder.Append( format_string, verbatim_range_start, index - verbatim_range_start );
                    }

                    uint base_value = 10;
                    uint padding = 0;
                    uint decimal_places = 5; // Default decimal places in .NET libs

                    index++;
                    char format_char = format_string[index];
                    if ( format_char == '{' )
                    {
                        // "{{" escape: emit a single literal '{'.
                        // BUGFIX: the original advanced 'index' here, which (combined with the
                        // common 'verbatim_range_start = index + 1' below and the for-loop
                        // increment) silently dropped the character following the escape.
                        // Leaving 'index' on the second '{' makes verbatim output resume
                        // immediately after it.
                        string_builder.Append( '{' );
                    }
                    else
                    {
                        index++;
                        if ( format_string[index] == ':' )
                        {
                            // Extra formatting. This is a crude first pass proof-of-concept. It's not meant to cover
                            // comprehensively what the .NET standard library Format() can do.
                            index++;

                            // Deal with padding
                            while ( format_string[index] == '0' )
                            {
                                index++;
                                padding++;
                            }

                            if ( format_string[index] == 'X' )
                            {
                                index++;

                                // Print in hex
                                base_value = 16;

                                // Specify amount of padding ( "{0:X8}" for example pads hex to eight characters
                                if ( ( format_string[index] >= '0' ) && ( format_string[index] <= '9' ) )
                                {
                                    padding = (uint)( format_string[index] - '0' );
                                    index++;
                                }
                            }
                            else if ( format_string[index] == '.' )
                            {
                                index++;

                                // Specify number of decimal places
                                decimal_places = 0;

                                while ( format_string[index] == '0' )
                                {
                                    index++;
                                    decimal_places++;
                                }
                            }
                        }

                        // Scan through to end bracket
                        while ( format_string[index] != '}' )
                        {
                            index++;
                        }

                        // Have any extended settings now, so just print out the particular argument they wanted
                        switch ( format_char )
                        {
                            case '0': string_builder.ConcatFormatValue<A>( arg1, padding, base_value, decimal_places, true ); break;
                            case '1': string_builder.ConcatFormatValue<B>( arg2, padding, base_value, decimal_places, true ); break;
                            case '2': string_builder.ConcatFormatValue<C>( arg3, padding, base_value, decimal_places, true ); break;
                            case '3': string_builder.ConcatFormatValue<D>( arg4, padding, base_value, decimal_places, true ); break;
                            default: Debug.Assert( false, "Invalid parameter index" ); break;
                        }
                    }

                    // Update the verbatim range, start of a new section now
                    verbatim_range_start = ( index + 1 );
                }
            }

            // Anything verbatim to write out?
            if ( verbatim_range_start < format_string.Length )
            {
                // Write out unformatted string portion
                string_builder.Append( format_string, verbatim_range_start, format_string.Length - verbatim_range_start );
            }

            return string_builder;
        }

        //! The worker method. This does a garbage-free conversion of a generic type, and uses the
        //! garbage-free Concat() (defined in another part of this partial class) to add to the stringbuilder.
        //! NOTE: several TypeCodes (Boolean, Char, SByte, Byte, Decimal, DateTime) are not implemented
        //! and merely print the type name as a placeholder.
        private static void ConcatFormatValue<T>( this StringBuilder string_builder, T arg, uint padding, uint base_value, uint decimal_places, bool thousand_sep ) where T : IConvertible
        {
            switch ( arg.GetTypeCode() )
            {
                case System.TypeCode.String:
                {
                    string_builder.Append( Convert.ToString( arg ) );
                    break;
                }
                case TypeCode.Boolean:
                    MonoBehaviour.print("Boolean");
                    break;
                case TypeCode.Char:
                    MonoBehaviour.print("Char");
                    break;
                case TypeCode.SByte:
                    MonoBehaviour.print("SByte");
                    break;
                case TypeCode.Byte:
                    MonoBehaviour.print("Byte");
                    break;
                case TypeCode.Int16:
                case System.TypeCode.Int32:
                case TypeCode.Int64:
                {
                    string_builder.Concat(arg.ToInt64(System.Globalization.NumberFormatInfo.CurrentInfo), padding, ' ', thousand_sep, base_value);
                    break;
                }
                case TypeCode.UInt16:
                case System.TypeCode.UInt32:
                case TypeCode.UInt64:
                {
                    string_builder.Concat(arg.ToUInt64(System.Globalization.NumberFormatInfo.CurrentInfo), padding, ' ', thousand_sep, base_value);
                    break;
                }
                case System.TypeCode.Single:
                {
                    string_builder.Concat(arg.ToSingle(System.Globalization.NumberFormatInfo.CurrentInfo), decimal_places, padding, '0', thousand_sep);
                    break;
                }
                case TypeCode.Double:
                    string_builder.Concat(arg.ToDouble(System.Globalization.NumberFormatInfo.CurrentInfo), decimal_places, padding, '0', thousand_sep);
                    break;
                case TypeCode.Decimal:
                    MonoBehaviour.print("Decimal");
                    break;
                case TypeCode.DateTime:
                    MonoBehaviour.print("DateTime");
                    break;
                default:
                {
                    MonoBehaviour.print("Unknown parameter type" );
                    break;
                }
            }
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Collections.Generic;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Audio;
using osu.Framework.Audio.Sample;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Input.Events;
using osu.Framework.Logging;
using osu.Framework.Screens;
using osu.Framework.Threading;
using osu.Game.Beatmaps;
using osu.Game.Configuration;
using osu.Game.Graphics.Containers;
using osu.Game.Online.API;
using osu.Game.Overlays;
using osu.Game.Rulesets;
using osu.Game.Rulesets.Mods;
using osu.Game.Rulesets.Scoring;
using osu.Game.Rulesets.UI;
using osu.Game.Scoring;
using osu.Game.Screens.Ranking;
using osu.Game.Skinning;
using osu.Game.Users;

namespace osu.Game.Screens.Play
{
    /// <summary>
    /// The gameplay screen. Loads the beatmap and ruleset, wires up score processing,
    /// fail/pause overlays, and manages the screen lifecycle (restart, completion, exit).
    /// </summary>
    public class Player : ScreenWithBeatmapBackground
    {
        public override bool AllowBackButton => false; // handled by HoldForMenuButton

        protected override UserActivity InitialActivity => new UserActivity.SoloGame(Beatmap.Value.BeatmapInfo, Ruleset.Value);

        public override float BackgroundParallaxAmount => 0.1f;

        public override bool HideOverlaysOnEnter => true;

        public override OverlayActivation InitialOverlayActivationMode => OverlayActivation.UserTriggered;

        /// <summary>
        /// Whether gameplay should pause when the game window focus is lost.
        /// </summary>
        protected virtual bool PauseOnFocusLost => true;

        // Invoked when the user requests a restart; the owning screen is expected to recreate the player.
        public Action RestartRequested;

        public bool HasFailed { get; private set; }

        private Bindable<bool> mouseWheelDisabled;

        private readonly Bindable<bool> storyboardReplacesBackground = new Bindable<bool>();

        public int RestartCount;

        [Resolved]
        private ScoreManager scoreManager { get; set; }

        private RulesetInfo rulesetInfo;

        private Ruleset ruleset;

        private IAPIProvider api;

        private SampleChannel sampleRestart;

        private BreakOverlay breakOverlay;

        protected ScoreProcessor ScoreProcessor { get; private set; }
        protected DrawableRuleset DrawableRuleset { get; private set; }

        protected HUDOverlay HUDOverlay { get; private set; }

        // Loading is considered successful once the drawable ruleset exists and has at least one hit object.
        public bool LoadedBeatmapSuccessfully => DrawableRuleset?.Objects.Any() == true;

        protected GameplayClockContainer GameplayClockContainer { get; private set; }

        protected DimmableStoryboard DimmableStoryboard { get; private set; }
        protected DimmableVideo DimmableVideo { get; private set; }

        [Cached]
        [Cached(Type = typeof(IBindable<IReadOnlyList<Mod>>))]
        protected new readonly Bindable<IReadOnlyList<Mod>> Mods = new Bindable<IReadOnlyList<Mod>>(Array.Empty<Mod>());

        /// <summary>
        /// Whether failing should be allowed.
        /// By default, this checks whether all selected mods allow failing.
        /// </summary>
        protected virtual bool AllowFail => Mods.Value.OfType<IApplicableFailOverride>().All(m => m.AllowFail);

        private readonly bool allowPause;
        private readonly bool showResults;

        /// <summary>
        /// Create a new player instance.
        /// </summary>
        /// <param name="allowPause">Whether pausing should be allowed. If not allowed, attempting to pause will quit.</param>
        /// <param name="showResults">Whether results screen should be pushed on completion.</param>
        public Player(bool allowPause = true, bool showResults = true)
        {
            this.allowPause = allowPause;
            this.showResults = showResults;
        }

        [BackgroundDependencyLoader]
        private void load(AudioManager audio, IAPIProvider api, OsuConfigManager config)
        {
            this.api = api;

            // take local copies of the mods so gameplay-time mutations don't leak back to the selection screen
            Mods.Value = base.Mods.Value.Select(m => m.CreateCopy()).ToArray();

            WorkingBeatmap working = loadBeatmap();

            if (working == null)
                return;

            sampleRestart = audio.Samples.Get(@"Gameplay/restart");

            mouseWheelDisabled = config.GetBindable<bool>(OsuSetting.MouseDisableWheel);

            ScoreProcessor = DrawableRuleset.CreateScoreProcessor();
            ScoreProcessor.Mods.BindTo(Mods);

            if (!ScoreProcessor.Mode.Disabled)
                config.BindWith(OsuSetting.ScoreDisplayMode, ScoreProcessor.Mode);

            InternalChild = GameplayClockContainer = new GameplayClockContainer(working, Mods.Value, DrawableRuleset.GameplayStartTime);

            addUnderlayComponents(GameplayClockContainer);
            addGameplayComponents(GameplayClockContainer, working);
            addOverlayComponents(GameplayClockContainer, working);

            DrawableRuleset.HasReplayLoaded.BindValueChanged(e => HUDOverlay.HoldToQuit.PauseOnFocusLost = !e.NewValue && PauseOnFocusLost, true);

            // bind clock into components that require it
            DrawableRuleset.IsPaused.BindTo(GameplayClockContainer.IsPaused);

            // Bind ScoreProcessor to ourselves
            ScoreProcessor.AllJudged += onCompletion;
            ScoreProcessor.Failed += onFail;

            foreach (var mod in Mods.Value.OfType<IApplicableToScoreProcessor>())
                mod.ApplyToScoreProcessor(ScoreProcessor);
        }

        /// <summary>
        /// Adds the video and storyboard layers, which sit below gameplay.
        /// </summary>
        private void addUnderlayComponents(Container target)
        {
            target.Add(DimmableVideo = new DimmableVideo(Beatmap.Value.Video) { RelativeSizeAxes = Axes.Both });
            target.Add(DimmableStoryboard = new DimmableStoryboard(Beatmap.Value.Storyboard) { RelativeSizeAxes = Axes.Both });
        }

        /// <summary>
        /// Adds the drawable ruleset wrapped in the skinning hierarchy.
        /// </summary>
        private void addGameplayComponents(Container target, WorkingBeatmap working)
        {
            var beatmapSkinProvider = new BeatmapSkinProvidingContainer(working.Skin);

            // the beatmapSkinProvider is used as the fallback source here to allow the ruleset-specific skin implementation
            // full access to all skin sources.
            var rulesetSkinProvider = new SkinProvidingContainer(ruleset.CreateLegacySkinProvider(beatmapSkinProvider));

            // load the skinning hierarchy first.
            // this is intentionally done in two stages to ensure things are in a loaded state before exposing the ruleset to skin sources.
            target.Add(new ScalingContainer(ScalingMode.Gameplay)
                .WithChild(beatmapSkinProvider
                    .WithChild(target = rulesetSkinProvider)));

            target.AddRange(new Drawable[]
            {
                DrawableRuleset,
                new ComboEffects(ScoreProcessor)
            });
        }

        /// <summary>
        /// Adds the HUD, break/skip overlays and the fail/pause/hotkey overlays above gameplay.
        /// </summary>
        private void addOverlayComponents(Container target, WorkingBeatmap working)
        {
            target.AddRange(new[]
            {
                breakOverlay = new BreakOverlay(working.Beatmap.BeatmapInfo.LetterboxInBreaks, ScoreProcessor)
                {
                    Anchor = Anchor.Centre,
                    Origin = Anchor.Centre,
                    Breaks = working.Beatmap.Breaks
                },
                // display the cursor above some HUD elements.
                DrawableRuleset.Cursor?.CreateProxy() ?? new Container(),
                DrawableRuleset.ResumeOverlay?.CreateProxy() ?? new Container(),
                HUDOverlay = new HUDOverlay(ScoreProcessor, DrawableRuleset, Mods.Value)
                {
                    HoldToQuit = { Action = performUserRequestedExit, IsPaused = { BindTarget = GameplayClockContainer.IsPaused } },
                    PlayerSettingsOverlay = { PlaybackSettings = { UserPlaybackRate = { BindTarget = GameplayClockContainer.UserPlaybackRate } } },
                    KeyCounter = { Visible = { BindTarget = DrawableRuleset.HasReplayLoaded } },
                    RequestSeek = GameplayClockContainer.Seek,
                    Anchor = Anchor.Centre,
                    Origin = Anchor.Centre
                },
                new SkipOverlay(DrawableRuleset.GameplayStartTime)
                {
                    RequestSeek = GameplayClockContainer.Seek
                },
                FailOverlay = new FailOverlay
                {
                    OnRetry = Restart,
                    OnQuit = performUserRequestedExit,
                },
                PauseOverlay = new PauseOverlay
                {
                    OnResume = Resume,
                    Retries = RestartCount,
                    OnRetry = Restart,
                    OnQuit = performUserRequestedExit,
                },
                new HotkeyRetryOverlay
                {
                    Action = () =>
                    {
                        if (!this.IsCurrentScreen()) return;

                        fadeOut(true);
                        Restart();
                    },
                },
                new HotkeyExitOverlay
                {
                    Action = () =>
                    {
                        if (!this.IsCurrentScreen()) return;

                        fadeOut(true);
                        performImmediateExit();
                    },
                },
                failAnimation = new FailAnimation(DrawableRuleset) { OnComplete = onFailComplete, }
            });
        }

        /// <summary>
        /// Resolves the working beatmap and constructs <see cref="DrawableRuleset"/> for it.
        /// Returns null (logging an error) when the beatmap can't be used.
        /// </summary>
        private WorkingBeatmap loadBeatmap()
        {
            WorkingBeatmap working = Beatmap.Value;
            if (working is DummyWorkingBeatmap)
                return null;

            try
            {
                var beatmap = working.Beatmap;

                if (beatmap == null)
                    throw new InvalidOperationException("Beatmap was not loaded");

                rulesetInfo = Ruleset.Value ?? beatmap.BeatmapInfo.Ruleset;
                ruleset = rulesetInfo.CreateInstance();

                try
                {
                    DrawableRuleset = ruleset.CreateDrawableRulesetWith(working, Mods.Value);
                }
                catch (BeatmapInvalidForRulesetException)
                {
                    // we may fail to create a DrawableRuleset if the beatmap cannot be loaded with the user's preferred ruleset
                    // let's try again forcing the beatmap's ruleset.
                    rulesetInfo = beatmap.BeatmapInfo.Ruleset;
                    ruleset = rulesetInfo.CreateInstance();

                    DrawableRuleset = ruleset.CreateDrawableRulesetWith(Beatmap.Value, Mods.Value);
                }

                if (!DrawableRuleset.Objects.Any())
                {
                    Logger.Log("Beatmap contains no hit objects!", level: LogLevel.Error);
                    return null;
                }
            }
            catch (Exception e)
            {
                Logger.Error(e, "Could not load beatmap sucessfully!");
                //couldn't load, hard abort!
                return null;
            }

            return working;
        }

        private void performImmediateExit()
        {
            // if a restart has been requested, cancel any pending completion (user has shown intent to restart).
            completionProgressDelegate?.Cancel();

            ValidForResume = false;

            performUserRequestedExit();
        }

        private void performUserRequestedExit()
        {
            if (!this.IsCurrentScreen()) return;

            if (ValidForResume && HasFailed && !FailOverlay.IsPresent)
            {
                // the fail animation is still running; fast-forward it so the fail overlay appears instead of exiting.
                failAnimation.FinishTransforms(true);
                return;
            }

            if (canPause)
                Pause();
            else
                this.Exit();
        }

        /// <summary>
        /// Requests a restart via <see cref="RestartRequested"/> and exits this screen.
        /// </summary>
        public void Restart()
        {
            if (!this.IsCurrentScreen()) return;

            sampleRestart?.Play();
            RestartRequested?.Invoke();
            performImmediateExit();
        }

        private ScheduledDelegate completionProgressDelegate;

        private void onCompletion()
        {
            // Only show the completion screen if the player hasn't failed
            if (ScoreProcessor.HasFailed || completionProgressDelegate != null)
                return;

            ValidForResume = false;

            if (!showResults) return;

            using (BeginDelayedSequence(1000))
            {
                completionProgressDelegate = Schedule(delegate
                {
                    if (!this.IsCurrentScreen()) return;

                    var score = CreateScore();
                    if (DrawableRuleset.ReplayScore == null)
                        scoreManager.Import(score).Wait();

                    this.Push(CreateResults(score));
                });
            }
        }

        /// <summary>
        /// Builds the score for this play; uses the replay's score when one is loaded.
        /// </summary>
        protected virtual ScoreInfo CreateScore()
        {
            var score = DrawableRuleset.ReplayScore?.ScoreInfo ?? new ScoreInfo
            {
                Beatmap = Beatmap.Value.BeatmapInfo,
                Ruleset = rulesetInfo,
                Mods = Mods.Value.ToArray(),
                User = api.LocalUser.Value,
            };

            ScoreProcessor.PopulateScore(score);

            return score;
        }

        // swallow scroll input during active gameplay when the user has disabled the mouse wheel
        protected override bool OnScroll(ScrollEvent e) => mouseWheelDisabled.Value && !GameplayClockContainer.IsPaused.Value;

        protected virtual Results CreateResults(ScoreInfo score) => new SoloResults(score);

        #region Fail Logic

        protected FailOverlay FailOverlay { get; private set; }

        private FailAnimation failAnimation;

        private bool onFail()
        {
            if (!AllowFail)
                return false;

            HasFailed = true;

            // There is a chance that we could be in a paused state as the ruleset's internal clock (see FrameStabilityContainer)
            // could process an extra frame after the GameplayClock is stopped.
            // In such cases we want the fail state to precede a user triggered pause.
            if (PauseOverlay.State.Value == Visibility.Visible)
                PauseOverlay.Hide();

            failAnimation.Start();

            if (Mods.Value.OfType<IApplicableFailOverride>().Any(m => m.RestartOnFail))
                Restart();

            return true;
        }

        // Called back when the transform finishes
        private void onFailComplete()
        {
            GameplayClockContainer.Stop();

            FailOverlay.Retries = RestartCount;
            FailOverlay.Show();
        }

        #endregion

        #region Pause Logic

        public bool IsResuming { get; private set; }

        /// <summary>
        /// The amount of gameplay time after which a second pause is allowed.
        /// </summary>
        private const double pause_cooldown = 1000;

        protected PauseOverlay PauseOverlay { get; private set; }

        private double? lastPauseActionTime;

        private bool canPause =>
            // must pass basic screen conditions (beatmap loaded, instance allows pause)
            LoadedBeatmapSuccessfully && allowPause && ValidForResume
            // replays cannot be paused and exit immediately
            && !DrawableRuleset.HasReplayLoaded.Value
            // cannot pause if we are already in a fail state
            && !HasFailed
            // cannot pause if already paused (or in a cooldown state) unless we are in a resuming state.
            && (IsResuming || (GameplayClockContainer.IsPaused.Value == false && !pauseCooldownActive));

        private bool pauseCooldownActive =>
            lastPauseActionTime.HasValue && GameplayClockContainer.GameplayClock.CurrentTime < lastPauseActionTime + pause_cooldown;

        private bool canResume =>
            // cannot resume from a non-paused state
            GameplayClockContainer.IsPaused.Value
            // cannot resume if we are already in a fail state
            && !HasFailed
            // already resuming
            && !IsResuming;

        public void Pause()
        {
            if (!canPause) return;

            IsResuming = false;
            GameplayClockContainer.Stop();
            PauseOverlay.Show();
            lastPauseActionTime = GameplayClockContainer.GameplayClock.CurrentTime;
        }

        public void Resume()
        {
            if (!canResume) return;

            IsResuming = true;
            PauseOverlay.Hide();

            // breaks and time-based conditions may allow instant resume.
            if (breakOverlay.IsBreakTime.Value || GameplayClockContainer.GameplayClock.CurrentTime < Beatmap.Value.Beatmap.HitObjects.First().StartTime)
                completeResume();
            else
                DrawableRuleset.RequestResume(completeResume);

            void completeResume()
            {
                GameplayClockContainer.Start();
                IsResuming = false;
            }
        }

        #endregion

        #region Screen Logic

        public override void OnEntering(IScreen last)
        {
            base.OnEntering(last);

            if (!LoadedBeatmapSuccessfully)
                return;

            Alpha = 0;
            this
                .ScaleTo(0.7f)
                .ScaleTo(1, 750, Easing.OutQuint)
                .Delay(250)
                .FadeIn(250);

            Background.EnableUserDim.Value = true;
            Background.BlurAmount.Value = 0;

            Background.StoryboardReplacesBackground.BindTo(storyboardReplacesBackground);
            DimmableStoryboard.StoryboardReplacesBackground.BindTo(storyboardReplacesBackground);
            storyboardReplacesBackground.Value = Beatmap.Value.Storyboard.ReplacesBackground && Beatmap.Value.Storyboard.HasDrawable;

            GameplayClockContainer.Restart();
            GameplayClockContainer.FadeInFromZero(750, Easing.OutQuint);

            foreach (var mod in Mods.Value.OfType<IApplicableToHUD>())
                mod.ApplyToHUD(HUDOverlay);
        }

        public override void OnSuspending(IScreen next)
        {
            fadeOut();
            base.OnSuspending(next);
        }

        public override bool OnExiting(IScreen next)
        {
            if (completionProgressDelegate != null && !completionProgressDelegate.Cancelled && !completionProgressDelegate.Completed)
            {
                // proceed to result screen if beatmap already finished playing
                completionProgressDelegate.RunTask();
                return true;
            }

            // ValidForResume is false when restarting
            if (ValidForResume)
            {
                if (pauseCooldownActive && !GameplayClockContainer.IsPaused.Value)
                    // still want to block if we are within the cooldown period and not already paused.
                    return true;
            }

            GameplayClockContainer.ResetLocalAdjustments();

            fadeOut();
            return base.OnExiting(next);
        }

        private void fadeOut(bool instant = false)
        {
            float fadeOutDuration = instant ? 0 : 250;
            this.FadeOut(fadeOutDuration);

            Background.EnableUserDim.Value = false;
            storyboardReplacesBackground.Value = false;
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace Microsoft.Tools.ServiceModel.SvcUtil
{
    using System;
    using System.Globalization;
    using System.Collections.Generic;
    using System.Text.RegularExpressions;
    using System.IO;

    /// <summary>
    /// Kind of value a command-line switch accepts.
    /// </summary>
    internal enum SwitchType
    {
        Flag,           // takes no value; specifying one is an error (see CommandParser.ParseCommand)
        SingletonValue, // takes exactly one value and may appear only once
        ValueList       // takes a value and may be specified multiple times
    }

    /// <summary>
    /// Describes one recognized command-line switch: its full name, its abbreviation and the
    /// kind of value it accepts. Names are stored without the leading '/' or '-' and
    /// lower-cased with the invariant culture so comparisons are case-insensitive.
    /// </summary>
    internal class CommandSwitch
    {
        private readonly string _name;
        private readonly string _abbreviation;
        private readonly SwitchType _switchType;

        internal CommandSwitch(string name, string abbreviation, SwitchType switchType)
        {
            _name = Normalize(name);
            _abbreviation = Normalize(abbreviation);
            _switchType = switchType;
        }

        // Strips a single leading '/' or '-' (if present) and lower-cases invariantly.
        // Centralizes the normalization that was previously duplicated in the constructor
        // and in Equals(string).
        private static string Normalize(string value)
        {
            if ((value[0] == '/') || (value[0] == '-'))
                value = value.Substring(1);
            return value.ToLower(CultureInfo.InvariantCulture);
        }

        internal string Name
        {
            get { return _name; }
        }

#if NotUsed
        // FIX: previously read the nonexistent field `abbreviation`; the field is `_abbreviation`.
        internal string Abbreviation
        {
            get { return _abbreviation; }
        }
#endif

        internal SwitchType SwitchType
        {
            get { return _switchType; }
        }

        /// <summary>
        /// Case-insensitive match of <paramref name="other"/> (with or without a leading
        /// '/' or '-') against this switch's name or abbreviation.
        /// </summary>
        internal bool Equals(string other)
        {
            string temp = Normalize(other);

            // if equal to the full name, we are done
            if (_name.Equals(temp))
                return true;

            // otherwise fall back to the abbreviation
            return _abbreviation.Equals(temp);
        }

        /// <summary>
        /// Returns the switch in <paramref name="switches"/> matching <paramref name="name"/>,
        /// or null when no switch matches.
        /// </summary>
        internal static CommandSwitch FindSwitch(string name, CommandSwitch[] switches)
        {
            foreach (CommandSwitch cs in switches)
                if (cs.Equals(name))
                    return cs;

            return null;
        }
    }
internal class ArgumentDictionary { private Dictionary<string, IList<string>> _contents; internal ArgumentDictionary(int capacity) { _contents = new Dictionary<string, IList<string>>(capacity); } internal void Add(string key, string value) { IList<string> values; if (!ContainsArgument(key)) { values = new List<string>(); Add(key, values); } else values = GetArguments(key); values.Add(value); } internal string GetArgument(string key) { IList<string> values; if (_contents.TryGetValue(key.ToLower(CultureInfo.InvariantCulture), out values)) { #if SM_TOOL Tool.Assert((values.Count == 1), "contains more than one argument please call GetArguments"); #endif return values[0]; } #if SM_TOOL Tool.Assert(false, "argument was not specified please call ContainsArgument to check this"); #endif return null; // unreachable code but the compiler doesn't know this. } internal IList<string> GetArguments(string key) { IList<string> result; if (!_contents.TryGetValue(key.ToLower(CultureInfo.InvariantCulture), out result)) result = new List<string>(); return result; } internal bool ContainsArgument(string key) { return _contents.ContainsKey(key.ToLower(CultureInfo.InvariantCulture)); } internal void Add(string key, IList<string> values) { _contents.Add(key.ToLower(CultureInfo.InvariantCulture), values); } internal int Count { get { return _contents.Count; } } } internal static class CommandParser { internal static ArgumentDictionary ParseCommand(string[] cmd, CommandSwitch[] switches) { ArgumentDictionary arguments; //switches/values from cmd line string arg; //argument to test next CommandSwitch argSwitch; //switch corresponding to that argument string argValue; //value corresponding to that argument int delim; //location of value delimiter (':' or '=') arguments = new ArgumentDictionary(cmd.Length); foreach (string s in cmd) { arg = s; bool argIsFlag = true; //if argument does not start with switch indicator, place into "default" arguments if ((arg[0] != '/') && (arg[0] != '-')) { 
arguments.Add(String.Empty, arg); continue; } //if we have something which begins with '/' or '-', throw if nothing after it if (arg.Length == 1) throw new ArgumentException(SR.Format(SR.ErrSwitchMissing, arg)); //yank switch indicator ('/' or '-') off of command argument arg = arg.Substring(1); //check to make sure delimiter does not start off switch delim = arg.IndexOfAny(new char[] { ':', '=' }); if (delim == 0) throw new ArgumentException(SR.Format(SR.ErrUnexpectedDelimiter)); //if there is no value, than create a null string if (delim == (-1)) argValue = String.Empty; else { //assume valid argument now; must remove value attached to it //must avoid copying delimeter into either arguments argValue = arg.Substring(delim + 1); arg = arg.Substring(0, delim); argIsFlag = false; } //check if this switch exists in the list of possible switches //if no match found, then throw an exception argSwitch = CommandSwitch.FindSwitch(arg.ToLower(CultureInfo.InvariantCulture), switches); if (argSwitch == null) { // Paths start with "/" on Unix, so the arg could potentially be a path. // If we didn't find any matched option, check and see if it's a path. 
string potentialPath = "/" + arg; if (File.Exists(potentialPath)) { arguments.Add(string.Empty, potentialPath); continue; } throw new ArgumentException(SR.Format(SR.ErrUnknownSwitch, arg.ToLower(CultureInfo.InvariantCulture))); } //check if switch is allowed to have a value // if not and a value has been specified, then thrown an exception if (argSwitch.SwitchType == SwitchType.Flag) { if (!argIsFlag) throw new ArgumentException(SR.Format(SR.ErrUnexpectedValue, arg.ToLower(CultureInfo.InvariantCulture))); } else { if (argIsFlag) throw new ArgumentException(SR.Format(SR.ErrExpectedValue, arg.ToLower(CultureInfo.InvariantCulture))); } //check if switch is allowed to be specified multiple times // if not and it has already been specified and a new value has been paresd, throw an exception if (argSwitch.SwitchType != SwitchType.ValueList && arguments.ContainsArgument(argSwitch.Name)) { throw new ArgumentException(SR.Format(SR.ErrSingleUseSwitch, arg.ToLower(CultureInfo.InvariantCulture))); } else { arguments.Add(argSwitch.Name, argValue); } } return arguments; } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Threading.Tasks;
using Xunit;

namespace System.Threading.Tests
{
    /// <summary>
    /// SemaphoreSlim unit tests
    /// </summary>
    public class SemaphoreSlimTests
    {
        /// <summary>
        /// SemaphoreSlim public methods and properties to be tested
        /// </summary>
        private enum SemaphoreSlimActions
        {
            Constructor,
            Wait,
            WaitAsync,
            Release,
            Dispose,
            CurrentCount,
            AvailableWaitHandle
        }

        [Fact]
        public static void RunSemaphoreSlimTest0_Ctor()
        {
            RunSemaphoreSlimTest0_Ctor(0, 10, null);
            RunSemaphoreSlimTest0_Ctor(5, 10, null);
            RunSemaphoreSlimTest0_Ctor(10, 10, null);
        }

        [Fact]
        public static void RunSemaphoreSlimTest0_Ctor_Negative()
        {
            RunSemaphoreSlimTest0_Ctor(10, 0, typeof(ArgumentOutOfRangeException));
            RunSemaphoreSlimTest0_Ctor(10, -1, typeof(ArgumentOutOfRangeException));
            RunSemaphoreSlimTest0_Ctor(-1, 10, typeof(ArgumentOutOfRangeException));
        }

        [Fact]
        public static void RunSemaphoreSlimTest1_Wait()
        {
            // Infinite timeout
            RunSemaphoreSlimTest1_Wait(10, 10, -1, true, null);
            RunSemaphoreSlimTest1_Wait(1, 10, -1, true, null);

            // Zero timeout
            RunSemaphoreSlimTest1_Wait(10, 10, 0, true, null);
            RunSemaphoreSlimTest1_Wait(1, 10, 0, true, null);
            RunSemaphoreSlimTest1_Wait(0, 10, 0, false, null);

            // Positive timeout
            RunSemaphoreSlimTest1_Wait(10, 10, 10, true, null);
            RunSemaphoreSlimTest1_Wait(1, 10, 10, true, null);
            RunSemaphoreSlimTest1_Wait(0, 10, 10, false, null);
        }

        [Fact]
        public static void RunSemaphoreSlimTest1_Wait_NegativeCases()
        {
            // Invalid timeout
            RunSemaphoreSlimTest1_Wait(10, 10, -10, true, typeof(ArgumentOutOfRangeException));
            RunSemaphoreSlimTest1_Wait
               (10, 10, new TimeSpan(0, 0, Int32.MaxValue), true, typeof(ArgumentOutOfRangeException));
        }

        [Fact]
        public static void RunSemaphoreSlimTest1_WaitAsync()
        {
            // Infinite timeout
            RunSemaphoreSlimTest1_WaitAsync(10, 10, -1, true, null);
            RunSemaphoreSlimTest1_WaitAsync(1, 10, -1, true, null);

            // Zero timeout
            RunSemaphoreSlimTest1_WaitAsync(10, 10, 0, true, null);
            RunSemaphoreSlimTest1_WaitAsync(1, 10, 0, true, null);
            RunSemaphoreSlimTest1_WaitAsync(0, 10, 0, false, null);

            // Positive timeout
            RunSemaphoreSlimTest1_WaitAsync(10, 10, 10, true, null);
            RunSemaphoreSlimTest1_WaitAsync(1, 10, 10, true, null);
            RunSemaphoreSlimTest1_WaitAsync(0, 10, 10, false, null);
        }

        [Fact]
        public static void RunSemaphoreSlimTest1_WaitAsync_NegativeCases()
        {
            // Invalid timeout
            RunSemaphoreSlimTest1_WaitAsync(10, 10, -10, true, typeof(ArgumentOutOfRangeException));
            RunSemaphoreSlimTest1_WaitAsync
               (10, 10, new TimeSpan(0, 0, Int32.MaxValue), true, typeof(ArgumentOutOfRangeException));
            RunSemaphoreSlimTest1_WaitAsync2();
        }

        [Fact]
        public static void RunSemaphoreSlimTest2_Release()
        {
            // Valid release count
            RunSemaphoreSlimTest2_Release(5, 10, 1, null);
            RunSemaphoreSlimTest2_Release(0, 10, 1, null);
            RunSemaphoreSlimTest2_Release(5, 10, 5, null);
        }

        [Fact]
        public static void RunSemaphoreSlimTest2_Release_NegativeCases()
        {
            // Invalid release count
            RunSemaphoreSlimTest2_Release(5, 10, 0, typeof(ArgumentOutOfRangeException));
            RunSemaphoreSlimTest2_Release(5, 10, -1, typeof(ArgumentOutOfRangeException));

            // Semaphore Full
            RunSemaphoreSlimTest2_Release(10, 10, 1, typeof(SemaphoreFullException));
            RunSemaphoreSlimTest2_Release(5, 10, 6, typeof(SemaphoreFullException));
            RunSemaphoreSlimTest2_Release(int.MaxValue - 1, int.MaxValue, 10, typeof(SemaphoreFullException));
        }

        [Fact]
        public static void RunSemaphoreSlimTest4_Dispose()
        {
            RunSemaphoreSlimTest4_Dispose(5, 10, null, null);
            RunSemaphoreSlimTest4_Dispose(5, 10, SemaphoreSlimActions.CurrentCount, null);
            RunSemaphoreSlimTest4_Dispose
               (5, 10, SemaphoreSlimActions.Wait, typeof(ObjectDisposedException));
            RunSemaphoreSlimTest4_Dispose
               (5, 10, SemaphoreSlimActions.WaitAsync, typeof(ObjectDisposedException));
            RunSemaphoreSlimTest4_Dispose
               (5, 10, SemaphoreSlimActions.Release, typeof(ObjectDisposedException));
            RunSemaphoreSlimTest4_Dispose
               (5, 10, SemaphoreSlimActions.AvailableWaitHandle, typeof(ObjectDisposedException));
        }

        [Fact]
        public static void RunSemaphoreSlimTest5_CurrentCount()
        {
            RunSemaphoreSlimTest5_CurrentCount(5, 10, null);
            RunSemaphoreSlimTest5_CurrentCount(5, 10, SemaphoreSlimActions.Wait);
            RunSemaphoreSlimTest5_CurrentCount(5, 10, SemaphoreSlimActions.WaitAsync);
            RunSemaphoreSlimTest5_CurrentCount(5, 10, SemaphoreSlimActions.Release);
        }

        [Fact]
        public static void RunSemaphoreSlimTest7_AvailableWaitHandle()
        {
            RunSemaphoreSlimTest7_AvailableWaitHandle(5, 10, null, true);
            RunSemaphoreSlimTest7_AvailableWaitHandle(0, 10, null, false);

            RunSemaphoreSlimTest7_AvailableWaitHandle(5, 10, SemaphoreSlimActions.Wait, true);
            RunSemaphoreSlimTest7_AvailableWaitHandle(1, 10, SemaphoreSlimActions.Wait, false);
            RunSemaphoreSlimTest7_AvailableWaitHandle(5, 10, SemaphoreSlimActions.Wait, true);

            RunSemaphoreSlimTest7_AvailableWaitHandle(5, 10, SemaphoreSlimActions.WaitAsync, true);
            RunSemaphoreSlimTest7_AvailableWaitHandle(1, 10, SemaphoreSlimActions.WaitAsync, false);
            RunSemaphoreSlimTest7_AvailableWaitHandle(5, 10, SemaphoreSlimActions.WaitAsync, true);

            RunSemaphoreSlimTest7_AvailableWaitHandle(0, 10, SemaphoreSlimActions.Release, true);
        }

        [Fact]
        [OuterLoop]
        public static void RunSemaphoreSlimCurrentTests()
        {
            RunSemaphoreSlimTest8_ConcWaitAndRelease
               (5, 1000, 50, 50, 50, 0, 5, 1000);
            RunSemaphoreSlimTest8_ConcWaitAndRelease
                (0, 1000, 50, 25, 25, 25, 0, 5000);
            RunSemaphoreSlimTest8_ConcWaitAndRelease
               (0, 1000, 50, 0, 0, 50, 0, 100);
            RunSemaphoreSlimTest8_ConcWaitAsyncAndRelease
               (5, 1000, 50, 50, 50, 0, 5, 5000);
            RunSemaphoreSlimTest8_ConcWaitAsyncAndRelease
               (0, 1000, 50, 25, 25, 25, 0, 5000);
            RunSemaphoreSlimTest8_ConcWaitAsyncAndRelease
               (0, 1000, 50, 0, 0, 50, 0, 100);
            TestConcurrentWaitAndWaitAsync(10, 10);
            TestConcurrentWaitAndWaitAsync(1, 10);
            TestConcurrentWaitAndWaitAsync(10, 1);
        }

        /// <summary>
        /// Test SemaphoreSlim constructor
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="exceptionType">The type of the thrown exception in case of invalid cases,
        /// null for valid cases</param>
        private static void RunSemaphoreSlimTest0_Ctor(int initial, int maximum, Type exceptionType)
        {
            // FIX: removed dead locals `methodFailed` and `exception` — they were assigned but
            // never read; the xUnit asserts below fully express the expected outcome.
            try
            {
                SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);
                Assert.Equal(initial, semaphore.CurrentCount);
            }
            catch (Exception ex)
            {
                Assert.NotNull(exceptionType);
                Assert.IsType(exceptionType, ex);
            }
        }

        /// <summary>
        /// Test SemaphoreSlim Wait
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="timeout">The timeout parameter for the wait method, it must be either int or TimeSpan</param>
        /// <param name="returnValue">The expected wait return value</param>
        /// <param name="exceptionType">The type of the thrown exception in case of invalid cases,
        /// null for valid cases</param>
        private static void RunSemaphoreSlimTest1_Wait
            (int initial, int maximum, object timeout, bool returnValue, Type exceptionType)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);
            try
            {
                bool result = false;
                if (timeout is TimeSpan)
                {
                    result = semaphore.Wait((TimeSpan)timeout);
                }
                else
                {
                    result = semaphore.Wait((int)timeout);
                }
                Assert.Equal(returnValue, result);
                if (result)
                {
                    Assert.Equal(initial - 1, semaphore.CurrentCount);
                }
            }
            catch (Exception ex)
            {
                Assert.NotNull(exceptionType);
                Assert.IsType(exceptionType, ex);
            }
        }

        /// <summary>
        /// Test SemaphoreSlim WaitAsync
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="timeout">The timeout parameter for the wait method, it must be either int or TimeSpan</param>
        /// <param name="returnValue">The expected wait return value</param>
        /// <param name="exceptionType">The type of the thrown exception in case of invalid cases,
        /// null for valid cases</param>
        private static void RunSemaphoreSlimTest1_WaitAsync
            (int initial, int maximum, object timeout, bool returnValue, Type exceptionType)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);
            try
            {
                bool result = false;
                if (timeout is TimeSpan)
                {
                    result = semaphore.WaitAsync((TimeSpan)timeout).Result;
                }
                else
                {
                    result = semaphore.WaitAsync((int)timeout).Result;
                }
                Assert.Equal(returnValue, result);
                if (result)
                {
                    Assert.Equal(initial - 1, semaphore.CurrentCount);
                }
            }
            catch (Exception ex)
            {
                Assert.NotNull(exceptionType);
                Assert.IsType(exceptionType, ex);
            }
        }

        /// <summary>
        /// Test SemaphoreSlim WaitAsync
        /// The test verifies that SemaphoreSlim.Release() does not execute any user code synchronously.
        /// </summary>
        private static void RunSemaphoreSlimTest1_WaitAsync2()
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(1);
            ThreadLocal<int> counter = new ThreadLocal<int>(() => 0);
            bool nonZeroObserved = false;

            const int asyncActions = 20;
            int remAsyncActions = asyncActions;
            ManualResetEvent mre = new ManualResetEvent(false);

            // Deliberately an async-void delegate: each action awaits the semaphore, bumps a
            // thread-local counter around Release(), and the last one to finish signals the event.
            // A non-zero counter on entry means Release() ran a continuation synchronously.
            Action<int> doWorkAsync = async delegate (int i)
            {
                await semaphore.WaitAsync();
                if (counter.Value > 0)
                {
                    nonZeroObserved = true;
                }
                counter.Value = counter.Value + 1;
                semaphore.Release();
                counter.Value = counter.Value - 1;
                if (Interlocked.Decrement(ref remAsyncActions) == 0)
                    mre.Set();
            };

            semaphore.Wait();
            for (int i = 0; i < asyncActions; i++)
                doWorkAsync(i);
            semaphore.Release();

            mre.WaitOne();

            Assert.False(nonZeroObserved, "RunSemaphoreSlimTest1_WaitAsync2: FAILED. SemaphoreSlim.Release() seems to have synchronously invoked a continuation.");
        }

        /// <summary>
        /// Test SemaphoreSlim Release
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="releaseCount">The release count for the release method</param>
        /// <param name="exceptionType">The type of the thrown exception in case of invalid cases,
        /// null for valid cases</param>
        private static void RunSemaphoreSlimTest2_Release
           (int initial, int maximum, int releaseCount, Type exceptionType)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);
            try
            {
                int oldCount = semaphore.Release(releaseCount);
                Assert.Equal(initial, oldCount);
                Assert.Equal(initial + releaseCount, semaphore.CurrentCount);
            }
            catch (Exception ex)
            {
                Assert.NotNull(exceptionType);
                Assert.IsType(exceptionType, ex);
            }
        }

        /// <summary>
        /// Call specific SemaphoreSlim method or property
        /// </summary>
        /// <param name="semaphore">The SemaphoreSlim instance</param>
        /// <param name="action">The action name</param>
        /// <param name="param">The action parameter, null if it takes no parameters</param>
        /// <returns>The action return value, null if the action returns void</returns>
        private static object CallSemaphoreAction
            (SemaphoreSlim semaphore, SemaphoreSlimActions? action, object param)
        {
            if (action == SemaphoreSlimActions.Wait)
            {
                if (param is TimeSpan)
                {
                    return semaphore.Wait((TimeSpan)param);
                }
                else if (param is int)
                {
                    return semaphore.Wait((int)param);
                }
                semaphore.Wait();
                return null;
            }
            else if (action == SemaphoreSlimActions.WaitAsync)
            {
                if (param is TimeSpan)
                {
                    return semaphore.WaitAsync((TimeSpan)param).Result;
                }
                else if (param is int)
                {
                    return semaphore.WaitAsync((int)param).Result;
                }
                semaphore.WaitAsync().Wait();
                return null;
            }
            else if (action == SemaphoreSlimActions.Release)
            {
                if (param != null)
                {
                    return semaphore.Release((int)param);
                }
                return semaphore.Release();
            }
            else if (action == SemaphoreSlimActions.Dispose)
            {
                semaphore.Dispose();
                return null;
            }
            else if (action == SemaphoreSlimActions.CurrentCount)
            {
                return semaphore.CurrentCount;
            }
            else if (action == SemaphoreSlimActions.AvailableWaitHandle)
            {
                return semaphore.AvailableWaitHandle;
            }
            return null;
        }

        /// <summary>
        /// Test SemaphoreSlim Dispose
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="action">SemaphoreSlim action to be called after Dispose</param>
        /// <param name="exceptionType">The type of the thrown exception in case of invalid cases,
        /// null for valid cases</param>
        private static void RunSemaphoreSlimTest4_Dispose(int initial, int maximum, SemaphoreSlimActions? action, Type exceptionType)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);
            try
            {
                semaphore.Dispose();
                CallSemaphoreAction(semaphore, action, null);
            }
            catch (Exception ex)
            {
                Assert.NotNull(exceptionType);
                Assert.IsType(exceptionType, ex);
            }
        }

        /// <summary>
        /// Test SemaphoreSlim CurrentCount property
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="action">SemaphoreSlim action to be called before CurrentCount</param>
        private static void RunSemaphoreSlimTest5_CurrentCount(int initial, int maximum, SemaphoreSlimActions? action)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);

            CallSemaphoreAction(semaphore, action, null);
            if (action == null)
            {
                Assert.Equal(initial, semaphore.CurrentCount);
            }
            else
            {
                // Release adds one permit; Wait/WaitAsync consume one.
                Assert.Equal(initial + (action == SemaphoreSlimActions.Release ? 1 : -1), semaphore.CurrentCount);
            }
        }

        /// <summary>
        /// Test SemaphoreSlim AvailableWaitHandle property
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="action">SemaphoreSlim action to be called before WaitHandle</param>
        /// <param name="state">The expected wait handle state</param>
        private static void RunSemaphoreSlimTest7_AvailableWaitHandle(int initial, int maximum, SemaphoreSlimActions? action, bool state)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);

            CallSemaphoreAction(semaphore, action, null);

            Assert.NotNull(semaphore.AvailableWaitHandle);
            Assert.Equal(state, semaphore.AvailableWaitHandle.WaitOne(0));
        }

        /// <summary>
        /// Test SemaphoreSlim Wait and Release methods concurrently
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="waitThreads">Number of the threads that call Wait method</param>
        /// <param name="releaseThreads">Number of the threads that call Release method</param>
        /// <param name="succeededWait">Number of succeeded wait threads</param>
        /// <param name="failedWait">Number of failed wait threads</param>
        /// <param name="finalCount">The final semaphore count</param>
        /// <param name="timeout">Wait timeout in milliseconds for each wait thread</param>
        private static void RunSemaphoreSlimTest8_ConcWaitAndRelease(int initial, int maximum,
            int waitThreads, int releaseThreads, int succeededWait, int failedWait, int finalCount, int timeout)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);
            Task[] threads = new Task[waitThreads + releaseThreads];
            int succeeded = 0;
            int failed = 0;
            ManualResetEvent mre = new ManualResetEvent(false);

            // launch threads
            for (int i = 0; i < threads.Length; i++)
            {
                if (i < waitThreads)
                {
                    // We are creating the Task using TaskCreationOptions.LongRunning to
                    // force usage of another thread (which will be the case on the default scheduler
                    // with its current implementation). Without this, the release tasks will likely get
                    // queued behind the wait tasks in the pool, making it very likely that the wait tasks
                    // will starve the very tasks that when run would unblock them.
                    threads[i] = new Task(delegate ()
                    {
                        mre.WaitOne();
                        if (semaphore.Wait(timeout))
                        {
                            Interlocked.Increment(ref succeeded);
                        }
                        else
                        {
                            Interlocked.Increment(ref failed);
                        }
                    }, TaskCreationOptions.LongRunning);
                }
                else
                {
                    threads[i] = new Task(delegate ()
                    {
                        mre.WaitOne();
                        semaphore.Release();
                    });
                }
                threads[i].Start(TaskScheduler.Default);
            }

            mre.Set();
            // wait for all work to be done
            Task.WaitAll(threads);

            // check the number of succeeded and failed waits
            Assert.Equal(succeededWait, succeeded);
            Assert.Equal(failedWait, failed);
            Assert.Equal(finalCount, semaphore.CurrentCount);
        }

        /// <summary>
        /// Test SemaphoreSlim WaitAsync and Release methods concurrently
        /// </summary>
        /// <param name="initial">The initial semaphore count</param>
        /// <param name="maximum">The maximum semaphore count</param>
        /// <param name="waitThreads">Number of the threads that call Wait method</param>
        /// <param name="releaseThreads">Number of the threads that call Release method</param>
        /// <param name="succeededWait">Number of succeeded wait threads</param>
        /// <param name="failedWait">Number of failed wait threads</param>
        /// <param name="finalCount">The final semaphore count</param>
        /// <param name="timeout">Wait timeout in milliseconds for each wait task</param>
        private static void RunSemaphoreSlimTest8_ConcWaitAsyncAndRelease(int initial, int maximum,
            int waitThreads, int releaseThreads, int succeededWait, int failedWait, int finalCount, int timeout)
        {
            SemaphoreSlim semaphore = new SemaphoreSlim(initial, maximum);
            Task[] tasks = new Task[waitThreads + releaseThreads];
            int succeeded = 0;
            int failed = 0;
            ManualResetEvent mre = new ManualResetEvent(false);

            // launch threads
            for (int i = 0; i < tasks.Length; i++)
            {
                if (i < waitThreads)
                {
                    tasks[i] = Task.Run(async delegate
                    {
                        mre.WaitOne();
                        if (await semaphore.WaitAsync(timeout))
                        {
                            Interlocked.Increment(ref succeeded);
                        }
                        else
                        {
                            Interlocked.Increment(ref failed);
                        }
                    });
                }
                else
                {
                    tasks[i] = Task.Run(delegate
                    {
                        mre.WaitOne();
                        semaphore.Release();
                    });
                }
            }

            mre.Set();
            // wait for all work to be done
            Task.WaitAll(tasks);

            Assert.Equal(succeededWait, succeeded);
            Assert.Equal(failedWait, failed);
            Assert.Equal(finalCount, semaphore.CurrentCount);
        }

        // Exercises a mix of synchronous Wait and WaitAsync waiters against one semaphore;
        // half the waiters' worth of permits circulate until every task completes its iterations.
        private static void TestConcurrentWaitAndWaitAsync(int syncWaiters, int asyncWaiters)
        {
            int totalWaiters = syncWaiters + asyncWaiters;

            var semaphore = new SemaphoreSlim(0);
            Task[] tasks = new Task[totalWaiters];

            // FIX: removed dead local `randSeed` and the commented-out Random construction
            // that was its only (inactive) use.
            const int ITERS = 10;

            for (int i = 0; i < syncWaiters; i++)
            {
                tasks[i] = Task.Run(delegate
                {
                    for (int iter = 0; iter < ITERS; iter++)
                    {
                        semaphore.Wait();
                        semaphore.Release();
                    }
                });
            }
            for (int i = syncWaiters; i < totalWaiters; i++)
            {
                tasks[i] = Task.Run(async delegate
                {
                    for (int iter = 0; iter < ITERS; iter++)
                    {
                        await semaphore.WaitAsync();
                        semaphore.Release();
                    }
                });
            }

            semaphore.Release(totalWaiters / 2);
            Task.WaitAll(tasks);
        }
    }
}
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.WindowsAzure; using Microsoft.WindowsAzure.Management.Storage; using Microsoft.WindowsAzure.Management.Storage.Models; namespace Microsoft.WindowsAzure.Management.Storage { /// <summary> /// The Service Management API includes operations for managing the storage /// accounts beneath your subscription. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460790.aspx for /// more information) /// </summary> public static partial class StorageAccountOperationsExtensions { /// <summary> /// The Create Storage Account operation creates a new storage account /// in Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='parameters'> /// Parameters supplied to the Create Storage Account operation. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. 
/// </returns> public static OperationResponse BeginCreating(this IStorageAccountOperations operations, StorageAccountCreateParameters parameters) { try { return operations.BeginCreatingAsync(parameters).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Create Storage Account operation creates a new storage account /// in Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='parameters'> /// Parameters supplied to the Create Storage Account operation. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> public static Task<OperationResponse> BeginCreatingAsync(this IStorageAccountOperations operations, StorageAccountCreateParameters parameters) { return operations.BeginCreatingAsync(parameters, CancellationToken.None); } /// <summary> /// The Check Name Availability operation checks if a storage account /// name is available for use in Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/jj154125.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// The desired storage account name to check for availability. /// </param> /// <returns> /// The response to a storage account check name availability request. 
/// </returns> public static CheckNameAvailabilityResponse CheckNameAvailability(this IStorageAccountOperations operations, string serviceName) { try { return operations.CheckNameAvailabilityAsync(serviceName).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Check Name Availability operation checks if a storage account /// name is available for use in Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/jj154125.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// The desired storage account name to check for availability. /// </param> /// <returns> /// The response to a storage account check name availability request. /// </returns> public static Task<CheckNameAvailabilityResponse> CheckNameAvailabilityAsync(this IStorageAccountOperations operations, string serviceName) { return operations.CheckNameAvailabilityAsync(serviceName, CancellationToken.None); } /// <summary> /// The Create Storage Account operation creates a new storage account /// in Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='parameters'> /// Parameters supplied to the Create Storage Account operation. /// </param> /// <returns> /// The response body contains the status of the specified asynchronous /// operation, indicating whether it has succeeded, is inprogress, or /// has failed. Note that this status is distinct from the HTTP status /// code returned for the Get Operation Status operation itself. 
If /// the asynchronous operation succeeded, the response body includes /// the HTTP status code for the successful request. If the /// asynchronous operation failed, the response body includes the HTTP /// status code for the failed request, and also includes error /// information regarding the failure. /// </returns> public static StorageOperationStatusResponse Create(this IStorageAccountOperations operations, StorageAccountCreateParameters parameters) { try { return operations.CreateAsync(parameters).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Create Storage Account operation creates a new storage account /// in Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='parameters'> /// Parameters supplied to the Create Storage Account operation. /// </param> /// <returns> /// The response body contains the status of the specified asynchronous /// operation, indicating whether it has succeeded, is inprogress, or /// has failed. Note that this status is distinct from the HTTP status /// code returned for the Get Operation Status operation itself. If /// the asynchronous operation succeeded, the response body includes /// the HTTP status code for the successful request. If the /// asynchronous operation failed, the response body includes the HTTP /// status code for the failed request, and also includes error /// information regarding the failure. 
/// </returns> public static Task<StorageOperationStatusResponse> CreateAsync(this IStorageAccountOperations operations, StorageAccountCreateParameters parameters) { return operations.CreateAsync(parameters, CancellationToken.None); } /// <summary> /// The Delete Storage Account operation deletes the specifiedstorage /// account from Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264517.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// The name of the storage account. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> public static OperationResponse Delete(this IStorageAccountOperations operations, string serviceName) { try { return operations.DeleteAsync(serviceName).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Delete Storage Account operation deletes the specifiedstorage /// account from Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264517.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// The name of the storage account. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> public static Task<OperationResponse> DeleteAsync(this IStorageAccountOperations operations, string serviceName) { return operations.DeleteAsync(serviceName, CancellationToken.None); } /// <summary> /// The Get Storage Account Properties operation returns system /// properties for the specified storage account. 
(see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460802.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// Name of the storage account to get. /// </param> /// <returns> /// The Get Storage Account Properties operation response. /// </returns> public static StorageServiceGetResponse Get(this IStorageAccountOperations operations, string serviceName) { try { return operations.GetAsync(serviceName).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Get Storage Account Properties operation returns system /// properties for the specified storage account. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460802.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// Name of the storage account to get. /// </param> /// <returns> /// The Get Storage Account Properties operation response. /// </returns> public static Task<StorageServiceGetResponse> GetAsync(this IStorageAccountOperations operations, string serviceName) { return operations.GetAsync(serviceName, CancellationToken.None); } /// <summary> /// The Get Storage Keys operation returns the primary and secondary /// access keys for the specified storage account. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460785.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// The name of the desired storage account. 
/// </param> /// <returns> /// The primary and secondary access keys for a storage account. /// </returns> public static StorageAccountGetKeysResponse GetKeys(this IStorageAccountOperations operations, string serviceName) { try { return operations.GetKeysAsync(serviceName).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Get Storage Keys operation returns the primary and secondary /// access keys for the specified storage account. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460785.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// The name of the desired storage account. /// </param> /// <returns> /// The primary and secondary access keys for a storage account. /// </returns> public static Task<StorageAccountGetKeysResponse> GetKeysAsync(this IStorageAccountOperations operations, string serviceName) { return operations.GetKeysAsync(serviceName, CancellationToken.None); } /// <summary> /// The List Storage Accounts operation lists the storage accounts /// available under the current subscription. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460787.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <returns> /// The List Storage Accounts operation response. 
/// </returns> public static StorageServiceListResponse List(this IStorageAccountOperations operations) { try { return operations.ListAsync().Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The List Storage Accounts operation lists the storage accounts /// available under the current subscription. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460787.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <returns> /// The List Storage Accounts operation response. /// </returns> public static Task<StorageServiceListResponse> ListAsync(this IStorageAccountOperations operations) { return operations.ListAsync(CancellationToken.None); } /// <summary> /// The Regenerate Keys operation regenerates the primary or secondary /// access key for the specified storage account. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460795.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='parameters'> /// Parameters supplied to the Regenerate Keys operation. /// </param> /// <returns> /// The primary and secondary access keys for a storage account. /// </returns> public static StorageAccountRegenerateKeysResponse RegenerateKeys(this IStorageAccountOperations operations, StorageAccountRegenerateKeysParameters parameters) { try { return operations.RegenerateKeysAsync(parameters).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Regenerate Keys operation regenerates the primary or secondary /// access key for the specified storage account. 
(see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460795.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='parameters'> /// Parameters supplied to the Regenerate Keys operation. /// </param> /// <returns> /// The primary and secondary access keys for a storage account. /// </returns> public static Task<StorageAccountRegenerateKeysResponse> RegenerateKeysAsync(this IStorageAccountOperations operations, StorageAccountRegenerateKeysParameters parameters) { return operations.RegenerateKeysAsync(parameters, CancellationToken.None); } /// <summary> /// The Update Storage Account operation updates the label, the /// description, and enables or disables the geo-replication status /// for a storage account in Windows Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264516.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// Name of the storage account to update. /// </param> /// <param name='parameters'> /// Parameters supplied to the Update Storage Account operation. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> public static OperationResponse Update(this IStorageAccountOperations operations, string serviceName, StorageAccountUpdateParameters parameters) { try { return operations.UpdateAsync(serviceName, parameters).Result; } catch (AggregateException ex) { if (ex.InnerExceptions.Count > 1) { throw; } else { throw ex.InnerException; } } } /// <summary> /// The Update Storage Account operation updates the label, the /// description, and enables or disables the geo-replication status /// for a storage account in Windows Azure. 
(see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264516.aspx /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations. /// </param> /// <param name='serviceName'> /// Name of the storage account to update. /// </param> /// <param name='parameters'> /// Parameters supplied to the Update Storage Account operation. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> public static Task<OperationResponse> UpdateAsync(this IStorageAccountOperations operations, string serviceName, StorageAccountUpdateParameters parameters) { return operations.UpdateAsync(serviceName, parameters, CancellationToken.None); } } }
namespace Nancy.Tests.Unit
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Text;
    using FakeItEasy;
    using Nancy.IO;
    using Xunit;
    using Xunit.Extensions;

    /// <summary>
    /// Unit tests for <see cref="Request"/>: constructor argument validation,
    /// disposal, the "_method" POST override, form/multipart body parsing,
    /// uploaded-file extraction, cookie header parsing and query-string handling.
    /// </summary>
    public class RequestFixture
    {
        [Fact]
        public void Should_dispose_request_stream_when_being_disposed()
        {
            // Given
            // The fake must explicitly implement IDisposable so the call to
            // Dispose() can be intercepted and verified.
            var stream = A.Fake<RequestStream>(x =>
            {
                x.Implements(typeof(IDisposable));
                x.WithArgumentsForConstructor(() => new RequestStream(0, false));
            });
            var url = new Url() { Scheme = "http", Path = "localhost" };
            var request = new Request("GET", url, stream);

            // When
            request.Dispose();

            // Then
            A.CallTo(() => ((IDisposable)stream).Dispose()).MustHaveHappened();
        }

        [Fact]
        public void Should_be_disposable()
        {
            // Given, When, Then
            typeof(Request).ShouldImplementInterface<IDisposable>();
        }

        [Fact]
        public void Should_override_request_method_on_post()
        {
            // Given
            // A "_method" form field on a POST should replace the request method.
            const string bodyContent = "_method=GET";
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            request.Method.ShouldEqual("GET");
        }

        [Theory]
        [InlineData("GET")]
        [InlineData("PUT")]
        [InlineData("DELETE")]
        [InlineData("HEAD")]
        public void Should_only_override_method_on_post(string method)
        {
            // Given
            // The "_method" override must be ignored for every verb except POST.
            const string bodyContent = "_method=TEST";
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded" } }
            };

            // When
            var request = new Request(method, new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            request.Method.ShouldEqual(method);
        }

        [Fact]
        public void Should_throw_argumentoutofrangeexception_when_initialized_with_null_method()
        {
            // Given, When
            var exception = Record.Exception(() => new Request(null, "/", "http"));

            // Then
            exception.ShouldBeOfType<ArgumentOutOfRangeException>();
        }

        [Fact]
        public void Should_throw_argumentoutofrangeexception_when_initialized_with_empty_method()
        {
            // Given, When
            var exception = Record.Exception(() => new Request(string.Empty, "/", "http"));

            // Then
            exception.ShouldBeOfType<ArgumentOutOfRangeException>();
        }

        [Fact]
        public void Should_throw_null_exception_when_initialized_with_null_uri()
        {
            // Given, When
            var exception = Record.Exception(() => new Request("GET", null, "http"));

            // Then
            exception.ShouldBeOfType<ArgumentNullException>();
        }

        [Fact]
        public void Should_set_method_parameter_value_to_method_property_when_initialized()
        {
            // Given
            const string method = "GET";

            // When
            var request = new Request(method, "/", "http");

            // Then
            request.Method.ShouldEqual(method);
        }

        [Fact]
        public void Should_set_uri_parameter_value_to_uri_property_when_initialized()
        {
            // Given
            const string path = "/";

            // When
            var request = new Request("GET", path, "http");

            // Then
            request.Path.ShouldEqual(path);
        }

        [Fact]
        public void Should_set_header_parameter_value_to_header_property_when_initialized()
        {
            // Given
            var headers = new Dictionary<string, IEnumerable<string>>()
            {
                { "content-type", new[] {"foo"} }
            };

            // When
            var request = new Request("GET", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(), headers);

            // Then
            request.Headers.ContentType.ShouldNotBeEmpty();
        }

        [Fact]
        public void Should_set_body_parameter_value_to_body_property_when_initialized()
        {
            // Given
            var body = CreateRequestStream();

            // When
            var request = new Request("GET", new Url { Path = "/", Scheme = "http" }, body, new Dictionary<string, IEnumerable<string>>());

            // Then
            request.Body.ShouldBeSameAs(body);
        }

        [Fact]
        public void Should_set_extract_form_data_from_body_when_content_type_is_x_www_form_urlencoded()
        {
            // Given
            // Body mixes '+' spaces, repeated keys and percent-encoded values.
            const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            ((string)request.Form.name).ShouldEqual("John Doe");
        }

        [Fact]
        public void Should_set_extract_form_data_from_body_when_content_type_is_x_www_form_urlencoded_with_character_set()
        {
            // Given
            // Same as above, but the content-type carries a charset parameter.
            const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded; charset=UTF-8" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            ((string)request.Form.name).ShouldEqual("John Doe");
        }

        [Fact]
        public void Should_set_extracted_form_data_from_body_when_content_type_is_multipart_form_data()
        {
            // Given
            var memory = new MemoryStream(BuildMultipartFormValues(new Dictionary<string, string>
            {
                { "name", "John Doe"},
                { "age", "42"}
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            ((string)request.Form.name).ShouldEqual("John Doe");
            ((string)request.Form.age).ShouldEqual("42");
        }

        // NOTE(review): the four tests below mutate the global
        // StaticConfiguration.CaseSensitive flag without restoring it, so they
        // could bleed into other tests depending on run order — worth confirming.
        [Fact]
        public void Should_respect_case_insensitivity_when_extracting_form_data_from_body_when_content_type_is_x_www_form_urlencoded()
        {
            // Given
            StaticConfiguration.CaseSensitive = false;
            const string bodyContent = "key=value&key=value&KEY=VALUE";
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            // Case-insensitive parsing folds all three values under either casing.
            ((string)request.Form.key).ShouldEqual("value,value,VALUE");
            ((string)request.Form.KEY).ShouldEqual("value,value,VALUE");
        }

        [Fact]
        public void Should_respect_case_sensitivity_when_extracting_form_data_from_body_when_content_type_is_x_www_form_urlencoded()
        {
            // Given
            StaticConfiguration.CaseSensitive = true;
            const string bodyContent = "key=value&key=value&KEY=VALUE";
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            // Case-sensitive parsing keeps "key" and "KEY" as distinct fields.
            ((string)request.Form.key).ShouldEqual("value,value");
            ((string)request.Form.KEY).ShouldEqual("VALUE");
        }

        [Fact]
        public void Should_respect_case_insensitivity_when_extracting_form_data_from_body_when_content_type_is_multipart_form_data()
        {
            // Given
            StaticConfiguration.CaseSensitive = false;
            // A case-sensitive comparer is needed so the dictionary itself can
            // hold both "key" and "KEY" entries.
            var memory = new MemoryStream(BuildMultipartFormValues(new Dictionary<string, string>(StringComparer.InvariantCulture)
            {
                { "key", "value" },
                { "KEY", "VALUE" }
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            ((string)request.Form.key).ShouldEqual("value,VALUE");
            ((string)request.Form.KEY).ShouldEqual("value,VALUE");
        }

        [Fact]
        public void Should_respect_case_sensitivity_when_extracting_form_data_from_body_when_content_type_is_multipart_form_data()
        {
            // Given
            StaticConfiguration.CaseSensitive = true;
            var memory = new MemoryStream(BuildMultipartFormValues(new Dictionary<string, string>(StringComparer.InvariantCulture)
            {
                { "key", "value" },
                { "KEY", "VALUE" }
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            ((string)request.Form.key).ShouldEqual("value");
            ((string)request.Form.KEY).ShouldEqual("VALUE");
        }

        [Fact]
        public void Should_set_extracted_files_to_files_collection_when_body_content_type_is_multipart_form_data()
        {
            // Given
            var memory = new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
            {
                { "test", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            request.Files.ShouldHaveCount(1);
        }

        [Fact]
        public void Should_set_content_type_on_file_extracted_from_multipart_form_data_body()
        {
            // Given
            var memory = new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
            {
                { "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            request.Files.First().ContentType.ShouldEqual("content/type");
        }

        [Fact]
        public void Should_set_name_on_file_extracted_from_multipart_form_data_body()
        {
            // Given
            var memory = new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
            {
                { "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            request.Files.First().Name.ShouldEqual("sample.txt");
        }

        [Fact]
        public void Should_value_on_file_extracted_from_multipart_form_data_body()
        {
            // Given
            var memory = new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
            {
                { "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            GetStringValue(request.Files.First().Value).ShouldEqual("some test content");
        }

        [Fact]
        public void Should_set_key_on_file_extracted_from_multipart_data_body()
        {
            // Given
            // Item3 of the tuple is the form field name, which becomes the Key.
            var memory = new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
            {
                { "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "fieldname")}
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            request.Files.First().Key.ShouldEqual("fieldname");
        }

        // Reads an uploaded file's entire stream back as a string for assertions.
        private static string GetStringValue(Stream stream)
        {
            var reader = new StreamReader(stream);
            return reader.ReadToEnd();
        }

        // NOTE(review): despite the name, this test only reads Form once —
        // presumably it guards against body-position bugs on re-access; verify.
        [Fact]
        public void Should_be_able_to_invoke_form_repeatedly()
        {
            const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
            var memory = new MemoryStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            ((string)request.Form.name).ShouldEqual("John Doe");
        }

        [Fact]
        public void Should_throw_argumentoutofrangeexception_when_initialized_with_null_protocol()
        {
            // Given, When
            var exception = Record.Exception(() => new Request("GET", "/", null));

            // Then
            exception.ShouldBeOfType<ArgumentOutOfRangeException>();
        }

        [Fact]
        public void Should_throw_argumentoutofrangeexception_when_initialized_with_an_empty_protocol()
        {
            // Given, When
            var exception = Record.Exception(() => new Request("GET", "/", string.Empty));

            // Then
            exception.ShouldBeOfType<ArgumentOutOfRangeException>();
        }

        [Fact]
        public void Should_set_protocol_parameter_value_to_protocol_property_when_initialized()
        {
            // Given
            const string protocol = "http";

            // When
            var request = new Request("GET", "/", protocol);

            // Then
            request.Url.Scheme.ShouldEqual(protocol);
        }

        [Fact]
        public void Should_split_cookie_in_two_parts_only()
        {
            // Given, when
            // The cookie value itself contains '=' characters, so the parser
            // must split on the first '=' only.
            var cookieName = "_nc";
            var cookieData = "Y+M3rcC/7ssXvHTx9pwCbwQVV4g=sp0hUZVApYgGbKZIU4bvXbBCVl9fhSEssEXSGdrt4jVag6PO1oed8lSd+EJD1nzWx4OTTCTZKjYRWeHE97QVND4jJIl+DuKRgJnSl3hWI5gdgGjcxqCSTvMOMGmW3NHLVyKpajGD8tq1DXhXMyXHjTzrCAYl8TGzwyJJGx/gd7VMJeRbAy9JdHOxEUlCKUnPneWN6q+/ITFryAa5hAdfcjXmh4Fgym75whKOMkWO+yM2icdsciX0ShcvnEQ/bXcTHTya6d7dJVfZl7qQ8AgIQv8ucQHxD3NxIvHNPBwms2ClaPds0HG5N+7pu7eMSFZjUHpDrrCnFvYN+JDiG3GMpf98LuCCvxemvipJo2MUkY4J1LvaDFoWA5tIxAfItZJkSIW2d8JPDwFk8OHJy8zhyn8AjD2JFqWaUZr4y9KZOtgI0V0Qlq0mS3mDSlLn29xapgoPHBvykwQjR6TwF2pBLpStsfZa/tXbEv2mc3VO3CnErIA1lEfKNqn9C/Dw6hqW";
            var headers = new Dictionary<string, IEnumerable<string>>();
            var cookies = new List<string>();
            cookies.Add(string.Format("{0}={1}", cookieName, cookieData));
            headers.Add("cookie", cookies);
            var newUrl = new Url
            {
                Path = "/"
            };
            var request = new Request("GET", newUrl, null, headers);

            // Then
            request.Cookies[cookieName].ShouldEqual(cookieData);
        }

        [Fact]
        public void Should_split_cookie_in_two_parts_with_secure_attribute()
        {
            // Given, when
            const string cookieName = "path";
            const string cookieData = "/";
            var headers = new Dictionary<string, IEnumerable<string>>();
            var cookies = new List<string> { string.Format("{0}={1}; Secure", cookieName, cookieData) };
            headers.Add("cookie", cookies);
            var newUrl = new Url
            {
                Path = "/"
            };
            var request = new Request("GET", newUrl, null, headers);

            // Then
            request.Cookies[cookieName].ShouldEqual(cookieData);
        }

        [Fact]
        public void Should_split_cookie_in_two_parts_with_httponly_and_secure_attribute()
        {
            // Given, when
            const string cookieName = "path";
            const string cookieData = "/";
            var headers = new Dictionary<string, IEnumerable<string>>();
            var cookies = new List<string> { string.Format("{0}={1}; HttpOnly; Secure", cookieName, cookieData) };
            headers.Add("cookie", cookies);
            var newUrl = new Url
            {
                Path = "/"
            };
            var request = new Request("GET", newUrl, null, headers);

            // Then
            request.Cookies[cookieName].ShouldEqual(cookieData);
        }

        [Fact]
        public void Should_split_cookie_in_two_parts_with_httponly_and_secure_attribute_ignoring_case()
        {
            // Given, when
            const string cookieName = "path";
            const string cookieData = "/";
            var headers = new Dictionary<string, IEnumerable<string>>();
            var cookies = new List<string> { string.Format("{0}={1}; httponly; secure", cookieName, cookieData) };
            headers.Add("cookie", cookies);
            var newUrl = new Url
            {
                Path = "/"
            };
            var request = new Request("GET", newUrl, null, headers);

            // Then
            request.Cookies[cookieName].ShouldEqual(cookieData);
        }

        [Fact]
        public void Should_split_cookie_in_two_parts_with_httponly_attribute()
        {
            // Given, when
            const string cookieName = "path";
            const string cookieData = "/";
            var headers = new Dictionary<string, IEnumerable<string>>();
            var cookies = new List<string> { string.Format("{0}={1}; HttpOnly", cookieName, cookieData) };
            headers.Add("cookie", cookies);
            var newUrl = new Url
            {
                Path = "/"
            };
            var request = new Request("GET", newUrl, null, headers);

            // Then
            request.Cookies[cookieName].ShouldEqual(cookieData);
        }

        [Fact]
        public void Should_add_attribute_in_cookie_as_empty_value()
        {
            // Given, when
            // An unrecognised attribute is surfaced as a cookie with an empty value.
            const string cookieName = "path";
            const string cookieData = "/";
            const string cookieAttribute = "SomeAttribute";
            var headers = new Dictionary<string, IEnumerable<string>>();
            var cookies = new List<string> { string.Format("{0}={1}; {2}", cookieName, cookieData, cookieAttribute) };
            headers.Add("cookie", cookies);
            var newUrl = new Url
            {
                Path = "/"
            };
            var request = new Request("GET", newUrl, null, headers);

            // Then
            request.Cookies[cookieName].ShouldEqual(cookieData);
            request.Cookies[cookieAttribute].ShouldEqual(string.Empty);
        }

        [Fact]
        public void Should_move_request_body_position_to_zero_after_parsing_url_encoded_data()
        {
            // Given
            const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(bodyContent);
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded; charset=UTF-8" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            // Parsing the form must rewind the body so later consumers can re-read it.
            memory.Position.ShouldEqual(0L);
        }

        [Fact]
        public void Should_move_request_body_position_to_zero_after_parsing_multipart_encoded_data()
        {
            // Given
            var memory = new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
            {
                { "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
            }));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            memory.Position.ShouldEqual(0L);
        }

        [Fact]
        public void Should_preserve_all_values_when_multiple_are_posted_using_same_name_after_parsing_multipart_encoded_data()
        {
            // Given
            var memory = new MemoryStream(BuildMultipartFormValues(
                new KeyValuePair<string, string>("age", "32"),
                new KeyValuePair<string, string>("age", "42"),
                new KeyValuePair<string, string>("age", "52")
            ));
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

            // Then
            // Repeated field names are joined into one comma-separated value.
            ((string)request.Form.age).ShouldEqual("32,42,52");
        }

        [Fact]
        public void Should_limit_the_amount_of_form_fields_parsed()
        {
            // Given
            // Build 10 more fields than the configured parsing limit.
            var sb = new StringBuilder();
            for (int i = 0; i < StaticConfiguration.RequestQueryFormMultipartLimit + 10; i++)
            {
                if (i > 0)
                {
                    sb.Append('&');
                }
                sb.AppendFormat("Field{0}=Value{0}", i);
            }
            var memory = CreateRequestStream();
            var writer = new StreamWriter(memory);
            writer.Write(sb.ToString());
            writer.Flush();
            memory.Position = 0;
            var headers = new Dictionary<string, IEnumerable<string>>
            {
                { "content-type", new[] { "application/x-www-form-urlencoded" } }
            };

            // When
            var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);

            // Then
            ((IEnumerable<string>)request.Form.GetDynamicMemberNames()).Count().ShouldEqual(StaticConfiguration.RequestQueryFormMultipartLimit);
        }

        [Fact]
        public void Should_limit_the_amount_of_querystring_fields_parsed()
        {
            // Given
            var sb = new StringBuilder();
            for (int i = 0; i < StaticConfiguration.RequestQueryFormMultipartLimit + 10; i++)
            {
                if (i > 0)
                {
                    sb.Append('&');
                }
                sb.AppendFormat("Field{0}=Value{0}", i);
            }
            var memory = CreateRequestStream();

            // When
            var request = new Request("GET", new Url { Path = "/", Scheme = "http", Query = sb.ToString() }, memory, new Dictionary<string, IEnumerable<string>>());

            // Then
            ((IEnumerable<string>)request.Query.GetDynamicMemberNames()).Count().ShouldEqual(StaticConfiguration.RequestQueryFormMultipartLimit);
        }

        [Fact]
        public void Should_change_empty_path_to_root()
        {
            var request = new Request("GET", "", "http");

            request.Path.ShouldEqual("/");
        }

        [Fact]
        public void Should_replace_value_of_query_key_without_value_with_true()
        {
            // Given
            var memory = CreateRequestStream();

            // When
            var request = new Request("GET", new Url { Path = "/", Scheme = "http", Query = "key1" }, memory);

            // Then
            // A value-less key is truthy as a bool but keeps its name as a string.
            ((bool)request.Query.key1).ShouldBeTrue();
            ((string)request.Query.key1).ShouldEqual("key1");
        }

        [Fact]
        public void Should_not_replace_equal_key_value_query_with_bool()
        {
            // Given
            var memory = CreateRequestStream();

            // When
            var request = new Request("GET", new Url { Path = "/", Scheme = "http", Query = "key1=key1" }, memory);

            // Then
            ShouldAssertExtensions.ShouldBeOfType<string>(request.Query["key1"].Value);
        }

        // Creates an empty RequestStream backed by a fresh MemoryStream.
        private static RequestStream CreateRequestStream()
        {
            return CreateRequestStream(new MemoryStream());
        }

        // Wraps an existing stream in a RequestStream.
        private static RequestStream CreateRequestStream(Stream stream)
        {
            return RequestStream.FromStream(stream);
        }

        // Builds a raw multipart/form-data body (ASCII bytes) for simple fields
        // using the fixed boundary "----NancyFormBoundary".
        private static byte[] BuildMultipartFormValues(params KeyValuePair<string, string>[] values)
        {
            var boundaryBuilder = new StringBuilder();

            foreach (var pair in values)
            {
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.Append("--");
                boundaryBuilder.Append("----NancyFormBoundary");
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.AppendFormat("Content-Disposition: form-data; name=\"{0}\"", pair.Key);
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.Append(pair.Value);
            }

            boundaryBuilder.Append('\r');
            boundaryBuilder.Append('\n');
            boundaryBuilder.Append("------NancyFormBoundary--");

            var bytes = Encoding.ASCII.GetBytes(boundaryBuilder.ToString());

            return bytes;
        }

        // Dictionary convenience overload for BuildMultipartFormValues.
        private static byte[] BuildMultipartFormValues(Dictionary<string, string> formValues)
        {
            var pairs = formValues.Keys.Select(key => new KeyValuePair<string, string>(key, formValues[key]));

            return BuildMultipartFormValues(pairs.ToArray());
        }

        // Builds a raw multipart/form-data body containing file parts.
        // Dictionary key = filename; tuple = (content type, file content, field name).
        private static byte[] BuildMultipartFileValues(Dictionary<string, Tuple<string, string, string>> formValues)
        {
            var boundaryBuilder = new StringBuilder();

            foreach (var key in formValues.Keys)
            {
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.Append("--");
                boundaryBuilder.Append("----NancyFormBoundary");
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.AppendFormat("Content-Disposition: form-data; name=\"{1}\"; filename=\"{0}\"", key, formValues[key].Item3);
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.AppendFormat("Content-Type: {0}", formValues[key].Item1);
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.Append('\r');
                boundaryBuilder.Append('\n');
                boundaryBuilder.Append(formValues[key].Item2);
            }

            boundaryBuilder.Append('\r');
            boundaryBuilder.Append('\n');
            boundaryBuilder.Append("------NancyFormBoundary--");

            var bytes = Encoding.ASCII.GetBytes(boundaryBuilder.ToString());

            return bytes;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Linq; // FIX: required for Any/Select/Max/Enumerable.Empty used throughout this file
using System.Text;

namespace System.CommandLine
{
    /// <summary>
    /// Renders help text for an <see cref="ArgumentSyntax"/> definition: either a
    /// command list (when the application defines commands but none is active) or
    /// the usage/options page for a single command, word-wrapped to a maximum width.
    /// </summary>
    internal static class HelpTextGenerator
    {
        /// <summary>
        /// Generates the complete help text for <paramref name="argumentSyntax"/>,
        /// wrapped to <paramref name="maxWidth"/> columns.
        /// </summary>
        public static string Generate(ArgumentSyntax argumentSyntax, int maxWidth)
        {
            // Show the command list only when commands exist and none has been selected.
            var forCommandList = argumentSyntax.ActiveCommand == null &&
                                 argumentSyntax.Commands.Any();

            var page = forCommandList
                        ? GetCommandListHelp(argumentSyntax)
                        : GetCommandHelp(argumentSyntax, argumentSyntax.ActiveCommand);

            var sb = new StringBuilder();
            sb.WriteHelpPage(page, maxWidth);
            return sb.ToString();
        }

        // One logical help page: the usage line elements plus the header/text rows below it.
        private struct HelpPage
        {
            public string ApplicationName;
            public IEnumerable<string> SyntaxElements;
            public IReadOnlyList<HelpRow> Rows;
        }

        // One row of the two-column (header, description) table.
        private struct HelpRow
        {
            public string Header;
            public string Text;
        }

        // Writes the usage line followed (when rows exist) by a blank line and the row table.
        private static void WriteHelpPage(this StringBuilder sb, HelpPage page, int maxWidth)
        {
            sb.WriteUsage(page.ApplicationName, page.SyntaxElements, maxWidth);

            if (!page.Rows.Any())
                return;

            sb.AppendLine();
            sb.WriteRows(page.Rows, maxWidth);
            sb.AppendLine();
        }

        // Writes "usage: <app> <elements...>" with the elements word-wrapped and
        // continuation lines indented to align under the first element.
        private static void WriteUsage(this StringBuilder sb, string applicationName, IEnumerable<string> syntaxElements, int maxWidth)
        {
            var usageHeader = string.Format(Strings.HelpUsageOfApplicationFmt, applicationName);
            sb.Append(usageHeader);

            if (syntaxElements.Any())
                sb.Append(@" ");

            var syntaxIndent = usageHeader.Length + 1;
            var syntaxMaxWidth = maxWidth - syntaxIndent;

            sb.WriteWordWrapped(syntaxElements, syntaxIndent, syntaxMaxWidth);
        }

        // Writes the two-column table: headers padded to a shared column, help text wrapped after it.
        private static void WriteRows(this StringBuilder sb, IReadOnlyList<HelpRow> rows, int maxWidth)
        {
            const int indent = 4;
            var maxColumnWidth = rows.Select(r => r.Header.Length).Max();
            var helpStartColumn = maxColumnWidth + 2 * indent;

            var maxHelpWidth = maxWidth - helpStartColumn;
            if (maxHelpWidth < 0)
                maxHelpWidth = maxWidth; // terminal narrower than the header column: fall back to full width

            foreach (var row in rows)
            {
                var headerStart = sb.Length;
                sb.Append(' ', indent);
                sb.Append(row.Header);

                var headerLength = sb.Length - headerStart;
                var requiredSpaces = helpStartColumn - headerLength;
                sb.Append(' ', requiredSpaces);

                var words = SplitWords(row.Text);
                sb.WriteWordWrapped(words, helpStartColumn, maxHelpWidth);
            }
        }

        // Appends the word-wrapped lines; continuation lines are indented by
        // <paramref name="indent"/>. Always terminates the current line, even when
        // there are no words. (Parameter renamed from misspelled 'maxidth'.)
        private static void WriteWordWrapped(this StringBuilder sb, IEnumerable<string> words, int indent, int maxWidth)
        {
            var helpLines = WordWrapLines(words, maxWidth);
            var isFirstHelpLine = true;

            foreach (var helpLine in helpLines)
            {
                if (isFirstHelpLine)
                    isFirstHelpLine = false;
                else
                    sb.Append(' ', indent);

                sb.AppendLine(helpLine);
            }

            if (isFirstHelpLine)
                sb.AppendLine();
        }

        // Help page listing all available commands.
        private static HelpPage GetCommandListHelp(ArgumentSyntax argumentSyntax)
        {
            return new HelpPage
            {
                ApplicationName = argumentSyntax.ApplicationName,
                SyntaxElements = GetGlobalSyntax(),
                Rows = GetCommandRows(argumentSyntax).ToArray()
            };
        }

        // Help page for a single command (or the command-less application when command is null).
        private static HelpPage GetCommandHelp(ArgumentSyntax argumentSyntax, ArgumentCommand command)
        {
            return new HelpPage
            {
                ApplicationName = argumentSyntax.ApplicationName,
                SyntaxElements = GetCommandSyntax(argumentSyntax, command),
                Rows = GetArgumentRows(argumentSyntax, command).ToArray()
            };
        }

        private static IEnumerable<string> GetGlobalSyntax()
        {
            yield return @"<command>";
            yield return @"[<args>]";
        }

        // Usage elements for one command: name, visible options, optional "--" separator, parameters.
        private static IEnumerable<string> GetCommandSyntax(ArgumentSyntax argumentSyntax, ArgumentCommand command)
        {
            if (command != null)
                yield return command.Name;

            foreach (var option in argumentSyntax.GetOptions(command).Where(o => !o.IsHidden))
                yield return GetOptionSyntax(option);

            if (argumentSyntax.GetParameters(command).All(p => p.IsHidden))
                yield break;

            // "--" only makes sense when there are visible options to separate from parameters.
            if (argumentSyntax.GetOptions(command).Any(o => !o.IsHidden))
                yield return @"[--]";

            foreach (var parameter in argumentSyntax.GetParameters(command).Where(o => !o.IsHidden))
                yield return GetParameterSyntax(parameter);
        }

        // e.g. "[-o <arg>]", "[--list [arg]...]"
        private static string GetOptionSyntax(Argument option)
        {
            var sb = new StringBuilder();

            sb.Append(@"[");
            sb.Append(option.GetDisplayName());

            if (!option.IsFlag)
                sb.Append(option.IsRequired ? @" <arg>" : @" [arg]");

            if (option.IsList)
                sb.Append(@"...");

            sb.Append(@"]");

            return sb.ToString();
        }

        // e.g. "file", "file..."
        private static string GetParameterSyntax(Argument parameter)
        {
            var sb = new StringBuilder();

            sb.Append(parameter.GetDisplayName());

            if (parameter.IsList)
                sb.Append(@"...");

            return sb.ToString();
        }

        private static IEnumerable<HelpRow> GetCommandRows(ArgumentSyntax argumentSyntax)
        {
            return argumentSyntax.Commands
                                 .Where(c => !c.IsHidden)
                                 .Select(c => new HelpRow { Header = c.Name, Text = c.Help });
        }

        private static IEnumerable<HelpRow> GetArgumentRows(ArgumentSyntax argumentSyntax, ArgumentCommand command)
        {
            return argumentSyntax.GetArguments(command)
                                 .Where(a => !a.IsHidden)
                                 .Select(a => new HelpRow { Header = GetArgumentRowHeader(a), Text = a.Help });
        }

        // Joins all display names ("-o, --output") and appends arg/list decorations.
        private static string GetArgumentRowHeader(Argument argument)
        {
            var sb = new StringBuilder();

            foreach (var displayName in argument.GetDisplayNames())
            {
                if (sb.Length > 0)
                    sb.Append(@", ");

                sb.Append(displayName);
            }

            if (argument.IsOption && !argument.IsFlag)
                sb.Append(argument.IsRequired ? @" <arg>" : @" [arg]");

            if (argument.IsList)
                sb.Append(@"...");

            return sb.ToString();
        }

        // Greedy word wrap: emits lines no longer than maxWidth, except that a single
        // token longer than maxWidth is emitted on its own (never split).
        private static IEnumerable<string> WordWrapLines(IEnumerable<string> tokens, int maxWidth)
        {
            var sb = new StringBuilder();

            foreach (var token in tokens)
            {
                var newLength = sb.Length == 0 ? token.Length : sb.Length + 1 + token.Length;
                if (newLength > maxWidth)
                {
                    if (sb.Length == 0)
                    {
                        // Oversized token: emit as-is rather than splitting mid-token.
                        yield return token;
                        continue;
                    }

                    yield return sb.ToString();
                    sb.Clear();
                }

                if (sb.Length > 0)
                    sb.Append(@" ");

                sb.Append(token);
            }

            if (sb.Length > 0)
                yield return sb.ToString();
        }

        private static IEnumerable<string> SplitWords(string text)
        {
            return string.IsNullOrEmpty(text)
                    ? Enumerable.Empty<string>()
                    : text.Split(' ');
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Funq;
using ServiceStack.CacheAccess;
using ServiceStack.CacheAccess.Providers;
using ServiceStack.Common;
using ServiceStack.Common.Web;
using ServiceStack.Html;
using ServiceStack.IO;
using ServiceStack.Messaging;
using ServiceStack.MiniProfiler;
using ServiceStack.ServiceHost;
using ServiceStack.VirtualPath;
using ServiceStack.ServiceModel.Serialization;
using ServiceStack.WebHost.Endpoints.Extensions;
using ServiceStack.WebHost.Endpoints.Formats;
using ServiceStack.WebHost.Endpoints.Support;
using ServiceStack.WebHost.Endpoints.Utils;

namespace ServiceStack.WebHost.Endpoints
{
    /// <summary>
    /// Process-wide static host state: configuration, registered plugins, and the
    /// request/response filter pipelines. Everything here is static mutable state;
    /// it is initialized via ConfigureHost/AfterInit during app startup.
    /// NOTE(review): not safe to mutate after startup from multiple threads — confirm
    /// callers only write during AppHost initialization.
    /// </summary>
    public class EndpointHost
    {
        // The owning application host; set by ConfigureHost, cleared by Dispose.
        public static IAppHost AppHost { get; internal set; }

        public static IContentTypeFilter ContentTypeFilter { get; set; }

        // Filters run before the request DTO is deserialized.
        public static List<Action<IHttpRequest, IHttpResponse>> RawRequestFilters { get; private set; }

        // Filters run with the deserialized request DTO, before the service executes.
        public static List<Action<IHttpRequest, IHttpResponse, object>> RequestFilters { get; private set; }

        // Filters run with the service response, before it is written out.
        public static List<Action<IHttpRequest, IHttpResponse, object>> ResponseFilters { get; private set; }

        public static List<IViewEngine> ViewEngines { get; set; }

        //TODO: rename to UncaughtExceptionsHandler
        public static HandleUncaughtExceptionDelegate ExceptionHandler { get; set; }

        //TODO: rename to ServiceExceptionsHandler
        public static HandleServiceExceptionDelegate ServiceExceptionHandler { get; set; }

        public static List<HttpHandlerResolverDelegate> CatchAllHandlers { get; set; }

        // Flips to true in AfterInit once the registered Plugins have been loaded;
        // after that, AddPlugin loads new plugins immediately instead of queuing them.
        private static bool pluginsLoaded = false;

        public static List<IPlugin> Plugins { get; set; }

        public static IVirtualPathProvider VirtualPathProvider { get; set; }

        public static DateTime StartedAt { get; set; }

        public static DateTime ReadyAt { get; set; }

        // Re-initializes all static collections and installs the default plugin set
        // and a minimal default Config.
        private static void Reset()
        {
            ContentTypeFilter = HttpResponseFilter.Instance;
            RawRequestFilters = new List<Action<IHttpRequest, IHttpResponse>>();
            RequestFilters = new List<Action<IHttpRequest, IHttpResponse, object>>();
            ResponseFilters = new List<Action<IHttpRequest, IHttpResponse, object>>();
            ViewEngines = new List<IViewEngine>();
            CatchAllHandlers = new List<HttpHandlerResolverDelegate>();

            Plugins = new List<IPlugin> {
                new HtmlFormat(),
                new CsvFormat(),
                new MarkdownFormat(),
                new PredefinedRoutesFeature(),
                new MetadataFeature(),
            };

            //Default Config for projects that want to use components but not WebFramework (e.g. MVC)
            Config = new EndpointHostConfig(
                "Empty Config",
                new ServiceManager(new Container(), new ServiceController(null)));
        }

        // Pre user config
        // Resets all state, then binds this process to the given app host/service manager.
        public static void ConfigureHost(IAppHost appHost, string serviceName, ServiceManager serviceManager)
        {
            Reset();
            AppHost = appHost;

            EndpointHostConfig.Instance.ServiceName = serviceName;
            EndpointHostConfig.Instance.ServiceManager = serviceManager;

            var config = EndpointHostConfig.Instance;
            Config = config; // avoid cross-dependency on Config setter

            VirtualPathProvider = new FileSystemVirtualPathProvider(AppHost, Config.WebHostPhysicalPath);

            // DebugMode follows the build configuration of the app host assembly.
            Config.DebugMode = appHost.GetType().Assembly.IsDebugBuild();
            if (Config.DebugMode)
            {
                Plugins.Add(new RequestInfoFeature());
            }
        }

        // Config has changed
        // Propagates config-derived settings to the metadata config and JSON serializers.
        private static void ApplyConfigChanges()
        {
            config.ServiceEndpointsMetadataConfig = ServiceEndpointsMetadataConfig.Create(config.ServiceStackHandlerFactoryPath);

            JsonDataContractSerializer.Instance.UseBcl = config.UseBclJsonSerializers;
            JsonDataContractDeserializer.Instance.UseBcl = config.UseBclJsonSerializers;
        }

        //After configure called
        // Applies feature flags (hiding formats / removing plugins), installs the default
        // exception handler, loads plugins and default cache/MQ registrations.
        public static void AfterInit()
        {
            StartedAt = DateTime.UtcNow;

            if (config.EnableFeatures != Feature.All)
            {
                // Hide metadata entries for any disabled wire formats.
                if ((Feature.Xml & config.EnableFeatures) != Feature.Xml)
                    config.IgnoreFormatsInMetadata.Add("xml");
                if ((Feature.Json & config.EnableFeatures) != Feature.Json)
                    config.IgnoreFormatsInMetadata.Add("json");
                if ((Feature.Jsv & config.EnableFeatures) != Feature.Jsv)
                    config.IgnoreFormatsInMetadata.Add("jsv");
                if ((Feature.Csv & config.EnableFeatures) != Feature.Csv)
                    config.IgnoreFormatsInMetadata.Add("csv");
                if ((Feature.Html & config.EnableFeatures) != Feature.Html)
                    config.IgnoreFormatsInMetadata.Add("html");
                if ((Feature.Soap11 & config.EnableFeatures) != Feature.Soap11)
                    config.IgnoreFormatsInMetadata.Add("soap11");
                if ((Feature.Soap12 & config.EnableFeatures) != Feature.Soap12)
                    config.IgnoreFormatsInMetadata.Add("soap12");
            }

            // Remove plugins for disabled features (the external ones are matched by interface).
            if ((Feature.Html & config.EnableFeatures) != Feature.Html)
                Plugins.RemoveAll(x => x is HtmlFormat);

            if ((Feature.Csv & config.EnableFeatures) != Feature.Csv)
                Plugins.RemoveAll(x => x is CsvFormat);

            if ((Feature.Markdown & config.EnableFeatures) != Feature.Markdown)
                Plugins.RemoveAll(x => x is MarkdownFormat);

            if ((Feature.PredefinedRoutes & config.EnableFeatures) != Feature.PredefinedRoutes)
                Plugins.RemoveAll(x => x is PredefinedRoutesFeature);

            if ((Feature.Metadata & config.EnableFeatures) != Feature.Metadata)
                Plugins.RemoveAll(x => x is MetadataFeature);

            if ((Feature.RequestInfo & config.EnableFeatures) != Feature.RequestInfo)
                Plugins.RemoveAll(x => x is RequestInfoFeature);

            if ((Feature.Razor & config.EnableFeatures) != Feature.Razor)
                Plugins.RemoveAll(x => x is IRazorPlugin);    //external

            if ((Feature.ProtoBuf & config.EnableFeatures) != Feature.ProtoBuf)
                Plugins.RemoveAll(x => x is IProtoBufPlugin); //external

            if ((Feature.MsgPack & config.EnableFeatures) != Feature.MsgPack)
                Plugins.RemoveAll(x => x is IMsgPackPlugin);  //external

            if (ExceptionHandler == null)
            {
                // Default uncaught-exception handler: writes an error response unless the
                // response was already closed.
                // NOTE(review): "occured" is a typo but it is a runtime error string —
                // left unchanged here to preserve behavior.
                ExceptionHandler = (httpReq, httpRes, operationName, ex) =>
                {
                    var errorMessage = String.Format("Error occured while Processing Request: {0}", ex.Message);
                    var statusCode = ex.ToStatusCode();
                    //httpRes.WriteToResponse always calls .Close in it's finally statement so
                    //if there is a problem writing to response, by now it will be closed
                    if (!httpRes.IsClosed)
                    {
                        httpRes.WriteErrorToResponse(httpReq, httpReq.ResponseContentType, operationName, errorMessage, ex, statusCode);
                    }
                };
            }

            if (config.ServiceStackHandlerFactoryPath != null)
                config.ServiceStackHandlerFactoryPath = config.ServiceStackHandlerFactoryPath.TrimStart('/');

            var specifiedContentType = config.DefaultContentType; //Before plugins loaded

            ConfigurePlugins();

            AppHost.LoadPlugin(Plugins.ToArray());
            pluginsLoaded = true;

            AfterPluginsLoaded(specifiedContentType);

            // Register an in-memory cache only when the host didn't supply its own.
            // NOTE(review): the using-block disposes a resolved (transient) cache client
            // immediately after the null check — presumably intentional to release a
            // transient registration; confirm against AppHost.TryResolve semantics.
            var registeredCacheClient = AppHost.TryResolve<ICacheClient>();
            using (registeredCacheClient)
            {
                if (registeredCacheClient == null)
                {
                    Container.Register<ICacheClient>(new MemoryCacheClient());
                }
            }

            // If an MQ service is registered without a factory, expose its factory.
            var registeredMqService = AppHost.TryResolve<IMessageService>();
            var registeredMqFactory = AppHost.TryResolve<IMessageFactory>();
            if (registeredMqService != null && registeredMqFactory == null)
            {
                Container.Register(c => registeredMqService.MessageFactory);
            }

            ReadyAt = DateTime.UtcNow;
        }

        // Resolves T from the app host IoC, or default(T) when no host is configured (tests).
        public static T TryResolve<T>()
        {
            return AppHost != null ? AppHost.TryResolve<T>() : default(T);
        }

        /// <summary>
        /// The AppHost.Container. Note: it is not thread safe to register dependencies after AppStart.
        /// </summary>
        public static Container Container
        {
            get
            {
                var aspHost = AppHost as AppHostBase;
                if (aspHost != null)
                    return aspHost.Container;
                var listenerHost = AppHost as HttpListenerBase;
                return listenerHost != null ? listenerHost.Container : new Container(); //testing may use alt AppHost
            }
        }

        private static void ConfigurePlugins()
        {
            //Some plugins need to initialize before other plugins are registered.
            foreach (var plugin in Plugins)
            {
                var preInitPlugin = plugin as IPreInitPlugin;
                if (preInitPlugin != null)
                {
                    preInitPlugin.Configure(AppHost);
                }
            }
        }

        // Applies the content type that was configured before plugins ran (plugins may
        // have overwritten it), then finalizes the service manager.
        private static void AfterPluginsLoaded(string specifiedContentType)
        {
            if (!String.IsNullOrEmpty(specifiedContentType))
                config.DefaultContentType = specifiedContentType;
            else if (String.IsNullOrEmpty(config.DefaultContentType))
                config.DefaultContentType = ContentType.Json;

            config.ServiceManager.AfterInit();
            ServiceManager = config.ServiceManager; //reset operations
        }

        // Returns the first registered plugin of type T, or null.
        public static T GetPlugin<T>() where T : class, IPlugin
        {
            return Plugins.FirstOrDefault(x => x is T) as T;
        }

        // Loads plugins immediately when the host has already initialized; otherwise
        // queues them for AfterInit.
        public static void AddPlugin(params IPlugin[] plugins)
        {
            if (pluginsLoaded)
            {
                AppHost.LoadPlugin(plugins);
            }
            else
            {
                foreach (var plugin in plugins)
                {
                    Plugins.Add(plugin);
                }
            }
        }

        public static ServiceManager ServiceManager
        {
            get { return config.ServiceManager; }
            set { config.ServiceManager = value; }
        }

        private static EndpointHostConfig config;

        // Setting Config validates it and re-applies config-derived settings.
        // NOTE(review): the ArgumentNullException parameter names ("ServiceName",
        // "ServiceController") are property names, not parameter names — nameof-style
        // cleanup candidate, left as-is here.
        public static EndpointHostConfig Config
        {
            get
            {
                return config;
            }
            set
            {
                if (value.ServiceName == null)
                    throw new ArgumentNullException("ServiceName");

                if (value.ServiceController == null)
                    throw new ArgumentNullException("ServiceController");

                config = value;
                ApplyConfigChanges();
            }
        }

        // Installs a minimal test configuration when none exists; defaults to scanning
        // the calling assembly for services.
        public static void AssertTestConfig(params Assembly[] assemblies)
        {
            if (Config != null) return;

            var config = EndpointHostConfig.Instance;
            config.ServiceName = "Test Services";
            config.ServiceManager = new ServiceManager(assemblies.Length == 0 ? new[] { Assembly.GetCallingAssembly() } : assemblies);
            Config = config;
        }

        public static bool DebugMode
        {
            get { return Config != null && Config.DebugMode; }
        }

        public static ServiceMetadata Metadata { get { return Config.Metadata; } }

        /// <summary>
        /// Applies the raw request filters. Returns whether or not the request has been handled
        /// and no more processing should be done.
        /// </summary>
        /// <returns></returns>
        public static bool ApplyPreRequestFilters(IHttpRequest httpReq, IHttpResponse httpRes)
        {
            foreach (var requestFilter in RawRequestFilters)
            {
                requestFilter(httpReq, httpRes);
                if (httpRes.IsClosed) break;
            }

            return httpRes.IsClosed;
        }

        /// <summary>
        /// Applies the request filters. Returns whether or not the request has been handled
        /// and no more processing should be done.
        /// </summary>
        /// <returns></returns>
        public static bool ApplyRequestFilters(IHttpRequest httpReq, IHttpResponse httpRes, object requestDto)
        {
            httpReq.ThrowIfNull("httpReq");
            httpRes.ThrowIfNull("httpRes");

            using (Profiler.Current.Step("Executing Request Filters"))
            {
                //Exec all RequestFilter attributes with Priority < 0
                var attributes = FilterAttributeCache.GetRequestFilterAttributes(requestDto.GetType());
                var i = 0;
                for (; i < attributes.Length && attributes[i].Priority < 0; i++)
                {
                    var attribute = attributes[i];
                    ServiceManager.Container.AutoWire(attribute);
                    attribute.RequestFilter(httpReq, httpRes, requestDto);
                    if (AppHost != null) //tests
                        AppHost.Release(attribute);
                    if (httpRes.IsClosed) return httpRes.IsClosed;
                }

                //Exec global filters
                foreach (var requestFilter in RequestFilters)
                {
                    requestFilter(httpReq, httpRes, requestDto);
                    if (httpRes.IsClosed) return httpRes.IsClosed;
                }

                //Exec remaining RequestFilter attributes with Priority >= 0
                for (; i < attributes.Length; i++)
                {
                    var attribute = attributes[i];
                    ServiceManager.Container.AutoWire(attribute);
                    attribute.RequestFilter(httpReq, httpRes, requestDto);
                    if (AppHost != null) //tests
                        AppHost.Release(attribute);
                    if (httpRes.IsClosed) return httpRes.IsClosed;
                }

                return httpRes.IsClosed;
            }
        }

        /// <summary>
        /// Applies the response filters. Returns whether or not the request has been handled
        /// and no more processing should be done.
        /// </summary>
        /// <returns></returns>
        public static bool ApplyResponseFilters(IHttpRequest httpReq, IHttpResponse httpRes, object response)
        {
            httpReq.ThrowIfNull("httpReq");
            httpRes.ThrowIfNull("httpRes");

            using (Profiler.Current.Step("Executing Response Filters"))
            {
                var responseDto = response.ToResponseDto();
                var attributes = responseDto != null
                    ? FilterAttributeCache.GetResponseFilterAttributes(responseDto.GetType())
                    : null;

                //Exec all ResponseFilter attributes with Priority < 0
                var i = 0;
                if (attributes != null)
                {
                    for (; i < attributes.Length && attributes[i].Priority < 0; i++)
                    {
                        var attribute = attributes[i];
                        ServiceManager.Container.AutoWire(attribute);
                        attribute.ResponseFilter(httpReq, httpRes, response);
                        if (AppHost != null) //tests
                            AppHost.Release(attribute);
                        if (httpRes.IsClosed) return httpRes.IsClosed;
                    }
                }

                //Exec global filters
                foreach (var responseFilter in ResponseFilters)
                {
                    responseFilter(httpReq, httpRes, response);
                    if (httpRes.IsClosed) return httpRes.IsClosed;
                }

                //Exec remaining RequestFilter attributes with Priority >= 0
                if (attributes != null)
                {
                    for (; i < attributes.Length; i++)
                    {
                        var attribute = attributes[i];
                        ServiceManager.Container.AutoWire(attribute);
                        attribute.ResponseFilter(httpReq, httpRes, response);
                        if (AppHost != null) //tests
                            AppHost.Release(attribute);
                        if (httpRes.IsClosed) return httpRes.IsClosed;
                    }
                }

                return httpRes.IsClosed;
            }
        }

        // Executes the request DTO through the service controller, wrapped in a profiler step.
        internal static object ExecuteService(object request, EndpointAttributes endpointAttributes, IHttpRequest httpReq, IHttpResponse httpRes)
        {
            using (Profiler.Current.Step("Execute Service"))
            {
                return config.ServiceController.Execute(request,
                    new HttpRequestContext(httpReq, httpRes, request, endpointAttributes));
            }
        }

        // Delegates to the app host's runner factory when available (tests may have no host).
        public static IServiceRunner<TRequest> CreateServiceRunner<TRequest>(ActionContext actionContext)
        {
            return AppHost != null
                ? AppHost.CreateServiceRunner<TRequest>(actionContext)
                : new ServiceRunner<TRequest>(null, actionContext);
        }

        /// <summary>
        /// Call to signal the completion of a ServiceStack-handled Request
        /// </summary>
        internal static void CompleteRequest()
        {
            try
            {
                if (AppHost != null)
                {
                    AppHost.OnEndRequest();
                }
            }
            catch (Exception ex) { } // NOTE(review): deliberately best-effort, but the
                                     // exception is silently swallowed and 'ex' unused —
                                     // consider at least logging here.
        }

        public static void Dispose()
        {
            AppHost = null;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.Win32.SafeHandles;
using System.Diagnostics.CodeAnalysis;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics;

namespace System.IO.Pipes
{
    /// <summary>
    /// Named pipe server
    /// </summary>
    public sealed partial class NamedPipeServerStream : PipeStream
    {
        // Use the maximum number of server instances that the system resources allow
        public const int MaxAllowedServerInstances = -1;

        // The public constructors below all chain to the single private full constructor,
        // filling in defaults: InOut direction, 1 instance, byte mode, no options,
        // OS-chosen buffer sizes, non-inheritable handle.

        public NamedPipeServerStream(String pipeName)
            : this(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None)
        {
        }

        public NamedPipeServerStream(String pipeName, PipeDirection direction)
            : this(pipeName, direction, 1, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None)
        {
        }

        public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances)
            : this(pipeName, direction, maxNumberOfServerInstances, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None)
        {
        }

        public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode)
            : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, PipeOptions.None, 0, 0, HandleInheritability.None)
        {
        }

        public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode, PipeOptions options)
            : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, options, 0, 0, HandleInheritability.None)
        {
        }

        public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode, PipeOptions options, int inBufferSize, int outBufferSize)
            : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, options, inBufferSize, outBufferSize, HandleInheritability.None)
        {
        }

        /// <summary>
        /// Full named pipe server constructor
        /// </summary>
        /// <param name="pipeName">Pipe name</param>
        /// <param name="direction">Pipe direction: In, Out or InOut (duplex).
        /// Win32 note: this gets OR'd into dwOpenMode to CreateNamedPipe
        /// </param>
        /// <param name="maxNumberOfServerInstances">Maximum number of server instances. Specify a fixed value between
        /// 1 and 254 (Windows)/greater than 1 (Unix), or use NamedPipeServerStream.MaxAllowedServerInstances to use the
        /// maximum amount allowed by system resources.</param>
        /// <param name="transmissionMode">Byte mode or message mode.
        /// Win32 note: this gets used for dwPipeMode. CreateNamedPipe allows you to specify PIPE_TYPE_BYTE/MESSAGE
        /// and PIPE_READMODE_BYTE/MESSAGE independently, but this sets type and readmode to match.
        /// </param>
        /// <param name="options">PipeOption enum: None, Asynchronous, or Write-through
        /// Win32 note: this gets passed in with dwOpenMode to CreateNamedPipe. Asynchronous corresponds to
        /// FILE_FLAG_OVERLAPPED option. PipeOptions enum doesn't expose FIRST_PIPE_INSTANCE option because
        /// this sets that automatically based on the number of instances specified.
        /// </param>
        /// <param name="inBufferSize">Incoming buffer size, 0 or higher.
        /// Note: this size is always advisory; OS uses a suggestion.
        /// </param>
        /// <param name="outBufferSize">Outgoing buffer size, 0 or higher (see above)</param>
        /// <param name="inheritability">Whether handle is inheritable</param>
        private NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances,
                PipeTransmissionMode transmissionMode, PipeOptions options, int inBufferSize, int outBufferSize,
                HandleInheritability inheritability)
            : base(direction, transmissionMode, outBufferSize)
        {
            if (pipeName == null)
            {
                throw new ArgumentNullException(nameof(pipeName));
            }
            if (pipeName.Length == 0)
            {
                throw new ArgumentException(SR.Argument_NeedNonemptyPipeName);
            }
            if ((options & ~(PipeOptions.WriteThrough | PipeOptions.Asynchronous | PipeOptions.CurrentUserOnly)) != 0)
            {
                throw new ArgumentOutOfRangeException(nameof(options), SR.ArgumentOutOfRange_OptionsInvalid);
            }
            // Only inBufferSize is validated here; outBufferSize was already passed to the
            // base constructor above — presumably validated there (TODO confirm).
            if (inBufferSize < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(inBufferSize), SR.ArgumentOutOfRange_NeedNonNegNum);
            }
            if ((maxNumberOfServerInstances < 1 || maxNumberOfServerInstances > 254) && (maxNumberOfServerInstances != MaxAllowedServerInstances))
            {
                // win32 allows fixed values of 1-254 or 255 to mean max allowed by system. We expose 255 as -1 (unlimited)
                // through the MaxAllowedServerInstances constant. This is consistent e.g. with -1 as infinite timeout, etc.
                // We do this check for consistency on Unix, even though maxNumberOfServerInstances is otherwise ignored.
                throw new ArgumentOutOfRangeException(nameof(maxNumberOfServerInstances), SR.ArgumentOutOfRange_MaxNumServerInstances);
            }

            // inheritability will always be None since this private constructor is only called from other constructors from which
            // inheritability is always set to None. Desktop has a public constructor to allow setting it to something else, but Core
            // doesn't.
            if (inheritability < HandleInheritability.None || inheritability > HandleInheritability.Inheritable)
            {
                throw new ArgumentOutOfRangeException(nameof(inheritability), SR.ArgumentOutOfRange_HandleInheritabilityNoneOrInheritable);
            }

            if ((options & PipeOptions.CurrentUserOnly) != 0)
            {
                IsCurrentUserOnly = true;
            }

            // Platform-specific creation lives in the partial-class counterpart (not in this file).
            Create(pipeName, direction, maxNumberOfServerInstances, transmissionMode,
                options, inBufferSize, outBufferSize, inheritability);
        }

        // Create a NamedPipeServerStream from an existing server pipe handle.
        public NamedPipeServerStream(PipeDirection direction, bool isAsync, bool isConnected, SafePipeHandle safePipeHandle)
            : base(direction, PipeTransmissionMode.Byte, 0)
        {
            if (safePipeHandle == null)
            {
                throw new ArgumentNullException(nameof(safePipeHandle));
            }
            if (safePipeHandle.IsInvalid)
            {
                throw new ArgumentException(SR.Argument_InvalidHandle, nameof(safePipeHandle));
            }

            // Verifies the handle actually refers to a pipe before adopting it.
            ValidateHandleIsPipe(safePipeHandle);

            InitializeHandle(safePipeHandle, true, isAsync);

            if (isConnected)
            {
                State = PipeState.Connected;
            }
        }

        // Finalizer releases the native handle when Dispose was never called.
        ~NamedPipeServerStream()
        {
            Dispose(false);
        }

        public Task WaitForConnectionAsync()
        {
            return WaitForConnectionAsync(CancellationToken.None);
        }

        // APM (Begin/End) wrappers over the Task-based WaitForConnectionAsync.
        public System.IAsyncResult BeginWaitForConnection(AsyncCallback callback, object state) =>
            TaskToApm.Begin(WaitForConnectionAsync(), callback, state);

        public void EndWaitForConnection(IAsyncResult asyncResult) =>
            TaskToApm.End(asyncResult);

        // Server can only connect from Disconnected state
        [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Consistent with security model")]
        private void CheckConnectOperationsServer()
        {
            // we're not checking whether already connected; this allows us to throw IOException
            // "pipe is being closed" if other side is closing (as does win32) or no-op if
            // already connected

            if (State == PipeState.Closed)
            {
                throw Error.GetPipeNotOpen();
            }
            if (InternalHandle != null && InternalHandle.IsClosed) // only check IsClosed if we have a handle
            {
                throw Error.GetPipeNotOpen();
            }
            if (State == PipeState.Broken)
            {
                throw new IOException(SR.IO_PipeBroken);
            }
        }

        // Server is allowed to disconnect from connected and broken states
        private void CheckDisconnectOperations()
        {
            if (State == PipeState.WaitingToConnect)
            {
                throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected);
            }
            if (State == PipeState.Disconnected)
            {
                throw new InvalidOperationException(SR.InvalidOperation_PipeAlreadyDisconnected);
            }
            if (InternalHandle == null && CheckOperationsRequiresSetHandle)
            {
                throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet);
            }
            if ((State == PipeState.Closed) || (InternalHandle != null && InternalHandle.IsClosed))
            {
                throw Error.GetPipeNotOpen();
            }
        }
    }

    // Users will use this delegate to specify a method to call while impersonating the client
    // (see NamedPipeServerStream.RunAsClient).
    public delegate void PipeStreamImpersonationWorker();
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Razor.Language.Extensions;
using Microsoft.AspNetCore.Razor.Language.Intermediate;
using Microsoft.AspNetCore.Razor.Language.Syntax;

namespace Microsoft.AspNetCore.Razor.Language
{
    /// <summary>
    /// Extension methods that stash and retrieve per-document compilation state
    /// (syntax trees, intermediate nodes, options, generated documents) in a
    /// <see cref="RazorCodeDocument"/>'s Items bag, keyed by type (or a private key object).
    /// </summary>
    public static class RazorCodeDocumentExtensions
    {
        private static readonly char[] PathSeparators = new char[] { '/', '\\' };
        private static readonly char[] NamespaceSeparators = new char[] { '.' };
        // Private key object so the CSS scope entry cannot collide with any type-keyed entry.
        private static readonly object CssScopeKey = new object();

        /// <summary>Gets the tag helper context previously stored by <see cref="SetTagHelperContext"/>, or null.</summary>
        public static TagHelperDocumentContext GetTagHelperContext(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (TagHelperDocumentContext)document.Items[typeof(TagHelperDocumentContext)];
        }

        public static void SetTagHelperContext(this RazorCodeDocument document, TagHelperDocumentContext context)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(TagHelperDocumentContext)] = context;
        }

        // Tag helpers are wrapped in a holder type so the Items key (typeof(TagHelpersHolder))
        // is distinct from any other list-valued entry.
        internal static IReadOnlyList<TagHelperDescriptor> GetTagHelpers(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (document.Items[typeof(TagHelpersHolder)] as TagHelpersHolder)?.TagHelpers;
        }

        internal static void SetTagHelpers(this RazorCodeDocument document, IReadOnlyList<TagHelperDescriptor> tagHelpers)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(TagHelpersHolder)] = new TagHelpersHolder(tagHelpers);
        }

        public static RazorSyntaxTree GetSyntaxTree(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return document.Items[typeof(RazorSyntaxTree)] as RazorSyntaxTree;
        }

        public static void SetSyntaxTree(this RazorCodeDocument document, RazorSyntaxTree syntaxTree)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(RazorSyntaxTree)] = syntaxTree;
        }

        public static IReadOnlyList<RazorSyntaxTree> GetImportSyntaxTrees(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (document.Items[typeof(ImportSyntaxTreesHolder)] as ImportSyntaxTreesHolder)?.SyntaxTrees;
        }

        public static void SetImportSyntaxTrees(this RazorCodeDocument document, IReadOnlyList<RazorSyntaxTree> syntaxTrees)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(ImportSyntaxTreesHolder)] = new ImportSyntaxTreesHolder(syntaxTrees);
        }

        public static DocumentIntermediateNode GetDocumentIntermediateNode(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return document.Items[typeof(DocumentIntermediateNode)] as DocumentIntermediateNode;
        }

        public static void SetDocumentIntermediateNode(this RazorCodeDocument document, DocumentIntermediateNode documentNode)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(DocumentIntermediateNode)] = documentNode;
        }

        /// <summary>
        /// Gets the HTML document for this code document, generating and caching it on
        /// first access. Unlike the other getters, this one lazily computes its value.
        /// </summary>
        internal static RazorHtmlDocument GetHtmlDocument(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            var razorHtmlObj = document.Items[typeof(RazorHtmlDocument)];
            if (razorHtmlObj == null)
            {
                var razorHtmlDocument = RazorHtmlWriter.GetHtmlDocument(document);
                if (razorHtmlDocument != null)
                {
                    // Cache so subsequent calls do not regenerate the HTML document.
                    document.Items[typeof(RazorHtmlDocument)] = razorHtmlDocument;
                    return razorHtmlDocument;
                }
            }

            return (RazorHtmlDocument)razorHtmlObj;
        }

        public static RazorCSharpDocument GetCSharpDocument(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (RazorCSharpDocument)document.Items[typeof(RazorCSharpDocument)];
        }

        public static void SetCSharpDocument(this RazorCodeDocument document, RazorCSharpDocument csharp)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(RazorCSharpDocument)] = csharp;
        }

        public static RazorParserOptions GetParserOptions(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (RazorParserOptions)document.Items[typeof(RazorParserOptions)];
        }

        public static void SetParserOptions(this RazorCodeDocument document, RazorParserOptions parserOptions)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(RazorParserOptions)] = parserOptions;
        }

        public static RazorCodeGenerationOptions GetCodeGenerationOptions(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (RazorCodeGenerationOptions)document.Items[typeof(RazorCodeGenerationOptions)];
        }

        public static void SetCodeGenerationOptions(this RazorCodeDocument document, RazorCodeGenerationOptions codeGenerationOptions)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(RazorCodeGenerationOptions)] = codeGenerationOptions;
        }

        // The file kind (component, component import, legacy, ...) is keyed by typeof(FileKinds).
        public static string GetFileKind(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (string)document.Items[typeof(FileKinds)];
        }

        public static void SetFileKind(this RazorCodeDocument document, string fileKind)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[typeof(FileKinds)] = fileKind;
        }

        public static string GetCssScope(this RazorCodeDocument document)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            return (string)document.Items[CssScopeKey];
        }

        public static void SetCssScope(this RazorCodeDocument document, string cssScope)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            document.Items[CssScopeKey] = cssScope;
        }

        // In general documents will have a relative path (relative to the project root).
        // We can only really compute a nice namespace when we know a relative path.
        //
        // However all kinds of thing are possible in tools. We shouldn't barf here if the document isn't
        // set up correctly.
        /// <summary>
        /// Attempts to compute the namespace for the document: the closest @namespace
        /// directive (from the document or its imports) plus a suffix derived from the
        /// relative path, falling back to the configured RootNamespace when
        /// <paramref name="fallbackToRootNamespace"/> is true.
        /// </summary>
        /// <returns>True when a namespace could be computed; false otherwise (with a null <paramref name="namespace"/>).</returns>
        public static bool TryComputeNamespace(this RazorCodeDocument document, bool fallbackToRootNamespace, out string @namespace)
        {
            if (document == null)
            {
                throw new ArgumentNullException(nameof(document));
            }

            var filePath = document.Source.FilePath;
            if (filePath == null || document.Source.RelativePath == null || filePath.Length < document.Source.RelativePath.Length)
            {
                @namespace = null;
                return false;
            }

            // If the document or its imports contains a @namespace directive, we want to use that over the root namespace.
            var baseNamespace = string.Empty;
            var appendSuffix = true;
            var lastNamespaceContent = string.Empty;
            var lastNamespaceLocation = SourceSpan.Undefined;
            var importSyntaxTrees = document.GetImportSyntaxTrees();
            if (importSyntaxTrees != null)
            {
                // ImportSyntaxTrees is usually set. Just being defensive.
                foreach (var importSyntaxTree in importSyntaxTrees)
                {
                    if (importSyntaxTree != null && NamespaceVisitor.TryGetLastNamespaceDirective(importSyntaxTree, out var importNamespaceContent, out var importNamespaceLocation))
                    {
                        lastNamespaceContent = importNamespaceContent;
                        lastNamespaceLocation = importNamespaceLocation;
                    }
                }
            }

            // A directive in the document itself wins over any directive found in the imports.
            var syntaxTree = document.GetSyntaxTree();
            if (syntaxTree != null && NamespaceVisitor.TryGetLastNamespaceDirective(syntaxTree, out var namespaceContent, out var namespaceLocation))
            {
                lastNamespaceContent = namespaceContent;
                lastNamespaceLocation = namespaceLocation;
            }

            StringSegment relativePath = document.Source.RelativePath;

            // If there are multiple @namespace directives in the hierarchy,
            // we want to pick the closest one to the current document.
            if (!string.IsNullOrEmpty(lastNamespaceContent))
            {
                baseNamespace = lastNamespaceContent;
                var directiveLocationDirectory = NormalizeDirectory(lastNamespaceLocation.FilePath);

                var sourceFilePath = new StringSegment(document.Source.FilePath);
                // We're specifically using OrdinalIgnoreCase here because Razor treats all paths as case-insensitive.
                if (!sourceFilePath.StartsWith(directiveLocationDirectory, StringComparison.OrdinalIgnoreCase) ||
                    sourceFilePath.Length <= directiveLocationDirectory.Length)
                {
                    // The most relevant directive is not from the directory hierarchy, can't compute a suffix.
                    appendSuffix = false;
                }
                else
                {
                    // We know that the document containing the namespace directive is in the current document's hierarchy.
                    // Let's compute the actual relative path that we'll use to compute the namespace suffix.
                    relativePath = sourceFilePath.Subsegment(directiveLocationDirectory.Length);
                }
            }
            else if (fallbackToRootNamespace)
            {
                var options = document.GetCodeGenerationOptions() ?? document.GetDocumentIntermediateNode()?.Options;
                baseNamespace = options?.RootNamespace;
                appendSuffix = true;
            }

            if (string.IsNullOrEmpty(baseNamespace))
            {
                // There was no valid @namespace directive and we couldn't compute the RootNamespace.
                @namespace = null;
                return false;
            }

            var builder = new StringBuilder();

            // Sanitize the base namespace, but leave the dots.
            var segments = new StringTokenizer(baseNamespace, NamespaceSeparators);
            var first = true;
            foreach (var token in segments)
            {
                if (token.IsEmpty)
                {
                    continue;
                }

                if (first)
                {
                    first = false;
                }
                else
                {
                    builder.Append('.');
                }

                CSharpIdentifier.AppendSanitized(builder, token);
            }

            if (appendSuffix)
            {
                // If we get here, we already have a base namespace and the relative path that should be used as the namespace suffix.
                segments = new StringTokenizer(relativePath, PathSeparators);
                var previousLength = builder.Length;
                foreach (var token in segments)
                {
                    if (token.IsEmpty)
                    {
                        continue;
                    }

                    previousLength = builder.Length;

                    builder.Append('.');
                    CSharpIdentifier.AppendSanitized(builder, token);
                }

                // Trim the last segment because it's the FileName.
                builder.Length = previousLength;
            }

            @namespace = builder.ToString();

            return true;

            // We want to normalize the path of the file containing the '@namespace' directive to just the containing
            // directory with a trailing separator.
            //
            // Not using Path.GetDirectoryName here because it doesn't meet these requirements, and we want to handle
            // both 'view engine' style paths and absolute paths.
            //
            // We also don't normalize the separators here. We expect that all documents are using a consistent style of path.
            //
            // If we can't normalize the path, we just return null so it will be ignored.
            StringSegment NormalizeDirectory(string path)
            {
                if (string.IsNullOrEmpty(path))
                {
                    return default;
                }

                var lastSeparator = path.LastIndexOfAny(PathSeparators);
                if (lastSeparator == -1)
                {
                    return default;
                }

                // Includes the separator
                return new StringSegment(path, 0, lastSeparator + 1);
            }
        }

        // Wrapper so the Items bag key is unambiguous.
        private class ImportSyntaxTreesHolder
        {
            public ImportSyntaxTreesHolder(IReadOnlyList<RazorSyntaxTree> syntaxTrees)
            {
                SyntaxTrees = syntaxTrees;
            }

            public IReadOnlyList<RazorSyntaxTree> SyntaxTrees { get; }
        }

        // NOTE(review): this holder type appears unused within this file — presumably kept for
        // compatibility or future use; confirm before removing.
        private class IncludeSyntaxTreesHolder
        {
            public IncludeSyntaxTreesHolder(IReadOnlyList<RazorSyntaxTree> syntaxTrees)
            {
                SyntaxTrees = syntaxTrees;
            }

            public IReadOnlyList<RazorSyntaxTree> SyntaxTrees { get; }
        }

        private class TagHelpersHolder
        {
            public TagHelpersHolder(IReadOnlyList<TagHelperDescriptor> tagHelpers)
            {
                TagHelpers = tagHelpers;
            }

            public IReadOnlyList<TagHelperDescriptor> TagHelpers { get; }
        }

        /// <summary>
        /// Syntax walker that records the content and location of the LAST @namespace
        /// directive encountered in a syntax tree.
        /// </summary>
        private class NamespaceVisitor : SyntaxWalker
        {
            private readonly RazorSourceDocument _source;

            private NamespaceVisitor(RazorSourceDocument source)
            {
                _source = source;
            }

            public string LastNamespaceContent { get; set; }

            public SourceSpan LastNamespaceLocation { get; set; }

            public static bool TryGetLastNamespaceDirective(
                RazorSyntaxTree syntaxTree,
                out string namespaceDirectiveContent,
                out SourceSpan namespaceDirectiveSpan)
            {
                var visitor = new NamespaceVisitor(syntaxTree.Source);
                visitor.Visit(syntaxTree.Root);
                if (string.IsNullOrEmpty(visitor.LastNamespaceContent))
                {
                    namespaceDirectiveContent = null;
                    namespaceDirectiveSpan = SourceSpan.Undefined;
                    return false;
                }

                namespaceDirectiveContent = visitor.LastNamespaceContent;
                namespaceDirectiveSpan = visitor.LastNamespaceLocation;
                return true;
            }

            public override void VisitRazorDirective(RazorDirectiveSyntax node)
            {
                if (node != null && node.DirectiveDescriptor == NamespaceDirective.Directive)
                {
                    var directiveContent = node.Body?.GetContent();

                    // In practice, this should never be null and always start with 'namespace'. Just being defensive here.
                    if (directiveContent != null && directiveContent.StartsWith(NamespaceDirective.Directive.Directive, StringComparison.Ordinal))
                    {
                        LastNamespaceContent = directiveContent.Substring(NamespaceDirective.Directive.Directive.Length).Trim();
                        LastNamespaceLocation = node.GetSourceSpan(_source);
                    }
                }

                base.VisitRazorDirective(node);
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Linq.Expressions;
using FluentAssertions.Execution;

namespace FluentAssertions.Collections
{
    /// <summary>
    /// Contains a number of methods to assert that an <see cref="IEnumerable{T}"/> is in the expected state.
    /// </summary>
    public class SelfReferencingCollectionAssertions<T, TAssertions> : CollectionAssertions<IEnumerable<T>, TAssertions>
        where TAssertions : SelfReferencingCollectionAssertions<T, TAssertions>
    {
        public SelfReferencingCollectionAssertions(IEnumerable<T> actualValue)
        {
            // A null subject is left unset; the individual assertions report it explicitly.
            if (actualValue != null)
            {
                Subject = actualValue;
            }
        }

        /// <summary>
        /// Asserts that the number of items in the collection matches the supplied <paramref name="expected" /> amount.
        /// </summary>
        /// <param name="expected">The expected number of items in the collection.</param>
        /// <param name="because">
        /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
        /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more objects to format using the placeholders in <paramref name="because" />.
        /// </param>
        public AndConstraint<TAssertions> HaveCount(int expected, string because = "", params object[] reasonArgs)
        {
            if (ReferenceEquals(Subject, null))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} to contain {0} item(s){reason}, but found <null>.", expected);
            }

            int actualCount = Subject.Count();

            Execute.Assertion
                .ForCondition(actualCount == expected)
                .BecauseOf(because, reasonArgs)
                .FailWith("Expected {context:collection} to contain {0} item(s){reason}, but found {1}.", expected, actualCount);

            return new AndConstraint<TAssertions>((TAssertions)this);
        }

        /// <summary>
        /// Asserts that the number of items in the collection matches a condition stated by the <paramref name="countPredicate"/>.
        /// </summary>
        /// <param name="countPredicate">A predicate that yields the number of items that is expected to be in the collection.</param>
        /// <param name="because">
        /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
        /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more objects to format using the placeholders in <paramref name="because" />.
        /// </param>
        /// <exception cref="ArgumentNullException"><paramref name="countPredicate"/> is <c>null</c>.</exception>
        public AndConstraint<TAssertions> HaveCount(Expression<Func<int, bool>> countPredicate, string because = "",
            params object[] reasonArgs)
        {
            // Fix: was "throw new NullReferenceException(...)" — never raise reserved
            // exception types directly (CA2201); argument validation uses ArgumentNullException.
            if (countPredicate == null)
            {
                throw new ArgumentNullException(nameof(countPredicate), "Cannot compare collection count against a <null> predicate.");
            }

            if (ReferenceEquals(Subject, null))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} to contain {0} items{reason}, but found {1}.", countPredicate.Body, Subject);
            }

            Func<int, bool> compiledPredicate = countPredicate.Compile();

            int actualCount = Subject.Count();

            if (!compiledPredicate(actualCount))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} {0} to have a count {1}{reason}, but count is {2}.",
                        Subject, countPredicate.Body, actualCount);
            }

            return new AndConstraint<TAssertions>((TAssertions)this);
        }

        /// <summary>
        /// Expects the current collection to contain all the same elements in the same order as the collection identified by
        /// <paramref name="elements" />. Elements are compared using their <see cref="object.Equals(object)" /> method.
        /// </summary>
        /// <param name="elements">A params array with the expected elements.</param>
        public AndConstraint<TAssertions> Equal(params T[] elements)
        {
            return Equal(elements, string.Empty);
        }

        /// <summary>
        /// Asserts that two collections contain the same items in the same order, where equality is determined using a
        /// predicate.
        /// </summary>
        /// <param name="expectation">
        /// The collection to compare the subject with.
        /// </param>
        /// <param name="predicate">
        /// A predicate that is used to determine whether two objects should be treated as equal.
        /// </param>
        /// <param name="because">
        /// A formatted phrase as is supported by <see cref="string.Format(string,object[])"/> explaining why the assertion
        /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more objects to format using the placeholders in <paramref name="because"/>.
        /// </param>
        public AndConstraint<TAssertions> Equal<TExpected>(
            IEnumerable<TExpected> expectation, Func<T, TExpected, bool> predicate, string because = "",
            params object[] reasonArgs)
        {
            AssertSubjectEquality(expectation, predicate, because, reasonArgs);

            return new AndConstraint<TAssertions>((TAssertions)this);
        }

        /// <summary>
        /// Asserts that the collection contains the specified item.
        /// </summary>
        /// <param name="expected">The expected item.</param>
        /// <param name="because">
        /// A formatted phrase explaining why the assertion should be satisfied. If the phrase does not
        /// start with the word <i>because</i>, it is prepended to the message.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more values to use for filling in any <see cref="string.Format(string,object[])"/> compatible placeholders.
        /// </param>
        public AndWhichConstraint<TAssertions, T> Contain(T expected, string because = "", params object[] reasonArgs)
        {
            if (ReferenceEquals(Subject, null))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} to contain {0}{reason}, but found {1}.", expected, Subject);
            }

            if (!Subject.Contains(expected))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} {0} to contain {1}{reason}.", Subject, expected);
            }

            // The constraint exposes the matching elements so callers can chain .Which.
            return new AndWhichConstraint<TAssertions, T>((TAssertions)this,
                Subject.Where(item => EqualityComparer<T>.Default.Equals(item, expected)));
        }

        /// <summary>
        /// Asserts that the collection contains some extra items in addition to the original items.
        /// </summary>
        /// <param name="expectedItemsList">An <see cref="IEnumerable{T}"/> of expected items.</param>
        /// <param name="additionalExpectedItems">Additional items that are expected to be contained by the collection.</param>
        public AndConstraint<TAssertions> Contain(IEnumerable<T> expectedItemsList,
            params T[] additionalExpectedItems)
        {
            var list = new List<T>(expectedItemsList);
            list.AddRange(additionalExpectedItems);
            return Contain((IEnumerable)list);
        }

        /// <summary>
        /// Asserts that the collection contains at least one item that matches the predicate.
        /// </summary>
        /// <param name="predicate">A predicate to match the items in the collection against.</param>
        /// <param name="because">
        /// A formatted phrase explaining why the assertion should be satisfied. If the phrase does not
        /// start with the word <i>because</i>, it is prepended to the message.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more values to use for filling in any <see cref="string.Format(string,object[])"/> compatible placeholders.
        /// </param>
        /// <exception cref="ArgumentNullException"><paramref name="predicate"/> is <c>null</c>.</exception>
        public AndWhichConstraint<TAssertions, T> Contain(Expression<Func<T, bool>> predicate, string because = "",
            params object[] reasonArgs)
        {
            // Guard added: the original dereferenced predicate.Body and crashed with an NRE on null input.
            if (predicate == null)
            {
                throw new ArgumentNullException(nameof(predicate));
            }

            if (ReferenceEquals(Subject, null))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} to contain {0}{reason}, but found {1}.", predicate.Body, Subject);
            }

            Func<T, bool> func = predicate.Compile();

            Execute.Assertion
                .ForCondition(Subject.Any(func))
                .BecauseOf(because, reasonArgs)
                .FailWith("{context:Collection} {0} should have an item matching {1}{reason}.", Subject, predicate.Body);

            return new AndWhichConstraint<TAssertions, T>((TAssertions)this, Subject.Where(func));
        }

        /// <summary>
        /// Asserts that the collection only contains items that match a predicate.
        /// </summary>
        /// <param name="predicate">A predicate to match the items in the collection against.</param>
        /// <param name="because">
        /// A formatted phrase explaining why the assertion should be satisfied. If the phrase does not
        /// start with the word <i>because</i>, it is prepended to the message.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more values to use for filling in any <see cref="string.Format(string,object[])"/> compatible placeholders.
        /// </param>
        /// <exception cref="ArgumentNullException"><paramref name="predicate"/> is <c>null</c>.</exception>
        public AndConstraint<TAssertions> OnlyContain(
            Expression<Func<T, bool>> predicate, string because = "", params object[] reasonArgs)
        {
            if (predicate == null)
            {
                throw new ArgumentNullException(nameof(predicate));
            }

            Func<T, bool> compiledPredicate = predicate.Compile();

            Execute.Assertion
                .ForCondition(Subject.Any())
                .BecauseOf(because, reasonArgs)
                .FailWith("Expected {context:collection} to contain only items matching {0}{reason}, but the collection is empty.",
                    predicate.Body);

            // Materialize once so Any() and the failure message don't enumerate (and re-filter) twice.
            List<T> mismatchingItems = Subject.Where(item => !compiledPredicate(item)).ToList();
            if (mismatchingItems.Any())
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} to contain only items matching {0}{reason}, but {1} do(es) not match.",
                        predicate.Body, mismatchingItems);
            }

            return new AndConstraint<TAssertions>((TAssertions)this);
        }

        /// <summary>
        /// Asserts that the collection does not contain any items that match the predicate.
        /// </summary>
        /// <param name="predicate">A predicate to match the items in the collection against.</param>
        /// <param name="because">
        /// A formatted phrase explaining why the assertion should be satisfied. If the phrase does not
        /// start with the word <i>because</i>, it is prepended to the message.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more values to use for filling in any <see cref="string.Format(string,object[])"/> compatible placeholders.
        /// </param>
        /// <exception cref="ArgumentNullException"><paramref name="predicate"/> is <c>null</c>.</exception>
        public AndConstraint<TAssertions> NotContain(Expression<Func<T, bool>> predicate, string because = "",
            params object[] reasonArgs)
        {
            if (predicate == null)
            {
                throw new ArgumentNullException(nameof(predicate));
            }

            if (ReferenceEquals(Subject, null))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("Expected {context:collection} not to contain {0}{reason}, but found {1}.", predicate.Body, Subject);
            }

            // Fix: compile the expression once instead of once per element inside the Any() lambda.
            Func<T, bool> compiledPredicate = predicate.Compile();
            if (Subject.Any(compiledPredicate))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith("{context:Collection} {0} should not have any items matching {1}{reason}.", Subject, predicate.Body);
            }

            return new AndConstraint<TAssertions>((TAssertions)this);
        }

        /// <summary>
        /// Expects the current collection to contain only a single item.
        /// </summary>
        /// <param name="because">
        /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
        /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more objects to format using the placeholders in <paramref name="because" />.
        /// </param>
        public AndWhichConstraint<TAssertions, T> ContainSingle(string because = "", params object[] reasonArgs)
        {
            if (ReferenceEquals(Subject, null))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    // Fix: message previously omitted {reason}, silently discarding the caller's because-clause.
                    .FailWith("Expected {context:collection} to contain a single item{reason}, but found <null>.");
            }

            int actualCount = Subject.Count();
            if (actualCount != 1)
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    // Fix: include {reason} and report the actual count instead of a bare failure.
                    .FailWith("Expected {context:collection} to contain a single item{reason}, but found {0}.", actualCount);
            }

            return new AndWhichConstraint<TAssertions, T>((TAssertions)this, Subject.Single());
        }

        /// <summary>
        /// Expects the current collection to contain only a single item matching the specified <paramref name="predicate"/>.
        /// </summary>
        /// <param name="predicate">The predicate that will be used to find the matching items.</param>
        /// <param name="because">
        /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
        /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
        /// </param>
        /// <param name="reasonArgs">
        /// Zero or more objects to format using the placeholders in <paramref name="because" />.
        /// </param>
        /// <exception cref="ArgumentNullException"><paramref name="predicate"/> is <c>null</c>.</exception>
        public AndWhichConstraint<TAssertions, T> ContainSingle(Expression<Func<T, bool>> predicate,
            string because = "", params object[] reasonArgs)
        {
            if (predicate == null)
            {
                throw new ArgumentNullException(nameof(predicate));
            }

            string expectationPrefix =
                string.Format("Expected {{context:collection}} to contain a single item matching {0}{{reason}}, ", predicate.Body);

            if (ReferenceEquals(Subject, null))
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith(expectationPrefix + "but found {0}.", Subject);
            }

            T[] actualItems = Subject.ToArray();
            Execute.Assertion
                .ForCondition(actualItems.Any())
                .BecauseOf(because, reasonArgs)
                .FailWith(expectationPrefix + "but the collection is empty.");

            T[] matchingElements = actualItems.Where(predicate.Compile()).ToArray();
            int count = matchingElements.Length;
            if (count == 0)
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith(expectationPrefix + "but no such item was found.");
            }
            else if (count > 1)
            {
                Execute.Assertion
                    .BecauseOf(because, reasonArgs)
                    .FailWith(expectationPrefix + "but " + count + " such items were found.");
            }
            else
            {
                // Exactly 1 item was found, which is the expected outcome.
            }

            return new AndWhichConstraint<TAssertions, T>((TAssertions)this, matchingElements);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Test entry point for Avx2.Blend(Vector256<UInt16>, Vector256<UInt16>, 2):
        // runs every scenario variant when AVX2 is supported, otherwise validates
        // that the intrinsic throws PlatformNotSupportedException.
        private static void BlendUInt162()
        {
            var test = new ImmBinaryOpTest__BlendUInt162();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local class works
                test.RunClassLclFldScenario();

                // Validates passing an instance member of a class works
                test.RunClassFldScenario();

                // Validates passing the field of a local struct works
                test.RunStructLclFldScenario();

                // Validates passing an instance member of a struct works
                test.RunStructFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    // Generated test harness for Avx2.Blend on Vector256<UInt16> with imm8 control = 2.
    public sealed unsafe class ImmBinaryOpTest__BlendUInt162
    {
        private struct TestStruct
        {
            public Vector256<UInt16> _fld1;
            public Vector256<UInt16> _fld2;

            // Populates the struct fields from the class's static scratch arrays
            // (_data1/_data2) using fresh random values.
            public static TestStruct Create()
            {
                var testStruct = new TestStruct();

                for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
                for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

                return testStruct;
            }

            public void RunStructFldScenario(ImmBinaryOpTest__BlendUInt162 testClass)
            {
                var result = Avx2.Blend(_fld1, _fld2, 2);

                Unsafe.Write(testClass._dataTable.outArrayPtr, result);
                testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
            }
        }

        private static readonly int LargestVectorSize = 32;

        private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);
        private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);
        private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);

        // Shared scratch arrays used to fill vectors with random input data.
        private static UInt16[] _data1 = new UInt16[Op1ElementCount];
        private static UInt16[] _data2 = new UInt16[Op2ElementCount];

        private static Vector256<UInt16> _clsVar1;
        private static Vector256<UInt16> _clsVar2;

        private Vector256<UInt16> _fld1;
        private Vector256<UInt16> _fld2;

        private SimpleBinaryOpTest__DataTable<UInt16, UInt16, UInt16> _dataTable;

        static ImmBinaryOpTest__BlendUInt162()
        {
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _clsVar1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _clsVar2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
        }

        public ImmBinaryOpTest__BlendUInt162()
        {
            Succeeded = true;

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt16>, byte>(ref _fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
            _dataTable = new SimpleBinaryOpTest__DataTable<UInt16, UInt16, UInt16>(_data1, _data2, new UInt16[RetElementCount], LargestVectorSize);
        }

        public bool IsSupported => Avx2.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

            var result = Avx2.Blend(
                Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray2Ptr),
                2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

            var result = Avx2.Blend(
                Avx.LoadVector256((UInt16*)(_dataTable.inArray1Ptr)),
                Avx.LoadVector256((UInt16*)(_dataTable.inArray2Ptr)),
                2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

            var result = Avx2.Blend(
                Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray1Ptr)),
                Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray2Ptr)),
                2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

            var result = typeof(Avx2).GetMethod(nameof(Avx2.Blend), new Type[] { typeof(Vector256<UInt16>), typeof(Vector256<UInt16>), typeof(byte) })
                                     .Invoke(null, new object[] {
                                        Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray1Ptr),
                                        Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray2Ptr),
                                        (byte)2
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

            var result = typeof(Avx2).GetMethod(nameof(Avx2.Blend), new Type[] { typeof(Vector256<UInt16>), typeof(Vector256<UInt16>), typeof(byte) })
                                     .Invoke(null, new object[] {
                                        Avx.LoadVector256((UInt16*)(_dataTable.inArray1Ptr)),
                                        Avx.LoadVector256((UInt16*)(_dataTable.inArray2Ptr)),
                                        (byte)2
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

            var result = typeof(Avx2).GetMethod(nameof(Avx2.Blend), new Type[] { typeof(Vector256<UInt16>), typeof(Vector256<UInt16>), typeof(byte) })
                                     .Invoke(null, new object[] {
                                        Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray1Ptr)),
                                        Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray2Ptr)),
                                        (byte)2
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt16>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

            var result = Avx2.Blend(
                _clsVar1,
                _clsVar2,
                2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

            var left = Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray1Ptr);
            var right = Unsafe.Read<Vector256<UInt16>>(_dataTable.inArray2Ptr);
            var result = Avx2.Blend(left, right, 2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

            var left = Avx.LoadVector256((UInt16*)(_dataTable.inArray1Ptr));
            var right = Avx.LoadVector256((UInt16*)(_dataTable.inArray2Ptr));
            var result = Avx2.Blend(left, right, 2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

            var left = Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray1Ptr));
            var right = Avx.LoadAlignedVector256((UInt16*)(_dataTable.inArray2Ptr));
            var result = Avx2.Blend(left, right, 2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunClassLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

            var test = new ImmBinaryOpTest__BlendUInt162();
            var result = Avx2.Blend(test._fld1, test._fld2, 2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunClassFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

            var result = Avx2.Blend(_fld1, _fld2, 2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
        }

        public void RunStructLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

            var test = TestStruct.Create();
            var result = Avx2.Blend(test._fld1, test._fld2, 2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunStructFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

            var test = TestStruct.Create();
            test.RunStructFldScenario(this);
        }

        public void RunUnsupportedScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

            bool succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                // Expected on hardware without AVX2 support.
                succeeded = true;
            }

            if (!succeeded)
            {
                Succeeded = false;
            }
        }

        private void ValidateResult(Vector256<UInt16> left, Vector256<UInt16> right, void* result, [CallerMemberName] string method = "")
        {
            UInt16[] inArray1 = new UInt16[Op1ElementCount];
            UInt16[] inArray2 = new UInt16[Op2ElementCount];
            UInt16[] outArray = new UInt16[RetElementCount];

            Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), left);
            Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), right);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
        {
            UInt16[] inArray1 = new UInt16[Op1ElementCount];
            UInt16[] inArray2 = new UInt16[Op2ElementCount];
            UInt16[] outArray = new UInt16[RetElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt16>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Software model of VPBLENDW: the 8-bit immediate selects per-element between
        // left (bit clear) and right (bit set), and the control repeats for each
        // 128-bit lane — hence the (i - 8) re-index for elements 8..15.
        private void ValidateResult(UInt16[] left, UInt16[] right, UInt16[] result, [CallerMemberName] string method = "")
        {
            bool succeeded = true;

            if (result[0] != (((2 & (1 << 0)) == 0) ? left[0] : right[0]))
            {
                succeeded = false;
            }
            else
            {
                for (var i = 1; i < RetElementCount; i++)
                {
                    if (result[i] != ((i < 8) ? (((2 & (1 << i)) == 0) ? left[i] : right[i]) : (((2 & (1 << (i - 8))) == 0) ? left[i] : right[i])))
                    {
                        succeeded = false;
                        break;
                    }
                }
            }

            if (!succeeded)
            {
                TestLibrary.TestFramework.LogInformation($"{nameof(Avx2)}.{nameof(Avx2.Blend)}<UInt16>(Vector256<UInt16>.2, Vector256<UInt16>): {method} failed:");
                TestLibrary.TestFramework.LogInformation($"    left: ({string.Join(", ", left)})");
                TestLibrary.TestFramework.LogInformation($"   right: ({string.Join(", ", right)})");
                TestLibrary.TestFramework.LogInformation($"  result: ({string.Join(", ", result)})");
                TestLibrary.TestFramework.LogInformation(string.Empty);

                Succeeded = false;
            }
        }
    }
}
using System;
using System.Collections.Generic;

using CamBam.CAD;
using CamBam.Geom;

using Tree4;

namespace Matmill
{
    // Spatial helper over a machining region (an outline polyline plus island polylines).
    // Wraps the region's segments in a T4 spatial index for nearest-wall queries and
    // provides point-sampling and line-containment tests used by the medial-axis builder.
    class Topographer
    {
        // Padding added around the region's bounding box when sizing the T4 index.
        private const double T4_MARGIN = 1.0;

        private readonly Polyline _outline;
        private readonly Polyline[] _islands;
        private readonly T4 _t4;
        // Lazy flag: segments are inserted into the T4 only on first distance query.
        bool is_t4_populated = false;

        // Min/Max undo the T4_MARGIN padding to recover the region's true bounding box.
        public Point2F Min { get { return new Point2F(_t4.Rect.Xmin + T4_MARGIN, _t4.Rect.Ymin + T4_MARGIN); } }
        public Point2F Max { get { return new Point2F(_t4.Rect.Xmax - T4_MARGIN, _t4.Rect.Ymax - T4_MARGIN); } }

        // Inserts outline and island segments into the T4 index exactly once.
        private void populate_t4()
        {
            if (is_t4_populated) return;

            insert_in_t4(_outline);
            foreach (Polyline island in _islands)
                insert_in_t4(island);

            is_t4_populated = true;
        }

        // Adds every segment of the polyline to the T4, keyed by its axis-aligned bounding box.
        private void insert_in_t4(Polyline p)
        {
            for (int i = 0; i < p.NumSegments; i++)
            {
                object seg = p.GetSegment(i);
                T4_rect rect;

                if (seg is Line2F)
                {
                    Line2F line = ((Line2F)seg);
                    rect = new T4_rect(Math.Min(line.p1.X, line.p2.X),
                                       Math.Min(line.p1.Y, line.p2.Y),
                                       Math.Max(line.p1.X, line.p2.X),
                                       Math.Max(line.p1.Y, line.p2.Y));
                }
                else if (seg is Arc2F)
                {
                    Point2F min = Point2F.Undefined;
                    Point2F max = Point2F.Undefined;
                    ((Arc2F)seg).GetExtrema(ref min, ref max);
                    rect = new T4_rect(min.X, min.Y, max.X, max.Y);
                }
                else
                {
                    throw new Exception("unknown segment type");
                }

                _t4.Add(rect, seg);
            }
        }

        // True if the whole segment lies inside the region (inside the outline, outside all islands).
        // When should_analize_inner_intersections is set, any intersection with a boundary curve
        // also disqualifies the line (handles concave outlines / islands between the endpoints).
        public bool Is_line_inside_region(Line2F line, bool should_analize_inner_intersections, double tolerance)
        {
            if (!_outline.PointInPolyline(line.p1, tolerance)) return false;  // p1 is outside of outer curve boundary
            if (!_outline.PointInPolyline(line.p2, tolerance)) return false;  // p2 is outside of outer curve boundary
            if (should_analize_inner_intersections && _outline.LineIntersections(line, tolerance).Length != 0) return false;  // both endpoints are inside, but there are intersections, outer curve must be concave

            foreach (Polyline island in _islands)
            {
                if (island.PointInPolyline(line.p1, tolerance)) return false;  // p1 is inside hole
                if (island.PointInPolyline(line.p2, tolerance)) return false;  // p2 is inside hole
                if (should_analize_inner_intersections && island.LineIntersections(line, tolerance).Length != 0) return false;  // p1, p2 are outside hole, but there are intersections
            }
            return true;
        }

        // Samples the curve at (approximately) the given step.
        // Divide curve evenly: there is a bug in CamBam's divide-by-step routine (duplicate points),
        // while 'divide to n equal segments' works ok. Execution speed may be worse, but who cares.
        public List<Point2F> sample_curve(Polyline p, double step)
        {
            double length = p.GetPerimeter();
            int nsegs = (int)Math.Max(Math.Ceiling(length / step), 1);

            List<Point2F> points = new List<Point2F>();
            foreach (Point3F pt in PointListUtils.CreatePointlistFromPolyline(p, nsegs).Points)
                points.Add((Point2F)pt);
            return points;
        }

        // Samples the curve at exactly the given step, appending the polyline's endpoint
        // if the last sample fell short of it (by more than 0.1% of a step).
        public List<Point2F> sample_curve_exact(Polyline p, double step)
        {
            List<Point2F> points = new List<Point2F>();
            foreach (Point3F pt in PointListUtils.CreatePointlistFromPolylineStep(p, step).Points)
                points.Add((Point2F)pt);

            Point2F last_sample = points[points.Count - 1];
            Point2F poly_end = (Point2F)p.LastPoint;
            if (last_sample.DistanceTo(poly_end) > step * 0.001)
                points.Add(poly_end);
            return points;
        }

        // Returns the distance from pt to the nearest boundary segment (outline or island).
        public double Get_dist_to_wall(Point2F pt)
        {
            populate_t4();

            double radius = double.MaxValue;
            foreach (object item in _t4.Get_nearest_objects(pt.X, pt.Y))
            {
                double dist = 0;
                if (item is Line2F)
                    ((Line2F)item).NearestPoint(pt, ref dist);
                else
                    ((Arc2F)item).NearestPoint(pt, ref dist);
                if (dist < radius)
                    radius = dist;
            }
            return radius;
        }

        // Convenience overload: always analyzes inner intersections.
        public bool Is_line_inside_region(Line2F line, double tolerance)
        {
            return Is_line_inside_region(line, true, tolerance);
        }

        // Samples all boundary curves (outline + islands) at the given step.
        public List<Point2F> Get_samples(double sample_step)
        {
            List<Point2F> plist = new List<Point2F>();
            plist.AddRange(sample_curve(_outline, sample_step));
            foreach (Polyline p in _islands)
                plist.AddRange(sample_curve(p, sample_step));
            return plist;
        }

        // Same as Get_samples, but with the exact-step sampler.
        public List<Point2F> Get_samples_exact(double sample_step)
        {
            List<Point2F> plist = new List<Point2F>();
            plist.AddRange(sample_curve_exact(_outline, sample_step));
            foreach (Polyline p in _islands)
                plist.AddRange(sample_curve_exact(p, sample_step));
            return plist;
        }

        // Samples the boundary and delegates medial-axis construction to Medial_builder.
        public bool Build_medial_tree(Medial_branch tree, double sample_step, double general_tolerance, Point2F startpoint, double min_dist_to_wall, bool startpoint_is_a_hint)
        {
            List<Point2F> samples = Get_samples(sample_step);
            Logger.log("got {0} points", samples.Count);
            return Medial_builder.Build(tree, this, samples, general_tolerance, startpoint, min_dist_to_wall, startpoint_is_a_hint);
        }

        // Sizes the T4 index from the outline's extrema padded by T4_MARGIN.
        public Topographer(Polyline outline, Polyline[] islands)
        {
            _outline = outline;
            _islands = islands;

            Point3F min = Point3F.Undefined;
            Point3F max = Point3F.Undefined;
            _outline.GetExtrema(ref min, ref max);

            _t4 = new T4(new T4_rect(min.X - T4_MARGIN, min.Y - T4_MARGIN, max.X + T4_MARGIN, max.Y + T4_MARGIN));
        }
    }

    // Spatial index of circles ("balls"), used to test whether a segment stays inside
    // the union of the balls (e.g. for shortcutting a toolpath through milled area).
    class Ballfield_topographer
    {
        private readonly T4 _t4;
        private const double T4_MARGIN = 1.0;

        public Point2F Min { get { return new Point2F(_t4.Rect.Xmin + T4_MARGIN, _t4.Rect.Ymin + T4_MARGIN); } }
        public Point2F Max { get { return new Point2F(_t4.Rect.Xmax - T4_MARGIN, _t4.Rect.Ymax - T4_MARGIN); } }

        // Returns the balls whose bounding rects collide with the line's bounding rect.
        private List<Circle2F> find_intersecting_balls(Line2F line)
        {
            T4_rect rect = new T4_rect(Math.Min(line.p1.X, line.p2.X),
                                       Math.Min(line.p1.Y, line.p2.Y),
                                       Math.Max(line.p1.X, line.p2.X),
                                       Math.Max(line.p1.Y, line.p2.Y));

            List<Circle2F> balls = new List<Circle2F>();
            // since objects in t4 are generic, convert rects back to balls
            foreach (T4_rect ballrect in _t4.Get_colliding_obj_rects(rect))
                balls.Add(new Circle2F(new Point2F(ballrect.Xc, ballrect.Yc), ballrect.W / 2));
            return balls;
        }

        // we are collecting all the intersections and tracking the list of balls we're inside
        // at any given point. If list becomes empty, we can't shortcut
        public bool Is_line_inside_region(Line2F line, double tolerance)
        {
            Point2F a = line.p1;
            Point2F b = line.p2;

            // Intersections keyed by distance from 'a'; several balls may intersect at the same distance.
            SortedList<double, List<Circle2F>> intersections = new SortedList<double, List<Circle2F>>();
            List<Circle2F> running_balls = new List<Circle2F>();

            foreach (Circle2F ball in find_intersecting_balls(line))
            {
                Line2F insects = ball.LineIntersect(line, tolerance);

                if (insects.p1.IsUndefined && insects.p2.IsUndefined)
                {
                    // no intersections: check if whole path lay inside the circle
                    if (a.DistanceTo(ball.Center) < ball.Radius + tolerance && b.DistanceTo(ball.Center) < ball.Radius + tolerance)
                        return true;
                }
                else if (insects.p1.IsUndefined || insects.p2.IsUndefined)
                {
                    // single intersection. one of the path ends must be inside the circle, otherwise it is a tangent case
                    // and should be ignored
                    if (a.DistanceTo(ball.Center) < ball.Radius + tolerance)
                    {
                        // start point 'a' is inside this ball: it is "running" from the outset
                        running_balls.Add(ball);
                    }
                    else if (b.DistanceTo(ball.Center) < ball.Radius + tolerance)
                    {
                        // end point 'b' is inside: the ball becomes active at the recorded
                        // intersection (handled by the sweep below), so nothing to do here
                        ;
                    }
                    else
                    {
                        continue;
                    }
                    Point2F c = insects.p1.IsUndefined ? insects.p2 : insects.p1;
                    double d = c.DistanceTo(a);
                    if (!intersections.ContainsKey(d))
                        intersections.Add(d, new List<Circle2F>());
                    intersections[d].Add(ball);
                }
                else
                {
                    // double intersection
                    double d = insects.p1.DistanceTo(a);
                    if (!intersections.ContainsKey(d))
                        intersections.Add(d, new List<Circle2F>());
                    intersections[d].Add(ball);

                    d = insects.p2.DistanceTo(a);
                    if (!intersections.ContainsKey(d))
                        intersections.Add(d, new List<Circle2F>());
                    intersections[d].Add(ball);
                }
            }

            // 'a' must start inside at least one ball, otherwise the segment leaves the region immediately.
            if (running_balls.Count == 0)
                return false;

            // Sweep along the segment: each intersection toggles its ball's membership.
            foreach (var ins in intersections)
            {
                foreach (Circle2F s in ins.Value)
                {
                    if (running_balls.Contains(s))
                        running_balls.Remove(s);
                    else
                        running_balls.Add(s);
                }
                // Covered by no ball before reaching 'b' (with tolerance) => gap in the region.
                if (running_balls.Count == 0 && (ins.Key + tolerance < a.DistanceTo(b)))
                    return false;
            }

            return true;
        }

        // Returns objects of type T whose rects collide with the given axis-aligned box.
        public List<T> Get_colliding_objects<T>(Point2F min, Point2F max)
        {
            T4_rect rect = new T4_rect(min.X, min.Y, max.X, max.Y);
            return _t4.Get_colliding_objects<T>(rect);
        }

        // Indexes obj under the bounding square of the ball (center, radius).
        public void Add(Point2F center, double radius, object obj)
        {
            T4_rect rect = new T4_rect(center.X - radius, center.Y - radius, center.X + radius, center.Y + radius);
            _t4.Add(rect, obj);
        }

        public void Add(Circle2F ball, object obj)
        {
            this.Add(ball.Center, ball.Radius, obj);
        }

        public Ballfield_topographer(Point2F min, Point2F max)
        {
            _t4 = new T4(new T4_rect(min.X - 1, min.Y - 1, max.X + 1, max.Y + 1));
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Orleans.GrainDirectory;
using Orleans.Runtime.Configuration;

namespace Orleans.Runtime.GrainDirectory
{
    /// <summary>
    /// Directory record for a single activation: where it lives, when it was registered,
    /// and its multi-cluster registration status.
    /// </summary>
    [Serializable]
    internal class ActivationInfo : IActivationInfo
    {
        public SiloAddress SiloAddress { get; private set; }
        public DateTime TimeCreated { get; private set; }
        public GrainDirectoryEntryStatus RegistrationStatus { get; set; }

        public ActivationInfo(SiloAddress siloAddress, GrainDirectoryEntryStatus registrationStatus)
        {
            SiloAddress = siloAddress;
            TimeCreated = DateTime.UtcNow;
            RegistrationStatus = registrationStatus;
        }

        // Copy constructor.
        public ActivationInfo(IActivationInfo iActivationInfo)
        {
            SiloAddress = iActivationInfo.SiloAddress;
            TimeCreated = iActivationInfo.TimeCreated;
            RegistrationStatus = iActivationInfo.RegistrationStatus;
        }

        /// <summary>
        /// Decides whether this entry may be removed for the given unregistration cause.
        /// Cache entries are always removable; otherwise lazy deregistration only removes
        /// entries older than the configured delay (protects a racing re-registration).
        /// </summary>
        public bool OkToRemove(UnregistrationCause cause, GlobalConfiguration config)
        {
            switch (cause)
            {
                case UnregistrationCause.Force:
                    return true;

                case UnregistrationCause.CacheInvalidation:
                    return RegistrationStatus == GrainDirectoryEntryStatus.Cached;

                case UnregistrationCause.NonexistentActivation:
                    {
                        if (RegistrationStatus == GrainDirectoryEntryStatus.Cached)
                            return true; // cache entries are always removed

                        var delayparameter = config.DirectoryLazyDeregistrationDelay;
                        if (delayparameter <= TimeSpan.Zero)
                            return false; // no lazy deregistration
                        else
                            return (TimeCreated <= DateTime.UtcNow - delayparameter);
                    }

                default:
                    throw new OrleansException("unhandled case");
            }
        }

        public override string ToString()
        {
            return String.Format("{0}, {1}", SiloAddress, TimeCreated);
        }
    }

    /// <summary>
    /// Directory record for one grain: its activations plus a version tag (etag) that is
    /// regenerated (randomly) on every mutation of the activation list.
    /// </summary>
    [Serializable]
    internal class GrainInfo : IGrainInfo
    {
        public Dictionary<ActivationId, IActivationInfo> Instances { get; private set; }
        public int VersionTag { get; private set; }
        public bool SingleInstance { get; private set; }

        private static readonly SafeRandom rand;
        internal const int NO_ETAG = -1;

        static GrainInfo()
        {
            rand = new SafeRandom();
        }

        internal GrainInfo()
        {
            Instances = new Dictionary<ActivationId, IActivationInfo>();
            VersionTag = 0;
            SingleInstance = false;
        }

        /// <summary>
        /// Registers an activation (multi-activation mode). Returns false (and leaves the
        /// version tag untouched) when the same activation on the same silo is re-registered.
        /// </summary>
        public bool AddActivation(ActivationId act, SiloAddress silo)
        {
            if (SingleInstance && (Instances.Count > 0) && !Instances.ContainsKey(act))
            {
                throw new InvalidOperationException(
                    "Attempting to add a second activation to an existing grain in single activation mode");
            }
            IActivationInfo info;
            if (Instances.TryGetValue(act, out info))
            {
                if (info.SiloAddress.Equals(silo))
                {
                    // just refresh, no need to generate new VersionTag
                    return false;
                }
            }
            Instances[act] = new ActivationInfo(silo, GrainDirectoryEntryStatus.ClusterLocal);
            VersionTag = rand.Next();
            return true;
        }

        /// <summary>
        /// Registers a single-activation grain. If an activation already exists, the existing
        /// address wins and is returned instead of the requested one.
        /// </summary>
        public ActivationAddress AddSingleActivation(GrainId grain, ActivationId act, SiloAddress silo, GrainDirectoryEntryStatus registrationStatus)
        {
            SingleInstance = true;
            if (Instances.Count > 0)
            {
                var item = Instances.First();
                return ActivationAddress.GetAddress(item.Value.SiloAddress, grain, item.Key);
            }
            else
            {
                Instances.Add(act, new ActivationInfo(silo, registrationStatus));
                VersionTag = rand.Next();
                return ActivationAddress.GetAddress(silo, grain, act);
            }
        }

        /// <summary>
        /// Removes an activation if OkToRemove permits. Returns true when the grain has no
        /// activations left (caller then drops the whole grain record).
        /// </summary>
        public bool RemoveActivation(ActivationId act, UnregistrationCause cause, GlobalConfiguration config, out IActivationInfo info, out bool wasRemoved)
        {
            info = null;
            wasRemoved = false;
            if (Instances.TryGetValue(act, out info) && info.OkToRemove(cause, config))
            {
                Instances.Remove(act);
                wasRemoved = true;
                VersionTag = rand.Next();
            }
            return Instances.Count == 0;
        }

        /// <summary>
        /// Merges another grain record into this one. For single-activation grains that ended
        /// up with several activations, keeps one (lowest ActivationId) and returns the rest,
        /// grouped per silo, so the caller can ask those silos to deactivate them.
        /// Returns null when no duplicates need dropping.
        /// </summary>
        public Dictionary<SiloAddress, List<ActivationAddress>> Merge(GrainId grain, IGrainInfo other)
        {
            bool modified = false;
            foreach (var pair in other.Instances)
            {
                if (Instances.ContainsKey(pair.Key)) continue;

                Instances[pair.Key] = new ActivationInfo(pair.Value.SiloAddress, pair.Value.RegistrationStatus);
                modified = true;
            }

            if (modified)
            {
                VersionTag = rand.Next();
            }

            if (SingleInstance && (Instances.Count > 0))
            {
                // Grain is supposed to be in single activation mode, but we have two activations!!
                // Eventually we should somehow delegate handling this to the silo, but for now, we'll arbitrarily pick one value.
                var orderedActivations = Instances.OrderBy(pair => pair.Key);
                var activationToKeep = orderedActivations.First();
                var activationsToDrop = orderedActivations.Skip(1);
                Instances.Clear();
                Instances.Add(activationToKeep.Key, activationToKeep.Value);
                var mapping = new Dictionary<SiloAddress, List<ActivationAddress>>();
                foreach (var activationPair in activationsToDrop)
                {
                    var activation = ActivationAddress.GetAddress(activationPair.Value.SiloAddress, grain, activationPair.Key);
                    List<ActivationAddress> activationsToRemoveOnSilo;
                    if (!mapping.TryGetValue(activation.Silo, out activationsToRemoveOnSilo))
                    {
                        activationsToRemoveOnSilo = mapping[activation.Silo] = new List<ActivationAddress>(1);
                    }
                    activationsToRemoveOnSilo.Add(activation);
                }
                return mapping;
            }
            return null;
        }

        /// <summary>
        /// Replaces the (possibly present) old activation with a cached entry pointing at an
        /// activation owned by a remote cluster.
        /// </summary>
        public void CacheOrUpdateRemoteClusterRegistration(GrainId grain, ActivationId oldActivation, ActivationId activation, SiloAddress silo)
        {
            SingleInstance = true;
            if (Instances.Count > 0)
            {
                Instances.Remove(oldActivation);
            }
            Instances.Add(activation, new ActivationInfo(silo, GrainDirectoryEntryStatus.Cached));
        }

        /// <summary>
        /// Updates the registration status of an activation, optionally only when its current
        /// status matches compareWith (compare-and-swap). Returns false if not applied.
        /// </summary>
        public bool UpdateClusterRegistrationStatus(ActivationId activationId, GrainDirectoryEntryStatus status, GrainDirectoryEntryStatus? compareWith = null)
        {
            IActivationInfo activationInfo;
            if (!Instances.TryGetValue(activationId, out activationInfo))
                return false;
            if (compareWith.HasValue && compareWith.Value != activationInfo.RegistrationStatus)
                return false;
            activationInfo.RegistrationStatus = status;
            return true;
        }
    }

    internal class GrainDirectoryPartition
    {
        // Should we change this to SortedList<> or SortedDictionary so we can extract chunks better for shipping the full
        // partition to a follower, or should we leave it as a Dictionary to get O(1) lookups instead of O(log n), figuring we do
        // a lot more lookups and so can sort periodically?
        /// <summary>
        /// contains a map from grain to its list of activations along with the version (etag) counter for the list
        /// </summary>
        private Dictionary<GrainId, IGrainInfo> partitionData;
        private readonly object lockable;
        private readonly Logger log;
        private readonly ISiloStatusOracle siloStatusOracle;
        private readonly GlobalConfiguration globalConfig;
        private readonly IInternalGrainFactory grainFactory;

        // Per-thread scratch arrays reused by LookUpActivations to avoid allocating on every lookup.
        [ThreadStatic]
        private static ActivationId[] activationIdsHolder;

        [ThreadStatic]
        private static IActivationInfo[] activationInfosHolder;

        internal int Count { get { return partitionData.Count; } }

        public GrainDirectoryPartition(ISiloStatusOracle siloStatusOracle, GlobalConfiguration globalConfig, IInternalGrainFactory grainFactory)
        {
            partitionData = new Dictionary<GrainId, IGrainInfo>();
            lockable = new object();
            log = LogManager.GetLogger("DirectoryPartition");
            this.siloStatusOracle = siloStatusOracle;
            this.globalConfig = globalConfig;
            this.grainFactory = grainFactory;
        }

        // A silo is valid if the membership oracle still considers it a functional directory participant.
        private bool IsValidSilo(SiloAddress silo)
        {
            return this.siloStatusOracle.IsFunctionalDirectory(silo);
        }

        internal void Clear()
        {
            lock (lockable)
            {
                partitionData.Clear();
            }
        }

        /// <summary>
        /// Returns all entries stored in the partition as an enumerable collection
        /// </summary>
        /// <returns></returns>
        public Dictionary<GrainId, IGrainInfo> GetItems()
        {
            lock (lockable)
            {
                return partitionData.Copy();
            }
        }

        /// <summary>
        /// Adds a new activation to the directory partition
        /// </summary>
        /// <param name="grain"></param>
        /// <param name="activation"></param>
        /// <param name="silo"></param>
        /// <returns>The version associated with this directory mapping</returns>
        internal virtual int AddActivation(GrainId grain, ActivationId activation, SiloAddress silo)
        {
            if (!IsValidSilo(silo))
            {
                return GrainInfo.NO_ETAG;
            }
            IGrainInfo grainInfo;
            lock (lockable)
            {
                if (!partitionData.TryGetValue(grain, out grainInfo))
                {
                    partitionData[grain] = grainInfo = new GrainInfo();
                }

                grainInfo.AddActivation(activation, silo);
            }

            if (log.IsVerbose3) log.Verbose3("Adding activation for grain {0}", grain.ToString());
            // NOTE(review): VersionTag is read outside the lock here; a concurrent mutation could
            // make the returned tag newer than the add just performed — confirm this is acceptable.
            return grainInfo.VersionTag;
        }

        /// <summary>
        /// Adds a new activation to the directory partition
        /// </summary>
        /// <param name="grain"></param>
        /// <param name="activation"></param>
        /// <param name="silo"></param>
        /// <param name="registrationStatus"></param>
        /// <returns>The registered ActivationAddress and version associated with this directory mapping</returns>
        internal virtual AddressAndTag AddSingleActivation(GrainId grain, ActivationId activation, SiloAddress silo, GrainDirectoryEntryStatus registrationStatus)
        {
            if (log.IsVerbose3) log.Verbose3("Adding single activation for grain {0}{1}{2}", silo, grain, activation);

            AddressAndTag result = new AddressAndTag();

            if (!IsValidSilo(silo))
                return result;

            IGrainInfo grainInfo;
            lock (lockable)
            {
                if (!partitionData.TryGetValue(grain, out grainInfo))
                {
                    partitionData[grain] = grainInfo = new GrainInfo();
                }

                result.Address = grainInfo.AddSingleActivation(grain, activation, silo, registrationStatus);
                result.VersionTag = grainInfo.VersionTag;
            }

            return result;
        }

        /// <summary>
        /// Removes an activation of the given grain from the partition
        /// </summary>
        /// <param name="grain">the identity of the grain</param>
        /// <param name="activation">the id of the activation</param>
        /// <param name="cause">reason for removing the activation</param>
        internal void RemoveActivation(GrainId grain, ActivationId activation, UnregistrationCause cause = UnregistrationCause.Force)
        {
            IActivationInfo ignore1;
            bool ignore2;
            RemoveActivation(grain, activation, cause, out ignore1, out ignore2);
        }

        /// <summary>
        /// Removes an activation of the given grain from the partition
        /// </summary>
        /// <param name="grain">the identity of the grain</param>
        /// <param name="activation">the id of the activation</param>
        /// <param name="cause">reason for removing the activation</param>
        /// <param name="entry">returns the entry, if found </param>
        /// <param name="wasRemoved">returns whether the entry was actually removed</param>
        internal void RemoveActivation(GrainId grain, ActivationId activation, UnregistrationCause cause, out IActivationInfo entry, out bool wasRemoved)
        {
            wasRemoved = false;
            entry = null;
            lock (lockable)
            {
                if (partitionData.ContainsKey(grain) && partitionData[grain].RemoveActivation(activation, cause, globalConfig, out entry, out wasRemoved))
                    // if the last activation for the grain was removed, we remove the entire grain info
                    partitionData.Remove(grain);
            }
            if (log.IsVerbose3) log.Verbose3("Removing activation for grain {0} cause={1} was_removed={2}", grain.ToString(), cause, wasRemoved);
        }

        /// <summary>
        /// Removes the grain (and, effectively, all its activations) from the directory
        /// </summary>
        /// <param name="grain"></param>
        internal void RemoveGrain(GrainId grain)
        {
            lock (lockable)
            {
                partitionData.Remove(grain);
            }
            if (log.IsVerbose3) log.Verbose3("Removing grain {0}", grain.ToString());
        }

        /// <summary>
        /// Returns a list of activations (along with the version number of the list) for the given grain.
        /// If the grain is not found, null is returned.
        /// </summary>
        /// <param name="grain"></param>
        /// <returns></returns>
        internal AddressesAndTag LookUpActivations(GrainId grain)
        {
            var result = new AddressesAndTag();
            ActivationId[] activationIds;
            IActivationInfo[] activationInfos;
            // Lookups below this size reuse the [ThreadStatic] scratch arrays instead of allocating.
            const int arrayReusingThreshold = 100;
            int grainInfoInstancesCount;

            lock (lockable)
            {
                IGrainInfo graininfo;
                if (!partitionData.TryGetValue(grain, out graininfo))
                {
                    return result;
                }

                result.VersionTag = graininfo.VersionTag;

                grainInfoInstancesCount = graininfo.Instances.Count;
                if (grainInfoInstancesCount < arrayReusingThreshold)
                {
                    if ((activationIds = activationIdsHolder) == null)
                    {
                        activationIdsHolder = activationIds = new ActivationId[arrayReusingThreshold];
                    }

                    if ((activationInfos = activationInfosHolder) == null)
                    {
                        activationInfosHolder = activationInfos = new IActivationInfo[arrayReusingThreshold];
                    }
                }
                else
                {
                    activationIds = new ActivationId[grainInfoInstancesCount];
                    activationInfos = new IActivationInfo[grainInfoInstancesCount];
                }

                // Copy under the lock; the address list is built outside to keep the lock short.
                graininfo.Instances.Keys.CopyTo(activationIds, 0);
                graininfo.Instances.Values.CopyTo(activationInfos, 0);
            }

            result.Addresses = new List<ActivationAddress>(grainInfoInstancesCount);
            for (var i = 0; i < grainInfoInstancesCount; i++)
            {
                var activationInfo = activationInfos[i];
                if (IsValidSilo(activationInfo.SiloAddress))
                {
                    result.Addresses.Add(ActivationAddress.GetAddress(activationInfo.SiloAddress, grain, activationIds[i]));
                }

                // Null out scratch slots so reused [ThreadStatic] arrays don't pin objects.
                activationInfos[i] = null;
                activationIds[i] = null;
            }

            return result;
        }

        /// <summary>
        /// Returns the activation of a single-activation grain, if present.
        /// </summary>
        internal GrainDirectoryEntryStatus TryGetActivation(GrainId grain, out ActivationAddress address, out int version)
        {
            IGrainInfo grainInfo;
            address = null;
            version = 0;
            lock (lockable)
            {
                if (!partitionData.TryGetValue(grain, out grainInfo))
                {
                    return GrainDirectoryEntryStatus.Invalid;
                }

                var first = grainInfo.Instances.FirstOrDefault();
                if (first.Value != null)
                {
                    address = ActivationAddress.GetAddress(first.Value.SiloAddress, grain, first.Key);
                    version = grainInfo.VersionTag;
                    return first.Value.RegistrationStatus;
                }
            }

            return GrainDirectoryEntryStatus.Invalid;
        }

        /// <summary>
        /// Returns the version number of the list of activations for the grain.
        /// If the grain is not found, -1 is returned.
        /// </summary>
        /// <param name="grain"></param>
        /// <returns></returns>
        internal int GetGrainETag(GrainId grain)
        {
            IGrainInfo grainInfo;
            lock (lockable)
            {
                if (!partitionData.TryGetValue(grain, out grainInfo))
                {
                    return GrainInfo.NO_ETAG;
                }

                return grainInfo.VersionTag;
            }
        }

        /// <summary>
        /// Merges one partition into another, assuming partitions are disjoint.
        /// This method is supposed to be used by handoff manager to update the partitions when the system view (set of live silos) changes.
        /// </summary>
        /// <param name="other"></param>
        internal void Merge(GrainDirectoryPartition other)
        {
            lock (lockable)
            {
                foreach (var pair in other.partitionData)
                {
                    if (partitionData.ContainsKey(pair.Key))
                    {
                        if (log.IsVerbose) log.Verbose("While merging two disjoint partitions, same grain " + pair.Key + " was found in both partitions");
                        var activationsToDrop = partitionData[pair.Key].Merge(pair.Key, pair.Value);
                        if (activationsToDrop == null) continue;

                        // Duplicate single-activation registrations: ask the losing silos to deactivate them.
                        foreach (var siloActivations in activationsToDrop)
                        {
                            var remoteCatalog = grainFactory.GetSystemTarget<ICatalog>(Constants.CatalogId, siloActivations.Key);
                            remoteCatalog.DeleteActivations(siloActivations.Value).Ignore();
                        }
                    }
                    else
                    {
                        partitionData.Add(pair.Key, pair.Value);
                    }
                }
            }
        }

        /// <summary>
        /// Runs through all entries in the partition and moves/copies (depending on the given flag) the
        /// entries satisfying the given predicate into a new partition.
        /// This method is supposed to be used by handoff manager to update the partitions when the system view (set of live silos) changes.
        /// </summary>
        /// <param name="predicate">filter predicate (usually if the given grain is owned by particular silo)</param>
        /// <param name="modifyOrigin">flag controlling whether the source partition should be modified (i.e., the entries should be moved or just copied) </param>
        /// <returns>new grain directory partition containing entries satisfying the given predicate</returns>
        internal GrainDirectoryPartition Split(Predicate<GrainId> predicate, bool modifyOrigin)
        {
            var newDirectory = new GrainDirectoryPartition(this.siloStatusOracle, this.globalConfig, this.grainFactory);

            if (modifyOrigin)
            {
                // Since we use the "pairs" list to modify the underlying collection below, we need to turn it into an actual list here
                List<KeyValuePair<GrainId, IGrainInfo>> pairs;
                lock (lockable)
                {
                    pairs = partitionData.Where(pair => predicate(pair.Key)).ToList();
                }

                foreach (var pair in pairs)
                {
                    newDirectory.partitionData.Add(pair.Key, pair.Value);
                }

                lock (lockable)
                {
                    foreach (var pair in pairs)
                    {
                        partitionData.Remove(pair.Key);
                    }
                }
            }
            else
            {
                lock (lockable)
                {
                    foreach (var pair in partitionData.Where(pair => predicate(pair.Key)))
                    {
                        newDirectory.partitionData.Add(pair.Key, pair.Value);
                    }
                }
            }

            return newDirectory;
        }

        /// <summary>
        /// Returns addresses of all activations whose grain matches the requested activation mode,
        /// filtered to silos that are still functional.
        /// </summary>
        internal List<ActivationAddress> ToListOfActivations(bool singleActivation)
        {
            var result = new List<ActivationAddress>();
            lock (lockable)
            {
                foreach (var pair in partitionData)
                {
                    var grain = pair.Key;
                    if (pair.Value.SingleInstance == singleActivation)
                    {
                        result.AddRange(pair.Value.Instances.Select(activationPair => ActivationAddress.GetAddress(activationPair.Value.SiloAddress, grain, activationPair.Key))
                            .Where(addr => IsValidSilo(addr.Silo)));
                    }
                }
            }

            return result;
        }

        /// <summary>
        /// Sets the internal partition dictionary to the one given as input parameter.
        /// This method is supposed to be used by handoff manager to update the old partition with a new partition.
        /// </summary>
        /// <param name="newPartitionData">new internal partition dictionary</param>
        internal void Set(Dictionary<GrainId, IGrainInfo> newPartitionData)
        {
            // NOTE(review): unsynchronized swap of partitionData; presumably callers guarantee exclusivity — confirm.
            partitionData = newPartitionData;
        }

        /// <summary>
        /// Updates partition with a new delta of changes.
        /// This method is supposed to be used by handoff manager to update the partition with a set of delta changes.
        /// </summary>
        /// <param name="newPartitionDelta">dictionary holding a set of delta updates to this partition.
        /// If the value for a given key in the delta is valid, then existing entry in the partition is replaced.
        /// Otherwise, i.e., if the value is null, the corresponding entry is removed.
        /// </param>
        internal void Update(Dictionary<GrainId, IGrainInfo> newPartitionDelta)
        {
            lock (lockable)
            {
                foreach (GrainId grain in newPartitionDelta.Keys)
                {
                    if (newPartitionDelta[grain] != null)
                    {
                        partitionData[grain] = newPartitionDelta[grain];
                    }
                    else
                    {
                        partitionData.Remove(grain);
                    }
                }
            }
        }

        public override string ToString()
        {
            var sb = new StringBuilder();

            lock (lockable)
            {
                foreach (var grainEntry in partitionData)
                {
                    foreach (var activationEntry in grainEntry.Value.Instances)
                    {
                        sb.Append("    ").Append(grainEntry.Key.ToString()).Append("[" + grainEntry.Value.VersionTag + "]").
                            Append(" => ").Append(activationEntry.Key.ToString()).
                            Append(" @ ").AppendLine(activationEntry.Value.ToString());
                    }
                }
            }

            return sb.ToString();
        }

        /// <summary>
        /// Caches a registration owned by another cluster, replacing a stale local entry if present;
        /// registers it as a fresh cached single activation when the grain is unknown here.
        /// </summary>
        public void CacheOrUpdateRemoteClusterRegistration(GrainId grain, ActivationId oldActivation, ActivationAddress otherClusterAddress)
        {
            lock (lockable)
            {
                if (partitionData.ContainsKey(grain))
                {
                    partitionData[grain].CacheOrUpdateRemoteClusterRegistration(grain, oldActivation, otherClusterAddress.Activation,
                        otherClusterAddress.Silo);
                }
                else
                {
                    AddSingleActivation(grain, otherClusterAddress.Activation, otherClusterAddress.Silo,
                        GrainDirectoryEntryStatus.Cached);
                }
            }
        }

        /// <summary>
        /// Compare-and-set of an activation's registration status; false when the grain or
        /// activation is unknown or the comparison fails.
        /// </summary>
        public bool UpdateClusterRegistrationStatus(GrainId grain, ActivationId activationId, GrainDirectoryEntryStatus registrationStatus, GrainDirectoryEntryStatus? compareWith = null)
        {
            lock (lockable)
            {
                IGrainInfo graininfo;
                if (partitionData.TryGetValue(grain, out graininfo))
                {
                    return graininfo.UpdateClusterRegistrationStatus(activationId, registrationStatus, compareWith);
                }
                return false;
            }
        }
    }
}
// Author: Robert Scheller

using Landis.Core;
using Landis.SpatialModeling;
using Landis.Library.LeafBiomassCohorts;
using System.Collections.Generic;
using Landis.Library.Climate;

namespace Landis.Extension.Succession.NetEcosystemCN
{
    /// <summary>
    /// Utility methods.
    /// </summary>
    public class Main
    {
        public static int Year;
        public static int Month;
        public static int MonthCnt;

        /// <summary>
        /// Grows all cohorts at a site for a specified number of years.
        /// Litter is decomposed following growth.
        /// </summary>
        public static ISiteCohorts Run(ActiveSite site, int years, bool isSuccessionTimeStep)
        {
            ISiteCohorts siteCohorts = SiteVars.Cohorts[site];
            IEcoregion ecoregion = PlugIn.ModelCore.Ecoregion[site];

            for (int y = 0; y < years; ++y)
            {
                Year = y + 1;

                // Pull this simulation year's weather from the climate library if a future-climate
                // record exists for it.
                if (PlugIn.ModelCore.CurrentTime > 0 && Climate.Future_MonthlyData.ContainsKey(PlugIn.FutureClimateBaseYear + y + PlugIn.ModelCore.CurrentTime - years))
                    EcoregionData.AnnualWeather[ecoregion] = Climate.Future_MonthlyData[PlugIn.FutureClimateBaseYear + y - years + PlugIn.ModelCore.CurrentTime][ecoregion.Index];

                SiteVars.ResetAnnualValues(site);

                // Apply fire disturbance effects (only in the first simulated year of this call).
                if (y == 0 && SiteVars.FireSeverity != null && SiteVars.FireSeverity[site] > 0)
                    FireEffects.ReduceLayers(SiteVars.FireSeverity[site], site);

                // Next, Grow and Decompose each month
                // Month order starts at index 6 (July with 0-based January) and wraps around the year.
                int[] months = new int[12]{6, 7, 8, 9, 10, 11, 0, 1, 2, 3, 4, 5};

                // NOTE(review): this reassignment is identical to the array above, so CalibrateMode
                // currently has no effect on month order — confirm whether a different order was intended.
                if (OtherData.CalibrateMode)
                    months = new int[12] { 6, 7, 8, 9, 10, 11, 0, 1, 2, 3, 4, 5 };

                for (MonthCnt = 0; MonthCnt < 12; MonthCnt++)
                {
                    // Calculate mineral N fractions based on coarse root biomass
                    if (MonthCnt == 0)
                    {
                        AvailableN.CalculateMineralNfraction(site);
                    }
                    //PlugIn.ModelCore.UI.WriteLine("SiteVars.MineralN = {0:0.00}, month = {1}.", SiteVars.MineralN[site], i);

                    Month = months[MonthCnt];

                    // Zero out this month's accumulators before growth/decomposition.
                    SiteVars.MonthlyAGNPPcarbon[site][Month] = 0.0;
                    SiteVars.MonthlyBGNPPcarbon[site][Month] = 0.0;
                    SiteVars.MonthlyNEE[site][Month] = 0.0;
                    SiteVars.MonthlyResp[site][Month] = 0.0;
                    SiteVars.MonthlyStreamN[site][Month] = 0.0;
                    SiteVars.SourceSink[site].Carbon = 0.0;

                    SiteVars.TotalWoodBiomass[site] = Main.ComputeWoodBiomass((ActiveSite) site);
                    //SiteVars.LAI[site] = Main.ComputeLAI((ActiveSite)site);

                    // N deposition: linear function of precipitation if slope/intercept were supplied,
                    // otherwise taken directly from the climate record.
                    double ppt = EcoregionData.AnnualWeather[ecoregion].MonthlyPrecip[Main.Month];

                    double monthlyNdeposition;
                    if (EcoregionData.AtmosNintercept[ecoregion] != -1 && EcoregionData.AtmosNslope[ecoregion] != -1)
                        monthlyNdeposition = EcoregionData.AtmosNintercept[ecoregion] + (EcoregionData.AtmosNslope[ecoregion] * ppt);
                    else
                    {
                        monthlyNdeposition = EcoregionData.AnnualWeather[ecoregion].MonthlyNDeposition[Main.Month];
                    }

                    if (monthlyNdeposition < 0)
                        throw new System.ApplicationException("Error: Nitrogen deposition input data are not present in climate library");

                    EcoregionData.MonthlyNDeposition[ecoregion][Month] = monthlyNdeposition;
                    EcoregionData.AnnualNDeposition[ecoregion] += monthlyNdeposition;
                    SiteVars.MineralN[site] += monthlyNdeposition;
                    //PlugIn.ModelCore.UI.WriteLine("Ndeposition={0},MineralN={1:0.00}.", monthlyNdeposition, SiteVars.MineralN[site]);

                    double liveBiomass = (double) ComputeLivingBiomass(siteCohorts);
                    double baseFlow, stormFlow;
                    SoilWater.Run(y, Month, liveBiomass, site, out baseFlow, out stormFlow);

                    // Calculate N allocation for each cohort
                    AvailableN.SetMineralNallocation(site);

                    // NOTE(review): y runs 0..years-1, so (y == years) is always false here; if the
                    // intent was "last year of the succession time step", (y == years - 1) seems
                    // intended — confirm against the cohort library's Grow contract before changing.
                    if (MonthCnt == 11)
                        siteCohorts.Grow(site, (y == years && isSuccessionTimeStep), true);
                    else
                        siteCohorts.Grow(site, (y == years && isSuccessionTimeStep), false);

                    WoodLayer.Decompose(site);
                    LitterLayer.Decompose(site);
                    SoilLayer.Decompose(site);

                    //...Volatilization loss as a function of the mineral n which
                    // remains after uptake by plants. ML added a correction factor for wetlands since their denitrification rate is double that of wetlands
                    //based on a review paper by Seitziner 2006.
                    double volatilize = (SiteVars.MineralN[site] * EcoregionData.Denitrif[ecoregion]); // monthly value

                    //PlugIn.ModelCore.UI.WriteLine("BeforeVol. MineralN={0:0.00}.", SiteVars.MineralN[site]);

                    SiteVars.MineralN[site] -= volatilize;
                    SiteVars.SourceSink[site].Nitrogen += volatilize;
                    SiteVars.Nvol[site] += volatilize;

                    SoilWater.Leach(site, baseFlow, stormFlow);

                    // NEE sign convention: NPP draws carbon down, source/sink releases add it back.
                    SiteVars.MonthlyNEE[site][Month] -= SiteVars.MonthlyAGNPPcarbon[site][Month];
                    SiteVars.MonthlyNEE[site][Month] -= SiteVars.MonthlyBGNPPcarbon[site][Month];
                    SiteVars.MonthlyNEE[site][Month] += SiteVars.SourceSink[site].Carbon;
                }
            }

            ComputeTotalCohortCN(site, siteCohorts);

            return siteCohorts;
        }

        //---------------------------------------------------------------------
        // Sums live biomass (wood + leaf) over all cohorts, truncated to int per cohort.
        public static int ComputeLivingBiomass(ISiteCohorts cohorts)
        {
            int total = 0;
            if (cohorts != null)
                foreach (ISpeciesCohorts speciesCohorts in cohorts)
                    foreach (ICohort cohort in speciesCohorts)
                        total += (int) (cohort.WoodBiomass + cohort.LeafBiomass);
                    //total += ComputeBiomass(speciesCohorts);
            return total;
        }

        //---------------------------------------------------------------------
        // Sums wood biomass over all cohorts at the site.
        public static double ComputeWoodBiomass(ActiveSite site)
        {
            double woodBiomass = 0;
            if (SiteVars.Cohorts[site] != null)
                foreach (ISpeciesCohorts speciesCohorts in SiteVars.Cohorts[site])
                    foreach (ICohort cohort in speciesCohorts)
                        woodBiomass += cohort.WoodBiomass;
            return woodBiomass;
        }

        //---------------------------------------------------------------------
        // Resets the site's cohort C/N pools and re-accumulates them from every cohort.
        private static void ComputeTotalCohortCN(ActiveSite site, ISiteCohorts cohorts)
        {
            SiteVars.CohortLeafC[site] = 0;
            SiteVars.CohortFRootC[site] = 0;
            SiteVars.CohortLeafN[site] = 0;
            SiteVars.CohortFRootN[site] = 0;
            SiteVars.CohortWoodC[site] = 0;
            SiteVars.CohortCRootC[site] = 0;
            SiteVars.CohortWoodN[site] = 0;
            SiteVars.CohortCRootN[site] = 0;

            if (cohorts != null)
                foreach (ISpeciesCohorts speciesCohorts in cohorts)
                    foreach (ICohort cohort in speciesCohorts)
                        CalculateCohortCN(site, cohort);
            return;
        }

        /// <summary>
        /// Summarize cohort C&N for output.
        /// </summary>
        private static void CalculateCohortCN(ActiveSite site, ICohort cohort)
        {
            ISpecies species = cohort.Species;

            // 0.47 = assumed carbon fraction of biomass.
            double leafC = cohort.LeafBiomass * 0.47;
            double woodC = cohort.WoodBiomass * 0.47;

            double fRootC = Roots.CalculateFineRoot(cohort, leafC);
            double cRootC = Roots.CalculateCoarseRoot(cohort, woodC);

            double totalC = leafC + woodC + fRootC + cRootC;

            // N derived from C via the species' tissue C:N ratios.
            double leafN = leafC / (double) SpeciesData.LeafCN[species];
            double woodN = woodC / (double) SpeciesData.WoodCN[species];
            double cRootN = cRootC / (double) SpeciesData.CoarseRootCN[species];
            double fRootN = fRootC / (double) SpeciesData.FineRootCN[species];

            //double totalN = woodN + cRootN + leafN + fRootN;

            //PlugIn.ModelCore.UI.WriteLine("month={0}, species={1}, leafB={2:0.0}, leafC={3:0.00}, leafN={4:0.0}, woodB={5:0.0}, woodC={6:0.000}, woodN={7:0.0}", Month, cohort.Species.Name, cohort.LeafBiomass, leafC, leafN, cohort.WoodBiomass, woodC, woodN);

            SiteVars.CohortLeafC[site] += leafC;
            SiteVars.CohortFRootC[site] += fRootC;
            SiteVars.CohortLeafN[site] += leafN;
            SiteVars.CohortFRootN[site] += fRootN;
            SiteVars.CohortWoodC[site] += woodC;
            SiteVars.CohortCRootC[site] += cRootC;
            SiteVars.CohortWoodN[site] += woodN ;
            SiteVars.CohortCRootN[site] += cRootN;

            return;
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;

namespace WebApp.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Generates model descriptions for given types.
    /// Descriptions are cached by model name in <see cref="GeneratedModels"/>, so each
    /// type is described at most once per generator instance.
    /// </summary>
    public class ModelDescriptionGenerator
    {
        // Maps a data-annotation attribute type to a function producing its help text.
        // Modify this to support more data annotation attributes.
        private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
        {
            { typeof(RequiredAttribute), a => "Required" },
            { typeof(RangeAttribute), a =>
                {
                    RangeAttribute range = (RangeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
                }
            },
            { typeof(MaxLengthAttribute), a =>
                {
                    MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
                }
            },
            { typeof(MinLengthAttribute), a =>
                {
                    MinLengthAttribute minLength = (MinLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
                }
            },
            { typeof(StringLengthAttribute), a =>
                {
                    StringLengthAttribute strLength = (StringLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
                }
            },
            { typeof(DataTypeAttribute), a =>
                {
                    DataTypeAttribute dataType = (DataTypeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
                }
            },
            { typeof(RegularExpressionAttribute), a =>
                {
                    RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
                }
            },
        };

        // Human-readable names for the primitive/simple types shown on help pages.
        // Modify this to add more default documentations.
        private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
        {
            { typeof(Int16), "integer" },
            { typeof(Int32), "integer" },
            { typeof(Int64), "integer" },
            { typeof(UInt16), "unsigned integer" },
            { typeof(UInt32), "unsigned integer" },
            { typeof(UInt64), "unsigned integer" },
            { typeof(Byte), "byte" },
            { typeof(Char), "character" },
            { typeof(SByte), "signed byte" },
            { typeof(Uri), "URI" },
            { typeof(Single), "decimal number" },
            { typeof(Double), "decimal number" },
            { typeof(Decimal), "decimal number" },
            { typeof(String), "string" },
            { typeof(Guid), "globally unique identifier" },
            { typeof(TimeSpan), "time interval" },
            { typeof(DateTime), "date" },
            { typeof(DateTimeOffset), "date" },
            { typeof(Boolean), "boolean" },
        };

        // Lazily resolved so the documentation provider is looked up on first use,
        // not at construction time.
        private Lazy<IModelDocumentationProvider> _documentationProvider;

        /// <summary>
        /// Creates a generator bound to the given configuration, from which the
        /// (optional) XML documentation provider is resolved lazily.
        /// </summary>
        /// <param name="config">The HTTP configuration; must not be null.</param>
        public ModelDescriptionGenerator(HttpConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
            // Model names are compared case-insensitively so "user" and "User" collide.
            GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
        }

        /// <summary>Cache of all model descriptions produced so far, keyed by model name.</summary>
        public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }

        // May be null when no IModelDocumentationProvider is registered.
        private IModelDocumentationProvider DocumentationProvider
        {
            get
            {
                return _documentationProvider.Value;
            }
        }

        /// <summary>
        /// Returns the cached description for <paramref name="modelType"/> or builds one.
        /// Classification order: nullable unwrap, cache lookup, simple type, enum,
        /// generic collection/dictionary/KeyValuePair, array, NameValueCollection,
        /// non-generic IDictionary, non-generic IEnumerable, and finally complex type.
        /// </summary>
        /// <param name="modelType">The type to describe; must not be null.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when two different types map to the same model name.
        /// </exception>
        public ModelDescription GetOrCreateModelDescription(Type modelType)
        {
            if (modelType == null)
            {
                throw new ArgumentNullException("modelType");
            }

            // Nullable<T> is documented as its underlying type.
            Type underlyingType = Nullable.GetUnderlyingType(modelType);
            if (underlyingType != null)
            {
                modelType = underlyingType;
            }

            ModelDescription modelDescription;
            string modelName = ModelNameHelper.GetModelName(modelType);
            if (GeneratedModels.TryGetValue(modelName, out modelDescription))
            {
                if (modelType != modelDescription.ModelType)
                {
                    throw new InvalidOperationException(
                        String.Format(
                            CultureInfo.CurrentCulture,
                            "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                            "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                            modelName,
                            modelDescription.ModelType.FullName,
                            modelType.FullName));
                }

                return modelDescription;
            }

            if (DefaultTypeDocumentation.ContainsKey(modelType))
            {
                return GenerateSimpleTypeModelDescription(modelType);
            }

            if (modelType.IsEnum)
            {
                return GenerateEnumTypeModelDescription(modelType);
            }

            if (modelType.IsGenericType)
            {
                Type[] genericArguments = modelType.GetGenericArguments();

                if (genericArguments.Length == 1)
                {
                    // Anything assignable to IEnumerable<T> is treated as a collection of T.
                    Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                    if (enumerableType.IsAssignableFrom(modelType))
                    {
                        return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                    }
                }
                if (genericArguments.Length == 2)
                {
                    // Check IDictionary<K,V> before KeyValuePair<K,V>.
                    Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                    if (dictionaryType.IsAssignableFrom(modelType))
                    {
                        return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }

                    Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                    if (keyValuePairType.IsAssignableFrom(modelType))
                    {
                        return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }
                }
            }

            if (modelType.IsArray)
            {
                Type elementType = modelType.GetElementType();
                return GenerateCollectionModelDescription(modelType, elementType);
            }

            if (modelType == typeof(NameValueCollection))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
            }

            // Non-generic fallbacks: element/key/value types are unknown, so use object.
            if (typeof(IDictionary).IsAssignableFrom(modelType))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
            }

            if (typeof(IEnumerable).IsAssignableFrom(modelType))
            {
                return GenerateCollectionModelDescription(modelType, typeof(object));
            }

            return GenerateComplexTypeModelDescription(modelType);
        }

        /// <summary>
        /// Picks the display name for a member: [JsonProperty] name first, then
        /// [DataMember] name (only when the declaring type has [DataContract]),
        /// then the CLR member name.
        /// </summary>
        // Change this to provide different name for the member.
        private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
            if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
            {
                return jsonProperty.PropertyName;
            }

            if (hasDataContractAttribute)
            {
                DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
                if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
                {
                    return dataMember.Name;
                }
            }

            return member.Name;
        }

        /// <summary>
        /// Decides whether a member appears on the help page, honoring the
        /// serializer-ignore attributes and DataContract opt-in semantics.
        /// </summary>
        private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
            XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
            IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
            NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
            ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();

            // Enum members opt in via [EnumMember]; other members via [DataMember].
            bool hasMemberAttribute = member.DeclaringType.IsEnum ?
                member.GetCustomAttribute<EnumMemberAttribute>() != null :
                member.GetCustomAttribute<DataMemberAttribute>() != null;

            // Display member only if all the followings are true:
            // no JsonIgnoreAttribute
            // no XmlIgnoreAttribute
            // no IgnoreDataMemberAttribute
            // no NonSerializedAttribute
            // no ApiExplorerSettingsAttribute with IgnoreApi set to true
            // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
            return jsonIgnore == null &&
                xmlIgnore == null &&
                ignoreDataMember == null &&
                nonSerialized == null &&
                (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
                (!hasDataContractAttribute || hasMemberAttribute);
        }

        /// <summary>
        /// Documentation for a type: the built-in simple-type text if available,
        /// else the XML documentation provider's text.  Returns null when neither
        /// source has anything (out parameter of TryGetValue leaves it null).
        /// </summary>
        private string CreateDefaultDocumentation(Type type)
        {
            string documentation;
            if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
            {
                return documentation;
            }
            if (DocumentationProvider != null)
            {
                documentation = DocumentationProvider.GetDocumentation(type);
            }

            return documentation;
        }

        /// <summary>
        /// Collects annotation texts for every recognized data-annotation attribute on
        /// a member, sorted with Required first and the rest alphabetically.
        /// </summary>
        private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
        {
            List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();

            IEnumerable<Attribute> attributes = property.GetCustomAttributes();
            foreach (Attribute attribute in attributes)
            {
                Func<object, string> textGenerator;
                if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
                {
                    annotations.Add(
                        new ParameterAnnotation
                        {
                            AnnotationAttribute = attribute,
                            Documentation = textGenerator(attribute)
                        });
                }
            }

            // Rearrange the annotations
            annotations.Sort((x, y) =>
            {
                // Special-case RequiredAttribute so that it shows up on top
                if (x.AnnotationAttribute is RequiredAttribute)
                {
                    return -1;
                }
                if (y.AnnotationAttribute is RequiredAttribute)
                {
                    return 1;
                }

                // Sort the rest based on alphabetic order of the documentation
                return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
            });

            foreach (ParameterAnnotation annotation in annotations)
            {
                propertyModel.Annotations.Add(annotation);
            }
        }

        /// <summary>
        /// Builds a collection description whose element description is generated
        /// (or fetched from the cache) recursively.
        /// </summary>
        private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
        {
            ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
            if (collectionModelDescription != null)
            {
                return new CollectionModelDescription
                {
                    Name = ModelNameHelper.GetModelName(modelType),
                    ModelType = modelType,
                    ElementDescription = collectionModelDescription
                };
            }

            return null;
        }

        /// <summary>
        /// Describes a POCO: its public instance properties and fields that pass
        /// ShouldDisplayMember.  The description is registered in GeneratedModels
        /// BEFORE member types are recursed into, so self-referential types terminate.
        /// Note annotations are generated for properties only, not fields.
        /// </summary>
        private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
        {
            ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            foreach (PropertyInfo property in properties)
            {
                if (ShouldDisplayMember(property, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(property, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                    }

                    GenerateAnnotations(property, propertyModel);
                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
                }
            }

            FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
            foreach (FieldInfo field in fields)
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(field, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                    }

                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
                }
            }

            return complexModelDescription;
        }

        /// <summary>Builds a dictionary description with recursively generated key/value descriptions.</summary>
        private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new DictionaryModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        /// <summary>
        /// Describes an enum: one entry per public static field that passes
        /// ShouldDisplayMember, using the raw constant value's string form.
        /// </summary>
        private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
        {
            EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    EnumValueDescription enumValue = new EnumValueDescription
                    {
                        Name = field.Name,
                        Value = field.GetRawConstantValue().ToString()
                    };
                    if (DocumentationProvider != null)
                    {
                        enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                    }
                    enumDescription.Values.Add(enumValue);
                }
            }
            GeneratedModels.Add(enumDescription.Name, enumDescription);

            return enumDescription;
        }

        /// <summary>Builds a KeyValuePair description with recursively generated key/value descriptions.</summary>
        private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new KeyValuePairModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        /// <summary>Builds (and caches) the description of a built-in simple type.</summary>
        private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
        {
            SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);

            return simpleModelDescription;
        }
    }
}
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gax = Google.Api.Gax; using gaxgrpc = Google.Api.Gax.Grpc; using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore; using proto = Google.Protobuf; using grpccore = Grpc.Core; using grpcinter = Grpc.Core.Interceptors; using sys = System; using scg = System.Collections.Generic; using sco = System.Collections.ObjectModel; using st = System.Threading; using stt = System.Threading.Tasks; namespace Google.Ads.GoogleAds.V10.Services { /// <summary>Settings for <see cref="AccountBudgetProposalServiceClient"/> instances.</summary> public sealed partial class AccountBudgetProposalServiceSettings : gaxgrpc::ServiceSettingsBase { /// <summary>Get a new instance of the default <see cref="AccountBudgetProposalServiceSettings"/>.</summary> /// <returns>A new instance of the default <see cref="AccountBudgetProposalServiceSettings"/>.</returns> public static AccountBudgetProposalServiceSettings GetDefault() => new AccountBudgetProposalServiceSettings(); /// <summary> /// Constructs a new <see cref="AccountBudgetProposalServiceSettings"/> object with default settings. 
/// </summary> public AccountBudgetProposalServiceSettings() { } private AccountBudgetProposalServiceSettings(AccountBudgetProposalServiceSettings existing) : base(existing) { gax::GaxPreconditions.CheckNotNull(existing, nameof(existing)); MutateAccountBudgetProposalSettings = existing.MutateAccountBudgetProposalSettings; OnCopy(existing); } partial void OnCopy(AccountBudgetProposalServiceSettings existing); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>AccountBudgetProposalServiceClient.MutateAccountBudgetProposal</c> and /// <c>AccountBudgetProposalServiceClient.MutateAccountBudgetProposalAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 5000 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>, /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>. 
/// </description> /// </item> /// <item><description>Timeout: 3600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings MutateAccountBudgetProposalSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded))); /// <summary>Creates a deep clone of this object, with all the same property values.</summary> /// <returns>A deep clone of this <see cref="AccountBudgetProposalServiceSettings"/> object.</returns> public AccountBudgetProposalServiceSettings Clone() => new AccountBudgetProposalServiceSettings(this); } /// <summary> /// Builder class for <see cref="AccountBudgetProposalServiceClient"/> to provide simple configuration of /// credentials, endpoint etc. 
/// </summary> internal sealed partial class AccountBudgetProposalServiceClientBuilder : gaxgrpc::ClientBuilderBase<AccountBudgetProposalServiceClient> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public AccountBudgetProposalServiceSettings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public AccountBudgetProposalServiceClientBuilder() { UseJwtAccessWithScopes = AccountBudgetProposalServiceClient.UseJwtAccessWithScopes; } partial void InterceptBuild(ref AccountBudgetProposalServiceClient client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<AccountBudgetProposalServiceClient> task); /// <summary>Builds the resulting client.</summary> public override AccountBudgetProposalServiceClient Build() { AccountBudgetProposalServiceClient client = null; InterceptBuild(ref client); return client ?? BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<AccountBudgetProposalServiceClient> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<AccountBudgetProposalServiceClient> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? 
BuildAsyncImpl(cancellationToken); } private AccountBudgetProposalServiceClient BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return AccountBudgetProposalServiceClient.Create(callInvoker, Settings); } private async stt::Task<AccountBudgetProposalServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return AccountBudgetProposalServiceClient.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => AccountBudgetProposalServiceClient.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. /// </summary> protected override scg::IReadOnlyList<string> GetDefaultScopes() => AccountBudgetProposalServiceClient.DefaultScopes; /// <summary>Returns the channel pool to use when no other options are specified.</summary> protected override gaxgrpc::ChannelPool GetChannelPool() => AccountBudgetProposalServiceClient.ChannelPool; /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary> protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance; } /// <summary>AccountBudgetProposalService client wrapper, for convenient use.</summary> /// <remarks> /// A service for managing account-level budgets via proposals. /// /// A proposal is a request to create a new budget or make changes to an /// existing one. /// /// Mutates: /// The CREATE operation creates a new proposal. /// UPDATE operations aren't supported. /// The REMOVE operation cancels a pending proposal. 
/// </remarks> public abstract partial class AccountBudgetProposalServiceClient { /// <summary> /// The default endpoint for the AccountBudgetProposalService service, which is a host of /// "googleads.googleapis.com" and a port of 443. /// </summary> public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443"; /// <summary>The default AccountBudgetProposalService scopes.</summary> /// <remarks> /// The default AccountBudgetProposalService scopes are: /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list> /// </remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { "https://www.googleapis.com/auth/adwords", }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="AccountBudgetProposalServiceClient"/> using the default credentials, /// endpoint and settings. To specify custom credentials or other settings, use /// <see cref="AccountBudgetProposalServiceClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. 
/// </param> /// <returns>The task representing the created <see cref="AccountBudgetProposalServiceClient"/>.</returns> public static stt::Task<AccountBudgetProposalServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) => new AccountBudgetProposalServiceClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="AccountBudgetProposalServiceClient"/> using the default credentials, /// endpoint and settings. To specify custom credentials or other settings, use /// <see cref="AccountBudgetProposalServiceClientBuilder"/>. /// </summary> /// <returns>The created <see cref="AccountBudgetProposalServiceClient"/>.</returns> public static AccountBudgetProposalServiceClient Create() => new AccountBudgetProposalServiceClientBuilder().Build(); /// <summary> /// Creates a <see cref="AccountBudgetProposalServiceClient"/> which uses the specified call invoker for remote /// operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. 
/// </param> /// <param name="settings">Optional <see cref="AccountBudgetProposalServiceSettings"/>.</param> /// <returns>The created <see cref="AccountBudgetProposalServiceClient"/>.</returns> internal static AccountBudgetProposalServiceClient Create(grpccore::CallInvoker callInvoker, AccountBudgetProposalServiceSettings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } AccountBudgetProposalService.AccountBudgetProposalServiceClient grpcClient = new AccountBudgetProposalService.AccountBudgetProposalServiceClient(callInvoker); return new AccountBudgetProposalServiceClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. /// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC AccountBudgetProposalService client</summary> public virtual AccountBudgetProposalService.AccountBudgetProposalServiceClient GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Creates, updates, or removes account budget proposals. Operation statuses /// are returned. 
/// /// List of thrown errors: /// [AccountBudgetProposalError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [DatabaseError]() /// [DateError]() /// [FieldError]() /// [FieldMaskError]() /// [HeaderError]() /// [InternalError]() /// [MutateError]() /// [QuotaError]() /// [RequestError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateAccountBudgetProposalResponse MutateAccountBudgetProposal(MutateAccountBudgetProposalRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates, updates, or removes account budget proposals. Operation statuses /// are returned. /// /// List of thrown errors: /// [AccountBudgetProposalError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [DatabaseError]() /// [DateError]() /// [FieldError]() /// [FieldMaskError]() /// [HeaderError]() /// [InternalError]() /// [MutateError]() /// [QuotaError]() /// [RequestError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateAccountBudgetProposalResponse> MutateAccountBudgetProposalAsync(MutateAccountBudgetProposalRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates, updates, or removes account budget proposals. Operation statuses /// are returned. 
/// /// List of thrown errors: /// [AccountBudgetProposalError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [DatabaseError]() /// [DateError]() /// [FieldError]() /// [FieldMaskError]() /// [HeaderError]() /// [InternalError]() /// [MutateError]() /// [QuotaError]() /// [RequestError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateAccountBudgetProposalResponse> MutateAccountBudgetProposalAsync(MutateAccountBudgetProposalRequest request, st::CancellationToken cancellationToken) => MutateAccountBudgetProposalAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates, updates, or removes account budget proposals. Operation statuses /// are returned. /// /// List of thrown errors: /// [AccountBudgetProposalError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [DatabaseError]() /// [DateError]() /// [FieldError]() /// [FieldMaskError]() /// [HeaderError]() /// [InternalError]() /// [MutateError]() /// [QuotaError]() /// [RequestError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. The ID of the customer. /// </param> /// <param name="operation"> /// Required. The operation to perform on an individual account-level budget proposal. 
/// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateAccountBudgetProposalResponse MutateAccountBudgetProposal(string customerId, AccountBudgetProposalOperation operation, gaxgrpc::CallSettings callSettings = null) => MutateAccountBudgetProposal(new MutateAccountBudgetProposalRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operation = gax::GaxPreconditions.CheckNotNull(operation, nameof(operation)), }, callSettings); /// <summary> /// Creates, updates, or removes account budget proposals. Operation statuses /// are returned. /// /// List of thrown errors: /// [AccountBudgetProposalError]() /// [AuthenticationError]() /// [AuthorizationError]() /// [DatabaseError]() /// [DateError]() /// [FieldError]() /// [FieldMaskError]() /// [HeaderError]() /// [InternalError]() /// [MutateError]() /// [QuotaError]() /// [RequestError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. The ID of the customer. /// </param> /// <param name="operation"> /// Required. The operation to perform on an individual account-level budget proposal. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateAccountBudgetProposalResponse> MutateAccountBudgetProposalAsync(string customerId, AccountBudgetProposalOperation operation, gaxgrpc::CallSettings callSettings = null) => MutateAccountBudgetProposalAsync(new MutateAccountBudgetProposalRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operation = gax::GaxPreconditions.CheckNotNull(operation, nameof(operation)), }, callSettings); /// <summary> /// Creates, updates, or removes account budget proposals. Operation statuses /// are returned. 
///
/// List of thrown errors:
/// [AccountBudgetProposalError]()
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [DatabaseError]()
/// [DateError]()
/// [FieldError]()
/// [FieldMaskError]()
/// [HeaderError]()
/// [InternalError]()
/// [MutateError]()
/// [QuotaError]()
/// [RequestError]()
/// [StringLengthError]()
/// </summary>
/// <param name="customerId">
/// Required. The ID of the customer.
/// </param>
/// <param name="operation">
/// Required. The operation to perform on an individual account-level budget proposal.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<MutateAccountBudgetProposalResponse> MutateAccountBudgetProposalAsync(string customerId, AccountBudgetProposalOperation operation, st::CancellationToken cancellationToken) =>
    // Convenience overload: wraps the cancellation token into CallSettings and delegates.
    MutateAccountBudgetProposalAsync(customerId, operation, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}

/// <summary>AccountBudgetProposalService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// A service for managing account-level budgets via proposals.
///
/// A proposal is a request to create a new budget or make changes to an
/// existing one.
///
/// Mutates:
/// The CREATE operation creates a new proposal.
/// UPDATE operations aren't supported.
/// The REMOVE operation cancels a pending proposal.
/// </remarks>
public sealed partial class AccountBudgetProposalServiceClientImpl : AccountBudgetProposalServiceClient
{
    // Single ApiCall shared by the sync and async MutateAccountBudgetProposal overrides below.
    private readonly gaxgrpc::ApiCall<MutateAccountBudgetProposalRequest, MutateAccountBudgetProposalResponse> _callMutateAccountBudgetProposal;

    /// <summary>
    /// Constructs a client wrapper for the AccountBudgetProposalService service, with the specified gRPC client and
    /// settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">
    /// The base <see cref="AccountBudgetProposalServiceSettings"/> used within this client.
    /// </param>
    public AccountBudgetProposalServiceClientImpl(AccountBudgetProposalService.AccountBudgetProposalServiceClient grpcClient, AccountBudgetProposalServiceSettings settings)
    {
        GrpcClient = grpcClient;
        // Fall back to default settings when the caller supplies null.
        AccountBudgetProposalServiceSettings effectiveSettings = settings ?? AccountBudgetProposalServiceSettings.GetDefault();
        gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
        // Builds the RPC wrapper and attaches customer_id as the Google request routing parameter.
        _callMutateAccountBudgetProposal = clientHelper.BuildApiCall<MutateAccountBudgetProposalRequest, MutateAccountBudgetProposalResponse>(grpcClient.MutateAccountBudgetProposalAsync, grpcClient.MutateAccountBudgetProposal, effectiveSettings.MutateAccountBudgetProposalSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId);
        // Partial-method hooks let hand-written partial classes customize the generated call.
        Modify_ApiCall(ref _callMutateAccountBudgetProposal);
        Modify_MutateAccountBudgetProposalApiCall(ref _callMutateAccountBudgetProposal);
        OnConstruction(grpcClient, effectiveSettings, clientHelper);
    }

    // Customization hook applied to every ApiCall built by this client.
    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

    // Customization hook specific to the MutateAccountBudgetProposal ApiCall.
    partial void Modify_MutateAccountBudgetProposalApiCall(ref gaxgrpc::ApiCall<MutateAccountBudgetProposalRequest, MutateAccountBudgetProposalResponse> call);

    // Post-construction hook for hand-written partial classes.
    partial void OnConstruction(AccountBudgetProposalService.AccountBudgetProposalServiceClient grpcClient, AccountBudgetProposalServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

    /// <summary>The underlying gRPC AccountBudgetProposalService client</summary>
    public override AccountBudgetProposalService.AccountBudgetProposalServiceClient GrpcClient { get; }

    // Per-request customization hook; runs before both the sync and async overrides dispatch.
    partial void Modify_MutateAccountBudgetProposalRequest(ref MutateAccountBudgetProposalRequest request, ref gaxgrpc::CallSettings settings);

    /// <summary>
    /// Creates, updates, or removes account budget proposals. Operation statuses
    /// are returned.
    ///
    /// List of thrown errors:
    /// [AccountBudgetProposalError]()
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [DateError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// [StringLengthError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override MutateAccountBudgetProposalResponse MutateAccountBudgetProposal(MutateAccountBudgetProposalRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_MutateAccountBudgetProposalRequest(ref request, ref callSettings);
        return _callMutateAccountBudgetProposal.Sync(request, callSettings);
    }

    /// <summary>
    /// Creates, updates, or removes account budget proposals. Operation statuses
    /// are returned.
    ///
    /// List of thrown errors:
    /// [AccountBudgetProposalError]()
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [DateError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// [StringLengthError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<MutateAccountBudgetProposalResponse> MutateAccountBudgetProposalAsync(MutateAccountBudgetProposalRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_MutateAccountBudgetProposalRequest(ref request, ref callSettings);
        return _callMutateAccountBudgetProposal.Async(request, callSettings);
    }
}
}
using System;
using Csla;
using SelfLoadSoftDelete.DataAccess;
using SelfLoadSoftDelete.DataAccess.ERLevel;

namespace SelfLoadSoftDelete.Business.ERLevel
{
    /// <summary>
    /// G07_Country_Child (editable child object).<br/>
    /// This is a generated base class of <see cref="G07_Country_Child"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="G06_Country"/> collection.
    /// </remarks>
    [Serializable]
    public partial class G07_Country_Child : BusinessBase<G07_Country_Child>
    {
        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="Country_Child_Name"/> property.
        /// </summary>
        // NOTE(review): the friendly name "Regions Child Name" does not match the property
        // (Country_Child_Name) — looks like a code-generation template leftover. Confirm in
        // the generator model and regenerate rather than hand-editing this generated file.
        public static readonly PropertyInfo<string> Country_Child_NameProperty = RegisterProperty<string>(p => p.Country_Child_Name, "Regions Child Name");
        /// <summary>
        /// Gets or sets the Regions Child Name.
        /// </summary>
        /// <value>The Regions Child Name.</value>
        public string Country_Child_Name
        {
            get { return GetProperty(Country_Child_NameProperty); }
            set { SetProperty(Country_Child_NameProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="G07_Country_Child"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="G07_Country_Child"/> object.</returns>
        internal static G07_Country_Child NewG07_Country_Child()
        {
            return DataPortal.CreateChild<G07_Country_Child>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="G07_Country_Child"/> object, based on given parameters.
        /// </summary>
        /// <param name="country_ID1">The Country_ID1 parameter of the G07_Country_Child to fetch.</param>
        /// <returns>A reference to the fetched <see cref="G07_Country_Child"/> object.</returns>
        internal static G07_Country_Child GetG07_Country_Child(int country_ID1)
        {
            return DataPortal.FetchChild<G07_Country_Child>(country_ID1);
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="G07_Country_Child"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public G07_Country_Child()
        {
            // Use factory methods and do not use direct creation.

            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="G07_Country_Child"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="G07_Country_Child"/> object from the database, based on given criteria.
        /// </summary>
        /// <param name="country_ID1">The Country ID1.</param>
        protected void Child_Fetch(int country_ID1)
        {
            var args = new DataPortalHookArgs(country_ID1);
            OnFetchPre(args);
            // DAL manager is disposed as soon as the DTO has been read and mapped.
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var dal = dalManager.GetProvider<IG07_Country_ChildDal>();
                var data = dal.Fetch(country_ID1);
                Fetch(data);
            }
            OnFetchPost(args);
        }

        /// <summary>
        /// Loads a <see cref="G07_Country_Child"/> object from the given <see cref="G07_Country_ChildDto"/>.
        /// </summary>
        /// <param name="data">The G07_Country_ChildDto to use.</param>
        private void Fetch(G07_Country_ChildDto data)
        {
            // Value properties
            LoadProperty(Country_Child_NameProperty, data.Country_Child_Name);
            var args = new DataPortalHookArgs(data);
            OnFetchRead(args);
        }

        /// <summary>
        /// Inserts a new <see cref="G07_Country_Child"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(G06_Country parent)
        {
            var dto = new G07_Country_ChildDto();
            // Child rows are keyed off the parent's Country_ID.
            dto.Parent_Country_ID = parent.Country_ID;
            dto.Country_Child_Name = Country_Child_Name;
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnInsertPre(args);
                var dal = dalManager.GetProvider<IG07_Country_ChildDal>();
                using (BypassPropertyChecks)
                {
                    // args is rebuilt from the result DTO so OnInsertPost sees server-assigned values.
                    var resultDto = dal.Insert(dto);
                    args = new DataPortalHookArgs(resultDto);
                }
                OnInsertPost(args);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="G07_Country_Child"/> object.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(G06_Country parent)
        {
            // Skip the round trip entirely when nothing changed.
            if (!IsDirty)
                return;

            var dto = new G07_Country_ChildDto();
            dto.Parent_Country_ID = parent.Country_ID;
            dto.Country_Child_Name = Country_Child_Name;
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<IG07_Country_ChildDal>();
                using (BypassPropertyChecks)
                {
                    var resultDto = dal.Update(dto);
                    args = new DataPortalHookArgs(resultDto);
                }
                OnUpdatePost(args);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="G07_Country_Child"/> object from database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(G06_Country parent)
        {
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnDeletePre(args);
                var dal = dalManager.GetProvider<IG07_Country_ChildDal>();
                using (BypassPropertyChecks)
                {
                    // NOTE(review): delete is keyed on parent.Country_ID — presumably this 1:1
                    // child shares the parent's key; confirm against the DAL contract.
                    dal.Delete(parent.Country_ID);
                }
                OnDeletePost(args);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion
    }
}
using System;
using System.Collections;

namespace Tutor.Structure
{
    /// <summary>
    /// Node contract for the equation AST. Every node renders itself as LaTeX (str)
    /// and as an s-expression (lisp); order/cycle support enumerating equivalent
    /// orderings of commutative/symmetric nodes; simple() reports whether the node
    /// can appear without parentheses inside another node's rendering.
    /// </summary>
    public interface Equation
    {
        // Structural equality (commutative nodes compare as multisets).
        bool Equal(Equation E);
        // LaTeX rendering.
        string str();
        // Number of distinct orderings this node can cycle through.
        int order();
        // Advance to the next equivalent ordering.
        void cycle();
        // True when the rendering needs no surrounding parentheses.
        bool simple();
        // S-expression rendering.
        string lisp();
    }

    /// <summary>Application of a named function to a list of parameter sub-equations.</summary>
    public class EquFunctionApplication : Equation
    {
        public string FunctionName;
        // Elements are Equation instances (pre-generics ArrayList).
        public ArrayList Parameters;

        // NOTE(review): this ctor leaves FunctionName null; EquationParser.Interpret
        // fills it in for "fun_app" nodes — confirm no other caller renders before that.
        public EquFunctionApplication() { Parameters = new ArrayList(); }
        public EquFunctionApplication(string name) { FunctionName = name; Parameters = new ArrayList(); }
        public EquFunctionApplication(string name, Equation E) { FunctionName = name; Parameters = new ArrayList(); Parameters.Add(E); }

        #region Equation Members

        // Equal iff same function name and pairwise-equal parameters in the same order.
        public bool Equal(Equation E)
        {
            if(E is EquFunctionApplication)
            {
                EquFunctionApplication EFA = (EquFunctionApplication) E;
                if( ! EFA.FunctionName.Equals(FunctionName) ) return false;
                if( EFA.Parameters.Count != Parameters.Count ) return false;
                for(int k = 0; k < Parameters.Count; k++)
                {
                    Equation L = (Equation) Parameters[k];
                    Equation R = (Equation) EFA.Parameters[k];
                    if( ! L.Equal(R) ) return false;
                }
                return true;
            }
            return false;
        }

        // Renders as name(p1,p2,...).
        public string str()
        {
            string S = FunctionName + "(";
            bool First = true;
            foreach(Equation E in Parameters)
            {
                if(!First) S += ",";
                First = false;
                S += E.str();
            }
            S += ")";
            return S;
        }

        public int order() { return 1; }
        public void cycle() { }
        public bool simple() { return true; }

        public string lisp()
        {
            string X = "(fun_app (var " + FunctionName + ") ";
            foreach(Equation E in Parameters) { X += E.lisp(); }
            return X + ")";
        }

        #endregion
    }

    /// <summary>Integer literal.</summary>
    public class EquConstant : Equation
    {
        public long Value;
        public EquConstant(long v) { Value = v; }
        public bool Equal(Equation E) { if(E is EquConstant) { EquConstant V = (EquConstant) E; return V.Value == Value; } return false; }
        // Negative values are parenthesized so e.g. x^(-2) renders unambiguously.
        public string str() { if(Value < 0) { return "\\left(" + Value + "\\right)"; } return "" + Value + ""; }
        public string lisp() { return "(n " + Value + ") "; }
        public int order() { return 1; }
        public void cycle() {}
        public bool simple() { return true; }
    };

    /// <summary>Named variable.</summary>
    public class EquVariable : Equation
    {
        public string Variable;
        public EquVariable(string var) { Variable = var; }
        public bool Equal(Equation E) { if(E is EquVariable) { EquVariable V = (EquVariable) E; return V.Variable.Equals(Variable); } return false; }
        public string str() { return " " + Variable + " "; }
        public string lisp() { return "(var " + Variable + " )"; }
        public int order() { return 1; }
        public void cycle() {}
        public bool simple() { return true; }
    };

    /// <summary>Root node: Root-th root of Term; Root "2" renders as \sqrt.</summary>
    public class EquRoot : Equation
    {
        public Equation Term;
        public Equation Root;
        public EquRoot(Equation t, Equation r) { Term = t; Root = r; }
        public bool Equal(Equation E) { if(E is EquRoot) { EquRoot V = (EquRoot) E; return V.Term.Equal(Term) && V.Root.Equal(Root); } return false; }
        public string str()
        {
            if(Root.str().Equals("2")) return "\\sqrt{" + Term.str() + "}";
            return "\\sqrt["+Root.str()+"]{" + Term.str() + "} ";
        }
        public string lisp()
        {
            if(Root.str().Equals("2")) return "(sqrt " + Term.lisp() + ") ";
            return "(root " + Term.lisp() + " " + Root.lisp() + ") ";
        }
        public int order() { return 1; }
        public void cycle() {}
        public bool simple() { return true; }
    };

    /// <summary>Logarithm of Term in the given Base; base "e" renders as \ln.</summary>
    public class EquLog : Equation
    {
        public Equation Term;
        public Equation Base;
        public EquLog(Equation t, Equation b) { Term = t; Base = b; }
        public bool Equal(Equation E) { if(E is EquLog) { EquLog V = (EquLog) E; return V.Term.Equal(Term) && V.Base.Equal(Base); } return false; }
        public string str()
        {
            if(Base.str().Equals("e")) return "\\ln{" + Term.str() + "}";
            return "\\log["+Base.str()+"]{" + Term.str() + "} ";
        }
        public string lisp() { return "(log " + Term.lisp() + " " + Base.lisp() + ") "; }
        public int order() { return 1; }
        public void cycle() {}
        public bool simple() { return true; }
    };

    /// <summary>Exponentiation Base^Power; non-simple bases are parenthesized.</summary>
    public class EquPower : Equation
    {
        public Equation Base;
        public Equation Power;
        public EquPower(Equation b, Equation p) { Base = b; Power = p; }
        public bool Equal(Equation E) { if(E is EquPower) { EquPower V = (EquPower) E; return V.Base.Equal(Base) && V.Power.Equal(Power); } return false; }
        public string str()
        {
            string b = Base.str();
            if(!Base.simple()) { b = "\\left(" + b + "\\right)"; }
            return b+"^{" + Power.str() + "}";
        }
        public string lisp() { return "(^ " + Base.lisp() + " " + Power.lisp() + ") "; }
        public int order() { return 1; }
        public void cycle() {}
        public bool simple() { return true; }
    }

    /// <summary>Fraction Numerator/Denominator.</summary>
    public class EquDivide : Equation
    {
        public Equation Numerator;
        public Equation Denominator;
        // Local counter driving the nested cycle of numerator x denominator orderings.
        private int customCycle;
        public EquDivide(Equation n, Equation d) { Numerator = n; Denominator = d; customCycle = 0; }
        public bool Equal(Equation E) { if(E is EquDivide) { EquDivide V = (EquDivide) E; return V.Numerator.Equal(Numerator) && V.Denominator.Equal(Denominator); } return false; }
        public string str() { return "\\frac{" + Numerator.str() + "}{" + Denominator.str() + "}"; }
        public string lisp() { return "(/ " + Numerator.lisp() + " " + Denominator.lisp() + ") "; }
        // Cartesian product of the two sides' orderings.
        public int order() { return Numerator.order() * Denominator.order(); }
        // Cycle the numerator every step; advance the denominator once per full
        // numerator revolution (odometer-style enumeration).
        public void cycle()
        {
            customCycle ++;
            Numerator.cycle();
            if(customCycle % Numerator.order() == 0) { Denominator.cycle(); }
        }
        public bool simple() { return true; }
    }

    /// <summary>Equality Left = Right; cycling swaps the two sides.</summary>
    public class EquEquality : Equation
    {
        public Equation Left;
        public Equation Right;
        public EquEquality(Equation l, Equation r) { Left = l; Right = r; }
        public string str() { return "" + Left.str() + "=" + Right.str() + ""; }
        public bool Equal(Equation E) { if(E is EquEquality) { EquEquality V = (EquEquality) E; return V.Left.Equal(Left) && V.Right.Equal(Right); } return false; }
        public string lisp() { return "(= " + Left.lisp() + " " + Right.lisp() + ") "; }
        public int order() { return 2; }
        public void cycle() { Equation Temp = Left; Left = Right; Right = Temp; }
        public bool simple() { return true; }
    };

    // Relational operators supported by EquInEquality.
    public enum EquInEqualityType
    {
        LessThan,
        LessThanEqual,
        GreaterThan,
        GreaterThanEqual,
        NotEqual
    }

    /// <summary>Inequality Left (op) Right; only != is symmetric, so only it cycles.</summary>
    public class EquInEquality : Equation
    {
        public Equation Left;
        public Equation Right;
        public EquInEqualityType Typ;

        // Maps the enum to its source-text operator; "?" for unexpected values.
        public string GetTypStr(EquInEqualityType T)
        {
            if(T == EquInEqualityType.GreaterThan) return ">";
            if(T == EquInEqualityType.LessThan) return "<";
            if(T == EquInEqualityType.GreaterThanEqual) return ">=";
            if(T == EquInEqualityType.LessThanEqual) return "<=";
            if(T == EquInEqualityType.NotEqual) return "!=";
            return "?";
        }

        // String-based ctor: unrecognized operator text falls back to NotEqual.
        public EquInEquality(Equation l, Equation r, string T)
        {
            Left = l; Right = r;
            Typ = EquInEqualityType.NotEqual;
            if(T.Equals(">")) Typ = EquInEqualityType.GreaterThan;
            if(T.Equals("<")) Typ = EquInEqualityType.LessThan;
            if(T.Equals(">=")) Typ = EquInEqualityType.GreaterThanEqual;
            if(T.Equals("<=")) Typ = EquInEqualityType.LessThanEqual;
        }
        public EquInEquality(Equation l, Equation r, EquInEqualityType T) { Left = l; Right = r; Typ = T; }

        public string str() { return "" + Left.str() + GetTypStr(Typ) + Right.str() + ""; }
        public bool Equal(Equation E) { if(E is EquInEquality) { EquInEquality V = (EquInEquality) E; return V.Left.Equal(Left) && V.Right.Equal(Right) && V.Typ == Typ; } return false; }
        public string lisp() { return "("+ GetTypStr(Typ) +" " + Left.lisp() + " " + Right.lisp() + ") "; }
        public int order() { if(Typ == EquInEqualityType.NotEqual) return 2; return 1; }
        public void cycle() { if(Typ == EquInEqualityType.NotEqual) {Equation Temp = Left; Left = Right; Right = Temp; } }
        public bool simple() { return true; }
    };

    /// <summary>Unary minus; non-simple operands are parenthesized.</summary>
    public class EquNegation : Equation
    {
        public Equation Term;
        public EquNegation(Equation t) { Term = t; }
        public bool Equal(Equation E) { if(E is EquNegation) { EquNegation V = (EquNegation) E; return V.Term.Equal(Term); } return false; }
        public string str()
        {
            if(Term.simple()) return "-" + Term.str() + "";
            return "-\\left(" + Term.str() + "\\right)";
        }
        public string lisp() { return "(- " + Term.lisp() + ") "; }
        public int order() { return 1; }
        public void cycle() {}
        public bool simple() { return true; }
    }

    /// <summary>Absolute value |Term| (rendered via a custom \abs macro).</summary>
    public class EquAbsoluteValue : Equation
    {
        public Equation Term;
        public EquAbsoluteValue(Equation t) { Term = t; }
        public bool Equal(Equation E) { if(E is EquAbsoluteValue) { EquAbsoluteValue V = (EquAbsoluteValue) E; return V.Term.Equal(Term); } return false; }
        public string str() { return "\\abs{" + Term.str() + "}"; }
        public string lisp() { return "(abs " + Term.lisp() + ") "; }
        public int order() { return 1; }
        public void cycle() {}
        public bool simple() { return true; }
    }

    /// <summary>N-ary sum; nested sums are flattened and terms compare as a multiset.</summary>
    public class EquAddition : Equation
    {
        // Elements are Equation instances.
        public ArrayList _Terms;

        public EquAddition() { _Terms = new ArrayList(); }
        public EquAddition(Equation a) { _Terms = new ArrayList(); if(a != null) _Terms.Add(a); CollectSums(false); }
        public EquAddition(Equation a, Equation b) { _Terms = new ArrayList(); if(a != null) _Terms.Add(a); if(b != null) _Terms.Add(b); CollectSums(false); }
        public EquAddition(Equation [] A) { _Terms = new ArrayList(); foreach(Equation E in A) _Terms.Add(E); CollectSums(false); }

        public void CollectSums() { CollectSums(true); }

        // Recursively inlines nested EquAddition terms; optionally sorts by
        // rendered string in descending order (CompareEquations with ratio -1).
        public void CollectSums(bool sort)
        {
            ArrayList NewTerms = new ArrayList();
            foreach(Equation E in _Terms)
            {
                if(E is EquAddition)
                {
                    EquAddition V = (EquAddition) E;
                    V.CollectSums(sort);
                    foreach(Equation F in V._Terms) NewTerms.Add(F);
                }
                else
                {
                    NewTerms.Add(E);
                }
            }
            _Terms = NewTerms;
            if(sort) _Terms.Sort(new CompareEquations(-1));
        }

        public bool Equal(Equation E) { if(E is EquAddition) { EquAddition V = (EquAddition) E; return SubEquality.TestEquality(V._Terms, _Terms); } return false; }

        // Renders terms joined by "+", folding a negated term into a leading "-".
        // NOTE(review): for a non-first EquNegation term, the inner Term is emitted
        // without the parentheses EquNegation.str() would add for non-simple terms;
        // confirm whether that can ever produce ambiguous output here.
        public string str()
        {
            string interior = "";
            bool first = true;
            foreach(Equation E in _Terms)
            {
                if(E is EquNegation)
                {
                    if(!first)
                    {
                        interior += "-";
                        interior += ((EquNegation)E).Term.str();
                    }
                    else
                    {
                        interior += E.str();
                    }
                }
                else
                {
                    if(!first) interior += "+";
                    interior += E.str();
                }
                first = false;
            }
            return "" +interior + "";
        }

        public string lisp()
        {
            string interior = "";
            foreach(Equation E in _Terms) { interior += E.lisp() + " "; }
            return "(+ " +interior + ") ";
        }

        public int order() { return _Terms.Count; }
        // Rotate the first term to the back.
        public void cycle() { Object F = _Terms[0]; _Terms.RemoveAt(0); _Terms.Add(F); }
        public bool simple() { return false; }
    };

    /// <summary>N-ary product; nested products are flattened, factors compare as a multiset.</summary>
    public class EquMultiplication : Equation
    {
        // Elements are Equation instances.
        public ArrayList _Factors;

        public EquMultiplication() { _Factors = new ArrayList(); }
        public EquMultiplication(Equation a) { _Factors = new ArrayList(); if(a != null) _Factors.Add(a); CollectFactors(false); }
        public EquMultiplication(Equation a, Equation b) { _Factors = new ArrayList(); if(a != null)_Factors.Add(a); if(b != null)_Factors.Add(b); CollectFactors(false); }
        public EquMultiplication(Equation a, Equation b, Equation c) { _Factors = new ArrayList(); if(a != null)_Factors.Add(a); if(b != null)_Factors.Add(b); if(c != null)_Factors.Add(c); CollectFactors(false); }
        public EquMultiplication(Equation [] A) { _Factors = new ArrayList(); foreach(Equation E in A) _Factors.Add(E); CollectFactors(false); }

        public void CollectFactors() { CollectFactors(true); }

        // Recursively inlines nested EquMultiplication factors; optionally sorts by
        // rendered string in ascending order (CompareEquations with ratio 1).
        public void CollectFactors(bool sort)
        {
            ArrayList NewFactors = new ArrayList();
            foreach(Equation E in _Factors)
            {
                if(E is EquMultiplication)
                {
                    EquMultiplication V = (EquMultiplication) E;
                    V.CollectFactors(sort);
                    foreach(Equation F in V._Factors) NewFactors.Add(F);
                }
                else
                {
                    NewFactors.Add(E);
                }
            }
            _Factors = NewFactors;
            if(sort) _Factors.Sort(new CompareEquations(1));
        }

        public bool Equal(Equation E) { if(E is EquMultiplication) { EquMultiplication V = (EquMultiplication) E; return SubEquality.TestEquality(V._Factors, _Factors); } return false; }

        public string str()
        {
            string interior = "";
            bool first = true;
            foreach(Equation E in _Factors)
            {
                if(!first) interior += "*";
                if(!E.simple()) interior += "(";
                interior += E.str();
                if(!E.simple()) interior += ")";
                first = false;
            }
            return "" + interior + "";
        }

        public string lisp()
        {
            string interior = "";
            foreach(Equation E in _Factors) { interior += E.lisp() + " "; }
            return "(* " +interior + ") ";
        }

        public int order() { return _Factors.Count; }
        // Rotate the first factor to the back.
        public void cycle() { Object F = _Factors[0]; _Factors.RemoveAt(0); _Factors.Add(F); }
        // NOTE(review): products report simple() == true (unlike EquAddition), so a
        // product used as an EquPower base renders as a*b^{2}, not (a*b)^{2} —
        // confirm this is intended.
        public bool simple() { return true; }
    }

    /// <summary>Order-insensitive (multiset) equality test over lists of Equation.</summary>
    public class SubEquality
    {
        // O(n^2) bipartite matching: each left element consumes the first
        // still-unmatched equal right element.
        public static bool TestEquality(ArrayList Left, ArrayList Right)
        {
            // quick rejection test
            if(Left.Count != Right.Count) return false;
            // the right side
            Equation [] EqR = new Equation[Right.Count];
            // copy from list to array
            int idx = 0;
            foreach(Equation E in Right)
            {
                EqR[idx] = E;
                idx++;
            }
            foreach(Equation L in Left)
            {
                bool found = false;
                for(int k = 0; k < EqR.Length && !found; k++)
                {
                    if(EqR[k] != null)
                    {
                        if(EqR[k].Equal(L))
                        {
                            EqR[k] = null;
                            found = true;
                        }
                    }
                }
                if(!found) return false;
            }
            return true;
        }
    }

    /// <summary>
    /// Orders Equation nodes by their rendered LaTeX string; Ratio of 1 sorts
    /// ascending, -1 descending (used by CollectFactors / CollectSums).
    /// </summary>
    public class CompareEquations : IComparer
    {
        public int Ratio;
        public CompareEquations(int R) { Ratio = R; }

        #region IComparer Members

        public int Compare(object x, object y)
        {
            return Ratio * ((Equation) x).str().CompareTo( ((Equation) y).str());
        }

        #endregion
    }

    /// <summary>Thrown by EquationParser.Interpret for an unrecognized parse-tree node.</summary>
    public class UnknownEquNode : Exception { };

    /// <summary>Builds Equation ASTs from SimpleParser parse trees.</summary>
    public class EquationParser
    {
        // Dispatches on the parse-tree node label; throws UnknownEquNode when no
        // branch matches (including a malformed "fun_app" node).
        public static Equation Interpret(SimpleParser.SimpleParseTree SPL)
        {
            if(SPL.node.Equals("+"))
            {
                EquAddition add = new EquAddition();
                foreach(SimpleParser.SimpleParseTree sub in SPL.children)
                {
                    add._Terms.Add( Interpret(sub) );
                }
                add.CollectSums(false);
                return add;
            }
            if(SPL.node.Equals("*"))
            {
                EquMultiplication mult = new EquMultiplication();
                foreach(SimpleParser.SimpleParseTree sub in SPL.children)
                {
                    mult._Factors.Add( Interpret(sub) );
                }
                mult.CollectFactors(false);
                return mult;
            }
            if(SPL.node.Equals("var"))
            {
                return new EquVariable(((SimpleParser.SimpleParseTree) SPL.children[0] ) .node);
            }
            if(SPL.node.Equals("^"))
            {
                return new EquPower(Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ));
            }
            if(SPL.node.Equals("-"))
            {
                Equation Sub;
                // Binary minus: a - b becomes a + (-b).
                if(SPL.children.Count == 2)
                {
                    Sub = new EquNegation( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ) );
                    return new EquAddition( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Sub );
                }
                // Unary minus: fold into the constant when possible (mutates it in place).
                Sub = Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) );
                if(Sub is EquConstant)
                {
                    EquConstant Eq = (EquConstant) Sub;
                    Eq.Value *= -1;
                    return Eq;
                }
                return new EquNegation(Sub);
            }
            if(SPL.node.Equals("abs"))
            {
                return new EquAbsoluteValue( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ) );
            }
            if(SPL.node.Equals("="))
            {
                return new EquEquality( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ) );
            }
            if(SPL.node.Equals("<"))
            {
                return new EquInEquality( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ) , EquInEqualityType.LessThan );
            }
            if(SPL.node.Equals(">"))
            {
                return new EquInEquality( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ) , EquInEqualityType.GreaterThan );
            }
            if(SPL.node.Equals("<="))
            {
                return new EquInEquality( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ) , EquInEqualityType.LessThanEqual );
            }
            if(SPL.node.Equals(">="))
            {
                return new EquInEquality( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ) , EquInEqualityType.GreaterThanEqual );
            }
            if(SPL.node.Equals("!="))
            {
                return new EquInEquality( Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ) , EquInEqualityType.NotEqual );
            }
            if(SPL.node.Equals("matrix"))
            {
                // Matrices are delegated to the MatrixAlgebra subsystem.
                return MatrixAlgebra.Matrix.Interpret(SPL);
            }
            if(SPL.node.Equals("/"))
            {
                return new EquDivide(Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ));
            }
            if(SPL.node.Equals("root"))
            {
                return new EquRoot(Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ));
            }
            if(SPL.node.Equals("ln"))
            {
                // ln is log base e.
                return new EquLog(Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), new EquVariable("e") );
            }
            if(SPL.node.Equals("log"))
            {
                return new EquLog(Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), Interpret( ((SimpleParser.SimpleParseTree) SPL.children[1] ) ));
            }
            if(SPL.node.Equals("sqrt"))
            {
                // sqrt is root with index 2.
                return new EquRoot(Interpret( ((SimpleParser.SimpleParseTree) SPL.children[0] ) ), new EquConstant(2) );
            }
            if(SPL.node.Equals("n"))
            {
                return new EquConstant( Int32.Parse(((SimpleParser.SimpleParseTree) SPL.children[0] ) .node) );
            }
            if(SPL.node.Equals("fun_app"))
            {
                // Expected shape: (fun_app (var name) arg1 arg2 ...); anything else
                // falls through to UnknownEquNode below.
                EquFunctionApplication EFA = new EquFunctionApplication();
                if(SPL.children.Count > 0)
                {
                    SimpleParser.SimpleParseTree C0 = ((SimpleParser.SimpleParseTree)SPL.children[0]);
                    if( C0.node.Equals("var"))
                    {
                        if(C0.children.Count > 0)
                        {
                            SimpleParser.SimpleParseTree Name = ((SimpleParser.SimpleParseTree)C0.children[0]);
                            EFA.FunctionName = Name.node;
                            for(int k = 1; k < SPL.children.Count; k++)
                            {
                                Equation EP = Interpret( ((SimpleParser.SimpleParseTree) SPL.children[k] ) );
                                EFA.Parameters.Add(EP);
                            }
                            return EFA;
                        }
                    }
                }
            }
            throw new UnknownEquNode();
        }

        // Lexes and parses the source text, then interprets the resulting tree.
        public static Equation Parse(string source)
        {
            SimpleParser.SimpleLexer SL = new SimpleParser.SimpleLexer(source);
            SimpleParser.SimpleParseTree SPL = SimpleParser.SimpleParseTree.Parse(SL);
            return Interpret(SPL);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.IO;
using System.Data.Common;
using System.Data.ProviderBase;
using System.Diagnostics;
using System.Text;

namespace System.Data.Odbc
{
    // Provider-specific metadata factory backing OdbcConnection.GetSchema.
    // Builds schema DataTables either from the static XML metadata stream
    // (handled by the DbMetaDataFactory base class) or, for "prepare"
    // collections, by calling the corresponding ODBC catalog function
    // (SQLTables, SQLColumns, SQLStatistics, ...) on the live connection.
    internal class OdbcMetaDataFactory : DbMetaDataFactory
    {
        // Pairs a GetSchema collection name with the ODBC catalog function
        // that populates it at runtime.
        private struct SchemaFunctionName
        {
            internal SchemaFunctionName(string schemaName, ODBC32.SQL_API odbcFunction)
            {
                _schemaName = schemaName;
                _odbcFunction = odbcFunction;
            }
            internal readonly string _schemaName;
            internal readonly ODBC32.SQL_API _odbcFunction;
        }

        // Column names used in the MetaDataCollections / Restrictions tables.
        private const string _collectionName = "CollectionName";
        private const string _populationMechanism = "PopulationMechanism";
        private const string _prepareCollection = "PrepareCollection";

        // Collection-name -> ODBC catalog function mapping, built in the ctor.
        private readonly SchemaFunctionName[] _schemaMapping;

        // Separator used by SQL_KEYWORDS (a comma-delimited list per the ODBC spec).
        internal static readonly char[] KeywordSeparatorChar = new char[1] { ',' };

        // Loads the base metadata from XMLStream, then prunes any "prepare"
        // collection (and its restrictions) whose underlying ODBC catalog
        // function is not supported by the current driver.
        internal OdbcMetaDataFactory(Stream XMLStream,
                                     string serverVersion,
                                     string serverVersionNormalized,
                                     OdbcConnection connection) :
            base(XMLStream, serverVersion, serverVersionNormalized)
        {
            // set up the collection name to ODBC function mapping
            _schemaMapping = new SchemaFunctionName[] {
                new SchemaFunctionName(DbMetaDataCollectionNames.DataTypes,ODBC32.SQL_API.SQLGETTYPEINFO),
                new SchemaFunctionName(OdbcMetaDataCollectionNames.Columns,ODBC32.SQL_API.SQLCOLUMNS),
                new SchemaFunctionName(OdbcMetaDataCollectionNames.Indexes,ODBC32.SQL_API.SQLSTATISTICS),
                new SchemaFunctionName(OdbcMetaDataCollectionNames.Procedures,ODBC32.SQL_API.SQLPROCEDURES),
                new SchemaFunctionName(OdbcMetaDataCollectionNames.ProcedureColumns,ODBC32.SQL_API.SQLPROCEDURECOLUMNS),
                new SchemaFunctionName(OdbcMetaDataCollectionNames.ProcedureParameters,ODBC32.SQL_API.SQLPROCEDURECOLUMNS),
                new SchemaFunctionName(OdbcMetaDataCollectionNames.Tables,ODBC32.SQL_API.SQLTABLES),
                new SchemaFunctionName(OdbcMetaDataCollectionNames.Views,ODBC32.SQL_API.SQLTABLES)};

            // verify the existence of the table in the data set
            DataTable metaDataCollectionsTable = CollectionDataSet.Tables[DbMetaDataCollectionNames.MetaDataCollections];
            if (metaDataCollectionsTable == null)
            {
                throw ADP.UnableToBuildCollection(DbMetaDataCollectionNames.MetaDataCollections);
            }

            // copy the table filtering out any rows that don't apply to the current version of the provider
            metaDataCollectionsTable = CloneAndFilterCollection(DbMetaDataCollectionNames.MetaDataCollections, null);

            // verify the existence of the table in the data set
            DataTable restrictionsTable = CollectionDataSet.Tables[DbMetaDataCollectionNames.Restrictions];
            if (restrictionsTable != null)
            {
                // copy the table filtering out any rows that don't apply to the current version of the provider
                restrictionsTable = CloneAndFilterCollection(DbMetaDataCollectionNames.Restrictions, null);
            }

            // need to filter out any of the collections where
            // 1) it is populated using prepare collection
            // 2) it is in the collection to odbc function mapping above
            // 3) the provider does not support the necessary odbc function
            DataColumn populationMechanism = metaDataCollectionsTable.Columns[_populationMechanism];
            DataColumn collectionName = metaDataCollectionsTable.Columns[_collectionName];
            DataColumn restrictionCollectionName = null;
            if (restrictionsTable != null)
            {
                restrictionCollectionName = restrictionsTable.Columns[_collectionName];
            }

            foreach (DataRow collection in metaDataCollectionsTable.Rows)
            {
                if ((string)collection[populationMechanism] == _prepareCollection)
                {
                    // is the collection in the mapping
                    int mapping = -1;
                    for (int i = 0; i < _schemaMapping.Length; i++)
                    {
                        if (_schemaMapping[i]._schemaName == (string)collection[collectionName])
                        {
                            mapping = i;
                            break;
                        }
                    }
                    // no, go on to the next collection
                    if (mapping == -1)
                    {
                        continue;
                    }

                    // does the provider support the necessary odbc function
                    // if not delete the row from the table
                    if (connection.SQLGetFunctions(_schemaMapping[mapping]._odbcFunction) == false)
                    {
                        // but first delete any related restrictions
                        if (restrictionsTable != null)
                        {
                            foreach (DataRow restriction in restrictionsTable.Rows)
                            {
                                if ((string)collection[collectionName] == (string)restriction[restrictionCollectionName])
                                {
                                    restriction.Delete();
                                }
                            }
                            restrictionsTable.AcceptChanges();
                        }
                        collection.Delete();
                    }
                }
            }

            // replace the original table with the updated one
            metaDataCollectionsTable.AcceptChanges();
            CollectionDataSet.Tables.Remove(CollectionDataSet.Tables[DbMetaDataCollectionNames.MetaDataCollections]);
            CollectionDataSet.Tables.Add(metaDataCollectionsTable);

            if (restrictionsTable != null)
            {
                CollectionDataSet.Tables.Remove(CollectionDataSet.Tables[DbMetaDataCollectionNames.Restrictions]);
                CollectionDataSet.Tables.Add(restrictionsTable);
            }
        }

        // Maps an ODBC SQLGetTypeInfo boolean-ish column value (0 / non-zero,
        // possibly widened to Int32 by some drivers) to bool, passing DBNull through.
        private object BooleanFromODBC(object odbcSource)
        {
            if (odbcSource != DBNull.Value)
            {
                //convert to Int32 before doing the comparison
                //some odbc drivers report the odbcSource value as unsigned, in which case we will
                //have upgraded the type to Int32, and thus can't cast directly to short
                if (Convert.ToInt32(odbcSource, null) == 0)
                {
                    return false;
                }
                else
                {
                    return true;
                }
            }
            return DBNull.Value;
        }

        // Creates a command enlisted in the connection's current local transaction.
        private OdbcCommand GetCommand(OdbcConnection connection)
        {
            OdbcCommand command = connection.CreateCommand();

            // You need to make sure you pick up the transaction from the connection,
            // or odd things can happen...
            command.Transaction = connection.LocalTransaction;
            return command;
        }

        // Copies every row of the reader into a new DataTable named tableName.
        private DataTable DataTableFromDataReader(IDataReader reader, string tableName)
        {
            // set up the column structure of the data table from the reader
            object[] values;
            DataTable resultTable = NewDataTableFromReader(reader, out values, tableName);

            // populate the data table from the data reader
            while (reader.Read())
            {
                reader.GetValues(values);
                resultTable.Rows.Add(values);
            }
            return resultTable;
        }

        // Populates the DataTypes collection table from a SQLGetTypeInfo result set.
        // The index* constants below are the ordinal positions of the columns in the
        // SQLGetTypeInfo result set as defined by the ODBC specification.
        private void DataTableFromDataReaderDataTypes(DataTable dataTypesTable, OdbcDataReader dataReader, OdbcConnection connection)
        {
            DataTable schemaTable = null;
            //
            // Build a DataTable from the reader
            schemaTable = dataReader.GetSchemaTable();

            // vstfdevdiv:479715 Handle cases where reader is empty
            if (null == schemaTable)
            {
                throw ADP.OdbcNoTypesFromProvider();
            }

            object[] getTypeInfoValues = new object[schemaTable.Rows.Count];
            DataRow dataTypesRow;

            DataColumn typeNameColumn = dataTypesTable.Columns[DbMetaDataColumnNames.TypeName];
            DataColumn providerDbTypeColumn = dataTypesTable.Columns[DbMetaDataColumnNames.ProviderDbType];
            DataColumn columnSizeColumn = dataTypesTable.Columns[DbMetaDataColumnNames.ColumnSize];
            DataColumn createParametersColumn = dataTypesTable.Columns[DbMetaDataColumnNames.CreateParameters];
            DataColumn dataTypeColumn = dataTypesTable.Columns[DbMetaDataColumnNames.DataType];
            DataColumn isAutoIncermentableColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsAutoIncrementable];
            DataColumn isCaseSensitiveColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsCaseSensitive];
            DataColumn isFixedLengthColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsFixedLength];
            DataColumn isFixedPrecisionScaleColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsFixedPrecisionScale];
            DataColumn isLongColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsLong];
            DataColumn isNullableColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsNullable];
            DataColumn isSearchableColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsSearchable];
            DataColumn isSearchableWithLikeColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsSearchableWithLike];
            DataColumn isUnsignedColumn = dataTypesTable.Columns[DbMetaDataColumnNames.IsUnsigned];
            DataColumn maximumScaleColumn = dataTypesTable.Columns[DbMetaDataColumnNames.MaximumScale];
            DataColumn minimumScaleColumn = dataTypesTable.Columns[DbMetaDataColumnNames.MinimumScale];
            DataColumn literalPrefixColumn = dataTypesTable.Columns[DbMetaDataColumnNames.LiteralPrefix];
            DataColumn literalSuffixColumn = dataTypesTable.Columns[DbMetaDataColumnNames.LiteralSuffix];
            DataColumn SQLTypeNameColumn = dataTypesTable.Columns[OdbcMetaDataColumnNames.SQLType];

            // ordinals of the SQLGetTypeInfo result-set columns (per ODBC spec)
            const int indexTYPE_NAME = 0;
            const int indexDATA_TYPE = 1;
            const int indexCOLUMN_SIZE = 2;
            const int indexCREATE_PARAMS = 5;
            const int indexAUTO_UNIQUE_VALUE = 11;
            const int indexCASE_SENSITIVE = 7;
            const int indexFIXED_PREC_SCALE = 10;
            const int indexNULLABLE = 6;
            const int indexSEARCHABLE = 8;
            const int indexUNSIGNED_ATTRIBUTE = 9;
            const int indexMAXIMUM_SCALE = 14;
            const int indexMINIMUM_SCALE = 13;
            const int indexLITERAL_PREFIX = 3;
            const int indexLITERAL_SUFFIX = 4;

            // ODBC 2.x type codes for date/time, remapped below for pre-v3 drivers
            const int SQL_DATE_V2 = 9;
            const int SQL_TIME_V2 = 10;

            TypeMap typeMap;

            while (dataReader.Read())
            {
                dataReader.GetValues(getTypeInfoValues);
                dataTypesRow = dataTypesTable.NewRow();

                ODBC32.SQL_TYPE sqlType;

                dataTypesRow[typeNameColumn] = getTypeInfoValues[indexTYPE_NAME];
                dataTypesRow[SQLTypeNameColumn] = getTypeInfoValues[indexDATA_TYPE];

                sqlType = (ODBC32.SQL_TYPE)(Int32)Convert.ChangeType(getTypeInfoValues[indexDATA_TYPE],
                                                                     typeof(Int32),
                                                                     (System.IFormatProvider)null);
                // if the driver is pre version 3 and it returned the v2 SQL_DATE or SQL_TIME types they need
                // to be mapped to their v3 equivalents
                if (connection.IsV3Driver == false)
                {
                    if ((int)sqlType == SQL_DATE_V2)
                    {
                        sqlType = ODBC32.SQL_TYPE.TYPE_DATE;
                    }
                    else if ((int)sqlType == SQL_TIME_V2)
                    {
                        sqlType = ODBC32.SQL_TYPE.TYPE_TIME;
                    }
                }
                try
                {
                    typeMap = TypeMap.FromSqlType(sqlType);
                }
                // FromSqlType will throw an argument exception if it does not recognize the SqlType.
                // This is not an error since the GetTypeInfo DATA_TYPE may be a SQL data type or a driver specific
                // type. If there is no TypeMap for the type its not an error but it will degrade our level of
                // understanding of/ support for the type.
                catch (ArgumentException)
                {
                    typeMap = null;
                }

                // if we have a type map we can determine the dbType and the CLR type if not leave them null
                if (typeMap != null)
                {
                    dataTypesRow[providerDbTypeColumn] = typeMap._odbcType;
                    dataTypesRow[dataTypeColumn] = typeMap._type.FullName;
                    // setting isLong and isFixedLength only if we have a type map because for provider
                    // specific types we have no idea what the types attributes are if GetTypeInfo did not
                    // tell us
                    switch (sqlType)
                    {
                        case ODBC32.SQL_TYPE.LONGVARCHAR:
                        case ODBC32.SQL_TYPE.WLONGVARCHAR:
                        case ODBC32.SQL_TYPE.LONGVARBINARY:
                        case ODBC32.SQL_TYPE.SS_XML:
                            dataTypesRow[isLongColumn] = true;
                            dataTypesRow[isFixedLengthColumn] = false;
                            break;

                        case ODBC32.SQL_TYPE.VARCHAR:
                        case ODBC32.SQL_TYPE.WVARCHAR:
                        case ODBC32.SQL_TYPE.VARBINARY:
                            dataTypesRow[isLongColumn] = false;
                            dataTypesRow[isFixedLengthColumn] = false;
                            break;

                        case ODBC32.SQL_TYPE.CHAR:
                        case ODBC32.SQL_TYPE.WCHAR:
                        case ODBC32.SQL_TYPE.DECIMAL:
                        case ODBC32.SQL_TYPE.NUMERIC:
                        case ODBC32.SQL_TYPE.SMALLINT:
                        case ODBC32.SQL_TYPE.INTEGER:
                        case ODBC32.SQL_TYPE.REAL:
                        case ODBC32.SQL_TYPE.FLOAT:
                        case ODBC32.SQL_TYPE.DOUBLE:
                        case ODBC32.SQL_TYPE.BIT:
                        case ODBC32.SQL_TYPE.TINYINT:
                        case ODBC32.SQL_TYPE.BIGINT:
                        case ODBC32.SQL_TYPE.TYPE_DATE:
                        case ODBC32.SQL_TYPE.TYPE_TIME:
                        case ODBC32.SQL_TYPE.TIMESTAMP:
                        case ODBC32.SQL_TYPE.TYPE_TIMESTAMP:
                        case ODBC32.SQL_TYPE.GUID:
                        case ODBC32.SQL_TYPE.SS_VARIANT:
                        case ODBC32.SQL_TYPE.SS_UTCDATETIME:
                        case ODBC32.SQL_TYPE.SS_TIME_EX:
                        case ODBC32.SQL_TYPE.BINARY:
                            dataTypesRow[isLongColumn] = false;
                            dataTypesRow[isFixedLengthColumn] = true;
                            break;

                        case ODBC32.SQL_TYPE.SS_UDT:
                        default:
                            // for User defined types don't know if its long or or if it is
                            // variable length or not so leave the fields null
                            break;
                    }
                }

                dataTypesRow[columnSizeColumn] = getTypeInfoValues[indexCOLUMN_SIZE];
                dataTypesRow[createParametersColumn] = getTypeInfoValues[indexCREATE_PARAMS];

                if ((getTypeInfoValues[indexAUTO_UNIQUE_VALUE] == DBNull.Value) ||
                    (Convert.ToInt16(getTypeInfoValues[indexAUTO_UNIQUE_VALUE], null) == 0))
                {
                    dataTypesRow[isAutoIncermentableColumn] = false;
                }
                else
                {
                    dataTypesRow[isAutoIncermentableColumn] = true;
                }

                dataTypesRow[isCaseSensitiveColumn] = BooleanFromODBC(getTypeInfoValues[indexCASE_SENSITIVE]);
                dataTypesRow[isFixedPrecisionScaleColumn] = BooleanFromODBC(getTypeInfoValues[indexFIXED_PREC_SCALE]);

                if (getTypeInfoValues[indexNULLABLE] != DBNull.Value)
                {
                    //Use Convert.ToInt16 instead of direct cast to short because the value will be Int32 in some cases
                    switch ((ODBC32.SQL_NULLABILITY)Convert.ToInt16(getTypeInfoValues[indexNULLABLE], null))
                    {
                        case ODBC32.SQL_NULLABILITY.NO_NULLS:
                            dataTypesRow[isNullableColumn] = false;
                            break;

                        case ODBC32.SQL_NULLABILITY.NULLABLE:
                            dataTypesRow[isNullableColumn] = true;
                            break;

                        case ODBC32.SQL_NULLABILITY.UNKNOWN:
                            dataTypesRow[isNullableColumn] = DBNull.Value;
                            break;
                    }
                }

                if (DBNull.Value != getTypeInfoValues[indexSEARCHABLE])
                {
                    //Use Convert.ToInt16 instead of direct cast to short because the value will be Int32 in some cases
                    Int16 searchableValue = Convert.ToInt16(getTypeInfoValues[indexSEARCHABLE], null);
                    switch (searchableValue)
                    {
                        case (Int16)ODBC32.SQL_SEARCHABLE.UNSEARCHABLE:
                            dataTypesRow[isSearchableColumn] = false;
                            dataTypesRow[isSearchableWithLikeColumn] = false;
                            break;

                        case (Int16)ODBC32.SQL_SEARCHABLE.LIKE_ONLY:
                            dataTypesRow[isSearchableColumn] = false;
                            dataTypesRow[isSearchableWithLikeColumn] = true;
                            break;

                        case (Int16)ODBC32.SQL_SEARCHABLE.ALL_EXCEPT_LIKE:
                            dataTypesRow[isSearchableColumn] = true;
                            dataTypesRow[isSearchableWithLikeColumn] = false;
                            break;

                        case (Int16)ODBC32.SQL_SEARCHABLE.SEARCHABLE:
                            dataTypesRow[isSearchableColumn] = true;
                            dataTypesRow[isSearchableWithLikeColumn] = true;
                            break;
                    }
                }

                dataTypesRow[isUnsignedColumn] = BooleanFromODBC(getTypeInfoValues[indexUNSIGNED_ATTRIBUTE]);

                //For assignment to the DataSet, don't cast the data types -- let the DataSet take care of any conversion
                if (getTypeInfoValues[indexMAXIMUM_SCALE] != DBNull.Value)
                {
                    dataTypesRow[maximumScaleColumn] = getTypeInfoValues[indexMAXIMUM_SCALE];
                }

                if (getTypeInfoValues[indexMINIMUM_SCALE] != DBNull.Value)
                {
                    dataTypesRow[minimumScaleColumn] = getTypeInfoValues[indexMINIMUM_SCALE];
                }

                if (getTypeInfoValues[indexLITERAL_PREFIX] != DBNull.Value)
                {
                    dataTypesRow[literalPrefixColumn] = getTypeInfoValues[indexLITERAL_PREFIX];
                }
                if (getTypeInfoValues[indexLITERAL_SUFFIX] != DBNull.Value)
                {
                    dataTypesRow[literalSuffixColumn] = getTypeInfoValues[indexLITERAL_SUFFIX];
                }
                dataTypesTable.Rows.Add(dataTypesRow);
            }
        }

        // Copies a SQLStatistics result set into a DataTable, filtering rows via
        // IncludeIndexRow (drops TABLE_STAT rows and, optionally, non-matching index names).
        // positionOfType / positionOfIndexName are the SQLStatistics column ordinals
        // for TYPE and INDEX_NAME.
        private DataTable DataTableFromDataReaderIndex(IDataReader reader,
                                                       string tableName,
                                                       string restrictionIndexName)
        {
            // set up the column structure of the data table from the reader
            object[] values;
            DataTable resultTable = NewDataTableFromReader(reader, out values, tableName);

            // populate the data table from the data reader
            int positionOfType = 6;
            int positionOfIndexName = 5;
            while (reader.Read())
            {
                reader.GetValues(values);
                if (IncludeIndexRow(values[positionOfIndexName],
                                    restrictionIndexName,
                                    Convert.ToInt16(values[positionOfType], null)) == true)
                {
                    resultTable.Rows.Add(values);
                }
            }
            return resultTable;
        }

        // Copies a SQLProcedureColumns result set into a DataTable. When isColumn is
        // true only SQL_RESULT_COL rows (result-set columns) are kept; when false,
        // everything except SQL_RESULT_COL rows (i.e. parameters) is kept.
        private DataTable DataTableFromDataReaderProcedureColumns(IDataReader reader, string tableName, Boolean isColumn)
        {
            // set up the column structure of the data table from the reader
            object[] values;
            DataTable resultTable = NewDataTableFromReader(reader, out values, tableName);

            // populate the data table from the data reader
            int positionOfColumnType = 4;
            while (reader.Read())
            {
                reader.GetValues(values);
                // the column type should always be short but need to check just in case
                if (values[positionOfColumnType].GetType() == typeof(short))
                {
                    if ((((short)values[positionOfColumnType] == ODBC32.SQL_RESULT_COL) && (isColumn == true)) ||
                        (((short)values[positionOfColumnType] != ODBC32.SQL_RESULT_COL) && (isColumn == false)))
                    {
                        resultTable.Rows.Add(values);
                    }
                }
            }
            return resultTable;
        }

        // Copies a SQLProcedures result set into a DataTable keeping only the rows
        // whose PROCEDURE_TYPE column matches procedureType.
        private DataTable DataTableFromDataReaderProcedures(IDataReader reader, string tableName, Int16 procedureType)
        {
            // Build a DataTable from the reader
            // set up the column structure of the data table from the reader
            object[] values;
            DataTable resultTable = NewDataTableFromReader(reader, out values, tableName);

            // populate the data table from the data reader
            int positionOfProcedureType = 7;
            while (reader.Read())
            {
                reader.GetValues(values);
                // the column type should always be short but need to check just in case its null
                if (values[positionOfProcedureType].GetType() == typeof(short))
                {
                    if ((short)values[positionOfProcedureType] == procedureType)
                    {
                        resultTable.Rows.Add(values);
                    }
                }
            }
            return resultTable;
        }

        // Copies caller-supplied restrictions into allRestrictions, validating the
        // count and null-padding any remaining slots.
        private void FillOutRestrictions(int restrictionsCount, string[] restrictions, object[] allRestrictions, string collectionName)
        {
            Debug.Assert(allRestrictions.Length >= restrictionsCount);

            int i = 0;

            // if we have restrictions put them in the restrictions array
            if (restrictions != null)
            {
                if (restrictions.Length > restrictionsCount)
                {
                    throw ADP.TooManyRestrictions(collectionName);
                }

                for (i = 0; i < restrictions.Length; i++)
                {
                    if (restrictions[i] != null)
                    {
                        allRestrictions[i] = restrictions[i];
                    }
                }
            }

            // initialize the rest to no restrictions
            for (; i < restrictionsCount; i++)
            {
                allRestrictions[i] = null;
            }
        }

        // Builds the Columns collection by calling SQLColumns with up to
        // four restrictions (catalog, schema, table, column).
        private DataTable GetColumnsCollection(String[] restrictions, OdbcConnection connection)
        {
            OdbcCommand command = null;
            OdbcDataReader dataReader = null;
            DataTable resultTable = null;
            const int columnsRestrictionsCount = 4;

            try
            {
                command = GetCommand(connection);
                String[] allRestrictions = new string[columnsRestrictionsCount];
                FillOutRestrictions(columnsRestrictionsCount, restrictions, allRestrictions, OdbcMetaDataCollectionNames.Columns);

                dataReader = command.ExecuteReaderFromSQLMethod(allRestrictions, ODBC32.SQL_API.SQLCOLUMNS);

                resultTable = DataTableFromDataReader(dataReader, OdbcMetaDataCollectionNames.Columns);
            }
            finally
            {
                if (dataReader != null)
                {
                    dataReader.Dispose();
                };
                if (command != null)
                {
                    command.Dispose();
                };
            }
            return resultTable;
        }

        // Builds the DataSourceInformation collection by starting from the single
        // XML-defined row and overriding its values with live SQLGetInfo results
        // (separator, product name/version, join/group-by support, identifier case,
        // quoting pattern, ...). Takes no restrictions.
        private DataTable GetDataSourceInformationCollection(string[] restrictions, OdbcConnection connection)
        {
            if (ADP.IsEmptyArray(restrictions) == false)
            {
                throw ADP.TooManyRestrictions(DbMetaDataCollectionNames.DataSourceInformation);
            }

            // verify that the data source information table is in the data set
            DataTable dataSourceInformationTable = CollectionDataSet.Tables[DbMetaDataCollectionNames.DataSourceInformation];
            if (dataSourceInformationTable == null)
            {
                throw ADP.UnableToBuildCollection(DbMetaDataCollectionNames.DataSourceInformation);
            }

            // copy the table filtering out any rows that don't apply to the current version of the provider
            dataSourceInformationTable = CloneAndFilterCollection(DbMetaDataCollectionNames.DataSourceInformation, null);

            // after filtering there better be just one row
            if (dataSourceInformationTable.Rows.Count != 1)
            {
                throw ADP.IncorrectNumberOfDataSourceInformationRows();
            }
            DataRow dataSourceInformation = dataSourceInformationTable.Rows[0];

            string stringValue;
            Int16 int16Value;
            Int32 int32Value;
            ODBC32.RetCode retcode;

            // update the catalog separator
            stringValue = connection.GetInfoStringUnhandled(ODBC32.SQL_INFO.CATALOG_NAME_SEPARATOR);
            if (!string.IsNullOrEmpty(stringValue))
            {
                StringBuilder patternEscaped = new StringBuilder();
                ADP.EscapeSpecialCharacters(stringValue, patternEscaped);
                dataSourceInformation[DbMetaDataColumnNames.CompositeIdentifierSeparatorPattern] = patternEscaped.ToString();
            }

            // get the DBMS Name
            stringValue = connection.GetInfoStringUnhandled(ODBC32.SQL_INFO.DBMS_NAME);
            if (stringValue != null)
            {
                dataSourceInformation[DbMetaDataColumnNames.DataSourceProductName] = stringValue;
            }

            // update the server version strings
            dataSourceInformation[DbMetaDataColumnNames.DataSourceProductVersion] = ServerVersion;
            dataSourceInformation[DbMetaDataColumnNames.DataSourceProductVersionNormalized] = ServerVersionNormalized;

            // values that are the same for all ODBC drivers. See bug 105333
            dataSourceInformation[DbMetaDataColumnNames.ParameterMarkerFormat] = "?";
            dataSourceInformation[DbMetaDataColumnNames.ParameterMarkerPattern] = "\\?";
            dataSourceInformation[DbMetaDataColumnNames.ParameterNameMaxLength] = 0;

            // determine the supportedJoinOperators
            // leave the column null if the GetInfo fails. There is no explicit value for
            // unknown.
            if (connection.IsV3Driver)
            {
                retcode = connection.GetInfoInt32Unhandled(ODBC32.SQL_INFO.SQL_OJ_CAPABILITIES_30, out int32Value);
            }
            else
            {
                retcode = connection.GetInfoInt32Unhandled(ODBC32.SQL_INFO.SQL_OJ_CAPABILITIES_20, out int32Value);
            }

            if ((retcode == ODBC32.RetCode.SUCCESS) || (retcode == ODBC32.RetCode.SUCCESS_WITH_INFO))
            {
                Common.SupportedJoinOperators supportedJoinOperators = Common.SupportedJoinOperators.None;
                if ((int32Value & (Int32)ODBC32.SQL_OJ_CAPABILITIES.LEFT) != 0)
                {
                    supportedJoinOperators = supportedJoinOperators | Common.SupportedJoinOperators.LeftOuter;
                }
                if ((int32Value & (Int32)ODBC32.SQL_OJ_CAPABILITIES.RIGHT) != 0)
                {
                    supportedJoinOperators = supportedJoinOperators | Common.SupportedJoinOperators.RightOuter;
                }
                if ((int32Value & (Int32)ODBC32.SQL_OJ_CAPABILITIES.FULL) != 0)
                {
                    supportedJoinOperators = supportedJoinOperators | Common.SupportedJoinOperators.FullOuter;
                }
                if ((int32Value & (Int32)ODBC32.SQL_OJ_CAPABILITIES.INNER) != 0)
                {
                    supportedJoinOperators = supportedJoinOperators | Common.SupportedJoinOperators.Inner;
                }
                dataSourceInformation[DbMetaDataColumnNames.SupportedJoinOperators] = supportedJoinOperators;
            }

            // determine the GroupByBehavior
            retcode = connection.GetInfoInt16Unhandled(ODBC32.SQL_INFO.GROUP_BY, out int16Value);
            Common.GroupByBehavior groupByBehavior = Common.GroupByBehavior.Unknown;

            if ((retcode == ODBC32.RetCode.SUCCESS) || (retcode == ODBC32.RetCode.SUCCESS_WITH_INFO))
            {
                switch (int16Value)
                {
                    case (Int16)ODBC32.SQL_GROUP_BY.NOT_SUPPORTED:
                        groupByBehavior = Common.GroupByBehavior.NotSupported;
                        break;

                    case (Int16)ODBC32.SQL_GROUP_BY.GROUP_BY_EQUALS_SELECT:
                        groupByBehavior = Common.GroupByBehavior.ExactMatch;
                        break;

                    case (Int16)ODBC32.SQL_GROUP_BY.GROUP_BY_CONTAINS_SELECT:
                        groupByBehavior = Common.GroupByBehavior.MustContainAll;
                        break;

                    case (Int16)ODBC32.SQL_GROUP_BY.NO_RELATION:
                        groupByBehavior = Common.GroupByBehavior.Unrelated;
                        break;
                        /* COLLATE is new in ODBC 3.0 and GroupByBehavior does not have a value for it.
                                            case ODBC32.SQL_GROUP_BY.COLLATE:
                                                groupByBehavior = Common.GroupByBehavior.Unknown;
                                                break;
                        */
                }
            }
            dataSourceInformation[DbMetaDataColumnNames.GroupByBehavior] = groupByBehavior;

            // determine the identifier case
            retcode = connection.GetInfoInt16Unhandled(ODBC32.SQL_INFO.IDENTIFIER_CASE, out int16Value);
            Common.IdentifierCase identifierCase = Common.IdentifierCase.Unknown;

            if ((retcode == ODBC32.RetCode.SUCCESS) || (retcode == ODBC32.RetCode.SUCCESS_WITH_INFO))
            {
                switch (int16Value)
                {
                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.SENSITIVE:
                        identifierCase = Common.IdentifierCase.Sensitive;
                        break;

                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.UPPER:
                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.LOWER:
                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.MIXED:
                        identifierCase = Common.IdentifierCase.Insensitive;
                        break;
                }
            }
            dataSourceInformation[DbMetaDataColumnNames.IdentifierCase] = identifierCase;

            // OrderByColumnsInSelect
            stringValue = connection.GetInfoStringUnhandled(ODBC32.SQL_INFO.ORDER_BY_COLUMNS_IN_SELECT);
            if (stringValue != null)
            {
                if (stringValue == "Y")
                {
                    dataSourceInformation[DbMetaDataColumnNames.OrderByColumnsInSelect] = true;
                }
                else if (stringValue == "N")
                {
                    dataSourceInformation[DbMetaDataColumnNames.OrderByColumnsInSelect] = false;
                }
            }

            // build the QuotedIdentifierPattern using the quote prefix and suffix from the provider and
            // assuming that the quote suffix is escaped via repetition (i.e " becomes "")
            stringValue = connection.QuoteChar(ADP.GetSchema);

            if (stringValue != null)
            {
                // by spec a blank identifier quote char indicates that the provider does not support
                // quoted identifiers
                if (stringValue != " ")
                {
                    // only know how to build the pattern if the quote character is 1 character
                    // in all other cases just leave the field null
                    if (stringValue.Length == 1)
                    {
                        StringBuilder scratchStringBuilder = new StringBuilder();
                        ADP.EscapeSpecialCharacters(stringValue, scratchStringBuilder);
                        string escapedQuoteSuffixString = scratchStringBuilder.ToString();
                        scratchStringBuilder.Length = 0;

                        ADP.EscapeSpecialCharacters(stringValue, scratchStringBuilder);
                        scratchStringBuilder.Append("(([^");
                        scratchStringBuilder.Append(escapedQuoteSuffixString);
                        scratchStringBuilder.Append("]|");
                        scratchStringBuilder.Append(escapedQuoteSuffixString);
                        scratchStringBuilder.Append(escapedQuoteSuffixString);
                        scratchStringBuilder.Append(")*)");
                        scratchStringBuilder.Append(escapedQuoteSuffixString);
                        dataSourceInformation[DbMetaDataColumnNames.QuotedIdentifierPattern] = scratchStringBuilder.ToString();
                    }
                }
            }

            // determine the quoted identifier case
            retcode = connection.GetInfoInt16Unhandled(ODBC32.SQL_INFO.QUOTED_IDENTIFIER_CASE, out int16Value);
            Common.IdentifierCase quotedIdentifierCase = Common.IdentifierCase.Unknown;

            if ((retcode == ODBC32.RetCode.SUCCESS) || (retcode == ODBC32.RetCode.SUCCESS_WITH_INFO))
            {
                switch (int16Value)
                {
                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.SENSITIVE:
                        quotedIdentifierCase = Common.IdentifierCase.Sensitive;
                        break;

                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.UPPER:
                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.LOWER:
                    case (Int16)ODBC32.SQL_IDENTIFIER_CASE.MIXED:
                        quotedIdentifierCase = Common.IdentifierCase.Insensitive;
                        break;
                }
            }
            dataSourceInformation[DbMetaDataColumnNames.QuotedIdentifierCase] = quotedIdentifierCase;

            dataSourceInformationTable.AcceptChanges();
            return dataSourceInformationTable;
        }

        // Builds the DataTypes collection: starts from the XML-defined table and
        // fills it from a SQLGetTypeInfo(SQL_ALL_TYPES) result set. Takes no restrictions.
        private DataTable GetDataTypesCollection(String[] restrictions, OdbcConnection connection)
        {
            if (ADP.IsEmptyArray(restrictions) == false)
            {
                throw ADP.TooManyRestrictions(DbMetaDataCollectionNames.DataTypes);
            }

            // verify the existence of the table in the data set
            DataTable dataTypesTable = CollectionDataSet.Tables[DbMetaDataCollectionNames.DataTypes];
            if (dataTypesTable == null)
            {
                throw ADP.UnableToBuildCollection(DbMetaDataCollectionNames.DataTypes);
            }

            // copy the data table it
            dataTypesTable = CloneAndFilterCollection(DbMetaDataCollectionNames.DataTypes, null);

            OdbcCommand command = null;
            OdbcDataReader dataReader = null;
            object[] allArguments = new object[1];
            allArguments[0] = ODBC32.SQL_ALL_TYPES;

            try
            {
                command = GetCommand(connection);

                dataReader = command.ExecuteReaderFromSQLMethod(allArguments, ODBC32.SQL_API.SQLGETTYPEINFO);

                DataTableFromDataReaderDataTypes(dataTypesTable, dataReader, connection);
            }
            finally
            {
                if (dataReader != null)
                {
                    dataReader.Dispose();
                };
                if (command != null)
                {
                    command.Dispose();
                };
            }
            dataTypesTable.AcceptChanges();
            return dataTypesTable;
        }

        // Builds the Indexes collection via SQLStatistics. A table-name restriction
        // is required; an optional fourth restriction filters by index name (the
        // name filtering is done client-side because slot 3 is reused for SQL_INDEX.ALL).
        private DataTable GetIndexCollection(String[] restrictions, OdbcConnection connection)
        {
            OdbcCommand command = null;
            OdbcDataReader dataReader = null;
            DataTable resultTable = null;
            const int nativeRestrictionsCount = 5;
            const int indexRestrictionsCount = 4;
            const int indexOfTableName = 2;
            const int indexOfIndexName = 3;

            try
            {
                command = GetCommand(connection);
                object[] allRestrictions = new object[nativeRestrictionsCount];
                FillOutRestrictions(indexRestrictionsCount, restrictions, allRestrictions, OdbcMetaDataCollectionNames.Indexes);

                if (allRestrictions[indexOfTableName] == null)
                {
                    throw ODBC.GetSchemaRestrictionRequired();
                }

                allRestrictions[3] = (Int16)ODBC32.SQL_INDEX.ALL;
                allRestrictions[4] = (Int16)ODBC32.SQL_STATISTICS_RESERVED.ENSURE;

                dataReader = command.ExecuteReaderFromSQLMethod(allRestrictions, ODBC32.SQL_API.SQLSTATISTICS);

                string indexName = null;
                if (restrictions != null)
                {
                    if (restrictions.Length >= indexOfIndexName + 1)
                    {
                        indexName = restrictions[indexOfIndexName];
                    }
                }

                resultTable = DataTableFromDataReaderIndex(dataReader, OdbcMetaDataCollectionNames.Indexes, indexName);
            }
            finally
            {
                if (dataReader != null)
                {
                    dataReader.Dispose();
                };
                if (command != null)
                {
                    command.Dispose();
                };
            }
            return resultTable;
        }

        // Builds the ProcedureColumns (isColumns == true) or ProcedureParameters
        // (isColumns == false) collection from a single SQLProcedureColumns call.
        private DataTable GetProcedureColumnsCollection(String[] restrictions, OdbcConnection connection, Boolean isColumns)
        {
            OdbcCommand command = null;
            OdbcDataReader dataReader = null;
            DataTable resultTable = null;
            const int procedureColumnsRestrictionsCount = 4;

            try
            {
                command = GetCommand(connection);
                String[] allRestrictions = new string[procedureColumnsRestrictionsCount];
                FillOutRestrictions(procedureColumnsRestrictionsCount, restrictions, allRestrictions, OdbcMetaDataCollectionNames.Columns);

                dataReader = command.ExecuteReaderFromSQLMethod(allRestrictions, ODBC32.SQL_API.SQLPROCEDURECOLUMNS);

                string collectionName;
                if (isColumns == true)
                {
                    collectionName = OdbcMetaDataCollectionNames.ProcedureColumns;
                }
                else
                {
                    collectionName = OdbcMetaDataCollectionNames.ProcedureParameters;
                }
                resultTable = DataTableFromDataReaderProcedureColumns(dataReader, collectionName, isColumns);
            }
            finally
            {
                if (dataReader != null)
                {
                    dataReader.Dispose();
                };
                if (command != null)
                {
                    command.Dispose();
                };
            }
            return resultTable;
        }

        // Builds the Procedures collection via SQLProcedures. An optional fourth
        // restriction selects the procedure type, accepted either as the ODBC
        // symbolic name ("SQL_PT_PROCEDURE") or its numeric value; that filter is
        // applied client-side since SQLProcedures does not take it as an argument.
        private DataTable GetProceduresCollection(String[] restrictions, OdbcConnection connection)
        {
            OdbcCommand command = null;
            OdbcDataReader dataReader = null;
            DataTable resultTable = null;
            const int columnsRestrictionsCount = 4;
            const int indexOfProcedureType = 3;

            try
            {
                command = GetCommand(connection);
                String[] allRestrictions = new string[columnsRestrictionsCount];
                FillOutRestrictions(columnsRestrictionsCount, restrictions, allRestrictions, OdbcMetaDataCollectionNames.Procedures);

                dataReader = command.ExecuteReaderFromSQLMethod(allRestrictions, ODBC32.SQL_API.SQLPROCEDURES);

                if (allRestrictions[indexOfProcedureType] == null)
                {
                    resultTable = DataTableFromDataReader(dataReader, OdbcMetaDataCollectionNames.Procedures);
                }
                else
                {
                    Int16 procedureType;
                    if ((restrictions[indexOfProcedureType] == "SQL_PT_UNKNOWN") ||
                        (restrictions[indexOfProcedureType] == "0" /*ODBC32.SQL_PROCEDURETYPE.UNKNOWN*/))
                    {
                        procedureType = (Int16)ODBC32.SQL_PROCEDURETYPE.UNKNOWN;
                    }
                    else if ((restrictions[indexOfProcedureType] == "SQL_PT_PROCEDURE") ||
                             (restrictions[indexOfProcedureType] == "1" /*ODBC32.SQL_PROCEDURETYPE.PROCEDURE*/))
                    {
                        procedureType = (Int16)ODBC32.SQL_PROCEDURETYPE.PROCEDURE;
                    }
                    else if ((restrictions[indexOfProcedureType] == "SQL_PT_FUNCTION") ||
                             (restrictions[indexOfProcedureType] == "2" /*ODBC32.SQL_PROCEDURETYPE.FUNCTION*/))
                    {
                        procedureType = (Int16)ODBC32.SQL_PROCEDURETYPE.FUNCTION;
                    }
                    else
                    {
                        throw ADP.InvalidRestrictionValue(OdbcMetaDataCollectionNames.Procedures, "PROCEDURE_TYPE", restrictions[indexOfProcedureType]);
                    }

                    resultTable = DataTableFromDataReaderProcedures(dataReader, OdbcMetaDataCollectionNames.Procedures, procedureType);
                }
            }
            finally
            {
                if (dataReader != null)
                {
                    dataReader.Dispose();
                };
                if (command != null)
                {
                    command.Dispose();
                };
            }
            return resultTable;
        }

        // Builds the ReservedWords collection by splitting the driver's
        // comma-separated SQL_KEYWORDS string into one row per keyword.
        // Takes no restrictions.
        private DataTable GetReservedWordsCollection(string[] restrictions, OdbcConnection connection)
        {
            if (ADP.IsEmptyArray(restrictions) == false)
            {
                throw ADP.TooManyRestrictions(DbMetaDataCollectionNames.ReservedWords);
            }

            // verify the existence of the table in the data set
            DataTable reservedWordsTable = CollectionDataSet.Tables[DbMetaDataCollectionNames.ReservedWords];
            if (reservedWordsTable == null)
            {
                throw ADP.UnableToBuildCollection(DbMetaDataCollectionNames.ReservedWords);
            }

            // copy the table filtering out any rows that don't apply to the current version of the provider
            reservedWordsTable = CloneAndFilterCollection(DbMetaDataCollectionNames.ReservedWords, null);

            DataColumn reservedWordColumn = reservedWordsTable.Columns[DbMetaDataColumnNames.ReservedWord];
            if (reservedWordColumn == null)
            {
                throw ADP.UnableToBuildCollection(DbMetaDataCollectionNames.ReservedWords);
            }

            string keywords = connection.GetInfoStringUnhandled(ODBC32.SQL_INFO.KEYWORDS);

            if (null != keywords)
            {
                string[] values = keywords.Split(KeywordSeparatorChar);
                for (int i = 0; i < values.Length; ++i)
                {
                    DataRow row = reservedWordsTable.NewRow();
                    row[reservedWordColumn] = values[i];

                    reservedWordsTable.Rows.Add(row);
                    row.AcceptChanges();
                }
            }

            return reservedWordsTable;
        }

        // Builds the Tables (isTables == true) or Views (isTables == false)
        // collection by calling SQLTables with an appropriate TABLE_TYPE filter
        // appended after the three caller restrictions.
        private DataTable GetTablesCollection(String[] restrictions, OdbcConnection connection, Boolean isTables)
        {
            OdbcCommand command = null;
            OdbcDataReader dataReader = null;
            DataTable resultTable = null;
            const int tablesRestrictionsCount = 3;
            const string includedTableTypesTables = "TABLE,SYSTEM TABLE";
            const string includedTableTypesViews = "VIEW";
            string includedTableTypes;
            string dataTableName;

            try
            {
                //command = (OdbcCommand) connection.CreateCommand();
                command = GetCommand(connection);
                string[] allArguments = new string[tablesRestrictionsCount + 1];
                if (isTables == true)
                {
                    includedTableTypes = includedTableTypesTables;
                    dataTableName = OdbcMetaDataCollectionNames.Tables;
                }
                else
                {
                    includedTableTypes = includedTableTypesViews;
                    dataTableName = OdbcMetaDataCollectionNames.Views;
                }
                FillOutRestrictions(tablesRestrictionsCount, restrictions, allArguments, dataTableName);

                allArguments[tablesRestrictionsCount] = includedTableTypes;

                dataReader = command.ExecuteReaderFromSQLMethod(allArguments, ODBC32.SQL_API.SQLTABLES);

                resultTable = DataTableFromDataReader(dataReader, dataTableName);
            }
            finally
            {
                if (dataReader != null)
                {
                    dataReader.Dispose();
                };
                if (command != null)
                {
                    command.Dispose();
                };
            }
            return resultTable;
        }

        // Decides whether a SQLStatistics row belongs in the Indexes collection:
        // table-statistics rows are always dropped; if an index-name restriction is
        // given, only rows with that exact index name are kept.
        private Boolean IncludeIndexRow(object rowIndexName,
                                        string restrictionIndexName,
                                        Int16 rowIndexType)
        {
            // never include table statistics rows
            if (rowIndexType == (Int16)ODBC32.SQL_STATISTICSTYPE.TABLE_STAT)
            {
                return false;
            }

            if ((restrictionIndexName != null) && (restrictionIndexName != (string)rowIndexName))
            {
                return false;
            }

            return true;
        }

        // Creates an empty invariant-culture DataTable whose columns mirror the
        // reader's schema, and returns (via values) a reusable row buffer sized to match.
        private DataTable NewDataTableFromReader(IDataReader reader, out object[] values, string tableName)
        {
            DataTable resultTable = new DataTable(tableName);
            resultTable.Locale = System.Globalization.CultureInfo.InvariantCulture;
            DataTable schemaTable = reader.GetSchemaTable();
            foreach (DataRow row in schemaTable.Rows)
            {
                resultTable.Columns.Add(row["ColumnName"] as string, (Type)row["DataType"] as Type);
            }

            values = new object[resultTable.Columns.Count];
            return resultTable;
        }

        // Entry point called by the base class for "prepare" collections: dispatches
        // the requested collection name to the matching Get*Collection builder.
        protected override DataTable PrepareCollection(String collectionName, String[] restrictions, DbConnection connection)
        {
            DataTable resultTable = null;
            OdbcConnection odbcConnection = (OdbcConnection)connection;

            if (collectionName == OdbcMetaDataCollectionNames.Tables)
            {
                resultTable = GetTablesCollection(restrictions, odbcConnection, true);
            }
            else if (collectionName == OdbcMetaDataCollectionNames.Views)
            {
                resultTable = GetTablesCollection(restrictions, odbcConnection, false);
            }
            else if (collectionName == OdbcMetaDataCollectionNames.Columns)
            {
                resultTable = GetColumnsCollection(restrictions, odbcConnection);
            }
            else if (collectionName == OdbcMetaDataCollectionNames.Procedures)
            {
                resultTable = GetProceduresCollection(restrictions, odbcConnection);
            }
            else if (collectionName == OdbcMetaDataCollectionNames.ProcedureColumns)
            {
                resultTable = GetProcedureColumnsCollection(restrictions, odbcConnection, true);
            }
            else if (collectionName == OdbcMetaDataCollectionNames.ProcedureParameters)
            {
                resultTable = GetProcedureColumnsCollection(restrictions, odbcConnection, false);
            }
            else if (collectionName == OdbcMetaDataCollectionNames.Indexes)
            {
                resultTable = GetIndexCollection(restrictions, odbcConnection);
            }
            else if (collectionName == DbMetaDataCollectionNames.DataTypes)
            {
                resultTable = GetDataTypesCollection(restrictions, odbcConnection);
            }
            else if (collectionName == DbMetaDataCollectionNames.DataSourceInformation)
            {
                resultTable = GetDataSourceInformationCollection(restrictions, odbcConnection);
            }
            else if (collectionName == DbMetaDataCollectionNames.ReservedWords)
            {
                resultTable = GetReservedWordsCollection(restrictions, odbcConnection);
            }

            if (resultTable == null)
            {
                throw ADP.UnableToBuildCollection(collectionName);
            }

            return resultTable;
        }
    }
}
using System;
using System.Reflection;
using UnityEditor.ShaderGraph.Drawing.Controls;
using UnityEngine;
using UnityEditor.Graphing;

namespace UnityEditor.ShaderGraph
{
    // Colorspaces this node can convert between.
    enum Colorspace
    {
        RGB,
        Linear,
        HSV
    }

    // Serializable (from, to) colorspace pair; IEnumConversion lets the
    // generic enum-conversion control edit it in the node UI.
    [Serializable]
    struct ColorspaceConversion : IEnumConversion
    {
        public Colorspace from;
        public Colorspace to;

        public ColorspaceConversion(Colorspace from, Colorspace to)
        {
            this.from = from;
            this.to = to;
        }

        Enum IEnumConversion.from
        {
            get { return from; }
            set { from = (Colorspace)value; }
        }

        Enum IEnumConversion.to
        {
            get { return to; }
            set { to = (Colorspace)value; }
        }
    }

    // Shader-graph node that emits HLSL converting a Vector3 color between
    // RGB (sRGB), Linear and HSV. The concrete shader function is picked by
    // reflection from the selected (from, to) pair.
    [Title("Artistic", "Utility", "Colorspace Conversion")]
    class ColorspaceConversionNode : CodeFunctionNode
    {
        public ColorspaceConversionNode()
        {
            name = "Colorspace Conversion";
        }

        [SerializeField]
        ColorspaceConversion m_Conversion = new ColorspaceConversion(Colorspace.RGB, Colorspace.RGB);

        // Current conversion pair; marks the graph dirty on change so the
        // generated shader is rebuilt.
        [EnumConversionControl]
        ColorspaceConversion conversion
        {
            get { return m_Conversion; }
            set
            {
                if (m_Conversion.Equals(value))
                    return;
                m_Conversion = value;
                Dirty(ModificationScope.Graph);
            }
        }

        string GetSpaceFrom()
        {
            return Enum.GetName(typeof(Colorspace), conversion.from);
        }

        string GetSpaceTo()
        {
            return Enum.GetName(typeof(Colorspace), conversion.to);
        }

        // Resolves e.g. "Unity_ColorspaceConversion_RGB_HSV" below; all nine
        // (from, to) combinations must therefore exist as private statics.
        protected override MethodInfo GetFunctionToConvert()
        {
            return GetType().GetMethod(string.Format("Unity_ColorspaceConversion_{0}_{1}", GetSpaceFrom(), GetSpaceTo()),
                BindingFlags.Static | BindingFlags.NonPublic);
        }

        // Identity: RGB -> RGB.
        static string Unity_ColorspaceConversion_RGB_RGB(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { Out = In; } ";
        }

        // sRGB -> linear (piecewise sRGB EOTF; 0.04045 is the segment split).
        static string Unity_ColorspaceConversion_RGB_Linear(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { {precision}3 linearRGBLo = In / 12.92; {precision}3 linearRGBHi = pow(max(abs((In + 0.055) / 1.055), 1.192092896e-07), {precision}3(2.4, 2.4, 2.4)); Out = {precision}3(In <= 0.04045) ? linearRGBLo : linearRGBHi; } ";
        }

        // RGB -> HSV (branchless formulation; E avoids division by zero).
        static string Unity_ColorspaceConversion_RGB_HSV(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { {precision}4 K = {precision}4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0); {precision}4 P = lerp({precision}4(In.bg, K.wz), {precision}4(In.gb, K.xy), step(In.b, In.g)); {precision}4 Q = lerp({precision}4(P.xyw, In.r), {precision}4(In.r, P.yzx), step(P.x, In.r)); {precision} D = Q.x - min(Q.w, Q.y); {precision} E = 1e-10; Out = {precision}3(abs(Q.z + (Q.w - Q.y)/(6.0 * D + E)), D / (Q.x + E), Q.x); } ";
        }

        // Linear -> sRGB (inverse of RGB_Linear; 0.0031308 is the split).
        static string Unity_ColorspaceConversion_Linear_RGB(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { {precision}3 sRGBLo = In * 12.92; {precision}3 sRGBHi = (pow(max(abs(In), 1.192092896e-07), {precision}3(1.0 / 2.4, 1.0 / 2.4, 1.0 / 2.4)) * 1.055) - 0.055; Out = {precision}3(In <= 0.0031308) ? sRGBLo : sRGBHi; } ";
        }

        // Identity: Linear -> Linear.
        static string Unity_ColorspaceConversion_Linear_Linear(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { Out = In; } ";
        }

        // Linear -> HSV: converts to sRGB first, then applies the RGB -> HSV
        // math. NOTE(review): the intermediate is named "Linear" but holds the
        // sRGB-encoded value — confusing but behaviorally intentional here.
        static string Unity_ColorspaceConversion_Linear_HSV(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { {precision}3 sRGBLo = In * 12.92; {precision}3 sRGBHi = (pow(max(abs(In), 1.192092896e-07), {precision}3(1.0 / 2.4, 1.0 / 2.4, 1.0 / 2.4)) * 1.055) - 0.055; {precision}3 Linear = {precision}3(In <= 0.0031308) ? sRGBLo : sRGBHi; {precision}4 K = {precision}4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0); {precision}4 P = lerp({precision}4(Linear.bg, K.wz), {precision}4(Linear.gb, K.xy), step(Linear.b, Linear.g)); {precision}4 Q = lerp({precision}4(P.xyw, Linear.r), {precision}4(Linear.r, P.yzx), step(P.x, Linear.r)); {precision} D = Q.x - min(Q.w, Q.y); {precision} E = 1e-10; Out = {precision}3(abs(Q.z + (Q.w - Q.y)/(6.0 * D + E)), D / (Q.x + E), Q.x); } ";
        }

        // HSV -> RGB (standard hue-fraction reconstruction).
        static string Unity_ColorspaceConversion_HSV_RGB(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { {precision}4 K = {precision}4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0); {precision}3 P = abs(frac(In.xxx + K.xyz) * 6.0 - K.www); Out = In.z * lerp(K.xxx, saturate(P - K.xxx), In.y); } ";
        }

        // HSV -> Linear: HSV -> RGB, then the sRGB -> linear transform.
        static string Unity_ColorspaceConversion_HSV_Linear(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { {precision}4 K = {precision}4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0); {precision}3 P = abs(frac(In.xxx + K.xyz) * 6.0 - K.www); {precision}3 RGB = In.z * lerp(K.xxx, saturate(P - K.xxx), In.y); {precision}3 linearRGBLo = RGB / 12.92; {precision}3 linearRGBHi = pow(max(abs((RGB + 0.055) / 1.055), 1.192092896e-07), {precision}3(2.4, 2.4, 2.4)); Out = {precision}3(RGB <= 0.04045) ? linearRGBLo : linearRGBHi; } ";
        }

        // Identity: HSV -> HSV.
        static string Unity_ColorspaceConversion_HSV_HSV(
            [Slot(0, Binding.None)] Vector3 In,
            [Slot(1, Binding.None)] out Vector3 Out)
        {
            Out = Vector3.zero;
            return @" { Out = In; } ";
        }
    }
}
using System;
using MyGeneration.dOOdads;

namespace MyGeneration.dOOdads.Tests.SQL
{
    /// <summary>
    /// Resets the AggregateTest table to a known baseline data set that the
    /// SQL aggregate unit tests run against.
    /// </summary>
    public class UnitTestBase
    {
        static UnitTestBase()
        {
        }

        /// <summary>
        /// Deletes every existing AggregateTest row, then repopulates the
        /// table with a fixed set of 24 employees plus five completely empty
        /// rows and one explicitly zeroed row, and saves the result.
        /// </summary>
        public static void RefreshDatabase()
        {
            AggregateTest testData = new AggregateTest();
            testData.ConnectionStringConfig = "SQLConnection";

            // Wipe whatever a previous run left behind.
            testData.LoadAll();
            testData.DeleteAll();
            testData.Save();

            // Fixed employee fixture (department, first, last, age, hire date, salary, active).
            AddEmployee(testData, "3", "David",    "Doe",      "16", "2000-02-16 00:00:00", "34.71", "true");
            AddEmployee(testData, "1", "Sarah",    "McDonald", "28", "1999-03-25 00:00:00", "11.06", "true");
            AddEmployee(testData, "3", "David",    "Vincent",  "43", "2000-10-17 00:00:00", "10.27", "false");
            AddEmployee(testData, "2", "Fred",     "Smith",    "15", "1999-03-15 00:00:00", "15.15", "true");
            AddEmployee(testData, "3", "Sally",    "Johnson",  "30", "2000-10-07 00:00:00", "14.36", "true");
            AddEmployee(testData, "5", "Jane",     "Rapaport", "44", "2002-05-02 00:00:00", "13.56", "false");
            AddEmployee(testData, "4", "Paul",     "Gellar",   "16", "2000-09-27 00:00:00", "18.44", "true");
            AddEmployee(testData, "2", "John",     "Jones",    "31", "2002-04-22 00:00:00", "17.65", "true");
            AddEmployee(testData, "3", "Michelle", "Johnson",  "45", "2003-11-14 00:00:00", "16.86", "false");
            AddEmployee(testData, "2", "David",    "Costner",  "17", "2002-04-11 00:00:00", "21.74", "true");
            AddEmployee(testData, "4", "William",  "Gellar",   "32", "2003-11-04 00:00:00", "20.94", "false");
            AddEmployee(testData, "3", "Sally",    "Rapaport", "39", "2002-04-01 00:00:00", "25.82", "true");
            AddEmployee(testData, "5", "Jane",     "Vincent",  "18", "2003-10-25 00:00:00", "25.03", "true");
            AddEmployee(testData, "2", "Fred",     "Costner",  "33", "1998-05-20 00:00:00", "24.24", "false");
            AddEmployee(testData, "1", "John",     "Johnson",  "40", "2003-10-15 00:00:00", "29.12", "true");
            AddEmployee(testData, "3", "Michelle", "Rapaport", "19", "1998-05-10 00:00:00", "28.32", "true");
            AddEmployee(testData, "4", "Sarah",    "Doe",      "34", "1999-12-03 00:00:00", "27.53", "false");
            AddEmployee(testData, "4", "William",  "Jones",    "41", "1998-04-30 00:00:00", "32.41", "true");
            AddEmployee(testData, "1", "Sarah",    "McDonald", "21", "1999-11-23 00:00:00", "31.62", "false");
            AddEmployee(testData, "4", "Jane",     "Costner",  "28", "1998-04-20 00:00:00", "36.50", "true");
            AddEmployee(testData, "2", "Fred",     "Douglas",  "42", "1999-11-13 00:00:00", "35.71", "true");
            AddEmployee(testData, "3", "Paul",     "Jones",    "22", "2001-06-07 00:00:00", "34.91", "false");
            AddEmployee(testData, "3", "Michelle", "Doe",      "29", "1999-11-03 00:00:00", "39.79", "true");
            AddEmployee(testData, "4", "Paul",     "Costner",  "43", "2001-05-28 00:00:00", "39.00", "true");

            // Five completely empty rows, then one row explicitly zeroed out
            // (HireDate and IsActive deliberately left unset) — preserved
            // from the original fixture so NULL/empty handling is exercised.
            for (int i = 0; i < 5; i++)
            {
                testData.AddNew();
            }
            testData.AddNew();
            testData.s_DepartmentID = "0";
            testData.s_FirstName = "";
            testData.s_LastName = "";
            testData.s_Age = "0";
            testData.s_Salary = "0";

            testData.Save();
        }

        // Appends one fully populated employee row to the entity's pending changes.
        private static void AddEmployee(AggregateTest testData, string departmentId, string firstName, string lastName, string age, string hireDate, string salary, string isActive)
        {
            testData.AddNew();
            testData.s_DepartmentID = departmentId;
            testData.s_FirstName = firstName;
            testData.s_LastName = lastName;
            testData.s_Age = age;
            testData.s_HireDate = hireDate;
            testData.s_Salary = salary;
            testData.s_IsActive = isActive;
        }
    }
}
// Copyright (C) 2014 dot42
//
// Original filename: Org.Apache.Http.Conn.Scheme.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// NOTE: machine-generated Dot42 binding stubs. The [Dot42.DexImport]
// attributes carry the Java/Dex signatures; method bodies are placeholders
// (the real implementation lives in the imported Android classes). Do not
// hand-edit the attribute strings.
#pragma warning disable 1717
namespace Org.Apache.Http.Conn.Scheme
{
    /// <summary>
    /// A factory for creating and connecting sockets. Implementations must
    /// override both Equals and GetHashCode for correct operation of some
    /// connection managers.
    /// </summary>
    /// <java-name>org/apache/http/conn/scheme/SocketFactory</java-name>
    [Dot42.DexImport("org/apache/http/conn/scheme/SocketFactory", AccessFlags = 1537)]
    public partial interface ISocketFactory /* scope: __dot42__ */
    {
        /// <summary>
        /// Creates a new, unconnected socket; pass it to ConnectSocket next.
        /// </summary>
        /// <returns>a new socket</returns>
        [Dot42.DexImport("createSocket", "()Ljava/net/Socket;", AccessFlags = 1025)]
        global::Java.Net.Socket CreateSocket() /* MethodBuilder.Create */ ;

        /// <summary>
        /// Connects a socket to the given host. The returned socket may differ
        /// from <c>sock</c> if this factory supports a layered protocol.
        /// </summary>
        [Dot42.DexImport("connectSocket", "(Ljava/net/Socket;Ljava/lang/String;ILjava/net/InetAddress;ILorg/apache/http/para" +
                                          "ms/HttpParams;)Ljava/net/Socket;", AccessFlags = 1025)]
        global::Java.Net.Socket ConnectSocket(global::Java.Net.Socket sock, string host, int port, global::Java.Net.InetAddress localAddress, int localPort, global::Org.Apache.Http.Params.IHttpParams @params) /* MethodBuilder.Create */ ;

        /// <summary>
        /// Checks whether a socket created by this factory provides a secure
        /// connection (no I/O is performed by this call).
        /// </summary>
        [Dot42.DexImport("isSecure", "(Ljava/net/Socket;)Z", AccessFlags = 1025)]
        bool IsSecure(global::Java.Net.Socket sock) /* MethodBuilder.Create */ ;
    }

    /// <summary>
    /// Encapsulates a protocol scheme such as "http" or "https". Schemes are
    /// identified by lowercase names and are typically collected in a
    /// SchemeRegistry.
    /// </summary>
    /// <java-name>org/apache/http/conn/scheme/Scheme</java-name>
    [Dot42.DexImport("org/apache/http/conn/scheme/Scheme", AccessFlags = 49)]
    public sealed partial class Scheme /* scope: __dot42__ */
    {
        /// <summary>
        /// Creates a new scheme; layering support depends on the runtime class
        /// of <c>factory</c>.
        /// </summary>
        [Dot42.DexImport("<init>", "(Ljava/lang/String;Lorg/apache/http/conn/scheme/SocketFactory;I)V", AccessFlags = 1)]
        public Scheme(string name, global::Org.Apache.Http.Conn.Scheme.ISocketFactory factory, int port) /* MethodBuilder.Create */
        {
        }

        /// <summary>Obtains the default port for this scheme.</summary>
        [Dot42.DexImport("getDefaultPort", "()I", AccessFlags = 17)]
        public int GetDefaultPort() /* MethodBuilder.Create */
        {
            return default(int);
        }

        /// <summary>
        /// Obtains the socket factory; if this scheme is layered, the factory
        /// implements LayeredSocketFactory.
        /// </summary>
        [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/SocketFactory;", AccessFlags = 17)]
        public global::Org.Apache.Http.Conn.Scheme.ISocketFactory GetSocketFactory() /* MethodBuilder.Create */
        {
            return default(global::Org.Apache.Http.Conn.Scheme.ISocketFactory);
        }

        /// <summary>Obtains the scheme name, in lowercase.</summary>
        [Dot42.DexImport("getName", "()Ljava/lang/String;", AccessFlags = 17)]
        public string GetName() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <summary>Indicates whether this scheme allows layered connections.</summary>
        [Dot42.DexImport("isLayered", "()Z", AccessFlags = 17)]
        public bool IsLayered() /* MethodBuilder.Create */
        {
            return default(bool);
        }

        /// <summary>
        /// Resolves the correct port: the given port if valid, otherwise the
        /// default port.
        /// </summary>
        [Dot42.DexImport("resolvePort", "(I)I", AccessFlags = 17)]
        public int ResolvePort(int port) /* MethodBuilder.Create */
        {
            return default(int);
        }

        /// <summary>Returns a human-readable description of this scheme.</summary>
        [Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 17)]
        public override string ToString() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <summary>True iff the argument is equal to this scheme.</summary>
        [Dot42.DexImport("equals", "(Ljava/lang/Object;)Z", AccessFlags = 17)]
        public override bool Equals(object obj) /* MethodBuilder.Create */
        {
            return default(bool);
        }

        /// <summary>Obtains a hash code for this scheme.</summary>
        [Dot42.DexImport("hashCode", "()I", AccessFlags = 1)]
        public override int GetHashCode() /* MethodBuilder.Create */
        {
            return default(int);
        }

        // Hidden default constructor emitted by the binding generator.
        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal Scheme() /* TypeBuilder.AddDefaultConstructor */
        {
        }

        /// <summary>Property wrapper over GetDefaultPort().</summary>
        public int DefaultPort
        {
            [Dot42.DexImport("getDefaultPort", "()I", AccessFlags = 17)]
            get{ return GetDefaultPort(); }
        }

        /// <summary>Property wrapper over GetSocketFactory().</summary>
        public global::Org.Apache.Http.Conn.Scheme.ISocketFactory SocketFactory
        {
            [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/SocketFactory;", AccessFlags = 17)]
            get{ return GetSocketFactory(); }
        }

        /// <summary>Property wrapper over GetName().</summary>
        public string Name
        {
            [Dot42.DexImport("getName", "()Ljava/lang/String;", AccessFlags = 17)]
            get{ return GetName(); }
        }
    }

    /// <summary>
    /// A SocketFactory for layered sockets (SSL/TLS).
    /// </summary>
    /// <java-name>org/apache/http/conn/scheme/LayeredSocketFactory</java-name>
    [Dot42.DexImport("org/apache/http/conn/scheme/LayeredSocketFactory", AccessFlags = 1537)]
    public partial interface ILayeredSocketFactory : global::Org.Apache.Http.Conn.Scheme.ISocketFactory /* scope: __dot42__ */
    {
        /// <summary>
        /// Returns a socket connected to the given host, layered over an
        /// existing socket (used primarily for secure sockets through proxies).
        /// </summary>
        [Dot42.DexImport("createSocket", "(Ljava/net/Socket;Ljava/lang/String;IZ)Ljava/net/Socket;", AccessFlags = 1025)]
        global::Java.Net.Socket CreateSocket(global::Java.Net.Socket socket, string host, int port, bool autoClose) /* MethodBuilder.Create */ ;
    }

    /// <summary>
    /// A set of supported protocol schemes, identified by lowercase names.
    /// </summary>
    /// <java-name>org/apache/http/conn/scheme/SchemeRegistry</java-name>
    [Dot42.DexImport("org/apache/http/conn/scheme/SchemeRegistry", AccessFlags = 49)]
    public sealed partial class SchemeRegistry /* scope: __dot42__ */
    {
        /// <summary>Creates a new, empty scheme registry.</summary>
        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public SchemeRegistry() /* MethodBuilder.Create */
        {
        }

        /// <summary>Obtains the scheme for a host name; never null.</summary>
        [Dot42.DexImport("getScheme", "(Ljava/lang/String;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
        public global::Org.Apache.Http.Conn.Scheme.Scheme GetScheme(string host) /* MethodBuilder.Create */
        {
            return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
        }

        /// <summary>
        /// Obtains the scheme for a host; convenience overload taking an
        /// HttpHost. Never null.
        /// </summary>
        [Dot42.DexImport("getScheme", "(Lorg/apache/http/HttpHost;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
        public global::Org.Apache.Http.Conn.Scheme.Scheme GetScheme(global::Org.Apache.Http.HttpHost host) /* MethodBuilder.Create */
        {
            return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
        }

        /// <summary>Obtains a scheme by name, or null if none is registered.</summary>
        [Dot42.DexImport("get", "(Ljava/lang/String;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
        public global::Org.Apache.Http.Conn.Scheme.Scheme Get(string name) /* MethodBuilder.Create */
        {
            return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
        }

        /// <summary>
        /// Registers a scheme; returns the scheme previously registered under
        /// that name, or null.
        /// </summary>
        [Dot42.DexImport("register", "(Lorg/apache/http/conn/scheme/Scheme;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
        public global::Org.Apache.Http.Conn.Scheme.Scheme Register(global::Org.Apache.Http.Conn.Scheme.Scheme sch) /* MethodBuilder.Create */
        {
            return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
        }

        /// <summary>Unregisters a scheme; returns it, or null if absent.</summary>
        [Dot42.DexImport("unregister", "(Ljava/lang/String;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
        public global::Org.Apache.Http.Conn.Scheme.Scheme Unregister(string name) /* MethodBuilder.Create */
        {
            return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
        }

        /// <summary>Obtains the registered scheme names in default order.</summary>
        [Dot42.DexImport("getSchemeNames", "()Ljava/util/List;", AccessFlags = 49, Signature = "()Ljava/util/List<Ljava/lang/String;>;")]
        public global::Java.Util.IList<string> GetSchemeNames() /* MethodBuilder.Create */
        {
            return default(global::Java.Util.IList<string>);
        }

        /// <summary>
        /// Replaces the internal collection of registered schemes with the
        /// content of the given map.
        /// </summary>
        [Dot42.DexImport("setItems", "(Ljava/util/Map;)V", AccessFlags = 33, Signature = "(Ljava/util/Map<Ljava/lang/String;Lorg/apache/http/conn/scheme/Scheme;>;)V")]
        public void SetItems(global::Java.Util.IMap<string, global::Org.Apache.Http.Conn.Scheme.Scheme> map) /* MethodBuilder.Create */
        {
        }

        /// <summary>Property wrapper over GetSchemeNames().</summary>
        public global::Java.Util.IList<string> SchemeNames
        {
            [Dot42.DexImport("getSchemeNames", "()Ljava/util/List;", AccessFlags = 49, Signature = "()Ljava/util/List<Ljava/lang/String;>;")]
            get{ return GetSchemeNames(); }
        }
    }

    /// <java-name>org/apache/http/conn/scheme/HostNameResolver</java-name>
    [Dot42.DexImport("org/apache/http/conn/scheme/HostNameResolver", AccessFlags = 1537)]
    public partial interface IHostNameResolver /* scope: __dot42__ */
    {
        /// <summary>Resolves a host name to an address.</summary>
        [Dot42.DexImport("resolve", "(Ljava/lang/String;)Ljava/net/InetAddress;", AccessFlags = 1025)]
        global::Java.Net.InetAddress Resolve(string hostname) /* MethodBuilder.Create */ ;
    }

    /// <summary>
    /// The default factory for plain (non-layered, non-secure) sockets.
    /// </summary>
    /// <java-name>org/apache/http/conn/scheme/PlainSocketFactory</java-name>
    [Dot42.DexImport("org/apache/http/conn/scheme/PlainSocketFactory", AccessFlags = 49)]
    public sealed partial class PlainSocketFactory : global::Org.Apache.Http.Conn.Scheme.ISocketFactory /* scope: __dot42__ */
    {
        [Dot42.DexImport("<init>", "(Lorg/apache/http/conn/scheme/HostNameResolver;)V", AccessFlags = 1)]
        public PlainSocketFactory(global::Org.Apache.Http.Conn.Scheme.IHostNameResolver nameResolver) /* MethodBuilder.Create */
        {
        }

        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public PlainSocketFactory() /* MethodBuilder.Create */
        {
        }

        /// <summary>Gets the singleton instance of this factory.</summary>
        [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/PlainSocketFactory;", AccessFlags = 9)]
        public static global::Org.Apache.Http.Conn.Scheme.PlainSocketFactory GetSocketFactory() /* MethodBuilder.Create */
        {
            return default(global::Org.Apache.Http.Conn.Scheme.PlainSocketFactory);
        }

        /// <summary>
        /// Creates a new, unconnected socket; pass it to ConnectSocket next.
        /// </summary>
        [Dot42.DexImport("createSocket", "()Ljava/net/Socket;", AccessFlags = 1)]
        public global::Java.Net.Socket CreateSocket() /* MethodBuilder.Create */
        {
            return default(global::Java.Net.Socket);
        }

        /// <summary>
        /// Connects a socket to the given host. The returned socket may differ
        /// from <c>sock</c> if this factory supports a layered protocol.
        /// </summary>
        [Dot42.DexImport("connectSocket", "(Ljava/net/Socket;Ljava/lang/String;ILjava/net/InetAddress;ILorg/apache/http/para" +
                                          "ms/HttpParams;)Ljava/net/Socket;", AccessFlags = 1)]
        public global::Java.Net.Socket ConnectSocket(global::Java.Net.Socket sock, string host, int port, global::Java.Net.InetAddress localAddress, int localPort, global::Org.Apache.Http.Params.IHttpParams @params) /* MethodBuilder.Create */
        {
            return default(global::Java.Net.Socket);
        }

        /// <summary>
        /// Checks whether a socket connection is secure; plain sockets created
        /// by this factory are not considered secure, so this returns false.
        /// </summary>
        [Dot42.DexImport("isSecure", "(Ljava/net/Socket;)Z", AccessFlags = 17)]
        public bool IsSecure(global::Java.Net.Socket sock) /* MethodBuilder.Create */
        {
            return default(bool);
        }

        /// <summary>
        /// Compares this factory with an object; there is only one instance of
        /// this class.
        /// </summary>
        [Dot42.DexImport("equals", "(Ljava/lang/Object;)Z", AccessFlags = 1)]
        public override bool Equals(object obj) /* MethodBuilder.Create */
        {
            return default(bool);
        }

        /// <summary>All instances of this class share the same hash code.</summary>
        [Dot42.DexImport("hashCode", "()I", AccessFlags = 1)]
        public override int GetHashCode() /* MethodBuilder.Create */
        {
            return default(int);
        }

        /// <summary>Property wrapper over the singleton GetSocketFactory().</summary>
        public static global::Org.Apache.Http.Conn.Scheme.PlainSocketFactory SocketFactory
        {
            [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/PlainSocketFactory;", AccessFlags = 9)]
            get{ return GetSocketFactory(); }
        }
    }
}
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//

namespace Microsoft.Zelig.Debugger.ArmProcessor
{
    using System;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.Data;
    using System.Drawing;
    using System.Text;
    using System.IO;
    using System.Windows.Forms;
    using System.Threading;

    using EncDef         = Microsoft.Zelig.TargetModel.ArmProcessor.EncodingDefinition;
    using InstructionSet = Microsoft.Zelig.TargetModel.ArmProcessor.InstructionSet;

    using IR  = Microsoft.Zelig.CodeGeneration.IR;
    using RT  = Microsoft.Zelig.Runtime;
    using TS  = Microsoft.Zelig.Runtime.TypeSystem;
    using Hst = Microsoft.Zelig.Emulation.Hosting;

    /// <summary>
    /// Debugger panel that shows the CPU registers of the currently selected
    /// stack frame in a tree-based grid, annotating each register with the
    /// type (and, when available, the debug name) of the variable the code
    /// generator allocated to it at the current program location.
    /// </summary>
    public partial class RegistersView : UserControl
    {
        //
        // State
        //

        // Owning main form; set once via Link() and used to reach the
        // current image, stack frame and memory state on every refresh.
        DebuggerMainForm m_owner;
        // Helper that owns the grid rows; (re)created lazily by
        // WatchHelper.Synchronize inside UpdateUI.
        WatchHelper      m_wh;

        //
        // Constructor Methods
        //

        public RegistersView()
        {
            InitializeComponent();

            // Install the standard watch-style column set on the grid.
            WatchHelper.SetColumns( treeBasedGridView_Registers );
        }

        //
        // Helper Methods
        //

        /// <summary>
        /// Attaches this view to the main form and subscribes to the hosting
        /// site events that require a refresh: any execution state change,
        /// and selection of a new stack frame.
        /// NOTE(review): handlers call UpdateUI directly — presumably the
        /// hosting site raises these on the UI thread; confirm before
        /// touching the threading model.
        /// </summary>
        public void Link( DebuggerMainForm owner )
        {
            m_owner = owner;

            m_owner.HostingSite.NotifyOnExecutionStateChange += delegate( Hst.Forms.HostingSite host, Hst.Forms.HostingSite.ExecutionState oldState, Hst.Forms.HostingSite.ExecutionState newState )
            {
                UpdateUI();

                return Hst.Forms.HostingSite.NotificationResponse.DoNothing;
            };

            m_owner.HostingSite.NotifyOnVisualizationEvent += delegate( Hst.Forms.HostingSite host, Hst.Forms.HostingSite.VisualizationEvent e )
            {
                if(e == Hst.Forms.HostingSite.VisualizationEvent.NewStackFrame)
                {
                    UpdateUI();
                }

                return Hst.Forms.HostingSite.NotificationResponse.DoNothing;
            };
        }

        /// <summary>
        /// Rebuilds the register list for the currently selected stack frame.
        /// Grid mutations are bracketed by StartTreeUpdate/EndTreeUpdate to
        /// suppress intermediate repaints; keep that pairing intact.
        /// </summary>
        private void UpdateUI()
        {
            ImageInformation imageInformation  = m_owner.ImageInformation;
            StackFrame       currentStackFrame = m_owner.SelectedStackFrame;

            //--//

            // (Re)bind the watch helper to the current memory snapshot.
            WatchHelper.Synchronize( ref m_wh, m_owner.MemoryDelta, treeBasedGridView_Registers.RootNode, false, false );

            if(m_wh != null)
            {
                m_wh.HexadecimalDisplay = toolStripMenuItem_HexDisplay.Checked;
            }

            //--//

            treeBasedGridView_Registers.StartTreeUpdate();

            treeBasedGridView_Registers.Enabled = currentStackFrame != null;

            if(currentStackFrame == null || m_owner.IsIdle == false)
            {
                // Nothing to show while running or with no frame selected.
////            DebuggerMainForm.GrayOutRowsInDataGridView( treeBasedGridView_Registers.Rows );
            }
            else
            {
                // Map each physical register to the variable currently live
                // in it at this code location, so the grid can show a
                // meaningful type/name instead of a raw word.
                IR.LowLevelVariableExpression[] array = imageInformation.AliveVariables( currentStackFrame.Region, currentStackFrame.RegionOffset );
                var ht = HashTableFactory.NewWithReferenceEquality< IR.Abstractions.RegisterDescriptor, IR.PhysicalRegisterExpression >();

                foreach(IR.LowLevelVariableExpression var in array)
                {
                    IR.PhysicalRegisterExpression varReg = var as IR.PhysicalRegisterExpression;

                    if(varReg != null)
                    {
                        ht[varReg.RegisterDescriptor] = varReg;
                    }
                }

                var regCtx = currentStackFrame.RegisterContext;
                var lst    = new List< WatchHelper.ItemDescriptor >();

                foreach(IR.Abstractions.RegisterDescriptor regDesc in imageInformation.TypeSystem.PlatformAbstraction.GetRegisters())
                {
                    TS.TypeRepresentation registerType   = null;
                    string                typeDescriptor = null;

                    //--//

                    IR.PhysicalRegisterExpression varReg;

                    if(ht.TryGetValue( regDesc, out varReg ))
                    {
                        // A variable is live in this register: display its
                        // type and, when present, its source-level name.
                        var varType = varReg.Type;

                        registerType = varType;

                        string typeName = varType.FullNameWithAbbreviation;

                        IR.VariableExpression varSrc = varReg.SourceVariable;

                        if(varSrc != null && varSrc.DebugName != null)
                        {
                            typeDescriptor = string.Format( "{0} {1}", typeName, varSrc.DebugName.Name );
                        }
                        else
                        {
                            typeDescriptor = string.Format( "{0}", typeName );
                        }
                    }
                    else
                    {
                        // No live variable: pick a raw display type from the
                        // register file and its physical width (size appears
                        // to be in words — TODO confirm against
                        // RegisterDescriptor.PhysicalStorageSize semantics).
                        var wkt = imageInformation.TypeSystem.WellKnownTypes;

                        if(regDesc.InIntegerRegisterFile)
                        {
                            switch(regDesc.PhysicalStorageSize)
                            {
                                case 1: registerType = wkt.System_UInt32; break;
                                case 2: registerType = wkt.System_UInt64; break;
                            }
                        }
                        else if(regDesc.InFloatingPointRegisterFile)
                        {
                            switch(regDesc.PhysicalStorageSize)
                            {
                                case 1: registerType = wkt.System_Single; break;
                                case 2: registerType = wkt.System_Double; break;
                            }
                        }

                        if(registerType == null)
                        {
                            // Fallback for unrecognized sizes/files.
                            registerType = wkt.System_UInt32;
                        }
                    }

                    //--//

                    var valueHandle = new RegisterValueHandle( regCtx.GetValue( regDesc ), registerType, true );
                    var item        = new WatchHelper.ItemDescriptor( m_wh, regDesc.Mnemonic, registerType, valueHandle, typeDescriptor );

                    lst.Add( item );
                }

                m_wh.Update( lst, false );
            }

            treeBasedGridView_Registers.EndTreeUpdate();
        }

        //
        // Event Methods
        //

        // Right-click on a grid node pops up the context menu.
        private void treeBasedGridView_Registers_CellMouseClick( object sender, TreeBasedGridView.NodeMouseEventArgs e )
        {
            if(e.Button == MouseButtons.Right)
            {
                contextMenuStrip1.Show( treeBasedGridView_Registers, e.Location );
            }
        }

        // Propagate the hex/decimal toggle to the watch helper. Only applied
        // while the grid is enabled (i.e. a frame is selected); UpdateUI
        // re-applies it on the next refresh otherwise.
        private void toolStripMenuItem_HexDisplay_CheckedChanged( object sender, EventArgs e )
        {
            if(treeBasedGridView_Registers.Enabled)
            {
                if(m_wh != null)
                {
                    m_wh.HexadecimalDisplay = toolStripMenuItem_HexDisplay.Checked;
                }
            }
        }

        // Clicking the menu item flips its checked state, which in turn
        // fires CheckedChanged above.
        private void toolStripMenuItem_HexDisplay_Click( object sender, EventArgs e )
        {
            toolStripMenuItem_HexDisplay.Checked = !toolStripMenuItem_HexDisplay.Checked;
        }
    }
}
// // System.Web.Services.Description.BasicProfileChecker.cs // // Author: // Lluis Sanchez (lluis@novell.com) // // Copyright (C) Novell, Inc., 2004 // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

#if NET_2_0

using System.Xml.Schema;
using System.Xml;
using System.Collections;

namespace System.Web.Services.Description 
{
	/// <summary>
	/// Checks a WSDL service description for conformance with the
	/// WS-I Basic Profile 1.0. Each Check overload inspects one kind of
	/// WSDL/schema node and reports rule violations through the context.
	/// </summary>
	internal class BasicProfileChecker: ConformanceChecker
	{
		public static BasicProfileChecker Instance = new BasicProfileChecker ();
		
		public override WsiClaims Claims {
			get { return WsiClaims.BP10; }
		}
		
		// R2007/R2002/R2001/R2005: wsdl:import must carry a location, point
		// to another WSDL document (not a schema), and the imported target
		// namespace must match the import's namespace attribute.
		public override void Check (ConformanceCheckContext ctx, Import value) 
		{
			if (value.Location == "" || value.Location == null) {
				ctx.ReportRuleViolation (value, BasicProfileRules.R2007);
				return;
			}
			
			object doc = ctx.GetDocument (value.Location);
			if (doc == null) ctx.ReportError (value, "Document '" + value.Location + "' not found");
			
			if (doc is XmlSchema)
				ctx.ReportRuleViolation (value, BasicProfileRules.R2002);
				
			ServiceDescription imported = doc as ServiceDescription;
			if (imported == null) {
				ctx.ReportRuleViolation (value, BasicProfileRules.R2001);
				return;
			}
				
			// TODO: rule R2003
			
			if (imported.TargetNamespace != value.Namespace)
				ctx.ReportRuleViolation (value, BasicProfileRules.R2005);
		}
		
		public override void Check (ConformanceCheckContext ctx, ServiceDescription value)
		{
		}
		
		// R2026: extensions must not be marked wsdl:required="true".
		public override void Check (ConformanceCheckContext ctx, ServiceDescriptionFormatExtension value)
		{
			if (value.Required)
				ctx.ReportRuleViolation (value, BasicProfileRules.R2026);
		}
		
		// R2206: an element-defined part must not point into the XML Schema
		// namespace itself. Also validates both QName references (R2101).
		public override void Check (ConformanceCheckContext ctx, MessagePart value)
		{
			CheckWsdlQName (ctx, value, value.Type);
			CheckWsdlQName (ctx, value, value.Element);
			
			if (value.DefinedByElement && value.Element.Namespace == XmlSchema.Namespace)
				ctx.ReportRuleViolation (value, BasicProfileRules.R2206);
		}
		
		public override void Check (ConformanceCheckContext ctx, Types value)
		{
		}
		
		public override void Check (ConformanceCheckContext ctx, Message value)
		{
			// TODO: R2113
		}
		
		// Binding-level checks: SOAP/HTTP transport (R2701/R2702), literal
		// use (R2705/R2706), and R2209 (every abstract part should be bound).
		public override void Check (ConformanceCheckContext ctx, Binding value)
		{
			SoapBinding sb = (SoapBinding) value.Extensions.Find (typeof(SoapBinding));
			if (sb == null || sb.Transport == null || sb.Transport == "") {
				ctx.ReportRuleViolation (value, BasicProfileRules.R2701);
				return;
			}
			
			if (sb.Transport != "http://schemas.xmlsoap.org/soap/http")
				ctx.ReportRuleViolation (value, BasicProfileRules.R2702);
			
			LiteralType type = GetLiteralBindingType (value);
			if (type == LiteralType.NotLiteral)
				ctx.ReportRuleViolation (value, BasicProfileRules.R2706);
			else if (type == LiteralType.Inconsistent)
				ctx.ReportRuleViolation (value, BasicProfileRules.R2705);
			
			// Collect all parts referenced from this port type. Use the
			// indexer rather than Hashtable.Add: several operations may
			// reference the same wsdl:message (hence the same MessagePart
			// instances), and Add would throw on the duplicate key.
			Hashtable parts = new Hashtable ();
			PortType port = ctx.Services.GetPortType (value.Type);
			foreach (Operation op in port.Operations) {
				foreach (OperationMessage om in op.Messages) {
					Message msg = ctx.Services.GetMessage (om.Message);
					foreach (MessagePart part in msg.Parts)
						parts [part] = part;
				}
			}
			
			// CheckMessageBinding removes every part it finds bound; whatever
			// is left over was never bound anywhere (R2209).
			foreach (OperationBinding ob in value.Operations) {
				if (ob.Input != null) CheckMessageBinding (ctx, parts, ob.Input);
				if (ob.Output != null) CheckMessageBinding (ctx, parts, ob.Output);
				foreach (FaultBinding fb in ob.Faults)
					CheckMessageBinding (ctx, parts, fb);
			}
			
			if (parts.Count > 0)
				ctx.ReportRuleViolation (value, BasicProfileRules.R2209);
		}
		
		public override void Check (ConformanceCheckContext ctx, OperationBinding ob)
		{
		}
		
		// Checks one input/output/fault binding against the document- or
		// rpc-literal part rules (R2201, R2210, R2203, R2204, R2205) and
		// removes each bound part from portParts (for R2209 in the caller).
		void CheckMessageBinding (ConformanceCheckContext ctx, Hashtable portParts, MessageBinding value)
		{
			SoapBodyBinding sbb = (SoapBodyBinding) value.Extensions.Find (typeof(SoapBodyBinding));
			Message msg = FindMessage (ctx, value);
			LiteralType bt = GetLiteralBindingType (value.OperationBinding.Binding);
			
			if (sbb != null)
			{
				if (bt == LiteralType.Document)
				{
					// R2201: at most one part listed in soapbind:body/@parts.
					if (sbb.Parts != null && sbb.Parts.Length > 1)
						ctx.ReportRuleViolation (value, BasicProfileRules.R2201);
					
					if (sbb.Parts == null)
					{
						// R2210: with no parts attribute, the abstract
						// message must define zero or one part.
						if (msg.Parts != null && msg.Parts.Count > 1)
							ctx.ReportRuleViolation (value, BasicProfileRules.R2210);
						if (msg.Parts != null && msg.Parts.Count == 1)
							portParts.Remove (msg.Parts [0]);
					}
					else
					{
						if (sbb.Parts.Length == 0 && msg.Parts.Count == 1)
						{
							portParts.Remove (msg.Parts [0]);
						}
						else
						{
							foreach (string part in sbb.Parts)
							{
								MessagePart mp = msg.FindPartByName (part);
								if (mp == null)
									continue; // part name not defined in the abstract message
								portParts.Remove (mp);
								// R2204: document-literal parts must use the
								// element attribute.
								if (!mp.DefinedByElement)
									ctx.ReportRuleViolation (value, BasicProfileRules.R2204);
							}
						}
					}
				}
				else if (bt == LiteralType.Rpc)
				{
					if (sbb.Parts != null)
					{
						foreach (string part in sbb.Parts)
						{
							MessagePart mp = msg.FindPartByName (part);
							if (mp == null)
								continue; // part name not defined in the abstract message
							portParts.Remove (mp);
							// R2203: rpc-literal parts must use the type attribute.
							if (!mp.DefinedByType)
								ctx.ReportRuleViolation (value, BasicProfileRules.R2203);
						}
					}
				}
			}
			
			// R2205: header, headerfault and fault parts must be defined by
			// element. The null check must come before Remove: passing null
			// to Hashtable.Remove throws ArgumentNullException.
			SoapHeaderBinding shb = (SoapHeaderBinding) value.Extensions.Find (typeof(SoapHeaderBinding));
			if (shb != null)
			{
				Message hm = ctx.Services.GetMessage (shb.Message);
				MessagePart mp = hm.FindPartByName (shb.Part);
				if (mp != null)
				{
					portParts.Remove (mp);
					if (!mp.DefinedByElement)
						ctx.ReportRuleViolation (value, BasicProfileRules.R2205);
				}
			}
			
			SoapHeaderFaultBinding shfb = (SoapHeaderFaultBinding) value.Extensions.Find (typeof(SoapHeaderFaultBinding));
			if (shfb != null)
			{
				Message hm = ctx.Services.GetMessage (shfb.Message);
				MessagePart mp = hm.FindPartByName (shfb.Part);
				if (mp != null)
				{
					portParts.Remove (mp);
					if (!mp.DefinedByElement)
						ctx.ReportRuleViolation (value, BasicProfileRules.R2205);
				}
			}
			
			// TODO: SoapFaultBinding ??
		}
		
		// Resolves the abstract wsdl:message that corresponds to an
		// input/output/fault binding, by locating the bound operation in
		// the port type. Returns null if it cannot be resolved.
		Message FindMessage (ConformanceCheckContext ctx, MessageBinding mb)
		{
			PortType pt = ctx.Services.GetPortType (mb.OperationBinding.Binding.Type);
			foreach (Operation op in pt.Operations)
				if (op.IsBoundBy (mb.OperationBinding)) {
					OperationMessage om;
					if (mb is InputBinding) om = op.Messages.Input;
					else if (mb is OutputBinding) om = op.Messages.Output;
					else if (mb is FaultBinding) om = op.Messages.Fault;
					else return null;
					return ctx.Services.GetMessage (om.Message);
				}
			return null;
		}
		
		public override void Check (ConformanceCheckContext ctx, Operation value) { }
		public override void Check (ConformanceCheckContext ctx, OperationMessage value) { }
		public override void Check (ConformanceCheckContext ctx, Port value) { }
		public override void Check (ConformanceCheckContext ctx, PortType value) { }
		public override void Check (ConformanceCheckContext ctx, Service value) { }
		
		// R2105: a schema without a target namespace may only contain
		// imports and annotations.
		public override void Check (ConformanceCheckContext ctx, XmlSchema s)
		{
			if (s.TargetNamespace == null || s.TargetNamespace == "") {
				foreach (XmlSchemaObject ob in s.Items)
					if (!(ob is XmlSchemaImport) && !(ob is XmlSchemaAnnotation)) {
						ctx.ReportRuleViolation (s, BasicProfileRules.R2105);
						break;
					}
			}
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaImport value)
		{
			XmlSchema doc = ctx.GetDocument (value.SchemaLocation) as XmlSchema;
			if (doc == null) ctx.ReportError (value, "Schema '" + value.SchemaLocation + "' not found");
		}
		
		// R2111: no wsdl:arrayType attribute in type declarations.
		public override void Check (ConformanceCheckContext ctx, XmlSchemaAttribute value)
		{
			CheckSchemaQName (ctx, value, value.RefName);
			CheckSchemaQName (ctx, value, value.SchemaTypeName);
			
			XmlAttribute[] uatts = value.UnhandledAttributes;
			if (uatts != null) {
				foreach (XmlAttribute at in uatts)
					if (at.LocalName == "arrayType" && at.NamespaceURI == "http://schemas.xmlsoap.org/wsdl/")
						ctx.ReportRuleViolation (value, BasicProfileRules.R2111);
			}
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaAttributeGroupRef value)
		{
			CheckSchemaQName (ctx, value, value.RefName);
		}
		
		// R2110: arrays must not extend or restrict soapenc:Array.
		public override void Check (ConformanceCheckContext ctx, XmlSchemaComplexContentExtension value)
		{
			CheckSchemaQName (ctx, value, value.BaseTypeName);
			if (value.BaseTypeName.Namespace == "http://schemas.xmlsoap.org/soap/encoding/" && value.BaseTypeName.Name == "Array")
				ctx.ReportRuleViolation (value, BasicProfileRules.R2110);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaComplexContentRestriction value)
		{
			CheckSchemaQName (ctx, value, value.BaseTypeName);
			if (value.BaseTypeName.Namespace == "http://schemas.xmlsoap.org/soap/encoding/" && value.BaseTypeName.Name == "Array")
				ctx.ReportRuleViolation (value, BasicProfileRules.R2110);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaElement value)
		{
			CheckSchemaQName (ctx, value, value.RefName);
			CheckSchemaQName (ctx, value, value.SubstitutionGroup);
			CheckSchemaQName (ctx, value, value.SchemaTypeName);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaGroupRef value)
		{
			CheckSchemaQName (ctx, value, value.RefName);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaKeyref value)
		{
			CheckSchemaQName (ctx, value, value.Refer);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaSimpleContentExtension value)
		{
			CheckSchemaQName (ctx, value, value.BaseTypeName);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaSimpleContentRestriction value)
		{
			CheckSchemaQName (ctx, value, value.BaseTypeName);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaSimpleTypeList value)
		{
			CheckSchemaQName (ctx, value, value.ItemTypeName);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaSimpleTypeRestriction value)
		{
			CheckSchemaQName (ctx, value, value.BaseTypeName);
		}
		
		public override void Check (ConformanceCheckContext ctx, XmlSchemaSimpleTypeUnion value)
		{
			foreach (XmlQualifiedName name in value.MemberTypes)
				CheckSchemaQName (ctx, value, name);
		}
		
		// Helper methods
		
		// R2101: a QName used from WSDL must live in a namespace that is
		// either defined by or imported into one of the embedded schemas.
		void CheckWsdlQName (ConformanceCheckContext ctx, object element, XmlQualifiedName name)
		{
			if (name == null || name == XmlQualifiedName.Empty) return;
			if (name.Namespace == "" || name.Namespace == XmlSchema.Namespace) return;
			
			if (ctx.ServiceDescription.Types != null && ctx.ServiceDescription.Types.Schemas != null)
			{
				foreach (XmlSchema s in ctx.ServiceDescription.Types.Schemas)
				{
					if (s.TargetNamespace == name.Namespace) return;
					foreach (XmlSchemaObject i in s.Includes)
						if ((i is XmlSchemaImport) && ((XmlSchemaImport)i).Namespace == name.Namespace) return;
				}
			}
			ctx.ReportRuleViolation (element, BasicProfileRules.R2101);
		}
		
		// R2102: a QName used from a schema must be in the schema's target
		// namespace or in a namespace it imports.
		void CheckSchemaQName (ConformanceCheckContext ctx, object element, XmlQualifiedName name)
		{
			if (name == null || name == XmlQualifiedName.Empty) return;
			if (name.Namespace == "" || name.Namespace == XmlSchema.Namespace) return;
			if (ctx.CurrentSchema.TargetNamespace == name.Namespace) return;
			
			foreach (XmlSchemaObject i in ctx.CurrentSchema.Includes)
				if ((i is XmlSchemaImport) && ((XmlSchemaImport)i).Namespace == name.Namespace) return;
			
			ctx.ReportRuleViolation (element, BasicProfileRules.R2102);
		}
		
		// Returns true if every soap body/fault/header/headerfault extension
		// on the given message binding either omits the use attribute or
		// specifies "literal".
		static bool IsLiteralMessage (MessageBinding mb)
		{
			SoapBodyBinding sbb = (SoapBodyBinding) mb.Extensions.Find (typeof(SoapBodyBinding));
			if (sbb != null && sbb.Use != SoapBindingUse.Literal)
				return false;
			
			SoapFaultBinding sfb = (SoapFaultBinding) mb.Extensions.Find (typeof(SoapFaultBinding));
			if (sfb != null && sfb.Use != SoapBindingUse.Literal)
				return false;
			
			SoapHeaderBinding shb = (SoapHeaderBinding) mb.Extensions.Find (typeof(SoapHeaderBinding));
			if (shb != null && shb.Use != SoapBindingUse.Literal)
				return false;
			
			SoapHeaderFaultBinding shfb = (SoapHeaderFaultBinding) mb.Extensions.Find (typeof(SoapHeaderFaultBinding));
			if (shfb != null && shfb.Use != SoapBindingUse.Literal)
				return false;
			
			return true;
		}
		
		// Classifies a binding as document-literal, rpc-literal, not literal
		// at all, or inconsistent (mixed styles across operations).
		// Fix: the original checked ob.Input's extensions in the ob.Output
		// branch (copy-paste), so non-literal use on the output message was
		// never detected; both directions now go through IsLiteralMessage.
		LiteralType GetLiteralBindingType (Binding b)
		{
			SoapBinding sb = (SoapBinding) b.Extensions.Find (typeof(SoapBinding));
			SoapBindingStyle style = (sb != null) ? sb.Style : SoapBindingStyle.Document;
			if (style == SoapBindingStyle.Default) style = SoapBindingStyle.Document;
			
			foreach (OperationBinding ob in b.Operations)
			{
				SoapOperationBinding sob = (SoapOperationBinding) ob.Extensions.Find (typeof(SoapOperationBinding));
				// Guard against a missing soap:operation element; treat it
				// as inheriting the binding-level style.
				if (sob != null && sob.Style != SoapBindingStyle.Default && sob.Style != style)
					return LiteralType.Inconsistent;
				
				if (ob.Input != null && !IsLiteralMessage (ob.Input))
					return LiteralType.NotLiteral;
				
				if (ob.Output != null && !IsLiteralMessage (ob.Output))
					return LiteralType.NotLiteral;
			}
			
			if (style == SoapBindingStyle.Document) return LiteralType.Document;
			else return LiteralType.Rpc;
		}
		
		enum LiteralType
		{
			NotLiteral,
			Inconsistent,
			Rpc,
			Document
		}
	}
	
	internal class BasicProfileRules
	{
		// 3.2 Conformance of Services, Consumers and Registries
		// Can't check: R0001
		
		// 3.3 Conformance Annotation in Descriptions
		// Can't check: R0002, R0003
		
		// 3.4 Conformance Annotation in Messages
		// Can't check: R0004, R0005, R0006, R0007
		
		// 3.5 Conformance Annotation in Registry Data
		// UDDI related: R3020, R3030, R3021, R3005, R3004.
		
		// 4.1 XML Representation of SOAP Messages
		// Rules not related to service description
		
		// 4.2 SOAP Processing Model
		// Rules not related to service description
		
		// 4.3 Use of SOAP in HTTP
		// Rules not related to service description
		
		// 5.1 Document structure
		
		public static readonly ConformanceRule R2001 = new ConformanceRule (
			"R2001", 
			"A DESCRIPTION MUST only use the WSDL \"import\" statement to import another WSDL description",
			"");
			
		public static readonly ConformanceRule R2002 = new ConformanceRule (
			"R2002", 
			"To import XML Schema Definitions, a DESCRIPTION MUST use the XML Schema \"import\" statement",
			"");
			
		public static readonly ConformanceRule R2007 = new ConformanceRule (
			"R2007", 
			"A DESCRIPTION MUST specify a non-empty location attribute on the wsdl:import element",
			"");
			
		public static readonly ConformanceRule R2005 = new ConformanceRule (
			"R2005", 
			"The targetNamespace attribute on the wsdl:definitions element of a description that is being imported MUST have same the value as the namespace attribute on the wsdl:import element in the importing DESCRIPTION",
			"");
			
		public static readonly ConformanceRule R2026 = new ConformanceRule (
			"R2026", 
			"A DESCRIPTION SHOULD NOT include extension elements with a wsdl:required attribute value of \"true\" on any WSDL construct (wsdl:binding, wsdl:portType, wsdl:message, wsdl:types or wsdl:import) that claims conformance to the Profile",
			"");
			
		// 5.2 Types
		
		public static readonly ConformanceRule R2101 = new ConformanceRule (
			"R2101", 
			"A DESCRIPTION MUST NOT use QName references to elements in namespaces that have been neither imported, nor defined in the referring WSDL document",
			"");
			
		public static readonly ConformanceRule R2102 = new ConformanceRule (
			"R2102", 
			"A QName reference to a Schema component in a DESCRIPTION MUST use the namespace defined in the targetNamespace attribute on the xsd:schema element, or to a namespace defined in the namespace attribute on an xsd:import element within the xsd:schema element",
			"");
			
		public static readonly ConformanceRule R2105 = new ConformanceRule (
			"R2105", 
			"All xsd:schema elements contained in a wsdl:types element of a DESCRIPTION MUST have a targetNamespace attribute with a valid and non-null value, UNLESS the xsd:schema element has xsd:import and/or xsd:annotation as its only child element(s)",
			"");
			
		public static readonly ConformanceRule R2110 = new ConformanceRule (
			"R2110", 
			"In a DESCRIPTION, array declarations MUST NOT extend or restrict the soapenc:Array type",
			"");
			
		public static readonly ConformanceRule R2111 = new ConformanceRule (
			"R2111", 
			"In a DESCRIPTION, array declarations MUST NOT use wsdl:arrayType attribute in the type declaration",
			"");
			
		// R2112: Suggestion.
		// R2113: Not related to servide description
		// R2114: Suggestion.
		
		// 5.3 Messages
		
		public static readonly ConformanceRule R2201 = new ConformanceRule (
			"R2201", 
			"A document-literal binding in a DESCRIPTION MUST, in each of its soapbind:body element(s), have at most one part listed in the parts attribute, if the parts attribute is specified",
			"");
			
		public static readonly ConformanceRule R2210 = new ConformanceRule (
			"R2210", 
			"If a document-literal binding in a DESCRIPTION does not specify the parts attribute on a soapbind:body element, the corresponding abstract wsdl:message MUST define zero or one wsdl:parts",
			"");
			
		public static readonly ConformanceRule R2203 = new ConformanceRule (
			"R2203", 
			"An rpc-literal binding in a DESCRIPTION MUST refer, in its soapbind:body element(s), only to wsdl:part element(s) that have been defined using the type attribute",
			"");
			
		public static readonly ConformanceRule R2204 = new ConformanceRule (
			"R2204", 
			"A document-literal binding in a DESCRIPTION MUST refer, in each of its soapbind:body element(s), only to wsdl:part element(s) that have been defined using the element attribute",
			"");
			
		public static readonly ConformanceRule R2205 = new ConformanceRule (
			"R2205", 
			"A wsdl:binding in a DESCRIPTION MUST refer, in each of its soapbind:header, soapbind:headerfault and soapbind:fault elements, only to wsdl:part element(s) that have been defined using the element attribute",
			"");
			
		public static readonly ConformanceRule R2209 = new ConformanceRule (
			"R2209", 
			"A wsdl:binding in a DESCRIPTION SHOULD bind every wsdl:part of a wsdl:message in the wsdl:portType to which it refers to one of soapbind:body, soapbind:header, soapbind:fault or soapbind:headerfault",
			"");
			
		public static readonly ConformanceRule R2206 = new ConformanceRule (
			"R2206", 
			"A wsdl:message in a DESCRIPTION containing a wsdl:part that uses the element attribute MUST refer, in that attribute, to a global element declaration",
			"");
			
		// R2211: Related to message structure
		// R2202: Suggestion.
		// R2207: Optional
		// R2208: Optional
		
		// 5.4 Port Types
		// TODO
		
		// 5.5 Bindings
		// TODO
		
		// 5.6 SOAP Binding
		
		public static readonly ConformanceRule R2701 = new ConformanceRule (
			"R2701", 
			"The wsdl:binding element in a DESCRIPTION MUST be constructed so that its soapbind:binding child element specifies the transport attribute",
			"");
			
		public static readonly ConformanceRule R2702 = new ConformanceRule (
			"R2702", 
			"A wsdl:binding element in a DESCRIPTION MUST specify the HTTP transport protocol with SOAP binding. Specifically, the transport attribute of its soapbind:binding child MUST have the value \"http://schemas.xmlsoap.org/soap/http\"",
			"");
			
		public static readonly ConformanceRule R2705 = new ConformanceRule (
			"R2705", 
			"A wsdl:binding in a DESCRIPTION MUST use either be a rpc-literal binding or a document-literal binding",
			"");
			
		public static readonly ConformanceRule R2706 = new ConformanceRule (
			"R2706", 
			"A wsdl:binding in a DESCRIPTION MUST use the value of \"literal\" for the use attribute in all soapbind:body, soapbind:fault, soapbind:header and soapbind:headerfault elements",
			"");
			
		// R2707: Interpretation rule: A wsdl:binding in a DESCRIPTION that contains one or more soapbind:body, soapbind:fault, soapbind:header or soapbind:headerfault elements that do not specify the use attribute MUST be interpreted as though the value "literal" had been specified in each case
		// R2709: Suggestion.
		
		// TODO
	}
	
	/*
		The following rules cannot be checked:
		R2002, R2003, R4004, R4003, R2022, R2023, R2004, R2010, R2011
		There is no access to the underlying xml
		
		The following are suggestions:
		R2008, R2112
		
		The following are optional:
		R4002, R2020, R2021, R2024, R2114
		
		Can't be checked:
		R2025
		
		Process related:
		R2027
		
		TODO: section 5.3
	*/
}

#endif
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

using System;
using System.Runtime.Remoting;
using System.Runtime.Remoting.Lifetime;
using System.Security.Permissions;
using System.Threading;
using System.Reflection;
using System.Collections;
using System.Collections.Generic;
using OpenSim.Region.ScriptEngine.Interfaces;
using OpenSim.Region.ScriptEngine.Shared;
using OpenSim.Region.ScriptEngine.Shared.Api.Runtime;

namespace OpenSim.Region.ScriptEngine.Shared.ScriptBase
{
    /// <summary>
    /// Base class for compiled scripts. Discovers the script API
    /// initializer methods ("ApiType*") via reflection, dispatches events
    /// through an Executor, and can snapshot/restore the script's global
    /// variables (used to implement state reset).
    /// </summary>
    public partial class ScriptBaseClass : MarshalByRefObject, IScript
    {
        // Map of API name (e.g. "LSL") -> the ApiType<Name> initializer
        // method found on this type; filled in the constructor.
        private Dictionary<string, MethodInfo> inits = new Dictionary<string, MethodInfo>();

        // private ScriptSponsor m_sponser;

        // Gives this remoting object an infinite lease (InitialLeaseTime of
        // zero means "never expires"), so the script proxy is not collected
        // while the region is running.
        public override Object InitializeLifetimeService()
        {
            ILease lease = (ILease)base.InitializeLifetimeService();

            if (lease.CurrentState == LeaseState.Initial)
            {
                // Infinite
                lease.InitialLeaseTime = TimeSpan.FromMinutes(0);
//                lease.RenewOnCallTime = TimeSpan.FromSeconds(10.0);
//                lease.SponsorshipTimeout = TimeSpan.FromMinutes(1.0);
            }
            return lease;
        }

#if DEBUG
        // For tracing GC while debugging: the finalizer flips this flag so a
        // test/debug session can observe that an instance was collected.
        public static bool GCDummy = false;

        ~ScriptBaseClass()
        {
            GCDummy = true;
        }
#endif

        // Scans the public instance methods for "ApiType<Name>" initializers
        // and records them keyed by <Name>, for later use by InitApi.
        public ScriptBaseClass()
        {
            m_Executor = new Executor(this);

            MethodInfo[] myArrayMethodInfo = GetType().GetMethods(BindingFlags.Public | BindingFlags.Instance);

            foreach (MethodInfo mi in myArrayMethodInfo)
            {
                // "ApiType" is 7 characters; anything after it is the API name.
                if (mi.Name.Length > 7 && mi.Name.Substring(0, 7) == "ApiType")
                {
                    string type = mi.Name.Substring(7);
                    inits[type] = mi;
                }
            }

            // m_sponser = new ScriptSponsor();
        }

        // Dispatches script execution; created in the constructor.
        private Executor m_Executor = null;

        // Returns the event flags for the given script state, as computed by
        // the executor.
        public int GetStateEventFlags(string state)
        {
            return (int)m_Executor.GetStateEventFlags(state);
        }

        // Runs the named event handler for the given state with the supplied
        // arguments.
        public void ExecuteEvent(string state, string FunctionName, object[] args)
        {
            m_Executor.ExecuteEvent(state, FunctionName, args);
        }

        // Names of all APIs discovered by the constructor scan.
        public string[] GetApis()
        {
            string[] apis = new string[inits.Count];
            inits.Keys.CopyTo(apis, 0);
            return apis;
        }

        // Snapshot of the script globals taken right after API init; used by
        // ResetVars to restore initial state.
        private Dictionary<string, object> m_InitialValues = new Dictionary<string, object>();
        // Field cache populated by GetVars and consumed by SetVars.
        private Dictionary<string, FieldInfo> m_Fields = new Dictionary<string, FieldInfo>();

        // Invokes the matching ApiType<api> initializer with the supplied
        // API implementation, then snapshots the globals as the script's
        // initial values. Unknown API names are silently ignored.
        public void InitApi(string api, IScriptApi data)
        {
            if (!inits.ContainsKey(api))
                return;

            //ILease lease = (ILease)RemotingServices.GetLifetimeService(data as MarshalByRefObject);
            //RemotingServices.GetLifetimeService(data as MarshalByRefObject);
//            lease.Register(m_sponser);

            MethodInfo mi = inits[api];

            Object[] args = new Object[1];
            args[0] = data;

            mi.Invoke(this, args);

            m_InitialValues = GetVars();
        }

        public virtual void StateChange(string newState)
        {
        }

        public void Close()
        {
//            m_sponser.Close();
        }

        // Collects the script's own (declared-only) instance fields of
        // LSL/primitive types into a name->value dictionary, refreshing the
        // m_Fields cache as a side effect. Lists are deep-copied so the
        // snapshot cannot be mutated through the live script state.
        public Dictionary<string, object> GetVars()
        {
            Dictionary<string, object> vars = new Dictionary<string, object>();

            if (m_Fields == null)
                return vars;

            m_Fields.Clear();

            Type t = GetType();

            FieldInfo[] fields = t.GetFields(BindingFlags.NonPublic |
                                             BindingFlags.Public |
                                             BindingFlags.Instance |
                                             BindingFlags.DeclaredOnly);

            foreach (FieldInfo field in fields)
            {
                m_Fields[field.Name] = field;

                if (field.FieldType == typeof(LSL_Types.list)) // ref type, copy
                {
                    LSL_Types.list v = (LSL_Types.list)field.GetValue(this);
                    Object[] data = new Object[v.Data.Length];
                    Array.Copy(v.Data, 0, data, 0, v.Data.Length);
                    LSL_Types.list c = new LSL_Types.list();
                    c.Data = data;
                    vars[field.Name] = c;
                }
                else if (field.FieldType == typeof(LSL_Types.LSLInteger) ||
                        field.FieldType == typeof(LSL_Types.LSLString) ||
                        field.FieldType == typeof(LSL_Types.LSLFloat) ||
                        field.FieldType == typeof(Int32) ||
                        field.FieldType == typeof(Double) ||
                        field.FieldType == typeof(Single) ||
                        field.FieldType == typeof(String) ||
                        field.FieldType == typeof(Byte) ||
                        field.FieldType == typeof(short) ||
                        field.FieldType == typeof(LSL_Types.Vector3) ||
                        field.FieldType == typeof(LSL_Types.Quaternion))
                {
                    vars[field.Name] = field.GetValue(this);
                }
            }

            return vars;
        }

        // Restores script fields from a dictionary produced by GetVars.
        // Relies on the m_Fields cache, so GetVars must have run first
        // (InitApi guarantees this for the normal lifecycle). List contents
        // are copied into the existing list instance rather than replacing it.
        public void SetVars(Dictionary<string, object> vars)
        {
            foreach (KeyValuePair<string, object> var in vars)
            {
                if (m_Fields.ContainsKey(var.Key))
                {
                    if (m_Fields[var.Key].FieldType == typeof(LSL_Types.list))
                    {
                        LSL_Types.list v = (LSL_Types.list)m_Fields[var.Key].GetValue(this);
                        Object[] data = ((LSL_Types.list)var.Value).Data;
                        v.Data = new Object[data.Length];
                        Array.Copy(data, 0, v.Data, 0, data.Length);
                        m_Fields[var.Key].SetValue(this, v);
                    }
                    else if (m_Fields[var.Key].FieldType == typeof(LSL_Types.LSLInteger) ||
                            m_Fields[var.Key].FieldType == typeof(LSL_Types.LSLString) ||
                            m_Fields[var.Key].FieldType == typeof(LSL_Types.LSLFloat) ||
                            m_Fields[var.Key].FieldType == typeof(Int32) ||
                            m_Fields[var.Key].FieldType == typeof(Double) ||
                            m_Fields[var.Key].FieldType == typeof(Single) ||
                            m_Fields[var.Key].FieldType == typeof(String) ||
                            m_Fields[var.Key].FieldType == typeof(Byte) ||
                            m_Fields[var.Key].FieldType == typeof(short) ||
                            m_Fields[var.Key].FieldType == typeof(LSL_Types.Vector3) ||
                            m_Fields[var.Key].FieldType == typeof(LSL_Types.Quaternion)
                        )
                    {
                        m_Fields[var.Key].SetValue(this, var.Value);
                    }
                }
            }
        }

        // Restores the globals snapshot taken at InitApi time.
        public void ResetVars()
        {
            SetVars(m_InitialValues);
        }

        public void NoOp()
        {
            // Does what is says on the packet. Nowt, nada, nothing.
            // Required for insertion after a jump label to do what it says on the packet!
            // With a bit of luck the compiler may even optimize it out.
        }
    }
}
using System.CodeDom.Compiler;
using Android.Runtime;

#pragma warning disable 1591
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

// NOTE(review): Xamarin.Android generated resource designer — do NOT hand-edit.
// Any change here is overwritten the next time the project's resources are rebuilt;
// add/rename resources in the Resources/ folder instead and let the build regenerate this.

[assembly: ResourceDesigner("ScratchingPost.Droid.Resource", IsApplication=true)]

namespace ScratchingPost.Droid
{
    [GeneratedCode("Xamarin.Android.Build.Tasks", "1.0.0.0")]
    public partial class Resource
    {
        static Resource()
        {
            ResourceIdManager.UpdateIdValues();
        }

        // Propagates this application's final aapt resource IDs into the Resource
        // classes of each referenced library so library code resolves the same IDs.
        public static void UpdateIdValues()
        {
            Acr.Support.Android.Resource.String.ApplicationName = String.ApplicationName;
            Acr.Support.Android.Resource.String.Hello = String.Hello;
            AndroidHUD.Resource.Attribute.ahBarColor = Attribute.ahBarColor;
            AndroidHUD.Resource.Attribute.ahBarLength = Attribute.ahBarLength;
            AndroidHUD.Resource.Attribute.ahBarWidth = Attribute.ahBarWidth;
            AndroidHUD.Resource.Attribute.ahCircleColor = Attribute.ahCircleColor;
            AndroidHUD.Resource.Attribute.ahDelayMillis = Attribute.ahDelayMillis;
            AndroidHUD.Resource.Attribute.ahRadius = Attribute.ahRadius;
            AndroidHUD.Resource.Attribute.ahRimColor = Attribute.ahRimColor;
            AndroidHUD.Resource.Attribute.ahRimWidth = Attribute.ahRimWidth;
            AndroidHUD.Resource.Attribute.ahSpinSpeed = Attribute.ahSpinSpeed;
            AndroidHUD.Resource.Attribute.ahText = Attribute.ahText;
            AndroidHUD.Resource.Attribute.ahTextColor = Attribute.ahTextColor;
            AndroidHUD.Resource.Attribute.ahTextSize = Attribute.ahTextSize;
            AndroidHUD.Resource.Drawable.ic_errorstatus = Drawable.ic_errorstatus;
            AndroidHUD.Resource.Drawable.ic_successstatus = Drawable.ic_successstatus;
            AndroidHUD.Resource.Drawable.roundedbg = Drawable.roundedbg;
            AndroidHUD.Resource.Drawable.roundedbgdark = Drawable.roundedbgdark;
            AndroidHUD.Resource.Id.loadingImage = Id.loadingImage;
            AndroidHUD.Resource.Id.loadingProgressBar = Id.loadingProgressBar;
            AndroidHUD.Resource.Id.loadingProgressWheel = Id.loadingProgressWheel;
            AndroidHUD.Resource.Id.textViewStatus = Id.textViewStatus;
            AndroidHUD.Resource.Layout.loading = Layout.loading;
            AndroidHUD.Resource.Layout.loadingimage = Layout.loadingimage;
            AndroidHUD.Resource.Layout.loadingprogress = Layout.loadingprogress;
            AndroidHUD.Resource.String.library_name = String.library_name;
            AndroidHUD.Resource.Styleable.ProgressWheel = Styleable.ProgressWheel;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahBarColor = Styleable.ProgressWheel_ahBarColor;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahBarLength = Styleable.ProgressWheel_ahBarLength;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahBarWidth = Styleable.ProgressWheel_ahBarWidth;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahCircleColor = Styleable.ProgressWheel_ahCircleColor;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahDelayMillis = Styleable.ProgressWheel_ahDelayMillis;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahRadius = Styleable.ProgressWheel_ahRadius;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahRimColor = Styleable.ProgressWheel_ahRimColor;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahRimWidth = Styleable.ProgressWheel_ahRimWidth;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahSpinSpeed = Styleable.ProgressWheel_ahSpinSpeed;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahText = Styleable.ProgressWheel_ahText;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahTextColor = Styleable.ProgressWheel_ahTextColor;
            AndroidHUD.Resource.Styleable.ProgressWheel_ahTextSize = Styleable.ProgressWheel_ahTextSize;
            Splat.Resource.String.library_name = String.library_name;
            Xamarin.Forms.Platform.Resource.String.ApplicationName = String.ApplicationName;
            Xamarin.Forms.Platform.Resource.String.Hello = String.Hello;
            XLabs.Forms.Resource.Color.calendar_active_month_bg = Color.calendar_active_month_bg;
            XLabs.Forms.Resource.Color.calendar_bg = Color.calendar_bg;
            XLabs.Forms.Resource.Color.calendar_divider = Color.calendar_divider;
            XLabs.Forms.Resource.Color.calendar_highlighted_day_bg = Color.calendar_highlighted_day_bg;
            XLabs.Forms.Resource.Color.calendar_inactive_month_bg = Color.calendar_inactive_month_bg;
            XLabs.Forms.Resource.Color.calendar_selected_day_bg = Color.calendar_selected_day_bg;
            XLabs.Forms.Resource.Color.calendar_selected_range_bg = Color.calendar_selected_range_bg;
            XLabs.Forms.Resource.Color.calendar_text_active = Color.calendar_text_active;
            XLabs.Forms.Resource.Color.calendar_text_inactive = Color.calendar_text_inactive;
            XLabs.Forms.Resource.Color.calendar_text_selected = Color.calendar_text_selected;
            XLabs.Forms.Resource.Color.calendar_text_selector = Color.calendar_text_selector;
            XLabs.Forms.Resource.Color.calendar_text_unselectable = Color.calendar_text_unselectable;
            XLabs.Forms.Resource.Dimension.calendar_day_headers_paddingbottom = Dimension.calendar_day_headers_paddingbottom;
            XLabs.Forms.Resource.Dimension.calendar_month_title_bottommargin = Dimension.calendar_month_title_bottommargin;
            XLabs.Forms.Resource.Dimension.calendar_month_topmargin = Dimension.calendar_month_topmargin;
            XLabs.Forms.Resource.Dimension.calendar_text_medium = Dimension.calendar_text_medium;
            XLabs.Forms.Resource.Dimension.calendar_text_small = Dimension.calendar_text_small;
            XLabs.Forms.Resource.Drawable.ad16 = Drawable.ad16;
            XLabs.Forms.Resource.Drawable.calendar_bg_selector = Drawable.calendar_bg_selector;
            XLabs.Forms.Resource.Id.calendar_grid = Id.calendar_grid;
            XLabs.Forms.Resource.Id.calendar_view = Id.calendar_view;
            XLabs.Forms.Resource.Id.left_arrow = Id.left_arrow;
            XLabs.Forms.Resource.Id.right_arrow = Id.right_arrow;
            XLabs.Forms.Resource.Id.title = Id.title;
            XLabs.Forms.Resource.Layout.calendar_pager_layout = Layout.calendar_pager_layout;
            XLabs.Forms.Resource.Layout.calendar_picker = Layout.calendar_picker;
            XLabs.Forms.Resource.Layout.dialog = Layout.dialog;
            XLabs.Forms.Resource.Layout.month = Layout.month;
            XLabs.Forms.Resource.Layout.week = Layout.week;
            XLabs.Forms.Resource.String.ApplicationName = String.ApplicationName;
            XLabs.Forms.Resource.String.Hello = String.Hello;
            XLabs.Forms.Resource.String.day_name_format = String.day_name_format;
            XLabs.Forms.Resource.String.full_date_format = String.full_date_format;
            XLabs.Forms.Resource.String.invalid_date = String.invalid_date;
            XLabs.Forms.Resource.String.library_name = String.library_name;
            XLabs.Forms.Resource.String.month_name_format = String.month_name_format;
            XLabs.Forms.Resource.Style.CalendarCell = Style.CalendarCell;
            XLabs.Forms.Resource.Style.CalendarCell_CalendarDate = Style.CalendarCell_CalendarDate;
            XLabs.Forms.Resource.Style.CalendarCell_DayHeader = Style.CalendarCell_DayHeader;
            XLabs.Forms.Resource.Style.CalendarTitle = Style.CalendarTitle;
            XLabs.Platform.Resource.String.ApplicationName = String.ApplicationName;
            XLabs.Platform.Resource.String.Hello = String.Hello;
        }

        public partial class Attribute
        {
            // aapt resource value: 0x7f010016
            public const int ahBarColor = 2130771990;

            // aapt resource value: 0x7f01001e
            public const int ahBarLength = 2130771998;

            // aapt resource value: 0x7f01001d
            public const int ahBarWidth = 2130771997;

            // aapt resource value: 0x7f01001b
            public const int ahCircleColor = 2130771995;

            // aapt resource value: 0x7f01001a
            public const int ahDelayMillis = 2130771994;

            // aapt resource value: 0x7f01001c
            public const int ahRadius = 2130771996;

            // aapt resource value: 0x7f010017
            public const int ahRimColor = 2130771991;

            // aapt resource value: 0x7f010018
            public const int ahRimWidth = 2130771992;

            // aapt resource value: 0x7f010019
            public const int ahSpinSpeed = 2130771993;

            // aapt resource value: 0x7f010013
            public const int ahText = 2130771987;

            // aapt resource value: 0x7f010014
            public const int ahTextColor = 2130771988;

            // aapt resource value: 0x7f010015
            public const int ahTextSize = 2130771989;

            // aapt resource value: 0x7f010001
            public const int cameraBearing = 2130771969;

            // aapt resource value: 0x7f010002
            public const int cameraTargetLat = 2130771970;

            // aapt resource value: 0x7f010003
            public const int cameraTargetLng = 2130771971;

            // aapt resource value: 0x7f010004
            public const int cameraTilt = 2130771972;

            // aapt resource value: 0x7f010005
            public const int cameraZoom = 2130771973;

            // aapt resource value: 0x7f010012
            public const int circleCrop = 2130771986;

            // aapt resource value: 0x7f010011
            public const int imageAspectRatio = 2130771985;

            // aapt resource value: 0x7f010010
            public const int imageAspectRatioAdjust = 2130771984;

            // aapt resource value: 0x7f010006
            public const int liteMode = 2130771974;

            // aapt resource value: 0x7f010000
            public const int mapType = 2130771968;

            // aapt resource value: 0x7f010007
            public const int uiCompass = 2130771975;

            // aapt resource value: 0x7f01000f
            public const int uiMapToolbar = 2130771983;

            // aapt resource value: 0x7f010008
            public const int uiRotateGestures = 2130771976;

            // aapt resource value: 0x7f010009
            public const int uiScrollGestures = 2130771977;

            // aapt resource value: 0x7f01000a
            public const int uiTiltGestures = 2130771978;

            // aapt resource value: 0x7f01000b
            public const int uiZoomControls = 2130771979;

            // aapt resource value: 0x7f01000c
            public const int uiZoomGestures = 2130771980;

            // aapt resource value: 0x7f01000d
            public const int useViewLifecycle = 2130771981;

            // aapt resource value: 0x7f01000e
            public const int zOrderOnTop = 2130771982;

            static Attribute()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Attribute()
            {
            }
        }

        public partial class Color
        {
            // aapt resource value: 0x7f05000a
            public const int calendar_active_month_bg = 2131034122;

            // aapt resource value: 0x7f05000b
            public const int calendar_bg = 2131034123;

            // aapt resource value: 0x7f05000c
            public const int calendar_divider = 2131034124;

            // aapt resource value: 0x7f05000f
            public const int calendar_highlighted_day_bg = 2131034127;

            // aapt resource value: 0x7f05000d
            public const int calendar_inactive_month_bg = 2131034125;

            // aapt resource value: 0x7f05000e
            public const int calendar_selected_day_bg = 2131034126;

            // aapt resource value: 0x7f050010
            public const int calendar_selected_range_bg = 2131034128;

            // aapt resource value: 0x7f050012
            public const int calendar_text_active = 2131034130;

            // aapt resource value: 0x7f050011
            public const int calendar_text_inactive = 2131034129;

            // aapt resource value: 0x7f050013
            public const int calendar_text_selected = 2131034131;

            // aapt resource value: 0x7f050015
            public const int calendar_text_selector = 2131034133;

            // aapt resource value: 0x7f050014
            public const int calendar_text_unselectable = 2131034132;

            // aapt resource value: 0x7f050009
            public const int common_action_bar_splitter = 2131034121;

            // aapt resource value: 0x7f050000
            public const int common_signin_btn_dark_text_default = 2131034112;

            // aapt resource value: 0x7f050002
            public const int common_signin_btn_dark_text_disabled = 2131034114;

            // aapt resource value: 0x7f050003
            public const int common_signin_btn_dark_text_focused = 2131034115;

            // aapt resource value: 0x7f050001
            public const int common_signin_btn_dark_text_pressed = 2131034113;

            // aapt resource value: 0x7f050008
            public const int common_signin_btn_default_background = 2131034120;

            // aapt resource value: 0x7f050004
            public const int common_signin_btn_light_text_default = 2131034116;

            // aapt resource value: 0x7f050006
            public const int common_signin_btn_light_text_disabled = 2131034118;

            // aapt resource value: 0x7f050007
            public const int common_signin_btn_light_text_focused = 2131034119;

            // aapt resource value: 0x7f050005
            public const int common_signin_btn_light_text_pressed = 2131034117;

            // aapt resource value: 0x7f050016
            public const int common_signin_btn_text_dark = 2131034134;

            // aapt resource value: 0x7f050017
            public const int common_signin_btn_text_light = 2131034135;

            static Color()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Color()
            {
            }
        }

        public partial class Dimension
        {
            // aapt resource value: 0x7f070000
            public const int calendar_day_headers_paddingbottom = 2131165184;

            // aapt resource value: 0x7f070002
            public const int calendar_month_title_bottommargin = 2131165186;

            // aapt resource value: 0x7f070001
            public const int calendar_month_topmargin = 2131165185;

            // aapt resource value: 0x7f070003
            public const int calendar_text_medium = 2131165187;

            // aapt resource value: 0x7f070004
            public const int calendar_text_small = 2131165188;

            static Dimension()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Dimension()
            {
            }
        }

        public partial class Drawable
        {
            // aapt resource value: 0x7f020000
            public const int ad16 = 2130837504;

            // aapt resource value: 0x7f020001
            public const int calendar_bg_selector = 2130837505;

            // aapt resource value: 0x7f020002
            public const int common_full_open_on_phone = 2130837506;

            // aapt resource value: 0x7f020003
            public const int common_ic_googleplayservices = 2130837507;

            // aapt resource value: 0x7f020004
            public const int common_signin_btn_icon_dark = 2130837508;

            // aapt resource value: 0x7f020005
            public const int common_signin_btn_icon_disabled_dark = 2130837509;

            // aapt resource value: 0x7f020006
            public const int common_signin_btn_icon_disabled_focus_dark = 2130837510;

            // aapt resource value: 0x7f020007
            public const int common_signin_btn_icon_disabled_focus_light = 2130837511;

            // aapt resource value: 0x7f020008
            public const int common_signin_btn_icon_disabled_light = 2130837512;

            // aapt resource value: 0x7f020009
            public const int common_signin_btn_icon_focus_dark = 2130837513;

            // aapt resource value: 0x7f02000a
            public const int common_signin_btn_icon_focus_light = 2130837514;

            // aapt resource value: 0x7f02000b
            public const int common_signin_btn_icon_light = 2130837515;

            // aapt resource value: 0x7f02000c
            public const int common_signin_btn_icon_normal_dark = 2130837516;

            // aapt resource value: 0x7f02000d
            public const int common_signin_btn_icon_normal_light = 2130837517;

            // aapt resource value: 0x7f02000e
            public const int common_signin_btn_icon_pressed_dark = 2130837518;

            // aapt resource value: 0x7f02000f
            public const int common_signin_btn_icon_pressed_light = 2130837519;

            // aapt resource value: 0x7f020010
            public const int common_signin_btn_text_dark = 2130837520;

            // aapt resource value: 0x7f020011
            public const int common_signin_btn_text_disabled_dark = 2130837521;

            // aapt resource value: 0x7f020012
            public const int common_signin_btn_text_disabled_focus_dark = 2130837522;

            // aapt resource value: 0x7f020013
            public const int common_signin_btn_text_disabled_focus_light = 2130837523;

            // aapt resource value: 0x7f020014
            public const int common_signin_btn_text_disabled_light = 2130837524;

            // aapt resource value: 0x7f020015
            public const int common_signin_btn_text_focus_dark = 2130837525;

            // aapt resource value: 0x7f020016
            public const int common_signin_btn_text_focus_light = 2130837526;

            // aapt resource value: 0x7f020017
            public const int common_signin_btn_text_light = 2130837527;

            // aapt resource value: 0x7f020018
            public const int common_signin_btn_text_normal_dark = 2130837528;

            // aapt resource value: 0x7f020019
            public const int common_signin_btn_text_normal_light = 2130837529;

            // aapt resource value: 0x7f02001a
            public const int common_signin_btn_text_pressed_dark = 2130837530;

            // aapt resource value: 0x7f02001b
            public const int common_signin_btn_text_pressed_light = 2130837531;

            // aapt resource value: 0x7f02001c
            public const int ic_errorstatus = 2130837532;

            // aapt resource value: 0x7f02001d
            public const int ic_plusone_medium_off_client = 2130837533;

            // aapt resource value: 0x7f02001e
            public const int ic_plusone_small_off_client = 2130837534;

            // aapt resource value: 0x7f02001f
            public const int ic_plusone_standard_off_client = 2130837535;

            // aapt resource value: 0x7f020020
            public const int ic_plusone_tall_off_client = 2130837536;

            // aapt resource value: 0x7f020021
            public const int ic_successstatus = 2130837537;

            // aapt resource value: 0x7f020022
            public const int icon = 2130837538;

            // aapt resource value: 0x7f020023
            public const int powered_by_google_dark = 2130837539;

            // aapt resource value: 0x7f020024
            public const int powered_by_google_light = 2130837540;

            // aapt resource value: 0x7f020025
            public const int roundedbg = 2130837541;

            // aapt resource value: 0x7f020026
            public const int roundedbgdark = 2130837542;

            static Drawable()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Drawable()
            {
            }
        }

        public partial class Id
        {
            // aapt resource value: 0x7f090005
            public const int adjust_height = 2131296261;

            // aapt resource value: 0x7f090006
            public const int adjust_width = 2131296262;

            // aapt resource value: 0x7f09000f
            public const int calendar_grid = 2131296271;

            // aapt resource value: 0x7f090008
            public const int calendar_view = 2131296264;

            // aapt resource value: 0x7f090000
            public const int hybrid = 2131296256;

            // aapt resource value: 0x7f09000a
            public const int left_arrow = 2131296266;

            // aapt resource value: 0x7f09000d
            public const int loadingImage = 2131296269;

            // aapt resource value: 0x7f09000b
            public const int loadingProgressBar = 2131296267;

            // aapt resource value: 0x7f09000e
            public const int loadingProgressWheel = 2131296270;

            // aapt resource value: 0x7f090001
            public const int none = 2131296257;

            // aapt resource value: 0x7f090002
            public const int normal = 2131296258;

            // aapt resource value: 0x7f090009
            public const int right_arrow = 2131296265;

            // aapt resource value: 0x7f090003
            public const int satellite = 2131296259;

            // aapt resource value: 0x7f090004
            public const int terrain = 2131296260;

            // aapt resource value: 0x7f09000c
            public const int textViewStatus = 2131296268;

            // aapt resource value: 0x7f090007
            public const int title = 2131296263;

            static Id()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Id()
            {
            }
        }

        public partial class Integer
        {
            // aapt resource value: 0x7f060000
            public const int google_play_services_version = 2131099648;

            static Integer()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Integer()
            {
            }
        }

        public partial class Layout
        {
            // aapt resource value: 0x7f030000
            public const int calendar_pager_layout = 2130903040;

            // aapt resource value: 0x7f030001
            public const int calendar_picker = 2130903041;

            // aapt resource value: 0x7f030002
            public const int dialog = 2130903042;

            // aapt resource value: 0x7f030003
            public const int loading = 2130903043;

            // aapt resource value: 0x7f030004
            public const int loadingimage = 2130903044;

            // aapt resource value: 0x7f030005
            public const int loadingprogress = 2130903045;

            // aapt resource value: 0x7f030006
            public const int month = 2130903046;

            // aapt resource value: 0x7f030007
            public const int week = 2130903047;

            static Layout()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Layout()
            {
            }
        }

        public partial class String
        {
            // aapt resource value: 0x7f040023
            public const int ApplicationName = 2130968611;

            // aapt resource value: 0x7f040022
            public const int Hello = 2130968610;

            // aapt resource value: 0x7f040001
            public const int auth_google_play_services_client_facebook_display_name = 2130968577;

            // aapt resource value: 0x7f040000
            public const int auth_google_play_services_client_google_display_name = 2130968576;

            // aapt resource value: 0x7f040004
            public const int common_android_wear_notification_needs_update_text = 2130968580;

            // aapt resource value: 0x7f040011
            public const int common_android_wear_update_text = 2130968593;

            // aapt resource value: 0x7f04000f
            public const int common_android_wear_update_title = 2130968591;

            // aapt resource value: 0x7f04001f
            public const int common_google_play_services_api_unavailable_text = 2130968607;

            // aapt resource value: 0x7f04000d
            public const int common_google_play_services_enable_button = 2130968589;

            // aapt resource value: 0x7f04000c
            public const int common_google_play_services_enable_text = 2130968588;

            // aapt resource value: 0x7f04000b
            public const int common_google_play_services_enable_title = 2130968587;

            // aapt resource value: 0x7f040006
            public const int common_google_play_services_error_notification_requested_by_msg = 2130968582;

            // aapt resource value: 0x7f04000a
            public const int common_google_play_services_install_button = 2130968586;

            // aapt resource value: 0x7f040008
            public const int common_google_play_services_install_text_phone = 2130968584;

            // aapt resource value: 0x7f040009
            public const int common_google_play_services_install_text_tablet = 2130968585;

            // aapt resource value: 0x7f040007
            public const int common_google_play_services_install_title = 2130968583;

            // aapt resource value: 0x7f040017
            public const int common_google_play_services_invalid_account_text = 2130968599;

            // aapt resource value: 0x7f040016
            public const int common_google_play_services_invalid_account_title = 2130968598;

            // aapt resource value: 0x7f040005
            public const int common_google_play_services_needs_enabling_title = 2130968581;

            // aapt resource value: 0x7f040015
            public const int common_google_play_services_network_error_text = 2130968597;

            // aapt resource value: 0x7f040014
            public const int common_google_play_services_network_error_title = 2130968596;

            // aapt resource value: 0x7f040003
            public const int common_google_play_services_notification_needs_update_title = 2130968579;

            // aapt resource value: 0x7f040002
            public const int common_google_play_services_notification_ticker = 2130968578;

            // aapt resource value: 0x7f040021
            public const int common_google_play_services_sign_in_failed_text = 2130968609;

            // aapt resource value: 0x7f040020
            public const int common_google_play_services_sign_in_failed_title = 2130968608;

            // aapt resource value: 0x7f040018
            public const int common_google_play_services_unknown_issue = 2130968600;

            // aapt resource value: 0x7f04001a
            public const int common_google_play_services_unsupported_text = 2130968602;

            // aapt resource value: 0x7f040019
            public const int common_google_play_services_unsupported_title = 2130968601;

            // aapt resource value: 0x7f04001b
            public const int common_google_play_services_update_button = 2130968603;

            // aapt resource value: 0x7f040010
            public const int common_google_play_services_update_text = 2130968592;

            // aapt resource value: 0x7f04000e
            public const int common_google_play_services_update_title = 2130968590;

            // aapt resource value: 0x7f040013
            public const int common_google_play_services_updating_text = 2130968595;

            // aapt resource value: 0x7f040012
            public const int common_google_play_services_updating_title = 2130968594;

            // aapt resource value: 0x7f04001e
            public const int common_open_on_phone = 2130968606;

            // aapt resource value: 0x7f04001c
            public const int common_signin_button_text = 2130968604;

            // aapt resource value: 0x7f04001d
            public const int common_signin_button_text_long = 2130968605;

            // aapt resource value: 0x7f040025
            public const int day_name_format = 2130968613;

            // aapt resource value: 0x7f040028
            public const int full_date_format = 2130968616;

            // aapt resource value: 0x7f040026
            public const int invalid_date = 2130968614;

            // aapt resource value: 0x7f040024
            public const int library_name = 2130968612;

            // aapt resource value: 0x7f040027
            public const int month_name_format = 2130968615;

            static String()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private String()
            {
            }
        }

        public partial class Style
        {
            // aapt resource value: 0x7f080001
            public const int CalendarCell = 2131230721;

            // aapt resource value: 0x7f080003
            public const int CalendarCell_CalendarDate = 2131230723;

            // aapt resource value: 0x7f080002
            public const int CalendarCell_DayHeader = 2131230722;

            // aapt resource value: 0x7f080000
            public const int CalendarTitle = 2131230720;

            static Style()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Style()
            {
            }
        }

        public partial class Styleable
        {
            public static int[] LoadingImageView = new int[] {
                2130771984,
                2130771985,
                2130771986};

            // aapt resource value: 2
            public const int LoadingImageView_circleCrop = 2;

            // aapt resource value: 1
            public const int LoadingImageView_imageAspectRatio = 1;

            // aapt resource value: 0
            public const int LoadingImageView_imageAspectRatioAdjust = 0;

            public static int[] MapAttrs = new int[] {
                2130771968,
                2130771969,
                2130771970,
                2130771971,
                2130771972,
                2130771973,
                2130771974,
                2130771975,
                2130771976,
                2130771977,
                2130771978,
                2130771979,
                2130771980,
                2130771981,
                2130771982,
                2130771983};

            // aapt resource value: 1
            public const int MapAttrs_cameraBearing = 1;

            // aapt resource value: 2
            public const int MapAttrs_cameraTargetLat = 2;

            // aapt resource value: 3
            public const int MapAttrs_cameraTargetLng = 3;

            // aapt resource value: 4
            public const int MapAttrs_cameraTilt = 4;

            // aapt resource value: 5
            public const int MapAttrs_cameraZoom = 5;

            // aapt resource value: 6
            public const int MapAttrs_liteMode = 6;

            // aapt resource value: 0
            public const int MapAttrs_mapType = 0;

            // aapt resource value: 7
            public const int MapAttrs_uiCompass = 7;

            // aapt resource value: 15
            public const int MapAttrs_uiMapToolbar = 15;

            // aapt resource value: 8
            public const int MapAttrs_uiRotateGestures = 8;

            // aapt resource value: 9
            public const int MapAttrs_uiScrollGestures = 9;

            // aapt resource value: 10
            public const int MapAttrs_uiTiltGestures = 10;

            // aapt resource value: 11
            public const int MapAttrs_uiZoomControls = 11;

            // aapt resource value: 12
            public const int MapAttrs_uiZoomGestures = 12;

            // aapt resource value: 13
            public const int MapAttrs_useViewLifecycle = 13;

            // aapt resource value: 14
            public const int MapAttrs_zOrderOnTop = 14;

            public static int[] ProgressWheel = new int[] {
                2130771987,
                2130771988,
                2130771989,
                2130771990,
                2130771991,
                2130771992,
                2130771993,
                2130771994,
                2130771995,
                2130771996,
                2130771997,
                2130771998};

            // aapt resource value: 3
            public const int ProgressWheel_ahBarColor = 3;

            // aapt resource value: 11
            public const int ProgressWheel_ahBarLength = 11;

            // aapt resource value: 10
            public const int ProgressWheel_ahBarWidth = 10;

            // aapt resource value: 8
            public const int ProgressWheel_ahCircleColor = 8;

            // aapt resource value: 7
            public const int ProgressWheel_ahDelayMillis = 7;

            // aapt resource value: 9
            public const int ProgressWheel_ahRadius = 9;

            // aapt resource value: 4
            public const int ProgressWheel_ahRimColor = 4;

            // aapt resource value: 5
            public const int ProgressWheel_ahRimWidth = 5;

            // aapt resource value: 6
            public const int ProgressWheel_ahSpinSpeed = 6;

            // aapt resource value: 0
            public const int ProgressWheel_ahText = 0;

            // aapt resource value: 1
            public const int ProgressWheel_ahTextColor = 1;

            // aapt resource value: 2
            public const int ProgressWheel_ahTextSize = 2;

            static Styleable()
            {
                ResourceIdManager.UpdateIdValues();
            }

            private Styleable()
            {
            }
        }
    }
}
#pragma warning restore 1591
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.IO.PortsTests; using System.Text; using System.Threading; using Legacy.Support; using Xunit; using Xunit.NetCore.Extensions; namespace System.IO.Ports.Tests { public class SerialStream_ReadByte_Generic : PortsTest { // Set bounds fore random timeout values. // If the min is to low read will not timeout accurately and the testcase will fail private const int minRandomTimeout = 250; // If the max is to large then the testcase will take forever to run private const int maxRandomTimeout = 2000; // If the percentage difference between the expected timeout and the actual timeout // found through Stopwatch is greater then 10% then the timeout value was not correctly // to the read method and the testcase fails. private const double maxPercentageDifference = .15; // The number of random bytes to receive private const int numRndByte = 8; private const int NUM_TRYS = 5; #region Test Cases [ConditionalFact(nameof(HasOneSerialPort))] public void ReadAfterClose() { using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { Debug.WriteLine("Verifying read method throws exception after a call to Cloes()"); com.Open(); Stream serialStream = com.BaseStream; com.Close(); VerifyReadException(serialStream, typeof(ObjectDisposedException)); } } [ConditionalFact(nameof(HasOneSerialPort))] public void ReadAfterBaseStreamClose() { using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { Debug.WriteLine("Verifying read method throws exception after a call to .BaseStream.Close()"); com.Open(); Stream serialStream = com.BaseStream; com.BaseStream.Close(); VerifyReadException(serialStream, typeof(ObjectDisposedException)); } } [Trait(XunitConstants.Category, 
XunitConstants.IgnoreForCI)] // Timing-sensitive [ConditionalFact(nameof(HasOneSerialPort))] public void Timeout() { using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { var rndGen = new Random(-55); com.ReadTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout); Debug.WriteLine("Verifying ReadTimeout={0}", com.ReadTimeout); com.Open(); VerifyTimeout(com); } } [Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Timing-sensitive [ConditionalFact(nameof(HasOneSerialPort))] public void SuccessiveReadTimeoutNoData() { using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { var rndGen = new Random(-55); com.ReadTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout); com.Encoding = Encoding.Unicode; Debug.WriteLine("Verifying ReadTimeout={0} with successive call to read method and no data", com.ReadTimeout); com.Open(); Assert.Throws<TimeoutException>(() => com.BaseStream.ReadByte()); VerifyTimeout(com); } } [ConditionalFact(nameof(HasNullModem))] public void SuccessiveReadTimeoutSomeData() { using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { var rndGen = new Random(-55); var t = new Thread(WriteToCom1); com1.ReadTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout); com1.Encoding = new UTF8Encoding(); Debug.WriteLine( "Verifying ReadTimeout={0} with successive call to read method and some data being received in the first call", com1.ReadTimeout); com1.Open(); // Call WriteToCom1 asynchronously this will write to com1 some time before the following call // to a read method times out t.Start(); try { com1.BaseStream.ReadByte(); } catch (TimeoutException) { } // Wait for the thread to finish while (t.IsAlive) Thread.Sleep(50); // Make sure there is no bytes in the buffer so the next call to read will timeout com1.DiscardInBuffer(); VerifyTimeout(com1); } } private void WriteToCom1() { using (var com2 = new 
SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
        {
            var rndGen = new Random(-55);
            var xmitBuffer = new byte[1];
            int sleepPeriod = rndGen.Next(minRandomTimeout, maxRandomTimeout / 2);

            // Sleep some random period with a maximum duration of half the largest possible timeout value for a read method on COM1
            Thread.Sleep(sleepPeriod);

            com2.Open();
            com2.Write(xmitBuffer, 0, xmitBuffer.Length);
        }
        }

        /// <summary>
        /// Verifies that a parity error is replaced with the default ParityReplace byte ('?').
        /// </summary>
        [ConditionalFact(nameof(HasNullModem))]
        public void DefaultParityReplaceByte()
        {
            VerifyParityReplaceByte(-1, numRndByte - 2);
        }

        /// <summary>
        /// Verifies that setting ParityReplace to '\0' disables parity replacement.
        /// </summary>
        [ConditionalFact(nameof(HasNullModem))]
        public void NoParityReplaceByte()
        {
            var rndGen = new Random(-55);

            VerifyParityReplaceByte('\0', rndGen.Next(0, numRndByte - 1), Encoding.UTF32);
        }

        /// <summary>
        /// Verifies that a randomly chosen ParityReplace byte is substituted on a parity error.
        /// </summary>
        [ConditionalFact(nameof(HasNullModem))]
        public void RNDParityReplaceByte()
        {
            var rndGen = new Random(-55);

            VerifyParityReplaceByte(rndGen.Next(0, 128), 0, new UTF8Encoding());
        }

        /// <summary>
        /// Verifies that a parity error on the very last byte of a stream is still handled:
        /// everything before it reads back unchanged and the last byte becomes ParityReplace.
        /// </summary>
        [ConditionalFact(nameof(HasNullModem))]
        public void ParityErrorOnLastByte()
        {
            using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                var rndGen = new Random(15);
                var bytesToWrite = new byte[numRndByte];
                var expectedBytes = new byte[numRndByte];
                var actualBytes = new byte[numRndByte + 1];
                var actualByteIndex = 0;

                /* 1 Additional character gets added to the input buffer when the parity error occurs on the last byte of a stream
                   We are verifying that besides this everything gets read in correctly.
                   See NDP Whidbey: 24216 for more info on this */
                Debug.WriteLine("Verifying default ParityReplace byte with a parity error on the last byte");

                // Generate random characters without a parity error
                for (var i = 0; i < bytesToWrite.Length; i++)
                {
                    var randByte = (byte)rndGen.Next(0, 128);

                    bytesToWrite[i] = randByte;
                    expectedBytes[i] = randByte;
                }

                bytesToWrite[bytesToWrite.Length - 1] = (byte)(bytesToWrite[bytesToWrite.Length - 1] | 0x80); // Create a parity error on the last byte
                expectedBytes[expectedBytes.Length - 1] = com1.ParityReplace; // Set the last expected byte to be the ParityReplace Byte

                com1.Parity = Parity.Space;
                com1.DataBits = 7;
                com1.ReadTimeout = 250;

                com1.Open();
                com2.Open();

                com2.Write(bytesToWrite, 0, bytesToWrite.Length);

                // +1 because the parity error on the final byte produces one extra byte in the input buffer
                TCSupport.WaitForReadBufferToLoad(com1, bytesToWrite.Length + 1);

                while (true)
                {
                    int byteRead;
                    try
                    {
                        byteRead = com1.ReadByte();
                    }
                    catch (TimeoutException)
                    {
                        break;
                    }

                    actualBytes[actualByteIndex] = (byte)byteRead;
                    actualByteIndex++;
                }

                // Compare the chars that were written with the ones we expected to read
                for (var i = 0; i < expectedBytes.Length; i++)
                {
                    if (expectedBytes[i] != actualBytes[i])
                    {
                        Fail("ERROR!!!: Expected to read {0} actual read {1}", (int)expectedBytes[i], (int)actualBytes[i]);
                    }
                }

                // At most one artifact byte may remain (see the NDP Whidbey comment above)
                if (1 < com1.BytesToRead)
                {
                    Fail("ERROR!!!: Expected BytesToRead<=1 actual={0}", com1.BytesToRead);
                    Fail("ByteRead={0}, {1}", com1.ReadByte(), bytesToWrite[bytesToWrite.Length - 1]);
                }

                bytesToWrite[bytesToWrite.Length - 1] = (byte)(bytesToWrite[bytesToWrite.Length - 1] & 0x7F); // Clear the parity error on the last byte
                expectedBytes[expectedBytes.Length - 1] = bytesToWrite[bytesToWrite.Length - 1];
                VerifyRead(com1, com2, bytesToWrite, expectedBytes, Encoding.ASCII);
            }
        }
        #endregion

        #region Verification for Test Cases

        /// <summary>
        /// Verifies that ReadByte on <paramref name="com"/>'s BaseStream times out in
        /// approximately com.ReadTimeout milliseconds, averaged over NUM_TRYS attempts.
        /// Closes the port before returning.
        /// </summary>
        private void VerifyTimeout(SerialPort com)
        {
            var timer = new Stopwatch();
            int expectedTime = com.ReadTimeout;
            var actualTime = 0;
            double percentageDifference;

            try
            {
                com.BaseStream.ReadByte(); // Warm up read method
                Fail("Err_6941814ahbpa!!!: Read did not throw Timeout Exception when it timed out for the first time");
            }
            catch (TimeoutException) { }

            // Boost priority so scheduling delays do not inflate the measured timeout
            Thread.CurrentThread.Priority = ThreadPriority.Highest;

            for (var i = 0; i < NUM_TRYS; i++)
            {
                timer.Start();
                try
                {
                    com.BaseStream.ReadByte();
                    Fail("Err_17087ahps!!!: Read did not throw TimeoutException when it timed out");
                }
                catch (TimeoutException) { }
                timer.Stop();

                actualTime += (int)timer.ElapsedMilliseconds;
                timer.Reset();
            }

            Thread.CurrentThread.Priority = ThreadPriority.Normal;
            actualTime /= NUM_TRYS;
            percentageDifference = Math.Abs((expectedTime - actualTime) / (double)expectedTime);

            // Verify that the percentage difference between the expected and actual timeout is less than maxPercentageDifference
            if (maxPercentageDifference < percentageDifference)
            {
                Fail("ERROR!!!: The read method timed out in {0} expected {1} percentage difference: {2}", actualTime, expectedTime, percentageDifference);
            }

            if (com.IsOpen)
                com.Close();
        }

        /// <summary>
        /// Verifies that ReadByte on the given stream throws the expected exception type.
        /// </summary>
        private void VerifyReadException(Stream serialStream, Type expectedException)
        {
            Assert.Throws(expectedException, () => serialStream.ReadByte());
        }

        /// <summary>
        /// Overload defaulting to ASCII encoding.
        /// </summary>
        private void VerifyParityReplaceByte(int parityReplace, int parityErrorIndex)
        {
            VerifyParityReplaceByte(parityReplace, parityErrorIndex, new ASCIIEncoding());
        }

        /// <summary>
        /// Writes a random byte stream with a forced parity error at <paramref name="parityErrorIndex"/>
        /// and verifies the byte read back there: the default ParityReplace when parityReplace is -1,
        /// the original byte when parityReplace is '\0' (replacement disabled), or parityReplace itself.
        /// </summary>
        private void VerifyParityReplaceByte(int parityReplace, int parityErrorIndex, Encoding encoding)
        {
            using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                var rndGen = new Random(-55);
                var byteBuffer = new byte[numRndByte];
                var expectedBytes = new byte[numRndByte];
                int expectedChar;

                // Generate random bytes without a parity error
                for (var i = 0; i < byteBuffer.Length; i++)
                {
                    int randChar = rndGen.Next(0, 128);

                    byteBuffer[i] = (byte)randChar;
                    expectedBytes[i] = (byte)randChar;
                }

                if (-1 == parityReplace)
                {
                    // If parityReplace is -1 we should just use the default value
                    expectedChar = com1.ParityReplace;
                }
                else if ('\0' == parityReplace)
                {
                    // If parityReplace is the null character parity replacement should not occur
                    com1.ParityReplace = (byte)parityReplace;
                    expectedChar = expectedBytes[parityErrorIndex];
                }
                else
                {
                    // Else parityReplace was set to a value and we should expect this value to be returned on a parity error
                    com1.ParityReplace = (byte)parityReplace;
                    expectedChar = parityReplace;
                }

                // Create a parity error by setting the highest order bit to true
                byteBuffer[parityErrorIndex] = (byte)(byteBuffer[parityErrorIndex] | 0x80);
                expectedBytes[parityErrorIndex] = (byte)expectedChar;

                Debug.WriteLine("Verifying ParityReplace={0} with an ParityError at: {1} ", com1.ParityReplace, parityErrorIndex);

                com1.Parity = Parity.Space;
                com1.DataBits = 7;
                com1.Open();
                com2.Open();

                VerifyRead(com1, com2, byteBuffer, expectedBytes, encoding);
            }
        }

        /// <summary>
        /// Writes bytesToWrite on com2, reads them back one byte at a time on com1's BaseStream
        /// until timeout, and verifies each byte read (and the running BytesToRead count)
        /// against expectedBytes.
        /// </summary>
        private void VerifyRead(SerialPort com1, SerialPort com2, byte[] bytesToWrite, byte[] expectedBytes, Encoding encoding)
        {
            var byteRcvBuffer = new byte[expectedBytes.Length];
            var rcvBufferSize = 0;
            int i;

            com2.Write(bytesToWrite, 0, bytesToWrite.Length);
            com1.ReadTimeout = 250;
            com1.Encoding = encoding;

            TCSupport.WaitForReadBufferToLoad(com1, bytesToWrite.Length);

            i = 0;
            while (true)
            {
                int readInt;
                try
                {
                    readInt = com1.BaseStream.ReadByte();
                }
                catch (TimeoutException)
                {
                    break;
                }

                // While there are more bytes to be read
                if (expectedBytes.Length <= i)
                {
                    // If we have read in more bytes than we expected
                    Fail("ERROR!!!: We have received more bytes then were sent");
                    break;
                }

                byteRcvBuffer[i] = (byte)readInt;
                rcvBufferSize++;

                if (bytesToWrite.Length - rcvBufferSize != com1.BytesToRead)
                {
                    Fail("ERROR!!!: Expected BytesToRead={0} actual={1}", bytesToWrite.Length - rcvBufferSize, com1.BytesToRead);
                }

                if (readInt != expectedBytes[i])
                {
                    // If the byte read is not the expected byte
                    Fail("ERROR!!!: Expected to read {0} actual read byte {1}", expectedBytes[i], (byte)readInt);
                }

                i++;
            }

            if (rcvBufferSize != expectedBytes.Length)
            {
                Fail("ERROR!!! Expected to read {0} char actually read {1} chars", bytesToWrite.Length, rcvBufferSize);
            }
        }
        #endregion
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using Microsoft.Build.Framework;
using Microsoft.Build.BackEnd;
using Microsoft.Build.Construction;
using Microsoft.Build.Shared;
using Microsoft.Build.BackEnd.Logging;
using System.Collections.Generic;
using Microsoft.Build.Execution;
using Microsoft.Build.Collections;
using System.Collections;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Unittest;
using TaskItem = Microsoft.Build.Execution.ProjectItemInstance.TaskItem;
using System.Threading.Tasks;
using Xunit;

namespace Microsoft.Build.UnitTests.BackEnd
{
    /// <summary>
    /// Test the task host class which acts as a communication mechanism between tasks and the msbuild engine.
    /// </summary>
    public class TaskHost_Tests
    {
        /// <summary>
        /// Task host for the test
        /// </summary>
        private TaskHost _taskHost;

        /// <summary>
        /// Mock host for the tests
        /// </summary>
        private MockHost _mockHost;

        /// <summary>
        /// Custom logger for the tests
        /// </summary>
        private MyCustomLogger _customLogger;

        /// <summary>
        /// Element location for the tests
        /// </summary>
        private ElementLocation _elementLocation;

        /// <summary>
        /// Logging service for the tests
        /// </summary>
        private ILoggingService _loggingService;

        /// <summary>
        /// Mock request callback that provides the build results.
        /// </summary>
        private MockIRequestBuilderCallback _mockRequestCallback;

        /// <summary>
        /// Set up and initialize before each test is run
        /// </summary>
        public TaskHost_Tests()
        {
            LoggingServiceFactory loggingFactory = new LoggingServiceFactory(LoggerMode.Synchronous, 1);
            _loggingService = loggingFactory.CreateInstance(BuildComponentType.LoggingService) as LoggingService;
            _customLogger = new MyCustomLogger();
            _mockHost = new MockHost();
            _mockHost.LoggingService = _loggingService;
            _loggingService.RegisterLogger(_customLogger);
            _elementLocation = ElementLocation.Create("MockFile", 5, 5);

            BuildRequest buildRequest = new BuildRequest(1 /* submissionId */, 1, 1, new List<string>(), null, BuildEventContext.Invalid, null);
            BuildRequestConfiguration configuration = new BuildRequestConfiguration(1, new BuildRequestData("Nothing", new Dictionary<string, string>(), "4.0", new string[0], null), "2.0");
            configuration.Project = new ProjectInstance(ProjectRootElement.Create());

            BuildRequestEntry entry = new BuildRequestEntry(buildRequest, configuration);

            BuildResult buildResult = new BuildResult(buildRequest, false);
            buildResult.AddResultsForTarget("Build", new TargetResult(new TaskItem[] { new TaskItem("IamSuper", configuration.ProjectFullPath) }, BuildResultUtilities.GetSkippedResult()));
            _mockRequestCallback = new MockIRequestBuilderCallback(new BuildResult[] { buildResult });
            entry.Builder = (IRequestBuilder)_mockRequestCallback;

            _taskHost = new TaskHost(_mockHost, entry, _elementLocation, null /*Don't care about the callback either unless doing a build*/);
            _taskHost.LoggingContext = new TaskLoggingContext(_loggingService, BuildEventContext.Invalid);
        }

        /// <summary>
        /// Verify when pulling target outputs out that we do not get the live ones which are in the cache.
        /// This is to prevent changes to the target outputs from being reflected in the cache if the changes are made in the task which calls the msbuild callback.
        /// </summary>
        [Fact]
        public void TestLiveTargetOutputs()
        {
            IDictionary targetOutputs = new Hashtable();
            IDictionary projectProperties = new Hashtable();

            _taskHost.BuildProjectFile("ProjectFile", new string[] { "Build" }, projectProperties, targetOutputs);

            Assert.NotNull(((ITaskItem[])targetOutputs["Build"])[0]);

            TaskItem targetOutputItem = ((ITaskItem[])targetOutputs["Build"])[0] as TaskItem;
            TaskItem mockItemInCache = _mockRequestCallback.BuildResultsToReturn[0].ResultsByTarget["Build"].Items[0] as TaskItem;

            // Assert the contents are the same
            Assert.True(targetOutputItem.Equals(mockItemInCache));

            // Assert they are different instances.
            Assert.False(object.ReferenceEquals(targetOutputItem, mockItemInCache));
        }

        /// <summary>
        /// Makes sure that if a task tries to log a custom error event that subclasses our own
        /// BuildErrorEventArgs, that the subclass makes it all the way to the logger. In other
        /// words, the engine should not try to read data out of the event args and construct
        /// its own.
        /// </summary>
        [Fact]
        public void CustomBuildErrorEventIsPreserved()
        {
            // Create a custom build event args that derives from MSBuild's BuildErrorEventArgs.
            // Set a custom field on this event (FXCopRule).
            MyCustomBuildErrorEventArgs fxcopError = new MyCustomBuildErrorEventArgs("Your code failed.");
            fxcopError.FXCopRule = "CodeViolation";

            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogErrorEvent(fxcopError);

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastError is MyCustomBuildErrorEventArgs); // "Expected Custom Error Event"

            // Make sure the special fields in the custom event match what we originally logged.
            fxcopError = _customLogger.LastError as MyCustomBuildErrorEventArgs;
            Assert.Equal("Your code failed.", fxcopError.Message);
            Assert.Equal("CodeViolation", fxcopError.FXCopRule);
        }

        /// <summary>
        /// Makes sure that if a task tries to log a custom warning event that subclasses our own
        /// BuildWarningEventArgs, that the subclass makes it all the way to the logger. In other
        /// words, the engine should not try to read data out of the event args and construct
        /// its own.
        /// </summary>
        [Fact]
        public void CustomBuildWarningEventIsPreserved()
        {
            // Create a custom build event args that derives from MSBuild's BuildWarningEventArgs.
            // Set a custom field on this event (FXCopRule).
            MyCustomBuildWarningEventArgs fxcopWarning = new MyCustomBuildWarningEventArgs("Your code failed.");
            fxcopWarning.FXCopRule = "CodeViolation";

            _taskHost.LogWarningEvent(fxcopWarning);

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastWarning is MyCustomBuildWarningEventArgs); // "Expected Custom Warning Event"

            // Make sure the special fields in the custom event match what we originally logged.
            fxcopWarning = _customLogger.LastWarning as MyCustomBuildWarningEventArgs;
            Assert.Equal("Your code failed.", fxcopWarning.Message);
            Assert.Equal("CodeViolation", fxcopWarning.FXCopRule);
        }

        /// <summary>
        /// Makes sure that if a task tries to log a custom message event that subclasses our own
        /// BuildMessageEventArgs, that the subclass makes it all the way to the logger. In other
        /// words, the engine should not try to read data out of the event args and construct
        /// its own.
        /// </summary>
        [Fact]
        public void CustomBuildMessageEventIsPreserved()
        {
            // Create a custom build event args that derives from MSBuild's BuildMessageEventArgs.
            // Set a custom field on this event (FXCopRule).
            MyCustomMessageEvent customMessage = new MyCustomMessageEvent("I am a message");
            customMessage.CustomMessage = "CodeViolation";

            _taskHost.LogMessageEvent(customMessage);

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastMessage is MyCustomMessageEvent); // "Expected Custom message Event"

            customMessage = _customLogger.LastMessage as MyCustomMessageEvent;
            Assert.Equal("I am a message", customMessage.Message);
            Assert.Equal("CodeViolation", customMessage.CustomMessage);
        }

        /// <summary>
        /// Test that error events are correctly logged and take into account continue on error
        /// </summary>
        [Fact]
        public void TestLogErrorEventWithContinueOnError()
        {
            _taskHost.ContinueOnError = false;

            _taskHost.LogErrorEvent(new BuildErrorEventArgs("SubCategory", "code", null, 0, 1, 2, 3, "message", "Help", "Sender"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastError is BuildErrorEventArgs); // "Expected Error Event"
            Assert.Equal(0, _customLogger.LastError.LineNumber); // "Expected line number to be 0"

            _taskHost.ContinueOnError = true;
            _taskHost.ConvertErrorsToWarnings = true;

            Assert.Null(_customLogger.LastWarning); // "Expected no Warning Event at this point"

            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogErrorEvent(new BuildErrorEventArgs("SubCategory", "code", null, 0, 1, 2, 3, "message", "Help", "Sender"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastWarning is BuildWarningEventArgs); // "Expected Warning Event"
            Assert.Equal(0, _customLogger.LastWarning.LineNumber); // "Expected line number to be 0"

            _taskHost.ContinueOnError = true;
            _taskHost.ConvertErrorsToWarnings = false;

            Assert.Equal(1, _customLogger.NumberOfWarning); // "Expected one Warning Event at this point"
            Assert.Equal(1, _customLogger.NumberOfError); // "Expected one Error Event at this point"

            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogErrorEvent(new BuildErrorEventArgs("SubCategory", "code", null, 0, 1, 2, 3, "message", "Help", "Sender"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastError is BuildErrorEventArgs); // "Expected Error Event"
            Assert.Equal(0, _customLogger.LastWarning.LineNumber); // "Expected line number to be 0"
        }

        /// <summary>
        /// Test that a null error event will cause an exception
        /// </summary>
        [Fact]
        public void TestLogErrorEventNull()
        {
            Assert.Throws<ArgumentNullException>(() =>
            {
                _taskHost.LogErrorEvent(null);
            }
           );
        }

        /// <summary>
        /// Test that a null warning event will cause an exception
        /// </summary>
        [Fact]
        public void TestLogWarningEventNull()
        {
            Assert.Throws<ArgumentNullException>(() =>
            {
                _taskHost.LogWarningEvent(null);
            }
           );
        }

        /// <summary>
        /// Test that a null message event will cause an exception
        /// </summary>
        [Fact]
        public void TestLogMessageEventNull()
        {
            Assert.Throws<ArgumentNullException>(() =>
            {
                _taskHost.LogMessageEvent(null);
            }
           );
        }

        /// <summary>
        /// Test that a null custom event will cause an exception
        /// </summary>
        [Fact]
        public void TestLogCustomEventNull()
        {
            Assert.Throws<ArgumentNullException>(() =>
            {
                _taskHost.LogCustomEvent(null);
            }
           );
        }

        /// <summary>
        /// Test that errors are logged properly
        /// </summary>
        [Fact]
        public void TestLogErrorEvent()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogErrorEvent(new BuildErrorEventArgs("SubCategory", "code", null, 0, 1, 2, 3, "message", "Help", "Sender"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastError is BuildErrorEventArgs); // "Expected Error Event"
            Assert.Equal(0, _customLogger.LastError.LineNumber); // "Expected line number to be 0"
        }

        /// <summary>
        /// Test that warnings are logged properly
        /// </summary>
        [Fact]
        public void TestLogWarningEvent()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogWarningEvent(new BuildWarningEventArgs("SubCategory", "code", null, 0, 1, 2, 3, "message", "Help", "Sender"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastWarning is BuildWarningEventArgs); // "Expected Warning Event"
            Assert.Equal(0, _customLogger.LastWarning.LineNumber); // "Expected line number to be 0"
        }

        /// <summary>
        /// Test that messages are logged properly
        /// </summary>
        [Fact]
        public void TestLogMessageEvent()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogMessageEvent(new BuildMessageEventArgs("message", "HelpKeyword", "senderName", MessageImportance.High));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastMessage is BuildMessageEventArgs); // "Expected Message Event"
            Assert.Equal(MessageImportance.High, _customLogger.LastMessage.Importance); // "Expected Message importance to be high"
        }

        /// <summary>
        /// Test that custom events are logged properly
        /// </summary>
        [Fact]
        public void TestLogCustomEvent()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogCustomEvent(new MyCustomBuildEventArgs("testCustomBuildEvent"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastCustom is CustomBuildEventArgs); // "Expected custom build Event"
            Assert.Equal("testCustomBuildEvent", _customLogger.LastCustom.Message);
        }

        #region NotSerializableEvents

        /// <summary>
        /// Test that errors are logged properly
        /// </summary>
        [Fact]
        public void TestLogErrorEventNotSerializableSP()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogErrorEvent(new MyCustomBuildErrorEventArgsNotSerializable("SubCategory"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastError is BuildErrorEventArgs); // "Expected Error Event"
            Assert.Contains("SubCategory", _customLogger.LastError.Message); // Expected the message to contain "SubCategory"
        }

        /// <summary>
        /// Test that warnings are logged properly
        /// </summary>
        [Fact]
        public void TestLogWarningEventNotSerializableSP()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogWarningEvent(new MyCustomBuildWarningEventArgsNotSerializable("SubCategory"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastWarning is MyCustomBuildWarningEventArgsNotSerializable); // "Expected Warning Event"
            Assert.Contains("SubCategory", _customLogger.LastWarning.Message); // Expected the message to contain "SubCategory"
        }

        /// <summary>
        /// Test that messages are logged properly
        /// </summary>
        [Fact]
        public void TestLogMessageEventNotSerializableSP()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogMessageEvent(new MyCustomMessageEventNotSerializable("message"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastMessage is MyCustomMessageEventNotSerializable); // "Expected Message Event"
            Assert.Contains("message", _customLogger.LastMessage.Message); // Expected the message text to round-trip unchanged
        }

        /// <summary>
        /// Test that custom events are logged properly
        /// </summary>
        [Fact]
        public void TestLogCustomEventNotSerializableSP()
        {
            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogCustomEvent(new MyCustomBuildEventArgsNotSerializable("testCustomBuildEvent"));

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastCustom is MyCustomBuildEventArgsNotSerializable); // "Expected custom build Event"
            Assert.Equal("testCustomBuildEvent", _customLogger.LastCustom.Message);
        }

        /// <summary>
        /// Test that errors are logged properly
        /// </summary>
        [Fact]
        public void TestLogErrorEventNotSerializableMP()
        {
            MyCustomBuildErrorEventArgsNotSerializable e = new MyCustomBuildErrorEventArgsNotSerializable("SubCategory");

            _mockHost.BuildParameters.MaxNodeCount = 4;
            Assert.True(_taskHost.IsRunningMultipleNodes);

            // Log the custom event args. (Pretend that the task actually did this.)
            _taskHost.LogErrorEvent(e);

            Assert.Null(_customLogger.LastError); // "Expected no error Event"
            Assert.True(_customLogger.LastWarning is BuildWarningEventArgs); // "Expected Warning Event"

            string message = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("ExpectedEventToBeSerializable", e.GetType().Name);
            Assert.Contains(message, _customLogger.LastWarning.Message); // "Expected line to contain NotSerializable message but it did not"
        }

        /// <summary>
        /// Test that warnings are logged properly
        /// </summary>
        [Fact]
        public void TestLogWarningEventNotSerializableMP()
        {
            MyCustomBuildWarningEventArgsNotSerializable e = new MyCustomBuildWarningEventArgsNotSerializable("SubCategory");

            _mockHost.BuildParameters.MaxNodeCount = 4;
            _taskHost.LogWarningEvent(e);
            Assert.True(_taskHost.IsRunningMultipleNodes);

            Assert.True(_customLogger.LastWarning is BuildWarningEventArgs); // "Expected Warning Event"
            Assert.Equal(1, _customLogger.NumberOfWarning); // "Expected there to be only one warning"

            string message = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("ExpectedEventToBeSerializable", e.GetType().Name);
            Assert.Contains(message, _customLogger.LastWarning.Message); // "Expected line to contain NotSerializable message but it did not"
        }

        /// <summary>
        /// Test that messages are logged properly
        /// </summary>
        [Fact]
        public void TestLogMessageEventNotSerializableMP()
        {
            MyCustomMessageEventNotSerializable e = new MyCustomMessageEventNotSerializable("Message");

            _mockHost.BuildParameters.MaxNodeCount = 4;
            _taskHost.LogMessageEvent(e);
            Assert.True(_taskHost.IsRunningMultipleNodes);

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastWarning is BuildWarningEventArgs); // "Expected Warning Event"
            Assert.Equal(1, _customLogger.NumberOfWarning); // "Expected there to be only one warning"

            string message = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("ExpectedEventToBeSerializable", e.GetType().Name);
            Assert.Contains(message, _customLogger.LastWarning.Message); // "Expected line to contain NotSerializable message but it did not"
        }

        /// <summary>
        /// Test that custom events are logged properly
        /// </summary>
        [Fact]
        public void TestLogCustomEventNotSerializableMP()
        {
            MyCustomBuildEventArgsNotSerializable e = new MyCustomBuildEventArgsNotSerializable("testCustomBuildEvent");

            _mockHost.BuildParameters.MaxNodeCount = 4;
            _taskHost.LogCustomEvent(e);
            Assert.True(_taskHost.IsRunningMultipleNodes);
            Assert.Null(_customLogger.LastCustom as MyCustomBuildEventArgsNotSerializable); // "Expected no custom Event"

            // Make sure our custom logger received the actual custom event and not some fake.
            Assert.True(_customLogger.LastWarning is BuildWarningEventArgs); // "Expected Warning Event"
            Assert.Equal(1, _customLogger.NumberOfWarning); // "Expected there to be only one warning"

            string message = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("ExpectedEventToBeSerializable", e.GetType().Name);
            Assert.Contains(message, _customLogger.LastWarning.Message); // "Expected line to contain NotSerializable message but it did not"
        }
        #endregion

        /// <summary>
        /// Verify IsRunningMultipleNodes
        /// </summary>
        [Fact]
        public void IsRunningMultipleNodes1Node()
        {
            _mockHost.BuildParameters.MaxNodeCount = 1;
            Assert.False(_taskHost.IsRunningMultipleNodes); // "Expect IsRunningMultipleNodes to be false with 1 node"
        }

        /// <summary>
        /// Verify IsRunningMultipleNodes
        /// </summary>
        [Fact]
        public void IsRunningMultipleNodes4Nodes()
        {
            _mockHost.BuildParameters.MaxNodeCount = 4;
            Assert.True(_taskHost.IsRunningMultipleNodes); // "Expect IsRunningMultipleNodes to be true with 4 nodes"
        }

#if FEATURE_CODETASKFACTORY
        /// <summary>
        /// Task logging after it's done should not crash us.
        /// </summary>
        [Fact]
        public void LogCustomAfterTaskIsDone()
        {
            string projectFileContents = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' ToolsVersion='msbuilddefaulttoolsversion'>
                        <UsingTask TaskName='test' TaskFactory='CodeTaskFactory' AssemblyFile='$(MSBuildToolsPath)\Microsoft.Build.Tasks.Core.dll' >
                         <Task>
                            <Using Namespace='System' />
                            <Using Namespace='System.Threading' />
                            <Code Type='Fragment' Language='cs'>
                              <![CDATA[
                                Log.LogWarning(""[1]"");
                                ThreadPool.QueueUserWorkItem(state=>
                                {
                                        Thread.Sleep(100);
                                        Log.LogExternalProjectStarted(""a"", ""b"", ""c"", ""d""); // this logs a custom event
                                });
                              ]]>
                            </Code>
                         </Task>
                         </UsingTask>
                        <Target Name='Build'>
                            <test/>
                            <Warning Text=""[3]""/>
                        </Target>
                    </Project>";

            MockLogger mockLogger = Helpers.BuildProjectWithNewOMExpectSuccess(projectFileContents);
            mockLogger.AssertLogContains("[1]");
            mockLogger.AssertLogContains("[3]");

            // [2] may or may not appear.
        }

        /// <summary>
        /// Task logging after it's done should not crash us.
        /// </summary>
        [Fact]
        public void LogCommentAfterTaskIsDone()
        {
            string projectFileContents = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' ToolsVersion='msbuilddefaulttoolsversion'>
                        <UsingTask TaskName='test' TaskFactory='CodeTaskFactory' AssemblyFile='$(MSBuildToolsPath)\Microsoft.Build.Tasks.Core.dll' >
                         <Task>
                            <Using Namespace='System' />
                            <Using Namespace='System.Threading' />
                            <Code Type='Fragment' Language='cs'>
                              <![CDATA[
                                Log.LogMessage(""[1]"");
                                ThreadPool.QueueUserWorkItem(state=>
                                {
                                        Thread.Sleep(100);
                                        Log.LogMessage(""[2]"");
                                });
                              ]]>
                            </Code>
                         </Task>
                         </UsingTask>
                        <Target Name='Build'>
                            <test/>
                            <Message Text=""[3]""/>
                        </Target>
                    </Project>";

            MockLogger mockLogger = Helpers.BuildProjectWithNewOMExpectSuccess(projectFileContents);
            mockLogger.AssertLogContains("[1]");
            mockLogger.AssertLogContains("[3]");

            // [2] may or may not appear.
        }

        /// <summary>
        /// Task logging after it's done should not crash us.
        /// </summary>
        [Fact]
        public void LogWarningAfterTaskIsDone()
        {
            string projectFileContents = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' ToolsVersion='msbuilddefaulttoolsversion'>
                        <UsingTask TaskName='test' TaskFactory='CodeTaskFactory' AssemblyFile='$(MSBuildToolsPath)\Microsoft.Build.Tasks.Core.dll' >
                         <Task>
                            <Using Namespace='System' />
                            <Using Namespace='System.Threading' />
                            <Code Type='Fragment' Language='cs'>
                              <![CDATA[
                                Log.LogWarning(""[1]"");
                                ThreadPool.QueueUserWorkItem(state=>
                                {
                                        Thread.Sleep(100);
                                        Log.LogWarning(""[2]"");
                                });
                              ]]>
                            </Code>
                         </Task>
                         </UsingTask>
                        <Target Name='Build'>
                            <test/>
                            <Warning Text=""[3]""/>
                        </Target>
                    </Project>";

            MockLogger mockLogger = Helpers.BuildProjectWithNewOMExpectSuccess(projectFileContents);
            mockLogger.AssertLogContains("[1]");
            mockLogger.AssertLogContains("[3]");

            // [2] may or may not appear.
        }

        /// <summary>
        /// Task logging after it's done should not crash us.
        /// </summary>
        [Fact]
        public void LogErrorAfterTaskIsDone()
        {
            string projectFileContents = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' ToolsVersion='msbuilddefaulttoolsversion'>
                        <UsingTask TaskName='test' TaskFactory='CodeTaskFactory' AssemblyFile='$(MSBuildToolsPath)\Microsoft.Build.Tasks.Core.dll' >
                         <Task>
                            <Using Namespace='System' />
                            <Using Namespace='System.Threading' />
                            <Code Type='Fragment' Language='cs'>
                              <![CDATA[
                                Log.LogError(""[1]"");
                                ThreadPool.QueueUserWorkItem(state=>
                                {
                                        Thread.Sleep(100);
                                        Log.LogError(""[2]"");
                                });
                              ]]>
                            </Code>
                         </Task>
                         </UsingTask>
                        <Target Name='Build'>
                            <test ContinueOnError=""true""/>
                            <Warning Text=""[3]""/>
                        </Target>
                    </Project>";

            MockLogger mockLogger = Helpers.BuildProjectWithNewOMExpectSuccess(projectFileContents);
            mockLogger.AssertLogContains("[1]");
            mockLogger.AssertLogContains("[3]");

            // [2] may or may not appear.
        }
#endif

        #region Helper Classes

        /// <summary>
        /// Create a custom message event to make sure it can get sent correctly
        /// </summary>
        [Serializable]
        internal class MyCustomMessageEvent : BuildMessageEventArgs
        {
            /// <summary>
            /// Some custom data for the custom event.
            /// </summary>
            private string _customMessage;

            /// <summary>
            /// Constructor
            /// </summary>
            internal MyCustomMessageEvent
                (
                string message
                )
                : base(message, null, null, MessageImportance.High)
            {
            }

            /// <summary>
            /// Some data which can be set on the custom message event to make sure it makes it to the logger.
            /// </summary>
            internal string CustomMessage
            {
                get { return _customMessage; }

                set { _customMessage = value; }
            }
        }

        /// <summary>
        /// Create a custom build event to test the logging of custom build events against the task host
        /// </summary>
        [Serializable]
        internal class MyCustomBuildEventArgs : CustomBuildEventArgs
        {
            /// <summary>
            /// Constructor
            /// </summary>
            public MyCustomBuildEventArgs() : base()
            {
            }

            /// <summary>
            /// Constructor which adds a message
            /// </summary>
            public MyCustomBuildEventArgs(string message) : base(message, "HelpKeyword", "SenderName")
            {
            }
        }

        /// <summary>
        /// Class which implements a simple custom build error
        /// </summary>
        [Serializable]
        internal class MyCustomBuildErrorEventArgs : BuildErrorEventArgs
        {
            /// <summary>
            /// Some custom data for the custom event.
            /// </summary>
            private string _fxcopRule;

            /// <summary>
            /// Constructor
            /// </summary>
            internal MyCustomBuildErrorEventArgs
                (
                string message
                )
                : base(null, null, null, 0, 0, 0, 0, message, null, null)
            {
            }

            /// <summary>
            /// Some data which can be set on the custom error event to make sure it makes it to the logger.
/// </summary>
            internal string FXCopRule
            {
                get { return _fxcopRule; }

                set { _fxcopRule = value; }
            }
        }

        /// <summary>
        /// Class which implements a simple custom build warning
        /// </summary>
        [Serializable]
        internal class MyCustomBuildWarningEventArgs : BuildWarningEventArgs
        {
            /// <summary>
            /// Custom data for the custom event
            /// </summary>
            private string _fxcopRule;

            /// <summary>
            /// Constructor
            /// </summary>
            internal MyCustomBuildWarningEventArgs
                (
                string message
                )
                : base(null, null, null, 0, 0, 0, 0, message, null, null)
            {
            }

            /// <summary>
            /// Getter for the custom data in the custom event.
            /// </summary>
            internal string FXCopRule
            {
                get { return _fxcopRule; }

                set { _fxcopRule = value; }
            }
        }

        /// <summary>
        /// Create a custom message event to make sure it can get sent correctly
        /// </summary>
        internal class MyCustomMessageEventNotSerializable : BuildMessageEventArgs
        {
            /// <summary>
            /// Some custom data for the custom event.
            /// </summary>
            private string _customMessage;

            /// <summary>
            /// Constructor
            /// </summary>
            internal MyCustomMessageEventNotSerializable
                (
                string message
                )
                : base(message, null, null, MessageImportance.High)
            {
            }

            /// <summary>
            /// Some data which can be set on the custom message event to make sure it makes it to the logger.
            /// </summary>
            internal string CustomMessage
            {
                get { return _customMessage; }

                set { _customMessage = value; }
            }
        }

        /// <summary>
        /// Custom build event which is not marked serializable. This is used to make sure we warn if we try and log a not serializable type in multiproc.
        /// </summary>
        internal class MyCustomBuildEventArgsNotSerializable : CustomBuildEventArgs
        {
            // If binary serialization is not available, then we use a simple serializer which relies on a default constructor. So to test
            // what happens for an event that's not serializable, don't include a default constructor.
            /// <summary>
            /// Default constructor
            /// </summary>
            public MyCustomBuildEventArgsNotSerializable() : base()
            {
            }

            /// <summary>
            /// Constructor which takes a message
            /// </summary>
            public MyCustomBuildEventArgsNotSerializable(string message) : base(message, "HelpKeyword", "SenderName")
            {
            }
        }

        /// <summary>
        /// Class which implements a simple custom build error which is not serializable
        /// </summary>
        internal class MyCustomBuildErrorEventArgsNotSerializable : BuildErrorEventArgs
        {
            /// <summary>
            /// Custom data for the custom event
            /// </summary>
            private string _fxcopRule;

            /// <summary>
            /// Constructor
            /// </summary>
            internal MyCustomBuildErrorEventArgsNotSerializable
                (
                string message
                )
                : base(null, null, null, 0, 0, 0, 0, message, null, null)
            {
            }

            /// <summary>
            /// Getter and setter for the custom data
            /// </summary>
            internal string FXCopRule
            {
                get { return _fxcopRule; }

                set { _fxcopRule = value; }
            }
        }

        /// <summary>
        /// Class which implements a simple custom build warning which is not serializable
        /// </summary>
        internal class MyCustomBuildWarningEventArgsNotSerializable : BuildWarningEventArgs
        {
            /// <summary>
            /// Custom data for the custom event
            /// </summary>
            private string _fxcopRule;

            /// <summary>
            /// Constructor
            /// </summary>
            internal MyCustomBuildWarningEventArgsNotSerializable
                (
                string message
                )
                : base(null, null, null, 0, 0, 0, 0, message, null, null)
            {
            }

            /// <summary>
            /// Getter and setter for the custom data
            /// </summary>
            internal string FXCopRule
            {
                get { return _fxcopRule; }

                set { _fxcopRule = value; }
            }
        }

        /// <summary>
        /// Custom logger which will be used for testing
        /// </summary>
        internal class MyCustomLogger : ILogger
        {
            /// <summary>
            /// Last error event the logger encountered
            /// </summary>
            private BuildErrorEventArgs _lastError = null;

            /// <summary>
            /// Last warning event the logger encountered
            /// </summary>
            private BuildWarningEventArgs _lastWarning = null;

            /// <summary>
            /// Last message event the logger encountered
            /// </summary>
            private BuildMessageEventArgs _lastMessage = null;

            /// <summary>
            /// Last custom build event the logger encountered
            /// </summary>
            private CustomBuildEventArgs _lastCustom = null;

            /// <summary>
            /// Number of errors
            /// </summary>
            private int _numberOfError = 0;

            /// <summary>
            /// Number of warnings
            /// </summary>
            private int _numberOfWarning = 0;

            /// <summary>
            /// Number of messages
            /// </summary>
            private int _numberOfMessage = 0;

            /// <summary>
            /// Number of custom build events
            /// </summary>
            private int _numberOfCustom = 0;

            /// <summary>
            /// Last error logged
            /// </summary>
            public BuildErrorEventArgs LastError
            {
                get { return _lastError; }

                set { _lastError = value; }
            }

            /// <summary>
            /// Last warning logged
            /// </summary>
            public BuildWarningEventArgs LastWarning
            {
                get { return _lastWarning; }

                set { _lastWarning = value; }
            }

            /// <summary>
            /// Last message logged
            /// </summary>
            public BuildMessageEventArgs LastMessage
            {
                get { return _lastMessage; }

                set { _lastMessage = value; }
            }

            /// <summary>
            /// Last custom event logged
            /// </summary>
            public CustomBuildEventArgs LastCustom
            {
                get { return _lastCustom; }

                set { _lastCustom = value; }
            }

            /// <summary>
            /// Number of errors logged
            /// </summary>
            public int NumberOfError
            {
                get { return _numberOfError; }

                set { _numberOfError = value; }
            }

            /// <summary>
            /// Number of warnings logged
            /// </summary>
            public int NumberOfWarning
            {
                get { return _numberOfWarning; }

                set { _numberOfWarning = value; }
            }

            /// <summary>
            /// Number of messages logged
            /// </summary>
            public int NumberOfMessage
            {
                get { return _numberOfMessage; }

                set { _numberOfMessage = value; }
            }

            /// <summary>
            /// Number of custom events logged
            /// </summary>
            public int NumberOfCustom
            {
                get { return _numberOfCustom; }

                set { _numberOfCustom = value; }
            }

            /// <summary>
            /// Verbosity of the log
            /// </summary>
            public LoggerVerbosity Verbosity
            {
                get { return LoggerVerbosity.Normal; }

                set { }
            }

            /// <summary>
            /// Parameters for the logger
            /// </summary>
            public string Parameters { get { return
String.Empty; } set { } } /// <summary> /// Initialize the logger against the event source /// </summary> public void Initialize(IEventSource eventSource) { eventSource.ErrorRaised += new BuildErrorEventHandler(MyCustomErrorHandler); eventSource.WarningRaised += new BuildWarningEventHandler(MyCustomWarningHandler); eventSource.MessageRaised += new BuildMessageEventHandler(MyCustomMessageHandler); eventSource.CustomEventRaised += new CustomBuildEventHandler(MyCustomBuildHandler); eventSource.AnyEventRaised += new AnyEventHandler(EventSource_AnyEventRaised); } /// <summary> /// Do any cleanup and shutdown once the logger is done. /// </summary> public void Shutdown() { } /// <summary> /// Log if we have received any event. /// </summary> internal void EventSource_AnyEventRaised(object sender, BuildEventArgs e) { if (e.Message != null) { Console.Out.WriteLine("AnyEvent:" + e.Message.ToString()); } } /// <summary> /// Log and record the number of errors. /// </summary> internal void MyCustomErrorHandler(object s, BuildErrorEventArgs e) { _numberOfError++; _lastError = e; if (e.Message != null) { Console.Out.WriteLine("CustomError:" + e.Message.ToString()); } } /// <summary> /// Log and record the number of warnings. /// </summary> internal void MyCustomWarningHandler(object s, BuildWarningEventArgs e) { _numberOfWarning++; _lastWarning = e; if (e.Message != null) { Console.Out.WriteLine("CustomWarning:" + e.Message.ToString()); } } /// <summary> /// Log and record the number of messages. /// </summary> internal void MyCustomMessageHandler(object s, BuildMessageEventArgs e) { _numberOfMessage++; _lastMessage = e; if (e.Message != null) { Console.Out.WriteLine("CustomMessage:" + e.Message.ToString()); } } /// <summary> /// Log and record the number of custom build events. 
/// </summary> internal void MyCustomBuildHandler(object s, CustomBuildEventArgs e) { _numberOfCustom++; _lastCustom = e; if (e.Message != null) { Console.Out.WriteLine("CustomEvent:" + e.Message.ToString()); } } } /// <summary> /// Mock this class so that we can determine if build results are being cloned or if the live copies are being returned to the callers of the msbuild callback. /// </summary> internal class MockIRequestBuilderCallback : IRequestBuilderCallback, IRequestBuilder { /// <summary> /// BuildResults to return from the BuildProjects method. /// </summary> private BuildResult[] _buildResultsToReturn; /// <summary> /// Constructor which takes an array of build results to return from the BuildProjects method when it is called. /// </summary> internal MockIRequestBuilderCallback(BuildResult[] buildResultsToReturn) { _buildResultsToReturn = buildResultsToReturn; OnNewBuildRequests += new NewBuildRequestsDelegate(MockIRequestBuilderCallback_OnNewBuildRequests); OnBuildRequestCompleted += new BuildRequestCompletedDelegate(MockIRequestBuilderCallback_OnBuildRequestCompleted); OnBuildRequestBlocked += new BuildRequestBlockedDelegate(MockIRequestBuilderCallback_OnBuildRequestBlocked); } #pragma warning disable 0067 // not used /// <summary> /// Not Implemented /// </summary> public event NewBuildRequestsDelegate OnNewBuildRequests; /// <summary> /// Not Implemented /// </summary> public event BuildRequestCompletedDelegate OnBuildRequestCompleted; /// <summary> /// Not Implemented /// </summary> public event BuildRequestBlockedDelegate OnBuildRequestBlocked; #pragma warning restore /// <summary> /// BuildResults to return from the BuildProjects method. /// </summary> public BuildResult[] BuildResultsToReturn { get { return _buildResultsToReturn; } set { _buildResultsToReturn = value; } } /// <summary> /// Mock of the BuildProjects method on the callback. 
/// </summary> public Task<BuildResult[]> BuildProjects(string[] projectFiles, PropertyDictionary<ProjectPropertyInstance>[] properties, string[] toolsVersions, string[] targets, bool waitForResults, bool skipNonexistentTargets) { return Task<BuildResult[]>.FromResult(_buildResultsToReturn); } /// <summary> /// Mock of Yield /// </summary> public void Yield() { } /// <summary> /// Mock of Reacquire /// </summary> public void Reacquire() { } /// <summary> /// Mock /// </summary> public void EnterMSBuildCallbackState() { } /// <summary> /// Mock /// </summary> public void ExitMSBuildCallbackState() { } /// <summary> /// Mock of the Block on target in progress. /// </summary> public Task BlockOnTargetInProgress(int blockingRequestId, string blockingTarget, BuildResult partialBuildResult) { throw new NotImplementedException(); } /// <summary> /// Not Implemented /// </summary> public void BuildRequest(NodeLoggingContext nodeLoggingContext, BuildRequestEntry entry) { throw new NotImplementedException(); } /// <summary> /// Not Implemented /// </summary> public void ContinueRequest() { throw new NotImplementedException(); } /// <summary> /// Not Implemented /// </summary> public void CancelRequest() { throw new NotImplementedException(); } /// <summary> /// Not Implemented /// </summary> public void BeginCancel() { throw new NotImplementedException(); } /// <summary> /// Not Implemented /// </summary> public void WaitForCancelCompletion() { throw new NotImplementedException(); } /// <summary> /// Not Implemented /// </summary> private void MockIRequestBuilderCallback_OnBuildRequestBlocked(BuildRequestEntry sourceEntry, int blockingGlobalRequestId, string blockingTarget, IBuildResults partialBuildResult = null) { throw new NotImplementedException(); } /// <summary> /// Not Implemented /// </summary> private void MockIRequestBuilderCallback_OnBuildRequestCompleted(BuildRequestEntry completedEntry) { throw new NotImplementedException(); } /// <summary> /// Not Implemented 
/// </summary> private void MockIRequestBuilderCallback_OnNewBuildRequests(BuildRequestEntry sourceEntry, FullyQualifiedBuildRequest[] requests) { throw new NotImplementedException(); } } #endregion } }
// tablepaneltests.cs // // Copyright (c) 2013 Brent Knowles (http://www.brentknowles.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
//
// Review documentation at http://www.yourothermind.com for updated implementation notes, license updates
// or other general information.
//
// Author information available at http://www.brentknowles.com or http://www.amazon.com/Brent-Knowles/e/B0035WW7OW
// Full source code: https://github.com/BrentKnowles/YourOtherMind
//###
using System;
using NUnit.Framework;
using appframe;
using System.Collections.Generic;
using CoreUtilities.Tables;
using System.Windows.Forms;
using Layout;
using System.Data;
using System.Xml;
using System.Threading;

namespace Testing
{
    /// <summary>
    /// NUnit tests for the table panel note type: list import, clipboard copy and row insertion.
    /// Each test builds a real (hidden) WinForms Form because the DataGrid only
    /// initializes once it is parented to a shown form.
    /// </summary>
    [TestFixture]
    public class tablepaneltests
    {
        public tablepaneltests ()
        {
        }

        /// <summary>
        /// Importing rows whose comma-separated field count matches the table's
        /// column count should split each row into individual cells.
        /// </summary>
        [Test]
        public void ImportListTest_MatchingColumns()
        {
            _TestSingleTon.Instance._SetupForLayoutPanelTests();
            Form form = new Form();
            FAKE_LayoutPanel panel = new FAKE_LayoutPanel (CoreUtilities.Constants.BLANK, false);
            form.Controls.Add (panel); // needed else DataGrid does not initialize
            form.Show ();
            form.Visible = false;
            //NOTE: For now remember that this ADDS 1 Extra notes
            panel.NewLayout ("mynewpanel", true, null);
            NoteDataXML_Table test = new NoteDataXML_Table (425, 380);
            test.GuidForNote = "thisguid1";
            test.Caption = "note1";
            panel.SaveLayout ();

            // Four fields per row — assumed to match the default column count of the table note.
            string[] newData = new string[3]{"1,col2,col3,col4","2,col2,col3,col4","3,col2,col3,col4"};
            TableWrapper.ImportList(newData, (DataTable)test.dataSource);
            panel.SaveLayout();
            List<string> values = test.GetValuesForColumn(1, "*");
            Assert.AreEqual (3, values.Count);
            Assert.AreEqual ("col2", values[0]);
            // Number of 1,2,3,4 must match number of columns
            //
            // 1,fish,2
            // 2,snake,3
            // 3,lizard,4
            form.Dispose ();
        }

        /// <summary>
        /// When the field count does NOT match the column count, the import falls
        /// back to storing each whole line in a single cell (no splitting).
        /// </summary>
        [Test]
        public void ImportListTest_UnequalColumns()
        {
            _TestSingleTon.Instance._SetupForLayoutPanelTests();
            Form form = new Form();
            FAKE_LayoutPanel panel = new FAKE_LayoutPanel (CoreUtilities.Constants.BLANK, false);
            form.Controls.Add (panel); // needed else DataGrid does not initialize
            form.Show ();
            form.Visible = false;
            //NOTE: For now remember that this ADDS 1 Extra notes
            panel.NewLayout ("mynewpanel", true, null);
            NoteDataXML_Table test = new NoteDataXML_Table (425, 380);
            test.GuidForNote = "thisguid1";
            test.Caption = "note1";
            panel.SaveLayout ();

            // Only three fields per row — deliberately fewer than the table's columns.
            string[] newData = new string[3]{"1,col2,col3","2,col2,col3","3,col2,col3"};
            TableWrapper.ImportList(newData, (DataTable)test.dataSource);
            panel.SaveLayout();
            List<string> values = test.GetValuesForColumn(1, "*");
            Assert.AreEqual (3, values.Count);
            Assert.AreEqual ("1,col2,col3", values[0]);
            form.Dispose ();
        }

        /// <summary>
        /// Copying the table should place a textual rendering on the clipboard;
        /// asserts on the exact expected length of that text.
        /// Needs an STA thread because it touches the Windows clipboard.
        /// </summary>
        [Test]
        [RequiresSTA]
        public void CopyToClipboard()
        {
            // Table
            // Roll Result NextTable Modifier ff seer eee
            // z. This is a test. How to do al inefeed,,,length,,,
            // beer.This is the story that never was,e,,,bb,,
            _TestSingleTon.Instance._SetupForLayoutPanelTests();
            Form form = new Form();
            FAKE_LayoutPanel panel = new FAKE_LayoutPanel (CoreUtilities.Constants.BLANK, false);
            form.Controls.Add (panel); // needed else DataGrid does not initialize
            form.Show ();
            form.Visible = false;
            //NOTE: For now remember that this ADDS 1 Extra notes
            panel.NewLayout ("mynewpanel", true, null);
            FAKE_NoteDataXML_Table test = new FAKE_NoteDataXML_Table (33,0009);
            test.GuidForNote = "thisguid1";
            test.Caption = "note1";
            test.CreateParent(panel);
            test.AddRow(new object[3] {"1", "value1", "testA"});
            test.AddRow(new object[3] {"2", "value2", "testB"});
            test.AddRow(new object[3] {"3", "value3", "testC"});
            panel.SaveLayout ();

            test.Copy ();
            string result = Clipboard.GetText();
            // 91 is the length of the tab/newline-delimited dump of the three rows above.
            Assert.AreEqual(91, result.Length);
            form.Dispose ();
        }

        /// <summary>
        /// InsertRow on the table panel should grow the row count by exactly one per call.
        /// </summary>
        [Test]
        public void InsertRowTest()
        {
            _TestSingleTon.Instance._SetupForLayoutPanelTests();
            Form form = new Form();
            FAKE_LayoutPanel panel = new FAKE_LayoutPanel (CoreUtilities.Constants.BLANK, false);
            form.Controls.Add (panel); // needed else DataGrid does not initialize
            form.Show ();
            form.Visible = false;
            //NOTE: For now remember that this ADDS 1 Extra notes
            panel.NewLayout ("mynewpanel", true, null);
            FAKE_NoteDataXML_Table test = new FAKE_NoteDataXML_Table (33,0009);
            test.GuidForNote = "thisguid1";
            test.Caption = "note1";
            test.CreateParent(panel);
            test.AddRow(new object[3] {"1", "value1", "testA"});
            test.AddRow(new object[3] {"2", "value2", "testB"});
            test.AddRow(new object[3] {"3", "value3", "testC"});
            panel.SaveLayout ();

            Assert.AreEqual (3, test.RowCount());
            test.GetTablePanel().InsertRow();
            Assert.AreEqual (4, test.RowCount());
            test.GetTablePanel().InsertRow();
            test.GetTablePanel().InsertRow();
            Assert.AreEqual (6, test.RowCount());
            form.Dispose ();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Media;

namespace CheckCuboid
{
    /// <summary>
    /// This is the main type for your game.  Renders a translucent cuboid and a small
    /// sphere ("point"), lets the user move/rotate/scale either one, and continuously
    /// reports whether the point lies inside the oriented cuboid.
    /// </summary>
    public class Game : Microsoft.Xna.Framework.Game
    {
        GraphicsDeviceManager graphics;
        SpriteBatch spriteBatch;
        InputManager _obj_input;
        Camera _obj_camera;
        GraphicsDevice _obj_graphics;
        SpriteFont _obj_font;
        bool isInside = false;   // result of the point-in-cuboid test, recomputed every Update
        bool moveCube = false;   // when true the movement keys drive the cube, otherwise the point

        //Cube
        Model mCubeModel = null;
        Vector3 mCubePosition = Vector3.Zero;
        Vector3 mCubeScale = Vector3.One;          // also serves as the cuboid's edge lengths
        Matrix[] mCubeTransforms;
        Vector3 mCubeRotation = Vector3.Zero;      // Euler angles (radians)
        Matrix mCubeRotationMatrix = Matrix.Identity;
        Matrix mCubeWorld = Matrix.Identity;

        //Point
        Model mPointModel = null;
        Vector3 mPointPosition = Vector3.Zero;
        Vector3 mPointScale = Vector3.One;
        Matrix[] mPointTransforms;
        Vector3 mPointRotation = Vector3.Zero;
        Matrix mPointRotationMatrix = Matrix.Identity;
        Matrix mPointWorld = Matrix.Identity;

        #region "Check Cuboid Code"
        /// <summary>
        /// Tests whether a world-space position lies strictly inside the oriented cuboid.
        /// The point is first expressed relative to the cube's center, then rotated into
        /// the cube's local axes, where the test reduces to three axis-aligned half-extent
        /// comparisons against mCubeScale / 2.
        /// </summary>
        /// <param name="_pointPosition">World-space position to test.</param>
        /// <returns>true if the position is inside the cuboid on all three axes.</returns>
        public bool IsVectorInCube(Vector3 _pointPosition)
        {
            Vector3 relativePos = _pointPosition - this.mCubePosition;
            Matrix reverseMat = Matrix.Invert(this.mCubeRotationMatrix);

            // Manual transposed multiply of relativePos by reverseMat, one column per component.
            // BUG FIX: the Y component previously read reverseMat.M23 for the Z term; the column
            // pattern (M11/M21/M31 for X, M13/M23/M33 for Z) requires M32 there.  The typo made
            // the inside test wrong whenever the cube was rotated.
            Vector3 rotatedPos = new Vector3(
                (reverseMat.M11 * relativePos.X) + (reverseMat.M21 * relativePos.Y) + (reverseMat.M31 * relativePos.Z),
                (reverseMat.M12 * relativePos.X) + (reverseMat.M22 * relativePos.Y) + (reverseMat.M32 * relativePos.Z),
                (reverseMat.M13 * relativePos.X) + (reverseMat.M23 * relativePos.Y) + (reverseMat.M33 * relativePos.Z));

            // Compare against half-extents on each local axis; on-the-surface counts as outside.
            if (Math.Abs(rotatedPos.X) >= this.mCubeScale.X / 2)
                return false;
            if (Math.Abs(rotatedPos.Y) >= this.mCubeScale.Y / 2)
                return false;
            if (Math.Abs(rotatedPos.Z) >= this.mCubeScale.Z / 2)
                return false;

            return true;
        }
        #endregion

        public Game()
        {
            graphics = new GraphicsDeviceManager(this);
            Content.RootDirectory = "Content";
            this.Window.Title = "Check Cuboid - Daniel Mcassey (495652)";
            this.graphics.PreferredBackBufferHeight = 720;
            this.graphics.PreferredBackBufferWidth = 1280;
            this.graphics.PreferMultiSampling = true;
        }

        /// <summary>
        /// Allows the game to perform any initialization it needs to before starting to run.
        /// This is where it can query for any required services and load any non-graphic
        /// related content.  Registers all keyboard bindings and creates the camera.
        /// Calling base.Initialize will enumerate through any components
        /// and initialize them as well.
        /// </summary>
        protected override void Initialize()
        {
            this._obj_graphics = this.graphics.GraphicsDevice;
            this._obj_input = new InputManager();

            // Movement keys (drive either the point or the cube, see Update)
            this._obj_input.AddKeyboardInput("KEY_LEFT", Keys.Left, false);
            this._obj_input.AddKeyboardInput("KEY_RIGHT", Keys.Right, false);
            this._obj_input.AddKeyboardInput("KEY_UP", Keys.Up, false);
            this._obj_input.AddKeyboardInput("KEY_DOWN", Keys.Down, false);
            this._obj_input.AddKeyboardInput("KEY_FORWARD", Keys.OemPlus, false);
            this._obj_input.AddKeyboardInput("KEY_BACK", Keys.OemMinus, false);

            // Cube manipulation
            this._obj_input.AddKeyboardInput("CUBE_SCALE_UP", Keys.U, false);
            this._obj_input.AddKeyboardInput("CUBE_SCALE_DOWN", Keys.O, false);
            this._obj_input.AddKeyboardInput("CUBE_ROT_LEFT", Keys.J, false);
            this._obj_input.AddKeyboardInput("CUBE_ROT_RIGHT", Keys.L, false);
            this._obj_input.AddKeyboardInput("CUBE_ROT_UP", Keys.I, false);
            this._obj_input.AddKeyboardInput("CUBE_ROT_DOWN", Keys.K, false);
            this._obj_input.AddKeyboardInput("CUBE_RESET", Keys.F1, true);
            this._obj_input.AddKeyboardInput("CUBE_TOGL_MOVE", Keys.F2, true);

            // Camera
            this._obj_input.AddKeyboardInput("CAMERA_LEFT", Keys.A, false);
            this._obj_input.AddKeyboardInput("CAMERA_RIGHT", Keys.D, false);
            this._obj_input.AddKeyboardInput("CAMERA_UP", Keys.W, false);
            this._obj_input.AddKeyboardInput("CAMERA_DOWN", Keys.S, false);
            this._obj_input.AddKeyboardInput("CAMERA_RESET", Keys.Space, true);
            this._obj_input.AddKeyboardInput("CAMERA_ZOOM_IN", Keys.E, false);
            this._obj_input.AddKeyboardInput("CAMERA_ZOOM_OUT", Keys.Q, false);
            // FIX: "CAMERA_RESET" was registered a second time here with identical
            // arguments; the duplicate registration has been removed.

            this._obj_camera = new Camera(ref this._obj_graphics, ref this._obj_input);

            base.Initialize();
        }

        /// <summary>
        /// LoadContent will be called once per game and is the place to load
        /// all of your content: the cuboid and sphere models and the UI font.
        /// </summary>
        protected override void LoadContent()
        {
            spriteBatch = new SpriteBatch(GraphicsDevice);

            this.mCubeModel = this.Content.Load<Model>("Models/cuboid");
            this.mCubeTransforms = new Matrix[this.mCubeModel.Bones.Count];
            this.mCubeModel.CopyAbsoluteBoneTransformsTo(this.mCubeTransforms);
            // Initial cuboid: 1.5 x 1 x 1, scaled up 100x to world units.
            this.mCubeScale = new Vector3(1.5f, 1f, 1f);
            this.mCubeScale *= 100;

            this.mPointModel = this.Content.Load<Model>("Models/sphere");
            this.mPointTransforms = new Matrix[this.mPointModel.Bones.Count];
            this.mPointModel.CopyAbsoluteBoneTransformsTo(this.mPointTransforms);
            this.mPointScale *= 1;

            this._obj_font = this.Content.Load<SpriteFont>("Fonts/default");
        }

        /// <summary>
        /// UnloadContent will be called once per game and is the place to unload
        /// all content.
        /// </summary>
        protected override void UnloadContent()
        {
            // TODO: Unload any non ContentManager content here
        }

        /// <summary>
        /// Allows the game to run logic such as updating the world,
        /// checking for collisions, gathering input, and playing audio.
        /// Handles all key bindings, rebuilds the world matrices and re-runs
        /// the point-in-cuboid test.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        protected override void Update(GameTime gameTime)
        {
            float timeDiff = (float)gameTime.ElapsedGameTime.TotalSeconds;
            this._obj_input.startUpdate();

            #region "Controls"
            if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed)
                this.Exit();

            float pointSpeed = 1f;
            if (this.moveCube)
            {
                // Movement keys drive the cube.
                if (this._obj_input.IsPressed("KEY_UP", PlayerIndex.One))
                    this.mCubePosition += new Vector3(0f, pointSpeed, 0f);
                if (this._obj_input.IsPressed("KEY_DOWN", PlayerIndex.One))
                    this.mCubePosition += new Vector3(0f, -pointSpeed, 0f);
                if (this._obj_input.IsPressed("KEY_LEFT", PlayerIndex.One))
                    this.mCubePosition += new Vector3(-pointSpeed, 0f, 0f);
                if (this._obj_input.IsPressed("KEY_RIGHT", PlayerIndex.One))
                    this.mCubePosition += new Vector3(pointSpeed, 0f, 0f);
                if (this._obj_input.IsPressed("KEY_FORWARD", PlayerIndex.One))
                    this.mCubePosition += new Vector3(0f, 0f, -pointSpeed);
                if (this._obj_input.IsPressed("KEY_BACK", PlayerIndex.One))
                    this.mCubePosition += new Vector3(0f, 0f, pointSpeed);
            }
            else
            {
                // Movement keys drive the point.
                if (this._obj_input.IsPressed("KEY_UP", PlayerIndex.One))
                    this.mPointPosition += new Vector3(0f, pointSpeed, 0f);
                if (this._obj_input.IsPressed("KEY_DOWN", PlayerIndex.One))
                    this.mPointPosition += new Vector3(0f, -pointSpeed, 0f);
                if (this._obj_input.IsPressed("KEY_LEFT", PlayerIndex.One))
                    this.mPointPosition += new Vector3(-pointSpeed, 0f, 0f);
                if (this._obj_input.IsPressed("KEY_RIGHT", PlayerIndex.One))
                    this.mPointPosition += new Vector3(pointSpeed, 0f, 0f);
                if (this._obj_input.IsPressed("KEY_FORWARD", PlayerIndex.One))
                    this.mPointPosition += new Vector3(0f, 0f, -pointSpeed);
                if (this._obj_input.IsPressed("KEY_BACK", PlayerIndex.One))
                    this.mPointPosition += new Vector3(0f, 0f, pointSpeed);
            }

            if (this._obj_input.IsPressed("CUBE_RESET", PlayerIndex.One))
            {
                // Restore the cube to its LoadContent defaults.
                this.mCubePosition = Vector3.Zero;
                this.mCubeScale = new Vector3(1.5f, 1f, 1f) * 100;
                this.mCubeRotation = Vector3.Zero;
            }

            if (this._obj_input.IsPressed("CUBE_TOGL_MOVE", PlayerIndex.One))
                this.moveCube = !this.moveCube;

            if (this._obj_input.IsPressed("CUBE_SCALE_UP", PlayerIndex.One))
                this.mCubeScale += new Vector3(1f, 1f, 1f);
            if (this._obj_input.IsPressed("CUBE_SCALE_DOWN", PlayerIndex.One))
                this.mCubeScale -= new Vector3(1f, 1f, 1f);

            if (this._obj_input.IsPressed("CUBE_ROT_LEFT", PlayerIndex.One))
                this.mCubeRotation += new Vector3(0f, -1f, 0f) * timeDiff;
            else if (this._obj_input.IsPressed("CUBE_ROT_RIGHT", PlayerIndex.One))
                this.mCubeRotation += new Vector3(0f, 1f, 0f) * timeDiff;

            if (this._obj_input.IsPressed("CUBE_ROT_UP", PlayerIndex.One))
                this.mCubeRotation += new Vector3(-1f, 0f, 0f) * timeDiff;
            else if (this._obj_input.IsPressed("CUBE_ROT_DOWN", PlayerIndex.One))
                this.mCubeRotation += new Vector3(1f, 0f, 0f) * timeDiff;
            #endregion

            // Rebuild world matrices: scale, then rotate (Y*Z*X order), then translate.
            this.mCubeRotationMatrix = (Matrix.CreateRotationY(this.mCubeRotation.Y) * Matrix.CreateRotationZ(this.mCubeRotation.Z) * Matrix.CreateRotationX(this.mCubeRotation.X));
            this.mCubeWorld = (Matrix.CreateScale(this.mCubeScale) * this.mCubeRotationMatrix * Matrix.CreateTranslation(this.mCubePosition));
            this.mPointRotationMatrix = (Matrix.CreateRotationY(this.mPointRotation.Y) * Matrix.CreateRotationZ(this.mPointRotation.Z) * Matrix.CreateRotationX(this.mPointRotation.X));
            this.mPointWorld = (Matrix.CreateScale(this.mPointScale) * this.mPointRotationMatrix * Matrix.CreateTranslation(this.mPointPosition));

            this._obj_camera.HandleInput(PlayerIndex.One);
            this._obj_camera.Update(timeDiff, Matrix.Identity);

            this.isInside = this.IsVectorInCube(this.mPointPosition);

            this._obj_input.endUpdate();
            base.Update(gameTime);
        }

        /// <summary>
        /// This is called when the game should draw itself: the status text, the
        /// opaque point sphere and the translucent cuboid.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        protected override void Draw(GameTime gameTime)
        {
            GraphicsDevice.Clear(Color.CornflowerBlue);

            string statusInsideCube = "Inside Cube: ";
            string statusPointPos = "Point Position [x,y,z]: " + this.mPointPosition.ToString();
            string statusCubePos = "Cube Position [x,y,z]: " + this.mCubePosition.ToString();
            if (this.isInside)
                statusInsideCube += "YES";
            else
                statusInsideCube += "NO";

            spriteBatch.Begin();
            spriteBatch.DrawString(this._obj_font, statusInsideCube, new Vector2(20, 10), Color.Black, 0f, Vector2.Zero, 0.35f, SpriteEffects.None, 0f);
            spriteBatch.DrawString(this._obj_font, statusPointPos, new Vector2(20, 30), Color.Black, 0f, Vector2.Zero, 0.35f, SpriteEffects.None, 0f);
            spriteBatch.DrawString(this._obj_font, statusCubePos, new Vector2(20, 50), Color.Black, 0f, Vector2.Zero, 0.35f, SpriteEffects.None, 0f);
            spriteBatch.End();

            // Additive blending so the cuboid is see-through and the point stays visible inside it.
            this._obj_graphics.BlendState = BlendState.Additive;

            foreach (ModelMesh localMesh in this.mPointModel.Meshes)
            {
                Matrix localWorld = this.mPointTransforms[localMesh.ParentBone.Index] * this.mPointWorld;
                Matrix localWorldInverseTranspose = Matrix.Transpose(Matrix.Invert(localWorld));
                foreach (BasicEffect localEffect in localMesh.Effects)
                {
                    localEffect.EnableDefaultLighting();
                    localEffect.World = localWorld;
                    localEffect.View = this._obj_camera.View;
                    localEffect.Projection = this._obj_camera.Projection;
                    localEffect.DiffuseColor = Vector3.Normalize(Color.Red.ToVector3());
                    localEffect.AmbientLightColor = Vector3.Normalize(Color.White.ToVector3());
                    localEffect.SpecularPower = 0f;
                    // Tint the specular when the point is inside as extra visual feedback.
                    if (this.isInside)
                        localEffect.SpecularColor = Vector3.Normalize(Color.Red.ToVector3());
                    else
                        localEffect.SpecularColor = Vector3.Normalize(Color.White.ToVector3());
                    localEffect.FogColor = Vector3.Normalize(Color.White.ToVector3());
                    localEffect.Alpha = 1f;
                }
                localMesh.Draw();
            }

            foreach (ModelMesh localMesh in this.mCubeModel.Meshes)
            {
                Matrix localWorld = this.mCubeTransforms[localMesh.ParentBone.Index] * this.mCubeWorld;
                Matrix localWorldInverseTranspose = Matrix.Transpose(Matrix.Invert(localWorld));
                foreach (BasicEffect localEffect in localMesh.Effects)
                {
                    localEffect.EnableDefaultLighting();
                    localEffect.World = localWorld;
                    localEffect.View = this._obj_camera.View;
                    localEffect.Projection = this._obj_camera.Projection;
                    localEffect.DiffuseColor = Vector3.Normalize(Color.DarkGray.ToVector3());
                    localEffect.Alpha = 0.55f;
                }
                localMesh.Draw();
            }

            base.Draw(gameTime);
        }
    }
}
/* * Copyright (c) InWorldz Halcyon Developers * Copyright (c) Contributors, http://opensimulator.org/ * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.IO; using System.Net; using System.Reflection; using System.Text; using System.Threading; using System.Web; using log4net; namespace OpenSim.Framework.Communications { /// <summary> /// Implementation of a generic REST client /// </summary> /// <remarks> /// This class is a generic implementation of a REST (Representational State Transfer) web service. 
/// This class is designed to execute both synchronously and asynchronously.
///
/// Internally the implementation works as a two stage asynchronous web-client.
/// When the request is initiated, RestClient will query asynchronously for a web-response,
/// sleeping until the initial response is returned by the server. Once the initial response is retrieved
/// the second stage of asynchronous requests will be triggered, in an attempt to read the response
/// object into a memorystream as a sequence of asynchronous reads.
///
/// The asynchronicity of RestClient is designed to move as much processing into the back-ground, allowing
/// other threads to execute, while it waits for a response from the web-service. RestClient itself can be
/// invoked by the caller in either synchronous mode or asynchronous modes.
/// </remarks>
public class RestClient : IDisposable
{
    private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    #region member variables

    /// <summary>
    /// The base Uri of the web-service e.g. http://www.google.com
    /// </summary>
    private string _url;

    /// <summary>
    /// Path elements of the query
    /// </summary>
    private List<string> _pathElements = new List<string>();

    /// <summary>
    /// Parameter elements of the query, e.g. min=34
    /// </summary>
    private Dictionary<string, string> _parameterElements = new Dictionary<string, string>();

    /// <summary>
    /// Request method. E.g. GET, POST, PUT or DELETE
    /// </summary>
    private string _method;

    /// <summary>
    /// Temporary buffer used to store bytes temporarily as they come in from the server
    /// </summary>
    private byte[] _readbuf;

    /// <summary>
    /// MemoryStream representing the resulting resource
    /// </summary>
    private Stream _resource;

    /// <summary>
    /// WebRequest object, held as a member variable
    /// </summary>
    private HttpWebRequest _request;

    /// <summary>
    /// WebResponse object, held as a member variable, so we can close it
    /// </summary>
    private HttpWebResponse _response;

    /// <summary>
    /// This flag will help block the main synchronous method, in case we run in synchronous mode.
    /// NOTE: this event is static, i.e. shared by ALL RestClient instances; it must therefore
    /// never be disposed by any single instance (see Dispose()).
    /// </summary>
    public static ManualResetEvent _allDone = new ManualResetEvent(false);

    /// <summary>
    /// Default Buffer size of a block requested from the web-server
    /// </summary>
    private const int BufferSize = 4096; // Read blocks of 4 KB.

    /// <summary>
    /// if an exception occurs during async processing, we need to save it, so it can be
    /// rethrown on the primary thread
    /// </summary>
    private Exception _asyncException;

    #endregion member variables

    #region constructors

    /// <summary>
    /// Instantiate a new RestClient
    /// </summary>
    /// <param name="url">Web-service to query, e.g. http://osgrid.org:8003</param>
    public RestClient(string url)
    {
        _url = url;
        _readbuf = new byte[BufferSize];
        _resource = new MemoryStream();
        _request = null;
        _response = null;
        _lock = new object();
    }

    // Guards the synchronous Request() path so concurrent callers do not interleave
    // use of the shared _request/_response/_resource members.
    private object _lock;

    #endregion constructors

    /// <summary>
    /// Add a path element to the query, e.g. assets
    /// </summary>
    /// <param name="element">path entry</param>
    public void AddResourcePath(string element)
    {
        // Strip one trailing slash, if present; buildUri() inserts separators itself.
        if (isSlashed(element))
            _pathElements.Add(element.Substring(0, element.Length - 1));
        else
            _pathElements.Add(element);
    }

    /// <summary>
    /// Add a query parameter to the Url
    /// </summary>
    /// <param name="name">Name of the parameter, e.g. min</param>
    /// <param name="value">Value of the parameter, e.g. 42</param>
    public void AddQueryParameter(string name, string value)
    {
        try
        {
            _parameterElements.Add(HttpUtility.UrlEncode(name), HttpUtility.UrlEncode(value));
        }
        catch (ArgumentException)
        {
            m_log.Error("[REST]: Query parameter " + name + " is already added.");
        }
        catch (Exception e)
        {
            // BUGFIX: the old message contained a "{0}" placeholder, but log4net's
            // Error(object, Exception) overload performs no string formatting, so the
            // exception was never rendered. Pass it as the exception argument instead.
            m_log.Error("[REST]: An exception was raised adding query parameter to dictionary.", e);
        }
    }

    /// <summary>
    /// Add a value-less query parameter to the Url
    /// </summary>
    /// <param name="name">Name of the parameter, e.g. min</param>
    public void AddQueryParameter(string name)
    {
        try
        {
            _parameterElements.Add(HttpUtility.UrlEncode(name), null);
        }
        catch (ArgumentException)
        {
            m_log.Error("[REST]: Query parameter " + name + " is already added.");
        }
        catch (Exception e)
        {
            // BUGFIX: see AddQueryParameter(string, string) above.
            m_log.Error("[REST]: An exception was raised adding query parameter to dictionary.", e);
        }
    }

    /// <summary>
    /// Web-Request method, e.g. GET, PUT, POST, DELETE
    /// </summary>
    public string RequestMethod
    {
        get { return _method; }
        set { _method = value; }
    }

    /// <summary>
    /// True if string contains a trailing slash '/'
    /// </summary>
    /// <param name="s">string to be examined</param>
    /// <returns>true if slash is present</returns>
    private static bool isSlashed(string s)
    {
        // BUGFIX: the original Substring(s.Length - 1, 1) threw
        // ArgumentOutOfRangeException for an empty string (and NRE for null).
        return !String.IsNullOrEmpty(s) && s[s.Length - 1] == '/';
    }

    /// <summary>
    /// Build a Uri based on the initial Url, path elements and parameters
    /// </summary>
    /// <returns>fully constructed Uri</returns>
    private Uri buildUri()
    {
        StringBuilder sb = new StringBuilder();
        sb.Append(_url);

        foreach (string e in _pathElements)
        {
            sb.Append("/");
            sb.Append(e);
        }

        bool firstElement = true;
        foreach (KeyValuePair<string, string> kv in _parameterElements)
        {
            if (firstElement)
            {
                sb.Append("?");
                firstElement = false;
            }
            else
                sb.Append("&");

            sb.Append(kv.Key);
            if (!string.IsNullOrEmpty(kv.Value))
            {
                sb.Append("=");
                sb.Append(kv.Value);
            }
        }

        return new Uri(sb.ToString());
    }

    #region Async communications with server

    /// <summary>
    /// Async method, invoked when a block of data has been received from the service
    /// </summary>
    /// <param name="ar"></param>
    private void StreamIsReadyDelegate(IAsyncResult ar)
    {
        try
        {
            Stream s = (Stream) ar.AsyncState;
            int read = s.EndRead(ar);

            if (read > 0)
            {
                _resource.Write(_readbuf, 0, read);
                // Chain the next asynchronous read until the stream is drained.
                s.BeginRead(_readbuf, 0, BufferSize, new AsyncCallback(StreamIsReadyDelegate), s);
            }
            else
            {
                s.Close();
                _allDone.Set();
            }
        }
        catch (Exception e)
        {
            // Record the failure so it can be rethrown on the primary thread.
            _allDone.Set();
            _asyncException = e;
        }
    }

    #endregion Async communications with server

    /// <summary>
    /// Perform a synchronous request
    /// </summary>
    /// <returns>
    /// Stream positioned at the beginning of the retrieved resource, or null if the
    /// request failed (a 404 is logged as a warning, anything else as an error).
    /// </returns>
    public Stream Request()
    {
        lock (_lock)
        {
            _request = (HttpWebRequest) WebRequest.Create(buildUri());
            _request.KeepAlive = false;
            _request.ContentType = "application/xml";
            _request.Timeout = 200000;
            _request.Method = RequestMethod;
            _asyncException = null;

            try
            {
                _response = (HttpWebResponse) _request.GetResponse();
            }
            catch (WebException e)
            {
                HttpWebResponse errorResponse = e.Response as HttpWebResponse;
                if (null != errorResponse && HttpStatusCode.NotFound == errorResponse.StatusCode)
                {
                    m_log.Warn("[ASSET] Asset not found (404)");
                }
                else
                {
                    m_log.Error("[ASSET] Error fetching asset from asset server");
                    m_log.Debug(e.ToString());
                }
                return null;
            }

            // Drain the response body into the resource stream.
            Stream src = _response.GetResponseStream();
            int length = src.Read(_readbuf, 0, BufferSize);
            while (length > 0)
            {
                _resource.Write(_readbuf, 0, length);
                length = src.Read(_readbuf, 0, BufferSize);
            }
            // BUGFIX: close the response stream once fully consumed instead of leaking it.
            src.Close();

            if (_response != null)
                _response.Close();

            if (_asyncException != null)
                throw _asyncException;

            if (_resource != null)
            {
                _resource.Flush();
                _resource.Seek(0, SeekOrigin.Begin);
            }

            return _resource;
        }
    }

    /// <summary>
    /// Perform a synchronous request, uploading the content of <paramref name="src"/>
    /// as the request body.
    /// </summary>
    /// <param name="src">Stream to upload; it is rewound and read to the end.</param>
    /// <returns>Always null; the response body is not retrieved.</returns>
    public Stream Request(Stream src)
    {
        _request = (HttpWebRequest) WebRequest.Create(buildUri());
        _request.KeepAlive = false;
        _request.ContentType = "application/xml";
        _request.Timeout = 900000;
        _request.Method = RequestMethod;
        _asyncException = null;
        _request.ContentLength = src.Length;

        m_log.InfoFormat("[REST]: Request Length {0}", _request.ContentLength);
        m_log.InfoFormat("[REST]: Sending Web Request {0}", buildUri());

        src.Seek(0, SeekOrigin.Begin);
        m_log.Info("[REST]: Seek is ok");

        Stream dst = _request.GetRequestStream();
        m_log.Info("[REST]: GetRequestStream is ok");

        byte[] buf = new byte[1024];
        int length = src.Read(buf, 0, 1024);
        m_log.Info("[REST]: First Read is ok");
        while (length > 0)
        {
            dst.Write(buf, 0, length);
            length = src.Read(buf, 0, 1024);
        }

        // BUGFIX: the request stream must be closed so that the body is fully flushed
        // to the server before the response is requested; the original left it open.
        dst.Close();

        _response = (HttpWebResponse) _request.GetResponse();

        // BUGFIX: the caller never sees the response (this method always returns null),
        // so close it here rather than leaking the connection.
        _response.Close();
        _response = null;

        return null;
    }

    #region Async Invocation

    /// <summary>
    /// Begin an asynchronous request; the actual work is queued to the thread pool.
    /// </summary>
    public IAsyncResult BeginRequest(AsyncCallback callback, object state)
    {
        // In case we are invoked asynchronously this object will keep track of the state.
        AsyncResult<Stream> ar = new AsyncResult<Stream>(callback, state);
        ThreadPool.QueueUserWorkItem(RequestHelper, ar);
        return ar;
    }

    /// <summary>
    /// Complete an asynchronous request started with BeginRequest.
    /// </summary>
    public Stream EndRequest(IAsyncResult asyncResult)
    {
        AsyncResult<Stream> ar = (AsyncResult<Stream>) asyncResult;

        // Wait for operation to complete, then return result or throw exception.
        return ar.EndInvoke();
    }

    /// <summary>
    /// Thread-pool entry point: runs the synchronous Request() and reports its
    /// outcome through the AsyncResult.
    /// </summary>
    private void RequestHelper(Object asyncResult)
    {
        AsyncResult<Stream> ar = (AsyncResult<Stream>) asyncResult;

        try
        {
            // Perform the operation; if successful set the result.
            Stream s = Request();
            ar.SetAsCompleted(s, false);
        }
        catch (Exception e)
        {
            // If operation fails, set the exception.
            ar.HandleException(e, false);
        }
    }

    #endregion Async Invocation

    public void Dispose()
    {
        // BUGFIX: the original disposed _allDone here. That event is static and shared
        // by every RestClient instance, so disposing it rendered all other (and any
        // subsequently created) instances unusable. Release only per-instance resources.
        if (_response != null)
        {
            _response.Close();
            _response = null;
        }
    }
}
}
/*
 *
 * (c) Copyright Ascensio System Limited 2010-2021
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;

using ASC.Common.Data.Sql.Expressions;
using ASC.Core;
using ASC.Files.Core;
using ASC.Web.Studio.Core;

namespace ASC.Files.Thirdparty.GoogleDrive
{
    // IFolderDao implementation backed by Google Drive. Folder look-ups go through the
    // GoogleDriveDaoBase helpers (GetDriveEntry/GetDriveEntries); portal bookkeeping
    // rows (tags, security, id mapping) live in the local database.
    internal class GoogleDriveFolderDao : GoogleDriveDaoBase, IFolderDao
    {
        public GoogleDriveFolderDao(GoogleDriveDaoSelector.GoogleDriveInfo googleDriveInfo, GoogleDriveDaoSelector googleDriveDaoSelector)
            : base(googleDriveInfo, googleDriveDaoSelector)
        {
        }

        // Fetch a single folder by its (mapped) id.
        public Folder GetFolder(object folderId)
        {
            return ToFolder(GetDriveEntry(folderId));
        }

        // Find a child folder of parentId by title (case-insensitive).
        // NOTE(review): when no child matches, FirstOrDefault yields null and
        // ToFolder(null) is invoked - presumably ToFolder tolerates null; confirm in base.
        public Folder GetFolder(string title, object parentId)
        {
            return ToFolder(GetDriveEntries(parentId, true)
                                .FirstOrDefault(folder => folder.Name.Equals(title, StringComparison.InvariantCultureIgnoreCase)));
        }

        // The fileId argument is ignored; the provider root folder is always returned.
        public Folder GetRootFolderByFile(object fileId)
        {
            return GetRootFolder("");
        }

        // All immediate subfolders of parentId.
        public List<Folder> GetFolders(object parentId)
        {
            return GetDriveEntries(parentId, true).Select(ToFolder).ToList();
        }

        // Filtered/sorted subfolder listing. File-only filters can never match a
        // folder, so they short-circuit to an empty list. withSubfolders is not used here.
        public List<Folder> GetFolders(object parentId, OrderBy orderBy, FilterType filterType, bool subjectGroup, Guid subjectID, string searchText, bool withSubfolders = false)
        {
            if (filterType == FilterType.FilesOnly || filterType == FilterType.ByExtension
                || filterType == FilterType.DocumentsOnly || filterType == FilterType.ImagesOnly
                || filterType == FilterType.PresentationsOnly || filterType == FilterType.SpreadsheetsOnly
                || filterType == FilterType.ArchiveOnly || filterType == FilterType.MediaOnly)
                return new List<Folder>();

            var folders = GetFolders(parentId).AsEnumerable();

            //TODO:!!!
            // Filter by creator: either a single user, or membership of a group.
            if (subjectID != Guid.Empty)
            {
                folders = folders.Where(x => subjectGroup
                                                 ? CoreContext.UserManager.IsUserInGroup(x.CreateBy, subjectID)
                                                 : x.CreateBy == subjectID);
            }

            // Case-insensitive substring match on the folder title.
            if (!string.IsNullOrEmpty(searchText))
                folders = folders.Where(x => x.Title.IndexOf(searchText, StringComparison.OrdinalIgnoreCase) != -1);

            if (orderBy == null) orderBy = new OrderBy(SortedByType.DateAndTime, false);

            switch (orderBy.SortedBy)
            {
                case SortedByType.Author:
                    folders = orderBy.IsAsc ? folders.OrderBy(x => x.CreateBy) : folders.OrderByDescending(x => x.CreateBy);
                    break;
                case SortedByType.AZ:
                    folders = orderBy.IsAsc ? folders.OrderBy(x => x.Title) : folders.OrderByDescending(x => x.Title);
                    break;
                case SortedByType.DateAndTime:
                    folders = orderBy.IsAsc ? folders.OrderBy(x => x.ModifiedOn) : folders.OrderByDescending(x => x.ModifiedOn);
                    break;
                case SortedByType.DateAndTimeCreation:
                    folders = orderBy.IsAsc ? folders.OrderBy(x => x.CreateOn) : folders.OrderByDescending(x => x.CreateOn);
                    break;
                default:
                    // Fallback sort: by title, same as SortedByType.AZ.
                    folders = orderBy.IsAsc ? folders.OrderBy(x => x.Title) : folders.OrderByDescending(x => x.Title);
                    break;
            }

            return folders.ToList();
        }

        // Resolve a specific set of folder ids, optionally filtered by creator and title.
        // searchSubfolders and checkShare are accepted for interface compatibility but unused.
        public List<Folder> GetFolders(IEnumerable<object> folderIds, FilterType filterType = FilterType.None, bool subjectGroup = false, Guid? subjectID = null, string searchText = "", bool searchSubfolders = false, bool checkShare = true)
        {
            if (filterType == FilterType.FilesOnly || filterType == FilterType.ByExtension
                || filterType == FilterType.DocumentsOnly || filterType == FilterType.ImagesOnly
                || filterType == FilterType.PresentationsOnly || filterType == FilterType.SpreadsheetsOnly
                || filterType == FilterType.ArchiveOnly || filterType == FilterType.MediaOnly)
                return new List<Folder>();

            var folders = folderIds.Select(GetFolder);

            if (subjectID.HasValue && subjectID != Guid.Empty)
            {
                folders = folders.Where(x => subjectGroup
                                                 ? CoreContext.UserManager.IsUserInGroup(x.CreateBy, subjectID.Value)
                                                 : x.CreateBy == subjectID);
            }

            if (!string.IsNullOrEmpty(searchText))
                folders = folders.Where(x => x.Title.IndexOf(searchText, StringComparison.OrdinalIgnoreCase) != -1);

            return folders.ToList();
        }

        // Walk up the parent chain and return the path root-first.
        // Hitting an ErrorDriveEntry terminates the walk early.
        public List<Folder> GetParentFolders(object folderId)
        {
            var path = new List<Folder>();

            while (folderId != null)
            {
                var driveFolder = GetDriveEntry(folderId);

                if (driveFolder is ErrorDriveEntry)
                {
                    folderId = null;
                }
                else
                {
                    path.Add(ToFolder(driveFolder));
                    folderId = GetParentDriveId(driveFolder);
                }
            }

            path.Reverse();
            return path;
        }

        // Create a new folder (when folder.ID is null) or rename an existing one.
        public object SaveFolder(Folder folder)
        {
            if (folder == null) throw new ArgumentNullException("folder");

            if (folder.ID != null)
            {
                // Existing folder: only the title can change through this path.
                return RenameFolder(folder, folder.Title);
            }

            if (folder.ParentFolderID != null)
            {
                var driveFolderId = MakeDriveId(folder.ParentFolderID);

                var driveFolder = GoogleDriveProviderInfo.Storage.InsertEntry(null, folder.Title, driveFolderId, true);

                // Invalidate cached entries for the new folder and its parent's child list.
                GoogleDriveProviderInfo.CacheReset(driveFolder);
                var parentDriveId = GetParentDriveId(driveFolder);
                if (parentDriveId != null) GoogleDriveProviderInfo.CacheReset(parentDriveId, true);

                return MakeId(driveFolder);
            }

            return null;
        }

        // Remove the folder from Google Drive and purge all portal DB rows
        // (tag links, orphaned tags, security entries, id mappings) that reference
        // it or its descendants.
        public void DeleteFolder(object folderId)
        {
            var driveFolder = GetDriveEntry(folderId);
            var id = MakeId(driveFolder);

            using (var db = GetDb())
            using (var tx = db.BeginTransaction())
            {
                // All mapped ids under this folder (prefix match covers descendants).
                var hashIDs = db.ExecuteList(Query("files_thirdparty_id_mapping")
                                                 .Select("hash_id")
                                                 .Where(Exp.Like("id", id, SqlLike.StartWith)))
                                .ConvertAll(x => x[0]);

                db.ExecuteNonQuery(Delete("files_tag_link").Where(Exp.In("entry_id", hashIDs)));
                // Remove tags that no longer have any links.
                db.ExecuteNonQuery(Delete("files_tag").Where(Exp.EqColumns("0", Query("files_tag_link l").SelectCount().Where(Exp.EqColumns("tag_id", "id")))));
                db.ExecuteNonQuery(Delete("files_security").Where(Exp.In("entry_id", hashIDs)));
                db.ExecuteNonQuery(Delete("files_thirdparty_id_mapping").Where(Exp.In("hash_id", hashIDs)));

                tx.Commit();
            }

            if (!(driveFolder is ErrorDriveEntry))
                GoogleDriveProviderInfo.Storage.DeleteEntry(driveFolder.Id);

            GoogleDriveProviderInfo.CacheReset(driveFolder.Id);
            var parentDriveId = GetParentDriveId(driveFolder);
            if (parentDriveId != null) GoogleDriveProviderInfo.CacheReset(parentDriveId, true);
        }

        // Re-parent a folder: insert into the target folder first, then detach from
        // the source. cancellationToken is accepted for interface compatibility only.
        public object MoveFolder(object folderId, object toFolderId, CancellationToken? cancellationToken)
        {
            var driveFolder = GetDriveEntry(folderId);
            if (driveFolder is ErrorDriveEntry) throw new Exception(((ErrorDriveEntry)driveFolder).Error);

            var toDriveFolder = GetDriveEntry(toFolderId);
            if (toDriveFolder is ErrorDriveEntry) throw new Exception(((ErrorDriveEntry)toDriveFolder).Error);

            var fromFolderDriveId = GetParentDriveId(driveFolder);

            driveFolder = GoogleDriveProviderInfo.Storage.InsertEntryIntoFolder(driveFolder, toDriveFolder.Id);
            if (fromFolderDriveId != null)
            {
                GoogleDriveProviderInfo.Storage.RemoveEntryFromFolder(driveFolder, fromFolderDriveId);
            }

            GoogleDriveProviderInfo.CacheReset(driveFolder.Id);
            GoogleDriveProviderInfo.CacheReset(fromFolderDriveId, true);
            GoogleDriveProviderInfo.CacheReset(toDriveFolder.Id, true);

            return MakeId(driveFolder.Id);
        }

        // Create a copy of the folder inside toFolderId.
        // NOTE(review): only the folder entry itself is created here; children do not
        // appear to be copied - presumably the caller recurses (see
        // UseRecursiveOperation returning true). Confirm before relying on deep copies.
        public Folder CopyFolder(object folderId, object toFolderId, CancellationToken? cancellationToken)
        {
            var driveFolder = GetDriveEntry(folderId);
            if (driveFolder is ErrorDriveEntry) throw new Exception(((ErrorDriveEntry)driveFolder).Error);

            var toDriveFolder = GetDriveEntry(toFolderId);
            if (toDriveFolder is ErrorDriveEntry) throw new Exception(((ErrorDriveEntry)toDriveFolder).Error);

            var newDriveFolder = GoogleDriveProviderInfo.Storage.InsertEntry(null, driveFolder.Name, toDriveFolder.Id, true);

            GoogleDriveProviderInfo.CacheReset(newDriveFolder);
            GoogleDriveProviderInfo.CacheReset(toDriveFolder.Id, true);
            GoogleDriveProviderInfo.CacheReset(toDriveFolder.Id);

            return ToFolder(newDriveFolder);
        }

        // Always reports no conflicts: this DAO never blocks a move/copy up front.
        public IDictionary<object, string> CanMoveOrCopy(object[] folderIds, object to)
        {
            return new Dictionary<object, string>();
        }

        // Rename either the provider root (stored as the customer title) or a regular folder.
        public object RenameFolder(Folder folder, string newTitle)
        {
            var driveFolder = GetDriveEntry(folder.ID);

            if (IsRoot(driveFolder))
            {
                //It's root folder
                GoogleDriveDaoSelector.RenameProvider(GoogleDriveProviderInfo, newTitle);
                //rename provider customer title
            }
            else
            {
                //rename folder
                driveFolder.Name = newTitle;
                driveFolder = GoogleDriveProviderInfo.Storage.RenameEntry(driveFolder.Id, driveFolder.Name);
            }

            GoogleDriveProviderInfo.CacheReset(driveFolder);
            var parentDriveId = GetParentDriveId(driveFolder);
            if (parentDriveId != null) GoogleDriveProviderInfo.CacheReset(parentDriveId, true);

            return MakeId(driveFolder.Id);
        }

        // Not supported for this provider.
        public int GetItemsCount(object folderId)
        {
            throw new NotImplementedException();
        }

        // True when the folder has no entries at all; deliberately bypasses the cache.
        public bool IsEmpty(object folderId)
        {
            var driveId = MakeDriveId(folderId);
            //note: without cache
            return GoogleDriveProviderInfo.Storage.GetEntries(driveId).Count == 0;
        }

        // Deletions are permanent; the Drive trash is not used for removals.
        public bool UseTrashForRemove(Folder folder)
        {
            return false;
        }

        // Always true for this provider.
        public bool UseRecursiveOperation(object folderId, object toRootFolderId)
        {
            return true;
        }

        public bool CanCalculateSubitems(object entryId)
        {
            return false;
        }

        // Effective upload limit: the storage's own cap, additionally bounded by the
        // portal-wide limit unless the chunked upload path is used.
        public long GetMaxUploadSize(object folderId, bool chunkedUpload)
        {
            var storageMaxUploadSize = GoogleDriveProviderInfo.Storage.GetMaxUploadSize();

            return chunkedUpload ? storageMaxUploadSize : Math.Min(storageMaxUploadSize, SetupInfo.AvailableFileSize);
        }

        #region Only for TMFolderDao

        // The members below exist only to satisfy IFolderDao; module/bunch folder
        // bookkeeping is meaningless for a third-party mount, so they return stubs.

        public void ReassignFolders(IEnumerable<object> folderIds, Guid newOwnerId)
        {
        }

        public IEnumerable<Folder> Search(string text, bool bunch)
        {
            return null;
        }

        public object GetFolderID(string module, string bunch, string data, bool createIfNotExists)
        {
            return null;
        }

        public IEnumerable<object> GetFolderIDs(string module, string bunch, IEnumerable<string> data, bool createIfNotExists)
        {
            return new List<object>();
        }

        public object GetFolderIDCommon(bool createIfNotExists)
        {
            return null;
        }

        public object GetFolderIDUser(bool createIfNotExists, Guid? userId)
        {
            return null;
        }

        public object GetFolderIDShare(bool createIfNotExists)
        {
            return null;
        }

        public object GetFolderIDRecent(bool createIfNotExists)
        {
            return null;
        }

        public object GetFolderIDFavorites(bool createIfNotExists)
        {
            return null;
        }

        public object GetFolderIDTemplates(bool createIfNotExists)
        {
            return null;
        }

        public object GetFolderIDPrivacy(bool createIfNotExists, Guid? userId)
        {
            return null;
        }

        public object GetFolderIDTrash(bool createIfNotExists, Guid? userId)
        {
            return null;
        }

        public object GetFolderIDPhotos(bool createIfNotExists)
        {
            return null;
        }

        public object GetFolderIDProjects(bool createIfNotExists)
        {
            return null;
        }

        public string GetBunchObjectID(object folderID)
        {
            return null;
        }

        public Dictionary<string, string> GetBunchObjectIDs(IEnumerable<object> folderIDs)
        {
            return null;
        }

        #endregion
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Timers;
using System.Collections.Generic;
using System.IO;
using System.Net.Sockets;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Threading;
using OpenMetaverse;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Monitoring;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;

namespace OpenSim.Region.OptionalModules.Avatar.Chat
{
    // Maintains one IRC session that can be shared by several regions (the
    // ChannelState layer tracks how many regions depend on this connection).
    public class IRCConnector
    {
        #region Global (static) state

        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        // Local constants
        // This computation is not the real region center if the region is larger than 256.
        // This computation isn't fixed because there is not a handle back to the region.
        private static readonly Vector3 CenterOfRegion = new Vector3(((int)Constants.RegionSize * 0.5f), ((int)Constants.RegionSize * 0.5f), 20);
        private static readonly char[] CS_SPACE = { ' ' };

        private const int WD_INTERVAL = 1000;     // base watchdog interval
        private static int PING_PERIOD = 15;      // WD intervals per PING
        private static int ICCD_PERIOD = 10;      // WD intervals between Connects
        private static int L_TIMEOUT = 25;        // Login time out interval

        private static int _idk_ = 0;             // core connector identifier
        private static int _pdk_ = 0;             // ping interval counter
        private static int _icc_ = ICCD_PERIOD;   // IRC connect counter

        // List of configured connectors
        private static List<IRCConnector> m_connectors = new List<IRCConnector>();

        // Watchdog state
        private static System.Timers.Timer m_watchdog = null;

        // The watch-dog gets started as soon as the class is instantiated, and
        // ticks once every second (WD_INTERVAL)
        // NOTE(review): WatchdogHandler is not visible in this file chunk; it is
        // presumably defined further down in this class.
        static IRCConnector()
        {
            m_log.DebugFormat("[IRC-Connector]: Static initialization started");
            m_watchdog = new System.Timers.Timer(WD_INTERVAL);
            m_watchdog.Elapsed += new ElapsedEventHandler(WatchdogHandler);
            m_watchdog.AutoReset = true;
            m_watchdog.Start();
            m_log.DebugFormat("[IRC-Connector]: Static initialization complete");
        }

        #endregion

        #region Instance state

        // Connector identity
        internal int idn = _idk_++;

        // How many regions depend upon this connection
        // This count is updated by the ChannelState object and reflects the sum
        // of the region clients associated with the set of associated channel
        // state instances. That's why it cannot be managed here.
        internal int depends = 0;

        // This variable counts the number of resets that have been performed
        // on the connector. When a listener thread terminates, it checks to
        // see of the reset count has changed before it schedules another
        // reset.
        internal int m_resetk = 0;

        // Serializes connect/close/reconnect transitions.
        private Object msyncConnect = new Object();

        internal bool m_randomizeNick = true; // add random suffix
        internal string m_baseNick = null;    // base name for randomizing
        internal string m_nick = null;        // effective nickname

        public string Nick // Public property
        {
            get { return m_nick; }
            set { m_nick = value; }
        }

        private bool m_enabled = false; // connector enablement
        public bool Enabled
        {
            get { return m_enabled; }
        }

        private bool m_connected = false;  // connection status
        private bool m_pending = false;    // login disposition
        private int m_timeout = L_TIMEOUT; // login timeout counter
        public bool Connected
        {
            get { return m_connected; }
        }

        private string m_ircChannel; // associated channel id
        public string IrcChannel
        {
            get { return m_ircChannel; }
            set { m_ircChannel = value; }
        }

        private uint m_port = 6667; // session port
        public uint Port
        {
            get { return m_port; }
            set { m_port = value; }
        }

        private string m_server = null; // IRC server name
        public string Server
        {
            get { return m_server; }
            set { m_server = value; }
        }

        private string m_password = null;
        public string Password
        {
            get { return m_password; }
            set { m_password = value; }
        }

        // Raw USER registration line sent verbatim to the server.
        private string m_user = "USER OpenSimBot 8 * :I'm an OpenSim to IRC bot";
        public string User
        {
            get { return m_user; }
        }

        // Network interface
        private TcpClient m_tcp;
        private NetworkStream m_stream = null;
        private StreamReader m_reader;
        private StreamWriter m_writer;

        // Channel characteristic info (if available)
        internal string usermod = String.Empty;
        internal string chanmod = String.Empty;
        internal string version = String.Empty;
        internal bool motd = false;

        #endregion

        #region connector instance management

        // Build a connector from a ChannelState configuration bundle.
        internal IRCConnector(ChannelState cs)
        {
            // Prepare network interface
            m_tcp = null;
            m_writer = null;
            m_reader = null;

            // Setup IRC session parameters
            m_server = cs.Server;
            m_password = cs.Password;
            m_baseNick = cs.BaseNickname;
            m_randomizeNick = cs.RandomizeNickname;
            m_ircChannel = cs.IrcChannel;
            m_port = cs.Port;
            m_user = cs.User;

            // NOTE(review): m_watchdog is assigned unconditionally in the static
            // constructor, so this branch appears to be unreachable here - confirm
            // the intended gating for the configurable delays below.
            if (m_watchdog == null)
            {
                // Non-differentiating
                ICCD_PERIOD = cs.ConnectDelay;
                PING_PERIOD = cs.PingDelay;

                // Smaller values are not reasonable
                if (ICCD_PERIOD < 5)
                    ICCD_PERIOD = 5;

                if (PING_PERIOD < 5)
                    PING_PERIOD = 5;

                _icc_ = ICCD_PERIOD; // get started right away!
            }

            // The last line of defense
            if (m_server == null || m_baseNick == null || m_ircChannel == null || m_user == null)
                throw new Exception("Invalid connector configuration");

            // Generate an initial nickname
            if (m_randomizeNick)
                m_nick = m_baseNick + Util.RandomClass.Next(1, 99);
            else
                m_nick = m_baseNick;

            m_log.InfoFormat("[IRC-Connector-{0}]: Initialization complete", idn);
        }

        ~IRCConnector()
        {
            m_watchdog.Stop();
            Close();
        }

        // Mark the connector as connectable. Harmless if already enabled.
        public void Open()
        {
            if (!m_enabled)
            {
                if (!Connected)
                {
                    Connect();
                }

                lock (m_connectors)
                    m_connectors.Add(this);

                m_enabled = true;
            }
        }

        // Only close the connector if the dependency count is zero.
        public void Close()
        {
            m_log.InfoFormat("[IRC-Connector-{0}] Closing", idn);

            lock (msyncConnect)
            {
                // Refuse to close while any region still depends on this session.
                if ((depends == 0) && Enabled)
                {
                    m_enabled = false;

                    if (Connected)
                    {
                        m_log.DebugFormat("[IRC-Connector-{0}] Closing interface", idn);

                        // Cleanup the IRC session
                        try
                        {
                            m_writer.WriteLine(String.Format("QUIT :{0} to {1} wormhole to {2} closing", m_nick, m_ircChannel, m_server));
                            m_writer.Flush();
                        }
                        catch (Exception) { }

                        m_connected = false;

                        // Best-effort teardown: each handle is closed independently so
                        // one failure cannot leave the others open.
                        try { m_writer.Close(); } catch (Exception) { }
                        try { m_reader.Close(); } catch (Exception) { }
                        try { m_stream.Close(); } catch (Exception) { }
                        try { m_tcp.Close(); } catch (Exception) { }
                    }

                    lock (m_connectors)
                        m_connectors.Remove(this);
                }
            }

            m_log.InfoFormat("[IRC-Connector-{0}] Closed", idn);
        }

        #endregion

        #region session management

        // Connect to the IRC server. A connector should always be connected, once enabled
        public void Connect()
        {
            if (!m_enabled)
                return;

            // Delay until next WD cycle if this is too close to the last start attempt
            if (_icc_ < ICCD_PERIOD)
                return;

            m_log.DebugFormat("[IRC-Connector-{0}]: Connection request for {1} on {2}:{3}", idn, m_nick, m_server, m_ircChannel);

            _icc_ = 0;

            lock (msyncConnect)
            {
                try
                {
                    if (m_connected)
                        return;

                    m_connected = true;
                    m_pending = true;
                    m_timeout = L_TIMEOUT;

                    m_tcp = new TcpClient(m_server, (int)m_port);
                    m_stream = m_tcp.GetStream();
                    m_reader = new StreamReader(m_stream);
                    m_writer = new StreamWriter(m_stream);

                    m_log.InfoFormat("[IRC-Connector-{0}]: Connected to {1}:{2}", idn, m_server, m_port);

                    WorkManager.StartThread(ListenerRun, "IRCConnectionListenerThread", ThreadPriority.Normal, true, false);

                    // This is the message order recommended by RFC 2812
                    if (m_password != null)
                        m_writer.WriteLine(String.Format("PASS {0}", m_password));

                    m_writer.WriteLine(String.Format("NICK {0}", m_nick));
                    m_writer.Flush();
                    m_writer.WriteLine(m_user);
                    m_writer.Flush();
                }
                catch (Exception e)
                {
                    m_log.ErrorFormat("[IRC-Connector-{0}] cannot connect {1} to {2}:{3}: {4}", idn, m_nick, m_server, m_port, e.Message);
                    // It might seem reasonable to reset connected and pending status here
                    // Seeing as we know that the login has failed, but if we do that, then
                    // connection will be retried each time the interconnection interval
                    // expires. By leaving them as they are, the connection will be retried
                    // when the login timeout expires. Which is preferred.
                }
            }

            return;
        }

        // Reconnect is used to force a re-cycle of the IRC connection. Should generally
        // be a transparent event
        public void Reconnect()
        {
            m_log.DebugFormat("[IRC-Connector-{0}]: Reconnect request for {1} on {2}:{3}", idn, m_nick, m_server, m_ircChannel);

            // Don't do this if a Connect is in progress...
            lock (msyncConnect)
            {
                if (m_connected)
                {
                    m_log.InfoFormat("[IRC-Connector-{0}] Resetting connector", idn);

                    // Mark as disconnected. This will allow the listener thread
                    // to exit if still in-flight.
                    // The listener thread is not aborted - it *might* actually be
                    // the thread that is running the Reconnect! Instead just close
                    // the socket and it will disappear of its own accord, once this
                    // processing is completed.
                    // Best-effort close of the session handles; the listener thread
                    // notices the dead socket and exits on its own.
                    try { m_writer.Close(); } catch (Exception) { }
                    try { m_reader.Close(); } catch (Exception) { }
                    try { m_tcp.Close(); } catch (Exception) { }

                    m_connected = false;
                    m_pending = false;
                    // Bump the reset counter so the exiting listener thread does not
                    // schedule a second, redundant Reconnect (see ListenerRun).
                    m_resetk++;
                }
            }

            Connect();
        }

        #endregion

        #region Outbound (to-IRC) message handlers

        // Send one formatted PRIVMSG-style line to the IRC server. The pattern is a
        // composite format string filled with {0}=channel, {1}=from, {2}=region, {3}=msg.
        public void PrivMsg(string pattern, string from, string region, string msg)
        {
            // m_log.DebugFormat("[IRC-Connector-{0}] PrivMsg to IRC from {1}: <{2}>", idn, from,
            //     String.Format(pattern, m_ircChannel, from, region, msg));

            // One message to the IRC server
            try
            {
                m_writer.WriteLine(pattern, m_ircChannel, from, region, msg);
                m_writer.Flush();
                // m_log.DebugFormat("[IRC-Connector-{0}]: PrivMsg from {1} in {2}: {3}", idn, from, region, msg);
            }
            catch (IOException)
            {
                // I/O failure means the socket is gone - recycle the session.
                m_log.ErrorFormat("[IRC-Connector-{0}]: PrivMsg I/O Error: disconnected from IRC server", idn);
                Reconnect();
            }
            catch (Exception ex)
            {
                m_log.ErrorFormat("[IRC-Connector-{0}]: PrivMsg exception : {1}", idn, ex.Message);
                m_log.Debug(ex);
            }
        }

        // Send a raw protocol line to the IRC server.
        public void Send(string msg)
        {
            // m_log.DebugFormat("[IRC-Connector-{0}] Send to IRC : <{1}>", idn, msg);

            try
            {
                m_writer.WriteLine(msg);
                m_writer.Flush();
                // m_log.DebugFormat("[IRC-Connector-{0}] Sent command string: {1}", idn, msg);
            }
            catch (IOException)
            {
                m_log.ErrorFormat("[IRC-Connector-{0}] Disconnected from IRC server.(Send)", idn);
                Reconnect();
            }
            catch (Exception ex)
            {
                // NOTE(review): the format uses {0} for both arguments, so ex.Message
                // is never printed - "{1}" was probably intended for the message.
                m_log.ErrorFormat("[IRC-Connector-{0}] Send exception trap: {0}", idn, ex.Message);
                m_log.Debug(ex);
            }
        }

        #endregion

        // Listener thread body: reads server lines until disabled or disconnected,
        // relays PRIVMSGs into the sim and routes everything else to ProcessIRCCommand.
        public void ListenerRun()
        {
            string inputLine;
            int resetk = m_resetk;

            try
            {
                while (m_enabled && m_connected)
                {
                    if ((inputLine = m_reader.ReadLine()) == null)
                        throw new Exception("Listener input socket closed");

                    Watchdog.UpdateThread();

                    // m_log.Info("[IRCConnector]: " + inputLine);

                    if (inputLine.Contains("PRIVMSG"))
                    {
                        Dictionary<string, string> data = ExtractMsg(inputLine);

                        // Any chat ???
                        if (data != null)
                        {
                            OSChatMessage c = new OSChatMessage();
                            c.Message = data["msg"];
                            c.Type = ChatTypeEnum.Region;
                            c.Position = CenterOfRegion;
                            c.From = data["nick"] + "@IRC";
                            c.Sender = null;
                            c.SenderUUID = UUID.Zero;

                            // Is message "\001ACTION foo bar\001"?
                            // Then change to: "/me foo bar"
                            if ((1 == c.Message[0]) && c.Message.Substring(1).StartsWith("ACTION"))
                                c.Message = String.Format("/me {0}", c.Message.Substring(8, c.Message.Length - 9));

                            ChannelState.OSChat(this, c, false);
                        }
                    }
                    else
                    {
                        ProcessIRCCommand(inputLine);
                    }
                }
            }
            catch (Exception /*e*/)
            {
                // Deliberately swallowed: any read failure simply ends the listener;
                // the reconnect logic below decides whether to recycle the session.
                // m_log.ErrorFormat("[IRC-Connector-{0}]: ListenerRun exception trap: {1}", idn, e.Message);
                // m_log.Debug(e);
            }

            // This is potentially circular, but harmless if so.
            // The connection is marked as not connected the first time
            // through reconnect.
            if (m_enabled && (m_resetk == resetk))
                Reconnect();

            Watchdog.RemoveThread();
        }

        // Matches ":nick!user PRIVMSG channel :message" server lines.
        private Regex RE = new Regex(@":(?<nick>[\w-]*)!(?<user>\S*) PRIVMSG (?<channel>\S+) :(?<msg>.*)", RegexOptions.Multiline);

        // Returns the nick/user/channel/msg fields of a PRIVMSG line, or null if the
        // line does not match exactly one complete PRIVMSG pattern.
        private Dictionary<string, string> ExtractMsg(string input)
        {
            //examines IRC commands and extracts any private messages
            // which will then be reboadcast in the Sim

            // m_log.InfoFormat("[IRC-Connector-{0}]: ExtractMsg: {1}", idn, input);

            Dictionary<string, string> result = null;
            MatchCollection matches = RE.Matches(input);

            // Get some direct matches $1 $4 is a
            if ((matches.Count == 0) || (matches.Count != 1) || (matches[0].Groups.Count != 5))
            {
                // m_log.Info("[IRCConnector]: Number of matches: " + matches.Count);
                // if (matches.Count > 0)
                // {
                //     m_log.Info("[IRCConnector]: Number of groups: " + matches[0].Groups.Count);
                // }
                return null;
            }

            result = new Dictionary<string, string>();
            result.Add("nick", matches[0].Groups[1].Value);
            result.Add("user", matches[0].Groups[2].Value);
            result.Add("channel", matches[0].Groups[3].Value);
            result.Add("msg", matches[0].Groups[4].Value);

            return result;
        }

        // Broadcast a formatted message into the sim on behalf of the IRC side.
        public void BroadcastSim(string sender, string format, params string[] args)
        {
            try
            {
                OSChatMessage c = new OSChatMessage();
                c.From = sender;
                c.Message = String.Format(format, args);
                c.Type = ChatTypeEnum.Region; // ChatTypeEnum.Say;
                c.Position = CenterOfRegion;
                c.Sender = null;
                c.SenderUUID = UUID.Zero;

                ChannelState.OSChat(this, c, true);
            }
            catch (Exception ex) // IRC gate should not crash Sim
            {
                m_log.ErrorFormat("[IRC-Connector-{0}]: BroadcastSim Exception Trap: {1}\n{2}", idn, ex.Message, ex.StackTrace);
            }
        }

        #region IRC Command Handlers

        // Parse one non-PRIVMSG server line into prefix/command/parameters and
        // dispatch on the command (numeric replies and the common verb commands).
        public void ProcessIRCCommand(string command)
        {
            string[] commArgs;
            string c_server = m_server;

            string pfx = String.Empty;
            string cmd = String.Empty;
            string parms = String.Empty;

            // ":" indicates that a prefix is present
            // There are NEVER more than 17 real
            // fields. A parameter that starts with
            // ":" indicates that the remainder of the
            // line is a single parameter value.
            commArgs = command.Split(CS_SPACE, 2);

            if (commArgs[0].StartsWith(":"))
            {
                pfx = commArgs[0].Substring(1);
                commArgs = commArgs[1].Split(CS_SPACE, 2);
            }

            cmd = commArgs[0];
            parms = commArgs[1];

            // m_log.DebugFormat("[IRC-Connector-{0}] prefix = <{1}> cmd = <{2}>", idn, pfx, cmd);

            switch (cmd)
            {
                // Messages 001-004 are always sent
                // following signon.
                case "001": // Welcome ...
                case "002": // Server information
                case "003": // Welcome ...
                    break;

                case "004": // Server information
                    // Registration is complete once 004 arrives: capture the server
                    // characteristics and join the configured channel.
                    m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
                    commArgs = parms.Split(CS_SPACE);
                    c_server = commArgs[1];
                    m_server = c_server;
                    version = commArgs[2];
                    usermod = commArgs[3];
                    chanmod = commArgs[4];

                    m_writer.WriteLine(String.Format("JOIN {0}", m_ircChannel));
                    m_writer.Flush();
                    m_log.InfoFormat("[IRC-Connector-{0}]: sent request to join {1} ", idn, m_ircChannel);
                    break;

                case "005": // Server information
                    break;

                // Informational numerics that require no action.
                case "042":
                case "250":
                case "251":
                case "252":
                case "254":
                case "255":
                case "265":
                case "266":
                case "332": // Subject
                case "333": // Subject owner (?)
                case "353": // Name list
                case "366": // End-of-Name list marker
                case "372": // MOTD body
                case "375": // MOTD start
                    // m_log.InfoFormat("[IRC-Connector-{0}] [{1}] {2}", idn, cmd, parms.Split(CS_SPACE,2)[1]);
                    break;

                case "376": // MOTD end
                    // m_log.InfoFormat("[IRC-Connector-{0}] [{1}] {2}", idn, cmd, parms.Split(CS_SPACE,2)[1]);
                    motd = true;
                    break;

                case "451": // Not registered
                    break;

                case "433": // Nickname in use
                    // Gen a new name
                    m_nick = m_baseNick + Util.RandomClass.Next(1, 99);
                    m_log.ErrorFormat("[IRC-Connector-{0}]: [{1}] IRC SERVER reports NicknameInUse, trying {2}", idn, cmd, m_nick);
                    // Retry
                    m_writer.WriteLine(String.Format("NICK {0}", m_nick));
                    m_writer.Flush();
                    m_writer.WriteLine(m_user);
                    m_writer.Flush();
                    m_writer.WriteLine(String.Format("JOIN {0}", m_ircChannel));
                    m_writer.Flush();
                    break;

                case "479": // Bad channel name, etc. This will never work, so disable the connection
                    m_log.ErrorFormat("[IRC-Connector-{0}] [{1}] {2}", idn, cmd, parms.Split(CS_SPACE, 2)[1]);
                    m_log.ErrorFormat("[IRC-Connector-{0}] [{1}] Connector disabled", idn, cmd);
                    m_enabled = false;
                    m_connected = false;
                    m_pending = false;
                    break;

                case "NOTICE":
                    // m_log.WarnFormat("[IRC-Connector-{0}] [{1}] {2}", idn, cmd, parms.Split(CS_SPACE,2)[1]);
                    break;

                case "ERROR":
                    m_log.ErrorFormat("[IRC-Connector-{0}] [{1}] {2}", idn, cmd, parms.Split(CS_SPACE, 2)[1]);
                    // Back off the reconnect cadence if the server complains about it.
                    if (parms.Contains("reconnect too fast"))
                        ICCD_PERIOD++;
                    m_pending = false;
                    Reconnect();
                    break;

                case "PING":
                    // Keep-alive: echo the server's token back verbatim.
                    m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
                    m_writer.WriteLine(String.Format("PONG {0}", parms));
                    m_writer.Flush();
                    break;

                case "PONG":
                    break;

                case "JOIN":
                    m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
                    eventIrcJoin(pfx, cmd, parms);
                    break;

                case "PART":
                    m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
                    eventIrcPart(pfx, cmd, parms);
                    break;

                case "MODE":
                    m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
eventIrcMode(pfx, cmd, parms);
break;

        case "NICK":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcNickChange(pfx, cmd, parms);
            break;
        case "KICK":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcKick(pfx, cmd, parms);
            break;
        case "QUIT":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcQuit(pfx, cmd, parms);
            break;
        default:
            // Anything not handled above is logged and dropped.
            m_log.DebugFormat("[IRC-Connector-{0}] Command '{1}' ignored, parms = {2}", idn, cmd, parms);
            break;
    }

    // m_log.DebugFormat("[IRC-Connector-{0}] prefix = <{1}> cmd = <{2}> complete", idn, pfx, cmd);
}

/// <summary>
/// Handles an IRC JOIN: clears the pending flag once our requested channel
/// is confirmed, then announces the join in-world.
/// </summary>
public void eventIrcJoin(string prefix, string command, string parms)
{
    string[] args = parms.Split(CS_SPACE, 2);
    string IrcUser = prefix.Split('!')[0];
    string IrcChannel = args[0];
    // Some servers prefix the channel argument with ":".
    if (IrcChannel.StartsWith(":"))
        IrcChannel = IrcChannel.Substring(1);

    if (IrcChannel == m_ircChannel)
    {
        m_log.InfoFormat("[IRC-Connector-{0}] Joined requested channel {1} at {2}", idn, IrcChannel, m_server);
        m_pending = false;
    }
    else
        m_log.InfoFormat("[IRC-Connector-{0}] Joined unknown channel {1} at {2}", idn, IrcChannel, m_server);

    BroadcastSim(IrcUser, "/me joins {0}", IrcChannel);
}

/// <summary>
/// Handles an IRC PART: announces in-world that the user left the channel.
/// </summary>
public void eventIrcPart(string prefix, string command, string parms)
{
    string[] args = parms.Split(CS_SPACE, 2);
    string IrcUser = prefix.Split('!')[0];
    string IrcChannel = args[0];

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCPart {1}:{2}", idn, m_server, m_ircChannel);
    BroadcastSim(IrcUser, "/me parts {0}", IrcChannel);
}

/// <summary>
/// Handles an IRC MODE change. The mode string is parsed but currently
/// not acted upon.
/// </summary>
public void eventIrcMode(string prefix, string command, string parms)
{
    string[] args = parms.Split(CS_SPACE, 2);
    string UserMode = args[1];

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCMode {1}:{2}", idn, m_server, m_ircChannel);
    if (UserMode.Substring(0, 1) == ":")
    {
        UserMode = UserMode.Remove(0, 1);
    }
    // NOTE(review): UserMode is computed but never used beyond this point.
}

/// <summary>
/// Handles an IRC NICK change: announces the rename in-world.
/// </summary>
public void eventIrcNickChange(string prefix, string command, string parms)
{
    string[] args = parms.Split(CS_SPACE, 2);
    string UserOldNick =
prefix.Split('!')[0];
string UserNewNick = args[0].Remove(0, 1);

m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCNickChange {1}:{2}", idn, m_server, m_ircChannel);
BroadcastSim(UserOldNick, "/me is now known as {0}", UserNewNick);
}

/// <summary>
/// Handles an IRC KICK: announces in-world who kicked whom from which
/// channel (with the kick message), and adds a complaint when the victim
/// was our own relay nick.
/// </summary>
/// <param name="prefix">IRC prefix ("nick!user@host") of the kicker.</param>
/// <param name="command">The literal command token ("KICK"); unused.</param>
/// <param name="parms">"&lt;channel&gt; &lt;victim&gt; &lt;message&gt;".</param>
public void eventIrcKick(string prefix, string command, string parms)
{
    string[] args = parms.Split(CS_SPACE, 3);
    string UserKicker = prefix.Split('!')[0];
    string IrcChannel = args[0];
    string UserKicked = args[1];
    string KickMessage = args[2];

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCKick {1}:{2}", idn, m_server, m_ircChannel);
    // Fix: broadcast text previously read "/me kicks kicks {0} ..." (duplicated word).
    BroadcastSim(UserKicker, "/me kicks {0} off {1} saying \"{2}\"", UserKicked, IrcChannel, KickMessage);

    if (UserKicked == m_nick)
    {
        BroadcastSim(m_nick, "Hey, that was me!!!");
    }
}

/// <summary>
/// Handles an IRC QUIT: relays the quit message to the region.
/// </summary>
public void eventIrcQuit(string prefix, string command, string parms)
{
    string IrcUser = prefix.Split('!')[0];
    string QuitMessage = parms;

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCQuit {1}:{2}", idn, m_server, m_ircChannel);
    BroadcastSim(IrcUser, "/me quits saying \"{0}\"", QuitMessage);
}

#endregion

#region Connector Watch Dog

// A single watch dog monitors extant connectors and makes sure that they
// are re-connected as necessary. If a connector IS connected, then it is
// pinged, but only if a PING period has elapsed.
// Timer callback shared by ALL connectors: advances the ping/connect
// counters, then (re)connects, times out, or pings each enabled connector.
protected static void WatchdogHandler(Object source, ElapsedEventArgs args)
{
    // m_log.InfoFormat("[IRC-Watchdog] Status scan, pdk = {0}, icc = {1}", _pdk_, _icc_);

    _pdk_ = (_pdk_ + 1) % PING_PERIOD;    // cycle the ping trigger
    _icc_++;    // increment the inter-consecutive-connect-delay counter

    lock (m_connectors)
        foreach (IRCConnector connector in m_connectors)
        {
            // m_log.InfoFormat("[IRC-Watchdog] Scanning {0}", connector);

            if (connector.Enabled)
            {
                if (!connector.Connected)
                {
                    try
                    {
                        // m_log.DebugFormat("[IRC-Watchdog] Connecting {1}:{2}", connector.idn, connector.m_server, connector.m_ircChannel);
                        connector.Connect();
                    }
                    catch (Exception e)
                    {
                        m_log.ErrorFormat("[IRC-Watchdog] Exception on connector {0}: {1} ", connector.idn, e.Message);
                    }
                }
                else
                {
                    // Connected but still logging in: count down the login
                    // timeout and force a reconnect when it expires.
                    if (connector.m_pending)
                    {
                        if (connector.m_timeout == 0)
                        {
                            m_log.ErrorFormat("[IRC-Watchdog] Login timed-out for connector {0}, reconnecting", connector.idn);
                            connector.Reconnect();
                        }
                        else
                            connector.m_timeout--;
                    }
                    // Being marked connected is not enough to ping. Socket establishment can sometimes take a long
                    // time, in which case the watch dog might try to ping the server before the socket has been
                    // set up, with nasty side-effects.
                    else if (_pdk_ == 0)
                    {
                        try
                        {
                            connector.m_writer.WriteLine(String.Format("PING :{0}", connector.m_server));
                            connector.m_writer.Flush();
                        }
                        catch (Exception e)
                        {
                            m_log.ErrorFormat("[IRC-PingRun] Exception on connector {0}: {1} ", connector.idn, e.Message);
                            m_log.Debug(e);
                            connector.Reconnect();
                        }
                    }
                }
            }
        }

    // m_log.InfoFormat("[IRC-Watchdog] Status scan completed");
}

#endregion

}
}
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// NOTE(review): generated code — do not hand-edit; regenerate from the spec instead.

namespace Microsoft.Azure.Batch.Protocol
{
    using System.Linq;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// ApplicationOperations operations.
    /// </summary>
    internal partial class ApplicationOperations : Microsoft.Rest.IServiceOperations<BatchServiceClient>, IApplicationOperations
    {
        /// <summary>
        /// Initializes a new instance of the ApplicationOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        internal ApplicationOperations(BatchServiceClient client)
        {
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            this.Client = client;
        }

        /// <summary>
        /// Gets a reference to the BatchServiceClient
        /// </summary>
        public BatchServiceClient Client { get; private set; }

        /// <summary>
        /// Lists all of the applications available in the specified account.
        /// </summary>
        /// <param name='applicationListOptions'>
        /// Additional parameters for the operation
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="BatchErrorException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<ApplicationSummary>,ApplicationListHeaders>> ListWithHttpMessagesAsync(ApplicationListOptions applicationListOptions = default(ApplicationListOptions), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
        {
            if (this.Client.ApiVersion == null)
            {
                throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.ApiVersion");
            }
            // Flatten the optional request-options bag into locals.
            int? maxResults = default(int?);
            if (applicationListOptions != null)
            {
                maxResults = applicationListOptions.MaxResults;
            }
            int? timeout = default(int?);
            if (applicationListOptions != null)
            {
                timeout = applicationListOptions.Timeout;
            }
            string clientRequestId = default(string);
            if (applicationListOptions != null)
            {
                clientRequestId = applicationListOptions.ClientRequestId;
            }
            bool? returnClientRequestId = default(bool?);
            if (applicationListOptions != null)
            {
                returnClientRequestId = applicationListOptions.ReturnClientRequestId;
            }
            System.DateTime? ocpDate = default(System.DateTime?);
            if (applicationListOptions != null)
            {
                ocpDate = applicationListOptions.OcpDate;
            }
            // Tracing
            bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
                System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
                tracingParameters.Add("maxResults", maxResults);
                tracingParameters.Add("timeout", timeout);
                tracingParameters.Add("clientRequestId", clientRequestId);
                tracingParameters.Add("returnClientRequestId", returnClientRequestId);
                tracingParameters.Add("ocpDate", ocpDate);
                tracingParameters.Add("cancellationToken", cancellationToken);
                Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
            }
            // Construct URL
            var _baseUrl = this.Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "applications").ToString();
            System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
            if (this.Client.ApiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(this.Client.ApiVersion)));
            }
            if (maxResults != null)
            {
                _queryParameters.Add(string.Format("maxresults={0}", System.Uri.EscapeDataString(Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(maxResults, this.Client.SerializationSettings).Trim('"'))));
            }
            if (timeout != null)
            {
                _queryParameters.Add(string.Format("timeout={0}", System.Uri.EscapeDataString(Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(timeout, this.Client.SerializationSettings).Trim('"'))));
            }
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
            System.Net.Http.HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("client-request-id", System.Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (clientRequestId != null)
            {
                if (_httpRequest.Headers.Contains("client-request-id"))
                {
                    _httpRequest.Headers.Remove("client-request-id");
                }
                _httpRequest.Headers.TryAddWithoutValidation("client-request-id", clientRequestId);
            }
            if (returnClientRequestId != null)
            {
                if (_httpRequest.Headers.Contains("return-client-request-id"))
                {
                    _httpRequest.Headers.Remove("return-client-request-id");
                }
                _httpRequest.Headers.TryAddWithoutValidation("return-client-request-id", Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(returnClientRequestId, this.Client.SerializationSettings).Trim('"'));
            }
            if (ocpDate != null)
            {
                if (_httpRequest.Headers.Contains("ocp-date"))
                {
                    _httpRequest.Headers.Remove("ocp-date");
                }
                _httpRequest.Headers.TryAddWithoutValidation("ocp-date", Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(ocpDate, new Microsoft.Rest.Serialization.DateTimeRfc1123JsonConverter()).Trim('"'));
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new BatchErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    BatchError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<BatchError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex.Body = _errorBody;
                    }
                }
                catch (Newtonsoft.Json.JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<ApplicationSummary>,ApplicationListHeaders>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<ApplicationSummary>>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (Newtonsoft.Json.JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            try
            {
                _result.Headers = _httpResponse.GetHeadersAsJson().ToObject<ApplicationListHeaders>(Newtonsoft.Json.JsonSerializer.Create(this.Client.DeserializationSettings));
            }
            catch (Newtonsoft.Json.JsonException ex)
            {
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw new Microsoft.Rest.SerializationException("Unable to deserialize the headers.", _httpResponse.GetHeadersAsJson().ToString(), ex);
            }
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// Gets information about the specified application.
        /// </summary>
        /// <param name='applicationId'>
        /// The id of the application.
        /// </param>
        /// <param name='applicationGetOptions'>
        /// Additional parameters for the operation
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="BatchErrorException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<ApplicationSummary,ApplicationGetHeaders>> GetWithHttpMessagesAsync(string applicationId, ApplicationGetOptions applicationGetOptions = default(ApplicationGetOptions), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
        {
            if (applicationId == null)
            {
                throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "applicationId");
            }
            if (this.Client.ApiVersion == null)
            {
                throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.ApiVersion");
            }
            // Flatten the optional request-options bag into locals.
            int? timeout = default(int?);
            if (applicationGetOptions != null)
            {
                timeout = applicationGetOptions.Timeout;
            }
            string clientRequestId = default(string);
            if (applicationGetOptions != null)
            {
                clientRequestId = applicationGetOptions.ClientRequestId;
            }
            bool? returnClientRequestId = default(bool?);
            if (applicationGetOptions != null)
            {
                returnClientRequestId = applicationGetOptions.ReturnClientRequestId;
            }
            System.DateTime? ocpDate = default(System.DateTime?);
            if (applicationGetOptions != null)
            {
                ocpDate = applicationGetOptions.OcpDate;
            }
            // Tracing
            bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
                System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
                tracingParameters.Add("applicationId", applicationId);
                tracingParameters.Add("timeout", timeout);
                tracingParameters.Add("clientRequestId", clientRequestId);
                tracingParameters.Add("returnClientRequestId", returnClientRequestId);
                tracingParameters.Add("ocpDate", ocpDate);
                tracingParameters.Add("cancellationToken", cancellationToken);
                Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
            }
            // Construct URL
            var _baseUrl = this.Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "applications/{applicationId}").ToString();
            _url = _url.Replace("{applicationId}", System.Uri.EscapeDataString(applicationId));
            System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
            if (this.Client.ApiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(this.Client.ApiVersion)));
            }
            if (timeout != null)
            {
                _queryParameters.Add(string.Format("timeout={0}", System.Uri.EscapeDataString(Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(timeout, this.Client.SerializationSettings).Trim('"'))));
            }
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
            System.Net.Http.HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("client-request-id", System.Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (clientRequestId != null)
            {
                if (_httpRequest.Headers.Contains("client-request-id"))
                {
                    _httpRequest.Headers.Remove("client-request-id");
                }
                _httpRequest.Headers.TryAddWithoutValidation("client-request-id", clientRequestId);
            }
            if (returnClientRequestId != null)
            {
                if (_httpRequest.Headers.Contains("return-client-request-id"))
                {
                    _httpRequest.Headers.Remove("return-client-request-id");
                }
                _httpRequest.Headers.TryAddWithoutValidation("return-client-request-id", Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(returnClientRequestId, this.Client.SerializationSettings).Trim('"'));
            }
            if (ocpDate != null)
            {
                if (_httpRequest.Headers.Contains("ocp-date"))
                {
                    _httpRequest.Headers.Remove("ocp-date");
                }
                _httpRequest.Headers.TryAddWithoutValidation("ocp-date", Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(ocpDate, new Microsoft.Rest.Serialization.DateTimeRfc1123JsonConverter()).Trim('"'));
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new BatchErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    BatchError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<BatchError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex.Body = _errorBody;
                    }
                }
                catch (Newtonsoft.Json.JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new Microsoft.Rest.Azure.AzureOperationResponse<ApplicationSummary,ApplicationGetHeaders>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<ApplicationSummary>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (Newtonsoft.Json.JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            try
            {
                _result.Headers = _httpResponse.GetHeadersAsJson().ToObject<ApplicationGetHeaders>(Newtonsoft.Json.JsonSerializer.Create(this.Client.DeserializationSettings));
            }
            catch (Newtonsoft.Json.JsonException ex)
            {
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw new Microsoft.Rest.SerializationException("Unable to deserialize the headers.", _httpResponse.GetHeadersAsJson().ToString(), ex);
            }
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// Lists all of the applications available in the specified account.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='applicationListNextOptions'>
        /// Additional parameters for the operation
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="BatchErrorException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<ApplicationSummary>,ApplicationListHeaders>> ListNextWithHttpMessagesAsync(string nextPageLink, ApplicationListNextOptions applicationListNextOptions = default(ApplicationListNextOptions), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
        {
            if (nextPageLink == null)
            {
                throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "nextPageLink");
            }
            // Flatten the optional request-options bag into locals.
            string clientRequestId = default(string);
            if (applicationListNextOptions != null)
            {
                clientRequestId = applicationListNextOptions.ClientRequestId;
            }
            bool? returnClientRequestId = default(bool?);
            if (applicationListNextOptions != null)
            {
                returnClientRequestId = applicationListNextOptions.ReturnClientRequestId;
            }
            System.DateTime? ocpDate = default(System.DateTime?);
            if (applicationListNextOptions != null)
            {
                ocpDate = applicationListNextOptions.OcpDate;
            }
            // Tracing
            bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
                System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
                tracingParameters.Add("nextPageLink", nextPageLink);
                tracingParameters.Add("clientRequestId", clientRequestId);
                tracingParameters.Add("returnClientRequestId", returnClientRequestId);
                tracingParameters.Add("ocpDate", ocpDate);
                tracingParameters.Add("cancellationToken", cancellationToken);
                Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
            }
            // Construct URL: the service-supplied nextLink is used verbatim.
            string _url = "{nextLink}";
            _url = _url.Replace("{nextLink}", nextPageLink);
            System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
            System.Net.Http.HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("client-request-id", System.Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (clientRequestId != null)
            {
                if (_httpRequest.Headers.Contains("client-request-id"))
                {
                    _httpRequest.Headers.Remove("client-request-id");
                }
                _httpRequest.Headers.TryAddWithoutValidation("client-request-id", clientRequestId);
            }
            if (returnClientRequestId != null)
            {
                if (_httpRequest.Headers.Contains("return-client-request-id"))
                {
                    _httpRequest.Headers.Remove("return-client-request-id");
                }
                _httpRequest.Headers.TryAddWithoutValidation("return-client-request-id", Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(returnClientRequestId, this.Client.SerializationSettings).Trim('"'));
            }
            if (ocpDate != null)
            {
                if (_httpRequest.Headers.Contains("ocp-date"))
                {
                    _httpRequest.Headers.Remove("ocp-date");
                }
                _httpRequest.Headers.TryAddWithoutValidation("ocp-date", Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(ocpDate, new Microsoft.Rest.Serialization.DateTimeRfc1123JsonConverter()).Trim('"'));
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new BatchErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    BatchError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<BatchError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex.Body = _errorBody;
                    }
                }
                catch (Newtonsoft.Json.JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<ApplicationSummary>,ApplicationListHeaders>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<ApplicationSummary>>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (Newtonsoft.Json.JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            try
            {
                _result.Headers = _httpResponse.GetHeadersAsJson().ToObject<ApplicationListHeaders>(Newtonsoft.Json.JsonSerializer.Create(this.Client.DeserializationSettings));
            }
            catch (Newtonsoft.Json.JsonException ex)
            {
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw new Microsoft.Rest.SerializationException("Unable to deserialize the headers.", _httpResponse.GetHeadersAsJson().ToString(), ex);
            }
            if (_shouldTrace)
            {
                Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

    }
}
using System;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Windows.Forms.VisualStyles;
using umbraco.cms.businesslogic.member;
using umbraco.cms.businesslogic.web;
using umbraco.controls;
using umbraco.cms.helpers;
using umbraco.BasePages;

namespace umbraco.presentation.umbraco.dialogs
{
    /// <summary>
    /// Dialog code-behind for configuring public access (page protection) on a
    /// content node. Supports two modes: "simple" (a single username/password)
    /// and "advanced" (membership-role based protection).
    /// The target node id is read from the "nodeId" query/form parameter.
    /// </summary>
    public partial class protectPage : UmbracoEnsuredPage
    {
        public protectPage()
        {
            // This dialog belongs to the content section; UmbracoEnsuredPage
            // uses CurrentApp to enforce section-level access rights.
            CurrentApp = BusinessLogic.DefaultApps.content.ToString();
        }

        protected System.Web.UI.WebControls.Literal jsShowWindow;
        // Dual listbox used to pick the member groups (roles) for advanced mode.
        protected DualSelectbox _memberGroups = new DualSelectbox();
        // Content pickers for the login page and the access-denied (error) page.
        protected ContentPicker loginPagePicker = new ContentPicker();
        protected ContentPicker errorPagePicker = new ContentPicker();

        /// <summary>
        /// Handler for the mode radio buttons: switches the dialog between the
        /// simple and advanced panes and records the chosen mode in the
        /// protect button's CommandName so protect_Click knows which branch to run.
        /// </summary>
        protected void selectMode(object sender, EventArgs e)
        {
            p_mode.Visible = false;
            p_buttons.Visible = true;
            if (rb_simple.Checked)
            {
                pane_advanced.Visible = false;
                pane_simple.Visible = true;
                bt_protect.CommandName = "simple";
            }
            else
            {
                pane_advanced.Visible = true;
                pane_simple.Visible = false;
                bt_protect.CommandName = "advanced";
            }
        }

        /// <summary>
        /// Initializes the dialog: wires up the pickers, localizes labels, and —
        /// when the node is already protected — pre-fills the existing login/error
        /// pages, protection type and selected roles so the user can edit them.
        /// </summary>
        protected void Page_Load(object sender, System.EventArgs e)
        {
            // Check for editing: resolve the node this dialog was opened for.
            int documentId = int.Parse(helper.Request("nodeId"));
            cms.businesslogic.web.Document documentObject = new cms.businesslogic.web.Document(documentId);
            jsShowWindow.Text = "";
            ph_errorpage.Controls.Add(errorPagePicker);
            ph_loginpage.Controls.Add(loginPagePicker);
            // Localized labels for the panes and fields.
            pp_login.Text = ui.Text("login");
            pp_pass.Text = ui.Text("password");
            pp_loginPage.Text = ui.Text("paLoginPage");
            pp_errorPage.Text = ui.Text("paErrorPage");
            pane_chooseMode.Text = ui.Text("publicAccess", "paHowWould", base.getUser());
            pane_pages.Text = ui.Text("publicAccess", "paSelectPages", base.getUser());
            pane_simple.Text = ui.Text("publicAccess", "paSimple", base.getUser());
            pane_advanced.Text = ui.Text("publicAccess", "paAdvanced", base.getUser());
            if (!IsPostBack)
            {
                // Node already protected: show the "remove protection" button and
                // load the current configuration for editing.
                if (Access.IsProtected(documentId, documentObject.Path) && Access.GetProtectionType(documentId) != ProtectionType.NotProtected)
                {
                    bt_buttonRemoveProtection.Visible = true;
                    bt_buttonRemoveProtection.Attributes.Add("onClick", "return confirm('" + ui.Text("areyousure") + "')");
                    // Get login and error pages currently assigned to this path.
                    int errorPage = Access.GetErrorPage(documentObject.Path);
                    int loginPage = Access.GetLoginPage(documentObject.Path);
                    try
                    {
                        // Constructing the Document throws if the id no longer
                        // exists; in that case the picker is simply left empty.
                        Document loginPageObj = new Document(loginPage);
                        if (loginPageObj != null)
                        {
                            loginPagePicker.Value = loginPage.ToString();
                        }
                        Document errorPageObj = new Document(errorPage);
                        errorPagePicker.Value = errorPage.ToString();
                    }
                    catch
                    {
                    }
                    if (Access.GetProtectionType(documentId) == ProtectionType.Simple)
                    {
                        // Simple mode: show the single accessing member's login.
                        MembershipUser m = Access.GetAccessingMembershipUser(documentId);
                        simpleLogin.Text = m.UserName;
                        pane_simple.Visible = true;
                        pane_advanced.Visible = false;
                        bt_protect.CommandName = "simple";
                    }
                    else if (Access.GetProtectionType(documentId) == ProtectionType.Advanced)
                    {
                        pane_simple.Visible = false;
                        pane_advanced.Visible = true;
                        bt_protect.CommandName = "advanced";
                    }
                    p_buttons.Visible = true;
                    p_mode.Visible = false;
                }
            }
            // Load up member groups (roles) into the dual selectbox; on first
            // load, pre-select the roles already protecting this node.
            _memberGroups.ID = "Membergroups";
            _memberGroups.Width = 175;
            string selectedGroups = "";
            string[] _roles = Roles.GetAllRoles();
            if (_roles.Length > 0)
            {
                foreach (string role in _roles)
                {
                    ListItem li = new ListItem(role, role);
                    if (!IsPostBack)
                    {
                        if (cms.businesslogic.web.Access.IsProtectedByMembershipRole(int.Parse(helper.Request("nodeid")), role))
                            selectedGroups += role + ",";
                    }
                    _memberGroups.Items.Add(li);
                }
            }
            else
            {
                // No roles defined at all: advanced mode cannot be used.
                p_noGroupsFound.Visible = true;
                rb_advanced.Enabled = false;
            }
            _memberGroups.Value = selectedGroups;
            groupsSelector.Controls.Add(_memberGroups);
            bt_protect.Text = ui.Text("update");
            bt_buttonRemoveProtection.Text = ui.Text("paRemoveProtection");
            // Put user code to initialize the page here
        }

        #region Web Form Designer generated code
        override protected void OnInit(EventArgs e)
        {
            //
            // CODEGEN: This call is required by the ASP.NET Web Form Designer.
            //
            InitializeComponent();
            base.OnInit(e);
        }

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
        }
        #endregion

        /// <summary>
        /// Applies protection to the node. For "simple" mode this creates (or
        /// reuses) a member and a dedicated "__umbracoRole_{login}" role; for
        /// "advanced" mode it adds/removes the selected membership roles.
        /// Requires valid login and error page selections.
        /// </summary>
        protected void protect_Click(object sender, CommandEventArgs e)
        {
            if (string.IsNullOrEmpty(errorPagePicker.Value))
                cv_errorPage.IsValid = false;
            if (string.IsNullOrEmpty(loginPagePicker.Value))
                cv_loginPage.IsValid = false;
            if (Page.IsValid)
            {
                int pageId = int.Parse(helper.Request("nodeId"));
                p_buttons.Visible = false;
                pane_advanced.Visible = false;
                pane_simple.Visible = false;
                if (e.CommandName == "simple")
                {
                    MembershipUser member = Membership.GetUser(simpleLogin.Text);
                    if (member == null)
                    {
                        // this needs to work differently depending on umbraco members or external membership provider
                        if (!cms.businesslogic.member.Member.InUmbracoMemberMode())
                        {
                            member = Membership.CreateUser(simpleLogin.Text, simplePassword.Text);
                        }
                        else
                        {
                            // Ensure the internal member type used for simple
                            // protection exists before creating the member.
                            try
                            {
                                if (cms.businesslogic.member.MemberType.GetByAlias("_umbracoSystemDefaultProtectType") == null)
                                {
                                    cms.businesslogic.member.MemberType.MakeNew(BusinessLogic.User.GetUser(0), "_umbracoSystemDefaultProtectType");
                                }
                            }
                            catch
                            {
                                cms.businesslogic.member.MemberType.MakeNew(BusinessLogic.User.GetUser(0), "_umbracoSystemDefaultProtectType");
                            }
                            // create member
                            Member mem = cms.businesslogic.member.Member.MakeNew(simpleLogin.Text, "", cms.businesslogic.member.MemberType.GetByAlias("_umbracoSystemDefaultProtectType"), base.getUser());
                            // working around empty password restriction for Umbraco Member Mode
                            mem.Password = simplePassword.Text;
                            member = Membership.GetUser(simpleLogin.Text);
                        }
                    }
                    else
                    {
                        // change password if it's not empty
                        if (string.IsNullOrWhiteSpace(simplePassword.Text) == false)
                        {
                            var mem = Member.GetMemberFromLoginName(simpleLogin.Text);
                            mem.Password = simplePassword.Text;
                        }
                    }
                    // Create or find a memberGroup dedicated to this login.
                    string simpleRoleName = "__umbracoRole_" + simpleLogin.Text;
                    if (!Roles.RoleExists(simpleRoleName))
                    {
                        Roles.CreateRole(simpleRoleName);
                    }
                    if (!Roles.IsUserInRole(member.UserName, simpleRoleName))
                    {
                        Roles.AddUserToRole(member.UserName, simpleRoleName);
                    }
                    Access.ProtectPage(true, pageId, int.Parse(loginPagePicker.Value), int.Parse(errorPagePicker.Value));
                    Access.AddMembershipRoleToDocument(pageId, simpleRoleName);
                    Access.AddMembershipUserToDocument(pageId, member.UserName);
                }
                else if (e.CommandName == "advanced")
                {
                    cms.businesslogic.web.Access.ProtectPage(false, pageId, int.Parse(loginPagePicker.Value), int.Parse(errorPagePicker.Value));
                    // Sync document roles with the dual selectbox: the Value is a
                    // comma-separated list, so wrap both sides in commas to match
                    // whole role names only.
                    foreach (ListItem li in _memberGroups.Items)
                        if (("," + _memberGroups.Value + ",").IndexOf("," + li.Value + ",") > -1)
                            cms.businesslogic.web.Access.AddMembershipRoleToDocument(pageId, li.Value);
                        else
                            cms.businesslogic.web.Access.RemoveMembershipRoleFromDocument(pageId, li.Value);
                }
                feedback.Text = ui.Text("publicAccess", "paIsProtected", new cms.businesslogic.CMSNode(pageId).Text, null) + "</p><p><a href='#' onclick='" + ClientTools.Scripts.CloseModalWindow() + "'>" + ui.Text("closeThisWindow") + "</a>";
                ClientTools.ReloadActionNode(true, false);
                feedback.type = global::umbraco.uicontrols.Feedback.feedbacktype.success;
            }
        }

        /// <summary>
        /// Removes all public-access protection from the node and shows a
        /// confirmation message with a link to close the modal.
        /// </summary>
        protected void buttonRemoveProtection_Click(object sender, System.EventArgs e)
        {
            int pageId = int.Parse(helper.Request("nodeId"));
            p_buttons.Visible = false;
            pane_advanced.Visible = false;
            pane_simple.Visible = false;
            Access.RemoveProtection(pageId);
            feedback.Text = ui.Text("publicAccess", "paIsRemoved", new cms.businesslogic.CMSNode(pageId).Text, null) + "</p><p><a href='#' onclick='" + ClientTools.Scripts.CloseModalWindow() + "'>" + ui.Text("closeThisWindow") + "</a>";
            ClientTools.ReloadActionNode(true, false);
            feedback.type = global::umbraco.uicontrols.Feedback.feedbacktype.success;
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gax = Google.Api.Gax;
using gcav = Google.Cloud.AIPlatform.V1;
using sys = System;

namespace Google.Cloud.AIPlatform.V1
{
    /// <summary>Resource name for the <c>ModelDeploymentMonitoringJob</c> resource.</summary>
    public sealed partial class ModelDeploymentMonitoringJobName : gax::IResourceName, sys::IEquatable<ModelDeploymentMonitoringJobName>
    {
        /// <summary>The possible contents of <see cref="ModelDeploymentMonitoringJobName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>
            /// A resource name with pattern
            /// <c>
            /// projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
            /// .
            /// </summary>
            ProjectLocationModelDeploymentMonitoringJob = 1,
        }

        // Shared template used for both formatting and parsing the single known pattern.
        private static gax::PathTemplate s_projectLocationModelDeploymentMonitoringJob = new gax::PathTemplate("projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}");

        /// <summary>
        /// Creates a <see cref="ModelDeploymentMonitoringJobName"/> containing an unparsed resource name.
        /// </summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="ModelDeploymentMonitoringJobName"/> containing the provided
        /// <paramref name="unparsedResourceName"/>.
        /// </returns>
        public static ModelDeploymentMonitoringJobName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new ModelDeploymentMonitoringJobName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="ModelDeploymentMonitoringJobName"/> with the pattern
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// .
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="modelDeploymentMonitoringJobId">
        /// The <c>ModelDeploymentMonitoringJob</c> ID. Must not be <c>null</c> or empty.
        /// </param>
        /// <returns>
        /// A new instance of <see cref="ModelDeploymentMonitoringJobName"/> constructed from the provided ids.
        /// </returns>
        public static ModelDeploymentMonitoringJobName FromProjectLocationModelDeploymentMonitoringJob(string projectId, string locationId, string modelDeploymentMonitoringJobId) =>
            new ModelDeploymentMonitoringJobName(ResourceNameType.ProjectLocationModelDeploymentMonitoringJob, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), modelDeploymentMonitoringJobId: gax::GaxPreconditions.CheckNotNullOrEmpty(modelDeploymentMonitoringJobId, nameof(modelDeploymentMonitoringJobId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="ModelDeploymentMonitoringJobName"/> with
        /// pattern
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// .
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="modelDeploymentMonitoringJobId">
        /// The <c>ModelDeploymentMonitoringJob</c> ID. Must not be <c>null</c> or empty.
        /// </param>
        /// <returns>
        /// The string representation of this <see cref="ModelDeploymentMonitoringJobName"/> with pattern
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// .
        /// </returns>
        public static string Format(string projectId, string locationId, string modelDeploymentMonitoringJobId) =>
            FormatProjectLocationModelDeploymentMonitoringJob(projectId, locationId, modelDeploymentMonitoringJobId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="ModelDeploymentMonitoringJobName"/> with
        /// pattern
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// .
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="modelDeploymentMonitoringJobId">
        /// The <c>ModelDeploymentMonitoringJob</c> ID. Must not be <c>null</c> or empty.
        /// </param>
        /// <returns>
        /// The string representation of this <see cref="ModelDeploymentMonitoringJobName"/> with pattern
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// .
        /// </returns>
        public static string FormatProjectLocationModelDeploymentMonitoringJob(string projectId, string locationId, string modelDeploymentMonitoringJobId) =>
            s_projectLocationModelDeploymentMonitoringJob.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(modelDeploymentMonitoringJobId, nameof(modelDeploymentMonitoringJobId)));

        /// <summary>
        /// Parses the given resource name string into a new <see cref="ModelDeploymentMonitoringJobName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// </description>
        /// </item>
        /// </list>
        /// </remarks>
        /// <param name="modelDeploymentMonitoringJobName">
        /// The resource name in string form. Must not be <c>null</c>.
        /// </param>
        /// <returns>The parsed <see cref="ModelDeploymentMonitoringJobName"/> if successful.</returns>
        public static ModelDeploymentMonitoringJobName Parse(string modelDeploymentMonitoringJobName) =>
            Parse(modelDeploymentMonitoringJobName, false);

        /// <summary>
        /// Parses the given resource name string into a new <see cref="ModelDeploymentMonitoringJobName"/> instance;
        /// optionally allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// </description>
        /// </item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="modelDeploymentMonitoringJobName">
        /// The resource name in string form. Must not be <c>null</c>.
        /// </param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="ModelDeploymentMonitoringJobName"/> if successful.</returns>
        public static ModelDeploymentMonitoringJobName Parse(string modelDeploymentMonitoringJobName, bool allowUnparsed) =>
            TryParse(modelDeploymentMonitoringJobName, allowUnparsed, out ModelDeploymentMonitoringJobName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="ModelDeploymentMonitoringJobName"/>
        /// instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// </description>
        /// </item>
        /// </list>
        /// </remarks>
        /// <param name="modelDeploymentMonitoringJobName">
        /// The resource name in string form. Must not be <c>null</c>.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="ModelDeploymentMonitoringJobName"/>, or <c>null</c> if
        /// parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string modelDeploymentMonitoringJobName, out ModelDeploymentMonitoringJobName result) =>
            TryParse(modelDeploymentMonitoringJobName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="ModelDeploymentMonitoringJobName"/>
        /// instance; optionally allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// </description>
        /// </item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="modelDeploymentMonitoringJobName">
        /// The resource name in string form. Must not be <c>null</c>.
        /// </param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="ModelDeploymentMonitoringJobName"/>, or <c>null</c> if
        /// parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string modelDeploymentMonitoringJobName, bool allowUnparsed, out ModelDeploymentMonitoringJobName result)
        {
            gax::GaxPreconditions.CheckNotNull(modelDeploymentMonitoringJobName, nameof(modelDeploymentMonitoringJobName));
            gax::TemplatedResourceName resourceName;
            // First try the single known pattern; on success the template yields
            // the three ids in order: project, location, job.
            if (s_projectLocationModelDeploymentMonitoringJob.TryParseName(modelDeploymentMonitoringJobName, out resourceName))
            {
                result = FromProjectLocationModelDeploymentMonitoringJob(resourceName[0], resourceName[1], resourceName[2]);
                return true;
            }
            // Otherwise, optionally fall back to storing the string unparsed.
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(modelDeploymentMonitoringJobName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        private ModelDeploymentMonitoringJobName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string locationId = null, string modelDeploymentMonitoringJobId = null, string projectId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            LocationId = locationId;
            ModelDeploymentMonitoringJobId = modelDeploymentMonitoringJobId;
            ProjectId = projectId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="ModelDeploymentMonitoringJobName"/> class from the component parts
        /// of pattern
        /// <c>projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}</c>
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="modelDeploymentMonitoringJobId">
        /// The <c>ModelDeploymentMonitoringJob</c> ID. Must not be <c>null</c> or empty.
        /// </param>
        public ModelDeploymentMonitoringJobName(string projectId, string locationId, string modelDeploymentMonitoringJobId) : this(ResourceNameType.ProjectLocationModelDeploymentMonitoringJob, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), modelDeploymentMonitoringJobId: gax::GaxPreconditions.CheckNotNullOrEmpty(modelDeploymentMonitoringJobId, nameof(modelDeploymentMonitoringJobId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string LocationId { get; }

        /// <summary>
        /// The <c>ModelDeploymentMonitoringJob</c> ID. Will not be <c>null</c>, unless this instance contains an
        /// unparsed resource name.
        /// </summary>
        public string ModelDeploymentMonitoringJobId { get; }

        /// <summary>
        /// The <c>Project</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string ProjectId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            // Re-expand the template from the stored ids; unparsed names are
            // returned verbatim.
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.ProjectLocationModelDeploymentMonitoringJob: return s_projectLocationModelDeploymentMonitoringJob.Expand(ProjectId, LocationId, ModelDeploymentMonitoringJobId);
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        /// <summary>Returns a hash code for this resource name.</summary>
        // Equality and hashing both delegate to the canonical string form.
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as ModelDeploymentMonitoringJobName);

        /// <inheritdoc/>
        public bool Equals(ModelDeploymentMonitoringJobName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        public static bool operator ==(ModelDeploymentMonitoringJobName a, ModelDeploymentMonitoringJobName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(ModelDeploymentMonitoringJobName a, ModelDeploymentMonitoringJobName b) => !(a == b);
    }

    public partial class ModelDeploymentMonitoringJob
    {
        /// <summary>
        /// <see cref="gcav::ModelDeploymentMonitoringJobName"/>-typed view over the <see cref="Name"/> resource name
        /// property.
        /// </summary>
        public gcav::ModelDeploymentMonitoringJobName ModelDeploymentMonitoringJobName
        {
            get => string.IsNullOrEmpty(Name) ? null : gcav::ModelDeploymentMonitoringJobName.Parse(Name, allowUnparsed: true);
            set => Name = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="EndpointName"/>-typed view over the <see cref="Endpoint"/> resource name property.
        /// </summary>
        public EndpointName EndpointAsEndpointName
        {
            get => string.IsNullOrEmpty(Endpoint) ? null : EndpointName.Parse(Endpoint, allowUnparsed: true);
            set => Endpoint = value?.ToString() ?? "";
        }
    }
}
#region License
//
// FieldContact.cs April 2007
//
// Copyright (C) 2007, Niall Gallagher <niallg@users.sf.net>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
#endregion

#region Using directives
using System;
#endregion

namespace SimpleFramework.Xml.Core {

   /// <summary>
   /// The <c>FieldContact</c> object is used to act as a contact
   /// for a field within an object. This allows a value to be set on an
   /// object field during deserialization and acquired from the same
   /// field during serialization.
   /// </summary>
   /// <seealso>
   /// SimpleFramework.Xml.Core.FieldScanner
   /// </seealso>
   class FieldContact : Contact {

      /// <summary>
      /// This is the label that marks the field within the object.
      /// </summary>
      private Annotation label;

      /// <summary>
      /// This represents the field within the schema class object.
      /// </summary>
      private Field field;

      /// <summary>
      /// This is the name for this contact as taken from the field.
      /// </summary>
      private String name;

      /// <summary>
      /// This is the modifiers for the field that this represents.
      /// </summary>
      private int modifier;

      /// <summary>
      /// Constructor for the <c>FieldContact</c> object. This is
      /// used as a point of contact for a field within a schema class.
      /// Values can be read and written directly to the field with this.
      /// </summary>
      /// <param name="field">
      /// this is the field that is the point of contact
      /// </param>
      /// <param name="label">
      /// this is the annotation that is used by the field
      /// </param>
      public FieldContact(Field field, Annotation label) {
         this.modifier = field.getModifiers();
         this.label = label;
         this.field = field;
      }

      /// <summary>
      /// This is used to determine if the annotated contact is for a
      /// read only variable. A read only variable is a field that
      /// can be set from within the constructor such as a blank readonly
      /// variable. It can also be a method with no set counterpart.
      /// </summary>
      /// <returns>
      /// this returns true if the contact is a constant one
      /// </returns>
      public bool IsReadOnly() {
         return !IsStatic() && IsFinal();
      }

      /// <summary>
      /// This is used to determine if the annotated contact is for a
      /// static field or method. A static field or method is one that
      /// contains the "static" keyword. Any const fields will
      /// be read only and does not require any matching annotation.
      /// </summary>
      /// <returns>
      /// this returns true if the contact is a static one
      /// </returns>
      public bool IsStatic() {
         return Modifier.IsStatic(modifier);
      }

      /// <summary>
      /// This is used to identify annotated methods are fields that
      /// can not be modified. Such field will require that there is
      /// a constructor that can have the value injected in to it.
      /// </summary>
      /// <returns>
      /// this returns true if the field or method is readonly
      /// </returns>
      public bool IsFinal() {
         return Modifier.IsFinal(modifier);
      }

      /// <summary>
      /// This will provide the contact type. The contact type is the
      /// class that is to be set and get on the object. This represents
      /// the return type for the get and the parameter for the set.
      /// </summary>
      /// <returns>
      /// this returns the type that this contact represents
      /// </returns>
      public Class Type {
         get {
            return field.Type;
         }
      }

      /// <summary>
      /// This provides the dependent class for the contact. This will
      /// actually represent a generic type for the actual type. For
      /// contacts that use a <c>Collection</c> type this will
      /// be the generic type parameter for that collection.
      /// </summary>
      /// <returns>
      /// this returns the dependent type for the contact
      /// </returns>
      public Class Dependent {
         get {
            return Reflector.getDependent(field);
         }
      }

      /// <summary>
      /// This provides the dependent classes for the contact. This will
      /// typically represent a generic types for the actual type. For
      /// contacts that use a <c>Map</c> type this will be the
      /// generic type parameter for that map type declaration.
      /// </summary>
      /// <returns>
      /// this returns the dependent type for the contact
      /// </returns>
      public Class[] Dependents {
         get {
            return Reflector.getDependents(field);
         }
      }

      /// <summary>
      /// This is used to acquire the name of the field. This will return
      /// the name of the field which can then be used to determine the
      /// XML attribute or element the contact represents. This ensures
      /// that the name provided string is internalized for performance.
      /// </summary>
      /// <returns>
      /// this returns the name of the field represented
      /// </returns>
      public String Name {
         get {
            // Lazily resolve and cache the interned field name.
            if(name == null) {
               name = GetName(field);
            }
            return name;
         }
      }

      /// <summary>
      /// This is used to acquire the name of the field such that it is
      /// an internalized string. Internalization of the contact name
      /// ensures that comparisons can be made to annotation names with
      /// a simple reference comparison rather than a string comparison.
      /// </summary>
      /// <param name="field">
      /// the field to acquire the internalized name from
      /// </param>
      /// <returns>
      /// this returns the name of the string, internalized
      /// </returns>
      public String GetName(Field field) {
         String name = field.GetName();

         if(name != null) {
            // String.Intern is the C# equivalent of Java's String.intern().
            name = String.Intern(name);
         }
         return name;
      }

      /// <summary>
      /// This is the annotation associated with the point of contact.
      /// This will be an XML annotation that describes how the contact
      /// should be serialized and deserialized from the object.
      /// </summary>
      /// <returns>
      /// this provides the annotation associated with this
      /// </returns>
      public Annotation Annotation {
         get {
            return label;
         }
      }

      /// <summary>
      /// This is the annotation associated with the point of contact.
      /// This will be an XML annotation that describes how the contact
      /// should be serialized and deserialized from the object.
      /// </summary>
      /// <param name="type">
      /// this is the type of the annotation to acquire
      /// </param>
      /// <returns>
      /// this provides the annotation associated with this
      /// </returns>
      // Name kept lowercase for parity with the Java original; the generic
      // parameter is declared with C# syntax rather than Java's "<T : Annotation>".
      public T getAnnotation<T>(Class<T> type) where T : Annotation {
         if(type == label.annotationType()) {
            return (T) label;
         }
         return field.getAnnotation(type);
      }

      /// <summary>
      /// This is used to set the specified value on the provided object.
      /// The value provided must be an instance of the contact class so
      /// that it can be set without a runtime class compatibility error.
      /// </summary>
      /// <param name="source">
      /// this is the object to set the value on
      /// </param>
      /// <param name="value">
      /// this is the value that is to be set on the object
      /// </param>
      public void Set(Object source, Object value) {
         // Readonly (final) fields can only be set via a constructor,
         // so writes to them are silently ignored here.
         if(!IsFinal()) {
            field.Set(source, value);
         }
      }

      /// <summary>
      /// This is used to get the specified value on the provided object.
      /// The value returned from this method will be an instance of the
      /// contact class type. If the returned object is of a different
      /// type then the serialization process will fail.
      /// </summary>
      /// <param name="source">
      /// this is the object to acquire the value from
      /// </param>
      /// <returns>
      /// this is the value that is acquired from the object
      /// </returns>
      public Object Get(Object source) {
         return field.Get(source);
      }

      /// <summary>
      /// This is used to describe the contact as it exists within the
      /// owning class. It is used to provide error messages that can
      /// be used to debug issues that occur when processing a contact.
      /// The string provided is the generic field string.
      /// </summary>
      /// <returns>
      /// this returns a string representation of the contact
      /// </returns>
      public override String ToString() {
         // Use .NET composite formatting (the Java "%s" form is not valid here)
         // and the Name property (there is no zero-argument GetName overload).
         return String.Format("field '{0}' {1}", Name, field.ToString());
      }
   }
}
// // GtkBaseClient.cs // // Author: // Aaron Bockover <abockover@novell.com> // // Copyright (C) 2007 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.IO;

using Mono.Addins;

using Hyena;

using Banshee.Base;
using Banshee.Metrics;
using Banshee.Database;
using Banshee.ServiceStack;
using Banshee.Gui.Dialogs;

namespace Banshee.Gui
{
    // Shared base class for Banshee's GTK clients. Handles GTK bootstrap,
    // service registration, a user gtkrc override file, shutdown prompting,
    // and bridging Application timeout/idle handlers onto the GLib main loop.
    public abstract class GtkBaseClient : Client
    {
        static GtkBaseClient ()
        {
            // Resolve the per-user gtkrc path once, before any instance exists.
            Paths.ApplicationName = Application.InternalName;
            user_gtkrc = Path.Combine (Paths.ApplicationData, "gtkrc");
        }

        // Concrete client type registered via Startup<T>; only one is allowed
        // per process (enforced in the generic Startup<T> () below).
        private static Type client_type;
        private static string user_gtkrc;

        // Entry point taking command-line args: installs the user gtkrc
        // (unless suppressed with --no-gtkrc), then boots the client.
        // NOTE(review): the "args" parameter itself is not read here;
        // presumably ApplicationContext.CommandLine was populated earlier.
        public static void Startup<T> (string [] args) where T : GtkBaseClient
        {
            Hyena.Log.InformationFormat ("Running Banshee {0}: [{1}]", Application.Version,
                Application.BuildDisplayInfo);

            // This could go into GtkBaseClient, but it's probably something we
            // should really only support at each client level
            if (File.Exists (user_gtkrc) && !ApplicationContext.CommandLine.Contains ("no-gtkrc")) {
                Gtk.Rc.AddDefaultFile (user_gtkrc);
            }

            // Boot the client
            Banshee.Gui.GtkBaseClient.Startup<T> ();
        }

        // Registers the concrete client type and starts it inside the
        // clean-room startup wrapper. Throws if called twice.
        public static void Startup<T> () where T : GtkBaseClient
        {
            if (client_type != null) {
                throw new ApplicationException ("Only a single GtkBaseClient can be initialized through Entry<T>");
            }

            client_type = typeof (T);
            Hyena.Gui.CleanRoomStartup.Startup (Startup);
        }

        // Instantiates the registered client type and runs its main loop.
        private static void Startup ()
        {
            ((GtkBaseClient)Activator.CreateInstance (client_type)).Run ();
        }

        private string default_icon_name;

        protected GtkBaseClient () : this (true, Application.IconName)
        {
        }

        // initializeDefault: when true, runs the full Initialize(true) boot
        // sequence from the constructor; defaultIconName becomes the default
        // Gtk.Window icon.
        protected GtkBaseClient (bool initializeDefault, string defaultIconName)
        {
            this.default_icon_name = defaultIconName;
            if (initializeDefault) {
                Initialize (true);
            }
        }

        // Hook for subclasses; runs before InitializeGtk.
        protected virtual void PreInitializeGtk ()
        {
        }

        // Brings up GLib threading and Gtk.Application. With --debug-gtkrc,
        // polls for gtkrc changes every 5 seconds and reapplies styles.
        protected virtual void InitializeGtk ()
        {
            Log.Debug ("Initializing GTK");

            if (!GLib.Thread.Supported) {
                GLib.Thread.Init ();
            }

            Gtk.Application.Init ();

            if (ApplicationContext.CommandLine.Contains ("debug-gtkrc")) {
                Log.Information ("Note: gtkrc files will be checked for reload every 5 seconds!");
                GLib.Timeout.Add (5000, delegate {
                    if (Gtk.Rc.ReparseAll ()) {
                        Gtk.Rc.ResetStyles (Gtk.Settings.Default);
                        Log.Information ("gtkrc has been reloaded");
                    }
                    // Returning true keeps the GLib timeout alive.
                    return true;
                });
            }
        }

        // Instantiates every addin registered at the PostInitializeGtk
        // extension point; failures are logged, not fatal.
        protected virtual void PostInitializeGtk ()
        {
            Log.Debug ("Post-Initializing GTK");
            foreach (TypeExtensionNode node in AddinManager.GetExtensionNodes
                ("/Banshee/ThickClient/GtkBaseClient/PostInitializeGtk")) {
                try {
                    node.CreateInstance ();
                } catch (Exception e) {
                    Log.Exception ("PostInitializeGtk extension failed to run", e);
                }
            }
        }

        // Full boot sequence. Order matters: GTK must be up before
        // Application.Initialize and before any window/icon work.
        protected void Initialize (bool registerCommonServices)
        {
            // Set the process name so system process listings and commands are pretty
            ApplicationContext.TrySetProcessName (Application.InternalName);

            PreInitializeGtk ();
            InitializeGtk ();

            Application.Initialize ();

            PostInitializeGtk ();

            Gtk.Window.DefaultIconName = default_icon_name;

            // All later ThreadAssist proxying goes through the app's Invoke.
            ThreadAssist.InitializeMainThread ();
            ThreadAssist.ProxyToMainHandler = Banshee.ServiceStack.Application.Invoke;

            Gdk.Global.ProgramClass = Application.InternalName;
#if HAVE_GLIBSHARP_2_12_7
            GLib.Global.ApplicationName = "Banshee";
#endif

            if (ApplicationContext.Debugging) {
                GLib.Log.SetLogHandler ("Gtk", GLib.LogLevelFlags.Critical, GLib.Log.PrintTraceLogFunction);
                Gdk.Window.DebugUpdates = !String.IsNullOrEmpty (Environment.GetEnvironmentVariable ("GDK_DEBUG_UPDATES"));
            }

            ServiceManager.ServiceStarted += OnServiceStarted;

            // Register specific services this client will care about
            if (registerCommonServices) {
                Banshee.Gui.CommonServices.Register ();
            }

            OnRegisterServices ();

            Application.ShutdownPromptHandler = OnShutdownPrompt;
            Application.TimeoutHandler = RunTimeout;
            Application.IdleHandler = RunIdle;
            Application.IdleTimeoutRemoveHandler = IdleTimeoutRemove;

            BansheeMetrics.Started += OnMetricsStarted;

            // Start the core boot process
            Application.PushClient (this);
            Application.Run ();

            if (!Banshee.Configuration.DefaultApplicationHelper.NeverAsk && Banshee.Configuration.DefaultApplicationHelper.HaveHelper) {
                Application.ClientStarted += delegate {
                    Banshee.Gui.Dialogs.DefaultApplicationHelperDialog.RunIfAppropriate ();
                };
            }

            Log.Notify += OnLogNotify;
        }

        // Records display/screen statistics once metrics collection starts.
        private void OnMetricsStarted ()
        {
            var metrics = BansheeMetrics.Instance;
            var screen = Gdk.Screen.Default;

            metrics.Add ("Display/NScreens", Gdk.Display.Default.NScreens);
            metrics.Add ("Screen/Height", screen.Height);
            metrics.Add ("Screen/Width", screen.Width);
            metrics.Add ("Screen/IsComposited", screen.IsComposited);
            metrics.Add ("Screen/NMonitors", screen.NMonitors);
        }

        // Fires OnStarted from an idle handler, then blocks in the GTK loop.
        public virtual void Run ()
        {
            RunIdle (delegate { OnStarted (); return false; });
            Log.Debug ("Starting GTK main loop");
            Gtk.Application.Run ();
        }

        // Hook for subclasses to register additional services.
        protected virtual void OnRegisterServices ()
        {
        }

        // Waits for the database service to come up, then attaches migration
        // progress monitoring and detaches itself.
        private void OnServiceStarted (ServiceStartedArgs args)
        {
            if (args.Service is BansheeDbConnection) {
                ServiceManager.ServiceStarted -= OnServiceStarted;
                BansheeDbFormatMigrator migrator = ((BansheeDbConnection)args.Service).Migrator;
                if (migrator != null) {
                    migrator.Started += OnMigratorStarted;
                    migrator.Finished += OnMigratorFinished;
                }
            }
        }

        private void OnMigratorStarted (object o, EventArgs args)
        {
            BansheeDbFormatMigrator migrator = (BansheeDbFormatMigrator)o;
            new BansheeDbFormatMigratorMonitor (migrator);
        }

        private void OnMigratorFinished (object o, EventArgs args)
        {
            BansheeDbFormatMigrator migrator = (BansheeDbFormatMigrator)o;
            migrator.Started -= OnMigratorStarted;
            migrator.Finished -= OnMigratorFinished;
        }

        // Log entries are surfaced on the GUI thread via an idle handler.
        private void OnLogNotify (LogNotifyArgs args)
        {
            RunIdle (delegate {
                ShowLogCoreEntry (args.Entry);
                return false;
            });
        }

        // Maps a log entry onto a modal HIG message dialog, parented to the
        // primary window when one is available.
        private void ShowLogCoreEntry (LogEntry entry)
        {
            Gtk.Window window = null;
            Gtk.MessageType mtype;

            if (ServiceManager.Contains<GtkElementsService> ()) {
                window = ServiceManager.Get<GtkElementsService> ().PrimaryWindow;
            }

            switch (entry.Type) {
                case LogEntryType.Warning:
                    mtype = Gtk.MessageType.Warning;
                    break;
                case LogEntryType.Information:
                    mtype = Gtk.MessageType.Info;
                    break;
                case LogEntryType.Error:
                default:
                    mtype = Gtk.MessageType.Error;
                    break;
            }

            Hyena.Widgets.HigMessageDialog dialog = new Hyena.Widgets.HigMessageDialog (
                window, Gtk.DialogFlags.Modal,
                mtype, Gtk.ButtonsType.Close, entry.Message, entry.Details);

            dialog.Title = String.Empty;
            dialog.Run ();
            dialog.Destroy ();
        }

        // Returns true when the user confirms shutdown (anything but Cancel).
        private bool OnShutdownPrompt ()
        {
            ConfirmShutdownDialog dialog = new ConfirmShutdownDialog ();
            try {
                return dialog.Run () != Gtk.ResponseType.Cancel;
            } finally {
                dialog.Destroy ();
            }
        }

        // The three members below adapt Application's handler delegates onto
        // GLib main-loop sources.
        protected uint RunTimeout (uint milliseconds, TimeoutHandler handler)
        {
            return GLib.Timeout.Add (milliseconds, delegate { return handler (); });
        }

        protected uint RunIdle (IdleHandler handler)
        {
            return GLib.Idle.Add (delegate { return handler (); });
        }

        protected bool IdleTimeoutRemove (uint id)
        {
            return GLib.Source.Remove (id);
        }
    }
}
using UnityEngine;
using System.Collections.Generic;

// Serializable holder for a clone source mesh and its per-clone placement
// parameters. The private vertex/uv/tri caches are currently unused here.
[System.Serializable]
public class MegaCloneObj
{
	public Mesh mesh;
	public float Gap;
	public Vector3 Offset;
	public Vector3 Scale;
	public float Weight;
	Vector3[] mverts;
	Vector2[] muvs;
	int[] mtris;
}

// Loft layer that clones up to three meshes (start / repeated main / end)
// along the path of another loft layer (surfaceLoft/surfaceLayer),
// deforming each vertex to follow the path frame.
public class MegaLoftLayerCloneSimple : MegaLoftLayerBase
{
	public Mesh cloneMesh;
	// Editor foldout state.
	public bool showstartparams = true;
	public bool showmainparams = true;
	public bool showendparams = true;
	// Per-section enable flags.
	public bool StartEnabled = true;
	public bool MainEnabled = true;
	public bool EndEnabled = true;
	// Per-section scale, multiplied by GlobalScale everywhere it is used.
	public Vector3 StartScale = Vector3.one;
	public Vector3 MainScale = Vector3.one;
	public Vector3 EndScale = Vector3.one;
	// Start alpha along the path (passed as 'percent' into Deform).
	public float start = 0.0f;
	public float GlobalScale = 1.0f;
	// Gaps inserted after the start mesh, between mains, and before the end.
	public float StartGap = 0.0f;
	public float EndGap = 0.0f;
	public float Gap = 0.0f;
	// Scales the lateral (y) component of the look direction in Deform.
	public float RemoveDof = 1.0f;
	// Number of main-mesh repetitions; recomputed in PrepareLoft when Length != 0.
	public int repeat = 1;
	public float Length = 0.0f;
	// Lookahead used when sampling the path frame (clamped to >= 0.1 in BuildMesh).
	public float tangent = 0.1f;
	// Axis of the clone mesh that runs along the path.
	public MegaAxis axis = MegaAxis.Z;
	public Vector3 rot = Vector3.zero;
	// Source meshes for the three sections.
	public Mesh startObj;
	public Mesh mainObj;
	public Mesh endObj;
	public float twist = 0.0f;
	public float damage = 0.0f;
	// The layer whose path we clone along.
	public MegaShapeLoft surfaceLoft;
	public int surfaceLayer = -1;
	public AnimationCurve ScaleCrv = new AnimationCurve(new Keyframe(0, 0), new Keyframe(1, 0));
	public bool useCrossCrv = false;
	public AnimationCurve CrossCrv = new AnimationCurve(new Keyframe(0, 0), new Keyframe(1, 0));
	public bool useTwist = false;
	public AnimationCurve twistCrv = new AnimationCurve(new Keyframe(0, 0), new Keyframe(1, 0));
	public float CrossAlpha = 0.0f;
	public bool CalcUp = true;
	public float calcUpAmount = 1.0f;
	// Local offsets applied per section (scaled by the section scale).
	public Vector3 StartOff = Vector3.zero;
	public Vector3 MainOff = Vector3.zero;	// If we have multi mains then each needs a value
	public Vector3 EndOff = Vector3.zero;
	public Vector3 tmrot = Vector3.zero;
	public Vector3 Offset = Vector3.zero;

	// normals as well
	// Cached geometry for the three source meshes, filled in by Init().
	Vector3[] sverts;
	Vector2[] suvs;
	int[] stris;
	Vector3[] mverts;
	Vector2[] muvs;
	int[] mtris;
	Vector3[] everts;
	Vector2[] euvs;
	int[] etris;

	// Scratch transforms reused by Deform/BuildMesh (not thread-safe).
	Matrix4x4 meshtm;
	Matrix4x4 tm;
	Matrix4x4 mat;
	Quaternion meshrot;
	Quaternion tw;
	Matrix4x4 wtm;
	// Reciprocal of the source layer's loft length, set in BuildMesh.
	float LayerLength = 0.0f;

	[ContextMenu("Help")]
	public void Help()
	{
		Application.OpenURL("http://www.west-racing.com/mf/?page_id=2146");
	}

	// Assigns a source mesh: which = 0 start, 1 main, 2 end.
	public void SetMesh(Mesh newmesh, int which)
	{
		//Bounds b = new Bounds(Vector3.zero, Vector3.one);
		//if ( newmesh )
		//{
		//	b = newmesh.bounds;
		//}
		switch ( which )
		{
			case 0:
				//if ( startObj == null )
				//	StartGap = MegaUtils.LargestValue1(b.size);
				startObj = newmesh;
				break;
			case 1:
				//if ( mainObj == null )
				//	Gap = MegaUtils.LargestValue1(b.size);
				mainObj = newmesh;
				break;
			case 2:
				//if ( endObj == null )
				//	EndGap = MegaUtils.LargestValue1(b.size);
				endObj = newmesh;
				break;
		}
	}

	// A layer is valid when enabled, pointing at a source layer, and at
	// least one clone mesh is assigned. (The inner surfaceLoft check is
	// redundant with the outer one but kept as-is.)
	public override bool Valid()
	{
		if ( LayerEnabled && surfaceLoft && surfaceLayer >= 0 )
		{
			if ( startObj || mainObj || endObj )
			{
				if ( surfaceLoft && surfaceLayer >= 0 )
					return true;
			}
		}

		return false;
	}

	// True when the changed layer is the one we clone along.
	public override bool LayerNotify(MegaLoftLayerBase layer, int reason)
	{
		if ( surfaceLoft != null && surfaceLayer >= 0 )
		{
			if ( surfaceLoft.Layers[surfaceLayer] == layer )
				return true;
		}

		return false;
	}

	// True when the changed loft is the one we clone along.
	public override bool LoftNotify(MegaShapeLoft loft, int reason)
	{
		if ( surfaceLoft != null && surfaceLoft == loft )
			return true;

		return false;
	}

	public int NumSubMeshes()
	{
		return 1;
	}

	public int SubMeshTris(int i)
	{
		return 0;	//tris.Count;
	}

	public int SubMeshVerts(int i)
	{
		return 0;	//verts.Count;
	}

	public Material GetMaterials(int i)
	{
		return material;
	}

	// call this when we set a new object
	// Caches vertices, uvs and triangles from each assigned source mesh.
	void Init()
	{
		//transform.position = Vector3.zero;
		// need to get alpha for each vert in the source mesh
		if ( startObj != null )
		{
			sverts = startObj.vertices;
			suvs = startObj.uv;
			stris = startObj.triangles;
		}

		if ( endObj != null )
		{
			everts = endObj.vertices;
			euvs = endObj.uv;
			etris = endObj.triangles;
		}

		if ( mainObj != null )
		{
			mverts = mainObj.vertices;
			muvs = mainObj.uv;
			mtris = mainObj.triangles;
		}
	}

	public override Vector3 GetPos(MegaShapeLoft loft, float ca, float a)
	{
		return Vector3.zero;
	}

	// Sizes the output vertex/uv/triangle buffers. When Length != 0 the
	// main repeat count is recomputed from the source layer's path length
	// and the main mesh footprint along the chosen axis.
	public override bool PrepareLoft(MegaShapeLoft loft, int sc)
	{
		Init();

		int vcount = 0;
		int tcount = 0;

		if ( startObj && StartEnabled )
		{
			vcount += sverts.Length;
			tcount += stris.Length;
		}

		if ( endObj && EndEnabled )
		{
			vcount += everts.Length;
			tcount += etris.Length;
		}

		if ( mainObj && MainEnabled )
		{
			if ( Length != 0.0f )
			{
				MegaShape path = null;
				float dist = 0.0f;

				MegaLoftLayerSimple layer = (MegaLoftLayerSimple)surfaceLoft.Layers[surfaceLayer];
				path = layer.layerPath;
				dist = layer.LoftLength * Length;

				if ( path )
				{
					Vector3 scl = MainScale * GlobalScale;
					Vector3 size = Vector3.zero;	//Vector3.Scale(mainObj.bounds.size, scl);	// * + Gap)
					size.x = (mainObj.bounds.size.x * scl.x) + (Gap * GlobalScale);
					size.y = (mainObj.bounds.size.y * scl.y) + (Gap * GlobalScale);
					size.z = (mainObj.bounds.size.z * scl.z) + (Gap * GlobalScale);

					// TODO: 2 should be axis?
					repeat = (int)(dist / size[(int)axis]);	//(int)axis]);	// + Gap));
				}
			}

			vcount += (mverts.Length * repeat);
			tcount += (mtris.Length * repeat);
		}

		// Only reallocate when the required sizes change.
		if ( loftverts == null || loftverts.Length != vcount )
			loftverts = new Vector3[vcount];

		if ( loftuvs == null || loftuvs.Length != vcount )
			loftuvs = new Vector2[vcount];

		if ( lofttris == null || lofttris.Length != tcount )
			lofttris = new int[tcount];

		return true;
	}

	// Maps a single source-mesh vertex onto the path: scales/offsets it,
	// converts its z position into a path alpha, then orients it either by
	// the full path frame (CalcUp) or by a look-at derived from two samples.
	// Mutates the shared tw/wtm scratch members.
	Vector3 Deform(Vector3 p, MegaShapeLoft loft, MegaLoftLayerSimple layer, float percent, float ca, float off, Vector3 scale, float removeDof, Vector3 locoff)
	{
		p = tm.MultiplyPoint3x4(p);

		p.x *= scale.x;
		p.y *= scale.y;
		p.z *= scale.z;

		p.z += off;
		p += locoff;

		// Path alpha for this vertex: distance along the axis plus the
		// section's start percent.
		float alpha = (p.z * LayerLength) + percent;

		if ( useCrossCrv )
			ca += CrossCrv.Evaluate(alpha);

		Vector3 ps1;
		Vector3 ps;

		if ( CalcUp )
		{
			Vector3 upv = Vector3.zero;
			Vector3 right = Vector3.zero;
			Vector3 fwd = Vector3.zero;

			ps = layer.GetPosAndFrame(loft, ca, alpha, (tangent * 0.001f), out ps1, out upv, out right, out fwd);
			tw = Quaternion.LookRotation(fwd, upv);

			Quaternion rot = tw * meshrot;

			if ( useTwist )
				rot *= Quaternion.AngleAxis(twist * twistCrv.Evaluate(alpha), Vector3.forward);

			//wtm.SetTRS(ps, rot, Vector3.one);
			MegaMatrix.SetTR(ref wtm, ps, rot);
			wtm = mat * wtm;
			// z has been consumed as the position along the path.
			p.z = 0.0f;
			return wtm.MultiplyPoint3x4(p);
		}
		else
		{
			ps = layer.GetPosAndLook(loft, ca, alpha, (tangent * 0.001f), out ps1);

			if ( useTwist )
				tw = meshrot * Quaternion.AngleAxis(twist * twistCrv.Evaluate(alpha), Vector3.forward);	// * meshrot;
			else
				tw = meshrot;
		}

		Vector3 relativePos = ps1 - ps;
		// removeDof flattens the vertical component of the look direction.
		relativePos.y *= removeDof;
		Quaternion rotation = Quaternion.LookRotation(relativePos) * tw;	// * meshrot;

		//wtm.SetTRS(ps, rotation, Vector3.one);
		MegaMatrix.SetTR(ref wtm, ps, rotation);
		wtm = mat * wtm;
		p.z = 0.0f;
		return wtm.MultiplyPoint3x4(p);
	}

	// Emits start, repeated main, then end geometry into the loft buffers.
	// 'off' accumulates the running distance along the axis; bounds.min/max
	// keep each mesh flush against the previous section plus its gap.
	public override int BuildMesh(MegaShapeLoft loft, int triindex)
	{
		MegaLoftLayerSimple layer = (MegaLoftLayerSimple)surfaceLoft.Layers[surfaceLayer];

		LayerLength = 1.0f / layer.GetLength(surfaceLoft);

		if ( tangent < 0.1f )
			tangent = 0.1f;

		//mat = surfaceLoft.transform.localToWorldMatrix;
		//mat = transform.localToWorldMatrix * surfaceLoft.transform.worldToLocalMatrix;	// * transform.worldToLocalMatrix;
		//mat = surfaceLoft.transform.localToWorldMatrix;
		mat = surfaceLoft.transform.localToWorldMatrix * transform.worldToLocalMatrix;

		tm = Matrix4x4.identity;
		MegaMatrix.Rotate(ref tm, Mathf.Deg2Rad * tmrot);

		meshtm = Matrix4x4.identity;
		MegaMatrix.Rotate(ref meshtm, Mathf.Deg2Rad * rot);

		meshrot = Quaternion.Euler(rot);

		float off = 0.0f;
		int trioff = 0;
		int vi = 0;
		int fi = 0;

		float ca = CrossAlpha;	//Mathf.Repeat(CrossAlpha, 1.0001f);

		if ( startObj != null && StartEnabled )
		{
			// deform start along the curve
			// so for each vertex find offset to calc alpha to find rotation and position of vert
			// need to add vert to list, also uv and tri (uv is a copy)
			Vector3 sscl = StartScale * GlobalScale;
			Vector3 soff = Vector3.Scale(StartOff + Offset, sscl);

			off -= startObj.bounds.min[(int)axis] * sscl[(int)axis];	// - startObj.bounds.size[(int)axis];	//0.0f;	//sz;

			for ( int i = 0; i < sverts.Length; i++ )
			{
				Vector3 p = sverts[i];
				p = Deform(p, surfaceLoft, layer, start, ca, off, sscl, RemoveDof, soff);
				loftverts[vi] = p;	// + StartOff;
				loftuvs[vi++] = suvs[i];
			}

			// Tris are a copy, could use InsertRange
			for ( int i = 0; i < stris.Length; i++ )
				lofttris[fi++] = stris[i] + triindex;

			off += startObj.bounds.max[(int)axis] * sscl[(int)axis];	//sw;
			off += StartGap * GlobalScale;
			trioff = vi;	//verts.Count;
		}

		if ( mainObj != null && MainEnabled )
		{
			float mw = mainObj.bounds.size[(int)axis];	// * (GlobalScale * 0.01f);
			Vector3 mscl = MainScale * GlobalScale;
			Vector3 moff = Vector3.Scale(MainOff + Offset, mscl);

			off -= mainObj.bounds.min[(int)axis] * mscl[(int)axis];
			mw *= mscl[(int)axis];

			float gaps = Gap * GlobalScale;

			for ( int r = 0; r < repeat; r++ )
			{
				for ( int i = 0; i < mverts.Length; i++ )
				{
					Vector3 p = mverts[i];
					p = Deform(p, surfaceLoft, layer, start, ca, off, mscl, RemoveDof, moff);
					loftverts[vi] = p;	// + MainOff;
					loftuvs[vi++] = muvs[i];
				}

				for ( int i = 0; i < mtris.Length; i++ )
					lofttris[fi++] = mtris[i] + trioff + triindex;

				off += mw;
				off += gaps;
				trioff = vi;	//verts.Count;
			}

			// Undo the trailing gap and align to the main mesh's far bound.
			off -= gaps;	//Gap;
			off += (mainObj.bounds.max[(int)axis] * mscl[(int)axis]) - mw;
		}

		if ( endObj != null && EndEnabled )
		{
			Vector3 escl = EndScale * GlobalScale;
			Vector3 eoff = Vector3.Scale(EndOff + Offset, escl);

			off -= endObj.bounds.min[(int)axis] * escl[(int)axis];
			off += EndGap * GlobalScale;

			for ( int i = 0; i < everts.Length; i++ )
			{
				Vector3 p = everts[i];
				p = Deform(p, surfaceLoft, layer, start, ca, off, escl, RemoveDof, eoff);
				loftverts[vi] = p;	// + EndOff;
				loftuvs[vi++] = euvs[i];
			}

			for ( int i = 0; i < etris.Length; i++ )
				lofttris[fi++] = etris[i] + trioff + triindex;

			trioff += everts.Length;
		}

		return triindex;
	}

	// Copies this layer onto another GameObject, clearing the buffer caches
	// and retargeting surfaceLoft when it pointed at our own loft.
	// NOTE(review): always returns null rather than the new layer -- confirm
	// callers expect this.
	public override MegaLoftLayerBase Copy(GameObject go)
	{
		MegaLoftLayerCloneSimple layer = go.AddComponent<MegaLoftLayerCloneSimple>();

		Copy(this, layer);

		loftverts = null;
		loftuvs = null;
		loftcols = null;
		lofttris = null;

		if ( layer.surfaceLoft == GetComponent<MegaShapeLoft>() )
			layer.surfaceLoft = go.GetComponent<MegaShapeLoft>();

		return null;
	}
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */
namespace NPOI.XWPF.UserModel
{
    using System;
    using NUnit.Framework;
    using NPOI.XWPF;
    using NPOI.OpenXmlFormats.Wordprocessing;
    using System.Collections.Generic;

    /// <summary>
    /// Unit tests for XWPFTable: construction, text extraction, row
    /// creation, sizing, margins, borders and band sizes.
    /// </summary>
    [TestFixture]
    public class TestXWPFTable
    {
        [SetUp]
        public void SetUp()
        {
            // No shared fixture state is needed; each test builds its own
            // document and table.
        }

        [Test]
        public void TestConstructor()
        {
            XWPFDocument document = new XWPFDocument();

            // Default construction creates one row containing one cell,
            // and that cell already holds a paragraph.
            CT_Tbl ctTbl = new CT_Tbl();
            XWPFTable tbl = new XWPFTable(ctTbl, document);
            Assert.IsNotNull(tbl);
            Assert.AreEqual(1, ctTbl.SizeOfTrArray());
            Assert.AreEqual(1, ctTbl.GetTrArray(0).SizeOfTcArray());
            Assert.IsNotNull(ctTbl.GetTrArray(0).GetTcArray(0).GetPArray(0));

            // Explicit row/column counts are honoured.
            ctTbl = new CT_Tbl();
            tbl = new XWPFTable(ctTbl, document, 3, 2);
            Assert.IsNotNull(tbl);
            Assert.AreEqual(3, ctTbl.SizeOfTrArray());
            Assert.AreEqual(2, ctTbl.GetTrArray(0).SizeOfTcArray());
            Assert.IsNotNull(ctTbl.GetTrArray(0).GetTcArray(0).GetPArray(0));
        }

        [Test]
        public void TestGetText()
        {
            XWPFDocument document = new XWPFDocument();
            CT_Tbl ctTbl = new CT_Tbl();

            // Build row -> cell -> paragraph -> run -> text by hand.
            CT_Text cellText = ctTbl.AddNewTr().AddNewTc().AddNewP().AddNewR().AddNewT();
            cellText.Value = ("finally I can Write!");

            XWPFTable tbl = new XWPFTable(ctTbl, document);
            Assert.AreEqual("finally I can Write!\n", tbl.Text);
        }

        [Test]
        public void TestCreateRow()
        {
            XWPFDocument document = new XWPFDocument();

            // Three rows of two cells each, every cell with a paragraph.
            CT_Tbl ctTbl = new CT_Tbl();
            for (int rowIdx = 0; rowIdx < 3; rowIdx++)
            {
                CT_Row ctRow = ctTbl.AddNewTr();
                ctRow.AddNewTc().AddNewP();
                ctRow.AddNewTc().AddNewP();
            }

            XWPFTable tbl = new XWPFTable(ctTbl, document);
            Assert.AreEqual(3, tbl.NumberOfRows);
            Assert.IsNotNull(tbl.GetRow(2));

            // Appending a row bumps the count.
            tbl.CreateRow();
            Assert.AreEqual(4, tbl.NumberOfRows);

            // The new row copies the column count of the first row.
            Assert.AreEqual(2, ctTbl.GetTrArray(0).SizeOfTcArray());

            // A brand-new table starts with one row holding one cell.
            tbl = new XWPFTable(new CT_Tbl(), document);
            Assert.AreEqual(1, tbl.GetCTTbl().GetTrArray(0).SizeOfTcArray());
        }

        [Test]
        public void TestSetGetWidth()
        {
            XWPFDocument document = new XWPFDocument();
            CT_Tbl ctTbl = new CT_Tbl();
            ctTbl.AddNewTblPr().AddNewTblW().w = "1000";

            XWPFTable tbl = new XWPFTable(ctTbl, document);
            Assert.AreEqual(1000, tbl.Width);

            // Setting the property writes back into the underlying CT model.
            tbl.Width = 100;
            Assert.AreEqual(100, int.Parse(ctTbl.tblPr.tblW.w));
        }

        [Test]
        public void TestSetGetHeight()
        {
            XWPFDocument document = new XWPFDocument();
            XWPFTable tbl = new XWPFTable(new CT_Tbl(), document);

            XWPFTableRow tblRow = tbl.CreateRow();
            tblRow.Height = (20);
            Assert.AreEqual(20, tblRow.Height);
        }

        [Test]
        public void TestSetGetMargins()
        {
            // Instantiate CT_TblCellMar so the type is exercised; the margin
            // accessors below rely on it.
            CT_TblCellMar cellMar = new CT_TblCellMar();
            Assert.IsNotNull(cellMar);

            XWPFDocument document = new XWPFDocument();
            XWPFTable tbl = new XWPFTable(new CT_Tbl(), document);

            // top, left, bottom, right
            tbl.SetCellMargins(50, 50, 250, 450);

            Assert.AreEqual(50, tbl.CellMarginTop);
            Assert.AreEqual(50, tbl.CellMarginLeft);
            Assert.AreEqual(250, tbl.CellMarginBottom);
            Assert.AreEqual(450, tbl.CellMarginRight);
        }

        [Test]
        public void TestSetGetHBorders()
        {
            // Instantiate the border CT types so they are exercised; the
            // border accessors below rely on them.
            CT_TblBorders ctBorders = new CT_TblBorders();
            Assert.IsNotNull(ctBorders);
            ST_Border stBorder = new ST_Border();
            Assert.IsNotNull(stBorder);

            XWPFDocument document = new XWPFDocument();
            XWPFTable tbl = new XWPFTable(new CT_Tbl(), document);

            // type, size, space, color
            tbl.SetInsideHBorder(NPOI.XWPF.UserModel.XWPFTable.XWPFBorderType.SINGLE, 4, 0, "FF0000");

            Assert.AreEqual(4, tbl.InsideHBorderSize);
            Assert.AreEqual(0, tbl.InsideHBorderSpace);
            Assert.AreEqual("FF0000", tbl.InsideHBorderColor);
            Assert.AreEqual(NPOI.XWPF.UserModel.XWPFTable.XWPFBorderType.SINGLE, tbl.InsideHBorderType);
        }

        [Test]
        public void TestSetGetVBorders()
        {
            XWPFDocument document = new XWPFDocument();
            XWPFTable tbl = new XWPFTable(new CT_Tbl(), document);

            // type, size, space, color
            tbl.SetInsideVBorder(NPOI.XWPF.UserModel.XWPFTable.XWPFBorderType.DOUBLE, 4, 0, "00FF00");

            Assert.AreEqual(NPOI.XWPF.UserModel.XWPFTable.XWPFBorderType.DOUBLE, tbl.InsideVBorderType);
            Assert.AreEqual(4, tbl.InsideVBorderSize);
            Assert.AreEqual(0, tbl.InsideVBorderSpace);
            Assert.AreEqual("00FF00", tbl.InsideVBorderColor);
        }

        [Test]
        public void TestSetGetRowBandSize()
        {
            XWPFDocument document = new XWPFDocument();
            XWPFTable tbl = new XWPFTable(new CT_Tbl(), document);

            tbl.RowBandSize = 12;
            Assert.AreEqual(12, tbl.RowBandSize);
        }

        [Test]
        public void TestSetGetColBandSize()
        {
            XWPFDocument document = new XWPFDocument();
            XWPFTable tbl = new XWPFTable(new CT_Tbl(), document);

            tbl.ColBandSize = 16;
            Assert.AreEqual(16, tbl.ColBandSize);
        }

        [Test]
        public void TestCreateTable()
        {
            // Open an empty document and create a 5x7 table in it.
            XWPFDocument document = XWPFTestDataSamples.OpenSampleDocument("sample.docx");

            int noRows = 5;
            int noCols = 7;
            XWPFTable tbl = document.CreateTable(noRows, noCols);

            List<XWPFTableRow> tblRows = tbl.Rows;
            Assert.AreEqual(noRows, tblRows.Count, "Table has less rows than requested.");

            foreach (XWPFTableRow tblRow in tblRows)
            {
                Assert.IsNotNull(tblRow);
                for (int col = 0; col < noCols; col++)
                {
                    XWPFTableCell tblCell = tblRow.GetCell(col);
                    Assert.IsNotNull(tblCell);
                    Assert.AreEqual(1, tblCell.Paragraphs.Count, "Empty cells should not have one paragraph.");
                    tblCell = tblRow.GetCell(col);
                    Assert.AreEqual(1, tblCell.Paragraphs.Count, "Calling 'getCell' must not modify cells content.");
                }
            }

            document.Package.Revert();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // ReSharper disable UnusedAutoPropertyAccessor.Global // ReSharper disable MemberCanBePrivate.Global #pragma warning disable 618 namespace Apache.Ignite.Core.Tests { using System; using System.Collections.Generic; using System.Configuration; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; using System.Text; using System.Threading; using System.Xml; using System.Xml.Linq; using System.Xml.Schema; using Apache.Ignite.Core.Binary; using Apache.Ignite.Core.Cache.Affinity.Rendezvous; using Apache.Ignite.Core.Cache.Configuration; using Apache.Ignite.Core.Cache.Eviction; using Apache.Ignite.Core.Cache.Expiry; using Apache.Ignite.Core.Cache.Store; using Apache.Ignite.Core.Ssl; using Apache.Ignite.Core.Common; using Apache.Ignite.Core.Communication.Tcp; using Apache.Ignite.Core.Configuration; using Apache.Ignite.Core.DataStructures.Configuration; using Apache.Ignite.Core.Deployment; using Apache.Ignite.Core.Discovery.Tcp; using Apache.Ignite.Core.Discovery.Tcp.Multicast; using Apache.Ignite.Core.Events; using Apache.Ignite.Core.Failure; using Apache.Ignite.Core.Lifecycle; using Apache.Ignite.Core.Log; using Apache.Ignite.Core.PersistentStore; using 
Apache.Ignite.Core.Plugin.Cache; using Apache.Ignite.Core.Tests.Binary; using Apache.Ignite.Core.Tests.Plugin; using Apache.Ignite.Core.Transactions; using Apache.Ignite.NLog; using NUnit.Framework; using CheckpointWriteOrder = Apache.Ignite.Core.PersistentStore.CheckpointWriteOrder; using DataPageEvictionMode = Apache.Ignite.Core.Cache.Configuration.DataPageEvictionMode; using WalMode = Apache.Ignite.Core.PersistentStore.WalMode; /// <summary> /// Tests <see cref="IgniteConfiguration"/> serialization. /// </summary> public class IgniteConfigurationSerializerTest { /// <summary> /// Tests the predefined XML. /// </summary> [Test] public void TestPredefinedXml() { var xml = File.ReadAllText("Config\\full-config.xml"); var cfg = IgniteConfiguration.FromXml(xml); Assert.AreEqual("c:", cfg.WorkDirectory); Assert.AreEqual("127.1.1.1", cfg.Localhost); Assert.IsTrue(cfg.IsDaemon); Assert.AreEqual(1024, cfg.JvmMaxMemoryMb); Assert.AreEqual(TimeSpan.FromSeconds(10), cfg.MetricsLogFrequency); Assert.AreEqual(TimeSpan.FromMinutes(1), ((TcpDiscoverySpi)cfg.DiscoverySpi).JoinTimeout); Assert.AreEqual("192.168.1.1", ((TcpDiscoverySpi)cfg.DiscoverySpi).LocalAddress); Assert.AreEqual(6655, ((TcpDiscoverySpi)cfg.DiscoverySpi).LocalPort); Assert.AreEqual(7, ((TcpDiscoveryMulticastIpFinder) ((TcpDiscoverySpi) cfg.DiscoverySpi).IpFinder).AddressRequestAttempts); Assert.AreEqual(new[] { "-Xms1g", "-Xmx4g" }, cfg.JvmOptions); Assert.AreEqual(15, ((LifecycleBean) cfg.LifecycleHandlers.Single()).Foo); Assert.AreEqual("testBar", ((NameMapper) cfg.BinaryConfiguration.NameMapper).Bar); Assert.AreEqual( "Apache.Ignite.Core.Tests.IgniteConfigurationSerializerTest+FooClass, Apache.Ignite.Core.Tests", cfg.BinaryConfiguration.Types.Single()); Assert.IsFalse(cfg.BinaryConfiguration.CompactFooter); Assert.AreEqual(new[] {42, EventType.TaskFailed, EventType.JobFinished}, cfg.IncludedEventTypes); Assert.AreEqual(@"c:\myconfig.xml", cfg.SpringConfigUrl); 
Assert.IsTrue(cfg.AutoGenerateIgniteInstanceName); Assert.AreEqual(new TimeSpan(1, 2, 3), cfg.LongQueryWarningTimeout); Assert.IsFalse(cfg.IsActiveOnStart); Assert.IsTrue(cfg.AuthenticationEnabled); Assert.AreEqual("someId012", cfg.ConsistentId); Assert.IsFalse(cfg.RedirectJavaConsoleOutput); Assert.AreEqual("secondCache", cfg.CacheConfiguration.Last().Name); var cacheCfg = cfg.CacheConfiguration.First(); Assert.AreEqual(CacheMode.Replicated, cacheCfg.CacheMode); Assert.IsTrue(cacheCfg.ReadThrough); Assert.IsTrue(cacheCfg.WriteThrough); Assert.IsInstanceOf<MyPolicyFactory>(cacheCfg.ExpiryPolicyFactory); Assert.IsTrue(cacheCfg.EnableStatistics); Assert.IsFalse(cacheCfg.WriteBehindCoalescing); Assert.AreEqual(PartitionLossPolicy.ReadWriteAll, cacheCfg.PartitionLossPolicy); Assert.AreEqual("fooGroup", cacheCfg.GroupName); Assert.AreEqual("bar", cacheCfg.KeyConfiguration.Single().AffinityKeyFieldName); Assert.AreEqual("foo", cacheCfg.KeyConfiguration.Single().TypeName); Assert.IsTrue(cacheCfg.OnheapCacheEnabled); Assert.AreEqual(8, cacheCfg.StoreConcurrentLoadAllThreshold); Assert.AreEqual(9, cacheCfg.RebalanceOrder); Assert.AreEqual(10, cacheCfg.RebalanceBatchesPrefetchCount); Assert.AreEqual(11, cacheCfg.MaxQueryIteratorsCount); Assert.AreEqual(12, cacheCfg.QueryDetailMetricsSize); Assert.AreEqual(13, cacheCfg.QueryParallelism); Assert.AreEqual("mySchema", cacheCfg.SqlSchema); var queryEntity = cacheCfg.QueryEntities.Single(); Assert.AreEqual(typeof(int), queryEntity.KeyType); Assert.AreEqual(typeof(string), queryEntity.ValueType); Assert.AreEqual("myTable", queryEntity.TableName); Assert.AreEqual("length", queryEntity.Fields.Single().Name); Assert.AreEqual(typeof(int), queryEntity.Fields.Single().FieldType); Assert.IsTrue(queryEntity.Fields.Single().IsKeyField); Assert.IsTrue(queryEntity.Fields.Single().NotNull); Assert.AreEqual(3.456d, (double)queryEntity.Fields.Single().DefaultValue); Assert.AreEqual("somefield.field", queryEntity.Aliases.Single().FullName); 
Assert.AreEqual("shortField", queryEntity.Aliases.Single().Alias); var queryIndex = queryEntity.Indexes.Single(); Assert.AreEqual(QueryIndexType.Geospatial, queryIndex.IndexType); Assert.AreEqual("indexFld", queryIndex.Fields.Single().Name); Assert.AreEqual(true, queryIndex.Fields.Single().IsDescending); Assert.AreEqual(123, queryIndex.InlineSize); var nearCfg = cacheCfg.NearConfiguration; Assert.IsNotNull(nearCfg); Assert.AreEqual(7, nearCfg.NearStartSize); var plc = nearCfg.EvictionPolicy as FifoEvictionPolicy; Assert.IsNotNull(plc); Assert.AreEqual(10, plc.BatchSize); Assert.AreEqual(20, plc.MaxSize); Assert.AreEqual(30, plc.MaxMemorySize); var plc2 = cacheCfg.EvictionPolicy as LruEvictionPolicy; Assert.IsNotNull(plc2); Assert.AreEqual(1, plc2.BatchSize); Assert.AreEqual(2, plc2.MaxSize); Assert.AreEqual(3, plc2.MaxMemorySize); var af = cacheCfg.AffinityFunction as RendezvousAffinityFunction; Assert.IsNotNull(af); Assert.AreEqual(99, af.Partitions); Assert.IsTrue(af.ExcludeNeighbors); Assert.AreEqual(new Dictionary<string, object> { {"myNode", "true"}, {"foo", new FooClass {Bar = "Baz"}} }, cfg.UserAttributes); var atomicCfg = cfg.AtomicConfiguration; Assert.AreEqual(2, atomicCfg.Backups); Assert.AreEqual(CacheMode.Local, atomicCfg.CacheMode); Assert.AreEqual(250, atomicCfg.AtomicSequenceReserveSize); var tx = cfg.TransactionConfiguration; Assert.AreEqual(TransactionConcurrency.Optimistic, tx.DefaultTransactionConcurrency); Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.DefaultTransactionIsolation); Assert.AreEqual(new TimeSpan(0,1,2), tx.DefaultTimeout); Assert.AreEqual(15, tx.PessimisticTransactionLogSize); Assert.AreEqual(TimeSpan.FromSeconds(33), tx.PessimisticTransactionLogLinger); var comm = cfg.CommunicationSpi as TcpCommunicationSpi; Assert.IsNotNull(comm); Assert.AreEqual(33, comm.AckSendThreshold); Assert.AreEqual(new TimeSpan(0, 1, 2), comm.IdleConnectionTimeout); Assert.IsInstanceOf<TestLogger>(cfg.Logger); var binType = 
cfg.BinaryConfiguration.TypeConfigurations.Single(); Assert.AreEqual("typeName", binType.TypeName); Assert.AreEqual("affKeyFieldName", binType.AffinityKeyFieldName); Assert.IsTrue(binType.IsEnum); Assert.AreEqual(true, binType.KeepDeserialized); Assert.IsInstanceOf<IdMapper>(binType.IdMapper); Assert.IsInstanceOf<NameMapper>(binType.NameMapper); Assert.IsInstanceOf<TestSerializer>(binType.Serializer); var plugins = cfg.PluginConfigurations; Assert.IsNotNull(plugins); Assert.IsNotNull(plugins.Cast<TestIgnitePluginConfiguration>().SingleOrDefault()); Assert.IsNotNull(cacheCfg.PluginConfigurations.Cast<MyPluginConfiguration>().SingleOrDefault()); var eventStorage = cfg.EventStorageSpi as MemoryEventStorageSpi; Assert.IsNotNull(eventStorage); Assert.AreEqual(23.45, eventStorage.ExpirationTimeout.TotalSeconds); Assert.AreEqual(129, eventStorage.MaxEventCount); var memCfg = cfg.MemoryConfiguration; Assert.IsNotNull(memCfg); Assert.AreEqual(3, memCfg.ConcurrencyLevel); Assert.AreEqual("dfPlc", memCfg.DefaultMemoryPolicyName); Assert.AreEqual(45, memCfg.PageSize); Assert.AreEqual(67, memCfg.SystemCacheInitialSize); Assert.AreEqual(68, memCfg.SystemCacheMaxSize); var memPlc = memCfg.MemoryPolicies.Single(); Assert.AreEqual(1, memPlc.EmptyPagesPoolSize); Assert.AreEqual(0.2, memPlc.EvictionThreshold); Assert.AreEqual("dfPlc", memPlc.Name); Assert.AreEqual(DataPageEvictionMode.RandomLru, memPlc.PageEvictionMode); Assert.AreEqual("abc", memPlc.SwapFilePath); Assert.AreEqual(89, memPlc.InitialSize); Assert.AreEqual(98, memPlc.MaxSize); Assert.IsTrue(memPlc.MetricsEnabled); Assert.AreEqual(9, memPlc.SubIntervals); Assert.AreEqual(TimeSpan.FromSeconds(62), memPlc.RateTimeInterval); Assert.AreEqual(PeerAssemblyLoadingMode.CurrentAppDomain, cfg.PeerAssemblyLoadingMode); var sql = cfg.SqlConnectorConfiguration; Assert.IsNotNull(sql); Assert.AreEqual("bar", sql.Host); Assert.AreEqual(10, sql.Port); Assert.AreEqual(11, sql.PortRange); Assert.AreEqual(12, sql.SocketSendBufferSize); 
Assert.AreEqual(13, sql.SocketReceiveBufferSize); Assert.IsTrue(sql.TcpNoDelay); Assert.AreEqual(14, sql.MaxOpenCursorsPerConnection); Assert.AreEqual(15, sql.ThreadPoolSize); var client = cfg.ClientConnectorConfiguration; Assert.IsNotNull(client); Assert.AreEqual("bar", client.Host); Assert.AreEqual(10, client.Port); Assert.AreEqual(11, client.PortRange); Assert.AreEqual(12, client.SocketSendBufferSize); Assert.AreEqual(13, client.SocketReceiveBufferSize); Assert.IsTrue(client.TcpNoDelay); Assert.AreEqual(14, client.MaxOpenCursorsPerConnection); Assert.AreEqual(15, client.ThreadPoolSize); Assert.AreEqual(19, client.IdleTimeout.TotalSeconds); var pers = cfg.PersistentStoreConfiguration; Assert.AreEqual(true, pers.AlwaysWriteFullPages); Assert.AreEqual(TimeSpan.FromSeconds(1), pers.CheckpointingFrequency); Assert.AreEqual(2, pers.CheckpointingPageBufferSize); Assert.AreEqual(3, pers.CheckpointingThreads); Assert.AreEqual(TimeSpan.FromSeconds(4), pers.LockWaitTime); Assert.AreEqual("foo", pers.PersistentStorePath); Assert.AreEqual(5, pers.TlbSize); Assert.AreEqual("bar", pers.WalArchivePath); Assert.AreEqual(TimeSpan.FromSeconds(6), pers.WalFlushFrequency); Assert.AreEqual(7, pers.WalFsyncDelayNanos); Assert.AreEqual(8, pers.WalHistorySize); Assert.AreEqual(WalMode.None, pers.WalMode); Assert.AreEqual(9, pers.WalRecordIteratorBufferSize); Assert.AreEqual(10, pers.WalSegments); Assert.AreEqual(11, pers.WalSegmentSize); Assert.AreEqual("baz", pers.WalStorePath); Assert.IsTrue(pers.MetricsEnabled); Assert.AreEqual(3, pers.SubIntervals); Assert.AreEqual(TimeSpan.FromSeconds(6), pers.RateTimeInterval); Assert.AreEqual(CheckpointWriteOrder.Random, pers.CheckpointWriteOrder); Assert.IsTrue(pers.WriteThrottlingEnabled); var listeners = cfg.LocalEventListeners; Assert.AreEqual(2, listeners.Count); var rebalListener = (LocalEventListener<CacheRebalancingEvent>) listeners.First(); Assert.AreEqual(new[] {EventType.CacheObjectPut, 81}, rebalListener.EventTypes); 
Assert.AreEqual("Apache.Ignite.Core.Tests.EventsTestLocalListeners+Listener`1" + "[Apache.Ignite.Core.Events.CacheRebalancingEvent]", rebalListener.Listener.GetType().ToString()); var ds = cfg.DataStorageConfiguration; Assert.IsFalse(ds.AlwaysWriteFullPages); Assert.AreEqual(TimeSpan.FromSeconds(1), ds.CheckpointFrequency); Assert.AreEqual(3, ds.CheckpointThreads); Assert.AreEqual(4, ds.ConcurrencyLevel); Assert.AreEqual(TimeSpan.FromSeconds(5), ds.LockWaitTime); Assert.IsTrue(ds.MetricsEnabled); Assert.AreEqual(6, ds.PageSize); Assert.AreEqual("cde", ds.StoragePath); Assert.AreEqual(TimeSpan.FromSeconds(7), ds.MetricsRateTimeInterval); Assert.AreEqual(8, ds.MetricsSubIntervalCount); Assert.AreEqual(9, ds.SystemRegionInitialSize); Assert.AreEqual(10, ds.SystemRegionMaxSize); Assert.AreEqual(11, ds.WalThreadLocalBufferSize); Assert.AreEqual("abc", ds.WalArchivePath); Assert.AreEqual(TimeSpan.FromSeconds(12), ds.WalFlushFrequency); Assert.AreEqual(13, ds.WalFsyncDelayNanos); Assert.AreEqual(14, ds.WalHistorySize); Assert.AreEqual(Core.Configuration.WalMode.Background, ds.WalMode); Assert.AreEqual(15, ds.WalRecordIteratorBufferSize); Assert.AreEqual(16, ds.WalSegments); Assert.AreEqual(17, ds.WalSegmentSize); Assert.AreEqual("wal-store", ds.WalPath); Assert.AreEqual(TimeSpan.FromSeconds(18), ds.WalAutoArchiveAfterInactivity); Assert.IsTrue(ds.WriteThrottlingEnabled); var dr = ds.DataRegionConfigurations.Single(); Assert.AreEqual(1, dr.EmptyPagesPoolSize); Assert.AreEqual(2, dr.EvictionThreshold); Assert.AreEqual(3, dr.InitialSize); Assert.AreEqual(4, dr.MaxSize); Assert.AreEqual("reg2", dr.Name); Assert.AreEqual(Core.Configuration.DataPageEvictionMode.RandomLru, dr.PageEvictionMode); Assert.AreEqual(TimeSpan.FromSeconds(1), dr.MetricsRateTimeInterval); Assert.AreEqual(5, dr.MetricsSubIntervalCount); Assert.AreEqual("swap", dr.SwapPath); Assert.IsTrue(dr.MetricsEnabled); Assert.AreEqual(7, dr.CheckpointPageBufferSize); dr = ds.DefaultDataRegionConfiguration; 
// Tail of the preceding configuration-assertion method: verifies the default data region.
Assert.AreEqual(2, dr.EmptyPagesPoolSize);
            Assert.AreEqual(3, dr.EvictionThreshold);
            Assert.AreEqual(4, dr.InitialSize);
            Assert.AreEqual(5, dr.MaxSize);
            Assert.AreEqual("reg1", dr.Name);
            Assert.AreEqual(Core.Configuration.DataPageEvictionMode.Disabled, dr.PageEvictionMode);
            Assert.AreEqual(TimeSpan.FromSeconds(3), dr.MetricsRateTimeInterval);
            Assert.AreEqual(6, dr.MetricsSubIntervalCount);
            Assert.AreEqual("swap2", dr.SwapPath);
            Assert.IsFalse(dr.MetricsEnabled);

            Assert.IsInstanceOf<SslContextFactory>(cfg.SslContextFactory);

            // Failure handler round-trips both its flag and timeout.
            Assert.IsInstanceOf<StopNodeOrHaltFailureHandler>(cfg.FailureHandler);
            var failureHandler = (StopNodeOrHaltFailureHandler)cfg.FailureHandler;
            Assert.IsTrue(failureHandler.TryStop);
            Assert.AreEqual(TimeSpan.Parse("0:1:0"), failureHandler.Timeout);
        }

        /// <summary>
        /// Tests the serialize deserialize.
        /// Round-trips the test config (and the default config) through XML and
        /// checks reflection-equality; repeated under a non-invariant culture to
        /// catch culture-sensitive number/date formatting bugs.
        /// </summary>
        [Test]
        public void TestSerializeDeserialize()
        {
            // Test custom
            CheckSerializeDeserialize(GetTestConfig());

            // Test custom with different culture to make sure numbers are serialized properly
            RunWithCustomCulture(() => CheckSerializeDeserialize(GetTestConfig()));

            // Test default
            CheckSerializeDeserialize(new IgniteConfiguration());
        }

        /// <summary>
        /// Tests that all properties are present in the schema.
        /// </summary>
        [Test]
        public void TestAllPropertiesArePresentInSchema()
        {
            CheckAllPropertiesArePresentInSchema("IgniteConfigurationSection.xsd", "igniteConfiguration",
                typeof(IgniteConfiguration));
        }

        /// <summary>
        /// Checks that all properties are present in schema.
        /// Loads the XSD, finds the element named <paramref name="sectionName"/>,
        /// then recursively verifies every writable property of <paramref name="type"/>.
        /// </summary>
        [SuppressMessage("ReSharper", "PossibleNullReferenceException")]
        public static void CheckAllPropertiesArePresentInSchema(string xsd, string sectionName, Type type)
        {
            var schema = XDocument.Load(xsd)
                .Root.Elements()
                .Single(x => x.Attribute("name").Value == sectionName);

            CheckPropertyIsPresentInSchema(type, schema);
        }

        /// <summary>
        /// Checks the property is present in schema.
/// </summary>
        // ReSharper disable once UnusedParameter.Local
        // ReSharper disable once ParameterOnlyUsedForPreconditionCheck.Local
        private static void CheckPropertyIsPresentInSchema(Type type, XElement schema)
        {
            // XML attribute names are lowerCamelCase versions of the C# property names.
            Func<string, string> toLowerCamel = x => char.ToLowerInvariant(x[0]) + x.Substring(1);

            foreach (var prop in type.GetProperties())
            {
                if (!prop.CanWrite)
                    continue;  // Read-only properties are not configured in XML.

                if (prop.GetCustomAttributes(typeof(ObsoleteAttribute), true).Any())
                    continue;  // Skip deprecated.

                var propType = prop.PropertyType;

                // For collection properties the schema describes the element type, not the collection.
                var isCollection = propType.IsGenericType &&
                                   propType.GetGenericTypeDefinition() == typeof(ICollection<>);

                if (isCollection)
                    propType = propType.GetGenericArguments().First();

                var propName = toLowerCamel(prop.Name);

                Assert.IsTrue(schema.Descendants().Select(x => x.Attribute("name"))
                    .Any(x => x != null && x.Value == propName),
                    "Property is missing in XML schema: " + propName);

                // Recurse into nested Ignite configuration objects.
                var isComplexProp = propType.Namespace != null &&
                                    propType.Namespace.StartsWith("Apache.Ignite.Core");

                if (isComplexProp)
                    CheckPropertyIsPresentInSchema(propType, schema);
            }
        }

        /// <summary>
        /// Tests the schema validation.
        /// Valid configs (including under a custom culture) must pass; a config
        /// with a duplicated element must raise <see cref="XmlSchemaValidationException"/>.
        /// </summary>
        [Test]
        public void TestSchemaValidation()
        {
            CheckSchemaValidation();
            RunWithCustomCulture(CheckSchemaValidation);

            // Check invalid xml
            const string invalidXml = @"<igniteConfiguration xmlns='http://ignite.apache.org/schema/dotnet/IgniteConfigurationSection'> <binaryConfiguration /><binaryConfiguration /> </igniteConfiguration>";

            Assert.Throws<XmlSchemaValidationException>(() => CheckSchemaValidation(invalidXml));
        }

        /// <summary>
        /// Tests the XML conversion.
/// </summary>
        [Test]
        public void TestToXml()
        {
            // Empty config
            Assert.AreEqual("<?xml version=\"1.0\" encoding=\"utf-16\"?>\r\n<igniteConfiguration " +
                            "xmlns=\"http://ignite.apache.org/schema/dotnet/IgniteConfigurationSection\" />",
                new IgniteConfiguration().ToXml());

            // Some properties
            var cfg = new IgniteConfiguration
            {
                IgniteInstanceName = "myGrid",
                ClientMode = true,
                CacheConfiguration = new[]
                {
                    new CacheConfiguration("myCache")
                    {
                        CacheMode = CacheMode.Replicated,
                        QueryEntities = new[]
                        {
                            new QueryEntity(typeof(int)),
                            new QueryEntity(typeof(int), typeof(string))
                        }
                    }
                },
                IncludedEventTypes = new[] { EventType.CacheEntryCreated, EventType.CacheNodesLeft }
            };

            // NOTE(review): the expected literal below was whitespace-collapsed at some point;
            // FixLineEndings normalizes line endings, not spaces — confirm the literal matches ToXml output.
            Assert.AreEqual(FixLineEndings(@"<?xml version=""1.0"" encoding=""utf-16""?> <igniteConfiguration clientMode=""true"" igniteInstanceName=""myGrid"" xmlns=""http://ignite.apache.org/schema/dotnet/IgniteConfigurationSection""> <cacheConfiguration> <cacheConfiguration cacheMode=""Replicated"" name=""myCache""> <queryEntities> <queryEntity valueType=""System.Int32"" valueTypeName=""java.lang.Integer"" /> <queryEntity keyType=""System.Int32"" keyTypeName=""java.lang.Integer"" valueType=""System.String"" valueTypeName=""java.lang.String"" /> </queryEntities> </cacheConfiguration> </cacheConfiguration> <includedEventTypes> <int>CacheEntryCreated</int> <int>CacheNodesLeft</int> </includedEventTypes> </igniteConfiguration>"), cfg.ToXml());

            // Custom section name and indent
            var sb = new StringBuilder();
            var settings = new XmlWriterSettings { Indent = true, IndentChars = " " };

            using (var xmlWriter = XmlWriter.Create(sb, settings))
            {
                cfg.ToXml(xmlWriter, "igCfg");
            }

            Assert.AreEqual(FixLineEndings(@"<?xml version=""1.0"" encoding=""utf-16""?> <igCfg clientMode=""true"" igniteInstanceName=""myGrid"" xmlns=""http://ignite.apache.org/schema/dotnet/IgniteConfigurationSection""> <cacheConfiguration> <cacheConfiguration cacheMode=""Replicated"" name=""myCache""> <queryEntities> <queryEntity valueType=""System.Int32"" valueTypeName=""java.lang.Integer"" /> <queryEntity keyType=""System.Int32"" keyTypeName=""java.lang.Integer"" valueType=""System.String"" valueTypeName=""java.lang.String"" /> </queryEntities> </cacheConfiguration> </cacheConfiguration> <includedEventTypes> <int>CacheEntryCreated</int> <int>CacheNodesLeft</int> </includedEventTypes> </igCfg>"), sb.ToString());
        }

        /// <summary>
        /// Tests the deserialization.
        /// Covers empty sections, attribute-based init, invalid attribute errors,
        /// and the XmlReader overload of FromXml.
        /// </summary>
        [Test]
        public void TestFromXml()
        {
            // Empty section.
            var cfg = IgniteConfiguration.FromXml("<x />");
            AssertExtensions.ReflectionEqual(new IgniteConfiguration(), cfg);

            // Empty section with XML header.
            cfg = IgniteConfiguration.FromXml("<?xml version=\"1.0\" encoding=\"utf-16\"?><x />");
            AssertExtensions.ReflectionEqual(new IgniteConfiguration(), cfg);

            // Simple test.
            cfg = IgniteConfiguration.FromXml(@"<igCfg igniteInstanceName=""myGrid"" clientMode=""true"" />");
            AssertExtensions.ReflectionEqual(new IgniteConfiguration {IgniteInstanceName = "myGrid", ClientMode = true}, cfg);

            // Invalid xml.
            var ex = Assert.Throws<ConfigurationErrorsException>(() =>
                IgniteConfiguration.FromXml(@"<igCfg foo=""bar"" />"));

            Assert.AreEqual("Invalid IgniteConfiguration attribute 'foo=bar', there is no such property " +
                            "on 'Apache.Ignite.Core.IgniteConfiguration'", ex.Message);

            // Xml reader.
            using (var xmlReader = XmlReader.Create(
                new StringReader(@"<igCfg igniteInstanceName=""myGrid"" clientMode=""true"" />")))
            {
                cfg = IgniteConfiguration.FromXml(xmlReader);
            }
            AssertExtensions.ReflectionEqual(new IgniteConfiguration { IgniteInstanceName = "myGrid", ClientMode = true }, cfg);
        }

        /// <summary>
        /// Ensures windows-style \r\n line endings in a string literal.
        /// Git settings may cause string literals in both styles.
        /// </summary>
        private static string FixLineEndings(string s)
        {
            return s.Split('\n').Select(x => x.TrimEnd('\r'))
                .Aggregate((acc, x) => string.Format("{0}\r\n{1}", acc, x));
        }

        /// <summary>
        /// Checks the schema validation.
/// </summary>
        private static void CheckSchemaValidation()
        {
            // Validate the full test configuration rendered to XML.
            var xml = GetTestConfig().ToXml();
            CheckSchemaValidation(xml);
        }

        /// <summary>
        /// Validates the given XML against the Ignite configuration section schema.
        /// </summary>
        private static void CheckSchemaValidation(string xml)
        {
            const string xmlns = "http://ignite.apache.org/schema/dotnet/IgniteConfigurationSection";
            const string schemaFile = "IgniteConfigurationSection.xsd";

            CheckSchemaValidation(xml, xmlns, schemaFile);
        }

        /// <summary>
        /// Validates the given XML against the specified schema file; throws on violations.
        /// </summary>
        public static void CheckSchemaValidation(string xml, string xmlns, string schemaFile)
        {
            var doc = new XmlDocument();

            doc.Schemas.Add(xmlns, XmlReader.Create(schemaFile));
            doc.Load(new StringReader(xml));

            doc.Validate(null);
        }

        /// <summary>
        /// Round-trips a configuration through XML and asserts reflection-equality.
        /// </summary>
        /// <param name="cfg">The config.</param>
        private static void CheckSerializeDeserialize(IgniteConfiguration cfg)
        {
            AssertExtensions.ReflectionEqual(cfg, SerializeDeserialize(cfg));
        }

        /// <summary>
        /// Serializes a config to XML and deserializes it back.
        /// </summary>
        private static IgniteConfiguration SerializeDeserialize(IgniteConfiguration cfg)
        {
            return IgniteConfiguration.FromXml(cfg.ToXml());
        }

        /// <summary>
        /// Gets the test configuration.
/// </summary>
        // Builds a configuration with (nearly) every property set to a distinctive,
        // non-default value so serialization round-trips and the matching assertion
        // method can detect any property that is dropped or mangled.
        private static IgniteConfiguration GetTestConfig()
        {
            return new IgniteConfiguration
            {
                IgniteInstanceName = "gridName",
                JvmOptions = new[] {"1", "2"},
                Localhost = "localhost11",
                JvmClasspath = "classpath",
                Assemblies = new[] {"asm1", "asm2", "asm3"},
                BinaryConfiguration = new BinaryConfiguration
                {
                    TypeConfigurations = new[]
                    {
                        new BinaryTypeConfiguration
                        {
                            IsEnum = true,
                            KeepDeserialized = true,
                            AffinityKeyFieldName = "affKeyFieldName",
                            TypeName = "typeName",
                            IdMapper = new IdMapper(),
                            NameMapper = new NameMapper(),
                            Serializer = new TestSerializer()
                        },
                        new BinaryTypeConfiguration
                        {
                            IsEnum = false,
                            KeepDeserialized = false,
                            AffinityKeyFieldName = "affKeyFieldName",
                            TypeName = "typeName2",
                            Serializer = new BinaryReflectiveSerializer()
                        }
                    },
                    Types = new[] {typeof(string).FullName},
                    IdMapper = new IdMapper(),
                    KeepDeserialized = true,
                    NameMapper = new NameMapper(),
                    Serializer = new TestSerializer()
                },
                CacheConfiguration = new[]
                {
                    new CacheConfiguration("cacheName")
                    {
                        AtomicityMode = CacheAtomicityMode.Transactional,
                        Backups = 15,
                        CacheMode = CacheMode.Replicated,
                        CacheStoreFactory = new TestCacheStoreFactory(),
                        CopyOnRead = false,
                        EagerTtl = false,
                        Invalidate = true,
                        KeepBinaryInStore = true,
                        LoadPreviousValue = true,
                        LockTimeout = TimeSpan.FromSeconds(56),
                        MaxConcurrentAsyncOperations = 24,
                        QueryEntities = new[]
                        {
                            new QueryEntity
                            {
                                Fields = new[]
                                {
                                    new QueryField("field", typeof(int))
                                    {
                                        IsKeyField = true,
                                        NotNull = true,
                                        DefaultValue = "foo"
                                    }
                                },
                                Indexes = new[]
                                {
                                    new QueryIndex("field") { IndexType = QueryIndexType.FullText, InlineSize = 32 }
                                },
                                Aliases = new[]
                                {
                                    new QueryAlias("field.field", "fld")
                                },
                                KeyType = typeof(string),
                                ValueType = typeof(long),
                                TableName = "table-1",
                                KeyFieldName = "k",
                                ValueFieldName = "v"
                            },
                        },
                        ReadFromBackup = false,
                        RebalanceBatchSize = 33,
                        RebalanceDelay = TimeSpan.MaxValue,
                        RebalanceMode = CacheRebalanceMode.Sync,
                        RebalanceThrottle = TimeSpan.FromHours(44),
                        RebalanceTimeout = TimeSpan.FromMinutes(8),
                        SqlEscapeAll = true,
                        WriteBehindBatchSize = 45,
                        WriteBehindEnabled = true,
                        WriteBehindFlushFrequency = TimeSpan.FromSeconds(55),
                        WriteBehindFlushSize = 66,
                        WriteBehindFlushThreadCount = 2,
                        WriteBehindCoalescing = false,
                        WriteSynchronizationMode = CacheWriteSynchronizationMode.FullAsync,
                        NearConfiguration = new NearCacheConfiguration
                        {
                            NearStartSize = 5,
                            EvictionPolicy = new FifoEvictionPolicy
                            {
                                BatchSize = 19,
                                MaxMemorySize = 1024,
                                MaxSize = 555
                            }
                        },
                        EvictionPolicy = new LruEvictionPolicy
                        {
                            BatchSize = 18,
                            MaxMemorySize = 1023,
                            MaxSize = 554
                        },
                        AffinityFunction = new RendezvousAffinityFunction
                        {
                            ExcludeNeighbors = true,
                            Partitions = 48
                        },
                        ExpiryPolicyFactory = new MyPolicyFactory(),
                        EnableStatistics = true,
                        PluginConfigurations = new[] { new MyPluginConfiguration() },
                        MemoryPolicyName = "somePolicy",
                        PartitionLossPolicy = PartitionLossPolicy.ReadOnlyAll,
                        GroupName = "abc",
                        SqlIndexMaxInlineSize = 24,
                        KeyConfiguration = new[]
                        {
                            new CacheKeyConfiguration
                            {
                                AffinityKeyFieldName = "abc",
                                TypeName = "def"
                            },
                        },
                        OnheapCacheEnabled = true,
                        StoreConcurrentLoadAllThreshold = 7,
                        RebalanceOrder = 3,
                        RebalanceBatchesPrefetchCount = 4,
                        MaxQueryIteratorsCount = 512,
                        QueryDetailMetricsSize = 100,
                        QueryParallelism = 16,
                        SqlSchema = "foo"
                    }
                },
                ClientMode = true,
                DiscoverySpi = new TcpDiscoverySpi
                {
                    NetworkTimeout = TimeSpan.FromSeconds(1),
                    SocketTimeout = TimeSpan.FromSeconds(2),
                    AckTimeout = TimeSpan.FromSeconds(3),
                    JoinTimeout = TimeSpan.FromSeconds(4),
                    MaxAckTimeout = TimeSpan.FromSeconds(5),
                    IpFinder = new TcpDiscoveryMulticastIpFinder
                    {
                        TimeToLive = 110,
                        MulticastGroup = "multicastGroup",
                        AddressRequestAttempts = 10,
                        MulticastPort = 987,
                        ResponseTimeout = TimeSpan.FromDays(1),
                        LocalAddress = "127.0.0.2",
                        Endpoints = new[] {"", "abc"}
                    },
                    ClientReconnectDisabled = true,
                    ForceServerMode = true,
                    IpFinderCleanFrequency = TimeSpan.FromMinutes(7),
                    LocalAddress = "127.0.0.1",
                    LocalPort = 49900,
                    LocalPortRange = 13,
                    ReconnectCount = 11,
                    StatisticsPrintFrequency = TimeSpan.FromSeconds(20),
                    ThreadPriority = 6,
                    TopologyHistorySize = 1234567
                },
                IgniteHome = "igniteHome",
                IncludedEventTypes = EventType.CacheQueryAll,
                JvmDllPath = @"c:\jvm",
                JvmInitialMemoryMb = 1024,
                JvmMaxMemoryMb = 2048,
                LifecycleHandlers = new[] {new LifecycleBean(), new LifecycleBean()},
                MetricsExpireTime = TimeSpan.FromSeconds(15),
                MetricsHistorySize = 45,
                MetricsLogFrequency = TimeSpan.FromDays(2),
                MetricsUpdateFrequency = TimeSpan.MinValue,
                NetworkSendRetryCount = 7,
                NetworkSendRetryDelay = TimeSpan.FromSeconds(98),
                NetworkTimeout = TimeSpan.FromMinutes(4),
                SuppressWarnings = true,
                WorkDirectory = @"c:\work",
                IsDaemon = true,
                // Mix plain values and a custom class to exercise attribute serialization.
                UserAttributes = Enumerable.Range(1, 10).ToDictionary(x => x.ToString(),
                    x => x % 2 == 0 ? (object) x : new FooClass {Bar = x.ToString()}),
                AtomicConfiguration = new AtomicConfiguration
                {
                    CacheMode = CacheMode.Replicated,
                    AtomicSequenceReserveSize = 200,
                    Backups = 2
                },
                TransactionConfiguration = new TransactionConfiguration
                {
                    PessimisticTransactionLogSize = 23,
                    DefaultTransactionIsolation = TransactionIsolation.ReadCommitted,
                    DefaultTimeout = TimeSpan.FromDays(2),
                    DefaultTransactionConcurrency = TransactionConcurrency.Optimistic,
                    PessimisticTransactionLogLinger = TimeSpan.FromHours(3)
                },
                CommunicationSpi = new TcpCommunicationSpi
                {
                    LocalPort = 47501,
                    MaxConnectTimeout = TimeSpan.FromSeconds(34),
                    MessageQueueLimit = 15,
                    ConnectTimeout = TimeSpan.FromSeconds(17),
                    IdleConnectionTimeout = TimeSpan.FromSeconds(19),
                    SelectorsCount = 8,
                    ReconnectCount = 33,
                    SocketReceiveBufferSize = 512,
                    AckSendThreshold = 99,
                    DirectBuffer = false,
                    DirectSendBuffer = true,
                    LocalPortRange = 45,
                    LocalAddress = "127.0.0.1",
                    TcpNoDelay = false,
                    SlowClientQueueLimit = 98,
                    SocketSendBufferSize = 2045,
                    UnacknowledgedMessagesBufferSize = 3450
                },
                SpringConfigUrl = "test",
                Logger = new IgniteNLogLogger(),
                FailureDetectionTimeout = TimeSpan.FromMinutes(2),
                ClientFailureDetectionTimeout = TimeSpan.FromMinutes(3),
                LongQueryWarningTimeout = TimeSpan.FromDays(4),
                PluginConfigurations = new[] {new TestIgnitePluginConfiguration()},
                EventStorageSpi = new MemoryEventStorageSpi
                {
                    ExpirationTimeout = TimeSpan.FromMilliseconds(12345),
                    MaxEventCount = 257
                },
                MemoryConfiguration = new MemoryConfiguration
                {
                    ConcurrencyLevel = 3,
                    DefaultMemoryPolicyName = "somePolicy",
                    PageSize = 4,
                    SystemCacheInitialSize = 5,
                    SystemCacheMaxSize = 6,
                    MemoryPolicies = new[]
                    {
                        new MemoryPolicyConfiguration
                        {
                            Name = "myDefaultPlc",
                            PageEvictionMode = DataPageEvictionMode.Random2Lru,
                            InitialSize = 245 * 1024 * 1024,
                            MaxSize = 345 * 1024 * 1024,
                            EvictionThreshold = 0.88,
                            EmptyPagesPoolSize = 77,
                            SwapFilePath = "myPath1",
                            RateTimeInterval = TimeSpan.FromSeconds(22),
                            SubIntervals = 99
                        },
                        new MemoryPolicyConfiguration
                        {
                            Name = "customPlc",
                            PageEvictionMode = DataPageEvictionMode.RandomLru,
                            EvictionThreshold = 0.77,
                            EmptyPagesPoolSize = 66,
                            SwapFilePath = "somePath2",
                            MetricsEnabled = true
                        }
                    }
                },
                PeerAssemblyLoadingMode = PeerAssemblyLoadingMode.CurrentAppDomain,
                ClientConnectorConfiguration = new ClientConnectorConfiguration
                {
                    Host = "foo",
                    Port = 2,
                    PortRange = 3,
                    MaxOpenCursorsPerConnection = 4,
                    SocketReceiveBufferSize = 5,
                    SocketSendBufferSize = 6,
                    TcpNoDelay = false,
                    ThinClientEnabled = false,
                    OdbcEnabled = false,
                    JdbcEnabled = false,
                    ThreadPoolSize = 7,
                    IdleTimeout = TimeSpan.FromMinutes(5)
                },
                PersistentStoreConfiguration = new PersistentStoreConfiguration
                {
                    AlwaysWriteFullPages = true,
                    CheckpointingFrequency = TimeSpan.FromSeconds(25),
                    CheckpointingPageBufferSize = 28 * 1024 * 1024,
                    CheckpointingThreads = 2,
                    LockWaitTime = TimeSpan.FromSeconds(5),
                    PersistentStorePath = Path.GetTempPath(),
                    TlbSize = 64 * 1024,
                    WalArchivePath = Path.GetTempPath(),
                    WalFlushFrequency = TimeSpan.FromSeconds(3),
                    WalFsyncDelayNanos = 3,
                    WalHistorySize = 10,
                    WalMode = WalMode.Background,
                    WalRecordIteratorBufferSize = 32 * 1024 * 1024,
                    WalSegments = 6,
                    WalSegmentSize = 5 * 1024 * 1024,
                    WalStorePath = Path.GetTempPath(),
                    SubIntervals = 25,
                    MetricsEnabled = true,
                    RateTimeInterval = TimeSpan.FromDays(1),
                    CheckpointWriteOrder = CheckpointWriteOrder.Random,
                    WriteThrottlingEnabled = true
                },
                IsActiveOnStart = false,
                ConsistentId = "myId123",
                LocalEventListeners = new[]
                {
                    new LocalEventListener<IEvent>
                    {
                        EventTypes = new[] {1, 2},
                        Listener = new MyEventListener()
                    }
                },
                DataStorageConfiguration = new DataStorageConfiguration
                {
                    AlwaysWriteFullPages = true,
                    CheckpointFrequency = TimeSpan.FromSeconds(25),
                    CheckpointThreads = 2,
                    LockWaitTime = TimeSpan.FromSeconds(5),
                    StoragePath = Path.GetTempPath(),
                    WalThreadLocalBufferSize = 64 * 1024,
                    WalArchivePath = Path.GetTempPath(),
                    WalFlushFrequency = TimeSpan.FromSeconds(3),
                    WalFsyncDelayNanos = 3,
                    WalHistorySize = 10,
                    WalMode = Core.Configuration.WalMode.None,
                    WalRecordIteratorBufferSize = 32 * 1024 * 1024,
                    WalSegments = 6,
                    WalSegmentSize = 5 * 1024 * 1024,
                    WalPath = Path.GetTempPath(),
                    MetricsEnabled = true,
                    MetricsSubIntervalCount = 7,
                    MetricsRateTimeInterval = TimeSpan.FromSeconds(9),
                    CheckpointWriteOrder = Core.Configuration.CheckpointWriteOrder.Sequential,
                    WriteThrottlingEnabled = true,
                    SystemRegionInitialSize = 64 * 1024 * 1024,
                    SystemRegionMaxSize = 128 * 1024 * 1024,
                    ConcurrencyLevel = 1,
                    PageSize = 5 * 1024,
                    WalAutoArchiveAfterInactivity = TimeSpan.FromSeconds(19),
                    DefaultDataRegionConfiguration = new DataRegionConfiguration
                    {
                        Name = "reg1",
                        EmptyPagesPoolSize = 50,
                        EvictionThreshold = 0.8,
                        InitialSize = 100 * 1024 * 1024,
                        MaxSize = 150 * 1024 * 1024,
                        MetricsEnabled = true,
                        PageEvictionMode = Core.Configuration.DataPageEvictionMode.RandomLru,
                        PersistenceEnabled = false,
                        MetricsRateTimeInterval = TimeSpan.FromMinutes(2),
                        MetricsSubIntervalCount = 6,
                        SwapPath = Path.GetTempPath(),
                        CheckpointPageBufferSize = 7
                    },
                    DataRegionConfigurations = new[]
                    {
                        new DataRegionConfiguration
                        {
                            Name = "reg2",
                            EmptyPagesPoolSize = 51,
                            EvictionThreshold = 0.7,
                            InitialSize = 101 * 1024 * 1024,
                            MaxSize = 151 * 1024 * 1024,
                            MetricsEnabled = false,
                            PageEvictionMode = Core.Configuration.DataPageEvictionMode.RandomLru,
                            PersistenceEnabled = false,
                            MetricsRateTimeInterval = TimeSpan.FromMinutes(3),
                            MetricsSubIntervalCount = 7,
                            SwapPath = Path.GetTempPath()
                        }
                    }
                },
                SslContextFactory = new SslContextFactory(),
                FailureHandler = new StopNodeOrHaltFailureHandler()
                {
                    TryStop = false,
                    Timeout = TimeSpan.FromSeconds(10)
                }
            };
        }

        /// <summary>
        /// Runs the with custom culture.
        /// </summary>
        /// <param name="action">The action.</param>
        private static void RunWithCustomCulture(Action action)
        {
            RunWithCulture(action, CultureInfo.InvariantCulture);
            RunWithCulture(action, CultureInfo.GetCultureInfo("ru-RU"));
        }

        /// <summary>
        /// Runs the with culture. Restores the original thread culture even if the action throws.
        /// </summary>
        /// <param name="action">The action.</param>
        /// <param name="cultureInfo">The culture information.</param>
        private static void RunWithCulture(Action action, CultureInfo cultureInfo)
        {
            var oldCulture = Thread.CurrentThread.CurrentCulture;

            try
            {
                Thread.CurrentThread.CurrentCulture = cultureInfo;
                action();
            }
            finally
            {
                Thread.CurrentThread.CurrentCulture = oldCulture;
            }
        }

        /// <summary>
        /// Test bean.
        /// </summary>
        public class LifecycleBean : ILifecycleHandler
        {
            /// <summary>
            /// Gets or sets the foo.
            /// </summary>
            /// <value>
            /// The foo.
            /// </value>
            public int Foo { get; set; }

            /// <summary>
            /// This method is called when lifecycle event occurs.
            /// </summary>
            /// <param name="evt">Lifecycle event.</param>
            public void OnLifecycleEvent(LifecycleEventType evt)
            {
                // No-op.
            }
        }

        /// <summary>
        /// Test mapper.
        /// </summary>
        public class NameMapper : IBinaryNameMapper
        {
            /// <summary>
            /// Gets or sets the bar.
            /// </summary>
            /// <value>
            /// The bar.
            /// </value>
            public string Bar { get; set; }

            /// <summary>
            /// Gets the type name.
            /// </summary>
            /// <param name="name">The name.</param>
            /// <returns>
            /// Type name.
            /// </returns>
            public string GetTypeName(string name)
            {
                return name;
            }

            /// <summary>
            /// Gets the field name.
            /// </summary>
            /// <param name="name">The name.</param>
            /// <returns>
            /// Field name.
/// </returns>
            public string GetFieldName(string name)
            {
                return name;
            }
        }

        /// <summary>
        /// Serializer. No-op implementation used only to verify the property round-trips.
        /// </summary>
        public class TestSerializer : IBinarySerializer
        {
            /** <inheritdoc /> */
            public void WriteBinary(object obj, IBinaryWriter writer)
            {
                // No-op.
            }

            /** <inheritdoc /> */
            public void ReadBinary(object obj, IBinaryReader reader)
            {
                // No-op.
            }
        }

        /// <summary>
        /// Test class. Value-equality on <see cref="Bar"/> so it can be compared inside UserAttributes.
        /// </summary>
        public class FooClass
        {
            public string Bar { get; set; }

            public override bool Equals(object obj)
            {
                if (ReferenceEquals(null, obj)) return false;
                if (ReferenceEquals(this, obj)) return true;
                if (obj.GetType() != GetType()) return false;
                return string.Equals(Bar, ((FooClass) obj).Bar);
            }

            public override int GetHashCode()
            {
                return Bar != null ? Bar.GetHashCode() : 0;
            }

            public static bool operator ==(FooClass left, FooClass right)
            {
                return Equals(left, right);
            }

            public static bool operator !=(FooClass left, FooClass right)
            {
                return !Equals(left, right);
            }
        }

        /// <summary>
        /// Test factory.
        /// </summary>
        public class TestCacheStoreFactory : IFactory<ICacheStore>
        {
            /// <summary>
            /// Creates an instance of the cache store.
            /// </summary>
            /// <returns>
            /// New instance of the cache store.
            /// </returns>
            public ICacheStore CreateInstance()
            {
                return null;
            }
        }

        /// <summary>
        /// Test logger. Members throw; it exists only so the Logger property can round-trip.
        /// </summary>
        public class TestLogger : ILogger
        {
            /** <inheritdoc /> */
            public void Log(LogLevel level, string message, object[] args, IFormatProvider formatProvider,
                string category, string nativeErrorInfo, Exception ex)
            {
                throw new NotImplementedException();
            }

            /** <inheritdoc /> */
            public bool IsEnabled(LogLevel level)
            {
                throw new NotImplementedException();
            }
        }

        /// <summary>
        /// Test factory.
        /// </summary>
        public class MyPolicyFactory : IFactory<IExpiryPolicy>
        {
            /** <inheritdoc /> */
            public IExpiryPolicy CreateInstance()
            {
                throw new NotImplementedException();
            }
        }

        // Minimal cache plugin configuration stub for serialization tests.
        public class MyPluginConfiguration : ICachePluginConfiguration
        {
            int? ICachePluginConfiguration.CachePluginConfigurationClosureFactoryId
            {
                get { return 0; }
            }

            void ICachePluginConfiguration.WriteBinary(IBinaryRawWriter writer)
            {
                throw new NotImplementedException();
            }
        }

        // Event listener stub; never invoked by these tests.
        public class MyEventListener : IEventListener<IEvent>
        {
            public bool Invoke(IEvent evt)
            {
                throw new NotImplementedException();
            }
        }
    }
}
using System.Collections.Generic;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;

namespace geek.GameEngine.Visuals
{
    /// <summary>
    /// Base class for drawable scene objects: holds position relative to a parent
    /// <see cref="ObjectGroup"/> and provides layer-reordering helpers that are
    /// applied via deferred actions (so the parent's object list is never mutated mid-draw).
    /// </summary>
    public abstract class VisualObjectBase
    {
        #region Constructors

        protected VisualObjectBase()
        {
            // Objects are visible by default.
            IsVisible = true;
        }

        #endregion

        #region Fields

        /// <summary>
        /// Visual object position relative to it's parent (scene, batch, etc).
        /// </summary>
        public Vector2 Position;

        /// <summary>
        /// Base object, to which current object is relative.
        /// </summary>
        public ObjectGroup Parent { get; set; }

        /// <summary>
        /// Gets or sets the flag indicating the object is to be displayed.
        /// </summary>
        public bool IsVisible;

        /// <summary>
        /// The width of the object's bounding box.
        /// (Uses the relative-coordinate box; width is position-independent.)
        /// </summary>
        public float Width { get { return GetBoundingBox().Width; } }

        /// <summary>
        /// The height of the bounding box.
        /// </summary>
        public float Height { get { return GetBoundingBox().Height; } }

        #endregion

        #region Methods

        /// <summary>
        /// Returns the absolute position of the sprite in scene coordinates.
        /// Sums Position up the parent chain.
        /// </summary>
        /// <returns></returns>
        public virtual Vector2 AbsolutePosition
        {
            get { return Parent == null ? Position : Parent.AbsolutePosition + Position; }
        }

        /// <summary>
        /// Returns the relative position of the sprite to the screen.
        /// This allows automatic scrolling.
        /// </summary>
        /// <returns></returns>
        public virtual Vector2 RelativePosition
        {
            get { return Parent == null ? Position : Parent.RelativePosition + Position; }
        }

        /// <summary>
        /// Get a position depending on the flag.
        /// </summary>
        /// <param name="absolute">Absolute or relative position.</param>
        /// <returns></returns>
        public Vector2 GetPosition(bool absolute = false)
        {
            return absolute ? AbsolutePosition : RelativePosition;
        }

        /// <summary>
        /// Get a bounding box for the current object.
        /// Base implementation is a zero-sized box at the object's position;
        /// subclasses are expected to supply real dimensions.
        /// </summary>
        /// <param name="absolute">Absolute or relative coordinates?</param>
        /// <returns></returns>
        public virtual Rectangle GetBoundingBox(bool absolute = false)
        {
            var pos = GetPosition(absolute);
            return new Rectangle((int)pos.X, (int)pos.Y, 0, 0);
        }

        /// <summary>
        /// The update logic method.
        /// </summary>
        public abstract void Update();

        /// <summary>
        /// The update screen method.
        /// </summary>
        /// <param name="batch">SpriteBatch to draw to.</param>
        public abstract void Draw(SpriteBatch batch);

        #endregion

        #region Visual layering

        /// <summary>
        /// The layer ID.
        /// It is updated when any sprite is drawn, making layering depend on drawing order.
        /// The limit is 10K objects per redraw, which seems like a fair limit.
        /// NOTE(review): with a step of 0.00001 the counter starting at 1 actually allows
        /// ~100K reads before going negative — confirm the intended limit.
        /// </summary>
        private static float _LayerId;

        // Each read decrements the counter, so later draws get smaller (front-most) layer values.
        public static float LayerId
        {
            get { return _LayerId -= 0.00001f; }
        }

        /// <summary>
        /// Bring the object 1 layer down in the drawing order.
        /// The swap is deferred; the captured index is the one computed now —
        /// it may be stale if the list changes before deferred actions run.
        /// </summary>
        public void BringDown()
        {
            List<VisualObjectBase> list;
            int index;
            var ok = getContainingObjectList(out list, out index);
            if (!ok || index == list.Count-1)
                return;

            GameCore.RegisterDeferredAction(
                () =>
                {
                    var tmp = list[index];
                    list[index] = list[index + 1];
                    list[index + 1] = tmp;
                }
            );
        }

        /// <summary>
        /// Bring the object to the bottom of current object batch.
        /// </summary>
        public void BringToBack()
        {
            List<VisualObjectBase> list;
            int index;
            var ok = getContainingObjectList(out list, out index);
            if (!ok || index == list.Count - 1)
                return;  // Already last.

            GameCore.RegisterDeferredAction(
                () =>
                {
                    list.Remove(this);
                    list.Add(this);
                }
            );
        }

        /// <summary>
        /// Bring the object 1 layer up in the drawing order.
        /// </summary>
        public void BringUp()
        {
            List<VisualObjectBase> list;
            int index;
            var ok = getContainingObjectList(out list, out index);
            if (!ok || index == 0)
                return;  // Already first.

            GameCore.RegisterDeferredAction(
                () =>
                {
                    var tmp = list[index];
                    list[index] = list[index - 1];
                    list[index - 1] = tmp;
                }
            );
        }

        /// <summary>
        /// Bring the object to the top of current object batch.
        /// </summary>
        public void BringToFront()
        {
            List<VisualObjectBase> list;
            int index;
            var ok = getContainingObjectList(out list, out index);
            if (!ok || index == 0)
                return;

            GameCore.RegisterDeferredAction(
                () =>
                {
                    list.Remove(this);
                    list.Insert(0, this);
                }
            );
        }

        /// <summary>
        /// Remove the object from visual list.
        /// Deferred, so removal is safe while the list is being iterated.
        /// </summary>
        public virtual void Remove()
        {
            List<VisualObjectBase> list;
            int index;
            var ok = getContainingObjectList(out list, out index);
            if (!ok)
                return;

            GameCore.RegisterDeferredAction(() => list.Remove(this));
        }

        /// <summary>
        /// Gets the parent VisualObjectBatch's list for further manipulations with layering.
        /// </summary>
        /// <param name="list">The list containing current object.</param>
        /// <param name="index">The index of the item in the list.</param>
        /// <returns>Success flag.</returns>
        private bool getContainingObjectList(out List<VisualObjectBase> list, out int index)
        {
            list = null;
            index = -1;
            if (Parent == null)
                return false;

            list = Parent.Objects;
            index = list.IndexOf(this);
            return index != -1;
        }

        /// <summary>
        /// Reset the layer id to start new drawing loop.
        /// </summary>
        public static void ResetLayerId()
        {
            _LayerId = 1f;
        }

        #endregion
    }
}
/* * Copyright (c) 2008, openmetaverse.org * All rights reserved. * * - Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * - Neither the name of the openmetaverse.org nor the names * of its contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;

namespace OpenMetaverse.StructuredData
{
    /// <summary>
    /// The type tag carried by every OSD node.
    /// </summary>
    public enum OSDType
    {
        /// <summary>Untyped ("undef") value</summary>
        Unknown,
        /// <summary>Boolean value</summary>
        Boolean,
        /// <summary>Signed 32-bit integer value</summary>
        Integer,
        /// <summary>Double-precision floating point value</summary>
        Real,
        /// <summary>String value</summary>
        String,
        /// <summary>UUID value</summary>
        UUID,
        /// <summary>Date/time value</summary>
        Date,
        /// <summary>URI value</summary>
        URI,
        /// <summary>Raw binary blob</summary>
        Binary,
        /// <summary>String-keyed map of OSD values</summary>
        Map,
        /// <summary>Ordered list of OSD values</summary>
        Array
    }

    /// <summary>
    /// Wire formats understood by the serializers.
    /// </summary>
    public enum OSDFormat
    {
        Xml = 0,
        Json,
        Binary
    }

    /// <summary>
    /// Thrown when structured data cannot be parsed or serialized.
    /// </summary>
    public class OSDException : Exception
    {
        public OSDException(string message) : base(message) { }
    }

    /// <summary>
    /// Base node of the OSD (structured data) hierarchy. An instance of this
    /// class itself represents the "undef" value; subclasses override the
    /// As*() accessors they can meaningfully convert to, and everything else
    /// falls back to the neutral defaults defined here.
    /// </summary>
    public partial class OSD
    {
        public virtual OSDType Type { get { return OSDType.Unknown; } }

        public virtual bool AsBoolean() { return false; }
        public virtual int AsInteger() { return 0; }
        public virtual uint AsUInteger() { return 0; }
        public virtual long AsLong() { return 0; }
        public virtual ulong AsULong() { return 0; }
        public virtual double AsReal() { return 0d; }
        public virtual string AsString() { return String.Empty; }
        public virtual UUID AsUUID() { return UUID.Zero; }
        public virtual DateTime AsDate() { return Utils.Epoch; }
        public virtual Uri AsUri() { return null; }
        public virtual byte[] AsBinary() { return Utils.EmptyBytes; }
        public virtual Vector2 AsVector2() { return Vector2.Zero; }
        public virtual Vector3 AsVector3() { return Vector3.Zero; }
        public virtual Vector3d AsVector3d() { return Vector3d.Zero; }
        public virtual Vector4 AsVector4() { return Vector4.Zero; }
        public virtual Quaternion AsQuaternion() { return Quaternion.Identity; }
        public virtual Color4 AsColor4() { return Color4.Black; }

        public override string ToString() { return "undef"; }

        public static OSD FromBoolean(bool value) { return new OSDBoolean(value); }
        public static OSD FromInteger(int value) { return new OSDInteger(value); }
        public static OSD FromInteger(uint value) { return new OSDInteger((int)value); }
        public static OSD FromInteger(short value) { return new OSDInteger((int)value); }
        public static OSD FromInteger(ushort value) { return new OSDInteger((int)value); }
        public static OSD FromInteger(sbyte value) { return new OSDInteger((int)value); }
        public static OSD FromInteger(byte value) { return new OSDInteger((int)value); }
        // Wide integer types are carried as big-endian binary blobs since the
        // Integer node type is only 32 bits
        public static OSD FromUInteger(uint value) { return new OSDBinary(value); }
        public static OSD FromLong(long value) { return new OSDBinary(value); }
        public static OSD FromULong(ulong value) { return new OSDBinary(value); }
        public static OSD FromReal(double value) { return new OSDReal(value); }
        public static OSD FromReal(float value) { return new OSDReal((double)value); }
        public static OSD FromString(string value) { return new OSDString(value); }
        public static OSD FromUUID(UUID value) { return new OSDUUID(value); }
        public static OSD FromDate(DateTime value) { return new OSDDate(value); }
        public static OSD FromUri(Uri value) { return new OSDUri(value); }
        public static OSD FromBinary(byte[] value) { return new OSDBinary(value); }

        // Vectors, quaternions and colors are represented as arrays of reals

        public static OSD FromVector2(Vector2 value)
        {
            OSDArray array = new OSDArray();
            array.Add(OSD.FromReal(value.X));
            array.Add(OSD.FromReal(value.Y));
            return array;
        }

        public static OSD FromVector3(Vector3 value)
        {
            OSDArray array = new OSDArray();
            array.Add(OSD.FromReal(value.X));
            array.Add(OSD.FromReal(value.Y));
            array.Add(OSD.FromReal(value.Z));
            return array;
        }

        public static OSD FromVector3d(Vector3d value)
        {
            OSDArray array = new OSDArray();
            array.Add(OSD.FromReal(value.X));
            array.Add(OSD.FromReal(value.Y));
            array.Add(OSD.FromReal(value.Z));
            return array;
        }

        public static OSD FromVector4(Vector4 value)
        {
            OSDArray array = new OSDArray();
            array.Add(OSD.FromReal(value.X));
            array.Add(OSD.FromReal(value.Y));
            array.Add(OSD.FromReal(value.Z));
            array.Add(OSD.FromReal(value.W));
            return array;
        }

        public static OSD FromQuaternion(Quaternion value)
        {
            OSDArray array = new OSDArray();
            array.Add(OSD.FromReal(value.X));
            array.Add(OSD.FromReal(value.Y));
            array.Add(OSD.FromReal(value.Z));
            array.Add(OSD.FromReal(value.W));
            return array;
        }

        public static OSD FromColor4(Color4 value)
        {
            OSDArray array = new OSDArray();
            array.Add(OSD.FromReal(value.R));
            array.Add(OSD.FromReal(value.G));
            array.Add(OSD.FromReal(value.B));
            array.Add(OSD.FromReal(value.A));
            return array;
        }

        /// <summary>
        /// Box an arbitrary CLR value into the matching OSD node type.
        /// Unrecognized types (and null) become the "undef" value.
        /// </summary>
        public static OSD FromObject(object value)
        {
            if (value == null) { return new OSD(); }
            else if (value is bool) { return new OSDBoolean((bool)value); }
            else if (value is int) { return new OSDInteger((int)value); }
            else if (value is uint) { return new OSDBinary((uint)value); }
            else if (value is short) { return new OSDInteger((int)(short)value); }
            else if (value is ushort) { return new OSDInteger((int)(ushort)value); }
            else if (value is sbyte) { return new OSDInteger((int)(sbyte)value); }
            else if (value is byte) { return new OSDInteger((int)(byte)value); }
            else if (value is double) { return new OSDReal((double)value); }
            else if (value is float) { return new OSDReal((double)(float)value); }
            else if (value is string) { return new OSDString((string)value); }
            else if (value is UUID) { return new OSDUUID((UUID)value); }
            else if (value is DateTime) { return new OSDDate((DateTime)value); }
            else if (value is Uri) { return new OSDUri((Uri)value); }
            else if (value is byte[]) { return new OSDBinary((byte[])value); }
            else if (value is long) { return new OSDBinary((long)value); }
            else if (value is ulong) { return new OSDBinary((ulong)value); }
            else if (value is Vector2) { return FromVector2((Vector2)value); }
            else if (value is Vector3) { return FromVector3((Vector3)value); }
            else if (value is Vector3d) { return FromVector3d((Vector3d)value); }
            else if (value is Vector4) { return FromVector4((Vector4)value); }
            else if (value is Quaternion) { return FromQuaternion((Quaternion)value); }
            else if (value is Color4) { return FromColor4((Color4)value); }
            else return new OSD();
        }

        /// <summary>
        /// Unbox an OSD node into the requested CLR type, or null when the
        /// type is not supported.
        /// </summary>
        public static object ToObject(Type type, OSD value)
        {
            if (type == typeof(ulong))
            {
                if (value.Type == OSDType.Binary)
                {
                    byte[] bytes = value.AsBinary();
                    return Utils.BytesToUInt64(bytes);
                }
                else
                {
                    return (ulong)value.AsInteger();
                }
            }
            else if (type == typeof(uint))
            {
                if (value.Type == OSDType.Binary)
                {
                    byte[] bytes = value.AsBinary();
                    return Utils.BytesToUInt(bytes);
                }
                else
                {
                    return (uint)value.AsInteger();
                }
            }
            else if (type == typeof(ushort)) { return (ushort)value.AsInteger(); }
            else if (type == typeof(byte)) { return (byte)value.AsInteger(); }
            else if (type == typeof(short)) { return (short)value.AsInteger(); }
            else if (type == typeof(string)) { return value.AsString(); }
            else if (type == typeof(bool)) { return value.AsBoolean(); }
            else if (type == typeof(float)) { return (float)value.AsReal(); }
            else if (type == typeof(double)) { return value.AsReal(); }
            else if (type == typeof(int)) { return value.AsInteger(); }
            else if (type == typeof(UUID)) { return value.AsUUID(); }
            // FIX: Vector2, Vector3d and Color4 cases were missing even though
            // FromObject can produce them, so round-tripping those fields via
            // SerializeMembers/DeserializeMembers silently dropped the values
            else if (type == typeof(Vector2))
            {
                if (value.Type == OSDType.Array)
                    return ((OSDArray)value).AsVector2();
                else
                    return Vector2.Zero;
            }
            else if (type == typeof(Vector3))
            {
                if (value.Type == OSDType.Array)
                    return ((OSDArray)value).AsVector3();
                else
                    return Vector3.Zero;
            }
            else if (type == typeof(Vector3d))
            {
                if (value.Type == OSDType.Array)
                    return ((OSDArray)value).AsVector3d();
                else
                    return Vector3d.Zero;
            }
            else if (type == typeof(Vector4))
            {
                if (value.Type == OSDType.Array)
                    return ((OSDArray)value).AsVector4();
                else
                    return Vector4.Zero;
            }
            else if (type == typeof(Quaternion))
            {
                if (value.Type == OSDType.Array)
                    return ((OSDArray)value).AsQuaternion();
                else
                    return Quaternion.Identity;
            }
            else if (type == typeof(Color4))
            {
                if (value.Type == OSDType.Array)
                    return ((OSDArray)value).AsColor4();
                else
                    return Color4.Black;
            }
            else
            {
                return null;
            }
        }

        #region Implicit Conversions

        public static implicit operator OSD(bool value) { return new OSDBoolean(value); }
        public static implicit operator OSD(int value) { return new OSDInteger(value); }
        public static implicit operator OSD(uint value) { return new OSDInteger((int)value); }
        public static implicit operator OSD(short value) { return new OSDInteger((int)value); }
        public static implicit operator OSD(ushort value) { return new OSDInteger((int)value); }
        public static implicit operator OSD(sbyte value) { return new OSDInteger((int)value); }
        public static implicit operator OSD(byte value) { return new OSDInteger((int)value); }
        public static implicit operator OSD(long value) { return new OSDBinary(value); }
        public static implicit operator OSD(ulong value) { return new OSDBinary(value); }
        public static implicit operator OSD(double value) { return new OSDReal(value); }
        public static implicit operator OSD(float value) { return new OSDReal(value); }
        public static implicit operator OSD(string value) { return new OSDString(value); }
        public static implicit operator OSD(UUID value) { return new OSDUUID(value); }
        public static implicit operator OSD(DateTime value) { return new OSDDate(value); }
        public static implicit operator OSD(Uri value) { return new OSDUri(value); }
        public static implicit operator OSD(byte[] value) { return new OSDBinary(value); }
        public static implicit operator OSD(Vector2 value) { return OSD.FromVector2(value); }
        public static implicit operator OSD(Vector3 value) { return OSD.FromVector3(value); }
        public static implicit operator OSD(Vector3d value) { return OSD.FromVector3d(value); }
        public static implicit operator OSD(Vector4 value) { return OSD.FromVector4(value); }
        public static implicit operator OSD(Quaternion value) { return OSD.FromQuaternion(value); }
        public static implicit operator OSD(Color4 value) { return OSD.FromColor4(value); }

        public static implicit operator bool(OSD value) { return value.AsBoolean(); }
        public static implicit operator int(OSD value) { return value.AsInteger(); }
        public static implicit operator uint(OSD value) { return value.AsUInteger(); }
        public static implicit operator long(OSD value) { return value.AsLong(); }
        public static implicit operator ulong(OSD value) { return value.AsULong(); }
        public static implicit operator double(OSD value) { return value.AsReal(); }
        public static implicit operator float(OSD value) { return (float)value.AsReal(); }
        public static implicit operator string(OSD value) { return value.AsString(); }
        public static implicit operator UUID(OSD value) { return value.AsUUID(); }
        public static implicit operator DateTime(OSD value) { return value.AsDate(); }
        public static implicit operator Uri(OSD value) { return value.AsUri(); }
        public static implicit operator byte[](OSD value) { return value.AsBinary(); }
        public static implicit operator Vector2(OSD value) { return value.AsVector2(); }
        public static implicit operator Vector3(OSD value) { return value.AsVector3(); }
        public static implicit operator Vector3d(OSD value) { return value.AsVector3d(); }
        public static implicit operator Vector4(OSD value) { return value.AsVector4(); }
        public static implicit operator Quaternion(OSD value) { return value.AsQuaternion(); }
        public static implicit operator Color4(OSD value) { return value.AsColor4(); }

        #endregion Implicit Conversions

        /// <summary>
        /// Uses reflection to create an SDMap from all of the SD
        /// serializable public fields in an object
        /// </summary>
        /// <param name="obj">Class or struct containing serializable types</param>
        /// <returns>An SDMap holding the serialized values from the
        /// container object</returns>
        public static OSDMap SerializeMembers(object obj)
        {
            Type t = obj.GetType();
            FieldInfo[] fields = t.GetFields();

            OSDMap map = new OSDMap(fields.Length);

            for (int i = 0; i < fields.Length; i++)
            {
                FieldInfo field = fields[i];
                if (!Attribute.IsDefined(field, typeof(NonSerializedAttribute)))
                {
                    OSD serializedField = OSD.FromObject(field.GetValue(obj));

                    // string and byte[] legitimately serialize to types other
                    // than Unknown even when empty, so they are always kept
                    if (serializedField.Type != OSDType.Unknown || field.FieldType == typeof(string) || field.FieldType == typeof(byte[]))
                        map.Add(field.Name, serializedField);
                }
            }

            return map;
        }

        /// <summary>
        /// Uses reflection to deserialize member variables in an object from
        /// an SDMap
        /// </summary>
        /// <param name="obj">Reference to an object to fill with deserialized
        /// values</param>
        /// <param name="serialized">Serialized values to put in the target
        /// object</param>
        public static void DeserializeMembers(ref object obj, OSDMap serialized)
        {
            Type t = obj.GetType();
            FieldInfo[] fields = t.GetFields();

            for (int i = 0; i < fields.Length; i++)
            {
                FieldInfo field = fields[i];
                if (!Attribute.IsDefined(field, typeof(NonSerializedAttribute)))
                {
                    OSD serializedField;
                    if (serialized.TryGetValue(field.Name, out serializedField))
                        field.SetValue(obj, ToObject(field.FieldType, serializedField));
                }
            }
        }
    }

    /// <summary>
    /// Boolean OSD node.
    /// </summary>
    public sealed class OSDBoolean : OSD
    {
        private bool value;

        // LLSD binary encoding uses ASCII '1' and '0'
        private static byte[] trueBinary = { 0x31 };
        private static byte[] falseBinary = { 0x30 };

        public override OSDType Type { get { return OSDType.Boolean; } }

        public OSDBoolean(bool value)
        {
            this.value = value;
        }

        public override bool AsBoolean() { return value; }
        public override int AsInteger() { return value ? 1 : 0; }
        public override double AsReal() { return value ? 1d : 0d; }
        public override string AsString() { return value ? "1" : "0"; }
        public override byte[] AsBinary() { return value ? trueBinary : falseBinary; }

        public override string ToString() { return AsString(); }
    }

    /// <summary>
    /// Signed 32-bit integer OSD node.
    /// </summary>
    public sealed class OSDInteger : OSD
    {
        private int value;

        public override OSDType Type { get { return OSDType.Integer; } }

        public OSDInteger(int value)
        {
            this.value = value;
        }

        public override bool AsBoolean() { return value != 0; }
        public override int AsInteger() { return value; }
        public override uint AsUInteger() { return (uint)value; }
        public override long AsLong() { return value; }
        public override ulong AsULong() { return (ulong)value; }
        public override double AsReal() { return (double)value; }
        public override string AsString() { return value.ToString(); }
        public override byte[] AsBinary() { return Utils.IntToBytesBig(value); }

        public override string ToString() { return AsString(); }
    }

    /// <summary>
    /// Double-precision floating point OSD node. Integer conversions clamp
    /// to the target type's range and treat NaN as zero.
    /// </summary>
    public sealed class OSDReal : OSD
    {
        private double value;

        public override OSDType Type { get { return OSDType.Real; } }

        public OSDReal(double value)
        {
            this.value = value;
        }

        public override bool AsBoolean() { return (!Double.IsNaN(value) && value != 0d); }

        public override int AsInteger()
        {
            if (Double.IsNaN(value))
                return 0;
            if (value > (double)Int32.MaxValue)
                return Int32.MaxValue;
            if (value < (double)Int32.MinValue)
                return Int32.MinValue;
            return (int)Math.Round(value);
        }

        public override uint AsUInteger()
        {
            if (Double.IsNaN(value))
                return 0;
            if (value > (double)UInt32.MaxValue)
                return UInt32.MaxValue;
            if (value < (double)UInt32.MinValue)
                return UInt32.MinValue;
            return (uint)Math.Round(value);
        }

        public override long AsLong()
        {
            if (Double.IsNaN(value))
                return 0;
            if (value > (double)Int64.MaxValue)
                return Int64.MaxValue;
            if (value < (double)Int64.MinValue)
                return Int64.MinValue;
            return (long)Math.Round(value);
        }

        public override ulong AsULong()
        {
            if (Double.IsNaN(value))
                return 0;
            // FIX: previously clamped overflow to Int32.MaxValue instead of
            // UInt64.MaxValue, silently truncating large values
            if (value > (double)UInt64.MaxValue)
                return UInt64.MaxValue;
            if (value < (double)UInt64.MinValue)
                return UInt64.MinValue;
            return (ulong)Math.Round(value);
        }

        public override double AsReal() { return value; }
        // "r" ensures the value will correctly round-trip back through Double.TryParse
        public override string AsString() { return value.ToString("r", Utils.EnUsCulture); }
        public override byte[] AsBinary() { return Utils.DoubleToBytesBig(value); }
        public override string ToString() { return AsString(); }
    }

    /// <summary>
    /// String OSD node. Numeric conversions go through Double.TryParse and
    /// floor the result.
    /// </summary>
    public sealed class OSDString : OSD
    {
        private string value;

        public override OSDType Type { get { return OSDType.String; } }

        public OSDString(string value)
        {
            // Refuse to hold null pointers
            if (value != null)
                this.value = value;
            else
                this.value = String.Empty;
        }

        public override bool AsBoolean()
        {
            if (String.IsNullOrEmpty(value))
                return false;

            if (value == "0" || value.ToLower() == "false")
                return false;

            return true;
        }

        public override int AsInteger()
        {
            double dbl;
            if (Double.TryParse(value, out dbl))
                return (int)Math.Floor(dbl);
            else
                return 0;
        }

        public override uint AsUInteger()
        {
            double dbl;
            if (Double.TryParse(value, out dbl))
                return (uint)Math.Floor(dbl);
            else
                return 0;
        }

        public override long AsLong()
        {
            double dbl;
            if (Double.TryParse(value, out dbl))
                return (long)Math.Floor(dbl);
            else
                return 0;
        }

        public override ulong AsULong()
        {
            double dbl;
            if (Double.TryParse(value, out dbl))
                return (ulong)Math.Floor(dbl);
            else
                return 0;
        }

        public override double AsReal()
        {
            double dbl;
            if (Double.TryParse(value, out dbl))
                return dbl;
            else
                return 0d;
        }

        public override string AsString() { return value; }
        public override byte[] AsBinary() { return Encoding.UTF8.GetBytes(value); }

        public override UUID AsUUID()
        {
            UUID uuid;
            if (UUID.TryParse(value, out uuid))
                return uuid;
            else
                return UUID.Zero;
        }

        public override DateTime AsDate()
        {
            DateTime dt;
            if (DateTime.TryParse(value, out dt))
                return dt;
            else
                return Utils.Epoch;
        }

        public override Uri AsUri()
        {
            Uri uri;
            if (Uri.TryCreate(value, UriKind.RelativeOrAbsolute, out uri))
                return uri;
            else
                return null;
        }

        public override string ToString() { return AsString(); }
    }

    /// <summary>
    /// UUID OSD node.
    /// </summary>
    public sealed class OSDUUID : OSD
    {
        private UUID value;

        public override OSDType Type { get { return OSDType.UUID; } }

        public OSDUUID(UUID value)
        {
            this.value = value;
        }

        public override bool AsBoolean() { return (value == UUID.Zero) ? false : true; }
        public override string AsString() { return value.ToString(); }
        public override UUID AsUUID() { return value; }
        public override byte[] AsBinary() { return value.GetBytes(); }
        public override string ToString() { return AsString(); }
    }

    /// <summary>
    /// Date/time OSD node. Integer conversions yield Unix time; the string
    /// form is ISO-8601 in UTC.
    /// </summary>
    public sealed class OSDDate : OSD
    {
        private DateTime value;

        public override OSDType Type { get { return OSDType.Date; } }

        public OSDDate(DateTime value)
        {
            this.value = value;
        }

        public override string AsString()
        {
            string format;
            if (value.Millisecond > 0)
                format = "yyyy-MM-ddTHH:mm:ss.ffZ";
            else
                format = "yyyy-MM-ddTHH:mm:ssZ";
            return value.ToUniversalTime().ToString(format);
        }

        public override int AsInteger()
        {
            return (int)Utils.DateTimeToUnixTime(value);
        }

        public override uint AsUInteger()
        {
            return Utils.DateTimeToUnixTime(value);
        }

        public override long AsLong()
        {
            return (long)Utils.DateTimeToUnixTime(value);
        }

        public override ulong AsULong()
        {
            return Utils.DateTimeToUnixTime(value);
        }

        public override byte[] AsBinary()
        {
            // Binary form is seconds-since-epoch as a big-endian double
            TimeSpan ts = value.ToUniversalTime() - new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
            return Utils.DoubleToBytesBig(ts.TotalSeconds);
        }

        public override DateTime AsDate() { return value; }
        public override string ToString() { return AsString(); }
    }

    /// <summary>
    /// URI OSD node.
    /// </summary>
    public sealed class OSDUri : OSD
    {
        private Uri value;

        public override OSDType Type { get { return OSDType.URI; } }

        public OSDUri(Uri value)
        {
            this.value = value;
        }

        public override string AsString()
        {
            if (value != null)
            {
                if (value.IsAbsoluteUri)
                    return value.AbsoluteUri;
                else
                    return value.ToString();
            }
            return string.Empty;
        }

        public override Uri AsUri() { return value; }
        public override byte[] AsBinary() { return Encoding.UTF8.GetBytes(AsString()); }
        public override string ToString() { return AsString(); }
    }

    /// <summary>
    /// Binary blob OSD node. Integer constructors/accessors use big-endian
    /// byte order.
    /// </summary>
    public sealed class OSDBinary : OSD
    {
        private byte[] value;

        public override OSDType Type { get { return OSDType.Binary; } }

        public OSDBinary(byte[] value)
        {
            if (value != null)
                this.value = value;
            else
                this.value = Utils.EmptyBytes;
        }

        public OSDBinary(uint value)
        {
            this.value = new byte[]
            {
                (byte)((value >> 24) % 256),
                (byte)((value >> 16) % 256),
                (byte)((value >> 8) % 256),
                (byte)(value % 256)
            };
        }

        public OSDBinary(long value)
        {
            this.value = new byte[]
            {
                (byte)((value >> 56) % 256),
                (byte)((value >> 48) % 256),
                (byte)((value >> 40) % 256),
                (byte)((value >> 32) % 256),
                (byte)((value >> 24) % 256),
                (byte)((value >> 16) % 256),
                (byte)((value >> 8) % 256),
                (byte)(value % 256)
            };
        }

        public OSDBinary(ulong value)
        {
            this.value = new byte[]
            {
                (byte)((value >> 56) % 256),
                (byte)((value >> 48) % 256),
                (byte)((value >> 40) % 256),
                (byte)((value >> 32) % 256),
                (byte)((value >> 24) % 256),
                (byte)((value >> 16) % 256),
                (byte)((value >> 8) % 256),
                (byte)(value % 256)
            };
        }

        public override string AsString() { return Convert.ToBase64String(value); }
        public override byte[] AsBinary() { return value; }

        public override uint AsUInteger()
        {
            return (uint)(
                (value[0] << 24) +
                (value[1] << 16) +
                (value[2] << 8) +
                (value[3] << 0));
        }

        public override long AsLong()
        {
            return (long)(
                ((long)value[0] << 56) +
                ((long)value[1] << 48) +
                ((long)value[2] << 40) +
                ((long)value[3] << 32) +
                ((long)value[4] << 24) +
                ((long)value[5] << 16) +
                ((long)value[6] << 8) +
                ((long)value[7] << 0));
        }

        public override ulong AsULong()
        {
            return (ulong)(
                ((ulong)value[0] << 56) +
                ((ulong)value[1] << 48) +
                ((ulong)value[2] << 40) +
                ((ulong)value[3] << 32) +
                ((ulong)value[4] << 24) +
                ((ulong)value[5] << 16) +
                ((ulong)value[6] << 8) +
                ((ulong)value[7] << 0));
        }

        public override string ToString()
        {
            return Utils.BytesToHexString(value, null);
        }
    }

    /// <summary>
    /// String-keyed map OSD node, wrapping a Dictionary&lt;string, OSD&gt;.
    /// Indexer reads return "undef" instead of throwing for missing keys.
    /// </summary>
    public sealed class OSDMap : OSD, IDictionary<string, OSD>
    {
        private Dictionary<string, OSD> value;

        public override OSDType Type { get { return OSDType.Map; } }

        public OSDMap()
        {
            value = new Dictionary<string, OSD>();
        }

        public OSDMap(int capacity)
        {
            value = new Dictionary<string, OSD>(capacity);
        }

        public OSDMap(Dictionary<string, OSD> value)
        {
            if (value != null)
                this.value = value;
            else
                this.value = new Dictionary<string, OSD>();
        }

        public override bool AsBoolean() { return value.Count > 0; }

        public override string ToString()
        {
            return OSDParser.SerializeJsonString(this, true);
        }

        #region IDictionary Implementation

        public int Count { get { return value.Count; } }
        public bool IsReadOnly { get { return false; } }
        public ICollection<string> Keys { get { return value.Keys; } }
        public ICollection<OSD> Values { get { return value.Values; } }

        public OSD this[string key]
        {
            get
            {
                OSD llsd;
                if (this.value.TryGetValue(key, out llsd))
                    return llsd;
                else
                    return new OSD();
            }
            set { this.value[key] = value; }
        }

        public bool ContainsKey(string key)
        {
            return value.ContainsKey(key);
        }

        public void Add(string key, OSD llsd)
        {
            value.Add(key, llsd);
        }

        public void Add(KeyValuePair<string, OSD> kvp)
        {
            value.Add(kvp.Key, kvp.Value);
        }

        public bool Remove(string key)
        {
            return value.Remove(key);
        }

        public bool TryGetValue(string key, out OSD llsd)
        {
            return value.TryGetValue(key, out llsd);
        }

        public void Clear()
        {
            value.Clear();
        }

        public bool Contains(KeyValuePair<string, OSD> kvp)
        {
            // NOTE: only the key is checked, not the value; this does not
            // fully honor the ICollection<KeyValuePair<,>> contract
            return value.ContainsKey(kvp.Key);
        }

        public void CopyTo(KeyValuePair<string, OSD>[] array, int index)
        {
            throw new NotImplementedException();
        }

        public bool Remove(KeyValuePair<string, OSD> kvp)
        {
            return this.value.Remove(kvp.Key);
        }

        public System.Collections.IDictionaryEnumerator GetEnumerator()
        {
            return value.GetEnumerator();
        }

        IEnumerator<KeyValuePair<string, OSD>> IEnumerable<KeyValuePair<string, OSD>>.GetEnumerator()
        {
            // FIX: previously returned null, which made any generic foreach
            // over an OSDMap throw a NullReferenceException
            return value.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return value.GetEnumerator();
        }

        #endregion IDictionary Implementation
    }

    /// <summary>
    /// Ordered list OSD node, wrapping a List&lt;OSD&gt;. Also provides the
    /// array-of-reals decodings for vectors, quaternions and colors.
    /// </summary>
    public sealed class OSDArray : OSD, IList<OSD>
    {
        private List<OSD> value;

        public override OSDType Type { get { return OSDType.Array; } }

        public OSDArray()
        {
            value = new List<OSD>();
        }

        public OSDArray(int capacity)
        {
            value = new List<OSD>(capacity);
        }

        public OSDArray(List<OSD> value)
        {
            if (value != null)
                this.value = value;
            else
                this.value = new List<OSD>();
        }

        public override byte[] AsBinary()
        {
            byte[] binary = new byte[value.Count];

            for (int i = 0; i < value.Count; i++)
                binary[i] = (byte)value[i].AsInteger();

            return binary;
        }

        public override long AsLong()
        {
            OSDBinary binary = new OSDBinary(AsBinary());
            return binary.AsLong();
        }

        public override ulong AsULong()
        {
            OSDBinary binary = new OSDBinary(AsBinary());
            return binary.AsULong();
        }

        public override uint AsUInteger()
        {
            OSDBinary binary = new OSDBinary(AsBinary());
            return binary.AsUInteger();
        }

        public override Vector2 AsVector2()
        {
            Vector2 vector = Vector2.Zero;

            if (this.Count == 2)
            {
                vector.X = (float)this[0].AsReal();
                vector.Y = (float)this[1].AsReal();
            }

            return vector;
        }

        public override Vector3 AsVector3()
        {
            Vector3 vector = Vector3.Zero;

            if (this.Count == 3)
            {
                vector.X = (float)this[0].AsReal();
                vector.Y = (float)this[1].AsReal();
                vector.Z = (float)this[2].AsReal();
            }

            return vector;
        }

        public override Vector3d AsVector3d()
        {
            Vector3d vector = Vector3d.Zero;

            if (this.Count == 3)
            {
                vector.X = this[0].AsReal();
                vector.Y = this[1].AsReal();
                vector.Z = this[2].AsReal();
            }

            return vector;
        }

        public override Vector4 AsVector4()
        {
            Vector4 vector = Vector4.Zero;

            if (this.Count == 4)
            {
                vector.X = (float)this[0].AsReal();
                vector.Y = (float)this[1].AsReal();
                vector.Z = (float)this[2].AsReal();
                vector.W = (float)this[3].AsReal();
            }

            return vector;
        }

        public override Quaternion AsQuaternion()
        {
            Quaternion quaternion = Quaternion.Identity;

            if (this.Count == 4)
            {
                quaternion.X = (float)this[0].AsReal();
                quaternion.Y = (float)this[1].AsReal();
                quaternion.Z = (float)this[2].AsReal();
                quaternion.W = (float)this[3].AsReal();
            }

            return quaternion;
        }

        public override Color4 AsColor4()
        {
            Color4 color = Color4.Black;

            if (this.Count == 4)
            {
                color.R = (float)this[0].AsReal();
                color.G = (float)this[1].AsReal();
                color.B = (float)this[2].AsReal();
                color.A = (float)this[3].AsReal();
            }

            return color;
        }

        public override bool AsBoolean() { return value.Count > 0; }

        public override string ToString()
        {
            return OSDParser.SerializeJsonString(this, true);
        }

        #region IList Implementation

        public int Count { get { return value.Count; } }
        public bool IsReadOnly { get { return false; } }

        public OSD this[int index]
        {
            get { return value[index]; }
            set { this.value[index] = value; }
        }

        public int IndexOf(OSD llsd)
        {
            return value.IndexOf(llsd);
        }

        public void Insert(int index, OSD llsd)
        {
            value.Insert(index, llsd);
        }

        public void RemoveAt(int index)
        {
            value.RemoveAt(index);
        }

        public void Add(OSD llsd)
        {
            value.Add(llsd);
        }

        public void Clear()
        {
            value.Clear();
        }

        public bool Contains(OSD llsd)
        {
            return value.Contains(llsd);
        }

        public bool Contains(string element)
        {
            for (int i = 0; i < value.Count; i++)
            {
                if (value[i].Type == OSDType.String && value[i].AsString() == element)
                    return true;
            }

            return false;
        }

        public void CopyTo(OSD[] array, int index)
        {
            throw new NotImplementedException();
        }

        public bool Remove(OSD llsd)
        {
            return value.Remove(llsd);
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return value.GetEnumerator();
        }

        IEnumerator<OSD> IEnumerable<OSD>.GetEnumerator()
        {
            return value.GetEnumerator();
        }

        #endregion IList Implementation
    }

    /// <summary>
    /// Format auto-detecting entry points for deserializing structured data.
    /// The serializer bodies live in the other partial class files.
    /// </summary>
    public partial class OSDParser
    {
        const string LLSD_BINARY_HEADER = "<? llsd/binary ?>";
        const string LLSD_XML_HEADER = "<llsd>";
        const string LLSD_XML_ALT_HEADER = "<?xml";
        const string LLSD_XML_ALT2_HEADER = "<? llsd/xml ?>";

        public static OSD Deserialize(byte[] data)
        {
            string header = Encoding.ASCII.GetString(data, 0, data.Length >= 17 ? 17 : data.Length);

            if (header.StartsWith(LLSD_BINARY_HEADER, StringComparison.InvariantCultureIgnoreCase))
            {
                return DeserializeLLSDBinary(data);
            }
            else if (header.StartsWith(LLSD_XML_HEADER, StringComparison.InvariantCultureIgnoreCase) ||
                header.StartsWith(LLSD_XML_ALT_HEADER, StringComparison.InvariantCultureIgnoreCase) ||
                header.StartsWith(LLSD_XML_ALT2_HEADER, StringComparison.InvariantCultureIgnoreCase))
            {
                return DeserializeLLSDXml(data);
            }
            else
            {
                return DeserializeJson(Encoding.UTF8.GetString(data));
            }
        }

        public static OSD Deserialize(string data)
        {
            if (data.StartsWith(LLSD_BINARY_HEADER, StringComparison.InvariantCultureIgnoreCase))
            {
                return DeserializeLLSDBinary(Encoding.UTF8.GetBytes(data));
            }
            else if (data.StartsWith(LLSD_XML_HEADER, StringComparison.InvariantCultureIgnoreCase) ||
                data.StartsWith(LLSD_XML_ALT_HEADER, StringComparison.InvariantCultureIgnoreCase) ||
                data.StartsWith(LLSD_XML_ALT2_HEADER, StringComparison.InvariantCultureIgnoreCase))
            {
                return DeserializeLLSDXml(data);
            }
            else
            {
                return DeserializeJson(data);
            }
        }

        public static OSD Deserialize(Stream stream)
        {
            if (stream.CanSeek)
            {
                // FIX: previously only 14 bytes were read, which is shorter
                // than the 17-character binary header, so StartsWith could
                // never match and binary streams fell through to the JSON
                // parser. Also made the comparisons case-insensitive for
                // consistency with the other Deserialize overloads.
                byte[] headerData = new byte[17];
                int bytesRead = stream.Read(headerData, 0, 17);
                stream.Seek(0, SeekOrigin.Begin);
                string header = Encoding.ASCII.GetString(headerData, 0, bytesRead);

                if (header.StartsWith(LLSD_BINARY_HEADER, StringComparison.InvariantCultureIgnoreCase))
                    return DeserializeLLSDBinary(stream);
                else if (header.StartsWith(LLSD_XML_HEADER, StringComparison.InvariantCultureIgnoreCase) ||
                    header.StartsWith(LLSD_XML_ALT_HEADER, StringComparison.InvariantCultureIgnoreCase) ||
                    header.StartsWith(LLSD_XML_ALT2_HEADER, StringComparison.InvariantCultureIgnoreCase))
                    return DeserializeLLSDXml(stream);
                else
                    return DeserializeJson(stream);
            }
            else
            {
                throw new OSDException("Cannot deserialize structured data from unseekable streams");
            }
        }
    }
}
// created on 09/04/2003 at 18:58
//
// System.Runtime.Serialization.Formatters.Soap.SoapTypeMapper
//
// Authors:
//      Jean-Marc Andre (jean-marc.andre@polymtl.ca)
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

using System;
using System.Reflection;
using System.Collections;
using System.Runtime.Remoting;
using System.Xml;
using System.Xml.Serialization;
using System.Runtime.Serialization.Formatters;
using System.Xml.Schema;
using System.Runtime.Remoting.Metadata.W3cXsd2001;
using System.Globalization;

namespace System.Runtime.Serialization.Formatters.Soap
{
	/// <summary>
	/// Immutable (prefix, local name, namespace URI) triple describing an XML
	/// element. Used as a hashtable key when mapping XML elements to CLR types,
	/// so equality and hashing are based on the XML-decoded name pair only
	/// (the prefix does not participate).
	/// </summary>
	internal class Element
	{
		private string _prefix;
		private string _localName;
		private string _namespaceURI;

		public Element(string prefix, string localName, string namespaceURI)
		{
			_prefix = prefix;
			_localName = localName;
			_namespaceURI = namespaceURI;
		}

		public Element(string localName, string namespaceURI)
			: this(null, localName, namespaceURI)
		{
		}

		public string Prefix
		{
			get { return _prefix; }
		}

		public string LocalName
		{
			get { return _localName; }
		}

		public string NamespaceURI
		{
			get { return _namespaceURI; }
		}

		/// <summary>
		/// Two elements are equal when their XML-decoded local names and
		/// namespace URIs both match.
		/// </summary>
		public override bool Equals(object obj)
		{
			// BUGFIX: obj was previously dereferenced without a null/type check
			// (NullReferenceException for null or non-Element arguments), and only
			// the argument's names were decoded while this instance's names were
			// compared raw — inconsistent with GetHashCode, which decodes both
			// sides. Decoding is the identity for plain names, so table lookups
			// against the predefined mappings behave exactly as before.
			Element other = obj as Element;
			if (other == null)
				return false;
			return XmlConvert.DecodeName(_localName) == XmlConvert.DecodeName(other._localName)
				&& XmlConvert.DecodeName(_namespaceURI) == XmlConvert.DecodeName(other._namespaceURI);
		}

		public override int GetHashCode()
		{
			// Must stay consistent with Equals: hash the decoded name pair.
			return (String.Format("{0} {1}",
				XmlConvert.DecodeName(_localName),
				XmlConvert.DecodeName(_namespaceURI))).GetHashCode();
		}

		public override string ToString()
		{
			return string.Format("Element.Prefix = {0}, Element.LocalName = {1}, Element.NamespaceURI = {2}",
				this.Prefix, this.LocalName, this.NamespaceURI);
		}
	}

	/// <summary>
	/// Maps between CLR types and the XML elements that represent them in SOAP
	/// payloads, in both directions. Predefined XSD/SOAP-ENC mappings are held
	/// in static tables; anything else is resolved dynamically via
	/// SoapServices and, when reading, an optional SerializationBinder.
	/// </summary>
	internal class SoapTypeMapper
	{
		// element -> assembly-qualified type name (used when reading).
		private static Hashtable xmlNodeToTypeTable = new Hashtable();
		// assembly-qualified type name -> element (used when writing).
		private static Hashtable typeToXmlNodeTable = new Hashtable();

		public static readonly string SoapEncodingNamespace = "http://schemas.xmlsoap.org/soap/encoding/";
		public static readonly string SoapEncodingPrefix = "SOAP-ENC";
		public static readonly string SoapEnvelopeNamespace = "http://schemas.xmlsoap.org/soap/envelope/";
		public static readonly string SoapEnvelopePrefix = "SOAP-ENV";

		private XmlTextWriter _xmlWriter;
		private long _prefixNumber;
		private Hashtable namespaceToPrefixTable = new Hashtable();
		private SerializationBinder _binder;
		// Sorted list of type names whose values can be written as simple XSD
		// text; sorted so CanBeValue can use a binary search.
		private static ArrayList _canBeValueTypeList;
		private FormatterAssemblyStyle _assemblyFormat = FormatterAssemblyStyle.Full;
		// Element used for typeof(string); only set by the writer constructor.
		private Element elementString;

		// Constructor used by SoapReader.
		public SoapTypeMapper(SerializationBinder binder)
		{
			_binder = binder;
		}

		// Constructor used by SoapWriter.
		public SoapTypeMapper(
			XmlTextWriter xmlWriter,
			FormatterAssemblyStyle assemblyFormat,
			FormatterTypeStyle typeFormat)
		{
			_xmlWriter = xmlWriter;
			_assemblyFormat = assemblyFormat;
			_prefixNumber = 1;

			// FormatterTypeStyle decides whether strings are written as
			// xsd:string or SOAP-ENC:string.
			if (typeFormat == FormatterTypeStyle.XsdString)
			{
				elementString = new Element("xsd", "string", XmlSchema.Namespace);
			}
			else
			{
				elementString = new Element(SoapEncodingPrefix, "string", SoapEncodingNamespace);
			}
		}

		static SoapTypeMapper()
		{
			_canBeValueTypeList = new ArrayList();
			_canBeValueTypeList.Add(typeof(DateTime).ToString());
			_canBeValueTypeList.Add(typeof(TimeSpan).ToString());
			_canBeValueTypeList.Add(typeof(string).ToString());
			_canBeValueTypeList.Add(typeof(decimal).ToString());
			_canBeValueTypeList.Sort();

			InitMappingTables();
		}

		/// <summary>
		/// Resolves the CLR type identified by an XML element, trying in order:
		/// the SerializationBinder, the predefined mapping table, and finally
		/// dynamic lookup by name / assembly load.
		/// </summary>
		/// <exception cref="SerializationException">
		/// Thrown when no type can be resolved for the element.
		/// </exception>
		public Type this[Element element]
		{
			get
			{
				Type type = null;
				string localName = XmlConvert.DecodeName(element.LocalName);
				string namespaceURI = XmlConvert.DecodeName(element.NamespaceURI);
				string typeNamespace, assemblyName;
				SoapServices.DecodeXmlNamespaceForClrTypeNamespace(
					element.NamespaceURI,
					out typeNamespace,
					out assemblyName);
				string typeName = typeNamespace + Type.Delimiter + localName;

				// Give the binder the first chance to map the type.
				if (assemblyName != null && assemblyName != string.Empty && _binder != null)
				{
					type = _binder.BindToType(assemblyName, typeName);
				}
				if (type == null)
				{
					string assemblyQualifiedName = (string) xmlNodeToTypeTable[element];
					if (assemblyQualifiedName != null)
					{
						type = Type.GetType(assemblyQualifiedName);
					}
					else
					{
						// Not a predefined mapping: try the raw local name, then
						// the namespace-qualified name, then an explicit
						// assembly load as the last resort.
						type = Type.GetType(element.LocalName);
						if (type == null)
						{
							type = Type.GetType(typeName);
							if (type == null)
							{
								if (assemblyName == null || assemblyName == String.Empty)
									throw new SerializationException(
										String.Format("Parse Error, no assembly associated with XML key {0} {1}",
											localName, namespaceURI));
								type = FormatterServices.GetTypeFromAssembly(
									Assembly.Load(assemblyName),
									typeName);
							}
						}
					}
					if (type == null)
						throw new SerializationException();
				}
				return type;
			}
		}

		/// <summary>
		/// Returns the XML element for a type given its full name and assembly
		/// name, building (and caching a prefix for) a CLR-namespace-encoded
		/// element when no predefined mapping exists.
		/// </summary>
		public Element this[string typeFullName, string assemblyName]
		{
			get
			{
				Element element;
				string typeNamespace = string.Empty;
				string typeName = typeFullName;

				// Simple style keys off the short assembly name only.
				if (_assemblyFormat == FormatterAssemblyStyle.Simple)
				{
					string[] items = assemblyName.Split(',');
					assemblyName = items[0];
				}
				string assemblyQualifiedName = typeFullName + ", " + assemblyName;
				element = (Element) typeToXmlNodeTable[assemblyQualifiedName];
				if (element == null)
				{
					int typeNameIndex = typeFullName.LastIndexOf('.');
					if (typeNameIndex != -1)
					{
						typeNamespace = typeFullName.Substring(0, typeNameIndex);
						typeName = typeFullName.Substring(typeNamespace.Length + 1);
					}
					// mscorlib types get a namespace without the assembly part.
					string namespaceURI = SoapServices.CodeXmlNamespaceForClrTypeNamespace(
						typeNamespace,
						(!assemblyName.StartsWith("mscorlib")) ? assemblyName : String.Empty);
					string prefix = (string) namespaceToPrefixTable[namespaceURI];
					if (prefix == null || prefix == string.Empty)
					{
						// Allocate a fresh document-unique prefix (a1, a2, ...).
						prefix = "a" + (_prefixNumber++).ToString();
						namespaceToPrefixTable[namespaceURI] = prefix;
					}

					element = new Element(
						prefix,
						XmlConvert.EncodeName(typeName),
						namespaceURI);
				}
				return element;
			}
		}

		/// <summary>
		/// Returns the XML element representing a CLR type, using the
		/// predefined tables where possible.
		/// </summary>
		public Element this[Type type]
		{
			get
			{
				// Strings use the element chosen by FormatterTypeStyle.
				if (type == typeof(string))
					return elementString;

				Element element = (Element) typeToXmlNodeTable[type.AssemblyQualifiedName];
				if (element == null)
				{
					element = this[type.FullName, type.Assembly.FullName];
				}
				else
				{
					// Predefined mappings carry no prefix; resolve one from the
					// writer's in-scope namespace declarations.
					element = new Element(
						(element.Prefix == null) ? _xmlWriter.LookupPrefix(element.NamespaceURI) : element.Prefix,
						element.LocalName,
						element.NamespaceURI);
				}
				if (element == null)
					throw new SerializationException("Oooops");
				return element;
			}
		}

		/// <summary>
		/// Returns true when values of the given type can be written inline as
		/// a simple XSD text value (primitives, enums, and a small whitelist).
		/// </summary>
		public static bool CanBeValue(Type type)
		{
			if (type.IsPrimitive)
				return true;
			if (type.IsEnum)
				return true;
			if (_canBeValueTypeList.BinarySearch(type.ToString()) >= 0)
			{
				return true;
			}
			return false;
		}

		// Registers element <-> type in both directions.
		private static void RegisterType(string localName, string namespaceURI, Type type)
		{
			Element element = new Element(localName, namespaceURI);
			xmlNodeToTypeTable.Add(element, type.AssemblyQualifiedName);
			typeToXmlNodeTable.Add(type.AssemblyQualifiedName, element);
		}

		// Registers element -> type only; used when several XML spellings map
		// to the same CLR type and the writer side is handled elsewhere.
		private static void RegisterXmlNodeOnly(string localName, string namespaceURI, Type type)
		{
			xmlNodeToTypeTable.Add(new Element(localName, namespaceURI), type.AssemblyQualifiedName);
		}

		private static void InitMappingTables()
		{
			RegisterType("Array", SoapEncodingNamespace, typeof(System.Array));
			// Strings may appear under either namespace when reading; the
			// written spelling is decided by elementString, so neither entry is
			// registered type -> element.
			RegisterXmlNodeOnly("string", XmlSchema.Namespace, typeof(string));
			RegisterXmlNodeOnly("string", SoapEncodingNamespace, typeof(string));
			RegisterType("boolean", XmlSchema.Namespace, typeof(bool));
			RegisterType("byte", XmlSchema.Namespace, typeof(sbyte));
			RegisterType("unsignedByte", XmlSchema.Namespace, typeof(byte));
			RegisterType("long", XmlSchema.Namespace, typeof(long));
			RegisterType("unsignedLong", XmlSchema.Namespace, typeof(ulong));
			RegisterType("int", XmlSchema.Namespace, typeof(int));
			RegisterType("unsignedInt", XmlSchema.Namespace, typeof(uint));
			RegisterType("float", XmlSchema.Namespace, typeof(float));
			RegisterType("double", XmlSchema.Namespace, typeof(double));
			RegisterType("decimal", XmlSchema.Namespace, typeof(decimal));
			RegisterType("short", XmlSchema.Namespace, typeof(short));
			RegisterType("unsignedShort", XmlSchema.Namespace, typeof(ushort));
			RegisterType("anyType", XmlSchema.Namespace, typeof(object));
			RegisterType("dateTime", XmlSchema.Namespace, typeof(DateTime));
			RegisterType("duration", XmlSchema.Namespace, typeof(TimeSpan));
			RegisterType("Fault", SoapEnvelopeNamespace, typeof(System.Runtime.Serialization.Formatters.SoapFault));
			RegisterType("base64", SoapEncodingNamespace, typeof(byte[]));
		}

		/// <summary>
		/// Formats a simple value as its culture-invariant XSD text
		/// representation (round-trippable formats for float/double).
		/// </summary>
		public static string GetXsdValue(object value)
		{
			if (value is DateTime)
			{
				return SoapDateTime.ToString((DateTime) value);
			}
			else if (value is decimal)
			{
				return ((decimal) value).ToString(CultureInfo.InvariantCulture);
			}
			else if (value is double)
			{
				// G17 guarantees round-tripping of doubles.
				return ((double) value).ToString("G17", CultureInfo.InvariantCulture);
			}
			else if (value is float)
			{
				// G9 guarantees round-tripping of floats.
				return ((float) value).ToString("G9", CultureInfo.InvariantCulture);
			}
			else if (value is TimeSpan)
			{
				return SoapDuration.ToString((TimeSpan) value);
			}
			else if (value is bool)
			{
				// XSD booleans are lowercase, unlike bool.ToString().
				return ((bool) value) ? "true" : "false";
			}
			else
			{
				return value.ToString();
			}
		}

		/// <summary>
		/// Parses a culture-invariant XSD text value back into the requested
		/// CLR type. Inverse of <see cref="GetXsdValue"/>.
		/// </summary>
		public static object ParseXsdValue(string value, Type type)
		{
			if (type == typeof(DateTime))
			{
				return SoapDateTime.Parse(value);
			}
			else if (type == typeof(decimal))
			{
				return decimal.Parse(value, CultureInfo.InvariantCulture);
			}
			else if (type == typeof(double))
			{
				return double.Parse(value, CultureInfo.InvariantCulture);
			}
			else if (type == typeof(float))
			{
				return float.Parse(value, CultureInfo.InvariantCulture);
			}
			else if (type == typeof(TimeSpan))
			{
				return SoapDuration.Parse(value);
			}
			else if (type.IsEnum)
			{
				return Enum.Parse(type, value);
			}
			else
			{
				return Convert.ChangeType(value, type, CultureInfo.InvariantCulture);
			}
		}
	}
}
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;

namespace ParentLoad.Business.ERLevel
{
    /// <summary>
    /// A05_SubContinent_ReChild (editable child object).<br/>
    /// This is a generated base class of <see cref="A05_SubContinent_ReChild"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="A04_SubContinent"/> collection.
    /// </remarks>
    [Serializable]
    public partial class A05_SubContinent_ReChild : BusinessBase<A05_SubContinent_ReChild>
    {
        #region State Fields

        // Database concurrency token; refreshed by the insert/update stored procedures.
        [NotUndoable]
        private byte[] _rowVersion = new byte[] {};

        // Foreign key to the parent row, read during fetch. Excluded from both
        // undo and serialization on purpose.
        [NotUndoable]
        [NonSerialized]
        internal int subContinent_ID2 = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="SubContinent_Child_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> SubContinent_Child_NameProperty = RegisterProperty<string>(p => p.SubContinent_Child_Name, "Sub Continent Child Name");

        /// <summary>
        /// Gets or sets the Sub Continent Child Name.
        /// </summary>
        /// <value>The Sub Continent Child Name.</value>
        public string SubContinent_Child_Name
        {
            get { return GetProperty(SubContinent_Child_NameProperty); }
            set { SetProperty(SubContinent_Child_NameProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="A05_SubContinent_ReChild"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="A05_SubContinent_ReChild"/> object.</returns>
        internal static A05_SubContinent_ReChild NewA05_SubContinent_ReChild()
        {
            return DataPortal.CreateChild<A05_SubContinent_ReChild>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="A05_SubContinent_ReChild"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        /// <returns>A reference to the fetched <see cref="A05_SubContinent_ReChild"/> object.</returns>
        internal static A05_SubContinent_ReChild GetA05_SubContinent_ReChild(SafeDataReader dr)
        {
            A05_SubContinent_ReChild child = new A05_SubContinent_ReChild();
            // Let the framework know this object lives inside a parent collection.
            child.MarkAsChild();
            child.Fetch(dr);
            // Mark as unchanged: the object now mirrors the persisted row.
            child.MarkOld();
            // Evaluate every object rule and property rule up front.
            child.BusinessRules.CheckRules();
            return child;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="A05_SubContinent_ReChild"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public A05_SubContinent_ReChild()
        {
            // Public only to satisfy serialization; always go through the
            // factory methods to create instances.
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="A05_SubContinent_ReChild"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            DataPortalHookArgs hookArgs = new DataPortalHookArgs();
            OnCreate(hookArgs);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="A05_SubContinent_ReChild"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties.
            LoadProperty(SubContinent_Child_NameProperty, dr.GetString("SubContinent_Child_Name"));
            _rowVersion = dr.GetValue("RowVersion") as byte[];
            // Parent properties.
            subContinent_ID2 = dr.GetInt32("SubContinent_ID2");
            DataPortalHookArgs hookArgs = new DataPortalHookArgs(dr);
            OnFetchRead(hookArgs);
        }

        /// <summary>
        /// Inserts a new <see cref="A05_SubContinent_ReChild"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(A04_SubContinent parent)
        {
            using (var mgr = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            using (var cmd = new SqlCommand("AddA05_SubContinent_ReChild", mgr.Connection))
            {
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@SubContinent_ID2", parent.SubContinent_ID).DbType = DbType.Int32;
                cmd.Parameters.AddWithValue("@SubContinent_Child_Name", ReadProperty(SubContinent_Child_NameProperty)).DbType = DbType.String;
                cmd.Parameters.Add("@NewRowVersion", SqlDbType.Timestamp).Direction = ParameterDirection.Output;
                DataPortalHookArgs hookArgs = new DataPortalHookArgs(cmd);
                OnInsertPre(hookArgs);
                cmd.ExecuteNonQuery();
                OnInsertPost(hookArgs);
                // Capture the concurrency token generated by the database.
                _rowVersion = (byte[]) cmd.Parameters["@NewRowVersion"].Value;
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="A05_SubContinent_ReChild"/> object.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(A04_SubContinent parent)
        {
            // Nothing to persist when the object is unchanged.
            if (!IsDirty)
                return;

            using (var mgr = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            using (var cmd = new SqlCommand("UpdateA05_SubContinent_ReChild", mgr.Connection))
            {
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@SubContinent_ID2", parent.SubContinent_ID).DbType = DbType.Int32;
                cmd.Parameters.AddWithValue("@SubContinent_Child_Name", ReadProperty(SubContinent_Child_NameProperty)).DbType = DbType.String;
                // Pass the last known token so the procedure can detect concurrent edits.
                cmd.Parameters.AddWithValue("@RowVersion", _rowVersion).DbType = DbType.Binary;
                cmd.Parameters.Add("@NewRowVersion", SqlDbType.Timestamp).Direction = ParameterDirection.Output;
                DataPortalHookArgs hookArgs = new DataPortalHookArgs(cmd);
                OnUpdatePre(hookArgs);
                cmd.ExecuteNonQuery();
                OnUpdatePost(hookArgs);
                // Refresh the concurrency token after a successful update.
                _rowVersion = (byte[]) cmd.Parameters["@NewRowVersion"].Value;
            }
        }

        /// <summary>
        /// Self deletes the <see cref="A05_SubContinent_ReChild"/> object from database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(A04_SubContinent parent)
        {
            using (var mgr = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            using (var cmd = new SqlCommand("DeleteA05_SubContinent_ReChild", mgr.Connection))
            {
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@SubContinent_ID2", parent.SubContinent_ID).DbType = DbType.Int32;
                DataPortalHookArgs hookArgs = new DataPortalHookArgs(cmd);
                OnDeletePre(hookArgs);
                cmd.ExecuteNonQuery();
                OnDeletePost(hookArgs);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion
    }
}
// Copyright (c) 2014 Thong Nguyen (tumtumtum@gmail.com)

using System;
using System.Collections.Generic;
using System.Linq;

namespace Platform
{
	/// <summary>
	/// Provides extension methods for array classes
	/// </summary>
	public static class ArrayUtils
	{
		/// <summary>
		/// Creates a new array that is made up of the elements of the given array
		/// after they have been converted by the given converter.
		/// </summary>
		/// <typeparam name="T">The element type of the original array</typeparam>
		/// <typeparam name="U">The element type of the new array</typeparam>
		/// <param name="self">The array to convert</param>
		/// <param name="convert">The converter that will convert the elements of
		/// original array into the element of the new array</param>
		/// <returns>A new array</returns>
		public static U[] NewArray<T, U>(this T[] self, Converter<T, U> convert)
		{
			// Array.ConvertAll performs exactly the element-wise mapping the
			// original hand-rolled loop did.
			return Array.ConvertAll(self, convert);
		}

		/// <summary>
		/// Checks to see if the elements of array1 is equal to the elements of array2.
		/// </summary>
		/// <typeparam name="T">The type of array</typeparam>
		/// <param name="array1">The first array to compare</param>
		/// <param name="array2">The second array to compare</param>
		/// <returns>
		/// True if all the elements of <c>array1</c> equal the corresponding
		/// elements of <c>array2</c>; false if the arrays have unequal lengths
		/// or any corresponding elements differ.
		/// </returns>
		public static bool ElementsAreEqual<T>(this T[] array1, T[] array2)
		{
			if (array1.Length != array2.Length)
			{
				return false;
			}

			for (var i = 0; i < array1.Length; i++)
			{
				if (!Object.Equals(array1[i], array2[i]))
				{
					return false;
				}
			}

			return true;
		}

		/// <summary>
		/// Returns the index of the first element that contains any of the elements
		/// provided in <c>values</c>.
		/// </summary>
		/// <typeparam name="T">The type of array to work with</typeparam>
		/// <param name="array">The array to search</param>
		/// <param name="values">An array of values</param>
		/// <returns>The index of the element if found or -1</returns>
		public static int IndexOfAny<T>(this T[] array, params T[] values)
		{
			for (var i = 0; i < array.Length; i++)
			{
				if (Array.IndexOf<T>(values, array[i]) >= 0)
				{
					return i;
				}
			}

			return -1;
		}

		/// <summary>
		/// Returns the index of the first element that is accepted by the given
		/// predicate.
		/// </summary>
		/// <typeparam name="T">The type of array to work with</typeparam>
		/// <param name="array">The array to search</param>
		/// <param name="accept">The predicate that will validate if the right element is found</param>
		/// <returns>The index of the element if found or -1</returns>
		public static int IndexOfAny<T>(this T[] array, Predicate<T> accept)
		{
			// Array.FindIndex implements exactly this linear scan.
			return Array.FindIndex(array, accept);
		}

		/// <summary>
		/// Creates a new array that is the combination of the current array and the given
		/// enumeration of elements.
		/// </summary>
		/// <typeparam name="T">The type of array</typeparam>
		/// <param name="array1">The array whose elements will make up the start of the new array</param>
		/// <param name="enumerable">The enumerable whose elements will make up the second half of the new array</param>
		/// <returns>A new array</returns>
		public static T[] Combine<T>(this T[] array1, IEnumerable<T> enumerable)
		{
			return array1.Concat(enumerable).ToArray();
		}

		/// <summary>
		/// Creates a new array that is the combination of the current array and the given
		/// array of elements.
		/// </summary>
		/// <typeparam name="T">The type of array</typeparam>
		/// <param name="array1">The array whose elements will make up the start of the new array</param>
		/// <param name="array2">The array whose elements will make up the second half of the new array</param>
		/// <returns>A new array</returns>
		public static T[] Combine<T>(this T[] array1, params T[] array2)
		{
			var retval = new T[array1.Length + array2.Length];

			Array.Copy(array1, retval, array1.Length);
			Array.Copy(array2, 0, retval, array1.Length, array2.Length);

			return retval;
		}

		/// <summary>
		/// Creates a new array that is made up of the current array with a new element prepended.
		/// </summary>
		/// <typeparam name="T">The type of array</typeparam>
		/// <param name="array1">The array to create the new array from</param>
		/// <param name="obj1">The element to be prepended onto the new array</param>
		/// <returns>A new array</returns>
		public static T[] Prepend<T>(this T[] array1, T obj1)
		{
			var retval = new T[array1.Length + 1];

			retval[0] = obj1;
			Array.Copy(array1, 0, retval, 1, array1.Length);

			return retval;
		}

		/// <summary>
		/// Creates a new array that is made up of the current array with a new element appended.
		/// </summary>
		/// <typeparam name="T">The type of array</typeparam>
		/// <param name="array1">The array to create the new array from</param>
		/// <param name="obj1">The last element to be appended onto the new array</param>
		/// <returns>A new array</returns>
		public static T[] Append<T>(this T[] array1, T obj1)
		{
			var retval = new T[array1.Length + 1];

			Array.Copy(array1, 0, retval, 0, array1.Length);
			retval[array1.Length] = obj1;

			return retval;
		}

		/// <summary>
		/// Creates a new array from elements of another array.
		/// </summary>
		/// <typeparam name="T">The type of array to create</typeparam>
		/// <param name="array">The array whose elements will make up the new array</param>
		/// <param name="offset">
		/// The offset to the element in <c>array</c> that will make up the first
		/// element of the new array
		/// </param>
		/// <param name="length">The length of the new array</param>
		/// <returns>A new array</returns>
		public static T[] NewArray<T>(this T[] array, int offset, int length)
		{
			var retval = new T[length];

			// BUGFIX: the offset parameter was previously ignored
			// (Array.Copy(array, 0, ...)), so the slice always started at the
			// beginning of the source array, contradicting the documented contract.
			Array.Copy(array, offset, retval, 0, length);

			return retval;
		}

		/// <summary>
		/// Creates a new array from a generic <c>ICollection</c>.
		/// </summary>
		/// <typeparam name="T">The type of array to create</typeparam>
		/// <param name="collection">The collection to create the array from</param>
		/// <returns>A new array</returns>
		public static T[] NewArray<T>(this ICollection<T> collection)
		{
			// Equivalent to the old NewArray(collection, ConverterUtils<T, T>.NoConvert)
			// identity copy, without the extra delegate indirection.
			var retval = new T[collection.Count];

			collection.CopyTo(retval, 0);

			return retval;
		}

		/// <summary>
		/// Creates a new array from a generic <c>ICollection</c>; optionally converting
		/// the collection's elements.
		/// </summary>
		/// <typeparam name="T">The element type of the collection</typeparam>
		/// <typeparam name="D">The element type of the array to create</typeparam>
		/// <param name="collection">The collection to create the array from</param>
		/// <param name="convert">
		/// The converter used to convert elements from the collection type to the array type
		/// </param>
		/// <returns>A new array</returns>
		public static D[] NewArray<T, D>(ICollection<T> collection, Converter<T, D> convert)
		{
			var i = 0;
			var retval = new D[collection.Count];

			foreach (var value in collection)
			{
				retval[i++] = convert(value);
			}

			return retval;
		}

		/// <summary>
		/// Creates a new array from a generic <c>IEnumerable</c>.
		/// </summary>
		/// <typeparam name="T">The type of array to create</typeparam>
		/// <param name="enumerable">The enumerable to create the array from</param>
		/// <returns>A new array</returns>
		public static T[] NewArray<T>(IEnumerable<T> enumerable)
		{
			return enumerable.ToArray();
		}

		/// <summary>
		/// Creates a new array from a non-generic <c>IEnumerable</c>.
		/// </summary>
		/// <typeparam name="T">The type of array to create</typeparam>
		/// <param name="enumerable">The enumerable to create the array from</param>
		/// <returns>A new array</returns>
		public static T[] NewArray<T>(System.Collections.IEnumerable enumerable)
		{
			return enumerable.Cast<T>().ToArray();
		}
	}
}
//
//      Copyright (C) DataStax Inc.
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Linq;
using System.Threading;
using Cassandra.IntegrationTests.TestBase;
using Cassandra.Tests;

namespace Cassandra.IntegrationTests.TestClusterManagement
{
    /// <summary>
    /// Wrapper around the 'ccm' (Cassandra Cluster Manager) command line tool,
    /// used by the integration tests to create, start and tear down test clusters.
    /// </summary>
    public class CcmBridge : IDisposable
    {
        public DirectoryInfo CcmDir { get; private set; }
        public const int DefaultCmdTimeout = 90 * 1000;
        public const int StartCmdTimeout = 150 * 1000;
        public string Name { get; private set; }
        public string IpPrefix { get; private set; }

        public CcmBridge(string name, string ipPrefix)
        {
            Name = name;
            IpPrefix = ipPrefix;
            // Unique scratch directory for ccm artifacts, per bridge instance.
            CcmDir = Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()));
        }

        public void Dispose()
        {
            // Intentionally empty: cluster teardown is done explicitly via Remove().
        }

        /// <summary>
        /// Creates a new ccm cluster, optionally configured for SSL (requires
        /// keystore.jks and cassandra.crt under ~/ssl).
        /// </summary>
        public void Create(string version, bool useSsl)
        {
            var sslParams = "";
            if (useSsl)
            {
                var sslPath = Path.Combine(TestHelper.GetHomePath(), "ssl");
                if (!File.Exists(Path.Combine(sslPath, "keystore.jks")))
                {
                    // BUGFIX: error message previously read "you must provide have the ...".
                    throw new Exception(string.Format("In order to use SSL with CCM you must have the keystore.jks and cassandra.crt files located in your {0} folder", sslPath));
                }
                sslParams = "--ssl " + sslPath;
            }
            ExecuteCcm(string.Format("create {0} -i {1} -v {2} {3}", Name, IpPrefix, version, sslParams));
        }

        /// <summary>
        /// Starts all nodes in the cluster, waiting for the binary protocol to
        /// come up; optional JVM arguments are forwarded to each node.
        /// </summary>
        public void Start(string[] jvmArgs)
        {
            var parameters = new List<string>
            {
                "start",
                "--wait-for-binary-proto"
            };
            if (TestUtils.IsWin)
            {
                parameters.Add("--quiet-windows");
            }
            if (jvmArgs != null)
            {
                foreach (var arg in jvmArgs)
                {
                    parameters.Add("--jvm_arg");
                    parameters.Add(arg);
                }
            }
            ExecuteCcm(string.Join(" ", parameters), StartCmdTimeout);
        }

        /// <summary>
        /// Populates the cluster with nodes: dc1NodeLength in the first data
        /// center and, when greater than zero, dc2NodeLength in a second one.
        /// </summary>
        public void Populate(int dc1NodeLength, int dc2NodeLength, bool useVNodes)
        {
            var parameters = new List<string>
            {
                "populate",
                "-n",
                dc1NodeLength + (dc2NodeLength > 0 ? ":" + dc2NodeLength : null)
            };
            if (useVNodes)
            {
                parameters.Add("--vnodes");
            }
            ExecuteCcm(string.Join(" ", parameters));
        }

        /// <summary>
        /// Makes this cluster the active ccm cluster (process errors tolerated).
        /// </summary>
        public void SwitchToThis()
        {
            string switchCmd = "switch " + Name;
            ExecuteCcm(switchCmd, DefaultCmdTimeout, false);
        }

        public void List()
        {
            ExecuteCcm("list");
        }

        public void Stop()
        {
            ExecuteCcm("stop");
        }

        public void StopForce()
        {
            ExecuteCcm("stop --not-gently");
        }

        /// <summary>
        /// Starts a single node, waiting for the binary protocol to come up.
        /// </summary>
        public void Start(int n, string additionalArgs = null)
        {
            string quietWindows = null;
            if (TestUtils.IsWin)
            {
                quietWindows = "--quiet-windows";
            }
            ExecuteCcm(string.Format("node{0} start --wait-for-binary-proto {1} {2}", n, additionalArgs, quietWindows));
        }

        public void Stop(int n)
        {
            ExecuteCcm(string.Format("node{0} stop", n));
        }

        public void StopForce(int n)
        {
            ExecuteCcm(string.Format("node{0} stop --not-gently", n));
        }

        public void Remove()
        {
            ExecuteCcm("remove");
        }

        public void Remove(int nodeId)
        {
            ExecuteCcm(string.Format("node{0} remove", nodeId));
        }

        public void BootstrapNode(int n)
        {
            BootstrapNode(n, null);
        }

        /// <summary>
        /// Adds node n (ip = IpPrefix + n, storage port 7000 + 100*n),
        /// optionally in the given data center, and starts it.
        /// </summary>
        public void BootstrapNode(int n, string dc)
        {
            ExecuteCcm(string.Format("add node{0} -i {1}{2} -j {3} -b -s {4}", n, IpPrefix, n, 7000 + 100 * n, dc != null ? "-d " + dc : null));
            Start(n);
        }

        public void DecommissionNode(int n)
        {
            ExecuteCcm(string.Format("node{0} decommission", n));
        }

        /// <summary>
        /// Runs a ccm subcommand (via cmd.exe on Windows); optionally throws
        /// when the process exits with a non-zero code.
        /// </summary>
        public static ProcessOutput ExecuteCcm(string args, int timeout = DefaultCmdTimeout, bool throwOnProcessError = true)
        {
            var executable = "/usr/local/bin/ccm";
            if (TestUtils.IsWin)
            {
                executable = "cmd.exe";
                args = "/c ccm " + args;
            }
            Trace.TraceInformation(executable + " " + args);
            var output = ExecuteProcess(executable, args, timeout);
            if (throwOnProcessError)
            {
                ValidateOutput(output);
            }
            return output;
        }

        private static void ValidateOutput(ProcessOutput output)
        {
            if (output.ExitCode != 0)
            {
                throw new TestInfrastructureException($"Process exited in error {output}");
            }
        }

        /// <summary>
        /// Spawns a new process (platform independent), capturing stdout and
        /// stderr into a single combined buffer.
        /// </summary>
        public static ProcessOutput ExecuteProcess(string processName, string args, int timeout = DefaultCmdTimeout)
        {
            var output = new ProcessOutput();
            using (var process = new Process())
            {
                process.StartInfo.FileName = processName;
                process.StartInfo.Arguments = args;
                process.StartInfo.RedirectStandardOutput = true;
                process.StartInfo.RedirectStandardError = true;
                //Hide the python window if possible
                process.StartInfo.UseShellExecute = false;
                process.StartInfo.CreateNoWindow = true;
#if !NETCORE
                process.StartInfo.WindowStyle = ProcessWindowStyle.Hidden;
#endif

                using (var outputWaitHandle = new AutoResetEvent(false))
                using (var errorWaitHandle = new AutoResetEvent(false))
                {
                    // A null Data event signals end of stream; both stdout and
                    // stderr are folded into the same OutputText buffer.
                    process.OutputDataReceived += (sender, e) =>
                    {
                        if (e.Data == null)
                        {
                            try
                            {
                                outputWaitHandle.Set();
                            }
                            catch
                            {
                                //probably is already disposed
                            }
                        }
                        else
                        {
                            output.OutputText.AppendLine(e.Data);
                        }
                    };
                    process.ErrorDataReceived += (sender, e) =>
                    {
                        if (e.Data == null)
                        {
                            try
                            {
                                errorWaitHandle.Set();
                            }
                            catch
                            {
                                //probably is already disposed
                            }
                        }
                        else
                        {
                            output.OutputText.AppendLine(e.Data);
                        }
                    };

                    process.Start();
                    process.BeginOutputReadLine();
                    process.BeginErrorReadLine();

                    if (process.WaitForExit(timeout) &&
                        outputWaitHandle.WaitOne(timeout) &&
                        errorWaitHandle.WaitOne(timeout))
                    {
                        // Process completed.
                        output.ExitCode = process.ExitCode;
                    }
                    else
                    {
                        // Timed out.
                        output.ExitCode = -1;
                        // BUGFIX: a timed-out process was previously left running
                        // and leaked; make a best-effort attempt to terminate it.
                        try
                        {
                            process.Kill();
                        }
                        catch
                        {
                            // Already exited or cannot be killed; best-effort only.
                        }
                    }
                }
            }
            return output;
        }
    }
}
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.IO;
using DotSpatial.Data;
using DotSpatial.Symbology;
using NetTopologySuite.Geometries;

namespace DotSpatial.Controls
{
    /// <summary>
    /// This is a specialized FeatureLayer that specifically handles raster drawing.
    /// </summary>
    public class MapRasterLayer : RasterLayer, IMapRasterLayer
    {
        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="MapRasterLayer"/> class from the specified fileName.
        /// </summary>
        /// <param name="fileName">Filename of the corresponding raster file.</param>
        /// <param name="symbolizer">Symbolizer used for drawing the raster data.</param>
        public MapRasterLayer(string fileName, IRasterSymbolizer symbolizer)
            : base(fileName, symbolizer)
        {
            LegendText = Path.GetFileNameWithoutExtension(fileName);

            // Cast to long before multiplying so a large raster cannot overflow Int32.
            if ((long)DataSet.NumRows * DataSet.NumColumns > MaxCellsInMemory)
            {
                // Too large for an in-memory bitmap: reuse an existing pyramid image (.mwi/.mwh pair) or build one.
                string pyrFile = Path.ChangeExtension(fileName, ".mwi");
                BitmapGetter = File.Exists(pyrFile) && File.Exists(Path.ChangeExtension(pyrFile, ".mwh"))
                                   ? new PyramidImage(pyrFile)
                                   : CreatePyramidImage(pyrFile, DataManager.DefaultDataManager.ProgressHandler);
            }
            else
            {
                Bitmap bmp = new Bitmap(DataSet.NumColumns, DataSet.NumRows);
                symbolizer.Raster = DataSet;
                DataSet.DrawToBitmap(symbolizer, bmp);
                BitmapGetter = new InRamImage(bmp) { Bounds = DataSet.Bounds };
            }
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="MapRasterLayer"/> class and the specified image data to use for rendering it.
        /// </summary>
        /// <param name="baseRaster">Raster used as data for the layer.</param>
        /// <param name="baseImage">ImageData used for rendering.</param>
        public MapRasterLayer(IRaster baseRaster, ImageData baseImage)
            : base(baseRaster)
        {
            LegendText = Path.GetFileNameWithoutExtension(baseRaster.Filename);
            BitmapGetter = baseImage;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="MapRasterLayer"/> class, and will create a "FallLeaves" image based on the raster values.
        /// </summary>
        /// <param name="raster">The raster to use.</param>
        public MapRasterLayer(IRaster raster)
            : base(raster)
        {
            LegendText = Path.GetFileNameWithoutExtension(raster.Filename);

            // Cast to long before multiplying so a large raster cannot overflow Int32.
            if ((long)raster.NumRows * raster.NumColumns > MaxCellsInMemory)
            {
                // For huge images, assume that GDAL or something was needed anyway,
                // and we would rather avoid having to re-create the pyramids if there is any chance
                // that the old values will work ok.
                string pyrFile = Path.ChangeExtension(raster.Filename, ".mwi");
                BitmapGetter = File.Exists(pyrFile) && File.Exists(Path.ChangeExtension(pyrFile, ".mwh"))
                                   ? new PyramidImage(pyrFile)
                                   : CreatePyramidImage(pyrFile, DataManager.DefaultDataManager.ProgressHandler);
            }
            else
            {
                // Ensure smaller images match the scheme.
                Bitmap bmp = new Bitmap(raster.NumColumns, raster.NumRows);
                raster.PaintColorSchemeToBitmap(Symbolizer, bmp, raster.ProgressHandler);
                BitmapGetter = new InRamImage(bmp) { Bounds = { AffineCoefficients = raster.Bounds.AffineCoefficients } };
            }
        }

        #endregion

        #region Events

        /// <summary>
        /// Fires an event that indicates to the parent map-frame that it should first
        /// redraw the specified clip
        /// </summary>
        public event EventHandler<ClipArgs> BufferChanged;

        #endregion

        #region Properties

        /// <summary>
        /// Gets or sets the back buffer that will be drawn to as part of the initialization process.
        /// </summary>
        [Browsable(false)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        [ShallowCopy]
        public Image BackBuffer { get; set; }

        /// <summary>
        /// Gets or sets the current buffer.
        /// </summary>
        [Browsable(false)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        [ShallowCopy]
        public Image Buffer { get; set; }

        /// <summary>
        /// Gets or sets the geographic region represented by the buffer
        /// Calling Initialize will set this automatically.
        /// </summary>
        [Browsable(false)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        [ShallowCopy]
        public Envelope BufferEnvelope { get; set; }

        /// <summary>
        /// Gets or sets the rectangle in pixels to use as the back buffer.
        /// Calling Initialize will set this automatically.
        /// </summary>
        [Browsable(false)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        [ShallowCopy]
        public Rectangle BufferRectangle { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the image layer is initialized.
        /// </summary>
        [Browsable(false)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        public new bool IsInitialized { get; set; }

        #endregion

        #region Methods

        /// <summary>
        /// Call StartDrawing before using this.
        /// </summary>
        /// <param name="rectangles">The rectangular region in pixels to clear.</param>
        /// <param name="color">The color to use when clearing. Specifying transparent
        /// will replace content with transparent pixels.</param>
        public void Clear(List<Rectangle> rectangles, Color color)
        {
            if (BackBuffer == null) return;

            // using-blocks make disposal exception-safe (the original leaked on exception).
            using (Graphics g = Graphics.FromImage(BackBuffer))
            {
                foreach (Rectangle r in rectangles)
                {
                    if (r.IsEmpty) continue;

                    using (var region = new Region(r))
                    {
                        g.Clip = region; // the setter copies the region, so it is safe to dispose ours
                        g.Clear(color);
                    }
                }
            }
        }

        /// <summary>
        /// This will draw any features that intersect this region. To specify the features
        /// directly, use OnDrawFeatures. This will not clear existing buffer content.
        /// For that call Initialize instead.
        /// </summary>
        /// <param name="args">A GeoArgs clarifying the transformation from geographic to image space.</param>
        /// <param name="regions">The geographic regions to draw.</param>
        /// <param name="selected">Indicates whether to draw the normal colored features or the selection colored features. Because rasters can't be selected they won't be drawn if selected is true.</param>
        public void DrawRegions(MapArgs args, List<Extent> regions, bool selected)
        {
            if (selected) return;

            List<Rectangle> clipRects = args.ProjToPixel(regions);
            DrawWindows(args, regions, clipRects);
        }

        /// <summary>
        /// Indicates that the drawing process has been finalized and swaps the back buffer
        /// to the front buffer.
        /// </summary>
        public void FinishDrawing()
        {
            OnFinishDrawing();
            if (Buffer != null && Buffer != BackBuffer) Buffer.Dispose();
            Buffer = BackBuffer;
        }

        /// <summary>
        /// Copies any current content to the back buffer so that drawing should occur on the
        /// back buffer (instead of the fore-buffer). Calling draw methods without
        /// calling this may cause exceptions.
        /// </summary>
        /// <param name="preserve">Boolean, true if the front buffer content should be copied to the back buffer
        /// where drawing will be taking place.</param>
        public void StartDrawing(bool preserve)
        {
            Bitmap backBuffer = new Bitmap(BufferRectangle.Width, BufferRectangle.Height);
            if (Buffer?.Width == backBuffer.Width && Buffer.Height == backBuffer.Height)
            {
                if (preserve)
                {
                    // Fix: the Graphics object was previously never disposed (GDI handle leak).
                    using (Graphics g = Graphics.FromImage(backBuffer))
                    {
                        g.DrawImageUnscaled(Buffer, 0, 0);
                    }
                }
            }

            if (BackBuffer != null && BackBuffer != Buffer) BackBuffer.Dispose();
            BackBuffer = backBuffer;
            OnStartDrawing();
        }

        /// <inheritdoc />
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (Buffer != BackBuffer && Buffer != null)
                {
                    Buffer.Dispose();
                    Buffer = null;
                }

                if (BackBuffer != null)
                {
                    BackBuffer.Dispose();
                    BackBuffer = null;
                }

                BufferEnvelope = null;
                BufferRectangle = Rectangle.Empty;
                IsInitialized = false;
            }

            base.Dispose(disposing);
        }

        /// <summary>
        /// Fires the OnBufferChanged event.
        /// </summary>
        /// <param name="clipRectangles">The Rectangle in pixels.</param>
        protected virtual void OnBufferChanged(List<Rectangle> clipRectangles)
        {
            BufferChanged?.Invoke(this, new ClipArgs(clipRectangles));
        }

        /// <summary>
        /// Indicates that whatever drawing is going to occur has finished and the contents
        /// are about to be flipped forward to the front buffer.
        /// </summary>
        protected virtual void OnFinishDrawing()
        {
        }

        /// <summary>
        /// Occurs when a new drawing is started, but after the BackBuffer has been established.
        /// </summary>
        protected virtual void OnStartDrawing()
        {
        }

        /// <summary>
        /// This draws to the back buffer. If the back buffer doesn't exist, this will create one.
        /// This will not flip the back buffer to the front.
        /// </summary>
        /// <param name="args">The map arguments.</param>
        /// <param name="regions">The regions.</param>
        /// <param name="clipRectangles">The clip rectangles.</param>
        private void DrawWindows(MapArgs args, IList<Extent> regions, IList<Rectangle> clipRectangles)
        {
            Graphics g;
            if (args.Device != null)
            {
                g = args.Device; // A device on the MapArgs is optional, but overrides the normal buffering behaviors.
            }
            else
            {
                if (BackBuffer == null) BackBuffer = new Bitmap(BufferRectangle.Width, BufferRectangle.Height);
                g = Graphics.FromImage(BackBuffer);
            }

            int numBounds = Math.Min(regions.Count, clipRectangles.Count);
            for (int i = 0; i < numBounds; i++)
            {
                using (Bitmap bmp = BitmapGetter.GetBitmap(regions[i], clipRectangles[i]))
                {
                    if (bmp != null) g.DrawImage(bmp, new Rectangle(0, 0, clipRectangles[i].Width, clipRectangles[i].Height));
                }
            }

            // Only dispose the Graphics we created; a caller-supplied device stays open.
            if (args.Device == null) g.Dispose();
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using BlogTemplate._1.Models;
using Xunit;

namespace BlogTemplate._1.Tests.Fakes
{
    /// <summary>
    /// In-memory IFileSystem fake backed by a dictionary of MemoryStreams.
    /// Directories are tracked as a simple set of path strings.
    /// </summary>
    public class FakeFileSystem : IFileSystem
    {
        HashSet<string> _directories = new HashSet<string>();
        Dictionary<string, MemoryStream> _files = new Dictionary<string, MemoryStream>();

        /// <summary>Adds an empty file (and its parent directories).</summary>
        public void AddFile(string filePath)
        {
            AddFile(filePath, new byte[0]);
        }

        /// <summary>Adds a text file (and its parent directories).</summary>
        public void AddFile(string filePath, string content)
        {
            // TODO: Should we be using default encoding? If we use something specific, it should probably
            // be consistenly used by the data store to read the content correctly as well.
            byte[] contentBytes = Encoding.Default.GetBytes(content);
            AddFile(filePath, contentBytes);
        }

        /// <summary>Adds a binary file (and its parent directories).</summary>
        public void AddFile(string filePath, byte[] content)
        {
            // Use the capacity constructor and then Write so the stream stays expandable;
            // new MemoryStream(content) would create a fixed-size stream that later writes can't grow.
            _files.Add(filePath, new MemoryStream(content.Length));
            _files[filePath].Write(content, 0, content.Length);
            AddDirectory(Path.GetDirectoryName(filePath));
        }

        /// <summary>Registers a directory and all of its ancestors.</summary>
        public void AddDirectory(string path)
        {
            while (!string.IsNullOrEmpty(path))
            {
                _directories.Add(path);
                path = Path.GetDirectoryName(path);
            }
        }

        #region IFileSystem

        void IFileSystem.WriteFile(string path, byte[] data)
        {
            AddFile(path, data);
        }

        void IFileSystem.CreateDirectory(string path)
        {
            AddDirectory(path);
        }

        void IFileSystem.DeleteFile(string path)
        {
            if (!_files.ContainsKey(path))
            {
                throw new FileNotFoundException(path);
            }
            _files.Remove(path);
        }

        bool IFileSystem.DirectoryExists(string path)
        {
            return _directories.Contains(path);
        }

        IEnumerable<string> IFileSystem.EnumerateFiles(string directoryPath)
        {
            // Only direct children: the file's directory must equal the requested path.
            IEnumerable<string> filenames = _files.Keys.Where(key => string.Equals(Path.GetDirectoryName(key), directoryPath, StringComparison.OrdinalIgnoreCase));
            return filenames;
        }

        bool IFileSystem.FileExists(string path)
        {
            return _files.ContainsKey(path);
        }

        DateTime IFileSystem.GetFileLastWriteTime(string path)
        {
            // Fake behavior: always "now"; real timestamps are not tracked.
            return DateTime.UtcNow;
        }

        string IFileSystem.ReadFileText(string path)
        {
            if (!_files.ContainsKey(path))
            {
                throw new FileNotFoundException(path);
            }
            _files[path].Seek(0, SeekOrigin.Begin);
            // Intentionally not disposing the reader: that would close the backing MemoryStream.
            StreamReader reader = new StreamReader(_files[path]);
            return reader.ReadToEnd();
        }

        void IFileSystem.WriteFileText(string path, string text)
        {
            if (!_files.ContainsKey(path))
            {
                AddFile(path);
            }
            MemoryStream stream = _files[path];
            // Fix: truncate before writing. Previously only the position was reset, so writing
            // shorter text than the existing content left stale trailing bytes in the file.
            stream.SetLength(0);
            stream.Seek(0, SeekOrigin.Begin);
            StreamWriter writer = new StreamWriter(stream);
            writer.Write(text);
            writer.Flush();
        }

        void IFileSystem.AppendFile(string path, byte[] data)
        {
            ((IFileSystem)this).AppendFile(path, data, 0, data.Length);
        }

        void IFileSystem.AppendFile(string path, byte[] data, int offset, int count)
        {
            if (!_files.ContainsKey(path))
            {
                AddFile(path);
            }
            MemoryStream stream = _files[path];
            // Fix: seek to the end before writing. Previously this sought to the beginning,
            // so "append" overwrote the start of the existing content instead.
            stream.Seek(0, SeekOrigin.End);
            stream.Write(data, offset, count);
            stream.Flush();
        }

        #endregion

        #region Tests

        public class FakeFileSystemTests
        {
            [Fact]
            public void EmptyFileSystem_DirectoryExists_ReturnsFalse()
            {
                IFileSystem ut = new FakeFileSystem();
                Assert.False(ut.DirectoryExists("test"));
            }

            [Fact]
            public void EmptyFileSystem_FileExists_ReturnsFalse()
            {
                IFileSystem ut = new FakeFileSystem();
                Assert.False(ut.FileExists("test"));
            }

            [Fact]
            public void CreateDirectory_AddsParentDirectories()
            {
                IFileSystem ut = new FakeFileSystem();
                ut.CreateDirectory(@"test\path");
                Assert.True(ut.DirectoryExists(@"test\path"));
                Assert.True(ut.DirectoryExists(@"test"));
            }

            [Fact]
            public void AddFile_AddsParentDirectories()
            {
                IFileSystem ut = new FakeFileSystem();
                ((FakeFileSystem)ut).AddFile(@"test\path\file.txt");
                Assert.True(ut.FileExists(@"test\path\file.txt"));
                Assert.True(ut.DirectoryExists(@"test\path"));
                Assert.True(ut.DirectoryExists(@"test"));
            }

            [Fact]
            public void AddFile_CanRetrieveTextContent()
            {
                IFileSystem ut = new FakeFileSystem();
                string filePath = @"test\path\file.txt";
                ((FakeFileSystem)ut).AddFile(filePath, "sample content");
                Assert.Equal("sample content", ut.ReadFileText(filePath));
            }

            [Fact]
            public void EnumerateFiles_OnlyReturnsFilesInDirectory()
            {
                IFileSystem ut = new FakeFileSystem();
                ((FakeFileSystem)ut).AddFile(@"test\fail.txt");
                ((FakeFileSystem)ut).AddFile(@"test\path\subfolder\fail.txt");
                ((FakeFileSystem)ut).AddFile(@"test\path\file1.txt");
                ((FakeFileSystem)ut).AddFile(@"test\path\file2.txt");
                Assert.Equal(2, ut.EnumerateFiles(@"test\path").Count());
                Assert.False(ut.EnumerateFiles(@"test\path").Any(s => s.Equals(Path.GetFileName("fail.txt"), StringComparison.OrdinalIgnoreCase)));
            }

            [Fact]
            public void WriteFileText_IfFileDoesNotExit_CreatesNewFile()
            {
                IFileSystem ut = new FakeFileSystem();
                string filePath = @"test\path\file.txt";
                ut.WriteFileText(filePath, "test");
                Assert.True(ut.FileExists(filePath));
                Assert.Equal("test", ut.ReadFileText(filePath));
            }

            [Fact]
            public void WriteFileText_OverwritesAndTruncatesExistingContent()
            {
                IFileSystem ut = new FakeFileSystem();
                string filePath = @"test\path\file.txt";
                ut.WriteFileText(filePath, "longer content");
                ut.WriteFileText(filePath, "short");
                Assert.Equal("short", ut.ReadFileText(filePath));
            }

            [Fact]
            public void AppendFile_AppendsToEndOfExistingContent()
            {
                IFileSystem ut = new FakeFileSystem();
                string filePath = @"test\path\file.txt";
                ((FakeFileSystem)ut).AddFile(filePath, "abc");
                ut.AppendFile(filePath, Encoding.Default.GetBytes("def"));
                Assert.Equal("abcdef", ut.ReadFileText(filePath));
            }
        }

        #endregion
    }
}
using System; using System.Collections; using System.Collections.Generic; using System.Globalization; using System.Runtime.InteropServices; using System.Runtime.Serialization; using GlmSharp.Swizzle; // ReSharper disable InconsistentNaming namespace GlmSharp { /// <summary> /// A matrix of type int with 4 columns and 2 rows. /// </summary> [Serializable] [StructLayout(LayoutKind.Sequential)] public struct imat4x2 : IEnumerable<int>, IEquatable<imat4x2> { #region Fields /// <summary> /// Column 0, Rows 0 /// </summary> public int m00; /// <summary> /// Column 0, Rows 1 /// </summary> public int m01; /// <summary> /// Column 1, Rows 0 /// </summary> public int m10; /// <summary> /// Column 1, Rows 1 /// </summary> public int m11; /// <summary> /// Column 2, Rows 0 /// </summary> public int m20; /// <summary> /// Column 2, Rows 1 /// </summary> public int m21; /// <summary> /// Column 3, Rows 0 /// </summary> public int m30; /// <summary> /// Column 3, Rows 1 /// </summary> public int m31; #endregion #region Constructors /// <summary> /// Component-wise constructor /// </summary> public imat4x2(int m00, int m01, int m10, int m11, int m20, int m21, int m30, int m31) { this.m00 = m00; this.m01 = m01; this.m10 = m10; this.m11 = m11; this.m20 = m20; this.m21 = m21; this.m30 = m30; this.m31 = m31; } /// <summary> /// Constructs this matrix from a imat2. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat2 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = 0; this.m21 = 0; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a imat3x2. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat3x2 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = m.m20; this.m21 = m.m21; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a imat4x2. Non-overwritten fields are from an Identity matrix. 
/// </summary> public imat4x2(imat4x2 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = m.m20; this.m21 = m.m21; this.m30 = m.m30; this.m31 = m.m31; } /// <summary> /// Constructs this matrix from a imat2x3. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat2x3 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = 0; this.m21 = 0; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a imat3. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat3 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = m.m20; this.m21 = m.m21; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a imat4x3. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat4x3 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = m.m20; this.m21 = m.m21; this.m30 = m.m30; this.m31 = m.m31; } /// <summary> /// Constructs this matrix from a imat2x4. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat2x4 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = 0; this.m21 = 0; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a imat3x4. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat3x4 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = m.m20; this.m21 = m.m21; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a imat4. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(imat4 m) { this.m00 = m.m00; this.m01 = m.m01; this.m10 = m.m10; this.m11 = m.m11; this.m20 = m.m20; this.m21 = m.m21; this.m30 = m.m30; this.m31 = m.m31; } /// <summary> /// Constructs this matrix from a series of column vectors. 
Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(ivec2 c0, ivec2 c1) { this.m00 = c0.x; this.m01 = c0.y; this.m10 = c1.x; this.m11 = c1.y; this.m20 = 0; this.m21 = 0; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(ivec2 c0, ivec2 c1, ivec2 c2) { this.m00 = c0.x; this.m01 = c0.y; this.m10 = c1.x; this.m11 = c1.y; this.m20 = c2.x; this.m21 = c2.y; this.m30 = 0; this.m31 = 0; } /// <summary> /// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix. /// </summary> public imat4x2(ivec2 c0, ivec2 c1, ivec2 c2, ivec2 c3) { this.m00 = c0.x; this.m01 = c0.y; this.m10 = c1.x; this.m11 = c1.y; this.m20 = c2.x; this.m21 = c2.y; this.m30 = c3.x; this.m31 = c3.y; } #endregion #region Properties /// <summary> /// Creates a 2D array with all values (address: Values[x, y]) /// </summary> public int[,] Values => new[,] { { m00, m01 }, { m10, m11 }, { m20, m21 }, { m30, m31 } }; /// <summary> /// Creates a 1D array with all values (internal order) /// </summary> public int[] Values1D => new[] { m00, m01, m10, m11, m20, m21, m30, m31 }; /// <summary> /// Gets or sets the column nr 0 /// </summary> public ivec2 Column0 { get { return new ivec2(m00, m01); } set { m00 = value.x; m01 = value.y; } } /// <summary> /// Gets or sets the column nr 1 /// </summary> public ivec2 Column1 { get { return new ivec2(m10, m11); } set { m10 = value.x; m11 = value.y; } } /// <summary> /// Gets or sets the column nr 2 /// </summary> public ivec2 Column2 { get { return new ivec2(m20, m21); } set { m20 = value.x; m21 = value.y; } } /// <summary> /// Gets or sets the column nr 3 /// </summary> public ivec2 Column3 { get { return new ivec2(m30, m31); } set { m30 = value.x; m31 = value.y; } } /// <summary> /// Gets or sets the row nr 0 /// </summary> public ivec4 Row0 { get { return new 
ivec4(m00, m10, m20, m30); } set { m00 = value.x; m10 = value.y; m20 = value.z; m30 = value.w; } } /// <summary> /// Gets or sets the row nr 1 /// </summary> public ivec4 Row1 { get { return new ivec4(m01, m11, m21, m31); } set { m01 = value.x; m11 = value.y; m21 = value.z; m31 = value.w; } } #endregion #region Static Properties /// <summary> /// Predefined all-zero matrix /// </summary> public static imat4x2 Zero { get; } = new imat4x2(0, 0, 0, 0, 0, 0, 0, 0); /// <summary> /// Predefined all-ones matrix /// </summary> public static imat4x2 Ones { get; } = new imat4x2(1, 1, 1, 1, 1, 1, 1, 1); /// <summary> /// Predefined identity matrix /// </summary> public static imat4x2 Identity { get; } = new imat4x2(1, 0, 0, 1, 0, 0, 0, 0); /// <summary> /// Predefined all-MaxValue matrix /// </summary> public static imat4x2 AllMaxValue { get; } = new imat4x2(int.MaxValue, int.MaxValue, int.MaxValue, int.MaxValue, int.MaxValue, int.MaxValue, int.MaxValue, int.MaxValue); /// <summary> /// Predefined diagonal-MaxValue matrix /// </summary> public static imat4x2 DiagonalMaxValue { get; } = new imat4x2(int.MaxValue, 0, 0, int.MaxValue, 0, 0, 0, 0); /// <summary> /// Predefined all-MinValue matrix /// </summary> public static imat4x2 AllMinValue { get; } = new imat4x2(int.MinValue, int.MinValue, int.MinValue, int.MinValue, int.MinValue, int.MinValue, int.MinValue, int.MinValue); /// <summary> /// Predefined diagonal-MinValue matrix /// </summary> public static imat4x2 DiagonalMinValue { get; } = new imat4x2(int.MinValue, 0, 0, int.MinValue, 0, 0, 0, 0); #endregion #region Functions /// <summary> /// Returns an enumerator that iterates through all fields. /// </summary> public IEnumerator<int> GetEnumerator() { yield return m00; yield return m01; yield return m10; yield return m11; yield return m20; yield return m21; yield return m30; yield return m31; } /// <summary> /// Returns an enumerator that iterates through all fields. 
/// </summary> IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); #endregion /// <summary> /// Returns the number of Fields (4 x 2 = 8). /// </summary> public int Count => 8; /// <summary> /// Gets/Sets a specific indexed component (a bit slower than direct access). /// </summary> public int this[int fieldIndex] { get { switch (fieldIndex) { case 0: return m00; case 1: return m01; case 2: return m10; case 3: return m11; case 4: return m20; case 5: return m21; case 6: return m30; case 7: return m31; default: throw new ArgumentOutOfRangeException("fieldIndex"); } } set { switch (fieldIndex) { case 0: this.m00 = value; break; case 1: this.m01 = value; break; case 2: this.m10 = value; break; case 3: this.m11 = value; break; case 4: this.m20 = value; break; case 5: this.m21 = value; break; case 6: this.m30 = value; break; case 7: this.m31 = value; break; default: throw new ArgumentOutOfRangeException("fieldIndex"); } } } /// <summary> /// Gets/Sets a specific 2D-indexed component (a bit slower than direct access). /// </summary> public int this[int col, int row] { get { return this[col * 2 + row]; } set { this[col * 2 + row] = value; } } /// <summary> /// Returns true iff this equals rhs component-wise. /// </summary> public bool Equals(imat4x2 rhs) => (((m00.Equals(rhs.m00) && m01.Equals(rhs.m01)) && (m10.Equals(rhs.m10) && m11.Equals(rhs.m11))) && ((m20.Equals(rhs.m20) && m21.Equals(rhs.m21)) && (m30.Equals(rhs.m30) && m31.Equals(rhs.m31)))); /// <summary> /// Returns true iff this equals rhs type- and component-wise. /// </summary> public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; return obj is imat4x2 && Equals((imat4x2) obj); } /// <summary> /// Returns true iff this equals rhs component-wise. /// </summary> public static bool operator ==(imat4x2 lhs, imat4x2 rhs) => lhs.Equals(rhs); /// <summary> /// Returns true iff this does not equal rhs (component-wise). 
/// </summary> public static bool operator !=(imat4x2 lhs, imat4x2 rhs) => !lhs.Equals(rhs); /// <summary> /// Returns a hash code for this instance. /// </summary> public override int GetHashCode() { unchecked { return ((((((((((((((m00.GetHashCode()) * 397) ^ m01.GetHashCode()) * 397) ^ m10.GetHashCode()) * 397) ^ m11.GetHashCode()) * 397) ^ m20.GetHashCode()) * 397) ^ m21.GetHashCode()) * 397) ^ m30.GetHashCode()) * 397) ^ m31.GetHashCode(); } } /// <summary> /// Returns a transposed version of this matrix. /// </summary> public imat2x4 Transposed => new imat2x4(m00, m10, m20, m30, m01, m11, m21, m31); /// <summary> /// Returns the minimal component of this matrix. /// </summary> public int MinElement => Math.Min(Math.Min(Math.Min(Math.Min(Math.Min(Math.Min(Math.Min(m00, m01), m10), m11), m20), m21), m30), m31); /// <summary> /// Returns the maximal component of this matrix. /// </summary> public int MaxElement => Math.Max(Math.Max(Math.Max(Math.Max(Math.Max(Math.Max(Math.Max(m00, m01), m10), m11), m20), m21), m30), m31); /// <summary> /// Returns the euclidean length of this matrix. /// </summary> public float Length => (float)Math.Sqrt((((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31)))); /// <summary> /// Returns the squared euclidean length of this matrix. /// </summary> public float LengthSqr => (((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31))); /// <summary> /// Returns the sum of all fields. /// </summary> public int Sum => (((m00 + m01) + (m10 + m11)) + ((m20 + m21) + (m30 + m31))); /// <summary> /// Returns the euclidean norm of this matrix. /// </summary> public float Norm => (float)Math.Sqrt((((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31)))); /// <summary> /// Returns the one-norm of this matrix. 
/// </summary> public float Norm1 => (((Math.Abs(m00) + Math.Abs(m01)) + (Math.Abs(m10) + Math.Abs(m11))) + ((Math.Abs(m20) + Math.Abs(m21)) + (Math.Abs(m30) + Math.Abs(m31)))); /// <summary> /// Returns the two-norm of this matrix. /// </summary> public float Norm2 => (float)Math.Sqrt((((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31)))); /// <summary> /// Returns the max-norm of this matrix. /// </summary> public int NormMax => Math.Max(Math.Max(Math.Max(Math.Max(Math.Max(Math.Max(Math.Max(Math.Abs(m00), Math.Abs(m01)), Math.Abs(m10)), Math.Abs(m11)), Math.Abs(m20)), Math.Abs(m21)), Math.Abs(m30)), Math.Abs(m31)); /// <summary> /// Returns the p-norm of this matrix. /// </summary> public double NormP(double p) => Math.Pow((((Math.Pow((double)Math.Abs(m00), p) + Math.Pow((double)Math.Abs(m01), p)) + (Math.Pow((double)Math.Abs(m10), p) + Math.Pow((double)Math.Abs(m11), p))) + ((Math.Pow((double)Math.Abs(m20), p) + Math.Pow((double)Math.Abs(m21), p)) + (Math.Pow((double)Math.Abs(m30), p) + Math.Pow((double)Math.Abs(m31), p)))), 1 / p); /// <summary> /// Executes a matrix-matrix-multiplication imat4x2 * imat2x4 -> imat2. /// </summary> public static imat2 operator*(imat4x2 lhs, imat2x4 rhs) => new imat2(((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01) + (lhs.m20 * rhs.m02 + lhs.m30 * rhs.m03)), ((lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01) + (lhs.m21 * rhs.m02 + lhs.m31 * rhs.m03)), ((lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11) + (lhs.m20 * rhs.m12 + lhs.m30 * rhs.m13)), ((lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11) + (lhs.m21 * rhs.m12 + lhs.m31 * rhs.m13))); /// <summary> /// Executes a matrix-matrix-multiplication imat4x2 * imat3x4 -> imat3x2. 
/// </summary> public static imat3x2 operator*(imat4x2 lhs, imat3x4 rhs) => new imat3x2(((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01) + (lhs.m20 * rhs.m02 + lhs.m30 * rhs.m03)), ((lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01) + (lhs.m21 * rhs.m02 + lhs.m31 * rhs.m03)), ((lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11) + (lhs.m20 * rhs.m12 + lhs.m30 * rhs.m13)), ((lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11) + (lhs.m21 * rhs.m12 + lhs.m31 * rhs.m13)), ((lhs.m00 * rhs.m20 + lhs.m10 * rhs.m21) + (lhs.m20 * rhs.m22 + lhs.m30 * rhs.m23)), ((lhs.m01 * rhs.m20 + lhs.m11 * rhs.m21) + (lhs.m21 * rhs.m22 + lhs.m31 * rhs.m23))); /// <summary> /// Executes a matrix-matrix-multiplication imat4x2 * imat4 -> imat4x2. /// </summary> public static imat4x2 operator*(imat4x2 lhs, imat4 rhs) => new imat4x2(((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01) + (lhs.m20 * rhs.m02 + lhs.m30 * rhs.m03)), ((lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01) + (lhs.m21 * rhs.m02 + lhs.m31 * rhs.m03)), ((lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11) + (lhs.m20 * rhs.m12 + lhs.m30 * rhs.m13)), ((lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11) + (lhs.m21 * rhs.m12 + lhs.m31 * rhs.m13)), ((lhs.m00 * rhs.m20 + lhs.m10 * rhs.m21) + (lhs.m20 * rhs.m22 + lhs.m30 * rhs.m23)), ((lhs.m01 * rhs.m20 + lhs.m11 * rhs.m21) + (lhs.m21 * rhs.m22 + lhs.m31 * rhs.m23)), ((lhs.m00 * rhs.m30 + lhs.m10 * rhs.m31) + (lhs.m20 * rhs.m32 + lhs.m30 * rhs.m33)), ((lhs.m01 * rhs.m30 + lhs.m11 * rhs.m31) + (lhs.m21 * rhs.m32 + lhs.m31 * rhs.m33))); /// <summary> /// Executes a matrix-vector-multiplication. /// </summary> public static ivec2 operator*(imat4x2 m, ivec4 v) => new ivec2(((m.m00 * v.x + m.m10 * v.y) + (m.m20 * v.z + m.m30 * v.w)), ((m.m01 * v.x + m.m11 * v.y) + (m.m21 * v.z + m.m31 * v.w))); /// <summary> /// Executes a component-wise * (multiply). 
/// </summary>
// NOTE: the members below appear to be generated, GlmSharp-style component-wise
// matrix members (component order m00, m01, m10, m11, m20, m21, m30, m31).
// If this file is produced by a generator, prefer editing the generator.
public static imat4x2 CompMul(imat4x2 A, imat4x2 B) => new imat4x2(
    A.m00 * B.m00, A.m01 * B.m01,
    A.m10 * B.m10, A.m11 * B.m11,
    A.m20 * B.m20, A.m21 * B.m21,
    A.m30 * B.m30, A.m31 * B.m31);

/// <summary>Component-wise division of two matrices.</summary>
public static imat4x2 CompDiv(imat4x2 A, imat4x2 B) => new imat4x2(
    A.m00 / B.m00, A.m01 / B.m01,
    A.m10 / B.m10, A.m11 / B.m11,
    A.m20 / B.m20, A.m21 / B.m21,
    A.m30 / B.m30, A.m31 / B.m31);

/// <summary>Component-wise addition of two matrices.</summary>
public static imat4x2 CompAdd(imat4x2 A, imat4x2 B) => new imat4x2(
    A.m00 + B.m00, A.m01 + B.m01,
    A.m10 + B.m10, A.m11 + B.m11,
    A.m20 + B.m20, A.m21 + B.m21,
    A.m30 + B.m30, A.m31 + B.m31);

/// <summary>Component-wise subtraction of two matrices.</summary>
public static imat4x2 CompSub(imat4x2 A, imat4x2 B) => new imat4x2(
    A.m00 - B.m00, A.m01 - B.m01,
    A.m10 - B.m10, A.m11 - B.m11,
    A.m20 - B.m20, A.m21 - B.m21,
    A.m30 - B.m30, A.m31 - B.m31);

/// <summary>Component-wise matrix + matrix addition.</summary>
public static imat4x2 operator+(imat4x2 lhs, imat4x2 rhs) => new imat4x2(
    lhs.m00 + rhs.m00, lhs.m01 + rhs.m01,
    lhs.m10 + rhs.m10, lhs.m11 + rhs.m11,
    lhs.m20 + rhs.m20, lhs.m21 + rhs.m21,
    lhs.m30 + rhs.m30, lhs.m31 + rhs.m31);

/// <summary>Adds a scalar to every component.</summary>
public static imat4x2 operator+(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 + rhs, lhs.m01 + rhs,
    lhs.m10 + rhs, lhs.m11 + rhs,
    lhs.m20 + rhs, lhs.m21 + rhs,
    lhs.m30 + rhs, lhs.m31 + rhs);

/// <summary>Adds every component to a scalar.</summary>
public static imat4x2 operator+(int lhs, imat4x2 rhs) => new imat4x2(
    lhs + rhs.m00, lhs + rhs.m01,
    lhs + rhs.m10, lhs + rhs.m11,
    lhs + rhs.m20, lhs + rhs.m21,
    lhs + rhs.m30, lhs + rhs.m31);

/// <summary>Component-wise matrix - matrix subtraction.</summary>
public static imat4x2 operator-(imat4x2 lhs, imat4x2 rhs) => new imat4x2(
    lhs.m00 - rhs.m00, lhs.m01 - rhs.m01,
    lhs.m10 - rhs.m10, lhs.m11 - rhs.m11,
    lhs.m20 - rhs.m20, lhs.m21 - rhs.m21,
    lhs.m30 - rhs.m30, lhs.m31 - rhs.m31);

/// <summary>Subtracts a scalar from every component.</summary>
public static imat4x2 operator-(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 - rhs, lhs.m01 - rhs,
    lhs.m10 - rhs, lhs.m11 - rhs,
    lhs.m20 - rhs, lhs.m21 - rhs,
    lhs.m30 - rhs, lhs.m31 - rhs);

/// <summary>Subtracts every component from a scalar.</summary>
public static imat4x2 operator-(int lhs, imat4x2 rhs) => new imat4x2(
    lhs - rhs.m00, lhs - rhs.m01,
    lhs - rhs.m10, lhs - rhs.m11,
    lhs - rhs.m20, lhs - rhs.m21,
    lhs - rhs.m30, lhs - rhs.m31);

/// <summary>Divides every component by a scalar.</summary>
public static imat4x2 operator/(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 / rhs, lhs.m01 / rhs,
    lhs.m10 / rhs, lhs.m11 / rhs,
    lhs.m20 / rhs, lhs.m21 / rhs,
    lhs.m30 / rhs, lhs.m31 / rhs);

/// <summary>Divides a scalar by every component.</summary>
public static imat4x2 operator/(int lhs, imat4x2 rhs) => new imat4x2(
    lhs / rhs.m00, lhs / rhs.m01,
    lhs / rhs.m10, lhs / rhs.m11,
    lhs / rhs.m20, lhs / rhs.m21,
    lhs / rhs.m30, lhs / rhs.m31);

/// <summary>Multiplies every component by a scalar.</summary>
public static imat4x2 operator*(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 * rhs, lhs.m01 * rhs,
    lhs.m10 * rhs, lhs.m11 * rhs,
    lhs.m20 * rhs, lhs.m21 * rhs,
    lhs.m30 * rhs, lhs.m31 * rhs);

/// <summary>Multiplies a scalar by every component.</summary>
public static imat4x2 operator*(int lhs, imat4x2 rhs) => new imat4x2(
    lhs * rhs.m00, lhs * rhs.m01,
    lhs * rhs.m10, lhs * rhs.m11,
    lhs * rhs.m20, lhs * rhs.m21,
    lhs * rhs.m30, lhs * rhs.m31);

/// <summary>Component-wise modulo of two matrices.</summary>
public static imat4x2 operator%(imat4x2 lhs, imat4x2 rhs) => new imat4x2(
    lhs.m00 % rhs.m00, lhs.m01 % rhs.m01,
    lhs.m10 % rhs.m10, lhs.m11 % rhs.m11,
    lhs.m20 % rhs.m20, lhs.m21 % rhs.m21,
    lhs.m30 % rhs.m30, lhs.m31 % rhs.m31);

/// <summary>Component-wise modulo by a scalar.</summary>
public static imat4x2 operator%(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 % rhs, lhs.m01 % rhs,
    lhs.m10 % rhs, lhs.m11 % rhs,
    lhs.m20 % rhs, lhs.m21 % rhs,
    lhs.m30 % rhs, lhs.m31 % rhs);

/// <summary>Scalar modulo by every component.</summary>
public static imat4x2 operator%(int lhs, imat4x2 rhs) => new imat4x2(
    lhs % rhs.m00, lhs % rhs.m01,
    lhs % rhs.m10, lhs % rhs.m11,
    lhs % rhs.m20, lhs % rhs.m21,
    lhs % rhs.m30, lhs % rhs.m31);

/// <summary>Component-wise bitwise xor of two matrices.</summary>
public static imat4x2 operator^(imat4x2 lhs, imat4x2 rhs) => new imat4x2(
    lhs.m00 ^ rhs.m00, lhs.m01 ^ rhs.m01,
    lhs.m10 ^ rhs.m10, lhs.m11 ^ rhs.m11,
    lhs.m20 ^ rhs.m20, lhs.m21 ^ rhs.m21,
    lhs.m30 ^ rhs.m30, lhs.m31 ^ rhs.m31);

/// <summary>Component-wise bitwise xor with a scalar.</summary>
public static imat4x2 operator^(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 ^ rhs, lhs.m01 ^ rhs,
    lhs.m10 ^ rhs, lhs.m11 ^ rhs,
    lhs.m20 ^ rhs, lhs.m21 ^ rhs,
    lhs.m30 ^ rhs, lhs.m31 ^ rhs);

/// <summary>Component-wise bitwise xor with a scalar.</summary>
public static imat4x2 operator^(int lhs, imat4x2 rhs) => new imat4x2(
    lhs ^ rhs.m00, lhs ^ rhs.m01,
    lhs ^ rhs.m10, lhs ^ rhs.m11,
    lhs ^ rhs.m20, lhs ^ rhs.m21,
    lhs ^ rhs.m30, lhs ^ rhs.m31);

/// <summary>Component-wise bitwise or of two matrices.</summary>
public static imat4x2 operator|(imat4x2 lhs, imat4x2 rhs) => new imat4x2(
    lhs.m00 | rhs.m00, lhs.m01 | rhs.m01,
    lhs.m10 | rhs.m10, lhs.m11 | rhs.m11,
    lhs.m20 | rhs.m20, lhs.m21 | rhs.m21,
    lhs.m30 | rhs.m30, lhs.m31 | rhs.m31);

/// <summary>Component-wise bitwise or with a scalar.</summary>
public static imat4x2 operator|(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 | rhs, lhs.m01 | rhs,
    lhs.m10 | rhs, lhs.m11 | rhs,
    lhs.m20 | rhs, lhs.m21 | rhs,
    lhs.m30 | rhs, lhs.m31 | rhs);

/// <summary>Component-wise bitwise or with a scalar.</summary>
public static imat4x2 operator|(int lhs, imat4x2 rhs) => new imat4x2(
    lhs | rhs.m00, lhs | rhs.m01,
    lhs | rhs.m10, lhs | rhs.m11,
    lhs | rhs.m20, lhs | rhs.m21,
    lhs | rhs.m30, lhs | rhs.m31);

/// <summary>Component-wise bitwise and of two matrices.</summary>
public static imat4x2 operator&(imat4x2 lhs, imat4x2 rhs) => new imat4x2(
    lhs.m00 & rhs.m00, lhs.m01 & rhs.m01,
    lhs.m10 & rhs.m10, lhs.m11 & rhs.m11,
    lhs.m20 & rhs.m20, lhs.m21 & rhs.m21,
    lhs.m30 & rhs.m30, lhs.m31 & rhs.m31);

/// <summary>Component-wise bitwise and with a scalar.</summary>
public static imat4x2 operator&(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 & rhs, lhs.m01 & rhs,
    lhs.m10 & rhs, lhs.m11 & rhs,
    lhs.m20 & rhs, lhs.m21 & rhs,
    lhs.m30 & rhs, lhs.m31 & rhs);

/// <summary>Component-wise bitwise and with a scalar.</summary>
public static imat4x2 operator&(int lhs, imat4x2 rhs) => new imat4x2(
    lhs & rhs.m00, lhs & rhs.m01,
    lhs & rhs.m10, lhs & rhs.m11,
    lhs & rhs.m20, lhs & rhs.m21,
    lhs & rhs.m30, lhs & rhs.m31);

/// <summary>Shifts every component left by a scalar amount.</summary>
public static imat4x2 operator<<(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 << rhs, lhs.m01 << rhs,
    lhs.m10 << rhs, lhs.m11 << rhs,
    lhs.m20 << rhs, lhs.m21 << rhs,
    lhs.m30 << rhs, lhs.m31 << rhs);

/// <summary>Shifts every component right by a scalar amount.</summary>
public static imat4x2 operator>>(imat4x2 lhs, int rhs) => new imat4x2(
    lhs.m00 >> rhs, lhs.m01 >> rhs,
    lhs.m10 >> rhs, lhs.m11 >> rhs,
    lhs.m20 >> rhs, lhs.m21 >> rhs,
    lhs.m30 >> rhs, lhs.m31 >> rhs);

/// <summary>Component-wise lesser-than comparison; yields a boolean matrix.</summary>
public static bmat4x2 operator<(imat4x2 lhs, imat4x2 rhs) => new bmat4x2(
    lhs.m00 < rhs.m00, lhs.m01 < rhs.m01,
    lhs.m10 < rhs.m10, lhs.m11 < rhs.m11,
    lhs.m20 < rhs.m20, lhs.m21 < rhs.m21,
    lhs.m30 < rhs.m30, lhs.m31 < rhs.m31);

/// <summary>Component-wise lesser-than comparison against a scalar.</summary>
public static bmat4x2 operator<(imat4x2 lhs, int rhs) => new bmat4x2(
    lhs.m00 < rhs, lhs.m01 < rhs,
    lhs.m10 < rhs, lhs.m11 < rhs,
    lhs.m20 < rhs, lhs.m21 < rhs,
    lhs.m30 < rhs, lhs.m31 < rhs);

/// <summary>Component-wise scalar lesser-than comparison.</summary>
public static bmat4x2 operator<(int lhs, imat4x2 rhs) => new bmat4x2(
    lhs < rhs.m00, lhs < rhs.m01,
    lhs < rhs.m10, lhs < rhs.m11,
    lhs < rhs.m20, lhs < rhs.m21,
    lhs < rhs.m30, lhs < rhs.m31);

/// <summary>Component-wise lesser-or-equal comparison; yields a boolean matrix.</summary>
public static bmat4x2 operator<=(imat4x2 lhs, imat4x2 rhs) => new bmat4x2(
    lhs.m00 <= rhs.m00, lhs.m01 <= rhs.m01,
    lhs.m10 <= rhs.m10, lhs.m11 <= rhs.m11,
    lhs.m20 <= rhs.m20, lhs.m21 <= rhs.m21,
    lhs.m30 <= rhs.m30, lhs.m31 <= rhs.m31);

/// <summary>Component-wise lesser-or-equal comparison against a scalar.</summary>
public static bmat4x2 operator<=(imat4x2 lhs, int rhs) => new bmat4x2(
    lhs.m00 <= rhs, lhs.m01 <= rhs,
    lhs.m10 <= rhs, lhs.m11 <= rhs,
    lhs.m20 <= rhs, lhs.m21 <= rhs,
    lhs.m30 <= rhs, lhs.m31 <= rhs);

/// <summary>Component-wise scalar lesser-or-equal comparison.</summary>
public static bmat4x2 operator<=(int lhs, imat4x2 rhs) => new bmat4x2(
    lhs <= rhs.m00, lhs <= rhs.m01,
    lhs <= rhs.m10, lhs <= rhs.m11,
    lhs <= rhs.m20, lhs <= rhs.m21,
    lhs <= rhs.m30, lhs <= rhs.m31);

/// <summary>Component-wise greater-than comparison; yields a boolean matrix.</summary>
public static bmat4x2 operator>(imat4x2 lhs, imat4x2 rhs) => new bmat4x2(
    lhs.m00 > rhs.m00, lhs.m01 > rhs.m01,
    lhs.m10 > rhs.m10, lhs.m11 > rhs.m11,
    lhs.m20 > rhs.m20, lhs.m21 > rhs.m21,
    lhs.m30 > rhs.m30, lhs.m31 > rhs.m31);

/// <summary>Component-wise greater-than comparison against a scalar.</summary>
public static bmat4x2 operator>(imat4x2 lhs, int rhs) => new bmat4x2(
    lhs.m00 > rhs, lhs.m01 > rhs,
    lhs.m10 > rhs, lhs.m11 > rhs,
    lhs.m20 > rhs, lhs.m21 > rhs,
    lhs.m30 > rhs, lhs.m31 > rhs);

/// <summary>Component-wise scalar greater-than comparison.</summary>
public static bmat4x2 operator>(int lhs, imat4x2 rhs) => new bmat4x2(
    lhs > rhs.m00, lhs > rhs.m01,
    lhs > rhs.m10, lhs > rhs.m11,
    lhs > rhs.m20, lhs > rhs.m21,
    lhs > rhs.m30, lhs > rhs.m31);

/// <summary>Component-wise greater-or-equal comparison; yields a boolean matrix.</summary>
public static bmat4x2 operator>=(imat4x2 lhs, imat4x2 rhs) => new bmat4x2(
    lhs.m00 >= rhs.m00, lhs.m01 >= rhs.m01,
    lhs.m10 >= rhs.m10, lhs.m11 >= rhs.m11,
    lhs.m20 >= rhs.m20, lhs.m21 >= rhs.m21,
    lhs.m30 >= rhs.m30, lhs.m31 >= rhs.m31);

/// <summary>Component-wise greater-or-equal comparison against a scalar.</summary>
public static bmat4x2 operator>=(imat4x2 lhs, int rhs) => new bmat4x2(
    lhs.m00 >= rhs, lhs.m01 >= rhs,
    lhs.m10 >= rhs, lhs.m11 >= rhs,
    lhs.m20 >= rhs, lhs.m21 >= rhs,
    lhs.m30 >= rhs, lhs.m31 >= rhs);

/// <summary>Component-wise scalar greater-or-equal comparison.</summary>
public static bmat4x2 operator>=(int lhs, imat4x2 rhs) => new bmat4x2(
    lhs >= rhs.m00, lhs >= rhs.m01,
    lhs >= rhs.m10, lhs >= rhs.m11,
    lhs >= rhs.m20, lhs >= rhs.m21,
    lhs >= rhs.m30, lhs >= rhs.m31);
    }
}
using Kitware.VTK;
using System;

// input file is +C:\VTK\Common\Testing\Tcl\rtImageTest.tcl
// output file is +rtImageTest1.cs

/// <summary>
/// The testing class derived from rtImageTest1.
/// Reflective driver: loads a test class named "&lt;testName&gt;Class", invokes its
/// static test method, then (optionally) runs a vtkTesting image regression.
/// </summary>
public class TclToCsScriptTestDriver
{
    // NOTE(review): all state is static — the driver process runs exactly one test per invocation.
    static vtkTesting rtTester;
    static vtkRenderWindow tempRenderWindow;
    static vtkRenderWindowInteractor tempRenderWindowInteractor;
    static vtkObject tempViewer;
    static vtkWindowToImageFilter tempw2i;
    static string VTK_DATA_ROOT;
    static string test;
    static int threshold;
    static int rtResult;

    ///<summary>
    ///Executes a static method using reflection.
    ///Returns null when the method is not found (callers rely on this for optional hooks).
    ///</summary>
    public static object executeMethod(System.Type t, string toExecute, object[] commands)
    {
        System.Reflection.MethodInfo methodInfo = t.GetMethod(toExecute);
        if (methodInfo == null)
        {
            return null;
        }
        return methodInfo.Invoke(null, commands);
    }

    /// <summary>
    /// The main entry method called by the CSharp driver
    /// </summary>
    /// <param name="argv">First element is the test name; remaining elements are forwarded to vtkTesting.</param>
    public static void Main(String[] argv)
    {
        try
        {
            bool fail_on_image_diff = false;
            //Prefix Content is: ""
            int argc = 0;
            if (argv != null)
            {
                argc = argv.Length;
            }
            // setup some common things for testing[]
            vtkMath.RandomSeed(6);
            // create the testing class to do the work[]
            rtTester = new vtkTesting();
            // argv[0] is the test name, so forwarding starts at index 1.
            for (int i = 1; i < argc; i++)
            {
                rtTester.AddArgument(argv[i]);
                if (argv[i] == "--fail-on-image-diff")
                {
                    fail_on_image_diff = true;
                }
            }
            // string auto_path = "";
            // VTK_DATA_ROOT = rtTester.GetDataRoot();
            // load in the script[]
            // NOTE(review): the null-guard above suggests argv may be null, in which case
            // argv.Length here would throw NullReferenceException (caught below) — confirm callers.
            if (0 == argv.Length)
            {
                test = GetTestNameInteractively();
            }
            else
            {
                test = argv[0];
            }
            //The class that we are about to execute the test in
            System.Type t = System.Type.GetType(test + "Class");
            if (null == t)
            {
                throw new System.ArgumentException(System.String.Format(
                    "error: could not create a Type object for '{0}'...\n\n{1}\n{2}\n{3}\n{4}\n\n{5}\n\n",
                    test + "Class",
                    "Typo?",
                    "Did you follow the C# test driver naming convention?",
                    "Did you add the test to the CMakeLists.txt file?",
                    "Did you reconfigure/rebuild after adding the test?",
                    "Test 'method' name should equal 'file name without extension'... Test 'public class' name should be the same but with 'Class' appended..."
                    ));
            }
            // set the default threshold, the Tcl script may change this[]
            threshold = -1;
            executeMethod(t, "Setthreshold", new object[] { threshold });
            executeMethod(t, "SetVTK_DATA_ROOT", new object[] { VTK_DATA_ROOT });
            //run the test
            executeMethod(t, test, new object[] { argv });
            // Pull the objects the test created back out via its generated accessors.
            tempRenderWindowInteractor = (vtkRenderWindowInteractor)executeMethod(t, "Getiren", new object[] { });
            tempRenderWindow = (vtkRenderWindow)executeMethod(t, "GetrenWin", new object[] { });
            tempViewer = (vtkObject)executeMethod(t, "Getviewer", new object[] { });
            tempw2i = (vtkWindowToImageFilter)executeMethod(t, "Getw2i", new object[] { });
            //update the threshold from what the test made it
            threshold = (int)executeMethod(t, "Getthreshold", new object[] { });
            // NOTE(review): assumes a test that exposes an interactor also exposes a render
            // window; tempRenderWindow would NRE here otherwise — confirm driver convention.
            if (tempRenderWindowInteractor != null)
            {
                tempRenderWindow.Render();
            }
            // run the event loop quickly to map any tkwidget windows[]
            // current directory[]
            rtResult = 0;
            if (fail_on_image_diff && rtTester.IsValidImageSpecified() != 0)
            {
                // look for a renderWindow ImageWindow or ImageViewer[]
                // first check for some common names[]
                if (tempRenderWindow != null)
                {
                    rtTester.SetRenderWindow(tempRenderWindow);
                    if ((threshold) == -1)
                    {
                        threshold = 10;
                    }
                }
                else
                {
                    if ((threshold) == -1)
                    {
                        threshold = 5;
                    }
                    if (tempViewer != null)
                    {
                        if (tempViewer.IsA("vtkImageViewer") != 0)
                        {
                            tempRenderWindow = ((vtkImageViewer)tempViewer).GetRenderWindow();
                        }
                        else if (tempViewer.IsA("vtkImageViewer2") != 0)
                        {
                            tempRenderWindow = ((vtkImageViewer2)tempViewer).GetRenderWindow();
                        }
                        else
                        {
                            throw new System.Exception("");
                        }
                        rtTester.SetRenderWindow(tempRenderWindow);
                        if (tempViewer.IsA("vtkImageViewer") != 0)
                        {
                            ((vtkImageViewer)tempViewer).Render();
                        }
                        else if (tempViewer.IsA("vtkImageViewer2") != 0)
                        {
                            ((vtkImageViewer2)tempViewer).Render();
                        }
                    }
                    else
                    {
                        tempRenderWindow = (vtkRenderWindow)executeMethod(t, "GetimgWin", new object[] { });
                        if (tempRenderWindow != null)
                        {
                            rtTester.SetRenderWindow(tempRenderWindow);
                            tempRenderWindow.Render();
                        }
                    }
                }
                if (tempRenderWindow == null)
                {
                    throw new System.Exception("tempRenderWindow cannot be null for IsValidImageSpecified case...");
                }
                rtResult = rtTester.RegressionTest(threshold);
            }
            if (rtTester.IsInteractiveModeSpecified() != 0)
            {
                if (tempRenderWindowInteractor != null)
                {
                    tempRenderWindowInteractor.Start();
                }
            }
            // Force other objects that may have holds on the render window
            // to let go:
            //
            rtTester.SetRenderWindow(null);
            if (null != tempw2i)
            {
                tempw2i.SetInput(null);
            }
            executeMethod(t, "deleteAllVTKObjects", new object[] { });
            deleteAllVTKObjects();
            // Force a garbage collection prior to exiting the test
            // so that any memory leaks reported are likely to be
            // *actual* leaks of some sort rather than false reports:
            //
            System.GC.Collect();
            System.GC.WaitForPendingFinalizers();
            // Fail the tests that have image diffs if fail_on_image_diff is on:
            //
            if (fail_on_image_diff && 0 == rtResult)
            {
                throw new System.Exception("error: image RegressionTest failed");
            }
            // Test finished without throwing any exceptions...
            // Therefore, it passed. Exit with a zero ExitCode.
            //
            System.Environment.ExitCode = 0;
        }
        catch (System.Exception exc)
        {
            // Catch anything, spit it out to the console so it can be captured
            // by ctest. Exit with a non-zero ExitCode.
            //
            System.Console.Error.WriteLine("================================================================================");
            System.Console.Error.WriteLine("");
            System.Console.Error.WriteLine("TclToCsScript C# test driver caught System.Exception:");
            System.Console.Error.WriteLine("");
            System.Console.Error.WriteLine("{0}", exc.ToString());
            System.Console.Error.WriteLine("");
            System.Console.Error.WriteLine("================================================================================");
            System.Console.Error.WriteLine("");
            System.Environment.ExitCode = 2345;
        }
    }

    /// <summary>
    /// Returns the variable in the index [index] of the stringarray [arr]
    /// (Tcl "lindex" equivalent kept for translated scripts).
    /// </summary>
    /// <param name="arr">The source array.</param>
    /// <param name="index">Zero-based index into the array.</param>
    public static string lindex(string[] arr, int index)
    {
        return arr[index];
    }

    ///<summary>Deletes all static objects created</summary>
    public static void deleteAllVTKObjects()
    {
        //clean up vtk objects
        // NOTE(review): rtTester is disposed unconditionally; this method is only reached
        // after rtTester was constructed in Main.
        rtTester.Dispose();
        if (tempViewer != null)
        {
            tempViewer.Dispose();
        }
        if (tempRenderWindow != null)
        {
            tempRenderWindow.Dispose();
        }
        if (tempRenderWindowInteractor != null)
        {
            tempRenderWindowInteractor.Dispose();
        }
        if (tempw2i != null)
        {
            tempw2i.Dispose();
        }
    }

    /// <summary>
    /// Gets the tests that currently compile.
    /// A test is any exported class "XClass" exposing a method "X".
    /// </summary>
    public static string[] GetAvailableTests()
    {
        System.Collections.ArrayList testList = new System.Collections.ArrayList();
        System.Reflection.Assembly assy = System.Reflection.Assembly.GetExecutingAssembly();
        foreach (System.Type et in assy.GetExportedTypes())
        {
            if (et.IsClass)
            {
                foreach (System.Reflection.MethodInfo mInfo in et.GetMethods())
                {
                    if (et.Name == mInfo.Name + "Class")
                    {
                        testList.Add(mInfo.Name);
                    }
                }
            }
        }
        return (string[])testList.ToArray(System.Type.GetType("System.String"));
    }

    /// <summary>
    /// Returns the temp directory
    /// </summary>
    public static string GetTempDirectory()
    {
        return rtTester.GetTempDirectory();
    }

    /// <summary>
    /// get the names of the tests and prompt the user to pick one on the console
    /// </summary>
    public static string GetTestNameInteractively()
    {
        string s = "Available tests:\n";
        string[] tests = GetAvailableTests();
        int i = 0;
        foreach (string xyz in tests)
        {
            s = System.String.Format("{0} {1}: {2}\n", s, i, xyz);
            ++i;
        }
        s = System.String.Format("{0}To run a test, enter the test number: ", s);
        System.Console.Write(s);
        string choice = System.Console.ReadLine();
        int choiceNumber = -1;
        try
        {
            choiceNumber = System.Convert.ToInt32(choice);
            if (choiceNumber < 0 || choiceNumber >= tests.Length)
            {
                throw new System.ArgumentOutOfRangeException(System.String.Format(
                    "'{0}' is an invalid test number.\nExiting without running a test.\n\n", choice));
            }
        }
        catch (System.Exception)
        {
            System.Console.Error.Write(System.String.Format(
                "'{0}' is an invalid test number.\nExiting without running a test.\n\n", choice));
            throw;
        }
        return tests[choiceNumber];
    }
}
using Lucene.Net.Diagnostics;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Util;
using Spatial4n.Core.Shapes;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;

namespace Lucene.Net.Spatial.Prefix
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Traverses a <see cref="SpatialPrefixTree">SpatialPrefixTree</see> indexed field, using the template &amp;
    /// visitor design patterns for subclasses to guide the traversal and collect
    /// matching documents.
    /// <para/>
    /// Subclasses implement <see cref="Filter.GetDocIdSet(AtomicReaderContext, IBits)"/>
    /// by instantiating a custom <see cref="VisitorTemplate"/> subclass (i.e. an anonymous inner class) and implement the
    /// required methods.
    ///
    /// @lucene.internal
    /// </summary>
    public abstract class AbstractVisitingPrefixTreeFilter : AbstractPrefixTreeFilter
    {
        // Historical note: this code resulted from a refactoring of RecursivePrefixTreeFilter,
        // which in turn came out of SOLR-2155

        protected readonly int m_prefixGridScanLevel;//at least one less than grid.getMaxLevels()

        protected AbstractVisitingPrefixTreeFilter(IShape queryShape, string fieldName, SpatialPrefixTree grid, int detailLevel, int prefixGridScanLevel) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
            : base(queryShape, fieldName, grid, detailLevel)
        {
            // Clamp the scan level into [0, MaxLevels - 1] regardless of the caller's value.
            this.m_prefixGridScanLevel = Math.Max(0, Math.Min(prefixGridScanLevel, grid.MaxLevels - 1));
            if (Debugging.AssertsEnabled) Debugging.Assert(detailLevel <= grid.MaxLevels);
        }

        public override bool Equals(object o)
        {
            if (!base.Equals(o))
            {
                return false;//checks getClass == o.getClass & instanceof
            }
            //Ignore prefixGridScanLevel as it is merely a tuning parameter.
            return true;
        }

        public override int GetHashCode()
        {
            // Mirrors Equals: only base identity participates; prefixGridScanLevel is ignored.
            int result = base.GetHashCode();
            return result;
        }

        #region Nested type: VisitorTemplate

        /// <summary>
        /// An abstract class designed to make it easy to implement predicates or
        /// other operations on a <see cref="SpatialPrefixTree"/> indexed field. An instance
        /// of this class is not designed to be re-used across AtomicReaderContext
        /// instances so simply create a new one for each call to, say a
        /// <see cref="Lucene.Net.Search.Filter.GetDocIdSet(Lucene.Net.Index.AtomicReaderContext, Lucene.Net.Util.IBits)"/>.
        /// The <see cref="GetDocIdSet()"/> method here starts the work. It first checks
        /// that there are indexed terms; if not it quickly returns null. Then it calls
        /// <see cref="Start()">Start()</see> so a subclass can set up a return value, like an
        /// <see cref="Lucene.Net.Util.FixedBitSet"/>. Then it starts the traversal
        /// process, calling <see cref="FindSubCellsToVisit(Lucene.Net.Spatial.Prefix.Tree.Cell)"/>
        /// which by default finds the top cells that intersect <c>queryShape</c>. If
        /// there isn't an indexed cell for a corresponding cell returned for this
        /// method then it's short-circuited until it finds one, at which point
        /// <see cref="Visit(Lucene.Net.Spatial.Prefix.Tree.Cell)"/> is called. At
        /// some depths, of the tree, the algorithm switches to a scanning mode that
        /// calls <see cref="VisitScanned(Lucene.Net.Spatial.Prefix.Tree.Cell)"/>
        /// for each leaf cell found.
        ///
        /// @lucene.internal
        /// </summary>
        public abstract class VisitorTemplate : BaseTermsEnumTraverser
        {
            /* Future potential optimizations:

            * Can a polygon query shape be optimized / made-simpler at recursive depths
              (e.g. intersection of shape + cell box)

            * RE "scan" vs divide & conquer performance decision:
              We should use termsEnum.docFreq() as an estimate on the number of places at
              this depth.  It would be nice if termsEnum knew how many terms
              start with the current term without having to repeatedly next() & test to find out.

            * Perhaps don't do intermediate seek()'s to cells above detailLevel that have Intersects
              relation because we won't be collecting those docs any way.  However seeking
              does act as a short-circuit.  So maybe do some percent of the time or when the level
              is above some threshold.

            * Each shape.relate(otherShape) result could be cached since much of the same relations
              will be invoked when multiple segments are involved.

            */

            protected readonly bool m_hasIndexedLeaves;//if false then we can skip looking for them

            private VNode curVNode;//current pointer, derived from query shape
            private readonly BytesRef curVNodeTerm = new BytesRef();//curVNode.cell's term. // LUCENENET: marked readonly
            private Cell scanCell;

            private BytesRef thisTerm; //the result of termsEnum.term()

            protected VisitorTemplate(AbstractVisitingPrefixTreeFilter outerInstance, AtomicReaderContext context, IBits acceptDocs, bool hasIndexedLeaves) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
                : base(outerInstance, context, acceptDocs)
            {
                this.m_hasIndexedLeaves = hasIndexedLeaves;
            }

            // Drives the whole traversal: descend into children, otherwise advance to the
            // next sibling (popping up as sibling lists are exhausted), seeking/leap-frogging
            // the terms enum to keep it in sync with the visitor tree.
            public virtual DocIdSet GetDocIdSet()
            {
                if (Debugging.AssertsEnabled) Debugging.Assert(curVNode == null, "Called more than once?");
                if (m_termsEnum == null)
                {
                    return null;
                }
                //advance
                if (!m_termsEnum.MoveNext())
                {
                    return null;// all done
                }
                thisTerm = m_termsEnum.Term;
                curVNode = new VNode(null);
                curVNode.Reset(m_outerInstance.m_grid.WorldCell);
                Start();
                AddIntersectingChildren();
                while (thisTerm != null)//terminates for other reasons too!
                {
                    //Advance curVNode pointer
                    if (curVNode.children != null)
                    {
                        //-- HAVE CHILDREN: DESCEND
                        // LUCENENET NOTE: Must call this line before calling MoveNext()
                        // on the enumerator.
                        //if we put it there then it has something
                        PreSiblings(curVNode);
                        // LUCENENET IMPORTANT: Must not call this inline with Debug.Assert
                        // because the compiler removes Debug.Assert statements in release mode!!
                        bool hasNext = curVNode.children.MoveNext();
                        if (Debugging.AssertsEnabled) Debugging.Assert(hasNext);
                        curVNode = curVNode.children.Current;
                    }
                    else
                    {
                        //-- NO CHILDREN: ADVANCE TO NEXT SIBLING
                        VNode parentVNode = curVNode.parent;
                        while (true)
                        {
                            if (parentVNode == null)
                            {
                                goto main_break;// all done
                            }
                            if (parentVNode.children.MoveNext())
                            {
                                //advance next sibling
                                curVNode = parentVNode.children.Current;
                                break;
                            }
                            else
                            {
                                //reached end of siblings; pop up
                                PostSiblings(parentVNode);
                                parentVNode.children = null; //GC
                                parentVNode = parentVNode.parent;
                            }
                        }
                    }
                    //Seek to curVNode's cell (or skip if termsEnum has moved beyond)
                    curVNodeTerm.Bytes = curVNode.cell.GetTokenBytes();
                    curVNodeTerm.Length = curVNodeTerm.Bytes.Length;
                    int compare = m_termsEnum.Comparer.Compare(thisTerm, curVNodeTerm);
                    if (compare > 0)
                    {
                        // leap frog (termsEnum is beyond where we would otherwise seek)
                        if (Debugging.AssertsEnabled) Debugging.Assert(!m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetEnumerator().SeekExact(curVNodeTerm), "should be absent");
                    }
                    else
                    {
                        if (compare < 0)
                        {
                            // Seek !
                            TermsEnum.SeekStatus seekStatus = m_termsEnum.SeekCeil(curVNodeTerm);
                            if (seekStatus == TermsEnum.SeekStatus.END)
                            {
                                break;// all done
                            }
                            thisTerm = m_termsEnum.Term;
                            if (seekStatus == TermsEnum.SeekStatus.NOT_FOUND)
                            {
                                continue; // leap frog
                            }
                        }
                        // Visit!
                        bool descend = Visit(curVNode.cell);
                        //advance
                        if (!m_termsEnum.MoveNext())
                        {
                            thisTerm = null;
                            break;// all done
                        }
                        thisTerm = m_termsEnum.Term;
                        if (descend)
                        {
                            AddIntersectingChildren();
                        }
                    }
                    ;
                }//main loop
                main_break: { }
                return Finish();
            }

            /// <summary>
            /// Called initially, and whenever <see cref="Visit(Lucene.Net.Spatial.Prefix.Tree.Cell)"/>
            /// returns true.
            /// </summary>
            /// <exception cref="IOException"></exception>
            private void AddIntersectingChildren()
            {
                if (Debugging.AssertsEnabled) Debugging.Assert(thisTerm != null);
                Cell cell = curVNode.cell;
                if (cell.Level >= m_outerInstance.m_detailLevel)
                {
                    throw IllegalStateException.Create("Spatial logic error");
                }
                //Check for adjacent leaf (happens for indexed non-point shapes)
                if (m_hasIndexedLeaves && cell.Level != 0)
                {
                    //If the next indexed term just adds a leaf marker ('+') to cell,
                    // then add all of those docs
                    if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(thisTerm, curVNodeTerm));//TODO refactor to use method on curVNode.cell
                    scanCell = m_outerInstance.m_grid.GetCell(thisTerm.Bytes, thisTerm.Offset, thisTerm.Length, scanCell);
                    if (scanCell.Level == cell.Level && scanCell.IsLeaf)
                    {
                        VisitLeaf(scanCell);
                        //advance
                        if (!m_termsEnum.MoveNext())
                        {
                            return;// all done
                        }
                        thisTerm = m_termsEnum.Term;
                    }
                }
                //Decide whether to continue to divide & conquer, or whether it's time to
                // scan through terms beneath this cell.
                // Scanning is a performance optimization trade-off.
                //TODO use termsEnum.docFreq() as heuristic
                bool scan = cell.Level >= ((AbstractVisitingPrefixTreeFilter)m_outerInstance).m_prefixGridScanLevel;//simple heuristic
                if (!scan)
                {
                    //Divide & conquer (ultimately termsEnum.seek())
                    IEnumerator<Cell> subCellsIter = FindSubCellsToVisit(cell);
                    if (!subCellsIter.MoveNext())
                    {
                        return;//not expected
                    }
                    curVNode.children = new VNodeCellIterator(subCellsIter, new VNode(curVNode));
                }
                else
                {
                    //Scan (loop of termsEnum.next())
                    Scan(m_outerInstance.m_detailLevel);
                }
            }

            /// <summary>
            /// Called when doing a divide &amp; conquer to find the next intersecting cells
            /// of the query shape that are beneath <paramref name="cell"/>. <paramref name="cell"/> is
            /// guaranteed to have an intersection and thus this must return some number
            /// of nodes.
            /// </summary>
            protected internal virtual IEnumerator<Cell> FindSubCellsToVisit(Cell cell)
            {
                return cell.GetSubCells(m_outerInstance.m_queryShape).GetEnumerator();
            }

            /// <summary>
            /// Scans (<c>termsEnum.MoveNext()</c>) terms until a term is found that does
            /// not start with curVNode's cell. If it finds a leaf cell or a cell at
            /// level <paramref name="scanDetailLevel"/> then it calls
            /// <see cref="VisitScanned(Lucene.Net.Spatial.Prefix.Tree.Cell)"/>.
            /// </summary>
            /// <exception cref="IOException"></exception>
            protected internal virtual void Scan(int scanDetailLevel)
            {
                // LUCENENET specific - on the first loop, we need to check for null,
                // but on each subsequent loop, we can use the result of MoveNext()
                if (!(thisTerm is null) && StringHelper.StartsWith(thisTerm, curVNodeTerm)) //TODO refactor to use method on curVNode.cell
                {
                    bool moved;
                    do
                    {
                        scanCell = m_outerInstance.m_grid.GetCell(thisTerm.Bytes, thisTerm.Offset, thisTerm.Length, scanCell);
                        int termLevel = scanCell.Level;
                        if (termLevel < scanDetailLevel)
                        {
                            if (scanCell.IsLeaf)
                                VisitScanned(scanCell);
                        }
                        else if (termLevel == scanDetailLevel)
                        {
                            if (!scanCell.IsLeaf)//LUCENE-5529
                                VisitScanned(scanCell);
                        }
                    } while ((moved = m_termsEnum.MoveNext()) && StringHelper.StartsWith(thisTerm = m_termsEnum.Term, curVNodeTerm));
                    // LUCENENET: Ensure we set thisTerm to null if the iteration ends
                    if (!moved)
                        thisTerm = null;
                }
            }

            #region Nested type: VNodeCellIterator

            /// <summary>
            /// Used for <see cref="VNode.children"/>.
            /// </summary>
            private class VNodeCellIterator : IEnumerator<VNode>
            {
                internal readonly IEnumerator<Cell> cellIter;
                private readonly VNode vNode;
                private bool first = true;

                internal VNodeCellIterator(IEnumerator<Cell> cellIter, VNode vNode)
                {
                    //term loop
                    this.cellIter = cellIter;
                    this.vNode = vNode;
                }
                //it always removes

                #region IEnumerator<VNode> Members

                public void Dispose()
                {
                    cellIter.Dispose();
                }

                public bool MoveNext()
                {
                    //if (Debugging.AssertsEnabled) Debugging.Assert(cellIter.Current != null);

                    // LUCENENET NOTE: The consumer of this class calls
                    // cellIter.MoveNext() before it is instantiated.
                    // So, the first call here
                    // to MoveNext() must not move the cursor.
                    bool result;
                    if (!first)
                    {
                        result = cellIter.MoveNext();
                    }
                    else
                    {
                        result = true;
                        first = false;
                    }
                    // LUCENENET NOTE: Need to skip this call
                    // if there are no more results because null
                    // is not allowed
                    if (result == true)
                    {
                        vNode.Reset(cellIter.Current);
                    }
                    return result;
                }

                public void Reset()
                {
                    cellIter.Reset();
                }

                public VNode Current => vNode;

                object IEnumerator.Current => Current;

                #endregion
            }

            #endregion

            /// <summary>Called first to setup things.</summary>
            /// <exception cref="IOException"></exception>
            protected internal abstract void Start();

            /// <summary>Called last to return the result.</summary>
            /// <exception cref="IOException"></exception>
            protected internal abstract DocIdSet Finish();

            /// <summary>
            /// Visit an indexed cell returned from
            /// <see cref="FindSubCellsToVisit(Lucene.Net.Spatial.Prefix.Tree.Cell)"/>.
            /// </summary>
            /// <param name="cell">An intersecting cell.</param>
            /// <returns>
            /// true to descend to more levels. It is an error to return true
            /// if cell.Level == detailLevel
            /// </returns>
            /// <exception cref="IOException"></exception>
            protected internal abstract bool Visit(Cell cell);

            /// <summary>Called after visit() returns true and an indexed leaf cell is found.</summary>
            /// <remarks>
            /// Called after Visit() returns true and an indexed leaf cell is found. An
            /// indexed leaf cell means associated documents generally won't be found at
            /// further detail levels.
            /// </remarks>
            /// <exception cref="IOException"></exception>
            protected internal abstract void VisitLeaf(Cell cell);

            /// <summary>
            /// The cell is either indexed as a leaf or is the last level of detail. It
            /// might not even intersect the query shape, so be sure to check for that.
            /// </summary>
            /// <exception cref="IOException"></exception>
            protected internal abstract void VisitScanned(Cell cell);

            protected internal virtual void PreSiblings(VNode vNode)
            {
            }

            protected internal virtual void PostSiblings(VNode vNode)
            {
            }
            //class VisitorTemplate
        }

        #endregion

        #region Nested type: VNode

        /// <summary>
        /// A Visitor node/cell found via the query shape for <see cref="VisitorTemplate"/>.
        /// Sometimes these are reset(cell). It's like a LinkedList node but forms a
        /// tree.
        ///
        /// @lucene.internal
        /// </summary>
        public class VNode
        {
            //Note: The VNode tree adds more code to debug/maintain v.s. a flattened
            // LinkedList that we used to have. There is more opportunity here for
            // custom behavior (see preSiblings & postSiblings) but that's not
            // leveraged yet. Maybe this is slightly more GC friendly.

            internal readonly VNode parent;//only null at the root
            internal IEnumerator<VNode> children;//null, then sometimes set, then null
            internal Cell cell;//not null (except initially before reset())

            /// <summary>Call <see cref="Reset(Cell)"/> after to set the cell.</summary>
            internal VNode(VNode parent)
            {
                // remember to call reset(cell) after
                this.parent = parent;
            }

            internal virtual void Reset(Cell cell)
            {
                if (Debugging.AssertsEnabled) Debugging.Assert(cell != null);
                this.cell = cell;
                if (Debugging.AssertsEnabled) Debugging.Assert(children == null);
            }
        }

        #endregion
    }
}
// Copyright (c) Sven Groot (Ookii.org) 2006
// See license.txt for details
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.ComponentModel;
using Microsoft.Win32;
using Ookii.Dialogs.Wpf.Interop;

namespace Ookii.Dialogs.Wpf
{
    /// <summary>
    /// Prompts the user to open a file.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Windows Vista provides a new style of common file dialog, with several new features (both from
    /// the user's and the programmers perspective).
    /// </para>
    /// <para>
    /// This class will use the Vista-style file dialogs if possible, and automatically fall back to the old-style
    /// dialog on versions of Windows older than Vista. This class is aimed at applications that
    /// target both Windows Vista and older versions of Windows, and therefore does not provide any
    /// of the new APIs provided by Vista's file dialogs.
    /// </para>
    /// <para>
    /// This class precisely duplicates the public interface of <see cref="OpenFileDialog"/> so you can just replace
    /// any instances of <see cref="OpenFileDialog"/> with the <see cref="VistaOpenFileDialog"/> without any further changes
    /// to your code.
    /// </para>
    /// </remarks>
    /// <threadsafety instance="false" static="true" />
    [Description("Prompts the user to open a file.")]
    public sealed class VistaOpenFileDialog : VistaFileDialog
    {
        // Backing state used only when the Vista-style dialog is active; in downlevel
        // mode these are delegated to the wrapped WinForms OpenFileDialog instead.
        private bool _showReadOnly;
        private bool _readOnlyChecked;
        // Custom control IDs passed to IFileDialogCustomize to recreate the classic
        // "Open"/"Open as read-only" drop-down on the Vista-style dialog. The values
        // only need to be unique within this dialog instance.
        private const int _openDropDownId = 0x4002;
        private const int _openItemId = 0x4003;
        private const int _readOnlyItemId = 0x4004;

        /// <summary>
        /// Creates a new instance of <see cref="VistaOpenFileDialog" /> class.
        /// </summary>
        public VistaOpenFileDialog()
        {
            // On pre-Vista systems fall back to the classic WinForms dialog; all
            // property accessors below check DownlevelDialog and delegate to it.
            if( !IsVistaFileDialogSupported )
                DownlevelDialog = new OpenFileDialog();
        }

        #region Public Properties

        /// <summary>
        /// Gets or sets a value indicating whether the dialog box displays a warning if the user specifies a file name that does not exist.
        /// </summary>
        /// <value>
        /// <see langword="true" /> if the dialog box displays a warning if the user specifies a file name that does not exist; otherwise, <see langword="false" />. The default value is <see langword="true" />.
        /// </value>
        [DefaultValue(true), Description("A value indicating whether the dialog box displays a warning if the user specifies a file name that does not exist.")]
        public override bool CheckFileExists
        {
            get
            {
                return base.CheckFileExists;
            }
            set
            {
                base.CheckFileExists = value;
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the dialog box allows multiple files to be selected.
        /// </summary>
        /// <value>
        /// <see langword="true" /> if the dialog box allows multiple files to be selected together or concurrently; otherwise, <see langword="false" />.
        /// The default value is <see langword="false" />.
        /// </value>
        [Description("A value indicating whether the dialog box allows multiple files to be selected."), DefaultValue(false), Category("Behavior")]
        public bool Multiselect
        {
            get
            {
                if( DownlevelDialog != null )
                    return ((OpenFileDialog)DownlevelDialog).Multiselect;
                return GetOption(NativeMethods.FOS.FOS_ALLOWMULTISELECT);
            }
            set
            {
                if( DownlevelDialog != null )
                    ((OpenFileDialog)DownlevelDialog).Multiselect = value;
                // Note: the option flag is recorded on the base dialog even when the
                // downlevel dialog is in use (no else here).
                SetOption(NativeMethods.FOS.FOS_ALLOWMULTISELECT, value);
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the dialog box contains a read-only check box.
        /// </summary>
        /// <value>
        /// <see langword="true" /> if the dialog box contains a read-only check box; otherwise, <see langword="false" />. The default value is <see langword="false" />.
        /// </value>
        /// <remarks>
        /// If the Vista style dialog is used, this property can only be used to determine whether the user chose
        /// Open as read-only on the dialog; setting it in code will have no effect.
        /// </remarks>
        [Description("A value indicating whether the dialog box contains a read-only check box."), Category("Behavior"), DefaultValue(false)]
        public bool ShowReadOnly
        {
            get
            {
                if( DownlevelDialog != null )
                    return ((OpenFileDialog)DownlevelDialog).ShowReadOnly;
                return _showReadOnly;
            }
            set
            {
                if( DownlevelDialog != null )
                    ((OpenFileDialog)DownlevelDialog).ShowReadOnly = value;
                else
                    _showReadOnly = value;
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the read-only check box is selected.
        /// </summary>
        /// <value>
        /// <see langword="true" /> if the read-only check box is selected; otherwise, <see langword="false" />. The default value is <see langword="false" />.
        /// </value>
        [DefaultValue(false), Description("A value indicating whether the read-only check box is selected."), Category("Behavior")]
        public bool ReadOnlyChecked
        {
            get
            {
                if( DownlevelDialog != null )
                    return ((OpenFileDialog)DownlevelDialog).ReadOnlyChecked;
                return _readOnlyChecked;
            }
            set
            {
                if( DownlevelDialog != null )
                    ((OpenFileDialog)DownlevelDialog).ReadOnlyChecked = value;
                else
                    _readOnlyChecked = value;
            }
        }

        #endregion

        #region Public Methods

        /// <summary>
        /// Resets all properties to their default values.
        /// </summary>
        public override void Reset()
        {
            base.Reset();
            // Only reset local state when the Vista-style dialog is active; in
            // downlevel mode base.Reset() already resets the wrapped dialog.
            if( DownlevelDialog == null )
            {
                CheckFileExists = true;
                _showReadOnly = false;
                _readOnlyChecked = false;
            }
        }

        /// <summary>
        /// Opens the file selected by the user, with read-only permission. The file is specified by the FileName property.
        /// </summary>
        /// <returns>A Stream that specifies the read-only file selected by the user.</returns>
        /// <exception cref="System.ArgumentNullException">The file name is <see langword="null" />.</exception>
        public System.IO.Stream OpenFile()
        {
            if( DownlevelDialog != null )
                return ((OpenFileDialog)DownlevelDialog).OpenFile();
            else
            {
                string fileName = FileName;
                return new FileStream(fileName, FileMode.Open, FileAccess.Read);
            }
        }

        #endregion

        #region Internal Methods

        // Creates the native COM open-file dialog used when the Vista style is available.
        internal override Ookii.Dialogs.Wpf.Interop.IFileDialog CreateFileDialog()
        {
            return new Ookii.Dialogs.Wpf.Interop.NativeFileOpenDialog();
        }

        // Pushes managed properties onto the native dialog before it is shown; adds
        // the custom "Open"/"Read-only" drop-down when ShowReadOnly was requested.
        internal override void SetDialogProperties(Ookii.Dialogs.Wpf.Interop.IFileDialog dialog)
        {
            base.SetDialogProperties(dialog);
            if( _showReadOnly )
            {
                Ookii.Dialogs.Wpf.Interop.IFileDialogCustomize customize = (Ookii.Dialogs.Wpf.Interop.IFileDialogCustomize)dialog;
                customize.EnableOpenDropDown(_openDropDownId);
                customize.AddControlItem(_openDropDownId, _openItemId, ComDlgResources.LoadString(ComDlgResources.ComDlgResourceId.OpenButton));
                customize.AddControlItem(_openDropDownId, _readOnlyItemId, ComDlgResources.LoadString(ComDlgResources.ComDlgResourceId.ReadOnly));
            }
        }

        // Harvests results from the native dialog after the user accepts it:
        // file names (multi- or single-select) and the read-only drop-down choice.
        internal override void GetResult(Ookii.Dialogs.Wpf.Interop.IFileDialog dialog)
        {
            if( Multiselect )
            {
                // Enumerate every selected shell item and resolve each to a
                // file-system path.
                Ookii.Dialogs.Wpf.Interop.IShellItemArray results;
                ((Ookii.Dialogs.Wpf.Interop.IFileOpenDialog)dialog).GetResults(out results);
                uint count;
                results.GetCount(out count);
                string[] fileNames = new string[count];
                for( uint x = 0; x < count; ++x )
                {
                    Ookii.Dialogs.Wpf.Interop.IShellItem item;
                    results.GetItemAt(x, out item);
                    string name;
                    item.GetDisplayName(NativeMethods.SIGDN.SIGDN_FILESYSPATH, out name);
                    fileNames[x] = name;
                }
                FileNamesInternal = fileNames;
            }
            else
                FileNamesInternal = null;

            if( ShowReadOnly )
            {
                // Read back which item of the custom drop-down the user picked.
                Ookii.Dialogs.Wpf.Interop.IFileDialogCustomize customize = (Ookii.Dialogs.Wpf.Interop.IFileDialogCustomize)dialog;
                int selected;
                customize.GetSelectedControlItem(_openDropDownId, out selected);
                _readOnlyChecked = (selected == _readOnlyItemId);
            }

            base.GetResult(dialog);
        }

        #endregion
    }
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Clide.Sdk;
using FromTo = System.Tuple<System.Type, System.Type>;

namespace Clide
{
    /// <summary>
    /// Default implementation of the <see cref="IAdapterService"/>.
    /// </summary>
    [Export(typeof(IAdapterService))]
    [PartCreationPolicy(CreationPolicy.Shared)]
    internal class AdapterService : IAdapterService
    {
        // Open generic MethodInfo for GetAdaptExpression<TFrom, TTo>; closed per
        // (from, to) pair at runtime to build a strongly-typed adapt delegate.
        static readonly MethodInfo AdaptExpressionGenerator = typeof(AdapterService).GetMethod("GetAdaptExpression", BindingFlags.NonPublic | BindingFlags.Static);

        // Caches: type hierarchy (ordered by distance), compiled adapt delegates,
        // and the winning adapter per (from, to) pair. All are concurrent since
        // the service is a shared MEF part.
        ConcurrentDictionary<Type, IEnumerable<TypeInheritance>> cachedOrderedTypeHierarchies = new ConcurrentDictionary<Type, IEnumerable<TypeInheritance>>();
        ConcurrentDictionary<FromTo, Func<IAdapter, object, object>> cachedAdaptMethods = new ConcurrentDictionary<FromTo, Func<IAdapter, object, object>>();
        ConcurrentDictionary<FromTo, IAdapter> cachedFromToAdapters = new ConcurrentDictionary<FromTo, IAdapter>();
        List<AdapterInfo> allAdapters;

        /// <summary>
        /// Initializes the adapter service with the given set of adapters.
        /// </summary>
        internal AdapterService(params IAdapter[] adapters)
            : this((IEnumerable<IAdapter>)adapters)
        {
        }

        /// <summary>
        /// Initializes the adapter service with the given set of adapters.
        /// </summary>
        [ImportingConstructor]
        public AdapterService([ImportMany] IEnumerable<IAdapter> adapters)
        {
            var genericAdapter = typeof(IAdapter<,>);

            allAdapters = adapters
                // Multiple implementations of IAdapter<TFrom, TTo> supported per adapter for convenience.
                .SelectMany(adapter => adapter
                    .GetType()
                    .GetInterfaces()
                    // Keep only the implementations of the generic interface.
                    .Where(conversion => conversion.IsGenericType && conversion.GetGenericTypeDefinition() == genericAdapter)
                    .Select(conversion => new AdapterInfo
                    {
                        Adapter = adapter,
                        From = conversion.GetGenericArguments()[0],
                        To = conversion.GetGenericArguments()[1]
                    }))
                .ToList();

            // Fail fast if two adapters claim the exact same (From, To) conversion,
            // since selection between them would otherwise be arbitrary.
            var duplicates = allAdapters
                .GroupBy(info => new FromTo(info.From, info.To))
                .ToDictionary(group => group.Key, group => group.ToList())
                .Where(group => group.Value.Count > 1)
                .SelectMany(group => group.Value)
                .ToList();

            if (duplicates.Count > 0)
                throw new ArgumentException("Duplicate adapters: " + string.Join(Environment.NewLine,
                    duplicates.Select(adapter => string.Format("{0}->{1}: {2}", adapter.From, adapter.To, adapter.Adapter))));
        }

        /// <summary>Wraps the source in an adaptable that defers type resolution to <c>As&lt;T&gt;()</c>.</summary>
        public IAdaptable<TSource> Adapt<TSource>(TSource source) where TSource : class => new Adaptable<TSource>(this, source);

        // Core conversion: resolves the best adapter for (runtime source type,
        // target type) and invokes it. Returns null when no adapter applies.
        TTarget Adapt<TSource, TTarget>(TSource source)
            where TSource : class
            where TTarget : class
        {
            // Null always adapts to null.
            if (source == null)
                return default(TTarget);

            // We first try the most specific source type, the
            // actual implementation.
            var sourceType = source.GetType();
            var targetType = typeof(TTarget);

            // COM objects expose an opaque runtime type, so fall back to the
            // statically-known TSource for lookup in that case.
            if (sourceType.FullName == "System.__ComObject")
                sourceType = typeof(TSource);

            // Avoid the more costly conversion if types are
            // directly assignable.
            if (targetType.IsAssignableFrom(sourceType) ||
                targetType.IsAssignableFrom(typeof(TSource)))
                return source as TTarget;

            var fromTo = new FromTo(sourceType, targetType);
            var adapter = cachedFromToAdapters.GetOrAdd(fromTo, FindAdapter);

            // Only retry if the TSource hasn't been looked up already, i.e.
            // if the instance type was a COM object.
            if (adapter == null && sourceType != typeof(TSource))
            {
                // Try again but with the explicit TSource we were passed-in.
                fromTo = new FromTo(typeof(TSource), targetType);
                adapter = cachedFromToAdapters.GetOrAdd(fromTo, FindAdapter);
                if (adapter == null)
                    return default(TTarget);
            }
            else if (adapter == null)
            {
                return default(TTarget);
            }

            var adaptMethod = GetAdaptMethod(fromTo, adapter);

            return adaptMethod.Invoke(adapter, source) as TTarget;
        }

        // Picks the best compatible adapter for the pair: most specific on the
        // source side first, then most derived on the target side. May return null.
        IAdapter FindAdapter(FromTo fromTo)
        {
            var fromType = fromTo.Item1;
            var toType = fromTo.Item2;
            var fromInheritance = GetInheritance(fromType);

            // Search by inheritance proximity of the target and source type
            var adapter = allAdapters
                // Filter out those that are compatible both for the source and the target.
                .Where(info => toType.IsAssignableFrom(info.To) && info.From.IsAssignableFrom(fromType))
                .Select(info => new
                {
                    Adapter = info.Adapter,
                    // Gets the distance between the requested From type to the adapter From type.
                    // NOTE(review): FirstOrDefault can yield null if info.From is not present in
                    // the computed hierarchy (e.g. an interface not surfaced by GetInheritanceTree),
                    // which would NRE in the OrderBy below — confirm GetInheritanceTree covers interfaces.
                    FromInheritance = fromInheritance.FirstOrDefault(x => x.Type == info.From),
                    // Gets the distance between the requested To type to the adapter To type.
                    ToInheritance = GetInheritance(info.To).FirstOrDefault(x => x.Type == toType)
                })
                // We first order by the most specific adapter with regards to the source type
                .OrderBy(info => info.FromInheritance.Distance)
                // Then we get the most specific (meaning most derived type, hence descending)
                // of the target type. This means that the more generic adapters will only be
                // used when no other more specific adapter exists. This allows implementers
                // to only provide the most specific conversions, and have those apply to
                // more generic adapt requests.
                .ThenByDescending(info => info.ToInheritance.Distance)
                .Select(info => info.Adapter)
                .FirstOrDefault();

            return adapter;
        }

        // Builds (and caches) a compiled delegate that downcasts the adapter and
        // source to their concrete types and invokes IAdapter<TFrom, TTo>.Adapt.
        Func<IAdapter, object, object> GetAdaptMethod(FromTo fromTo, IAdapter adapter)
        {
            return cachedAdaptMethods.GetOrAdd(
                fromTo,
                key => ((Expression<Func<IAdapter, object, object>>)
                    AdaptExpressionGenerator.MakeGenericMethod(key.Item1, key.Item2).Invoke(null, null))
                    .Compile());
        }

        // Invoked via reflection (see AdaptExpressionGenerator); name must not change.
        static Expression<Func<IAdapter, object, object>> GetAdaptExpression<TFrom, TTo>()
        {
            return (adapter, source) => ((IAdapter<TFrom, TTo>)adapter).Adapt((TFrom)source);
        }

        // Returns the type's inheritance chain ordered by distance (0 = the type
        // itself), keeping the farthest occurrence when a type appears twice.
        IEnumerable<TypeInheritance> GetInheritance(Type sourceType)
        {
            return cachedOrderedTypeHierarchies.GetOrAdd(
                sourceType,
                type => type.GetInheritanceTree()
                    .Inheritance
                    .Traverse(TraverseKind.BreadthFirst, t => t.Inheritance)
                    .Concat(new[] { new TypeInheritance(sourceType, 0) })
                    .OrderBy(t => t.Distance)
                    .Distinct()
                    // If there are duplicates, take the farthest type
                    .GroupBy(t => t.Type)
                    .Select(group => group.OrderByDescending(h => h.Distance).First())
                    // Do a final order by distance.
                    .OrderBy(t => t.Distance)
                    .ToList());
        }

        // One (adapter instance, From, To) triple per IAdapter<,> implementation.
        class AdapterInfo
        {
            public IAdapter Adapter;
            public Type From;
            public Type To;
        }

        // Lightweight fluent wrapper returned by Adapt<TSource>; holds the source
        // and routes As<T>() back to the service.
        class Adaptable<TSource> : IAdaptable<TSource> where TSource : class
        {
            AdapterService service;
            TSource source;

            public Adaptable(AdapterService service, TSource source)
            {
                this.service = service;
                this.source = source;
            }

            public T As<T>() where T : class => service.Adapt<TSource, T>(source);
        }
    }
}
//http://www.codeproject.com/KB/cs/lsadotnet.aspx
using System;
using System.Text;
using System.Runtime.InteropServices;

namespace dropkick.Tasks.Security.LocalPolicy
{
    /// <summary>
    /// Wrapper around the Win32 LSA (Local Security Authority) API used to grant
    /// account privileges (e.g. "SeServiceLogonRight") to a local or domain account.
    /// </summary>
    public class LsaUtility
    {
        // Import the LSA functions

        [DllImport("advapi32.dll", PreserveSig = true)]
        private static extern UInt32 LsaOpenPolicy(
            ref LSA_UNICODE_STRING SystemName,
            ref LSA_OBJECT_ATTRIBUTES ObjectAttributes,
            Int32 DesiredAccess,
            out IntPtr PolicyHandle
            );

        [DllImport("advapi32.dll", SetLastError = true, PreserveSig = true)]
        private static extern long LsaAddAccountRights(
            IntPtr PolicyHandle,
            IntPtr AccountSid,
            LSA_UNICODE_STRING[] UserRights,
            long CountOfRights);

        // Kept for backward compatibility with external callers. Note: FreeSid is only
        // valid for SIDs allocated by AllocateAndInitializeSid; memory allocated with
        // Marshal.AllocHGlobal (as done in SetRight below) must be released with
        // Marshal.FreeHGlobal instead.
        [DllImport("advapi32")]
        public static extern void FreeSid(IntPtr pSid);

        [DllImport("advapi32.dll", CharSet = CharSet.Auto, SetLastError = true, PreserveSig = true)]
        private static extern bool LookupAccountName(
            string lpSystemName, string lpAccountName,
            IntPtr psid,
            ref int cbsid,
            StringBuilder domainName, ref int cbdomainLength, ref int use);

        [DllImport("advapi32.dll")]
        private static extern bool IsValidSid(IntPtr pSid);

        [DllImport("advapi32.dll")]
        private static extern long LsaClose(IntPtr ObjectHandle);

        // NOTE(review): P/Invoking GetLastError directly is unreliable because the CLR
        // may call Win32 APIs between the failing call and this one; retained only for
        // compatibility. SetRight uses Marshal.GetLastWin32Error() instead.
        [DllImport("kernel32.dll")]
        private static extern int GetLastError();

        [DllImport("advapi32.dll")]
        private static extern long LsaNtStatusToWinError(long status);

        // define the structures

        // Counted UTF-16 string used throughout the LSA API.
        // Length/MaximumLength are byte counts (Length excludes the terminating null).
        [StructLayout(LayoutKind.Sequential)]
        private struct LSA_UNICODE_STRING
        {
            public UInt16 Length;
            public UInt16 MaximumLength;
            public IntPtr Buffer;
        }

        [StructLayout(LayoutKind.Sequential)]
        private struct LSA_OBJECT_ATTRIBUTES
        {
            public int Length;
            public IntPtr RootDirectory;
            public LSA_UNICODE_STRING ObjectName;
            public UInt32 Attributes;
            public IntPtr SecurityDescriptor;
            public IntPtr SecurityQualityOfService;
        }

        // enum all policies
        private enum LSA_AccessPolicy : long
        {
            POLICY_VIEW_LOCAL_INFORMATION = 0x00000001L,
            POLICY_VIEW_AUDIT_INFORMATION = 0x00000002L,
            POLICY_GET_PRIVATE_INFORMATION = 0x00000004L,
            POLICY_TRUST_ADMIN = 0x00000008L,
            POLICY_CREATE_ACCOUNT = 0x00000010L,
            POLICY_CREATE_SECRET = 0x00000020L,
            POLICY_CREATE_PRIVILEGE = 0x00000040L,
            POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080L,
            POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100L,
            POLICY_AUDIT_LOG_ADMIN = 0x00000200L,
            POLICY_SERVER_ADMIN = 0x00000400L,
            POLICY_LOOKUP_NAMES = 0x00000800L,
            POLICY_NOTIFICATION = 0x00001000L
        }

        /// <summary>Adds a privilege to an account</summary>
        /// <param name="systemName">The computer to apply the rights to</param>
        /// <param name="accountName">Name of an account - "domain\account" or only "account"</param>
        /// <param name="privilegeName">Name ofthe privilege</param>
        /// <returns>The windows error code returned by LsaAddAccountRights</returns>
        public static long SetRight(string systemName, String accountName, String privilegeName)
        {
            long winErrorCode = 0; //contains the last error

            //pointer an size for the SID
            IntPtr sid = IntPtr.Zero;
            int sidSize = 0;
            //StringBuilder and size for the domain name
            StringBuilder domainName = new StringBuilder();
            int nameSize = 0;
            //account-type variable for lookup
            int accountType = 0;

            //get required buffer size (this first call is expected to fail with
            //ERROR_INSUFFICIENT_BUFFER, filling in sidSize/nameSize)
            LookupAccountName(String.Empty, accountName, sid, ref sidSize, domainName, ref nameSize, ref accountType);

            //allocate buffers
            domainName = new StringBuilder(nameSize);
            sid = Marshal.AllocHGlobal(sidSize);

            //unmanaged buffers that must be released before returning
            LSA_UNICODE_STRING system = new LSA_UNICODE_STRING();
            LSA_UNICODE_STRING[] userRights = null;

            try
            {
                //lookup the SID for the account
                bool result = LookupAccountName(String.Empty, accountName, sid, ref sidSize, domainName, ref nameSize, ref accountType);

                if (!result)
                {
                    //Marshal.GetLastWin32Error is the reliable way to read the error
                    //set by a SetLastError=true P/Invoke (see note on GetLastError above)
                    winErrorCode = Marshal.GetLastWin32Error();
                    Console.WriteLine("LookupAccountName failed: " + winErrorCode);
                }
                else
                {
                    //initialize an empty unicode-string; only filled in when targeting
                    //a remote system (LsaOpenPolicy treats an empty name as "local")
                    if (systemName != null)
                    {
                        system.Buffer = Marshal.StringToHGlobalUni(systemName);
                        //FIX: the lengths must describe systemName (the original code
                        //mistakenly used privilegeName.Length here). Length is in bytes,
                        //excluding the null terminator; MaximumLength includes it.
                        system.Length = (UInt16)(systemName.Length * UnicodeEncoding.CharSize);
                        system.MaximumLength = (UInt16)((systemName.Length + 1) * UnicodeEncoding.CharSize);
                    }

                    //combine all policies
                    int access = (int)(
                        LSA_AccessPolicy.POLICY_AUDIT_LOG_ADMIN |
                        LSA_AccessPolicy.POLICY_CREATE_ACCOUNT |
                        LSA_AccessPolicy.POLICY_CREATE_PRIVILEGE |
                        LSA_AccessPolicy.POLICY_CREATE_SECRET |
                        LSA_AccessPolicy.POLICY_GET_PRIVATE_INFORMATION |
                        LSA_AccessPolicy.POLICY_LOOKUP_NAMES |
                        LSA_AccessPolicy.POLICY_NOTIFICATION |
                        LSA_AccessPolicy.POLICY_SERVER_ADMIN |
                        LSA_AccessPolicy.POLICY_SET_AUDIT_REQUIREMENTS |
                        LSA_AccessPolicy.POLICY_SET_DEFAULT_QUOTA_LIMITS |
                        LSA_AccessPolicy.POLICY_TRUST_ADMIN |
                        LSA_AccessPolicy.POLICY_VIEW_AUDIT_INFORMATION |
                        LSA_AccessPolicy.POLICY_VIEW_LOCAL_INFORMATION
                        );

                    //initialize a pointer for the policy handle
                    IntPtr policyHandle = IntPtr.Zero;

                    //these attributes are not used, but LsaOpenPolicy wants them to exists
                    LSA_OBJECT_ATTRIBUTES ObjectAttributes = new LSA_OBJECT_ATTRIBUTES();
                    ObjectAttributes.Length = 0;
                    ObjectAttributes.RootDirectory = IntPtr.Zero;
                    ObjectAttributes.Attributes = 0;
                    ObjectAttributes.SecurityDescriptor = IntPtr.Zero;
                    ObjectAttributes.SecurityQualityOfService = IntPtr.Zero;

                    //get a policy handle
                    uint resultPolicy = LsaOpenPolicy(ref system, ref ObjectAttributes, access, out policyHandle);
                    winErrorCode = LsaNtStatusToWinError(resultPolicy);

                    if (winErrorCode != 0)
                    {
                        Console.WriteLine("OpenPolicy failed: " + winErrorCode);
                    }
                    else
                    {
                        //Now that we have the SID an the policy,
                        //we can add rights to the account.

                        //initialize an unicode-string for the privilege name
                        userRights = new LSA_UNICODE_STRING[1];
                        userRights[0] = new LSA_UNICODE_STRING();
                        userRights[0].Buffer = Marshal.StringToHGlobalUni(privilegeName);
                        userRights[0].Length = (UInt16)(privilegeName.Length * UnicodeEncoding.CharSize);
                        userRights[0].MaximumLength = (UInt16)((privilegeName.Length + 1) * UnicodeEncoding.CharSize);

                        //add the right to the account
                        long res = LsaAddAccountRights(policyHandle, sid, userRights, 1);
                        winErrorCode = LsaNtStatusToWinError(res);
                        if (winErrorCode != 0)
                        {
                            Console.WriteLine("LsaAddAccountRights failed: " + winErrorCode);
                        }

                        LsaClose(policyHandle);
                    }
                }
            }
            finally
            {
                //FIX: release everything we allocated, on every code path. The
                //original code leaked the SID on the failure path, never freed the
                //HGlobal string buffers, and released the AllocHGlobal SID with
                //FreeSid (the wrong deallocator for this allocation).
                if (sid != IntPtr.Zero)
                    Marshal.FreeHGlobal(sid);
                if (system.Buffer != IntPtr.Zero)
                    Marshal.FreeHGlobal(system.Buffer);
                if (userRights != null && userRights[0].Buffer != IntPtr.Zero)
                    Marshal.FreeHGlobal(userRights[0].Buffer);
            }

            return winErrorCode;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

namespace System.Linq.Parallel.Tests
{
    // a key part of cancellation testing is 'promptness'. Those tests appear in pfxperfunittests.
    // the tests here are only regarding basic API correctness and sanity checking.
    public static class WithCancellationTests
    {
        // A pre-canceled token must cancel the query before the source is ever
        // enumerated (the source throws if enumeration starts).
        [Fact]
        public static void PreCanceledToken_ForAll()
        {
            OperationCanceledException caughtException = null;
            var cs = new CancellationTokenSource();
            cs.Cancel();

            IEnumerable<int> throwOnFirstEnumerable = Enumerables<int>.ThrowOnEnumeration();

            try
            {
                throwOnFirstEnumerable
                    .AsParallel()
                    .WithCancellation(cs.Token)
                    .ForAll((x) => { Debug.WriteLine(x.ToString()); });
            }
            catch (OperationCanceledException ex)
            {
                caughtException = ex;
            }

            Assert.NotNull(caughtException);
            Assert.Equal(cs.Token, caughtException.CancellationToken);
        }

        // Same as above, but driving the query with a plain foreach enumerator
        // instead of ForAll.
        [Fact]
        public static void PreCanceledToken_SimpleEnumerator()
        {
            OperationCanceledException caughtException = null;
            var cs = new CancellationTokenSource();
            cs.Cancel();

            IEnumerable<int> throwOnFirstEnumerable = Enumerables<int>.ThrowOnEnumeration();

            try
            {
                var query = throwOnFirstEnumerable
                    .AsParallel()
                    .WithCancellation(cs.Token);

                foreach (var item in query)
                {
                }
            }
            catch (OperationCanceledException ex)
            {
                caughtException = ex;
            }

            Assert.NotNull(caughtException);
            Assert.Equal(cs.Token, caughtException.CancellationToken);
        }

        // NOTE(review): despite the name, this composes WithDegreeOfParallelism
        // twice rather than WithCancellation; presumably repeating any
        // query-settings operator triggers the InvalidOperationException — confirm
        // whether WithCancellation(...).WithCancellation(...) was intended here.
        [Fact]
        public static void MultiplesWithCancellationIsIllegal()
        {
            InvalidOperationException caughtException = null;

            try
            {
                CancellationTokenSource cs = new CancellationTokenSource();
                CancellationToken ct = cs.Token;
                var query = Enumerable.Range(1, 10).AsParallel().WithDegreeOfParallelism(2).WithDegreeOfParallelism(2);
                query.ToArray();
            }
            catch (InvalidOperationException ex)
            {
                caughtException = ex;
                //Program.TestHarness.Log("IOE caught. message = " + ex.Message);
            }

            Assert.NotNull(caughtException);
        }

        // Cancels the token from inside a Select delegate while a sorting ToArray
        // is in flight; the OCE must surface with the query's token.
        [Fact]
        public static void CTT_Sorting_ToArray()
        {
            int size = 10000;
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            OperationCanceledException caughtException = null;

            try
            {
                Enumerable.Range(1, size).AsParallel()
                    .WithCancellation(tokenSource.Token)
                    .Select(i => { tokenSource.Cancel(); return i; })
                    .ToArray();
            }
            catch (OperationCanceledException ex)
            {
                caughtException = ex;
            }

            Assert.NotNull(caughtException);
            Assert.Equal(tokenSource.Token, caughtException.CancellationToken);
        }

        /// <summary>
        ///
        /// [Regression Test]
        /// This issue occurred because the QuerySettings structure was not being deep-cloned during
        /// query-opening.  As a result, the concurrent inner-enumerators (for the RHS operators)
        /// that occur in SelectMany were sharing CancellationState that they should not have.
        /// The result was that enumerators could falsely believe they had been canceled when
        /// another inner-enumerator was disposed.
        ///
        /// Note: the failure was intermittent.  this test would fail about 1 in 2 times on mikelid1 (4-core).
        /// </summary>
        /// <returns></returns>
        [Fact]
        public static void CloningQuerySettingsForSelectMany()
        {
            var plinq_src = ParallelEnumerable.Range(0, 1999).AsParallel();
            Exception caughtException = null;

            try
            {
                var inner = ParallelEnumerable.Range(0, 20).AsParallel().Select(_item => _item);
                var output = plinq_src
                    .SelectMany(
                        _x => inner,
                        (_x, _y) => _x
                    )
                    .ToArray();
            }
            catch (Exception ex)
            {
                caughtException = ex;
            }

            // No exception at all is the success condition here.
            Assert.Null(caughtException);
        }

        // [Regression Test]
        // Use of the async channel can block both the consumer and producer threads.. before the cancellation work
        // these had no means of being awoken.
        //
        // However, only the producers need to wake up on cancellation as the consumer
        // will wake up once all the producers have gone away (via AsynchronousOneToOneChannel.SetDone())
        //
        // To specifically verify this test, we want to know that the Async channels were blocked in TryEnqueChunk before Dispose() is called
        //  -> this was verified manually, but is not simple to automate
        [Fact]
        [OuterLoop] // explicit timeouts / delays
        public static void ChannelCancellation_ProducerBlocked()
        {
            Debug.WriteLine("PlinqCancellationTests.ChannelCancellation_ProducerBlocked()");

            Debug.WriteLine("        Query running (should be few seconds max)..");
            var query1 = Enumerable.Range(0, 100000000)  //provide 100million elements to ensure all the cores get >64K ints. Good up to 1600cores
                .AsParallel()
                .Select(x => x);
            var enumerator1 = query1.GetEnumerator();
            enumerator1.MoveNext();
            Task.Delay(1000).Wait();
            enumerator1.MoveNext();
            enumerator1.Dispose(); //can potentially hang

            Debug.WriteLine("        Done (success).");
        }

        /// <summary>
        /// [Regression Test]
        /// This issue occurred because aggregations like Sum or Average would incorrectly
        /// wrap OperationCanceledException with AggregateException.
        /// </summary>
        [Fact]
        public static void AggregatesShouldntWrapOCE()
        {
            var cs = new CancellationTokenSource();
            cs.Cancel();

            // Expect OperationCanceledException rather than AggregateException or something else
            try
            {
                Enumerable.Range(0, 1000).AsParallel().WithCancellation(cs.Token).Sum(x => x);
            }
            catch (OperationCanceledException)
            {
                return;
            }
            catch (Exception e)
            {
                Assert.True(false, string.Format("PlinqCancellationTests.AggregatesShouldntWrapOCE:  > Failed: got {0}, expected OperationCanceledException", e.GetType().ToString()));
            }

            Assert.True(false, string.Format("PlinqCancellationTests.AggregatesShouldntWrapOCE:  > Failed: no exception occurred, expected OperationCanceledException"));
        }

        // Plinq suppresses OCE(externalCT) occurring in worker threads and then throws a single OCE(ct)
        // if a manual OCE(ct) is thrown but ct is not canceled, Plinq should not suppress it, else things
        // get confusing...
        // ONLY an OCE(ct) for ct.IsCancellationRequested=true is co-operative cancellation
        [Fact]
        public static void OnlySuppressOCEifCTCanceled()
        {
            AggregateException caughtException = null;

            CancellationTokenSource cts = new CancellationTokenSource();
            CancellationToken externalToken = cts.Token;

            try
            {
                Enumerable.Range(1, 10).AsParallel()
                    .WithCancellation(externalToken)
                    .Select(
                        x =>
                        {
                            if (x % 2 == 0) throw new OperationCanceledException(externalToken);
                            return x;
                        }
                    )
                    .ToArray();
            }
            catch (AggregateException ae)
            {
                caughtException = ae;
            }

            // The token was never canceled, so the OCE is a user exception and must
            // arrive wrapped in an AggregateException.
            Assert.NotNull(caughtException);
        }

        // a specific repro where inner queries would see an ODE on the merged cancellation token source
        // when the implementation involved disposing and recreating the token on each worker thread
        [Fact]
        public static void Cancellation_ODEIssue()
        {
            AggregateException caughtException = null;

            try
            {
                Enumerable.Range(0, 1999).ToArray()
                    .AsParallel().AsUnordered()
                    .WithExecutionMode(ParallelExecutionMode.ForceParallelism)
                    .Zip<int, int, int>(
                        Enumerable.Range(1000, 20).Select<int, int>(_item => (int)_item).AsParallel().AsUnordered(),
                        (first, second) => { throw new OperationCanceledException(); })
                    .ForAll(x => { });
            }
            catch (AggregateException ae)
            {
                caughtException = ae;
            }

            //the failure was an ODE coming out due to an ephemeral disposed merged cancellation token source.
            Assert.True(caughtException != null, "Cancellation_ODEIssue:  We expect an aggregate exception with OCEs in it.");
        }

        [Fact]
        [OuterLoop] // explicit timeouts / delays
        public static void CancellationSequentialWhere()
        {
            IEnumerable<int> src = Enumerable.Repeat(0, int.MaxValue);
            CancellationTokenSource tokenSrc = new CancellationTokenSource();

            var q = src.AsParallel().WithCancellation(tokenSrc.Token).Where(x => false).TakeWhile(x => true);

            Task task = Task.Run(
                () =>
                {
                    try
                    {
                        foreach (var x in q)
                        {
                        }

                        Assert.True(false, string.Format("PlinqCancellationTests.CancellationSequentialWhere:  > Failed: OperationCanceledException was not caught."));
                    }
                    catch (OperationCanceledException oce)
                    {
                        if (oce.CancellationToken != tokenSrc.Token)
                        {
                            Assert.True(false, string.Format("PlinqCancellationTests.CancellationSequentialWhere:  > Failed: Wrong cancellation token."));
                        }
                    }
                }
            );

            // We wait for 100 ms. If we canceled the token source immediately, the cancellation
            // would occur at the query opening time. The goal of this test is to test cancellation
            // at query execution time.
            Task.Delay(100).Wait();
            //Thread.Sleep(100);
            tokenSrc.Cancel();
            task.Wait();
        }

        [Fact]
        [OuterLoop] // explicit timeouts / delays
        public static void CancellationSequentialElementAt()
        {
            IEnumerable<int> src = Enumerable.Repeat(0, int.MaxValue);
            CancellationTokenSource tokenSrc = new CancellationTokenSource();

            Task task = Task.Run(
                () =>
                {
                    try
                    {
                        int res = src.AsParallel()
                            .WithCancellation(tokenSrc.Token)
                            .Where(x => true)
                            .TakeWhile(x => true)
                            .ElementAt(int.MaxValue - 1);

                        Assert.True(false, string.Format("PlinqCancellationTests.CancellationSequentialElementAt:  > Failed: OperationCanceledException was not caught."));
                    }
                    catch (OperationCanceledException oce)
                    {
                        Assert.Equal(oce.CancellationToken, tokenSrc.Token);
                    }
                }
            );

            // We wait for 100 ms. If we canceled the token source immediately, the cancellation
            // would occur at the query opening time. The goal of this test is to test cancellation
            // at query execution time.
            Task.Delay(100).Wait();
            tokenSrc.Cancel();
            task.Wait();
        }

        [Fact]
        [OuterLoop] // explicit timeouts / delays
        public static void CancellationSequentialDistinct()
        {
            IEnumerable<int> src = Enumerable.Repeat(0, int.MaxValue);
            CancellationTokenSource tokenSrc = new CancellationTokenSource();

            Task task = Task.Run(
                () =>
                {
                    try
                    {
                        var q = src.AsParallel()
                            .WithCancellation(tokenSrc.Token)
                            .Distinct()
                            .TakeWhile(x => true);

                        foreach (var x in q)
                        {
                        }

                        Assert.True(false, string.Format("PlinqCancellationTests.CancellationSequentialDistinct:  > Failed: OperationCanceledException was not caught."));
                    }
                    catch (OperationCanceledException oce)
                    {
                        Assert.Equal(oce.CancellationToken, tokenSrc.Token);
                    }
                }
            );

            // We wait for 100 ms. If we canceled the token source immediately, the cancellation
            // would occur at the query opening time. The goal of this test is to test cancellation
            // at query execution time.
            Task.Delay(100).Wait();
            tokenSrc.Cancel();
            task.Wait();
        }

        // Regression test for an issue causing ODE if a queryEnumerator is disposed before moveNext is called.
        [Fact]
        public static void ImmediateDispose()
        {
            var queryEnumerator = Enumerable.Range(1, 10).AsParallel().Select(x => x).GetEnumerator();
            queryEnumerator.Dispose();
        }

        // REPRO 1 -- cancellation
        [Fact]
        public static void SetOperationsThrowAggregateOnCancelOrDispose_1()
        {
            CancellationTokenSource cs = new CancellationTokenSource();
            // The Select delegate cancels the token as a side effect on the very
            // first element produced.
            var plinq_src = Enumerable.Range(0, 5000000).Select(x => { cs.Cancel(); return x; });

            try
            {
                var plinq = plinq_src
                    .AsParallel().WithCancellation(cs.Token)
                    .WithDegreeOfParallelism(1)
                    .Union(Enumerable.Range(0, 10).AsParallel());

                var walker = plinq.GetEnumerator();
                while (walker.MoveNext())
                {
                    var item = walker.Current;
                }

                Assert.True(false, string.Format("PlinqCancellationTests.SetOperationsThrowAggregateOnCancelOrDispose_1:  OperationCanceledException was expected, but no exception occurred."));
            }
            catch (OperationCanceledException)
            {
                //This is expected.
            }
            catch (Exception e)
            {
                Assert.True(false, string.Format("PlinqCancellationTests.SetOperationsThrowAggregateOnCancelOrDispose_1:  OperationCanceledException was expected, but a different exception occurred. " + e.ToString()));
            }
        }

        // throwing a fake OCE(ct) when the ct isn't canceled should produce an AggregateException.
        [Fact]
        public static void SetOperationsThrowAggregateOnCancelOrDispose_2()
        {
            try
            {
                CancellationTokenSource cs = new CancellationTokenSource();
                var plinq = Enumerable.Range(0, 50)
                    .AsParallel().WithCancellation(cs.Token)
                    .WithDegreeOfParallelism(1)
                    .Union(Enumerable.Range(0, 10).AsParallel().Select<int, int>(x => { throw new OperationCanceledException(cs.Token); }));

                var walker = plinq.GetEnumerator();
                while (walker.MoveNext())
                {
                }

                Assert.True(false, string.Format("PlinqCancellationTests.SetOperationsThrowAggregateOnCancelOrDispose_2:  failed.  AggregateException was expected, but no exception occurred."));
            }
            catch (AggregateException)
            {
                // expected
            }
            catch (Exception e)
            {
                Assert.True(false, string.Format("PlinqCancellationTests.SetOperationsThrowAggregateOnCancelOrDispose_2.  failed.  AggregateException was expected, but some other exception occurred." + e.ToString()));
            }
        }

        // Changes made to hash-partitioning (April'09) lost the cancellation checks during the
        // main repartitioning loop (matrix building).
        [Fact]
        public static void HashPartitioningCancellation()
        {
            OperationCanceledException caughtException = null;
            CancellationTokenSource cs = new CancellationTokenSource();

            //Without ordering
            var queryUnordered = Enumerable.Range(0, int.MaxValue)
                .Select(x => { if (x == 0) cs.Cancel(); return x; })
                .AsParallel()
                .WithCancellation(cs.Token)
                .Intersect(Enumerable.Range(0, 1000000).AsParallel());

            try
            {
                foreach (var item in queryUnordered)
                {
                }
            }
            catch (OperationCanceledException oce)
            {
                caughtException = oce;
            }

            Assert.NotNull(caughtException);

            caughtException = null;

            //With ordering
            var queryOrdered = Enumerable.Range(0, int.MaxValue)
                .Select(x => { if (x == 0) cs.Cancel(); return x; })
                .AsParallel().AsOrdered()
                .WithCancellation(cs.Token)
                .Intersect(Enumerable.Range(0, 1000000).AsParallel());

            try
            {
                foreach (var item in queryOrdered)
                {
                }
            }
            catch (OperationCanceledException oce)
            {
                caughtException = oce;
            }

            Assert.NotNull(caughtException);
        }

        // If a query is cancelled and immediately disposed, the dispose should not throw an OCE.
        [Fact]
        public static void CancelThenDispose()
        {
            try
            {
                CancellationTokenSource cancel = new CancellationTokenSource();
                var q = ParallelEnumerable.Range(0, 1000).WithCancellation(cancel.Token).Select(x => x);
                IEnumerator<int> e = q.GetEnumerator();
                e.MoveNext();

                cancel.Cancel();
                e.Dispose();
            }
            catch (Exception e)
            {
                Assert.True(false, string.Format("PlinqCancellationTests.CancelThenDispose:  > Failed. Expected no exception, got " + e.GetType()));
            }
        }

        [Fact]
        public static void DontDoWorkIfTokenAlreadyCanceled()
        {
            OperationCanceledException oce = null;

            CancellationTokenSource cs = new CancellationTokenSource();
            var query = Enumerable.Range(0, 100000000)
                .Select(x =>
                {
                    if (x > 0) // to avoid the "Error:unreachable code detected"
                        throw new ArgumentException("User-delegate exception.");
                    return x;
                })
                .AsParallel()
                .WithCancellation(cs.Token)
                .Select(x => x);

            cs.Cancel();
            try
            {
                foreach (var item in query) //We expect an OperationCancelledException during the MoveNext
                {
                }
            }
            catch (OperationCanceledException ex)
            {
                oce = ex;
            }

            Assert.NotNull(oce);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using System.Xml;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Construction;
using Microsoft.Build.Shared;
using InvalidProjectFileException = Microsoft.Build.Exceptions.InvalidProjectFileException;
using Xunit;

namespace Microsoft.Build.UnitTests.OM.Construction
{
    /// <summary>
    /// Tests for the ProjectChooseElement class (and for ProjectWhenElement and ProjectOtherwiseElement).
    /// Malformed Choose markup is expected to fail at parse time with an InvalidProjectFileException;
    /// well-formed markup is inspected through the construction object model.
    /// </summary>
    public class ProjectChooseElement_Tests
    {
        /// <summary>
        /// Read choose with unexpected attribute
        /// </summary>
        [Fact]
        public void ReadInvalidAttribute()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // Choose accepts no arbitrary attributes, so 'X' must be rejected.
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose X='Y'/> </Project> ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read choose with unexpected Condition attribute.
        /// Condition is not currently allowed on Choose.
        /// </summary>
        [Fact]
        public void ReadInvalidConditionAttribute()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // Even though Condition is valid on When/Otherwise children, it is invalid on Choose itself.
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose Condition='true'/> </Project> ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read choose with unexpected child
        /// </summary>
        [Fact]
        public void ReadInvalidChild()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // Only When and Otherwise are legal children of Choose.
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <X/> </Choose> </Project> ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read choose with a When containing no Condition attribute
        /// </summary>
        [Fact]
        public void ReadInvalidWhen()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // When requires a Condition attribute; this one omits it.
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <When> <PropertyGroup><x/></PropertyGroup> </When> <Otherwise> <PropertyGroup><y/></PropertyGroup> </Otherwise> </Choose> </Project> ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read choose with only an otherwise
        /// </summary>
        [Fact]
        public void ReadInvalidOnlyOtherwise()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // At least one When must precede Otherwise.
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <Otherwise/> </Choose> </Project> ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read choose with two otherwises
        /// </summary>
        [Fact]
        public void ReadInvalidTwoOtherwise()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // A Choose may contain at most one Otherwise.
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <Otherwise/> <Otherwise/> </Choose> </Project> ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read choose with otherwise before when
        /// </summary>
        [Fact]
        public void ReadInvalidOtherwiseBeforeWhen()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // Otherwise must come last, after every When.
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <Otherwise/> <When Condition='c'/> </Choose> </Project> ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read empty choose
        /// </summary>
        /// <remarks>
        /// One might think this should work but 2.0 required at least one When.
        /// </remarks>
        [Fact]
        public void ReadInvalidEmptyChoose()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose/> </Project> ";

                // NOTE(review): Create throws for an empty Choose, so the two statements
                // below are unreachable dead code — only the exception is actually verified.
                ProjectRootElement project = ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));

                ProjectChooseElement choose = (ProjectChooseElement)Helpers.GetFirst(project.Children);
                Assert.Null(Helpers.GetFirst(choose.Children));
            }
            );
        }

        /// <summary>
        /// Read choose with only a when
        /// </summary>
        [Fact]
        public void ReadChooseOnlyWhen()
        {
            string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <When Condition='c'/> </Choose> </Project> ";

            ProjectRootElement project = ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            ProjectChooseElement choose = (ProjectChooseElement)Helpers.GetFirst(project.Children);

            // A single When and no Otherwise is the minimal legal Choose.
            Assert.Equal(1, Helpers.Count(choose.WhenElements));
            Assert.Null(choose.OtherwiseElement);
        }

        /// <summary>
        /// Read basic choose
        /// </summary>
        [Fact]
        public void ReadChooseBothWhenOtherwise()
        {
            string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <When Condition='c1'/> <When Condition='c2'/> <Otherwise/> </Choose> </Project> ";

            ProjectRootElement project = ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            ProjectChooseElement choose = (ProjectChooseElement)Helpers.GetFirst(project.Children);

            // Whens must be surfaced in document order with their conditions intact.
            List<ProjectWhenElement> whens = Helpers.MakeList(choose.WhenElements);
            Assert.Equal(2, whens.Count);
            Assert.Equal("c1", whens[0].Condition);
            Assert.Equal("c2", whens[1].Condition);
            Assert.NotNull(choose.OtherwiseElement);
        }

        /// <summary>
        /// Test stack overflow is prevented.
        /// </summary>
        [Fact]
        public void ExcessivelyNestedChoose()
        {
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                // Build 52 levels of nested Choose/When; the parser is expected to reject
                // the excessive depth with a project-file exception rather than overflow the stack.
                StringBuilder builder1 = new StringBuilder();
                StringBuilder builder2 = new StringBuilder();

                for (int i = 0; i < 52; i++)
                {
                    builder1.Append("<Choose><When Condition='true'>");
                    builder2.Append("</When></Choose>");
                }

                string content = "<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>";
                content += builder1.ToString();
                content += builder2.ToString();
                content += @"</Project>";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Setting a When's condition should dirty the project
        /// </summary>
        [Fact]
        public void SettingWhenConditionDirties()
        {
            string content = @" <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' > <Choose> <When Condition='true'> <PropertyGroup> <p>v1</p> </PropertyGroup> </When> </Choose> </Project> ";

            Project project = new Project(XmlReader.Create(new StringReader(content)));
            ProjectChooseElement choose = Helpers.GetFirst(project.Xml.ChooseElements);
            ProjectWhenElement when = Helpers.GetFirst(choose.WhenElements);
            when.Condition = "false";

            // The edit must not take effect until reevaluation: 'p' keeps its old value first...
            Assert.Equal("v1", project.GetPropertyValue("p"));

            // ...and disappears after ReevaluateIfNecessary, proving the project was dirtied.
            project.ReevaluateIfNecessary();

            Assert.Equal(String.Empty, project.GetPropertyValue("p"));
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Diagnostics;
using Xunit;

namespace System.Collections.ObjectModel.Tests
{
    /// <summary>
    /// Tests the public properties and constructor in ObservableCollection<T>.
    /// </summary>
    public class ReadOnlyObservableCollectionTests
    {
        /// <summary>
        /// Verifies construction over an existing ObservableCollection exposes exactly its
        /// items and that the wrapper reports itself read-only through IList{T}.
        /// </summary>
        [Fact]
        public static void Ctor_Tests()
        {
            string[] anArray = new string[] { "one", "two", "three", "four", "five" };
            ReadOnlyObservableCollection<string> readOnlyCol =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
            IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>(readOnlyCol, anArray);
            helper.InitialItems_Tests();
            IList<string> readOnlyColAsIList = readOnlyCol;
            Assert.True(readOnlyColAsIList.IsReadOnly, "ReadOnlyObservableCollection should be readOnly.");
        }

        /// <summary>
        /// Verifies that constructing over a null inner collection throws ArgumentNullException.
        /// </summary>
        [Fact]
        public static void Ctor_Tests_Negative()
        {
            ReadOnlyObservableCollection<string> collection;
            Assert.Throws<ArgumentNullException>(() => collection = new ReadOnlyObservableCollection<string>(null));
        }

        /// <summary>
        /// Verifies the indexer getter returns the expected item for every valid index.
        /// </summary>
        [Fact]
        public static void GetItemTests()
        {
            string[] anArray = new string[] { "one", "two", "three", "four", "five" };
            ReadOnlyObservableCollection<string> readOnlyCol =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
            IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>(readOnlyCol, anArray);
            helper.Item_get_Tests();
        }

        /// <summary>
        /// Verifies the indexer getter throws for out-of-range indices and does not mutate the collection.
        /// </summary>
        [Fact]
        public static void GetItemTests_Negative()
        {
            string[] anArray = new string[] { "one", "two", "three", "four", "five" };
            ReadOnlyObservableCollection<string> readOnlyCol =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
            IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>(readOnlyCol, anArray);
            helper.Item_get_Tests_Negative();
        }

        /// <summary>
        /// Tests that contains returns true when the item is in the collection
        /// and false otherwise.
        /// </summary>
        [Fact]
        public static void ContainsTests()
        {
            string[] anArray = new string[] { "one", "two", "three", "four", "five" };
            ReadOnlyObservableCollection<string> readOnlyCol =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));

            for (int i = 0; i < anArray.Length; i++)
            {
                string item = anArray[i];
                Assert.True(readOnlyCol.Contains(item), "ReadOnlyCol did not contain item: " + anArray[i] + " at index: " + i);
            }

            Assert.False(readOnlyCol.Contains("randomItem"), "ReadOnlyCol should not have contained non-existent item");
            Assert.False(readOnlyCol.Contains(null), "ReadOnlyCol should not have contained null");
        }

        /// <summary>
        /// Tests that the collection can be copied into a destination array.
        /// </summary>
        [Fact]
        public static void CopyToTest()
        {
            string[] anArray = new string[] { "one", "two", "three", "four" };
            ReadOnlyObservableCollection<string> readOnlyCol =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));

            // Exact-size copy starting at index 0.
            string[] aCopy = new string[anArray.Length];
            readOnlyCol.CopyTo(aCopy, 0);
            for (int i = 0; i < anArray.Length; ++i)
                Assert.Equal(anArray[i], aCopy[i]);

            // copy observable collection starting in middle, where array is larger than source.
            aCopy = new string[anArray.Length + 2];
            int offsetIndex = 1;
            readOnlyCol.CopyTo(aCopy, offsetIndex);
            for (int i = 0; i < aCopy.Length; i++)
            {
                string value = aCopy[i];
                // First and last destination slots were outside the copied range and must stay null.
                if (i == 0)
                    Assert.True(null == value, "Should not have a value since we did not start copying there.");
                else if (i == (aCopy.Length - 1))
                    Assert.True(null == value, "Should not have a value since the collection is shorter than the copy array..");
                else
                {
                    int indexInCollection = i - offsetIndex;
                    Assert.Equal(readOnlyCol[indexInCollection], aCopy[i]);
                }
            }
        }

        /// <summary>
        /// Tests that:
        /// ArgumentOutOfRangeException is thrown when the Index is >= collection.Count
        /// or Index < 0.
        /// ArgumentException when the destination array does not have enough space to
        /// contain the source Collection.
        /// ArgumentNullException when the destination array is null.
        /// </summary>
        [Fact]
        public static void CopyToTest_Negative()
        {
            string[] anArray = new string[] { "one", "two", "three", "four" };
            ReadOnlyObservableCollection<string> readOnlyCol =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));

            // Negative start indices: ArgumentOutOfRangeException.
            int[] iArrInvalidValues = new Int32[] { -1, -2, -100, -1000, -10000, -100000, -1000000, -10000000, -100000000, -1000000000, Int32.MinValue };
            foreach (var index in iArrInvalidValues)
            {
                string[] aCopy = new string[anArray.Length];
                Assert.Throws<ArgumentOutOfRangeException>(() => readOnlyCol.CopyTo(aCopy, index));
            }

            // Start indices that leave too little room: ArgumentException.
            int[] iArrLargeValues = new Int32[] { anArray.Length, Int32.MaxValue, Int32.MaxValue / 2, Int32.MaxValue / 10 };
            foreach (var index in iArrLargeValues)
            {
                string[] aCopy = new string[anArray.Length];
                Assert.Throws<ArgumentException>(() => readOnlyCol.CopyTo(aCopy, index));
            }

            Assert.Throws<ArgumentNullException>(() => readOnlyCol.CopyTo(null, 1));

            // Destination arrays smaller than the collection: ArgumentException.
            string[] copy = new string[anArray.Length - 1];
            Assert.Throws<ArgumentException>(() => readOnlyCol.CopyTo(copy, 0));

            copy = new string[0];
            Assert.Throws<ArgumentException>(() => readOnlyCol.CopyTo(copy, 0));
        }

        /// <summary>
        /// Tests that the index of an item can be retrieved when the item is
        /// in the collection and -1 otherwise.
        /// </summary>
        [Fact]
        public static void IndexOfTest()
        {
            string[] anArray = new string[] { "one", "two", "three", "four" };
            ReadOnlyObservableCollection<string> readOnlyCollection =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));

            for (int i = 0; i < anArray.Length; ++i)
                Assert.Equal(i, readOnlyCollection.IndexOf(anArray[i]));

            Assert.Equal(-1, readOnlyCollection.IndexOf("seven"));
            Assert.Equal(-1, readOnlyCollection.IndexOf(null));

            // testing that the first occurrence is the index returned.
            ObservableCollection<int> intCol = new ObservableCollection<int>();
            for (int i = 0; i < 4; ++i)
                intCol.Add(i % 2);
            ReadOnlyObservableCollection<int> intReadOnlyCol = new ReadOnlyObservableCollection<int>(intCol);
            Assert.Equal(0, intReadOnlyCol.IndexOf(0));
            Assert.Equal(1, intReadOnlyCol.IndexOf(1));

            // Non-generic IList.IndexOf with an object of the wrong type should report not-found.
            IList colAsIList = (IList)intReadOnlyCol;
            var index = colAsIList.IndexOf("stringObj");
            Assert.Equal(-1, index);
        }

        /// <summary>
        /// Tests that a ReadOnlyDictionary cannot be modified. That is, that
        /// Add, Remove, Clear does not work.
        /// </summary>
        [Fact]
        public static void CannotModifyDictionaryTests_Negative()
        {
            string[] anArray = new string[] { "one", "two", "three", "four", "five" };
            ReadOnlyObservableCollection<string> readOnlyCol =
                new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
            IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>();
            IList<string> readOnlyColAsIList = readOnlyCol;

            // Every mutating member of the IList<T> view must throw NotSupportedException.
            Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Add("seven"));
            Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Insert(0, "nine"));
            Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Remove("one"));
            Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.RemoveAt(0));
            Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Clear());

            // Confirm the failed mutation attempts left the contents untouched.
            helper.VerifyReadOnlyCollection(readOnlyCol, anArray);
        }

        /// <summary>
        /// Verifies debugger display and debugger type-proxy attributes are well-formed.
        /// </summary>
        [Fact]
        public static void DebuggerAttribute_Tests()
        {
            DebuggerAttributes.ValidateDebuggerDisplayReferences(new ReadOnlyObservableCollection<int>(new ObservableCollection<int>()));
            DebuggerAttributes.ValidateDebuggerTypeProxyProperties(new ReadOnlyObservableCollection<int>(new ObservableCollection<int>()));
        }
    }

    /// <summary>
    /// Reusable helper that verifies an IReadOnlyList implementation against an
    /// expected array of items (count, indexer, and both enumerators).
    /// </summary>
    internal class IReadOnlyList_T_Test<T>
    {
        // Collection under test; null when the parameterless constructor was used.
        private readonly IReadOnlyList<T> _collection;
        // Items the collection is expected to contain, in order.
        private readonly T[] _expectedItems;

        /// <summary>
        /// Initializes a new instance of the IReadOnlyList_T_Test.
        /// </summary>
        /// <param name="collection">The collection to run the tests on.</param>
        /// <param name="expectedItems">The items expected to be in the collection.</param>
        public IReadOnlyList_T_Test(IReadOnlyList<T> collection, T[] expectedItems)
        {
            _collection = collection;
            _expectedItems = expectedItems;
        }

        // Parameterless form: only the helper methods taking explicit arguments may be used.
        public IReadOnlyList_T_Test()
        {
        }

        /// <summary>
        /// This verifies that the collection contains the expected items.
        /// </summary>
        public void InitialItems_Tests()
        {
            // Verify Count returns the expected value
            Assert.Equal(_expectedItems.Length, _collection.Count);

            // Verify the initial items in the collection
            VerifyReadOnlyCollection(_collection, _expectedItems);
        }

        /// <summary>
        /// Runs all of the valid tests on get Item.
        /// </summary>
        public void Item_get_Tests()
        {
            // Verify get_Item with valid item on Collection
            Verify_get(_collection, _expectedItems);
        }

        /// <summary>
        /// Runs all of the argument checking(invalid) tests on get Item.
        /// </summary>
        public void Item_get_Tests_Negative()
        {
            // Verify get_Item with index=Int32.MinValue
            Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[Int32.MinValue]; });

            // Verify that the collection was not mutated
            VerifyReadOnlyCollection(_collection, _expectedItems);

            // Verify get_Item with index=-1
            Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[-1]; });

            // Verify that the collection was not mutated
            VerifyReadOnlyCollection(_collection, _expectedItems);

            if (_expectedItems.Length == 0)
            {
                // Verify get_Item with index=0 on Empty collection
                Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[0]; });

                // Verify that the collection was not mutated
                VerifyReadOnlyCollection(_collection, _expectedItems);
            }
            else
            {
                // Verify get_Item with index=Count on non-empty collection (one past the last valid index)
                Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[_expectedItems.Length]; });

                // Verify that the collection was not mutated
                VerifyReadOnlyCollection(_collection, _expectedItems);
            }
        }

        #region Helper Methods

        /// <summary>
        /// Verifies that the items in the collection match the expected items.
        /// </summary>
        internal void VerifyReadOnlyCollection(IReadOnlyList<T> collection, T[] items)
        {
            Verify_get(collection, items);
            VerifyGenericEnumerator(collection, items);
            VerifyEnumerator(collection, items);
        }

        /// <summary>
        /// Verifies that you can get all items that should be in the collection.
        /// </summary>
        private void Verify_get(IReadOnlyList<T> collection, T[] items)
        {
            Assert.Equal(items.Length, collection.Count);

            for (int i = 0; i < items.Length; i++)
            {
                int itemsIndex = i;
                Assert.Equal(items[itemsIndex], collection[i]);
            }
        }

        /// <summary>
        /// Verifies that the generic enumerator retrieves the correct items.
        /// </summary>
        private void VerifyGenericEnumerator(IReadOnlyList<T> collection, T[] expectedItems)
        {
            IEnumerator<T> enumerator = collection.GetEnumerator();
            int iterations = 0;
            int expectedCount = expectedItems.Length;

            // There is a sequential order to the collection, so we're testing for that.
            while ((iterations < expectedCount) && enumerator.MoveNext())
            {
                T currentItem = enumerator.Current;
                T tempItem;

                // Verify we have not gotten more items then we expected
                Assert.True(iterations < expectedCount,
                    "Err_9844awpa More items have been returned fromt the enumerator(" + iterations + " items) than are in the expectedElements(" + expectedCount + " items)");

                // Verify Current returned the correct value
                Assert.Equal(currentItem, expectedItems[iterations]);

                // Verify Current always returns the same value every time it is called
                for (int i = 0; i < 3; i++)
                {
                    tempItem = enumerator.Current;
                    Assert.Equal(currentItem, tempItem);
                }

                iterations++;
            }

            Assert.Equal(expectedCount, iterations);

            // Once exhausted, MoveNext must keep returning false.
            for (int i = 0; i < 3; i++)
            {
                Assert.False(enumerator.MoveNext(), "Err_2929ahiea Expected MoveNext to return false after" + iterations + " iterations");
            }

            enumerator.Dispose();
        }

        /// <summary>
        /// Verifies that the non-generic enumerator retrieves the correct items.
        /// </summary>
        private void VerifyEnumerator(IReadOnlyList<T> collection, T[] expectedItems)
        {
            IEnumerator enumerator = collection.GetEnumerator();
            int iterations = 0;
            int expectedCount = expectedItems.Length;

            // There is no sequential order to the collection, so we're testing that all the items
            // in the readonlydictionary exist in the array.
            bool[] itemsVisited = new bool[expectedCount];
            bool itemFound;
            while ((iterations < expectedCount) && enumerator.MoveNext())
            {
                object currentItem = enumerator.Current;
                object tempItem;

                // Verify we have not gotten more items then we expected
                Assert.True(iterations < expectedCount,
                    "Err_9844awpa More items have been returned fromt the enumerator(" + iterations + " items) then are in the expectedElements(" + expectedCount + " items)");

                // Verify Current returned the correct value
                itemFound = false;
                // Match against the first not-yet-visited expected item so duplicates are each counted once.
                for (int i = 0; i < itemsVisited.Length; ++i)
                {
                    if (!itemsVisited[i] && expectedItems[i].Equals(currentItem))
                    {
                        itemsVisited[i] = true;
                        itemFound = true;
                        break;
                    }
                }
                Assert.True(itemFound, "Err_1432pauy Current returned unexpected value=" + currentItem);

                // Verify Current always returns the same value every time it is called
                for (int i = 0; i < 3; i++)
                {
                    tempItem = enumerator.Current;
                    Assert.Equal(currentItem, tempItem);
                }

                iterations++;
            }

            // Every expected item must have been produced exactly once.
            for (int i = 0; i < expectedCount; ++i)
            {
                Assert.True(itemsVisited[i], "Err_052848ahiedoi Expected Current to return true for item: " + expectedItems[i] + "index: " + i);
            }

            Assert.Equal(expectedCount, iterations);

            // Once exhausted, MoveNext must keep returning false.
            for (int i = 0; i < 3; i++)
            {
                Assert.False(enumerator.MoveNext(), "Err_2929ahiea Expected MoveNext to return false after" + iterations + " iterations");
            }
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using Xunit;

namespace System.CodeDom.Tests
{
    /// <summary>
    /// Shared test base for the strongly-typed CodeDom collection classes.
    /// Concrete subclasses bind each abstract member to the corresponding member of a
    /// specific collection type (e.g. CodeStatementCollection), and inherit the [Fact]/[Theory]
    /// tests below, which exercise the collection purely through those abstract hooks.
    /// </summary>
    public abstract class CodeCollectionTestBase<TCollection, TItem>
        where TCollection: class
        where TItem : class, new()
    {
        // --- Abstract hooks: each forwards to the equivalent member on the concrete collection type. ---
        public abstract TCollection Ctor();
        public abstract TCollection CtorArray(TItem[] array);
        public abstract TCollection CtorCollection(TCollection collection);

        public abstract int Count(TCollection collection);

        public abstract TItem GetItem(TCollection collection, int index);
        public abstract void SetItem(TCollection collection, int index, TItem value);

        public abstract void AddRange(TCollection collection, TItem[] array);
        public abstract void AddRange(TCollection collection, TCollection value);

        public abstract object Add(TCollection collection, TItem seed);

        public abstract void Insert(TCollection collection, int index, TItem value);
        public abstract void Remove(TCollection collection, TItem value);

        public abstract int IndexOf(TCollection collection, TItem value);
        public abstract bool Contains(TCollection collection, TItem value);

        public abstract void CopyTo(TCollection collection, TItem[] array, int index);

        /// <summary>The parameterless constructor yields an empty collection.</summary>
        [Fact]
        public void Ctor_Empty()
        {
            var collection = Ctor();
            Assert.Equal(0, Count(collection));
        }

        /// <summary>Theory data: item arrays of length 0, 1 and 2.</summary>
        public static IEnumerable<object[]> AddRange_TestData()
        {
            yield return new object[] { new TItem[0] };
            yield return new object[] { new TItem[] { new TItem() } };
            yield return new object[] { new TItem[] { new TItem(), new TItem() } };
        }

        /// <summary>The array constructor copies the array's items in order.</summary>
        [Theory]
        [MemberData(nameof(AddRange_TestData))]
        public void Ctor_Array_Works(TItem[] value)
        {
            var collection = CtorArray(value);
            VerifyCollection(collection, value);
        }

        /// <summary>The collection-copy constructor copies another collection's items in order.</summary>
        [Theory]
        [MemberData(nameof(AddRange_TestData))]
        public void Ctor_CodeStatementCollection_Works(TItem[] value)
        {
            var collection = CtorCollection(CtorArray(value));
            VerifyCollection(collection, value);
        }

        /// <summary>AddRange(array) appends all items of the array.</summary>
        [Theory]
        [MemberData(nameof(AddRange_TestData))]
        public void AddRange_CodeStatementArray_Works(TItem[] value)
        {
            var collection = Ctor();
            AddRange(collection, value);
            VerifyCollection(collection, value);
        }

        /// <summary>AddRange(collection) appends all items of another collection.</summary>
        [Theory]
        [MemberData(nameof(AddRange_TestData))]
        public void AddRange_CodeStatementCollection_Works(TItem[] value)
        {
            var collection = Ctor();
            AddRange(collection, CtorCollection(CtorArray(value)));
            VerifyCollection(collection, value);
        }

        /// <summary>Constructors and AddRange reject a null source with ArgumentNullException("value").</summary>
        [Fact]
        public void AddRange_Null_ThrowsArgumentNullException()
        {
            AssertExtensions.Throws<ArgumentNullException>("value", () => CtorArray(null));
            AssertExtensions.Throws<ArgumentNullException>("value", () => CtorCollection(null));

            var collection = Ctor();
            AssertExtensions.Throws<ArgumentNullException>("value", () => AddRange(collection, (TItem[])null));
            AssertExtensions.Throws<ArgumentNullException>("value", () => AddRange(collection, (TCollection)null));
        }

        /// <summary>A null element inside the source array is rejected with ArgumentNullException("value").</summary>
        [Fact]
        public void AddRange_NullObjectInValue_ThrowsArgumentNullException()
        {
            AssertExtensions.Throws<ArgumentNullException>("value", () => CtorArray(new TItem[] { null }));

            var collection = Ctor();
            AssertExtensions.Throws<ArgumentNullException>("value", () => AddRange(collection, new TItem[] { null }));
        }

        /// <summary>
        /// End-to-end Add/Insert/Remove: Add returns the index 0 for the first item,
        /// Insert places an item at the front, and Remove deletes by reference.
        /// </summary>
        [Fact]
        public void Add_CodeStatement_Insert_Remove()
        {
            var collection = Ctor();

            var value1 = new TItem();
            Assert.Equal(0, Add(collection, value1));
            Assert.Equal(1, Count(collection));
            Assert.Equal(value1, GetItem(collection, 0));

            var value2 = new TItem();
            Insert(collection, 0, value2);
            Assert.Equal(2, Count(collection));
            Assert.Same(value2, GetItem(collection, 0));

            Remove(collection, value1);
            Assert.Equal(1, Count(collection));

            Remove(collection, value2);
            Assert.Equal(0, Count(collection));
        }

        /// <summary>Add rejects null with ArgumentNullException("value").</summary>
        [Fact]
        public void Add_Null_ThrowsArgumentNullException()
        {
            var collection = Ctor();
            AssertExtensions.Throws<ArgumentNullException>("value", () => Add(collection, null));
        }

        /// <summary>Insert rejects indices outside [0, Count] on an empty collection.</summary>
        [Theory]
        [InlineData(-1)]
        [InlineData(1)]
        public void Insert_InvalidIndex_ThrowsArgumentOutOfRangeException(int index)
        {
            var collection = Ctor();
            AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => Insert(collection, index, new TItem()));
        }

        /// <summary>Insert rejects a null value with ArgumentNullException("value").</summary>
        [Fact]
        public void Insert_Null_ThrowsArgumentNullException()
        {
            var collection = Ctor();
            AssertExtensions.Throws<ArgumentNullException>("value", () => Insert(collection, 0, null));
        }

        /// <summary>Remove rejects a null value with ArgumentNullException("value").</summary>
        [Fact]
        public void Remove_Null_ThrowsArgumentNullException()
        {
            var collection = Ctor();
            AssertExtensions.Throws<ArgumentNullException>("value", () => Remove(collection, null));
        }

        /// <summary>Remove of an item not in the collection throws ArgumentException.</summary>
        [Fact]
        public void Remove_NoSuchObject_ThrowsArgumentException()
        {
            var collection = Ctor();
            AssertExtensions.Throws<ArgumentException>(null, () => Remove(collection, new TItem()));
        }

        // NOTE(review): name is a copy-paste from the IndexOf test below — Contains returns
        // false (not -1) for missing items, which is exactly what is asserted here.
        [Fact]
        public void Contains_NoSuchObject_ReturnsMinusOne()
        {
            var collection = Ctor();
            Assert.False(Contains(collection, null));
            Assert.False(Contains(collection, new TItem()));
        }

        /// <summary>IndexOf returns -1 for null and for items not in the collection.</summary>
        [Fact]
        public void IndexOf_NoSuchObject_ReturnsMinusOne()
        {
            var collection = Ctor();
            Assert.Equal(-1, IndexOf(collection, null));
            Assert.Equal(-1, IndexOf(collection, new TItem()));
        }

        /// <summary>The indexer (get and set) rejects invalid indices on an empty collection.</summary>
        [Theory]
        [InlineData(-1)]
        [InlineData(0)]
        public void Item_InvalidIndex_ThrowsArgumentOutOfRangeException(int index)
        {
            var collection = Ctor();
            AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => GetItem(collection, index));
            AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => SetItem(collection, index, new TItem()));
        }

        /// <summary>Setting the indexer replaces the item without changing Count.</summary>
        [Fact]
        public void ItemSet_Get_ReturnsExpected()
        {
            var value1 = new TItem();
            var value2 = new TItem();
            var collection = Ctor();
            Add(collection, value1);

            SetItem(collection, 0, value2);
            Assert.Equal(1, Count(collection));
            Assert.Same(value2, GetItem(collection, 0));
        }

        /// <summary>
        /// Asserts the collection holds exactly <paramref name="contents"/> in order,
        /// cross-checking Count, GetItem, IndexOf, Contains and CopyTo against each other.
        /// </summary>
        private void VerifyCollection(TCollection collection, TItem[] contents)
        {
            Assert.Equal(contents.Length, Count(collection));

            for (int i = 0; i < contents.Length; i++)
            {
                TItem content = GetItem(collection, i);
                Assert.Equal(i, IndexOf(collection, content));
                Assert.True(Contains(collection, content));
                Assert.Same(content, GetItem(collection, i));
            }

            // CopyTo at offset 1: slot 0 stays untouched (null), the rest mirrors the contents.
            const int Index = 1;
            var copy = new TItem[contents.Length + Index];
            CopyTo(collection, copy, Index);
            Assert.Null(copy[0]);
            for (int i = Index; i < copy.Length; i++)
            {
                Assert.Same(contents[i - Index], copy[i]);
            }
        }
    }
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): machine-generated by the GAPIC generator — do not hand-modify the
// test logic; regenerate instead. Only explanatory comments were added here.

using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;

namespace Google.Cloud.Metastore.V1Alpha.Tests
{
    /// <summary>Generated unit tests.</summary>
    /// <remarks>
    /// Each test wires a strict Moq mock of the gRPC client, stubs exactly one RPC to
    /// return a canned response, and asserts the wrapping client returns that same
    /// instance (Assert.Same) for every overload: full request object, flat name
    /// string, and typed resource name; sync and async (CallSettings + CancellationToken).
    /// VerifyAll at the end enforces that every stubbed call was actually made.
    /// </remarks>
    public sealed class GeneratedDataprocMetastoreClientTest
    {
        // GetService with the full request object (sync).
        [xunit::FactAttribute]
        public void GetServiceRequestObject()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetServiceRequest request = new GetServiceRequest
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
            };
            // Canned response with generator-produced fingerprint values in every field.
            Service expectedResponse = new Service
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                Labels = { { "key8a0b6e3c", "value60c16320" }, },
                HiveMetastoreConfig = new HiveMetastoreConfig(),
                NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"),
                EndpointUri = "endpoint_uri59c03c94", Port = -78310000,
                State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0",
                ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer,
                MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(),
                Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(),
                ReleaseChannel = Service.Types.ReleaseChannel.Stable,
            };
            mockGrpcClient.Setup(x => x.GetService(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Service response = client.GetService(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetService with the full request object (async, both CallSettings and CancellationToken overloads).
        [xunit::FactAttribute]
        public async stt::Task GetServiceRequestObjectAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetServiceRequest request = new GetServiceRequest
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
            };
            Service expectedResponse = new Service
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                Labels = { { "key8a0b6e3c", "value60c16320" }, },
                HiveMetastoreConfig = new HiveMetastoreConfig(),
                NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"),
                EndpointUri = "endpoint_uri59c03c94", Port = -78310000,
                State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0",
                ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer,
                MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(),
                Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(),
                ReleaseChannel = Service.Types.ReleaseChannel.Stable,
            };
            mockGrpcClient.Setup(x => x.GetServiceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Service>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Service responseCallSettings = await client.GetServiceAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Service responseCancellationToken = await client.GetServiceAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetService via the flat resource-name string overload (sync).
        [xunit::FactAttribute]
        public void GetService()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetServiceRequest request = new GetServiceRequest
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
            };
            Service expectedResponse = new Service
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                Labels = { { "key8a0b6e3c", "value60c16320" }, },
                HiveMetastoreConfig = new HiveMetastoreConfig(),
                NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"),
                EndpointUri = "endpoint_uri59c03c94", Port = -78310000,
                State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0",
                ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer,
                MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(),
                Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(),
                ReleaseChannel = Service.Types.ReleaseChannel.Stable,
            };
            mockGrpcClient.Setup(x => x.GetService(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Service response = client.GetService(request.Name);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetService via the flat resource-name string overload (async).
        [xunit::FactAttribute]
        public async stt::Task GetServiceAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetServiceRequest request = new GetServiceRequest
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
            };
            Service expectedResponse = new Service
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                Labels = { { "key8a0b6e3c", "value60c16320" }, },
                HiveMetastoreConfig = new HiveMetastoreConfig(),
                NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"),
                EndpointUri = "endpoint_uri59c03c94", Port = -78310000,
                State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0",
                ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer,
                MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(),
                Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(),
                ReleaseChannel = Service.Types.ReleaseChannel.Stable,
            };
            mockGrpcClient.Setup(x => x.GetServiceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Service>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Service responseCallSettings = await client.GetServiceAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Service responseCancellationToken = await client.GetServiceAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetService via the typed ServiceName overload (sync).
        [xunit::FactAttribute]
        public void GetServiceResourceNames()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetServiceRequest request = new GetServiceRequest
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
            };
            Service expectedResponse = new Service
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                Labels = { { "key8a0b6e3c", "value60c16320" }, },
                HiveMetastoreConfig = new HiveMetastoreConfig(),
                NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"),
                EndpointUri = "endpoint_uri59c03c94", Port = -78310000,
                State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0",
                ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer,
                MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(),
                Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(),
                ReleaseChannel = Service.Types.ReleaseChannel.Stable,
            };
            mockGrpcClient.Setup(x => x.GetService(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Service response = client.GetService(request.ServiceName);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetService via the typed ServiceName overload (async).
        [xunit::FactAttribute]
        public async stt::Task GetServiceResourceNamesAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetServiceRequest request = new GetServiceRequest
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
            };
            Service expectedResponse = new Service
            {
                ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"),
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                Labels = { { "key8a0b6e3c", "value60c16320" }, },
                HiveMetastoreConfig = new HiveMetastoreConfig(),
                NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"),
                EndpointUri = "endpoint_uri59c03c94", Port = -78310000,
                State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0",
                ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer,
                MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(),
                Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(),
                ReleaseChannel = Service.Types.ReleaseChannel.Stable,
            };
            mockGrpcClient.Setup(x => x.GetServiceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Service>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Service responseCallSettings = await client.GetServiceAsync(request.ServiceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Service responseCancellationToken = await client.GetServiceAsync(request.ServiceName, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetMetadataImport with the full request object (sync).
        [xunit::FactAttribute]
        public void GetMetadataImportRequestObject()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetMetadataImportRequest request = new GetMetadataImportRequest
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
            };
            MetadataImport expectedResponse = new MetadataImport
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
                Description = "description2cf9da67",
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                State = MetadataImport.Types.State.Updating,
                DatabaseDump = new MetadataImport.Types.DatabaseDump(),
            };
            mockGrpcClient.Setup(x => x.GetMetadataImport(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            MetadataImport response = client.GetMetadataImport(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetMetadataImport with the full request object (async).
        [xunit::FactAttribute]
        public async stt::Task GetMetadataImportRequestObjectAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetMetadataImportRequest request = new GetMetadataImportRequest
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
            };
            MetadataImport expectedResponse = new MetadataImport
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
                Description = "description2cf9da67",
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                State = MetadataImport.Types.State.Updating,
                DatabaseDump = new MetadataImport.Types.DatabaseDump(),
            };
            mockGrpcClient.Setup(x => x.GetMetadataImportAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MetadataImport>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            MetadataImport responseCallSettings = await client.GetMetadataImportAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            MetadataImport responseCancellationToken = await client.GetMetadataImportAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetMetadataImport via the flat resource-name string overload (sync).
        [xunit::FactAttribute]
        public void GetMetadataImport()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetMetadataImportRequest request = new GetMetadataImportRequest
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
            };
            MetadataImport expectedResponse = new MetadataImport
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
                Description = "description2cf9da67",
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                State = MetadataImport.Types.State.Updating,
                DatabaseDump = new MetadataImport.Types.DatabaseDump(),
            };
            mockGrpcClient.Setup(x => x.GetMetadataImport(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            MetadataImport response = client.GetMetadataImport(request.Name);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetMetadataImport via the flat resource-name string overload (async).
        [xunit::FactAttribute]
        public async stt::Task GetMetadataImportAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetMetadataImportRequest request = new GetMetadataImportRequest
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
            };
            MetadataImport expectedResponse = new MetadataImport
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
                Description = "description2cf9da67",
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                State = MetadataImport.Types.State.Updating,
                DatabaseDump = new MetadataImport.Types.DatabaseDump(),
            };
            mockGrpcClient.Setup(x => x.GetMetadataImportAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MetadataImport>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            MetadataImport responseCallSettings = await client.GetMetadataImportAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            MetadataImport responseCancellationToken = await client.GetMetadataImportAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetMetadataImport via the typed MetadataImportName overload (sync).
        [xunit::FactAttribute]
        public void GetMetadataImportResourceNames()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetMetadataImportRequest request = new GetMetadataImportRequest
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
            };
            MetadataImport expectedResponse = new MetadataImport
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
                Description = "description2cf9da67",
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                State = MetadataImport.Types.State.Updating,
                DatabaseDump = new MetadataImport.Types.DatabaseDump(),
            };
            mockGrpcClient.Setup(x => x.GetMetadataImport(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            MetadataImport response = client.GetMetadataImport(request.MetadataImportName);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetMetadataImport via the typed MetadataImportName overload (async).
        [xunit::FactAttribute]
        public async stt::Task GetMetadataImportResourceNamesAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetMetadataImportRequest request = new GetMetadataImportRequest
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
            };
            MetadataImport expectedResponse = new MetadataImport
            {
                MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"),
                Description = "description2cf9da67",
                CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(),
                State = MetadataImport.Types.State.Updating,
                DatabaseDump = new MetadataImport.Types.DatabaseDump(),
            };
            mockGrpcClient.Setup(x => x.GetMetadataImportAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MetadataImport>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            MetadataImport responseCallSettings = await client.GetMetadataImportAsync(request.MetadataImportName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            MetadataImport responseCancellationToken = await client.GetMetadataImportAsync(request.MetadataImportName, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetBackup with the full request object (sync).
        [xunit::FactAttribute]
        public void GetBackupRequestObject()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetBackupRequest request = new GetBackupRequest
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
            };
            Backup expectedResponse = new Backup
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
                CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(),
                State = Backup.Types.State.Active,
                ServiceRevision = new Service(),
                Description = "description2cf9da67",
            };
            mockGrpcClient.Setup(x => x.GetBackup(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Backup response = client.GetBackup(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetBackup with the full request object (async).
        [xunit::FactAttribute]
        public async stt::Task GetBackupRequestObjectAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetBackupRequest request = new GetBackupRequest
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
            };
            Backup expectedResponse = new Backup
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
                CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(),
                State = Backup.Types.State.Active,
                ServiceRevision = new Service(),
                Description = "description2cf9da67",
            };
            mockGrpcClient.Setup(x => x.GetBackupAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Backup>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Backup responseCallSettings = await client.GetBackupAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Backup responseCancellationToken = await client.GetBackupAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetBackup via the flat resource-name string overload (sync).
        [xunit::FactAttribute]
        public void GetBackup()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetBackupRequest request = new GetBackupRequest
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
            };
            Backup expectedResponse = new Backup
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
                CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(),
                State = Backup.Types.State.Active,
                ServiceRevision = new Service(),
                Description = "description2cf9da67",
            };
            mockGrpcClient.Setup(x => x.GetBackup(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Backup response = client.GetBackup(request.Name);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetBackup via the flat resource-name string overload (async).
        [xunit::FactAttribute]
        public async stt::Task GetBackupAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetBackupRequest request = new GetBackupRequest
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
            };
            Backup expectedResponse = new Backup
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
                CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(),
                State = Backup.Types.State.Active,
                ServiceRevision = new Service(),
                Description = "description2cf9da67",
            };
            mockGrpcClient.Setup(x => x.GetBackupAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Backup>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Backup responseCallSettings = await client.GetBackupAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Backup responseCancellationToken = await client.GetBackupAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetBackup via the typed BackupName overload (sync).
        [xunit::FactAttribute]
        public void GetBackupResourceNames()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetBackupRequest request = new GetBackupRequest
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
            };
            Backup expectedResponse = new Backup
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
                CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(),
                State = Backup.Types.State.Active,
                ServiceRevision = new Service(),
                Description = "description2cf9da67",
            };
            mockGrpcClient.Setup(x => x.GetBackup(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Backup response = client.GetBackup(request.BackupName);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetBackup via the typed BackupName overload (async).
        [xunit::FactAttribute]
        public async stt::Task GetBackupResourceNamesAsync()
        {
            moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetBackupRequest request = new GetBackupRequest
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
            };
            Backup expectedResponse = new Backup
            {
                BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"),
                CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(),
                State = Backup.Types.State.Active,
                ServiceRevision = new Service(),
                Description = "description2cf9da67",
            };
            mockGrpcClient.Setup(x => x.GetBackupAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Backup>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null);
            Backup responseCallSettings = await client.GetBackupAsync(request.BackupName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Backup responseCancellationToken = await client.GetBackupAsync(request.BackupName, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Identity;
using OrchardCore.Modules;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Localization;
using Microsoft.Extensions.Logging;
using OrchardCore.Environment.Cache;
using OrchardCore.Roles.Models;
using OrchardCore.Security;
using OrchardCore.Security.Services;
using YesSql;

namespace OrchardCore.Roles.Services
{
    /// <summary>
    /// Role store that persists all roles inside a single <see cref="RolesDocument"/>
    /// via YesSql. The document is cached in <see cref="IMemoryCache"/> under a fixed
    /// key and invalidated through an <see cref="ISignal"/> change token.
    /// </summary>
    public class RoleStore : IRoleClaimStore<IRole>, IRoleProvider
    {
        // Single cache key for the whole roles document.
        private const string Key = "RolesManager.Roles";

        private readonly ISession _session;
        private readonly ISignal _signal;
        private readonly IMemoryCache _memoryCache;
        private readonly IServiceProvider _serviceProvider;

        public RoleStore(ISession session,
            IMemoryCache memoryCache,
            ISignal signal,
            IStringLocalizer<RoleStore> stringLocalizer,
            IServiceProvider serviceProvider,
            ILogger<RoleStore> logger)
        {
            _memoryCache = memoryCache;
            _signal = signal;
            T = stringLocalizer;
            _session = session;
            _serviceProvider = serviceProvider;
            Logger = logger;
        }

        public ILogger Logger { get; }

        // Localizer for user-facing error messages (e.g. system-role deletion).
        public IStringLocalizer<RoleStore> T;

        public void Dispose()
        {
            // Nothing to dispose: the session and cache are owned by the DI container.
        }

        /// <summary>
        /// Loads the roles document, creating and saving an empty one on first use.
        /// The cached entry expires when the <see cref="ISignal"/> token for
        /// <see cref="Key"/> is triggered.
        /// </summary>
        public Task<RolesDocument> GetRolesAsync()
        {
            return _memoryCache.GetOrCreateAsync(Key, async (entry) =>
            {
                var roles = await _session.Query<RolesDocument>().FirstOrDefaultAsync();

                if (roles == null)
                {
                    roles = new RolesDocument();
                    _session.Save(roles);
                }

                entry.ExpirationTokens.Add(_signal.GetToken(Key));

                return roles;
            });
        }

        /// <summary>
        /// Persists the document and refreshes the cache entry. The serial bump lets
        /// other components detect that the document changed.
        /// </summary>
        public void UpdateRoles(RolesDocument roles)
        {
            roles.Serial++;
            _session.Save(roles);
            _memoryCache.Set(Key, roles);
        }

        /// <summary>Returns all role names, sorted alphabetically.</summary>
        public async Task<IEnumerable<string>> GetRoleNamesAsync()
        {
            var roles = await GetRolesAsync();
            return roles.Roles.Select(x => x.RoleName).OrderBy(x => x).ToList();
        }

        #region IRoleStore<IRole>

        public async Task<IdentityResult> CreateAsync(IRole role, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            var roles = await GetRolesAsync();
            roles.Roles.Add((Role)role);
            UpdateRoles(roles);

            return IdentityResult.Success;
        }

        public async Task<IdentityResult> DeleteAsync(IRole role, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            var orchardRole = (Role)role;

            // The two built-in system roles can never be removed.
            if (String.Equals(orchardRole.NormalizedRoleName, "ANONYMOUS") ||
                String.Equals(orchardRole.NormalizedRoleName, "AUTHENTICATED"))
            {
                return IdentityResult.Failed(new IdentityError { Description = T["Can't delete system roles."] });
            }

            // Notify handlers (e.g. permission cleanup) before the role disappears.
            var roleRemovedEventHandlers = _serviceProvider.GetRequiredService<IEnumerable<IRoleRemovedEventHandler>>();
            await roleRemovedEventHandlers.InvokeAsync(x => x.RoleRemovedAsync(orchardRole.RoleName), Logger);

            var roles = await GetRolesAsync();
            roles.Roles.Remove(orchardRole);
            UpdateRoles(roles);

            return IdentityResult.Success;
        }

        public async Task<IRole> FindByIdAsync(string roleId, CancellationToken cancellationToken)
        {
            var roles = await GetRolesAsync();

            // FIX: GetRoleIdAsync returns RoleName.ToUpperInvariant() as the role id,
            // but this lookup previously compared the id against the raw RoleName, so
            // the Identity round-trip (GetRoleIdAsync -> FindByIdAsync) failed for any
            // non-uppercase role name. An ordinal case-insensitive comparison matches
            // both the upper-cased id and the exact name, so existing callers that
            // passed the raw RoleName keep working.
            var role = roles.Roles.FirstOrDefault(x => String.Equals(x.RoleName, roleId, StringComparison.OrdinalIgnoreCase));

            return role;
        }

        public async Task<IRole> FindByNameAsync(string normalizedRoleName, CancellationToken cancellationToken)
        {
            var roles = await GetRolesAsync();
            var role = roles.Roles.FirstOrDefault(x => x.NormalizedRoleName == normalizedRoleName);

            return role;
        }

        public Task<string> GetNormalizedRoleNameAsync(IRole role, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            return Task.FromResult(((Role)role).NormalizedRoleName);
        }

        public Task<string> GetRoleIdAsync(IRole role, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            // The upper-cased role name doubles as the role id; FindByIdAsync must
            // stay consistent with this.
            return Task.FromResult(role.RoleName.ToUpperInvariant());
        }

        public Task<string> GetRoleNameAsync(IRole role, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            return Task.FromResult(role.RoleName);
        }

        public Task SetNormalizedRoleNameAsync(IRole role, string normalizedName, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            ((Role)role).NormalizedRoleName = normalizedName;

            return Task.CompletedTask;
        }

        public Task SetRoleNameAsync(IRole role, string roleName, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            ((Role)role).RoleName = roleName;

            return Task.CompletedTask;
        }

        public async Task<IdentityResult> UpdateAsync(IRole role, CancellationToken cancellationToken)
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            var roles = await GetRolesAsync();

            // Replace by name: remove the stored instance (List<T>.Remove(null) is a
            // harmless no-op when the role wasn't found) and add the updated one.
            var existingRole = roles.Roles.FirstOrDefault(x => x.RoleName == role.RoleName);
            roles.Roles.Remove(existingRole);
            roles.Roles.Add((Role)role);

            UpdateRoles(roles);

            return IdentityResult.Success;
        }

        #endregion

        #region IRoleClaimStore<IRole>

        public Task AddClaimAsync(IRole role, Claim claim, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            if (claim == null)
            {
                throw new ArgumentNullException(nameof(claim));
            }

            // NOTE(review): mutates the in-memory role only; the caller is expected to
            // persist via UpdateAsync afterwards.
            ((Role)role).RoleClaims.Add(new RoleClaim { ClaimType = claim.Type, ClaimValue = claim.Value });

            return Task.CompletedTask;
        }

        public Task<IList<Claim>> GetClaimsAsync(IRole role, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            return Task.FromResult<IList<Claim>>(((Role)role).RoleClaims.Select(x => x.ToClaim()).ToList());
        }

        public Task RemoveClaimAsync(IRole role, Claim claim, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (role == null)
            {
                throw new ArgumentNullException(nameof(role));
            }

            if (claim == null)
            {
                throw new ArgumentNullException(nameof(claim));
            }

            // Removes every stored claim whose type AND value both match.
            ((Role)role).RoleClaims.RemoveAll(x => x.ClaimType == claim.Type && x.ClaimValue == claim.Value);

            return Task.CompletedTask;
        }

        #endregion
    }
}